models:
  - model: vihangd/DopeyTinyLlama-1.1B-v1
    parameters:
      density: 0.25
      weight: 0.33
  - model: raidhon/coven_tiny_1.1b_32k_orpo_alpha
    parameters:
      density: 0.40
      weight: 0.50
  - model: l3utterfly/tinyllama-1.1b-layla-v4
    parameters:
      density: 0.28
      weight: 0.20
  - model: ShieldX/manovyadh-1.1B-v1-chat
    parameters:
      density: 0.33
      weight: 0.30
  - model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
    parameters:
      density: 0.30
      weight: 0.45
  - model: AIGym/TinyLlama-1.1B-2.5T-chat-and-function-calling
    parameters:
      density: 0.32
      weight: 0.26
  - model: microsoft/rho-math-1b-interpreter-v0.1
    parameters:
      density: 0.28
      weight: 0.35
merge_method: task_arithmetic
base_model: appvoid/palmer-003
parameters:
  normalize: false
  int8_mask: true
dtype: float16