# mergekit merge configuration.
# Merges six TinyLlama-family 1.1B models onto the appvoid/palmer-003 base
# using the DARE-TIES method. Per-model `density` controls the fraction of
# delta weights retained; `weight` controls each model's contribution to the
# merged result. (NOTE(review): original file had markdown-table extraction
# artifacts "| |" on every line and lost indentation — reconstructed here.)
models:
  - model: BEE-spoke-data/TinyLlama-1.1bee
    parameters:
      density: 0.33
      weight: 0.50
  - model: raidhon/coven_tiny_1.1b_32k_orpo_alpha
    parameters:
      density: 0.36
      weight: 0.40
  - model: ShieldX/manovyadh-1.1B-v1-chat
    parameters:
      density: 0.33
      weight: 0.30
  - model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
    parameters:
      density: 0.40
      weight: 0.45
  - model: matlok/tinyllama-cinder-openhermes-32k
    parameters:
      density: 0.32
      weight: 0.26
  - model: microsoft/rho-math-1b-interpreter-v0.1
    parameters:
      density: 0.38
      weight: 0.35
merge_method: dare_ties
base_model: appvoid/palmer-003
parameters:
  normalize: true   # rescale merged weights so contributions sum consistently
  int8_mask: true   # use int8 masks during merging to reduce memory use
dtype: float16