---
models:
- model: BEE-spoke-data/TinyLlama-1.1bee
parameters:
density: 0.33
weight: 0.50
- model: raidhon/coven_tiny_1.1b_32k_orpo_alpha
parameters:
density: 0.36
weight: 0.40
- model: ShieldX/manovyadh-1.1B-v1-chat
parameters:
density: 0.33
weight: 0.30
- model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
parameters:
density: 0.40
weight: 0.45
- model: matlok/tinyllama-cinder-openhermes-32k
parameters:
density: 0.32
weight: 0.26
- model: microsoft/rho-math-1b-interpreter-v0.1
parameters:
density: 0.38
weight: 0.35
merge_method: dare_ties
base_model: appvoid/palmer-003
parameters:
normalize: true
int8_mask: true
dtype: float16