# mergekit configuration: TIES merge of three Mistral-7B fine-tunes
# onto the mistralai/Mistral-7B-v0.1 base, output in bfloat16.
# (Reconstructed: the original had markdown-table residue "| |" on every
# line and stripped indentation, which made it invalid YAML.)
models:
  - model: FelixChao/WestSeverus-7B-DPO-v2
    parameters:
      density: [1, 0.7, 0.1]
      weight: [0, 0.3, 0.7, 1]
  - model: jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B
    parameters:
      density: [1, 0.7, 0.3]
      weight: [0, 0.25, 0.5, 1]
  - model: mlabonne/Daredevil-7B
    parameters:
      density: 0.33
      weight:
        - filter: mlp
          value: [0.35, 0.65]
        - value: 0
merge_method: ties
base_model: mistralai/Mistral-7B-v0.1
parameters:
  int8_mask: true
  normalize: true
  # NOTE(review): 't' is the interpolation parameter used by slerp-style
  # merge methods; the ties method documents weight/density instead.
  # Kept verbatim from the original — confirm it is honored under ties.
  t:
    - filter: lm_head
      value: [0.55]
    - filter: embed_tokens
      value: [0.7]
    - filter: self_attn
      value: [0.65, 0.35]
    - filter: mlp
      value: [0.35, 0.65]
    - filter: layernorm
      value: [0.4, 0.6]
    - filter: modelnorm
      value: [0.6]
    - value: 0.5 # fallback for rest of tensors
dtype: bfloat16