```yaml
base_model: ArliAI/Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.04
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
models:
  - model: ArliAI/Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: gghfez/SeminalRP-22b
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: anthracite-org/magnum-v4-22b
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: TheDrummer/Cydonia-22B-v1.1
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: nbeerbower/Mistral-Small-Drummer-22B
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
```
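If you want to reproduce the merge, a minimal sketch of running this config through mergekit's Python API is shown below. It assumes mergekit is installed and the YAML above is saved as `della_linear.yaml`; the output path is a placeholder, and option names may differ slightly between mergekit versions, so treat this as an illustrative sketch rather than the exact invocation used for this model (the CLI route would be `mergekit-yaml della_linear.yaml ./output-dir --cuda`).

```python
# Sketch: run the della_linear merge config above with mergekit's Python API.
# Assumes `pip install mergekit` and that the YAML is saved as "della_linear.yaml".
# Paths and MergeOptions values are placeholders, not the settings used for this card.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML merge recipe into mergekit's config object.
with open("della_linear.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge; models are downloaded from the Hub as needed.
run_merge(
    merge_config,
    out_path="./merged-22b",  # placeholder output directory
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU for tensor ops if available
        copy_tokenizer=True,             # write tokenizer files to the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```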