L3.1-Siithamo-v0.1-8B / mergekit_config.yml
models:
  - model: ArliAI/ArliAI-Llama-3-8B-Formax-v1.0
    parameters:
      weight: [0.5, 0.3, 0.2, 0.1]   # per-layer gradient: Formax's contribution tapers off toward the final layers
  - model: parts/siitamol3.1
    parameters:
      weight: [0.5, 0.7, 0.8, 1]     # per-layer gradient: siitamol3.1 dominates toward the final layers
base_model: parts/siitamol3.1
parameters:
  normalize: false
  int8_mask: true
merge_method: dare_linear
dtype: float32
out_dtype: bfloat16
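
For reference, a minimal sketch of how a config like this is typically applied with mergekit's Python API (interface as shown in the mergekit README; the output directory and option values below are hypothetical placeholders):

import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load and validate this YAML config.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the dare_linear merge and write the merged model to an output directory.
run_merge(
    config,
    "./L3.1-Siithamo-v0.1-8B",   # hypothetical output path
    options=MergeOptions(
        cuda=False,              # set True to merge on GPU
        copy_tokenizer=True,
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)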