# Marcoroni-7b-DPO-Merge / mergekit_config.yml
base_model: madatnlp/marcoroni-7b-v3-safetensor
dtype: float16
merge_method: ties
parameters:
  normalize: 1.0
slices:
- sources:
  - layer_range: [0, 32]
    model: madatnlp/marcoroni-7b-v3-safetensor
  - layer_range: [0, 32]
    model: fblgit/UNA-TheBeagle-7b-v1
    parameters:
      density: 0.3
      weight: 0.5
  - layer_range: [0, 32]
    model: udkai/Turdus
    parameters:
      density: 0.7
      weight: 0.3
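
This is a mergekit TIES-merge configuration: it merges fblgit/UNA-TheBeagle-7b-v1 (density 0.3, weight 0.5) and udkai/Turdus (density 0.7, weight 0.3) into the base model madatnlp/marcoroni-7b-v3-safetensor over layers 0-32, in float16 with normalized merge parameters. Below is a minimal sketch of how a config like this is typically applied; it assumes mergekit's Python API (MergeConfiguration, MergeOptions, run_merge from https://github.com/arcee-ai/mergekit), and the output path and option values are illustrative rather than taken from this repo. The equivalent CLI would be along the lines of mergekit-yaml mergekit_config.yml followed by an output directory.

# Minimal sketch (not part of this repo): apply the config above with
# mergekit's Python API. Paths and option values are illustrative.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse and validate the YAML merge configuration.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the TIES merge and write the merged model to a local directory.
run_merge(
    merge_config,
    out_path="./Marcoroni-7b-DPO-Merge",  # hypothetical output path
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)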