base_model: mistralai/Mistral-7B-v0.1
models:
  - model: mlabonne/NeuralHermes-2.5-Mistral-7B
    parameters:
      density: 0.63
      weight: 0.83
  - model: Intel/neural-chat-7b-v3-3
    parameters:
      density: 0.63
      weight: 0.74
  - model: meta-math/MetaMath-Mistral-7B
    parameters:
      density: 0.63
      weight: 0.22
  - model: openchat/openchat-3.5-0106
    parameters:
      density: 0.63
      weight: 0.37
  - model: Open-Orca/Mistral-7B-OpenOrca
    parameters:
      density: 0.63
      weight: 0.76
  - model: cognitivecomputations/dolphin-2.2.1-mistral-7b
    parameters:
      density: 0.63
      weight: 0.69
  - model: viethq188/LeoScorpius-7B-Chat-DPO
    parameters:
      density: 0.63
      weight: 0.38
  - model: GreenNode/GreenNode-mini-7B-multilingual-v1olet
    parameters:
      density: 0.63
      weight: 0.13
  - model: berkeley-nest/Starling-LM-7B-alpha
    parameters:
      density: 0.63
      weight: 0.33
merge_method: dare_ties
parameters:
  normalize: true
  int8_mask: true
dtype: bfloat16
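
# A minimal sketch of how one might run this DARE-TIES merge with mergekit's
# CLI. The output directory name is illustrative, and exact flag availability
# may vary by mergekit version (check `mergekit-yaml --help`); assumes mergekit
# is installed and this file is saved as config.yaml:
#
#   pip install mergekit
#   mergekit-yaml config.yaml ./merged-model --copy-tokenizer --lazy-unpickle
#
# Each source model contributes deltas relative to base_model
# (Mistral-7B-v0.1); `density` keeps ~63% of each delta's parameters and
# `weight` scales that model's contribution before the normalized sum.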