models:
  - model: MaziyarPanahi/Calme-7B-Instruct-v0.9
    parameters:
      density: 0.53
      weight: 0.33333333
  - model: cognitivecomputations/dolphin-2.8-mistral-7b-v02
    parameters:
      density: 0.53
      weight: 0.33333333
  - model: Weyaxi/OpenHermes-2.5-neural-chat-v3-3-Slerp
    parameters:
      density: 0.53
      weight: 0.33333333
merge_method: dare_ties
base_model: amazingvince/Not-WizardLM-2-7B
parameters:
  normalize: false
  int8_mask: true
dtype: float16
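
To reproduce a DARE-TIES merge like this one, the configuration above can be saved to a YAML file and passed to mergekit. Below is a minimal sketch using mergekit's Python entry points (`MergeConfiguration`, `MergeOptions`, `run_merge`); the config path, output directory, and option values are placeholders, not part of the original recipe.

```python
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "./dare_ties_config.yml"  # placeholder: path to the YAML config above
OUTPUT_PATH = "./merged-model"         # placeholder: directory for the merged weights

# Parse the YAML merge recipe into mergekit's configuration object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the merge; options shown here are illustrative defaults.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use GPU if one is available
        copy_tokenizer=True,             # copy a tokenizer into the output directory
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

The merged model is written to the output directory in standard Hugging Face format and can then be loaded with `transformers` as usual.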