---
# mergekit configuration: TIES merge of two Llama-2-7B chat variants.
# Per-model `density` / `weight` lists define gradients across layer groups;
# scalar values apply uniformly to all layers.
models:
  - model: georgesung/llama2_7b_chat_uncensored
    parameters:
      density: [1, 0.7, 0.1]  # density gradient
      weight: 1.0
  - model: NousResearch/Llama-2-7b-chat-hf
    parameters:
      density: 0.5
      weight: [0, 0.3, 0.7, 1]  # weight gradient
merge_method: ties
# Base model supplies the reference weights that TIES task vectors diff against.
base_model: NousResearch/Llama-2-7b-chat-hf
parameters:
  normalize: true
  int8_mask: true
dtype: bfloat16