---
# mergekit merge configuration: TIES merge of eight Llama-3-8B fine-tunes.
# Reconstructed from a whitespace-collapsed source; all keys/values preserved.
models:
  # NOTE(review): the base model is listed here without parameters as well as
  # under base_model below — with TIES the base serves as the reference for
  # task vectors; confirm the parameter-less entry is intentional.
  - model: Sao10K/L3-8B-Lunaris-v1
  # Higher-weight (0.1), lower-density (0.5) contributors:
  - model: FPHam/L3-8B-Everything-COT
    parameters:
      density: 0.5
      weight: 0.1
  - model: Ayush-1722/Meta-Llama-3-8B-Instruct-Summarize-v0.2-24K-LoRANET-Merged
    parameters:
      density: 0.5
      weight: 0.1
  - model: OEvortex/Emotional-llama-8B
    parameters:
      density: 0.5
      weight: 0.1
  # Lower-weight (0.05), higher-density (0.75) contributors:
  - model: ChaoticNeutrals/Domain-Fusion-L3-8B
    parameters:
      density: 0.75
      weight: 0.05
  - model: nothingiisreal/L3-8B-Celeste-V1.2
    parameters:
      density: 0.75
      weight: 0.05
  - model: Orenguteng/Llama-3-8B-Lexi-Uncensored
    parameters:
      density: 0.75
      weight: 0.05
  - model: Sao10K/L3-8B-Niitama-v1
    parameters:
      density: 0.75
      weight: 0.05
# Reference model whose weights anchor the TIES task-vector arithmetic.
base_model: Sao10K/L3-8B-Lunaris-v1
merge_method: ties
parameters:
  # Normalize merged task-vector weights (mergekit ties option).
  normalize: true
dtype: bfloat16