slices:
  - sources:
      - model: LeroyDyer/Mixtral_AI_CyberBrain_2.0
        layer_range: [0, 32]
      - model: ezelikman/quietstar-8-ahead
        layer_range: [0, 32]

# Or, the equivalent "models:" syntax:
# models:
#   - model: mistralai/Mistral-7B-Instruct-v0.2
#   - model: yanismiraoui/Yarn-Mistral-7b-128k-sharded
#
# The LARGER model must be the base, or the base model must be the one whose
# tokenizer you wish to adopt, so models with customized processing must be
# the base model. If the base model has remote code, that code must be
# collected and added to the new repo afterwards, and the config file
# adjusted to allow automapping to your new repo.

merge_method: slerp
base_model: ezelikman/quietstar-8-ahead
parameters:
  t:
    - filter: self_attn
      value: [0.3, 0.6, 0.3786, 0.6, 0.6]
    - filter: mlp
      value: [0.7, 0.4, 0.6, 0.4, 0.7]
    - value: 0.5 # fallback for rest of tensors
dtype: float16
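A minimal sketch of running this merge with the mergekit CLI, assuming mergekit is installed from pip and the configuration above is saved as config.yaml; the output directory name ./merged_model is a placeholder, not part of the original config:

    # Install mergekit, then run the merge described by config.yaml.
    pip install mergekit
    # --cuda computes merged tensors on the GPU; omit it on CPU-only machines.
    mergekit-yaml config.yaml ./merged_model --cuda

Because the base model here (ezelikman/quietstar-8-ahead) ships remote code, remember that loading the merged result with transformers will require trust_remote_code=True once that code has been copied into your new repo, as the comments above describe.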