LeroyDyer committed on
Commit 64ed4f9
1 Parent(s): f94c324

Delete mergekit_config.yml

Files changed (1)
  1. mergekit_config.yml +0 -26
mergekit_config.yml DELETED
@@ -1,26 +0,0 @@
-
- slices:
-   - sources:
-       - model: LeroyDyer/Mixtral_AI_CyberBrain_2.0
-         layer_range: [0, 32]
-       - model: ezelikman/quietstar-8-ahead
-         layer_range: [0, 32]
- # or, the equivalent 'models:' syntax:
- # models:
- #   - model: mistralai/Mistral-7B-Instruct-v0.2
- # LARGER MODEL MUST BE BASE, or
- # BASE MODEL MUST BE THE TOKENIZER YOU WISH TO ADOPT,
- # so for models with customized processes they must be the base model.
- # If the base model has remote code then this must be collected and added
- # to the repo afterwards and the config file adjusted to allow for automapping to your new repo.
- #   - model: yanismiraoui/Yarn-Mistral-7b-128k-sharded
- merge_method: slerp
- base_model: ezelikman/quietstar-8-ahead
- parameters:
-   t:
-     - filter: self_attn
-       value: [0.3, 0.6, 0.3786, 0.6, 0.6]
-     - filter: mlp
-       value: [0.7, 0.4, 0.6, 0.4, 0.7]
-     - value: 0.5  # fallback for rest of tensors
- dtype: float16