---
# mergekit slerp merge: PsyMedRP-v1-20B + DarkForest-20B-v2.0
slices:
  - sources:
      - model: Undi95/PsyMedRP-v1-20B
        layer_range: [0, 62]  # PsyMedRP has 62 layers
      - model: TeeZee/DarkForest-20B-v2.0
        layer_range: [0, 62]  # DarkForest-20B has 62 layers; both ranges must match for slerp
merge_method: slerp  # spherical interpolation; for a plain weighted average use 'linear' instead
base_model: Undi95/PsyMedRP-v1-20B  # slerp requires a base_model; either source model works here
parameters:
  # t is the interpolation factor (0 = base_model weights, 1 = the other model).
  # A list value is spread as a gradient across the layer depth.
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]  # gradient for attention tensors — tune for desired effect
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]  # inverse gradient for MLP tensors
    - value: 0.5  # default t for all tensors not matched by a filter above
dtype: bfloat16  # output dtype; fp16 or float32 also valid if preferred