File size: 455 Bytes
b34eea1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19

  # mergekit merge configuration: combines two 7B models layer-by-layer.
  # NOTE(review): the root mapping is indented by two spaces — valid YAML
  # (the whole document is simply indented), likely because this was
  # extracted from a fenced code block; left as-is to match the source.
  slices:
    - sources:
        # Both source models contribute the same layer span [0, 32).
        - model: CorticalStack/pastiche-crown-clown-7b-dare-dpo
          layer_range: [0, 32]
        - model: Equall/Saul-Instruct-v1
          layer_range: [0, 32]
  # Spherical linear interpolation between the two sources.
  merge_method: slerp
  # Base model: per mergekit docs, supplies the config/tokenizer scaffold
  # and is the t=0 endpoint of the interpolation.
  base_model: CorticalStack/pastiche-crown-clown-7b-dare-dpo
  parameters:
    # t: interpolation factor. Per mergekit convention, a list value is
    # spread as a gradient across the layer stack; scalar applies uniformly.
    t:
      # Self-attention tensors: favors the base model in early layers,
      # shifting toward the second model in later layers.
      - filter: self_attn
        value: [0, 0.5, 0.3, 0.7, 1]
      # MLP tensors: the mirrored schedule of the self_attn gradient.
      - filter: mlp
        value: [1, 0.5, 0.7, 0.3, 0]
      # Fallback for all tensors not matched by a filter: even 50/50 blend.
      - value: 0.5
  # Precision of the merged output tensors.
  dtype: bfloat16