slices:
  - sources:
      - model: liminerity/M7-7b
        layer_range: [0, 32]
      - model: AurelPx/Percival_01-7b-slerp
        layer_range: [0, 32]
merge_method: slerp
base_model: liminerity/M7-7b
parameters:
  t:
    - filter: self_attn
      value: [0.06789542432736717, 0.9729308438600228, 0.01727158519231453, 0.7900292369855523, 0.35194811225280676]
    - filter: mlp
      value: [0.9321045756726328, 0.02706915613997718, 0.20997076301444773, 0.20997076301444773, 0.6480518877471932]
    - value: 0.7998030767798668
dtype: bfloat16
random_seed: 0
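
# Usage sketch (assumptions: this file is saved as config.yaml, mergekit is
# installed via `pip install mergekit`, and ./merged-model is a hypothetical
# output directory):
#   mergekit-yaml config.yaml ./merged-model
# For the slerp method, each `t` entry gives interpolation weights toward the
# non-base model (AurelPx/Percival_01-7b-slerp) for tensors matching its filter
# (self_attn, mlp), interpolated across the layer range; the bare `value` entry
# is the fallback t for all remaining tensors.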