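# mergekit SLERP merge configuration: spherically interpolates liminerity/M7-7b
# and AurelPx/Percival_01-7b-slerp over layers 0-32, with liminerity/M7-7b as the base.
# Per-filter interpolation weights (t) are given for self_attn and mlp tensors,
# with a fallback t value for all remaining tensors.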
slices:
  - sources:
      - model: liminerity/M7-7b
        layer_range: [0, 32]
      - model: AurelPx/Percival_01-7b-slerp
        layer_range: [0, 32]
merge_method: slerp
base_model: liminerity/M7-7b
parameters:
  t:
    - filter: self_attn
      value: [0.5133343111685227, 0.3507301888180264, 0.3846749585340087, 0.9433114016244468, 0.9348421992124867]
    - filter: mlp
      value: [0.4866656888314773, 0.6492698111819736, 0.05668859837555318, 0.05668859837555318, 0.06515780078751332]
    - value: 0.2528714899743384
dtype: bfloat16
random_seed: 0
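# Typical usage (a sketch, assuming mergekit is installed and this file is saved
# as config.yml; the output directory name is illustrative):
#   mergekit-yaml config.yml ./merged-model --cuda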