# mergekit merge configuration (348 bytes, commit 5c784d6)
---
# Base model providing the tokenizer/config and the task-vector reference
# for the task_swapping merge.
base_model:
  model:
    path: NousResearch/Yarn-Mistral-7b-128k
# Output weights are stored in bfloat16.
dtype: bfloat16
merge_method: task_swapping
slices:
  - sources:
      # Donor model: all 32 transformer layers, weighted at 0.666.
      - layer_range: [0, 32]
        model:
          model:
            path: senseable/WestLake-7B-v2
        parameters:
          weight: 0.666
      # Base model contributes the same layer span (implicit remaining weight).
      - layer_range: [0, 32]
        model:
          model:
            path: NousResearch/Yarn-Mistral-7b-128k