slices:
  - sources:
      - model: mistralai/Mistral-7B-v0.1
        layer_range: [0, 32]
      - model: HuggingFaceH4/zephyr-7b-alpha
        layer_range: [0, 32]
        parameters:
          density: 0.53
          weight: 0.4
      - model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo-laser
        layer_range: [0, 32]
        parameters:
          density: 0.53
          weight: 0.4
merge_method: dare_linear
base_model: mistralai/Mistral-7B-v0.1
parameters:
  int8_mask: true
dtype: bfloat16
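
A minimal sketch of how a config like this could be run with mergekit's `mergekit-yaml` command, assuming mergekit is installed and the YAML above is saved as `merge-config.yml`; the file name, output path, and use of `--cuda` are illustrative choices, not part of the original config.

```python
# Sketch: invoke mergekit's CLI on the config above (paths are assumptions).
import subprocess

subprocess.run(
    [
        "mergekit-yaml",     # mergekit's CLI entry point
        "merge-config.yml",  # the dare_linear config shown above
        "./merged-model",    # output directory for the merged weights
        "--cuda",            # optional: run the merge on GPU if available
    ],
    check=True,
)
```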