---
# mergekit configuration: DARE-TIES merge of two Mistral-24B fine-tunes.
# Dolphin3.0 serves as the base model; Napoleon is blended in.
# NOTE: removed the trailing "| |" artifacts (extraction residue) that made
# every line invalid YAML and corrupted the scalar values.
models:
  - model: cognitivecomputations/Dolphin3.0-Mistral-24B
    parameters:
      density: 0.4  # fraction of delta weights retained (DARE pruning)
      weight: 0.6   # merge weight; weights across models sum to 1.0
  - model: baconnier/Napoleon_24B_V0.0
    parameters:
      density: 0.4
      weight: 0.4
merge_method: dare_ties
base_model: cognitivecomputations/Dolphin3.0-Mistral-24B
parameters:
  int8_mask: true   # compute task-vector masks in int8 to save memory
  normalize: true   # renormalize merge weights after sparsification
# dtype must be a top-level key, not nested under parameters
dtype: bfloat16  # valid options: float16 | bfloat16 | float32
tokenizer_source: baconnier/Napoleon_24B_V0.0