---
# mergekit configuration: DARE-TIES merge of four fine-tuned Mistral-7B models
# onto the Mistral-7B-v0.1 base. Each non-base model contributes with the given
# interpolation `weight`; `density` is the fraction of delta parameters retained
# by DARE sparsification before merging.
models:
  # no parameters necessary for base model
  - model: /media/data5/hf_models/Mistral-7B-v0.1
  - model: /media/data5/hf_models/Mistral-7B-Merge-14-v0.3
    parameters:
      weight: 0.3
      density: 0.5
  - model: /media/data5/hf_models/OpenHermes-2.5-neural-chat-v3-3-openchat-3.5-1210-Slerp
    parameters:
      weight: 0.2
      density: 0.5
  - model: /media/data5/hf_models/openchat-3.5-0106
    parameters:
      weight: 0.2
      density: 0.5
  - model: /media/data5/hf_models/NeuralMarcoro14-7B
    parameters:
      weight: 0.3
      density: 0.5

merge_method: dare_ties
base_model: /media/data5/hf_models/Mistral-7B-v0.1
parameters:
  # int8_mask reduces memory use when computing the TIES sign-consensus masks
  int8_mask: true
tokenizer_source: union
dtype: bfloat16