---
# mergekit configuration: TIES merge of three Mistral-7B-family models.
# Each entry keeps `density` (fraction of delta weights retained after
# TIES trimming) and `weight` (contribution to the merged model).
models:
  - model: mlabonne/AlphaMonarch-7B
    parameters:
      density: 0.8
      weight: 0.33
  - model: mistralai/Mistral-7B-Instruct-v0.2
    parameters:
      density: 0.8
      weight: 0.33
  - model: Kukedlc/NeuralMaths-Experiment-7b
    parameters:
      density: 0.7
      weight: 0.33

# TIES resolves sign conflicts between task vectors before merging.
merge_method: ties
# Deltas are computed relative to this base model.
base_model: mistralai/Mistral-7B-Instruct-v0.2
parameters:
  # Use an int8 mask during merging to reduce memory usage.
  int8_mask: true
# Output tensor precision.
dtype: bfloat16