models:
  - model: /home/azureuser/weights3/nemo/NeMo/cybertron/models/Meta-Llama-3.1-8B-Instruct
    parameters:
      density: 0.53
      weight: 0.25
  - model: /home/azureuser/weights2/sft/sft-pt_from_fineweb_V2_2_77b_14100_llama31/checkpoint-264
    parameters:
      density: 0.53
      weight: 0.75
merge_method: dare_ties
base_model: /home/azureuser/weights/meta-llama/Meta-Llama-3.1-8B
parameters:
  int8_mask: true
dtype: bfloat16
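# Usage sketch (not part of the merge configuration): the schema above matches a
# mergekit DARE-TIES config, so assuming this file is saved as merge.yml and the
# mergekit CLI is installed, the merge could be produced with something like:
#
#   mergekit-yaml merge.yml ./merged-model --cuda --copy-tokenizer
#
# The file name merge.yml, the output path ./merged-model, and the --cuda /
# --copy-tokenizer flags are assumptions about the tooling; adjust them to
# whatever tool actually consumes this config in your environment.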