---
# mergekit configuration: DARE-TIES merge of three Llama-3.1-8B fine-tunes
# onto the Llama-3.1-8B-Instruct base. Weights sum to 1.0 across the three
# weighted sources; density 0.7 keeps 70% of each delta before rescaling.
base_model: meta-llama/Llama-3.1-8B-Instruct
dtype: bfloat16
merge_method: dare_ties
parameters:
  # Quantize the sign mask to int8 to reduce memory during the merge.
  int8_mask: 1.0
slices:
  - sources:
      # All sources cover the full 32-layer stack of the 8B architecture.
      - layer_range: [0, 32]
        model: akjindal53244/Llama-3.1-Storm-8B
        parameters:
          density: 0.7
          weight: 0.2
      - layer_range: [0, 32]
        model: arcee-ai/Llama-3.1-SuperNova-Lite
        parameters:
          density: 0.7
          weight: 0.3
      - layer_range: [0, 32]
        model: Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2
        parameters:
          density: 0.7
          weight: 0.5
      # Base model listed as a source so dare_ties computes task vectors
      # relative to it; it carries no density/weight of its own.
      - layer_range: [0, 32]
        model: meta-llama/Llama-3.1-8B-Instruct
tokenizer_source: meta-llama/Llama-3.1-8B-Instruct