Pantheon-Stheno-v1.1 / mergekit_config.yml
slices:
  - sources:
      - layer_range: [0, 16]
        model: Sao10K/L3-8B-Stheno-v3.2
        parameters:
          density: 0.5   # keep roughly half of this model's delta weights, drop the rest (DARE)
          weight: 1.0    # Stheno contributes most strongly in the first 16 layers
      - layer_range: [0, 16]
        model: Gryphe/Pantheon-RP-1.0-8b-Llama-3
        parameters:
          density: 0.5
          weight: 0.9
  - sources:
      - layer_range: [16, 32]
        model: Sao10K/L3-8B-Stheno-v3.2
        parameters:
          density: 0.5
          weight: 0.9
      - layer_range: [16, 32]
        model: Gryphe/Pantheon-RP-1.0-8b-Llama-3
        parameters:
          density: 0.5
          weight: 1.0    # weighting flips: Pantheon dominates the last 16 layers
merge_method: dare_ties              # DARE random pruning combined with TIES sign consensus
tokenizer_source: base               # take the tokenizer from base_model
base_model: Gryphe/Pantheon-RP-1.0-8b-Llama-3
parameters:
  int8_mask: true                    # store intermediate masks as int8 to save memory
dtype: bfloat16
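
For reference, a minimal sketch of running this config through mergekit's Python API; the CLI equivalent is mergekit-yaml mergekit_config.yml ./merged. The output path ./merged and the MergeOptions values below are illustrative assumptions, not part of this repository.

import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML above into mergekit's validated configuration object.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the DARE-TIES merge and write the merged model to ./merged (assumed path).
run_merge(
    merge_config,
    out_path="./merged",
    options=MergeOptions(
        cuda=False,           # set True to run tensor math on GPU
        copy_tokenizer=True,  # honor tokenizer_source: base by copying the base tokenizer
        lazy_unpickle=True,   # lazy shard loading to reduce peak memory
    ),
)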