meged_intent_slot_llama3_8 / mergekit_config.yml
mjm4dl's picture
Upload folder using huggingface_hub
eb5409f verified
raw
history blame contribute delete
343 Bytes
---
# mergekit configuration: TIES-merge of two fine-tuned checkpoints onto a
# shared base model, producing a single merged model in float16.
models:
  # Slot-filling / intent-classification fine-tune.
  - model: mjm4dl/merge_model_slot_filling_intent_cl
    parameters:
      density: 0.4   # fraction of this model's task-vector parameters retained
      weight: 0.6    # relative contribution of this model in the merge
  # Second fine-tune built on Meta-Llama-3-8B-Instruct.
  - model: mjm4dl/model_XY_llama3_Meta-Llama-3-8B-Instruct_1_128_4
    parameters:
      density: 0.33
      weight: 0.4
merge_method: ties
# NOTE(review): both merged checkpoints appear to be Llama-3 (Meta-Llama-3-8B)
# derivatives while the base below is Llama-3.1-8B-Instruct — confirm the
# architectures/tokenizers are actually compatible for a TIES merge.
base_model: meta-llama/Llama-3.1-8B-Instruct
parameters:
  normalize: true   # rescale summed task-vector weights to sum to 1
dtype: float16