Meta-Llama-3-8B-InitializedEmbeds / mergekit_config.yml
# Linear merge that keeps the base model's weights (weight 1.0 vs. 0.0) while
# taking the tokenizer, and the embeddings of the chat-template special tokens,
# from the Instruct model.
merge_method: linear
dtype: float32        # dtype used for the merge computation
out_dtype: bfloat16   # dtype of the saved merged weights
models:
  - model: NousResearch/Meta-Llama-3-8B
    parameters:
      weight: 1.0     # keep the base model's weights unchanged
  - model: NousResearch/Meta-Llama-3-8B-Instruct
    parameters:
      weight: 0.0     # contributes no weights; used only for the tokenizer/token embeddings below
tokenizer:
  source: NousResearch/Meta-Llama-3-8B-Instruct   # use the Instruct tokenizer
  tokens:
    # Force the embedding rows for these special tokens to come from the
    # Instruct model rather than the base model.
    <|start_header_id|>:
      source: NousResearch/Meta-Llama-3-8B-Instruct
      force: true
    <|end_header_id|>:
      source: NousResearch/Meta-Llama-3-8B-Instruct
      force: true
    <|eot_id|>:
      source: NousResearch/Meta-Llama-3-8B-Instruct
      force: true
    <|end_of_text|>:
      source: NousResearch/Meta-Llama-3-8B-Instruct
      force: true
    <|begin_of_text|>:
      source: NousResearch/Meta-Llama-3-8B-Instruct
      force: true
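
# Usage note: to produce the merged checkpoint from this config, the sketch
# below uses mergekit's documented Python API (run_merge / MergeOptions); the
# output directory and option values are illustrative assumptions, and exact
# names may differ between mergekit versions. The equivalent CLI call is
# `mergekit-yaml mergekit_config.yml <output-dir> --copy-tokenizer`.
#
#   import yaml
#
#   from mergekit.config import MergeConfiguration
#   from mergekit.merge import MergeOptions, run_merge
#
#   # Load this config file into mergekit's pydantic config model.
#   with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
#       merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))
#
#   # Run the merge and write the result (weights + tokenizer) to the
#   # assumed output directory.
#   run_merge(
#       merge_config,
#       "./Meta-Llama-3-8B-InitializedEmbeds",  # hypothetical output path
#       options=MergeOptions(copy_tokenizer=True),
#   )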