reflect_llama8B_om2-mixed-t0-mstlrg-300k460k-t12_llama33-130k-t12_sft-t1_lr1e-6/last_checkpoint/special_tokens_map.json
{
  "bos_token": {
    "content": "<|begin_of_text|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|eot_id|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "[PAD]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
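
For reference, a minimal sketch of how this file is consumed, assuming the checkpoint directory above is available locally under "last_checkpoint" (that local path is an assumption). When a tokenizer is loaded from the directory, transformers reads special_tokens_map.json to populate the special-token attributes; note that "[PAD]" is not part of the stock Llama 3 vocabulary, so it was presumably added as an extra special token during fine-tuning.

    # Minimal sketch, assuming the checkpoint directory exists locally.
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("last_checkpoint")  # hypothetical local path

    # These attributes are populated from special_tokens_map.json:
    print(tokenizer.bos_token)  # "<|begin_of_text|>"
    print(tokenizer.eos_token)  # "<|eot_id|>"
    print(tokenizer.pad_token)  # "[PAD]"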