Llama3_pubmedqa_finetune / special_tokens_map.json
{
"bos_token": {
"content": "<|begin_of_text|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"eos_token": {
"content": "<|end_of_text|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"pad_token": "<|begin_of_text|>"
}
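
This file tells the tokenizer which strings serve as the beginning-of-sequence, end-of-sequence, and padding tokens; note that the pad token reuses the BOS string. As a minimal sketch of how these values surface at load time (assuming the repo id is vaatsav06/Llama3_pubmedqa_finetune and the Hugging Face transformers library is installed):

# Minimal sketch: load the tokenizer and inspect the special tokens
# defined in special_tokens_map.json. The repo id below is an assumption.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("vaatsav06/Llama3_pubmedqa_finetune")

print(tok.bos_token)  # "<|begin_of_text|>"
print(tok.eos_token)  # "<|end_of_text|>"
print(tok.pad_token)  # "<|begin_of_text|>" (BOS reused as the padding token)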