tulu-2-dpo-70b-5.0bpw-h6-exl2 / tokenizer_config.json
{
  "add_bos_token": true,
  "add_eos_token": false,
  "model_max_length": 2048,
  "pad_token": null,
  "sp_model_kwargs": {},
  "tokenizer_class": "LlamaTokenizer",
  "clean_up_tokenization_spaces": false,
  "bos_token": {
    "__type": "AddedToken",
    "content": "<s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "__type": "AddedToken",
    "content": "</s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "__type": "AddedToken",
    "content": "<unk>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
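
As a sketch of how these settings are consumed, the snippet below loads the tokenizer with the transformers library and checks the effect of add_bos_token / add_eos_token and the null pad_token. The full Hub repository id "LoneStriker/tulu-2-dpo-70b-5.0bpw-h6-exl2" is assumed from the file path above; adjust it to the actual repository.

from transformers import AutoTokenizer

# Assumed repository id, inferred from the file path shown above.
repo_id = "LoneStriker/tulu-2-dpo-70b-5.0bpw-h6-exl2"

tokenizer = AutoTokenizer.from_pretrained(repo_id)

# add_bos_token = true, add_eos_token = false:
# encoding prepends "<s>" but does not append "</s>".
ids = tokenizer("Hello, world!").input_ids
print(ids[0] == tokenizer.bos_token_id)  # True: BOS prepended
print(tokenizer.eos_token_id in ids)     # False: no EOS appended

# pad_token is null in this config, so batched padding needs an explicit
# pad token; reusing the EOS token is a common choice for causal LMs.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token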