Text Generation
Transformers
Safetensors
mistral
Mistral_Star
Mistral_Quiet
Mistral
Mixtral
Question-Answer
Token-Classification
Sequence-Classification
SpydazWeb-AI
chemistry
biology
legal
code
climate
medical
text-generation-inference
Not-For-All-Audiences
chain-of-thought
tree-of-knowledge
forest-of-thoughts
visual-spacial-sketchpad
alpha-mind
knowledge-graph
entity-detection
encyclopedia
wikipedia
stack-exchange
Reddit
Cyber-series
MegaMind
Cybertron
SpydazWeb
Spydaz
LCARS
star-trek
mega-transformers
Multi-Mega-Merge
Multi-Lingual
Afro-Centric
African-Model
Ancient-One
Tokenizer configuration (`tokenizer_config.json`):

```json
{
  "add_bos_token": true,
  "add_eos_token": false,
  "add_prefix_space": null,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "legacy": true,
  "max_length": 2048,
  "model_max_length": 32768,
  "pad_token": "<unk>",
  "padding_side": "left",
  "stride": 0,
  "tokenizer_class": "LlamaTokenizer",
  "truncation_side": "right",
  "truncation_strategy": "longest_first",
  "unk_token": "<unk>",
  "use_default_system_prompt": false
}
```
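
For reference, a minimal sketch of loading a tokenizer that uses the configuration above via the `transformers` library. The repo id `SpydazWebAI/placeholder-model` is a placeholder, not the actual repository name; substitute the real Hub id.

```python
from transformers import AutoTokenizer

# Placeholder repo id -- substitute the actual Hub repository for this model.
tokenizer = AutoTokenizer.from_pretrained("SpydazWebAI/placeholder-model")

# add_bos_token=true with bos_token="<s>": every encoding is prefixed with "<s>".
# eos_token="<|im_end|>" is a ChatML-style end marker, so generation should be
# stopped on that token rather than the usual "</s>".
enc = tokenizer("Hello, world", return_tensors="pt")
print(tokenizer.convert_ids_to_tokens(enc["input_ids"][0]))  # ['<s>', ...]

# pad_token="<unk>" with padding_side="left": left padding keeps the end of each
# prompt adjacent to newly generated tokens in batched decoder-only generation.
batch = tokenizer(["short", "a somewhat longer prompt"],
                  padding=True, return_tensors="pt")
print(batch["input_ids"].shape, batch["attention_mask"][0])
```

Note the distinction between `max_length` (2048, the default used when truncation is requested without an explicit length) and `model_max_length` (32768, the hard ceiling the tokenizer enforces for this model).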