{ "version": "0.1.0", "model_type": "llama", "quantization": "q4f16_1", "model_config": { "hidden_size": 3072, "intermediate_size": 9216, "num_attention_heads": 24, "num_hidden_layers": 30, "rms_norm_eps": 1e-05, "vocab_size": 50688, "tie_word_embeddings": false, "position_embedding_base": 10000.0, "rope_scaling": null, "context_window_size": 2048, "prefill_chunk_size": 2048, "num_key_value_heads": 24, "head_dim": 128, "tensor_parallel_shards": 1, "pipeline_parallel_stages": 1, "max_batch_size": 80 }, "vocab_size": 50688, "context_window_size": 2048, "sliding_window_size": -1, "prefill_chunk_size": 2048, "attention_sink_size": -1, "tensor_parallel_shards": 1, "pipeline_parallel_stages": 1, "temperature": 1.0, "presence_penalty": 0.0, "frequency_penalty": 0.0, "repetition_penalty": 1.0, "top_p": 1.0, "tokenizer_files": [ "tokenizer.json", "tokenizer_config.json" ], "tokenizer_info": { "token_postproc_method": "byte_level", "prepend_space_in_encode": false, "strip_space_in_decode": false }, "conv_template": { "name": "LM", "system_template": "{system_message}", "system_message": "", "system_prefix_token_ids": [ 1 ], "add_role_after_system_message": true, "roles": { "user": "", "assistant": "" }, "role_templates": { "user": "{user_message}", "assistant": "{assistant_message}", "tool": "{tool_message}" }, "messages": [], "seps": [ "" ], "role_content_sep": "", "role_empty_sep": "", "stop_str": [], "stop_token_ids": [ 2 ], "function_string": "", "use_function_calling": false }, "pad_token_id": 0, "bos_token_id": 0, "eos_token_id": 0 }