{
  "architectures": ["LlamaForCausalLM"],
  "model_type": "llama",
  "hidden_size": 4096,
  "intermediate_size": 14336,
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "tie_word_embeddings": false,
  "rms_norm_eps": 1e-6,
  "max_position_embeddings": 2048,
  "vocab_size": 32000,
  "torch_dtype": "float16"
}
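
As a minimal sketch of how such a config can be used, the snippet below instantiates a randomly initialized model from it with the Hugging Face `transformers` library. The file name `config.json` is an assumption, and this is not the only way to load the file (e.g. `AutoConfig.from_pretrained` on a model directory works as well).

```python
# Minimal sketch: build a randomly initialized LlamaForCausalLM from the config above.
# Assumes the JSON is saved as "config.json" (file name is an assumption) and that
# the Hugging Face `transformers` library is installed.
from transformers import LlamaConfig, LlamaForCausalLM

config = LlamaConfig.from_json_file("config.json")  # parse the hyperparameters
model = LlamaForCausalLM(config)                     # fresh weights, no checkpoint loaded

# Sanity check: print a few hyperparameters and the parameter count they imply.
print(config.hidden_size, config.num_hidden_layers, config.vocab_size)
print(f"{sum(p.numel() for p in model.parameters()) / 1e9:.2f}B parameters")
```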