{
  "architectures": ["LLaMAForCausalLM"],
  "hidden_size": 4096,
  "num_attention_heads": 32,
  "num_hidden_layers": 24,
  "model_max_length": 2048
}
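
As a quick sanity check, the fields above fully determine the per-head dimension: hidden_size is split evenly across the attention heads. A minimal sketch in Python, assuming the JSON above has been saved to a file named config.json (a hypothetical path) and using only the standard library:

```python
import json

# Hypothetical path; assumes the config shown above was saved as config.json.
with open("config.json") as f:
    cfg = json.load(f)

# Each attention head gets an equal slice of the hidden dimension.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]
print(head_dim)  # 4096 // 32 = 128
```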