config.json (323 bytes, commit 4d1106c):
{
  "alignments": "linear",
  "architectures": [
    "LangBridgeModel"
  ],
  "dim_enc": 2048,
  "dim_lm": 4096,
  "enc": "llama-lang-adapt/mt5-xl-lm-wura",
  "freeze_encoder": true,
  "freeze_language_model": true,
  "lm": "meta-math/MetaMath-Mistral-7B",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.37.2"
}
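
The config wires a frozen multilingual encoder (`llama-lang-adapt/mt5-xl-lm-wura`, hidden size `dim_enc` = 2048) to a frozen language model (`meta-math/MetaMath-Mistral-7B`, hidden size `dim_lm` = 4096). Below is a minimal sketch of what the `"alignments": "linear"` setting plausibly denotes: a single trainable linear projection from the encoder's hidden size into the LM's embedding space. The class name `LinearAlignment` and its forward signature are illustrative assumptions, not the LangBridgeModel implementation itself.

```python
import torch
import torch.nn as nn


class LinearAlignment(nn.Module):
    """Hypothetical sketch of a linear alignment layer: projects
    frozen-encoder hidden states (dim_enc = 2048) into the language
    model's embedding space (dim_lm = 4096)."""

    def __init__(self, dim_enc: int = 2048, dim_lm: int = 4096):
        super().__init__()
        self.proj = nn.Linear(dim_enc, dim_lm)

    def forward(self, enc_hidden: torch.Tensor) -> torch.Tensor:
        # enc_hidden: (batch, seq_len, dim_enc) -> (batch, seq_len, dim_lm)
        return self.proj(enc_hidden)


# Usage sketch with dummy activations, in bfloat16 per the config's torch_dtype:
align = LinearAlignment().to(torch.bfloat16)
dummy = torch.randn(1, 16, 2048, dtype=torch.bfloat16)
print(align(dummy).shape)  # torch.Size([1, 16, 4096])
```

With `freeze_encoder` and `freeze_language_model` both true, a projection like this would be the only trainable component between the two frozen models.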