{
  "alignments": "linear",
  "architectures": [
    "LangBridgeModel"
  ],
  "dim_enc": 2048,
  "dim_lm": 4096,
  "enc": "llama-lang-adapt/mt5-xl-lm-wura",
  "freeze_encoder": true,
  "freeze_language_model": true,
  "lm": "meta-math/MetaMath-Mistral-7B",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.37.2"
}