Upload config
config.json  CHANGED  (+1, -7)
@@ -1,11 +1,6 @@
 {
-  "architectures": [
-    "MambaModelForCausalLM"
-  ],
   "auto_map": {
-    "AutoConfig": "configuration_mamba.MambaConfig",
-    "AutoModel": "modeling_mamba.MambaModel",
-    "AutoModelForCausalLM": "modeling_mamba.MambaModelForCausalLM"
+    "AutoConfig": "configuration_mamba.MambaConfig"
   },
   "d_model": 768,
   "fused_add_norm": true,
@@ -15,7 +10,6 @@
   "residual_in_fp32": true,
   "rms_norm": true,
   "ssm_cfg": {},
-  "torch_dtype": "float16",
   "transformers_version": "4.37.2",
   "vocab_size": 50277
 }
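For reference, after this change the "auto_map" block exposes only an "AutoConfig" entry pointing at configuration_mamba.MambaConfig, so loading the config still requires trust_remote_code=True. Below is a minimal sketch of loading such a config with the transformers AutoConfig API; the repository id is a placeholder, not part of this commit.

    from transformers import AutoConfig

    # Placeholder repo id; substitute the repository that hosts this
    # config.json together with configuration_mamba.py.
    repo_id = "your-username/mamba-model"

    # trust_remote_code=True allows transformers to import the custom
    # configuration_mamba.MambaConfig class named in "auto_map".
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

    print(config.d_model)     # 768
    print(config.vocab_size)  # 50277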