Upload config
config.json CHANGED (+1 -6)
@@ -1,10 +1,6 @@
 {
-  "architectures": [
-    "MambaLMHeadModel"
-  ],
   "auto_map": {
-    "AutoConfig": "configuration_mamba.MambaConfig",
-    "AutoModelForCausalLM": "modeling_mamba.MambaLMHeadModel"
+    "AutoConfig": "configuration_mamba.MambaConfig"
   },
   "bias": false,
   "conv_bias": true,
@@ -18,7 +14,6 @@
   "model_type": "mamba",
   "n_layer": 24,
   "pad_vocab_size_multiple": 8,
-  "torch_dtype": "float32",
   "transformers_version": "4.37.2",
   "vocab_size": 50280
 }
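For context, a minimal sketch of how a config carrying this auto_map entry is consumed. Mapping "AutoConfig" to configuration_mamba.MambaConfig tells transformers to resolve the config class from configuration_mamba.py shipped in the repo, which requires opting into remote code; the repo id below is a placeholder, not the actual repository name.

from transformers import AutoConfig

# "your-namespace/mamba-model" is a placeholder repo id. The auto_map in
# config.json points AutoConfig at the custom MambaConfig class defined in
# configuration_mamba.py inside the repo, so trust_remote_code is required.
config = AutoConfig.from_pretrained(
    "your-namespace/mamba-model",
    trust_remote_code=True,
)
print(config.model_type)  # -> "mamba"
print(config.n_layer)     # -> 24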