Update config.json
config.json  CHANGED  (+14, -31)
@@ -1,33 +1,16 @@
 {
-    …
-    "dt_init_floor": 0.0001,
-    "dt_limit": [
-        0.0,
-        Infinity
-    ],
-    "dt_max": 0.1,
-    "dt_min": 0.001,
-    "dtype": null,
-    "expand": 2,
-    "headdim": 64,
-    "layer_idx": null,
-    "ngroups": 1,
-    "norm_before_gate": false,
-    "process_group": null,
-    "rmsnorm": true,
-    "sequence_parallel": true,
-    "use_mem_eff_path": true
+    "d_model": 768,
+    "d_intermediate": 0,
+    "n_layer": 24,
+    "vocab_size": 50277,
+    "ssm_cfg": {
+        "layer": "Mamba2"
+    },
+    "attn_layer_idx": [],
+    "attn_cfg": {},
+    "rms_norm": true,
+    "residual_in_fp32": true,
+    "fused_add_norm": true,
+    "pad_vocab_size_multiple": 16,
+    "tie_embeddings": true
 }
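The deleted keys (dt_init_floor, dt_limit, expand, headdim, ngroups, use_mem_eff_path, sequence_parallel, ...) are per-block Mamba2 module arguments, while the new top-level keys match the MambaConfig schema used by mamba_ssm's MambaLMHeadModel, so the file now describes the full language model rather than a single mixer block. A minimal sketch of consuming the updated config, assuming this repo is meant to be loaded with the mamba_ssm reference implementation (the file path "config.json" and the CUDA/bfloat16 settings are illustrative):

    import json

    import torch
    from mamba_ssm.models.config_mamba import MambaConfig
    from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

    # Every key in the new config.json (d_model, n_layer, ssm_cfg, ...)
    # is a field of the MambaConfig dataclass, so it maps directly.
    with open("config.json") as f:
        cfg = MambaConfig(**json.load(f))

    # Build the LM head model; ssm_cfg={"layer": "Mamba2"} selects
    # Mamba2 blocks instead of the default Mamba1 mixer.
    model = MambaLMHeadModel(cfg, device="cuda", dtype=torch.bfloat16)

    ids = torch.randint(0, cfg.vocab_size, (1, 16), device="cuda")
    logits = model(ids).logits
    # Vocab is padded to a multiple of 16 (50277 -> 50288), so the
    # logits have shape (1, 16, 50288).
    print(logits.shape)

If the repo also ships model weights, MambaLMHeadModel.from_pretrained(path) reads this config.json from the checkpoint directory directly, which is presumably why the file was changed to the MambaConfig layout.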