Commit
·
9615b18
1
Parent(s):
5355776
Update config.json
Browse files
- config.json +10 -1
config.json
CHANGED
@@ -36,6 +36,15 @@
|
|
36 |
"q_lora_rank": 1536,
|
37 |
"qk_nope_head_dim": 128,
|
38 |
"qk_rope_head_dim": 64,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
39 |
"rms_norm_eps": 1e-06,
|
40 |
"rope_scaling": {
|
41 |
"beta_fast": 32,
|
@@ -54,7 +63,7 @@
|
|
54 |
"topk_group": 4,
|
55 |
"topk_method": "noaux_tc",
|
56 |
"torch_dtype": "bfloat16",
|
57 |
-
"transformers_version": "4.
|
58 |
"use_cache": true,
|
59 |
"v_head_dim": 128,
|
60 |
"vocab_size": 129280
|
|
|
36 |
"q_lora_rank": 1536,
|
37 |
"qk_nope_head_dim": 128,
|
38 |
"qk_rope_head_dim": 64,
|
39 |
+
"quantization_config": {
|
40 |
+
"activation_scheme": "dynamic",
|
41 |
+
"fmt": "e4m3",
|
42 |
+
"quant_method": "fp8",
|
43 |
+
"weight_block_size": [
|
44 |
+
128,
|
45 |
+
128
|
46 |
+
]
|
47 |
+
},
|
48 |
"rms_norm_eps": 1e-06,
|
49 |
"rope_scaling": {
|
50 |
"beta_fast": 32,
|
|
|
63 |
"topk_group": 4,
|
64 |
"topk_method": "noaux_tc",
|
65 |
"torch_dtype": "bfloat16",
|
66 |
+
"transformers_version": "4.33.1",
|
67 |
"use_cache": true,
|
68 |
"v_head_dim": 128,
|
69 |
"vocab_size": 129280
|