nickfraser committed
Commit db5a15b · 1 Parent(s): 6f59b43

[math_model] Make it more obvious that softmax scale comes from the quantizer

Files changed (1): test_attn.py (+1 -1)
test_attn.py CHANGED
@@ -15,7 +15,7 @@ value = 2.*torch.rand((batch_size,seq_len,hidden_size)) - 1.
 
 quant_params = {
     "output_softmax_quant": {
-        "act_scale": 1./240.,
+        "act_scale": torch.rand((1,))/240.,
         "act_scale_shape": [],
         "act_zp": 0.0,
         "act_zp_shape": [],