ybelkada committed
Commit 24a2b9b
Parent: d37c4cf

Update llama_xformers_attention.py

Files changed (1):
  1. llama_xformers_attention.py +0 -4
llama_xformers_attention.py CHANGED
@@ -70,10 +70,6 @@ class LlamaXFormersAttention(LlamaAttention):
         attn_weights = None
 
         attn_output = attn_output.reshape(bsz, q_len, self.hidden_size)
-
         attn_output = self.o_proj(attn_output)
 
-        if not output_attentions:
-            attn_weights = None
-
         return attn_output, attn_weights, past_key_value
 
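Why the removal is safe: attn_weights is set to None unconditionally at line 70 and nothing later in the method reassigns it, so the "if not output_attentions:" guard was dead code. For context, here is a minimal sketch of how the tail of such an xformers-backed forward typically looks after this commit. The memory_efficient_attention call is the real xformers API, but the query/key/value variable names and the attention_mask bias are assumptions based on the hunk above, not taken from the full file.

import xformers.ops as xops

# Hypothetical tail of LlamaXFormersAttention.forward after this commit.
# query_states / key_states / value_states of shape
# (bsz, q_len, num_heads, head_dim) are assumed to be built earlier in
# the method (assumed names, not shown in the diff).
attn_output = xops.memory_efficient_attention(
    query_states, key_states, value_states, attn_bias=attention_mask
)
# The memory-efficient kernel never materializes the attention matrix,
# so there are no weights to return regardless of output_attentions;
# hence the unconditional assignment and the removal of the dead guard.
attn_weights = None

attn_output = attn_output.reshape(bsz, q_len, self.hidden_size)
attn_output = self.o_proj(attn_output)
return attn_output, attn_weights, past_key_value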