radna committed on
Commit
058bab9
1 Parent(s): 2e6da05

Update flash_attention.py

Browse files
Files changed (1) hide show
  1. flash_attention.py +1 -1
flash_attention.py CHANGED
@@ -3,7 +3,7 @@ import torch.nn as nn
3
  from einops import rearrange
4
 
5
 
6
- from triton_flash_attn import _attention
7
  from triton_bert_padding import pad_input, unpad_input
8
 
9
 
 
3
  from einops import rearrange
4
 
5
 
6
+ from triton_flash_atn import _attention
7
  from triton_bert_padding import pad_input, unpad_input
8
 
9