radna committed on
Commit c808872
1 Parent(s): 8f8935f

Update flash_attention.py

Files changed (1)
  1. flash_attention.py +2 -2
flash_attention.py CHANGED
@@ -3,8 +3,8 @@ import torch.nn as nn
 from einops import rearrange
 
 try:
-    from .triton_flash_atn import _attention
-    from .triton_bert_pading import pad_input, unpad_input
+    from triton_flash_atn import _attention
+    from triton_bert_pading import pad_input, unpad_input
 except:
     print("FlashAttention is not installed.")
 
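For context, the hunk swaps the package-relative imports (from .triton_flash_atn ...) for top-level ones, so the guard resolves when flash_attention.py is used outside a package. A minimal sketch of the same optional-import pattern, assuming the triton_flash_atn and triton_bert_pading module names from the diff; the flash_attn_available flag is hypothetical, added only to make the fallback explicit:

try:
    from triton_flash_atn import _attention
    from triton_bert_pading import pad_input, unpad_input
    flash_attn_available = True  # hypothetical flag: Triton kernels importable
except ImportError:
    # Same fallback message as the diff; catching ImportError is narrower
    # than the bare except in the original file.
    print("FlashAttention is not installed.")
    flash_attn_available = False

Catching ImportError rather than a bare except keeps unrelated failures (e.g. a syntax error inside the optional module) from being silently swallowed.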