radna committed
Commit ce84985
1 Parent(s): 05f5662

Update flash_attention.py

Files changed (1)
  1. flash_attention.py +2 -5
flash_attention.py CHANGED
@@ -2,12 +2,9 @@ import torch
 import torch.nn as nn
 from einops import rearrange
 
-try:
-    from triton_flash_attn.py import _attention
-    from triton_bert_padding.py import pad_input, unpad_input
-except:
-    print("FlashAttention is not installed.")
 
+from triton_flash_attn import _attention
+from triton_bert_padding import pad_input, unpad_input
 
 
 
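Note that the removed guard had a bug independent of this commit: Python imports refer to modules by name, so `from triton_flash_attn.py import _attention` parses as importing submodule `py` from a package `triton_flash_attn` and fails even when the file is present. If optional-dependency handling were still wanted, a minimal sketch with the corrected module names could look like the following (the `HAS_FLASH_ATTN` flag is illustrative, not part of the committed file):

# Hypothetical sketch, not the committed code: keeps the old optional-import
# guard, but with the corrected module names (no ".py" suffix) and a narrow
# except clause instead of a bare "except:".
try:
    from triton_flash_attn import _attention
    from triton_bert_padding import pad_input, unpad_input
    HAS_FLASH_ATTN = True  # illustrative flag, not in the original file
except ImportError:
    HAS_FLASH_ATTN = False
    print("FlashAttention is not installed.")

The commit itself instead drops the guard entirely and imports unconditionally, so a missing dependency surfaces as an ImportError at load time rather than a printed message.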