spdraptor committed
Commit 17a415c · 1 Parent(s): 8b87815

flash attn

app.py CHANGED
@@ -1,8 +1,8 @@
  #Script added by SPDraptor
  import spaces
  from typing import Optional
- import subprocess
- subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
+ # import subprocess
+ # subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
  import torch
  print("cuda present = ",torch.cuda.is_available())
  import os
modules/__pycache__/masking_module.cpython-310.pyc CHANGED
Binary files a/modules/__pycache__/masking_module.cpython-310.pyc and b/modules/__pycache__/masking_module.cpython-310.pyc differ
 
requirements.txt CHANGED
@@ -1,4 +1,5 @@
  tqdm
+ flash-attn --no-build-isolation
  einops
  accelerate
  spaces
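
This commit moves the flash-attn install out of app.py and into requirements.txt. For reference, a minimal sketch (not part of the commit) of how the removed runtime install could be kept as a fallback in app.py, guarded so it only runs when flash_attn is not already importable and preserving the existing environment variables instead of replacing them:

    # Sketch only: optional fallback mirroring the pip command removed from app.py.
    import importlib.util
    import os
    import subprocess
    import sys

    if importlib.util.find_spec("flash_attn") is None:
        subprocess.run(
            [sys.executable, "-m", "pip", "install", "flash-attn", "--no-build-isolation"],
            # Same environment variable the original subprocess call set.
            env={**os.environ, "FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
            check=True,
        )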