thomasgauthier committed
Commit: ea9e4b8 (1 parent: 22b63ff)

put flash attention in requirements instead

Files changed (2):
  1. app.py +2 -2
  2. requirements.txt +2 -1
app.py CHANGED
@@ -4,8 +4,8 @@ from image_generator import process_and_generate
 from gradio_interface import create_gradio_interface
 
 if __name__ == "__main__":
-    import subprocess
-    subprocess.run('pip install "flash-attn>=2.1.0" --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
+    # import subprocess
+    # subprocess.run('pip install "flash-attn>=2.1.0" --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
 
 
requirements.txt CHANGED
@@ -4,4 +4,5 @@ Pillow
 gradio
 janus @ git+https://github.com/deepseek-ai/Janus
 transformers
-spaces
+spaces
+flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
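Note: this commit replaces the runtime pip install in app.py with a prebuilt flash-attn 2.6.3 wheel pinned in requirements.txt (built for CUDA 12.3, torch 2.4, CPython 3.10, Linux x86_64), so nothing is compiled when the Space starts. A minimal sanity-check sketch, assuming the Space's Python/torch/CUDA stack matches the wheel tag; this check is illustrative and not part of the commit:

# Hypothetical startup check (not in this commit): confirm the pinned wheel imports cleanly.
import flash_attn
print(f"flash-attn {flash_attn.__version__} available")  # expected: 2.6.3 from the pinned wheel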