haodongli committed on
Commit ea0a1d3
Parent: ba08a43

add transformers.utils.move_cache()

Files changed (1): app.py (+2, −0)
app.py CHANGED

```diff
@@ -14,7 +14,9 @@ from pathlib import Path
 import gradio
 from gradio.utils import get_cache_folder
 from infer import lotus, lotus_video
+import transformers
 
+transformers.utils.move_cache()
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 def infer(path_input, seed):
```
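For context, `transformers.utils.move_cache()` migrates model files from the legacy transformers cache layout into the shared huggingface_hub cache layout, so weights downloaded by older library versions are reused instead of re-downloaded. The commit calls it once at import time, before any model is loaded. Below is a minimal standalone sketch of the same startup step; the try/except guard is an added defensive assumption, not part of the commit:

```python
import transformers

# One-time migration of files from the legacy transformers cache layout
# to the huggingface_hub cache layout; on later runs there is typically
# nothing left to move, so the call returns quickly.
try:
    transformers.utils.move_cache()
except Exception as err:
    # Assumption: a failed cache migration should not abort app startup.
    print(f"Cache migration skipped: {err}")
```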