move cache
- app.py +4 -1
- baseline_utils.py +1 -1
app.py CHANGED
@@ -5,7 +5,10 @@ from PIL import Image
 from google.oauth2 import service_account
 from baseline_utils import detect_text_in_image, summarize_diary_text, analyze_writer_image, generate_comic_book
 import glob
-import os
+import os
+from transformers.utils.hub import move_cache
+
+move_cache()
 
 # Load secrets from Hugging Face Spaces environment
 openai_api_key = os.getenv("OPENAI_API_KEY")
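
Note: transformers' move_cache() migrates files stored under the legacy transformers cache layout into the shared huggingface_hub cache, which is likely why it is called once at import time here. A minimal variation (not the committed code) that tolerates a failed migration so app startup is never blocked:

    # Hypothetical variation, not the committed code: attempt the cache migration
    # but don't let a failure (e.g. a read-only cache directory) crash the app.
    from transformers.utils.hub import move_cache

    try:
        move_cache()
    except Exception as err:
        print(f"Cache migration skipped: {err}")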
baseline_utils.py CHANGED
@@ -79,7 +79,7 @@ def generate_comic_book(diary_text, writer_description, num_pages=4):
         "stabilityai/sdxl-turbo",
         torch_dtype=torch.float16,
         variant="fp16",
-
+        cache_dir="./SDXL-Turbo"
     )
 
     # Check for available device: CUDA, MPS, or CPU
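
The added cache_dir keeps the SDXL-Turbo weights under ./SDXL-Turbo inside the Space instead of the default ~/.cache/huggingface location. A sketch of the surrounding call, assuming baseline_utils.py loads the model with diffusers' AutoPipelineForText2Image (the diff only shows the argument list, so the class and variable names here are assumptions):

    import torch
    from diffusers import AutoPipelineForText2Image

    # Assumed pipeline class and variable names; only the keyword
    # arguments shown in the diff are confirmed.
    pipe = AutoPipelineForText2Image.from_pretrained(
        "stabilityai/sdxl-turbo",
        torch_dtype=torch.float16,
        variant="fp16",
        cache_dir="./SDXL-Turbo",  # cache weights next to the app code
    )

    # Device selection mirroring the diff's comment: CUDA, then MPS, then CPU.
    if torch.cuda.is_available():
        device = "cuda"
    elif torch.backends.mps.is_available():
        device = "mps"
    else:
        device = "cpu"
    pipe = pipe.to(device)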