Spaces: Paused
More efficient telemetry uploading (#157)
opened by multimodalart (HF staff)
- app.py +56 -29
- requirements.txt +2 -0
app.py
CHANGED
@@ -6,6 +6,9 @@ import json
|
|
6 |
import os
|
7 |
import random
|
8 |
import tempfile
|
|
|
|
|
|
|
9 |
|
10 |
import gradio as gr
|
11 |
import torch
|
@@ -105,15 +108,18 @@ def upload_stage1_result(stage1_param_path: str, stage1_result_path: str,
|
|
105 |
if not UPLOAD_REPO_ID:
|
106 |
return
|
107 |
try:
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
112 |
-
|
113 |
-
|
114 |
-
|
115 |
-
|
116 |
-
|
|
|
|
|
|
|
117 |
except Exception as e:
|
118 |
print(e)
|
119 |
|
@@ -140,18 +146,20 @@ def upload_stage2_info(stage1_param_file_hash_name: str,
|
|
140 |
param_file.write(json.dumps(stage2_params))
|
141 |
stage2_param_file_hash_name = get_param_file_hash_name(param_file.name)
|
142 |
save_name = f'{stage1_param_file_hash_name}_{stage2_param_file_hash_name}'
|
143 |
-
|
144 |
try:
|
145 |
-
|
146 |
-
|
147 |
-
|
148 |
-
|
149 |
-
|
|
|
150 |
if UPLOAD_RESULT_IMAGE:
|
151 |
-
|
152 |
-
|
153 |
-
|
154 |
-
|
|
|
155 |
except Exception as e:
|
156 |
print(e)
|
157 |
|
@@ -188,17 +196,16 @@ def upload_stage2_3_info(stage1_param_file_hash_name: str,
|
|
188 |
save_name = f'{stage1_param_file_hash_name}_{stage2_3_param_file_hash_name}'
|
189 |
|
190 |
try:
|
191 |
-
|
192 |
-
|
193 |
-
|
194 |
-
|
195 |
-
|
196 |
if UPLOAD_RESULT_IMAGE:
|
197 |
-
|
198 |
-
|
199 |
-
|
200 |
-
|
201 |
-
repo_type='dataset')
|
202 |
except Exception as e:
|
203 |
print(e)
|
204 |
|
@@ -221,6 +228,21 @@ def show_gallery_view() -> tuple[dict, dict]:
|
|
221 |
def show_upscaled_view() -> tuple[dict, dict]:
|
222 |
return _update_result_view(False)
|
223 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
224 |
|
225 |
examples = [
|
226 |
'high quality dslr photo, a photo product of a lemon inspired by natural and organic materials, wooden accents, intricately decorated with glowing vines of led lights, inspired by baroque luxury',
|
@@ -669,5 +691,10 @@ with gr.Blocks(css='style.css') as demo:
|
|
669 |
],
|
670 |
queue=False,
|
671 |
)
|
|
|
|
|
|
|
|
|
|
|
672 |
|
673 |
demo.queue(api_open=False, max_size=MAX_QUEUE_SIZE).launch(debug=DEBUG)
|
|
|
6 |
import os
|
7 |
import random
|
8 |
import tempfile
|
9 |
+
import shortuuid
|
10 |
+
from apscheduler.schedulers.background import BackgroundScheduler
|
11 |
+
import shutil
|
12 |
|
13 |
import gradio as gr
|
14 |
import torch
|
|
|
108 |
if not UPLOAD_REPO_ID:
|
109 |
return
|
110 |
try:
|
111 |
+
folder_params = "tmp/results/stage1_params"
|
112 |
+
folder_results = "tmp/results/stage1_results"
|
113 |
+
|
114 |
+
path_params = f"{folder_params}/{save_name}.json"
|
115 |
+
path_results = f"{folder_results}/{save_name}.pth"
|
116 |
+
|
117 |
+
os.makedirs(folder_params, exist_ok=True)
|
118 |
+
os.makedirs(folder_results, exist_ok=True)
|
119 |
+
|
120 |
+
shutil.copy(stage1_param_path, path_params)
|
121 |
+
shutil.copy(stage1_result_path, path_results)
|
122 |
+
|
123 |
except Exception as e:
|
124 |
print(e)
|
125 |
|
|
|
146 |
param_file.write(json.dumps(stage2_params))
|
147 |
stage2_param_file_hash_name = get_param_file_hash_name(param_file.name)
|
148 |
save_name = f'{stage1_param_file_hash_name}_{stage2_param_file_hash_name}'
|
149 |
+
|
150 |
try:
|
151 |
+
folder_params = "tmp/results/stage2_params"
|
152 |
+
|
153 |
+
os.makedirs(folder_params, exist_ok=True)
|
154 |
+
path_params = f"{folder_params}/{save_name}.json"
|
155 |
+
shutil.copy(param_file.name, path_params)
|
156 |
+
|
157 |
if UPLOAD_RESULT_IMAGE:
|
158 |
+
folder_results = "tmp/results/stage2_results"
|
159 |
+
os.makedirs(folder_results, exist_ok=True)
|
160 |
+
path_results = f"{folder_results}/{save_name}.png"
|
161 |
+
shutil.copy(stage2_output_path, path_results)
|
162 |
+
|
163 |
except Exception as e:
|
164 |
print(e)
|
165 |
|
|
|
196 |
save_name = f'{stage1_param_file_hash_name}_{stage2_3_param_file_hash_name}'
|
197 |
|
198 |
try:
|
199 |
+
folder_params = "tmp/results/stage2_3_params"
|
200 |
+
os.makedirs(folder_params, exist_ok=True)
|
201 |
+
path_params = f"{folder_params}/{save_name}.json"
|
202 |
+
shutil.copy(param_file.name, path_params)
|
203 |
+
|
204 |
if UPLOAD_RESULT_IMAGE:
|
205 |
+
folder_results = "tmp/results/stage2_3_results"
|
206 |
+
os.makedirs(folder_results, exist_ok=True)
|
207 |
+
path_results = f"{folder_results}/{save_name}.png"
|
208 |
+
shutil.copy(stage2_3_output_path, path_results)
|
|
|
209 |
except Exception as e:
|
210 |
print(e)
|
211 |
|
|
|
228 |
def show_upscaled_view() -> tuple[dict, dict]:
    """Switch the result area to the upscaled-image view.

    Delegates to ``_update_result_view`` with the gallery flag disabled.
    """
    show_gallery = False
    return _update_result_view(show_gallery)
|
230 |
|
231 |
+
def upload_files():
    """Zip the accumulated local results and upload them to the dataset repo.

    Archives everything under ``tmp/results`` into a single zip, uploads it
    under a unique name (``shortuuid`` suffix, so successive uploads never
    collide), and deletes the local data only after a successful upload.
    Errors are printed and swallowed so the background scheduler that calls
    this keeps running.  Returns None; no-op when there is nothing to upload.
    """
    # os.path.isdir() already implies existence, so a single check suffices.
    if not os.path.isdir("tmp/results"):
        return
    archive_path = "tmp/results.zip"
    try:
        shutil.make_archive("tmp/results", "zip", "tmp/results")
        hf_api.upload_file(
            path_or_fileobj=archive_path,
            path_in_repo=f"results_{shortuuid.uuid()}.zip",
            repo_id=UPLOAD_REPO_ID,
            repo_type="dataset",
        )
        # Remove local data only after the upload succeeded, so a failed
        # upload leaves the results in place for the next scheduled attempt.
        shutil.rmtree("tmp/results")
    except Exception as e:
        # Best-effort telemetry: never let an upload error propagate.
        print(e)
    finally:
        # Bug fix: the original left tmp/results.zip on disk forever.
        if os.path.exists(archive_path):
            os.remove(archive_path)
|
246 |
|
247 |
examples = [
|
248 |
'high quality dslr photo, a photo product of a lemon inspired by natural and organic materials, wooden accents, intricately decorated with glowing vines of led lights, inspired by baroque luxury',
|
|
|
691 |
],
|
692 |
queue=False,
|
693 |
)
|
694 |
+
|
695 |
+
# Batch telemetry uploads instead of uploading per request: a background job
# zips and pushes accumulated results on a fixed interval.  Only schedule it
# when an upload target repository is configured.
if UPLOAD_REPO_ID:
    scheduler = BackgroundScheduler()
    # 60*5 seconds = flush results to the Hub every 5 minutes.
    scheduler.add_job(func=upload_files, trigger="interval", seconds=60*5)
    scheduler.start()
|
699 |
|
700 |
demo.queue(api_open=False, max_size=MAX_QUEUE_SIZE).launch(debug=DEBUG)
|
requirements.txt
CHANGED
@@ -12,3 +12,5 @@ sentencepiece==0.1.98
|
|
12 |
tokenizers==0.13.3
|
13 |
tqdm==4.65.0
|
14 |
transformers==4.28.1
|
|
|
|
|
|
12 |
tokenizers==0.13.3
|
13 |
tqdm==4.65.0
|
14 |
transformers==4.28.1
|
15 |
+
shortuuid==1.0.11
|
16 |
+
apscheduler==3.10.1
|