multimodalart committed
Commit 6fb14fa
1 Parent(s): ed6f29c

comment out gr.progress to track errors

Files changed (1):
1. app.py (+8, -3)
app.py CHANGED

@@ -102,7 +102,7 @@ def infer(
     num_inference_steps: int,
     guidance_scale: float,
     seed: int = -1,
-    progress=gr.Progress(track_tqdm=True),
+    #progress=gr.Progress(track_tqdm=True),
 ):
     if seed == -1:
         seed = random.randint(0, 2 ** 8 - 1)
@@ -239,14 +239,19 @@ with gr.Blocks() as demo:
     """)


-    def generate(prompt, seed_value, scale_status, rife_status, progress=gr.Progress(track_tqdm=True)):
+    def generate(prompt,
+                 seed_value,
+                 scale_status,
+                 rife_status,
+                 #progress=gr.Progress(track_tqdm=True)
+                 ):

         latents, seed = infer(
             prompt,
             num_inference_steps=50, # NOT Changed
             guidance_scale=6, # NOT Changed
             seed=seed_value,
-            progress=progress,
+            #progress=progress,
         )
         if scale_status:
             latents = utils.upscale_batch_and_concatenate(upscale_model, latents, device)
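
For context on what this commit disables, here is a minimal, self-contained sketch of the gr.Progress(track_tqdm=True) pattern being commented out. The slow_task function, its arguments, and the Interface wiring are hypothetical and not part of app.py; only the progress parameter mirrors the Space's code. When that parameter is present, Gradio picks up tqdm progress bars raised inside the function and renders them in the web UI; per the commit message, commenting it out is a way to take the progress-tracking wrapper out of the picture while tracking down errors.

import time

import gradio as gr
from tqdm import tqdm


# Hypothetical stand-in for the Space's generate()/infer() pair: a long
# task that reports progress through tqdm.
def slow_task(steps, progress=gr.Progress(track_tqdm=True)):
    # With track_tqdm=True, Gradio mirrors this tqdm bar in the web UI.
    # Commenting the progress parameter out (as this commit does) leaves
    # the function's logic untouched but disables the progress tracking.
    for _ in tqdm(range(int(steps)), desc="generating"):
        time.sleep(0.1)
    return f"done after {int(steps)} steps"


demo = gr.Interface(fn=slow_task, inputs=gr.Number(value=10), outputs="text")

if __name__ == "__main__":
    demo.launch()

Because the parameter is only commented out rather than removed, restoring progress reporting later is a one-line change in each signature.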