zR committed on
Commit 2e3199c
Parents (2): 9a61462 6fb14fa

Merge branch 'main' of hf.co:spaces/THUDM/CogVideoX-5B

Files changed (1)
  1. app.py +8 -3
app.py CHANGED
@@ -102,7 +102,7 @@ def infer(
     num_inference_steps: int,
     guidance_scale: float,
     seed: int = -1,
-    progress=gr.Progress(track_tqdm=True),
+    #progress=gr.Progress(track_tqdm=True),
 ):
     if seed == -1:
         seed = random.randint(0, 2 ** 8 - 1)
@@ -263,14 +263,19 @@ with gr.Blocks() as demo:
     """)
 
 
-    def generate(prompt, seed_value, scale_status, rife_status, progress=gr.Progress(track_tqdm=True)):
+    def generate(prompt,
+                 seed_value,
+                 scale_status,
+                 rife_status,
+                 #progress=gr.Progress(track_tqdm=True)
+                 ):
 
         latents, seed = infer(
             prompt,
             num_inference_steps=50,  # NOT Changed
             guidance_scale=7,  # NOT Changed
             seed=seed_value,
-            progress=progress,
+            #progress=progress,
         )
         if scale_status:
             latents = utils.upscale_batch_and_concatenate(upscale_model, latents, device)
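
For context, gr.Progress(track_tqdm=True) is Gradio's mechanism for injecting a progress tracker into an event handler so that tqdm loops running inside it (such as a diffusers denoising loop) are mirrored in the web UI; the commit above comments that parameter out of both infer() and generate(). The snippet below is a minimal, self-contained sketch of that standard Gradio pattern, not code from this Space; slow_task and its widgets are hypothetical names used only for illustration.

# Minimal sketch of the gr.Progress(track_tqdm=True) pattern that the
# commit above disables; slow_task is a hypothetical stand-in, not app.py code.
import time

import gradio as gr


def slow_task(steps: float, progress=gr.Progress(track_tqdm=True)):
    # Gradio replaces the default value with a live Progress tracker at call
    # time; with track_tqdm=True, tqdm bars inside this function are also
    # surfaced in the UI.
    for _ in progress.tqdm(range(int(steps)), desc="Working"):
        time.sleep(0.1)  # stand-in for one inference step
    return f"done after {int(steps)} steps"


with gr.Blocks() as demo:
    steps = gr.Slider(1, 50, value=10, step=1, label="Steps")
    result = gr.Textbox(label="Result")
    gr.Button("Run").click(slow_task, inputs=steps, outputs=result)

if __name__ == "__main__":
    demo.launch()

With the parameter commented out, the handlers still run unchanged otherwise, but the Space's UI no longer shows per-step progress from the tqdm loops inside the pipeline.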