Spaces:
Build error
Build error
Update UI
Browse files
app.py
CHANGED
@@ -319,7 +319,11 @@ def app_gradio():
|
|
319 |
gr.Markdown(HEADER)
|
320 |
with gr.Tab("Virtual Try on"):
|
321 |
with gr.Row():
|
|
|
|
|
|
|
322 |
with gr.Column(scale=1, min_width=350):
|
|
|
323 |
with gr.Row():
|
324 |
image_path = gr.Image(
|
325 |
type="filepath",
|
@@ -329,13 +333,8 @@ def app_gradio():
|
|
329 |
person_image = gr.ImageEditor(
|
330 |
interactive=True, label="Person Image", type="filepath"
|
331 |
)
|
332 |
-
|
333 |
-
|
334 |
-
with gr.Column(scale=1, min_width=230):
|
335 |
-
cloth_image = gr.Image(
|
336 |
-
interactive=True, label="Clothes Image", type="filepath"
|
337 |
-
)
|
338 |
-
with gr.Column(scale=1, min_width=120):
|
339 |
gr.Markdown(
|
340 |
'<span style="color: #808080; font-size: small;">Two ways to provide Mask:<br>1. Upload the person image and use the `🖌️` above to draw the Mask (higher priority)<br>2. Select the `Try-On Cloth Type` to generate automatically </span>'
|
341 |
)
|
@@ -344,45 +343,9 @@ def app_gradio():
|
|
344 |
choices=["upper", "lower", "overall"],
|
345 |
value="upper",
|
346 |
)
|
347 |
-
|
348 |
-
|
349 |
-
|
350 |
-
gr.Markdown(
|
351 |
-
'<center><span style="color: #FF0000">!!! Click only Once, Wait for Delay !!!</span></center>'
|
352 |
-
)
|
353 |
-
|
354 |
-
gr.Markdown(
|
355 |
-
'<span style="color: #808080; font-size: small;">Advanced options can adjust details:<br>1. `Inference Step` may enhance details;<br>2. `CFG` is highly correlated with saturation;<br>3. `Random seed` may improve pseudo-shadow.</span>'
|
356 |
-
)
|
357 |
-
with gr.Accordion("Advanced Options", open=False):
|
358 |
-
num_inference_steps = gr.Slider(
|
359 |
-
label="Inference Step", minimum=10, maximum=100, step=5, value=50
|
360 |
-
)
|
361 |
-
# Guidence Scale
|
362 |
-
guidance_scale = gr.Slider(
|
363 |
-
label="CFG Strenth", minimum=0.0, maximum=7.5, step=0.5, value=2.5
|
364 |
-
)
|
365 |
-
# Random Seed
|
366 |
-
seed = gr.Slider(
|
367 |
-
label="Seed", minimum=-1, maximum=10000, step=1, value=42
|
368 |
-
)
|
369 |
-
show_type = gr.Radio(
|
370 |
-
label="Show Type",
|
371 |
-
choices=["result only", "input & result", "input & mask & result"],
|
372 |
-
value="result only",
|
373 |
-
)
|
374 |
-
|
375 |
-
# num_inference_steps = 50
|
376 |
-
# guidance_scale = 2.5
|
377 |
-
# seed = 42
|
378 |
-
# show_type = "result only"
|
379 |
-
|
380 |
-
with gr.Column(scale=2, min_width=500):
|
381 |
-
result_image = gr.Image(interactive=False, label="Result")
|
382 |
-
with gr.Row():
|
383 |
-
# Photo Examples
|
384 |
-
root_path = "resource/demo/example"
|
385 |
-
with gr.Column():
|
386 |
men_exm = gr.Examples(
|
387 |
examples=[
|
388 |
os.path.join(root_path, "person", "men", _)
|
@@ -392,6 +355,7 @@ def app_gradio():
|
|
392 |
inputs=image_path,
|
393 |
label="Person Examples ①",
|
394 |
)
|
|
|
395 |
women_exm = gr.Examples(
|
396 |
examples=[
|
397 |
os.path.join(root_path, "person", "women", _)
|
@@ -401,10 +365,38 @@ def app_gradio():
|
|
401 |
inputs=image_path,
|
402 |
label="Person Examples ②",
|
403 |
)
|
|
|
404 |
gr.Markdown(
|
405 |
'<span style="color: #808080; font-size: small;">*Person examples come from the demos of <a href="https://huggingface.co/spaces/levihsu/OOTDiffusion">OOTDiffusion</a> and <a href="https://www.outfitanyone.org">OutfitAnyone</a>. </span>'
|
406 |
)
|
407 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
408 |
condition_upper_exm = gr.Examples(
|
409 |
examples=[
|
410 |
os.path.join(root_path, "condition", "upper", _)
|
@@ -414,6 +406,7 @@ def app_gradio():
|
|
414 |
inputs=cloth_image,
|
415 |
label="Condition Upper Examples",
|
416 |
)
|
|
|
417 |
condition_overall_exm = gr.Examples(
|
418 |
examples=[
|
419 |
os.path.join(root_path, "condition", "overall", _)
|
@@ -423,23 +416,47 @@ def app_gradio():
|
|
423 |
inputs=cloth_image,
|
424 |
label="Condition Overall Examples",
|
425 |
)
|
426 |
-
|
427 |
-
examples=[
|
428 |
-
os.path.join(root_path, "condition", "person", _)
|
429 |
-
for _ in os.listdir(os.path.join(root_path, "condition", "person"))
|
430 |
-
],
|
431 |
-
examples_per_page=4,
|
432 |
-
inputs=cloth_image,
|
433 |
-
label="Condition Reference Person Examples",
|
434 |
-
)
|
435 |
-
gr.Markdown(
|
436 |
-
'<span style="color: #808080; font-size: small;">*Condition examples come from the Internet. </span>'
|
437 |
-
)
|
438 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
439 |
image_path.change(
|
440 |
person_example_fn, inputs=image_path, outputs=person_image
|
441 |
)
|
442 |
|
|
|
443 |
submit.click(
|
444 |
submit_function,
|
445 |
[
|
|
|
319 |
gr.Markdown(HEADER)
|
320 |
with gr.Tab("Virtual Try on"):
|
321 |
with gr.Row():
|
322 |
+
# define root_path
|
323 |
+
root_path = "resource/demo/example"
|
324 |
+
# First column
|
325 |
with gr.Column(scale=1, min_width=350):
|
326 |
+
# Person image
|
327 |
with gr.Row():
|
328 |
image_path = gr.Image(
|
329 |
type="filepath",
|
|
|
333 |
person_image = gr.ImageEditor(
|
334 |
interactive=True, label="Person Image", type="filepath"
|
335 |
)
|
336 |
+
# Mask instruction
|
337 |
+
with gr.Row():
|
|
|
|
|
|
|
|
|
|
|
338 |
gr.Markdown(
|
339 |
'<span style="color: #808080; font-size: small;">Two ways to provide Mask:<br>1. Upload the person image and use the `🖌️` above to draw the Mask (higher priority)<br>2. Select the `Try-On Cloth Type` to generate automatically </span>'
|
340 |
)
|
|
|
343 |
choices=["upper", "lower", "overall"],
|
344 |
value="upper",
|
345 |
)
|
346 |
+
# Model column examples
|
347 |
+
with gr.Row():
|
348 |
+
# Men examples
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
349 |
men_exm = gr.Examples(
|
350 |
examples=[
|
351 |
os.path.join(root_path, "person", "men", _)
|
|
|
355 |
inputs=image_path,
|
356 |
label="Person Examples ①",
|
357 |
)
|
358 |
+
# Women examples
|
359 |
women_exm = gr.Examples(
|
360 |
examples=[
|
361 |
os.path.join(root_path, "person", "women", _)
|
|
|
365 |
inputs=image_path,
|
366 |
label="Person Examples ②",
|
367 |
)
|
368 |
+
# Markdown: component display text in Gradio
|
369 |
gr.Markdown(
|
370 |
'<span style="color: #808080; font-size: small;">*Person examples come from the demos of <a href="https://huggingface.co/spaces/levihsu/OOTDiffusion">OOTDiffusion</a> and <a href="https://www.outfitanyone.org">OutfitAnyone</a>. </span>'
|
371 |
)
|
372 |
+
# Person pose example
|
373 |
+
condition_person_exm = gr.Examples(
|
374 |
+
examples=[
|
375 |
+
os.path.join(root_path, "condition", "person", _)
|
376 |
+
for _ in os.listdir(os.path.join(root_path, "condition", "person"))
|
377 |
+
],
|
378 |
+
examples_per_page=4,
|
379 |
+
inputs=cloth_image,
|
380 |
+
label="Condition Reference Person Examples",
|
381 |
+
)
|
382 |
+
# Display text
|
383 |
+
gr.Markdown(
|
384 |
+
'<span style="color: #808080; font-size: small;">*Condition examples come from the Internet. </span>'
|
385 |
+
)
|
386 |
+
|
387 |
+
# Second column
|
388 |
+
with gr.Column(scale=1, min_width=350):
|
389 |
+
# Clothes image
|
390 |
+
with gr.Row():
|
391 |
+
with gr.Column(scale=1, min_width=230):
|
392 |
+
cloth_image = gr.Image(
|
393 |
+
interactive=True, label="Clothes Image", type="filepath"
|
394 |
+
)
|
395 |
+
|
396 |
+
with gr.Row():
|
397 |
+
# Clothes column examples
|
398 |
+
with gr.Row():
|
399 |
+
# Upper clothes examples
|
400 |
condition_upper_exm = gr.Examples(
|
401 |
examples=[
|
402 |
os.path.join(root_path, "condition", "upper", _)
|
|
|
406 |
inputs=cloth_image,
|
407 |
label="Condition Upper Examples",
|
408 |
)
|
409 |
+
# Full-body clothes examples
|
410 |
condition_overall_exm = gr.Examples(
|
411 |
examples=[
|
412 |
os.path.join(root_path, "condition", "overall", _)
|
|
|
416 |
inputs=cloth_image,
|
417 |
label="Condition Overall Examples",
|
418 |
)
|
419 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
420 |
|
421 |
+
|
422 |
+
# ===============================================================
|
423 |
+
# Result panel
|
424 |
+
result_image = gr.Image(interactive=False, label="Result")
|
425 |
+
|
426 |
+
# Submit button
|
427 |
+
submit = gr.Button("Submit")
|
428 |
+
gr.Markdown(
|
429 |
+
'<center><span style="color: #FF0000">!!! Click only Once, Wait for Delay !!!</span></center>'
|
430 |
+
)
|
431 |
+
|
432 |
+
# Advanced options settings
|
433 |
+
gr.Markdown(
|
434 |
+
'<span style="color: #808080; font-size: small;">Advanced options can adjust details:<br>1. `Inference Step` may enhance details;<br>2. `CFG` is highly correlated with saturation;<br>3. `Random seed` may improve pseudo-shadow.</span>'
|
435 |
+
)
|
436 |
+
with gr.Accordion("Advanced Options", open=False):
|
437 |
+
num_inference_steps = gr.Slider(
|
438 |
+
label="Inference Step", minimum=10, maximum=100, step=5, value=50
|
439 |
+
)
|
440 |
+
# Guidance Scale
|
441 |
+
guidance_scale = gr.Slider(
|
442 |
+
label="CFG Strenth", minimum=0.0, maximum=7.5, step=0.5, value=2.5
|
443 |
+
)
|
444 |
+
# Random Seed
|
445 |
+
seed = gr.Slider(
|
446 |
+
label="Seed", minimum=-1, maximum=10000, step=1, value=42
|
447 |
+
)
|
448 |
+
show_type = gr.Radio(
|
449 |
+
label="Show Type",
|
450 |
+
choices=["result only", "input & result", "input & mask & result"],
|
451 |
+
value="result only",
|
452 |
+
)
|
453 |
+
|
454 |
+
# event listener for changes to the image_path input component. Whenever the value of image_path changes (e.g., when a new image is uploaded or selected)
|
455 |
image_path.change(
|
456 |
person_example_fn, inputs=image_path, outputs=person_image
|
457 |
)
|
458 |
|
459 |
+
# when submit button clicked
|
460 |
submit.click(
|
461 |
submit_function,
|
462 |
[
|