improve UI
app.py CHANGED
@@ -455,7 +455,7 @@ def run_fn(
 def make_input_images_section():
     gr.Markdown('### Input Images')
     input_gallery = gr.Gallery(value=None, label="Select images", show_label=False, elem_id="images", columns=[3], rows=[1], object_fit="contain", height="auto", type="pil", show_share_button=False)
-    submit_button = gr.Button("🔴RUN", elem_id="submit_button")
+    submit_button = gr.Button("🔴 RUN", elem_id="submit_button")
     clear_images_button = gr.Button("🗑️Clear", elem_id='clear_button')
     return input_gallery, submit_button, clear_images_button
 
@@ -465,7 +465,7 @@ def make_input_video_section():
     gr.Markdown('_image backbone model is used to extract features from each frame, NCUT is computed on all frames_')
     # max_frames_number = gr.Number(100, label="Max frames", elem_id="max_frames")
     max_frames_number = gr.Slider(1, 200, step=1, label="Max frames", value=100, elem_id="max_frames")
-    submit_button = gr.Button("🔴RUN", elem_id="submit_button")
+    submit_button = gr.Button("🔴 RUN", elem_id="submit_button")
     clear_images_button = gr.Button("🗑️Clear", elem_id='clear_button')
     return input_gallery, submit_button, clear_images_button, max_frames_number
 
@@ -478,6 +478,10 @@ def make_example_images_section():
         fn=lambda: gr.update(visible=False),
         outputs=example_gallery
     )
+    hide_button.click(
+        fn=lambda: gr.update(visible=False),
+        outputs=hide_button
+    )
     return load_images_button, example_gallery, hide_button
 
 def make_example_video_section():
@@ -615,8 +619,12 @@ def make_parameters_section():
             perplexity_slider, n_neighbors_slider, min_dist_slider,
             sampling_method_dropdown]
 
-
-with gr.Blocks() as demo:
+demo = gr.Blocks(
+    theme=gr.themes.Base(spacing_size='md', text_size='lg', primary_hue='blue', neutral_hue='slate', secondary_hue='pink'),
+    fill_width=False,
+    title="ncut-pytorch",
+)
+with demo:
     with gr.Tab('AlignedCut'):
 
         with gr.Row():
@@ -829,7 +837,7 @@ with gr.Blocks() as demo:
         with gr.Column(scale=5, min_width=200) as col:
             gr.Markdown(f'### Output Images')
             output_gallery = gr.Gallery(value=[], label="NCUT Embedding", show_label=False, elem_id=f"ncut{i_model}", columns=[3], rows=[1], object_fit="contain", height="auto")
-            submit_button = gr.Button("🔴RUN", elem_id=f"submit_button{i_model}")
+            submit_button = gr.Button("🔴 RUN", elem_id=f"submit_button{i_model}")
             [
             model_dropdown, layer_slider, node_type_dropdown, num_eig_slider,
             affinity_focal_gamma_slider, num_sample_ncut_slider, knn_ncut_slider,
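For reference, a minimal self-contained sketch of the two patterns this commit relies on: building gr.Blocks() explicitly so a theme and title can be passed, then populating it with `with demo:`, and wiring a button to hide itself via gr.update(visible=False). The component names and the 'Example' tab below are illustrative placeholders, not the actual layout of app.py.

import gradio as gr

# Build the Blocks container up front so theme/title (and fill_width, on recent
# Gradio releases) can be passed as constructor arguments, then populate it
# inside a `with` block, the same pattern the diff switches to.
demo = gr.Blocks(
    theme=gr.themes.Base(spacing_size='md', text_size='lg',
                         primary_hue='blue', neutral_hue='slate', secondary_hue='pink'),
    title="ncut-pytorch",
)

with demo:
    with gr.Tab('Example'):  # placeholder tab, not the app's real layout
        gallery = gr.Gallery(value=[], label="Images", columns=[3], object_fit="contain", height="auto")
        submit_button = gr.Button("🔴 RUN", elem_id="submit_button")
        hide_button = gr.Button("Hide this button")
        # A click handler with no inputs can return a visibility update targeting
        # the button itself, mirroring the hide_button.click(...) wiring added above.
        hide_button.click(fn=lambda: gr.update(visible=False), outputs=hide_button)

if __name__ == "__main__":
    demo.launch()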