improve loading
app.py CHANGED
@@ -18,6 +18,7 @@ import torch.nn.functional as F
 from PIL import Image
 import numpy as np
 import time
+import threading
 
 import gradio as gr
 
@@ -518,7 +519,7 @@ def make_dataset_images_section(advanced=False):
 idx = dataset_names.index(dataset_name)
 num_classes = dataset_classes[idx]
 return gr.Textbox(label="Class to select", value="0,1,2", elem_id="filter_by_class_text", info=f"e.g. `0,1,2`. ({num_classes} classes)", visible=is_filter)
-
+filter_by_class_checkbox.change(fn=change_filter_by_class, inputs=[filter_by_class_checkbox, dataset_dropdown], outputs=filter_by_class_text)
 
 def change_random_seed(is_random):
 return gr.Slider(0, 1000, step=1, label="Random seed", value=1, elem_id="random_seed", visible=is_random)
@@ -611,10 +612,10 @@ def make_parameters_section():
 sampling_method_dropdown = gr.Dropdown(["fps", "random"], label="NCUT: Sampling method", value="fps", elem_id="sampling_method", info="Nyström approximation")
 knn_ncut_slider = gr.Slider(1, 100, step=1, label="NCUT: KNN", value=10, elem_id="knn_ncut", info="Nyström approximation")
 embedding_method_dropdown = gr.Dropdown(["tsne_3d", "umap_3d", "umap_shpere", "tsne_2d", "umap_2d"], label="Coloring method", value="tsne_3d", elem_id="embedding_method")
-num_sample_tsne_slider = gr.Slider(100,
+num_sample_tsne_slider = gr.Slider(100, 10000, step=100, label="t-SNE/UMAP: num_sample", value=300, elem_id="num_sample_tsne", info="Nyström approximation")
 knn_tsne_slider = gr.Slider(1, 100, step=1, label="t-SNE/UMAP: KNN", value=10, elem_id="knn_tsne", info="Nyström approximation")
-perplexity_slider = gr.Slider(10,
-n_neighbors_slider = gr.Slider(10,
+perplexity_slider = gr.Slider(10, 1000, step=10, label="t-SNE: Perplexity", value=150, elem_id="perplexity")
+n_neighbors_slider = gr.Slider(10, 1000, step=10, label="UMAP: n_neighbors", value=150, elem_id="n_neighbors")
 min_dist_slider = gr.Slider(0.1, 1, step=0.1, label="UMAP: min_dist", value=0.1, elem_id="min_dist")
 return [model_dropdown, layer_slider, node_type_dropdown, num_eig_slider,
 affinity_focal_gamma_slider, num_sample_ncut_slider, knn_ncut_slider,
@@ -894,8 +895,8 @@ with demo:
 
 
 if USE_HUGGINGFACE_SPACE:
-download_all_models()
-download_all_datasets()
+threading.Thread(target=download_all_models).start()
+threading.Thread(target=download_all_datasets).start()
 demo.launch()
 else:
 demo.launch(share=True)
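
The net effect of the last hunk is that the model and dataset downloads no longer block startup: they are moved onto background threads so demo.launch() runs immediately and the Space's UI is reachable while files are still being fetched. A minimal, self-contained sketch of that pattern follows; the two download functions are placeholders standing in for the Space's real download_all_models()/download_all_datasets(), and the one-widget Blocks app is likewise only illustrative.

import threading
import time

import gradio as gr

def download_all_models():
    # placeholder: stands in for fetching model checkpoints
    time.sleep(5)

def download_all_datasets():
    # placeholder: stands in for fetching dataset archives
    time.sleep(5)

with gr.Blocks() as demo:
    gr.Markdown("UI is available immediately; downloads continue in the background.")

# Start the slow downloads off the main thread, then launch right away,
# mirroring the threading.Thread(...).start() calls added in the diff.
threading.Thread(target=download_all_models).start()
threading.Thread(target=download_all_datasets).start()
demo.launch()

One consequence of this pattern is that early requests can arrive before the downloads have finished, so any code path that reads the downloaded files should be prepared to wait or fall back until they exist.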