huzey committed on
Commit a48bd1b
1 Parent(s): e9f5121

update parameters

Files changed (1)
  1. app.py +5 -5
app.py CHANGED
@@ -592,9 +592,9 @@ def to_pil_images(images):
 def main_fn(
     images,
     model_name="SAM(sam_vit_b)",
-    node_type="block",
     layer=-1,
     num_eig=100,
+    node_type="block",
     affinity_focal_gamma=0.3,
     num_sample_ncut=10000,
     knn_ncut=10,
@@ -628,21 +628,21 @@ def main_fn(
     rgb = dont_use_too_much_green(rgb)
     return to_pil_images(rgb)
 
-default_images = ['./images/image_0.jpg', './images/image_1.jpg', './images/image_2.jpg', './images/image_3.jpg', './images/image_4.jpg', './images/image_5.jpg']
-default_outputs = ['./images/ncut_0.jpg', './images/ncut_1.jpg', './images/ncut_2.jpg', './images/ncut_3.jpg', './images/ncut_4.jpg', './images/ncut_5.jpg']
+default_images = ['./images/image_0.jpg', './images/image_1.jpg', './images/image_2.jpg', './images/image_3.jpg', './images/image_5.jpg']
+default_outputs = ['./images/ncut_0.jpg', './images/ncut_1.jpg', './images/ncut_2.jpg', './images/ncut_3.jpg', './images/ncut_5.jpg']
 
 demo = gr.Interface(
     main_fn,
     [
         gr.Gallery(value=default_images, label="Select images", show_label=False, elem_id="images", columns=[3], rows=[1], object_fit="contain", height="auto", type="pil"),
         gr.Dropdown(["MobileSAM", "SAM(sam_vit_b)", "DiNO(dinov2_vitb14_reg)", "CLIP(openai/clip-vit-base-patch16)"], label="Model", value="MobileSAM", elem_id="model_name"),
-        gr.Dropdown(["attn", "mlp", "block"], label="Node type", value="block", elem_id="node_type", info="attn: attention output, mlp: mlp output, block: sum of residual stream"),
         gr.Slider(0, 11, step=1, label="Layer", value=11, elem_id="layer", info="which layer of the image backbone features"),
         gr.Slider(1, 1000, step=1, label="Number of eigenvectors", value=100, elem_id="num_eig", info='increase for more object parts, decrease for whole object'),
-        gr.Slider(0.01, 1, step=0.01, label="Affinity focal gamma", value=0.3, elem_id="affinity_focal_gamma", info="decrease for more aggressive cleaning on the affinity matrix"),
     ],
     gr.Gallery(value=default_outputs, label="NCUT Embedding", show_label=False, elem_id="ncut", columns=[3], rows=[1], object_fit="contain", height="auto"),
     additional_inputs=[
+        gr.Dropdown(["attn", "mlp", "block"], label="Node type", value="block", elem_id="node_type", info="attn: attention output, mlp: mlp output, block: sum of residual stream"),
+        gr.Slider(0.01, 1, step=0.01, label="Affinity focal gamma", value=0.3, elem_id="affinity_focal_gamma", info="decrease for more aggressive cleaning on the affinity matrix"),
         gr.Slider(100, 10000, step=100, label="num_sample (NCUT)", value=5000, elem_id="num_sample_ncut", info="for Nyström approximation"),
         gr.Slider(1, 100, step=1, label="KNN (NCUT)", value=10, elem_id="knn_ncut", info="for Nyström approximation"),
         gr.Slider(100, 1000, step=100, label="num_sample (t-SNE)", value=500, elem_id="num_sample_tsne", info="for Nyström approximation. Adding will slow down t-SNE quite a lot"),
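
Context on the change (not part of the commit): in gr.Interface, components passed via additional_inputs are rendered in a collapsed "Additional Inputs" accordion below the main inputs, and their values are passed to the function after the positional inputs, in order. That is why the first hunk also reorders main_fn's signature so node_type follows num_eig. Below is a minimal sketch of this pattern, assuming a recent Gradio version; the function and component labels are illustrative, not taken from app.py.

import gradio as gr

# Illustrative sketch, not code from this repo: components in `additional_inputs`
# appear in a collapsed accordion, and their values are appended after the main
# inputs when the function is called, so the parameter order must match.
def run(model_name, layer, num_eig, node_type="block", affinity_focal_gamma=0.3):
    return (f"model={model_name}, layer={layer}, num_eig={num_eig}, "
            f"node_type={node_type}, gamma={affinity_focal_gamma}")

demo = gr.Interface(
    run,
    inputs=[
        gr.Dropdown(["MobileSAM", "SAM(sam_vit_b)"], value="MobileSAM", label="Model"),
        gr.Slider(0, 11, step=1, value=11, label="Layer"),
        gr.Slider(1, 1000, step=1, value=100, label="Number of eigenvectors"),
    ],
    outputs=gr.Textbox(label="Selected parameters"),
    additional_inputs=[
        gr.Dropdown(["attn", "mlp", "block"], value="block", label="Node type"),
        gr.Slider(0.01, 1, step=0.01, value=0.3, label="Affinity focal gamma"),
    ],
)

if __name__ == "__main__":
    demo.launch()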