huzey committed on
Commit
edc0dc6
1 Parent(s): 8c6fc00

optimize logging

Browse files
Files changed (1) hide show
  1. app.py +23 -15
app.py CHANGED
@@ -25,7 +25,7 @@ def compute_ncut(
25
  n_neighbors=150,
26
  min_dist=0.1,
27
  ):
28
-
29
  start = time.time()
30
  eigvecs, eigvals = NCUT(
31
  num_eig=num_eig,
@@ -34,7 +34,8 @@ def compute_ncut(
34
  affinity_focal_gamma=affinity_focal_gamma,
35
  knn=knn_ncut,
36
  ).fit_transform(features.reshape(-1, features.shape[-1]))
37
- print(f"NCUT time: {time.time() - start:.2f}s")
 
38
 
39
  start = time.time()
40
  if embedding_method == "UMAP":
@@ -44,7 +45,8 @@ def compute_ncut(
44
  min_dist=min_dist,
45
  device="cuda" if torch.cuda.is_available() else "cpu",
46
  )
47
- print(f"UMAP time: {time.time() - start:.2f}s")
 
48
  elif embedding_method == "t-SNE":
49
  X_3d, rgb = rgb_from_tsne_3d(
50
  eigvecs,
@@ -53,12 +55,13 @@ def compute_ncut(
53
  knn=knn_tsne,
54
  device="cuda" if torch.cuda.is_available() else "cpu",
55
  )
56
- print(f"t-SNE time: {time.time() - start:.2f}s")
 
57
  else:
58
  raise ValueError(f"Embedding method {embedding_method} not supported.")
59
 
60
  rgb = rgb.reshape(features.shape[:3] + (3,))
61
- return rgb
62
 
63
 
64
  def dont_use_too_much_green(image_rgb):
@@ -85,7 +88,7 @@ downscaled_outputs = ['./images/ncut_0_small.jpg', './images/ncut_1_small.jpg',
85
 
86
  example_items = downscaled_images[:3] + downscaled_outputs[:3]
87
 
88
- def main_fn(
89
  images,
90
  model_name="SAM(sam_vit_b)",
91
  layer=-1,
@@ -101,7 +104,7 @@ def main_fn(
101
  n_neighbors=500,
102
  min_dist=0.1,
103
  ):
104
-
105
  if perplexity >= num_sample_tsne or n_neighbors >= num_sample_tsne:
106
  # raise gr.Error("Perplexity must be less than the number of samples for t-SNE.")
107
  gr.Warning("Perplexity/n_neighbors must be less than the number of samples.\n" f"Setting to {num_sample_tsne-1}.")
@@ -117,9 +120,10 @@ def main_fn(
117
  features = extract_features(
118
  images, model_name=model_name, node_type=node_type, layer=layer
119
  )
120
- print(f"Feature extraction time (gpu): {time.time() - start:.2f}s")
 
121
 
122
- rgb = compute_ncut(
123
  features,
124
  num_eig=num_eig,
125
  num_sample_ncut=num_sample_ncut,
@@ -132,20 +136,21 @@ def main_fn(
132
  n_neighbors=n_neighbors,
133
  min_dist=min_dist,
134
  )
 
135
  rgb = dont_use_too_much_green(rgb)
136
- return to_pil_images(rgb), []
137
 
138
  @spaces.GPU(duration=10)
139
  def quick_run(*args, **kwargs):
140
- return main_fn(*args, **kwargs)
141
 
142
  @spaces.GPU(duration=30)
143
  def long_run(*args, **kwargs):
144
- return main_fn(*args, **kwargs)
145
 
146
  @spaces.GPU(duration=120)
147
  def super_duper_long_run(*args, **kwargs):
148
- return main_fn(*args, **kwargs)
149
 
150
  def run_fn(
151
  images,
@@ -227,6 +232,9 @@ with gr.Blocks() as demo:
227
  n_neighbors_slider = gr.Slider(10, 500, step=10, label="n_neighbors (UMAP)", value=150, elem_id="n_neighbors")
228
  min_dist_slider = gr.Slider(0.1, 1, step=0.1, label="min_dist (UMAP)", value=0.1, elem_id="min_dist")
229
 
 
 
 
230
  def load_default_images():
231
  return default_images, default_outputs, []
232
 
@@ -236,14 +244,14 @@ with gr.Blocks() as demo:
236
  load_images_button.click(load_default_images, outputs=[input_gallery, output_gallery, example_gallery])
237
  clear_images_button.click(empty_input_and_output, outputs=[input_gallery, output_gallery, example_gallery])
238
  submit_button.click(
239
- main_fn,
240
  inputs=[
241
  input_gallery, model_dropdown, layer_slider, num_eig_slider, node_type_dropdown,
242
  affinity_focal_gamma_slider, num_sample_ncut_slider, knn_ncut_slider,
243
  embedding_method_dropdown, num_sample_tsne_slider, knn_tsne_slider,
244
  perplexity_slider, n_neighbors_slider, min_dist_slider
245
  ],
246
- outputs=[output_gallery, example_gallery]
247
  )
248
 
249
 
 
25
  n_neighbors=150,
26
  min_dist=0.1,
27
  ):
28
+ logging_str = ""
29
  start = time.time()
30
  eigvecs, eigvals = NCUT(
31
  num_eig=num_eig,
 
34
  affinity_focal_gamma=affinity_focal_gamma,
35
  knn=knn_ncut,
36
  ).fit_transform(features.reshape(-1, features.shape[-1]))
37
+ # print(f"NCUT time: {time.time() - start:.2f}s")
38
+ logging_str += f"NCUT time: {time.time() - start:.2f}s\n"
39
 
40
  start = time.time()
41
  if embedding_method == "UMAP":
 
45
  min_dist=min_dist,
46
  device="cuda" if torch.cuda.is_available() else "cpu",
47
  )
48
+ # print(f"UMAP time: {time.time() - start:.2f}s")
49
+ logging_str += f"UMAP time: {time.time() - start:.2f}s\n"
50
  elif embedding_method == "t-SNE":
51
  X_3d, rgb = rgb_from_tsne_3d(
52
  eigvecs,
 
55
  knn=knn_tsne,
56
  device="cuda" if torch.cuda.is_available() else "cpu",
57
  )
58
+ # print(f"t-SNE time: {time.time() - start:.2f}s")
59
+ logging_str += f"t-SNE time: {time.time() - start:.2f}s\n"
60
  else:
61
  raise ValueError(f"Embedding method {embedding_method} not supported.")
62
 
63
  rgb = rgb.reshape(features.shape[:3] + (3,))
64
+ return rgb, logging_str
65
 
66
 
67
  def dont_use_too_much_green(image_rgb):
 
88
 
89
  example_items = downscaled_images[:3] + downscaled_outputs[:3]
90
 
91
+ def ncut_run(
92
  images,
93
  model_name="SAM(sam_vit_b)",
94
  layer=-1,
 
104
  n_neighbors=500,
105
  min_dist=0.1,
106
  ):
107
+ logging_str = ""
108
  if perplexity >= num_sample_tsne or n_neighbors >= num_sample_tsne:
109
  # raise gr.Error("Perplexity must be less than the number of samples for t-SNE.")
110
  gr.Warning("Perplexity/n_neighbors must be less than the number of samples.\n" f"Setting to {num_sample_tsne-1}.")
 
120
  features = extract_features(
121
  images, model_name=model_name, node_type=node_type, layer=layer
122
  )
123
+ # print(f"Feature extraction time (gpu): {time.time() - start:.2f}s")
124
+ logging_str += f"Backbone time: {time.time() - start:.2f}s\n"
125
 
126
+ rgb, _logging_str = compute_ncut(
127
  features,
128
  num_eig=num_eig,
129
  num_sample_ncut=num_sample_ncut,
 
136
  n_neighbors=n_neighbors,
137
  min_dist=min_dist,
138
  )
139
+ logging_str += _logging_str
140
  rgb = dont_use_too_much_green(rgb)
141
+ return to_pil_images(rgb), [], logging_str
142
 
143
  @spaces.GPU(duration=10)
144
  def quick_run(*args, **kwargs):
145
+ return ncut_run(*args, **kwargs)
146
 
147
  @spaces.GPU(duration=30)
148
  def long_run(*args, **kwargs):
149
+ return ncut_run(*args, **kwargs)
150
 
151
  @spaces.GPU(duration=120)
152
  def super_duper_long_run(*args, **kwargs):
153
+ return ncut_run(*args, **kwargs)
154
 
155
  def run_fn(
156
  images,
 
232
  n_neighbors_slider = gr.Slider(10, 500, step=10, label="n_neighbors (UMAP)", value=150, elem_id="n_neighbors")
233
  min_dist_slider = gr.Slider(0.1, 1, step=0.1, label="min_dist (UMAP)", value=0.1, elem_id="min_dist")
234
 
235
+ # logging text box
236
+ logging_text = gr.Textbox("logging output", label="Logging", elem_id="logging", type="text", placeholder="Logging information", default="")
237
+
238
  def load_default_images():
239
  return default_images, default_outputs, []
240
 
 
244
  load_images_button.click(load_default_images, outputs=[input_gallery, output_gallery, example_gallery])
245
  clear_images_button.click(empty_input_and_output, outputs=[input_gallery, output_gallery, example_gallery])
246
  submit_button.click(
247
+ run_fn,
248
  inputs=[
249
  input_gallery, model_dropdown, layer_slider, num_eig_slider, node_type_dropdown,
250
  affinity_focal_gamma_slider, num_sample_ncut_slider, knn_ncut_slider,
251
  embedding_method_dropdown, num_sample_tsne_slider, knn_tsne_slider,
252
  perplexity_slider, n_neighbors_slider, min_dist_slider
253
  ],
254
+ outputs=[output_gallery, example_gallery, logging_text]
255
  )
256
 
257