added clustering and merge_th=100
Files changed:
- handcrafted_solution.py  +33 -1
- script.py  +1 -1
handcrafted_solution.py
CHANGED
@@ -13,6 +13,7 @@ from hoho.color_mappings import gestalt_color_mapping
 from hoho.read_write_colmap import read_cameras_binary, read_images_binary, read_points3D_binary
 from scipy.spatial import KDTree
 from scipy.spatial.distance import cdist
+import scipy.cluster.hierarchy as shc

 apex_color = gestalt_color_mapping["apex"]
 eave_end_point = gestalt_color_mapping["eave_end_point"]

@@ -437,6 +438,33 @@ def predict(entry, visualize=False, scale_estimation_coefficient=2.5, **kwargs)
     image_dict = {}
     for k, v in entry["images"].items():
         image_dict[v.name] = v
+    points = [v.xyz for k, v in entry["points3d"].items()]
+    too_big = len(points) > 25000
+
+    if not too_big:
+        points = np.array(points)
+        point_keys = [k for k, v in entry["points3d"].items()]
+        point_keys = np.array(point_keys)
+
+        # print(len(points))
+
+        clustered = shc.fclusterdata(points, 100, criterion='distance')
+        clustered_indices = np.argsort(clustered)
+
+        points = points[clustered_indices]
+        point_keys = point_keys[clustered_indices]
+        clustered = clustered[clustered_indices]
+
+        _, cluster_indices = np.unique(clustered, return_index=True)
+
+        clustered_points = np.split(points, cluster_indices[1:])
+        clustered_keys = np.split(point_keys, cluster_indices[1:])
+
+        biggest_cluster_index = np.argmax([len(i) for i in clustered_points])
+        # biggest_cluster = clustered_points[biggest_cluster_index]
+        biggest_cluster_keys = clustered_keys[biggest_cluster_index]
+        biggest_cluster_keys = set(biggest_cluster_keys)
+
     for i, (gest, depthcm, K, R, t, imagekey) in enumerate(zip(entry['gestalt'],
                                                                entry['depthcm'],
                                                                entry['K'],

@@ -455,7 +483,11 @@ def predict(entry, visualize=False, scale_estimation_coefficient=2.5, **kwargs)
             continue
         belonging_points = []
         for i in image_dict[imagekey].point3D_ids[np.where(image_dict[imagekey].point3D_ids != -1)]:
-            belonging_points.append(entry["points3d"][i])
+            if not too_big:
+                if i in biggest_cluster_keys:
+                    belonging_points.append(entry["points3d"][i])
+            else:
+                belonging_points.append(entry["points3d"][i])

         if len(belonging_points) < 1:
             print(f'No 3D points in image {i}')
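For reference, a self-contained sketch of what the new clustering block does: cut a single-linkage dendrogram at a distance threshold of 100, keep only the point3D ids that land in the largest cluster, and treat everything else as an SfM outlier. The synthetic points3d dictionary and the specific numbers below are made up for illustration; only the scipy/numpy calls mirror the diff.

import numpy as np
import scipy.cluster.hierarchy as shc

# Toy stand-in for entry["points3d"]: a dense "building" cluster plus a
# handful of far-away outliers (purely illustrative data).
rng = np.random.default_rng(0)
points3d = {i: rng.normal(0.0, 5.0, 3) for i in range(200)}
points3d.update({200 + i: rng.normal(500.0, 5.0, 3) for i in range(20)})

point_keys = np.array(list(points3d.keys()))
xyz = np.array([points3d[k] for k in point_keys])

# fclusterdata assigns a flat cluster label to every point; with
# criterion='distance' the dendrogram is cut at the given threshold
# (100, the same value used in the diff).
clustered = shc.fclusterdata(xyz, 100, criterion='distance')

# Sort by label so each cluster becomes a contiguous block, then split on
# the first index of each unique label -- the same argsort/unique/split
# trick as in the diff.
order = np.argsort(clustered)
point_keys, clustered = point_keys[order], clustered[order]
_, cluster_starts = np.unique(clustered, return_index=True)
clustered_keys = np.split(point_keys, cluster_starts[1:])

# Keep only ids from the dominant cluster.
biggest_cluster_keys = set(max(clustered_keys, key=len).tolist())
kept = [k for k in points3d if k in biggest_cluster_keys]
print(f"kept {len(kept)} of {len(points3d)} points")  # expect 200 of 220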
script.py
CHANGED
@@ -132,7 +132,7 @@ if __name__ == "__main__":
             point_radius=25,
             max_angle=15,
             extend=30,
-            merge_th=
+            merge_th=100.0,
             min_missing_distance=30000000.0,
             scale_estimation_coefficient=2.54,
         ))
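The merge_th keyword is forwarded to predict through **kwargs; judging by its name and the other distance-like parameters in this call (point_radius=25, min_missing_distance=30000000.0), it presumably acts as a distance threshold below which nearby candidate vertices are merged. The actual merge logic is not shown in this diff, so the following is only a hypothetical sketch of that kind of threshold merge, reusing the KDTree import that handcrafted_solution.py already has; merge_vertices and its greedy centroid strategy are an illustration, not the project's code.

import numpy as np
from scipy.spatial import KDTree

def merge_vertices(vertices, merge_th=100.0):
    # Greedily group vertices that lie within merge_th of each other and
    # replace each group by its centroid (illustrative only).
    vertices = np.asarray(vertices, dtype=float)
    tree = KDTree(vertices)
    neighbour_lists = tree.query_ball_point(vertices, r=merge_th)
    merged = []
    used = np.zeros(len(vertices), dtype=bool)
    for i, neighbours in enumerate(neighbour_lists):
        if used[i]:
            continue
        group = [j for j in neighbours if not used[j]]
        used[group] = True
        merged.append(vertices[group].mean(axis=0))
    return np.array(merged)

verts = np.array([[0.0, 0.0, 0.0], [30.0, 0.0, 0.0], [500.0, 0.0, 0.0]])
print(merge_vertices(verts, merge_th=100.0))  # the first two collapse, the far one survives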