gridsearch parameters
Browse files- handcrafted_solution.py +5 -8
- script.py +1 -1
- test_solution.ipynb +0 -0
handcrafted_solution.py
CHANGED
@@ -98,13 +98,11 @@ def convert_entry_to_human_readable(entry):
|
|
98 |
return out
|
99 |
|
100 |
|
101 |
-
def get_vertices_and_edges_from_segmentation(gest_seg_np, edge_th=50.0):
|
102 |
'''Get the vertices and edges from the gestalt segmentation mask of the house'''
|
103 |
# Apex
|
104 |
-
color_range = 4.
|
105 |
connections = []
|
106 |
-
|
107 |
-
deviation_threshold = np.cos(np.deg2rad(5))
|
108 |
|
109 |
apex_centroids, eave_end_point_centroids, apex_mask, eave_end_point_mask = get_vertices(gest_seg_np)
|
110 |
|
@@ -114,7 +112,7 @@ def get_vertices_and_edges_from_segmentation(gest_seg_np, edge_th=50.0):
|
|
114 |
vertex_size = np.zeros(vertices.shape[0])
|
115 |
for i, coords in enumerate(vertices):
|
116 |
# coords = np.round(coords).astype(np.uint32)
|
117 |
-
radius = 30
|
118 |
vertex_size[i] = (scale * radius) ** 2 # because we are using squared distances
|
119 |
|
120 |
for edge_class in ['eave', 'ridge', 'rake', 'valley', 'flashing', 'step_flashing']:
|
@@ -150,7 +148,6 @@ def get_vertices_and_edges_from_segmentation(gest_seg_np, edge_th=50.0):
|
|
150 |
line_directions = np.zeros((len(lines), 2))
|
151 |
for line_idx, line in enumerate(lines):
|
152 |
for x1, y1, x2, y2 in line:
|
153 |
-
extend = 35
|
154 |
if x1 < x2:
|
155 |
x1, y1, x2, y2 = x2, y2, x1, y1
|
156 |
direction = (np.array([x2 - x1, y2 - y1]))
|
@@ -338,7 +335,7 @@ def prune_not_connected(all_3d_vertices, connections_3d):
|
|
338 |
return np.array(new_verts), connected_out
|
339 |
|
340 |
|
341 |
-
def predict(entry, visualize=False) -> Tuple[np.ndarray, List[int]]:
|
342 |
good_entry = convert_entry_to_human_readable(entry)
|
343 |
if 'gestalt' not in good_entry or 'depthcm' not in good_entry or 'K' not in good_entry or 'R' not in good_entry or 't' not in good_entry:
|
344 |
print('Missing required fields in the entry')
|
@@ -354,7 +351,7 @@ def predict(entry, visualize=False) -> Tuple[np.ndarray, List[int]]:
|
|
354 |
gest_seg_np = np.array(gest_seg).astype(np.uint8)
|
355 |
# Metric3D
|
356 |
depth_np = np.array(depth) / 2.5 # 2.5 is the scale estimation coefficient
|
357 |
-
vertices, connections = get_vertices_and_edges_from_segmentation(gest_seg_np)
|
358 |
if (len(vertices) < 2) or (len(connections) < 1):
|
359 |
print(f'Not enough vertices or connections in image {i}')
|
360 |
vert_edge_per_image[i] = np.empty((0, 2)), [], np.empty((0, 3))
|
|
|
98 |
return out
|
99 |
|
100 |
|
101 |
+
def get_vertices_and_edges_from_segmentation(gest_seg_np, *, color_range=4., point_radius=30, max_angle=5., extend = 35, **kwargs):
|
102 |
'''Get the vertices and edges from the gestalt segmentation mask of the house'''
|
103 |
# Apex
|
|
|
104 |
connections = []
|
105 |
+
deviation_threshold = np.cos(np.deg2rad(max_angle))
|
|
|
106 |
|
107 |
apex_centroids, eave_end_point_centroids, apex_mask, eave_end_point_mask = get_vertices(gest_seg_np)
|
108 |
|
|
|
112 |
vertex_size = np.zeros(vertices.shape[0])
|
113 |
for i, coords in enumerate(vertices):
|
114 |
# coords = np.round(coords).astype(np.uint32)
|
115 |
+
radius = point_radius # np.clip(int(max_depth//2 + depth_np[coords[1], coords[0]]), 10, 30)#int(np.clip(max_depth - depth_np[coords[1], coords[0]], 10, 20))
|
116 |
vertex_size[i] = (scale * radius) ** 2 # because we are using squared distances
|
117 |
|
118 |
for edge_class in ['eave', 'ridge', 'rake', 'valley', 'flashing', 'step_flashing']:
|
|
|
148 |
line_directions = np.zeros((len(lines), 2))
|
149 |
for line_idx, line in enumerate(lines):
|
150 |
for x1, y1, x2, y2 in line:
|
|
|
151 |
if x1 < x2:
|
152 |
x1, y1, x2, y2 = x2, y2, x1, y1
|
153 |
direction = (np.array([x2 - x1, y2 - y1]))
|
|
|
335 |
return np.array(new_verts), connected_out
|
336 |
|
337 |
|
338 |
+
def predict(entry, visualize=False, **kwargs) -> Tuple[np.ndarray, List[int]]:
|
339 |
good_entry = convert_entry_to_human_readable(entry)
|
340 |
if 'gestalt' not in good_entry or 'depthcm' not in good_entry or 'K' not in good_entry or 'R' not in good_entry or 't' not in good_entry:
|
341 |
print('Missing required fields in the entry')
|
|
|
351 |
gest_seg_np = np.array(gest_seg).astype(np.uint8)
|
352 |
# Metric3D
|
353 |
depth_np = np.array(depth) / 2.5 # 2.5 is the scale estimation coefficient
|
354 |
+
vertices, connections = get_vertices_and_edges_from_segmentation(gest_seg_np, **kwargs)
|
355 |
if (len(vertices) < 2) or (len(connections) < 1):
|
356 |
print(f'Not enough vertices or connections in image {i}')
|
357 |
vert_edge_per_image[i] = np.empty((0, 2)), [], np.empty((0, 3))
|
script.py
CHANGED
@@ -127,7 +127,7 @@ if __name__ == "__main__":
|
|
127 |
with ProcessPoolExecutor(max_workers=8) as pool:
|
128 |
results = []
|
129 |
for i, sample in enumerate(tqdm(dataset)):
|
130 |
-
results.append(pool.submit(predict, sample, visualize=False))
|
131 |
|
132 |
for i, result in enumerate(tqdm(results)):
|
133 |
key, pred_vertices, pred_edges = result.result()
|
|
|
127 |
with ProcessPoolExecutor(max_workers=8) as pool:
|
128 |
results = []
|
129 |
for i, sample in enumerate(tqdm(dataset)):
|
130 |
+
results.append(pool.submit(predict, sample, visualize=False, point_radius=25, max_angle=9, extend=30))
|
131 |
|
132 |
for i, result in enumerate(tqdm(results)):
|
133 |
key, pred_vertices, pred_edges = result.result()
|
test_solution.ipynb
CHANGED
The diff for this file is too large to render.
See raw diff
|
|