scale_estimation_coefficient=4.5, added hip, reordered operations to remove loops
Files changed:
- handcrafted_solution.py  +67 −63
- script.py  +7 −1
- test_solution.ipynb  +17 −17
handcrafted_solution.py
CHANGED
@@ -101,7 +101,7 @@ def infer_vertices(image_gestalt, *, color_range=4.):
    return inferred_centroids[1:], intersection_mask


-def get_missed_vertices(vertices, inferred_centroids, *,
+def get_missed_vertices(vertices, inferred_centroids, *, min_missing_distance=200.0, **kwargs):
    vertices = KDTree(vertices)
    closest = vertices.query(inferred_centroids, k=1, distance_upper_bound=min_missing_distance)
    missed_points = inferred_centroids[closest[1] == len(vertices.data)]

@@ -153,9 +153,19 @@ def get_vertices_and_edges_from_segmentation(gest_seg_np, *, color_range=4., poi
        # radius = point_radius # np.clip(int(max_depth//2 + depth_np[coords[1], coords[0]]), 10, 30)#int(np.clip(max_depth - depth_np[coords[1], coords[0]], 10, 20))
        # vertex_size[i] = (scale * radius) ** 2 # because we are using squared distances

-
-
-
+    if len(vertices.data) < 2:
+        return [], []
+    edges = []
+    line_directions = []
+
+    rho = 1  # distance resolution in pixels of the Hough grid
+    theta = np.pi / 180  # angular resolution in radians of the Hough grid
+    threshold = 20  # minimum number of votes (intersections in Hough grid cell)
+    min_line_length = 60  # minimum number of pixels making up a line
+    max_line_gap = 40  # maximum gap in pixels between connectable line segments
+
+    for edge_class in ['eave', 'ridge', 'rake', 'valley', 'flashing', 'step_flashing', 'hip']:
+
        edge_color = np.array(gestalt_color_mapping[edge_class])

        mask = cv2.inRange(gest_seg_np,

@@ -167,31 +177,22 @@ def get_vertices_and_edges_from_segmentation(gest_seg_np, *, color_range=4., poi
        if not np.any(mask):
            continue

-        rho = 1  # distance resolution in pixels of the Hough grid
-        theta = np.pi / 180  # angular resolution in radians of the Hough grid
-        threshold = 20  # minimum number of votes (intersections in Hough grid cell)
-        min_line_length = 60  # minimum number of pixels making up a line
-        max_line_gap = 40  # maximum gap in pixels between connectable line segments
-
        # Run Hough on edge detected image
        # Output "lines" is an array containing endpoints of detected line segments
        cv2.GaussianBlur(mask, (11, 11), 0, mask)
        lines = cv2.HoughLinesP(mask, rho, theta, threshold, np.array([]),
                                min_line_length, max_line_gap)

-        edges = []
-
        if lines is None:
            continue

-        line_directions = np.zeros((len(lines), 2))
        for line_idx, line in enumerate(lines):
            for x1, y1, x2, y2 in line:
                if x1 < x2:
                    x1, y1, x2, y2 = x2, y2, x1, y1
                direction = (np.array([x2 - x1, y2 - y1]))
                direction = direction / np.linalg.norm(direction)
-                line_directions
+                line_directions.append(direction)

                direction = extend * direction

@@ -200,64 +201,67 @@ def get_vertices_and_edges_from_segmentation(gest_seg_np, *, color_range=4., poi

                edges.append((x1, y1, x2, y2))

-
-
-
-
+    edges = np.array(edges).astype(np.float64)
+    line_directions = np.array(line_directions).astype(np.float64)
+    if len(edges) < 1:
+        return [], []
+    # calculate the distances between the vertices and the edge ends

-
-
+    begin_edges = KDTree(edges[:, :2])
+    end_edges = KDTree(edges[:, 2:])

-
-
+    begin_indices = begin_edges.query_ball_tree(vertices, point_radius)
+    end_indices = end_edges.query_ball_tree(vertices, point_radius)

-
+    line_indices = np.where(np.array([len(i) and len(j) for i, j in zip(begin_indices, end_indices)]))[0]

-
-
-
-
-
-
-
-
-
+    # create all possible connections between begin and end candidates that correspond to a line
+    begin_vertex_list = []
+    end_vertex_list = []
+    line_idx_list = []
+    for line_idx in line_indices:
+        begin_vertex, end_vertex = begin_indices[line_idx], end_indices[line_idx]
+        begin_vertex, end_vertex = np.meshgrid(begin_vertex, end_vertex)
+        begin_vertex_list.extend(begin_vertex.flatten())
+        end_vertex_list.extend(end_vertex.flatten())

-
+        line_idx_list.extend([line_idx] * len(begin_vertex.flatten()))

-
-
+    line_idx_list = np.array(line_idx_list)
+    all_connections = np.array([begin_vertex_list, end_vertex_list])

-
-
-
-
-
+    # decrease the number of possible connections to reduce number of calculations
+    possible_connections = np.unique(all_connections, axis=1)
+    possible_connections = np.sort(possible_connections, axis=0)
+    possible_connections = np.unique(possible_connections, axis=1)
+    possible_connections = possible_connections[:, possible_connections[0, :] != possible_connections[1, :]]

-
-
+    if possible_connections.shape[1] < 1:
+        return [], []
+
+    # precalculate the possible direction vectors
+    possible_direction_vectors = vertices.data[possible_connections[0]] - vertices.data[possible_connections[1]]
+    possible_direction_vectors = possible_direction_vectors / np.linalg.norm(possible_direction_vectors, axis=1)[:,
+                                                                             np.newaxis]

-
-
-
-
-
-
-
-        for
-            if i ==
-
-
-
-
-
-
-
-
-
-            line_deviations = np.abs(np.dot(line_directions[owned_lines_per_possible_connection], possible_direction_vectors[fitted_line_idx]))
-            if np.any(line_deviations > deviation_threshold):
-                connections.append(possible_connections[:, fitted_line_idx])
+    owned_lines_per_possible_connections = [list() for i in range(possible_connections.shape[1])]
+
+    # assign lines to possible connections
+    for line_idx, i, j in zip(line_idx_list, begin_vertex_list, end_vertex_list):
+        if i == j:
+            continue
+        i, j = min(i, j), max(i, j)
+        for connection_idx, connection in enumerate(possible_connections.T):
+            if np.all((i, j) == connection):
+                owned_lines_per_possible_connections[connection_idx].append(line_idx)
+                break
+
+    # check if the lines are in the same direction as the possible connection
+    for fitted_line_idx, owned_lines_per_possible_connection in enumerate(owned_lines_per_possible_connections):
+        line_deviations = np.abs(
+            np.dot(line_directions[owned_lines_per_possible_connection], possible_direction_vectors[fitted_line_idx]))
+        if np.any(line_deviations > deviation_threshold):
+            connections.append(possible_connections[:, fitted_line_idx])

    vertices = [{"xy": v, "type": "apex"} for v in apex_centroids]
    # vertices += [{"xy": v, "type": "apex"} for v in missed_vertices]
script.py
CHANGED
@@ -127,7 +127,13 @@ if __name__ == "__main__":
    with ProcessPoolExecutor(max_workers=8) as pool:
        results = []
        for i, sample in enumerate(tqdm(dataset)):
-            results.append(pool.submit(predict, sample, visualize=False,
+            results.append(pool.submit(predict, sample, visualize=False,
+                                       point_radius=25,
+                                       max_angle=15,
+                                       extend=30,
+                                       merge_th=3.0,
+                                       min_missing_distance=1000000.0,
+                                       scale_estimation_coefficient=4.5))

        for i, result in enumerate(tqdm(results)):
            key, pred_vertices, pred_edges = result.result()
test_solution.ipynb
CHANGED
@@ -6,8 +6,8 @@
   "metadata": {
    "collapsed": true,
    "ExecuteTime": {
-    "end_time": "2024-05-
-    "start_time": "2024-05-
+    "end_time": "2024-05-30T20:07:32.989151Z",
+    "start_time": "2024-05-30T20:07:28.709056Z"
    }
   },
   "source": [
@@ -44,8 +44,8 @@
  {
   "metadata": {
    "ExecuteTime": {
-    "end_time": "2024-05-
-    "start_time": "2024-05-
+    "end_time": "2024-05-30T20:07:32.997502Z",
+    "start_time": "2024-05-30T20:07:32.991160Z"
    }
   },
   "cell_type": "code",
@@ -64,8 +64,8 @@
  {
   "metadata": {
    "ExecuteTime": {
-    "end_time": "2024-05-
-    "start_time": "2024-05-
+    "end_time": "2024-05-30T20:07:33.002189Z",
+    "start_time": "2024-05-30T20:07:32.998509Z"
    }
   },
   "cell_type": "code",
@@ -83,8 +83,8 @@
  {
   "metadata": {
    "ExecuteTime": {
-    "end_time": "2024-05-
-    "start_time": "2024-05-
+    "end_time": "2024-05-30T20:09:14.932688Z",
+    "start_time": "2024-05-30T20:07:33.003197Z"
    }
   },
   "cell_type": "code",
@@ -93,7 +93,7 @@
    "\n",
    "solution = []\n",
    "from concurrent.futures import ProcessPoolExecutor\n",
-   "with ProcessPoolExecutor(max_workers=
+   "with ProcessPoolExecutor(max_workers=11) as pool:\n",
    "    results = []\n",
    "    for i, sample in enumerate(tqdm(dataset)):\n",
    "        results.append(pool.submit(predict, sample,\n",
@@ -102,7 +102,7 @@
    "                                   extend=30, \n",
    "                                   merge_th=3.0, \n",
    "                                   min_missing_distance=10000.0, \n",
-   "                                   scale_estimation_coefficient=4))\n",
+   "                                   scale_estimation_coefficient=4.5))\n",
    "\n",
    "    for i, result in enumerate(tqdm(results)):\n",
    "        key, pred_vertices, pred_edges = result.result()\n",
@@ -122,18 +122,18 @@
    "name": "stderr",
    "output_type": "stream",
    "text": [
-    "346it [00:11, 29.
-    "100%|██████████| 346/346 [01:
+    "346it [00:11, 29.46it/s] \n",
+    "100%|██████████| 346/346 [01:28<00:00, 3.89it/s]\n"
    ]
   }
  ],
- "execution_count":
+ "execution_count": 4
 },
 {
  "metadata": {
   "ExecuteTime": {
-   "end_time": "2024-05-
-   "start_time": "2024-05-
+   "end_time": "2024-05-30T20:09:16.762781Z",
+   "start_time": "2024-05-30T20:09:14.933694Z"
   }
  },
  "cell_type": "code",
@@ -163,7 +163,7 @@
 {
  "data": {
   "text/plain": [
-   "DescribeResult(nobs=173, minmax=(1.
+   "DescribeResult(nobs=173, minmax=(1.017917771309308, 3.4203176014390544), mean=2.1252280986193086, variance=0.18178457466035677, skewness=0.3534767409028922, kurtosis=-0.13765543977621153)"
   ]
  },
  "execution_count": 5,
@@ -176,7 +176,7 @@
 {
  "metadata": {},
  "cell_type": "markdown",
- "source": "best mean=2.
+ "source": "best mean=2.123433669870156",
  "id": "1d3cde94dcfc4c56"
 },
 {