Aastha
committed on
Commit
•
a21b606
1
Parent(s):
f537928
Add filtering step for display of bounding box
Browse files
app.py
CHANGED
@@ -35,6 +35,18 @@ def parse_rois(roi_json):
|
|
35 |
return roi_polygons
|
36 |
|
37 |
rois = []
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
38 |
|
39 |
def detect(img, model, rois):
|
40 |
if img is None:
|
@@ -81,27 +93,33 @@ def detect(img, model, rois):
|
|
81 |
if len(det):
|
82 |
det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
|
83 |
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
|
|
|
|
89 |
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
|
96 |
-
|
97 |
-
|
98 |
-
|
|
|
|
|
|
|
|
|
99 |
|
100 |
return Image.fromarray(im0[:,:,::-1])
|
101 |
# Modify the Gradio interface to accept ROIs
|
102 |
roi_example = [
|
103 |
["8-2.jpg", "yolov7", json.dumps([
|
104 |
-
{"coordinates": [{"x": 0.
|
105 |
{"coordinates": [{"x": 0.237, "y": 0.505}, {"x": 0.283, "y": 0.460}, {"x": 0.921, "y": 0.578}, {"x": 0.912, "y": 0.654}]}
|
106 |
])],
|
107 |
["9-1.jpg", "yolov7", json.dumps([
|
@@ -162,7 +180,7 @@ roi_example = [
|
|
162 |
{"coordinates": [{"x": 0.629, "y": 0.255}, {"x": 0.661, "y": 0.281}, {"x": 0.006, "y": 0.602}, {"x": 0.005, "y": 0.408}]}
|
163 |
])]
|
164 |
]
|
165 |
-
|
166 |
gr_examples = [[example[0], example[1], example[2]] for example in roi_example]
|
167 |
description_html = """<b>Demo for YOLOv7 Object Detection</b>: This interface is specifically tailored for <b>detecting vehicles</b> in images. The primary focus is on <b>accident-prone regions</b> on public roads. By leveraging state-of-the-art object detection techniques, this tool aims to provide insights into areas where vehicles are most at risk, helping in <b>road safety analysis</b> and <b>preventive measures</b>. Users can also define <b>Regions of Interest (ROIs)</b> to narrow down the detection area, ensuring that the analysis is focused on the most critical parts of the image."""
|
168 |
|
@@ -173,5 +191,6 @@ gr.Interface(
|
|
173 |
title="YOLOv7 Object Detection for Accident-Prone Regions",
|
174 |
examples=gr_examples,
|
175 |
description=description_html,
|
176 |
-
live=False
|
|
|
177 |
).launch()
|
|
|
35 |
return roi_polygons
|
36 |
|
37 |
rois = []
|
38 |
+
names= [ 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',
|
39 |
+
'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
|
40 |
+
'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',
|
41 |
+
'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard',
|
42 |
+
'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
|
43 |
+
'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch',
|
44 |
+
'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone',
|
45 |
+
'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear',
|
46 |
+
'hair drier', 'toothbrush' ]
|
47 |
+
desired_classes = ['person', 'bicycle', 'car', 'motorcycle', 'bus', 'train', 'truck']
|
48 |
+
desired_indices = [names.index(cls) for cls in desired_classes if cls in names]
|
49 |
+
print(desired_indices)
|
50 |
|
51 |
def detect(img, model, rois):
|
52 |
if img is None:
|
|
|
93 |
if len(det):
|
94 |
det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
|
95 |
|
96 |
+
# Filter detections based on ROIs and desired classes
|
97 |
+
filtered_detections = []
|
98 |
+
for *xyxy, conf, cls in reversed(det):
|
99 |
+
if int(cls) not in desired_indices:
|
100 |
+
continue
|
101 |
+
x_center = float((xyxy[0] + xyxy[2]) / 2)
|
102 |
+
y_center = float((xyxy[1] + xyxy[3]) / 2)
|
103 |
|
104 |
+
inside_roi = False
|
105 |
+
for roi_points in roi_points_list:
|
106 |
+
if cv2.pointPolygonTest(np.array(roi_points, dtype=np.float32), (x_center, y_center), False) >= 0:
|
107 |
+
inside_roi = True
|
108 |
+
break
|
109 |
|
110 |
+
filtered_detections.append((*xyxy, conf, cls, inside_roi))
|
111 |
+
|
112 |
+
# Plot the detections with the desired color
|
113 |
+
for *xyxy, conf, cls, inside_roi in filtered_detections:
|
114 |
+
color = (0, 255, 0) if inside_roi else (0, 0, 255) # green for inside ROI, red for outside
|
115 |
+
label = f'{names[int(cls)]} {conf:.2f}'
|
116 |
+
plot_one_box(xyxy, im0, label=label, color=color, line_thickness=1)
|
117 |
|
118 |
return Image.fromarray(im0[:,:,::-1])
|
119 |
# Modify the Gradio interface to accept ROIs
|
120 |
roi_example = [
|
121 |
["8-2.jpg", "yolov7", json.dumps([
|
122 |
+
{"coordinates": [{ "x": 0.005, "y": 0.644 },{ "x": 0.047, "y": 0.572 },{ "x": 0.961, "y": 0.834 },{ "x": 0.695, "y": 0.919 }]},
|
123 |
{"coordinates": [{"x": 0.237, "y": 0.505}, {"x": 0.283, "y": 0.460}, {"x": 0.921, "y": 0.578}, {"x": 0.912, "y": 0.654}]}
|
124 |
])],
|
125 |
["9-1.jpg", "yolov7", json.dumps([
|
|
|
180 |
{"coordinates": [{"x": 0.629, "y": 0.255}, {"x": 0.661, "y": 0.281}, {"x": 0.006, "y": 0.602}, {"x": 0.005, "y": 0.408}]}
|
181 |
])]
|
182 |
]
|
183 |
+
print(len(roi_example))
|
184 |
gr_examples = [[example[0], example[1], example[2]] for example in roi_example]
|
185 |
description_html = """<b>Demo for YOLOv7 Object Detection</b>: This interface is specifically tailored for <b>detecting vehicles</b> in images. The primary focus is on <b>accident-prone regions</b> on public roads. By leveraging state-of-the-art object detection techniques, this tool aims to provide insights into areas where vehicles are most at risk, helping in <b>road safety analysis</b> and <b>preventive measures</b>. Users can also define <b>Regions of Interest (ROIs)</b> to narrow down the detection area, ensuring that the analysis is focused on the most critical parts of the image."""
|
186 |
|
|
|
191 |
title="YOLOv7 Object Detection for Accident-Prone Regions",
|
192 |
examples=gr_examples,
|
193 |
description=description_html,
|
194 |
+
live=False,# This ensures that the model doesn't run until the 'Submit' button is clicked
|
195 |
+
example_ceiling=14
|
196 |
).launch()
|