ottoykh committed on
Commit
98d05b6
·
verified ·
1 Parent(s): f251377

Upload Real_Time_Traffic_CCTV_Instance_Segmentation.ipynb

Real_Time_Traffic_CCTV_Instance_Segmentation.ipynb ADDED
@@ -0,0 +1,392 @@
+ {
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+ "colab": {
+ "provenance": []
+ },
+ "kernelspec": {
+ "name": "python3",
+ "display_name": "Python 3"
+ },
+ "language_info": {
+ "name": "python"
+ }
+ },
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "7vLYqOipDn7J",
+ "outputId": "d0995580-9b7a-40cd-8147-7fdf58f148fe"
+ },
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Cloning into 'Smart-Traffic'...\n",
+ "remote: Enumerating objects: 12, done.\u001b[K\n",
+ "remote: Counting objects: 100% (9/9), done.\u001b[K\n",
+ "remote: Compressing objects: 100% (9/9), done.\u001b[K\n",
+ "remote: Total 12 (delta 2), reused 0 (delta 0), pack-reused 3\u001b[K\n",
+ "Unpacking objects: 100% (12/12), 199.01 KiB | 939.00 KiB/s, done.\n",
+ "Filtering content: 100% (2/2), 57.18 MiB | 19.07 MiB/s, done.\n"
+ ]
+ }
+ ],
+ "source": [
+ "!git clone https://huggingface.co/ottoykh/Smart-Traffic"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "!pip install ultralytics"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "ku7viwceDrF-",
+ "outputId": "b6246bc3-2849-4c1e-f6bc-b3bc7860bf78"
+ },
+ "execution_count": 4,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Collecting ultralytics\n",
+ " Downloading ultralytics-8.1.18-py3-none-any.whl (716 kB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m716.0/716.0 kB\u001b[0m \u001b[31m5.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: matplotlib>=3.3.0 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (3.7.1)\n",
+ "Requirement already satisfied: opencv-python>=4.6.0 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (4.8.0.76)\n",
+ "Requirement already satisfied: pillow>=7.1.2 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (9.4.0)\n",
+ "Requirement already satisfied: pyyaml>=5.3.1 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (6.0.1)\n",
+ "Requirement already satisfied: requests>=2.23.0 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (2.31.0)\n",
+ "Requirement already satisfied: scipy>=1.4.1 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (1.11.4)\n",
+ "Requirement already satisfied: torch>=1.8.0 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (2.1.0+cu121)\n",
+ "Requirement already satisfied: torchvision>=0.9.0 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (0.16.0+cu121)\n",
+ "Requirement already satisfied: tqdm>=4.64.0 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (4.66.2)\n",
+ "Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from ultralytics) (5.9.5)\n",
+ "Requirement already satisfied: py-cpuinfo in /usr/local/lib/python3.10/dist-packages (from ultralytics) (9.0.0)\n",
+ "Collecting thop>=0.1.1 (from ultralytics)\n",
+ " Downloading thop-0.1.1.post2209072238-py3-none-any.whl (15 kB)\n",
+ "Requirement already satisfied: pandas>=1.1.4 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (1.5.3)\n",
+ "Requirement already satisfied: seaborn>=0.11.0 in /usr/local/lib/python3.10/dist-packages (from ultralytics) (0.13.1)\n",
+ "Requirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.0->ultralytics) (1.2.0)\n",
+ "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.0->ultralytics) (0.12.1)\n",
+ "Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.0->ultralytics) (4.49.0)\n",
+ "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.0->ultralytics) (1.4.5)\n",
+ "Requirement already satisfied: numpy>=1.20 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.0->ultralytics) (1.25.2)\n",
+ "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.0->ultralytics) (23.2)\n",
+ "Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.0->ultralytics) (3.1.1)\n",
+ "Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.0->ultralytics) (2.8.2)\n",
+ "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas>=1.1.4->ultralytics) (2023.4)\n",
+ "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.23.0->ultralytics) (3.3.2)\n",
+ "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.23.0->ultralytics) (3.6)\n",
+ "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.23.0->ultralytics) (2.0.7)\n",
+ "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.23.0->ultralytics) (2024.2.2)\n",
+ "Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch>=1.8.0->ultralytics) (3.13.1)\n",
+ "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from torch>=1.8.0->ultralytics) (4.9.0)\n",
+ "Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=1.8.0->ultralytics) (1.12)\n",
+ "Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.8.0->ultralytics) (3.2.1)\n",
+ "Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=1.8.0->ultralytics) (3.1.3)\n",
+ "Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=1.8.0->ultralytics) (2023.6.0)\n",
+ "Requirement already satisfied: triton==2.1.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.8.0->ultralytics) (2.1.0)\n",
+ "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.7->matplotlib>=3.3.0->ultralytics) (1.16.0)\n",
+ "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=1.8.0->ultralytics) (2.1.5)\n",
+ "Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=1.8.0->ultralytics) (1.3.0)\n",
+ "Installing collected packages: thop, ultralytics\n",
+ "Successfully installed thop-0.1.1.post2209072238 ultralytics-8.1.18\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "from ultralytics import YOLO\n",
+ "\n",
+ "from IPython.display import display, Image\n",
+ "import requests\n",
+ "from PIL import Image\n",
+ "import time\n",
+ "import datetime\n",
+ "import os"
+ ],
+ "metadata": {
+ "id": "tjNmyigvEPut"
+ },
+ "execution_count": 5,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "image_urls = [\n",
+ " \"https://tdcctv.data.one.gov.hk/AID01217.JPG\",\n",
+ " \"https://tdcctv.data.one.gov.hk/AID01216.JPG\",\n",
+ " \"https://tdcctv.data.one.gov.hk/AID01215.JPG\",\n",
+ " \"https://tdcctv.data.one.gov.hk/AID01214.JPG\",\n",
+ " \"https://tdcctv.data.one.gov.hk/AID01213.JPG\",\n",
+ " \"https://tdcctv.data.one.gov.hk/AID01212.JPG\",\n",
+ " \"https://tdcctv.data.one.gov.hk/AID01211.JPG\",\n",
+ " \"https://tdcctv.data.one.gov.hk/AID01210.JPG\",\n",
+ " \"https://tdcctv.data.one.gov.hk/AID01209.JPG\"\n",
+ "]\n"
+ ],
+ "metadata": {
+ "id": "NxP8UKN4EUh3"
+ },
+ "execution_count": 14,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "import pytz\n",
+ "from urllib.parse import urlparse\n",
+ "import json\n",
+ "\n",
+ "hong_kong_timezone = pytz.timezone('Asia/Hong_Kong')\n",
+ "\n",
+ "while True:\n",
+ "    current_time = datetime.datetime.now(tz=hong_kong_timezone).strftime(\"%Y%m%d%H%M%S\")\n",
+ "    folder_name = f\"/content/{current_time}\"\n",
+ "    print(folder_name)\n",
+ "    os.makedirs(folder_name, exist_ok=True)\n",
+ "\n",
+ "    for image_url in image_urls:\n",
+ "        response = requests.get(image_url)\n",
+ "        image_data = response.content\n",
+ "        parsed_url = urlparse(image_url)\n",
+ "        image_name = os.path.basename(parsed_url.path)\n",
+ "        file_name = os.path.join(folder_name, image_name)\n",
+ "        with open(file_name, \"wb\") as file:\n",
+ "            file.write(image_data)\n",
+ "        print(file_name)\n",
+ "    folder_name_formatted = f\"'{folder_name}'\"\n",
+ "\n",
+ "    !yolo task=segment mode=predict model='/content/Smart-Traffic/best.pt' conf=0.45 source={folder_name_formatted} save=true save_txt=true\n",
+ "\n",
+ "    time.sleep(120)"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 1000
+ },
+ "id": "iNxB2wbrEa5q",
+ "outputId": "7854ac0b-c652-4660-bd03-356bc0cbff0c"
+ },
+ "execution_count": 19,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "/content/20240223165431\n",
+ "/content/20240223165431/AID01217.JPG\n",
+ "/content/20240223165431/AID01216.JPG\n",
+ "/content/20240223165431/AID01215.JPG\n",
+ "/content/20240223165431/AID01214.JPG\n",
+ "/content/20240223165431/AID01213.JPG\n",
+ "/content/20240223165431/AID01212.JPG\n",
+ "/content/20240223165431/AID01211.JPG\n",
+ "/content/20240223165431/AID01210.JPG\n",
+ "/content/20240223165431/AID01209.JPG\n",
+ "Ultralytics YOLOv8.1.18 🚀 Python-3.10.12 torch-2.1.0+cu121 CPU (Intel Xeon 2.20GHz)\n",
+ "YOLOv8s-seg summary (fused): 195 layers, 11782309 parameters, 0 gradients, 42.5 GFLOPs\n",
+ "\n",
+ "image 1/9 /content/20240223165431/AID01209.JPG: 480x640 (no detections), 750.7ms\n",
+ "image 2/9 /content/20240223165431/AID01210.JPG: 480x640 2 Private-cars, 813.9ms\n",
+ "image 3/9 /content/20240223165431/AID01211.JPG: 480x640 1 Minibus, 3 Private-cars, 1039.4ms\n",
+ "image 4/9 /content/20240223165431/AID01212.JPG: 480x640 (no detections), 996.6ms\n",
+ "image 5/9 /content/20240223165431/AID01213.JPG: 480x640 1 Bus, 2 Private-cars, 1 Taxi, 652.4ms\n",
+ "image 6/9 /content/20240223165431/AID01214.JPG: 480x640 2 Private-cars, 2 Taxis, 1 Truck, 661.9ms\n",
+ "image 7/9 /content/20240223165431/AID01215.JPG: 480x640 2 Private-cars, 1 Taxi, 626.7ms\n",
+ "image 8/9 /content/20240223165431/AID01216.JPG: 480x640 1 Minibus, 5 Private-cars, 639.9ms\n",
+ "image 9/9 /content/20240223165431/AID01217.JPG: 480x640 3 Private-cars, 619.7ms\n",
+ "Speed: 3.2ms preprocess, 755.7ms inference, 13.1ms postprocess per image at shape (1, 3, 480, 640)\n",
+ "Results saved to \u001b[1mruns/segment/predict4\u001b[0m\n",
+ "7 labels saved to runs/segment/predict4/labels\n",
+ "💡 Learn more at https://docs.ultralytics.com/modes/predict\n",
+ "/content/20240223165647\n",
+ "/content/20240223165647/AID01217.JPG\n",
+ "/content/20240223165647/AID01216.JPG\n",
+ "/content/20240223165647/AID01215.JPG\n",
+ "/content/20240223165647/AID01214.JPG\n",
+ "/content/20240223165647/AID01213.JPG\n",
+ "/content/20240223165647/AID01212.JPG\n",
+ "/content/20240223165647/AID01211.JPG\n",
+ "/content/20240223165647/AID01210.JPG\n",
+ "/content/20240223165647/AID01209.JPG\n",
+ "Ultralytics YOLOv8.1.18 🚀 Python-3.10.12 torch-2.1.0+cu121 CPU (Intel Xeon 2.20GHz)\n",
+ "YOLOv8s-seg summary (fused): 195 layers, 11782309 parameters, 0 gradients, 42.5 GFLOPs\n",
+ "\n",
+ "image 1/9 /content/20240223165647/AID01209.JPG: 480x640 2 Private-cars, 1 Taxi, 733.2ms\n",
+ "image 2/9 /content/20240223165647/AID01210.JPG: 480x640 2 Private-cars, 628.8ms\n",
+ "image 3/9 /content/20240223165647/AID01211.JPG: 480x640 (no detections), 648.8ms\n",
+ "image 4/9 /content/20240223165647/AID01212.JPG: 480x640 2 Private-cars, 1 Taxi, 650.8ms\n",
+ "image 5/9 /content/20240223165647/AID01213.JPG: 480x640 4 Private-cars, 1 Truck, 642.1ms\n",
+ "image 6/9 /content/20240223165647/AID01214.JPG: 480x640 1 Bus, 3 Private-cars, 625.7ms\n",
+ "image 7/9 /content/20240223165647/AID01215.JPG: 480x640 4 Private-cars, 1 Truck, 839.4ms\n",
+ "image 8/9 /content/20240223165647/AID01216.JPG: 480x640 2 Private-cars, 995.4ms\n",
+ "image 9/9 /content/20240223165647/AID01217.JPG: 480x640 4 Private-cars, 970.9ms\n",
+ "Speed: 3.2ms preprocess, 748.4ms inference, 12.3ms postprocess per image at shape (1, 3, 480, 640)\n",
+ "Results saved to \u001b[1mruns/segment/predict5\u001b[0m\n",
+ "8 labels saved to runs/segment/predict5/labels\n",
+ "💡 Learn more at https://docs.ultralytics.com/modes/predict\n",
+ "/content/20240223165903\n",
+ "/content/20240223165903/AID01217.JPG\n",
+ "/content/20240223165903/AID01216.JPG\n",
+ "/content/20240223165903/AID01215.JPG\n",
+ "/content/20240223165903/AID01214.JPG\n",
+ "/content/20240223165903/AID01213.JPG\n",
+ "/content/20240223165903/AID01212.JPG\n",
+ "/content/20240223165903/AID01211.JPG\n",
+ "/content/20240223165903/AID01210.JPG\n",
+ "/content/20240223165903/AID01209.JPG\n",
+ "Ultralytics YOLOv8.1.18 🚀 Python-3.10.12 torch-2.1.0+cu121 CPU (Intel Xeon 2.20GHz)\n",
+ "YOLOv8s-seg summary (fused): 195 layers, 11782309 parameters, 0 gradients, 42.5 GFLOPs\n",
+ "\n",
+ "image 1/9 /content/20240223165903/AID01209.JPG: 480x640 2 Private-cars, 1 Taxi, 755.6ms\n",
+ "image 2/9 /content/20240223165903/AID01210.JPG: 480x640 1 Bus, 3 Private-cars, 649.8ms\n",
+ "image 3/9 /content/20240223165903/AID01211.JPG: 480x640 (no detections), 627.9ms\n",
+ "image 4/9 /content/20240223165903/AID01212.JPG: 480x640 2 Private-cars, 1 Taxi, 639.2ms\n",
+ "image 5/9 /content/20240223165903/AID01213.JPG: 480x640 4 Private-cars, 1 Truck, 662.7ms\n",
+ "image 6/9 /content/20240223165903/AID01214.JPG: 480x640 1 Bus, 3 Private-cars, 632.2ms\n",
+ "image 7/9 /content/20240223165903/AID01215.JPG: 480x640 4 Private-cars, 1 Truck, 612.9ms\n",
+ "image 8/9 /content/20240223165903/AID01216.JPG: 480x640 2 Private-cars, 638.8ms\n",
+ "image 9/9 /content/20240223165903/AID01217.JPG: 480x640 4 Private-cars, 623.8ms\n",
+ "Speed: 3.0ms preprocess, 649.2ms inference, 11.9ms postprocess per image at shape (1, 3, 480, 640)\n",
+ "Results saved to \u001b[1mruns/segment/predict6\u001b[0m\n",
+ "8 labels saved to runs/segment/predict6/labels\n",
+ "💡 Learn more at https://docs.ultralytics.com/modes/predict\n",
+ "/content/20240223170118\n",
+ "/content/20240223170118/AID01217.JPG\n",
+ "/content/20240223170118/AID01216.JPG\n",
+ "/content/20240223170118/AID01215.JPG\n",
+ "/content/20240223170118/AID01214.JPG\n",
+ "/content/20240223170118/AID01213.JPG\n",
+ "/content/20240223170118/AID01212.JPG\n",
+ "/content/20240223170118/AID01211.JPG\n",
+ "/content/20240223170118/AID01210.JPG\n",
+ "/content/20240223170118/AID01209.JPG\n",
+ "Ultralytics YOLOv8.1.18 🚀 Python-3.10.12 torch-2.1.0+cu121 CPU (Intel Xeon 2.20GHz)\n",
+ "YOLOv8s-seg summary (fused): 195 layers, 11782309 parameters, 0 gradients, 42.5 GFLOPs\n",
+ "\n",
+ "image 1/9 /content/20240223170118/AID01209.JPG: 480x640 1 Bus, 1 Taxi, 807.7ms\n",
+ "image 2/9 /content/20240223170118/AID01210.JPG: 480x640 3 Private-cars, 668.4ms\n",
+ "image 3/9 /content/20240223170118/AID01211.JPG: 480x640 (no detections), 654.9ms\n",
+ "image 4/9 /content/20240223170118/AID01212.JPG: 480x640 2 Private-cars, 1 Taxi, 660.6ms\n",
+ "image 5/9 /content/20240223170118/AID01213.JPG: 480x640 1 Bus, 2 Private-cars, 659.2ms\n",
+ "image 6/9 /content/20240223170118/AID01214.JPG: 480x640 1 Minibus, 6 Private-cars, 1 Taxi, 642.2ms\n",
+ "image 7/9 /content/20240223170118/AID01215.JPG: 480x640 3 Private-cars, 620.5ms\n",
+ "image 8/9 /content/20240223170118/AID01216.JPG: 480x640 4 Private-cars, 1 Taxi, 634.2ms\n",
+ "image 9/9 /content/20240223170118/AID01217.JPG: 480x640 2 Private-cars, 1 Taxi, 607.3ms\n",
+ "Speed: 3.9ms preprocess, 661.7ms inference, 14.4ms postprocess per image at shape (1, 3, 480, 640)\n",
+ "Results saved to \u001b[1mruns/segment/predict7\u001b[0m\n",
+ "8 labels saved to runs/segment/predict7/labels\n",
+ "💡 Learn more at https://docs.ultralytics.com/modes/predict\n",
+ "/content/20240223170334\n",
+ "/content/20240223170334/AID01217.JPG\n",
+ "/content/20240223170334/AID01216.JPG\n",
+ "/content/20240223170334/AID01215.JPG\n",
+ "/content/20240223170334/AID01214.JPG\n",
+ "/content/20240223170334/AID01213.JPG\n",
+ "/content/20240223170334/AID01212.JPG\n",
+ "/content/20240223170334/AID01211.JPG\n",
+ "/content/20240223170334/AID01210.JPG\n",
+ "/content/20240223170334/AID01209.JPG\n",
+ "Ultralytics YOLOv8.1.18 🚀 Python-3.10.12 torch-2.1.0+cu121 CPU (Intel Xeon 2.20GHz)\n",
+ "YOLOv8s-seg summary (fused): 195 layers, 11782309 parameters, 0 gradients, 42.5 GFLOPs\n",
+ "\n",
+ "image 1/9 /content/20240223170334/AID01209.JPG: 480x640 7 Private-cars, 1 Taxi, 1209.1ms\n",
+ "image 2/9 /content/20240223170334/AID01210.JPG: 480x640 (no detections), 643.8ms\n",
+ "image 3/9 /content/20240223170334/AID01211.JPG: 480x640 1 Private-car, 615.6ms\n",
+ "image 4/9 /content/20240223170334/AID01212.JPG: 480x640 2 Private-cars, 1 Taxi, 625.5ms\n",
+ "image 5/9 /content/20240223170334/AID01213.JPG: 480x640 1 Taxi, 628.4ms\n",
+ "image 6/9 /content/20240223170334/AID01214.JPG: 480x640 1 Private-car, 1 Taxi, 616.1ms\n",
+ "image 7/9 /content/20240223170334/AID01215.JPG: 480x640 2 Private-cars, 623.7ms\n",
+ "image 8/9 /content/20240223170334/AID01216.JPG: 480x640 1 Bus, 611.1ms\n",
+ "image 9/9 /content/20240223170334/AID01217.JPG: 480x640 1 Private-car, 630.6ms\n",
+ "Speed: 3.1ms preprocess, 689.3ms inference, 9.7ms postprocess per image at shape (1, 3, 480, 640)\n",
+ "Results saved to \u001b[1mruns/segment/predict8\u001b[0m\n",
+ "8 labels saved to runs/segment/predict8/labels\n",
+ "💡 Learn more at https://docs.ultralytics.com/modes/predict\n",
+ "/content/20240223170552\n",
+ "/content/20240223170552/AID01217.JPG\n",
+ "/content/20240223170552/AID01216.JPG\n",
+ "/content/20240223170552/AID01215.JPG\n",
+ "/content/20240223170552/AID01214.JPG\n",
+ "/content/20240223170552/AID01213.JPG\n",
+ "/content/20240223170552/AID01212.JPG\n",
+ "/content/20240223170552/AID01211.JPG\n",
+ "/content/20240223170552/AID01210.JPG\n",
+ "/content/20240223170552/AID01209.JPG\n",
+ "Ultralytics YOLOv8.1.18 🚀 Python-3.10.12 torch-2.1.0+cu121 CPU (Intel Xeon 2.20GHz)\n",
+ "YOLOv8s-seg summary (fused): 195 layers, 11782309 parameters, 0 gradients, 42.5 GFLOPs\n",
+ "\n",
+ "image 1/9 /content/20240223170552/AID01209.JPG: 480x640 7 Private-cars, 1 Taxi, 892.9ms\n",
+ "image 2/9 /content/20240223170552/AID01210.JPG: 480x640 2 Private-cars, 974.9ms\n",
+ "image 3/9 /content/20240223170552/AID01211.JPG: 480x640 4 Private-cars, 976.1ms\n",
+ "image 4/9 /content/20240223170552/AID01212.JPG: 480x640 4 Private-cars, 1 Taxi, 612.6ms\n",
+ "image 5/9 /content/20240223170552/AID01213.JPG: 480x640 2 Private-cars, 1 Taxi, 614.1ms\n",
+ "image 6/9 /content/20240223170552/AID01214.JPG: 480x640 1 Minibus, 6 Private-cars, 1 Taxi, 609.9ms\n",
+ "image 7/9 /content/20240223170552/AID01215.JPG: 480x640 2 Private-cars, 621.7ms\n",
+ "image 8/9 /content/20240223170552/AID01216.JPG: 480x640 (no detections), 624.3ms\n",
+ "image 9/9 /content/20240223170552/AID01217.JPG: 480x640 2 Private-cars, 605.0ms\n",
+ "Speed: 3.4ms preprocess, 725.7ms inference, 15.1ms postprocess per image at shape (1, 3, 480, 640)\n",
+ "Results saved to \u001b[1mruns/segment/predict9\u001b[0m\n",
+ "8 labels saved to runs/segment/predict9/labels\n",
+ "💡 Learn more at https://docs.ultralytics.com/modes/predict\n",
+ "/content/20240223170810\n",
+ "/content/20240223170810/AID01217.JPG\n",
+ "/content/20240223170810/AID01216.JPG\n",
+ "/content/20240223170810/AID01215.JPG\n",
+ "/content/20240223170810/AID01214.JPG\n",
+ "/content/20240223170810/AID01213.JPG\n",
+ "/content/20240223170810/AID01212.JPG\n",
+ "/content/20240223170810/AID01211.JPG\n",
+ "/content/20240223170810/AID01210.JPG\n",
+ "/content/20240223170810/AID01209.JPG\n",
+ "Ultralytics YOLOv8.1.18 🚀 Python-3.10.12 torch-2.1.0+cu121 CPU (Intel Xeon 2.20GHz)\n",
+ "YOLOv8s-seg summary (fused): 195 layers, 11782309 parameters, 0 gradients, 42.5 GFLOPs\n",
+ "\n",
+ "image 1/9 /content/20240223170810/AID01209.JPG: 480x640 1 Minibus, 4 Private-cars, 1 Taxi, 746.6ms\n",
+ "image 2/9 /content/20240223170810/AID01210.JPG: 480x640 2 Private-cars, 624.7ms\n",
+ "image 3/9 /content/20240223170810/AID01211.JPG: 480x640 4 Private-cars, 639.6ms\n",
+ "image 4/9 /content/20240223170810/AID01212.JPG: 480x640 4 Private-cars, 1 Taxi, 828.6ms\n",
+ "image 5/9 /content/20240223170810/AID01213.JPG: 480x640 2 Private-cars, 1 Taxi, 987.7ms\n",
+ "image 6/9 /content/20240223170810/AID01214.JPG: 480x640 2 Private-cars, 1 Taxi, 975.8ms\n",
+ "image 7/9 /content/20240223170810/AID01215.JPG: 480x640 1 Minibus, 2 Private-cars, 1 Taxi, 629.0ms\n",
+ "image 8/9 /content/20240223170810/AID01216.JPG: 480x640 (no detections), 618.1ms\n",
+ "image 9/9 /content/20240223170810/AID01217.JPG: 480x640 2 Private-cars, 639.6ms\n",
+ "Speed: 3.1ms preprocess, 743.3ms inference, 13.4ms postprocess per image at shape (1, 3, 480, 640)\n",
+ "Results saved to \u001b[1mruns/segment/predict10\u001b[0m\n",
+ "8 labels saved to runs/segment/predict10/labels\n",
+ "💡 Learn more at https://docs.ultralytics.com/modes/predict\n"
+ ]
+ },
+ {
+ "output_type": "error",
+ "ename": "KeyboardInterrupt",
+ "evalue": "",
+ "traceback": [
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+ "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
+ "\u001b[0;32m<ipython-input-19-eb142f4ed618>\u001b[0m in \u001b[0;36m<cell line: 7>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 24\u001b[0m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msystem\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"yolo task=segment mode=predict model='/content/Smart-Traffic/best.pt' conf=0.45 source={folder_name_formatted} save=true save_txt=true\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 26\u001b[0;31m \u001b[0mtime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msleep\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m120\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
+ "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
+ ]
+ }
+ ]
+ }
+ ]
+ }
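
Note: the polling cell above imports YOLO from ultralytics but then shells out to the yolo CLI for each batch. A minimal in-process sketch of the same loop is shown below for reference; it assumes the cloned /content/Smart-Traffic/best.pt weights and the image_urls list from the notebook, and the fetch_and_segment helper name, the request timeout, and raise_for_status() are illustrative additions rather than part of the committed notebook.

# Sketch only: in-process equivalent of the "!yolo task=segment mode=predict ..." call,
# using the ultralytics Python API that the notebook already imports.
import datetime
import os
import time
from urllib.parse import urlparse

import pytz
import requests
from ultralytics import YOLO

model = YOLO("/content/Smart-Traffic/best.pt")       # segmentation weights from the cloned repo
hong_kong_timezone = pytz.timezone("Asia/Hong_Kong")

def fetch_and_segment(image_urls):                   # hypothetical helper name
    # Timestamped folder, mirroring the notebook's per-iteration layout
    stamp = datetime.datetime.now(tz=hong_kong_timezone).strftime("%Y%m%d%H%M%S")
    folder = f"/content/{stamp}"
    os.makedirs(folder, exist_ok=True)
    for url in image_urls:
        response = requests.get(url, timeout=30)     # timeout added as a safeguard
        response.raise_for_status()
        name = os.path.basename(urlparse(url).path)
        with open(os.path.join(folder, name), "wb") as f:
            f.write(response.content)
    # Same options as the CLI call: confidence threshold, save annotated images and label .txt files
    return model.predict(source=folder, conf=0.45, save=True, save_txt=True)

# while True:
#     fetch_and_segment(image_urls)
#     time.sleep(120)                                # same 120 s polling interval as the notebook

Keeping the model in memory avoids reloading the checkpoint on every two-minute iteration, which is most of the overhead of spawning the CLI each time on a CPU-only Colab runtime.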