Dan Biagini committed
Commit: faeb28b
Parent(s): fb6287f

add v2 try it to Home page

Files changed:
- src/Home.py  +104 -0
- src/app.py  +4 -2
- src/hockey_object_detection.py  +0 -104
src/Home.py CHANGED

@@ -1,7 +1,72 @@
 import streamlit as st
+from ultralytics import YOLO
+from huggingface_hub import hf_hub_download
+import cv2
+import numpy as np
 
 import logging
 
+@st.cache_resource
+def get_model():
+    repo_id = "danbiagini/hockey_breeds_v2"
+    return hf_hub_download(repo_id=repo_id, filename="hockey_breeds-v2-101623.pt")
+
+
+def run_inference(img, model, thresh=0.5):
+    model = YOLO(model_f)
+    st.session_state.results = model(img)
+    return draw_hockey_boxes(img, st.session_state.results, thresh)
+
+
+def draw_hockey_boxes(frame, results, thresh=0.5):
+    colors = {0: (0, 255, 0), 1: (255, 0, 0), 2: (0, 0, 255), 3: (128, 0, 0), 4: (
+        0, 128, 0), 5: (0, 0, 128), 6: (0, 64, 0), 7: (64, 0, 0), 8: (0, 0, 64)}
+    font_scale = frame.shape[0] / 500
+    objects = []
+
+    for name in results:
+        for box in name.boxes.data.tolist():
+            x1, y1, x2, y2, score, class_id = box
+            objects.append((name.names[int(class_id)], score))
+
+            if score > thresh:
+                cv2.rectangle(frame, (int(x1), int(y1)),
+                              (int(x2), int(y2)), colors[(class_id % 9)], 3)
+                cv2.putText(frame, f'{name.names[int(class_id)].upper()}: {score:.2f}', (int(x1), int(y1 - 10)),
+                            cv2.FONT_HERSHEY_SIMPLEX, font_scale, colors[(class_id % 9)], 3, cv2.LINE_AA)
+            else:
+                print(
+                    f'Found an object under confidence threshold {thresh} type: {name.names[class_id]}, score:{score}, x1, y2:{x1}, {y2}')
+    return objects
+
+def reset_image():
+    st.session_state.img = None
+
+def upload_img():
+    if st.session_state.upload_img is not None:
+        st.session_state.img = st.session_state.upload_img
+
+def get_naked_image():
+    if st.session_state.img is not None:
+        img = st.session_state.img
+        img.seek(0)
+        return(cv2.imdecode(np.frombuffer(img.read(), np.uint8), 1))
+    return None
+
+def use_sample_image():
+    st.session_state.img = open('src/images/samples/v2/net-chaos.jpg', 'rb')
+
+# Init state
+if 'results' not in st.session_state:
+    st.session_state.results = []
+
+if 'thresh' not in st.session_state:
+    st.session_state.thresh = 0.5
+
+if 'img' not in st.session_state:
+    st.session_state.img = None
+
+
 st.set_page_config(page_title='TopShelf POC', layout="wide",
                    page_icon="🥅")
 
@@ -24,3 +89,42 @@ st.markdown(overview)
 st.subheader('Getting Started')
 st.markdown('''We're currently in the training and testing phase of **Top Shelf** development. This is a proof of concept application that friends of **Top Shelf** can use to help in development.
 To help us understand how our *Computer Vision* model is working you can upload hockey pictures and then the app will display what hockey objects were found. ''')
+
+st.write("Upload an image file to try detecting hockey objects in your own hockey image, or use a sample image below.")
+
+
+if st.session_state.img is None:
+    st.file_uploader("Upload an image and Hockey Breeds v2 will find the hockey objects in the image",
+                     type=["jpg", "jpeg", "png"], key='upload_img', on_change=upload_img)
+
+    with st.expander("Sample Images"):
+        st.image('src/images/samples/v2/net-chaos.jpg')
+        st.button("Use Sample", on_click=use_sample_image)
+
+img = get_naked_image()
+if img is not None:
+
+    thresh = st.slider('Set the object confidence threshold', key='thresh',
+                       min_value=0.0, max_value=1.0, value=0.5, step=0.05)
+
+    with st.status("Detecting hockey objects..."):
+        st.write("Loading model...")
+        model_f = get_model()
+
+        st.write("Running inference on image...")
+        objects = run_inference(img, model_f, thresh)
+
+    st.dataframe(objects, column_config={
+        "0": "Object",
+        "1": "Confidence"
+    })
+
+    # check if the results list is empty
+    if len(st.session_state.results) == 0:
+        st.write('**No hockey objects found in image!**')
+        st.image(img, caption='Uploaded Image had no hockey objects')
+    else:
+        st.image(img, caption='Image with hockey object bounding boxes')
+
+    st.button("Reset Image", on_click=reset_image)
+
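A note on the inference helper that moved into Home.py (the same code is removed from hockey_object_detection.py below): run_inference never uses its model parameter; the line model = YOLO(model_f) reads the module-level model_f that the page body defines before the first call, so it works only through global scope. A minimal cleaned-up sketch, assuming the parameter is meant to carry the checkpoint path returned by get_model() (the weights_path name is illustrative, not part of this commit):

import streamlit as st
from ultralytics import YOLO

# Hypothetical cleanup, not part of this commit: load the model from the
# function's own argument instead of the module-level model_f global.
def run_inference(img, weights_path, thresh=0.5):
    model = YOLO(weights_path)             # load the downloaded .pt checkpoint
    st.session_state.results = model(img)  # run detection on the decoded image
    # draw_hockey_boxes is the drawing helper defined earlier in Home.py
    return draw_hockey_boxes(img, st.session_state.results, thresh)

The call site in the page body, run_inference(img, model_f, thresh), already passes that path, so no other change would be needed.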
src/app.py CHANGED

@@ -5,8 +5,10 @@ import logging
 app = st.navigation(
     {"App": [st.Page("Home.py", title="Home", icon=":material/home:"),
              st.Page("About.py", icon="🥅")],
-     "Models": [
-        st.Page("hockey_object_detection.py", title="v2 - Hockey Object Detection", icon=":material/filter_b_and_w:")]
+     "Models": [
+        st.Page("hockey_object_detection.py", title="v2 - Hockey Object Detection", icon=":material/filter_b_and_w:"),
+        st.Page("Hockey_Breeds.py", title="v1 - Hockey Breeds", icon=":material/gradient:")
+     ]
     }
 )
 app.run()
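For context on the navigation change: st.navigation takes a mapping of section labels to st.Page entries, returns the page the user picked in the sidebar, and run() renders it; page paths are resolved relative to the entrypoint script, so the new Hockey_Breeds.py page is presumably expected alongside app.py in src/. A stripped-down sketch of the pattern (titles and icons trimmed for brevity):

import streamlit as st

# One "Models" section with the two model pages registered in this commit.
app = st.navigation(
    {"Models": [st.Page("hockey_object_detection.py", title="v2 - Hockey Object Detection"),
                st.Page("Hockey_Breeds.py", title="v1 - Hockey Breeds")]}
)
app.run()  # renders whichever page is currently selected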
src/hockey_object_detection.py CHANGED

@@ -1,69 +1,4 @@
 import streamlit as st
-from ultralytics import YOLO
-from huggingface_hub import hf_hub_download
-import cv2
-import numpy as np
-
-@st.cache_resource
-def get_model():
-    repo_id = "danbiagini/hockey_breeds_v2"
-    return hf_hub_download(repo_id=repo_id, filename="hockey_breeds-v2-101623.pt")
-
-
-def run_inference(img, model, thresh=0.5):
-    model = YOLO(model_f)
-    st.session_state.results = model(img)
-    return draw_hockey_boxes(img, st.session_state.results, thresh)
-
-
-def draw_hockey_boxes(frame, results, thresh=0.5):
-    colors = {0: (0, 255, 0), 1: (255, 0, 0), 2: (0, 0, 255), 3: (128, 0, 0), 4: (
-        0, 128, 0), 5: (0, 0, 128), 6: (0, 64, 0), 7: (64, 0, 0), 8: (0, 0, 64)}
-    font_scale = frame.shape[0] / 500
-    objects = []
-
-    for name in results:
-        for box in name.boxes.data.tolist():
-            x1, y1, x2, y2, score, class_id = box
-            objects.append((name.names[int(class_id)], score))
-
-            if score > thresh:
-                cv2.rectangle(frame, (int(x1), int(y1)),
-                              (int(x2), int(y2)), colors[(class_id % 9)], 3)
-                cv2.putText(frame, f'{name.names[int(class_id)].upper()}: {score:.2f}', (int(x1), int(y1 - 10)),
-                            cv2.FONT_HERSHEY_SIMPLEX, font_scale, colors[(class_id % 9)], 3, cv2.LINE_AA)
-            else:
-                print(
-                    f'Found an object under confidence threshold {thresh} type: {name.names[class_id]}, score:{score}, x1, y2:{x1}, {y2}')
-    return objects
-
-def reset_image():
-    st.session_state.img = None
-
-def upload_img():
-    if st.session_state.upload_img is not None:
-        st.session_state.img = st.session_state.upload_img
-
-def get_naked_image():
-    if st.session_state.img is not None:
-        img = st.session_state.img
-        img.seek(0)
-        return(cv2.imdecode(np.frombuffer(img.read(), np.uint8), 1))
-    return None
-
-def use_sample_image():
-    st.session_state.img = open('src/images/samples/v2/net-chaos.jpg', 'rb')
-
-# Init state
-if 'results' not in st.session_state:
-    st.session_state.results = []
-
-if 'thresh' not in st.session_state:
-    st.session_state.thresh = 0.5
-
-if 'img' not in st.session_state:
-    st.session_state.img = None
-
 
 # Top down page rendering
 st.set_page_config(page_title='Hockey Breeds v2 - Objects', layout="wide",
@@ -103,42 +38,3 @@ st.markdown('''Validation of the model\'s performance was done using 15 images n
 st.image("src/images/artifacts/confusion_matrix_v2.png",
          caption="Confusion Matrix for Hockey Breeds v2", )
 
-st.subheader("Try it out!")
-st.write("Upload an image file to try detecting hockey objects in your own hockey image, or use a sample image below.")
-
-
-if st.session_state.img is None:
-    st.file_uploader("Upload an image and Hockey Breeds v2 will find the hockey objects in the image",
-                     type=["jpg", "jpeg", "png"], key='upload_img', on_change=upload_img)
-
-    with st.expander("Sample Images"):
-        st.image('src/images/samples/v2/net-chaos.jpg')
-        st.button("Use Sample", on_click=use_sample_image)
-
-img = get_naked_image()
-if img is not None:
-
-    thresh = st.slider('Set the object confidence threshold', key='thresh',
-                       min_value=0.0, max_value=1.0, value=0.5, step=0.05)
-
-    with st.status("Detecting hockey objects..."):
-        st.write("Loading model...")
-        model_f = get_model()
-
-        st.write("Running inference on image...")
-        objects = run_inference(img, model_f, thresh)
-
-    st.dataframe(objects, column_config={
-        "0": "Object",
-        "1": "Confidence"
-    })
-
-    # check if the results list is empty
-    if len(st.session_state.results) == 0:
-        st.write('**No hockey objects found in image!**')
-        st.image(img, caption='Uploaded Image had no hockey objects')
-    else:
-        st.image(img, caption='Image with hockey object bounding boxes')
-
-    st.button("Reset Image", on_click=reset_image)
-