Updated Code
app.py (changed)

This update repoints the demo from comparing YOLOv8, YOLOv9 and YOLOv10 to comparing the YOLOv8 N, S and M checkpoints served through Roboflow Inference, and replaces the header markdown with a "Detect Something" banner.
@@ -6,42 +6,11 @@ import supervision as sv
 from inference import get_model
 
 MARKDOWN = """
-<h1 style='text-align: center'>
-Welcome to
-
-- **YOLOv8**
-<div style="display: flex; align-items: center;">
-<a href="https://github.com/ultralytics/ultralytics" style="margin-right: 10px;">
-<img src="https://badges.aleen42.com/src/github.svg">
-</a>
-<a href="https://colab.research.google.com/github/roboflow-ai/notebooks/blob/main/notebooks/train-yolov8-object-detection-on-custom-dataset.ipynb" style="margin-right: 10px;">
-<img src="https://colab.research.google.com/assets/colab-badge.svg">
-</a>
-</div>
-- **YOLOv9**
-<div style="display: flex; align-items: center;">
-<a href="https://github.com/WongKinYiu/yolov9" style="margin-right: 10px;">
-<img src="https://badges.aleen42.com/src/github.svg">
-</a>
-<a href="https://arxiv.org/abs/2402.13616" style="margin-right: 10px;">
-<img src="https://img.shields.io/badge/arXiv-2402.13616-b31b1b.svg">
-</a>
-<a href="https://colab.research.google.com/github/roboflow-ai/notebooks/blob/main/notebooks/train-yolov9-object-detection-on-custom-dataset.ipynb" style="margin-right: 10px;">
-<img src="https://colab.research.google.com/assets/colab-badge.svg">
-</a>
-</div>
-- **YOLOv10**
-<div style="display: flex; align-items: center;">
-<a href="https://github.com/THU-MIG/yolov10" style="margin-right: 10px;">
-<img src="https://badges.aleen42.com/src/github.svg">
-</a>
-<a href="https://arxiv.org/abs/2405.14458" style="margin-right: 10px;">
-<img src="https://img.shields.io/badge/arXiv-2405.14458-b31b1b.svg">
-</a>
-<a href="https://colab.research.google.com/github/roboflow-ai/notebooks/blob/main/notebooks/train-yolov10-object-detection-on-custom-dataset.ipynb" style="margin-right: 10px;">
-<img src="https://colab.research.google.com/assets/colab-badge.svg">
-</a>
-</div>
+<h1 style='text-align: center'>Detect Something 💫</h1>
+Welcome to Segment Something! Your on the go demo for instance segmentation. 🚀
+
+<h2 style='text-align: center'>Matthias Bartolo</h2>
+
 Powered by Roboflow [Inference](https://github.com/roboflow/inference) and
 [Supervision](https://github.com/roboflow/supervision). 🔥
 """
@@ -52,9 +21,9 @@ IMAGE_EXAMPLES = [
     ['https://media.roboflow.com/supervision/image-examples/basketball-1.png', 0.3, 0.3, 0.1],
 ]
 
-
-
-
+YOLO_V8N_MODEL = get_model(model_id="yolov8n-640")
+YOLO_V8S_MODEL = get_model(model_id="yolov8s-640")
+YOLO_V8M_MODEL = get_model(model_id="yolov8m-640")
 
 LABEL_ANNOTATORS = sv.LabelAnnotator(text_color=sv.Color.black())
 BOUNDING_BOX_ANNOTATORS = sv.BoundingBoxAnnotator()
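The three checkpoints above are fetched with Roboflow Inference's get_model. As a rough sanity check outside Gradio, one of these handles can be exercised directly and its response converted to a Supervision Detections object. The snippet below is only a minimal sketch that assumes the standard model.infer(...) / sv.Detections.from_inference(...) calls from the Inference and Supervision docs; it reuses the basketball image from IMAGE_EXAMPLES.

import requests
from PIL import Image
import supervision as sv
from inference import get_model

# Load one of the checkpoints referenced in the diff.
model = get_model(model_id="yolov8n-640")

# Reuse the example image from IMAGE_EXAMPLES.
url = "https://media.roboflow.com/supervision/image-examples/basketball-1.png"
image = Image.open(requests.get(url, stream=True).raw)

# infer() returns one response per input image; keep the first.
response = model.infer(image, confidence=0.3, iou_threshold=0.5)[0]
detections = sv.Detections.from_inference(response)
print(len(detections), "objects detected")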
@@ -102,26 +71,30 @@ def process_image(
     yolo_v10_confidence_threshold: float,
     iou_threshold: float
 ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
-
-
-
-
-
-
+    # Validate iou_threshold before using it
+    if iou_threshold is None or not isinstance(iou_threshold, float):
+        iou_threshold = 0.3  # Default value, adjust as necessary
+
+    yolo_v8n_annotated_image = detect_and_annotate(
+        YOLO_V8N_MODEL, input_image, yolo_v8_confidence_threshold, iou_threshold)
+    yolo_v8s_annotated_image = detect_and_annotate(
+        YOLO_V8S_MODEL, input_image, yolo_v9_confidence_threshold, iou_threshold)
+    yolo_8m_annotated_image = detect_and_annotate(
+        YOLO_V8M_MODEL, input_image, yolo_v10_confidence_threshold, iou_threshold)
 
     return (
-
-
-
+        yolo_v8n_annotated_image,
+        yolo_v8s_annotated_image,
+        yolo_8m_annotated_image
     )
 
 
-yolo_v8_confidence_threshold_component = gr.Slider(
+yolo_v8N_confidence_threshold_component = gr.Slider(
     minimum=0,
     maximum=1.0,
     value=0.3,
     step=0.01,
-    label="
+    label="YOLOv8N Confidence Threshold",
     info=(
         "The confidence threshold for the YOLO model. Lower the threshold to "
         "reduce false negatives, enhancing the model's sensitivity to detect "
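The new process_image body delegates to detect_and_annotate, which lives in an unchanged part of app.py and is not shown in this diff. Below is only a plausible sketch of such a helper, assuming it follows the usual Inference + Supervision pattern (model.infer, then sv.Detections.from_inference, then the module-level annotators); the real implementation may differ.

import supervision as sv

# Mirrors the module-level annotators defined earlier in app.py.
LABEL_ANNOTATORS = sv.LabelAnnotator(text_color=sv.Color.black())
BOUNDING_BOX_ANNOTATORS = sv.BoundingBoxAnnotator()

def detect_and_annotate(model, input_image, confidence_threshold: float, iou_threshold: float):
    # Run inference; infer() returns one response per input image.
    result = model.infer(
        input_image,
        confidence=confidence_threshold,
        iou_threshold=iou_threshold
    )[0]
    detections = sv.Detections.from_inference(result)

    # Draw bounding boxes first, then class labels on top.
    annotated_image = BOUNDING_BOX_ANNOTATORS.annotate(
        scene=input_image.copy(), detections=detections)
    annotated_image = LABEL_ANNOTATORS.annotate(
        scene=annotated_image, detections=detections)
    return annotated_image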
@@ -129,12 +102,12 @@ yolo_v8_confidence_threshold_component = gr.Slider(
         "positives, preventing the model from identifying objects it shouldn't."
     ))
 
-yolo_v9_confidence_threshold_component = gr.Slider(
+yolo_v8S_confidence_threshold_component = gr.Slider(
     minimum=0,
     maximum=1.0,
     value=0.3,
     step=0.01,
-    label="
+    label="YOLOv8S Confidence Threshold",
     info=(
         "The confidence threshold for the YOLO model. Lower the threshold to "
         "reduce false negatives, enhancing the model's sensitivity to detect "
@@ -142,12 +115,12 @@ yolo_v9_confidence_threshold_component = gr.Slider(
         "positives, preventing the model from identifying objects it shouldn't."
     ))
 
-yolo_v10_confidence_threshold_component = gr.Slider(
+yolo_v8M_confidence_threshold_component = gr.Slider(
     minimum=0,
     maximum=1.0,
     value=0.3,
     step=0.01,
-    label="
+    label="YOLOv8M Confidence Threshold",
     info=(
         "The confidence threshold for the YOLO model. Lower the threshold to "
         "reduce false negatives, enhancing the model's sensitivity to detect "
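iou_threshold_component is rendered and wired up in the hunks below, but its definition sits in an unchanged part of app.py and is not shown here. It presumably mirrors the confidence sliders above; a hypothetical sketch (the label, default value, and help text are guesses):

import gradio as gr

iou_threshold_component = gr.Slider(
    minimum=0,
    maximum=1.0,
    value=0.5,
    step=0.01,
    label="IoU Threshold",
    info="Non-maximum suppression threshold used to merge overlapping detections."
)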
@@ -174,27 +147,27 @@ with gr.Blocks() as demo:
     gr.Markdown(MARKDOWN)
     with gr.Accordion("Configuration", open=False):
         with gr.Row():
-
-
-
+            yolo_v8N_confidence_threshold_component.render()
+            yolo_v8S_confidence_threshold_component.render()
+            yolo_v8M_confidence_threshold_component.render()
             iou_threshold_component.render()
     with gr.Row():
         input_image_component = gr.Image(
             type='pil',
             label='Input'
         )
-
+        yolo_v8n_output_image_component = gr.Image(
             type='pil',
-            label='
+            label='YOLOv8N'
         )
     with gr.Row():
-
+        yolo_v8s_output_image_component = gr.Image(
             type='pil',
-            label='
+            label='YOLOv8S'
         )
-
+        yolo_v8m_output_image_component = gr.Image(
             type='pil',
-            label='
+            label='YOLOv8M'
         )
     submit_button_component = gr.Button(
         value='Submit',
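Note the pattern in this hunk: the confidence sliders are created at module level and only placed into the layout via .render(). That is a standard Gradio idiom: a component instantiated outside a gr.Blocks() context stays unrendered until .render() is called inside one. A minimal, self-contained illustration with hypothetical names:

import gradio as gr

# Created outside any Blocks context, so it is not placed anywhere yet.
example_slider = gr.Slider(minimum=0, maximum=1.0, value=0.3, step=0.01, label="Example")

with gr.Blocks() as demo:
    with gr.Row():
        example_slider.render()  # the slider is inserted into this row

if __name__ == "__main__":
    demo.launch()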
@@ -206,15 +179,15 @@ with gr.Blocks() as demo:
         examples=IMAGE_EXAMPLES,
         inputs=[
             input_image_component,
-
-
-
+            yolo_v8N_confidence_threshold_component,
+            yolo_v8S_confidence_threshold_component,
+            yolo_v8M_confidence_threshold_component,
             iou_threshold_component
         ],
         outputs=[
-
-
-
+            yolo_v8n_output_image_component,
+            yolo_v8s_output_image_component,
+            yolo_v8m_output_image_component
         ]
     )
 
@@ -222,15 +195,15 @@ with gr.Blocks() as demo:
         fn=process_image,
         inputs=[
             input_image_component,
-
-
-
+            yolo_v8N_confidence_threshold_component,
+            yolo_v8S_confidence_threshold_component,
+            yolo_v8M_confidence_threshold_component,
             iou_threshold_component
         ],
         outputs=[
-
-
-
+            yolo_v8n_output_image_component,
+            yolo_v8s_output_image_component,
+            yolo_v8m_output_image_component
         ]
     )
 
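process_image keeps the old yolo_v8/v9/v10_confidence_threshold parameter names even though it now drives the N, S and M checkpoints. With the definitions above in scope, the function can also be called directly, outside the Gradio event wiring; a minimal sketch reusing the example image URL from IMAGE_EXAMPLES (threshold values are illustrative):

import requests
from PIL import Image

url = "https://media.roboflow.com/supervision/image-examples/basketball-1.png"
image = Image.open(requests.get(url, stream=True).raw)

# Keyword names follow the unchanged signature of process_image.
v8n_img, v8s_img, v8m_img = process_image(
    image,
    yolo_v8_confidence_threshold=0.3,
    yolo_v9_confidence_threshold=0.3,
    yolo_v10_confidence_threshold=0.3,
    iou_threshold=0.1,
)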