update: html layout
app.py
CHANGED
@@ -7,35 +7,6 @@ yolov8_result = os.path.join(os.getcwd(), "data/xai/yolov8.png")
 yolov5_dff = os.path.join(os.getcwd(), "data/xai/yolov5_dff.png")
 yolov8_dff = os.path.join(os.getcwd(), "data/xai/yolov8_dff.png")
 
-architecture_description_yolov5 = """
-### YOLOv5 Architecture
-
-🖥️ **Backbone:** Uses **CSPDarknet53** for feature extraction with **ResNet-like** residual connections.
-
-🔗 **Neck:** Combines **PANet** and **FPN** to aggregate features at multiple scales.
-
-🧠 **Head:** Predicts bounding boxes (**x, y, w, h**), class probabilities, and objectness scores.
-
-📉 **Loss Functions:** **CIoU** for bounding box regression. and **Cross-entropy** for classification.
-
-🗺️ **Grid-based Detection:** Divides input into grid cells predicting multiple bounding boxes.
-
-🚫 **Non-Maximum Suppression (NMS):** Filters overlapping boxes with high confidence.
-
-"""
-
-architecture_description_yolov8s = """
-### YOLOv8s Architecture
-
-- **Backbone**: Uses **CSPDarknet** with efficient feature extraction layers.
-- **Neck**: Incorporates **FPN** and **PANet** for multi-scale feature aggregation.
-- **Head**: Predicts bounding boxes, objectness scores, and class probabilities.
-- **Loss Functions**: **CIoU** for bounding box regression, **cross-entropy** for classification.
-- **Grid-based Prediction**: Outputs predictions per grid cell using multiple anchor boxes.
-- **Post-Processing**: **NMS** removes redundant boxes with high overlap.
-
-"""
-
 description_yolov5 = """
 **Feature Focus for Dogs:**
 - **Face & Snout**: Eyes, nose, and mouth for recognition.
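The removed YOLOv5/YOLOv8s blurbs describe standard detector post-processing (grid predictions, CIoU loss, NMS). For the NMS step they mention, a minimal illustrative sketch using torchvision's stock `nms` (not the app's own code, and not Ultralytics' post-processing):

    import torch
    from torchvision.ops import nms

    # Toy detections: boxes are (x1, y1, x2, y2); the first two overlap heavily.
    boxes = torch.tensor([[10., 10., 60., 60.],
                          [12., 12., 62., 62.],
                          [100., 100., 150., 150.]])
    scores = torch.tensor([0.90, 0.75, 0.85])

    # Keep the highest-scoring box among any group whose IoU exceeds the threshold.
    keep = nms(boxes, scores, iou_threshold=0.5)
    print(keep)  # tensor([0, 2]) -- the lower-scoring duplicate is suppressed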
@@ -273,8 +244,8 @@ with gr.Blocks(css=custom_css) as demo:
 
 with gr.Row():
 with gr.Column():
-
-
+gr.Markdown(""" ##Yolov5""")
+html_content1 = """
 <div style="display: flex; gap: 10px;">
 <a href="https://github.com/ultralytics/yolov5/actions" target="_blank">
 <img src="https://img.shields.io/badge/YOLOv5%20CI-passing-brightgreen" alt="YOLOv5 CI">
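The new `gr.Markdown(""" ##Yolov5""")` heading added here relies on Markdown heading syntax; a tiny standalone sketch of that call (layout assumed, not taken from the app):

    import gradio as gr

    # Minimal sketch of the heading pattern used in this commit (layout assumed).
    # CommonMark expects a space after "##", so "## YOLOv5" is the safe spelling
    # if the heading should render as an <h2>.
    with gr.Blocks() as demo:
        with gr.Column():
            gr.Markdown("## YOLOv5")

    demo.launch()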
@@ -291,12 +262,9 @@ with gr.Blocks(css=custom_css) as demo:
 <a href="https://colab.research.google.com/" target="_blank">
 <img src="https://img.shields.io/badge/Open%20in%20Colab-orange" alt="Open in Colab">
 </a>
-<a href="https://www.kaggle.com/" target="_blank">
-<img src="https://img.shields.io/badge/Open%20in%20Kaggle-blue" alt="Open in Kaggle">
-</a>
 </div>
 """
-gr.HTML(
+gr.HTML(html_content1)
 # gr.HTML(get_netron_html(yolov5_url))
 gr.Image(yolov5_result, label="Detections & Interpretability Map")
 gr.Markdown(description_yolov5)
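Together with the previous hunk, this moves the badge markup into a named string that is passed to `gr.HTML` once the layout is built. A self-contained sketch of that pattern, with the badge list trimmed to a single shield and the result image path assumed to exist:

    import os
    import gradio as gr

    # Assumed asset path, matching the one used earlier in the diff.
    yolov5_result = os.path.join(os.getcwd(), "data/xai/yolov5.png")

    # Badge markup kept in a variable, as in the new version of app.py,
    # then rendered verbatim by gr.HTML.
    html_content1 = """
    <div style="display: flex; gap: 10px;">
      <a href="https://github.com/ultralytics/yolov5/actions" target="_blank">
        <img src="https://img.shields.io/badge/YOLOv5%20CI-passing-brightgreen" alt="YOLOv5 CI">
      </a>
    </div>
    """

    with gr.Blocks() as demo:
        with gr.Row():
            with gr.Column():
                gr.Markdown("## YOLOv5")
                gr.HTML(html_content1)
                gr.Image(yolov5_result, label="Detections & Interpretability Map")

    demo.launch()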
@@ -304,7 +272,30 @@ with gr.Blocks(css=custom_css) as demo:
 
 
 with gr.Column():
-gr.Markdown(
+gr.Markdown(""" ##Yolov8s""")
+html_content2 = """
+<div style="display: flex; gap: 10px;">
+<a href="https://github.com/ultralytics/ultralytics/actions" target="_blank">
+<img src="https://img.shields.io/badge/YOLOv8%20CI-passing-brightgreen" alt="YOLOv8 CI">
+</a>
+<a href="https://doi.org/10.5281/zenodo.7347926" target="_blank">
+<img src="https://img.shields.io/badge/DOI-10.5281%2Fzenodo.7347926-blue" alt="DOI">
+</a>
+<a href="https://hub.docker.com/r/ultralytics/ultralytics" target="_blank">
+<img src="https://img.shields.io/badge/docker%20pulls-500k-blue" alt="Docker Pulls">
+</a>
+<a href="https://gradient.paperspace.com" target="_blank">
+<img src="https://img.shields.io/badge/Run%20on%20Gradient-red" alt="Run on Gradient">
+</a>
+<a href="https://colab.research.google.com/" target="_blank">
+<img src="https://img.shields.io/badge/Open%20in%20Colab-orange" alt="Open in Colab">
+</a>
+<a href="https://www.kaggle.com/" target="_blank">
+<img src="https://img.shields.io/badge/Open%20in%20Kaggle-blue" alt="Open in Kaggle">
+</a>
+</div>
+"""
+gr.HTML(html_content2)
 # gr.HTML(get_netron_html(yolov8_url))
 gr.Image(yolov8_result, label="Detections & Interpretability Map")
 gr.Markdown(description_yolov8)
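The badges in both columns are static shields.io URLs of the form `label-message-color`, with spaces and slashes percent-encoded (`YOLOv8%20CI`, `10.5281%2Fzenodo.7347926`). A small, purely illustrative helper for composing such URLs (not part of app.py):

    from urllib.parse import quote

    def shield_url(label: str, message: str, color: str) -> str:
        """Build a static shields.io badge URL with spaces and '/' escaped.
        (Literal dashes inside label/message would additionally need doubling
        per shields.io's static-badge rules.)"""
        enc = lambda s: quote(s, safe="")
        return f"https://img.shields.io/badge/{enc(label)}-{enc(message)}-{color}"

    print(shield_url("YOLOv8 CI", "passing", "brightgreen"))
    # https://img.shields.io/badge/YOLOv8%20CI-passing-brightgreen
    print(shield_url("DOI", "10.5281/zenodo.7347926", "blue"))
    # https://img.shields.io/badge/DOI-10.5281%2Fzenodo.7347926-blue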