Commit 1cc8cac
Parent(s): 920afea
Fix: model dependency + default sample
app.py CHANGED
@@ -21,6 +21,7 @@ def process_image(image, yolo_versions=["yolov5"]):
         result_images.append((Image.fromarray(image), f"{yolo_version} not yet implemented."))
     return result_images

+
 sample_images = {
     "Sample 1": os.path.join(os.getcwd(), "data/xai/sample1.jpeg"),
     "Sample 2": os.path.join(os.getcwd(), "data/xai/sample2.jpg"),
@@ -31,17 +32,20 @@ def load_sample_image(sample_name):
         return Image.open(image_path)
     return None

+# Default sample image
 default_sample_image = load_sample_image("Sample 1")
+
+# Gradio interface
 with gr.Blocks() as interface:
     gr.Markdown("# XAI: Upload an image to visualize object detection of your models..")
     gr.Markdown("Upload an image or select a sample image to visualize object detection.")

     with gr.Row():
-        uploaded_image = gr.Image(type="pil", label="Upload an Image"
+        uploaded_image = gr.Image(type="pil", label="Upload an Image")
         sample_selection = gr.Dropdown(
             choices=list(sample_images.keys()),
             label="Select a Sample Image",
-            type="
+            type="value",
         )
         sample_display = gr.Image(label="Sample Image Preview", value=default_sample_image)
         sample_selection.change(fn=load_sample_image, inputs=sample_selection, outputs=sample_display)
@@ -53,11 +57,10 @@ with gr.Blocks() as interface:
     )

     result_gallery = gr.Gallery(label="Results", elem_id="gallery", rows=2, height=500)
-
-    print(sample_display)
+
     gr.Button("Run").click(
         fn=process_image,
-        inputs=[uploaded_image, selected_models],
+        inputs=[uploaded_image, selected_models, sample_display],
         outputs=result_gallery,
     )
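Note: the updated click() call now wires three inputs (uploaded_image, selected_models, sample_display) into process_image, while the hunk header still shows process_image(image, yolo_versions=["yolov5"]) with only two parameters. The sketch below shows one signature that would accept the extra input; the parameter name sample_image and the fall-back-to-sample behaviour are assumptions for illustration, not part of this commit.

import numpy as np
from PIL import Image

def process_image(image, yolo_versions=["yolov5"], sample_image=None):
    # Assumed behaviour: use the sample preview when nothing was uploaded.
    if image is None:
        image = sample_image
    if image is None:
        return []
    array = np.array(image)  # gr.Image(type="pil") delivers a PIL image
    result_images = []
    for yolo_version in yolo_versions:
        # Placeholder output, mirroring the "not yet implemented" branch in the diff.
        result_images.append((Image.fromarray(array), f"{yolo_version} not yet implemented."))
    return result_images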
|