BhumikaMak committed on
Commit
d3127bb
·
1 Parent(s): 63152db

Fix: interface update

Browse files
Files changed (1) hide show
  1. app.py +31 -13
app.py CHANGED
@@ -6,7 +6,7 @@ import torchvision.transforms as transforms
6
  import gradio as gr
7
  from yolov5 import xai_yolov5
8
  from yolov8 import xai_yolov8s
9
-
10
  def process_image(image, yolo_versions=["yolov5"]):
11
  image = np.array(image)
12
  image = cv2.resize(image, (640, 640))
@@ -20,7 +20,7 @@ def process_image(image, yolo_versions=["yolov5"]):
20
  result_images.append((Image.fromarray(image), f"{yolo_version} not yet implemented."))
21
  return result_images
22
 
23
-
24
  sample_images = {
25
  "Sample 1": os.path.join(os.getcwd(), "data/xai/sample1.jpeg"),
26
  "Sample 2": os.path.join(os.getcwd(), "data/xai/sample2.jpg"),
@@ -59,23 +59,41 @@ with gr.Blocks() as interface:
59
  outputs=result_gallery,
60
  )
61
  """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
62
 
 
 
63
  with gr.Blocks() as interface:
64
  gr.Markdown("# XAI: Visualize Object Detection of Your Models")
65
  gr.Markdown("Select a sample image to visualize object detection.")
66
-
67
- with gr.Row():
68
- sample_selection = gr.Radio(
69
- choices=list(sample_images.keys()),
70
- label="Select a Sample Image",
71
- type="value",
72
- )
73
- sample_display = gr.Image(label="Sample Image Preview", value=default_sample_image)
74
- sample_selection.change(fn=load_sample_image, inputs=sample_selection, outputs=sample_display)
75
-
76
  selected_models = gr.CheckboxGroup(
77
  choices=["yolov5", "yolov8s"],
78
- value=["yolov5"],
79
  label="Select Model(s)",
80
  )
81
  result_gallery = gr.Gallery(label="Results", elem_id="gallery", rows=2, height=500)
 
6
  import gradio as gr
7
  from yolov5 import xai_yolov5
8
  from yolov8 import xai_yolov8s
9
+ """
10
  def process_image(image, yolo_versions=["yolov5"]):
11
  image = np.array(image)
12
  image = cv2.resize(image, (640, 640))
 
20
  result_images.append((Image.fromarray(image), f"{yolo_version} not yet implemented."))
21
  return result_images
22
 
23
+ """
24
  sample_images = {
25
  "Sample 1": os.path.join(os.getcwd(), "data/xai/sample1.jpeg"),
26
  "Sample 2": os.path.join(os.getcwd(), "data/xai/sample2.jpg"),
 
59
  outputs=result_gallery,
60
  )
61
  """
62
def load_sample_image(choice):
    """Load the sample image registered under *choice* and return it as RGB.

    Args:
        choice: Key into the module-level ``sample_images`` mapping.

    Returns:
        numpy.ndarray: The image in RGB channel order, shape (H, W, 3).

    Raises:
        ValueError: If *choice* is not a known sample name.
        FileNotFoundError: If the registered path cannot be read.
    """
    if choice not in sample_images:
        raise ValueError("Invalid sample selection.")
    image_path = sample_images[choice]
    bgr = cv2.imread(image_path)  # OpenCV loads in BGR order; returns None on failure
    if bgr is None:
        # Original code would crash with a cryptic TypeError on the slice below;
        # fail loudly with the offending path instead.
        raise FileNotFoundError(f"Could not read sample image: {image_path}")
    return bgr[:, :, ::-1]  # Convert BGR to RGB for display
68
+
69
+
70
def process_image(choice, yolo_versions=None):
    """Run the selected YOLO XAI pipelines on the chosen sample image.

    Args:
        choice: Sample-image key, forwarded to ``load_sample_image``.
        yolo_versions: Iterable of model identifiers ("yolov5", "yolov8s").
            Defaults to ``["yolov5"]``. A ``None`` sentinel is used instead of
            a list literal to avoid the shared mutable-default-argument pitfall.

    Returns:
        list: One result per requested model — either the output of the
        corresponding ``xai_*`` function, or a ``(PIL.Image, message)``
        placeholder tuple for unimplemented model names.
    """
    if yolo_versions is None:
        yolo_versions = ["yolov5"]
    image = load_sample_image(choice)
    image = np.array(image)
    # Models in this app expect a fixed 640x640 input.
    image = cv2.resize(image, (640, 640))
    result_images = []
    for yolo_version in yolo_versions:
        if yolo_version == "yolov5":
            result_images.append(xai_yolov5(image))
        elif yolo_version == "yolov8s":
            result_images.append(xai_yolov8s(image))
        else:
            # Graceful placeholder so the gallery still shows an entry.
            result_images.append((Image.fromarray(image), f"{yolo_version} not yet implemented."))
    return result_images
83
 
84
+
85
+ import gradio as gr
86
  with gr.Blocks() as interface:
87
  gr.Markdown("# XAI: Visualize Object Detection of Your Models")
88
  gr.Markdown("Select a sample image to visualize object detection.")
89
+ sample_selection = gr.Radio(
90
+ choices=list(sample_images.keys()),
91
+ label="Select a Sample Image",
92
+ type="value",
93
+ )
 
 
 
 
 
94
  selected_models = gr.CheckboxGroup(
95
  choices=["yolov5", "yolov8s"],
96
+ value=["yolov5"],
97
  label="Select Model(s)",
98
  )
99
  result_gallery = gr.Gallery(label="Results", elem_id="gallery", rows=2, height=500)