explorewithai committed on
Commit ef615b9 · verified · 1 Parent(s): d3af1f0

Update app.py

Files changed (1)
  1. app.py +20 -8
app.py CHANGED
@@ -1,9 +1,17 @@
 from PIL import Image
 import gradio as gr
 from transformers import pipeline
+import concurrent.futures
 
 # Load the image classification pipeline
-classifier = pipeline("image-classification", model="Falconsai/nsfw_image_detection")
+# Using a try-except block for better error handling when loading the model.
+try:
+    classifier = pipeline("image-classification", model="Falconsai/nsfw_image_detection")
+except Exception as e:
+    print(f"Error loading model: {e}")
+    # Handle the error appropriately, e.g., exit the program or display an error message to the user.
+    exit()
+
 
 def classify_image(image):
     """
@@ -13,24 +21,28 @@ def classify_image(image):
         image: A PIL Image object or a NumPy array.
 
     Returns:
-        A dictionary of labels and scores, or a list of dictionaries if a batch of images is provided.
+        A dictionary of labels and scores.
     """
     predictions = classifier(image)
     # Format the output for Gradio
     return {prediction['label']: prediction['score'] for prediction in predictions}
 
-# Create the Gradio interface
+
+# Create a ThreadPoolExecutor with max_workers=20
+executor = concurrent.futures.ThreadPoolExecutor(max_workers=20)
+
+# Modified Gradio interface to use the ThreadPoolExecutor
 iface = gr.Interface(
     fn=classify_image,
     inputs=gr.Image(type="pil", label="Upload Image"),  # Use gr.Image for image input
-    outputs=gr.Label(num_top_classes=5, label = "Predictions"),  # Use gr.Label to display the results
+    outputs=gr.Label(num_top_classes=5, label="Predictions"),  # Use gr.Label to display the results
     title="NSFW Image Classifier",
     description="Upload an image to classify it as NSFW (Not Safe For Work) or SFW (Safe For Work). This model uses the Falconsai/nsfw_image_detection model from Hugging Face.",
     examples=[
-        ["porn.jpg"],
-        ["cat.jpg"],
-        ["dog.jpg"]
+        ["porn.jpg"],
+        ["cat.jpg"],
+        ["dog.jpg"]
     ],
 )
 
-iface.launch()
+iface.launch(executor=executor)  # Pass the executor to the launch method
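
A note on the concurrency change: Gradio's Interface.launch() is not documented to accept an executor keyword argument, so the final line of the new version may fail with a TypeError at startup. A minimal sketch of the same intent using Gradio's own concurrency controls follows; it assumes Gradio 4.x, where queue() accepts default_concurrency_limit and launch() accepts max_threads, and it reuses the classify_image function defined in app.py above.

import gradio as gr

# Assumes classify_image from app.py above is defined in this scope.
iface = gr.Interface(
    fn=classify_image,
    inputs=gr.Image(type="pil", label="Upload Image"),
    outputs=gr.Label(num_top_classes=5, label="Predictions"),
)

# Allow up to 20 requests to run concurrently (Gradio 4.x keyword;
# Gradio 3.x used queue(concurrency_count=...) for the same purpose).
iface.queue(default_concurrency_limit=20)

# max_threads caps the worker threads Gradio spawns for event handlers.
iface.launch(max_threads=20)

With this approach the explicit concurrent.futures.ThreadPoolExecutor is not needed, since Gradio schedules calls to the classifier on its own worker threads.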