# NSFW-DETECT / app.py
from PIL import Image
import gradio as gr
from transformers import pipeline

# Load the image classification pipeline.
# Model loading is wrapped in try/except so a download or load failure produces a clear error.
try:
    classifier = pipeline("image-classification", model="Falconsai/nsfw_image_detection")
except Exception as e:
    # Stop the app with an explanatory message if the model cannot be loaded.
    raise SystemExit(f"Error loading model: {e}")

def classify_image(image):
    """
    Classify the input image using the NSFW image detection pipeline.

    Args:
        image: A PIL Image object or a NumPy array.

    Returns:
        A dictionary mapping each predicted label to its confidence score.
    """
    if image is None:
        # Nothing was uploaded; return an empty result instead of passing None to the pipeline.
        return {}
    predictions = classifier(image)
    # Format the output as {label: score} for gr.Label.
    return {prediction["label"]: prediction["score"] for prediction in predictions}
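
# Quick manual check outside the Gradio UI (a sketch; it assumes one of the example
# images listed below, e.g. cat.jpg, is present in the working directory):
# print(classify_image(Image.open("cat.jpg")))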

# Build the Gradio interface.
iface = gr.Interface(
    fn=classify_image,
    inputs=gr.Image(type="pil", label="Upload Image"),  # PIL image input
    outputs=gr.Label(num_top_classes=5, label="Predictions"),  # Display the predicted labels and scores
    title="NSFW Image Classifier",
    description="Upload an image to classify it as NSFW (Not Safe For Work) or SFW (Safe For Work). This app uses the Falconsai/nsfw_image_detection model from Hugging Face.",
    examples=[
        ["porn.jpg"],
        ["cat.jpg"],
        ["dog.jpg"],
    ],
)

# launch() has no executor parameter; Gradio manages request concurrency itself,
# and max_threads caps the size of its internal worker pool.
iface.launch(max_threads=20)
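
# To run this Space locally (assuming the required packages are available):
#   pip install gradio transformers torch pillow
#   python app.py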