# NSFW-DETECT / app.py
from PIL import Image
import gradio as gr
from transformers import pipeline

# Load the NSFW image-classification pipeline from the Hugging Face Hub
classifier = pipeline("image-classification", model="Falconsai/nsfw_image_detection")
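# For a single image, the pipeline returns a list of dictionaries such as
# [{'label': 'normal', 'score': ...}, {'label': 'nsfw', 'score': ...}]
# ('normal' and 'nsfw' are the labels reported on this model's card).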

def classify_image(image):
    """
    Classifies the input image with the NSFW image-detection pipeline.

    Args:
        image: A PIL Image object (or a NumPy array).

    Returns:
        A dictionary mapping each predicted label to its confidence score.
    """
    predictions = classifier(image)
    # Convert the list of predictions into the {label: score} mapping that gr.Label expects
    return {prediction['label']: prediction['score'] for prediction in predictions}
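
# A quick local sanity check (hypothetical; assumes 'cat.jpeg' exists next to app.py):
#   result = classify_image(Image.open("cat.jpeg"))
#   print(result)  # e.g. {'normal': 0.99, 'nsfw': 0.01}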

# Create the Gradio interface
iface = gr.Interface(
    fn=classify_image,
    inputs=gr.Image(type="pil", label="Upload Image"),        # image input, passed to the function as a PIL object
    outputs=gr.Label(num_top_classes=5, label="Predictions"), # displays the label/score pairs
    title="NSFW Image Classifier",
    description=(
        "Upload an image to classify it as NSFW (Not Safe For Work) or SFW (Safe For Work). "
        "This app uses the Falconsai/nsfw_image_detection model from Hugging Face."
    ),
    examples=[
        ["cat.jpeg"],
        ["dog.png"],
    ],
)

iface.launch()
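
# Note: launch() starts a local server by default; passing share=True
# (a standard Gradio option) would also create a temporary public link.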