from PIL import Image
import gradio as gr
from transformers import pipeline

# Load the image classification pipeline.
# A try/except block gives a clear failure message if the model cannot be
# downloaded or initialized.
try:
    classifier = pipeline("image-classification", model="Falconsai/nsfw_image_detection")
except Exception as e:
    print(f"Error loading model: {e}")
    raise SystemExit(1)
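
# Optional smoke test (a sketch, not part of the original app): confirm the
# pipeline responds before starting the UI, e.g. on a blank RGB image.
# test_image = Image.new("RGB", (224, 224))
# print(classifier(test_image))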


def classify_image(image):
    """
    Classifies the input image using the NSFW image detection pipeline.

    Args:
        image: A PIL Image object or a NumPy array.

    Returns:
        A dictionary of labels and scores.
    """
    predictions = classifier(image)
    # Format the output for Gradio
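    # The pipeline returns a list of dicts, e.g.
    # [{'label': 'normal', 'score': 0.98}, {'label': 'nsfw', 'score': 0.02}]
    # (label names assumed from the model card); the comprehension below
    # reshapes that into the {label: score} mapping that gr.Label expects.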
    return {prediction['label']: prediction['score'] for prediction in predictions}


# Build the Gradio interface.
iface = gr.Interface(
    fn=classify_image,
    inputs=gr.Image(type="pil", label="Upload Image"),  # deliver the upload as a PIL Image
    outputs=gr.Label(num_top_classes=2, label="Predictions"),  # the model emits two labels (normal/nsfw)
    title="NSFW Image Classifier",
    description="Upload an image to classify it as NSFW (Not Safe For Work) or SFW (Safe For Work). This model uses the Falconsai/nsfw_image_detection model from Hugging Face.",
    # Example images; these files must exist alongside the script.
    examples=[
        ["porn.jpg"],
        ["cat.jpg"],
        ["dog.jpg"],
    ],
)

# launch() does not accept an executor argument; max_threads caps the total
# number of worker threads Gradio may spawn instead.
iface.launch(max_threads=20)
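
# Alternative sketch, assuming Gradio 4.x: per-event concurrency can also be
# limited through the request queue rather than the thread count:
# iface.queue(default_concurrency_limit=20).launch()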