explorewithai committed on
Commit 60ca1d9 · verified · 1 Parent(s): 3cce5ae

Create app.py

Files changed (1)
  1. app.py +35 -0
app.py ADDED
@@ -0,0 +1,35 @@
+from PIL import Image
+import gradio as gr
+from transformers import pipeline
+
+# Load the image classification pipeline
+classifier = pipeline("image-classification", model="Falconsai/nsfw_image_detection")
+
+def classify_image(image):
+    """
+    Classifies the input image using the NSFW image detection pipeline.
+
+    Args:
+        image: A PIL Image object or a NumPy array.
+
+    Returns:
+        A dictionary mapping each predicted label to its confidence score.
+    """
+    predictions = classifier(image)
+    # Format the output as {label: score} for Gradio's gr.Label component
+    return {prediction['label']: prediction['score'] for prediction in predictions}
+
+# Create the Gradio interface
+iface = gr.Interface(
+    fn=classify_image,
+    inputs=gr.Image(type="pil", label="Upload Image"),  # Use gr.Image for image input
+    outputs=gr.Label(num_top_classes=5, label="Predictions"),  # Use gr.Label to display the results
+    title="NSFW Image Classifier",
+    description="Upload an image to classify it as NSFW (Not Safe For Work) or SFW (Safe For Work). This model uses the Falconsai/nsfw_image_detection model from Hugging Face.",
+    examples=[
+        ["cat.jpeg"],
+        ["dog.png"]
+    ],
+)
+
+iface.launch()
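
For reference, a minimal sketch of what the pipeline call inside classify_image returns and how the dict comprehension reshapes it for gr.Label. The file name and the exact label strings shown in the comments are illustrative assumptions, not output captured from this Space.

    from PIL import Image
    from transformers import pipeline

    classifier = pipeline("image-classification", model="Falconsai/nsfw_image_detection")

    img = Image.open("cat.jpeg")      # any local image file (assumed to exist)
    predictions = classifier(img)
    # predictions is a list of dicts, roughly:
    # [{'label': 'normal', 'score': 0.99}, {'label': 'nsfw', 'score': 0.01}]

    scores = {p['label']: p['score'] for p in predictions}
    # scores is the {label: score} mapping that gr.Label displays, e.g.:
    # {'normal': 0.99, 'nsfw': 0.01}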