fastsdcpu / backend /safety_check.py
rupeshs's picture
Update safety checker
df713c0
raw
history blame
374 Bytes
from transformers import pipeline
def is_safe_image(
    classifier,
    image,
):
    """Return True when *image* is judged safe (non-NSFW) by *classifier*.

    The classifier is expected to be a callable (e.g. a transformers
    image-classification pipeline) returning a list of dicts with
    ``"label"`` and ``"score"`` keys. Labels other than ``"nsfw"`` and
    ``"normal"`` are ignored; a missing label counts as score 0, so an
    empty or unrecognized prediction set is treated as unsafe.
    """
    # Map label -> score; if a label repeats, the last entry wins,
    # matching a sequential scan over the predictions.
    scores = {entry["label"]: entry["score"] for entry in classifier(image)}
    return scores.get("normal", 0) > scores.get("nsfw", 0)