Ateeqq committed
Commit ce8609f · verified · 1 Parent(s): a6bcf3b

Update README.md

Files changed (1): README.md (+6 -8)
README.md CHANGED
@@ -73,25 +73,23 @@ model_path = "Ateeqq/nsfw-image-detection"
 processor = AutoImageProcessor.from_pretrained(model_path)
 model = SiglipForImageClassification.from_pretrained(model_path)
 
-image = Image.open("your_image_path.jpg")
+image_path = r"your_image_path.jpg"
+image = Image.open(image_path).convert("RGB")
 inputs = processor(images=image, return_tensors="pt")
 
 with torch.no_grad():
     logits = model(**inputs).logits
-
 probabilities = F.softmax(logits, dim=1)
 
 predicted_class_id = logits.argmax().item()
 predicted_class_label = model.config.id2label[predicted_class_id]
-
 confidence_scores = probabilities[0].tolist()
 
 print(f"Predicted class ID: {predicted_class_id}")
 print(f"Predicted class label: {predicted_class_label}\n")
-
 for i, score in enumerate(confidence_scores):
     label = model.config.id2label[i]
-    print(f"Confidence for '{label}': {score:.4f}")
+    print(f"Confidence for '{label}': {score:.6f}")
 ```
 
 ### Output
@@ -100,9 +98,9 @@ for i, score in enumerate(confidence_scores):
 Predicted class ID: 0
 Predicted class label: graphically_violent
 
-Confidence for 'graphically_violent': 0.9941
-Confidence for 'nudity_pornography': 0.0040
-Confidence for 'safe_normal': 0.0019
+Confidence for 'graphically_violent': 0.999988
+Confidence for 'nudity_pornography': 0.000004
+Confidence for 'safe_normal': 0.000008
 ```
 
 ---
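
For reference, here is a sketch of the snippet as it reads after this commit, assembled into one runnable block. The import lines are an assumption: they sit above the diffed region in the full README and are reconstructed here from the identifiers the hunk context uses (`Image`, `torch`, `F`, `AutoImageProcessor`, `SiglipForImageClassification`).

```python
# Assumed imports, reconstructed from the identifiers used in the hunk
# context; the actual import block lives above the diffed region.
import torch
import torch.nn.functional as F
from PIL import Image
from transformers import AutoImageProcessor, SiglipForImageClassification

model_path = "Ateeqq/nsfw-image-detection"

processor = AutoImageProcessor.from_pretrained(model_path)
model = SiglipForImageClassification.from_pretrained(model_path)

# Load the image and force three channels before preprocessing.
image_path = r"your_image_path.jpg"
image = Image.open(image_path).convert("RGB")
inputs = processor(images=image, return_tensors="pt")

# Forward pass without gradient tracking; softmax turns logits
# into per-class probabilities.
with torch.no_grad():
    logits = model(**inputs).logits
probabilities = F.softmax(logits, dim=1)

predicted_class_id = logits.argmax().item()
predicted_class_label = model.config.id2label[predicted_class_id]
confidence_scores = probabilities[0].tolist()

print(f"Predicted class ID: {predicted_class_id}")
print(f"Predicted class label: {predicted_class_label}\n")
for i, score in enumerate(confidence_scores):
    label = model.config.id2label[i]
    print(f"Confidence for '{label}': {score:.6f}")
```

The added `.convert("RGB")` normalizes grayscale, palette, and RGBA inputs to three channels before they reach the processor, which is presumably why the commit introduces it alongside the wider `:.6f` precision in the printed scores.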