Ateeqq committed
Commit 5dbd83c · verified · 1 Parent(s): f2f7699

Create app.py

Files changed (1): app.py +45 -0
app.py ADDED
@@ -0,0 +1,45 @@
+ import torch
+ from transformers import AutoImageProcessor, SiglipForImageClassification
+ from PIL import Image
+ import torch.nn.functional as F
+ import gradio as gr
+
+ # Load model and processor from Hugging Face Hub
+ model_path = "Ateeqq/nsfw-image-detection"
+ processor = AutoImageProcessor.from_pretrained(model_path)
+ model = SiglipForImageClassification.from_pretrained(model_path)
+ model.eval()
+
+ def predict(image):
+     # Convert to RGB and preprocess
+     image = Image.fromarray(image).convert("RGB")
+     inputs = processor(images=image, return_tensors="pt")
+
+     # Inference
+     with torch.no_grad():
+         logits = model(**inputs).logits
+         probs = F.softmax(logits, dim=1)[0].tolist()
+
+     # Map class indices to label names and round the probabilities
+     labels = [model.config.id2label[i] for i in range(len(probs))]
+     result = {labels[i]: round(probs[i], 6) for i in range(len(labels))}
+     # gr.Label expects a {label: confidence} dict and highlights the top
+     # class itself; a string-valued entry here would break its sorting.
+     return result
+
+ # Gradio Interface
+ def main():
+     description = "NSFW Image Detection using SigLIP2 Safety Classifier"
+     iface = gr.Interface(
+         fn=predict,
+         inputs=gr.Image(type="numpy", label="Upload Image"),
+         outputs=gr.Label(num_top_classes=3, label="Predictions"),
+         title="NSFW Image Detector",
+         description=description,
+         examples=[],
+         allow_flagging="never"
+     )
+     iface.launch()
+
+ if __name__ == "__main__":
+     main()
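
For a quick local check without launching the UI, predict can be called directly, since it accepts a numpy image array. A minimal sketch, assuming a hypothetical local file test.jpg:

    import numpy as np
    from PIL import Image

    # "test.jpg" is a hypothetical path; any local RGB image works.
    img = np.array(Image.open("test.jpg").convert("RGB"))
    print(predict(img))  # prints a {label: probability} dict

Running python app.py instead starts the Gradio server, which by default serves at http://127.0.0.1:7860.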