vaibhaviiii28 committed on
Commit 9b4ae8f · verified
1 Parent(s): ef317d4

Update app.py

Files changed (1)
app.py +78 -0
app.py CHANGED
@@ -0,0 +1,78 @@
+ from fastapi import FastAPI
+ from flask import Flask, request, jsonify
+ from pydantic import BaseModel
+ from transformers import pipeline
+ from PIL import Image
+ import joblib
+ import re
+ import string
+ import io
+ import uvicorn
+ from threading import Thread
+
+ # Initialize Flask & FastAPI
+ app = Flask(__name__)
+ api = FastAPI()
+
+ # ✅ Load NSFW image classification model
+ pipe = pipeline("image-classification", model="LukeJacob2023/nsfw-image-detector")
+
+ # ✅ Load toxic text classification model (serialized with joblib)
+ try:
+     model = joblib.load("toxic_classifier.pkl")
+     vectorizer = joblib.load("vectorizer.pkl")
+     print("✅ Model & Vectorizer Loaded Successfully!")
+ except Exception as e:
+     print(f"❌ Error loading model/vectorizer: {e}")
+     exit(1)
+
+ # 📌 Text input data model
+ class TextInput(BaseModel):
+     text: str
+
+ # 🔹 Text preprocessing: lowercase, strip digits and punctuation
+ def preprocess_text(text):
+     text = text.lower()
+     text = re.sub(r'\d+', '', text)  # Remove numbers
+     text = text.translate(str.maketrans('', '', string.punctuation))  # Remove punctuation
+     return text.strip()
+
+ # 📌 NSFW image classification API (Flask)
+ @app.route('/classify_image', methods=['POST'])
+ def classify_image():
+     if 'file' not in request.files:
+         return jsonify({"error": "No file uploaded"}), 400
+
+     file = request.files['file']
+     image = Image.open(io.BytesIO(file.read()))
+     results = pipe(image)
+
+     classification_label = max(results, key=lambda x: x['score'])['label']  # Top-scoring label
+     nsfw_labels = {"sexy", "porn", "hentai"}
+     nsfw_status = "NSFW" if classification_label in nsfw_labels else "SFW"
+
+     return jsonify({"status": nsfw_status, "results": results})
+
+ # 📌 Toxic text classification API (FastAPI)
+ @api.post("/classify_text/")
+ async def classify_text(data: TextInput):
+     try:
+         processed_text = preprocess_text(data.text)
+         text_vectorized = vectorizer.transform([processed_text])
+         prediction = model.predict(text_vectorized)
+         result = "Toxic" if prediction[0] == 1 else "Safe"
+         return {"prediction": result}
+     except Exception as e:
+         return {"error": str(e)}
+
+ # 🔥 Run both servers in parallel threads (Flask dev server + Uvicorn)
+ def run_flask():
+     app.run(host="0.0.0.0", port=5000)
+
+ def run_fastapi():
+     uvicorn.run(api, host="0.0.0.0", port=8000)
+
+ if __name__ == "__main__":
+     Thread(target=run_flask).start()
+     Thread(target=run_fastapi).start()
+
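
Once the app is running, the two endpoints can be exercised as follows. A minimal sketch, assuming the ports hard-coded above (Flask on 5000, FastAPI on 8000), a local server, the requests library installed, and a placeholder image file sample.jpg (hypothetical filename, not part of this commit):

import requests

# NSFW image classification (Flask endpoint, port 5000)
with open("sample.jpg", "rb") as f:  # sample.jpg is a hypothetical test image
    resp = requests.post("http://localhost:5000/classify_image", files={"file": f})
print(resp.json())  # e.g. {"status": "SFW", "results": [...]}

# Toxic text classification (FastAPI endpoint, port 8000)
resp = requests.post("http://localhost:8000/classify_text/", json={"text": "hello there"})
print(resp.json())  # e.g. {"prediction": "Safe"}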