Update app.py
app.py CHANGED
@@ -12,26 +12,24 @@ load_dotenv()
 genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
 model_gemini = genai.GenerativeModel("gemini-pro")
 
-def
-    prompt = f"""
-    Format it like this:
+def get_gemini_advice_from_text(text):
+    prompt = f"""Suggest one Qur'an verse or Hadith to comfort and guide a person who wrote the following:
 
-
-    [Verse Arabic]
+    '{text}'
 
-
-
+    Please include:
+    - Arabic verse or hadith
+    - English translation
+    - Source (Surah or Hadith reference)
 
-
-    [Surah or Hadith Reference]"""
+    Wrap it in a gentle tone."""
     try:
         response = model_gemini.generate_content(prompt)
-        return f"""<div class='notion-card
-        <h3>{emotion.capitalize()}</h3>
+        return f"""<div class='notion-card fade-in'>
         <p style='white-space: pre-wrap;'>{response.text}</p>
         </div>"""
     except Exception as e:
-        return f"<div class='notion-card'><
+        return f"<div class='notion-card fade-in'><p>May Allah guide your heart.</p></div>"
 
 model_name = "SamLowe/roberta-base-go_emotions"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
@@ -80,24 +78,54 @@ def analyze_combined(text, threshold, image):
         img_label = None
 
     final_label = text_label if img_label is None else img_label
-    card = islamic_advice.get(final_label)
+    card = islamic_advice.get(final_label)
+    if not card:
+        card = get_gemini_advice_from_text(text)
     return card
 
-def analyze_batch_csv(file):
-    df = pd.read_csv(file.name)
-    results = []
-    for text in df['text']:
-        inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
-        with torch.no_grad():
-            logits = model(**inputs).logits
-        probs = sigmoid(logits)[0]
-        idx = torch.argmax(probs).item()
-        label = model.config.id2label[idx].lower()
-        advice = label.capitalize()
-        results.append({"text": text, "emotion": label, "advice": advice})
-    return pd.DataFrame(results)
-
 custom_css = """
+@keyframes slideInUp {
+  from {
+    transform: translateY(20px);
+    opacity: 0;
+  }
+  to {
+    transform: translateY(0);
+    opacity: 1;
+  }
+}
+@keyframes glowFadeIn {
+  0% {
+    box-shadow: 0 0 0px rgba(0, 0, 0, 0);
+    opacity: 0;
+  }
+  100% {
+    box-shadow: 0 0 15px rgba(59, 130, 246, 0.3);
+    opacity: 1;
+  }
+}
+.fade-in {
+  animation: fadeInPop 0.6s ease-out both;
+}
+.slide-up {
+  animation: slideInUp 0.6s ease-out both;
+}
+.glow-card {
+  animation: glowFadeIn 1s ease-in-out both;
+}
+@keyframes fadeInPop {
+  0% {
+    opacity: 0;
+    transform: scale(0.95);
+  }
+  100% {
+    opacity: 1;
+    transform: scale(1);
+  }
+}
+.fade-in {
+  animation: fadeInPop 0.6s ease-out both;
+}
 body {
     background: #f9fafb;
     font-family: 'Inter', sans-serif;
@@ -129,9 +157,5 @@ with gr.Blocks(css=custom_css) as demo:
     result = gr.HTML()
     btn.click(fn=analyze_combined, inputs=[text_input, threshold_slider, image_input], outputs=result)
 
-
-    file_input = gr.File(file_types=[".csv"], label="Upload CSV with 'text' column")
-    table_output = gr.Dataframe()
-    file_input.change(fn=analyze_batch_csv, inputs=file_input, outputs=table_output)
-
+
 demo.launch()
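
Note on the change: the commit removes the CSV batch feature and instead wires a Gemini fallback into analyze_combined, so that when the detected emotion has no entry in the curated islamic_advice dict, the new get_gemini_advice_from_text helper asks Gemini for a verse or hadith rather than returning nothing. The following is a minimal sketch of that fallback pattern, not the app's exact code: it assumes the google-generativeai package is installed, GEMINI_API_KEY is set in the environment, and the islamic_advice entries shown here are placeholders.

# Sketch of the fallback pattern introduced in this commit (placeholder data, not the app's real cards).
import os
import google.generativeai as genai

genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
model_gemini = genai.GenerativeModel("gemini-pro")

# Placeholder stand-in for the app's curated advice cards.
islamic_advice = {"sadness": "<div class='notion-card'>...curated card...</div>"}

def get_gemini_advice_from_text(text):
    prompt = f"Suggest one Qur'an verse or Hadith to comfort a person who wrote: '{text}'"
    try:
        response = model_gemini.generate_content(prompt)
        return f"<div class='notion-card fade-in'><p style='white-space: pre-wrap;'>{response.text}</p></div>"
    except Exception:
        # Graceful degradation if the API call fails, mirroring the commit's except branch.
        return "<div class='notion-card fade-in'><p>May Allah guide your heart.</p></div>"

def advice_for(label, text):
    # Curated card first; Gemini is consulted only when the emotion is not covered.
    card = islamic_advice.get(label)
    if not card:
        card = get_gemini_advice_from_text(text)
    return card

print(advice_for("curiosity", "I keep wondering what my purpose is."))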