import gradio as gr
from textblob import TextBlob
from deepface import DeepFace
import tempfile
import os
import cv2
import moviepy.editor as mp
# Sentiment Analysis for Text
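# TextBlob's polarity score ranges from -1.0 (most negative) to 1.0 (most positive); 0 is neutral.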
def analyze_text(text):
    blob = TextBlob(text)
    polarity = blob.sentiment.polarity
    sentiment = "Positive" if polarity > 0 else "Negative" if polarity < 0 else "Neutral"
    return f"Sentiment: {sentiment} (Polarity: {polarity:.2f})"
# Emotion Analysis for Image (Face Recognition)
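# enforce_detection=False lets DeepFace return a result instead of raising an error when no face is clearly detected.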
def analyze_image(image):
    try:
        result = DeepFace.analyze(image, actions=['emotion'], enforce_detection=False)
        dominant_emotion = result[0]['dominant_emotion']
        return f"Detected Emotion: {dominant_emotion}"
    except Exception as e:
        return f"Error: {str(e)}"
# Emotion Analysis for Video (Face Recognition)
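# Samples a single frame from the middle of the clip as a representative snapshot rather than analyzing every frame.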
def analyze_video(video):
    try:
        tmpdir = tempfile.mkdtemp()
        clip = mp.VideoFileClip(video)
        # Grab the frame at the midpoint of the clip
        frame = clip.get_frame(clip.duration / 2)
        clip.close()
        frame_path = os.path.join(tmpdir, "frame.jpg")
        # MoviePy yields RGB frames; OpenCV expects BGR when writing to disk
        cv2.imwrite(frame_path, cv2.cvtColor(frame, cv2.COLOR_RGB2BGR))
        result = DeepFace.analyze(frame_path, actions=['emotion'], enforce_detection=False)
        dominant_emotion = result[0]['dominant_emotion']
        return f"Video Emotion: {dominant_emotion}"
    except Exception as e:
        return f"Error: {str(e)}"
# Gradio Blocks UI
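# Each tab pairs one input component with a button and a Label output wired to its analyzer function.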
with gr.Blocks(theme="huggingface") as demo:
gr.Markdown("# 🎭 Sentiment & Emotion Decoder", elem_id="header")
gr.Markdown("Upload your text, face image, or video to decode emotions and sentiments!")
with gr.Tabs():
# Text Sentiment Analysis Tab
with gr.TabItem("📜 Text Sentiment"):
text_input = gr.Textbox(label="Enter Text Here", placeholder="Type your social media post here...")
text_button = gr.Button("🔍 Analyze Sentiment")
text_output = gr.Label(label="Sentiment Result")
text_button.click(analyze_text, inputs=text_input, outputs=text_output)
# Image Emotion Analysis Tab
with gr.TabItem("📸 Face Emotion Image"):
img_input = gr.Image(type="filepath", label="Upload Face Image")
img_output = gr.Label(label="Emotion Result")
img_button = gr.Button("🔍 Analyze Image")
img_button.click(analyze_image, inputs=img_input, outputs=img_output)
# Video Emotion Analysis Tab
with gr.TabItem("🎥 Face Emotion Video"):
video_input = gr.Video(label="Upload Face Video")
video_output = gr.Label(label="Emotion Result")
video_button = gr.Button("🔍 Analyze Video")
video_button.click(analyze_video, inputs=video_input, outputs=video_output)
# Launch the Gradio app
demo.launch()