logu29 commited on
Commit
b3de9fc
·
verified ·
1 Parent(s): 8f7a187

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -50
app.py CHANGED
@@ -1,67 +1,64 @@
1
  import gradio as gr
2
- from transformers import pipeline
3
  from deepface import DeepFace
4
  import cv2
5
- import numpy as np
6
- import tempfile
7
  import moviepy.editor as mp
 
 
8
 
9
- # Load Text Sentiment Model
10
- sentiment_pipeline = pipeline("sentiment-analysis")
11
-
12
- # 1. Text Sentiment Analysis
13
  def analyze_text(text):
14
- result = sentiment_pipeline(text)[0]
15
- return f"{result['label']} ({result['score']*100:.2f}%)"
 
 
16
 
17
- # 2. Face Emotion Detection
18
  def analyze_face(image):
19
  try:
20
- analysis = DeepFace.analyze(image, actions=['emotion'], enforce_detection=False)
21
- emotion = analysis[0]['dominant_emotion']
22
- return f"Detected Emotion: {emotion}"
23
  except Exception as e:
24
- return f"Error: {str(e)}"
25
-
26
- # 3. Video Emotion Detection
27
- def analyze_video(video_file):
28
- temp_video_path = tempfile.NamedTemporaryFile(delete=False, suffix=".mp4").name
29
- with open(temp_video_path, "wb") as f:
30
- f.write(video_file.read())
31
-
32
- clip = mp.VideoFileClip(temp_video_path)
33
- frame = clip.get_frame(clip.duration / 2) # Take middle frame
34
- frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
35
 
 
 
36
  try:
37
- analysis = DeepFace.analyze(frame_rgb, actions=['emotion'], enforce_detection=False)
38
- emotion = analysis[0]['dominant_emotion']
39
- return f"Detected Emotion in Video: {emotion}"
 
 
 
 
 
40
  except Exception as e:
41
- return f"Error: {str(e)}"
42
 
43
- # Gradio Interface
44
  with gr.Blocks() as demo:
45
- gr.Markdown("# 🎯 Deep Learning Sentiment & Emotion Analyzer")
46
- gr.Markdown("Analyze **Text**, **Face Image**, or **Video**!")
47
-
48
- with gr.Tabs():
49
- with gr.TabItem("Text Sentiment"):
50
- text_input = gr.Textbox(label="Enter Text")
51
- text_output = gr.Label()
52
- text_button = gr.Button("Analyze Text")
53
- text_button.click(analyze_text, inputs=text_input, outputs=text_output)
54
-
55
- with gr.TabItem("Face Emotion (Image)"):
56
- image_input = gr.Image(type="numpy", label="Upload Face Image")
57
- image_output = gr.Label()
58
- image_button = gr.Button("Analyze Face Emotion")
59
- image_button.click(analyze_face, inputs=image_input, outputs=image_output)
60
-
61
- with gr.TabItem("Video Emotion"):
62
- video_input = gr.File(label="Upload Video (.mp4)")
63
- video_output = gr.Label()
64
- video_button = gr.Button("Analyze Video Emotion")
65
- video_button.click(analyze_video, inputs=video_input, outputs=video_output)
66
 
67
  demo.launch()
 
 
 
 
1
  import gradio as gr
2
+ from textblob import TextBlob
3
  from deepface import DeepFace
4
  import cv2
 
 
5
  import moviepy.editor as mp
6
+ import tempfile
7
+ import os
8
 
9
+ # Sentiment analysis for text
 
 
 
10
def analyze_text(text):
    """Classify the overall sentiment of *text* via TextBlob polarity.

    Returns a human-readable string with the label (Positive/Negative/
    Neutral) and the raw polarity score in [-1, 1].
    """
    polarity = TextBlob(text).sentiment.polarity
    if polarity > 0:
        label = "Positive"
    elif polarity < 0:
        label = "Negative"
    else:
        label = "Neutral"
    return f"Sentiment: {label} (Score: {polarity:.2f})"
15
 
16
+ # Emotion detection for image
17
def analyze_face(image):
    """Detect the dominant facial emotion in *image* with DeepFace.

    `enforce_detection=False` keeps DeepFace from raising when no face is
    found; any remaining failure is reported as a string so the Gradio UI
    shows the error instead of crashing.
    """
    try:
        analysis = DeepFace.analyze(image, actions=['emotion'], enforce_detection=False)
    except Exception as e:
        return f"Error analyzing face: {str(e)}"
    top_emotion = analysis[0]['dominant_emotion']
    return f"Dominant Emotion: {top_emotion}"
 
 
 
 
 
 
 
 
 
 
24
 
25
+ # Analyze emotion in video
26
def analyze_video(video_path):
    """Detect the dominant facial emotion in the middle frame of a video.

    Samples the frame at duration/2, writes it to a temporary JPEG, and
    runs DeepFace emotion analysis on it. Errors are returned as strings
    so the Gradio UI displays them instead of crashing.

    Fixes vs. previous version: the VideoFileClip is explicitly closed
    (its ffmpeg reader otherwise leaks a subprocess/file handles), and the
    temporary directory is removed after use instead of accumulating on
    disk with every call (mkdtemp was never cleaned up).
    """
    try:
        clip = mp.VideoFileClip(video_path)
        try:
            frame = clip.get_frame(clip.duration / 2)  # middle frame (RGB)
        finally:
            clip.close()  # release the ffmpeg reader even if get_frame fails
        with tempfile.TemporaryDirectory() as temp_folder:
            frame_path = os.path.join(temp_folder, "frame.jpg")
            # moviepy yields RGB; cv2.imwrite expects BGR, hence the conversion.
            cv2.imwrite(frame_path, cv2.cvtColor(frame, cv2.COLOR_RGB2BGR))
            result = DeepFace.analyze(frame_path, actions=['emotion'], enforce_detection=False)
        dominant_emotion = result[0]['dominant_emotion']
        return f"Dominant Emotion in Video: {dominant_emotion}"
    except Exception as e:
        return f"Error analyzing video: {str(e)}"
38
 
39
+ # Create Gradio Interface
40
# Gradio UI: one tab per analysis mode, each wiring a button to its handler.
with gr.Blocks() as demo:
    gr.Markdown("# 🧠 Emotion Decoder - Sentiment & Emotion Analysis")

    with gr.Tab("Text Analysis"):
        txt_in = gr.Textbox(label="Enter text")
        txt_out = gr.Textbox(label="Sentiment Result")
        gr.Button("Analyze Text").click(analyze_text, inputs=txt_in, outputs=txt_out)

    with gr.Tab("Face Emotion Detection"):
        img_in = gr.Image(type="filepath", label="Upload an Image")
        img_out = gr.Textbox(label="Emotion Result")
        gr.Button("Analyze Face Emotion").click(analyze_face, inputs=img_in, outputs=img_out)

    with gr.Tab("Video Emotion Detection"):
        vid_in = gr.Video(label="Upload a Video")
        vid_out = gr.Textbox(label="Emotion Result")
        gr.Button("Analyze Video Emotion").click(analyze_video, inputs=vid_in, outputs=vid_out)

demo.launch()
62
+
63
+
64
+