Commit 9995d17
1 Parent(s): 18e3ba4

add project description and texts

Files changed:
- .gitignore +19 -0
- app.py +29 -0
- project_info/description.txt +0 -0
- project_info/project_description.html +74 -0
- requirements.txt +2 -0
- src/load_html.py +7 -0
.gitignore
CHANGED
@@ -0,0 +1,19 @@
+# Python
+__pycache__/
+*.py[cod]
+*.pyo
+*.pyd
+*.env
+*.venv
+*.egg
+*.egg-info/
+dist/
+build/
+*.log
+
+# Virtual Environment
+.venv/
+
+# VSCode
+.vscode/
+*.code-workspace
app.py
CHANGED
@@ -0,0 +1,29 @@
+import gradio as gr
+import plotly.graph_objects as go
+from src.load_html import get_description_html
+
+def process_audio(audio_file):
+    ...
+
+
+# Gradio interface
+def create_demo():
+    with gr.Blocks() as demo:
+        gr.HTML(get_description_html)
+
+        audio_input = gr.Audio(label="Upload Audio", type="filepath")
+        submit_button = gr.Button("Generate Graph")
+
+        graph_output = gr.Plot(label="Generated Graph")
+
+        submit_button.click(
+            fn=process_audio,
+            inputs=audio_input,
+            outputs=graph_output
+        )
+
+    return demo
+
+if __name__ == "__main__":
+    demo = create_demo()
+    demo.launch(show_api=False)
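
In this commit process_audio is still a stub (`...`), even though plotly.graph_objects is already imported and the button wiring expects the function to return a figure for gr.Plot. A minimal, purely illustrative sketch of one possible body, assuming the upload is decoded with the optional soundfile package plus numpy (neither is listed in requirements.txt) and that a simple waveform plot is the intended graph:

# Purely illustrative sketch -- the commit itself leaves process_audio empty.
# Assumes soundfile and numpy are available; neither is pinned in requirements.txt.
import numpy as np
import plotly.graph_objects as go
import soundfile as sf

def process_audio(audio_file):
    samples, sample_rate = sf.read(audio_file)   # audio_file is a path (type="filepath")
    if samples.ndim > 1:                         # mix multi-channel audio down to mono
        samples = samples.mean(axis=1)
    t = np.arange(len(samples)) / sample_rate
    fig = go.Figure(go.Scatter(x=t, y=samples, mode="lines"))
    fig.update_layout(xaxis_title="Time (s)", yaxis_title="Amplitude")
    return fig                                   # gr.Plot renders the returned Plotly figure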
project_info/description.txt
DELETED
File without changes
project_info/project_description.html
ADDED
@@ -0,0 +1,74 @@
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>System for the Automatic Recognition of Affective Responses</title>
+    <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Montserrat">
+    <style>
+        body {
+            margin: 0;
+            padding: 0;
+            font-family: 'Montserrat';
+        }
+        .container {
+            position: relative;
+            width: 100;
+            margin: 0;
+            padding: 0px 20px;
+            word-wrap: break-word;
+        }
+        .title {
+            font-size: 36px;
+            margin-left: 100px;
+        }
+        .subtitle {
+            font-size: 24px;
+            color: #666;
+            line-height: 1.4;
+            margin-left: 100px;
+        }
+        .item {
+            font-weight: bold;
+            font-size: 32px;
+            padding-bottom: 10px;
+        }
+        .item-border {
+            border-bottom: 2px solid #edab3c;
+            display: inline-block;
+            padding-bottom: 10px;
+        }
+        .item-subtitle {
+            font-size: 20px;
+            font-weight: normal;
+            margin-left: 40px;
+        }
+        .item-border-locked {
+            border-bottom: 2px solid #f3f4f6;
+            display: inline-block;
+            padding-bottom: 10px;
+        }
+    </style>
+</head>
+<body>
+    <div class="container" style='height: auto; min-height: 160px; width: 100;'>
+        <div class="title">System for the Automatic Recognition of Affective Responses</div>
+        <div class="subtitle"> Affective Computing and Emotion Recognition </div>
+    </div>
+    <div class="item">
+        <div class="item-border">What is it?</div>
+        <div class="item-subtitle">
+            The System for the Automatic Recognition of Affective Responses is a project that aims to develop a system capable of recognizing and interpreting human emotions through various modalities, such as speech, facial expressions, and physiological signals. The goal is to create a comprehensive framework that can analyze and understand emotional responses in real-time, enabling applications in fields like human-computer interaction, mental health monitoring, and affective computing.
+        </div>
+    </div>
+    <div class="item">
+        <div class="item-border">Why is it important?</div>
+        <div class="item-subtitle">
+            Understanding human emotions is crucial for improving communication and interaction between humans and machines. By recognizing and interpreting emotional responses, we can enhance user experiences, develop more empathetic AI systems, and contribute to advancements in mental health care. This project aims to bridge the gap between technology and human emotions, paving the way for more intuitive and responsive systems.
+        </div>
+    </div>
+    <div class="item">
+        <div class="item-border">How does it work?</div>
+        <div class="item-subtitle">
+            The system utilizes advanced machine learning algorithms and deep learning techniques to analyze multimodal data, including audio, visual, and physiological signals. By training on large datasets of annotated emotional responses, the system learns to recognize patterns and features associated with different emotions. The framework is designed to be adaptable and scalable, allowing for integration with various applications and platforms.
+        </div>
+    </div>
+</body>
requirements.txt
CHANGED
@@ -0,0 +1,2 @@
+gradio==5.24.0
+plotly==6.0.1
src/load_html.py
ADDED
@@ -0,0 +1,7 @@
+from pathlib import Path
+
+
+HTML_DIR = Path("project_info")
+
+def get_description_html():
+    return (HTML_DIR / "project_description.html").read_text(encoding="utf-8")
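
One thing worth noting: HTML_DIR is a relative path, so get_description_html() only finds the file when the app is launched from the repository root. A purely illustrative variant (not part of this commit) that resolves the directory relative to src/load_html.py instead:

# Illustrative alternative, not in the diff: anchor project_info/ to this module's
# location so the HTML is found regardless of the current working directory.
from pathlib import Path

HTML_DIR = Path(__file__).resolve().parent.parent / "project_info"

def get_description_html():
    return (HTML_DIR / "project_description.html").read_text(encoding="utf-8")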