Upload 292 files
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +51 -37
- README.md +15 -13
- assets/img/AIdeaTextCard.jpg +0 -0
- assets/img/assets_img_logo_92x92.ico +0 -0
- assets/img/socialmedia/AIdeaTextCard.jpg +3 -0
- assets/img/socialmedia/Facebook_CoverPhoto-1_820x312.jpg +0 -0
- assets/img/socialmedia/Facebook_CoverPhoto_820x312.jpg +0 -0
- assets/img/socialmedia/_MG_2535.jpg +0 -0
- assets/img/socialmedia/_MG_2585.jpg +0 -0
- assets/img/socialmedia/_MG_2587.jpg +0 -0
- assets/img/socialmedia/_MG_2590.jpg +0 -0
- assets/img/socialmedia/_MG_2678.jpg +0 -0
- assets/img/socialmedia/_MG_2727.jpg +0 -0
- assets/img/socialmedia/_MG_2735.jpg +0 -0
- assets/img/socialmedia/_MG_2790.jpg +0 -0
- assets/img/socialmedia/_MG_2845.JPG +0 -0
- modules/__init__.py +318 -318
- modules/admin/admin_ui.py +249 -251
- modules/auth/auth.py +194 -194
- modules/chatbot/__init__.py +7 -7
- modules/chatbot/chabot.py +59 -59
- modules/chatbot/chat_interface.py +24 -24
- modules/chatbot/chat_process.py +55 -55
- modules/chatbot/sidebar_chat.py +112 -112
- modules/database/chat_mongo_db.py +115 -115
- modules/database/claude_recommendations_mongo_db.py +136 -136
- modules/database/current_situation_mongo_db.py +136 -136
- modules/database/database_init.py +187 -187
- modules/database/discourse_mongo_db.py +151 -172
- modules/database/mongo_db.py +61 -61
- modules/database/morphosyntax_iterative_mongo_db.py +171 -171
- modules/database/semantic_export.py +77 -77
- modules/database/semantic_mongo_db.py +159 -159
- modules/database/sql_db.py +322 -322
- modules/database/writing_progress_mongo_db.py +140 -140
- modules/discourse/__init__.py +16 -16
- modules/discourse/discourse_interface.py +318 -281
- modules/discourse/discourse_live_interface.py +151 -151
- modules/discourse/discourse_process.py +67 -67
- modules/morphosyntax/__init__.py +29 -29
- modules/morphosyntax/morphosyntax_interface-BackUp_Dec24_OK.py +321 -321
- modules/morphosyntax/morphosyntax_interface_BackUp_Dec-28-Ok.py +164 -164
- modules/morphosyntax/morphosyntax_interface_vOk-30-12-24.py +246 -246
- modules/morphosyntax/morphosyntax_process.py +131 -131
- modules/morphosyntax/morphosyntax_process_BackUp_Dec24_Ok.py +131 -131
- modules/semantic/__init_.py +16 -16
- modules/semantic/semantic_interface.py +295 -292
- modules/semantic/semantic_interface_1.py +54 -54
- modules/semantic/semantic_interface_2.py +166 -166
- modules/semantic/semantic_interface_3.py +181 -181
.gitattributes
CHANGED
@@ -1,37 +1,51 @@
|
|
1 |
-
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
-
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
-
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
-
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
-
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
-
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
-
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
-
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
-
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
-
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
-
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
-
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
-
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
-
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
-
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
-
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
-
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
-
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
-
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
-
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
-
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
-
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
-
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
-
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
-
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
-
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
-
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
-
*.tar filter=lfs diff=lfs merge=lfs -text
|
29 |
-
*.tflite filter=lfs diff=lfs merge=lfs -text
|
30 |
-
*.tgz filter=lfs diff=lfs merge=lfs -text
|
31 |
-
*.wasm filter=lfs diff=lfs merge=lfs -text
|
32 |
-
*.xz filter=lfs diff=lfs merge=lfs -text
|
33 |
-
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
-
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
-
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
-
assets/img/
|
37 |
-
assets/img/
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
+
assets/img/AIdeaText_Logo_vectores.png filter=lfs diff=lfs merge=lfs -text
|
37 |
+
assets/img/ALPHA_Startup[[:space:]]Badges.png filter=lfs diff=lfs merge=lfs -text
|
38 |
+
assets/img/AIdeaTextCard.jpg filter=lfs diff=lfs merge=lfs -text
|
39 |
+
assets/img/assets_img_logo_92x92.ico filter=lfs diff=lfs merge=lfs -text
|
40 |
+
assets/img/socialmedia/_MG_2535.jpg filter=lfs diff=lfs merge=lfs -text
|
41 |
+
assets/img/socialmedia/_MG_2585.jpg filter=lfs diff=lfs merge=lfs -text
|
42 |
+
assets/img/socialmedia/_MG_2587.jpg filter=lfs diff=lfs merge=lfs -text
|
43 |
+
assets/img/socialmedia/_MG_2590.jpg filter=lfs diff=lfs merge=lfs -text
|
44 |
+
assets/img/socialmedia/_MG_2678.jpg filter=lfs diff=lfs merge=lfs -text
|
45 |
+
assets/img/socialmedia/_MG_2727.jpg filter=lfs diff=lfs merge=lfs -text
|
46 |
+
assets/img/socialmedia/_MG_2735.jpg filter=lfs diff=lfs merge=lfs -text
|
47 |
+
assets/img/socialmedia/_MG_2790.jpg filter=lfs diff=lfs merge=lfs -text
|
48 |
+
assets/img/socialmedia/_MG_2845.JPG filter=lfs diff=lfs merge=lfs -text
|
49 |
+
assets/img/socialmedia/AIdeaTextCard.jpg filter=lfs diff=lfs merge=lfs -text
|
50 |
+
assets/img/socialmedia/Facebook_CoverPhoto_820x312.jpg filter=lfs diff=lfs merge=lfs -text
|
51 |
+
assets/img/socialmedia/Facebook_CoverPhoto-1_820x312.jpg filter=lfs diff=lfs merge=lfs -text
|
README.md
CHANGED
@@ -1,14 +1,16 @@
|
|
1 |
-
---
|
2 |
-
title:
|
3 |
-
emoji: 👀
|
4 |
-
colorFrom: blue
|
5 |
-
colorTo: purple
|
6 |
-
sdk: streamlit
|
7 |
-
sdk_version: 1.44.1
|
8 |
-
app_file: app.py
|
9 |
-
pinned:
|
10 |
-
license: mit
|
11 |
-
short_description: AIdeaText
|
12 |
-
|
13 |
-
|
|
|
|
|
14 |
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
1 |
+
---
|
2 |
+
title: v4
|
3 |
+
emoji: 👀
|
4 |
+
colorFrom: blue
|
5 |
+
colorTo: purple
|
6 |
+
sdk: streamlit
|
7 |
+
sdk_version: 1.44.1
|
8 |
+
app_file: app.py
|
9 |
+
pinned: true
|
10 |
+
license: mit
|
11 |
+
short_description: AIdeaText
|
12 |
+
thumbnail: >-
|
13 |
+
https://cdn-uploads.huggingface.co/production/uploads/650f8981f141bc34f92d7bea/b6wrlUCEhhlFOQ5mJ6FtA.png
|
14 |
+
---
|
15 |
+
|
16 |
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
assets/img/AIdeaTextCard.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/assets_img_logo_92x92.ico
CHANGED
|
|
Git LFS Details
|
assets/img/socialmedia/AIdeaTextCard.jpg
ADDED
![]() |
Git LFS Details
|
assets/img/socialmedia/Facebook_CoverPhoto-1_820x312.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/Facebook_CoverPhoto_820x312.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/_MG_2535.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/_MG_2585.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/_MG_2587.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/_MG_2590.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/_MG_2678.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/_MG_2727.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/_MG_2735.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/_MG_2790.jpg
CHANGED
![]() |
![]() |
Git LFS Details
|
assets/img/socialmedia/_MG_2845.JPG
CHANGED
|
|
Git LFS Details
|
modules/__init__.py
CHANGED
@@ -1,319 +1,319 @@
|
|
1 |
-
# modules/__init__.py
|
2 |
-
|
3 |
-
def load_auth_functions():
|
4 |
-
from .auth.auth import authenticate_student, register_student, update_student_info, delete_student
|
5 |
-
return {
|
6 |
-
'authenticate_student': authenticate_student,
|
7 |
-
'register_student': register_student,
|
8 |
-
'update_student_info': update_student_info,
|
9 |
-
'delete_student': delete_student
|
10 |
-
}
|
11 |
-
|
12 |
-
# Agregar nuevo import para current_situation
|
13 |
-
def load_current_situation_functions():
|
14 |
-
"""
|
15 |
-
Carga las funciones relacionadas con el análisis de situación actual.
|
16 |
-
Returns:
|
17 |
-
dict: Diccionario con las funciones de situación actual
|
18 |
-
"""
|
19 |
-
from .studentact.current_situation_interface import (
|
20 |
-
display_current_situation_interface,
|
21 |
-
display_metrics_in_one_row,
|
22 |
-
display_empty_metrics_row,
|
23 |
-
display_metrics_analysis,
|
24 |
-
display_comparison_results,
|
25 |
-
display_metrics_and_suggestions,
|
26 |
-
display_radar_chart,
|
27 |
-
suggest_improvement_tools,
|
28 |
-
prepare_metrics_config
|
29 |
-
)
|
30 |
-
|
31 |
-
from .studentact.current_situation_analysis import (
|
32 |
-
correlate_metrics,
|
33 |
-
analyze_text_dimensions,
|
34 |
-
analyze_clarity,
|
35 |
-
analyze_vocabulary_diversity,
|
36 |
-
analyze_cohesion,
|
37 |
-
analyze_structure,
|
38 |
-
get_dependency_depths,
|
39 |
-
normalize_score,
|
40 |
-
generate_sentence_graphs,
|
41 |
-
generate_word_connections,
|
42 |
-
generate_connection_paths,
|
43 |
-
create_vocabulary_network,
|
44 |
-
create_syntax_complexity_graph,
|
45 |
-
create_cohesion_heatmap
|
46 |
-
)
|
47 |
-
|
48 |
-
return {
|
49 |
-
'display_current_situation_interface': display_current_situation_interface,
|
50 |
-
'display_metrics_in_one_row': display_metrics_in_one_line,
|
51 |
-
'display_empty_metrics_row': display_empty_metrics_row,
|
52 |
-
'display_metrics_analysis': display_metrics_analysis,
|
53 |
-
'display_comparison_results': display_comparison_results,
|
54 |
-
'display_metrics_and_suggestions': display_metrics_and_suggestions,
|
55 |
-
'display_radar_chart': display_radar_chart,
|
56 |
-
'suggest_improvement_tools': suggest_improvement_tools,
|
57 |
-
'prepare_metrics_config': prepare_metrics_config,
|
58 |
-
'display_empty_metrics_row' : display_empty_metrics_row,
|
59 |
-
'correlate_metrics': correlate_metrics,
|
60 |
-
'analyze_text_dimensions': analyze_text_dimensions,
|
61 |
-
'analyze_clarity': analyze_clarity,
|
62 |
-
'analyze_vocabulary_diversity': analyze_vocabulary_diversity,
|
63 |
-
'analyze_cohesion': analyze_cohesion,
|
64 |
-
'analyze_structure': analyze_structure,
|
65 |
-
'get_dependency_depths': get_dependency_depths,
|
66 |
-
'normalize_score': normalize_score,
|
67 |
-
'generate_sentence_graphs': generate_sentence_graphs,
|
68 |
-
'generate_word_connections': generate_word_connections,
|
69 |
-
'generate_connection_paths': generate_connection_paths,
|
70 |
-
'create_vocabulary_network': create_vocabulary_network,
|
71 |
-
'create_syntax_complexity_graph': create_syntax_complexity_graph,
|
72 |
-
'create_cohesion_heatmap': create_cohesion_heatmap
|
73 |
-
}
|
74 |
-
|
75 |
-
def load_database_functions():
|
76 |
-
|
77 |
-
from .database.database_init import (
|
78 |
-
initialize_database_connections,
|
79 |
-
get_container,
|
80 |
-
get_mongodb
|
81 |
-
)
|
82 |
-
|
83 |
-
# Importar funciones SQL
|
84 |
-
from .database.sql_db import (
|
85 |
-
create_student_user,
|
86 |
-
get_student_user,
|
87 |
-
update_student_user,
|
88 |
-
delete_student_user,
|
89 |
-
store_application_request,
|
90 |
-
store_student_feedback,
|
91 |
-
record_login,
|
92 |
-
record_logout,
|
93 |
-
get_recent_sessions,
|
94 |
-
get_user_total_time
|
95 |
-
)
|
96 |
-
|
97 |
-
from .database.mongo_db import (
|
98 |
-
get_collection,
|
99 |
-
insert_document,
|
100 |
-
find_documents,
|
101 |
-
update_document,
|
102 |
-
delete_document,
|
103 |
-
)
|
104 |
-
|
105 |
-
from .database.morphosintax_mongo_db import (
|
106 |
-
store_student_morphosyntax_result,
|
107 |
-
get_student_morphosyntax_analysis,
|
108 |
-
update_student_morphosyntax_analysis,
|
109 |
-
delete_student_morphosyntax_analysis,
|
110 |
-
get_student_morphosyntax_data
|
111 |
-
)
|
112 |
-
|
113 |
-
from .database.semantic_mongo_db import (
|
114 |
-
store_student_semantic_result,
|
115 |
-
get_student_semantic_analysis,
|
116 |
-
update_student_semantic_analysis,
|
117 |
-
delete_student_semantic_analysis,
|
118 |
-
get_student_semantic_data
|
119 |
-
)
|
120 |
-
|
121 |
-
from .database.discourse_mongo_db import (
|
122 |
-
store_student_discourse_result,
|
123 |
-
get_student_discourse_analysis,
|
124 |
-
update_student_discourse_analysis,
|
125 |
-
delete_student_discourse_analysis,
|
126 |
-
get_student_discourse_data
|
127 |
-
)
|
128 |
-
|
129 |
-
# Agregar nueva importación para current_situation
|
130 |
-
from .database.current_situation_mongo_db import (
|
131 |
-
store_current_situation_result,
|
132 |
-
verify_storage,
|
133 |
-
get_recent_sessions,
|
134 |
-
get_student_situation_history,
|
135 |
-
update_exercise_status
|
136 |
-
)
|
137 |
-
|
138 |
-
# Importar nuevas funciones de análisis morfosintáctico iterativo
|
139 |
-
from .morphosyntax_iterative_mongo_db import (
|
140 |
-
store_student_morphosyntax_base,
|
141 |
-
store_student_morphosyntax_iteration,
|
142 |
-
get_student_morphosyntax_analysis,
|
143 |
-
update_student_morphosyntax_analysis,
|
144 |
-
delete_student_morphosyntax_analysis,
|
145 |
-
get_student_morphosyntax_data
|
146 |
-
)
|
147 |
-
|
148 |
-
from .database.chat_mongo_db import store_chat_history, get_chat_history
|
149 |
-
|
150 |
-
return {
|
151 |
-
# Nuevas funciones morfosintácticas iterativas
|
152 |
-
'store_student_morphosyntax_base': store_student_morphosyntax_base,
|
153 |
-
'store_student_morphosyntax_iteration': store_student_morphosyntax_iteration,
|
154 |
-
'get_student_morphosyntax_iterative_analysis': get_student_morphosyntax_analysis, # Renombrada para evitar conflicto
|
155 |
-
'update_student_morphosyntax_iterative': update_student_morphosyntax_analysis, # Renombrada para evitar conflicto
|
156 |
-
'delete_student_morphosyntax_iterative': delete_student_morphosyntax_analysis, # Renombrada para evitar conflicto
|
157 |
-
'get_student_morphosyntax_iterative_data': get_student_morphosyntax_data,
|
158 |
-
'store_current_situation_result': store_current_situation_result,
|
159 |
-
'verify_storage': verify_storage,
|
160 |
-
'get_recent_sessions': get_recent_sessions,
|
161 |
-
'get_student_situation_history': get_student_situation_history,
|
162 |
-
'update_exercise_status': update_exercise_status,
|
163 |
-
'initialize_database_connections': initialize_database_connections,
|
164 |
-
'get_container': get_container,
|
165 |
-
'get_mongodb': get_mongodb,
|
166 |
-
'create_student_user': create_student_user,
|
167 |
-
'get_student_user': get_student_user,
|
168 |
-
'update_student_user': update_student_user,
|
169 |
-
'delete_student_user': delete_student_user,
|
170 |
-
'store_application_request': store_application_request,
|
171 |
-
'store_student_feedback': store_student_feedback,
|
172 |
-
'get_collection': get_collection,
|
173 |
-
'insert_document': insert_document,
|
174 |
-
'find_documents': find_documents,
|
175 |
-
'update_document': update_document,
|
176 |
-
'delete_document': delete_document,
|
177 |
-
'store_student_morphosyntax_result': store_student_morphosyntax_result,
|
178 |
-
'get_student_morphosyntax_analysis': get_student_morphosyntax_analysis,
|
179 |
-
'update_student_morphosyntax_analysis': update_student_morphosyntax_analysis,
|
180 |
-
'delete_student_morphosyntax_analysis': delete_student_morphosyntax_analysis,
|
181 |
-
'get_student_morphosyntax_data': get_student_morphosyntax_data,
|
182 |
-
'store_student_semantic_result': store_student_semantic_result,
|
183 |
-
'get_student_semantic_analysis': get_student_semantic_analysis,
|
184 |
-
'update_student_semantic_analysis': update_student_semantic_analysis,
|
185 |
-
'delete_student_semantic_analysis': delete_student_semantic_analysis,
|
186 |
-
'get_student_semantic_data': get_student_semantic_data,
|
187 |
-
'store_chat_history': store_chat_history,
|
188 |
-
'get_chat_history': get_chat_history,
|
189 |
-
'store_student_discourse_result': store_student_discourse_result,
|
190 |
-
'get_student_discourse_analysis': get_student_discourse_analysis,
|
191 |
-
'update_student_discourse_analysis': update_student_discourse_analysis,
|
192 |
-
'delete_student_discourse_analysis': delete_student_discourse_analysis,
|
193 |
-
'get_student_discourse_data': get_student_discourse_data,
|
194 |
-
'record_login': record_login,
|
195 |
-
'record_logout': record_logout,
|
196 |
-
'get_recent_sessions': get_recent_sessions,
|
197 |
-
'get_user_total_time': get_user_total_time
|
198 |
-
}
|
199 |
-
|
200 |
-
def load_ui_functions():
|
201 |
-
# No importamos nada de ui.py aquí
|
202 |
-
return {} # Retornamos un diccionario vacío
|
203 |
-
|
204 |
-
def load_student_activities_v2_functions():
|
205 |
-
from .studentact.student_activities_v2 import display_student_activities
|
206 |
-
return {
|
207 |
-
'display_student_progress': display_student_activities
|
208 |
-
}
|
209 |
-
|
210 |
-
def load_morphosyntax_functions():
|
211 |
-
from .morphosyntax.morphosyntax_interface import (
|
212 |
-
initialize_arc_analysis_state,
|
213 |
-
reset_arc_analysis_state,
|
214 |
-
display_arc_diagrams,
|
215 |
-
display_morphosyntax_results
|
216 |
-
)
|
217 |
-
from .morphosyntax.morphosyntax_process import (
|
218 |
-
process_morphosyntactic_input,
|
219 |
-
format_analysis_results,
|
220 |
-
perform_advanced_morphosyntactic_analysis # Añadir esta función
|
221 |
-
)
|
222 |
-
|
223 |
-
return {
|
224 |
-
#Interface
|
225 |
-
'initialize_arc_analysis_state': initialize_arc_analysis_state,
|
226 |
-
'reset_arc_analysis_state': reset_morpho_state,
|
227 |
-
'display_arc_diagrams': display_arc_diagrams,
|
228 |
-
'display_morphosyntax_interface': display_morphosyntax_interface,
|
229 |
-
#Process
|
230 |
-
'process_morphosyntactic_input': process_morphosyntactic_input,
|
231 |
-
'format_analysis_results': format_analysis_results,
|
232 |
-
'perform_advanced_morphosyntactic_analysis': perform_advanced_morphosyntactic_analysis
|
233 |
-
}
|
234 |
-
|
235 |
-
def load_semantic_functions():
|
236 |
-
from .semantic.semantic_interface import (
|
237 |
-
display_semantic_interface,
|
238 |
-
display_semantic_results
|
239 |
-
)
|
240 |
-
from modules.semantic.semantic_process import (
|
241 |
-
process_semantic_input,
|
242 |
-
format_semantic_results
|
243 |
-
)
|
244 |
-
|
245 |
-
return {
|
246 |
-
'display_semantic_interface': display_semantic_interface,
|
247 |
-
'display_semantic_results': display_semantic_results,
|
248 |
-
'process_semantic_input': process_semantic_input,
|
249 |
-
'format_semantic_results': format_analysis_results,
|
250 |
-
}
|
251 |
-
|
252 |
-
|
253 |
-
def load_discourse_functions():
|
254 |
-
from .discourse.discourse_interface import (
|
255 |
-
display_discourse_interface,
|
256 |
-
display_discourse_results
|
257 |
-
)
|
258 |
-
from modules.discourse.discourse_process import (
|
259 |
-
perform_discourse_analysis, # Este es el nombre correcto de la función
|
260 |
-
extract_key_concepts, # Función adicional que necesitamos
|
261 |
-
generate_concept_graph, # Función adicional que necesitamos
|
262 |
-
calculate_similarity_matrix # Función adicional que necesitamos
|
263 |
-
)
|
264 |
-
|
265 |
-
return {
|
266 |
-
'display_discourse_interface': display_discourse_interface,
|
267 |
-
'display_discourse_results': display_discourse_results,
|
268 |
-
'perform_discourse_analysis': perform_discourse_analysis,
|
269 |
-
'extract_key_concepts': extract_key_concepts,
|
270 |
-
'generate_concept_graph': generate_concept_graph,
|
271 |
-
'calculate_similarity_matrix': calculate_similarity_matrix
|
272 |
-
}
|
273 |
-
|
274 |
-
def load_admin_functions():
|
275 |
-
from .admin.admin_ui import admin_page
|
276 |
-
return {
|
277 |
-
'admin_page': admin_page
|
278 |
-
}
|
279 |
-
|
280 |
-
def load_utils_functions():
|
281 |
-
from .utils.spacy_utils import load_spacy_models
|
282 |
-
return {
|
283 |
-
'load_spacy_models': load_spacy_models
|
284 |
-
}
|
285 |
-
|
286 |
-
def load_chatbot_functions():
|
287 |
-
"""
|
288 |
-
Carga las funciones del módulo de chatbot
|
289 |
-
Returns:
|
290 |
-
dict: Diccionario con las funciones del chatbot
|
291 |
-
"""
|
292 |
-
from modules.chatbot.sidebar_chat import (
|
293 |
-
display_sidebar_chat
|
294 |
-
)
|
295 |
-
|
296 |
-
from modules.chatbot.chat_process import (
|
297 |
-
ChatProcessor
|
298 |
-
)
|
299 |
-
|
300 |
-
return {
|
301 |
-
'display_sidebar_chat': display_sidebar_chat,
|
302 |
-
'ChatProcessor': ChatProcessor
|
303 |
-
}
|
304 |
-
|
305 |
-
# Función para cargar todas las funciones
|
306 |
-
def load_all_functions():
|
307 |
-
return {
|
308 |
-
**load_auth_functions(),
|
309 |
-
**load_database_functions(),
|
310 |
-
# **load_ui_functions(),
|
311 |
-
**load_admin_functions(),
|
312 |
-
**load_morphosyntax_functions(),
|
313 |
-
**load_semantic_functions(),
|
314 |
-
**load_discourse_functions(),
|
315 |
-
**load_utils_functions(),
|
316 |
-
**load_chatbot_functions(),
|
317 |
-
**load_student_activities_functions(),
|
318 |
-
**load_current_situation_functions() # Agregar el nuevo loader
|
319 |
}
|
|
|
1 |
+
# modules/__init__.py
|
2 |
+
|
3 |
+
def load_auth_functions():
|
4 |
+
from .auth.auth import authenticate_student, register_student, update_student_info, delete_student
|
5 |
+
return {
|
6 |
+
'authenticate_student': authenticate_student,
|
7 |
+
'register_student': register_student,
|
8 |
+
'update_student_info': update_student_info,
|
9 |
+
'delete_student': delete_student
|
10 |
+
}
|
11 |
+
|
12 |
+
# Agregar nuevo import para current_situation
|
13 |
+
def load_current_situation_functions():
|
14 |
+
"""
|
15 |
+
Carga las funciones relacionadas con el análisis de situación actual.
|
16 |
+
Returns:
|
17 |
+
dict: Diccionario con las funciones de situación actual
|
18 |
+
"""
|
19 |
+
from .studentact.current_situation_interface import (
|
20 |
+
display_current_situation_interface,
|
21 |
+
display_metrics_in_one_row,
|
22 |
+
display_empty_metrics_row,
|
23 |
+
display_metrics_analysis,
|
24 |
+
display_comparison_results,
|
25 |
+
display_metrics_and_suggestions,
|
26 |
+
display_radar_chart,
|
27 |
+
suggest_improvement_tools,
|
28 |
+
prepare_metrics_config
|
29 |
+
)
|
30 |
+
|
31 |
+
from .studentact.current_situation_analysis import (
|
32 |
+
correlate_metrics,
|
33 |
+
analyze_text_dimensions,
|
34 |
+
analyze_clarity,
|
35 |
+
analyze_vocabulary_diversity,
|
36 |
+
analyze_cohesion,
|
37 |
+
analyze_structure,
|
38 |
+
get_dependency_depths,
|
39 |
+
normalize_score,
|
40 |
+
generate_sentence_graphs,
|
41 |
+
generate_word_connections,
|
42 |
+
generate_connection_paths,
|
43 |
+
create_vocabulary_network,
|
44 |
+
create_syntax_complexity_graph,
|
45 |
+
create_cohesion_heatmap
|
46 |
+
)
|
47 |
+
|
48 |
+
return {
|
49 |
+
'display_current_situation_interface': display_current_situation_interface,
|
50 |
+
'display_metrics_in_one_row': display_metrics_in_one_line,
|
51 |
+
'display_empty_metrics_row': display_empty_metrics_row,
|
52 |
+
'display_metrics_analysis': display_metrics_analysis,
|
53 |
+
'display_comparison_results': display_comparison_results,
|
54 |
+
'display_metrics_and_suggestions': display_metrics_and_suggestions,
|
55 |
+
'display_radar_chart': display_radar_chart,
|
56 |
+
'suggest_improvement_tools': suggest_improvement_tools,
|
57 |
+
'prepare_metrics_config': prepare_metrics_config,
|
58 |
+
'display_empty_metrics_row' : display_empty_metrics_row,
|
59 |
+
'correlate_metrics': correlate_metrics,
|
60 |
+
'analyze_text_dimensions': analyze_text_dimensions,
|
61 |
+
'analyze_clarity': analyze_clarity,
|
62 |
+
'analyze_vocabulary_diversity': analyze_vocabulary_diversity,
|
63 |
+
'analyze_cohesion': analyze_cohesion,
|
64 |
+
'analyze_structure': analyze_structure,
|
65 |
+
'get_dependency_depths': get_dependency_depths,
|
66 |
+
'normalize_score': normalize_score,
|
67 |
+
'generate_sentence_graphs': generate_sentence_graphs,
|
68 |
+
'generate_word_connections': generate_word_connections,
|
69 |
+
'generate_connection_paths': generate_connection_paths,
|
70 |
+
'create_vocabulary_network': create_vocabulary_network,
|
71 |
+
'create_syntax_complexity_graph': create_syntax_complexity_graph,
|
72 |
+
'create_cohesion_heatmap': create_cohesion_heatmap
|
73 |
+
}
|
74 |
+
|
75 |
+
def load_database_functions():
|
76 |
+
|
77 |
+
from .database.database_init import (
|
78 |
+
initialize_database_connections,
|
79 |
+
get_container,
|
80 |
+
get_mongodb
|
81 |
+
)
|
82 |
+
|
83 |
+
# Importar funciones SQL
|
84 |
+
from .database.sql_db import (
|
85 |
+
create_student_user,
|
86 |
+
get_student_user,
|
87 |
+
update_student_user,
|
88 |
+
delete_student_user,
|
89 |
+
store_application_request,
|
90 |
+
store_student_feedback,
|
91 |
+
record_login,
|
92 |
+
record_logout,
|
93 |
+
get_recent_sessions,
|
94 |
+
get_user_total_time
|
95 |
+
)
|
96 |
+
|
97 |
+
from .database.mongo_db import (
|
98 |
+
get_collection,
|
99 |
+
insert_document,
|
100 |
+
find_documents,
|
101 |
+
update_document,
|
102 |
+
delete_document,
|
103 |
+
)
|
104 |
+
|
105 |
+
from .database.morphosintax_mongo_db import (
|
106 |
+
store_student_morphosyntax_result,
|
107 |
+
get_student_morphosyntax_analysis,
|
108 |
+
update_student_morphosyntax_analysis,
|
109 |
+
delete_student_morphosyntax_analysis,
|
110 |
+
get_student_morphosyntax_data
|
111 |
+
)
|
112 |
+
|
113 |
+
from .database.semantic_mongo_db import (
|
114 |
+
store_student_semantic_result,
|
115 |
+
get_student_semantic_analysis,
|
116 |
+
update_student_semantic_analysis,
|
117 |
+
delete_student_semantic_analysis,
|
118 |
+
get_student_semantic_data
|
119 |
+
)
|
120 |
+
|
121 |
+
from .database.discourse_mongo_db import (
|
122 |
+
store_student_discourse_result,
|
123 |
+
get_student_discourse_analysis,
|
124 |
+
update_student_discourse_analysis,
|
125 |
+
delete_student_discourse_analysis,
|
126 |
+
get_student_discourse_data
|
127 |
+
)
|
128 |
+
|
129 |
+
# Agregar nueva importación para current_situation
|
130 |
+
from .database.current_situation_mongo_db import (
|
131 |
+
store_current_situation_result,
|
132 |
+
verify_storage,
|
133 |
+
get_recent_sessions,
|
134 |
+
get_student_situation_history,
|
135 |
+
update_exercise_status
|
136 |
+
)
|
137 |
+
|
138 |
+
# Importar nuevas funciones de análisis morfosintáctico iterativo
|
139 |
+
from .morphosyntax_iterative_mongo_db import (
|
140 |
+
store_student_morphosyntax_base,
|
141 |
+
store_student_morphosyntax_iteration,
|
142 |
+
get_student_morphosyntax_analysis,
|
143 |
+
update_student_morphosyntax_analysis,
|
144 |
+
delete_student_morphosyntax_analysis,
|
145 |
+
get_student_morphosyntax_data
|
146 |
+
)
|
147 |
+
|
148 |
+
from .database.chat_mongo_db import store_chat_history, get_chat_history
|
149 |
+
|
150 |
+
return {
|
151 |
+
# Nuevas funciones morfosintácticas iterativas
|
152 |
+
'store_student_morphosyntax_base': store_student_morphosyntax_base,
|
153 |
+
'store_student_morphosyntax_iteration': store_student_morphosyntax_iteration,
|
154 |
+
'get_student_morphosyntax_iterative_analysis': get_student_morphosyntax_analysis, # Renombrada para evitar conflicto
|
155 |
+
'update_student_morphosyntax_iterative': update_student_morphosyntax_analysis, # Renombrada para evitar conflicto
|
156 |
+
'delete_student_morphosyntax_iterative': delete_student_morphosyntax_analysis, # Renombrada para evitar conflicto
|
157 |
+
'get_student_morphosyntax_iterative_data': get_student_morphosyntax_data,
|
158 |
+
'store_current_situation_result': store_current_situation_result,
|
159 |
+
'verify_storage': verify_storage,
|
160 |
+
'get_recent_sessions': get_recent_sessions,
|
161 |
+
'get_student_situation_history': get_student_situation_history,
|
162 |
+
'update_exercise_status': update_exercise_status,
|
163 |
+
'initialize_database_connections': initialize_database_connections,
|
164 |
+
'get_container': get_container,
|
165 |
+
'get_mongodb': get_mongodb,
|
166 |
+
'create_student_user': create_student_user,
|
167 |
+
'get_student_user': get_student_user,
|
168 |
+
'update_student_user': update_student_user,
|
169 |
+
'delete_student_user': delete_student_user,
|
170 |
+
'store_application_request': store_application_request,
|
171 |
+
'store_student_feedback': store_student_feedback,
|
172 |
+
'get_collection': get_collection,
|
173 |
+
'insert_document': insert_document,
|
174 |
+
'find_documents': find_documents,
|
175 |
+
'update_document': update_document,
|
176 |
+
'delete_document': delete_document,
|
177 |
+
'store_student_morphosyntax_result': store_student_morphosyntax_result,
|
178 |
+
'get_student_morphosyntax_analysis': get_student_morphosyntax_analysis,
|
179 |
+
'update_student_morphosyntax_analysis': update_student_morphosyntax_analysis,
|
180 |
+
'delete_student_morphosyntax_analysis': delete_student_morphosyntax_analysis,
|
181 |
+
'get_student_morphosyntax_data': get_student_morphosyntax_data,
|
182 |
+
'store_student_semantic_result': store_student_semantic_result,
|
183 |
+
'get_student_semantic_analysis': get_student_semantic_analysis,
|
184 |
+
'update_student_semantic_analysis': update_student_semantic_analysis,
|
185 |
+
'delete_student_semantic_analysis': delete_student_semantic_analysis,
|
186 |
+
'get_student_semantic_data': get_student_semantic_data,
|
187 |
+
'store_chat_history': store_chat_history,
|
188 |
+
'get_chat_history': get_chat_history,
|
189 |
+
'store_student_discourse_result': store_student_discourse_result,
|
190 |
+
'get_student_discourse_analysis': get_student_discourse_analysis,
|
191 |
+
'update_student_discourse_analysis': update_student_discourse_analysis,
|
192 |
+
'delete_student_discourse_analysis': delete_student_discourse_analysis,
|
193 |
+
'get_student_discourse_data': get_student_discourse_data,
|
194 |
+
'record_login': record_login,
|
195 |
+
'record_logout': record_logout,
|
196 |
+
'get_recent_sessions': get_recent_sessions,
|
197 |
+
'get_user_total_time': get_user_total_time
|
198 |
+
}
|
199 |
+
|
200 |
+
def load_ui_functions():
    """Return the UI function map.

    Deliberately imports nothing from ui.py; the mapping is kept empty.
    """
    return dict()
|
203 |
+
|
204 |
+
def load_student_activities_v2_functions():
    """Expose the v2 student-activities display under the legacy key."""
    from .studentact.student_activities_v2 import display_student_activities
    exported = {}
    exported['display_student_progress'] = display_student_activities
    return exported
|
209 |
+
|
210 |
+
def load_morphosyntax_functions():
    """Load the morphosyntax interface and processing functions.

    Returns:
        dict: Mapping of public names to the morphosyntax callables.

    Bug fix: the original dict referenced ``reset_morpho_state`` and
    ``display_morphosyntax_interface``, neither of which is imported or
    defined, so calling this loader raised NameError. Both entries now
    use the names actually imported above.
    """
    from .morphosyntax.morphosyntax_interface import (
        initialize_arc_analysis_state,
        reset_arc_analysis_state,
        display_arc_diagrams,
        display_morphosyntax_results
    )
    from .morphosyntax.morphosyntax_process import (
        process_morphosyntactic_input,
        format_analysis_results,
        perform_advanced_morphosyntactic_analysis
    )

    return {
        # Interface
        'initialize_arc_analysis_state': initialize_arc_analysis_state,
        'reset_arc_analysis_state': reset_arc_analysis_state,  # was undefined reset_morpho_state
        'display_arc_diagrams': display_arc_diagrams,
        'display_morphosyntax_results': display_morphosyntax_results,  # was undefined display_morphosyntax_interface
        # Process
        'process_morphosyntactic_input': process_morphosyntactic_input,
        'format_analysis_results': format_analysis_results,
        'perform_advanced_morphosyntactic_analysis': perform_advanced_morphosyntactic_analysis
    }
|
234 |
+
|
235 |
+
def load_semantic_functions():
    """Load the semantic interface and processing functions.

    Returns:
        dict: Mapping of public names to the semantic callables.
    """
    from .semantic.semantic_interface import (
        display_semantic_interface,
        display_semantic_results
    )
    from modules.semantic.semantic_process import (
        process_semantic_input,
        format_semantic_results
    )

    return {
        'display_semantic_interface': display_semantic_interface,
        'display_semantic_results': display_semantic_results,
        'process_semantic_input': process_semantic_input,
        # Bug fix: previously mapped to the undefined name
        # format_analysis_results (a morphosyntax function); the name
        # imported above is format_semantic_results.
        'format_semantic_results': format_semantic_results,
    }
|
251 |
+
|
252 |
+
|
253 |
+
def load_discourse_functions():
    """Load the discourse-analysis interface and processing callables."""
    from .discourse.discourse_interface import (
        display_discourse_interface,
        display_discourse_results
    )
    from modules.discourse.discourse_process import (
        perform_discourse_analysis,  # canonical analysis entry point
        extract_key_concepts,
        generate_concept_graph,
        calculate_similarity_matrix
    )

    exported = {}
    exported['display_discourse_interface'] = display_discourse_interface
    exported['display_discourse_results'] = display_discourse_results
    exported['perform_discourse_analysis'] = perform_discourse_analysis
    exported['extract_key_concepts'] = extract_key_concepts
    exported['generate_concept_graph'] = generate_concept_graph
    exported['calculate_similarity_matrix'] = calculate_similarity_matrix
    return exported
|
273 |
+
|
274 |
+
def load_admin_functions():
    """Expose the admin panel entry point."""
    from .admin.admin_ui import admin_page
    return dict(admin_page=admin_page)
|
279 |
+
|
280 |
+
def load_utils_functions():
    """Expose the spaCy model loader shared by the analysis modules."""
    from .utils.spacy_utils import load_spacy_models
    utils = dict()
    utils['load_spacy_models'] = load_spacy_models
    return utils
|
285 |
+
|
286 |
+
def load_chatbot_functions():
    """Load the chatbot module's public callables.

    Returns:
        dict: The sidebar chat renderer and the ChatProcessor class.
    """
    from modules.chatbot.sidebar_chat import display_sidebar_chat
    from modules.chatbot.chat_process import ChatProcessor

    chatbot_api = {
        'display_sidebar_chat': display_sidebar_chat,
        'ChatProcessor': ChatProcessor,
    }
    return chatbot_api
|
304 |
+
|
305 |
+
# Función para cargar todas las funciones
|
306 |
+
# Aggregate loader for every feature module.
def load_all_functions():
    """Collect every module loader's map into one name→callable dict.

    Loaders are merged in order, so later loaders win on key collisions —
    the same semantics as the original dict-unpacking expression.
    """
    loaders = (
        load_auth_functions,
        load_database_functions,
        # load_ui_functions,  # intentionally disabled
        load_admin_functions,
        load_morphosyntax_functions,
        load_semantic_functions,
        load_discourse_functions,
        load_utils_functions,
        load_chatbot_functions,
        load_student_activities_functions,
        load_current_situation_functions,  # newest loader
    )
    merged = {}
    for loader in loaders:
        merged.update(loader())
    return merged
|
modules/admin/admin_ui.py
CHANGED
@@ -1,252 +1,250 @@
|
|
1 |
-
#modules/admin/admin_ui.py
|
2 |
-
|
3 |
-
import streamlit as st
|
4 |
-
|
5 |
-
from datetime import datetime
|
6 |
-
|
7 |
-
from ..database.sql_db import (
|
8 |
-
get_user,
|
9 |
-
get_student_user,
|
10 |
-
get_admin_user,
|
11 |
-
get_teacher_user,
|
12 |
-
create_student_user,
|
13 |
-
update_student_user,
|
14 |
-
delete_student_user,
|
15 |
-
record_login,
|
16 |
-
record_logout,
|
17 |
-
get_recent_sessions,
|
18 |
-
get_user_total_time
|
19 |
-
)
|
20 |
-
|
21 |
-
from ..database.morphosintax_mongo_db import get_student_morphosyntax_analysis
|
22 |
-
|
23 |
-
from ..auth.auth import hash_password # Agregar esta importación al inicio
|
24 |
-
|
25 |
-
|
26 |
-
def format_duration(seconds):
|
27 |
-
"""Convierte segundos a formato legible"""
|
28 |
-
if not seconds:
|
29 |
-
return "0h 0m"
|
30 |
-
hours = seconds // 3600
|
31 |
-
minutes = (seconds % 3600) // 60
|
32 |
-
return f"{hours}h {minutes}m"
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
|
52 |
-
st.
|
53 |
-
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
|
70 |
-
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
)
|
103 |
-
|
104 |
-
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
112 |
-
|
113 |
-
|
114 |
-
|
115 |
-
|
116 |
-
st.
|
117 |
-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
-
|
122 |
-
|
123 |
-
|
124 |
-
|
125 |
-
|
126 |
-
|
127 |
-
|
128 |
-
|
129 |
-
|
130 |
-
|
131 |
-
|
132 |
-
|
133 |
-
|
134 |
-
|
135 |
-
|
136 |
-
|
137 |
-
|
138 |
-
|
139 |
-
|
140 |
-
|
141 |
-
|
142 |
-
st.
|
143 |
-
|
144 |
-
|
145 |
-
|
146 |
-
|
147 |
-
|
148 |
-
|
149 |
-
|
150 |
-
|
151 |
-
|
152 |
-
|
153 |
-
|
154 |
-
|
155 |
-
|
156 |
-
|
157 |
-
|
158 |
-
|
159 |
-
|
160 |
-
|
161 |
-
|
162 |
-
|
163 |
-
|
164 |
-
|
165 |
-
|
166 |
-
|
167 |
-
|
168 |
-
|
169 |
-
|
170 |
-
|
171 |
-
|
172 |
-
|
173 |
-
|
174 |
-
|
175 |
-
|
176 |
-
|
177 |
-
|
178 |
-
|
179 |
-
"
|
180 |
-
|
181 |
-
|
182 |
-
|
183 |
-
|
184 |
-
|
185 |
-
|
186 |
-
|
187 |
-
|
188 |
-
|
189 |
-
|
190 |
-
st.write("Datos
|
191 |
-
st.json(
|
192 |
-
|
193 |
-
|
194 |
-
|
195 |
-
|
196 |
-
|
197 |
-
|
198 |
-
|
199 |
-
|
200 |
-
|
201 |
-
|
202 |
-
|
203 |
-
|
204 |
-
|
205 |
-
|
206 |
-
|
207 |
-
|
208 |
-
|
209 |
-
|
210 |
-
|
211 |
-
|
212 |
-
|
213 |
-
|
214 |
-
|
215 |
-
|
216 |
-
|
217 |
-
|
218 |
-
)
|
219 |
-
|
220 |
-
|
221 |
-
|
222 |
-
|
223 |
-
|
224 |
-
|
225 |
-
|
226 |
-
|
227 |
-
|
228 |
-
|
229 |
-
|
230 |
-
st.
|
231 |
-
|
232 |
-
|
233 |
-
|
234 |
-
|
235 |
-
|
236 |
-
|
237 |
-
|
238 |
-
|
239 |
-
|
240 |
-
|
241 |
-
|
242 |
-
|
243 |
-
|
244 |
-
|
245 |
-
|
246 |
-
|
247 |
-
|
248 |
-
|
249 |
-
|
250 |
-
from ..auth.auth import logout
|
251 |
-
logout()
|
252 |
st.rerun()
|
|
|
1 |
+
#modules/admin/admin_ui.py
|
2 |
+
|
3 |
+
import streamlit as st
|
4 |
+
|
5 |
+
from datetime import datetime
|
6 |
+
|
7 |
+
from ..database.sql_db import (
|
8 |
+
get_user,
|
9 |
+
get_student_user,
|
10 |
+
get_admin_user,
|
11 |
+
get_teacher_user,
|
12 |
+
create_student_user,
|
13 |
+
update_student_user,
|
14 |
+
delete_student_user,
|
15 |
+
record_login,
|
16 |
+
record_logout,
|
17 |
+
get_recent_sessions,
|
18 |
+
get_user_total_time
|
19 |
+
)
|
20 |
+
|
21 |
+
from ..database.morphosintax_mongo_db import get_student_morphosyntax_analysis
|
22 |
+
|
23 |
+
from ..auth.auth import hash_password # Agregar esta importación al inicio
|
24 |
+
|
25 |
+
#######################################################################################
|
26 |
+
def format_duration(seconds):
    """Convert a duration in seconds to a human-readable "Xh Ym" string.

    Falsy input (0, None, empty) yields "0h 0m".
    """
    if not seconds:
        return "0h 0m"
    total_minutes, _ = divmod(seconds, 60)
    hours, minutes = divmod(total_minutes, 60)
    return f"{hours}h {minutes}m"
|
33 |
+
|
34 |
+
def admin_page():
    """Render the admin panel: student creation, user search, platform activity.

    Expects st.session_state.username to have been set by the login flow.
    """
    st.title("Panel de Administración")
    st.write(f"Bienvenido, {st.session_state.username}")

    # Three tabs for the main admin sections.
    tab1, tab2, tab3 = st.tabs([
        "Gestión de Usuarios",
        "Búsqueda de Usuarios",
        "Actividad de la Plataforma"
    ])


    ########################################################
    # Tab 1: user management — create a new student account
    with tab1:
        st.header("Crear Nuevo Usuario Estudiante")

        # Two columns for the creation form.
        col1, col2 = st.columns(2)

        with col1:
            new_username = st.text_input(
                "Correo electrónico del nuevo usuario",
                key="admin_new_username"
            )

        with col2:
            new_password = st.text_input(
                "Contraseña",
                type="password",
                key="admin_new_password"
            )

        if st.button("Crear Usuario", key="admin_create_user", type="primary"):
            if new_username and new_password:  # both fields must be filled in
                try:
                    # Hash the password before handing it to the data layer.
                    hashed_password = hash_password(new_password)
                    if create_student_user(new_username, hashed_password, {'partitionKey': new_username}):
                        st.success(f"Usuario estudiante {new_username} creado exitosamente")
                    else:
                        st.error("Error al crear el usuario estudiante")
                except Exception as e:
                    st.error(f"Error al crear usuario: {str(e)}")
            else:
                st.warning("Por favor complete todos los campos")

    #######################################################################
    # Tab 2: look up a single student and show their stored data
    with tab2:
        st.header("Búsqueda de Usuarios")

        search_col1, search_col2 = st.columns([2,1])

        with search_col1:
            student_username = st.text_input(
                "Nombre de usuario del estudiante",
                key="admin_view_student"
            )

        with search_col2:
            search_button = st.button(
                "Buscar",
                key="admin_view_student_data",
                type="primary"
            )

        if search_button:
            student = get_student_user(student_username)
            if student:
                # Sub-tabs for the different kinds of per-student info.
                info_tab1, info_tab2, info_tab3 = st.tabs([
                    "Información Básica",
                    "Análisis Realizados",
                    "Tiempo en Plataforma"
                ])

                with info_tab1:
                    st.subheader("Información del Usuario")
                    st.json(student)

                with info_tab2:
                    st.subheader("Análisis Realizados")
                    student_data = get_student_morphosyntax_analysis(student_username)
                    if student_data:
                        st.json(student_data)
                    else:
                        st.info("No hay datos de análisis para este estudiante.")

                with info_tab3:
                    st.subheader("Tiempo en Plataforma")
                    total_time = get_user_total_time(student_username)
                    if total_time:
                        st.metric(
                            "Tiempo Total",
                            format_duration(total_time)
                        )
                    else:
                        st.info("No hay registros de tiempo para este usuario")
            else:
                st.error("Estudiante no encontrado")

    #######################################################################
    # Tab 3: recent platform activity (login sessions)
    with tab3:
        st.header("Actividad Reciente")

        # Manual refresh button.
        if st.button("Actualizar datos", key="refresh_sessions", type="primary"):
            st.rerun()

        # Show a spinner while the session data loads.
        with st.spinner("Cargando datos de sesiones..."):
            # Fetch the most recent sessions.
            recent_sessions = get_recent_sessions(20)  # bumped to 20 for more data

            if recent_sessions:
                # Build the rows to display.
                sessions_data = []
                for session in recent_sessions:
                    try:
                        # Parse the login timestamp defensively; fall back
                        # to the raw value on any format problem.
                        try:
                            login_time = datetime.fromisoformat(
                                session['loginTime'].replace('Z', '+00:00')
                            ).strftime("%Y-%m-%d %H:%M:%S")
                        except Exception as e:
                            login_time = session['loginTime']

                        # logoutTime may be missing or the marker "Activo"
                        # for sessions that are still open.
                        if session.get('logoutTime') and session['logoutTime'] != "Activo":
                            try:
                                logout_time = datetime.fromisoformat(
                                    session['logoutTime'].replace('Z', '+00:00')
                                ).strftime("%Y-%m-%d %H:%M:%S")
                            except Exception as e:
                                logout_time = session['logoutTime']
                        else:
                            logout_time = "Activo"

                        # Append the display row.
                        sessions_data.append({
                            "Usuario": session.get('username', 'Desconocido'),
                            "Inicio de Sesión": login_time,
                            "Fin de Sesión": logout_time,
                            "Duración": format_duration(session.get('sessionDuration', 0))
                        })
                    except Exception as e:
                        st.error(f"Error procesando sesión: {str(e)}")
                        continue

                # Debug information, collapsed by default.
                with st.expander("Información de depuración", expanded=False):
                    st.write("Datos crudos recuperados:")
                    st.json(recent_sessions)

                    st.write("Datos procesados para mostrar:")
                    st.json(sessions_data)

                # Styled table of the processed sessions.
                st.dataframe(
                    sessions_data,
                    hide_index=True,
                    column_config={
                        "Usuario": st.column_config.TextColumn(
                            "Usuario",
                            width="medium"
                        ),
                        "Inicio de Sesión": st.column_config.TextColumn(
                            "Inicio de Sesión",
                            width="medium"
                        ),
                        "Fin de Sesión": st.column_config.TextColumn(
                            "Fin de Sesión",
                            width="medium"
                        ),
                        "Duración": st.column_config.TextColumn(
                            "Duración",
                            width="small"
                        )
                    }
                )

                # Summary metrics.
                total_sessions = len(sessions_data)
                total_users = len(set(session['Usuario'] for session in sessions_data))

                metric_col1, metric_col2 = st.columns(2)
                with metric_col1:
                    st.metric("Total de Sesiones", total_sessions)
                with metric_col2:
                    st.metric("Usuarios Únicos", total_users)
            else:
                st.info("No hay registros de sesiones recientes o hubo un problema al recuperarlos.")

                # Debugging helper.
                # NOTE(review): get_container is not imported in this module,
                # so this diagnostic button will raise NameError when clicked —
                # confirm the intended import (presumably the database layer).
                if st.button("Mostrar diagnóstico"):
                    st.write("Verificando la función get_recent_sessions:")
                    container = get_container("users_sessions")
                    if container:
                        st.success("✅ Conectado al contenedor users_sessions")
                    else:
                        st.error("❌ No se pudo conectar al contenedor users_sessions")


    #######################################################################
    # Divider before the logout button.
    st.markdown("---")

    #######################################################################
    # Center the logout button.
    col1, col2, col3 = st.columns([2,1,2])
    with col2:
        if st.button("Cerrar Sesión", key="admin_logout", type="primary", use_container_width=True):
            from ..auth.auth import logout
            logout()
            st.rerun()
|
modules/auth/auth.py
CHANGED
@@ -1,195 +1,195 @@
|
|
1 |
-
##########modules/auth/auth.py
|
2 |
-
|
3 |
-
import os
|
4 |
-
import streamlit as st
|
5 |
-
from azure.cosmos import CosmosClient, exceptions
|
6 |
-
from azure.cosmos.exceptions import CosmosHttpResponseError
|
7 |
-
import bcrypt
|
8 |
-
import base64
|
9 |
-
from ..database.sql_db import (
|
10 |
-
get_user,
|
11 |
-
get_student_user,
|
12 |
-
get_admin_user,
|
13 |
-
create_student_user,
|
14 |
-
update_student_user,
|
15 |
-
delete_student_user,
|
16 |
-
record_login,
|
17 |
-
record_logout
|
18 |
-
)
|
19 |
-
|
20 |
-
import logging
|
21 |
-
|
22 |
-
from datetime import datetime, timezone
|
23 |
-
|
24 |
-
logger = logging.getLogger(__name__)
|
25 |
-
|
26 |
-
def clean_and_validate_key(key):
|
27 |
-
"""Limpia y valida la clave de CosmosDB"""
|
28 |
-
key = key.strip()
|
29 |
-
while len(key) % 4 != 0:
|
30 |
-
key += '='
|
31 |
-
try:
|
32 |
-
base64.b64decode(key)
|
33 |
-
return key
|
34 |
-
except:
|
35 |
-
raise ValueError("La clave proporcionada no es válida")
|
36 |
-
|
37 |
-
# Verificar las variables de entorno
|
38 |
-
endpoint = os.getenv("COSMOS_ENDPOINT")
|
39 |
-
key = os.getenv("COSMOS_KEY")
|
40 |
-
|
41 |
-
if not endpoint or not key:
|
42 |
-
raise ValueError("Las variables de entorno COSMOS_ENDPOINT y COSMOS_KEY deben estar configuradas")
|
43 |
-
|
44 |
-
key = clean_and_validate_key(key)
|
45 |
-
|
46 |
-
|
47 |
-
def authenticate_user(username, password):
|
48 |
-
"""Autentica un usuario y registra el inicio de sesión"""
|
49 |
-
try:
|
50 |
-
user_item = get_user(username)
|
51 |
-
|
52 |
-
if not user_item:
|
53 |
-
logger.warning(f"Usuario no encontrado: {username}")
|
54 |
-
return False, None
|
55 |
-
|
56 |
-
if verify_password(user_item['password'], password):
|
57 |
-
logger.info(f"Usuario autenticado: {username}, Rol: {user_item['role']}")
|
58 |
-
|
59 |
-
try:
|
60 |
-
session_id = record_login(username)
|
61 |
-
if session_id:
|
62 |
-
st.session_state.session_id = session_id
|
63 |
-
st.session_state.username = username
|
64 |
-
st.session_state.login_time = datetime.now(timezone.utc).isoformat()
|
65 |
-
logger.info(f"Sesión iniciada: {session_id}")
|
66 |
-
else:
|
67 |
-
logger.warning("No se pudo registrar la sesión")
|
68 |
-
except Exception as e:
|
69 |
-
logger.error(f"Error al registrar inicio de sesión: {str(e)}")
|
70 |
-
|
71 |
-
return True, user_item['role']
|
72 |
-
|
73 |
-
logger.warning(f"Contraseña incorrecta para usuario: {username}")
|
74 |
-
return False, None
|
75 |
-
|
76 |
-
except Exception as e:
|
77 |
-
logger.error(f"Error durante la autenticación del usuario: {str(e)}")
|
78 |
-
return False, None
|
79 |
-
|
80 |
-
def authenticate_student(username, password):
|
81 |
-
"""Autentica un estudiante"""
|
82 |
-
success, role = authenticate_user(username, password)
|
83 |
-
if success and role == 'Estudiante':
|
84 |
-
return True, role
|
85 |
-
return False, None
|
86 |
-
|
87 |
-
def authenticate_admin(username, password):
|
88 |
-
"""Autentica un administrador"""
|
89 |
-
success, role = authenticate_user(username, password)
|
90 |
-
if success and role == 'Administrador':
|
91 |
-
return True, role
|
92 |
-
return False, None
|
93 |
-
|
94 |
-
def register_student(username, password, additional_info=None):
|
95 |
-
"""Registra un nuevo estudiante"""
|
96 |
-
try:
|
97 |
-
if get_student_user(username):
|
98 |
-
logger.warning(f"Estudiante ya existe: {username}")
|
99 |
-
return False
|
100 |
-
|
101 |
-
hashed_password = hash_password(password)
|
102 |
-
|
103 |
-
# Asegurarse que additional_info tenga el rol correcto
|
104 |
-
if not additional_info:
|
105 |
-
additional_info = {}
|
106 |
-
additional_info['role'] = 'Estudiante'
|
107 |
-
|
108 |
-
success = create_student_user(username, hashed_password, additional_info)
|
109 |
-
if success:
|
110 |
-
logger.info(f"Nuevo estudiante registrado: {username}")
|
111 |
-
return True
|
112 |
-
|
113 |
-
logger.error(f"Error al crear estudiante: {username}")
|
114 |
-
return False
|
115 |
-
|
116 |
-
except Exception as e:
|
117 |
-
logger.error(f"Error al registrar estudiante: {str(e)}")
|
118 |
-
return False
|
119 |
-
|
120 |
-
def update_student_info(username, new_info):
|
121 |
-
"""Actualiza la información de un estudiante"""
|
122 |
-
try:
|
123 |
-
if 'password' in new_info:
|
124 |
-
new_info['password'] = hash_password(new_info['password'])
|
125 |
-
|
126 |
-
success = update_student_user(username, new_info)
|
127 |
-
if success:
|
128 |
-
logger.info(f"Información actualizada: {username}")
|
129 |
-
return True
|
130 |
-
|
131 |
-
logger.error(f"Error al actualizar: {username}")
|
132 |
-
return False
|
133 |
-
|
134 |
-
except Exception as e:
|
135 |
-
logger.error(f"Error en actualización: {str(e)}")
|
136 |
-
return False
|
137 |
-
|
138 |
-
def delete_student(username):
|
139 |
-
"""Elimina un estudiante"""
|
140 |
-
try:
|
141 |
-
success = delete_student_user(username)
|
142 |
-
if success:
|
143 |
-
logger.info(f"Estudiante eliminado: {username}")
|
144 |
-
return True
|
145 |
-
|
146 |
-
logger.error(f"Error al eliminar: {username}")
|
147 |
-
return False
|
148 |
-
|
149 |
-
except Exception as e:
|
150 |
-
logger.error(f"Error en eliminación: {str(e)}")
|
151 |
-
return False
|
152 |
-
|
153 |
-
def logout():
|
154 |
-
"""Cierra la sesión del usuario"""
|
155 |
-
try:
|
156 |
-
if 'session_id' in st.session_state and 'username' in st.session_state:
|
157 |
-
success = record_logout(
|
158 |
-
st.session_state.username,
|
159 |
-
st.session_state.session_id
|
160 |
-
)
|
161 |
-
if success:
|
162 |
-
logger.info(f"Sesión cerrada: {st.session_state.username}")
|
163 |
-
else:
|
164 |
-
logger.warning(f"Error al registrar cierre de sesión: {st.session_state.username}")
|
165 |
-
|
166 |
-
except Exception as e:
|
167 |
-
logger.error(f"Error en logout: {str(e)}")
|
168 |
-
finally:
|
169 |
-
st.session_state.clear()
|
170 |
-
|
171 |
-
def hash_password(password):
|
172 |
-
"""Hashea una contraseña"""
|
173 |
-
return bcrypt.hashpw(
|
174 |
-
password.encode('utf-8'),
|
175 |
-
bcrypt.gensalt()
|
176 |
-
).decode('utf-8')
|
177 |
-
|
178 |
-
def verify_password(stored_password, provided_password):
|
179 |
-
"""Verifica una contraseña"""
|
180 |
-
return bcrypt.checkpw(
|
181 |
-
provided_password.encode('utf-8'),
|
182 |
-
stored_password.encode('utf-8')
|
183 |
-
)
|
184 |
-
|
185 |
-
__all__ = [
|
186 |
-
'authenticate_user',
|
187 |
-
'authenticate_admin',
|
188 |
-
'authenticate_student',
|
189 |
-
'register_student',
|
190 |
-
'update_student_info',
|
191 |
-
'delete_student',
|
192 |
-
'logout',
|
193 |
-
'hash_password',
|
194 |
-
'verify_password'
|
195 |
]
|
|
|
1 |
+
##########modules/auth/auth.py
|
2 |
+
|
3 |
+
import os
|
4 |
+
import streamlit as st
|
5 |
+
from azure.cosmos import CosmosClient, exceptions
|
6 |
+
from azure.cosmos.exceptions import CosmosHttpResponseError
|
7 |
+
import bcrypt
|
8 |
+
import base64
|
9 |
+
from ..database.sql_db import (
|
10 |
+
get_user,
|
11 |
+
get_student_user,
|
12 |
+
get_admin_user,
|
13 |
+
create_student_user,
|
14 |
+
update_student_user,
|
15 |
+
delete_student_user,
|
16 |
+
record_login,
|
17 |
+
record_logout
|
18 |
+
)
|
19 |
+
|
20 |
+
import logging
|
21 |
+
|
22 |
+
from datetime import datetime, timezone
|
23 |
+
|
24 |
+
logger = logging.getLogger(__name__)
|
25 |
+
|
26 |
+
def clean_and_validate_key(key):
    """Clean and validate a CosmosDB key.

    Strips surrounding whitespace, restores any '=' base64 padding that
    may have been lost, and verifies the result decodes as base64.

    Args:
        key: Raw key string (typically from the environment).

    Returns:
        str: The cleaned, padding-complete key.

    Raises:
        ValueError: If the key is not valid base64.
    """
    key = key.strip()
    # Base64 text length must be a multiple of 4; restore stripped padding.
    while len(key) % 4 != 0:
        key += '='
    try:
        base64.b64decode(key)
        return key
    except Exception as exc:
        # Bug fix: the original bare `except:` swallowed everything
        # (including KeyboardInterrupt) and discarded the cause.
        raise ValueError("La clave proporcionada no es válida") from exc
|
36 |
+
|
37 |
+
# Check the required environment variables at import time so a
# misconfigured deployment fails fast instead of at first query.
endpoint = os.getenv("COSMOS_ENDPOINT")
key = os.getenv("COSMOS_KEY")

if not endpoint or not key:
    raise ValueError("Las variables de entorno COSMOS_ENDPOINT y COSMOS_KEY deben estar configuradas")

# Normalize and validate the key once; raises ValueError if not base64.
key = clean_and_validate_key(key)
|
45 |
+
|
46 |
+
|
47 |
+
def authenticate_user(username, password):
    """Authenticate a user and record the login session.

    Returns:
        tuple: (True, role) on success, (False, None) otherwise.
    """
    try:
        record = get_user(username)
        if not record:
            logger.warning(f"Usuario no encontrado: {username}")
            return False, None

        if not verify_password(record['password'], password):
            logger.warning(f"Contraseña incorrecta para usuario: {username}")
            return False, None

        logger.info(f"Usuario autenticado: {username}, Rol: {record['role']}")

        # Session recording is best-effort: a failure here must not
        # block a successful authentication.
        try:
            session_id = record_login(username)
            if session_id:
                st.session_state.session_id = session_id
                st.session_state.username = username
                st.session_state.login_time = datetime.now(timezone.utc).isoformat()
                logger.info(f"Sesión iniciada: {session_id}")
            else:
                logger.warning("No se pudo registrar la sesión")
        except Exception as e:
            logger.error(f"Error al registrar inicio de sesión: {str(e)}")

        return True, record['role']

    except Exception as e:
        logger.error(f"Error durante la autenticación del usuario: {str(e)}")
        return False, None
|
79 |
+
|
80 |
+
def authenticate_student(username, password):
    """Authenticate a user, accepting them only if their role is student."""
    ok, role = authenticate_user(username, password)
    return (True, role) if ok and role == 'Estudiante' else (False, None)
|
86 |
+
|
87 |
+
def authenticate_admin(username, password):
    """Authenticate a user, accepting them only if their role is admin."""
    ok, role = authenticate_user(username, password)
    return (True, role) if ok and role == 'Administrador' else (False, None)
|
93 |
+
|
94 |
+
def register_student(username, password, additional_info=None):
    """Register a new student account.

    Returns:
        bool: True on success, False if the user exists or creation fails.
    """
    try:
        if get_student_user(username):
            logger.warning(f"Estudiante ya existe: {username}")
            return False

        hashed_password = hash_password(password)

        # Ensure the extra info always carries the student role.
        info = additional_info or {}
        info['role'] = 'Estudiante'

        if create_student_user(username, hashed_password, info):
            logger.info(f"Nuevo estudiante registrado: {username}")
            return True

        logger.error(f"Error al crear estudiante: {username}")
        return False

    except Exception as e:
        logger.error(f"Error al registrar estudiante: {str(e)}")
        return False
|
119 |
+
|
120 |
+
def update_student_info(username, new_info):
    """Update a student's stored information, hashing any new password."""
    try:
        # Never persist a plaintext password.
        if 'password' in new_info:
            new_info['password'] = hash_password(new_info['password'])

        if update_student_user(username, new_info):
            logger.info(f"Información actualizada: {username}")
            return True

        logger.error(f"Error al actualizar: {username}")
        return False

    except Exception as e:
        logger.error(f"Error en actualización: {str(e)}")
        return False
|
137 |
+
|
138 |
+
def delete_student(username):
    """Delete a student account; returns True only on confirmed deletion."""
    try:
        if delete_student_user(username):
            logger.info(f"Estudiante eliminado: {username}")
            return True

        logger.error(f"Error al eliminar: {username}")
        return False

    except Exception as e:
        logger.error(f"Error en eliminación: {str(e)}")
        return False
|
152 |
+
|
153 |
+
def logout():
    """Close the user's session, recording the logout when possible.

    The Streamlit session state is always cleared, even if recording fails.
    """
    try:
        state = st.session_state
        if 'session_id' in state and 'username' in state:
            if record_logout(state.username, state.session_id):
                logger.info(f"Sesión cerrada: {state.username}")
            else:
                logger.warning(f"Error al registrar cierre de sesión: {state.username}")

    except Exception as e:
        logger.error(f"Error en logout: {str(e)}")
    finally:
        # Always drop local session data.
        st.session_state.clear()
|
170 |
+
|
171 |
+
def hash_password(password):
    """Hash a plaintext password with bcrypt, returning a UTF-8 string."""
    raw = password.encode('utf-8')
    digest = bcrypt.hashpw(raw, bcrypt.gensalt())
    return digest.decode('utf-8')
|
177 |
+
|
178 |
+
def verify_password(stored_password, provided_password):
    """Check a plaintext password against its stored bcrypt hash."""
    candidate = provided_password.encode('utf-8')
    reference = stored_password.encode('utf-8')
    return bcrypt.checkpw(candidate, reference)
|
184 |
+
|
185 |
+
# Public API of the auth module.
__all__ = [
    'authenticate_user',
    'authenticate_admin',
    'authenticate_student',
    'register_student',
    'update_student_info',
    'delete_student',
    'logout',
    'hash_password',
    'verify_password'
]
|
modules/chatbot/__init__.py
CHANGED
@@ -1,8 +1,8 @@
|
|
1 |
-
# modules/chatbot/__init__.py
|
2 |
-
from .sidebar_chat import display_sidebar_chat
|
3 |
-
from .chat_process import ChatProcessor
|
4 |
-
|
5 |
-
__all__ = [
|
6 |
-
'display_sidebar_chat',
|
7 |
-
'ChatProcessor'
|
8 |
]
|
|
|
1 |
+
# modules/chatbot/__init__.py
|
2 |
+
from .sidebar_chat import display_sidebar_chat
|
3 |
+
from .chat_process import ChatProcessor
|
4 |
+
|
5 |
+
# Public API of the chatbot package.
__all__ = [
    'display_sidebar_chat',
    'ChatProcessor'
]
|
modules/chatbot/chabot.py
CHANGED
@@ -1,60 +1,60 @@
|
|
1 |
-
# chatbot/chatbot.py
|
2 |
-
import streamlit as st
|
3 |
-
from typing import Dict, List, Tuple
|
4 |
-
import logging
|
5 |
-
|
6 |
-
logger = logging.getLogger(__name__)
|
7 |
-
|
8 |
-
class AIdeaTextChatbot:
|
9 |
-
def __init__(self, lang_code: str):
|
10 |
-
self.lang_code = lang_code
|
11 |
-
self.conversation_history = []
|
12 |
-
self.context = {
|
13 |
-
'current_analysis': None,
|
14 |
-
'last_question': None,
|
15 |
-
'user_profile': None
|
16 |
-
}
|
17 |
-
|
18 |
-
def process_message(self, message: str, context: Dict = None) -> str:
|
19 |
-
"""
|
20 |
-
Procesa el mensaje del usuario y genera una respuesta
|
21 |
-
"""
|
22 |
-
try:
|
23 |
-
# Actualizar contexto
|
24 |
-
if context:
|
25 |
-
self.context.update(context)
|
26 |
-
|
27 |
-
# Analizar intención del mensaje
|
28 |
-
intent = self._analyze_intent(message)
|
29 |
-
|
30 |
-
# Generar respuesta basada en la intención
|
31 |
-
response = self._generate_response(intent, message)
|
32 |
-
|
33 |
-
# Actualizar historial
|
34 |
-
self._update_history(message, response)
|
35 |
-
|
36 |
-
return response
|
37 |
-
|
38 |
-
except Exception as e:
|
39 |
-
logger.error(f"Error procesando mensaje: {str(e)}")
|
40 |
-
return self._get_fallback_response()
|
41 |
-
|
42 |
-
def _analyze_intent(self, message: str) -> str:
|
43 |
-
"""
|
44 |
-
Analiza la intención del mensaje del usuario
|
45 |
-
"""
|
46 |
-
# Implementar análisis de intención
|
47 |
-
pass
|
48 |
-
|
49 |
-
def _generate_response(self, intent: str, message: str) -> str:
|
50 |
-
"""
|
51 |
-
Genera una respuesta basada en la intención
|
52 |
-
"""
|
53 |
-
# Implementar generación de respuesta
|
54 |
-
pass
|
55 |
-
|
56 |
-
def get_conversation_history(self) -> List[Tuple[str, str]]:
|
57 |
-
"""
|
58 |
-
Retorna el historial de conversación
|
59 |
-
"""
|
60 |
return self.conversation_history
|
|
|
1 |
+
# chatbot/chatbot.py
|
2 |
+
import streamlit as st
|
3 |
+
from typing import Dict, List, Tuple
|
4 |
+
import logging
|
5 |
+
|
6 |
+
logger = logging.getLogger(__name__)
|
7 |
+
|
8 |
+
class AIdeaTextChatbot:
    """Lightweight conversational assistant for AIdeaText.

    Keeps a per-instance conversation history and a mutable analysis
    context.  Message handling is split into an intent-analysis step and
    a response-generation step; both currently use minimal baseline
    heuristics until a real NLU/NLG backend is wired in.
    """

    def __init__(self, lang_code: str):
        # Language code used to localize responses (e.g. 'es', 'en').
        self.lang_code = lang_code
        # List of (user_message, assistant_response) tuples.
        self.conversation_history = []
        # Shared analysis context, updated by callers on each message.
        self.context = {
            'current_analysis': None,
            'last_question': None,
            'user_profile': None
        }

    def process_message(self, message: str, context: Dict = None) -> str:
        """Process a user message and return the assistant's reply.

        Args:
            message: Raw user input.
            context: Optional extra context merged into ``self.context``.

        Returns:
            str: The generated response (a fallback message on error).
        """
        try:
            # Merge any caller-supplied context first.
            if context:
                self.context.update(context)

            # Analyze the message intent.
            intent = self._analyze_intent(message)

            # Generate a response based on the intent.
            response = self._generate_response(intent, message)

            # Record the exchange.
            self._update_history(message, response)

            return response

        except Exception as e:
            logger.error(f"Error procesando mensaje: {str(e)}")
            return self._get_fallback_response()

    def _analyze_intent(self, message: str) -> str:
        """Classify the user's message.

        BUG FIX: this was an unimplemented ``pass`` stub that returned
        None.  Baseline heuristic: a trailing '?' marks a question.
        """
        return 'question' if message.strip().endswith('?') else 'statement'

    def _generate_response(self, intent: str, message: str) -> str:
        """Generate a reply for the detected intent.

        BUG FIX: this was an unimplemented ``pass`` stub that returned
        None, so ``process_message`` always "replied" with None.
        Baseline templates localized by ``self.lang_code``.
        """
        if self.lang_code == 'es':
            return ("He recibido tu pregunta." if intent == 'question'
                    else "He recibido tu mensaje.")
        return ("I received your question." if intent == 'question'
                else "I received your message.")

    def _update_history(self, message: str, response: str) -> None:
        """Append the exchange to the history and track the last message.

        BUG FIX: ``process_message`` called this method but it was never
        defined, so every call raised AttributeError.
        """
        self.conversation_history.append((message, response))
        self.context['last_question'] = message

    def _get_fallback_response(self) -> str:
        """Return a safe reply used when message processing fails.

        BUG FIX: referenced by the error handler but previously
        undefined, so the handler itself raised AttributeError.
        """
        if self.lang_code == 'es':
            return "Lo siento, ocurrió un error al procesar tu mensaje."
        return "Sorry, something went wrong while processing your message."

    def get_conversation_history(self) -> List[Tuple[str, str]]:
        """Return the conversation history as (message, response) tuples."""
        return self.conversation_history
|
modules/chatbot/chat_interface.py
CHANGED
@@ -1,25 +1,25 @@
|
|
1 |
-
# chatbot/chat_interface.py
|
2 |
-
import streamlit as st
|
3 |
-
from .chatbot import AIdeaTextChatbot
|
4 |
-
|
5 |
-
def display_chat_interface(lang_code: str, chat_translations: Dict):
|
6 |
-
"""
|
7 |
-
Muestra la interfaz del chat
|
8 |
-
"""
|
9 |
-
# Inicializar chatbot si no existe
|
10 |
-
if 'chatbot' not in st.session_state:
|
11 |
-
st.session_state.chatbot = AIdeaTextChatbot(lang_code)
|
12 |
-
|
13 |
-
# Mostrar historial
|
14 |
-
for msg in st.session_state.chatbot.get_conversation_history():
|
15 |
-
with st.chat_message(msg[0]):
|
16 |
-
st.write(msg[1])
|
17 |
-
|
18 |
-
# Input del usuario
|
19 |
-
if prompt := st.chat_input(chat_translations.get('chat_placeholder', 'Escribe tu mensaje...')):
|
20 |
-
# Procesar mensaje
|
21 |
-
response = st.session_state.chatbot.process_message(prompt)
|
22 |
-
|
23 |
-
# Mostrar respuesta
|
24 |
-
with st.chat_message("assistant"):
|
25 |
st.write(response)
|
|
|
1 |
+
# chatbot/chat_interface.py
|
2 |
+
import streamlit as st
|
3 |
+
from .chatbot import AIdeaTextChatbot
|
4 |
+
|
5 |
+
def display_chat_interface(lang_code: str, chat_translations: dict):
    """Render the chat interface.

    BUG FIX: the parameter was annotated ``Dict`` but ``typing.Dict`` is
    never imported in this module, so importing the module raised
    NameError at definition time; the builtin ``dict`` needs no import.

    Args:
        lang_code: Language code passed to the chatbot.
        chat_translations: UI strings (uses the 'chat_placeholder' key).
    """
    # Create the chatbot once per session.
    if 'chatbot' not in st.session_state:
        st.session_state.chatbot = AIdeaTextChatbot(lang_code)

    # Replay the stored conversation.
    for msg in st.session_state.chatbot.get_conversation_history():
        with st.chat_message(msg[0]):
            st.write(msg[1])

    # Handle new user input.
    if prompt := st.chat_input(chat_translations.get('chat_placeholder', 'Escribe tu mensaje...')):
        # Echo the user's message so it is visible before the next rerun.
        with st.chat_message("user"):
            st.write(prompt)

        # Process the message.
        response = st.session_state.chatbot.process_message(prompt)

        # Show the assistant's reply.
        with st.chat_message("assistant"):
            st.write(response)
|
modules/chatbot/chat_process.py
CHANGED
@@ -1,56 +1,56 @@
|
|
1 |
-
# modules/chatbot/chat_process.py
|
2 |
-
import os
|
3 |
-
import anthropic
|
4 |
-
import logging
|
5 |
-
from typing import Dict, Generator
|
6 |
-
|
7 |
-
logger = logging.getLogger(__name__)
|
8 |
-
|
9 |
-
####################################################
|
10 |
-
class ChatProcessor:
|
11 |
-
def __init__(self):
|
12 |
-
"""Inicializa el procesador de chat con la API de Claude"""
|
13 |
-
api_key = os.environ.get("ANTHROPIC_API_KEY")
|
14 |
-
if not api_key:
|
15 |
-
raise ValueError("No se encontró la clave API de Anthropic. Asegúrate de configurarla en las variables de entorno.")
|
16 |
-
self.client = anthropic.Anthropic(api_key=api_key)
|
17 |
-
self.conversation_history = []
|
18 |
-
|
19 |
-
def process_chat_input(self, message: str, lang_code: str) -> Generator[str, None, None]:
|
20 |
-
"""Procesa el mensaje y genera una respuesta"""
|
21 |
-
try:
|
22 |
-
# Agregar mensaje a la historia
|
23 |
-
self.conversation_history.append({"role": "user", "content": message})
|
24 |
-
|
25 |
-
# Generar respuesta usando la API de Claude
|
26 |
-
response = self.client.messages.create(
|
27 |
-
model="claude-3-5-sonnet-20241022",
|
28 |
-
messages=self.conversation_history,
|
29 |
-
max_tokens=8000, # Añadimos este parámetro requerido
|
30 |
-
temperature=0.7,
|
31 |
-
)
|
32 |
-
|
33 |
-
# Procesar la respuesta
|
34 |
-
claude_response = response.content[0].text
|
35 |
-
self.conversation_history.append({"role": "assistant", "content": claude_response})
|
36 |
-
|
37 |
-
# Mantener un historial limitado
|
38 |
-
if len(self.conversation_history) > 10:
|
39 |
-
self.conversation_history = self.conversation_history[-10:]
|
40 |
-
|
41 |
-
# Dividir la respuesta en palabras para streaming
|
42 |
-
words = claude_response.split()
|
43 |
-
for word in words:
|
44 |
-
yield word + " "
|
45 |
-
|
46 |
-
except Exception as e:
|
47 |
-
logger.error(f"Error en process_chat_input: {str(e)}")
|
48 |
-
yield f"Error: {str(e)}"
|
49 |
-
|
50 |
-
def get_conversation_history(self) -> list:
|
51 |
-
"""Retorna el historial de la conversación"""
|
52 |
-
return self.conversation_history
|
53 |
-
|
54 |
-
def clear_history(self):
|
55 |
-
"""Limpia el historial de la conversación"""
|
56 |
self.conversation_history = []
|
|
|
1 |
+
# modules/chatbot/chat_process.py
|
2 |
+
import os
|
3 |
+
import anthropic
|
4 |
+
import logging
|
5 |
+
from typing import Dict, Generator
|
6 |
+
|
7 |
+
logger = logging.getLogger(__name__)
|
8 |
+
|
9 |
+
####################################################
|
10 |
+
class ChatProcessor:
    """Chat processor backed by the Anthropic Claude Messages API.

    Maintains a bounded in-memory conversation history and streams each
    reply back to the caller word by word.
    """

    def __init__(self):
        """Initialize the processor with a Claude API client.

        Raises:
            ValueError: if the ANTHROPIC_API_KEY environment variable is
                not set.
        """
        api_key = os.environ.get("ANTHROPIC_API_KEY")
        if not api_key:
            raise ValueError("No se encontró la clave API de Anthropic. Asegúrate de configurarla en las variables de entorno.")
        self.client = anthropic.Anthropic(api_key=api_key)
        self.conversation_history = []

    def process_chat_input(self, message: str, lang_code: str) -> Generator[str, None, None]:
        """Process a user message and yield the reply word by word.

        Args:
            message: The user's message.
            lang_code: Language code.  NOTE(review): currently unused —
                presumably intended for a localized system prompt; verify
                against callers before removing.

        Yields:
            str: Successive words of Claude's reply (or an error string).
        """
        try:
            # Append the user message to the history.
            self.conversation_history.append({"role": "user", "content": message})

            # Generate the reply via the Claude API.
            response = self.client.messages.create(
                model="claude-3-5-sonnet-20241022",
                messages=self.conversation_history,
                max_tokens=8000,  # required parameter
                temperature=0.7,
            )

            # Record the assistant's reply.
            claude_response = response.content[0].text
            self.conversation_history.append({"role": "assistant", "content": claude_response})

            # Keep only the most recent messages.
            if len(self.conversation_history) > 10:
                self.conversation_history = self.conversation_history[-10:]
                # BUG FIX: the Messages API requires the first message to
                # be from the user; a blind tail-slice can leave an
                # assistant message first and make the next call fail.
                if self.conversation_history[0].get("role") != "user":
                    self.conversation_history = self.conversation_history[1:]

            # Stream the reply back one word at a time.
            words = claude_response.split()
            for word in words:
                yield word + " "

        except Exception as e:
            logger.error(f"Error en process_chat_input: {str(e)}")
            yield f"Error: {str(e)}"

    def get_conversation_history(self) -> list:
        """Return the conversation history."""
        return self.conversation_history

    def clear_history(self):
        """Clear the conversation history."""
        self.conversation_history = []
|
modules/chatbot/sidebar_chat.py
CHANGED
@@ -1,113 +1,113 @@
|
|
1 |
-
# modules/chatbot/sidebar_chat.py
|
2 |
-
import streamlit as st
|
3 |
-
from .chat_process import ChatProcessor
|
4 |
-
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
5 |
-
import logging
|
6 |
-
|
7 |
-
logger = logging.getLogger(__name__)
|
8 |
-
|
9 |
-
def display_sidebar_chat(lang_code: str, chatbot_t: dict):
|
10 |
-
"""
|
11 |
-
Muestra el chatbot en el sidebar
|
12 |
-
Args:
|
13 |
-
lang_code: Código del idioma
|
14 |
-
chatbot_t: Diccionario de traducciones del chatbot
|
15 |
-
"""
|
16 |
-
# Asegurar que tenemos las traducciones necesarias
|
17 |
-
default_translations = {
|
18 |
-
'error_message': 'An error occurred',
|
19 |
-
'expand_chat': 'Open Assistant',
|
20 |
-
'initial_message': 'Hi! How can I help?',
|
21 |
-
'input_placeholder': 'Type your message...',
|
22 |
-
'clear_chat': 'Clear chat'
|
23 |
-
}
|
24 |
-
|
25 |
-
# Combinar traducciones por defecto con las proporcionadas
|
26 |
-
translations = {**default_translations, **chatbot_t}
|
27 |
-
|
28 |
-
with st.sidebar:
|
29 |
-
# Chatbot expandible
|
30 |
-
with st.expander(translations['expand_chat'], expanded=False):
|
31 |
-
try:
|
32 |
-
# Inicializar procesador si no existe
|
33 |
-
if 'chat_processor' not in st.session_state:
|
34 |
-
try:
|
35 |
-
st.session_state.chat_processor = ChatProcessor()
|
36 |
-
except Exception as e:
|
37 |
-
logger.error(f"Error inicializando ChatProcessor: {str(e)}")
|
38 |
-
st.error("Error: No se pudo inicializar el chat. Verifica la configuración.")
|
39 |
-
return
|
40 |
-
|
41 |
-
# Inicializar mensajes si no existen
|
42 |
-
if 'sidebar_messages' not in st.session_state:
|
43 |
-
# Intentar recuperar historial previo
|
44 |
-
try:
|
45 |
-
history = get_chat_history(st.session_state.username, 'sidebar', 10)
|
46 |
-
if history:
|
47 |
-
st.session_state.sidebar_messages = history[0]['messages']
|
48 |
-
else:
|
49 |
-
st.session_state.sidebar_messages = [
|
50 |
-
{"role": "assistant", "content": translations['initial_message']}
|
51 |
-
]
|
52 |
-
except Exception as e:
|
53 |
-
logger.error(f"Error recuperando historial: {str(e)}")
|
54 |
-
st.session_state.sidebar_messages = [
|
55 |
-
{"role": "assistant", "content": translations['initial_message']}
|
56 |
-
]
|
57 |
-
|
58 |
-
# Contenedor del chat
|
59 |
-
chat_container = st.container()
|
60 |
-
|
61 |
-
# Mostrar mensajes existentes
|
62 |
-
with chat_container:
|
63 |
-
for message in st.session_state.sidebar_messages:
|
64 |
-
with st.chat_message(message["role"]):
|
65 |
-
st.markdown(message["content"])
|
66 |
-
|
67 |
-
# Input del usuario
|
68 |
-
user_input = st.text_input(
|
69 |
-
translations['input_placeholder'],
|
70 |
-
key='sidebar_chat_input'
|
71 |
-
)
|
72 |
-
|
73 |
-
if user_input:
|
74 |
-
# Agregar mensaje del usuario
|
75 |
-
st.session_state.sidebar_messages.append(
|
76 |
-
{"role": "user", "content": user_input}
|
77 |
-
)
|
78 |
-
|
79 |
-
# Generar y mostrar respuesta
|
80 |
-
with chat_container:
|
81 |
-
with st.chat_message("assistant"):
|
82 |
-
message_placeholder = st.empty()
|
83 |
-
full_response = ""
|
84 |
-
|
85 |
-
for chunk in st.session_state.chat_processor.process_chat_input(
|
86 |
-
user_input,
|
87 |
-
lang_code
|
88 |
-
):
|
89 |
-
full_response += chunk
|
90 |
-
message_placeholder.markdown(full_response)
|
91 |
-
|
92 |
-
# Guardar respuesta
|
93 |
-
st.session_state.sidebar_messages.append(
|
94 |
-
{"role": "assistant", "content": full_response.strip()}
|
95 |
-
)
|
96 |
-
|
97 |
-
# En la función donde guardamos el chat
|
98 |
-
store_chat_history(
|
99 |
-
username=st.session_state.username,
|
100 |
-
messages=st.session_state.sidebar_messages,
|
101 |
-
analysis_type='sidebar' # Especificar el tipo
|
102 |
-
)
|
103 |
-
|
104 |
-
# Botón para limpiar chat
|
105 |
-
if st.button(translations['clear_chat']):
|
106 |
-
st.session_state.sidebar_messages = [
|
107 |
-
{"role": "assistant", "content": translations['initial_message']}
|
108 |
-
]
|
109 |
-
st.rerun()
|
110 |
-
|
111 |
-
except Exception as e:
|
112 |
-
logger.error(f"Error en sidebar chat: {str(e)}")
|
113 |
st.error(translations['error_message'])
|
|
|
1 |
+
# modules/chatbot/sidebar_chat.py
|
2 |
+
import streamlit as st
|
3 |
+
from .chat_process import ChatProcessor
|
4 |
+
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
5 |
+
import logging
|
6 |
+
|
7 |
+
logger = logging.getLogger(__name__)
|
8 |
+
|
9 |
+
def display_sidebar_chat(lang_code: str, chatbot_t: dict):
    """
    Render the chatbot inside the Streamlit sidebar.
    Args:
        lang_code: Language code forwarded to the chat processor
        chatbot_t: Chatbot translation dictionary (may be partial)
    """
    # Make sure every translation key used below has a fallback value.
    default_translations = {
        'error_message': 'An error occurred',
        'expand_chat': 'Open Assistant',
        'initial_message': 'Hi! How can I help?',
        'input_placeholder': 'Type your message...',
        'clear_chat': 'Clear chat'
    }

    # Caller-supplied translations override the defaults.
    translations = {**default_translations, **chatbot_t}

    with st.sidebar:
        # Collapsible chatbot panel.
        with st.expander(translations['expand_chat'], expanded=False):
            try:
                # Create the Claude-backed processor once per session.
                if 'chat_processor' not in st.session_state:
                    try:
                        st.session_state.chat_processor = ChatProcessor()
                    except Exception as e:
                        logger.error(f"Error inicializando ChatProcessor: {str(e)}")
                        st.error("Error: No se pudo inicializar el chat. Verifica la configuración.")
                        return

                # Seed the message list if this is a fresh session.
                if 'sidebar_messages' not in st.session_state:
                    # Try to restore previous history from the database.
                    try:
                        history = get_chat_history(st.session_state.username, 'sidebar', 10)
                        if history:
                            # get_chat_history returns newest-first; take the latest.
                            st.session_state.sidebar_messages = history[0]['messages']
                        else:
                            st.session_state.sidebar_messages = [
                                {"role": "assistant", "content": translations['initial_message']}
                            ]
                    except Exception as e:
                        logger.error(f"Error recuperando historial: {str(e)}")
                        st.session_state.sidebar_messages = [
                            {"role": "assistant", "content": translations['initial_message']}
                        ]

                # Chat container.
                chat_container = st.container()

                # Render the existing messages.
                with chat_container:
                    for message in st.session_state.sidebar_messages:
                        with st.chat_message(message["role"]):
                            st.markdown(message["content"])

                # User input.
                user_input = st.text_input(
                    translations['input_placeholder'],
                    key='sidebar_chat_input'
                )

                if user_input:
                    # Record the user's message.
                    st.session_state.sidebar_messages.append(
                        {"role": "user", "content": user_input}
                    )

                    # Stream the assistant's reply into a placeholder.
                    with chat_container:
                        with st.chat_message("assistant"):
                            message_placeholder = st.empty()
                            full_response = ""

                            for chunk in st.session_state.chat_processor.process_chat_input(
                                user_input,
                                lang_code
                            ):
                                full_response += chunk
                                message_placeholder.markdown(full_response)

                            # Store the finished reply.
                            st.session_state.sidebar_messages.append(
                                {"role": "assistant", "content": full_response.strip()}
                            )

                            # Persist the whole conversation.
                            store_chat_history(
                                username=st.session_state.username,
                                messages=st.session_state.sidebar_messages,
                                analysis_type='sidebar'  # record the conversation type
                            )

                # Button to reset the conversation.
                if st.button(translations['clear_chat']):
                    st.session_state.sidebar_messages = [
                        {"role": "assistant", "content": translations['initial_message']}
                    ]
                    st.rerun()

            except Exception as e:
                logger.error(f"Error en sidebar chat: {str(e)}")
                st.error(translations['error_message'])
|
modules/database/chat_mongo_db.py
CHANGED
@@ -1,116 +1,116 @@
|
|
1 |
-
# /modules/database/chat_mongo_db.py
|
2 |
-
from .mongo_db import insert_document, find_documents, get_collection
|
3 |
-
from datetime import datetime, timezone
|
4 |
-
import logging
|
5 |
-
|
6 |
-
logger = logging.getLogger(__name__)
|
7 |
-
COLLECTION_NAME = 'chat_history-v3'
|
8 |
-
|
9 |
-
def get_chat_history(username: str, analysis_type: str = 'sidebar', limit: int = None) -> list:
|
10 |
-
"""
|
11 |
-
Recupera el historial del chat.
|
12 |
-
|
13 |
-
Args:
|
14 |
-
username: Nombre del usuario
|
15 |
-
analysis_type: Tipo de análisis ('sidebar' por defecto)
|
16 |
-
limit: Límite de conversaciones a recuperar
|
17 |
-
|
18 |
-
Returns:
|
19 |
-
list: Lista de conversaciones con formato
|
20 |
-
"""
|
21 |
-
try:
|
22 |
-
query = {
|
23 |
-
"username": username,
|
24 |
-
"analysis_type": analysis_type
|
25 |
-
}
|
26 |
-
|
27 |
-
collection = get_collection(COLLECTION_NAME)
|
28 |
-
if collection is None:
|
29 |
-
logger.error("No se pudo obtener la colección de chat")
|
30 |
-
return []
|
31 |
-
|
32 |
-
# Obtener y formatear conversaciones
|
33 |
-
cursor = collection.find(query).sort("timestamp", -1)
|
34 |
-
if limit:
|
35 |
-
cursor = cursor.limit(limit)
|
36 |
-
|
37 |
-
conversations = []
|
38 |
-
for chat in cursor:
|
39 |
-
try:
|
40 |
-
formatted_chat = {
|
41 |
-
'timestamp': chat['timestamp'],
|
42 |
-
'messages': [
|
43 |
-
{
|
44 |
-
'role': msg.get('role', 'unknown'),
|
45 |
-
'content': msg.get('content', '')
|
46 |
-
}
|
47 |
-
for msg in chat.get('messages', [])
|
48 |
-
]
|
49 |
-
}
|
50 |
-
conversations.append(formatted_chat)
|
51 |
-
except Exception as e:
|
52 |
-
logger.error(f"Error formateando chat: {str(e)}")
|
53 |
-
continue
|
54 |
-
|
55 |
-
return conversations
|
56 |
-
|
57 |
-
except Exception as e:
|
58 |
-
logger.error(f"Error al recuperar historial de chat: {str(e)}")
|
59 |
-
return []
|
60 |
-
|
61 |
-
def store_chat_history(username: str, messages: list, analysis_type: str = 'sidebar') -> bool:
|
62 |
-
"""
|
63 |
-
Guarda el historial del chat.
|
64 |
-
|
65 |
-
Args:
|
66 |
-
username: Nombre del usuario
|
67 |
-
messages: Lista de mensajes a guardar
|
68 |
-
analysis_type: Tipo de análisis
|
69 |
-
|
70 |
-
Returns:
|
71 |
-
bool: True si se guardó correctamente
|
72 |
-
"""
|
73 |
-
try:
|
74 |
-
collection = get_collection(COLLECTION_NAME)
|
75 |
-
if collection is None:
|
76 |
-
logger.error("No se pudo obtener la colección de chat")
|
77 |
-
return False
|
78 |
-
|
79 |
-
# Formatear mensajes antes de guardar
|
80 |
-
formatted_messages = [
|
81 |
-
{
|
82 |
-
'role': msg.get('role', 'unknown'),
|
83 |
-
'content': msg.get('content', ''),
|
84 |
-
'timestamp': datetime.now(timezone.utc).isoformat()
|
85 |
-
}
|
86 |
-
for msg in messages
|
87 |
-
]
|
88 |
-
|
89 |
-
chat_document = {
|
90 |
-
'username': username,
|
91 |
-
'timestamp': datetime.now(timezone.utc).isoformat(),
|
92 |
-
'messages': formatted_messages,
|
93 |
-
'analysis_type': analysis_type
|
94 |
-
}
|
95 |
-
|
96 |
-
result = collection.insert_one(chat_document)
|
97 |
-
if result.inserted_id:
|
98 |
-
logger.info(f"Historial de chat guardado con ID: {result.inserted_id} para el usuario: {username}")
|
99 |
-
return True
|
100 |
-
|
101 |
-
logger.error("No se pudo insertar el documento")
|
102 |
-
return False
|
103 |
-
|
104 |
-
except Exception as e:
|
105 |
-
logger.error(f"Error al guardar historial de chat: {str(e)}")
|
106 |
-
return False
|
107 |
-
|
108 |
-
|
109 |
-
#def get_chat_history(username, analysis_type=None, limit=10):
|
110 |
-
# query = {"username": username}
|
111 |
-
# if analysis_type:
|
112 |
-
# query["analysis_type"] = analysis_type
|
113 |
-
|
114 |
-
# return find_documents(COLLECTION_NAME, query, sort=[("timestamp", -1)], limit=limit)
|
115 |
-
|
116 |
# Agregar funciones para actualizar y eliminar chat si es necesario
|
|
|
1 |
+
# /modules/database/chat_mongo_db.py
|
2 |
+
from .mongo_db import insert_document, find_documents, get_collection
|
3 |
+
from datetime import datetime, timezone
|
4 |
+
import logging
|
5 |
+
|
6 |
+
logger = logging.getLogger(__name__)
|
7 |
+
COLLECTION_NAME = 'chat_history-v3'
|
8 |
+
|
9 |
+
def get_chat_history(username: str, analysis_type: str = 'sidebar', limit: int = None) -> list:
    """
    Fetch a user's chat conversations, newest first.

    Args:
        username: Owner of the conversations
        analysis_type: Conversation category ('sidebar' by default)
        limit: Maximum number of conversations to return (None = all)

    Returns:
        list: Formatted conversations ({'timestamp', 'messages'} dicts)
    """
    try:
        collection = get_collection(COLLECTION_NAME)
        if collection is None:
            logger.error("No se pudo obtener la colección de chat")
            return []

        # Newest conversations first, optionally capped at `limit`.
        cursor = collection.find(
            {"username": username, "analysis_type": analysis_type}
        ).sort("timestamp", -1)
        if limit:
            cursor = cursor.limit(limit)

        formatted = []
        for doc in cursor:
            try:
                # Normalize each message to a {'role', 'content'} pair.
                messages = [
                    {
                        'role': m.get('role', 'unknown'),
                        'content': m.get('content', '')
                    }
                    for m in doc.get('messages', [])
                ]
                formatted.append({
                    'timestamp': doc['timestamp'],
                    'messages': messages
                })
            except Exception as e:
                logger.error(f"Error formateando chat: {str(e)}")
                continue

        return formatted

    except Exception as e:
        logger.error(f"Error al recuperar historial de chat: {str(e)}")
        return []
|
60 |
+
|
61 |
+
def store_chat_history(username: str, messages: list, analysis_type: str = 'sidebar') -> bool:
    """
    Persist a chat conversation.

    Args:
        username: Owner of the conversation
        messages: Message dicts ({'role', 'content'}) to store
        analysis_type: Conversation category

    Returns:
        bool: True when the document was inserted successfully
    """
    try:
        collection = get_collection(COLLECTION_NAME)
        if collection is None:
            logger.error("No se pudo obtener la colección de chat")
            return False

        # Normalize and timestamp each message before persisting.
        stamped_messages = []
        for msg in messages:
            stamped_messages.append({
                'role': msg.get('role', 'unknown'),
                'content': msg.get('content', ''),
                'timestamp': datetime.now(timezone.utc).isoformat()
            })

        document = {
            'username': username,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'messages': stamped_messages,
            'analysis_type': analysis_type
        }

        result = collection.insert_one(document)
        if result.inserted_id:
            logger.info(f"Historial de chat guardado con ID: {result.inserted_id} para el usuario: {username}")
            return True

        logger.error("No se pudo insertar el documento")
        return False

    except Exception as e:
        logger.error(f"Error al guardar historial de chat: {str(e)}")
        return False
|
107 |
+
|
108 |
+
|
109 |
+
#def get_chat_history(username, analysis_type=None, limit=10):
|
110 |
+
# query = {"username": username}
|
111 |
+
# if analysis_type:
|
112 |
+
# query["analysis_type"] = analysis_type
|
113 |
+
|
114 |
+
# return find_documents(COLLECTION_NAME, query, sort=[("timestamp", -1)], limit=limit)
|
115 |
+
|
116 |
# Agregar funciones para actualizar y eliminar chat si es necesario
|
modules/database/claude_recommendations_mongo_db.py
CHANGED
@@ -1,137 +1,137 @@
|
|
1 |
-
# modules/database/claude_recommendations_mongo_db.py
|
2 |
-
from datetime import datetime, timezone, timedelta
|
3 |
-
import logging
|
4 |
-
from .mongo_db import get_collection
|
5 |
-
|
6 |
-
logger = logging.getLogger(__name__)
|
7 |
-
COLLECTION_NAME = 'student_claude_recommendations'
|
8 |
-
|
9 |
-
def store_claude_recommendation(username, text, metrics, text_type, recommendations):
    """
    Store recommendations generated by Claude AI.

    Args:
        username: User name
        text: Analyzed text
        metrics: Analysis metrics
        text_type: Text type (academic_article, university_work, general_communication)
        recommendations: Recommendations generated by Claude

    Returns:
        bool: True if saved successfully, False otherwise
    """
    try:
        # Required parameters must be non-empty (metrics/text_type may be empty).
        if not all([username, text, recommendations]):
            logger.error("Faltan parámetros requeridos para guardar recomendaciones de Claude")
            return False

        collection = get_collection(COLLECTION_NAME)
        if collection is None:
            logger.error("No se pudo obtener la colección de recomendaciones de Claude")
            return False

        # Build the document to insert.
        document = {
            'username': username,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'text': text,
            'metrics': metrics or {},
            'text_type': text_type,
            'recommendations': recommendations,
            'analysis_type': 'claude_recommendation'
        }

        # Insert the document.
        result = collection.insert_one(document)
        if result.inserted_id:
            logger.info(f"""
                Recomendaciones de Claude guardadas:
                - Usuario: {username}
                - ID: {result.inserted_id}
                - Tipo de texto: {text_type}
                - Longitud del texto: {len(text)}
            """)

            # Best-effort read-back verification; a failure only logs a
            # warning and does not change the return value.
            storage_verified = verify_recommendation_storage(username)
            if not storage_verified:
                logger.warning("Verificación de almacenamiento de recomendaciones falló")

            return True

        logger.error("No se pudo insertar el documento de recomendaciones")
        return False

    except Exception as e:
        logger.error(f"Error guardando recomendaciones de Claude: {str(e)}")
        return False
|
69 |
-
|
70 |
-
def verify_recommendation_storage(username):
    """
    Verify that recommendations are being stored correctly.

    Checks for at least one document written for the user within the
    last 5 minutes.

    Args:
        username: User name

    Returns:
        bool: True if the verification succeeds, False otherwise
    """
    try:
        collection = get_collection(COLLECTION_NAME)
        if collection is None:
            logger.error("No se pudo obtener la colección para verificación de recomendaciones")
            return False

        # Look for the user's most recent document within the last 5 minutes.
        # Timestamps are stored as ISO-8601 strings, so string $gte
        # comparison matches chronological order.
        timestamp_threshold = (datetime.now(timezone.utc) - timedelta(minutes=5)).isoformat()
        recent_docs = collection.find({
            'username': username,
            'timestamp': {'$gte': timestamp_threshold}
        }).sort('timestamp', -1).limit(1)

        docs = list(recent_docs)
        if docs:
            logger.info(f"""
                Último documento de recomendaciones guardado:
                - ID: {docs[0]['_id']}
                - Timestamp: {docs[0]['timestamp']}
                - Tipo de texto: {docs[0].get('text_type', 'N/A')}
            """)
            return True

        logger.warning(f"No se encontraron documentos recientes de recomendaciones para {username}")
        return False

    except Exception as e:
        logger.error(f"Error verificando almacenamiento de recomendaciones: {str(e)}")
        return False
|
109 |
-
|
110 |
-
def get_claude_recommendations(username, limit=10):
    """
    Return the most recent Claude recommendations for a user.

    Args:
        username: User whose recommendations are fetched
        limit: Maximum number of recommendations to return

    Returns:
        list: Recommendation documents, newest first (empty on error)
    """
    try:
        collection = get_collection(COLLECTION_NAME)
        if collection is None:
            logger.error("No se pudo obtener la colección de recomendaciones")
            return []

        # Newest first, capped at `limit`.
        cursor = (
            collection.find({'username': username})
            .sort('timestamp', -1)
            .limit(limit)
        )

        recommendations = list(cursor)
        logger.info(f"Recuperadas {len(recommendations)} recomendaciones de Claude para {username}")
        return recommendations

    except Exception as e:
        logger.error(f"Error obteniendo recomendaciones de Claude: {str(e)}")
        return []
|
|
|
1 |
+
# modules/database/claude_recommendations_mongo_db.py
|
2 |
+
from datetime import datetime, timezone, timedelta
|
3 |
+
import logging
|
4 |
+
from .mongo_db import get_collection
|
5 |
+
|
6 |
+
logger = logging.getLogger(__name__)
|
7 |
+
COLLECTION_NAME = 'student_claude_recommendations'
|
8 |
+
|
9 |
+
def store_claude_recommendation(username, text, metrics, text_type, recommendations):
|
10 |
+
"""
|
11 |
+
Guarda las recomendaciones generadas por Claude AI.
|
12 |
+
|
13 |
+
Args:
|
14 |
+
username: Nombre del usuario
|
15 |
+
text: Texto analizado
|
16 |
+
metrics: Métricas del análisis
|
17 |
+
text_type: Tipo de texto (academic_article, university_work, general_communication)
|
18 |
+
recommendations: Recomendaciones generadas por Claude
|
19 |
+
|
20 |
+
Returns:
|
21 |
+
bool: True si se guardó correctamente, False en caso contrario
|
22 |
+
"""
|
23 |
+
try:
|
24 |
+
# Verificar parámetros
|
25 |
+
if not all([username, text, recommendations]):
|
26 |
+
logger.error("Faltan parámetros requeridos para guardar recomendaciones de Claude")
|
27 |
+
return False
|
28 |
+
|
29 |
+
collection = get_collection(COLLECTION_NAME)
|
30 |
+
if collection is None:
|
31 |
+
logger.error("No se pudo obtener la colección de recomendaciones de Claude")
|
32 |
+
return False
|
33 |
+
|
34 |
+
# Crear documento
|
35 |
+
document = {
|
36 |
+
'username': username,
|
37 |
+
'timestamp': datetime.now(timezone.utc).isoformat(),
|
38 |
+
'text': text,
|
39 |
+
'metrics': metrics or {},
|
40 |
+
'text_type': text_type,
|
41 |
+
'recommendations': recommendations,
|
42 |
+
'analysis_type': 'claude_recommendation'
|
43 |
+
}
|
44 |
+
|
45 |
+
# Insertar documento
|
46 |
+
result = collection.insert_one(document)
|
47 |
+
if result.inserted_id:
|
48 |
+
logger.info(f"""
|
49 |
+
Recomendaciones de Claude guardadas:
|
50 |
+
- Usuario: {username}
|
51 |
+
- ID: {result.inserted_id}
|
52 |
+
- Tipo de texto: {text_type}
|
53 |
+
- Longitud del texto: {len(text)}
|
54 |
+
""")
|
55 |
+
|
56 |
+
# Verificar almacenamiento
|
57 |
+
storage_verified = verify_recommendation_storage(username)
|
58 |
+
if not storage_verified:
|
59 |
+
logger.warning("Verificación de almacenamiento de recomendaciones falló")
|
60 |
+
|
61 |
+
return True
|
62 |
+
|
63 |
+
logger.error("No se pudo insertar el documento de recomendaciones")
|
64 |
+
return False
|
65 |
+
|
66 |
+
except Exception as e:
|
67 |
+
logger.error(f"Error guardando recomendaciones de Claude: {str(e)}")
|
68 |
+
return False
|
69 |
+
|
70 |
+
def verify_recommendation_storage(username):
    """Check that a recommendation document for *username* landed recently.

    Looks for the newest document written within the last five minutes.

    Args:
        username: Owner of the recommendation documents.

    Returns:
        bool: True when a fresh document exists, False otherwise
        (including on any error).
    """
    try:
        col = get_collection(COLLECTION_NAME)
        if col is None:
            logger.error("No se pudo obtener la colección para verificación de recomendaciones")
            return False

        # Anything older than five minutes does not count as a fresh write.
        cutoff = (datetime.now(timezone.utc) - timedelta(minutes=5)).isoformat()
        query = {'username': username, 'timestamp': {'$gte': cutoff}}
        latest = list(col.find(query).sort('timestamp', -1).limit(1))

        if not latest:
            logger.warning(f"No se encontraron documentos recientes de recomendaciones para {username}")
            return False

        doc = latest[0]
        logger.info(f"""
            Último documento de recomendaciones guardado:
            - ID: {doc['_id']}
            - Timestamp: {doc['timestamp']}
            - Tipo de texto: {doc.get('text_type', 'N/A')}
            """)
        return True

    except Exception as e:
        logger.error(f"Error verificando almacenamiento de recomendaciones: {str(e)}")
        return False
|
109 |
+
|
110 |
+
def get_claude_recommendations(username, limit=10):
    """Return the most recent Claude recommendation documents for a user.

    Args:
        username: Owner of the documents.
        limit: Maximum number of documents to return (newest first).

    Returns:
        list: Recommendation documents, or [] on any failure.
    """
    try:
        col = get_collection(COLLECTION_NAME)
        if col is None:
            logger.error("No se pudo obtener la colección de recomendaciones")
            return []

        cursor = col.find({'username': username}).sort('timestamp', -1).limit(limit)
        docs = list(cursor)
        logger.info(f"Recuperadas {len(docs)} recomendaciones de Claude para {username}")
        return docs

    except Exception as e:
        logger.error(f"Error obteniendo recomendaciones de Claude: {str(e)}")
        return []
|
modules/database/current_situation_mongo_db.py
CHANGED
@@ -1,137 +1,137 @@
|
|
1 |
-
# modules/database/current_situation_mongo_db.py
|
2 |
-
from datetime import datetime, timezone, timedelta
|
3 |
-
import logging
|
4 |
-
from .mongo_db import get_collection
|
5 |
-
|
6 |
-
logger = logging.getLogger(__name__)
|
7 |
-
COLLECTION_NAME = 'student_current_situation'
|
8 |
-
|
9 |
-
# En modules/database/current_situation_mongo_db.py
|
10 |
-
|
11 |
-
def store_current_situation_result(username, text, metrics, feedback):
|
12 |
-
"""
|
13 |
-
Guarda los resultados del análisis de situación actual.
|
14 |
-
"""
|
15 |
-
try:
|
16 |
-
# Verificar parámetros
|
17 |
-
if not all([username, text, metrics]):
|
18 |
-
logger.error("Faltan parámetros requeridos")
|
19 |
-
return False
|
20 |
-
|
21 |
-
collection = get_collection(COLLECTION_NAME)
|
22 |
-
if collection is None:
|
23 |
-
logger.error("No se pudo obtener la colección")
|
24 |
-
return False
|
25 |
-
|
26 |
-
# Crear documento
|
27 |
-
document = {
|
28 |
-
'username': username,
|
29 |
-
'timestamp': datetime.now(timezone.utc).isoformat(),
|
30 |
-
'text': text,
|
31 |
-
'metrics': metrics,
|
32 |
-
'feedback': feedback or {},
|
33 |
-
'analysis_type': 'current_situation'
|
34 |
-
}
|
35 |
-
|
36 |
-
# Insertar documento y verificar
|
37 |
-
result = collection.insert_one(document)
|
38 |
-
if result.inserted_id:
|
39 |
-
logger.info(f"""
|
40 |
-
Análisis de situación actual guardado:
|
41 |
-
- Usuario: {username}
|
42 |
-
- ID: {result.inserted_id}
|
43 |
-
- Longitud texto: {len(text)}
|
44 |
-
""")
|
45 |
-
|
46 |
-
# Verificar almacenamiento
|
47 |
-
storage_verified = verify_storage(username)
|
48 |
-
if not storage_verified:
|
49 |
-
logger.warning("Verificación de almacenamiento falló")
|
50 |
-
|
51 |
-
return True
|
52 |
-
|
53 |
-
logger.error("No se pudo insertar el documento")
|
54 |
-
return False
|
55 |
-
|
56 |
-
except Exception as e:
|
57 |
-
logger.error(f"Error guardando análisis de situación actual: {str(e)}")
|
58 |
-
return False
|
59 |
-
|
60 |
-
def verify_storage(username):
|
61 |
-
"""
|
62 |
-
Verifica que los datos se están guardando correctamente.
|
63 |
-
"""
|
64 |
-
try:
|
65 |
-
collection = get_collection(COLLECTION_NAME)
|
66 |
-
if collection is None:
|
67 |
-
logger.error("No se pudo obtener la colección para verificación")
|
68 |
-
return False
|
69 |
-
|
70 |
-
# Buscar documentos recientes del usuario
|
71 |
-
timestamp_threshold = (datetime.now(timezone.utc) - timedelta(minutes=5)).isoformat()
|
72 |
-
|
73 |
-
recent_docs = collection.find({
|
74 |
-
'username': username,
|
75 |
-
'timestamp': {'$gte': timestamp_threshold}
|
76 |
-
}).sort('timestamp', -1).limit(1)
|
77 |
-
|
78 |
-
docs = list(recent_docs)
|
79 |
-
if docs:
|
80 |
-
logger.info(f"""
|
81 |
-
Último documento guardado:
|
82 |
-
- ID: {docs[0]['_id']}
|
83 |
-
- Timestamp: {docs[0]['timestamp']}
|
84 |
-
- Métricas guardadas: {bool(docs[0].get('metrics'))}
|
85 |
-
""")
|
86 |
-
return True
|
87 |
-
|
88 |
-
logger.warning(f"No se encontraron documentos recientes para {username}")
|
89 |
-
return False
|
90 |
-
|
91 |
-
except Exception as e:
|
92 |
-
logger.error(f"Error verificando almacenamiento: {str(e)}")
|
93 |
-
return False
|
94 |
-
|
95 |
-
def get_current_situation_analysis(username, limit=5):
|
96 |
-
"""
|
97 |
-
Obtiene los análisis de situación actual de un usuario.
|
98 |
-
"""
|
99 |
-
try:
|
100 |
-
collection = get_collection(COLLECTION_NAME)
|
101 |
-
if collection is None:
|
102 |
-
logger.error("No se pudo obtener la colección")
|
103 |
-
return []
|
104 |
-
|
105 |
-
# Buscar documentos
|
106 |
-
query = {'username': username, 'analysis_type': 'current_situation'}
|
107 |
-
cursor = collection.find(query).sort('timestamp', -1)
|
108 |
-
|
109 |
-
# Aplicar límite si se especifica
|
110 |
-
if limit:
|
111 |
-
cursor = cursor.limit(limit)
|
112 |
-
|
113 |
-
# Convertir cursor a lista
|
114 |
-
return list(cursor)
|
115 |
-
|
116 |
-
except Exception as e:
|
117 |
-
logger.error(f"Error obteniendo análisis de situación actual: {str(e)}")
|
118 |
-
return []
|
119 |
-
|
120 |
-
def get_recent_situation_analysis(username, limit=5):
|
121 |
-
"""
|
122 |
-
Obtiene los análisis más recientes de un usuario.
|
123 |
-
"""
|
124 |
-
try:
|
125 |
-
collection = get_collection(COLLECTION_NAME)
|
126 |
-
if collection is None:
|
127 |
-
return []
|
128 |
-
|
129 |
-
results = collection.find(
|
130 |
-
{'username': username}
|
131 |
-
).sort('timestamp', -1).limit(limit)
|
132 |
-
|
133 |
-
return list(results)
|
134 |
-
|
135 |
-
except Exception as e:
|
136 |
-
logger.error(f"Error obteniendo análisis recientes: {str(e)}")
|
137 |
return []
|
|
|
1 |
+
# modules/database/current_situation_mongo_db.py
|
2 |
+
from datetime import datetime, timezone, timedelta
|
3 |
+
import logging
|
4 |
+
from .mongo_db import get_collection
|
5 |
+
|
6 |
+
logger = logging.getLogger(__name__)
|
7 |
+
COLLECTION_NAME = 'student_current_situation'
|
8 |
+
|
9 |
+
# En modules/database/current_situation_mongo_db.py
|
10 |
+
|
11 |
+
def store_current_situation_result(username, text, metrics, feedback):
    """Persist one 'current situation' analysis document for *username*.

    Args:
        username: Owner of the analysis (required).
        text: Analyzed text (required).
        metrics: Analysis metrics (required).
        feedback: Optional feedback payload; stored as {} when falsy.

    Returns:
        bool: True when the document was inserted, False otherwise.
    """
    try:
        # All three core fields must be present.
        if not all([username, text, metrics]):
            logger.error("Faltan parámetros requeridos")
            return False

        col = get_collection(COLLECTION_NAME)
        if col is None:
            logger.error("No se pudo obtener la colección")
            return False

        record = {
            'username': username,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'text': text,
            'metrics': metrics,
            'feedback': feedback or {},
            'analysis_type': 'current_situation',
        }

        outcome = col.insert_one(record)
        if not outcome.inserted_id:
            logger.error("No se pudo insertar el documento")
            return False

        logger.info(f"""
            Análisis de situación actual guardado:
            - Usuario: {username}
            - ID: {outcome.inserted_id}
            - Longitud texto: {len(text)}
            """)

        # Best-effort read-back check; a failure here is only logged.
        if not verify_storage(username):
            logger.warning("Verificación de almacenamiento falló")

        return True

    except Exception as e:
        logger.error(f"Error guardando análisis de situación actual: {str(e)}")
        return False
|
59 |
+
|
60 |
+
def verify_storage(username):
    """Check that a current-situation document for *username* landed recently.

    Looks for the newest document written within the last five minutes.

    Returns:
        bool: True when a fresh document exists, False otherwise
        (including on any error).
    """
    try:
        col = get_collection(COLLECTION_NAME)
        if col is None:
            logger.error("No se pudo obtener la colección para verificación")
            return False

        # Only writes from the last five minutes count as fresh.
        cutoff = (datetime.now(timezone.utc) - timedelta(minutes=5)).isoformat()

        query = {'username': username, 'timestamp': {'$gte': cutoff}}
        latest = list(col.find(query).sort('timestamp', -1).limit(1))

        if not latest:
            logger.warning(f"No se encontraron documentos recientes para {username}")
            return False

        doc = latest[0]
        logger.info(f"""
            Último documento guardado:
            - ID: {doc['_id']}
            - Timestamp: {doc['timestamp']}
            - Métricas guardadas: {bool(doc.get('metrics'))}
            """)
        return True

    except Exception as e:
        logger.error(f"Error verificando almacenamiento: {str(e)}")
        return False
|
94 |
+
|
95 |
+
def get_current_situation_analysis(username, limit=5):
    """Return stored 'current_situation' analyses for a user, newest first.

    Args:
        username: Owner of the analyses.
        limit: Max documents to return; a falsy value means no limit.

    Returns:
        list: Matching documents, or [] on any failure.
    """
    try:
        col = get_collection(COLLECTION_NAME)
        if col is None:
            logger.error("No se pudo obtener la colección")
            return []

        # Only this module's analysis type, newest first.
        cursor = col.find(
            {'username': username, 'analysis_type': 'current_situation'}
        ).sort('timestamp', -1)

        if limit:
            cursor = cursor.limit(limit)

        return list(cursor)

    except Exception as e:
        logger.error(f"Error obteniendo análisis de situación actual: {str(e)}")
        return []
|
119 |
+
|
120 |
+
def get_recent_situation_analysis(username, limit=5):
    """Return the most recent analysis documents for a user, newest first.

    Unlike get_current_situation_analysis, this does not filter by
    analysis_type — every document owned by *username* qualifies.

    Args:
        username: Owner of the documents.
        limit: Maximum number of documents to return.

    Returns:
        list: Matching documents, or [] on any failure.
    """
    try:
        collection = get_collection(COLLECTION_NAME)
        if collection is None:
            # Consistency fix: every sibling accessor logs this failure
            # instead of returning silently.
            logger.error("No se pudo obtener la colección")
            return []

        results = collection.find(
            {'username': username}
        ).sort('timestamp', -1).limit(limit)

        return list(results)

    except Exception as e:
        logger.error(f"Error obteniendo análisis recientes: {str(e)}")
        return []
|
modules/database/database_init.py
CHANGED
@@ -1,188 +1,188 @@
|
|
1 |
-
# 1. modules/database/database_init.py
|
2 |
-
|
3 |
-
import os
|
4 |
-
import logging
|
5 |
-
from azure.cosmos import CosmosClient
|
6 |
-
from pymongo import MongoClient
|
7 |
-
import certifi
|
8 |
-
|
9 |
-
logging.basicConfig(level=logging.DEBUG)
|
10 |
-
logger = logging.getLogger(__name__)
|
11 |
-
|
12 |
-
# Variables globales para Cosmos DB SQL API
|
13 |
-
cosmos_client = None
|
14 |
-
user_database = None
|
15 |
-
user_container = None
|
16 |
-
application_requests_container = None
|
17 |
-
user_feedback_container = None
|
18 |
-
user_sessions_container = None
|
19 |
-
|
20 |
-
# Variables globales para Cosmos DB MongoDB API
|
21 |
-
mongo_client = None
|
22 |
-
mongo_db = None
|
23 |
-
|
24 |
-
###################################################################
|
25 |
-
def verify_container_partition_key(container, expected_path):
|
26 |
-
"""Verifica la configuración de partition key de un contenedor"""
|
27 |
-
try:
|
28 |
-
container_props = container.read()
|
29 |
-
partition_key_paths = container_props['partitionKey']['paths']
|
30 |
-
logger.info(f"Container: {container.id}, Partition Key Paths: {partition_key_paths}")
|
31 |
-
return expected_path in partition_key_paths
|
32 |
-
except Exception as e:
|
33 |
-
logger.error(f"Error verificando partition key en {container.id}: {str(e)}")
|
34 |
-
return False
|
35 |
-
|
36 |
-
###################################################################
|
37 |
-
def get_container(container_name):
|
38 |
-
"""Obtiene un contenedor específico"""
|
39 |
-
logger.info(f"Solicitando contenedor: {container_name}")
|
40 |
-
|
41 |
-
if not initialize_cosmos_sql_connection():
|
42 |
-
logger.error("No se pudo inicializar la conexión")
|
43 |
-
return None
|
44 |
-
|
45 |
-
# Verificar estado de los contenedores
|
46 |
-
containers_status = {
|
47 |
-
"users": user_container is not None,
|
48 |
-
"users_sessions": user_sessions_container is not None,
|
49 |
-
"application_requests": application_requests_container is not None,
|
50 |
-
"user_feedback": user_feedback_container is not None # Añadido
|
51 |
-
}
|
52 |
-
|
53 |
-
logger.info(f"Estado actual de los contenedores: {containers_status}")
|
54 |
-
|
55 |
-
# Mapear nombres a contenedores
|
56 |
-
containers = {
|
57 |
-
"users": user_container,
|
58 |
-
"users_sessions": user_sessions_container,
|
59 |
-
"application_requests": application_requests_container,
|
60 |
-
"user_feedback": user_feedback_container # Añadido
|
61 |
-
}
|
62 |
-
|
63 |
-
container = containers.get(container_name)
|
64 |
-
|
65 |
-
if container is None:
|
66 |
-
logger.error(f"Contenedor '{container_name}' no encontrado o no inicializado")
|
67 |
-
logger.error(f"Contenedores disponibles: {[k for k, v in containers_status.items() if v]}")
|
68 |
-
return None
|
69 |
-
|
70 |
-
logger.info(f"Contenedor '{container_name}' obtenido exitosamente")
|
71 |
-
return container
|
72 |
-
###################################################################
|
73 |
-
|
74 |
-
def initialize_cosmos_sql_connection():
|
75 |
-
"""Inicializa la conexión a Cosmos DB SQL API"""
|
76 |
-
global cosmos_client, user_database, user_container, user_sessions_container, application_requests_container, user_feedback_container # Añadida aquí user_feedback_container
|
77 |
-
|
78 |
-
try:
|
79 |
-
# Verificar conexión existente
|
80 |
-
if all([
|
81 |
-
cosmos_client,
|
82 |
-
user_database,
|
83 |
-
user_container,
|
84 |
-
user_sessions_container,
|
85 |
-
application_requests_container,
|
86 |
-
user_feedback_container
|
87 |
-
]):
|
88 |
-
logger.debug("Todas las conexiones ya están inicializadas")
|
89 |
-
return True
|
90 |
-
|
91 |
-
# Obtener credenciales
|
92 |
-
cosmos_endpoint = os.environ.get("COSMOS_ENDPOINT")
|
93 |
-
cosmos_key = os.environ.get("COSMOS_KEY")
|
94 |
-
|
95 |
-
if not cosmos_endpoint or not cosmos_key:
|
96 |
-
raise ValueError("COSMOS_ENDPOINT y COSMOS_KEY deben estar configurados")
|
97 |
-
|
98 |
-
# Inicializar cliente y base de datos
|
99 |
-
cosmos_client = CosmosClient(cosmos_endpoint, cosmos_key)
|
100 |
-
user_database = cosmos_client.get_database_client("user_database")
|
101 |
-
|
102 |
-
# Inicializar contenedores
|
103 |
-
try:
|
104 |
-
user_container = user_database.get_container_client("users")
|
105 |
-
logger.info("Contenedor 'users' inicializado correctamente")
|
106 |
-
except Exception as e:
|
107 |
-
logger.error(f"Error inicializando contenedor 'users': {str(e)}")
|
108 |
-
user_container = None
|
109 |
-
|
110 |
-
try:
|
111 |
-
user_sessions_container = user_database.get_container_client("users_sessions")
|
112 |
-
logger.info("Contenedor 'users_sessions' inicializado correctamente")
|
113 |
-
except Exception as e:
|
114 |
-
logger.error(f"Error inicializando contenedor 'users_sessions': {str(e)}")
|
115 |
-
user_sessions_container = None
|
116 |
-
|
117 |
-
try:
|
118 |
-
application_requests_container = user_database.get_container_client("application_requests")
|
119 |
-
logger.info("Contenedor 'application_requests' inicializado correctamente")
|
120 |
-
except Exception as e:
|
121 |
-
logger.error(f"Error inicializando contenedor 'application_requests': {str(e)}")
|
122 |
-
application_requests_container = None
|
123 |
-
|
124 |
-
try:
|
125 |
-
user_feedback_container = user_database.get_container_client("user_feedback")
|
126 |
-
logger.info("Contenedor 'user_feedback' inicializado correctamente")
|
127 |
-
except Exception as e:
|
128 |
-
logger.error(f"Error inicializando contenedor 'user_feedback': {str(e)}")
|
129 |
-
user_feedback_container = None
|
130 |
-
|
131 |
-
# Verificar el estado de los contenedores
|
132 |
-
containers_status = {
|
133 |
-
'users': user_container is not None,
|
134 |
-
'users_sessions': user_sessions_container is not None,
|
135 |
-
'application_requests': application_requests_container is not None,
|
136 |
-
'user_feedback': user_feedback_container is not None
|
137 |
-
}
|
138 |
-
|
139 |
-
logger.info(f"Estado de los contenedores: {containers_status}")
|
140 |
-
|
141 |
-
if all(containers_status.values()):
|
142 |
-
logger.info("Todos los contenedores inicializados correctamente")
|
143 |
-
return True
|
144 |
-
else:
|
145 |
-
logger.error("No se pudieron inicializar todos los contenedores")
|
146 |
-
return False
|
147 |
-
|
148 |
-
except Exception as e:
|
149 |
-
logger.error(f"Error al conectar con Cosmos DB SQL API: {str(e)}")
|
150 |
-
return False
|
151 |
-
|
152 |
-
|
153 |
-
###################################################################
|
154 |
-
def initialize_mongodb_connection():
|
155 |
-
"""Inicializa la conexión a MongoDB"""
|
156 |
-
global mongo_client, mongo_db
|
157 |
-
try:
|
158 |
-
connection_string = os.getenv("MONGODB_CONNECTION_STRING")
|
159 |
-
if not connection_string:
|
160 |
-
raise ValueError("MONGODB_CONNECTION_STRING debe estar configurado")
|
161 |
-
|
162 |
-
mongo_client = MongoClient(
|
163 |
-
connection_string,
|
164 |
-
tls=True,
|
165 |
-
tlsCAFile=certifi.where(),
|
166 |
-
retryWrites=False,
|
167 |
-
serverSelectionTimeoutMS=5000,
|
168 |
-
connectTimeoutMS=10000,
|
169 |
-
socketTimeoutMS=10000
|
170 |
-
)
|
171 |
-
|
172 |
-
mongo_db = mongo_client['aideatext_db']
|
173 |
-
return True
|
174 |
-
except Exception as e:
|
175 |
-
logger.error(f"Error conectando a MongoDB: {str(e)}")
|
176 |
-
return False
|
177 |
-
|
178 |
-
###################################################################
|
179 |
-
def initialize_database_connections():
|
180 |
-
"""Inicializa todas las conexiones"""
|
181 |
-
return initialize_cosmos_sql_connection() and initialize_mongodb_connection()
|
182 |
-
|
183 |
-
###################################################################
|
184 |
-
def get_mongodb():
|
185 |
-
"""Obtiene la conexión MongoDB"""
|
186 |
-
if mongo_db is None:
|
187 |
-
initialize_mongodb_connection()
|
188 |
return mongo_db
|
|
|
1 |
+
# 1. modules/database/database_init.py
|
2 |
+
|
3 |
+
import os
|
4 |
+
import logging
|
5 |
+
from azure.cosmos import CosmosClient
|
6 |
+
from pymongo import MongoClient
|
7 |
+
import certifi
|
8 |
+
|
9 |
+
logging.basicConfig(level=logging.DEBUG)
|
10 |
+
logger = logging.getLogger(__name__)
|
11 |
+
|
12 |
+
# Variables globales para Cosmos DB SQL API
|
13 |
+
cosmos_client = None
|
14 |
+
user_database = None
|
15 |
+
user_container = None
|
16 |
+
application_requests_container = None
|
17 |
+
user_feedback_container = None
|
18 |
+
user_sessions_container = None
|
19 |
+
|
20 |
+
# Variables globales para Cosmos DB MongoDB API
|
21 |
+
mongo_client = None
|
22 |
+
mongo_db = None
|
23 |
+
|
24 |
+
###################################################################
|
25 |
+
def verify_container_partition_key(container, expected_path):
    """Return True when *expected_path* is among the container's partition key paths.

    Args:
        container: Cosmos DB container client to inspect.
        expected_path: Partition key path to look for (e.g. "/id").

    Returns:
        bool: True when present; False when absent or on any error.
    """
    try:
        props = container.read()
        paths = props['partitionKey']['paths']
        logger.info(f"Container: {container.id}, Partition Key Paths: {paths}")
        return expected_path in paths
    except Exception as e:
        logger.error(f"Error verificando partition key en {container.id}: {str(e)}")
        return False
|
35 |
+
|
36 |
+
###################################################################
|
37 |
+
def get_container(container_name):
    """Return the named Cosmos DB container client, or None when unavailable.

    Ensures the SQL API connection is initialized first, then resolves the
    name against the module-level container handles.
    """
    logger.info(f"Solicitando contenedor: {container_name}")

    if not initialize_cosmos_sql_connection():
        logger.error("No se pudo inicializar la conexión")
        return None

    # Single registry: name -> module-level handle (None when it failed
    # to initialize).
    registry = {
        "users": user_container,
        "users_sessions": user_sessions_container,
        "application_requests": application_requests_container,
        "user_feedback": user_feedback_container,
    }
    status = {name: handle is not None for name, handle in registry.items()}

    logger.info(f"Estado actual de los contenedores: {status}")

    selected = registry.get(container_name)

    if selected is None:
        logger.error(f"Contenedor '{container_name}' no encontrado o no inicializado")
        logger.error(f"Contenedores disponibles: {[k for k, v in status.items() if v]}")
        return None

    logger.info(f"Contenedor '{container_name}' obtenido exitosamente")
    return selected
|
72 |
+
###################################################################
|
73 |
+
|
74 |
+
def initialize_cosmos_sql_connection():
    """Initialize the Cosmos DB SQL API connection.

    Rebinds the module-level client, database and container handles.
    Returns True only when every container client was obtained; any
    container that failed is left as None and the function returns False.
    """
    global cosmos_client, user_database, user_container, user_sessions_container, application_requests_container, user_feedback_container

    try:
        # Reuse the existing connection when every handle is already set.
        if all([
            cosmos_client,
            user_database,
            user_container,
            user_sessions_container,
            application_requests_container,
            user_feedback_container
        ]):
            logger.debug("Todas las conexiones ya están inicializadas")
            return True

        # Read credentials from the environment.
        cosmos_endpoint = os.environ.get("COSMOS_ENDPOINT")
        cosmos_key = os.environ.get("COSMOS_KEY")

        if not cosmos_endpoint or not cosmos_key:
            raise ValueError("COSMOS_ENDPOINT y COSMOS_KEY deben estar configurados")

        # Create client and database handles.
        cosmos_client = CosmosClient(cosmos_endpoint, cosmos_key)
        user_database = cosmos_client.get_database_client("user_database")

        # Each container is initialized independently so that one failure
        # does not prevent the others from being set up.
        try:
            user_container = user_database.get_container_client("users")
            logger.info("Contenedor 'users' inicializado correctamente")
        except Exception as e:
            logger.error(f"Error inicializando contenedor 'users': {str(e)}")
            user_container = None

        try:
            user_sessions_container = user_database.get_container_client("users_sessions")
            logger.info("Contenedor 'users_sessions' inicializado correctamente")
        except Exception as e:
            logger.error(f"Error inicializando contenedor 'users_sessions': {str(e)}")
            user_sessions_container = None

        try:
            application_requests_container = user_database.get_container_client("application_requests")
            logger.info("Contenedor 'application_requests' inicializado correctamente")
        except Exception as e:
            logger.error(f"Error inicializando contenedor 'application_requests': {str(e)}")
            application_requests_container = None

        try:
            user_feedback_container = user_database.get_container_client("user_feedback")
            logger.info("Contenedor 'user_feedback' inicializado correctamente")
        except Exception as e:
            logger.error(f"Error inicializando contenedor 'user_feedback': {str(e)}")
            user_feedback_container = None

        # Report which containers came up.
        containers_status = {
            'users': user_container is not None,
            'users_sessions': user_sessions_container is not None,
            'application_requests': application_requests_container is not None,
            'user_feedback': user_feedback_container is not None
        }

        logger.info(f"Estado de los contenedores: {containers_status}")

        if all(containers_status.values()):
            logger.info("Todos los contenedores inicializados correctamente")
            return True
        else:
            logger.error("No se pudieron inicializar todos los contenedores")
            return False

    except Exception as e:
        logger.error(f"Error al conectar con Cosmos DB SQL API: {str(e)}")
        return False
|
151 |
+
|
152 |
+
|
153 |
+
###################################################################
|
154 |
+
def initialize_mongodb_connection():
    """Initialize the MongoDB connection, rebinding the module globals.

    Returns:
        bool: True on success, False when configuration is missing or
        the connection attempt raised.
    """
    global mongo_client, mongo_db
    try:
        connection_string = os.getenv("MONGODB_CONNECTION_STRING")
        if not connection_string:
            raise ValueError("MONGODB_CONNECTION_STRING debe estar configurado")

        # TLS validated against the certifi bundle; retryWrites disabled
        # (presumably for Cosmos DB Mongo API compatibility — confirm).
        options = dict(
            tls=True,
            tlsCAFile=certifi.where(),
            retryWrites=False,
            serverSelectionTimeoutMS=5000,
            connectTimeoutMS=10000,
            socketTimeoutMS=10000,
        )
        mongo_client = MongoClient(connection_string, **options)

        mongo_db = mongo_client['aideatext_db']
        return True
    except Exception as e:
        logger.error(f"Error conectando a MongoDB: {str(e)}")
        return False
|
177 |
+
|
178 |
+
###################################################################
|
179 |
+
def initialize_database_connections():
    """Initialize every backend connection; True only when both succeed."""
    # Short-circuits: MongoDB is not attempted if the SQL API setup fails.
    sql_ok = initialize_cosmos_sql_connection()
    return sql_ok and initialize_mongodb_connection()
|
182 |
+
|
183 |
+
###################################################################
|
184 |
+
def get_mongodb():
    """Return the shared MongoDB database handle, connecting lazily.

    May still return None when the lazy connection attempt fails.
    """
    if mongo_db is None:
        # First use (or a previous failure): try to connect now.
        initialize_mongodb_connection()
    return mongo_db
|
modules/database/discourse_mongo_db.py
CHANGED
@@ -1,173 +1,152 @@
|
|
1 |
-
# modules/database/discourse_mongo_db.py
|
2 |
-
|
3 |
-
import
|
4 |
-
import
|
5 |
-
from
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
logger
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
|
52 |
-
|
53 |
-
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
|
70 |
-
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
|
103 |
-
|
104 |
-
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
112 |
-
|
113 |
-
|
114 |
-
|
115 |
-
|
116 |
-
|
117 |
-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
-
|
122 |
-
|
123 |
-
return
|
124 |
-
|
125 |
-
|
126 |
-
|
127 |
-
|
128 |
-
|
129 |
-
|
130 |
-
|
131 |
-
|
132 |
-
|
133 |
-
|
134 |
-
|
135 |
-
|
136 |
-
|
137 |
-
|
138 |
-
|
139 |
-
|
140 |
-
|
141 |
-
|
142 |
-
|
143 |
-
|
144 |
-
|
145 |
-
|
146 |
-
|
147 |
-
|
148 |
-
|
149 |
-
|
150 |
-
|
151 |
-
|
152 |
-
"""
|
153 |
-
Actualiza un análisis del discurso existente.
|
154 |
-
"""
|
155 |
-
try:
|
156 |
-
query = {"_id": analysis_id}
|
157 |
-
update = {"$set": update_data}
|
158 |
-
return update_document(COLLECTION_NAME, query, update)
|
159 |
-
except Exception as e:
|
160 |
-
logger.error(f"Error al actualizar análisis del discurso: {str(e)}")
|
161 |
-
return False
|
162 |
-
|
163 |
-
###########################################################################
|
164 |
-
def delete_student_discourse_analysis(analysis_id):
    """Delete one discourse analysis document by its _id.

    Args:
        analysis_id: The _id of the document to remove.

    Returns:
        The delete_document result, or False on any error.
    """
    try:
        return delete_document(COLLECTION_NAME, {"_id": analysis_id})
    except Exception as e:
        logger.error(f"Error al eliminar análisis del discurso: {str(e)}")
        return False
|
|
|
1 |
+
# modules/database/discourse_mongo_db.py
|
2 |
+
import base64
|
3 |
+
import logging
|
4 |
+
from datetime import datetime, timezone
|
5 |
+
from ..database.mongo_db import get_collection, insert_document, find_documents
|
6 |
+
|
7 |
+
logger = logging.getLogger(__name__)
|
8 |
+
|
9 |
+
COLLECTION_NAME = 'student_discourse_analysis'
|
10 |
+
|
11 |
+
########################################################################
|
12 |
+
|
13 |
+
def store_student_discourse_result(username, text1, text2, analysis_result):
    """Persist a discourse-comparison analysis for *username* in MongoDB.

    Graph payloads that arrive as bytes are stored base64-encoded.

    Args:
        username: Owner of the analysis.
        text1: First analyzed text.
        text2: Second analyzed text.
        analysis_result: Dict with 'success', key concepts and graph bytes.

    Returns:
        bool: True on success; False when the analysis failed or could
        not be stored.
    """
    try:
        # Refuse to persist a failed analysis.
        if not analysis_result.get('success', False):
            logger.error("No se puede guardar un análisis fallido")
            return False

        logger.info(f"Almacenando análisis de discurso para {username}")

        document = {
            'username': username,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'text1': text1,
            'text2': text2,
            'key_concepts1': analysis_result.get('key_concepts1', []),
            'key_concepts2': analysis_result.get('key_concepts2', []),
        }

        # Graphs arrive as raw bytes; store them as base64 text.
        for graph_key in ('graph1', 'graph2', 'combined_graph'):
            payload = analysis_result.get(graph_key)
            if payload is None:
                # Covers both a missing key and an explicit None value.
                logger.info(f"{graph_key} no presente en el resultado del análisis")
            elif isinstance(payload, bytes):
                logger.info(f"Codificando {graph_key} como base64")
                document[graph_key] = base64.b64encode(payload).decode('utf-8')
                logger.info(f"{graph_key} codificado correctamente, longitud: {len(document[graph_key])}")
            else:
                logger.warning(f"{graph_key} no es de tipo bytes, es: {type(payload)}")

        col = get_collection(COLLECTION_NAME)
        if col is None:
            logger.error("No se pudo obtener la colección")
            return False

        outcome = col.insert_one(document)
        logger.info(f"Análisis de discurso guardado con ID: {outcome.inserted_id}")
        return True

    except Exception as e:
        logger.error(f"Error guardando análisis de discurso: {str(e)}")
        return False
|
60 |
+
|
61 |
+
#################################################################################
|
62 |
+
|
63 |
+
# Corrección 1: Actualizar get_student_discourse_analysis para recuperar todos los campos necesarios
|
64 |
+
|
65 |
+
def get_student_discourse_analysis(username, limit=10):
    """
    Retrieve a student's discourse analyses, most recent first.

    Args:
        username: Student identifier used as the query key.
        limit: Maximum number of documents to return; pass None to
            return every stored analysis.

    Returns:
        list: Analysis documents with graph fields decoded back to raw
        bytes, or [] on any failure.
    """
    try:
        logger.info(f"Recuperando análisis de discurso para {username}")

        collection = get_collection(COLLECTION_NAME)
        if collection is None:
            logger.error("No se pudo obtener la colección")
            return []

        query = {"username": username}
        cursor = collection.find(query).sort("timestamp", -1)
        # BUGFIX: only apply a limit when one was given. pymongo's
        # Cursor.limit() raises TypeError for None, which broke callers
        # such as get_student_discourse_data(username) -> limit=None.
        if limit is not None:
            cursor = cursor.limit(limit)
        documents = list(cursor)
        logger.info(f"Recuperados {len(documents)} documentos de análisis de discurso")

        # Decode the base64-encoded graphs back to bytes for use in the app.
        for doc in documents:
            for graph_key in ['graph1', 'graph2', 'combined_graph']:
                if graph_key in doc and doc[graph_key]:
                    try:
                        # Stored form is a base64 string (see the store path).
                        if isinstance(doc[graph_key], str):
                            logger.info(f"Decodificando {graph_key} de base64 a bytes")
                            doc[graph_key] = base64.b64decode(doc[graph_key])
                            logger.info(f"{graph_key} decodificado correctamente, tamaño: {len(doc[graph_key])} bytes")
                        elif not isinstance(doc[graph_key], bytes):
                            logger.warning(f"{graph_key} no es ni string ni bytes: {type(doc[graph_key])}")
                    except Exception as decode_error:
                        logger.error(f"Error decodificando {graph_key}: {str(decode_error)}")
                        # Null out unusable graph data instead of propagating.
                        doc[graph_key] = None

        return documents

    except Exception as e:
        logger.error(f"Error recuperando análisis de discurso: {str(e)}")
        return []
|
102 |
+
|
103 |
+
#####################################################################################
|
104 |
+
|
105 |
+
def get_student_discourse_data(username):
    """
    Build a lightweight summary of every discourse analysis stored for
    *username*: texts and key concepts only, no graph payloads.

    Returns:
        dict: {'entries': [...]}, empty list on any failure.
    """
    try:
        entries = [
            {
                'timestamp': item['timestamp'],
                'text1': item.get('text1', ''),
                'text2': item.get('text2', ''),
                'key_concepts1': item.get('key_concepts1', []),
                'key_concepts2': item.get('key_concepts2', []),
            }
            for item in get_student_discourse_analysis(username, limit=None)
        ]
        return {'entries': entries}

    except Exception as e:
        logger.error(f"Error al obtener datos del discurso: {str(e)}")
        return {'entries': []}
|
128 |
+
|
129 |
+
###########################################################################
|
130 |
+
def update_student_discourse_analysis(analysis_id, update_data):
    """
    Apply *update_data* to an existing discourse analysis document.

    Returns:
        The result of update_document(), or False when the update raises.
    """
    try:
        return update_document(
            COLLECTION_NAME,
            {"_id": analysis_id},
            {"$set": update_data},
        )
    except Exception as e:
        logger.error(f"Error al actualizar análisis del discurso: {str(e)}")
        return False
|
141 |
+
|
142 |
+
###########################################################################
|
143 |
+
def delete_student_discourse_analysis(analysis_id):
    """
    Remove a single discourse analysis document by its _id.

    Returns:
        The result of delete_document(), or False when the deletion raises.
    """
    try:
        return delete_document(COLLECTION_NAME, {"_id": analysis_id})
    except Exception as e:
        logger.error(f"Error al eliminar análisis del discurso: {str(e)}")
        return False
|
modules/database/mongo_db.py
CHANGED
@@ -1,62 +1,62 @@
|
|
1 |
-
from .database_init import get_mongodb
|
2 |
-
import logging
|
3 |
-
|
4 |
-
logger = logging.getLogger(__name__)
|
5 |
-
|
6 |
-
def get_collection(collection_name):
|
7 |
-
try:
|
8 |
-
db = get_mongodb()
|
9 |
-
if db is None:
|
10 |
-
logger.error(f"No se pudo obtener la base de datos para {collection_name}")
|
11 |
-
return None
|
12 |
-
|
13 |
-
collection = db[collection_name]
|
14 |
-
logger.info(f"Colección {collection_name} obtenida exitosamente")
|
15 |
-
return collection
|
16 |
-
|
17 |
-
except Exception as e:
|
18 |
-
logger.error(f"Error al obtener colección {collection_name}: {str(e)}")
|
19 |
-
return None
|
20 |
-
|
21 |
-
def insert_document(collection_name, document):
|
22 |
-
collection = get_collection(collection_name)
|
23 |
-
try:
|
24 |
-
result = collection.insert_one(document)
|
25 |
-
logger.info(f"Documento insertado en {collection_name} con ID: {result.inserted_id}")
|
26 |
-
return result.inserted_id
|
27 |
-
except Exception as e:
|
28 |
-
logger.error(f"Error al insertar documento en {collection_name}: {str(e)}")
|
29 |
-
return None
|
30 |
-
|
31 |
-
def find_documents(collection_name, query, sort=None, limit=None):
|
32 |
-
collection = get_collection(collection_name)
|
33 |
-
try:
|
34 |
-
cursor = collection.find(query)
|
35 |
-
if sort:
|
36 |
-
cursor = cursor.sort(sort)
|
37 |
-
if limit:
|
38 |
-
cursor = cursor.limit(limit)
|
39 |
-
return list(cursor)
|
40 |
-
except Exception as e:
|
41 |
-
logger.error(f"Error al buscar documentos en {collection_name}: {str(e)}")
|
42 |
-
return []
|
43 |
-
|
44 |
-
def update_document(collection_name, query, update):
|
45 |
-
collection = get_collection(collection_name)
|
46 |
-
try:
|
47 |
-
result = collection.update_one(query, update)
|
48 |
-
logger.info(f"Documento actualizado en {collection_name}: {result.modified_count} modificado(s)")
|
49 |
-
return result.modified_count
|
50 |
-
except Exception as e:
|
51 |
-
logger.error(f"Error al actualizar documento en {collection_name}: {str(e)}")
|
52 |
-
return 0
|
53 |
-
|
54 |
-
def delete_document(collection_name, query):
|
55 |
-
collection = get_collection(collection_name)
|
56 |
-
try:
|
57 |
-
result = collection.delete_one(query)
|
58 |
-
logger.info(f"Documento eliminado de {collection_name}: {result.deleted_count} eliminado(s)")
|
59 |
-
return result.deleted_count
|
60 |
-
except Exception as e:
|
61 |
-
logger.error(f"Error al eliminar documento de {collection_name}: {str(e)}")
|
62 |
return 0
|
|
|
1 |
+
from .database_init import get_mongodb
|
2 |
+
import logging
|
3 |
+
|
4 |
+
logger = logging.getLogger(__name__)
|
5 |
+
|
6 |
+
def get_collection(collection_name):
    """
    Return the named MongoDB collection, or None when the database
    handle is unavailable or access fails.
    """
    try:
        database = get_mongodb()
        if database is None:
            logger.error(f"No se pudo obtener la base de datos para {collection_name}")
            return None

        result = database[collection_name]
        logger.info(f"Colección {collection_name} obtenida exitosamente")
        return result

    except Exception as e:
        logger.error(f"Error al obtener colección {collection_name}: {str(e)}")
        return None
|
20 |
+
|
21 |
+
def insert_document(collection_name, document):
    """
    Insert *document* into the named collection.

    Returns:
        The new document's inserted _id, or None when the collection is
        unavailable or the insert fails.
    """
    collection = get_collection(collection_name)
    # Guard added: get_collection() may return None; calling insert_one
    # on None raised AttributeError and produced a misleading log line.
    if collection is None:
        logger.error(f"Error al insertar documento en {collection_name}: colección no disponible")
        return None
    try:
        result = collection.insert_one(document)
        logger.info(f"Documento insertado en {collection_name} con ID: {result.inserted_id}")
        return result.inserted_id
    except Exception as e:
        logger.error(f"Error al insertar documento en {collection_name}: {str(e)}")
        return None
|
30 |
+
|
31 |
+
def find_documents(collection_name, query, sort=None, limit=None):
    """
    Find documents matching *query*, optionally sorted and limited.

    Args:
        collection_name: Target collection.
        query: MongoDB filter document.
        sort: Optional sort specification passed to Cursor.sort().
        limit: Optional maximum number of results.

    Returns:
        list of matching documents; [] when the collection is
        unavailable or the query fails.
    """
    collection = get_collection(collection_name)
    # Guard added: get_collection() may return None; calling find on
    # None raised AttributeError with a misleading error message.
    if collection is None:
        logger.error(f"Error al buscar documentos en {collection_name}: colección no disponible")
        return []
    try:
        cursor = collection.find(query)
        if sort:
            cursor = cursor.sort(sort)
        if limit:
            cursor = cursor.limit(limit)
        return list(cursor)
    except Exception as e:
        logger.error(f"Error al buscar documentos en {collection_name}: {str(e)}")
        return []
|
43 |
+
|
44 |
+
def update_document(collection_name, query, update):
    """
    Apply *update* to the first document matching *query*.

    Returns:
        Number of documents modified; 0 when the collection is
        unavailable or the update fails.
    """
    collection = get_collection(collection_name)
    # Guard added: get_collection() may return None; calling update_one
    # on None raised AttributeError and masked the real cause.
    if collection is None:
        logger.error(f"Error al actualizar documento en {collection_name}: colección no disponible")
        return 0
    try:
        result = collection.update_one(query, update)
        logger.info(f"Documento actualizado en {collection_name}: {result.modified_count} modificado(s)")
        return result.modified_count
    except Exception as e:
        logger.error(f"Error al actualizar documento en {collection_name}: {str(e)}")
        return 0
|
53 |
+
|
54 |
+
def delete_document(collection_name, query):
    """
    Delete the first document matching *query*.

    Returns:
        Number of documents deleted; 0 when the collection is
        unavailable or the deletion fails.
    """
    collection = get_collection(collection_name)
    # Guard added: get_collection() may return None; calling delete_one
    # on None raised AttributeError and masked the real cause.
    if collection is None:
        logger.error(f"Error al eliminar documento de {collection_name}: colección no disponible")
        return 0
    try:
        result = collection.delete_one(query)
        logger.info(f"Documento eliminado de {collection_name}: {result.deleted_count} eliminado(s)")
        return result.deleted_count
    except Exception as e:
        logger.error(f"Error al eliminar documento de {collection_name}: {str(e)}")
        return 0
|
modules/database/morphosyntax_iterative_mongo_db.py
CHANGED
@@ -1,171 +1,171 @@
|
|
1 |
-
# modules/database/morphosyntax_iterative_mongo_db.py
|
2 |
-
|
3 |
-
|
4 |
-
from datetime import datetime, timezone
|
5 |
-
import logging
|
6 |
-
from bson import ObjectId # <--- Importar ObjectId
|
7 |
-
from .mongo_db import get_collection, insert_document, find_documents, update_document, delete_document
|
8 |
-
|
9 |
-
logger = logging.getLogger(__name__)
|
10 |
-
|
11 |
-
BASE_COLLECTION = 'student_morphosyntax_analysis_base'
|
12 |
-
ITERATION_COLLECTION = 'student_morphosyntax_iterations'
|
13 |
-
|
14 |
-
def store_student_morphosyntax_base(username, text, arc_diagrams):
|
15 |
-
"""Almacena el análisis morfosintáctico base y retorna su ObjectId."""
|
16 |
-
try:
|
17 |
-
base_document = {
|
18 |
-
'username': username,
|
19 |
-
'timestamp': datetime.now(timezone.utc).isoformat(),
|
20 |
-
'text': text,
|
21 |
-
'arc_diagrams': arc_diagrams,
|
22 |
-
'analysis_type': 'morphosyntax_base',
|
23 |
-
'has_iterations': False
|
24 |
-
}
|
25 |
-
collection = get_collection(BASE_COLLECTION)
|
26 |
-
result = collection.insert_one(base_document)
|
27 |
-
|
28 |
-
logger.info(f"Análisis base guardado para {username}")
|
29 |
-
# Retornamos el ObjectId directamente (NO str)
|
30 |
-
return result.inserted_id
|
31 |
-
|
32 |
-
except Exception as e:
|
33 |
-
logger.error(f"Error almacenando análisis base: {str(e)}")
|
34 |
-
return None
|
35 |
-
|
36 |
-
def store_student_morphosyntax_iteration(username, base_id, original_text, iteration_text, arc_diagrams):
|
37 |
-
"""
|
38 |
-
Almacena una iteración de análisis morfosintáctico.
|
39 |
-
base_id: ObjectId de la base (o string convertible a ObjectId).
|
40 |
-
"""
|
41 |
-
try:
|
42 |
-
# Convertir a ObjectId si viene como string
|
43 |
-
if isinstance(base_id, str):
|
44 |
-
base_id = ObjectId(base_id)
|
45 |
-
|
46 |
-
iteration_document = {
|
47 |
-
'username': username,
|
48 |
-
'base_id': base_id, # Guardar el ObjectId en la iteración
|
49 |
-
'timestamp': datetime.now(timezone.utc).isoformat(),
|
50 |
-
'original_text': original_text,
|
51 |
-
'iteration_text': iteration_text,
|
52 |
-
'arc_diagrams': arc_diagrams,
|
53 |
-
'analysis_type': 'morphosyntax_iteration'
|
54 |
-
}
|
55 |
-
collection = get_collection(ITERATION_COLLECTION)
|
56 |
-
result = collection.insert_one(iteration_document)
|
57 |
-
|
58 |
-
# Actualizar documento base (usando ObjectId)
|
59 |
-
base_collection = get_collection(BASE_COLLECTION)
|
60 |
-
base_collection.update_one(
|
61 |
-
{'_id': base_id, 'username': username},
|
62 |
-
{'$set': {'has_iterations': True}}
|
63 |
-
)
|
64 |
-
|
65 |
-
logger.info(f"Iteración guardada para {username}, base_id: {base_id}")
|
66 |
-
return result.inserted_id # Retornar el ObjectId de la iteración
|
67 |
-
|
68 |
-
except Exception as e:
|
69 |
-
logger.error(f"Error almacenando iteración: {str(e)}")
|
70 |
-
return None
|
71 |
-
|
72 |
-
def get_student_morphosyntax_analysis(username, limit=10):
|
73 |
-
"""
|
74 |
-
Obtiene los análisis base y sus iteraciones.
|
75 |
-
Returns: Lista de análisis con sus iteraciones.
|
76 |
-
"""
|
77 |
-
try:
|
78 |
-
base_collection = get_collection(BASE_COLLECTION)
|
79 |
-
base_query = {
|
80 |
-
"username": username,
|
81 |
-
"analysis_type": "morphosyntax_base"
|
82 |
-
}
|
83 |
-
base_analyses = list(
|
84 |
-
base_collection.find(base_query).sort("timestamp", -1).limit(limit)
|
85 |
-
)
|
86 |
-
|
87 |
-
# Para cada análisis base, obtener sus iteraciones
|
88 |
-
iteration_collection = get_collection(ITERATION_COLLECTION)
|
89 |
-
for analysis in base_analyses:
|
90 |
-
base_id = analysis['_id']
|
91 |
-
# Buscar iteraciones con base_id = ObjectId
|
92 |
-
iterations = list(
|
93 |
-
iteration_collection.find({"base_id": base_id}).sort("timestamp", -1)
|
94 |
-
)
|
95 |
-
analysis['iterations'] = iterations
|
96 |
-
|
97 |
-
return base_analyses
|
98 |
-
|
99 |
-
except Exception as e:
|
100 |
-
logger.error(f"Error obteniendo análisis: {str(e)}")
|
101 |
-
return []
|
102 |
-
|
103 |
-
def update_student_morphosyntax_analysis(analysis_id, is_base, update_data):
|
104 |
-
"""
|
105 |
-
Actualiza un análisis base o iteración.
|
106 |
-
analysis_id puede ser un ObjectId o string.
|
107 |
-
"""
|
108 |
-
from bson import ObjectId
|
109 |
-
|
110 |
-
try:
|
111 |
-
collection_name = BASE_COLLECTION if is_base else ITERATION_COLLECTION
|
112 |
-
collection = get_collection(collection_name)
|
113 |
-
|
114 |
-
if isinstance(analysis_id, str):
|
115 |
-
analysis_id = ObjectId(analysis_id)
|
116 |
-
|
117 |
-
query = {"_id": analysis_id}
|
118 |
-
update = {"$set": update_data}
|
119 |
-
|
120 |
-
result = update_document(collection_name, query, update)
|
121 |
-
return result
|
122 |
-
|
123 |
-
except Exception as e:
|
124 |
-
logger.error(f"Error actualizando análisis: {str(e)}")
|
125 |
-
return False
|
126 |
-
|
127 |
-
def delete_student_morphosyntax_analysis(analysis_id, is_base):
|
128 |
-
"""
|
129 |
-
Elimina un análisis base o iteración.
|
130 |
-
Si es base, también elimina todas sus iteraciones.
|
131 |
-
"""
|
132 |
-
from bson import ObjectId
|
133 |
-
|
134 |
-
try:
|
135 |
-
if isinstance(analysis_id, str):
|
136 |
-
analysis_id = ObjectId(analysis_id)
|
137 |
-
|
138 |
-
if is_base:
|
139 |
-
# Eliminar iteraciones vinculadas
|
140 |
-
iteration_collection = get_collection(ITERATION_COLLECTION)
|
141 |
-
iteration_collection.delete_many({"base_id": analysis_id})
|
142 |
-
|
143 |
-
# Luego eliminar el análisis base
|
144 |
-
collection = get_collection(BASE_COLLECTION)
|
145 |
-
else:
|
146 |
-
collection = get_collection(ITERATION_COLLECTION)
|
147 |
-
|
148 |
-
query = {"_id": analysis_id}
|
149 |
-
result = delete_document(collection.name, query)
|
150 |
-
return result
|
151 |
-
|
152 |
-
except Exception as e:
|
153 |
-
logger.error(f"Error eliminando análisis: {str(e)}")
|
154 |
-
return False
|
155 |
-
|
156 |
-
def get_student_morphosyntax_data(username):
|
157 |
-
"""
|
158 |
-
Obtiene todos los datos de análisis morfosintáctico de un estudiante.
|
159 |
-
Returns: Diccionario con todos los análisis y sus iteraciones.
|
160 |
-
"""
|
161 |
-
try:
|
162 |
-
analyses = get_student_morphosyntax_analysis(username, limit=None)
|
163 |
-
return {
|
164 |
-
'entries': analyses,
|
165 |
-
'total_analyses': len(analyses),
|
166 |
-
'has_iterations': any(a.get('has_iterations', False) for a in analyses)
|
167 |
-
}
|
168 |
-
|
169 |
-
except Exception as e:
|
170 |
-
logger.error(f"Error obteniendo datos del estudiante: {str(e)}")
|
171 |
-
return {'entries': [], 'total_analyses': 0, 'has_iterations': False}
|
|
|
1 |
+
# modules/database/morphosyntax_iterative_mongo_db.py
|
2 |
+
|
3 |
+
|
4 |
+
from datetime import datetime, timezone
|
5 |
+
import logging
|
6 |
+
from bson import ObjectId # <--- Importar ObjectId
|
7 |
+
from .mongo_db import get_collection, insert_document, find_documents, update_document, delete_document
|
8 |
+
|
9 |
+
logger = logging.getLogger(__name__)
|
10 |
+
|
11 |
+
BASE_COLLECTION = 'student_morphosyntax_analysis_base'
|
12 |
+
ITERATION_COLLECTION = 'student_morphosyntax_iterations'
|
13 |
+
|
14 |
+
def store_student_morphosyntax_base(username, text, arc_diagrams):
    """
    Store a base morphosyntactic analysis.

    Args:
        username: Owner of the analysis.
        text: Analysed text.
        arc_diagrams: Rendered arc diagrams for the text.

    Returns:
        The new document's ObjectId (NOT a str, so callers can reuse it
        directly in queries), or None on failure.
    """
    try:
        base_document = {
            'username': username,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'text': text,
            'arc_diagrams': arc_diagrams,
            'analysis_type': 'morphosyntax_base',
            'has_iterations': False
        }
        collection = get_collection(BASE_COLLECTION)
        # Guard added: get_collection() may return None on connection
        # problems; insert_one on None raised AttributeError.
        if collection is None:
            logger.error("Error almacenando análisis base: colección no disponible")
            return None
        result = collection.insert_one(base_document)

        logger.info(f"Análisis base guardado para {username}")
        # Return the ObjectId itself (NOT str).
        return result.inserted_id

    except Exception as e:
        logger.error(f"Error almacenando análisis base: {str(e)}")
        return None
|
35 |
+
|
36 |
+
def store_student_morphosyntax_iteration(username, base_id, original_text, iteration_text, arc_diagrams):
    """
    Store one iteration of a morphosyntactic analysis and flag its base
    document as having iterations.

    Args:
        username: Owner of the analysis.
        base_id: ObjectId of the base analysis (a str is converted).
        original_text: Text of the base analysis.
        iteration_text: Text analysed in this iteration.
        arc_diagrams: Rendered arc diagrams for the iteration.

    Returns:
        ObjectId of the new iteration document, or None on failure.
    """
    try:
        # Convert to ObjectId if it arrives as a string.
        if isinstance(base_id, str):
            base_id = ObjectId(base_id)

        iteration_document = {
            'username': username,
            'base_id': base_id,  # Keep the ObjectId in the iteration
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'original_text': original_text,
            'iteration_text': iteration_text,
            'arc_diagrams': arc_diagrams,
            'analysis_type': 'morphosyntax_iteration'
        }
        collection = get_collection(ITERATION_COLLECTION)
        # Guard added: get_collection() may return None on connection problems.
        if collection is None:
            logger.error("Error almacenando iteración: colección no disponible")
            return None
        result = collection.insert_one(iteration_document)

        # Flag the base document (matched by ObjectId) as having iterations.
        base_collection = get_collection(BASE_COLLECTION)
        if base_collection is not None:
            base_collection.update_one(
                {'_id': base_id, 'username': username},
                {'$set': {'has_iterations': True}}
            )
        else:
            # The iteration is already persisted; losing the flag update is
            # recoverable, so log instead of failing the whole operation.
            logger.warning("No se pudo actualizar has_iterations: colección base no disponible")

        logger.info(f"Iteración guardada para {username}, base_id: {base_id}")
        return result.inserted_id

    except Exception as e:
        logger.error(f"Error almacenando iteración: {str(e)}")
        return None
|
71 |
+
|
72 |
+
def get_student_morphosyntax_analysis(username, limit=10):
    """
    Fetch a student's base analyses (newest first) and attach their
    iterations.

    Args:
        username: Student identifier.
        limit: Maximum number of base analyses; None means no limit.

    Returns:
        list: Base documents, each with an 'iterations' list; [] on failure.
    """
    try:
        base_collection = get_collection(BASE_COLLECTION)
        # Guard added: get_collection() may return None.
        if base_collection is None:
            logger.error("Error obteniendo análisis: colección base no disponible")
            return []

        base_query = {
            "username": username,
            "analysis_type": "morphosyntax_base"
        }
        cursor = base_collection.find(base_query).sort("timestamp", -1)
        # BUGFIX: get_student_morphosyntax_data() calls with limit=None and
        # pymongo's Cursor.limit() raises TypeError for None, so only apply
        # a real limit.
        if limit is not None:
            cursor = cursor.limit(limit)
        base_analyses = list(cursor)

        # For each base analysis, attach its iterations (base_id is stored
        # as an ObjectId, so it matches _id directly).
        iteration_collection = get_collection(ITERATION_COLLECTION)
        for analysis in base_analyses:
            if iteration_collection is None:
                analysis['iterations'] = []
                continue
            analysis['iterations'] = list(
                iteration_collection.find({"base_id": analysis['_id']}).sort("timestamp", -1)
            )

        return base_analyses

    except Exception as e:
        logger.error(f"Error obteniendo análisis: {str(e)}")
        return []
|
102 |
+
|
103 |
+
def update_student_morphosyntax_analysis(analysis_id, is_base, update_data):
    """
    Update a base analysis or an iteration document.

    Args:
        analysis_id: ObjectId of the document, or its string form.
        is_base: True targets the base collection, False the iterations.
        update_data: Fields to $set.

    Returns:
        The result of update_document(), or False when the update raises.
    """
    # Cleanup: removed the redundant local `from bson import ObjectId`
    # (already imported at module level) and the unused get_collection()
    # call — update_document() takes the collection *name*.
    try:
        collection_name = BASE_COLLECTION if is_base else ITERATION_COLLECTION

        if isinstance(analysis_id, str):
            analysis_id = ObjectId(analysis_id)

        query = {"_id": analysis_id}
        update = {"$set": update_data}

        return update_document(collection_name, query, update)

    except Exception as e:
        logger.error(f"Error actualizando análisis: {str(e)}")
        return False
|
126 |
+
|
127 |
+
def delete_student_morphosyntax_analysis(analysis_id, is_base):
    """
    Delete a base analysis (together with all of its iterations) or a
    single iteration document.

    Args:
        analysis_id: ObjectId of the document, or its string form.
        is_base: True targets the base collection, False the iterations.

    Returns:
        The result of delete_document(), or False when deletion raises.
    """
    # Cleanup: removed the redundant local `from bson import ObjectId`
    # (already imported at module level).
    try:
        if isinstance(analysis_id, str):
            analysis_id = ObjectId(analysis_id)

        if is_base:
            # Remove linked iterations first so no orphans remain.
            iteration_collection = get_collection(ITERATION_COLLECTION)
            if iteration_collection is not None:
                iteration_collection.delete_many({"base_id": analysis_id})
            collection_name = BASE_COLLECTION
        else:
            collection_name = ITERATION_COLLECTION

        # BUGFIX: pass the collection name directly. The old code fetched a
        # collection object only to read back `.name`, which raised
        # AttributeError whenever get_collection() returned None.
        return delete_document(collection_name, {"_id": analysis_id})

    except Exception as e:
        logger.error(f"Error eliminando análisis: {str(e)}")
        return False
|
155 |
+
|
156 |
+
def get_student_morphosyntax_data(username):
    """
    Collect every morphosyntactic analysis (with iterations) for a
    student.

    Returns:
        dict with 'entries', 'total_analyses' and 'has_iterations'.
    """
    try:
        entries = get_student_morphosyntax_analysis(username, limit=None)
        return {
            'entries': entries,
            'total_analyses': len(entries),
            'has_iterations': any(entry.get('has_iterations', False) for entry in entries),
        }

    except Exception as e:
        logger.error(f"Error obteniendo datos del estudiante: {str(e)}")
        return {'entries': [], 'total_analyses': 0, 'has_iterations': False}
|
modules/database/semantic_export.py
CHANGED
@@ -1,78 +1,78 @@
|
|
1 |
-
from io import BytesIO
|
2 |
-
from reportlab.lib import colors
|
3 |
-
from reportlab.lib.pagesizes import letter
|
4 |
-
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Image, PageBreak
|
5 |
-
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
6 |
-
from reportlab.lib.units import cm
|
7 |
-
from svglib.svglib import svg2rlg
|
8 |
-
from reportlab.graphics import renderPM
|
9 |
-
import base64
|
10 |
-
import cairosvg
|
11 |
-
from reportlab.graphics import renderPDF
|
12 |
-
from reportlab.lib.utils import ImageReader
|
13 |
-
|
14 |
-
#importaciones locales
|
15 |
-
from .semantic_mongo_db import get_student_semantic_data
|
16 |
-
from .chat_db import get_chat_history
|
17 |
-
|
18 |
-
# Placeholder para el logo
|
19 |
-
LOGO_PATH = "assets\img\logo_92x92.png" # Reemplaza esto con la ruta real de tu logo
|
20 |
-
|
21 |
-
# Definir el tamaño de página carta manualmente (612 x 792 puntos)
|
22 |
-
LETTER_SIZE = (612, 792)
|
23 |
-
|
24 |
-
def add_logo(canvas, doc):
|
25 |
-
logo = Image(LOGO_PATH, width=2*cm, height=2*cm)
|
26 |
-
logo.drawOn(canvas, doc.leftMargin, doc.height + doc.topMargin - 0.5*cm)
|
27 |
-
|
28 |
-
def export_user_interactions(username, analysis_type):
|
29 |
-
# Obtener historial de chat (que ahora incluye los análisis morfosintácticos)
|
30 |
-
chat_history = get_chat_history(username, analysis_type)
|
31 |
-
|
32 |
-
# Crear un PDF
|
33 |
-
buffer = BytesIO()
|
34 |
-
doc = SimpleDocTemplate(
|
35 |
-
buffer,
|
36 |
-
pagesize=letter,
|
37 |
-
rightMargin=2*cm,
|
38 |
-
leftMargin=2*cm,
|
39 |
-
topMargin=2*cm,
|
40 |
-
bottomMargin=2*cm
|
41 |
-
)
|
42 |
-
|
43 |
-
story = []
|
44 |
-
styles = getSampleStyleSheet()
|
45 |
-
|
46 |
-
# Título
|
47 |
-
story.append(Paragraph(f"Interacciones de {username} - Análisis {analysis_type}", styles['Title']))
|
48 |
-
story.append(Spacer(1, 0.5*cm))
|
49 |
-
|
50 |
-
# Historial del chat y análisis
|
51 |
-
for entry in chat_history:
|
52 |
-
for message in entry['messages']:
|
53 |
-
role = message['role']
|
54 |
-
content = message['content']
|
55 |
-
story.append(Paragraph(f"<b>{role.capitalize()}:</b> {content}", styles['BodyText']))
|
56 |
-
story.append(Spacer(1, 0.25*cm))
|
57 |
-
|
58 |
-
# Si hay visualizaciones (diagramas SVG), convertirlas a imagen y añadirlas
|
59 |
-
if 'visualizations' in message and message['visualizations']:
|
60 |
-
for svg in message['visualizations']:
|
61 |
-
drawing = svg2rlg(BytesIO(svg.encode('utf-8')))
|
62 |
-
img_data = BytesIO()
|
63 |
-
renderPM.drawToFile(drawing, img_data, fmt="PNG")
|
64 |
-
img_data.seek(0)
|
65 |
-
img = Image(img_data, width=15*cm, height=7.5*cm)
|
66 |
-
story.append(img)
|
67 |
-
story.append(Spacer(1, 0.5*cm))
|
68 |
-
|
69 |
-
story.append(PageBreak())
|
70 |
-
|
71 |
-
# Construir el PDF
|
72 |
-
doc.build(story)
|
73 |
-
buffer.seek(0)
|
74 |
-
return buffer
|
75 |
-
|
76 |
-
# Uso en Streamlit:
|
77 |
-
# pdf_buffer = export_user_interactions(username, 'morphosyntax')
|
78 |
# st.download_button(label="Descargar PDF", data=pdf_buffer, file_name="interacciones.pdf", mime="application/pdf")
|
|
|
1 |
+
from io import BytesIO
|
2 |
+
from reportlab.lib import colors
|
3 |
+
from reportlab.lib.pagesizes import letter
|
4 |
+
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Image, PageBreak
|
5 |
+
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
6 |
+
from reportlab.lib.units import cm
|
7 |
+
from svglib.svglib import svg2rlg
|
8 |
+
from reportlab.graphics import renderPM
|
9 |
+
import base64
|
10 |
+
import cairosvg
|
11 |
+
from reportlab.graphics import renderPDF
|
12 |
+
from reportlab.lib.utils import ImageReader
|
13 |
+
|
14 |
+
#importaciones locales
|
15 |
+
from .semantic_mongo_db import get_student_semantic_data
|
16 |
+
from .chat_db import get_chat_history
|
17 |
+
|
18 |
+
# Placeholder for the logo.
# BUGFIX: the previous value used backslashes ("assets\img\logo_92x92.png"),
# which emits an invalid-escape warning for "\i" and only resolves on
# Windows; forward slashes work on every platform.
LOGO_PATH = "assets/img/logo_92x92.png"  # Replace with the real logo path

# Letter page size defined manually (612 x 792 points)
LETTER_SIZE = (612, 792)
|
23 |
+
|
24 |
+
def add_logo(canvas, doc):
    """Draw the application logo at the top-left corner of the page."""
    x_pos = doc.leftMargin
    y_pos = doc.height + doc.topMargin - 0.5*cm
    Image(LOGO_PATH, width=2*cm, height=2*cm).drawOn(canvas, x_pos, y_pos)
|
27 |
+
|
28 |
+
def export_user_interactions(username, analysis_type):
    """
    Render a user's chat history (which also carries analysis
    visualizations) into a PDF.

    Args:
        username: User whose interactions are exported.
        analysis_type: Which analysis history to export.

    Returns:
        BytesIO: In-memory PDF, positioned at offset 0.
    """
    # Chat history now also includes the morphosyntactic analyses.
    history = get_chat_history(username, analysis_type)

    pdf_buffer = BytesIO()
    document = SimpleDocTemplate(
        pdf_buffer,
        pagesize=letter,
        rightMargin=2*cm,
        leftMargin=2*cm,
        topMargin=2*cm,
        bottomMargin=2*cm
    )

    styles = getSampleStyleSheet()
    story = [
        Paragraph(f"Interacciones de {username} - Análisis {analysis_type}", styles['Title']),
        Spacer(1, 0.5*cm),
    ]

    for entry in history:
        for message in entry['messages']:
            role = message['role']
            content = message['content']
            story.append(Paragraph(f"<b>{role.capitalize()}:</b> {content}", styles['BodyText']))
            story.append(Spacer(1, 0.25*cm))

            # Convert any SVG visualizations to PNG and embed them.
            if 'visualizations' in message and message['visualizations']:
                for svg in message['visualizations']:
                    drawing = svg2rlg(BytesIO(svg.encode('utf-8')))
                    png_stream = BytesIO()
                    renderPM.drawToFile(drawing, png_stream, fmt="PNG")
                    png_stream.seek(0)
                    story.append(Image(png_stream, width=15*cm, height=7.5*cm))
                    story.append(Spacer(1, 0.5*cm))

        story.append(PageBreak())

    # Build the PDF and rewind the buffer for the caller.
    document.build(story)
    pdf_buffer.seek(0)
    return pdf_buffer
|
75 |
+
|
76 |
+
# Uso en Streamlit:
|
77 |
+
# pdf_buffer = export_user_interactions(username, 'morphosyntax')
|
78 |
# st.download_button(label="Descargar PDF", data=pdf_buffer, file_name="interacciones.pdf", mime="application/pdf")
|
modules/database/semantic_mongo_db.py
CHANGED
@@ -1,160 +1,160 @@
|
|
1 |
-
#/modules/database/semantic_mongo_db.py
|
2 |
-
|
3 |
-
# Importaciones estándar
|
4 |
-
import io
|
5 |
-
import base64
|
6 |
-
from datetime import datetime, timezone
|
7 |
-
import logging
|
8 |
-
|
9 |
-
# Importaciones de terceros
|
10 |
-
import matplotlib.pyplot as plt
|
11 |
-
|
12 |
-
# Importaciones locales
|
13 |
-
from .mongo_db import (
|
14 |
-
get_collection,
|
15 |
-
insert_document,
|
16 |
-
find_documents,
|
17 |
-
update_document,
|
18 |
-
delete_document
|
19 |
-
)
|
20 |
-
|
21 |
-
# Configuración del logger
|
22 |
-
logger = logging.getLogger(__name__) # Cambiado de name a __name__
|
23 |
-
COLLECTION_NAME = 'student_semantic_analysis'
|
24 |
-
|
25 |
-
def store_student_semantic_result(username, text, analysis_result):
    """
    Persist a semantic analysis result in MongoDB.

    Returns:
        bool: True when the document was inserted, False otherwise.
    """
    try:
        # The graph already arrives as raw bytes; just encode it to base64.
        encoded_graph = None
        graph_bytes = analysis_result.get('concept_graph')
        if graph_bytes is not None:
            try:
                encoded_graph = base64.b64encode(graph_bytes).decode('utf-8')
            except Exception as e:
                logger.error(f"Error al codificar gráfico conceptual: {str(e)}")

        document = {
            'username': username,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'text': text,
            'analysis_type': 'semantic',
            'key_concepts': analysis_result.get('key_concepts', []),
            'concept_graph': encoded_graph
        }

        inserted_id = insert_document(COLLECTION_NAME, document)
        if inserted_id:
            logger.info(f"Análisis semántico guardado con ID: {inserted_id} para el usuario: {username}")
            return True

        logger.error("No se pudo insertar el documento en MongoDB")
        return False

    except Exception as e:
        logger.error(f"Error al guardar el análisis semántico: {str(e)}")
        return False
|
61 |
-
|
62 |
-
####################################################################################
|
63 |
-
def get_student_semantic_analysis(username, limit=10):
    """
    Retrieve a student's semantic analyses, newest first.

    Args:
        username: Student identifier.
        limit: Maximum number of documents; falsy means no limit.

    Returns:
        list: Matching documents, or [] on any failure.
    """
    try:
        collection = get_collection(COLLECTION_NAME)
        if collection is None:
            logger.error("No se pudo obtener la colección semantic")
            return []

        query = {
            "username": username,
            "analysis_type": "semantic"
        }

        # BUGFIX: the projection previously returned only timestamp,
        # concept_graph and _id, so get_student_semantic_data() crashed
        # with KeyError when reading 'text' and 'key_concepts'. Include
        # the fields downstream consumers actually use.
        projection = {
            "timestamp": 1,
            "text": 1,
            "key_concepts": 1,
            "concept_graph": 1,
            "_id": 1
        }

        try:
            cursor = collection.find(query, projection).sort("timestamp", -1)
            if limit:
                cursor = cursor.limit(limit)

            results = list(cursor)
            logger.info(f"Recuperados {len(results)} análisis semánticos para {username}")
            return results

        except Exception as db_error:
            logger.error(f"Error en la consulta a MongoDB: {str(db_error)}")
            return []

    except Exception as e:
        logger.error(f"Error recuperando análisis semántico: {str(e)}")
        return []
|
105 |
-
####################################################################################################
|
106 |
-
|
107 |
-
|
108 |
-
def update_student_semantic_analysis(analysis_id, update_data):
|
109 |
-
"""
|
110 |
-
Actualiza un análisis semántico existente.
|
111 |
-
Args:
|
112 |
-
analysis_id: ID del análisis a actualizar
|
113 |
-
update_data: Datos a actualizar
|
114 |
-
"""
|
115 |
-
query = {"_id": analysis_id}
|
116 |
-
update = {"$set": update_data}
|
117 |
-
return update_document(COLLECTION_NAME, query, update)
|
118 |
-
|
119 |
-
def delete_student_semantic_analysis(analysis_id):
|
120 |
-
"""
|
121 |
-
Elimina un análisis semántico.
|
122 |
-
Args:
|
123 |
-
analysis_id: ID del análisis a eliminar
|
124 |
-
"""
|
125 |
-
query = {"_id": analysis_id}
|
126 |
-
return delete_document(COLLECTION_NAME, query)
|
127 |
-
|
128 |
-
def get_student_semantic_data(username):
|
129 |
-
"""
|
130 |
-
Obtiene todos los análisis semánticos de un estudiante.
|
131 |
-
Args:
|
132 |
-
username: Nombre del usuario
|
133 |
-
Returns:
|
134 |
-
dict: Diccionario con todos los análisis del estudiante
|
135 |
-
"""
|
136 |
-
analyses = get_student_semantic_analysis(username, limit=None)
|
137 |
-
|
138 |
-
formatted_analyses = []
|
139 |
-
for analysis in analyses:
|
140 |
-
formatted_analysis = {
|
141 |
-
'timestamp': analysis['timestamp'],
|
142 |
-
'text': analysis['text'],
|
143 |
-
'key_concepts': analysis['key_concepts'],
|
144 |
-
'entities': analysis['entities']
|
145 |
-
# No incluimos los gráficos en el resumen general
|
146 |
-
}
|
147 |
-
formatted_analyses.append(formatted_analysis)
|
148 |
-
|
149 |
-
return {
|
150 |
-
'entries': formatted_analyses
|
151 |
-
}
|
152 |
-
|
153 |
-
# Exportar las funciones necesarias
|
154 |
-
__all__ = [
|
155 |
-
'store_student_semantic_result',
|
156 |
-
'get_student_semantic_analysis',
|
157 |
-
'update_student_semantic_analysis',
|
158 |
-
'delete_student_semantic_analysis',
|
159 |
-
'get_student_semantic_data'
|
160 |
]
|
|
|
1 |
+
#/modules/database/semantic_mongo_db.py
|
2 |
+
|
3 |
+
# Importaciones estándar
|
4 |
+
import io
|
5 |
+
import base64
|
6 |
+
from datetime import datetime, timezone
|
7 |
+
import logging
|
8 |
+
|
9 |
+
# Importaciones de terceros
|
10 |
+
import matplotlib.pyplot as plt
|
11 |
+
|
12 |
+
# Importaciones locales
|
13 |
+
from .mongo_db import (
|
14 |
+
get_collection,
|
15 |
+
insert_document,
|
16 |
+
find_documents,
|
17 |
+
update_document,
|
18 |
+
delete_document
|
19 |
+
)
|
20 |
+
|
21 |
+
# Configuración del logger
|
22 |
+
logger = logging.getLogger(__name__) # Cambiado de name a __name__
|
23 |
+
COLLECTION_NAME = 'student_semantic_analysis'
|
24 |
+
|
25 |
+
def store_student_semantic_result(username, text, analysis_result):
    """Persist one semantic-analysis result for *username* in MongoDB.

    Returns True when the document was inserted, False otherwise.
    """
    try:
        # The concept graph arrives as raw image bytes; store it base64-encoded.
        encoded_graph = None
        raw_graph = analysis_result.get('concept_graph')
        if raw_graph is not None:
            try:
                encoded_graph = base64.b64encode(raw_graph).decode('utf-8')
            except Exception as e:
                logger.error(f"Error al codificar gráfico conceptual: {str(e)}")

        document = {
            'username': username,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'text': text,
            'analysis_type': 'semantic',
            'key_concepts': analysis_result.get('key_concepts', []),
            'concept_graph': encoded_graph,
        }

        inserted_id = insert_document(COLLECTION_NAME, document)
        if not inserted_id:
            logger.error("No se pudo insertar el documento en MongoDB")
            return False

        logger.info(f"Análisis semántico guardado con ID: {inserted_id} para el usuario: {username}")
        return True

    except Exception as e:
        logger.error(f"Error al guardar el análisis semántico: {str(e)}")
        return False
|
61 |
+
|
62 |
+
####################################################################################
|
63 |
+
def get_student_semantic_analysis(username, limit=10):
    """Fetch a student's stored semantic analyses, newest first.

    Passing ``limit=None`` (or 0) disables the result cap.
    Returns a list of documents; empty on any failure.
    """
    try:
        collection = get_collection(COLLECTION_NAME)
        # PyMongo collections do not support truth-testing; compare to None.
        if collection is None:
            logger.error("No se pudo obtener la colección semantic")
            return []

        # Only the fields the timeline view needs are projected.
        try:
            cursor = collection.find(
                {"username": username, "analysis_type": "semantic"},
                {"timestamp": 1, "concept_graph": 1, "_id": 1},
            ).sort("timestamp", -1)
            if limit:
                cursor = cursor.limit(limit)

            docs = list(cursor)
            logger.info(f"Recuperados {len(docs)} análisis semánticos para {username}")
            return docs
        except Exception as db_error:
            logger.error(f"Error en la consulta a MongoDB: {str(db_error)}")
            return []

    except Exception as e:
        logger.error(f"Error recuperando análisis semántico: {str(e)}")
        return []
|
105 |
+
####################################################################################################
|
106 |
+
|
107 |
+
|
108 |
+
def update_student_semantic_analysis(analysis_id, update_data):
    """Apply *update_data* (via ``$set``) to the analysis with id *analysis_id*.

    Returns whatever ``update_document`` reports.
    """
    return update_document(
        COLLECTION_NAME,
        {"_id": analysis_id},
        {"$set": update_data},
    )
|
118 |
+
|
119 |
+
def delete_student_semantic_analysis(analysis_id):
    """Remove the semantic analysis whose ``_id`` equals *analysis_id*."""
    return delete_document(COLLECTION_NAME, {"_id": analysis_id})
|
127 |
+
|
128 |
+
def get_student_semantic_data(username):
    """Return every semantic analysis of *username* as a summary dict.

    Args:
        username: The student's username.

    Returns:
        dict: ``{'entries': [...]}`` where each entry holds timestamp, text,
        key_concepts and entities (graphs are deliberately excluded).

    Note:
        ``get_student_semantic_analysis`` projects only ``timestamp``,
        ``concept_graph`` and ``_id``, and stored documents never contain an
        ``entities`` field, so the previous direct indexing
        (``analysis['text']`` …) raised ``KeyError``. ``dict.get`` with
        defaults keeps the summary shape stable instead.
    """
    analyses = get_student_semantic_analysis(username, limit=None)

    formatted_analyses = [
        {
            'timestamp': analysis.get('timestamp'),
            'text': analysis.get('text', ''),
            'key_concepts': analysis.get('key_concepts', []),
            'entities': analysis.get('entities', [])
            # Graphs are intentionally left out of the general summary.
        }
        for analysis in analyses
    ]

    return {
        'entries': formatted_analyses
    }
|
152 |
+
|
153 |
+
# Public API of this module.
__all__ = [
    'store_student_semantic_result',
    'get_student_semantic_analysis',
    'update_student_semantic_analysis',
    'delete_student_semantic_analysis',
    'get_student_semantic_data'
]
|
modules/database/sql_db.py
CHANGED
@@ -1,323 +1,323 @@
|
|
1 |
-
# modules/database/sql_db.py
|
2 |
-
|
3 |
-
from .database_init import get_container
|
4 |
-
from datetime import datetime, timezone
|
5 |
-
import logging
|
6 |
-
import bcrypt
|
7 |
-
import uuid
|
8 |
-
|
9 |
-
logger = logging.getLogger(__name__)
|
10 |
-
|
11 |
-
#########################################
|
12 |
-
def get_user(username, role=None):
|
13 |
-
container = get_container("users")
|
14 |
-
try:
|
15 |
-
query = f"SELECT * FROM c WHERE c.id = '{username}'"
|
16 |
-
if role:
|
17 |
-
query += f" AND c.role = '{role}'"
|
18 |
-
items = list(container.query_items(query=query))
|
19 |
-
return items[0] if items else None
|
20 |
-
except Exception as e:
|
21 |
-
logger.error(f"Error al obtener usuario {username}: {str(e)}")
|
22 |
-
return None
|
23 |
-
|
24 |
-
|
25 |
-
#########################################
|
26 |
-
def get_admin_user(username):
|
27 |
-
return get_user(username, role='Administrador')
|
28 |
-
|
29 |
-
|
30 |
-
#########################################
|
31 |
-
def get_student_user(username):
|
32 |
-
return get_user(username, role='Estudiante')
|
33 |
-
|
34 |
-
|
35 |
-
#########################################
|
36 |
-
def get_teacher_user(username):
|
37 |
-
return get_user(username, role='Profesor')
|
38 |
-
|
39 |
-
|
40 |
-
#########################################
|
41 |
-
def create_user(username, password, role, additional_info=None):
|
42 |
-
"""Crea un nuevo usuario"""
|
43 |
-
container = get_container("users")
|
44 |
-
if not container:
|
45 |
-
logger.error("No se pudo obtener el contenedor de usuarios")
|
46 |
-
return False
|
47 |
-
|
48 |
-
try:
|
49 |
-
user_data = {
|
50 |
-
'id': username,
|
51 |
-
'password': password,
|
52 |
-
'role': role,
|
53 |
-
'timestamp': datetime.now(timezone.utc).isoformat(),
|
54 |
-
'additional_info': additional_info or {},
|
55 |
-
'partitionKey': username # Agregar partition key
|
56 |
-
}
|
57 |
-
|
58 |
-
# Crear item sin especificar partition_key en el método
|
59 |
-
container.create_item(body=user_data)
|
60 |
-
logger.info(f"Usuario {role} creado: {username}")
|
61 |
-
return True
|
62 |
-
|
63 |
-
except Exception as e:
|
64 |
-
logger.error(f"Error al crear usuario {role}: {str(e)}")
|
65 |
-
return False
|
66 |
-
|
67 |
-
#########################################
|
68 |
-
def create_student_user(username, password, additional_info=None):
|
69 |
-
return create_user(username, password, 'Estudiante', additional_info)
|
70 |
-
|
71 |
-
#########################################
|
72 |
-
def create_teacher_user(username, password, additional_info=None):
|
73 |
-
return create_user(username, password, 'Profesor', additional_info)
|
74 |
-
|
75 |
-
#########################################
|
76 |
-
def create_admin_user(username, password, additional_info=None):
|
77 |
-
return create_user(username, password, 'Administrador', additional_info)
|
78 |
-
|
79 |
-
#########################################
|
80 |
-
def record_login(username):
|
81 |
-
"""Registra el inicio de sesión de un usuario"""
|
82 |
-
try:
|
83 |
-
container = get_container("users_sessions")
|
84 |
-
if not container:
|
85 |
-
logger.error("No se pudo obtener el contenedor users_sessions")
|
86 |
-
return None
|
87 |
-
|
88 |
-
session_id = str(uuid.uuid4())
|
89 |
-
session_doc = {
|
90 |
-
"id": session_id,
|
91 |
-
"type": "session",
|
92 |
-
"username": username,
|
93 |
-
"loginTime": datetime.now(timezone.utc).isoformat(),
|
94 |
-
"additional_info": {},
|
95 |
-
"partitionKey": username
|
96 |
-
}
|
97 |
-
|
98 |
-
result = container.create_item(body=session_doc)
|
99 |
-
logger.info(f"Sesión {session_id} registrada para {username}")
|
100 |
-
return session_id
|
101 |
-
except Exception as e:
|
102 |
-
logger.error(f"Error registrando login: {str(e)}")
|
103 |
-
return None
|
104 |
-
|
105 |
-
#########################################
|
106 |
-
def record_logout(username, session_id):
|
107 |
-
"""Registra el cierre de sesión y calcula la duración"""
|
108 |
-
try:
|
109 |
-
container = get_container("users_sessions")
|
110 |
-
if not container:
|
111 |
-
logger.error("No se pudo obtener el contenedor users_sessions")
|
112 |
-
return False
|
113 |
-
|
114 |
-
query = "SELECT * FROM c WHERE c.id = @id AND c.username = @username"
|
115 |
-
params = [
|
116 |
-
{"name": "@id", "value": session_id},
|
117 |
-
{"name": "@username", "value": username}
|
118 |
-
]
|
119 |
-
|
120 |
-
items = list(container.query_items(
|
121 |
-
query=query,
|
122 |
-
parameters=params
|
123 |
-
))
|
124 |
-
|
125 |
-
if not items:
|
126 |
-
logger.warning(f"Sesión no encontrada: {session_id}")
|
127 |
-
return False
|
128 |
-
|
129 |
-
session = items[0]
|
130 |
-
login_time = datetime.fromisoformat(session['loginTime'].rstrip('Z'))
|
131 |
-
logout_time = datetime.now(timezone.utc)
|
132 |
-
duration = int((logout_time - login_time).total_seconds())
|
133 |
-
|
134 |
-
session.update({
|
135 |
-
"logoutTime": logout_time.isoformat(),
|
136 |
-
"sessionDuration": duration,
|
137 |
-
"partitionKey": username
|
138 |
-
})
|
139 |
-
|
140 |
-
container.upsert_item(body=session)
|
141 |
-
logger.info(f"Sesión {session_id} cerrada para {username}, duración: {duration}s")
|
142 |
-
return True
|
143 |
-
except Exception as e:
|
144 |
-
logger.error(f"Error registrando logout: {str(e)}")
|
145 |
-
return False
|
146 |
-
|
147 |
-
#########################################
|
148 |
-
def get_recent_sessions(limit=10):
|
149 |
-
"""Obtiene las sesiones más recientes"""
|
150 |
-
try:
|
151 |
-
container = get_container("users_sessions")
|
152 |
-
if not container:
|
153 |
-
logger.error("No se pudo obtener el contenedor users_sessions")
|
154 |
-
return []
|
155 |
-
|
156 |
-
query = """
|
157 |
-
SELECT c.username, c.loginTime, c.logoutTime, c.sessionDuration
|
158 |
-
FROM c
|
159 |
-
WHERE c.type = 'session'
|
160 |
-
ORDER BY c.loginTime DESC
|
161 |
-
OFFSET 0 LIMIT @limit
|
162 |
-
"""
|
163 |
-
|
164 |
-
sessions = list(container.query_items(
|
165 |
-
query=query,
|
166 |
-
parameters=[{"name": "@limit", "value": limit}],
|
167 |
-
enable_cross_partition_query=True # Agregar este parámetro
|
168 |
-
))
|
169 |
-
|
170 |
-
clean_sessions = []
|
171 |
-
for session in sessions:
|
172 |
-
try:
|
173 |
-
clean_sessions.append({
|
174 |
-
"username": session["username"],
|
175 |
-
"loginTime": session["loginTime"],
|
176 |
-
"logoutTime": session.get("logoutTime", "Activo"),
|
177 |
-
"sessionDuration": session.get("sessionDuration", 0)
|
178 |
-
})
|
179 |
-
except KeyError as e:
|
180 |
-
logger.warning(f"Sesión con datos incompletos: {e}")
|
181 |
-
continue
|
182 |
-
|
183 |
-
return clean_sessions
|
184 |
-
except Exception as e:
|
185 |
-
logger.error(f"Error obteniendo sesiones recientes: {str(e)}")
|
186 |
-
return []
|
187 |
-
|
188 |
-
#########################################
|
189 |
-
def get_user_total_time(username):
|
190 |
-
"""Obtiene el tiempo total que un usuario ha pasado en la plataforma"""
|
191 |
-
try:
|
192 |
-
container = get_container("users_sessions")
|
193 |
-
if not container:
|
194 |
-
return None
|
195 |
-
|
196 |
-
query = """
|
197 |
-
SELECT VALUE SUM(c.sessionDuration)
|
198 |
-
FROM c
|
199 |
-
WHERE c.type = 'session'
|
200 |
-
AND c.username = @username
|
201 |
-
AND IS_DEFINED(c.sessionDuration)
|
202 |
-
"""
|
203 |
-
|
204 |
-
result = list(container.query_items(
|
205 |
-
query=query,
|
206 |
-
parameters=[{"name": "@username", "value": username}]
|
207 |
-
))
|
208 |
-
|
209 |
-
return result[0] if result and result[0] is not None else 0
|
210 |
-
except Exception as e:
|
211 |
-
logger.error(f"Error obteniendo tiempo total: {str(e)}")
|
212 |
-
return 0
|
213 |
-
|
214 |
-
#########################################
|
215 |
-
def update_student_user(username, new_info):
|
216 |
-
container = get_container("users")
|
217 |
-
try:
|
218 |
-
user = get_student_user(username)
|
219 |
-
if user:
|
220 |
-
user['additional_info'].update(new_info)
|
221 |
-
user['partitionKey'] = username
|
222 |
-
container.upsert_item(body=user)
|
223 |
-
logger.info(f"Información del estudiante actualizada: {username}")
|
224 |
-
return True
|
225 |
-
else:
|
226 |
-
logger.warning(f"Intento de actualizar estudiante no existente: {username}")
|
227 |
-
return False
|
228 |
-
except Exception as e:
|
229 |
-
logger.error(f"Error al actualizar información del estudiante {username}: {str(e)}")
|
230 |
-
return False
|
231 |
-
|
232 |
-
#########################################
|
233 |
-
def delete_student_user(username):
|
234 |
-
container = get_container("users")
|
235 |
-
try:
|
236 |
-
user = get_student_user(username)
|
237 |
-
if user:
|
238 |
-
# El ID es suficiente para eliminación ya que partitionKey está en el documento
|
239 |
-
container.delete_item(item=user['id'])
|
240 |
-
logger.info(f"Estudiante eliminado: {username}")
|
241 |
-
return True
|
242 |
-
else:
|
243 |
-
logger.warning(f"Intento de eliminar estudiante no existente: {username}")
|
244 |
-
return False
|
245 |
-
except Exception as e:
|
246 |
-
logger.error(f"Error al eliminar estudiante {username}: {str(e)}")
|
247 |
-
return False
|
248 |
-
|
249 |
-
#########################################
|
250 |
-
def store_application_request(name, lastname, email, institution, current_role, desired_role, reason):
|
251 |
-
"""Almacena una solicitud de aplicación"""
|
252 |
-
try:
|
253 |
-
# Obtener el contenedor usando get_container() que sí funciona
|
254 |
-
container = get_container("application_requests")
|
255 |
-
if not container:
|
256 |
-
logger.error("No se pudo obtener el contenedor de solicitudes")
|
257 |
-
return False
|
258 |
-
|
259 |
-
# Crear documento con la solicitud
|
260 |
-
# Nótese que incluimos email como partition key en el cuerpo del documento
|
261 |
-
application_request = {
|
262 |
-
"id": str(uuid.uuid4()),
|
263 |
-
"name": name,
|
264 |
-
"lastname": lastname,
|
265 |
-
"email": email,
|
266 |
-
"institution": institution,
|
267 |
-
"current_role": current_role,
|
268 |
-
"desired_role": desired_role,
|
269 |
-
"reason": reason,
|
270 |
-
"requestDate": datetime.utcnow().isoformat(),
|
271 |
-
# El campo para partition key debe estar en el documento
|
272 |
-
"partitionKey": email
|
273 |
-
}
|
274 |
-
|
275 |
-
# Crear el item en el contenedor - sin el parámetro enable_cross_partition_query
|
276 |
-
container.create_item(
|
277 |
-
body=application_request # Solo pasamos el body
|
278 |
-
)
|
279 |
-
logger.info(f"Solicitud de aplicación almacenada para: {email}")
|
280 |
-
return True
|
281 |
-
|
282 |
-
except Exception as e:
|
283 |
-
logger.error(f"Error al almacenar la solicitud de aplicación: {str(e)}")
|
284 |
-
logger.error(f"Detalles del error: {str(e)}")
|
285 |
-
return False
|
286 |
-
|
287 |
-
|
288 |
-
################################################################
|
289 |
-
def store_student_feedback(username, name, email, feedback):
|
290 |
-
"""Almacena el feedback de un estudiante"""
|
291 |
-
try:
|
292 |
-
# Obtener el contenedor - verificar disponibilidad
|
293 |
-
logger.info(f"Intentando obtener contenedor user_feedback para usuario: {username}")
|
294 |
-
container = get_container("user_feedback")
|
295 |
-
if not container:
|
296 |
-
logger.error("No se pudo obtener el contenedor user_feedback")
|
297 |
-
return False
|
298 |
-
|
299 |
-
# Crear documento de feedback - asegurar que el username esté como partition key
|
300 |
-
feedback_item = {
|
301 |
-
"id": str(uuid.uuid4()),
|
302 |
-
"username": username, # Campo regular
|
303 |
-
"name": name,
|
304 |
-
"email": email,
|
305 |
-
"feedback": feedback,
|
306 |
-
"role": "Estudiante",
|
307 |
-
"timestamp": datetime.now(timezone.utc).isoformat(),
|
308 |
-
"partitionKey": username # Campo de partición
|
309 |
-
}
|
310 |
-
|
311 |
-
# Crear el item - sin el parámetro enable_cross_partition_query
|
312 |
-
logger.info(f"Intentando almacenar feedback para usuario: {username}")
|
313 |
-
result = container.create_item(
|
314 |
-
body=feedback_item # Solo el body, no parámetros adicionales
|
315 |
-
)
|
316 |
-
|
317 |
-
logger.info(f"Feedback almacenado exitosamente para el usuario: {username}")
|
318 |
-
return True
|
319 |
-
|
320 |
-
except Exception as e:
|
321 |
-
logger.error(f"Error al almacenar el feedback del estudiante {username}")
|
322 |
-
logger.error(f"Detalles del error: {str(e)}")
|
323 |
return False
|
|
|
1 |
+
# modules/database/sql_db.py
|
2 |
+
|
3 |
+
from .database_init import get_container
|
4 |
+
from datetime import datetime, timezone
|
5 |
+
import logging
|
6 |
+
import bcrypt
|
7 |
+
import uuid
|
8 |
+
|
9 |
+
logger = logging.getLogger(__name__)
|
10 |
+
|
11 |
+
#########################################
|
12 |
+
def get_user(username, role=None):
    """Look up one user document by id (and optionally role) in Cosmos DB.

    Args:
        username: Document id of the user.
        role: When given, the user must also carry this exact role.

    Returns:
        The first matching document, or None when absent or on error.
    """
    container = get_container("users")
    try:
        # Parameterized query: the previous version interpolated username/role
        # into the SQL text with f-strings, allowing query injection. Sibling
        # record_logout already uses the parameters= form.
        query = "SELECT * FROM c WHERE c.id = @username"
        params = [{"name": "@username", "value": username}]
        if role:
            query += " AND c.role = @role"
            params.append({"name": "@role", "value": role})

        items = list(container.query_items(query=query, parameters=params))
        return items[0] if items else None
    except Exception as e:
        logger.error(f"Error al obtener usuario {username}: {str(e)}")
        return None
|
23 |
+
|
24 |
+
|
25 |
+
#########################################
|
26 |
+
def get_admin_user(username):
    """Return the user document for *username* only when its role is 'Administrador'."""
    return get_user(username, role='Administrador')
|
28 |
+
|
29 |
+
|
30 |
+
#########################################
|
31 |
+
def get_student_user(username):
    """Return the user document for *username* only when its role is 'Estudiante'."""
    return get_user(username, role='Estudiante')
|
33 |
+
|
34 |
+
|
35 |
+
#########################################
|
36 |
+
def get_teacher_user(username):
    """Return the user document for *username* only when its role is 'Profesor'."""
    return get_user(username, role='Profesor')
|
38 |
+
|
39 |
+
|
40 |
+
#########################################
|
41 |
+
def create_user(username, password, role, additional_info=None):
    """Create a user document with the given role.

    Returns True on success, False when the container is unavailable or the
    write fails.
    """
    container = get_container("users")
    if not container:
        logger.error("No se pudo obtener el contenedor de usuarios")
        return False

    try:
        # NOTE(review): the password is stored exactly as received; bcrypt is
        # imported at module level but never applied here — confirm callers
        # hash the password before calling.
        new_user = {
            'id': username,
            'password': password,
            'role': role,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'additional_info': additional_info or {},
            'partitionKey': username,  # Cosmos partition key travels in the body
        }
        # create_item takes only the body; no partition_key kwarg here.
        container.create_item(body=new_user)
        logger.info(f"Usuario {role} creado: {username}")
        return True
    except Exception as e:
        logger.error(f"Error al crear usuario {role}: {str(e)}")
        return False
|
66 |
+
|
67 |
+
#########################################
|
68 |
+
def create_student_user(username, password, additional_info=None):
    """Create a user with role 'Estudiante' (see create_user)."""
    return create_user(username, password, 'Estudiante', additional_info)
|
70 |
+
|
71 |
+
#########################################
|
72 |
+
def create_teacher_user(username, password, additional_info=None):
    """Create a user with role 'Profesor' (see create_user)."""
    return create_user(username, password, 'Profesor', additional_info)
|
74 |
+
|
75 |
+
#########################################
|
76 |
+
def create_admin_user(username, password, additional_info=None):
    """Create a user with role 'Administrador' (see create_user)."""
    return create_user(username, password, 'Administrador', additional_info)
|
78 |
+
|
79 |
+
#########################################
|
80 |
+
def record_login(username):
    """Open a session record for *username* and return its session id.

    Returns None when the sessions container is unavailable or the write fails.
    """
    try:
        container = get_container("users_sessions")
        if not container:
            logger.error("No se pudo obtener el contenedor users_sessions")
            return None

        session_id = str(uuid.uuid4())
        container.create_item(body={
            "id": session_id,
            "type": "session",
            "username": username,
            "loginTime": datetime.now(timezone.utc).isoformat(),
            "additional_info": {},
            "partitionKey": username,  # partition key lives in the body
        })
        logger.info(f"Sesión {session_id} registrada para {username}")
        return session_id
    except Exception as e:
        logger.error(f"Error registrando login: {str(e)}")
        return None
|
104 |
+
|
105 |
+
#########################################
|
106 |
+
def record_logout(username, session_id):
    """Close the session *session_id* of *username* and store its duration.

    Args:
        username: Owner of the session.
        session_id: Id returned by record_login.

    Returns:
        bool: True when the session was found and updated, False otherwise.
    """
    try:
        container = get_container("users_sessions")
        if not container:
            logger.error("No se pudo obtener el contenedor users_sessions")
            return False

        query = "SELECT * FROM c WHERE c.id = @id AND c.username = @username"
        params = [
            {"name": "@id", "value": session_id},
            {"name": "@username", "value": username}
        ]

        items = list(container.query_items(
            query=query,
            parameters=params
        ))

        if not items:
            logger.warning(f"Sesión no encontrada: {session_id}")
            return False

        session = items[0]
        # loginTime is written by record_login as an aware ISO-8601 string
        # ("+00:00" suffix). The previous ``rstrip('Z')`` produced a *naive*
        # datetime for 'Z'-suffixed strings, which made the subtraction below
        # raise TypeError (aware - naive). Normalize to aware UTC instead.
        login_time = datetime.fromisoformat(session['loginTime'].replace('Z', '+00:00'))
        if login_time.tzinfo is None:
            login_time = login_time.replace(tzinfo=timezone.utc)
        logout_time = datetime.now(timezone.utc)
        duration = int((logout_time - login_time).total_seconds())

        session.update({
            "logoutTime": logout_time.isoformat(),
            "sessionDuration": duration,
            "partitionKey": username
        })

        container.upsert_item(body=session)
        logger.info(f"Sesión {session_id} cerrada para {username}, duración: {duration}s")
        return True
    except Exception as e:
        logger.error(f"Error registrando logout: {str(e)}")
        return False
|
146 |
+
|
147 |
+
#########################################
|
148 |
+
def get_recent_sessions(limit=10):
    """Return up to *limit* of the most recent session records, newest first.

    Each entry carries username, loginTime, logoutTime ("Activo" while the
    session is still open) and sessionDuration. Empty list on any failure.
    """
    try:
        container = get_container("users_sessions")
        if not container:
            logger.error("No se pudo obtener el contenedor users_sessions")
            return []

        query = """
        SELECT c.username, c.loginTime, c.logoutTime, c.sessionDuration
        FROM c
        WHERE c.type = 'session'
        ORDER BY c.loginTime DESC
        OFFSET 0 LIMIT @limit
        """

        rows = list(container.query_items(
            query=query,
            parameters=[{"name": "@limit", "value": limit}],
            enable_cross_partition_query=True  # sessions span every user partition
        ))

        clean_sessions = []
        for row in rows:
            try:
                clean_sessions.append({
                    "username": row["username"],
                    "loginTime": row["loginTime"],
                    "logoutTime": row.get("logoutTime", "Activo"),
                    "sessionDuration": row.get("sessionDuration", 0),
                })
            except KeyError as e:
                # Skip malformed session documents rather than failing the list.
                logger.warning(f"Sesión con datos incompletos: {e}")

        return clean_sessions
    except Exception as e:
        logger.error(f"Error obteniendo sesiones recientes: {str(e)}")
        return []
|
187 |
+
|
188 |
+
#########################################
|
189 |
+
def get_user_total_time(username):
    """Sum the recorded session durations (seconds) of *username*.

    Returns:
        int: Total seconds, 0 when there are no closed sessions or on error;
        None when the sessions container is unavailable.
    """
    try:
        container = get_container("users_sessions")
        if not container:
            return None

        query = """
        SELECT VALUE SUM(c.sessionDuration)
        FROM c
        WHERE c.type = 'session'
        AND c.username = @username
        AND IS_DEFINED(c.sessionDuration)
        """

        result = list(container.query_items(
            query=query,
            parameters=[{"name": "@username", "value": username}],
            # Fixed: the filter is on c.username, not the partition key, so the
            # query is cross-partition — same flag sibling get_recent_sessions
            # already passes.
            enable_cross_partition_query=True
        ))

        return result[0] if result and result[0] is not None else 0
    except Exception as e:
        logger.error(f"Error obteniendo tiempo total: {str(e)}")
        return 0
|
213 |
+
|
214 |
+
#########################################
|
215 |
+
def update_student_user(username, new_info):
    """Merge *new_info* into a student's additional_info and persist it.

    Returns True on success, False when the student does not exist or the
    write fails.
    """
    container = get_container("users")
    try:
        student = get_student_user(username)
        if not student:
            logger.warning(f"Intento de actualizar estudiante no existente: {username}")
            return False

        student['additional_info'].update(new_info)
        student['partitionKey'] = username  # keep the partition key in the body
        container.upsert_item(body=student)
        logger.info(f"Información del estudiante actualizada: {username}")
        return True
    except Exception as e:
        logger.error(f"Error al actualizar información del estudiante {username}: {str(e)}")
        return False
|
231 |
+
|
232 |
+
#########################################
|
233 |
+
def delete_student_user(username):
    """Delete the student document for *username*.

    Returns True on success, False when the user is absent or on error.
    """
    container = get_container("users")
    try:
        user = get_student_user(username)
        if user:
            # Only the id is passed; the original comment asserted the
            # partitionKey inside the document body is sufficient.
            # NOTE(review): azure-cosmos delete_item normally also requires a
            # partition_key argument — confirm this call succeeds at runtime.
            container.delete_item(item=user['id'])
            logger.info(f"Estudiante eliminado: {username}")
            return True
        else:
            logger.warning(f"Intento de eliminar estudiante no existente: {username}")
            return False
    except Exception as e:
        logger.error(f"Error al eliminar estudiante {username}: {str(e)}")
        return False
|
248 |
+
|
249 |
+
#########################################
|
250 |
+
def store_application_request(name, lastname, email, institution, current_role, desired_role, reason):
    """Store an access-application request document in Cosmos DB.

    Args mirror the request form fields; *email* doubles as the partition key.

    Returns:
        bool: True when the document was created, False otherwise.
    """
    try:
        container = get_container("application_requests")
        if not container:
            logger.error("No se pudo obtener el contenedor de solicitudes")
            return False

        application_request = {
            "id": str(uuid.uuid4()),
            "name": name,
            "lastname": lastname,
            "email": email,
            "institution": institution,
            "current_role": current_role,
            "desired_role": desired_role,
            "reason": reason,
            # Fixed: previously used deprecated, tz-naive datetime.utcnow();
            # every other timestamp in this module is timezone-aware UTC.
            "requestDate": datetime.now(timezone.utc).isoformat(),
            # The partition-key field must live in the document body.
            "partitionKey": email
        }

        # create_item takes only the body; no cross-partition kwarg applies here.
        container.create_item(
            body=application_request
        )
        logger.info(f"Solicitud de aplicación almacenada para: {email}")
        return True

    except Exception as e:
        logger.error(f"Error al almacenar la solicitud de aplicación: {str(e)}")
        logger.error(f"Detalles del error: {str(e)}")
        return False
|
286 |
+
|
287 |
+
|
288 |
+
################################################################
|
289 |
+
def store_student_feedback(username, name, email, feedback):
    """Persist one feedback entry submitted by a student.

    Returns True when the document was created, False otherwise.
    """
    try:
        logger.info(f"Intentando obtener contenedor user_feedback para usuario: {username}")
        container = get_container("user_feedback")
        if not container:
            logger.error("No se pudo obtener el contenedor user_feedback")
            return False

        # username doubles as the Cosmos partition key inside the body.
        feedback_item = {
            "id": str(uuid.uuid4()),
            "username": username,
            "name": name,
            "email": email,
            "feedback": feedback,
            "role": "Estudiante",
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "partitionKey": username,
        }

        logger.info(f"Intentando almacenar feedback para usuario: {username}")
        container.create_item(body=feedback_item)

        logger.info(f"Feedback almacenado exitosamente para el usuario: {username}")
        return True

    except Exception as e:
        logger.error(f"Error al almacenar el feedback del estudiante {username}")
        logger.error(f"Detalles del error: {str(e)}")
        return False
|
modules/database/writing_progress_mongo_db.py
CHANGED
@@ -1,141 +1,141 @@
|
|
1 |
-
# modules/database/writing_progress_mongo_db.py
|
2 |
-
|
3 |
-
from .mongo_db import get_collection, insert_document
|
4 |
-
from datetime import datetime, timezone
|
5 |
-
import logging
|
6 |
-
|
7 |
-
logger = logging.getLogger(__name__)
|
8 |
-
COLLECTION_NAME = 'writing_progress'
|
9 |
-
|
10 |
-
def store_writing_baseline(username, metrics, text):
|
11 |
-
"""
|
12 |
-
Guarda la línea base de escritura de un usuario.
|
13 |
-
Args:
|
14 |
-
username: ID del usuario
|
15 |
-
metrics: Diccionario con métricas iniciales
|
16 |
-
text: Texto analizado
|
17 |
-
"""
|
18 |
-
try:
|
19 |
-
document = {
|
20 |
-
'username': username,
|
21 |
-
'type': 'baseline',
|
22 |
-
'metrics': metrics,
|
23 |
-
'text': text,
|
24 |
-
'timestamp': datetime.now(timezone.utc).isoformat(),
|
25 |
-
'iteration': 0 # Línea base siempre es iteración 0
|
26 |
-
}
|
27 |
-
|
28 |
-
# Verificar si ya existe una línea base
|
29 |
-
collection = get_collection(COLLECTION_NAME)
|
30 |
-
existing = collection.find_one({
|
31 |
-
'username': username,
|
32 |
-
'type': 'baseline'
|
33 |
-
})
|
34 |
-
|
35 |
-
if existing:
|
36 |
-
# Actualizar línea base existente
|
37 |
-
result = collection.update_one(
|
38 |
-
{'_id': existing['_id']},
|
39 |
-
{'$set': document}
|
40 |
-
)
|
41 |
-
success = result.modified_count > 0
|
42 |
-
else:
|
43 |
-
# Insertar nueva línea base
|
44 |
-
result = collection.insert_one(document)
|
45 |
-
success = result.inserted_id is not None
|
46 |
-
|
47 |
-
logger.info(f"Línea base {'actualizada' if existing else 'creada'} para usuario: {username}")
|
48 |
-
return success
|
49 |
-
|
50 |
-
except Exception as e:
|
51 |
-
logger.error(f"Error al guardar línea base: {str(e)}")
|
52 |
-
return False
|
53 |
-
|
54 |
-
def store_writing_progress(username, metrics, text):
|
55 |
-
"""
|
56 |
-
Guarda una nueva iteración de progreso.
|
57 |
-
"""
|
58 |
-
try:
|
59 |
-
# Obtener último número de iteración
|
60 |
-
collection = get_collection(COLLECTION_NAME)
|
61 |
-
last_progress = collection.find_one(
|
62 |
-
{'username': username},
|
63 |
-
sort=[('iteration', -1)]
|
64 |
-
)
|
65 |
-
|
66 |
-
next_iteration = (last_progress['iteration'] + 1) if last_progress else 1
|
67 |
-
|
68 |
-
document = {
|
69 |
-
'username': username,
|
70 |
-
'type': 'progress',
|
71 |
-
'metrics': metrics,
|
72 |
-
'text': text,
|
73 |
-
'timestamp': datetime.now(timezone.utc).isoformat(),
|
74 |
-
'iteration': next_iteration
|
75 |
-
}
|
76 |
-
|
77 |
-
result = collection.insert_one(document)
|
78 |
-
success = result.inserted_id is not None
|
79 |
-
|
80 |
-
if success:
|
81 |
-
logger.info(f"Progreso guardado para {username}, iteración {next_iteration}")
|
82 |
-
|
83 |
-
return success
|
84 |
-
|
85 |
-
except Exception as e:
|
86 |
-
logger.error(f"Error al guardar progreso: {str(e)}")
|
87 |
-
return False
|
88 |
-
|
89 |
-
def get_writing_baseline(username):
|
90 |
-
"""
|
91 |
-
Obtiene la línea base de un usuario.
|
92 |
-
"""
|
93 |
-
try:
|
94 |
-
collection = get_collection(COLLECTION_NAME)
|
95 |
-
return collection.find_one({
|
96 |
-
'username': username,
|
97 |
-
'type': 'baseline'
|
98 |
-
})
|
99 |
-
except Exception as e:
|
100 |
-
logger.error(f"Error al obtener línea base: {str(e)}")
|
101 |
-
return None
|
102 |
-
|
103 |
-
def get_writing_progress(username, limit=None):
|
104 |
-
"""
|
105 |
-
Obtiene el historial de progreso de un usuario.
|
106 |
-
Args:
|
107 |
-
username: ID del usuario
|
108 |
-
limit: Número máximo de registros a retornar
|
109 |
-
"""
|
110 |
-
try:
|
111 |
-
collection = get_collection(COLLECTION_NAME)
|
112 |
-
cursor = collection.find(
|
113 |
-
{
|
114 |
-
'username': username,
|
115 |
-
'type': 'progress'
|
116 |
-
},
|
117 |
-
sort=[('iteration', -1)]
|
118 |
-
)
|
119 |
-
|
120 |
-
if limit:
|
121 |
-
cursor = cursor.limit(limit)
|
122 |
-
|
123 |
-
return list(cursor)
|
124 |
-
|
125 |
-
except Exception as e:
|
126 |
-
logger.error(f"Error al obtener progreso: {str(e)}")
|
127 |
-
return []
|
128 |
-
|
129 |
-
def get_latest_writing_metrics(username):
|
130 |
-
"""
|
131 |
-
Obtiene las métricas más recientes (línea base o progreso).
|
132 |
-
"""
|
133 |
-
try:
|
134 |
-
collection = get_collection(COLLECTION_NAME)
|
135 |
-
return collection.find_one(
|
136 |
-
{'username': username},
|
137 |
-
sort=[('timestamp', -1)]
|
138 |
-
)
|
139 |
-
except Exception as e:
|
140 |
-
logger.error(f"Error al obtener métricas recientes: {str(e)}")
|
141 |
return None
|
|
|
1 |
+
# modules/database/writing_progress_mongo_db.py
|
2 |
+
|
3 |
+
from .mongo_db import get_collection, insert_document
|
4 |
+
from datetime import datetime, timezone
|
5 |
+
import logging
|
6 |
+
|
7 |
+
logger = logging.getLogger(__name__)
|
8 |
+
COLLECTION_NAME = 'writing_progress'
|
9 |
+
|
10 |
+
def store_writing_baseline(username, metrics, text):
    """
    Save (or refresh) a user's writing baseline.

    Args:
        username: User id.
        metrics: Dict of initial metrics.
        text: The analyzed text.

    Returns:
        bool: True when the baseline was created or refreshed, False on error.
    """
    try:
        document = {
            'username': username,
            'type': 'baseline',
            'metrics': metrics,
            'text': text,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'iteration': 0  # the baseline is always iteration 0
        }

        # Check whether a baseline already exists for this user.
        collection = get_collection(COLLECTION_NAME)
        existing = collection.find_one({
            'username': username,
            'type': 'baseline'
        })

        if existing:
            # Refresh the existing baseline in place.
            result = collection.update_one(
                {'_id': existing['_id']},
                {'$set': document}
            )
            # BUG FIX: use matched_count, not modified_count. Re-saving an
            # identical baseline matches the document but modifies nothing,
            # and that must still count as success.
            success = result.matched_count > 0
        else:
            # First baseline for this user.
            result = collection.insert_one(document)
            success = result.inserted_id is not None

        logger.info(f"Línea base {'actualizada' if existing else 'creada'} para usuario: {username}")
        return success

    except Exception as e:
        logger.error(f"Error al guardar línea base: {str(e)}")
        return False
|
53 |
+
|
54 |
+
def store_writing_progress(username, metrics, text):
    """
    Save a new progress iteration for a user.

    The iteration number is one past the highest iteration already stored
    (baseline counts as iteration 0), so the first progress entry is 1.

    Returns:
        bool: True when the document was inserted, False on error.
    """
    try:
        # Find the latest stored iteration (baseline or progress).
        collection = get_collection(COLLECTION_NAME)
        last_progress = collection.find_one(
            {'username': username},
            sort=[('iteration', -1)]
        )

        # ROBUSTNESS FIX: .get() guards against legacy documents that lack an
        # 'iteration' field (the old subscript raised KeyError and the whole
        # save was aborted by the broad except).
        next_iteration = (last_progress.get('iteration', 0) + 1) if last_progress else 1

        document = {
            'username': username,
            'type': 'progress',
            'metrics': metrics,
            'text': text,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'iteration': next_iteration
        }

        result = collection.insert_one(document)
        success = result.inserted_id is not None

        if success:
            logger.info(f"Progreso guardado para {username}, iteración {next_iteration}")

        return success

    except Exception as e:
        logger.error(f"Error al guardar progreso: {str(e)}")
        return False
|
88 |
+
|
89 |
+
def get_writing_baseline(username):
    """
    Fetch the baseline document stored for *username*.

    Returns:
        The baseline document, or None when absent or on error.
    """
    try:
        collection = get_collection(COLLECTION_NAME)
        query = {'username': username, 'type': 'baseline'}
        return collection.find_one(query)
    except Exception as e:
        logger.error(f"Error al obtener línea base: {str(e)}")
        return None
|
102 |
+
|
103 |
+
def get_writing_progress(username, limit=None):
    """
    Fetch a user's progress history, newest iteration first.

    Args:
        username: User id.
        limit: Optional maximum number of records to return.

    Returns:
        list: Progress documents (empty on error).
    """
    try:
        query = {'username': username, 'type': 'progress'}
        collection = get_collection(COLLECTION_NAME)
        cursor = collection.find(query, sort=[('iteration', -1)])

        # A truthy limit caps the cursor; None/0 returns everything.
        if limit:
            cursor = cursor.limit(limit)

        return list(cursor)

    except Exception as e:
        logger.error(f"Error al obtener progreso: {str(e)}")
        return []
|
128 |
+
|
129 |
+
def get_latest_writing_metrics(username):
    """
    Fetch the most recent metrics document for *username*.

    Either the baseline or a progress entry may be returned — whichever has
    the newest timestamp.
    """
    try:
        collection = get_collection(COLLECTION_NAME)
        newest_first = [('timestamp', -1)]
        return collection.find_one({'username': username}, sort=newest_first)
    except Exception as e:
        logger.error(f"Error al obtener métricas recientes: {str(e)}")
        return None
|
modules/discourse/__init__.py
CHANGED
@@ -1,17 +1,17 @@
|
|
1 |
-
# En /modules/discourse/__init__.py
|
2 |
-
|
3 |
-
from ..database.discourse_mongo_db import (
|
4 |
-
store_student_discourse_result,
|
5 |
-
get_student_discourse_analysis,
|
6 |
-
update_student_discourse_analysis,
|
7 |
-
delete_student_discourse_analysis,
|
8 |
-
get_student_discourse_data
|
9 |
-
)
|
10 |
-
|
11 |
-
__all__ = [
|
12 |
-
'store_student_discourse_result',
|
13 |
-
'get_student_discourse_analysis',
|
14 |
-
'update_student_discourse_analysis',
|
15 |
-
'delete_student_discourse_analysis',
|
16 |
-
'get_student_discourse_data'
|
17 |
]
|
|
|
1 |
+
# En /modules/discourse/__init__.py
|
2 |
+
|
3 |
+
from ..database.discourse_mongo_db import (
|
4 |
+
store_student_discourse_result,
|
5 |
+
get_student_discourse_analysis,
|
6 |
+
update_student_discourse_analysis,
|
7 |
+
delete_student_discourse_analysis,
|
8 |
+
get_student_discourse_data
|
9 |
+
)
|
10 |
+
|
11 |
+
__all__ = [
|
12 |
+
'store_student_discourse_result',
|
13 |
+
'get_student_discourse_analysis',
|
14 |
+
'update_student_discourse_analysis',
|
15 |
+
'delete_student_discourse_analysis',
|
16 |
+
'get_student_discourse_data'
|
17 |
]
|
modules/discourse/discourse_interface.py
CHANGED
@@ -1,281 +1,318 @@
|
|
1 |
-
# modules/discourse/discourse/discourse_interface.py
|
2 |
-
|
3 |
-
import streamlit as st
|
4 |
-
import pandas as pd
|
5 |
-
import
|
6 |
-
import
|
7 |
-
|
8 |
-
from .
|
9 |
-
from
|
10 |
-
from ..database.
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
'
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
|
52 |
-
|
53 |
-
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
|
70 |
-
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
st.session_state.
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
|
103 |
-
|
104 |
-
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
112 |
-
|
113 |
-
|
114 |
-
|
115 |
-
|
116 |
-
|
117 |
-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
-
|
122 |
-
|
123 |
-
|
124 |
-
|
125 |
-
|
126 |
-
|
127 |
-
|
128 |
-
|
129 |
-
|
130 |
-
|
131 |
-
|
132 |
-
|
133 |
-
|
134 |
-
|
135 |
-
|
136 |
-
|
137 |
-
|
138 |
-
|
139 |
-
|
140 |
-
|
141 |
-
|
142 |
-
|
143 |
-
|
144 |
-
|
145 |
-
|
146 |
-
|
147 |
-
|
148 |
-
|
149 |
-
|
150 |
-
|
151 |
-
|
152 |
-
|
153 |
-
|
154 |
-
|
155 |
-
|
156 |
-
|
157 |
-
|
158 |
-
|
159 |
-
|
160 |
-
|
161 |
-
|
162 |
-
|
163 |
-
|
164 |
-
|
165 |
-
|
166 |
-
|
167 |
-
|
168 |
-
|
169 |
-
|
170 |
-
|
171 |
-
|
172 |
-
|
173 |
-
|
174 |
-
|
175 |
-
|
176 |
-
|
177 |
-
|
178 |
-
|
179 |
-
|
180 |
-
|
181 |
-
|
182 |
-
|
183 |
-
|
184 |
-
|
185 |
-
|
186 |
-
|
187 |
-
|
188 |
-
st.
|
189 |
-
|
190 |
-
|
191 |
-
|
192 |
-
|
193 |
-
|
194 |
-
f'<span class="concept-
|
195 |
-
|
196 |
-
|
197 |
-
|
198 |
-
|
199 |
-
|
200 |
-
|
201 |
-
|
202 |
-
|
203 |
-
|
204 |
-
|
205 |
-
|
206 |
-
|
207 |
-
|
208 |
-
|
209 |
-
|
210 |
-
|
211 |
-
|
212 |
-
|
213 |
-
|
214 |
-
|
215 |
-
|
216 |
-
|
217 |
-
|
218 |
-
|
219 |
-
|
220 |
-
|
221 |
-
|
222 |
-
|
223 |
-
|
224 |
-
|
225 |
-
|
226 |
-
|
227 |
-
|
228 |
-
|
229 |
-
|
230 |
-
|
231 |
-
|
232 |
-
|
233 |
-
|
234 |
-
|
235 |
-
|
236 |
-
|
237 |
-
|
238 |
-
|
239 |
-
|
240 |
-
|
241 |
-
|
242 |
-
|
243 |
-
|
244 |
-
|
245 |
-
|
246 |
-
|
247 |
-
|
248 |
-
|
249 |
-
|
250 |
-
|
251 |
-
|
252 |
-
|
253 |
-
|
254 |
-
|
255 |
-
|
256 |
-
|
257 |
-
|
258 |
-
|
259 |
-
|
260 |
-
|
261 |
-
|
262 |
-
|
263 |
-
|
264 |
-
|
265 |
-
|
266 |
-
|
267 |
-
|
268 |
-
|
269 |
-
|
270 |
-
|
271 |
-
|
272 |
-
|
273 |
-
|
274 |
-
|
275 |
-
|
276 |
-
|
277 |
-
|
278 |
-
|
279 |
-
|
280 |
-
|
281 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# modules/discourse/discourse/discourse_interface.py
|
2 |
+
|
3 |
+
import streamlit as st
|
4 |
+
import pandas as pd
|
5 |
+
import matplotlib.pyplot as plt
|
6 |
+
import plotly.graph_objects as go
|
7 |
+
import logging
|
8 |
+
from ..utils.widget_utils import generate_unique_key
|
9 |
+
from .discourse_process import perform_discourse_analysis
|
10 |
+
from ..database.chat_mongo_db import store_chat_history
|
11 |
+
from ..database.discourse_mongo_db import store_student_discourse_result
|
12 |
+
|
13 |
+
logger = logging.getLogger(__name__)
|
14 |
+
|
15 |
+
#############################################################################################
|
16 |
+
def display_discourse_interface(lang_code, nlp_models, discourse_t):
    """
    Render the file-upload based discourse-analysis interface.

    Lets the user upload two .txt files, runs a comparative discourse
    analysis on them, persists the result, and renders it. Previous results
    kept in st.session_state are re-rendered on rerun.

    Args:
        lang_code: Current language code (key into nlp_models).
        nlp_models: Loaded spaCy models, keyed by language code.
        discourse_t: Translation dictionary for UI strings.
    """
    try:
        # 1. Initialize per-session state on first run.
        if 'discourse_state' not in st.session_state:
            st.session_state.discourse_state = {
                'analysis_count': 0,
                'last_analysis': None,
                'current_files': None
            }

        # 2. Title and description.
        # st.subheader(discourse_t.get('discourse_title', 'Análisis del Discurso'))
        st.info(discourse_t.get('initial_instruction',
            'Cargue dos archivos de texto para realizar un análisis comparativo del discurso.'))

        # 3. File-upload area (two columns, one per document).
        # NOTE: analysis_count is baked into the uploader keys so that a new
        # analysis forces fresh uploader widgets.
        col1, col2 = st.columns(2)
        with col1:
            st.markdown(discourse_t.get('file1_label', "**Documento 1 (Patrón)**"))
            uploaded_file1 = st.file_uploader(
                discourse_t.get('file_uploader1', "Cargar archivo 1"),
                type=['txt'],
                key=f"discourse_file1_{st.session_state.discourse_state['analysis_count']}"
            )

        with col2:
            st.markdown(discourse_t.get('file2_label', "**Documento 2 (Comparación)**"))
            uploaded_file2 = st.file_uploader(
                discourse_t.get('file_uploader2', "Cargar archivo 2"),
                type=['txt'],
                key=f"discourse_file2_{st.session_state.discourse_state['analysis_count']}"
            )

        # 4. Analyze button — disabled until both files are present.
        col1, col2, col3 = st.columns([1,2,1])
        with col1:
            analyze_button = st.button(
                discourse_t.get('discourse_analyze_button', 'Comparar textos'),
                key=generate_unique_key("discourse", "analyze_button"),
                type="primary",
                icon="🔍",
                disabled=not (uploaded_file1 and uploaded_file2),
                use_container_width=True
            )

        # 5. Run the analysis.
        if analyze_button and uploaded_file1 and uploaded_file2:
            try:
                with st.spinner(discourse_t.get('processing', 'Procesando análisis...')):
                    # Read uploaded file contents.
                    # NOTE(review): assumes UTF-8 encoded uploads — a decode
                    # failure falls through to the error handler below.
                    text1 = uploaded_file1.getvalue().decode('utf-8')
                    text2 = uploaded_file2.getvalue().decode('utf-8')

                    # Perform the comparative analysis.
                    result = perform_discourse_analysis(
                        text1,
                        text2,
                        nlp_models[lang_code],
                        lang_code
                    )

                    if result['success']:
                        # Persist the result and the file names in session state.
                        st.session_state.discourse_result = result
                        st.session_state.discourse_state['analysis_count'] += 1
                        st.session_state.discourse_state['current_files'] = (
                            uploaded_file1.name,
                            uploaded_file2.name
                        )

                        # Store in the database; results are only rendered when
                        # the save succeeds.
                        if store_student_discourse_result(
                            st.session_state.username,
                            text1,
                            text2,
                            result
                        ):
                            st.success(discourse_t.get('success_message', 'Análisis guardado correctamente'))

                            # Render the freshly computed results.
                            display_discourse_results(result, lang_code, discourse_t)
                        else:
                            st.error(discourse_t.get('error_message', 'Error al guardar el análisis'))
                    else:
                        st.error(discourse_t.get('analysis_error', 'Error en el análisis'))

            except Exception as e:
                logger.error(f"Error en análisis del discurso: {str(e)}")
                st.error(discourse_t.get('error_processing', f'Error procesando archivos: {str(e)}'))

        # 6. No new analysis requested: re-render previous results if any.
        elif 'discourse_result' in st.session_state and st.session_state.discourse_result is not None:
            if st.session_state.discourse_state.get('current_files'):
                st.info(
                    discourse_t.get('current_analysis_message', 'Mostrando análisis de los archivos: {} y {}')
                    .format(*st.session_state.discourse_state['current_files'])
                )
            display_discourse_results(
                st.session_state.discourse_result,
                lang_code,
                discourse_t
            )

    except Exception as e:
        logger.error(f"Error general en interfaz del discurso: {str(e)}")
        st.error(discourse_t.get('general_error', 'Se produjo un error. Por favor, intente de nuevo.'))
|
129 |
+
|
130 |
+
|
131 |
+
|
132 |
+
#####################################################################################################################
|
133 |
+
def display_discourse_results(result, lang_code, discourse_t):
    """
    Render the discourse-analysis results: key concepts and the concept
    graph for each of the two compared documents, side by side.

    Args:
        result: Dict produced by perform_discourse_analysis. Expected keys:
            'success', 'key_concepts1'/'key_concepts2' (iterables of
            (concept, freq) pairs), 'graph1'/'graph2' (PNG bytes or a
            matplotlib Figure) and optionally 'graph1_bytes'/'graph2_bytes'.
        lang_code: Current language code (kept for interface parity).
        discourse_t: Translation dictionary for UI strings.
    """
    if not result.get('success'):
        st.warning(discourse_t.get('no_results', 'No hay resultados disponibles'))
        return

    # Shared CSS for both document columns.
    st.markdown("""
    <style>
        .concepts-container {
            display: flex;
            flex-wrap: nowrap;
            gap: 8px;
            padding: 12px;
            background-color: #f8f9fa;
            border-radius: 8px;
            overflow-x: auto;
            margin-bottom: 15px;
            white-space: nowrap;
        }
        .concept-item {
            background-color: white;
            border-radius: 4px;
            padding: 6px 10px;
            display: inline-flex;
            align-items: center;
            gap: 4px;
            box-shadow: 0 1px 2px rgba(0,0,0,0.1);
            flex-shrink: 0;
        }
        .concept-name {
            font-weight: 500;
            color: #1f2937;
            font-size: 0.85em;
        }
        .concept-freq {
            color: #6b7280;
            font-size: 0.75em;
        }
        .graph-container {
            background-color: white;
            padding: 15px;
            border-radius: 8px;
            box-shadow: 0 2px 4px rgba(0,0,0,0.1);
            margin-top: 10px;
        }
    </style>
    """, unsafe_allow_html=True)

    col1, col2 = st.columns(2)

    # Both columns share the same layout; the per-document logic lives in
    # _render_document_analysis (this also fixes the original copy/paste bug
    # where column 2 tested for 'graph1' but read result['graph2']).
    with col1:
        st.subheader(discourse_t.get('doc1_title', 'Documento 1'))
        _render_document_analysis(result, 1, discourse_t)

    with col2:
        st.subheader(discourse_t.get('doc2_title', 'Documento 2'))
        _render_document_analysis(result, 2, discourse_t)

    # Informational note about the upcoming detailed comparison feature.
    st.info(discourse_t.get('comparison_note',
        'La funcionalidad de comparación detallada estará disponible en una próxima actualización.'))


def _render_document_analysis(result, doc_number, discourse_t):
    """Render the key concepts and concept graph for one document (1 or 2)."""
    concepts_key = f'key_concepts{doc_number}'
    graph_key = f'graph{doc_number}'

    st.markdown(discourse_t.get('key_concepts', 'Conceptos Clave'))
    if concepts_key not in result:
        st.warning(discourse_t.get('concepts_not_available', 'Conceptos no disponibles'))
        return

    # Horizontal strip of concept chips with their frequencies.
    concepts_html = f"""
    <div class="concepts-container">
        {''.join([
            f'<div class="concept-item"><span class="concept-name">{concept}</span>'
            f'<span class="concept-freq">({freq:.2f})</span></div>'
            for concept, freq in result[concepts_key]
        ])}
    </div>
    """
    st.markdown(concepts_html, unsafe_allow_html=True)

    if graph_key not in result:
        st.warning(discourse_t.get('graph_not_available', 'Gráfico no disponible'))
        return

    st.markdown('<div class="graph-container">', unsafe_allow_html=True)

    # Log the graph's type/size for debugging; the graph may arrive either
    # as raw PNG bytes or as a live matplotlib Figure.
    graph = result[graph_key]
    graph_type = type(graph).__name__
    graph_size = len(graph) if isinstance(graph, bytes) else "N/A"
    logger.info(f"Tipo de {graph_key}: {graph_type}, Tamaño: {graph_size}")

    if isinstance(graph, bytes) and len(graph) > 0:
        # Valid PNG bytes.
        st.image(graph)
    elif isinstance(graph, plt.Figure):
        # Live matplotlib figure.
        st.pyplot(graph)
    elif graph is None:
        st.warning("Gráfico no disponible")
    else:
        # Unknown type or empty bytes.
        st.warning(f"Formato de gráfico no reconocido: {graph_type}")

    # Download button (only when pre-rendered bytes are available).
    button_col, _spacer_col = st.columns([1, 4])
    with button_col:
        if f'{graph_key}_bytes' in result:
            st.download_button(
                label="📥 " + discourse_t.get('download_graph', "Download"),
                data=result[f'{graph_key}_bytes'],
                file_name=f"discourse_graph{doc_number}.png",
                mime="image/png",
                use_container_width=True
            )

    # Plain-text interpretation guide (no expander).
    st.markdown("**📊 Interpretación del grafo:**")
    st.markdown("""
    - 🔀 Las flechas indican la dirección de la relación entre conceptos
    - 🎨 Los colores más intensos indican conceptos más centrales en el texto
    - ⭕ El tamaño de los nodos representa la frecuencia del concepto
    - ↔️ El grosor de las líneas indica la fuerza de la conexión
    """)

    st.markdown('</div>', unsafe_allow_html=True)
|
modules/discourse/discourse_live_interface.py
CHANGED
@@ -1,151 +1,151 @@
|
|
1 |
-
# modules/discourse/discourse/discourse_live_interface.py
|
2 |
-
|
3 |
-
import streamlit as st
|
4 |
-
from streamlit_float import *
|
5 |
-
from streamlit_antd_components import *
|
6 |
-
import pandas as pd
|
7 |
-
import logging
|
8 |
-
import io
|
9 |
-
import matplotlib.pyplot as plt
|
10 |
-
|
11 |
-
# Configuración del logger
|
12 |
-
logger = logging.getLogger(__name__)
|
13 |
-
|
14 |
-
# Importaciones locales
|
15 |
-
from .discourse_process import perform_discourse_analysis
|
16 |
-
from .discourse_interface import display_discourse_results # Añadida esta importación
|
17 |
-
from ..utils.widget_utils import generate_unique_key
|
18 |
-
from ..database.discourse_mongo_db import store_student_discourse_result
|
19 |
-
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
20 |
-
|
21 |
-
|
22 |
-
#####################################################################################################
|
23 |
-
def fig_to_bytes(fig):
    """Render a matplotlib figure and return its PNG representation as bytes.

    Returns None when rendering fails (the error is logged).
    """
    try:
        buffer = io.BytesIO()
        fig.savefig(buffer, format='png', dpi=300, bbox_inches='tight')
        buffer.seek(0)
        return buffer.getvalue()
    except Exception as e:
        logger.error(f"Error en fig_to_bytes: {str(e)}")
        return None
|
33 |
-
|
34 |
-
#################################################################################################
|
35 |
-
def display_discourse_live_interface(lang_code, nlp_models, discourse_t):
    """
    Render the live (text-area based) discourse-analysis interface.

    The user types/pastes two texts side by side; on demand the comparative
    analysis runs, figures are converted to PNG bytes, the result is stored
    and rendered. Previous results held in session state are re-rendered on
    rerun.

    Args:
        lang_code: Current language code (key into nlp_models).
        nlp_models: Loaded spaCy models, keyed by language code.
        discourse_t: Translation dictionary for UI strings.
    """
    try:
        # Initialize per-session state on first run.
        if 'discourse_live_state' not in st.session_state:
            st.session_state.discourse_live_state = {
                'analysis_count': 0,
                'current_text1': '',
                'current_text2': '',
                'last_result': None,
                'text_changed': False
            }

        # Title
        st.subheader(discourse_t.get('enter_text', 'Ingrese sus textos'))

        # Text-entry area: two side-by-side columns.
        text_col1, text_col2 = st.columns(2)

        # Text 1 (pattern). The widget value round-trips through session
        # state so the text survives reruns.
        with text_col1:
            st.markdown("**Texto 1 (Patrón)**")
            text_input1 = st.text_area(
                "Texto 1",
                height=200,
                key="discourse_live_text1",
                value=st.session_state.discourse_live_state.get('current_text1', ''),
                label_visibility="collapsed"
            )
            st.session_state.discourse_live_state['current_text1'] = text_input1

        # Text 2 (comparison).
        with text_col2:
            st.markdown("**Texto 2 (Comparación)**")
            text_input2 = st.text_area(
                "Texto 2",
                height=200,
                key="discourse_live_text2",
                value=st.session_state.discourse_live_state.get('current_text2', ''),
                label_visibility="collapsed"
            )
            st.session_state.discourse_live_state['current_text2'] = text_input2

        # Analyze button — disabled until both texts are non-empty.
        col1, col2, col3 = st.columns([1,2,1])
        with col1:
            analyze_button = st.button(
                discourse_t.get('analyze_button', 'Analizar'),
                key="discourse_live_analyze",
                type="primary",
                icon="🔍",
                disabled=not (text_input1 and text_input2),
                use_container_width=True
            )

        # Run the analysis and render the results.
        if analyze_button and text_input1 and text_input2:
            try:
                with st.spinner(discourse_t.get('processing', 'Procesando...')):
                    result = perform_discourse_analysis(
                        text_input1,
                        text_input2,
                        nlp_models[lang_code],
                        lang_code
                    )

                    if result['success']:
                        # Convert both figures to PNG bytes (graphN_bytes) and
                        # close the matplotlib figures to free memory.
                        for graph_key in ['graph1', 'graph2']:
                            if graph_key in result and result[graph_key] is not None:
                                bytes_key = f'{graph_key}_bytes'
                                graph_bytes = fig_to_bytes(result[graph_key])
                                if graph_bytes:
                                    result[bytes_key] = graph_bytes
                                plt.close(result[graph_key])

                        st.session_state.discourse_live_state['last_result'] = result
                        st.session_state.discourse_live_state['analysis_count'] += 1

                        # Persist; NOTE(review): the return value is not
                        # checked here (unlike the file-upload interface).
                        store_student_discourse_result(
                            st.session_state.username,
                            text_input1,
                            text_input2,
                            result
                        )

                        # Render the freshly computed results.
                        st.markdown("---")
                        st.subheader(discourse_t.get('results_title', 'Resultados del Análisis'))
                        display_discourse_results(result, lang_code, discourse_t)

                    else:
                        st.error(result.get('message', 'Error en el análisis'))

            except Exception as e:
                logger.error(f"Error en análisis: {str(e)}")
                st.error(discourse_t.get('error_processing', f'Error al procesar el texto: {str(e)}'))

        # No new analysis requested: re-render previous results if any.
        elif 'last_result' in st.session_state.discourse_live_state and \
             st.session_state.discourse_live_state['last_result'] is not None:

            st.markdown("---")
            st.subheader(discourse_t.get('previous_results', 'Resultados del Análisis Anterior'))
            display_discourse_results(
                st.session_state.discourse_live_state['last_result'],
                lang_code,
                discourse_t
            )

    except Exception as e:
        logger.error(f"Error general en interfaz del discurso en vivo: {str(e)}")
        st.error(discourse_t.get('general_error', "Se produjo un error. Por favor, intente de nuevo."))
|
149 |
-
|
150 |
-
|
151 |
-
|
|
|
1 |
+
# modules/discourse/discourse/discourse_live_interface.py
|
2 |
+
|
3 |
+
import streamlit as st
|
4 |
+
from streamlit_float import *
|
5 |
+
from streamlit_antd_components import *
|
6 |
+
import pandas as pd
|
7 |
+
import logging
|
8 |
+
import io
|
9 |
+
import matplotlib.pyplot as plt
|
10 |
+
|
11 |
+
# Configuración del logger
|
12 |
+
logger = logging.getLogger(__name__)
|
13 |
+
|
14 |
+
# Importaciones locales
|
15 |
+
from .discourse_process import perform_discourse_analysis
|
16 |
+
from .discourse_interface import display_discourse_results # Añadida esta importación
|
17 |
+
from ..utils.widget_utils import generate_unique_key
|
18 |
+
from ..database.discourse_mongo_db import store_student_discourse_result
|
19 |
+
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
20 |
+
|
21 |
+
|
22 |
+
#####################################################################################################
|
23 |
+
def fig_to_bytes(fig):
    """Serialize a matplotlib figure into PNG image bytes.

    Args:
        fig: a matplotlib figure (anything exposing ``savefig``).

    Returns:
        bytes: the PNG-encoded image on success, or ``None`` if saving failed
        (the error is logged rather than raised).
    """
    try:
        # Render into an in-memory buffer; the context manager releases it.
        with io.BytesIO() as buffer:
            fig.savefig(buffer, format='png', dpi=300, bbox_inches='tight')
            buffer.seek(0)
            return buffer.getvalue()
    except Exception as e:
        logger.error(f"Error en fig_to_bytes: {str(e)}")
        return None
|
33 |
+
|
34 |
+
#################################################################################################
|
35 |
+
def display_discourse_live_interface(lang_code, nlp_models, discourse_t):
    """
    Live discourse-analysis interface with an improved two-column layout.

    Args:
        lang_code: active language code; selects the spaCy model from nlp_models.
        nlp_models: dict of loaded spaCy models keyed by language code.
        discourse_t: translation dict for UI strings (Spanish fallbacks inline).
    """
    try:
        # Initialize per-session state on first run so widget values and the
        # last analysis survive Streamlit reruns.
        if 'discourse_live_state' not in st.session_state:
            st.session_state.discourse_live_state = {
                'analysis_count': 0,
                'current_text1': '',
                'current_text2': '',
                'last_result': None,
                'text_changed': False
            }

        # Section title
        st.subheader(discourse_t.get('enter_text', 'Ingrese sus textos'))

        # Two side-by-side text inputs
        text_col1, text_col2 = st.columns(2)

        # Text 1 (pattern/reference document)
        with text_col1:
            st.markdown("**Texto 1 (Patrón)**")
            text_input1 = st.text_area(
                "Texto 1",
                height=200,
                key="discourse_live_text1",
                value=st.session_state.discourse_live_state.get('current_text1', ''),
                label_visibility="collapsed"
            )
            st.session_state.discourse_live_state['current_text1'] = text_input1

        # Text 2 (comparison document)
        with text_col2:
            st.markdown("**Texto 2 (Comparación)**")
            text_input2 = st.text_area(
                "Texto 2",
                height=200,
                key="discourse_live_text2",
                value=st.session_state.discourse_live_state.get('current_text2', ''),
                label_visibility="collapsed"
            )
            st.session_state.discourse_live_state['current_text2'] = text_input2

        # Analysis button. NOTE(review): the original comment said "centered",
        # but the button is placed in col1 (left) — confirm intended placement.
        col1, col2, col3 = st.columns([1,2,1])
        with col1:
            analyze_button = st.button(
                discourse_t.get('analyze_button', 'Analizar'),
                key="discourse_live_analyze",
                type="primary",
                icon="🔍",
                # Enabled only when both texts are non-empty
                disabled=not (text_input1 and text_input2),
                use_container_width=True
            )

        # Run the analysis and show results
        if analyze_button and text_input1 and text_input2:
            try:
                with st.spinner(discourse_t.get('processing', 'Procesando...')):
                    result = perform_discourse_analysis(
                        text_input1,
                        text_input2,
                        nlp_models[lang_code],
                        lang_code
                    )

                    if result['success']:
                        # Serialize both matplotlib graphs into PNG bytes so they
                        # can be stored and re-rendered, then close the figures
                        # to free memory.
                        for graph_key in ['graph1', 'graph2']:
                            if graph_key in result and result[graph_key] is not None:
                                bytes_key = f'{graph_key}_bytes'
                                graph_bytes = fig_to_bytes(result[graph_key])
                                if graph_bytes:
                                    result[bytes_key] = graph_bytes
                                    plt.close(result[graph_key])

                        st.session_state.discourse_live_state['last_result'] = result
                        st.session_state.discourse_live_state['analysis_count'] += 1

                        # Persist the analysis for the logged-in student
                        store_student_discourse_result(
                            st.session_state.username,
                            text_input1,
                            text_input2,
                            result
                        )

                        # Render the fresh results
                        st.markdown("---")
                        st.subheader(discourse_t.get('results_title', 'Resultados del Análisis'))
                        display_discourse_results(result, lang_code, discourse_t)

                    else:
                        st.error(result.get('message', 'Error en el análisis'))

            except Exception as e:
                logger.error(f"Error en análisis: {str(e)}")
                st.error(discourse_t.get('error_processing', f'Error al procesar el texto: {str(e)}'))

        # Otherwise, show the previous analysis (if any) so results survive reruns
        elif 'last_result' in st.session_state.discourse_live_state and \
             st.session_state.discourse_live_state['last_result'] is not None:

            st.markdown("---")
            st.subheader(discourse_t.get('previous_results', 'Resultados del Análisis Anterior'))
            display_discourse_results(
                st.session_state.discourse_live_state['last_result'],
                lang_code,
                discourse_t
            )

    except Exception as e:
        logger.error(f"Error general en interfaz del discurso en vivo: {str(e)}")
        st.error(discourse_t.get('general_error', "Se produjo un error. Por favor, intente de nuevo."))
|
149 |
+
|
150 |
+
|
151 |
+
|
modules/discourse/discourse_process.py
CHANGED
@@ -1,68 +1,68 @@
|
|
1 |
-
from ..text_analysis.discourse_analysis import perform_discourse_analysis, compare_semantic_analysis
|
2 |
-
import streamlit as st
|
3 |
-
|
4 |
-
def process_discourse_input(text1, text2, nlp_models, lang_code):
|
5 |
-
"""
|
6 |
-
Procesa la entrada para el análisis del discurso
|
7 |
-
Args:
|
8 |
-
text1: Texto del primer documento
|
9 |
-
text2: Texto del segundo documento
|
10 |
-
nlp_models: Diccionario de modelos de spaCy
|
11 |
-
lang_code: Código del idioma actual
|
12 |
-
Returns:
|
13 |
-
dict: Resultados del análisis
|
14 |
-
"""
|
15 |
-
try:
|
16 |
-
# Obtener el modelo específico del idioma
|
17 |
-
nlp = nlp_models[lang_code]
|
18 |
-
|
19 |
-
# Realizar el análisis
|
20 |
-
analysis_result = perform_discourse_analysis(text1, text2, nlp, lang_code)
|
21 |
-
|
22 |
-
if analysis_result['success']:
|
23 |
-
return {
|
24 |
-
'success': True,
|
25 |
-
'analysis': analysis_result
|
26 |
-
}
|
27 |
-
else:
|
28 |
-
return {
|
29 |
-
'success': False,
|
30 |
-
'error': 'Error en el análisis del discurso'
|
31 |
-
}
|
32 |
-
|
33 |
-
except Exception as e:
|
34 |
-
logger.error(f"Error en process_discourse_input: {str(e)}")
|
35 |
-
return {
|
36 |
-
'success': False,
|
37 |
-
'error': str(e)
|
38 |
-
}
|
39 |
-
|
40 |
-
def format_discourse_results(result):
|
41 |
-
"""
|
42 |
-
Formatea los resultados del análisis para su visualización
|
43 |
-
Args:
|
44 |
-
result: Resultado del análisis
|
45 |
-
Returns:
|
46 |
-
dict: Resultados formateados
|
47 |
-
"""
|
48 |
-
try:
|
49 |
-
if not result['success']:
|
50 |
-
return result
|
51 |
-
|
52 |
-
analysis = result['analysis']
|
53 |
-
return {
|
54 |
-
'success': True,
|
55 |
-
'graph1': analysis['graph1'],
|
56 |
-
'graph2': analysis['graph2'],
|
57 |
-
'key_concepts1': analysis['key_concepts1'],
|
58 |
-
'key_concepts2': analysis['key_concepts2'],
|
59 |
-
'table1': analysis['table1'],
|
60 |
-
'table2': analysis['table2']
|
61 |
-
}
|
62 |
-
|
63 |
-
except Exception as e:
|
64 |
-
logger.error(f"Error en format_discourse_results: {str(e)}")
|
65 |
-
return {
|
66 |
-
'success': False,
|
67 |
-
'error': str(e)
|
68 |
}
|
|
|
1 |
+
from ..text_analysis.discourse_analysis import perform_discourse_analysis, compare_semantic_analysis
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
def process_discourse_input(text1, text2, nlp_models, lang_code):
    """
    Process the input for discourse analysis.

    Args:
        text1: Text of the first document
        text2: Text of the second document
        nlp_models: Dictionary of spaCy models keyed by language code
        lang_code: Current language code

    Returns:
        dict: ``{'success': True, 'analysis': <result>}`` on success, or
              ``{'success': False, 'error': <message>}`` on failure.
    """
    try:
        # Pick the language-specific model (a missing lang_code raises
        # KeyError, which falls through to the handler below).
        nlp = nlp_models[lang_code]

        # Run the analysis
        analysis_result = perform_discourse_analysis(text1, text2, nlp, lang_code)

        if analysis_result['success']:
            return {
                'success': True,
                'analysis': analysis_result
            }
        else:
            return {
                'success': False,
                'error': 'Error en el análisis del discurso'
            }

    except Exception as e:
        # Bug fix: 'logger' was referenced here but never defined in this
        # module, so any failure raised NameError instead of returning the
        # error dict. Obtain a module-named logger locally instead.
        import logging
        logging.getLogger(__name__).error(f"Error en process_discourse_input: {str(e)}")
        return {
            'success': False,
            'error': str(e)
        }
|
39 |
+
|
40 |
+
def format_discourse_results(result):
    """
    Format the analysis results for display.

    Args:
        result: Result dict from ``process_discourse_input``
                (expects 'success' and, on success, an 'analysis' mapping).

    Returns:
        dict: Flattened ``{'success': True, 'graph1', 'graph2',
              'key_concepts1', 'key_concepts2', 'table1', 'table2'}`` on
              success; the input unchanged when it already reports failure;
              or ``{'success': False, 'error': <message>}`` if a key is missing.
    """
    try:
        # A failed analysis is passed through untouched.
        if not result['success']:
            return result

        analysis = result['analysis']
        return {
            'success': True,
            'graph1': analysis['graph1'],
            'graph2': analysis['graph2'],
            'key_concepts1': analysis['key_concepts1'],
            'key_concepts2': analysis['key_concepts2'],
            'table1': analysis['table1'],
            'table2': analysis['table2']
        }

    except Exception as e:
        # Bug fix: 'logger' was referenced here but never defined in this
        # module (NameError on any failure). Use a module-named logger
        # obtained locally.
        import logging
        logging.getLogger(__name__).error(f"Error en format_discourse_results: {str(e)}")
        return {
            'success': False,
            'error': str(e)
        }
|
modules/morphosyntax/__init__.py
CHANGED
@@ -1,29 +1,29 @@
|
|
1 |
-
from .morphosyntax_interface import (
|
2 |
-
display_morphosyntax_interface,
|
3 |
-
display_arc_diagram
|
4 |
-
# display_morphosyntax_results
|
5 |
-
)
|
6 |
-
|
7 |
-
from .morphosyntax_process import (
|
8 |
-
process_morphosyntactic_input,
|
9 |
-
format_analysis_results,
|
10 |
-
perform_advanced_morphosyntactic_analysis,
|
11 |
-
get_repeated_words_colors,
|
12 |
-
highlight_repeated_words,
|
13 |
-
POS_COLORS,
|
14 |
-
POS_TRANSLATIONS
|
15 |
-
)
|
16 |
-
|
17 |
-
__all__ = [
|
18 |
-
'display_morphosyntax_interface',
|
19 |
-
'display_arc_diagram',
|
20 |
-
#'display_morphosyntax_results',
|
21 |
-
'process_morphosyntactic_input',
|
22 |
-
'format_analysis_results',
|
23 |
-
'perform_advanced_morphosyntactic_analysis',
|
24 |
-
'get_repeated_words_colors',
|
25 |
-
'highlight_repeated_words',
|
26 |
-
'POS_COLORS',
|
27 |
-
'POS_TRANSLATIONS'
|
28 |
-
]
|
29 |
-
|
|
|
1 |
+
from .morphosyntax_interface import (
|
2 |
+
display_morphosyntax_interface,
|
3 |
+
display_arc_diagram
|
4 |
+
# display_morphosyntax_results
|
5 |
+
)
|
6 |
+
|
7 |
+
from .morphosyntax_process import (
|
8 |
+
process_morphosyntactic_input,
|
9 |
+
format_analysis_results,
|
10 |
+
perform_advanced_morphosyntactic_analysis,
|
11 |
+
get_repeated_words_colors,
|
12 |
+
highlight_repeated_words,
|
13 |
+
POS_COLORS,
|
14 |
+
POS_TRANSLATIONS
|
15 |
+
)
|
16 |
+
|
17 |
+
__all__ = [
|
18 |
+
'display_morphosyntax_interface',
|
19 |
+
'display_arc_diagram',
|
20 |
+
#'display_morphosyntax_results',
|
21 |
+
'process_morphosyntactic_input',
|
22 |
+
'format_analysis_results',
|
23 |
+
'perform_advanced_morphosyntactic_analysis',
|
24 |
+
'get_repeated_words_colors',
|
25 |
+
'highlight_repeated_words',
|
26 |
+
'POS_COLORS',
|
27 |
+
'POS_TRANSLATIONS'
|
28 |
+
]
|
29 |
+
|
modules/morphosyntax/morphosyntax_interface-BackUp_Dec24_OK.py
CHANGED
@@ -1,322 +1,322 @@
|
|
1 |
-
#modules/morphosyntax/morphosyntax_interface.py
|
2 |
-
import streamlit as st
|
3 |
-
from streamlit_float import *
|
4 |
-
from streamlit_antd_components import *
|
5 |
-
from streamlit.components.v1 import html
|
6 |
-
import spacy
|
7 |
-
from spacy import displacy
|
8 |
-
import spacy_streamlit
|
9 |
-
import pandas as pd
|
10 |
-
import base64
|
11 |
-
import re
|
12 |
-
|
13 |
-
# Importar desde morphosyntax_process.py
|
14 |
-
from .morphosyntax_process import (
|
15 |
-
process_morphosyntactic_input,
|
16 |
-
format_analysis_results,
|
17 |
-
perform_advanced_morphosyntactic_analysis, # Añadir esta importación
|
18 |
-
get_repeated_words_colors, # Y estas también
|
19 |
-
highlight_repeated_words,
|
20 |
-
POS_COLORS,
|
21 |
-
POS_TRANSLATIONS
|
22 |
-
)
|
23 |
-
|
24 |
-
from ..utils.widget_utils import generate_unique_key
|
25 |
-
|
26 |
-
from ..database.morphosintax_mongo_db import store_student_morphosyntax_result
|
27 |
-
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
28 |
-
|
29 |
-
# from ..database.morphosintaxis_export import export_user_interactions
|
30 |
-
|
31 |
-
import logging
|
32 |
-
logger = logging.getLogger(__name__)
|
33 |
-
|
34 |
-
############################################################################################################
|
35 |
-
def display_morphosyntax_interface(lang_code, nlp_models, morpho_t):
    """
    Morphosyntactic analysis interface: text input, analyze button, results.

    Args:
        lang_code: active language code; selects the spaCy model.
        nlp_models: dict of loaded spaCy models keyed by language code.
        morpho_t: translation dict for UI strings (English fallbacks inline).
    """
    try:
        # 1. Initialize the morphosyntax state on first run
        if 'morphosyntax_state' not in st.session_state:
            st.session_state.morphosyntax_state = {
                'input_text': "",
                'analysis_count': 0,
                'last_analysis': None
            }

        # 2. Text input whose widget key changes with the analysis counter,
        # so a fresh widget is created after each completed analysis.
        input_key = f"morpho_input_{st.session_state.morphosyntax_state['analysis_count']}"

        sentence_input = st.text_area(
            morpho_t.get('morpho_input_label', 'Enter text to analyze'),
            height=150,
            placeholder=morpho_t.get('morpho_input_placeholder', 'Enter your text here...'),
            key=input_key
        )

        # 3. Keep session state in sync with the current text
        st.session_state.morphosyntax_state['input_text'] = sentence_input

        # 4. Columns for the button row
        col1, col2, col3 = st.columns([2,1,2])

        # 5. Analysis button. NOTE(review): the original comment said "central
        # column" but the button is placed in col1 — confirm intended layout.
        with col1:
            analyze_button = st.button(
                morpho_t.get('morpho_analyze_button', 'Analyze Morphosyntax'),
                key=f"morpho_button_{st.session_state.morphosyntax_state['analysis_count']}",
                type="primary",  # New in Streamlit 1.39.0
                icon="🔍",  # New in Streamlit 1.39.0
                disabled=not bool(sentence_input.strip()),  # Enabled only when there is text
                use_container_width=True
            )

        # 6. Analysis logic
        if analyze_button and sentence_input.strip():  # require non-whitespace input
            try:
                with st.spinner(morpho_t.get('processing', 'Processing...')):
                    # Process the text with the language-specific model
                    doc = nlp_models[lang_code](sentence_input)

                    # Run the morphosyntactic analysis with the same model
                    advanced_analysis = perform_advanced_morphosyntactic_analysis(
                        sentence_input,
                        nlp_models[lang_code]
                    )

                    # Keep the result in session state so it survives reruns
                    st.session_state.morphosyntax_result = {
                        'doc': doc,
                        'advanced_analysis': advanced_analysis
                    }

                    # Bump the counter (also rotates the widget keys above)
                    st.session_state.morphosyntax_state['analysis_count'] += 1

                    # Persist the analysis; show results only if saving succeeded
                    if store_student_morphosyntax_result(
                        username=st.session_state.username,
                        text=sentence_input,
                        arc_diagrams=advanced_analysis['arc_diagrams']
                    ):
                        st.success(morpho_t.get('success_message', 'Analysis saved successfully'))

                        # Render the fresh results
                        display_morphosyntax_results(
                            st.session_state.morphosyntax_result,
                            lang_code,
                            morpho_t
                        )
                    else:
                        st.error(morpho_t.get('error_message', 'Error saving analysis'))

            except Exception as e:
                logger.error(f"Error en análisis morfosintáctico: {str(e)}")
                st.error(morpho_t.get('error_processing', f'Error processing text: {str(e)}'))

        # 7. Otherwise show the previous results, if any
        elif 'morphosyntax_result' in st.session_state and st.session_state.morphosyntax_result is not None:
            display_morphosyntax_results(
                st.session_state.morphosyntax_result,
                lang_code,
                morpho_t
            )
        elif not sentence_input.strip():
            st.info(morpho_t.get('morpho_initial_message', 'Enter text to begin analysis'))

    except Exception as e:
        logger.error(f"Error general en display_morphosyntax_interface: {str(e)}")
        st.error("Se produjo un error. Por favor, intente de nuevo.")
        st.error(f"Detalles del error: {str(e)}")  # Added for easier debugging
|
129 |
-
|
130 |
-
############################################################################################################
|
131 |
-
def display_morphosyntax_results(result, lang_code, morpho_t):
    """
    Display the results of the morphosyntactic analysis.

    Args:
        result: analysis result dict with 'doc' (spaCy Doc) and
                'advanced_analysis' (dict of tables/structures).
        lang_code: language code; only 'es', 'en' and 'fr' have translation
                   tables below — TODO confirm no other codes reach here.
        morpho_t: translation dict for UI strings.
    """
    # (the translations used to be looked up here from a global dict)
    # morpho_t = t.get('MORPHOSYNTACTIC', {})

    if result is None:
        st.warning(morpho_t.get('no_results', 'No results available'))
        return

    doc = result['doc']
    advanced_analysis = result['advanced_analysis']

    # Legend of grammatical categories (colored chips)
    st.markdown(f"##### {morpho_t.get('legend', 'Legend: Grammatical categories')}")
    legend_html = "<div style='display: flex; flex-wrap: wrap;'>"
    for pos, color in POS_COLORS.items():
        if pos in POS_TRANSLATIONS[lang_code]:
            legend_html += f"<div style='margin-right: 10px;'><span style='background-color: {color}; padding: 2px 5px;'>{POS_TRANSLATIONS[lang_code][pos]}</span></div>"
    legend_html += "</div>"
    st.markdown(legend_html, unsafe_allow_html=True)

    # Repeated-word highlighting
    word_colors = get_repeated_words_colors(doc)
    with st.expander(morpho_t.get('repeated_words', 'Repeated words'), expanded=True):
        highlighted_text = highlight_repeated_words(doc, word_colors)
        st.markdown(highlighted_text, unsafe_allow_html=True)

    # Sentence structure: root / subjects / objects / verbs per sentence
    with st.expander(morpho_t.get('sentence_structure', 'Sentence structure'), expanded=True):
        for i, sent_analysis in enumerate(advanced_analysis['sentence_structure']):
            sentence_str = (
                f"**{morpho_t.get('sentence', 'Sentence')} {i+1}** "
                f"{morpho_t.get('root', 'Root')}: {sent_analysis['root']} ({sent_analysis['root_pos']}) -- "
                f"{morpho_t.get('subjects', 'Subjects')}: {', '.join(sent_analysis['subjects'])} -- "
                f"{morpho_t.get('objects', 'Objects')}: {', '.join(sent_analysis['objects'])} -- "
                f"{morpho_t.get('verbs', 'Verbs')}: {', '.join(sent_analysis['verbs'])}"
            )
            st.markdown(sentence_str)

    # POS analysis and morphological analysis side by side
    col1, col2 = st.columns(2)

    with col1:
        with st.expander(morpho_t.get('pos_analysis', 'Part of speech'), expanded=True):
            pos_df = pd.DataFrame(advanced_analysis['pos_analysis'])

            # Translate POS tags into the selected language
            pos_df['pos'] = pos_df['pos'].map(lambda x: POS_TRANSLATIONS[lang_code].get(x, x))

            # Rename columns for clarity
            pos_df = pos_df.rename(columns={
                'pos': morpho_t.get('grammatical_category', 'Grammatical category'),
                'count': morpho_t.get('count', 'Count'),
                'percentage': morpho_t.get('percentage', 'Percentage'),
                'examples': morpho_t.get('examples', 'Examples')
            })

            # Show the dataframe
            st.dataframe(pos_df)

    with col2:
        with st.expander(morpho_t.get('morphological_analysis', 'Morphological Analysis'), expanded=True):
            # 1. Build the initial DataFrame
            morph_df = pd.DataFrame(advanced_analysis['morphological_analysis'])

            # 2. First rename the columns using the interface translations
            column_mapping = {
                'text': morpho_t.get('word', 'Word'),
                'lemma': morpho_t.get('lemma', 'Lemma'),
                'pos': morpho_t.get('grammatical_category', 'Grammatical category'),
                'dep': morpho_t.get('dependency', 'Dependency'),
                'morph': morpho_t.get('morphology', 'Morphology')
            }

            # 3. Apply the renaming
            morph_df = morph_df.rename(columns=column_mapping)

            # 4. Translate grammatical categories using the global POS_TRANSLATIONS
            grammatical_category = morpho_t.get('grammatical_category', 'Grammatical category')
            morph_df[grammatical_category] = morph_df[grammatical_category].map(lambda x: POS_TRANSLATIONS[lang_code].get(x, x))

            # 2.2 Translate dependency labels with language-specific tables.
            # NOTE(review): only 'es', 'en' and 'fr' are covered; any other
            # lang_code raises KeyError at the .map() below — confirm scope.
            dep_translations = {

                'es': {
                    'ROOT': 'RAÍZ', 'nsubj': 'sujeto nominal', 'obj': 'objeto', 'iobj': 'objeto indirecto',
                    'csubj': 'sujeto clausal', 'ccomp': 'complemento clausal', 'xcomp': 'complemento clausal abierto',
                    'obl': 'oblicuo', 'vocative': 'vocativo', 'expl': 'expletivo', 'dislocated': 'dislocado',
                    'advcl': 'cláusula adverbial', 'advmod': 'modificador adverbial', 'discourse': 'discurso',
                    'aux': 'auxiliar', 'cop': 'cópula', 'mark': 'marcador', 'nmod': 'modificador nominal',
                    'appos': 'aposición', 'nummod': 'modificador numeral', 'acl': 'cláusula adjetiva',
                    'amod': 'modificador adjetival', 'det': 'determinante', 'clf': 'clasificador',
                    'case': 'caso', 'conj': 'conjunción', 'cc': 'coordinante', 'fixed': 'fijo',
                    'flat': 'plano', 'compound': 'compuesto', 'list': 'lista', 'parataxis': 'parataxis',
                    'orphan': 'huérfano', 'goeswith': 'va con', 'reparandum': 'reparación', 'punct': 'puntuación'
                },

                'en': {
                    'ROOT': 'ROOT', 'nsubj': 'nominal subject', 'obj': 'object',
                    'iobj': 'indirect object', 'csubj': 'clausal subject', 'ccomp': 'clausal complement', 'xcomp': 'open clausal complement',
                    'obl': 'oblique', 'vocative': 'vocative', 'expl': 'expletive', 'dislocated': 'dislocated', 'advcl': 'adverbial clause modifier',
                    'advmod': 'adverbial modifier', 'discourse': 'discourse element', 'aux': 'auxiliary', 'cop': 'copula', 'mark': 'marker',
                    'nmod': 'nominal modifier', 'appos': 'appositional modifier', 'nummod': 'numeric modifier', 'acl': 'clausal modifier of noun',
                    'amod': 'adjectival modifier', 'det': 'determiner', 'clf': 'classifier', 'case': 'case marking',
                    'conj': 'conjunct', 'cc': 'coordinating conjunction', 'fixed': 'fixed multiword expression',
                    'flat': 'flat multiword expression', 'compound': 'compound', 'list': 'list', 'parataxis': 'parataxis', 'orphan': 'orphan',
                    'goeswith': 'goes with', 'reparandum': 'reparandum', 'punct': 'punctuation'
                },

                'fr': {
                    'ROOT': 'RACINE', 'nsubj': 'sujet nominal', 'obj': 'objet', 'iobj': 'objet indirect',
                    'csubj': 'sujet phrastique', 'ccomp': 'complément phrastique', 'xcomp': 'complément phrastique ouvert', 'obl': 'oblique',
                    'vocative': 'vocatif', 'expl': 'explétif', 'dislocated': 'disloqué', 'advcl': 'clause adverbiale', 'advmod': 'modifieur adverbial',
                    'discourse': 'élément de discours', 'aux': 'auxiliaire', 'cop': 'copule', 'mark': 'marqueur', 'nmod': 'modifieur nominal',
                    'appos': 'apposition', 'nummod': 'modifieur numéral', 'acl': 'clause relative', 'amod': 'modifieur adjectival', 'det': 'déterminant',
                    'clf': 'classificateur', 'case': 'marqueur de cas', 'conj': 'conjonction', 'cc': 'coordination', 'fixed': 'expression figée',
                    'flat': 'construction plate', 'compound': 'composé', 'list': 'liste', 'parataxis': 'parataxe', 'orphan': 'orphelin',
                    'goeswith': 'va avec', 'reparandum': 'réparation', 'punct': 'ponctuation'
                }
            }

            dependency = morpho_t.get('dependency', 'Dependency')
            morph_df[dependency] = morph_df[dependency].map(lambda x: dep_translations[lang_code].get(x, x))

            # NOTE(review): 'Imp' appears twice in each language map below
            # (Imperative vs Imperfect); in a dict literal the later entry
            # silently wins, so 'Imp' always translates as "Imperfect".
            # The two UD uses (Mood=Imp vs Tense=Imp) cannot be told apart by
            # plain substring replacement — confirm intended behavior.
            morph_translations = {
                'es': {
                    'Gender': 'Género', 'Number': 'Número', 'Case': 'Caso', 'Definite': 'Definido',
                    'PronType': 'Tipo de Pronombre', 'Person': 'Persona', 'Mood': 'Modo',
                    'Tense': 'Tiempo', 'VerbForm': 'Forma Verbal', 'Voice': 'Voz',
                    'Fem': 'Femenino', 'Masc': 'Masculino', 'Sing': 'Singular', 'Plur': 'Plural',
                    'Ind': 'Indicativo', 'Sub': 'Subjuntivo', 'Imp': 'Imperativo', 'Inf': 'Infinitivo',
                    'Part': 'Participio', 'Ger': 'Gerundio', 'Pres': 'Presente', 'Past': 'Pasado',
                    'Fut': 'Futuro', 'Perf': 'Perfecto', 'Imp': 'Imperfecto'
                },

                'en': {
                    'Gender': 'Gender', 'Number': 'Number', 'Case': 'Case', 'Definite': 'Definite', 'PronType': 'Pronoun Type', 'Person': 'Person',
                    'Mood': 'Mood', 'Tense': 'Tense', 'VerbForm': 'Verb Form', 'Voice': 'Voice',
                    'Fem': 'Feminine', 'Masc': 'Masculine', 'Sing': 'Singular', 'Plur': 'Plural', 'Ind': 'Indicative',
                    'Sub': 'Subjunctive', 'Imp': 'Imperative', 'Inf': 'Infinitive', 'Part': 'Participle',
                    'Ger': 'Gerund', 'Pres': 'Present', 'Past': 'Past', 'Fut': 'Future', 'Perf': 'Perfect', 'Imp': 'Imperfect'
                },

                'fr': {
                    'Gender': 'Genre', 'Number': 'Nombre', 'Case': 'Cas', 'Definite': 'Défini', 'PronType': 'Type de Pronom',
                    'Person': 'Personne', 'Mood': 'Mode', 'Tense': 'Temps', 'VerbForm': 'Forme Verbale', 'Voice': 'Voix',
                    'Fem': 'Féminin', 'Masc': 'Masculin', 'Sing': 'Singulier', 'Plur': 'Pluriel', 'Ind': 'Indicatif',
                    'Sub': 'Subjonctif', 'Imp': 'Impératif', 'Inf': 'Infinitif', 'Part': 'Participe',
                    'Ger': 'Gérondif', 'Pres': 'Présent', 'Past': 'Passé', 'Fut': 'Futur', 'Perf': 'Parfait', 'Imp': 'Imparfait'
                }
            }

            def translate_morph(morph_string, lang_code):
                # Plain substring replacement over the raw morph feature string
                for key, value in morph_translations[lang_code].items():
                    morph_string = morph_string.replace(key, value)
                return morph_string

            morphology = morpho_t.get('morphology', 'Morphology')
            morph_df[morphology] = morph_df[morphology].apply(lambda x: translate_morph(x, lang_code))

            st.dataframe(morph_df)

    # Arc diagrams: one displaCy dependency rendering per sentence,
    # with the SVG heights patched down to compact the layout.
    with st.expander(morpho_t.get('arc_diagram', 'Syntactic analysis: Arc diagram'), expanded=True):
        sentences = list(doc.sents)
        arc_diagrams = []

        for i, sent in enumerate(sentences):
            st.subheader(f"{morpho_t.get('sentence', 'Sentence')} {i+1}")
            html = displacy.render(sent, style="dep", options={"distance": 100})
            html = html.replace('height="375"', 'height="200"')
            html = re.sub(r'<svg[^>]*>', lambda m: m.group(0).replace('height="450"', 'height="300"'), html)
            html = re.sub(r'<g [^>]*transform="translate\((\d+),(\d+)\)"',
                          lambda m: f'<g transform="translate({m.group(1)},50)"', html)
            st.write(html, unsafe_allow_html=True)
            arc_diagrams.append(html)

    # Export button (kept disabled in this version)
    # if st.button(morpho_t.get('export_button', 'Export Analysis')):
    #     pdf_buffer = export_user_interactions(st.session_state.username, 'morphosyntax')
    #     st.download_button(
    #         label=morpho_t.get('download_pdf', 'Download PDF'),
    #         data=pdf_buffer,
    #         file_name="morphosyntax_analysis.pdf",
    #         mime="application/pdf"
    #     )
|
|
|
1 |
+
#modules/morphosyntax/morphosyntax_interface.py
|
2 |
+
import streamlit as st
|
3 |
+
from streamlit_float import *
|
4 |
+
from streamlit_antd_components import *
|
5 |
+
from streamlit.components.v1 import html
|
6 |
+
import spacy
|
7 |
+
from spacy import displacy
|
8 |
+
import spacy_streamlit
|
9 |
+
import pandas as pd
|
10 |
+
import base64
|
11 |
+
import re
|
12 |
+
|
13 |
+
# Importar desde morphosyntax_process.py
|
14 |
+
from .morphosyntax_process import (
|
15 |
+
process_morphosyntactic_input,
|
16 |
+
format_analysis_results,
|
17 |
+
perform_advanced_morphosyntactic_analysis, # Añadir esta importación
|
18 |
+
get_repeated_words_colors, # Y estas también
|
19 |
+
highlight_repeated_words,
|
20 |
+
POS_COLORS,
|
21 |
+
POS_TRANSLATIONS
|
22 |
+
)
|
23 |
+
|
24 |
+
from ..utils.widget_utils import generate_unique_key
|
25 |
+
|
26 |
+
from ..database.morphosintax_mongo_db import store_student_morphosyntax_result
|
27 |
+
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
28 |
+
|
29 |
+
# from ..database.morphosintaxis_export import export_user_interactions
|
30 |
+
|
31 |
+
import logging
|
32 |
+
logger = logging.getLogger(__name__)
|
33 |
+
|
34 |
+
############################################################################################################
|
35 |
+
def display_morphosyntax_interface(lang_code, nlp_models, morpho_t):
    """
    Render the morphosyntactic analysis tab: text input, analyze button,
    and (after a successful run) the full analysis results.

    Args:
        lang_code: Language code used to select the spaCy model (e.g. 'es', 'en', 'fr').
        nlp_models: Mapping of language code -> loaded spaCy pipeline.
        morpho_t: Translation dictionary for UI strings (``.get`` with English fallbacks).
    """
    try:
        # 1. Initialise the morphosyntax state on first render
        if 'morphosyntax_state' not in st.session_state:
            st.session_state.morphosyntax_state = {
                'input_text': "",
                'analysis_count': 0,
                'last_analysis': None
            }

        # 2. Text-input widget keyed by the analysis counter, so the widget is
        #    recreated (with a fresh key) after every completed analysis
        input_key = f"morpho_input_{st.session_state.morphosyntax_state['analysis_count']}"

        sentence_input = st.text_area(
            morpho_t.get('morpho_input_label', 'Enter text to analyze'),
            height=150,
            placeholder=morpho_t.get('morpho_input_placeholder', 'Enter your text here...'),
            key=input_key
        )

        # 3. Keep the current text in session state
        st.session_state.morphosyntax_state['input_text'] = sentence_input

        # 4. Three-column layout for button placement
        col1, col2, col3 = st.columns([2,1,2])

        # 5. Analysis button (placed in the first column; enabled only when
        #    there is non-blank text)
        with col1:
            analyze_button = st.button(
                morpho_t.get('morpho_analyze_button', 'Analyze Morphosyntax'),
                key=f"morpho_button_{st.session_state.morphosyntax_state['analysis_count']}",
                type="primary",  # New in Streamlit 1.39.0
                icon="🔍",  # New in Streamlit 1.39.0
                disabled=not bool(sentence_input.strip()),  # Enabled only when there is text
                use_container_width=True
            )

        # 6. Analysis logic
        if analyze_button and sentence_input.strip():  # Require text, not just whitespace
            try:
                with st.spinner(morpho_t.get('processing', 'Processing...')):
                    # Process the text with the language-specific model
                    doc = nlp_models[lang_code](sentence_input)

                    # Advanced morphosyntactic analysis with the same model
                    advanced_analysis = perform_advanced_morphosyntactic_analysis(
                        sentence_input,
                        nlp_models[lang_code]
                    )

                    # Persist the result in session state so later reruns can redisplay it
                    st.session_state.morphosyntax_result = {
                        'doc': doc,
                        'advanced_analysis': advanced_analysis
                    }

                    # Bump the counter so input/button widgets get fresh keys
                    st.session_state.morphosyntax_state['analysis_count'] += 1

                    # Store the analysis in the database; results are shown
                    # only when the save succeeds
                    if store_student_morphosyntax_result(
                        username=st.session_state.username,
                        text=sentence_input,
                        arc_diagrams=advanced_analysis['arc_diagrams']
                    ):
                        st.success(morpho_t.get('success_message', 'Analysis saved successfully'))

                        # Show the results
                        display_morphosyntax_results(
                            st.session_state.morphosyntax_result,
                            lang_code,
                            morpho_t
                        )
                    else:
                        st.error(morpho_t.get('error_message', 'Error saving analysis'))

            except Exception as e:
                logger.error(f"Error en análisis morfosintáctico: {str(e)}")
                st.error(morpho_t.get('error_processing', f'Error processing text: {str(e)}'))

        # 7. Redisplay previous results, if any
        elif 'morphosyntax_result' in st.session_state and st.session_state.morphosyntax_result is not None:
            display_morphosyntax_results(
                st.session_state.morphosyntax_result,
                lang_code,
                morpho_t
            )
        elif not sentence_input.strip():
            st.info(morpho_t.get('morpho_initial_message', 'Enter text to begin analysis'))

    except Exception as e:
        logger.error(f"Error general en display_morphosyntax_interface: {str(e)}")
        st.error("Se produjo un error. Por favor, intente de nuevo.")
        st.error(f"Detalles del error: {str(e)}")  # Extra detail for debugging
|
129 |
+
|
130 |
+
############################################################################################################
|
131 |
+
def display_morphosyntax_results(result, lang_code, morpho_t):
    """
    Display the results of the morphosyntactic analysis.

    Args:
        result: dict with the spaCy 'doc' and the 'advanced_analysis' payload
        lang_code: language code ('es', 'en' or 'fr')
        morpho_t: translation dictionary for UI strings
    """
    if result is None:
        st.warning(morpho_t.get('no_results', 'No results available'))
        return

    doc = result['doc']
    advanced_analysis = result['advanced_analysis']

    # Legend of grammatical categories
    st.markdown(f"##### {morpho_t.get('legend', 'Legend: Grammatical categories')}")
    legend_html = "<div style='display: flex; flex-wrap: wrap;'>"
    for pos, color in POS_COLORS.items():
        if pos in POS_TRANSLATIONS[lang_code]:
            legend_html += f"<div style='margin-right: 10px;'><span style='background-color: {color}; padding: 2px 5px;'>{POS_TRANSLATIONS[lang_code][pos]}</span></div>"
    legend_html += "</div>"
    st.markdown(legend_html, unsafe_allow_html=True)

    # Repeated-words analysis
    word_colors = get_repeated_words_colors(doc)
    with st.expander(morpho_t.get('repeated_words', 'Repeated words'), expanded=True):
        highlighted_text = highlight_repeated_words(doc, word_colors)
        st.markdown(highlighted_text, unsafe_allow_html=True)

    # Sentence structure
    with st.expander(morpho_t.get('sentence_structure', 'Sentence structure'), expanded=True):
        for i, sent_analysis in enumerate(advanced_analysis['sentence_structure']):
            sentence_str = (
                f"**{morpho_t.get('sentence', 'Sentence')} {i+1}** "
                f"{morpho_t.get('root', 'Root')}: {sent_analysis['root']} ({sent_analysis['root_pos']}) -- "
                f"{morpho_t.get('subjects', 'Subjects')}: {', '.join(sent_analysis['subjects'])} -- "
                f"{morpho_t.get('objects', 'Objects')}: {', '.join(sent_analysis['objects'])} -- "
                f"{morpho_t.get('verbs', 'Verbs')}: {', '.join(sent_analysis['verbs'])}"
            )
            st.markdown(sentence_str)

    # POS analysis (left column) and morphological analysis (right column)
    col1, col2 = st.columns(2)

    with col1:
        with st.expander(morpho_t.get('pos_analysis', 'Part of speech'), expanded=True):
            pos_df = pd.DataFrame(advanced_analysis['pos_analysis'])

            # Translate POS tags into the selected language
            pos_df['pos'] = pos_df['pos'].map(lambda x: POS_TRANSLATIONS[lang_code].get(x, x))

            # Rename the columns for clarity
            pos_df = pos_df.rename(columns={
                'pos': morpho_t.get('grammatical_category', 'Grammatical category'),
                'count': morpho_t.get('count', 'Count'),
                'percentage': morpho_t.get('percentage', 'Percentage'),
                'examples': morpho_t.get('examples', 'Examples')
            })

            st.dataframe(pos_df)

    with col2:
        with st.expander(morpho_t.get('morphological_analysis', 'Morphological Analysis'), expanded=True):
            # 1. Build the DataFrame
            morph_df = pd.DataFrame(advanced_analysis['morphological_analysis'])

            # 2. Rename the columns using the UI translations
            column_mapping = {
                'text': morpho_t.get('word', 'Word'),
                'lemma': morpho_t.get('lemma', 'Lemma'),
                'pos': morpho_t.get('grammatical_category', 'Grammatical category'),
                'dep': morpho_t.get('dependency', 'Dependency'),
                'morph': morpho_t.get('morphology', 'Morphology')
            }
            morph_df = morph_df.rename(columns=column_mapping)

            # 3. Translate grammatical categories via the global POS_TRANSLATIONS
            grammatical_category = morpho_t.get('grammatical_category', 'Grammatical category')
            morph_df[grammatical_category] = morph_df[grammatical_category].map(lambda x: POS_TRANSLATIONS[lang_code].get(x, x))

            # 4. Translate dependency labels with language-specific mappings
            dep_translations = {
                'es': {
                    'ROOT': 'RAÍZ', 'nsubj': 'sujeto nominal', 'obj': 'objeto', 'iobj': 'objeto indirecto',
                    'csubj': 'sujeto clausal', 'ccomp': 'complemento clausal', 'xcomp': 'complemento clausal abierto',
                    'obl': 'oblicuo', 'vocative': 'vocativo', 'expl': 'expletivo', 'dislocated': 'dislocado',
                    'advcl': 'cláusula adverbial', 'advmod': 'modificador adverbial', 'discourse': 'discurso',
                    'aux': 'auxiliar', 'cop': 'cópula', 'mark': 'marcador', 'nmod': 'modificador nominal',
                    'appos': 'aposición', 'nummod': 'modificador numeral', 'acl': 'cláusula adjetiva',
                    'amod': 'modificador adjetival', 'det': 'determinante', 'clf': 'clasificador',
                    'case': 'caso', 'conj': 'conjunción', 'cc': 'coordinante', 'fixed': 'fijo',
                    'flat': 'plano', 'compound': 'compuesto', 'list': 'lista', 'parataxis': 'parataxis',
                    'orphan': 'huérfano', 'goeswith': 'va con', 'reparandum': 'reparación', 'punct': 'puntuación'
                },
                'en': {
                    'ROOT': 'ROOT', 'nsubj': 'nominal subject', 'obj': 'object',
                    'iobj': 'indirect object', 'csubj': 'clausal subject', 'ccomp': 'clausal complement', 'xcomp': 'open clausal complement',
                    'obl': 'oblique', 'vocative': 'vocative', 'expl': 'expletive', 'dislocated': 'dislocated', 'advcl': 'adverbial clause modifier',
                    'advmod': 'adverbial modifier', 'discourse': 'discourse element', 'aux': 'auxiliary', 'cop': 'copula', 'mark': 'marker',
                    'nmod': 'nominal modifier', 'appos': 'appositional modifier', 'nummod': 'numeric modifier', 'acl': 'clausal modifier of noun',
                    'amod': 'adjectival modifier', 'det': 'determiner', 'clf': 'classifier', 'case': 'case marking',
                    'conj': 'conjunct', 'cc': 'coordinating conjunction', 'fixed': 'fixed multiword expression',
                    'flat': 'flat multiword expression', 'compound': 'compound', 'list': 'list', 'parataxis': 'parataxis', 'orphan': 'orphan',
                    'goeswith': 'goes with', 'reparandum': 'reparandum', 'punct': 'punctuation'
                },
                'fr': {
                    'ROOT': 'RACINE', 'nsubj': 'sujet nominal', 'obj': 'objet', 'iobj': 'objet indirect',
                    'csubj': 'sujet phrastique', 'ccomp': 'complément phrastique', 'xcomp': 'complément phrastique ouvert', 'obl': 'oblique',
                    'vocative': 'vocatif', 'expl': 'explétif', 'dislocated': 'disloqué', 'advcl': 'clause adverbiale', 'advmod': 'modifieur adverbial',
                    'discourse': 'élément de discours', 'aux': 'auxiliaire', 'cop': 'copule', 'mark': 'marqueur', 'nmod': 'modifieur nominal',
                    'appos': 'apposition', 'nummod': 'modifieur numéral', 'acl': 'clause relative', 'amod': 'modifieur adjectival', 'det': 'déterminant',
                    'clf': 'classificateur', 'case': 'marqueur de cas', 'conj': 'conjonction', 'cc': 'coordination', 'fixed': 'expression figée',
                    'flat': 'construction plate', 'compound': 'composé', 'list': 'liste', 'parataxis': 'parataxe', 'orphan': 'orphelin',
                    'goeswith': 'va avec', 'reparandum': 'réparation', 'punct': 'ponctuation'
                }
            }

            dependency = morpho_t.get('dependency', 'Dependency')
            morph_df[dependency] = morph_df[dependency].map(lambda x: dep_translations[lang_code].get(x, x))

            # NOTE: UD uses 'Imp' both for Mood=Imp (imperative) and Tense=Imp
            # (imperfect). The original dicts listed the 'Imp' key twice, so
            # Python silently kept only the last value; that resolution is now
            # explicit — 'Imp' always renders as the imperfect translation.
            morph_translations = {
                'es': {
                    'Gender': 'Género', 'Number': 'Número', 'Case': 'Caso', 'Definite': 'Definido',
                    'PronType': 'Tipo de Pronombre', 'Person': 'Persona', 'Mood': 'Modo',
                    'Tense': 'Tiempo', 'VerbForm': 'Forma Verbal', 'Voice': 'Voz',
                    'Fem': 'Femenino', 'Masc': 'Masculino', 'Sing': 'Singular', 'Plur': 'Plural',
                    'Ind': 'Indicativo', 'Sub': 'Subjuntivo', 'Imp': 'Imperfecto', 'Inf': 'Infinitivo',
                    'Part': 'Participio', 'Ger': 'Gerundio', 'Pres': 'Presente', 'Past': 'Pasado',
                    'Fut': 'Futuro', 'Perf': 'Perfecto'
                },
                'en': {
                    'Gender': 'Gender', 'Number': 'Number', 'Case': 'Case', 'Definite': 'Definite', 'PronType': 'Pronoun Type', 'Person': 'Person',
                    'Mood': 'Mood', 'Tense': 'Tense', 'VerbForm': 'Verb Form', 'Voice': 'Voice',
                    'Fem': 'Feminine', 'Masc': 'Masculine', 'Sing': 'Singular', 'Plur': 'Plural', 'Ind': 'Indicative',
                    'Sub': 'Subjunctive', 'Imp': 'Imperfect', 'Inf': 'Infinitive', 'Part': 'Participle',
                    'Ger': 'Gerund', 'Pres': 'Present', 'Past': 'Past', 'Fut': 'Future', 'Perf': 'Perfect'
                },
                'fr': {
                    'Gender': 'Genre', 'Number': 'Nombre', 'Case': 'Cas', 'Definite': 'Défini', 'PronType': 'Type de Pronom',
                    'Person': 'Personne', 'Mood': 'Mode', 'Tense': 'Temps', 'VerbForm': 'Forme Verbale', 'Voice': 'Voix',
                    'Fem': 'Féminin', 'Masc': 'Masculin', 'Sing': 'Singulier', 'Plur': 'Pluriel', 'Ind': 'Indicatif',
                    'Sub': 'Subjonctif', 'Imp': 'Imparfait', 'Inf': 'Infinitif', 'Part': 'Participe',
                    'Ger': 'Gérondif', 'Pres': 'Présent', 'Past': 'Passé', 'Fut': 'Futur', 'Perf': 'Parfait'
                }
            }

            def translate_morph(morph_string, lang_code):
                # Naive substring replacement in dict-insertion order; values
                # must not re-introduce other keys as substrings.
                for key, value in morph_translations[lang_code].items():
                    morph_string = morph_string.replace(key, value)
                return morph_string

            morphology = morpho_t.get('morphology', 'Morphology')
            morph_df[morphology] = morph_df[morphology].apply(lambda x: translate_morph(x, lang_code))

            st.dataframe(morph_df)

    # Arc diagrams
    with st.expander(morpho_t.get('arc_diagram', 'Syntactic analysis: Arc diagram'), expanded=True):
        sentences = list(doc.sents)
        arc_diagrams = []

        for i, sent in enumerate(sentences):
            st.subheader(f"{morpho_t.get('sentence', 'Sentence')} {i+1}")
            # Use a dedicated name so the module-level `html` import
            # (streamlit.components.v1.html) is not shadowed.
            svg_html = displacy.render(sent, style="dep", options={"distance": 100})
            svg_html = svg_html.replace('height="375"', 'height="200"')
            svg_html = re.sub(r'<svg[^>]*>', lambda m: m.group(0).replace('height="450"', 'height="300"'), svg_html)
            svg_html = re.sub(r'<g [^>]*transform="translate\((\d+),(\d+)\)"',
                              lambda m: f'<g transform="translate({m.group(1)},50)"', svg_html)
            st.write(svg_html, unsafe_allow_html=True)
            arc_diagrams.append(svg_html)
313 |
+
|
314 |
+
# Botón de exportación
|
315 |
+
# if st.button(morpho_t.get('export_button', 'Export Analysis')):
|
316 |
+
# pdf_buffer = export_user_interactions(st.session_state.username, 'morphosyntax')
|
317 |
+
# st.download_button(
|
318 |
+
# label=morpho_t.get('download_pdf', 'Download PDF'),
|
319 |
+
# data=pdf_buffer,
|
320 |
+
# file_name="morphosyntax_analysis.pdf",
|
321 |
+
# mime="application/pdf"
|
322 |
# )
|
modules/morphosyntax/morphosyntax_interface_BackUp_Dec-28-Ok.py
CHANGED
@@ -1,164 +1,164 @@
|
|
1 |
-
#modules/morphosyntax/morphosyntax_interface.py
|
2 |
-
|
3 |
-
import streamlit as st
|
4 |
-
from streamlit_float import *
|
5 |
-
from streamlit_antd_components import *
|
6 |
-
from streamlit.components.v1 import html
|
7 |
-
import spacy
|
8 |
-
from spacy import displacy
|
9 |
-
import spacy_streamlit
|
10 |
-
import pandas as pd
|
11 |
-
import base64
|
12 |
-
import re
|
13 |
-
|
14 |
-
from .morphosyntax_process import (
|
15 |
-
process_morphosyntactic_input,
|
16 |
-
format_analysis_results,
|
17 |
-
perform_advanced_morphosyntactic_analysis,
|
18 |
-
get_repeated_words_colors,
|
19 |
-
highlight_repeated_words,
|
20 |
-
POS_COLORS,
|
21 |
-
POS_TRANSLATIONS
|
22 |
-
)
|
23 |
-
|
24 |
-
from ..utils.widget_utils import generate_unique_key
|
25 |
-
from ..database.morphosintax_mongo_db import store_student_morphosyntax_result
|
26 |
-
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
27 |
-
|
28 |
-
import logging
|
29 |
-
logger = logging.getLogger(__name__)
|
30 |
-
|
31 |
-
|
32 |
-
def display_morphosyntax_interface(lang_code, nlp_models, morpho_t):
|
33 |
-
try:
|
34 |
-
# Inicializar el estado si no existe
|
35 |
-
if 'morphosyntax_state' not in st.session_state:
|
36 |
-
st.session_state.morphosyntax_state = {
|
37 |
-
'analysis_count': 0,
|
38 |
-
'current_text': '', # Almacenar el texto actual
|
39 |
-
'last_analysis': None,
|
40 |
-
'needs_update': False # Flag para actualización
|
41 |
-
}
|
42 |
-
|
43 |
-
# Campo de entrada de texto que mantiene su valor
|
44 |
-
text_key = "morpho_text_input"
|
45 |
-
|
46 |
-
# Función para manejar cambios en el texto
|
47 |
-
def on_text_change():
|
48 |
-
st.session_state.morphosyntax_state['current_text'] = st.session_state[text_key]
|
49 |
-
st.session_state.morphosyntax_state['needs_update'] = True
|
50 |
-
|
51 |
-
# Recuperar el texto anterior si existe
|
52 |
-
default_text = st.session_state.morphosyntax_state.get('current_text', '')
|
53 |
-
|
54 |
-
sentence_input = st.text_area(
|
55 |
-
morpho_t.get('morpho_input_label', 'Enter text to analyze'),
|
56 |
-
value=default_text, # Usar el texto guardado
|
57 |
-
height=150,
|
58 |
-
key=text_key,
|
59 |
-
on_change=on_text_change,
|
60 |
-
placeholder=morpho_t.get('morpho_input_placeholder', 'Enter your text here...')
|
61 |
-
)
|
62 |
-
|
63 |
-
# Botón de análisis
|
64 |
-
col1, col2, col3 = st.columns([2,1,2])
|
65 |
-
with col1:
|
66 |
-
analyze_button = st.button(
|
67 |
-
morpho_t.get('morpho_analyze_button', 'Analyze Morphosyntax'),
|
68 |
-
key=f"morpho_button_{st.session_state.morphosyntax_state['analysis_count']}",
|
69 |
-
type="primary",
|
70 |
-
icon="🔍",
|
71 |
-
disabled=not bool(sentence_input.strip()),
|
72 |
-
use_container_width=True
|
73 |
-
)
|
74 |
-
|
75 |
-
# Procesar análisis solo cuando sea necesario
|
76 |
-
if (analyze_button or st.session_state.morphosyntax_state['needs_update']) and sentence_input.strip():
|
77 |
-
try:
|
78 |
-
with st.spinner(morpho_t.get('processing', 'Processing...')):
|
79 |
-
doc = nlp_models[lang_code](sentence_input)
|
80 |
-
advanced_analysis = perform_advanced_morphosyntactic_analysis(
|
81 |
-
sentence_input,
|
82 |
-
nlp_models[lang_code]
|
83 |
-
)
|
84 |
-
|
85 |
-
st.session_state.morphosyntax_result = {
|
86 |
-
'doc': doc,
|
87 |
-
'advanced_analysis': advanced_analysis
|
88 |
-
}
|
89 |
-
|
90 |
-
# Solo guardar en DB si fue un click en el botón
|
91 |
-
if analyze_button:
|
92 |
-
if store_student_morphosyntax_result(
|
93 |
-
username=st.session_state.username,
|
94 |
-
text=sentence_input,
|
95 |
-
arc_diagrams=advanced_analysis['arc_diagrams']
|
96 |
-
):
|
97 |
-
st.success(morpho_t.get('success_message', 'Analysis saved successfully'))
|
98 |
-
st.session_state.morphosyntax_state['analysis_count'] += 1
|
99 |
-
|
100 |
-
st.session_state.morphosyntax_state['needs_update'] = False
|
101 |
-
|
102 |
-
# Mostrar resultados en un contenedor específico
|
103 |
-
with st.container():
|
104 |
-
display_morphosyntax_results(
|
105 |
-
st.session_state.morphosyntax_result,
|
106 |
-
lang_code,
|
107 |
-
morpho_t
|
108 |
-
)
|
109 |
-
|
110 |
-
except Exception as e:
|
111 |
-
logger.error(f"Error en análisis morfosintáctico: {str(e)}")
|
112 |
-
st.error(morpho_t.get('error_processing', f'Error processing text: {str(e)}'))
|
113 |
-
|
114 |
-
# Mostrar resultados previos si existen
|
115 |
-
elif 'morphosyntax_result' in st.session_state and st.session_state.morphosyntax_result:
|
116 |
-
with st.container():
|
117 |
-
display_morphosyntax_results(
|
118 |
-
st.session_state.morphosyntax_result,
|
119 |
-
lang_code,
|
120 |
-
morpho_t
|
121 |
-
)
|
122 |
-
|
123 |
-
except Exception as e:
|
124 |
-
logger.error(f"Error general en display_morphosyntax_interface: {str(e)}")
|
125 |
-
st.error("Se produjo un error. Por favor, intente de nuevo.")
|
126 |
-
|
127 |
-
|
128 |
-
|
129 |
-
def display_morphosyntax_results(result, lang_code, morpho_t):
|
130 |
-
"""
|
131 |
-
Muestra solo el análisis sintáctico con diagramas de arco.
|
132 |
-
"""
|
133 |
-
if result is None:
|
134 |
-
st.warning(morpho_t.get('no_results', 'No results available'))
|
135 |
-
return
|
136 |
-
|
137 |
-
doc = result['doc']
|
138 |
-
|
139 |
-
# Análisis sintáctico (diagramas de arco)
|
140 |
-
st.markdown(f"### {morpho_t.get('arc_diagram', 'Syntactic analysis: Arc diagram')}")
|
141 |
-
|
142 |
-
with st.container():
|
143 |
-
sentences = list(doc.sents)
|
144 |
-
for i, sent in enumerate(sentences):
|
145 |
-
with st.container():
|
146 |
-
st.subheader(f"{morpho_t.get('sentence', 'Sentence')} {i+1}")
|
147 |
-
try:
|
148 |
-
html = displacy.render(sent, style="dep", options={
|
149 |
-
"distance": 100,
|
150 |
-
"arrow_spacing": 20,
|
151 |
-
"word_spacing": 30
|
152 |
-
})
|
153 |
-
# Ajustar dimensiones del SVG
|
154 |
-
html = html.replace('height="375"', 'height="200"')
|
155 |
-
html = re.sub(r'<svg[^>]*>', lambda m: m.group(0).replace('height="450"', 'height="300"'), html)
|
156 |
-
html = re.sub(r'<g [^>]*transform="translate\((\d+),(\d+)\)"',
|
157 |
-
lambda m: f'<g transform="translate({m.group(1)},50)"', html)
|
158 |
-
|
159 |
-
# Envolver en un div con clase para estilos
|
160 |
-
html = f'<div class="arc-diagram-container">{html}</div>'
|
161 |
-
st.write(html, unsafe_allow_html=True)
|
162 |
-
except Exception as e:
|
163 |
-
logger.error(f"Error rendering sentence {i}: {str(e)}")
|
164 |
-
st.error(f"Error displaying diagram for sentence {i+1}")
|
|
|
1 |
+
#modules/morphosyntax/morphosyntax_interface.py
|
2 |
+
|
3 |
+
import streamlit as st
|
4 |
+
from streamlit_float import *
|
5 |
+
from streamlit_antd_components import *
|
6 |
+
from streamlit.components.v1 import html
|
7 |
+
import spacy
|
8 |
+
from spacy import displacy
|
9 |
+
import spacy_streamlit
|
10 |
+
import pandas as pd
|
11 |
+
import base64
|
12 |
+
import re
|
13 |
+
|
14 |
+
from .morphosyntax_process import (
|
15 |
+
process_morphosyntactic_input,
|
16 |
+
format_analysis_results,
|
17 |
+
perform_advanced_morphosyntactic_analysis,
|
18 |
+
get_repeated_words_colors,
|
19 |
+
highlight_repeated_words,
|
20 |
+
POS_COLORS,
|
21 |
+
POS_TRANSLATIONS
|
22 |
+
)
|
23 |
+
|
24 |
+
from ..utils.widget_utils import generate_unique_key
|
25 |
+
from ..database.morphosintax_mongo_db import store_student_morphosyntax_result
|
26 |
+
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
27 |
+
|
28 |
+
import logging
|
29 |
+
logger = logging.getLogger(__name__)
|
30 |
+
|
31 |
+
|
32 |
+
def display_morphosyntax_interface(lang_code, nlp_models, morpho_t):
    """
    Render the morphosyntax tab with a persistent text area and an analyze
    button; results re-render automatically when the text changes.

    Args:
        lang_code: Language code used to select the spaCy model.
        nlp_models: Mapping of language code -> loaded spaCy pipeline.
        morpho_t: Translation dictionary for UI strings.
    """
    try:
        # Initialise the state if it does not exist
        if 'morphosyntax_state' not in st.session_state:
            st.session_state.morphosyntax_state = {
                'analysis_count': 0,
                'current_text': '',  # Store the current text
                'last_analysis': None,
                'needs_update': False  # Flag: re-run analysis on next rerun
            }

        # Fixed widget key so the text area keeps its value across reruns
        text_key = "morpho_text_input"

        # Callback for text changes: mirror the widget value into our state
        # and flag that the analysis should be refreshed
        def on_text_change():
            st.session_state.morphosyntax_state['current_text'] = st.session_state[text_key]
            st.session_state.morphosyntax_state['needs_update'] = True

        # Recover the previous text, if any
        default_text = st.session_state.morphosyntax_state.get('current_text', '')

        # NOTE(review): passing both value= and a key= that already lives in
        # session_state can trigger a Streamlit warning — confirm intended.
        sentence_input = st.text_area(
            morpho_t.get('morpho_input_label', 'Enter text to analyze'),
            value=default_text,  # Use the saved text
            height=150,
            key=text_key,
            on_change=on_text_change,
            placeholder=morpho_t.get('morpho_input_placeholder', 'Enter your text here...')
        )

        # Analysis button
        col1, col2, col3 = st.columns([2,1,2])
        with col1:
            analyze_button = st.button(
                morpho_t.get('morpho_analyze_button', 'Analyze Morphosyntax'),
                key=f"morpho_button_{st.session_state.morphosyntax_state['analysis_count']}",
                type="primary",
                icon="🔍",
                disabled=not bool(sentence_input.strip()),
                use_container_width=True
            )

        # Run the analysis only when needed (button press or pending update)
        if (analyze_button or st.session_state.morphosyntax_state['needs_update']) and sentence_input.strip():
            try:
                with st.spinner(morpho_t.get('processing', 'Processing...')):
                    doc = nlp_models[lang_code](sentence_input)
                    advanced_analysis = perform_advanced_morphosyntactic_analysis(
                        sentence_input,
                        nlp_models[lang_code]
                    )

                    st.session_state.morphosyntax_result = {
                        'doc': doc,
                        'advanced_analysis': advanced_analysis
                    }

                    # Persist to the DB only on an explicit button click
                    if analyze_button:
                        if store_student_morphosyntax_result(
                            username=st.session_state.username,
                            text=sentence_input,
                            arc_diagrams=advanced_analysis['arc_diagrams']
                        ):
                            st.success(morpho_t.get('success_message', 'Analysis saved successfully'))
                            st.session_state.morphosyntax_state['analysis_count'] += 1

                st.session_state.morphosyntax_state['needs_update'] = False

                # Show results in a dedicated container
                with st.container():
                    display_morphosyntax_results(
                        st.session_state.morphosyntax_result,
                        lang_code,
                        morpho_t
                    )

            except Exception as e:
                logger.error(f"Error en análisis morfosintáctico: {str(e)}")
                st.error(morpho_t.get('error_processing', f'Error processing text: {str(e)}'))

        # Otherwise redisplay previous results, if any
        elif 'morphosyntax_result' in st.session_state and st.session_state.morphosyntax_result:
            with st.container():
                display_morphosyntax_results(
                    st.session_state.morphosyntax_result,
                    lang_code,
                    morpho_t
                )

    except Exception as e:
        logger.error(f"Error general en display_morphosyntax_interface: {str(e)}")
        st.error("Se produjo un error. Por favor, intente de nuevo.")
|
126 |
+
|
127 |
+
|
128 |
+
|
129 |
+
def display_morphosyntax_results(result, lang_code, morpho_t):
    """
    Display only the syntactic analysis, rendered as arc diagrams.

    Args:
        result: dict with the spaCy 'doc' (other keys unused here)
        lang_code: language code (unused in this stripped-down view)
        morpho_t: translation dictionary for UI strings
    """
    if result is None:
        st.warning(morpho_t.get('no_results', 'No results available'))
        return

    doc = result['doc']

    # Syntactic analysis (arc diagrams)
    st.markdown(f"### {morpho_t.get('arc_diagram', 'Syntactic analysis: Arc diagram')}")

    with st.container():
        sentences = list(doc.sents)
        for i, sent in enumerate(sentences):
            with st.container():
                st.subheader(f"{morpho_t.get('sentence', 'Sentence')} {i+1}")
                try:
                    # Use a dedicated name so the module-level `html` import
                    # (streamlit.components.v1.html) is not shadowed.
                    svg_html = displacy.render(sent, style="dep", options={
                        "distance": 100,
                        "arrow_spacing": 20,
                        "word_spacing": 30
                    })
                    # Shrink the rendered SVG so it fits the layout
                    svg_html = svg_html.replace('height="375"', 'height="200"')
                    svg_html = re.sub(r'<svg[^>]*>', lambda m: m.group(0).replace('height="450"', 'height="300"'), svg_html)
                    svg_html = re.sub(r'<g [^>]*transform="translate\((\d+),(\d+)\)"',
                                      lambda m: f'<g transform="translate({m.group(1)},50)"', svg_html)

                    # Wrap in a div with a class hook for styling
                    svg_html = f'<div class="arc-diagram-container">{svg_html}</div>'
                    st.write(svg_html, unsafe_allow_html=True)
                except Exception as e:
                    logger.error(f"Error rendering sentence {i}: {str(e)}")
                    st.error(f"Error displaying diagram for sentence {i+1}")
|
modules/morphosyntax/morphosyntax_interface_vOk-30-12-24.py
CHANGED
@@ -1,247 +1,247 @@
|
|
1 |
-
# modules/morphosyntax/morphosyntax_interface.py
|
2 |
-
|
3 |
-
import streamlit as st
|
4 |
-
import re
|
5 |
-
import logging
|
6 |
-
from spacy import displacy
|
7 |
-
|
8 |
-
# Se asume que la función perform_advanced_morphosyntactic_analysis
|
9 |
-
# y los métodos store_student_morphosyntax_base/iteration existen.
|
10 |
-
from ..morphosyntax.morphosyntax_process import perform_advanced_morphosyntactic_analysis
|
11 |
-
from ..database.morphosyntax_iterative_mongo_db import (
|
12 |
-
store_student_morphosyntax_base,
|
13 |
-
store_student_morphosyntax_iteration,
|
14 |
-
)
|
15 |
-
|
16 |
-
logger = logging.getLogger(__name__)
|
17 |
-
|
18 |
-
###########################################################################
|
19 |
-
def initialize_arc_analysis_state():
    """
    Ensure the arc-analysis session state (base text + iterations) exists,
    creating an empty one on first use.
    """
    if "arc_analysis_state" in st.session_state:
        return
    st.session_state.arc_analysis_state = {
        "base_id": None,
        "base_text": "",
        "base_diagram": None,
        "iteration_text": "",
        "iteration_diagram": None,
    }
    logger.info("Estado de análisis de arcos inicializado.")
|
32 |
-
|
33 |
-
###########################################################################
|
34 |
-
def reset_arc_analysis_state():
    """
    Discard any base/iteration arc analysis and return to a clean slate.
    """
    st.session_state.arc_analysis_state = dict(
        base_id=None,
        base_text="",
        base_diagram=None,
        iteration_text="",
        iteration_diagram=None,
    )
    logger.info("Estado de arcos reseteado.")
|
46 |
-
|
47 |
-
###########################################################################
|
48 |
-
def display_arc_diagram(doc):
    """
    Build and return the arc-diagram HTML for a spaCy ``Doc``.

    Does not write to the page itself; the caller renders the returned
    HTML with ``st.write(..., unsafe_allow_html=True)``.

    Returns:
        str: concatenated per-sentence ``<div>`` containers, or a red
        error paragraph if rendering fails.
    """
    try:
        # Compile the SVG size-fixing patterns once, not per sentence.
        svg_open_re = re.compile(r'<svg[^>]*>')
        translate_re = re.compile(r'<g [^>]*transform="translate\((\d+),(\d+)\)"')

        # Collect fragments and join once (avoids quadratic string +=).
        parts = []
        for sent in doc.sents:
            svg_html = displacy.render(
                sent,
                style="dep",
                options={
                    "distance": 100,
                    "arrow_spacing": 20,
                    "word_spacing": 30
                }
            )
            # Shrink the rendered SVG so it fits the layout
            svg_html = svg_html.replace('height="375"', 'height="200"')
            svg_html = svg_open_re.sub(
                lambda m: m.group(0).replace('height="450"', 'height="300"'),
                svg_html
            )
            svg_html = translate_re.sub(
                lambda m: f'<g transform="translate({m.group(1)},50)"',
                svg_html
            )
            # Wrap each sentence in a styled container
            parts.append(f'<div class="arc-diagram-container">{svg_html}</div>')
        return "".join(parts)

    except Exception as e:
        logger.error(f"Error en display_arc_diagram: {str(e)}")
        return "<p style='color:red;'>Error generando diagrama</p>"
|
85 |
-
|
86 |
-
###########################################################################
|
87 |
-
def display_morphosyntax_interface(lang_code, nlp_models, morpho_t):
|
88 |
-
"""
|
89 |
-
Interfaz principal para la visualización de diagramas de arco
|
90 |
-
(Texto Base vs Iteraciones).
|
91 |
-
"""
|
92 |
-
# CSS para layout vertical y estable
|
93 |
-
st.markdown("""
|
94 |
-
<style>
|
95 |
-
.stTextArea textarea {
|
96 |
-
font-size: 1rem;
|
97 |
-
line-height: 1.5;
|
98 |
-
min-height: 100px !important;
|
99 |
-
height: 100px !important;
|
100 |
-
}
|
101 |
-
.arc-diagram-container {
|
102 |
-
width: 100%;
|
103 |
-
padding: 0.5rem;
|
104 |
-
margin: 0.5rem 0;
|
105 |
-
}
|
106 |
-
.divider {
|
107 |
-
height: 3px;
|
108 |
-
border: none;
|
109 |
-
background-color: #333;
|
110 |
-
margin: 2rem 0;
|
111 |
-
}
|
112 |
-
</style>
|
113 |
-
""", unsafe_allow_html=True)
|
114 |
-
|
115 |
-
# 1) Inicializar estados
|
116 |
-
initialize_arc_analysis_state()
|
117 |
-
arc_state = st.session_state.arc_analysis_state
|
118 |
-
|
119 |
-
# 2) Creamos pestañas: "Texto Base" y "Iteraciones"
|
120 |
-
tabs = st.tabs(["Texto Base", "Iteraciones"])
|
121 |
-
|
122 |
-
# =================== PESTAÑA 1: Texto Base ==========================
|
123 |
-
with tabs[0]:
|
124 |
-
st.subheader("Análisis de Texto Base")
|
125 |
-
|
126 |
-
# Botón para iniciar nuevo análisis
|
127 |
-
if st.button("Nuevo Análisis", key="btn_reset_base"):
|
128 |
-
# Solo limpiamos el estado; si requieres forzar reload,
|
129 |
-
# descomenta la siguiente línea:
|
130 |
-
# st.experimental_rerun()
|
131 |
-
reset_arc_analysis_state()
|
132 |
-
|
133 |
-
# Textarea de texto base
|
134 |
-
arc_state["base_text"] = st.text_area(
|
135 |
-
"Ingrese su texto inicial",
|
136 |
-
value=arc_state["base_text"],
|
137 |
-
key="base_text_input",
|
138 |
-
height=150
|
139 |
-
)
|
140 |
-
|
141 |
-
# Botón para analizar texto base
|
142 |
-
if st.button("Analizar Texto Base", key="btn_analyze_base"):
|
143 |
-
if not arc_state["base_text"].strip():
|
144 |
-
st.warning("Ingrese un texto para analizar.")
|
145 |
-
else:
|
146 |
-
try:
|
147 |
-
# Procesar con spaCy
|
148 |
-
doc = nlp_models[lang_code](arc_state["base_text"])
|
149 |
-
# Generar HTML del arco
|
150 |
-
arc_html = display_arc_diagram(doc)
|
151 |
-
arc_state["base_diagram"] = arc_html
|
152 |
-
|
153 |
-
# Guardar en Mongo
|
154 |
-
analysis = perform_advanced_morphosyntactic_analysis(
|
155 |
-
arc_state["base_text"],
|
156 |
-
nlp_models[lang_code]
|
157 |
-
)
|
158 |
-
base_id = store_student_morphosyntax_base(
|
159 |
-
username=st.session_state.username,
|
160 |
-
text=arc_state["base_text"],
|
161 |
-
arc_diagrams=analysis["arc_diagrams"]
|
162 |
-
)
|
163 |
-
if base_id:
|
164 |
-
arc_state["base_id"] = base_id
|
165 |
-
st.success(f"Análisis base guardado. ID: {base_id}")
|
166 |
-
|
167 |
-
except Exception as exc:
|
168 |
-
st.error("Error procesando texto base")
|
169 |
-
logger.error(f"Error en análisis base: {str(exc)}")
|
170 |
-
|
171 |
-
# Mostrar diagrama base
|
172 |
-
if arc_state["base_diagram"]:
|
173 |
-
st.markdown("<hr class='divider'>", unsafe_allow_html=True)
|
174 |
-
st.markdown("#### Diagrama de Arco (Texto Base)")
|
175 |
-
st.write(arc_state["base_diagram"], unsafe_allow_html=True)
|
176 |
-
|
177 |
-
# ================== PESTAÑA 2: Iteraciones ==========================
|
178 |
-
with tabs[1]:
|
179 |
-
st.subheader("Análisis de Cambios / Iteraciones")
|
180 |
-
|
181 |
-
# Verificar que exista texto base analizado
|
182 |
-
if not arc_state["base_id"]:
|
183 |
-
st.info("Primero analiza un texto base en la pestaña anterior.")
|
184 |
-
return
|
185 |
-
|
186 |
-
# Mostrar texto base como referencia (solo lectura)
|
187 |
-
st.text_area(
|
188 |
-
"Texto Base (solo lectura)",
|
189 |
-
value=arc_state["base_text"],
|
190 |
-
height=80,
|
191 |
-
disabled=True
|
192 |
-
)
|
193 |
-
|
194 |
-
# Caja de texto para la iteración
|
195 |
-
arc_state["iteration_text"] = st.text_area(
|
196 |
-
"Texto de Iteración",
|
197 |
-
value=arc_state["iteration_text"],
|
198 |
-
height=150
|
199 |
-
)
|
200 |
-
|
201 |
-
# Botón analizar iteración
|
202 |
-
if st.button("Analizar Cambios", key="btn_analyze_iteration"):
|
203 |
-
if not arc_state["iteration_text"].strip():
|
204 |
-
st.warning("Ingrese texto de iteración.")
|
205 |
-
else:
|
206 |
-
try:
|
207 |
-
# Procesar con spaCy
|
208 |
-
doc_iter = nlp_models[lang_code](arc_state["iteration_text"])
|
209 |
-
arc_html_iter = display_arc_diagram(doc_iter)
|
210 |
-
arc_state["iteration_diagram"] = arc_html_iter
|
211 |
-
|
212 |
-
# Guardar en Mongo
|
213 |
-
analysis_iter = perform_advanced_morphosyntactic_analysis(
|
214 |
-
arc_state["iteration_text"],
|
215 |
-
nlp_models[lang_code]
|
216 |
-
)
|
217 |
-
iteration_id = store_student_morphosyntax_iteration(
|
218 |
-
username=st.session_state.username,
|
219 |
-
base_id=arc_state["base_id"],
|
220 |
-
original_text=arc_state["base_text"],
|
221 |
-
iteration_text=arc_state["iteration_text"],
|
222 |
-
arc_diagrams=analysis_iter["arc_diagrams"]
|
223 |
-
)
|
224 |
-
if iteration_id:
|
225 |
-
st.success(f"Iteración guardada. ID: {iteration_id}")
|
226 |
-
|
227 |
-
except Exception as exc:
|
228 |
-
st.error("Error procesando iteración")
|
229 |
-
logger.error(f"Error en iteración: {str(exc)}")
|
230 |
-
|
231 |
-
# Mostrar diagrama de iteración
|
232 |
-
if arc_state["iteration_diagram"]:
|
233 |
-
st.markdown("<hr class='divider'>", unsafe_allow_html=True)
|
234 |
-
st.markdown("#### Diagrama de Arco (Iteración)")
|
235 |
-
st.write(arc_state["iteration_diagram"], unsafe_allow_html=True)
|
236 |
-
|
237 |
-
# Comparación vertical (uno abajo del otro)
|
238 |
-
if arc_state["base_diagram"] and arc_state["iteration_diagram"]:
|
239 |
-
st.markdown("<hr class='divider'>", unsafe_allow_html=True)
|
240 |
-
st.markdown("### Comparación Vertical: Base vs. Iteración")
|
241 |
-
|
242 |
-
st.markdown("**Diagrama Base**")
|
243 |
-
st.write(arc_state["base_diagram"], unsafe_allow_html=True)
|
244 |
-
|
245 |
-
st.markdown("---")
|
246 |
-
st.markdown("**Diagrama Iterado**")
|
247 |
st.write(arc_state["iteration_diagram"], unsafe_allow_html=True)
|
|
|
1 |
+
# modules/morphosyntax/morphosyntax_interface.py
|
2 |
+
|
3 |
+
import streamlit as st
|
4 |
+
import re
|
5 |
+
import logging
|
6 |
+
from spacy import displacy
|
7 |
+
|
8 |
+
# Se asume que la función perform_advanced_morphosyntactic_analysis
|
9 |
+
# y los métodos store_student_morphosyntax_base/iteration existen.
|
10 |
+
from ..morphosyntax.morphosyntax_process import perform_advanced_morphosyntactic_analysis
|
11 |
+
from ..database.morphosyntax_iterative_mongo_db import (
|
12 |
+
store_student_morphosyntax_base,
|
13 |
+
store_student_morphosyntax_iteration,
|
14 |
+
)
|
15 |
+
|
16 |
+
logger = logging.getLogger(__name__)
|
17 |
+
|
18 |
+
###########################################################################
|
19 |
+
def initialize_arc_analysis_state():
    """
    Create the arc-analysis session state (base text + iterations) if it
    does not exist yet; an already-present state is left untouched.
    """
    if "arc_analysis_state" not in st.session_state:
        fresh = {key: None for key in ("base_id", "base_diagram", "iteration_diagram")}
        fresh.update(base_text="", iteration_text="")
        st.session_state.arc_analysis_state = fresh
        logger.info("Estado de análisis de arcos inicializado.")
|
32 |
+
|
33 |
+
###########################################################################
|
34 |
+
def reset_arc_analysis_state():
    """
    Wipe the arc-analysis session state back to its empty defaults
    (base and iteration text/diagrams all cleared).
    """
    blank_state = dict(
        base_id=None,
        base_text="",
        base_diagram=None,
        iteration_text="",
        iteration_diagram=None,
    )
    st.session_state.arc_analysis_state = blank_state
    logger.info("Estado de arcos reseteado.")
|
46 |
+
|
47 |
+
###########################################################################
|
48 |
+
def display_arc_diagram(doc):
    """
    Build and return the arc-diagram HTML for a spaCy `Doc`.

    One dependency diagram is rendered per sentence and the pieces are
    concatenated. Nothing is printed here; the caller shows the result with
    `st.write(..., unsafe_allow_html=True)`. On any error, a small red
    error paragraph is returned instead.
    """
    try:
        render_options = {
            "distance": 100,
            "arrow_spacing": 20,
            "word_spacing": 30,
        }
        pieces = []
        for sentence in doc.sents:
            svg = displacy.render(sentence, style="dep", options=render_options)
            # Shrink the default SVG heights so diagrams stay compact
            svg = svg.replace('height="375"', 'height="200"')
            svg = re.sub(
                r'<svg[^>]*>',
                lambda match: match.group(0).replace('height="450"', 'height="300"'),
                svg
            )
            svg = re.sub(
                r'<g [^>]*transform="translate\((\d+),(\d+)\)"',
                lambda match: f'<g transform="translate({match.group(1)},50)"',
                svg
            )
            # Wrap each sentence's diagram in its styled container
            pieces.append(f'<div class="arc-diagram-container">{svg}</div>')
        return ''.join(pieces)

    except Exception as e:
        logger.error(f"Error en display_arc_diagram: {str(e)}")
        return "<p style='color:red;'>Error generando diagrama</p>"
|
85 |
+
|
86 |
+
###########################################################################
|
87 |
+
def display_morphosyntax_interface(lang_code, nlp_models, morpho_t):
    """
    Main interface for arc-diagram visualization (Base Text vs Iterations).

    Args:
        lang_code: language code selecting which spaCy pipeline to run
        nlp_models: dict of loaded spaCy pipelines keyed by language code
        morpho_t: translation dict for this module (not referenced in the
            body below — kept for interface compatibility)
    """
    # CSS for a vertical, stable layout
    st.markdown("""
    <style>
    .stTextArea textarea {
        font-size: 1rem;
        line-height: 1.5;
        min-height: 100px !important;
        height: 100px !important;
    }
    .arc-diagram-container {
        width: 100%;
        padding: 0.5rem;
        margin: 0.5rem 0;
    }
    .divider {
        height: 3px;
        border: none;
        background-color: #333;
        margin: 2rem 0;
    }
    </style>
    """, unsafe_allow_html=True)

    # 1) Initialize session state (no-op if already present)
    initialize_arc_analysis_state()
    arc_state = st.session_state.arc_analysis_state

    # 2) Two tabs: "Texto Base" and "Iteraciones"
    tabs = st.tabs(["Texto Base", "Iteraciones"])

    # =================== TAB 1: Base text ==========================
    with tabs[0]:
        st.subheader("Análisis de Texto Base")

        # Button to start a fresh analysis
        if st.button("Nuevo Análisis", key="btn_reset_base"):
            # Only clears the state; to force a page reload,
            # uncomment the following line:
            # st.experimental_rerun()
            reset_arc_analysis_state()

        # Base-text input
        arc_state["base_text"] = st.text_area(
            "Ingrese su texto inicial",
            value=arc_state["base_text"],
            key="base_text_input",
            height=150
        )

        # Button to analyze the base text
        if st.button("Analizar Texto Base", key="btn_analyze_base"):
            if not arc_state["base_text"].strip():
                st.warning("Ingrese un texto para analizar.")
            else:
                try:
                    # Run the spaCy pipeline
                    doc = nlp_models[lang_code](arc_state["base_text"])
                    # Build the arc-diagram HTML
                    arc_html = display_arc_diagram(doc)
                    arc_state["base_diagram"] = arc_html

                    # Persist the analysis in Mongo
                    analysis = perform_advanced_morphosyntactic_analysis(
                        arc_state["base_text"],
                        nlp_models[lang_code]
                    )
                    base_id = store_student_morphosyntax_base(
                        username=st.session_state.username,
                        text=arc_state["base_text"],
                        arc_diagrams=analysis["arc_diagrams"]
                    )
                    if base_id:
                        arc_state["base_id"] = base_id
                        st.success(f"Análisis base guardado. ID: {base_id}")

                except Exception as exc:
                    st.error("Error procesando texto base")
                    logger.error(f"Error en análisis base: {str(exc)}")

        # Show the base diagram, if any
        if arc_state["base_diagram"]:
            st.markdown("<hr class='divider'>", unsafe_allow_html=True)
            st.markdown("#### Diagrama de Arco (Texto Base)")
            st.write(arc_state["base_diagram"], unsafe_allow_html=True)

    # ================== TAB 2: Iterations ==========================
    with tabs[1]:
        st.subheader("Análisis de Cambios / Iteraciones")

        # Require an analyzed base text first
        if not arc_state["base_id"]:
            st.info("Primero analiza un texto base en la pestaña anterior.")
            return

        # Show the base text as a read-only reference
        st.text_area(
            "Texto Base (solo lectura)",
            value=arc_state["base_text"],
            height=80,
            disabled=True
        )

        # Iteration-text input
        arc_state["iteration_text"] = st.text_area(
            "Texto de Iteración",
            value=arc_state["iteration_text"],
            height=150
        )

        # Button to analyze the iteration
        if st.button("Analizar Cambios", key="btn_analyze_iteration"):
            if not arc_state["iteration_text"].strip():
                st.warning("Ingrese texto de iteración.")
            else:
                try:
                    # Run the spaCy pipeline
                    doc_iter = nlp_models[lang_code](arc_state["iteration_text"])
                    arc_html_iter = display_arc_diagram(doc_iter)
                    arc_state["iteration_diagram"] = arc_html_iter

                    # Persist the iteration in Mongo
                    analysis_iter = perform_advanced_morphosyntactic_analysis(
                        arc_state["iteration_text"],
                        nlp_models[lang_code]
                    )
                    iteration_id = store_student_morphosyntax_iteration(
                        username=st.session_state.username,
                        base_id=arc_state["base_id"],
                        original_text=arc_state["base_text"],
                        iteration_text=arc_state["iteration_text"],
                        arc_diagrams=analysis_iter["arc_diagrams"]
                    )
                    if iteration_id:
                        st.success(f"Iteración guardada. ID: {iteration_id}")

                except Exception as exc:
                    st.error("Error procesando iteración")
                    logger.error(f"Error en iteración: {str(exc)}")

        # Show the iteration diagram, if any
        if arc_state["iteration_diagram"]:
            st.markdown("<hr class='divider'>", unsafe_allow_html=True)
            st.markdown("#### Diagrama de Arco (Iteración)")
            st.write(arc_state["iteration_diagram"], unsafe_allow_html=True)

        # Vertical comparison (one diagram below the other)
        if arc_state["base_diagram"] and arc_state["iteration_diagram"]:
            st.markdown("<hr class='divider'>", unsafe_allow_html=True)
            st.markdown("### Comparación Vertical: Base vs. Iteración")

            st.markdown("**Diagrama Base**")
            st.write(arc_state["base_diagram"], unsafe_allow_html=True)

            st.markdown("---")
            st.markdown("**Diagrama Iterado**")
            st.write(arc_state["iteration_diagram"], unsafe_allow_html=True)
|
modules/morphosyntax/morphosyntax_process.py
CHANGED
@@ -1,132 +1,132 @@
|
|
1 |
-
#modules/morphosyntax/morphosyntax_process.py
|
2 |
-
import streamlit as st
|
3 |
-
|
4 |
-
from ..text_analysis.morpho_analysis import (
|
5 |
-
get_repeated_words_colors,
|
6 |
-
highlight_repeated_words,
|
7 |
-
generate_arc_diagram,
|
8 |
-
get_detailed_pos_analysis,
|
9 |
-
get_morphological_analysis,
|
10 |
-
get_sentence_structure_analysis,
|
11 |
-
perform_advanced_morphosyntactic_analysis,
|
12 |
-
POS_COLORS,
|
13 |
-
POS_TRANSLATIONS
|
14 |
-
)
|
15 |
-
|
16 |
-
from ..database.morphosintax_mongo_db import store_student_morphosyntax_result
|
17 |
-
|
18 |
-
import logging
|
19 |
-
logger = logging.getLogger(__name__)
|
20 |
-
|
21 |
-
|
22 |
-
def process_morphosyntactic_input(text, lang_code, nlp_models, t):
|
23 |
-
"""
|
24 |
-
Procesa el texto ingresado para realizar el análisis morfosintáctico.
|
25 |
-
|
26 |
-
Args:
|
27 |
-
text: Texto a analizar
|
28 |
-
lang_code: Código del idioma
|
29 |
-
nlp_models: Diccionario de modelos spaCy
|
30 |
-
t: Diccionario de traducciones
|
31 |
-
|
32 |
-
Returns:
|
33 |
-
tuple: (análisis, visualizaciones, texto_resaltado, mensaje)
|
34 |
-
"""
|
35 |
-
try:
|
36 |
-
# Realizar el análisis morfosintáctico
|
37 |
-
doc = nlp_models[lang_code](text)
|
38 |
-
|
39 |
-
# Obtener el análisis avanzado
|
40 |
-
analysis = perform_advanced_morphosyntactic_analysis(text, nlp_models[lang_code])
|
41 |
-
|
42 |
-
# Generar visualizaciones - AQUÍ ESTÁ EL CAMBIO
|
43 |
-
arc_diagrams = generate_arc_diagram(doc) # Quitamos lang_code
|
44 |
-
|
45 |
-
# Obtener palabras repetidas y texto resaltado
|
46 |
-
word_colors = get_repeated_words_colors(doc)
|
47 |
-
highlighted_text = highlight_repeated_words(doc, word_colors)
|
48 |
-
|
49 |
-
# Guardar el análisis en la base de datos
|
50 |
-
store_student_morphosyntax_result(
|
51 |
-
st.session_state.username,
|
52 |
-
text,
|
53 |
-
{
|
54 |
-
'arc_diagrams': arc_diagrams,
|
55 |
-
'pos_analysis': analysis['pos_analysis'],
|
56 |
-
'morphological_analysis': analysis['morphological_analysis'],
|
57 |
-
'sentence_structure': analysis['sentence_structure']
|
58 |
-
}
|
59 |
-
)
|
60 |
-
|
61 |
-
return {
|
62 |
-
'analysis': analysis,
|
63 |
-
'visualizations': arc_diagrams,
|
64 |
-
'highlighted_text': highlighted_text,
|
65 |
-
'success': True,
|
66 |
-
'message': t.get('MORPHOSYNTACTIC', {}).get('success_message', 'Analysis completed successfully')
|
67 |
-
}
|
68 |
-
|
69 |
-
except Exception as e:
|
70 |
-
logger.error(f"Error en el análisis morfosintáctico: {str(e)}")
|
71 |
-
return {
|
72 |
-
'analysis': None,
|
73 |
-
'visualizations': None,
|
74 |
-
'highlighted_text': None,
|
75 |
-
'success': False,
|
76 |
-
'message': t.get('MORPHOSYNTACTIC', {}).get('error_message', f'Error in analysis: {str(e)}')
|
77 |
-
}
|
78 |
-
|
79 |
-
|
80 |
-
def format_analysis_results(analysis_result, t):
|
81 |
-
"""
|
82 |
-
Formatea los resultados del análisis para su visualización.
|
83 |
-
|
84 |
-
Args:
|
85 |
-
analysis_result: Resultado del análisis morfosintáctico
|
86 |
-
t: Diccionario de traducciones
|
87 |
-
|
88 |
-
Returns:
|
89 |
-
dict: Resultados formateados para visualización
|
90 |
-
"""
|
91 |
-
morpho_t = t.get('MORPHOSYNTACTIC', {})
|
92 |
-
|
93 |
-
if not analysis_result['success']:
|
94 |
-
return {
|
95 |
-
'formatted_text': analysis_result['message'],
|
96 |
-
'visualizations': None
|
97 |
-
}
|
98 |
-
|
99 |
-
formatted_sections = []
|
100 |
-
|
101 |
-
# Formato para análisis POS
|
102 |
-
if 'pos_analysis' in analysis_result['analysis']:
|
103 |
-
pos_section = [f"### {morpho_t.get('pos_analysis', 'Part of Speech Analysis')}"]
|
104 |
-
for pos_item in analysis_result['analysis']['pos_analysis']:
|
105 |
-
pos_section.append(
|
106 |
-
f"- {morpho_t.get(pos_item['pos'], pos_item['pos'])}: "
|
107 |
-
f"{pos_item['count']} ({pos_item['percentage']}%)\n "
|
108 |
-
f"Ejemplos: {', '.join(pos_item['examples'])}"
|
109 |
-
)
|
110 |
-
formatted_sections.append('\n'.join(pos_section))
|
111 |
-
|
112 |
-
# Agregar otras secciones de formato según sea necesario
|
113 |
-
|
114 |
-
return {
|
115 |
-
'formatted_text': '\n\n'.join(formatted_sections),
|
116 |
-
'visualizations': analysis_result['visualizations'],
|
117 |
-
'highlighted_text': analysis_result['highlighted_text']
|
118 |
-
}
|
119 |
-
|
120 |
-
# Re-exportar las funciones y constantes necesarias
|
121 |
-
__all__ = [
|
122 |
-
'process_morphosyntactic_input',
|
123 |
-
'highlight_repeated_words',
|
124 |
-
'generate_arc_diagram',
|
125 |
-
'get_repeated_words_colors',
|
126 |
-
'get_detailed_pos_analysis',
|
127 |
-
'get_morphological_analysis',
|
128 |
-
'get_sentence_structure_analysis',
|
129 |
-
'perform_advanced_morphosyntactic_analysis',
|
130 |
-
'POS_COLORS',
|
131 |
-
'POS_TRANSLATIONS'
|
132 |
]
|
|
|
1 |
+
#modules/morphosyntax/morphosyntax_process.py
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
from ..text_analysis.morpho_analysis import (
|
5 |
+
get_repeated_words_colors,
|
6 |
+
highlight_repeated_words,
|
7 |
+
generate_arc_diagram,
|
8 |
+
get_detailed_pos_analysis,
|
9 |
+
get_morphological_analysis,
|
10 |
+
get_sentence_structure_analysis,
|
11 |
+
perform_advanced_morphosyntactic_analysis,
|
12 |
+
POS_COLORS,
|
13 |
+
POS_TRANSLATIONS
|
14 |
+
)
|
15 |
+
|
16 |
+
from ..database.morphosintax_mongo_db import store_student_morphosyntax_result
|
17 |
+
|
18 |
+
import logging
|
19 |
+
logger = logging.getLogger(__name__)
|
20 |
+
|
21 |
+
|
22 |
+
def process_morphosyntactic_input(text, lang_code, nlp_models, t):
    """
    Process the input text and run the full morphosyntactic analysis.

    Args:
        text: text to analyze
        lang_code: language code selecting the spaCy model
        nlp_models: dict of loaded spaCy pipelines keyed by language code
        t: translations dict; section 'MORPHOSYNTACTIC' supplies messages

    Returns:
        dict: keys 'analysis', 'visualizations', 'highlighted_text',
        'success', 'message'. On failure, the first three are None and
        'success' is False.
        (Note: a previous docstring said "tuple" — the code returns a dict.)
    """
    try:
        # Run the spaCy pipeline
        doc = nlp_models[lang_code](text)

        # Advanced analysis (POS, morphology, sentence structure, arcs)
        analysis = perform_advanced_morphosyntactic_analysis(text, nlp_models[lang_code])

        # Visualizations — generate_arc_diagram no longer takes lang_code
        arc_diagrams = generate_arc_diagram(doc)

        # Repeated words and highlighted text
        word_colors = get_repeated_words_colors(doc)
        highlighted_text = highlight_repeated_words(doc, word_colors)

        # Persist the analysis in the database
        store_student_morphosyntax_result(
            st.session_state.username,
            text,
            {
                'arc_diagrams': arc_diagrams,
                'pos_analysis': analysis['pos_analysis'],
                'morphological_analysis': analysis['morphological_analysis'],
                'sentence_structure': analysis['sentence_structure']
            }
        )

        return {
            'analysis': analysis,
            'visualizations': arc_diagrams,
            'highlighted_text': highlighted_text,
            'success': True,
            'message': t.get('MORPHOSYNTACTIC', {}).get('success_message', 'Analysis completed successfully')
        }

    except Exception as e:
        logger.error(f"Error en el análisis morfosintáctico: {str(e)}")
        return {
            'analysis': None,
            'visualizations': None,
            'highlighted_text': None,
            'success': False,
            'message': t.get('MORPHOSYNTACTIC', {}).get('error_message', f'Error in analysis: {str(e)}')
        }
|
78 |
+
|
79 |
+
|
80 |
+
def format_analysis_results(analysis_result, t):
    """
    Format morphosyntactic analysis results for display.

    Args:
        analysis_result: dict produced by process_morphosyntactic_input
            (keys: 'success', 'message', 'analysis', 'visualizations',
            'highlighted_text')
        t: translations dict; section 'MORPHOSYNTACTIC' supplies labels

    Returns:
        dict: {'formatted_text', 'visualizations', 'highlighted_text'}.
        On failure, 'formatted_text' carries the error message and the
        other keys are None.
    """
    morpho_t = t.get('MORPHOSYNTACTIC', {})

    if not analysis_result['success']:
        # Fix: include 'highlighted_text' so the failure path returns the
        # same keys as the success path (callers indexing it no longer
        # raise KeyError).
        return {
            'formatted_text': analysis_result['message'],
            'visualizations': None,
            'highlighted_text': None
        }

    formatted_sections = []

    # POS analysis section
    if 'pos_analysis' in analysis_result['analysis']:
        pos_section = [f"### {morpho_t.get('pos_analysis', 'Part of Speech Analysis')}"]
        for pos_item in analysis_result['analysis']['pos_analysis']:
            pos_section.append(
                f"- {morpho_t.get(pos_item['pos'], pos_item['pos'])}: "
                f"{pos_item['count']} ({pos_item['percentage']}%)\n "
                f"Ejemplos: {', '.join(pos_item['examples'])}"
            )
        formatted_sections.append('\n'.join(pos_section))

    # Add further formatted sections here as needed

    return {
        'formatted_text': '\n\n'.join(formatted_sections),
        'visualizations': analysis_result['visualizations'],
        'highlighted_text': analysis_result['highlighted_text']
    }
|
119 |
+
|
120 |
+
# Re-export the helper functions and constants this module's consumers need
__all__ = [
    'process_morphosyntactic_input',
    'highlight_repeated_words',
    'generate_arc_diagram',
    'get_repeated_words_colors',
    'get_detailed_pos_analysis',
    'get_morphological_analysis',
    'get_sentence_structure_analysis',
    'perform_advanced_morphosyntactic_analysis',
    'POS_COLORS',
    'POS_TRANSLATIONS'
]
|
modules/morphosyntax/morphosyntax_process_BackUp_Dec24_Ok.py
CHANGED
@@ -1,132 +1,132 @@
|
|
1 |
-
#modules/morphosyntax/morphosyntax_process.py
|
2 |
-
import streamlit as st
|
3 |
-
|
4 |
-
from ..text_analysis.morpho_analysis import (
|
5 |
-
get_repeated_words_colors,
|
6 |
-
highlight_repeated_words,
|
7 |
-
generate_arc_diagram,
|
8 |
-
get_detailed_pos_analysis,
|
9 |
-
get_morphological_analysis,
|
10 |
-
get_sentence_structure_analysis,
|
11 |
-
perform_advanced_morphosyntactic_analysis,
|
12 |
-
POS_COLORS,
|
13 |
-
POS_TRANSLATIONS
|
14 |
-
)
|
15 |
-
|
16 |
-
from ..database.morphosintax_mongo_db import store_student_morphosyntax_result
|
17 |
-
|
18 |
-
import logging
|
19 |
-
logger = logging.getLogger(__name__)
|
20 |
-
|
21 |
-
|
22 |
-
def process_morphosyntactic_input(text, lang_code, nlp_models, t):
|
23 |
-
"""
|
24 |
-
Procesa el texto ingresado para realizar el análisis morfosintáctico.
|
25 |
-
|
26 |
-
Args:
|
27 |
-
text: Texto a analizar
|
28 |
-
lang_code: Código del idioma
|
29 |
-
nlp_models: Diccionario de modelos spaCy
|
30 |
-
t: Diccionario de traducciones
|
31 |
-
|
32 |
-
Returns:
|
33 |
-
tuple: (análisis, visualizaciones, texto_resaltado, mensaje)
|
34 |
-
"""
|
35 |
-
try:
|
36 |
-
# Realizar el análisis morfosintáctico
|
37 |
-
doc = nlp_models[lang_code](text)
|
38 |
-
|
39 |
-
# Obtener el análisis avanzado
|
40 |
-
analysis = perform_advanced_morphosyntactic_analysis(text, nlp_models[lang_code])
|
41 |
-
|
42 |
-
# Generar visualizaciones - AQUÍ ESTÁ EL CAMBIO
|
43 |
-
arc_diagrams = generate_arc_diagram(doc) # Quitamos lang_code
|
44 |
-
|
45 |
-
# Obtener palabras repetidas y texto resaltado
|
46 |
-
word_colors = get_repeated_words_colors(doc)
|
47 |
-
highlighted_text = highlight_repeated_words(doc, word_colors)
|
48 |
-
|
49 |
-
# Guardar el análisis en la base de datos
|
50 |
-
store_student_morphosyntax_result(
|
51 |
-
st.session_state.username,
|
52 |
-
text,
|
53 |
-
{
|
54 |
-
'arc_diagrams': arc_diagrams,
|
55 |
-
'pos_analysis': analysis['pos_analysis'],
|
56 |
-
'morphological_analysis': analysis['morphological_analysis'],
|
57 |
-
'sentence_structure': analysis['sentence_structure']
|
58 |
-
}
|
59 |
-
)
|
60 |
-
|
61 |
-
return {
|
62 |
-
'analysis': analysis,
|
63 |
-
'visualizations': arc_diagrams,
|
64 |
-
'highlighted_text': highlighted_text,
|
65 |
-
'success': True,
|
66 |
-
'message': t.get('MORPHOSYNTACTIC', {}).get('success_message', 'Analysis completed successfully')
|
67 |
-
}
|
68 |
-
|
69 |
-
except Exception as e:
|
70 |
-
logger.error(f"Error en el análisis morfosintáctico: {str(e)}")
|
71 |
-
return {
|
72 |
-
'analysis': None,
|
73 |
-
'visualizations': None,
|
74 |
-
'highlighted_text': None,
|
75 |
-
'success': False,
|
76 |
-
'message': t.get('MORPHOSYNTACTIC', {}).get('error_message', f'Error in analysis: {str(e)}')
|
77 |
-
}
|
78 |
-
|
79 |
-
|
80 |
-
def format_analysis_results(analysis_result, t):
|
81 |
-
"""
|
82 |
-
Formatea los resultados del análisis para su visualización.
|
83 |
-
|
84 |
-
Args:
|
85 |
-
analysis_result: Resultado del análisis morfosintáctico
|
86 |
-
t: Diccionario de traducciones
|
87 |
-
|
88 |
-
Returns:
|
89 |
-
dict: Resultados formateados para visualización
|
90 |
-
"""
|
91 |
-
morpho_t = t.get('MORPHOSYNTACTIC', {})
|
92 |
-
|
93 |
-
if not analysis_result['success']:
|
94 |
-
return {
|
95 |
-
'formatted_text': analysis_result['message'],
|
96 |
-
'visualizations': None
|
97 |
-
}
|
98 |
-
|
99 |
-
formatted_sections = []
|
100 |
-
|
101 |
-
# Formato para análisis POS
|
102 |
-
if 'pos_analysis' in analysis_result['analysis']:
|
103 |
-
pos_section = [f"### {morpho_t.get('pos_analysis', 'Part of Speech Analysis')}"]
|
104 |
-
for pos_item in analysis_result['analysis']['pos_analysis']:
|
105 |
-
pos_section.append(
|
106 |
-
f"- {morpho_t.get(pos_item['pos'], pos_item['pos'])}: "
|
107 |
-
f"{pos_item['count']} ({pos_item['percentage']}%)\n "
|
108 |
-
f"Ejemplos: {', '.join(pos_item['examples'])}"
|
109 |
-
)
|
110 |
-
formatted_sections.append('\n'.join(pos_section))
|
111 |
-
|
112 |
-
# Agregar otras secciones de formato según sea necesario
|
113 |
-
|
114 |
-
return {
|
115 |
-
'formatted_text': '\n\n'.join(formatted_sections),
|
116 |
-
'visualizations': analysis_result['visualizations'],
|
117 |
-
'highlighted_text': analysis_result['highlighted_text']
|
118 |
-
}
|
119 |
-
|
120 |
-
# Re-exportar las funciones y constantes necesarias
|
121 |
-
__all__ = [
|
122 |
-
'process_morphosyntactic_input',
|
123 |
-
'highlight_repeated_words',
|
124 |
-
'generate_arc_diagram',
|
125 |
-
'get_repeated_words_colors',
|
126 |
-
'get_detailed_pos_analysis',
|
127 |
-
'get_morphological_analysis',
|
128 |
-
'get_sentence_structure_analysis',
|
129 |
-
'perform_advanced_morphosyntactic_analysis',
|
130 |
-
'POS_COLORS',
|
131 |
-
'POS_TRANSLATIONS'
|
132 |
]
|
|
|
1 |
+
#modules/morphosyntax/morphosyntax_process.py
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
from ..text_analysis.morpho_analysis import (
|
5 |
+
get_repeated_words_colors,
|
6 |
+
highlight_repeated_words,
|
7 |
+
generate_arc_diagram,
|
8 |
+
get_detailed_pos_analysis,
|
9 |
+
get_morphological_analysis,
|
10 |
+
get_sentence_structure_analysis,
|
11 |
+
perform_advanced_morphosyntactic_analysis,
|
12 |
+
POS_COLORS,
|
13 |
+
POS_TRANSLATIONS
|
14 |
+
)
|
15 |
+
|
16 |
+
from ..database.morphosintax_mongo_db import store_student_morphosyntax_result
|
17 |
+
|
18 |
+
import logging
|
19 |
+
logger = logging.getLogger(__name__)
|
20 |
+
|
21 |
+
|
22 |
+
def process_morphosyntactic_input(text, lang_code, nlp_models, t):
|
23 |
+
"""
|
24 |
+
Procesa el texto ingresado para realizar el análisis morfosintáctico.
|
25 |
+
|
26 |
+
Args:
|
27 |
+
text: Texto a analizar
|
28 |
+
lang_code: Código del idioma
|
29 |
+
nlp_models: Diccionario de modelos spaCy
|
30 |
+
t: Diccionario de traducciones
|
31 |
+
|
32 |
+
Returns:
|
33 |
+
tuple: (análisis, visualizaciones, texto_resaltado, mensaje)
|
34 |
+
"""
|
35 |
+
try:
|
36 |
+
# Realizar el análisis morfosintáctico
|
37 |
+
doc = nlp_models[lang_code](text)
|
38 |
+
|
39 |
+
# Obtener el análisis avanzado
|
40 |
+
analysis = perform_advanced_morphosyntactic_analysis(text, nlp_models[lang_code])
|
41 |
+
|
42 |
+
# Generar visualizaciones - AQUÍ ESTÁ EL CAMBIO
|
43 |
+
arc_diagrams = generate_arc_diagram(doc) # Quitamos lang_code
|
44 |
+
|
45 |
+
# Obtener palabras repetidas y texto resaltado
|
46 |
+
word_colors = get_repeated_words_colors(doc)
|
47 |
+
highlighted_text = highlight_repeated_words(doc, word_colors)
|
48 |
+
|
49 |
+
# Guardar el análisis en la base de datos
|
50 |
+
store_student_morphosyntax_result(
|
51 |
+
st.session_state.username,
|
52 |
+
text,
|
53 |
+
{
|
54 |
+
'arc_diagrams': arc_diagrams,
|
55 |
+
'pos_analysis': analysis['pos_analysis'],
|
56 |
+
'morphological_analysis': analysis['morphological_analysis'],
|
57 |
+
'sentence_structure': analysis['sentence_structure']
|
58 |
+
}
|
59 |
+
)
|
60 |
+
|
61 |
+
return {
|
62 |
+
'analysis': analysis,
|
63 |
+
'visualizations': arc_diagrams,
|
64 |
+
'highlighted_text': highlighted_text,
|
65 |
+
'success': True,
|
66 |
+
'message': t.get('MORPHOSYNTACTIC', {}).get('success_message', 'Analysis completed successfully')
|
67 |
+
}
|
68 |
+
|
69 |
+
except Exception as e:
|
70 |
+
logger.error(f"Error en el análisis morfosintáctico: {str(e)}")
|
71 |
+
return {
|
72 |
+
'analysis': None,
|
73 |
+
'visualizations': None,
|
74 |
+
'highlighted_text': None,
|
75 |
+
'success': False,
|
76 |
+
'message': t.get('MORPHOSYNTACTIC', {}).get('error_message', f'Error in analysis: {str(e)}')
|
77 |
+
}
|
78 |
+
|
79 |
+
|
80 |
+
def format_analysis_results(analysis_result, t):
|
81 |
+
"""
|
82 |
+
Formatea los resultados del análisis para su visualización.
|
83 |
+
|
84 |
+
Args:
|
85 |
+
analysis_result: Resultado del análisis morfosintáctico
|
86 |
+
t: Diccionario de traducciones
|
87 |
+
|
88 |
+
Returns:
|
89 |
+
dict: Resultados formateados para visualización
|
90 |
+
"""
|
91 |
+
morpho_t = t.get('MORPHOSYNTACTIC', {})
|
92 |
+
|
93 |
+
if not analysis_result['success']:
|
94 |
+
return {
|
95 |
+
'formatted_text': analysis_result['message'],
|
96 |
+
'visualizations': None
|
97 |
+
}
|
98 |
+
|
99 |
+
formatted_sections = []
|
100 |
+
|
101 |
+
# Formato para análisis POS
|
102 |
+
if 'pos_analysis' in analysis_result['analysis']:
|
103 |
+
pos_section = [f"### {morpho_t.get('pos_analysis', 'Part of Speech Analysis')}"]
|
104 |
+
for pos_item in analysis_result['analysis']['pos_analysis']:
|
105 |
+
pos_section.append(
|
106 |
+
f"- {morpho_t.get(pos_item['pos'], pos_item['pos'])}: "
|
107 |
+
f"{pos_item['count']} ({pos_item['percentage']}%)\n "
|
108 |
+
f"Ejemplos: {', '.join(pos_item['examples'])}"
|
109 |
+
)
|
110 |
+
formatted_sections.append('\n'.join(pos_section))
|
111 |
+
|
112 |
+
# Agregar otras secciones de formato según sea necesario
|
113 |
+
|
114 |
+
return {
|
115 |
+
'formatted_text': '\n\n'.join(formatted_sections),
|
116 |
+
'visualizations': analysis_result['visualizations'],
|
117 |
+
'highlighted_text': analysis_result['highlighted_text']
|
118 |
+
}
|
119 |
+
|
120 |
+
# Re-exportar las funciones y constantes necesarias
|
121 |
+
__all__ = [
|
122 |
+
'process_morphosyntactic_input',
|
123 |
+
'highlight_repeated_words',
|
124 |
+
'generate_arc_diagram',
|
125 |
+
'get_repeated_words_colors',
|
126 |
+
'get_detailed_pos_analysis',
|
127 |
+
'get_morphological_analysis',
|
128 |
+
'get_sentence_structure_analysis',
|
129 |
+
'perform_advanced_morphosyntactic_analysis',
|
130 |
+
'POS_COLORS',
|
131 |
+
'POS_TRANSLATIONS'
|
132 |
]
|
modules/semantic/__init_.py
CHANGED
@@ -1,17 +1,17 @@
|
|
1 |
-
# modules/semantic/__init_.py
|
2 |
-
|
3 |
-
from .semantic_interface import (
|
4 |
-
display_semantic_interface,
|
5 |
-
display_semantic_results
|
6 |
-
)
|
7 |
-
from .semantic_process import (
|
8 |
-
process_semantic_input,
|
9 |
-
format_semantic_results
|
10 |
-
)
|
11 |
-
|
12 |
-
__all__ = [
|
13 |
-
'display_semantic_interface',
|
14 |
-
'display_semantic_results',
|
15 |
-
'process_semantic_input',
|
16 |
-
'format_semantic_results'
|
17 |
]
|
|
|
1 |
+
# modules/semantic/__init_.py
|
2 |
+
|
3 |
+
from .semantic_interface import (
|
4 |
+
display_semantic_interface,
|
5 |
+
display_semantic_results
|
6 |
+
)
|
7 |
+
from .semantic_process import (
|
8 |
+
process_semantic_input,
|
9 |
+
format_semantic_results
|
10 |
+
)
|
11 |
+
|
12 |
+
__all__ = [
|
13 |
+
'display_semantic_interface',
|
14 |
+
'display_semantic_results',
|
15 |
+
'process_semantic_input',
|
16 |
+
'format_semantic_results'
|
17 |
]
|
modules/semantic/semantic_interface.py
CHANGED
@@ -1,293 +1,296 @@
|
|
1 |
-
#modules/semantic/semantic_interface.py
|
2 |
-
import streamlit as st
|
3 |
-
from streamlit_float import *
|
4 |
-
from streamlit_antd_components import *
|
5 |
-
from streamlit.components.v1 import html
|
6 |
-
import spacy_streamlit
|
7 |
-
import io
|
8 |
-
from io import BytesIO
|
9 |
-
import base64
|
10 |
-
import matplotlib.pyplot as plt
|
11 |
-
import pandas as pd
|
12 |
-
import re
|
13 |
-
import logging
|
14 |
-
|
15 |
-
# Configuración del logger
|
16 |
-
logger = logging.getLogger(__name__)
|
17 |
-
|
18 |
-
# Importaciones locales
|
19 |
-
from .semantic_process import (
|
20 |
-
process_semantic_input,
|
21 |
-
format_semantic_results
|
22 |
-
)
|
23 |
-
|
24 |
-
from ..utils.widget_utils import generate_unique_key
|
25 |
-
from ..database.semantic_mongo_db import store_student_semantic_result
|
26 |
-
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
27 |
-
|
28 |
-
# from ..database.semantic_export import export_user_interactions
|
29 |
-
|
30 |
-
|
31 |
-
###############################
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
'
|
52 |
-
|
53 |
-
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
|
70 |
-
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
st.
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
|
103 |
-
|
104 |
-
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
112 |
-
|
113 |
-
|
114 |
-
|
115 |
-
|
116 |
-
|
117 |
-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
-
|
122 |
-
|
123 |
-
|
124 |
-
|
125 |
-
|
126 |
-
'
|
127 |
-
|
128 |
-
|
129 |
-
|
130 |
-
|
131 |
-
|
132 |
-
|
133 |
-
|
134 |
-
|
135 |
-
|
136 |
-
|
137 |
-
|
138 |
-
|
139 |
-
|
140 |
-
|
141 |
-
|
142 |
-
|
143 |
-
|
144 |
-
|
145 |
-
|
146 |
-
|
147 |
-
|
148 |
-
|
149 |
-
|
150 |
-
|
151 |
-
|
152 |
-
|
153 |
-
|
154 |
-
|
155 |
-
|
156 |
-
|
157 |
-
|
158 |
-
|
159 |
-
|
160 |
-
|
161 |
-
|
162 |
-
|
163 |
-
|
164 |
-
|
165 |
-
|
166 |
-
|
167 |
-
|
168 |
-
|
169 |
-
|
170 |
-
|
171 |
-
|
172 |
-
|
173 |
-
|
174 |
-
|
175 |
-
|
176 |
-
|
177 |
-
|
178 |
-
|
179 |
-
|
180 |
-
|
181 |
-
|
182 |
-
|
183 |
-
|
184 |
-
}
|
185 |
-
.concept-
|
186 |
-
|
187 |
-
|
188 |
-
|
189 |
-
|
190 |
-
|
191 |
-
|
192 |
-
|
193 |
-
|
194 |
-
|
195 |
-
|
196 |
-
|
197 |
-
|
198 |
-
|
199 |
-
|
200 |
-
|
201 |
-
|
202 |
-
|
203 |
-
|
204 |
-
|
205 |
-
|
206 |
-
|
207 |
-
|
208 |
-
|
209 |
-
|
210 |
-
|
211 |
-
|
212 |
-
|
213 |
-
|
214 |
-
|
215 |
-
|
216 |
-
|
217 |
-
|
218 |
-
|
219 |
-
|
220 |
-
|
221 |
-
|
222 |
-
|
223 |
-
|
224 |
-
|
225 |
-
|
226 |
-
|
227 |
-
|
228 |
-
|
229 |
-
|
230 |
-
|
231 |
-
|
232 |
-
|
233 |
-
|
234 |
-
|
235 |
-
|
236 |
-
|
237 |
-
|
238 |
-
|
239 |
-
|
240 |
-
|
241 |
-
|
242 |
-
|
243 |
-
|
244 |
-
|
245 |
-
|
246 |
-
|
247 |
-
|
248 |
-
|
249 |
-
|
250 |
-
|
251 |
-
|
252 |
-
|
253 |
-
|
254 |
-
|
255 |
-
|
256 |
-
|
257 |
-
|
258 |
-
|
259 |
-
|
260 |
-
|
261 |
-
|
262 |
-
|
263 |
-
|
264 |
-
-
|
265 |
-
|
266 |
-
|
267 |
-
|
268 |
-
|
269 |
-
|
270 |
-
|
271 |
-
|
272 |
-
|
273 |
-
|
274 |
-
|
275 |
-
|
276 |
-
|
277 |
-
|
278 |
-
|
279 |
-
|
280 |
-
|
281 |
-
|
282 |
-
|
283 |
-
|
284 |
-
|
285 |
-
|
286 |
-
|
287 |
-
|
288 |
-
|
289 |
-
|
290 |
-
|
291 |
-
|
292 |
-
|
|
|
|
|
|
|
293 |
'''
|
|
|
1 |
+
#modules/semantic/semantic_interface.py
|
2 |
+
import streamlit as st
|
3 |
+
from streamlit_float import *
|
4 |
+
from streamlit_antd_components import *
|
5 |
+
from streamlit.components.v1 import html
|
6 |
+
import spacy_streamlit
|
7 |
+
import io
|
8 |
+
from io import BytesIO
|
9 |
+
import base64
|
10 |
+
import matplotlib.pyplot as plt
|
11 |
+
import pandas as pd
|
12 |
+
import re
|
13 |
+
import logging
|
14 |
+
|
15 |
+
# Configuración del logger
|
16 |
+
logger = logging.getLogger(__name__)
|
17 |
+
|
18 |
+
# Importaciones locales
|
19 |
+
from .semantic_process import (
|
20 |
+
process_semantic_input,
|
21 |
+
format_semantic_results
|
22 |
+
)
|
23 |
+
|
24 |
+
from ..utils.widget_utils import generate_unique_key
|
25 |
+
from ..database.semantic_mongo_db import store_student_semantic_result
|
26 |
+
from ..database.chat_mongo_db import store_chat_history, get_chat_history
|
27 |
+
|
28 |
+
# from ..database.semantic_export import export_user_interactions
|
29 |
+
|
30 |
+
|
31 |
+
###############################
|
32 |
+
|
33 |
+
# En semantic_interface.py
|
34 |
+
def display_semantic_interface(lang_code, nlp_models, semantic_t):
|
35 |
+
try:
|
36 |
+
# 1. Inicializar el estado de la sesión
|
37 |
+
if 'semantic_state' not in st.session_state:
|
38 |
+
st.session_state.semantic_state = {
|
39 |
+
'analysis_count': 0,
|
40 |
+
'last_analysis': None,
|
41 |
+
'current_file': None,
|
42 |
+
'pending_analysis': False # Nuevo flag para controlar el análisis pendiente
|
43 |
+
}
|
44 |
+
|
45 |
+
# 2. Área de carga de archivo con mensaje informativo
|
46 |
+
st.info(semantic_t.get('initial_instruction',
|
47 |
+
'Para comenzar un nuevo análisis semántico, cargue un archivo de texto (.txt)'))
|
48 |
+
|
49 |
+
uploaded_file = st.file_uploader(
|
50 |
+
semantic_t.get('semantic_file_uploader', 'Upload a text file for semantic analysis'),
|
51 |
+
type=['txt'],
|
52 |
+
key=f"semantic_file_uploader_{st.session_state.semantic_state['analysis_count']}"
|
53 |
+
)
|
54 |
+
|
55 |
+
# Verificar si hay un archivo cargado y un análisis pendiente
|
56 |
+
if uploaded_file is not None and st.session_state.semantic_state.get('pending_analysis', False):
|
57 |
+
try:
|
58 |
+
with st.spinner(semantic_t.get('processing', 'Processing...')):
|
59 |
+
# Realizar análisis
|
60 |
+
text_content = uploaded_file.getvalue().decode('utf-8')
|
61 |
+
|
62 |
+
analysis_result = process_semantic_input(
|
63 |
+
text_content,
|
64 |
+
lang_code,
|
65 |
+
nlp_models,
|
66 |
+
semantic_t
|
67 |
+
)
|
68 |
+
|
69 |
+
if analysis_result['success']:
|
70 |
+
# Guardar resultado
|
71 |
+
st.session_state.semantic_result = analysis_result
|
72 |
+
st.session_state.semantic_state['analysis_count'] += 1
|
73 |
+
st.session_state.semantic_state['current_file'] = uploaded_file.name
|
74 |
+
|
75 |
+
# Guardar en base de datos
|
76 |
+
storage_success = store_student_semantic_result(
|
77 |
+
st.session_state.username,
|
78 |
+
text_content,
|
79 |
+
analysis_result['analysis']
|
80 |
+
)
|
81 |
+
|
82 |
+
if storage_success:
|
83 |
+
st.success(
|
84 |
+
semantic_t.get('analysis_complete',
|
85 |
+
'Análisis completado y guardado. Para realizar un nuevo análisis, cargue otro archivo.')
|
86 |
+
)
|
87 |
+
else:
|
88 |
+
st.error(semantic_t.get('error_message', 'Error saving analysis'))
|
89 |
+
else:
|
90 |
+
st.error(analysis_result['message'])
|
91 |
+
|
92 |
+
# Restablecer el flag de análisis pendiente
|
93 |
+
st.session_state.semantic_state['pending_analysis'] = False
|
94 |
+
|
95 |
+
except Exception as e:
|
96 |
+
logger.error(f"Error en análisis semántico: {str(e)}")
|
97 |
+
st.error(semantic_t.get('error_processing', f'Error processing text: {str(e)}'))
|
98 |
+
# Restablecer el flag de análisis pendiente en caso de error
|
99 |
+
st.session_state.semantic_state['pending_analysis'] = False
|
100 |
+
|
101 |
+
# 3. Columnas para los botones y mensajes
|
102 |
+
col1, col2 = st.columns([1,4])
|
103 |
+
|
104 |
+
# 4. Botón de análisis
|
105 |
+
with col1:
|
106 |
+
analyze_button = st.button(
|
107 |
+
semantic_t.get('semantic_analyze_button', 'Analyze'),
|
108 |
+
key=f"semantic_analyze_button_{st.session_state.semantic_state['analysis_count']}",
|
109 |
+
type="primary",
|
110 |
+
icon="🔍",
|
111 |
+
disabled=uploaded_file is None,
|
112 |
+
use_container_width=True
|
113 |
+
)
|
114 |
+
|
115 |
+
# 5. Procesar análisis
|
116 |
+
if analyze_button and uploaded_file is not None:
|
117 |
+
# En lugar de realizar el análisis inmediatamente, establecer el flag
|
118 |
+
st.session_state.semantic_state['pending_analysis'] = True
|
119 |
+
# Forzar la recarga de la aplicación
|
120 |
+
st.rerun()
|
121 |
+
|
122 |
+
# 6. Mostrar resultados previos o mensaje inicial
|
123 |
+
elif 'semantic_result' in st.session_state and st.session_state.semantic_result is not None:
|
124 |
+
# Mostrar mensaje sobre el análisis actual
|
125 |
+
st.info(
|
126 |
+
semantic_t.get('current_analysis_message',
|
127 |
+
f'Mostrando análisis del archivo: {st.session_state.semantic_state["current_file"]}. '
|
128 |
+
'Para realizar un nuevo análisis, cargue otro archivo.')
|
129 |
+
)
|
130 |
+
|
131 |
+
display_semantic_results(
|
132 |
+
st.session_state.semantic_result,
|
133 |
+
lang_code,
|
134 |
+
semantic_t
|
135 |
+
)
|
136 |
+
else:
|
137 |
+
st.info(semantic_t.get('upload_prompt', 'Cargue un archivo para comenzar el análisis'))
|
138 |
+
|
139 |
+
except Exception as e:
|
140 |
+
logger.error(f"Error general en interfaz semántica: {str(e)}")
|
141 |
+
st.error(semantic_t.get('general_error', "Se produjo un error. Por favor, intente de nuevo."))
|
142 |
+
|
143 |
+
|
144 |
+
#######################################
|
145 |
+
def display_semantic_results(semantic_result, lang_code, semantic_t):
|
146 |
+
"""
|
147 |
+
Muestra los resultados del análisis semántico de conceptos clave.
|
148 |
+
"""
|
149 |
+
if semantic_result is None or not semantic_result['success']:
|
150 |
+
st.warning(semantic_t.get('no_results', 'No results available'))
|
151 |
+
return
|
152 |
+
|
153 |
+
analysis = semantic_result['analysis']
|
154 |
+
|
155 |
+
# Mostrar conceptos clave en formato horizontal
|
156 |
+
st.subheader(semantic_t.get('key_concepts', 'Key Concepts'))
|
157 |
+
if 'key_concepts' in analysis and analysis['key_concepts']:
|
158 |
+
# Crear tabla de conceptos
|
159 |
+
df = pd.DataFrame(
|
160 |
+
analysis['key_concepts'],
|
161 |
+
columns=[
|
162 |
+
semantic_t.get('concept', 'Concept'),
|
163 |
+
semantic_t.get('frequency', 'Frequency')
|
164 |
+
]
|
165 |
+
)
|
166 |
+
|
167 |
+
# Convertir DataFrame a formato horizontal
|
168 |
+
st.write(
|
169 |
+
"""
|
170 |
+
<style>
|
171 |
+
.concept-table {
|
172 |
+
display: flex;
|
173 |
+
flex-wrap: wrap;
|
174 |
+
gap: 10px;
|
175 |
+
margin-bottom: 20px;
|
176 |
+
}
|
177 |
+
.concept-item {
|
178 |
+
background-color: #f0f2f6;
|
179 |
+
border-radius: 5px;
|
180 |
+
padding: 8px 12px;
|
181 |
+
display: flex;
|
182 |
+
align-items: center;
|
183 |
+
gap: 8px;
|
184 |
+
}
|
185 |
+
.concept-name {
|
186 |
+
font-weight: bold;
|
187 |
+
}
|
188 |
+
.concept-freq {
|
189 |
+
color: #666;
|
190 |
+
font-size: 0.9em;
|
191 |
+
}
|
192 |
+
</style>
|
193 |
+
<div class="concept-table">
|
194 |
+
""" +
|
195 |
+
''.join([
|
196 |
+
f'<div class="concept-item"><span class="concept-name">{concept}</span>'
|
197 |
+
f'<span class="concept-freq">({freq:.2f})</span></div>'
|
198 |
+
for concept, freq in df.values
|
199 |
+
]) +
|
200 |
+
"</div>",
|
201 |
+
unsafe_allow_html=True
|
202 |
+
)
|
203 |
+
else:
|
204 |
+
st.info(semantic_t.get('no_concepts', 'No key concepts found'))
|
205 |
+
|
206 |
+
# Gráfico de conceptos
|
207 |
+
st.subheader(semantic_t.get('concept_graph', 'Concepts Graph'))
|
208 |
+
if 'concept_graph' in analysis and analysis['concept_graph'] is not None:
|
209 |
+
try:
|
210 |
+
# Container para el grafo con estilos mejorados
|
211 |
+
st.markdown(
|
212 |
+
"""
|
213 |
+
<style>
|
214 |
+
.graph-container {
|
215 |
+
background-color: white;
|
216 |
+
border-radius: 10px;
|
217 |
+
padding: 20px;
|
218 |
+
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
219 |
+
margin: 10px 0;
|
220 |
+
}
|
221 |
+
.button-container {
|
222 |
+
display: flex;
|
223 |
+
gap: 10px;
|
224 |
+
margin: 10px 0;
|
225 |
+
}
|
226 |
+
</style>
|
227 |
+
""",
|
228 |
+
unsafe_allow_html=True
|
229 |
+
)
|
230 |
+
|
231 |
+
with st.container():
|
232 |
+
st.markdown('<div class="graph-container">', unsafe_allow_html=True)
|
233 |
+
|
234 |
+
# Mostrar grafo
|
235 |
+
graph_bytes = analysis['concept_graph']
|
236 |
+
graph_base64 = base64.b64encode(graph_bytes).decode()
|
237 |
+
st.markdown(
|
238 |
+
f'<img src="data:image/png;base64,{graph_base64}" alt="Concept Graph" style="width:100%;"/>',
|
239 |
+
unsafe_allow_html=True
|
240 |
+
)
|
241 |
+
|
242 |
+
# Leyenda del grafo
|
243 |
+
st.caption(semantic_t.get(
|
244 |
+
'graph_description',
|
245 |
+
'Visualización de relaciones entre conceptos clave identificados en el texto.'
|
246 |
+
))
|
247 |
+
|
248 |
+
st.markdown('</div>', unsafe_allow_html=True)
|
249 |
+
|
250 |
+
# Contenedor para botones
|
251 |
+
col1, col2 = st.columns([1,4])
|
252 |
+
with col1:
|
253 |
+
st.download_button(
|
254 |
+
label="📥 " + semantic_t.get('download_graph', "Download"),
|
255 |
+
data=graph_bytes,
|
256 |
+
file_name="semantic_graph.png",
|
257 |
+
mime="image/png",
|
258 |
+
use_container_width=True
|
259 |
+
)
|
260 |
+
|
261 |
+
# Expandible con la interpretación
|
262 |
+
with st.expander("📊 " + semantic_t.get('graph_help', "Graph Interpretation")):
|
263 |
+
st.markdown("""
|
264 |
+
- 🔀 Las flechas indican la dirección de la relación entre conceptos
|
265 |
+
- 🎨 Los colores más intensos indican conceptos más centrales en el texto
|
266 |
+
- ⭕ El tamaño de los nodos representa la frecuencia del concepto
|
267 |
+
- ↔️ El grosor de las líneas indica la fuerza de la conexión
|
268 |
+
""")
|
269 |
+
|
270 |
+
except Exception as e:
|
271 |
+
logger.error(f"Error displaying graph: {str(e)}")
|
272 |
+
st.error(semantic_t.get('graph_error', 'Error displaying the graph'))
|
273 |
+
else:
|
274 |
+
st.info(semantic_t.get('no_graph', 'No concept graph available'))
|
275 |
+
|
276 |
+
|
277 |
+
########################################################################################
|
278 |
+
'''
|
279 |
+
# Botón de exportación al final
|
280 |
+
if 'semantic_analysis_counter' in st.session_state:
|
281 |
+
col1, col2, col3 = st.columns([2,1,2])
|
282 |
+
with col2:
|
283 |
+
if st.button(
|
284 |
+
semantic_t.get('export_button', 'Export Analysis'),
|
285 |
+
key=f"semantic_export_{st.session_state.semantic_analysis_counter}",
|
286 |
+
use_container_width=True
|
287 |
+
):
|
288 |
+
pdf_buffer = export_user_interactions(st.session_state.username, 'semantic')
|
289 |
+
st.download_button(
|
290 |
+
label=semantic_t.get('download_pdf', 'Download PDF'),
|
291 |
+
data=pdf_buffer,
|
292 |
+
file_name="semantic_analysis.pdf",
|
293 |
+
mime="application/pdf",
|
294 |
+
key=f"semantic_download_{st.session_state.semantic_analysis_counter}"
|
295 |
+
)
|
296 |
'''
|
modules/semantic/semantic_interface_1.py
CHANGED
@@ -1,55 +1,55 @@
|
|
1 |
-
import streamlit as st
|
2 |
-
from .semantic_process import process_semantic_analysis
|
3 |
-
from ..chatbot.chatbot import initialize_chatbot
|
4 |
-
from ..database.database_oldFromV2 import store_semantic_result
|
5 |
-
from ..text_analysis.semantic_analysis import perform_semantic_analysis
|
6 |
-
from ..utils.widget_utils import generate_unique_key
|
7 |
-
|
8 |
-
def display_semantic_interface(lang_code, nlp_models, t):
|
9 |
-
st.subheader(t['title'])
|
10 |
-
|
11 |
-
# Inicializar el chatbot si no existe
|
12 |
-
if 'semantic_chatbot' not in st.session_state:
|
13 |
-
st.session_state.semantic_chatbot = initialize_chatbot('semantic')
|
14 |
-
|
15 |
-
# Sección para cargar archivo
|
16 |
-
uploaded_file = st.file_uploader(t['file_uploader'], type=['txt', 'pdf', 'docx', 'doc', 'odt'])
|
17 |
-
if uploaded_file:
|
18 |
-
file_contents = uploaded_file.getvalue().decode('utf-8')
|
19 |
-
st.session_state.file_contents = file_contents
|
20 |
-
|
21 |
-
# Mostrar el historial del chat
|
22 |
-
chat_history = st.session_state.get('semantic_chat_history', [])
|
23 |
-
for message in chat_history:
|
24 |
-
with st.chat_message(message["role"]):
|
25 |
-
st.write(message["content"])
|
26 |
-
if "visualization" in message:
|
27 |
-
st.pyplot(message["visualization"])
|
28 |
-
|
29 |
-
# Input del usuario
|
30 |
-
user_input = st.chat_input(t['semantic_initial_message'], key=generate_unique_key('semantic', st.session_state.username))
|
31 |
-
|
32 |
-
if user_input:
|
33 |
-
# Procesar el input del usuario
|
34 |
-
response, visualization = process_semantic_analysis(user_input, lang_code, nlp_models[lang_code], st.session_state.get('file_contents'), t)
|
35 |
-
|
36 |
-
# Actualizar el historial del chat
|
37 |
-
chat_history.append({"role": "user", "content": user_input})
|
38 |
-
chat_history.append({"role": "assistant", "content": response, "visualization": visualization})
|
39 |
-
st.session_state.semantic_chat_history = chat_history
|
40 |
-
|
41 |
-
# Mostrar el resultado más reciente
|
42 |
-
with st.chat_message("assistant"):
|
43 |
-
st.write(response)
|
44 |
-
if visualization:
|
45 |
-
st.pyplot(visualization)
|
46 |
-
|
47 |
-
# Guardar el resultado en la base de datos si es un análisis
|
48 |
-
if user_input.startswith('/analisis_semantico'):
|
49 |
-
result = perform_semantic_analysis(st.session_state.file_contents, nlp_models[lang_code], lang_code)
|
50 |
-
store_semantic_result(st.session_state.username, st.session_state.file_contents, result)
|
51 |
-
|
52 |
-
# Botón para limpiar el historial del chat
|
53 |
-
if st.button(t['clear_chat'], key=generate_unique_key('semantic', 'clear_chat')):
|
54 |
-
st.session_state.semantic_chat_history = []
|
55 |
st.rerun()
|
|
|
1 |
+
import streamlit as st
|
2 |
+
from .semantic_process import process_semantic_analysis
|
3 |
+
from ..chatbot.chatbot import initialize_chatbot
|
4 |
+
from ..database.database_oldFromV2 import store_semantic_result
|
5 |
+
from ..text_analysis.semantic_analysis import perform_semantic_analysis
|
6 |
+
from ..utils.widget_utils import generate_unique_key
|
7 |
+
|
8 |
+
def display_semantic_interface(lang_code, nlp_models, t):
|
9 |
+
st.subheader(t['title'])
|
10 |
+
|
11 |
+
# Inicializar el chatbot si no existe
|
12 |
+
if 'semantic_chatbot' not in st.session_state:
|
13 |
+
st.session_state.semantic_chatbot = initialize_chatbot('semantic')
|
14 |
+
|
15 |
+
# Sección para cargar archivo
|
16 |
+
uploaded_file = st.file_uploader(t['file_uploader'], type=['txt', 'pdf', 'docx', 'doc', 'odt'])
|
17 |
+
if uploaded_file:
|
18 |
+
file_contents = uploaded_file.getvalue().decode('utf-8')
|
19 |
+
st.session_state.file_contents = file_contents
|
20 |
+
|
21 |
+
# Mostrar el historial del chat
|
22 |
+
chat_history = st.session_state.get('semantic_chat_history', [])
|
23 |
+
for message in chat_history:
|
24 |
+
with st.chat_message(message["role"]):
|
25 |
+
st.write(message["content"])
|
26 |
+
if "visualization" in message:
|
27 |
+
st.pyplot(message["visualization"])
|
28 |
+
|
29 |
+
# Input del usuario
|
30 |
+
user_input = st.chat_input(t['semantic_initial_message'], key=generate_unique_key('semantic', st.session_state.username))
|
31 |
+
|
32 |
+
if user_input:
|
33 |
+
# Procesar el input del usuario
|
34 |
+
response, visualization = process_semantic_analysis(user_input, lang_code, nlp_models[lang_code], st.session_state.get('file_contents'), t)
|
35 |
+
|
36 |
+
# Actualizar el historial del chat
|
37 |
+
chat_history.append({"role": "user", "content": user_input})
|
38 |
+
chat_history.append({"role": "assistant", "content": response, "visualization": visualization})
|
39 |
+
st.session_state.semantic_chat_history = chat_history
|
40 |
+
|
41 |
+
# Mostrar el resultado más reciente
|
42 |
+
with st.chat_message("assistant"):
|
43 |
+
st.write(response)
|
44 |
+
if visualization:
|
45 |
+
st.pyplot(visualization)
|
46 |
+
|
47 |
+
# Guardar el resultado en la base de datos si es un análisis
|
48 |
+
if user_input.startswith('/analisis_semantico'):
|
49 |
+
result = perform_semantic_analysis(st.session_state.file_contents, nlp_models[lang_code], lang_code)
|
50 |
+
store_semantic_result(st.session_state.username, st.session_state.file_contents, result)
|
51 |
+
|
52 |
+
# Botón para limpiar el historial del chat
|
53 |
+
if st.button(t['clear_chat'], key=generate_unique_key('semantic', 'clear_chat')):
|
54 |
+
st.session_state.semantic_chat_history = []
|
55 |
st.rerun()
|
modules/semantic/semantic_interface_2.py
CHANGED
@@ -1,167 +1,167 @@
|
|
1 |
-
import streamlit as st
|
2 |
-
from .semantic_process import process_semantic_analysis
|
3 |
-
from ..chatbot.chatbot import initialize_chatbot
|
4 |
-
from ..database.database_oldFromV2 import store_file_semantic_contents, retrieve_file_contents, delete_file, get_user_files
|
5 |
-
from ..utils.widget_utils import generate_unique_key
|
6 |
-
|
7 |
-
def get_translation(t, key, default):
|
8 |
-
return t.get(key, default)
|
9 |
-
|
10 |
-
def display_semantic_interface(lang_code, nlp_models, t):
|
11 |
-
#st.set_page_config(layout="wide")
|
12 |
-
|
13 |
-
# Estilo CSS personalizado
|
14 |
-
st.markdown("""
|
15 |
-
<style>
|
16 |
-
.semantic-initial-message {
|
17 |
-
background-color: #f0f2f6;
|
18 |
-
border-left: 5px solid #4CAF50;
|
19 |
-
padding: 10px;
|
20 |
-
border-radius: 5px;
|
21 |
-
font-size: 16px;
|
22 |
-
margin-bottom: 20px;
|
23 |
-
}
|
24 |
-
.stButton > button {
|
25 |
-
width: 100%;
|
26 |
-
}
|
27 |
-
.chat-container {
|
28 |
-
height: 400px;
|
29 |
-
overflow-y: auto;
|
30 |
-
border: 1px solid #ddd;
|
31 |
-
padding: 10px;
|
32 |
-
border-radius: 5px;
|
33 |
-
}
|
34 |
-
.file-management-container {
|
35 |
-
border: 1px solid #ddd;
|
36 |
-
padding: 10px;
|
37 |
-
border-radius: 5px;
|
38 |
-
margin-bottom: 20px;
|
39 |
-
}
|
40 |
-
.horizontal-list {
|
41 |
-
display: flex;
|
42 |
-
flex-wrap: wrap;
|
43 |
-
gap: 10px;
|
44 |
-
}
|
45 |
-
</style>
|
46 |
-
""", unsafe_allow_html=True)
|
47 |
-
|
48 |
-
# Mostrar el mensaje inicial como un párrafo estilizado
|
49 |
-
st.markdown(f"""
|
50 |
-
<div class="semantic-initial-message">
|
51 |
-
{get_translation(t, 'semantic_initial_message', 'Welcome to the semantic analysis interface.')}
|
52 |
-
</div>
|
53 |
-
""", unsafe_allow_html=True)
|
54 |
-
|
55 |
-
# Inicializar el chatbot si no existe
|
56 |
-
if 'semantic_chatbot' not in st.session_state:
|
57 |
-
st.session_state.semantic_chatbot = initialize_chatbot('semantic')
|
58 |
-
|
59 |
-
# Contenedor para la gestión de archivos
|
60 |
-
with st.container():
|
61 |
-
st.markdown('<div class="file-management-container">', unsafe_allow_html=True)
|
62 |
-
col1, col2, col3, col4 = st.columns(4)
|
63 |
-
|
64 |
-
with col1:
|
65 |
-
if st.button(get_translation(t, 'upload_file', 'Upload File'), key=generate_unique_key('semantic', 'upload_button')):
|
66 |
-
uploaded_file = st.file_uploader(get_translation(t, 'file_uploader', 'Choose a file'), type=['txt', 'pdf', 'docx', 'doc', 'odt'], key=generate_unique_key('semantic', 'file_uploader'))
|
67 |
-
if uploaded_file is not None:
|
68 |
-
file_contents = uploaded_file.getvalue().decode('utf-8')
|
69 |
-
if store_file_semantic_contents(st.session_state.username, uploaded_file.name, file_contents):
|
70 |
-
st.success(get_translation(t, 'file_uploaded_success', 'File uploaded and saved to database successfully'))
|
71 |
-
st.session_state.file_contents = file_contents
|
72 |
-
st.rerun()
|
73 |
-
else:
|
74 |
-
st.error(get_translation(t, 'file_upload_error', 'Error uploading file'))
|
75 |
-
|
76 |
-
with col2:
|
77 |
-
user_files = get_user_files(st.session_state.username, 'semantic')
|
78 |
-
file_options = [get_translation(t, 'select_file', 'Select a file')] + [file['file_name'] for file in user_files]
|
79 |
-
selected_file = st.selectbox(get_translation(t, 'file_list', 'File List'), options=file_options, key=generate_unique_key('semantic', 'file_selector'))
|
80 |
-
if selected_file != get_translation(t, 'select_file', 'Select a file'):
|
81 |
-
if st.button(get_translation(t, 'load_file', 'Load File'), key=generate_unique_key('semantic', 'load_file')):
|
82 |
-
file_contents = retrieve_file_contents(st.session_state.username, selected_file, 'semantic')
|
83 |
-
if file_contents:
|
84 |
-
st.session_state.file_contents = file_contents
|
85 |
-
st.success(get_translation(t, 'file_loaded_success', 'File loaded successfully'))
|
86 |
-
else:
|
87 |
-
st.error(get_translation(t, 'file_load_error', 'Error loading file'))
|
88 |
-
|
89 |
-
with col3:
|
90 |
-
if st.button(get_translation(t, 'analyze_document', 'Analyze Document'), key=generate_unique_key('semantic', 'analyze_document')):
|
91 |
-
if 'file_contents' in st.session_state:
|
92 |
-
with st.spinner(get_translation(t, 'analyzing', 'Analyzing...')):
|
93 |
-
graph, key_concepts = process_semantic_analysis(st.session_state.file_contents, nlp_models[lang_code], lang_code)
|
94 |
-
st.session_state.graph = graph
|
95 |
-
st.session_state.key_concepts = key_concepts
|
96 |
-
st.success(get_translation(t, 'analysis_completed', 'Analysis completed'))
|
97 |
-
else:
|
98 |
-
st.error(get_translation(t, 'no_file_uploaded', 'No file uploaded'))
|
99 |
-
|
100 |
-
with col4:
|
101 |
-
if st.button(get_translation(t, 'delete_file', 'Delete File'), key=generate_unique_key('semantic', 'delete_file')):
|
102 |
-
if selected_file and selected_file != get_translation(t, 'select_file', 'Select a file'):
|
103 |
-
if delete_file(st.session_state.username, selected_file, 'semantic'):
|
104 |
-
st.success(get_translation(t, 'file_deleted_success', 'File deleted successfully'))
|
105 |
-
if 'file_contents' in st.session_state:
|
106 |
-
del st.session_state.file_contents
|
107 |
-
st.rerun()
|
108 |
-
else:
|
109 |
-
st.error(get_translation(t, 'file_delete_error', 'Error deleting file'))
|
110 |
-
else:
|
111 |
-
st.error(get_translation(t, 'no_file_selected', 'No file selected'))
|
112 |
-
|
113 |
-
st.markdown('</div>', unsafe_allow_html=True)
|
114 |
-
|
115 |
-
# Crear dos columnas: una para el chat y otra para la visualización
|
116 |
-
col_chat, col_graph = st.columns([1, 1])
|
117 |
-
|
118 |
-
with col_chat:
|
119 |
-
st.subheader(get_translation(t, 'chat_title', 'Semantic Analysis Chat'))
|
120 |
-
# Chat interface
|
121 |
-
chat_container = st.container()
|
122 |
-
|
123 |
-
with chat_container:
|
124 |
-
# Mostrar el historial del chat
|
125 |
-
chat_history = st.session_state.get('semantic_chat_history', [])
|
126 |
-
for message in chat_history:
|
127 |
-
with st.chat_message(message["role"]):
|
128 |
-
st.write(message["content"])
|
129 |
-
|
130 |
-
# Input del usuario
|
131 |
-
user_input = st.chat_input(get_translation(t, 'semantic_chat_input', 'Type your message here...'), key=generate_unique_key('semantic', 'chat_input'))
|
132 |
-
|
133 |
-
if user_input:
|
134 |
-
# Añadir el mensaje del usuario al historial
|
135 |
-
chat_history.append({"role": "user", "content": user_input})
|
136 |
-
|
137 |
-
# Generar respuesta del chatbot
|
138 |
-
chatbot = st.session_state.semantic_chatbot
|
139 |
-
response = chatbot.generate_response(user_input, lang_code, context=st.session_state.get('file_contents'))
|
140 |
-
|
141 |
-
# Añadir la respuesta del chatbot al historial
|
142 |
-
chat_history.append({"role": "assistant", "content": response})
|
143 |
-
|
144 |
-
# Actualizar el historial en session_state
|
145 |
-
st.session_state.semantic_chat_history = chat_history
|
146 |
-
|
147 |
-
# Forzar la actualización de la interfaz
|
148 |
-
st.rerun()
|
149 |
-
|
150 |
-
with col_graph:
|
151 |
-
st.subheader(get_translation(t, 'graph_title', 'Semantic Graph'))
|
152 |
-
|
153 |
-
# Mostrar conceptos clave en un expander horizontal
|
154 |
-
with st.expander(get_translation(t, 'key_concepts_title', 'Key Concepts'), expanded=True):
|
155 |
-
if 'key_concepts' in st.session_state:
|
156 |
-
st.markdown('<div class="horizontal-list">', unsafe_allow_html=True)
|
157 |
-
for concept, freq in st.session_state.key_concepts:
|
158 |
-
st.markdown(f'<span style="margin-right: 10px;">{concept}: {freq:.2f}</span>', unsafe_allow_html=True)
|
159 |
-
st.markdown('</div>', unsafe_allow_html=True)
|
160 |
-
|
161 |
-
if 'graph' in st.session_state:
|
162 |
-
st.pyplot(st.session_state.graph)
|
163 |
-
|
164 |
-
# Botón para limpiar el historial del chat
|
165 |
-
if st.button(get_translation(t, 'clear_chat', 'Clear chat'), key=generate_unique_key('semantic', 'clear_chat')):
|
166 |
-
st.session_state.semantic_chat_history = []
|
167 |
st.rerun()
|
|
|
1 |
+
import streamlit as st
|
2 |
+
from .semantic_process import process_semantic_analysis
|
3 |
+
from ..chatbot.chatbot import initialize_chatbot
|
4 |
+
from ..database.database_oldFromV2 import store_file_semantic_contents, retrieve_file_contents, delete_file, get_user_files
|
5 |
+
from ..utils.widget_utils import generate_unique_key
|
6 |
+
|
7 |
+
def get_translation(t, key, default):
|
8 |
+
return t.get(key, default)
|
9 |
+
|
10 |
+
def display_semantic_interface(lang_code, nlp_models, t):
    """Render the semantic-analysis page: file management row, chat pane,
    and key-concepts/graph pane.

    Args:
        lang_code: Active language code (used as a key into nlp_models).
        nlp_models: Mapping of language code -> loaded NLP model
            (presumably spaCy — TODO confirm against semantic_process).
        t: Translation dictionary for UI strings (see get_translation).
    """
    #st.set_page_config(layout="wide")

    # Custom CSS for the page (welcome banner, full-width buttons,
    # chat box, file-management frame, horizontal concept list).
    st.markdown("""
    <style>
    .semantic-initial-message {
        background-color: #f0f2f6;
        border-left: 5px solid #4CAF50;
        padding: 10px;
        border-radius: 5px;
        font-size: 16px;
        margin-bottom: 20px;
    }
    .stButton > button {
        width: 100%;
    }
    .chat-container {
        height: 400px;
        overflow-y: auto;
        border: 1px solid #ddd;
        padding: 10px;
        border-radius: 5px;
    }
    .file-management-container {
        border: 1px solid #ddd;
        padding: 10px;
        border-radius: 5px;
        margin-bottom: 20px;
    }
    .horizontal-list {
        display: flex;
        flex-wrap: wrap;
        gap: 10px;
    }
    </style>
    """, unsafe_allow_html=True)

    # Show the initial welcome message as a styled paragraph.
    st.markdown(f"""
    <div class="semantic-initial-message">
        {get_translation(t, 'semantic_initial_message', 'Welcome to the semantic analysis interface.')}
    </div>
    """, unsafe_allow_html=True)

    # Initialize the chatbot once per session.
    if 'semantic_chatbot' not in st.session_state:
        st.session_state.semantic_chatbot = initialize_chatbot('semantic')

    # Container for file management (upload / load / analyze / delete).
    with st.container():
        st.markdown('<div class="file-management-container">', unsafe_allow_html=True)
        col1, col2, col3, col4 = st.columns(4)

        with col1:
            # NOTE(review): the uploader is rendered only after the button click;
            # on the next rerun the button state resets, so the uploader may
            # vanish before a file is chosen — confirm this flow is intended.
            if st.button(get_translation(t, 'upload_file', 'Upload File'), key=generate_unique_key('semantic', 'upload_button')):
                uploaded_file = st.file_uploader(get_translation(t, 'file_uploader', 'Choose a file'), type=['txt', 'pdf', 'docx', 'doc', 'odt'], key=generate_unique_key('semantic', 'file_uploader'))
                if uploaded_file is not None:
                    # NOTE(review): decode('utf-8') assumes plain text; pdf/docx/odt
                    # uploads are binary and would raise UnicodeDecodeError — confirm.
                    file_contents = uploaded_file.getvalue().decode('utf-8')
                    if store_file_semantic_contents(st.session_state.username, uploaded_file.name, file_contents):
                        st.success(get_translation(t, 'file_uploaded_success', 'File uploaded and saved to database successfully'))
                        st.session_state.file_contents = file_contents
                        st.rerun()
                    else:
                        st.error(get_translation(t, 'file_upload_error', 'Error uploading file'))

        with col2:
            # List the user's previously saved semantic files; loading is explicit
            # via the Load File button.
            user_files = get_user_files(st.session_state.username, 'semantic')
            file_options = [get_translation(t, 'select_file', 'Select a file')] + [file['file_name'] for file in user_files]
            selected_file = st.selectbox(get_translation(t, 'file_list', 'File List'), options=file_options, key=generate_unique_key('semantic', 'file_selector'))
            if selected_file != get_translation(t, 'select_file', 'Select a file'):
                if st.button(get_translation(t, 'load_file', 'Load File'), key=generate_unique_key('semantic', 'load_file')):
                    file_contents = retrieve_file_contents(st.session_state.username, selected_file, 'semantic')
                    if file_contents:
                        st.session_state.file_contents = file_contents
                        st.success(get_translation(t, 'file_loaded_success', 'File loaded successfully'))
                    else:
                        st.error(get_translation(t, 'file_load_error', 'Error loading file'))

        with col3:
            if st.button(get_translation(t, 'analyze_document', 'Analyze Document'), key=generate_unique_key('semantic', 'analyze_document')):
                if 'file_contents' in st.session_state:
                    with st.spinner(get_translation(t, 'analyzing', 'Analyzing...')):
                        # Run the semantic analysis and cache the results so they
                        # survive Streamlit reruns.
                        graph, key_concepts = process_semantic_analysis(st.session_state.file_contents, nlp_models[lang_code], lang_code)
                        st.session_state.graph = graph
                        st.session_state.key_concepts = key_concepts
                        st.success(get_translation(t, 'analysis_completed', 'Analysis completed'))
                else:
                    st.error(get_translation(t, 'no_file_uploaded', 'No file uploaded'))

        with col4:
            if st.button(get_translation(t, 'delete_file', 'Delete File'), key=generate_unique_key('semantic', 'delete_file')):
                if selected_file and selected_file != get_translation(t, 'select_file', 'Select a file'):
                    if delete_file(st.session_state.username, selected_file, 'semantic'):
                        st.success(get_translation(t, 'file_deleted_success', 'File deleted successfully'))
                        # Drop the cached contents in case the deleted file was loaded.
                        if 'file_contents' in st.session_state:
                            del st.session_state.file_contents
                        st.rerun()
                    else:
                        st.error(get_translation(t, 'file_delete_error', 'Error deleting file'))
                else:
                    st.error(get_translation(t, 'no_file_selected', 'No file selected'))

        st.markdown('</div>', unsafe_allow_html=True)

    # Two columns: chat on the left, graph/visualization on the right.
    col_chat, col_graph = st.columns([1, 1])

    with col_chat:
        st.subheader(get_translation(t, 'chat_title', 'Semantic Analysis Chat'))
        # Chat interface
        chat_container = st.container()

        with chat_container:
            # Replay the stored chat history.
            chat_history = st.session_state.get('semantic_chat_history', [])
            for message in chat_history:
                with st.chat_message(message["role"]):
                    st.write(message["content"])

            # User input
            user_input = st.chat_input(get_translation(t, 'semantic_chat_input', 'Type your message here...'), key=generate_unique_key('semantic', 'chat_input'))

            if user_input:
                # Append the user's message to the history.
                chat_history.append({"role": "user", "content": user_input})

                # Generate the chatbot response, passing the loaded file (if any)
                # as conversational context.
                chatbot = st.session_state.semantic_chatbot
                response = chatbot.generate_response(user_input, lang_code, context=st.session_state.get('file_contents'))

                # Append the chatbot's reply to the history.
                chat_history.append({"role": "assistant", "content": response})

                # Persist the updated history in session_state.
                st.session_state.semantic_chat_history = chat_history

                # Force a UI refresh so the new messages render.
                st.rerun()

    with col_graph:
        st.subheader(get_translation(t, 'graph_title', 'Semantic Graph'))

        # Show key concepts in a horizontally-flowing list inside an expander.
        with st.expander(get_translation(t, 'key_concepts_title', 'Key Concepts'), expanded=True):
            if 'key_concepts' in st.session_state:
                st.markdown('<div class="horizontal-list">', unsafe_allow_html=True)
                for concept, freq in st.session_state.key_concepts:
                    st.markdown(f'<span style="margin-right: 10px;">{concept}: {freq:.2f}</span>', unsafe_allow_html=True)
                st.markdown('</div>', unsafe_allow_html=True)

        # The cached figure from the last analysis, if any.
        if 'graph' in st.session_state:
            st.pyplot(st.session_state.graph)

    # Button to clear the chat history.
    if st.button(get_translation(t, 'clear_chat', 'Clear chat'), key=generate_unique_key('semantic', 'clear_chat')):
        st.session_state.semantic_chat_history = []
        st.rerun()
|
modules/semantic/semantic_interface_3.py
CHANGED
@@ -1,182 +1,182 @@
|
|
1 |
-
import streamlit as st
|
2 |
-
import logging
|
3 |
-
from .semantic_process import process_semantic_analysis
|
4 |
-
from ..chatbot.chatbot import initialize_chatbot, process_semantic_chat_input
|
5 |
-
from ..database.database_oldFromV2 import store_file_semantic_contents, retrieve_file_contents, delete_file, get_user_files
|
6 |
-
from ..utils.widget_utils import generate_unique_key
|
7 |
-
|
8 |
-
# Module-level logger, namespaced to this module per logging convention.
logger = logging.getLogger(__name__)
|
9 |
-
|
10 |
-
def get_translation(t, key, default):
    """Look up *key* in the translation mapping *t*, falling back to *default*."""
    try:
        return t[key]
    except KeyError:
        return default
|
12 |
-
|
13 |
-
def display_semantic_interface(lang_code, nlp_models, t):
    """Render the semantic-analysis page (v3): file management row, chat with
    '/analyze_current' command support, and tabbed concept/entity graphs.

    Args:
        lang_code: Active language code (used as a key into nlp_models).
        nlp_models: Mapping of language code -> loaded NLP model
            (presumably spaCy — TODO confirm against semantic_process).
        t: Translation dictionary for UI strings (see get_translation).
    """
    # Custom CSS for the page layout.
    st.markdown("""
    <style>
    .semantic-initial-message {
        background-color: #f0f2f6;
        border-left: 5px solid #4CAF50;
        padding: 10px;
        border-radius: 5px;
        font-size: 16px;
        margin-bottom: 20px;
    }
    .stButton > button {
        width: 100%;
        height: 3em;
    }
    .chat-container {
        height: 400px;
        overflow-y: auto;
        border: 1px solid #ddd;
        padding: 10px;
        border-radius: 5px;
    }
    .file-management-container, .analysis-container {
        border: 1px solid #ddd;
        padding: 10px;
        border-radius: 5px;
        margin-bottom: 20px;
    }
    .horizontal-list {
        display: flex;
        flex-wrap: wrap;
        gap: 10px;
    }
    .graph-container {
        height: 500px;
        overflow-y: auto;
    }
    </style>
    """, unsafe_allow_html=True)

    # Styled welcome banner.
    st.markdown(f"""
    <div class="semantic-initial-message">
        {get_translation(t, 'semantic_initial_message', 'Welcome to the semantic analysis interface.')}
    </div>
    """, unsafe_allow_html=True)

    # Initialize the chatbot once per session.
    if 'semantic_chatbot' not in st.session_state:
        st.session_state.semantic_chatbot = initialize_chatbot('semantic')

    # File management container (upload / select / analyze / delete).
    with st.container():
        st.markdown('<div class="file-management-container">', unsafe_allow_html=True)
        col1, col2, col3, col4 = st.columns(4)

        with col1:
            uploaded_file = st.file_uploader(get_translation(t, 'upload_file', 'Upload File'), type=['txt', 'pdf', 'docx', 'doc', 'odt'], key=generate_unique_key('semantic', 'file_uploader'))
            if uploaded_file is not None:
                # NOTE(review): decode('utf-8') assumes plain text; pdf/docx/odt
                # uploads are binary and would raise UnicodeDecodeError — confirm.
                file_contents = uploaded_file.getvalue().decode('utf-8')
                if store_file_semantic_contents(st.session_state.username, uploaded_file.name, file_contents):
                    st.session_state.file_contents = file_contents
                    st.success(get_translation(t, 'file_uploaded_success', 'File uploaded and saved successfully'))
                    st.rerun()
                else:
                    st.error(get_translation(t, 'file_upload_error', 'Error uploading file'))

        with col2:
            # Selecting a saved file loads it immediately (no explicit Load button
            # in this version of the interface).
            user_files = get_user_files(st.session_state.username, 'semantic')
            file_options = [get_translation(t, 'select_saved_file', 'Select a saved file')] + [file['file_name'] for file in user_files]
            selected_file = st.selectbox("", options=file_options, key=generate_unique_key('semantic', 'file_selector'))
            if selected_file and selected_file != get_translation(t, 'select_saved_file', 'Select a saved file'):
                file_contents = retrieve_file_contents(st.session_state.username, selected_file, 'semantic')
                if file_contents:
                    st.session_state.file_contents = file_contents
                    st.success(get_translation(t, 'file_loaded_success', 'File loaded successfully'))
                else:
                    st.error(get_translation(t, 'file_load_error', 'Error loading file'))

        with col3:
            if st.button(get_translation(t, 'analyze_document', 'Analyze Document'), key=generate_unique_key('semantic', 'analyze_document')):
                if 'file_contents' in st.session_state:
                    with st.spinner(get_translation(t, 'analyzing', 'Analyzing...')):
                        try:
                            # Run the analysis; cache both graphs and the key
                            # concepts so they survive Streamlit reruns.
                            nlp_model = nlp_models[lang_code]
                            concept_graph, entity_graph, key_concepts = process_semantic_analysis(st.session_state.file_contents, nlp_model, lang_code)
                            st.session_state.concept_graph = concept_graph
                            st.session_state.entity_graph = entity_graph
                            st.session_state.key_concepts = key_concepts
                            st.success(get_translation(t, 'analysis_completed', 'Analysis completed'))
                        except Exception as e:
                            # Surface the failure to both the log and the UI.
                            logger.error(f"Error during analysis: {str(e)}")
                            st.error(f"Error during analysis: {str(e)}")
                else:
                    st.error(get_translation(t, 'no_file_uploaded', 'No file uploaded'))

        with col4:
            if st.button(get_translation(t, 'delete_file', 'Delete File'), key=generate_unique_key('semantic', 'delete_file')):
                if selected_file and selected_file != get_translation(t, 'select_saved_file', 'Select a saved file'):
                    if delete_file(st.session_state.username, selected_file, 'semantic'):
                        st.success(get_translation(t, 'file_deleted_success', 'File deleted successfully'))
                        # Drop cached contents if the deleted file was the loaded one.
                        if 'file_contents' in st.session_state:
                            del st.session_state.file_contents
                        st.rerun()
                    else:
                        st.error(get_translation(t, 'file_delete_error', 'Error deleting file'))
                else:
                    st.error(get_translation(t, 'no_file_selected', 'No file selected'))

        st.markdown('</div>', unsafe_allow_html=True)

    # Analysis section: chat pane (left) and graphs pane (right).
    st.markdown('<div class="analysis-container">', unsafe_allow_html=True)
    col_chat, col_graph = st.columns([1, 1])

    with col_chat:
        st.subheader(get_translation(t, 'chat_title', 'Semantic Analysis Chat'))
        chat_container = st.container()

        with chat_container:
            # Replay the stored chat history.
            chat_history = st.session_state.get('semantic_chat_history', [])
            for message in chat_history:
                with st.chat_message(message["role"]):
                    st.write(message["content"])

            user_input = st.chat_input(get_translation(t, 'semantic_chat_input', 'Type your message here...'), key=generate_unique_key('semantic', 'chat_input'))

            if user_input:
                chat_history.append({"role": "user", "content": user_input})

                # '/analyze_current' routes to the document-aware handler with the
                # loaded file contents; anything else goes to the plain chatbot.
                if user_input.startswith('/analyze_current'):
                    response = process_semantic_chat_input(user_input, lang_code, nlp_models[lang_code], st.session_state.get('file_contents', ''))
                else:
                    response = st.session_state.semantic_chatbot.generate_response(user_input, lang_code)

                chat_history.append({"role": "assistant", "content": response})
                st.session_state.semantic_chat_history = chat_history

    with col_graph:
        st.subheader(get_translation(t, 'graph_title', 'Semantic Graphs'))

        # Key concepts and entities rendered as horizontal lists.
        if 'key_concepts' in st.session_state:
            st.write(get_translation(t, 'key_concepts_title', 'Key Concepts'))
            st.markdown('<div class="horizontal-list">', unsafe_allow_html=True)
            for concept, freq in st.session_state.key_concepts:
                st.markdown(f'<span style="margin-right: 10px;">{concept}: {freq:.2f}</span>', unsafe_allow_html=True)
            st.markdown('</div>', unsafe_allow_html=True)

        if 'entities' in st.session_state:
            st.write(get_translation(t, 'entities_title', 'Entities'))
            st.markdown('<div class="horizontal-list">', unsafe_allow_html=True)
            # NOTE(review): loop variable 'type' shadows the builtin in this scope.
            for entity, type in st.session_state.entities.items():
                st.markdown(f'<span style="margin-right: 10px;">{entity}: {type}</span>', unsafe_allow_html=True)
            st.markdown('</div>', unsafe_allow_html=True)

        # Tabs to show the two graphs.
        tab1, tab2 = st.tabs(["Concept Graph", "Entity Graph"])

        with tab1:
            if 'concept_graph' in st.session_state:
                st.pyplot(st.session_state.concept_graph)

        with tab2:
            if 'entity_graph' in st.session_state:
                st.pyplot(st.session_state.entity_graph)

    st.markdown('</div>', unsafe_allow_html=True)

    # Button to clear the chat history.
    if st.button(get_translation(t, 'clear_chat', 'Clear chat'), key=generate_unique_key('semantic', 'clear_chat')):
        st.session_state.semantic_chat_history = []
        st.rerun()
|
|
|
1 |
+
import streamlit as st
|
2 |
+
import logging
|
3 |
+
from .semantic_process import process_semantic_analysis
|
4 |
+
from ..chatbot.chatbot import initialize_chatbot, process_semantic_chat_input
|
5 |
+
from ..database.database_oldFromV2 import store_file_semantic_contents, retrieve_file_contents, delete_file, get_user_files
|
6 |
+
from ..utils.widget_utils import generate_unique_key
|
7 |
+
|
8 |
+
# Module-level logger, namespaced to this module per logging convention.
logger = logging.getLogger(__name__)
|
9 |
+
|
10 |
+
def get_translation(t, key, default):
    """Look up *key* in the translation mapping *t*, falling back to *default*."""
    try:
        return t[key]
    except KeyError:
        return default
|
12 |
+
|
13 |
+
def display_semantic_interface(lang_code, nlp_models, t):
    """Render the semantic-analysis page (v3): file management row, chat with
    '/analyze_current' command support, and tabbed concept/entity graphs.

    Args:
        lang_code: Active language code (used as a key into nlp_models).
        nlp_models: Mapping of language code -> loaded NLP model
            (presumably spaCy — TODO confirm against semantic_process).
        t: Translation dictionary for UI strings (see get_translation).
    """
    # Custom CSS for the page layout.
    st.markdown("""
    <style>
    .semantic-initial-message {
        background-color: #f0f2f6;
        border-left: 5px solid #4CAF50;
        padding: 10px;
        border-radius: 5px;
        font-size: 16px;
        margin-bottom: 20px;
    }
    .stButton > button {
        width: 100%;
        height: 3em;
    }
    .chat-container {
        height: 400px;
        overflow-y: auto;
        border: 1px solid #ddd;
        padding: 10px;
        border-radius: 5px;
    }
    .file-management-container, .analysis-container {
        border: 1px solid #ddd;
        padding: 10px;
        border-radius: 5px;
        margin-bottom: 20px;
    }
    .horizontal-list {
        display: flex;
        flex-wrap: wrap;
        gap: 10px;
    }
    .graph-container {
        height: 500px;
        overflow-y: auto;
    }
    </style>
    """, unsafe_allow_html=True)

    # Styled welcome banner.
    st.markdown(f"""
    <div class="semantic-initial-message">
        {get_translation(t, 'semantic_initial_message', 'Welcome to the semantic analysis interface.')}
    </div>
    """, unsafe_allow_html=True)

    # Initialize the chatbot once per session.
    if 'semantic_chatbot' not in st.session_state:
        st.session_state.semantic_chatbot = initialize_chatbot('semantic')

    # File management container (upload / select / analyze / delete).
    with st.container():
        st.markdown('<div class="file-management-container">', unsafe_allow_html=True)
        col1, col2, col3, col4 = st.columns(4)

        with col1:
            uploaded_file = st.file_uploader(get_translation(t, 'upload_file', 'Upload File'), type=['txt', 'pdf', 'docx', 'doc', 'odt'], key=generate_unique_key('semantic', 'file_uploader'))
            if uploaded_file is not None:
                # NOTE(review): decode('utf-8') assumes plain text; pdf/docx/odt
                # uploads are binary and would raise UnicodeDecodeError — confirm.
                file_contents = uploaded_file.getvalue().decode('utf-8')
                if store_file_semantic_contents(st.session_state.username, uploaded_file.name, file_contents):
                    st.session_state.file_contents = file_contents
                    st.success(get_translation(t, 'file_uploaded_success', 'File uploaded and saved successfully'))
                    st.rerun()
                else:
                    st.error(get_translation(t, 'file_upload_error', 'Error uploading file'))

        with col2:
            # Selecting a saved file loads it immediately (no explicit Load button
            # in this version of the interface).
            user_files = get_user_files(st.session_state.username, 'semantic')
            file_options = [get_translation(t, 'select_saved_file', 'Select a saved file')] + [file['file_name'] for file in user_files]
            selected_file = st.selectbox("", options=file_options, key=generate_unique_key('semantic', 'file_selector'))
            if selected_file and selected_file != get_translation(t, 'select_saved_file', 'Select a saved file'):
                file_contents = retrieve_file_contents(st.session_state.username, selected_file, 'semantic')
                if file_contents:
                    st.session_state.file_contents = file_contents
                    st.success(get_translation(t, 'file_loaded_success', 'File loaded successfully'))
                else:
                    st.error(get_translation(t, 'file_load_error', 'Error loading file'))

        with col3:
            if st.button(get_translation(t, 'analyze_document', 'Analyze Document'), key=generate_unique_key('semantic', 'analyze_document')):
                if 'file_contents' in st.session_state:
                    with st.spinner(get_translation(t, 'analyzing', 'Analyzing...')):
                        try:
                            # Run the analysis; cache both graphs and the key
                            # concepts so they survive Streamlit reruns.
                            nlp_model = nlp_models[lang_code]
                            concept_graph, entity_graph, key_concepts = process_semantic_analysis(st.session_state.file_contents, nlp_model, lang_code)
                            st.session_state.concept_graph = concept_graph
                            st.session_state.entity_graph = entity_graph
                            st.session_state.key_concepts = key_concepts
                            st.success(get_translation(t, 'analysis_completed', 'Analysis completed'))
                        except Exception as e:
                            # Surface the failure to both the log and the UI.
                            logger.error(f"Error during analysis: {str(e)}")
                            st.error(f"Error during analysis: {str(e)}")
                else:
                    st.error(get_translation(t, 'no_file_uploaded', 'No file uploaded'))

        with col4:
            if st.button(get_translation(t, 'delete_file', 'Delete File'), key=generate_unique_key('semantic', 'delete_file')):
                if selected_file and selected_file != get_translation(t, 'select_saved_file', 'Select a saved file'):
                    if delete_file(st.session_state.username, selected_file, 'semantic'):
                        st.success(get_translation(t, 'file_deleted_success', 'File deleted successfully'))
                        # Drop cached contents if the deleted file was the loaded one.
                        if 'file_contents' in st.session_state:
                            del st.session_state.file_contents
                        st.rerun()
                    else:
                        st.error(get_translation(t, 'file_delete_error', 'Error deleting file'))
                else:
                    st.error(get_translation(t, 'no_file_selected', 'No file selected'))

        st.markdown('</div>', unsafe_allow_html=True)

    # Analysis section: chat pane (left) and graphs pane (right).
    st.markdown('<div class="analysis-container">', unsafe_allow_html=True)
    col_chat, col_graph = st.columns([1, 1])

    with col_chat:
        st.subheader(get_translation(t, 'chat_title', 'Semantic Analysis Chat'))
        chat_container = st.container()

        with chat_container:
            # Replay the stored chat history.
            chat_history = st.session_state.get('semantic_chat_history', [])
            for message in chat_history:
                with st.chat_message(message["role"]):
                    st.write(message["content"])

            user_input = st.chat_input(get_translation(t, 'semantic_chat_input', 'Type your message here...'), key=generate_unique_key('semantic', 'chat_input'))

            if user_input:
                chat_history.append({"role": "user", "content": user_input})

                # '/analyze_current' routes to the document-aware handler with the
                # loaded file contents; anything else goes to the plain chatbot.
                if user_input.startswith('/analyze_current'):
                    response = process_semantic_chat_input(user_input, lang_code, nlp_models[lang_code], st.session_state.get('file_contents', ''))
                else:
                    response = st.session_state.semantic_chatbot.generate_response(user_input, lang_code)

                chat_history.append({"role": "assistant", "content": response})
                st.session_state.semantic_chat_history = chat_history

    with col_graph:
        st.subheader(get_translation(t, 'graph_title', 'Semantic Graphs'))

        # Key concepts and entities rendered as horizontal lists.
        if 'key_concepts' in st.session_state:
            st.write(get_translation(t, 'key_concepts_title', 'Key Concepts'))
            st.markdown('<div class="horizontal-list">', unsafe_allow_html=True)
            for concept, freq in st.session_state.key_concepts:
                st.markdown(f'<span style="margin-right: 10px;">{concept}: {freq:.2f}</span>', unsafe_allow_html=True)
            st.markdown('</div>', unsafe_allow_html=True)

        if 'entities' in st.session_state:
            st.write(get_translation(t, 'entities_title', 'Entities'))
            st.markdown('<div class="horizontal-list">', unsafe_allow_html=True)
            # NOTE(review): loop variable 'type' shadows the builtin in this scope.
            for entity, type in st.session_state.entities.items():
                st.markdown(f'<span style="margin-right: 10px;">{entity}: {type}</span>', unsafe_allow_html=True)
            st.markdown('</div>', unsafe_allow_html=True)

        # Tabs to show the two graphs.
        tab1, tab2 = st.tabs(["Concept Graph", "Entity Graph"])

        with tab1:
            if 'concept_graph' in st.session_state:
                st.pyplot(st.session_state.concept_graph)

        with tab2:
            if 'entity_graph' in st.session_state:
                st.pyplot(st.session_state.entity_graph)

    st.markdown('</div>', unsafe_allow_html=True)

    # Button to clear the chat history.
    if st.button(get_translation(t, 'clear_chat', 'Clear chat'), key=generate_unique_key('semantic', 'clear_chat')):
        st.session_state.semantic_chat_history = []
        st.rerun()
|