Upload app.py
app.py CHANGED
@@ -289,23 +289,31 @@ with chat_container:
 
 st.markdown("---")
 
-#
-
-with st.chat_message(
-    name=message['role'],
-    avatar=AI_AVATAR_ICON if message['role'] == MODEL_ROLE else USER_AVATAR_ICON
-):
-    st.markdown(message['content'])
+# Create a container for the messages so it pushes the input to the bottom
+messages_container = st.container()
 
-#
-
-
-
-
-
-
-
+# Create the input first (it will appear at the end)
+input_placeholder = st.empty()
+new_prompt = input_placeholder.chat_input('¿En qué puedo ayudarte hoy?')
+
+# Show the messages in the messages container
+with messages_container:
+    for message in st.session_state.messages:
+        with st.chat_message(
+            name=message['role'],
+            avatar=AI_AVATAR_ICON if message['role'] == MODEL_ROLE else USER_AVATAR_ICON
+        ):
+            st.markdown(message['content'])
 
+# Process the user input if it exists
+if 'user_input' in st.session_state:
+    prompt = st.session_state.user_input
+    del st.session_state.user_input
+
+    with st.chat_message("user", avatar=USER_AVATAR_ICON):
+        st.markdown(prompt)
+    add_message("user", prompt, USER_AVATAR_ICON)
+
 # Update the title and process the response
 try:
     title_response = st.session_state.model.generate_content(
@@ -324,8 +332,7 @@
 process_model_response(prompt)
 update_chat_memory()
 
-#
-new_prompt = st.chat_input('¿En qué puedo ayudarte hoy?')
+# Process the new input if it exists
 if new_prompt:
     with st.chat_message("user", avatar=USER_AVATAR_ICON):
         st.markdown(new_prompt)