zelk12 committed
Commit 8d04965 · verified · 1 Parent(s): 2157ca1

Update app.py
Files changed (1)
  1. app.py +23 -45
app.py CHANGED
@@ -177,13 +177,6 @@ def user_message(msg: str, history: list) -> tuple[str, list]:
     history.append(ChatMessage(role="user", content=msg))
     return "", history
 
-def process_message(msg):
-    """Processes the user message: stores it, displays it, and generates a response."""
-    msg_store_val, _, _ = lambda msg: (msg, msg, "")(msg) # Store message and clear input (inline lambda)
-    input_box_val, chatbot_val = user_message(msg_store_val, chatbot) # Add user message to chat
-    chatbot_val_final = stream_gemini_response(msg_store_val, chatbot_val) # Generate and stream response
-    return msg_store_val, input_box_val, chatbot_val_final
-
 # Create the Gradio interface
 with gr.Blocks(theme=gr.themes.Soft(primary_hue="teal", secondary_hue="slate", neutral_hue="neutral")) as demo: # Using Soft theme with adjusted hues for a refined look
     gr.Markdown("# Chat with " + used_model)
@@ -238,53 +231,38 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="teal", secondary_hue="slate", n
 
     # Set up event handlers
    msg_store = gr.State("") # Store for preserving user message
-
-    # Created by gemini-2.5-pro-exp-03-25
+
     input_box.submit(
-        process_message,
+        lambda msg: (msg, msg, ""), # Store message and clear input
         inputs=[input_box],
-        outputs=[msg_store, input_box, chatbot], # Fixed outputs to include chatbot
+        outputs=[msg_store, input_box, input_box],
         queue=False
+    ).then(
+        user_message, # Add user message to chat
+        inputs=[msg_store, chatbot],
+        outputs=[input_box, chatbot],
+        queue=False
+    ).then(
+        stream_gemini_response, # Generate and stream response
+        inputs=[msg_store, chatbot],
+        outputs=chatbot
     )
 
     submit_button.click(
-        process_message,
+        lambda msg: (msg, msg, ""), # Store message and clear input
         inputs=[input_box],
-        outputs=[msg_store, input_box, chatbot], # Fixed outputs to include chatbot
+        outputs=[msg_store, input_box, input_box],
+        queue=False
+    ).then(
+        user_message, # Add user message to chat
+        inputs=[msg_store, chatbot],
+        outputs=[input_box, chatbot],
         queue=False
+    ).then(
+        stream_gemini_response, # Generate and stream response
+        inputs=[msg_store, chatbot],
+        outputs=chatbot
     )
-
-    #input_box.submit(
-    #    lambda msg: (msg, msg, ""), # Store message and clear input
-    #    inputs=[input_box],
-    #    outputs=[msg_store, input_box, input_box],
-    #    queue=False
-    #).then(
-    #    user_message, # Add user message to chat
-    #    inputs=[msg_store, chatbot],
-    #    outputs=[input_box, chatbot],
-    #    queue=False
-    #).then(
-    #    stream_gemini_response, # Generate and stream response
-    #    inputs=[msg_store, chatbot],
-    #    outputs=chatbot
-    #)
-
-    #submit_button.click(
-    #    lambda msg: (msg, msg, ""), # Store message and clear input
-    #    inputs=[input_box],
-    #    outputs=[msg_store, input_box, input_box],
-    #    queue=False
-    #).then(
-    #    user_message, # Add user message to chat
-    #    inputs=[msg_store, chatbot],
-    #    outputs=[input_box, chatbot],
-    #    queue=False
-    #).then(
-    #    stream_gemini_response, # Generate and stream response
-    #    inputs=[msg_store, chatbot],
-    #    outputs=chatbot
-    #)
 
     clear_button.click(
         lambda: ([], "", ""),
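
For context on the change: the removed process_message wrapper was not usable as written (it assigns a lambda object to three targets, which raises a TypeError when unpacked), and the commit restores Gradio's chained event pattern, where .submit()/.click() return a dependency that can be extended with .then(). The sketch below is a minimal, self-contained illustration of that pattern under stated assumptions; the real stream_gemini_response, the model configuration, and the other controls in app.py are replaced here by placeholders.

# Minimal sketch of the chained-handler pattern restored by this commit.
# Assumptions: the placeholder stream_gemini_response, the Chatbot/Textbox
# setup, and demo.launch() stand in for the full app.py, which is not shown.
import gradio as gr
from gradio import ChatMessage


def user_message(msg: str, history: list) -> tuple[str, list]:
    """Append the user's turn to the chat history and clear the textbox."""
    history.append(ChatMessage(role="user", content=msg))
    return "", history


def stream_gemini_response(msg: str, history: list):
    """Placeholder for the real streaming Gemini call in app.py."""
    history.append(ChatMessage(role="assistant", content="(placeholder reply to: " + msg + ")"))
    yield history


with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages", value=[])
    input_box = gr.Textbox(placeholder="Type a message...")
    msg_store = gr.State("")  # Keeps the message after the textbox is cleared

    input_box.submit(
        lambda msg: (msg, msg, ""),   # store the message and clear the textbox
        inputs=[input_box],
        outputs=[msg_store, input_box, input_box],
        queue=False,
    ).then(
        user_message,                 # add the user turn to the chat
        inputs=[msg_store, chatbot],
        outputs=[input_box, chatbot],
        queue=False,
    ).then(
        stream_gemini_response,       # generate and stream the reply
        inputs=[msg_store, chatbot],
        outputs=chatbot,
    )

if __name__ == "__main__":
    demo.launch()

The practical difference from the removed wrapper is that each .then() step runs as its own event, so a generator like stream_gemini_response can stream updates into the chatbot instead of having everything collapsed into a single synchronous return value.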