Update app.py
app.py CHANGED
@@ -284,40 +284,34 @@ with gr.Blocks(css=css) as demo:
         outputs=model_status
     )
 
-    # Handle message submission
-    def user_message_submitted(message, chat_history):
-        """Handle user submitted message"""
-        # Ensure chat_history is a list
-        if chat_history is None:
-            chat_history = []
-
+    # Bind message submission
+    def process_message(message, chat_history):
+        """Process user message and generate response"""
         if message.strip() == "":
             return "", chat_history
 
         # Add user message to chat history
         chat_history = list(chat_history)
         chat_history.append((message, None))
+
+        # Generate response
+        response = generate_response_non_streaming(message, model_name.value, temperature.value, max_tokens.value)
+
+        # Add response to chat history
+        chat_history[-1] = (message, response)
+
         return "", chat_history
-
-    # Bind message submission
+
     submit_event = user_input.submit(
-        fn=user_message_submitted,
+        fn=process_message,
         inputs=[user_input, chatbot],
         outputs=[user_input, chatbot]
-    ).then(
-        fn=update_chat_with_response,
-        inputs=[chatbot, user_input, model_name, temperature, max_tokens],
-        outputs=chatbot
     )
-
+
     submit_button.click(
-        fn=user_message_submitted,
+        fn=process_message,
         inputs=[user_input, chatbot],
         outputs=[user_input, chatbot]
-    ).then(
-        fn=update_chat_with_response,
-        inputs=[chatbot, user_input, model_name, temperature, max_tokens],
-        outputs=chatbot
     )
 
     # Clear chat
@@ -326,6 +320,48 @@ with gr.Blocks(css=css) as demo:
         outputs=chatbot
     )
 
+    # # Handle message submission
+    # def user_message_submitted(message, chat_history):
+    #     """Handle user submitted message"""
+    #     # Ensure chat_history is a list
+    #     if chat_history is None:
+    #         chat_history = []
+
+    #     if message.strip() == "":
+    #         return "", chat_history
+
+    #     # Add user message to chat history
+    #     chat_history = list(chat_history)
+    #     chat_history.append((message, None))
+    #     return "", chat_history
+
+    # # Bind message submission
+    # submit_event = user_input.submit(
+    #     fn=user_message_submitted,
+    #     inputs=[user_input, chatbot],
+    #     outputs=[user_input, chatbot]
+    # ).then(
+    #     fn=update_chat_with_response,
+    #     inputs=[chatbot, user_input, model_name, temperature, max_tokens],
+    #     outputs=chatbot
+    # )
+
+    # submit_button.click(
+    #     fn=user_message_submitted,
+    #     inputs=[user_input, chatbot],
+    #     outputs=[user_input, chatbot]
+    # ).then(
+    #     fn=update_chat_with_response,
+    #     inputs=[chatbot, user_input, model_name, temperature, max_tokens],
+    #     outputs=chatbot
+    # )
+
+    # # Clear chat
+    # clear_button.click(
+    #     fn=lambda: [],
+    #     outputs=chatbot
+    # )
+
     examples = [
         ["Últimamente tengo la tensión un poco alta, ¿cómo debo adaptar mis hábitos?"],
         ["What are the common side effects of metformin?"],