Abbasid committed on
Commit 72a54cb · verified · 1 Parent(s): 44a82cb

Update app.py

Files changed (1)
  1. app.py +38 -31
app.py CHANGED
@@ -249,6 +249,7 @@ class WritableQueue:
         pass
 # --- END OF HELPER CLASS ---
 
+# --- REPLACE THE EXISTING run_agent_wrapper FUNCTION WITH THIS ---
 # --- REPLACE THE EXISTING run_agent_wrapper FUNCTION WITH THIS ---
 
 def agent_thread_func(agent, query, log_queue, result_queue):
@@ -270,26 +271,31 @@ def agent_thread_func(agent, query, log_queue, result_queue):
     # Signal that logging is finished by putting None in the log queue
     log_queue.put(None)
 
-# Make the main Gradio function a generator
-def run_agent_wrapper(query: str) -> Iterator[Tuple[List[Tuple[str, str]], str]]:
+# Generator function for Gradio streaming
+# REMOVED the return type hint -> Iterator[...]
+def run_agent_wrapper(query: str):
     """
     Runs the agent in a thread, captures stdout via a queue, and yields updates
-    for Gradio streaming. Uses Chatbot format for reasoning.
-    Returns Iterator yielding: (chatbot_history, final_answer_status)
+    for Gradio streaming. Uses Chatbot 'messages' format for reasoning.
+    Yields: (chatbot_history, final_answer_status)
     """
     if height_agent is None:
         error_msg = initialization_error_message or "Agent not initialized."
-        yield ([(None, error_msg)], "Error: Agent not initialized.")
+        # Yield error in the 'messages' format
+        yield ([{"role": "assistant", "content": error_msg}], "Error: Agent not initialized.")
         return # Stop the generator
 
     log_queue = queue.Queue()
     result_queue = queue.Queue()
-    chatbot_history = [] # Start with empty history
+    # History will be a list of dictionaries: [{"role": "assistant", "content": "..."}]
+    # We'll just use one dictionary and update its content for the streaming log
+    chatbot_history = []
     current_log_message = "" # Accumulate lines into one message block
     final_answer = "⏳ Running..." # Initial status
 
     # Initial yield to clear previous state and show "Running"
-    yield (chatbot_history, final_answer)
+    # Yield empty history initially, or a starting message
+    yield ([], final_answer)
 
     # Start the agent thread
     thread = threading.Thread(
@@ -307,13 +313,12 @@ def run_agent_wrapper(query: str) -> Iterator[Tuple[List[Tuple[str, str]], str]]
             break
 
         # Append new line to the current log message block
-        current_log_message += log_line
+        # Add line breaks for readability in the chatbot
+        current_log_message += log_line + "\n"
+
         # Update the chatbot history: Replace the last message or add a new one
-        # Simple approach: always update a single entry representing the log
-        if chatbot_history and chatbot_history[-1][0] is None: # Check if last entry is from "Bot" (None for user)
-            chatbot_history[-1] = (None, current_log_message) # Update last bot message
-        else:
-            chatbot_history.append((None, current_log_message)) # Add new bot message if history is empty or last was user
+        # Simplified: Update a single assistant message with the whole log
+        chatbot_history = [{"role": "assistant", "content": current_log_message}]
 
         yield (chatbot_history, final_answer) # Yield updated log
 
@@ -323,13 +328,15 @@ def run_agent_wrapper(query: str) -> Iterator[Tuple[List[Tuple[str, str]], str]]
     if not thread.is_alive() and result_queue.empty():
         print("Warning: Agent thread finished unexpectedly without result.")
         # Attempt to retrieve any remaining logs
-        while not log_queue.empty():
-            log_line = log_queue.get_nowait()
-            if log_line: current_log_message += log_line
-        if chatbot_history and chatbot_history[-1][0] is None:
-            chatbot_history[-1] = (None, current_log_message + "\nError: Agent stopped unexpectedly.")
-        else:
-            chatbot_history.append((None, current_log_message + "\nError: Agent stopped unexpectedly."))
+        try:
+            while True: # Get all remaining logs
+                log_line = log_queue.get_nowait()
+                if log_line: current_log_message += log_line + "\n"
+                else: break # Should not happen if None was already processed, but safety
+        except queue.Empty:
+            pass # No more logs
+        current_log_message += "\nError: Agent stopped unexpectedly."
+        chatbot_history = [{"role": "assistant", "content": current_log_message}]
         final_answer = "Error: Agent stopped unexpectedly."
         yield (chatbot_history, final_answer)
         return # Stop
@@ -345,25 +352,25 @@ def run_agent_wrapper(query: str) -> Iterator[Tuple[List[Tuple[str, str]], str]]
     if isinstance(final_result, Exception):
        final_answer = f"Error during execution: {final_result}"
        # Append error to the chatbot log
-        error_log = f"\n--- EXECUTION ERROR ---\n{final_result}"
+        error_log = f"\n\n--- EXECUTION ERROR ---\n{final_result}"
        current_log_message += error_log
-        if chatbot_history and chatbot_history[-1][0] is None:
-            chatbot_history[-1] = (None, current_log_message)
-        else:
-            chatbot_history.append((None, current_log_message))
+        chatbot_history = [{"role": "assistant", "content": current_log_message}]
     else:
-        final_answer = final_result
+        final_answer = final_result # This is the actual final answer string
 
     # Final yield with the complete log and the final answer
+    # Ensure history is in the correct format before the final yield
+    chatbot_history = [{"role": "assistant", "content": current_log_message}]
     yield (chatbot_history, final_answer)
 
+# --- END OF REPLACEMENT ---
+
 # --- Build Gradio Interface Manually with gr.Blocks ---
 print("--- Building Gradio Interface with gr.Blocks ---")
 
 # Make sure theme is applied correctly if desired
 # theme = gr.themes.Default() # Or another theme
 # with gr.Blocks(theme=theme, css="footer {visibility: hidden}") as demo:
-# --- MODIFY THE gr.Blocks SECTION ---
 
 with gr.Blocks(css="footer {visibility: hidden}") as demo:
     gr.Markdown("# Height Comparison Agent")
@@ -381,13 +388,12 @@ with gr.Blocks(css="footer {visibility: hidden}") as demo:
     # --- CHANGE THIS ---
     # reasoning_output = gr.Code(label="Reasoning Log", language="markdown", interactive=False, lines=20)
     reasoning_output_chatbot = gr.Chatbot(
-        label="Reasoning Log",
-        height=500 # Set a height to enable scrolling
+        label="Reasoning Log",
+        height=500,
+        type="messages" # <<< ADD THIS
     )
-    # --- END OF CHANGE ---
 
 
-    # --- CHANGE THIS ---
     # Link components - ensure outputs match the function's yield tuple order
     submit_button.click(
         fn=run_agent_wrapper,
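For reference, the pattern this commit adopts is: a generator function yields (chatbot_history, status) tuples, and the Chatbot component is switched to the 'messages' format (a list of {"role": ..., "content": ...} dicts). Below is a minimal, standalone sketch of that wiring, not the project's app.py; component names are illustrative and it assumes a Gradio version that supports type="messages".

import time
import gradio as gr

def stream_log(query: str):
    # Generator: each yield updates (chatbot_history, status) in the UI.
    log = ""
    yield [], "⏳ Running..."  # clear previous output
    for step in ["Thinking...", f"Looking up '{query}'...", "Done."]:
        time.sleep(0.5)  # stand-in for real agent work
        log += step + "\n"
        # A single assistant message is rewritten with the whole log so far.
        yield [{"role": "assistant", "content": log}], "⏳ Running..."
    yield [{"role": "assistant", "content": log}], "Final answer would go here"

with gr.Blocks() as demo:
    query_box = gr.Textbox(label="Query")
    submit = gr.Button("Run")
    chat = gr.Chatbot(label="Reasoning Log", height=500, type="messages")
    status = gr.Textbox(label="Final Answer")
    submit.click(fn=stream_log, inputs=query_box, outputs=[chat, status])

if __name__ == "__main__":
    demo.launch()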