sskorol committed on
Commit 90d4a43 · verified · 1 Parent(s): 1556b80

Improve error handling


Improved Gradio error handling

Files changed (1)
  1. Gradio_UI.py +17 -6
Gradio_UI.py CHANGED
@@ -188,17 +188,28 @@ class GradioUI:
 
     def interact_with_agent(self, prompt, messages):
         import gradio as gr
-
+
         try:
             messages.append(gr.ChatMessage(role="user", content=prompt))
             yield messages
-            for msg in stream_to_gradio(self.agent, task=prompt, reset_agent_memory=False):
-                messages.append(msg)
+
+            try:
+                for msg in stream_to_gradio(self.agent, task=prompt, reset_agent_memory=True):
+                    messages.append(msg)
+                    yield messages
+            except Exception as e:
+                error_msg = str(e)
+                if "Failed Dependency" in error_msg:
+                    error_msg = "The model reached its token limit. Try a shorter query or breaking your request into smaller parts."
+                messages.append(gr.ChatMessage(role="assistant", content=f"Error: {error_msg}"))
                 yield messages
-            yield messages
+
+            # Clear agent memory after each interaction
+            if hasattr(self.agent, 'clear_memory'):
+                self.agent.clear_memory()
+
         except Exception as e:
-            error_message = f"Error: {str(e)}\nPlease try again or contact support if the issue persists."
-            messages.append(gr.ChatMessage(role="assistant", content=error_message))
+            messages.append(gr.ChatMessage(role="assistant", content=f"An error occurred: {str(e)}"))
             yield messages
 
     def upload_file(
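
For reference, below is a minimal sketch (not part of this commit) of how a generator handler like interact_with_agent is typically wired into a Gradio Blocks app. The names agent, ui, and demo and the layout are illustrative assumptions, not the repository's actual launch code.

import gradio as gr

# Hypothetical wiring sketch: GradioUI wraps an agent (assumed here) and
# exposes the generator-based handler modified in this commit.
ui = GradioUI(agent)  # `agent` is an assumed pre-built agent instance

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")          # renders gr.ChatMessage entries
    prompt = gr.Textbox(placeholder="Ask the agent...")
    # Because interact_with_agent() is a generator, every `yield messages`
    # re-renders the chatbot, so intermediate agent steps and the new
    # error messages appear as soon as they are produced.
    prompt.submit(ui.interact_with_agent, inputs=[prompt, chatbot], outputs=[chatbot])

demo.launch()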