thinkall committed
Commit dd4f592 · Parent(s): f90d68d

Force terminate process

Files changed (1)
  1. app.py +14 -5
app.py CHANGED
@@ -12,6 +12,7 @@ from autogen.agentchat.contrib.retrieve_user_proxy_agent import (
     PROMPT_CODE,
 )
 
+TIMEOUT = 15
 
 def initialize_agents(config_list, docs_path=None):
     if isinstance(config_list, gr.State):
@@ -57,7 +58,7 @@ def initiate_chat(config_list, problem, queue, n_results=3):
     else:
         llm_config = (
             {
-                "request_timeout": 30,
+                "request_timeout": TIMEOUT,
                 "seed": 42,
                 "config_list": _config_list,
             },
@@ -65,7 +66,7 @@ def initiate_chat(config_list, problem, queue, n_results=3):
     assistant.llm_config.update(llm_config[0])
     assistant.reset()
     try:
-        ragproxyagent.a_initiate_chat(
+        ragproxyagent.initiate_chat(
             assistant, problem=problem, silent=False, n_results=n_results
         )
         messages = ragproxyagent.chat_messages
@@ -85,8 +86,16 @@ def chatbot_reply(input_text):
         args=(config_list, input_text, queue),
     )
     process.start()
-    process.join()
-    messages = queue.get()
+    try:
+        # process.join(TIMEOUT+2)
+        messages = queue.get(timeout=TIMEOUT)
+    except Exception as e:
+        messages = [str(e) if len(str(e)) > 0 else "Invalid Request to OpenAI, please check your API keys."]
+    finally:
+        try:
+            process.terminate()
+        except:
+            pass
     return messages
 
 
@@ -154,7 +163,7 @@ with gr.Blocks() as demo:
         ]
         llm_config = (
             {
-                "request_timeout": 120,
+                "request_timeout": TIMEOUT,
                 "seed": 42,
                 "config_list": config_list,
            },
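
The core of this commit is the force-terminate pattern in chatbot_reply: instead of process.join() followed by an unbounded queue.get(), the parent blocks on the queue with a timeout and then terminates the worker process unconditionally. Below is a minimal, self-contained sketch of that pattern using only the standard library; slow_task and reply_with_timeout are illustrative stand-ins, not functions from app.py, and the TIMEOUT value simply mirrors the constant introduced in the diff.

# Sketch of the timeout-and-terminate pattern from this commit.
# Names here (slow_task, reply_with_timeout) are hypothetical examples.
import multiprocessing as mp
import queue as queue_module
import time

TIMEOUT = 15  # seconds to wait for a reply before giving up


def slow_task(q):
    # Stand-in for a long-running chat call that may never return.
    time.sleep(60)
    q.put(["done"])


def reply_with_timeout():
    q = mp.Queue()
    process = mp.Process(target=slow_task, args=(q,))
    process.start()
    try:
        # Block on the result queue instead of process.join(), so a hung
        # worker cannot stall the caller for longer than TIMEOUT seconds.
        messages = q.get(timeout=TIMEOUT)
    except queue_module.Empty:
        messages = ["Timed out waiting for a reply."]
    finally:
        # Force-terminate the worker whether or not it produced a result,
        # then reap it so no zombie process is left behind.
        process.terminate()
        process.join()
    return messages


if __name__ == "__main__":
    print(reply_with_timeout())

Reading the result from the queue (rather than joining the process) is what bounds the wait; the finally block guarantees the child is killed even when the timeout fires, which is the behavior the commit message "Force terminate process" describes.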