thinkall committed
Commit b112116 · 1 Parent(s): 626513d

Improve welcome message

Files changed (2)
  1. app.py +2 -2
  2. test_group.py +54 -0
app.py CHANGED
@@ -31,7 +31,7 @@ def initialize_agents(config_list, docs_path=None):
         human_input_mode="NEVER",
         max_consecutive_auto_reply=5,
         retrieve_config={
-            # "task": "qa",
+            "task": "code",
             "docs_path": docs_path,
             "chunk_token_size": 2000,
             "model": _config_list[0]["model"],
@@ -51,7 +51,7 @@ def initiate_chat(config_list, problem, queue, n_results=3):
     else:
         _config_list = config_list
     if len(_config_list[0].get("api_key", "")) < 2:
-        queue.put(["Please set the LLM config first"])
+        queue.put(["Hi, nice to meet you! Please enter your API keys in the text boxes below."])
         return
     else:
         llm_config = (
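
The second hunk changes the early-return branch that fires when no API key has been configured. A minimal sketch of exercising that branch (hypothetical usage, not part of the commit: it assumes app.py is importable as a module and that a standard queue.Queue is an acceptable queue argument):

import queue

from app import initiate_chat  # assumes app.py is on the import path

msg_queue = queue.Queue()
config_list = [{"model": "gpt-4", "api_key": ""}]  # empty key triggers the welcome branch

# With an api_key shorter than 2 characters, initiate_chat should put the new
# welcome message on the queue and return without starting any agents.
initiate_chat(config_list, problem="What is AutoGen?", queue=msg_queue)
print(msg_queue.get())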
test_group.py ADDED
@@ -0,0 +1,54 @@
+import autogen
+import chromadb
+from autogen.agentchat.contrib.retrieve_user_proxy_agent import RetrieveUserProxyAgent
+
+# Load the LLM configuration list, e.g. from an OAI_CONFIG_LIST json file.
+config_list_gpt4 = autogen.config_list_from_json("OAI_CONFIG_LIST")
+llm_config = {"config_list": config_list_gpt4, "seed": 42}
+
+# Retrieval-augmented proxy agent backed by a local chromadb collection.
+ragproxyagent = RetrieveUserProxyAgent(
+    name="ragproxyagent",
+    human_input_mode="NEVER",
+    max_consecutive_auto_reply=10,
+    retrieve_config={
+        "task": "you need to fetch the answers related to company if needed",
+        "docs_path": "/content/Docs",
+        "chunk_token_size": 10,
+        "model": config_list_gpt4[0]["model"],
+        "client": chromadb.PersistentClient(path="/tmp/chromadb"),
+        "embedding_model": "all-mpnet-base-v2",
+    },
+)
+
+# Stand-in for the app user; asks a human at every turn and can execute code.
+user_proxy = autogen.UserProxyAgent(
+    name="User_proxy",
+    human_input_mode="ALWAYS",
+    max_consecutive_auto_reply=5,
+    is_termination_msg=lambda x: x.get("content", "").rstrip().endswith("TERMINATE"),
+    code_execution_config={"work_dir": "web"},
+    llm_config=llm_config,
+    system_message="You are a user of a mobile app. Reply to the agent with a suitable answer; "
+    "otherwise reply 'I don't know', or answer the agent's questions with facts.",
+)
+
+support_agent = autogen.AssistantAgent(
+    name="support_agent",
+    llm_config=llm_config,
+    system_message="You are the founder of a mobile app. You are trying to find the reason for "
+    "low usage and uninstalls by asking the customer relevant questions. "
+    "You try to answer the customer's query in the best possible way. "
+    "If you do not know the answer, ask for clarification or more data.",
+)
+
+pm = autogen.AssistantAgent(
+    name="PM",
+    system_message="You are an expert in analysing whether the objective of the user research has been met from the conversation. Answer concisely.",
+    llm_config=llm_config,
+)
+
+groupchat = autogen.GroupChat(agents=[user_proxy, support_agent, pm, ragproxyagent], messages=[], max_round=8)
+manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
+
+# Issue: this does not seem to work; the ragproxyagent is not participating in the conversation.
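
A likely reason the retrieval agent stays silent in the group chat above is that nothing ever routes the conversation through it: the chat is not started from ragproxyagent, so the context it retrieves never enters the thread, and the manager has little reason to select it as a speaker. A hedged follow-up sketch (not part of this commit; it assumes the agents defined above, a placeholder PROBLEM string, and the pyautogen 0.2-era API where initiate_chat forwards problem and n_results to the retrieval agent's init message):

PROBLEM = "Why are users uninstalling the app?"  # placeholder research question

ragproxyagent.reset()
# RetrieveUserProxyAgent builds its opening message from `problem` plus the
# chunks retrieved from docs_path, so starting the chat from it injects the
# document context into the group conversation.
ragproxyagent.initiate_chat(manager, problem=PROBLEM, n_results=3)

This follows the pattern used in AutoGen's RetrieveChat group-chat examples; whether it resolves the issue depends on how the GroupChatManager selects speakers in the installed version.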