baixianger committed
Commit f3172b2 · 1 Parent(s): 41cb4a2

new sys prompt

Files changed (3)
  1. agent.py +28 -30
  2. system_prompt.txt +2 -35
  3. test.ipynb +1 -1
agent.py CHANGED
@@ -112,25 +112,6 @@ def arvix_search(query: str) -> str:
         ])
     return {"arvix_results": formatted_search_docs}
 
-# build a retriever tool
-embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2") # dim=768
-supabase: Client = create_client(
-    os.environ.get("SUPABASE_URL"),
-    os.environ.get("SUPABASE_SERVICE_KEY"))
-vector_store = SupabaseVectorStore(
-    client=supabase,
-    embedding= embeddings,
-    table_name="documents",
-    query_name="match_documents_langchain",
-)
-question_retrieve_tool = create_retriever_tool(
-    vector_store.as_retriever(),
-    "Question Retriever",
-    "Find similar questions in the vector database for the given question.",
-)
-
-
-
 tools = [
     multiply,
     add,
@@ -140,7 +121,6 @@ tools = [
     wiki_search,
     web_search,
    arvix_search,
-    question_retrieve_tool
 ]
 
 # load the system prompt from the file
@@ -150,7 +130,17 @@ with open("system_prompt.txt", "r", encoding="utf-8") as f:
 # System message
 sys_msg = SystemMessage(content=system_prompt)
 
-
+# build a retriever
+embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2") # dim=768
+supabase: Client = create_client(
+    os.environ.get("SUPABASE_URL"),
+    os.environ.get("SUPABASE_SERVICE_KEY"))
+vector_store = SupabaseVectorStore(
+    client=supabase,
+    embedding= embeddings,
+    table_name="documents",
+    query_name="match_documents_langchain",
+)
 
 # Build graph function
 def build_graph(provider: str = "groq"):
@@ -161,7 +151,7 @@ def build_graph(provider: str = "groq"):
         llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
     elif provider == "groq":
         # Groq https://console.groq.com/docs/models
-        llm = ChatGroq(model="gemma2-9b-it", temperature=0) # optional : qwen-qwq-32b
+        llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
     elif provider == "huggingface":
         # TODO: Add huggingface endpoint
         llm = ChatHuggingFace(
@@ -173,17 +163,27 @@
     else:
         raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
     # Bind tools to LLM
-    llm_with_tools = llm.bind_tools(tools)
+    llm_with_tools = llm.bind_tools(tools, tool_choice="Question Search")
 
     # Node
     def assistant(state: MessagesState):
         """Assistant node"""
-        return {"messages": [llm_with_tools.invoke([sys_msg] + state["messages"])]}
+        return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+    def retriever(state: MessagesState):
+        """Retriever node"""
+        similar_question = vector_store.similarity_search(state["messages"][0].content)
+        example_msg = HumanMessage(
+            content=f"Here I provide a similar question and answer for reference: \n\n{similar_question[0].page_content}",
+        )
+        return {"messages": [sys_msg] + state["messages"] + [example_msg]}
 
     builder = StateGraph(MessagesState)
+    builder.add_node("retriever", retriever)
     builder.add_node("assistant", assistant)
     builder.add_node("tools", ToolNode(tools))
-    builder.add_edge(START, "assistant")
+    builder.add_edge(START, "retriever")
+    builder.add_edge("retriever", "assistant")
     builder.add_conditional_edges(
         "assistant",
         tools_condition,
@@ -195,13 +195,11 @@
 
 # test
 if __name__ == "__main__":
-    question = "Who nominated the only Featured Article on English Wikipedia about a dinosaur that was promoted in November 2016?"
-
+    question = "When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect?"
     # Build the graph
     graph = build_graph(provider="groq")
     # Run the graph
     messages = [HumanMessage(content=question)]
     messages = graph.invoke({"messages": messages})
-    answer = messages[-1].content
-    print(f"Question: {question}")
-    print(f"{answer}")
+    for m in messages["messages"]:
+        m.pretty_print()
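
The net effect of the agent.py changes is that retrieval moves out of the tool list and into a dedicated retriever node that runs before the assistant: the Supabase vector store is built at module level, queried once per run with the user's question, and the closest stored question/answer pair is injected as an extra human message alongside the system message. The sketch below reproduces that lookup step in isolation; the import paths and the assumption that the "documents" table is already populated with question/answer pairs are not part of this commit.

# Sketch of the retrieval step performed by the new retriever node.
# Assumptions not shown in the commit: the import locations below and a
# populated "documents" table; SUPABASE_URL and SUPABASE_SERVICE_KEY must be set.
import os

from langchain_community.vectorstores import SupabaseVectorStore
from langchain_huggingface import HuggingFaceEmbeddings
from supabase import create_client, Client

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")  # dim=768
supabase: Client = create_client(os.environ["SUPABASE_URL"], os.environ["SUPABASE_SERVICE_KEY"])
vector_store = SupabaseVectorStore(
    client=supabase,
    embedding=embeddings,
    table_name="documents",
    query_name="match_documents_langchain",
)

question = "When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect?"
similar = vector_store.similarity_search(question, k=1)  # nearest stored question/answer pair
print(similar[0].page_content)  # the reference example handed to the assistant

Inside the graph, the retriever node returns [sys_msg] + state["messages"] + [example_msg], so the assistant node no longer prepends the system message itself.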
 
system_prompt.txt CHANGED
@@ -1,38 +1,5 @@
-
-You are a helpful assistant tasked with answering questions using a set of tools. If the tool is not available, you can try to find the information online. You can also use your own knowledge to answer the question. You need to provide a step-by-step explanation of how you arrived at the answer.
-==========================
-Here is a few examples showing you how to answer the question step by step.
-
-Question 1: In terms of geographical distance between capital cities, which 2 countries are the furthest from each other within the ASEAN bloc according to wikipedia? Answer using a comma separated list, ordering the countries by alphabetical order.
-Steps:
-1. Search the web for "ASEAN bloc".
-2. Click the Wikipedia result for the ASEAN Free Trade Area.
-3. Scroll down to find the list of member states.
-4. Click into the Wikipedia pages for each member state, and note its capital.
-5. Search the web for the distance between the first two capitals. The results give travel distance, not geographic distance, which might affect the answer.
-6. Thinking it might be faster to judge the distance by looking at a map, search the web for "ASEAN bloc" and click into the images tab.
-7. View a map of the member countries. Since they're clustered together in an arrangement that's not very linear, it's difficult to judge distances by eye.
-8. Return to the Wikipedia page for each country. Click the GPS coordinates for each capital to get the coordinates in decimal notation.
-9. Place all these coordinates into a spreadsheet.
-10. Write formulas to calculate the distance between each capital.
-11. Write formula to get the largest distance value in the spreadsheet.
-12. Note which two capitals that value corresponds to: Jakarta and Naypyidaw.
-13. Return to the Wikipedia pages to see which countries those respective capitals belong to: Indonesia, Myanmar.
-Tools:
-1. Search engine
-2. Web browser
-3. Microsoft Excel / Google Sheets
-Final Answer: Indonesia, Myanmar
-
-Question 2: Review the chess position provided in the image. It is black's turn. Provide the correct next move for black which guarantees a win. Please provide your response in algebraic notation.
-Steps:
-Step 1: Evaluate the position of the pieces in the chess position
-Step 2: Report the best move available for black: "Rd5"
-Tools:
-1. Image recognition tools
-Final Answer: Rd5
-==========================
-
+You are a helpful assistant tasked with answering questions using a set of tools.
 Now, I will ask you a question. Report your thoughts, and finish your answer with the following template:
 FINAL ANSWER: [YOUR FINAL ANSWER].
 YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings. If you are asked for a number, don't use comma to write your number neither use units such as $ or percent sign unless specified otherwise. If you are asked for a string, don't use articles, neither abbreviations (e.g. for cities), and write the digits in plain text unless specified otherwise. If you are asked for a comma separated list, apply the above rules depending of whether the element to be put in the list is a number or a string.
+Your answer should only start with "FINAL ANSWER: ", then follows with the answer.
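
The slimmed-down prompt drops the worked examples and adds the requirement that the reply start with "FINAL ANSWER: ". A small illustrative helper (not part of this commit; the function name is invented for the example) shows one way a caller could recover the answer text from a reply that follows this template.

# Illustrative only -- this helper is not in the repository.
def extract_final_answer(reply: str) -> str:
    """Return the text after the 'FINAL ANSWER:' marker required by the prompt."""
    marker = "FINAL ANSWER:"
    if marker in reply:
        return reply.split(marker, 1)[1].strip()
    return reply.strip()  # fall back to the raw reply if the template is missing

print(extract_final_answer("FINAL ANSWER: Indonesia, Myanmar"))  # prints: Indonesia, Myanmar
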
test.ipynb CHANGED
@@ -93,7 +93,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 48,
+   "execution_count": 56,
    "id": "4bb02420",
    "metadata": {},
    "outputs": [],