Commit 2604805
Parent(s): 81917a3

feat: push project

Files changed:
- .gitignore         +115  -0
- agent.py           +214  -0
- app.py              +17  -4
- metadata.jsonl       +0  -0
- requirements.txt    +18  -1
- system_prompt.txt   +29  -0
- test.ipynb         +901  -0
.gitignore
ADDED
@@ -0,0 +1,115 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Virtual environments
+venv/
+ENV/
+env/
+.env
+.venv
+env.bak/
+venv.bak/
+.python-version
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+pytest-*.xml
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# Logs
+*.log
+logs/
+log/
+
+# IDE specific files
+.idea/
+.vscode/
+*.swp
+*.swo
+*~
+.DS_Store
+.project
+.pydevproject
+.settings/
+.vs/
+*.sublime-project
+*.sublime-workspace
+
+# Database
+*.db
+*.rdb
+*.sqlite
+*.sqlite3
+
+# Environment variables
+.env
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+# macOS specific
+.DS_Store
+.AppleDouble
+.LSOverride
+Icon
+._*
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# AI/model files
+*.h5
+*.pb
+*.onnx
+*.tflite
+*.pt
+*.pth
+*.weights
+
+# Temporary files
+tmp/
+temp/
+.tmp
+*.tmp
agent.py
ADDED
@@ -0,0 +1,214 @@
+"""LangGraph Agent"""
+import os
+from dotenv import load_dotenv
+from langgraph.graph import START, StateGraph, MessagesState
+from langgraph.prebuilt import tools_condition
+from langgraph.prebuilt import ToolNode
+from langchain_google_genai import ChatGoogleGenerativeAI
+from langchain_groq import ChatGroq
+from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
+from langchain_community.tools.tavily_search import TavilySearchResults
+from langchain_community.document_loaders import WikipediaLoader
+from langchain_community.document_loaders import ArxivLoader
+from langchain_community.vectorstores import SupabaseVectorStore
+from langchain_core.messages import SystemMessage, HumanMessage
+from langchain_core.tools import tool
+from langchain.tools.retriever import create_retriever_tool
+from supabase.client import Client, create_client
+
+load_dotenv()
+
+@tool
+def multiply(a: int, b: int) -> int:
+    """Multiply two numbers.
+
+    Args:
+        a: first int
+        b: second int
+    """
+    return a * b
+
+@tool
+def add(a: int, b: int) -> int:
+    """Add two numbers.
+
+    Args:
+        a: first int
+        b: second int
+    """
+    return a + b
+
+@tool
+def subtract(a: int, b: int) -> int:
+    """Subtract two numbers.
+
+    Args:
+        a: first int
+        b: second int
+    """
+    return a - b
+
+@tool
+def divide(a: int, b: int) -> int:
+    """Divide two numbers.
+
+    Args:
+        a: first int
+        b: second int
+    """
+    if b == 0:
+        raise ValueError("Cannot divide by zero.")
+    return a / b
+
+@tool
+def modulus(a: int, b: int) -> int:
+    """Get the modulus of two numbers.
+
+    Args:
+        a: first int
+        b: second int
+    """
+    return a % b
+
+@tool
+def wiki_search(query: str) -> str:
+    """Search Wikipedia for a query and return maximum 2 results.
+
+    Args:
+        query: The search query."""
+    search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
+    formatted_search_docs = "\n\n---\n\n".join(
+        [
+            f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
+            for doc in search_docs
+        ])
+    return {"wiki_results": formatted_search_docs}
+
+@tool
+def web_search(query: str) -> str:
+    """Search Tavily for a query and return maximum 3 results.
+
+    Args:
+        query: The search query."""
+    search_docs = TavilySearchResults(max_results=3).invoke(query=query)
+    formatted_search_docs = "\n\n---\n\n".join(
+        [
+            f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
+            for doc in search_docs
+        ])
+    return {"web_results": formatted_search_docs}
+
+@tool
+def arvix_search(query: str) -> str:
+    """Search Arxiv for a query and return maximum 3 result.
+
+    Args:
+        query: The search query."""
+    search_docs = ArxivLoader(query=query, load_max_docs=3).load()
+    formatted_search_docs = "\n\n---\n\n".join(
+        [
+            f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content[:1000]}\n</Document>'
+            for doc in search_docs
+        ])
+    return {"arvix_results": formatted_search_docs}
+
+
+
+# load the system prompt from the file
+with open("system_prompt.txt", "r", encoding="utf-8") as f:
+    system_prompt = f.read()
+
+# System message
+sys_msg = SystemMessage(content=system_prompt)
+
+# build a retriever
+embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")  # dim=768
+supabase: Client = create_client(
+    os.environ.get("SUPABASE_URL"),
+    os.environ.get("SUPABASE_SERVICE_KEY"))
+vector_store = SupabaseVectorStore(
+    client=supabase,
+    embedding= embeddings,
+    table_name="documents",
+    query_name="match_documents_langchain",
+)
+create_retriever_tool = create_retriever_tool(
+    retriever=vector_store.as_retriever(),
+    name="Question Search",
+    description="A tool to retrieve similar questions from a vector store.",
+)
+
+
+
+tools = [
+    multiply,
+    add,
+    subtract,
+    divide,
+    modulus,
+    wiki_search,
+    web_search,
+    arvix_search,
+]
+
+# Build graph function
+def build_graph(provider: str = "groq"):
+    """Build the graph"""
+    # Load environment variables from .env file
+    if provider == "google":
+        # Google Gemini
+        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
+    elif provider == "groq":
+        # Groq https://console.groq.com/docs/models
+        llm = ChatGroq(model="qwen-qwq-32b", temperature=0)  # optional : qwen-qwq-32b gemma2-9b-it
+    elif provider == "huggingface":
+        # TODO: Add huggingface endpoint
+        llm = ChatHuggingFace(
+            llm=HuggingFaceEndpoint(
+                url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
+                temperature=0,
+            ),
+        )
+    else:
+        raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
+    # Bind tools to LLM
+    llm_with_tools = llm.bind_tools(tools)
+
+    # Node
+    def assistant(state: MessagesState):
+        """Assistant node"""
+        return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+    def retriever(state: MessagesState):
+        """Retriever node"""
+        similar_question = vector_store.similarity_search(state["messages"][0].content)
+        example_msg = HumanMessage(
+            content=f"Here I provide a similar question and answer for reference: \n\n{similar_question[0].page_content}",
+        )
+        return {"messages": [sys_msg] + state["messages"] + [example_msg]}
+
+    builder = StateGraph(MessagesState)
+    builder.add_node("retriever", retriever)
+    builder.add_node("assistant", assistant)
+    builder.add_node("tools", ToolNode(tools))
+    builder.add_edge(START, "retriever")
+    builder.add_edge("retriever", "assistant")
+    builder.add_conditional_edges(
+        "assistant",
+        tools_condition,
+    )
+    builder.add_edge("tools", "assistant")
+
+    # Compile graph
+    return builder.compile()
+
+# test
+if __name__ == "__main__":
+    question = "When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect?"
+    # Build the graph
+    graph = build_graph(provider="groq")
+    # Run the graph
+    messages = [HumanMessage(content=question)]
+    messages = graph.invoke({"messages": messages})
+    for m in messages["messages"]:
+        m.pretty_print()
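For reference, a minimal local smoke test of the graph defined in agent.py above, mirroring its __main__ block. This sketch is not part of the commit; it assumes the packages from requirements.txt are installed, a .env file provides the Groq, Tavily, and Supabase credentials, and agent.py is importable from the working directory (app.py below imports it as Final_Assignment_Template.agent instead).

    # Hypothetical smoke test; adjust the import path to match the repo layout.
    from langchain_core.messages import HumanMessage
    from agent import build_graph

    graph = build_graph(provider="groq")  # "google" and "huggingface" are the other options
    result = graph.invoke({"messages": [HumanMessage(content="What is 7 multiplied by 6?")]})
    print(result["messages"][-1].content)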
app.py
CHANGED
@@ -1,8 +1,13 @@
+""" Basic Agent Evaluation Runner"""
 import os
+import inspect
 import gradio as gr
 import requests
-import inspect
 import pandas as pd
+from langchain_core.messages import HumanMessage
+from Final_Assignment_Template.agent import build_graph
+
+
 
 # (Keep Constants as is)
 # --- Constants ---
@@ -10,14 +15,22 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
 # --- Basic Agent Definition ---
 # ----- THIS IS WERE YOU CAN BUILD WHAT YOU WANT ------
+
+
 class BasicAgent:
+    """A langgraph agent."""
     def __init__(self):
         print("BasicAgent initialized.")
+        self.graph = build_graph()
+
     def __call__(self, question: str) -> str:
         print(f"Agent received question (first 50 chars): {question[:50]}...")
-
-
-
+        # Wrap the question in a HumanMessage from langchain_core
+        messages = [HumanMessage(content=question)]
+        messages = self.graph.invoke({"messages": messages})
+        answer = messages['messages'][-1].content
+        return answer[14:]
+
 
 def run_and_submit_all( profile: gr.OAuthProfile | None):
     """
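A minimal sketch of exercising the updated BasicAgent outside the Gradio app, under the same environment assumptions as above (not part of this commit). The answer[14:] slice in __call__ appears intended to strip a fixed 14-character prefix such as "Final Answer: " from the model output, matching the answer format shown in system_prompt.txt below.

    # Hypothetical usage of the new BasicAgent wrapper.
    agent = BasicAgent()                          # builds the LangGraph graph internally
    answer = agent("What is the capital of France?")
    print(answer)                                 # final graph message, minus its first 14 characters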
metadata.jsonl
ADDED
The diff for this file is too large to render.
requirements.txt
CHANGED
@@ -1,2 +1,19 @@
 gradio
-requests
+requests
+langchain
+langchain-community
+langchain-core
+langchain-google-genai
+langchain-openai
+langchain-huggingface
+langchain-groq
+langchain-tavily
+langchain-chroma
+langgraph
+huggingface_hub
+supabase
+arxiv
+pymupdf
+wikipedia
+pgvector
+python-dotenv
system_prompt.txt
ADDED
@@ -0,0 +1,29 @@
+
+You are a helpful assistant tasked with answering questions using a set of tools.
+If the tool is not available, you can try to find the information online. You can also use your own knowledge to answer the question.
+You need to provide a step-by-step explanation of how you arrived at the answer.
+==========================
+Here is a few examples showing you how to answer the question step by step.
+
+Question 1: What is the absolute difference in tens of thousands between the population of chinstrap penguins on the Wikipedia page for penguin species populations as of the end of 2018 and the population recorded in the Nature.com "global population assessment of the Chinstrap penguin" article from 2020, assuming two penguins per breeding pair?
+Steps:
+1. Searched "penguin species populations wikipedia" on Google search.
+2. Opened the "List of Sphenisciformes by population" Wikipedia article.
+3. Clicked "View history".
+4. Scrolled to the end of 2018 and opened the page.
+5. Scrolled to the encoding for the population table.
+6. Recorded the number of chinstrap penguins (8 million).
+7. Searched "Nature.com global population assessment of the Chinstrap penguin 2020" in Google search.
+8. Opened the top link to the article with the corresponding name and date.
+9. Read the abstract and noted the number of breeding pairs (3.42 million).
+10. Multiplied the breeding pairs by 2 to get the number of penguins (6.84 million).
+11. Subtracted the Wikipedia population from the Nature.com population (1.16 million).
+12. Multiplied 1.16 by 100 to get tens of thousands (116).
+Tools:
+1. Search engine
+2. Web browser
+3. Calculator
+Final Answer: 116
+
+==========================
+Now, please answer the following question step by step.
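As a quick check of the arithmetic in the worked example above (again, not part of the committed files):

    # 8 million (Wikipedia, end of 2018) vs. 2 * 3.42 million breeding pairs (Nature.com, 2020).
    wikipedia_population = 8_000_000
    nature_population = 2 * 3_420_000                        # 6,840,000 penguins
    difference = wikipedia_population - nature_population    # 1,160,000
    print(difference // 10_000)                              # 116 tens of thousands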
test.ipynb
ADDED
@@ -0,0 +1,901 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c57cb5f3",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n",
+      "To disable this warning, you can either:\n",
+      "\t- Avoid using `tokenizers` before the fork if possible\n",
+      "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Requirement already satisfied: gradio in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 1)) (5.29.0)\n",
+      "Requirement already satisfied: requests in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 2)) (2.32.3)\n",
+      "Requirement already satisfied: langchain in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 3)) (0.3.25)\n",
+      "Requirement already satisfied: langchain-community in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 4)) (0.3.23)\n",
+      "Requirement already satisfied: langchain-core in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 5)) (0.3.58)\n",
+      "Requirement already satisfied: langchain-google-genai in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 6)) (2.1.4)\n",
+      "Collecting langchain-openai (from -r requirements.txt (line 7))\n",
+      " Downloading langchain_openai-0.3.16-py3-none-any.whl.metadata (2.3 kB)\n",
+      "Requirement already satisfied: langchain-huggingface in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 8)) (0.1.2)\n",
+      "Requirement already satisfied: langchain-groq in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 9)) (0.3.2)\n",
+      "Requirement already satisfied: langchain-tavily in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 10)) (0.1.6)\n",
+      "Requirement already satisfied: langchain-chroma in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 11)) (0.2.3)\n",
+      "Requirement already satisfied: langgraph in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 12)) (0.4.1)\n",
+      "Requirement already satisfied: huggingface_hub in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 13)) (0.30.2)\n",
+      "Requirement already satisfied: supabase in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 14)) (2.15.1)\n",
+      "Requirement already satisfied: arxiv in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 15)) (2.2.0)\n",
+      "Requirement already satisfied: pymupdf in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 16)) (1.25.5)\n",
+      "Requirement already satisfied: wikipedia in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 17)) (1.4.0)\n",
+      "Requirement already satisfied: pgvector in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 18)) (0.4.1)\n",
+      "Requirement already satisfied: python-dotenv in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from -r requirements.txt (line 19)) (1.0.1)\n",
[… the remainder of this cell's stdout is the pip dependency-resolution log for transitive packages ("Requirement already satisfied: …" entries plus "Collecting tiktoken"); the rendered diff is cut off at file line 196 of the 901-line notebook …]
|
197 |
+
"Requirement already satisfied: httptools>=0.6.3 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from uvicorn[standard]>=0.18.3->chromadb!=0.5.10,!=0.5.11,!=0.5.12,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,<0.7.0,>=0.4.0->langchain-chroma->-r requirements.txt (line 11)) (0.6.4)\n",
|
198 |
+
"Requirement already satisfied: uvloop!=0.15.0,!=0.15.1,>=0.14.0 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from uvicorn[standard]>=0.18.3->chromadb!=0.5.10,!=0.5.11,!=0.5.12,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,<0.7.0,>=0.4.0->langchain-chroma->-r requirements.txt (line 11)) (0.21.0)\n",
|
199 |
+
"Requirement already satisfied: watchfiles>=0.13 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from uvicorn[standard]>=0.18.3->chromadb!=0.5.10,!=0.5.11,!=0.5.12,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,<0.7.0,>=0.4.0->langchain-chroma->-r requirements.txt (line 11)) (1.0.5)\n",
|
200 |
+
"Requirement already satisfied: joblib>=1.2.0 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from scikit-learn->sentence-transformers>=2.6.0->langchain-huggingface->-r requirements.txt (line 8)) (1.5.0)\n",
|
201 |
+
"Requirement already satisfied: threadpoolctl>=3.1.0 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from scikit-learn->sentence-transformers>=2.6.0->langchain-huggingface->-r requirements.txt (line 8)) (3.6.0)\n",
|
202 |
+
"Requirement already satisfied: hyperframe<7,>=6.1 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from h2<5,>=3->httpx[http2]<0.29,>=0.26->gotrue<3.0.0,>=2.11.0->supabase->-r requirements.txt (line 14)) (6.1.0)\n",
|
203 |
+
"Requirement already satisfied: hpack<5,>=4.1 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from h2<5,>=3->httpx[http2]<0.29,>=0.26->gotrue<3.0.0,>=2.11.0->supabase->-r requirements.txt (line 14)) (4.1.0)\n",
|
204 |
+
"Requirement already satisfied: zipp>=3.20 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from importlib-metadata<8.7.0,>=6.0->opentelemetry-api>=1.2.0->chromadb!=0.5.10,!=0.5.11,!=0.5.12,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,<0.7.0,>=0.4.0->langchain-chroma->-r requirements.txt (line 11)) (3.21.0)\n",
|
205 |
+
"Requirement already satisfied: mdurl~=0.1 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from markdown-it-py>=2.2.0->rich>=10.11.0->chromadb!=0.5.10,!=0.5.11,!=0.5.12,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,<0.7.0,>=0.4.0->langchain-chroma->-r requirements.txt (line 11)) (0.1.2)\n",
|
206 |
+
"Requirement already satisfied: pyasn1<0.7.0,>=0.6.1 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from pyasn1-modules>=0.2.1->google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai->-r requirements.txt (line 6)) (0.6.1)\n",
|
207 |
+
"Requirement already satisfied: iniconfig in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from pytest>=6.2.5->pytest-mock<4.0.0,>=3.14.0->gotrue<3.0.0,>=2.11.0->supabase->-r requirements.txt (line 14)) (2.1.0)\n",
|
208 |
+
"Requirement already satisfied: pluggy<2,>=1.5 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from pytest>=6.2.5->pytest-mock<4.0.0,>=3.14.0->gotrue<3.0.0,>=2.11.0->supabase->-r requirements.txt (line 14)) (1.5.0)\n",
|
209 |
+
"Requirement already satisfied: mpmath<1.4,>=1.1.0 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from sympy->onnxruntime>=1.14.1->chromadb!=0.5.10,!=0.5.11,!=0.5.12,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,<0.7.0,>=0.4.0->langchain-chroma->-r requirements.txt (line 11)) (1.3.0)\n",
|
210 |
+
"Requirement already satisfied: humanfriendly>=9.1 in /Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages (from coloredlogs->onnxruntime>=1.14.1->chromadb!=0.5.10,!=0.5.11,!=0.5.12,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,<0.7.0,>=0.4.0->langchain-chroma->-r requirements.txt (line 11)) (10.0)\n",
|
211 |
+
"Downloading langchain_openai-0.3.16-py3-none-any.whl (62 kB)\n",
|
212 |
+
"Downloading tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl (1.0 MB)\n",
|
213 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m8.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
214 |
+
"\u001b[?25hInstalling collected packages: tiktoken, langchain-openai\n",
|
215 |
+
"Successfully installed langchain-openai-0.3.16 tiktoken-0.9.0\n",
|
216 |
+
"\n",
|
217 |
+
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m25.0.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m25.1\u001b[0m\n",
|
218 |
+
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n"
|
219 |
+
]
|
220 |
+
}
|
221 |
+
],
|
222 |
+
"source": [
|
223 |
+
"# !pip install -r requirements.txt\n",
|
224 |
+
"# Load metadata.jsonl\n",
|
225 |
+
"import json\n",
|
226 |
+
"# Load the metadata.jsonl file\n",
|
227 |
+
"with open('metadata.jsonl', 'r') as jsonl_file:\n",
|
228 |
+
" json_list = list(jsonl_file)\n",
|
229 |
+
"\n",
|
230 |
+
"json_QA = []\n",
|
231 |
+
"for json_str in json_list:\n",
|
232 |
+
" json_data = json.loads(json_str)\n",
|
233 |
+
" json_QA.append(json_data)"
|
234 |
+
]
|
235 |
+
},
|
236 |
+
{
|
237 |
+
"cell_type": "code",
|
238 |
+
"execution_count": 2,
|
239 |
+
"id": "ec0f78a4",
|
240 |
+
"metadata": {},
|
241 |
+
"outputs": [
|
242 |
+
{
|
243 |
+
"name": "stdout",
|
244 |
+
"output_type": "stream",
|
245 |
+
"text": [
|
246 |
+
"==================================================\n",
|
247 |
+
"Task ID: 5d0080cb-90d7-4712-bc33-848150e917d3\n",
|
248 |
+
"Question: What was the volume in m^3 of the fish bag that was calculated in the University of Leicester paper \"Can Hiccup Supply Enough Fish to Maintain a Dragon’s Diet?\"\n",
|
249 |
+
"Level: 1\n",
|
250 |
+
"Final Answer: 0.1777\n",
|
251 |
+
"Annotator Metadata: \n",
|
252 |
+
" ├── Steps: \n",
|
253 |
+
" │ ├── 1. Searched '\"Can Hiccup Supply Enough Fish to Maintain a Dragon’s Diet?\"' on Google.\n",
|
254 |
+
" │ ├── 2. Opened \"Can Hiccup Supply Enough Fish to Maintain a Dragon’s Diet?\" at https://journals.le.ac.uk/ojs1/index.php/jist/article/view/733.\n",
|
255 |
+
" │ ├── 3. Clicked \"PDF\".\n",
|
256 |
+
" │ ├── 4. Found the calculations for the volume of the fish bag and noted them.\n",
|
257 |
+
" ├── Number of steps: 4\n",
|
258 |
+
" ├── How long did this take?: 5 minutes\n",
|
259 |
+
" ├── Tools:\n",
|
260 |
+
" │ ├── 1. Web browser\n",
|
261 |
+
" │ ├── 2. Search engine\n",
|
262 |
+
" │ ├── 3. PDF access\n",
|
263 |
+
" └── Number of tools: 3\n",
|
264 |
+
"==================================================\n"
|
265 |
+
]
|
266 |
+
}
|
267 |
+
],
|
268 |
+
"source": [
|
269 |
+
"# randomly select 3 samples\n",
|
270 |
+
"# {\"task_id\": \"c61d22de-5f6c-4958-a7f6-5e9707bd3466\", \"Question\": \"A paper about AI regulation that was originally submitted to arXiv.org in June 2022 shows a figure with three axes, where each axis has a label word at both ends. Which of these words is used to describe a type of society in a Physics and Society article submitted to arXiv.org on August 11, 2016?\", \"Level\": 2, \"Final answer\": \"egalitarian\", \"file_name\": \"\", \"Annotator Metadata\": {\"Steps\": \"1. Go to arxiv.org and navigate to the Advanced Search page.\\n2. Enter \\\"AI regulation\\\" in the search box and select \\\"All fields\\\" from the dropdown.\\n3. Enter 2022-06-01 and 2022-07-01 into the date inputs, select \\\"Submission date (original)\\\", and submit the search.\\n4. Go through the search results to find the article that has a figure with three axes and labels on each end of the axes, titled \\\"Fairness in Agreement With European Values: An Interdisciplinary Perspective on AI Regulation\\\".\\n5. Note the six words used as labels: deontological, egalitarian, localized, standardized, utilitarian, and consequential.\\n6. Go back to arxiv.org\\n7. Find \\\"Physics and Society\\\" and go to the page for the \\\"Physics and Society\\\" category.\\n8. Note that the tag for this category is \\\"physics.soc-ph\\\".\\n9. Go to the Advanced Search page.\\n10. Enter \\\"physics.soc-ph\\\" in the search box and select \\\"All fields\\\" from the dropdown.\\n11. Enter 2016-08-11 and 2016-08-12 into the date inputs, select \\\"Submission date (original)\\\", and submit the search.\\n12. Search for instances of the six words in the results to find the paper titled \\\"Phase transition from egalitarian to hierarchical societies driven by competition between cognitive and social constraints\\\", indicating that \\\"egalitarian\\\" is the correct answer.\", \"Number of steps\": \"12\", \"How long did this take?\": \"8 minutes\", \"Tools\": \"1. Web browser\\n2. Image recognition tools (to identify and parse a figure with three axes)\", \"Number of tools\": \"2\"}}\n",
|
271 |
+
"\n",
|
272 |
+
"import random\n",
|
273 |
+
"# random.seed(42)\n",
|
274 |
+
"random_samples = random.sample(json_QA, 1)\n",
|
275 |
+
"for sample in random_samples:\n",
|
276 |
+
" print(\"=\" * 50)\n",
|
277 |
+
" print(f\"Task ID: {sample['task_id']}\")\n",
|
278 |
+
" print(f\"Question: {sample['Question']}\")\n",
|
279 |
+
" print(f\"Level: {sample['Level']}\")\n",
|
280 |
+
" print(f\"Final Answer: {sample['Final answer']}\")\n",
|
281 |
+
" print(f\"Annotator Metadata: \")\n",
|
282 |
+
" print(f\" ├── Steps: \")\n",
|
283 |
+
" for step in sample['Annotator Metadata']['Steps'].split('\\n'):\n",
|
284 |
+
" print(f\" │ ├── {step}\")\n",
|
285 |
+
" print(f\" ├── Number of steps: {sample['Annotator Metadata']['Number of steps']}\")\n",
|
286 |
+
" print(f\" ├── How long did this take?: {sample['Annotator Metadata']['How long did this take?']}\")\n",
|
287 |
+
" print(f\" ├── Tools:\")\n",
|
288 |
+
" for tool in sample['Annotator Metadata']['Tools'].split('\\n'):\n",
|
289 |
+
" print(f\" │ ├── {tool}\")\n",
|
290 |
+
" print(f\" └── Number of tools: {sample['Annotator Metadata']['Number of tools']}\")\n",
|
291 |
+
"print(\"=\" * 50)"
|
292 |
+
]
|
293 |
+
},
|
294 |
+
{
|
295 |
+
"cell_type": "code",
|
296 |
+
"execution_count": 3,
|
297 |
+
"id": "fdd11b62",
|
298 |
+
"metadata": {},
|
299 |
+
"outputs": [
|
300 |
+
{
|
301 |
+
"name": "stderr",
|
302 |
+
"output_type": "stream",
|
303 |
+
"text": [
|
304 |
+
"/Users/csefrassia/.pyenv/versions/3.12.9/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
305 |
+
" from .autonotebook import tqdm as notebook_tqdm\n"
|
306 |
+
]
|
307 |
+
}
|
308 |
+
],
|
309 |
+
"source": [
|
310 |
+
"### build a vector database based on the metadata.jsonl\n",
|
311 |
+
"# https://python.langchain.com/docs/integrations/vectorstores/supabase/\n",
|
312 |
+
"import os\n",
|
313 |
+
"from dotenv import load_dotenv\n",
|
314 |
+
"from langchain_huggingface import HuggingFaceEmbeddings\n",
|
315 |
+
"from langchain_community.vectorstores import SupabaseVectorStore\n",
|
316 |
+
"from supabase.client import Client, create_client\n",
|
317 |
+
"\n",
|
318 |
+
"\n",
|
319 |
+
"load_dotenv()\n",
|
320 |
+
"embeddings = HuggingFaceEmbeddings(model_name=\"sentence-transformers/all-mpnet-base-v2\") # dim=768\n",
|
321 |
+
"\n",
|
322 |
+
"supabase_url = os.environ.get(\"SUPABASE_URL\")\n",
|
323 |
+
"supabase_key = os.environ.get(\"SUPABASE_SERVICE_KEY\")\n",
|
324 |
+
"supabase: Client = create_client(supabase_url, supabase_key)"
|
325 |
+
]
|
326 |
+
},
|
327 |
+
{
|
328 |
+
"cell_type": "code",
|
329 |
+
"execution_count": 4,
|
330 |
+
"id": "04babae7",
|
331 |
+
"metadata": {},
|
332 |
+
"outputs": [
|
333 |
+
{
|
334 |
+
"name": "stdout",
|
335 |
+
"output_type": "stream",
|
336 |
+
"text": [
|
337 |
+
"service_role\n"
|
338 |
+
]
|
339 |
+
}
|
340 |
+
],
|
341 |
+
"source": [
|
342 |
+
"import jwt\n",
|
343 |
+
"import os\n",
|
344 |
+
"\n",
|
345 |
+
"token = os.environ[\"SUPABASE_SERVICE_KEY\"]\n",
|
346 |
+
"decoded = jwt.decode(token, options={\"verify_signature\": False})\n",
|
347 |
+
"print(decoded[\"role\"])"
|
348 |
+
]
|
349 |
+
},
|
350 |
+
{
|
351 |
+
"cell_type": "code",
|
352 |
+
"execution_count": 5,
|
353 |
+
"id": "c8eda3dc",
|
354 |
+
"metadata": {},
|
355 |
+
"outputs": [],
|
356 |
+
"source": [
|
357 |
+
"# wrap the metadata.jsonl's questions and answers into a list of document\n",
|
358 |
+
"from langchain.schema import Document\n",
|
359 |
+
"docs = []\n",
|
360 |
+
"for sample in json_QA:\n",
|
361 |
+
" content = f\"Question : {sample['Question']}\\n\\nFinal answer : {sample['Final answer']}\"\n",
|
362 |
+
" doc = {\n",
|
363 |
+
" \"content\" : content,\n",
|
364 |
+
" \"metadata\" : { # meatadata的格式必须时source键,否则会报错\n",
|
365 |
+
" \"source\" : sample['task_id']\n",
|
366 |
+
" },\n",
|
367 |
+
" \"embedding\" : embeddings.embed_query(content),\n",
|
368 |
+
" }\n",
|
369 |
+
" docs.append(doc)\n",
|
370 |
+
"\n",
|
371 |
+
"# upload the documents to the vector database\n",
|
372 |
+
"try:\n",
|
373 |
+
" response = (\n",
|
374 |
+
" supabase.table(\"documents\")\n",
|
375 |
+
" .insert(docs)\n",
|
376 |
+
" .execute()\n",
|
377 |
+
" )\n",
|
378 |
+
"except Exception as exception:\n",
|
379 |
+
" print(\"Error inserting data into Supabase:\", exception)\n",
|
380 |
+
"\n",
|
381 |
+
"# ALTERNATIVE : Save the documents (a list of dict) into a csv file, and manually upload it to Supabase\n",
|
382 |
+
"# import pandas as pd\n",
|
383 |
+
"# df = pd.DataFrame(docs)\n",
|
384 |
+
"# df.to_csv('supabase_docs.csv', index=False)"
|
385 |
+
]
|
386 |
+
},
|
387 |
+
{
|
388 |
+
"cell_type": "code",
|
389 |
+
"execution_count": 8,
|
390 |
+
"id": "406643cc",
|
391 |
+
"metadata": {},
|
392 |
+
"outputs": [],
|
393 |
+
"source": [
|
394 |
+
"# add items to vector database\n",
|
395 |
+
"vector_store = SupabaseVectorStore(\n",
|
396 |
+
" client=supabase,\n",
|
397 |
+
" embedding= embeddings,\n",
|
398 |
+
" table_name=\"documents\",\n",
|
399 |
+
" query_name=\"match_documents_langchain\",\n",
|
400 |
+
")\n",
|
401 |
+
"retriever = vector_store.as_retriever()"
|
402 |
+
]
|
403 |
+
},
|
404 |
+
{
|
405 |
+
"cell_type": "code",
|
406 |
+
"execution_count": 9,
|
407 |
+
"id": "93faf53b",
|
408 |
+
"metadata": {},
|
409 |
+
"outputs": [
|
410 |
+
{
|
411 |
+
"data": {
|
412 |
+
"text/plain": [
|
413 |
+
"Document(metadata={'source': '840bfca7-4f7b-481a-8794-c560c340185d'}, page_content='Question : On June 6, 2023, an article by Carolyn Collins Petersen was published in Universe Today. This article mentions a team that produced a paper about their observations, linked at the bottom of the article. Find this paper. Under what NASA award number was the work performed by R. G. Arendt supported by?\\n\\nFinal answer : 80GSFC21M0002')"
|
414 |
+
]
|
415 |
+
},
|
416 |
+
"execution_count": 9,
|
417 |
+
"metadata": {},
|
418 |
+
"output_type": "execute_result"
|
419 |
+
}
|
420 |
+
],
|
421 |
+
"source": [
|
422 |
+
"query = \"On June 6, 2023, an article by Carolyn Collins Petersen was published in Universe Today. This article mentions a team that produced a paper about their observations, linked at the bottom of the article. Find this paper. Under what NASA award number was the work performed by R. G. Arendt supported by?\"\n",
|
423 |
+
"# matched_docs = vector_store.similarity_search(query, 2)\n",
|
424 |
+
"docs = retriever.invoke(query)\n",
|
425 |
+
"docs[0]"
|
426 |
+
]
|
427 |
+
},
|
428 |
+
{
|
429 |
+
"cell_type": "code",
|
430 |
+
"execution_count": 33,
|
431 |
+
"id": "a26cb46d",
|
432 |
+
"metadata": {},
|
433 |
+
"outputs": [
|
434 |
+
{
|
435 |
+
"name": "stdout",
|
436 |
+
"output_type": "stream",
|
437 |
+
"text": [
|
438 |
+
"List of tools used in all samples:\n",
|
439 |
+
"Total number of tools used: 83\n",
|
440 |
+
" ├── web browser: 107\n",
|
441 |
+
" ├── image recognition tools (to identify and parse a figure with three axes): 1\n",
|
442 |
+
" ├── search engine: 101\n",
|
443 |
+
" ├── calculator: 34\n",
|
444 |
+
" ├── unlambda compiler (optional): 1\n",
|
445 |
+
" ├── a web browser.: 2\n",
|
446 |
+
" ├── a search engine.: 2\n",
|
447 |
+
" ├── a calculator.: 1\n",
|
448 |
+
" ├── microsoft excel: 5\n",
|
449 |
+
" ├── google search: 1\n",
|
450 |
+
" ├── ne: 9\n",
|
451 |
+
" ├── pdf access: 7\n",
|
452 |
+
" ├── file handling: 2\n",
|
453 |
+
" ├── python: 3\n",
|
454 |
+
" ├── image recognition tools: 12\n",
|
455 |
+
" ├── jsonld file access: 1\n",
|
456 |
+
" ├── video parsing: 1\n",
|
457 |
+
" ├── python compiler: 1\n",
|
458 |
+
" ├── video recognition tools: 3\n",
|
459 |
+
" ├── pdf viewer: 7\n",
|
460 |
+
" ├── microsoft excel / google sheets: 3\n",
|
461 |
+
" ├── word document access: 1\n",
|
462 |
+
" ├── tool to extract text from images: 1\n",
|
463 |
+
" ├── a word reversal tool / script: 1\n",
|
464 |
+
" ├── counter: 1\n",
|
465 |
+
" ├── excel: 3\n",
|
466 |
+
" ├── image recognition: 5\n",
|
467 |
+
" ├── color recognition: 3\n",
|
468 |
+
" ├── excel file access: 3\n",
|
469 |
+
" ├── xml file access: 1\n",
|
470 |
+
" ├── access to the internet archive, web.archive.org: 1\n",
|
471 |
+
" ├── text processing/diff tool: 1\n",
|
472 |
+
" ├── gif parsing tools: 1\n",
|
473 |
+
" ├── a web browser: 7\n",
|
474 |
+
" ├── a search engine: 7\n",
|
475 |
+
" ├── a speech-to-text tool: 2\n",
|
476 |
+
" ├── code/data analysis tools: 1\n",
|
477 |
+
" ├── audio capability: 2\n",
|
478 |
+
" ├── pdf reader: 1\n",
|
479 |
+
" ├── markdown: 1\n",
|
480 |
+
" ├── a calculator: 5\n",
|
481 |
+
" ├── access to wikipedia: 3\n",
|
482 |
+
" ├── image recognition/ocr: 3\n",
|
483 |
+
" ├── google translate access: 1\n",
|
484 |
+
" ├── ocr: 4\n",
|
485 |
+
" ├── bass note data: 1\n",
|
486 |
+
" ├── text editor: 1\n",
|
487 |
+
" ├── xlsx file access: 1\n",
|
488 |
+
" ├── powerpoint viewer: 1\n",
|
489 |
+
" ├── csv file access: 1\n",
|
490 |
+
" ├── calculator (or use excel): 1\n",
|
491 |
+
" ├── computer algebra system: 1\n",
|
492 |
+
" ├── video processing software: 1\n",
|
493 |
+
" ├── audio processing software: 1\n",
|
494 |
+
" ├── computer vision: 1\n",
|
495 |
+
" ├── google maps: 1\n",
|
496 |
+
" ├── access to excel files: 1\n",
|
497 |
+
" ├── calculator (or ability to count): 1\n",
|
498 |
+
" ├── a file interface: 3\n",
|
499 |
+
" ├── a python ide: 1\n",
|
500 |
+
" ├── spreadsheet editor: 1\n",
|
501 |
+
" ├── tools required: 1\n",
|
502 |
+
" ├── b browser: 1\n",
|
503 |
+
" ├── image recognition and processing tools: 1\n",
|
504 |
+
" ├── computer vision or ocr: 1\n",
|
505 |
+
" ├── c++ compiler: 1\n",
|
506 |
+
" ├── access to google maps: 1\n",
|
507 |
+
" ├── youtube player: 1\n",
|
508 |
+
" ├── natural language processor: 1\n",
|
509 |
+
" ├── graph interaction tools: 1\n",
|
510 |
+
" ├── bablyonian cuniform -> arabic legend: 1\n",
|
511 |
+
" ├── access to youtube: 1\n",
|
512 |
+
" ├── image search tools: 1\n",
|
513 |
+
" ├── calculator or counting function: 1\n",
|
514 |
+
" ├── a speech-to-text audio processing tool: 1\n",
|
515 |
+
" ├── access to academic journal websites: 1\n",
|
516 |
+
" ├── pdf reader/extracter: 1\n",
|
517 |
+
" ├── rubik's cube model: 1\n",
|
518 |
+
" ├── wikipedia: 1\n",
|
519 |
+
" ├── video capability: 1\n",
|
520 |
+
" ├── image processing tools: 1\n",
|
521 |
+
" ├── age recognition software: 1\n",
|
522 |
+
" ├── youtube: 1\n"
|
523 |
+
]
|
524 |
+
}
|
525 |
+
],
|
526 |
+
"source": [
|
527 |
+
"# list of the tools used in all the samples\n",
|
528 |
+
"from collections import Counter, OrderedDict\n",
|
529 |
+
"\n",
|
530 |
+
"tools = []\n",
|
531 |
+
"for sample in json_QA:\n",
|
532 |
+
" for tool in sample['Annotator Metadata']['Tools'].split('\\n'):\n",
|
533 |
+
" tool = tool[2:].strip().lower()\n",
|
534 |
+
" if tool.startswith(\"(\"):\n",
|
535 |
+
" tool = tool[11:].strip()\n",
|
536 |
+
" tools.append(tool)\n",
|
537 |
+
"tools_counter = OrderedDict(Counter(tools))\n",
|
538 |
+
"print(\"List of tools used in all samples:\")\n",
|
539 |
+
"print(\"Total number of tools used:\", len(tools_counter))\n",
|
540 |
+
"for tool, count in tools_counter.items():\n",
|
541 |
+
" print(f\" ├── {tool}: {count}\")"
|
542 |
+
]
|
543 |
+
},
|
544 |
+
{
|
545 |
+
"cell_type": "code",
|
546 |
+
"execution_count": 34,
|
547 |
+
"id": "cdfc8075",
|
548 |
+
"metadata": {},
|
549 |
+
"outputs": [],
|
550 |
+
"source": [
|
551 |
+
"system_prompt = \"\"\"\n",
|
552 |
+
"You are a helpful assistant tasked with answering questions using a set of tools.\n",
|
553 |
+
"If the tool is not available, you can try to find the information online. You can also use your own knowledge to answer the question. \n",
|
554 |
+
"You need to provide a step-by-step explanation of how you arrived at the answer.\n",
|
555 |
+
"==========================\n",
|
556 |
+
"Here is a few examples showing you how to answer the question step by step.\n",
|
557 |
+
"\"\"\"\n",
|
558 |
+
"for i, samples in enumerate(random_samples):\n",
|
559 |
+
" system_prompt += f\"\\nQuestion {i+1}: {samples['Question']}\\nSteps:\\n{samples['Annotator Metadata']['Steps']}\\nTools:\\n{samples['Annotator Metadata']['Tools']}\\nFinal Answer: {samples['Final answer']}\\n\"\n",
|
560 |
+
"system_prompt += \"\\n==========================\\n\"\n",
|
561 |
+
"system_prompt += \"Now, please answer the following question step by step.\\n\"\n",
|
562 |
+
"\n",
|
563 |
+
"# save the system_prompt to a file\n",
|
564 |
+
"with open('system_prompt.txt', 'w') as f:\n",
|
565 |
+
" f.write(system_prompt)"
|
566 |
+
]
|
567 |
+
},
|
568 |
+
{
|
569 |
+
"cell_type": "code",
|
570 |
+
"execution_count": 35,
|
571 |
+
"id": "186b13af",
|
572 |
+
"metadata": {},
|
573 |
+
"outputs": [
|
574 |
+
{
|
575 |
+
"name": "stdout",
|
576 |
+
"output_type": "stream",
|
577 |
+
"text": [
|
578 |
+
"\n",
|
579 |
+
"You are a helpful assistant tasked with answering questions using a set of tools.\n",
|
580 |
+
"If the tool is not available, you can try to find the information online. You can also use your own knowledge to answer the question. \n",
|
581 |
+
"You need to provide a step-by-step explanation of how you arrived at the answer.\n",
|
582 |
+
"==========================\n",
|
583 |
+
"Here is a few examples showing you how to answer the question step by step.\n",
|
584 |
+
"\n",
|
585 |
+
"Question 1: What was the volume in m^3 of the fish bag that was calculated in the University of Leicester paper \"Can Hiccup Supply Enough Fish to Maintain a Dragon’s Diet?\"\n",
|
586 |
+
"Steps:\n",
|
587 |
+
"1. Searched '\"Can Hiccup Supply Enough Fish to Maintain a Dragon’s Diet?\"' on Google.\n",
|
588 |
+
"2. Opened \"Can Hiccup Supply Enough Fish to Maintain a Dragon’s Diet?\" at https://journals.le.ac.uk/ojs1/index.php/jist/article/view/733.\n",
|
589 |
+
"3. Clicked \"PDF\".\n",
|
590 |
+
"4. Found the calculations for the volume of the fish bag and noted them.\n",
|
591 |
+
"Tools:\n",
|
592 |
+
"1. Web browser\n",
|
593 |
+
"2. Search engine\n",
|
594 |
+
"3. PDF access\n",
|
595 |
+
"Final Answer: 0.1777\n",
|
596 |
+
"\n",
|
597 |
+
"==========================\n",
|
598 |
+
"Now, please answer the following question step by step.\n",
|
599 |
+
"\n"
|
600 |
+
]
|
601 |
+
}
|
602 |
+
],
|
603 |
+
"source": [
|
604 |
+
"# load the system prompt from the file\n",
|
605 |
+
"with open('system_prompt.txt', 'r') as f:\n",
|
606 |
+
" system_prompt = f.read()\n",
|
607 |
+
"print(system_prompt)"
|
608 |
+
]
|
609 |
+
},
|
610 |
+
{
|
611 |
+
"cell_type": "code",
|
612 |
+
"execution_count": 36,
|
613 |
+
"id": "3a162284",
|
614 |
+
"metadata": {},
|
615 |
+
"outputs": [],
|
616 |
+
"source": [
|
617 |
+
"import dotenv\n",
|
618 |
+
"from langgraph.graph import MessagesState, START, StateGraph\n",
|
619 |
+
"from langgraph.prebuilt import tools_condition\n",
|
620 |
+
"from langgraph.prebuilt import ToolNode\n",
|
621 |
+
"from langchain_openai import ChatOpenAI\n",
|
622 |
+
"from langchain_huggingface import HuggingFaceEmbeddings\n",
|
623 |
+
"from langchain_community.tools.tavily_search import TavilySearchResults\n",
|
624 |
+
"from langchain_community.document_loaders import WikipediaLoader\n",
|
625 |
+
"from langchain_community.document_loaders import ArxivLoader\n",
|
626 |
+
"from langchain_community.vectorstores import SupabaseVectorStore\n",
|
627 |
+
"from langchain.tools.retriever import create_retriever_tool\n",
|
628 |
+
"from langchain_core.messages import HumanMessage, SystemMessage\n",
|
629 |
+
"from langchain_core.tools import tool\n",
|
630 |
+
"from supabase.client import Client, create_client\n",
|
631 |
+
"\n",
|
632 |
+
"# Define the retriever from supabase\n",
|
633 |
+
"load_dotenv()\n",
|
634 |
+
"\n",
|
635 |
+
"\n",
|
636 |
+
"\n",
|
637 |
+
"embeddings = HuggingFaceEmbeddings(model_name=\"sentence-transformers/all-mpnet-base-v2\") # dim=768\n",
|
638 |
+
"\n",
|
639 |
+
"supabase_url = os.environ.get(\"SUPABASE_URL\")\n",
|
640 |
+
"supabase_key = os.environ.get(\"SUPABASE_SERVICE_KEY\")\n",
|
641 |
+
"supabase: Client = create_client(supabase_url, supabase_key)\n",
|
642 |
+
"vector_store = SupabaseVectorStore(\n",
|
643 |
+
" client=supabase,\n",
|
644 |
+
" embedding= embeddings,\n",
|
645 |
+
" table_name=\"documents\",\n",
|
646 |
+
" query_name=\"match_documents_langchain\",\n",
|
647 |
+
")\n",
|
648 |
+
"\n",
|
649 |
+
"question_retrieve_tool = create_retriever_tool(\n",
|
650 |
+
" vector_store.as_retriever(),\n",
|
651 |
+
" \"question_retriever\",\n",
|
652 |
+
" \"Find similar questions in the vector database for the given question.\",\n",
|
653 |
+
")\n",
|
654 |
+
"\n",
|
655 |
+
"@tool\n",
|
656 |
+
"def multiply(a: int, b: int) -> int:\n",
|
657 |
+
" \"\"\"Multiply two numbers.\n",
|
658 |
+
"\n",
|
659 |
+
" Args:\n",
|
660 |
+
" a: first int\n",
|
661 |
+
" b: second int\n",
|
662 |
+
" \"\"\"\n",
|
663 |
+
" return a * b\n",
|
664 |
+
"\n",
|
665 |
+
"@tool\n",
|
666 |
+
"def add(a: int, b: int) -> int:\n",
|
667 |
+
" \"\"\"Add two numbers.\n",
|
668 |
+
" \n",
|
669 |
+
" Args:\n",
|
670 |
+
" a: first int\n",
|
671 |
+
" b: second int\n",
|
672 |
+
" \"\"\"\n",
|
673 |
+
" return a + b\n",
|
674 |
+
"\n",
|
675 |
+
"@tool\n",
|
676 |
+
"def subtract(a: int, b: int) -> int:\n",
|
677 |
+
" \"\"\"Subtract two numbers.\n",
|
678 |
+
" \n",
|
679 |
+
" Args:\n",
|
680 |
+
" a: first int\n",
|
681 |
+
" b: second int\n",
|
682 |
+
" \"\"\"\n",
|
683 |
+
" return a - b\n",
|
684 |
+
"\n",
|
685 |
+
"@tool\n",
|
686 |
+
"def divide(a: int, b: int) -> int:\n",
|
687 |
+
" \"\"\"Divide two numbers.\n",
|
688 |
+
" \n",
|
689 |
+
" Args:\n",
|
690 |
+
" a: first int\n",
|
691 |
+
" b: second int\n",
|
692 |
+
" \"\"\"\n",
|
693 |
+
" if b == 0:\n",
|
694 |
+
" raise ValueError(\"Cannot divide by zero.\")\n",
|
695 |
+
" return a / b\n",
|
696 |
+
"\n",
|
697 |
+
"@tool\n",
|
698 |
+
"def modulus(a: int, b: int) -> int:\n",
|
699 |
+
" \"\"\"Get the modulus of two numbers.\n",
|
700 |
+
" \n",
|
701 |
+
" Args:\n",
|
702 |
+
" a: first int\n",
|
703 |
+
" b: second int\n",
|
704 |
+
" \"\"\"\n",
|
705 |
+
" return a % b\n",
|
706 |
+
"\n",
|
707 |
+
"@tool\n",
|
708 |
+
"def wiki_search(query: str) -> str:\n",
|
709 |
+
" \"\"\"Search Wikipedia for a query and return maximum 2 results.\n",
|
710 |
+
" \n",
|
711 |
+
" Args:\n",
|
712 |
+
" query: The search query.\"\"\"\n",
|
713 |
+
" search_docs = WikipediaLoader(query=query, load_max_docs=2).load()\n",
|
714 |
+
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
|
715 |
+
" [\n",
|
716 |
+
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content}\\n</Document>'\n",
|
717 |
+
" for doc in search_docs\n",
|
718 |
+
" ])\n",
|
719 |
+
" return {\"wiki_results\": formatted_search_docs}\n",
|
720 |
+
"\n",
|
721 |
+
"@tool\n",
|
722 |
+
"def web_search(query: str) -> str:\n",
|
723 |
+
" \"\"\"Search Tavily for a query and return maximum 3 results.\n",
|
724 |
+
" \n",
|
725 |
+
" Args:\n",
|
726 |
+
" query: The search query.\"\"\"\n",
|
727 |
+
" search_docs = TavilySearchResults(max_results=3).invoke(query=query)\n",
|
728 |
+
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
|
729 |
+
" [\n",
|
730 |
+
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content}\\n</Document>'\n",
|
731 |
+
" for doc in search_docs\n",
|
732 |
+
" ])\n",
|
733 |
+
" return {\"web_results\": formatted_search_docs}\n",
|
734 |
+
"\n",
|
735 |
+
"@tool\n",
|
736 |
+
"def arvix_search(query: str) -> str:\n",
|
737 |
+
" \"\"\"Search Arxiv for a query and return maximum 3 result.\n",
|
738 |
+
" \n",
|
739 |
+
" Args:\n",
|
740 |
+
" query: The search query.\"\"\"\n",
|
741 |
+
" search_docs = ArxivLoader(query=query, load_max_docs=3).load()\n",
|
742 |
+
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
|
743 |
+
" [\n",
|
744 |
+
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content[:1000]}\\n</Document>'\n",
|
745 |
+
" for doc in search_docs\n",
|
746 |
+
" ])\n",
|
747 |
+
" return {\"arvix_results\": formatted_search_docs}\n",
|
748 |
+
"\n",
|
749 |
+
"@tool\n",
|
750 |
+
"def similar_question_search(question: str) -> str:\n",
|
751 |
+
" \"\"\"Search the vector database for similar questions and return the first results.\n",
|
752 |
+
" \n",
|
753 |
+
" Args:\n",
|
754 |
+
" question: the question human provided.\"\"\"\n",
|
755 |
+
" matched_docs = vector_store.similarity_search(query, 3)\n",
|
756 |
+
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
|
757 |
+
" [\n",
|
758 |
+
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content[:1000]}\\n</Document>'\n",
|
759 |
+
" for doc in matched_docs\n",
|
760 |
+
" ])\n",
|
761 |
+
" return {\"similar_questions\": formatted_search_docs}\n",
|
762 |
+
"\n",
|
763 |
+
"tools = [\n",
|
764 |
+
" multiply,\n",
|
765 |
+
" add,\n",
|
766 |
+
" subtract,\n",
|
767 |
+
" divide,\n",
|
768 |
+
" modulus,\n",
|
769 |
+
" wiki_search,\n",
|
770 |
+
" web_search,\n",
|
771 |
+
" arvix_search,\n",
|
772 |
+
" question_retrieve_tool\n",
|
773 |
+
"]\n",
|
774 |
+
"\n",
|
775 |
+
"\n",
|
776 |
+
"llm = ChatOpenAI(\n",
|
777 |
+
" model=os.environ.get(\"OPENAI_MODEL_NAME\", \"gpt-4o\"),\n",
|
778 |
+
" temperature=0.0\n",
|
779 |
+
")\n",
|
780 |
+
"llm_with_tools = llm.bind_tools(tools)\n"
|
781 |
+
]
|
782 |
+
},
|
783 |
+
{
|
784 |
+
"cell_type": "code",
|
785 |
+
"execution_count": 22,
|
786 |
+
"id": "8fac9099",
|
787 |
+
"metadata": {},
|
788 |
+
"outputs": [],
|
789 |
+
"source": [
|
790 |
+
"# load the system prompt from the file\n",
|
791 |
+
"with open('system_prompt.txt', 'r') as f:\n",
|
792 |
+
" system_prompt = f.read()\n",
|
793 |
+
"\n",
|
794 |
+
"\n",
|
795 |
+
"# System message\n",
|
796 |
+
"sys_msg = SystemMessage(content=system_prompt)\n",
|
797 |
+
"\n",
|
798 |
+
"# Node\n",
|
799 |
+
"def assistant(state: MessagesState):\n",
|
800 |
+
" \"\"\"Assistant node\"\"\"\n",
|
801 |
+
" return {\"messages\": [llm_with_tools.invoke([sys_msg] + state[\"messages\"])]}\n",
|
802 |
+
"\n",
|
803 |
+
"# Build graph\n",
|
804 |
+
"builder = StateGraph(MessagesState)\n",
|
805 |
+
"builder.add_node(\"assistant\", assistant)\n",
|
806 |
+
"builder.add_node(\"tools\", ToolNode(tools))\n",
|
807 |
+
"builder.add_edge(START, \"assistant\")\n",
|
808 |
+
"builder.add_conditional_edges(\n",
|
809 |
+
" \"assistant\",\n",
|
810 |
+
" # If the latest message (result) from assistant is a tool call -> tools_condition routes to tools\n",
|
811 |
+
" # If the latest message (result) from assistant is a not a tool call -> tools_condition routes to END\n",
|
812 |
+
" tools_condition,\n",
|
813 |
+
")\n",
|
814 |
+
"builder.add_edge(\"tools\", \"assistant\")\n",
|
815 |
+
"\n",
|
816 |
+
"# Compile graph\n",
|
817 |
+
"graph = builder.compile()\n"
|
818 |
+
]
|
819 |
+
},
|
820 |
+
{
|
821 |
+
"cell_type": "code",
|
822 |
+
"execution_count": 23,
|
823 |
+
"id": "b91dcb05",
|
824 |
+
"metadata": {},
|
825 |
+
"outputs": [
|
826 |
+
{
|
827 |
+
"data": {
|
828 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAANgAAAD5CAIAAADKsmwpAAAQAElEQVR4nOzdB1xT1x4H8JNBQhIIkLCXAqKCKG6qtI7qw1EXTtC2jmfr6mut2qGttVpbbWuf1omrddddreLWJ+6+WieIgiAWEiKbkL14f8gr5fECassN5+ae74dPPuHekEDy48x7z+VWVlYigmhqXEQQGCBBJLBAgkhggQSRwAIJIoEFEkQCCySIdRl05iKZQVNh1lSYzKZKo4EGw1t8AZvLYwlduUJXtk+wANEQi4wjWmlUpsxfVdmp6hKF3t2bJ3TlwOcqlnCNehq8P07O7FIF/POYII6P0zWhUS6h7URh7VwQfZAgIngHrhwpVuRovYKcQ6NEgeFCRGcGnSU7VZX7QCt7qO0+WNqyoyuiA6YHMf1n5dndBfCBdXzZAzmWilIj/INBMRn3mq9IjHsbjNFBvHCwkOOEYgd7IcdV8kR/aI2871if4NZYl/TMDeK/9hVIfHjRPdwRAxxOkr0wUOoT7IxwxdAgHtkgD2olbN+TESm0OrxO1rqLuFVnTJuMbMQ8V44U+YcJGJVCMHRawI1zpUVyPcIS44KYebMCbjv1cbSuybNIfD8YmsWVFhzrQMYFMeVAYYfeTEyhVWhbl0uHixB+mBXEm+dLW3cWC1w4iKmgQZJ5U6VWmhBmmBXEnDR1t8ESxGw9hnveSilDmGFQEHPuqblObA6Hif2z2oJbi1IvlyPMMOhTeXRXHdJWhOzrgw8+OHLkCHp+ffv2lcvliAI8Z7ZXIB8mABFOGBTEkgJDmN2DmJ6ejp6fQqEoK6Ow9mzZwSXvoQbhhClBNOgsRTK9wIWqKddDhw6NHj06Nja2T58+77333pMnT2Bj586doVRbuHBhr1694Fuz2ZyUlDRs2LDu3bsPGDBg6dKlWu1/iyUo/3bt2vX2229369bt4sWLgwYNgo1DhgyZPXs2ooDIzakwD68BRaYEEfqJ1E3837x5c/HixYmJiXv27Pn222+hMPvwww9h+7Fjx+AWcnn48GG4A1HbsmXL9OnTd+/evWDBgpSUlDVr1lifgcvlHjx4sEWLFuvXr+/SpcuSJUtg444dOxYtWoQoIBJz1EozwglTDoxVl5tEblT9sVlZWXw+f/DgwZCnwMBAKOry8/Nhu5ubG9wKhULrHSgFocCDtMH94ODguLi4y5cvW5+BxWI5OztDiWj9ViSqakKIxWLrnUYHbwW8IQgnTAmixYJ4AqqKf6iCIUmTJ08eOnRoTEyMv7+/VCr9/4e5u7snJydD2VlQUGAymTQaDWS0Zm+7du2QvbC5LOiyIJwwpWqGyqi80Iio0bx58++//x7KwlWrVkHDbsKECampqf//sK+//nrTpk3QlNy4cSNU0/Hx8bX3urjY74BqdZmJw2UhnDAliEIxV0PldEJ4eDgUdadPn4ZGHofDmTlzpsFgqP0A6KlAS3H8+PEDBw4MCAjw9PRUqVSoiVDaYv5zmBJEgYjjGcA3GS2IAlD+3blzB+5ABDt16jRt2jTorxQXF1v3Wg+0s1gskEVrYxGo1eoLFy40fAwedUfo6TUW7yA+wgmDxhFhijn7rhpR4MqVK7NmzTp79mxeXt6DBw+gU+zn5+fr68uvduPGDdgIjchWrVodPXoUHpOZmQlFJoz1KJXKnJwcaC/WeULopsDtpUuXsrOzEQUyblT4NMPrIFkGBTEkSvQolZIgTpo0CRp8K1asGDly5IwZM6AkW7lyJSQPdkF78cyZMzBkA0OGn3zyCRSK0EacO3duQkICPBLC+vrrr0Pfpc4TRkREwFjj8uXLv/rqK0SBnHuakDb2HttvGIOO0DboLcmb8+OnByBm++2BJvuuqtdIb4QTBpWIPD7bO5B/41wpYrYrPxW16eaGMMOslR66D5KumZNV35mj0J94+eWXbe6CLjCPx7O5KyQkBMZuEDVu3boFrUn0nL8SdOFhhMjmLmgdevjwvALw6qkgBp48dftCmcVS2aGX7SxWVFTY3K7X6+FTtzb76mCz2RTNfwDox9TMRzfKr5S8Wf5SvJdY4oQww8Sz+I59l9+qsyu9VuRoFDj/4Uw8SnTgJL+rR4sLcnWISVIOFEr9eNj++zH0vGb4qw98m/fCK1K6r3TzjCCF3sH8iC5ihCuGHjcPTauRM4N+OVWadg27g+YbF/zLHV4nE0u4OKcQkUWYriYXPUrTQG+6eSReA7yN4vrpkrRryt6jvYNb4V7wk2XpULFcf+VoMV/ADggXwHyD0JX2Q1qFefrH6epfz5a2e8k9ZoCEzcbrQBubSBD/S5alffBLxaM0tYePk8SHJ3LjisRckRvHjNeBzLZB0pQlRrXSXGmpzLihchaxW0S7QApxO+iwASSIdSlytIUyg7rcpFaaoCzRVDRmEmFQMDs7u02bNqhRuUq4lZaqYy5dPbj+YQJXD+yGCZ+KBNGusrKy5s6du3fvXkT8L7KYO4EFEkQCCySIBBZIEAkskCASWCBBJLBAgkhggQSRwAIJIoEFEkQCCySIBBZIEAkskCASWCBBJLBAgkhggQSRwAIJIoEFEkQCCySIBBZIEAkskCASWCBBJLBAgkhggQTRrlgslrc3XotXY4IE0a4qKyv//xoCBCJBJDBBgkhggQSRwAIJIoEFEkQCCySIBBZIEAkskCASWCBBJLBAgkhggQSRwAIJIoEFEkQCCySIBBZIEAkskAv+2ENCQoJWq4W32mg0lpSU+Pr6wn29Xn/y5ElEVGPoZXLtbMiQIQqFQi6XFxYWms1mmUwG98VirK9ba2ckiPaQmJgYGBhYewubzY6NjUXE70gQ7YHFYo0YMYLD4dRsCQ4OHjNmDCJ+R4JoJ6NHj64pFCGXPXv29PPzQ8TvSBDthMvlQgXN5/PhPiRy5MiRiKiFBNF+hg8fHhAQAP3l7t27k+KwDsaNI2pV5mK5wWCwoKYwLG7KiRMnesckZKeqUROodHHnSnx4XCfsCiAGjSOaDJZTO57IsrRBLUUGXdMEsWk58dhlhQazydKyk2vXfhKEE6YEUa81H1gp6zLA07eZEDHe9VNFHC7qEe+JsMGUNuKeZbm9RvuRFFp1jvOsrGRdOVqMsMGIIKZeKQ+NdnWVOCHidx37SOXZWpXShPDAiCAqHuuEYpLCumA4s1RhQHhgRK8ZuiZiKQliXRI/vrrMjPDAiCDq1JZKJvaSnwL+P80WXLqq5HhEAgskiAQWSBAJLJAgElggQSSwQIJIYIEEkcACCSKBBRJEAgskiAQWSBAJLJBzVqiVnf2wd5/Od+/eQkSDSBCp5enlPfOdD/39Axt4zKNHWQljB6G/ZtjwvvkKOaItUjVTS+wqHjrkKWeOZmSko7/myRNFeXkZojMSRNvuP7i3adPqzIcPDAZ982ahf//7jM6dYqy7ko8d2n9gV36+jM93jm7X8a0Zc7y9ferbDlXz399IWLliU9u27SEuSetX3Lr9q0aj9vX1Hzli7OBBw7dsXb9120b4cajBZ0yfBRvre+nDP+3/fkvSks9XrFz9dW5uj
tjV7dVX/z5wwNCbt67Pmj0VHjB23JCxiRPemPwWoiFSNdug1+s/+PAfTjzesq/XrluzLbJNu/mfzC4srLqq6J07N5d9s3jE8MTNm/Ys+eLbcmXZws8+bGB7bV99vbCouPCLz1d8t3nv8PiEFd8u/eX6tYQx44cPT4DIHjp4ZvCgEQ28NJfLVatV23ZsWrjgqyOHz8fFvbJ8xRLY1Taq/Sfzl8AD1ifteO3VyYieSIloA4fDWf7NeqnU083NHb6dNGHawYO7U9Nu9+71t0c5WXw+v3+/wRCLAP/ABfOXKp7kw2Pq215b9qOH8cPGRLRuA/cDhoxsGd7ax8fP2dmZz+OzWCzra5lMpvpe2rp3bMIEawE8oP9QKEqzsjJeeOFFoVAEW1xdxfBsiJ5IEG2AMBlNxpWrvnqYlaFSVVjPuFUqy+G2Q/vOEJq3Z06GOrFTpxg/X3+JRNrA9tq6d+vxw+4t8IQxMbHt2naIiIh6rpe2Cg0Nt96B2MFthaoCOQRSNduQl/fb7DlTDQbDvLmfbUjauX7djppdwcHNV6/8HnrBGzaugjbZ9Lcm3EtPbWB7be/OnDt50ow7d27MeW96/Ii+8Ego4Z79pa2sq+f8wVFOSyclog3n/nXKbDZ//NHn1k8dOhm194aFhX88bzE8AEYHN3+/dt5HM/fuPsbj8Wxur/2DUNqNGJEIXyUlxadOJ2/+bq27u8foUa8++0s7MFIi2mA0GqDnW1P2nD7zR57S01PT0u6g6nZk+/adJk2cBuMmEKz6ttf8oEqlOn3muLUIhFo7YczrkZFtoU/97C/9VLRetIME0YaI1lEQo+MnfiouLjp0eN/9B2lQdGVVNdpUP//7ykfzZ6VcOCuT58EIC/QkfH38fHx869te85zQgly56kvoWcNeeb7szNkTMHwIkYVdLi6u8ELQ71Yo8ht46QZ+YXF1e/HatUv0HdMmVbMN3bv3GDP6tfUbVq5d98+YrrEfvr9w/4GdP+zeymazYXTQZDImJa2AgRiRyCUqKnrpkpUQslfHTbK5veY5RSLRl0tXwwDhrNlToAkI44gTJ0yFXjbs6vNy/5Onjs5+bxqMAsLG+l46PLx1fb9wy5YRXbt2X5e0XCbP/ceMOYiGGLEI08HVsrYvSXybCxBRy5UjBYEtnNu8gMWa8qREJLBAgkhggQSRwAIJIoEFEkQCCySIBBZIEAkskCASWCBBJLBAgkhggQSRwAIJIoEFEkQCC4wIopsn12EOqW9EfGc2j89CeGDEgbECEadQpkfE/5I91Eh8eAgPjAhiszaiskJcLrGECZ3GLHDhSP35CA+MCGJAqEDizb12tAARvzuzQ/7iMIyuTsqg6zVfP1NakKv3DxN6BjhjeOVsO2CxKpWlpooiw8/HixLmBHlgUy8jRgUR5KSrM35V6dTmkloXQzQYDGw2m8u1R78N3m2jwcDjU1UhajQaFovF/h2Hw6m9ly/kQO/EL9S5a5yEy8PrX5FZQazDbDY/fPjw/PnzU6ZMQXaRlZU1d+7cvXv3ImrMmzfv+PHjEEEPDw8XFxcejxcYGNiiRYvp06cjvDE3iNu2bXvllVdEIpE914upqKj49ddfe/Xqhahx//79d955p7j4j/OpK6v5+fklJycjjDH0vOYDBw6UlpZKpVI7r1rk6upKXQpB69atIyMja2+Bmhr+2TBPIWJgEM+dOwe3sbGxUHIguyssLFy7di2iUmJiokQiqfkWqumLFy8i7DEriEuXLs3OzoY7vr6+qCkolUpokiIqde3aNSwsrObb0NDQw4cPI+wxJYjQKYHbfv36TZ7clEtZent726HfMGrUKLG46rT5gICA3bt33759+4svvkB4Y0RnBTqqffr06du3L2KMcePGQTPg1KlT1m+hTfzjjz/u2LED4crBg6hSqcrKyu7duxcXF4cwAOHYt29fkwympKenv/baa1u3bm3Tpg3CjyNXzZ999llR8Y8BRAAAD1BJREFUUREMpGGSQmSXNmJ9IiIirl+//uWXX+7fvx/hx2GDCJVR27ZtmzdvjnBinzZiA2D0NDMzc+HChQgzDlg1b9iw4c0334SJO5hXQIQtP/30086dO7dv347PW+RoJeInn3zi7l61Hj+eKbTDOOKzGDJkyOeff96zZ89bt3C5NpvjBDElJQVu33777dGjRyNcNWEbsQ6YgL569eqqVat27dqFMOAgQYTRCuvyrJ6eGB1j9/+avI1Yx+bNm/Pz8z/++GPU1GjfRszLy4NPF+ZLYJoVEX/K8ePHN27cCE1GmJVGTYTGJaLJZHrjjTd0Oh00B+mSQkzaiHUMGDBg+fLlcPvLL7+gJkLXIEJBfvny5WnTpkFbB9EHPm3EOpo1a3bhwgWoqWHEGzUF+gXRYrG8++67EETo9HXs2BHRCm5txDqSkpLKy8vff/99ZHf0ayMuWLAAJo579OiBCGqcPXt2xYoV0GS0DoTZB52CCLXG+PHjEZ014Vzzc5HL5TAxvWjRotjYWGQXtKma+/fvHxUVhWgO2zZiHf7+/lAu7tmzZ9OmTcguaFAi3rhxA9qC0Dum78WIa1B9zkqjW7duXUZGBvSpEcWwLhHVanW/fv2sx3g6QAoR9eesNDoYl4iPj4dPoaCA2uUJ8C0RVSoVDPp7eHhgPlnyXOjSRqyjqKgImoxLly6Njo5G1MC0RDx48CDUyOHh4Y6UQlRdrt+8eRPRDXwKMPuyZs0amUyGqIHpsnSZmZlGoxE5HKiaYWZFq9XCzDjtGhtQNEAnBlED0xJx6tSpgwYNQo7IyclJIBBAhxQaHog+7t+/36pVq9oX/m1cmAbRzc2tCSfg7QAGRGfOnInoIz09PSIiAlEG0yCuX7/+6NGjyKFBoQi3ubm5iA7u3btXZw2JxoVpEGHGE8ZuEAOkpKTAyCLCHtUlIqbDNxBELpfr2LVzjcWLF+NwaGrDOnfufP36dUQZ0kZsetYUXrt2DeEK6mVKi0NE2oj4yMvLO3nyJMIS1fUyIm1EfIwcOVKpVCIsUd1TQdgGccqUKY46jtiAUaNGwe0PP/yAMMPcEpFRbcQ6pFIpVquCWCwWmOiC0WxEJdJGxE5cXBxWK6XYoV5GpI2IJxgrQdWrViAM2KFeRqSNiLP4+PidO3eipmafIGJ69A20ERHjdejQwcfHBzU1qJoTExMRxUgbEWvWw66gaERNxGQyPXr0KDw8HFGMtBFpICkpafv27bW39OvXD9mFfXoqiMw104WhGofDEQgEAwcOfPLkCWTRDku079mz5/Hjx3Y45Z60EemBV+3FF1+Ed6agoIDFYqWlpZWUlNS+pAoVoETs0qULoh5pI9IJjHVDWWi9Dym8dOkSoph9usyItBFpZMSIEbXPXdJoNKdPn0ZUgsZAbm5u7csHUQfTqhnGEe1z3Vq6gBTm5OSg6mvrWbfAHdiSnZ0dGhqKqGG3ngoic810ceDAgWHDhgUHB3t4eFgvOAoboZqmtHa2W72MsC0RoY0YEBBAJldqmz9/PtzevXv3YrXi4mJlmfb8mZ/jB49F1Mi4l9u+ffuKUhP6s+D/
RSx5pozhNXzTt2/f0tJS669krYPgvq+v77FjxxBRy/XTJXculVayTEadxVkgQNSA0WwYMPorp5BK/PiyTE2LaFHMQKlY4tTAI/EqEbt163b8+PHafzmbzR48eDAiajmxVeEicRowKdjF3Qlhz2S0lBUY9n2bN3xGgId3vdccwauNmJCQUGd2NTAw0A4TnTRyfIvCw5cf3UNKixQCrhPbM8B59KyQH9fIlCX1rt6BVxDbtGlTexFEKBr79+9vz3VLMZdzT80TcCJf8EA01HuM37VjJfXtxa7XPH78+JrZAigOcb56j/0V5Oqd+HRdf9/Dh//wVkV9e7H7q2DgKjo62jpCAcUhjFYg4nd6jdnTj4/oicNlBbcSlRUabO7F8d9r4sSJMJcFneUxY8Ygoha10myi8xppJU8M9fXB/2qvWZ6lKS8yqStMGqXZYoYOvwU1AulLrafDgPb143oYtUV/GV/AZiGWUMyBL6k/38ufroWKA/uTQXycrs64ocpOVXv4CiorWRwnDhu+OJzGGpOMiu4NtxUa1ChUWmQxmc0yk9mgM+rKjTpzWDtR686uPs0cYTlkx/DcQcx/pL3wY7GTkMfi8sO6eXCdOIhuDFpTcZE65VCpQIheGiZ19yKXdW56zxfEMz8UyrN10hCJyIPGZQlPwJUEVR3vqCxQH1glj+jq2n2QFBFN6lk7KzA+vmXRY52ZH9zRn9YprE3sLQrrFlSgYMNYKyKa1DMF0Wyq3DA32y/Sx0XqgEfEuAeIndzEu5fRY8FMR/X0IFoslevez4rsE8IX0WNO6U9wkQrFAZKtix8jook8PYg7l/wW3j0AOTqhu7MkyD15M50WWHckTwni+QNF7kHufBEj+pWu3i5GxL+VUoYIu2soiMVy/aNUtauXC2IMd3+3S4eKaHfpYAfQUBAvHCr2DKH2bEUM+bb0uHioGBH2VW8QFTlak5nt6iVEWLqdenbO/Bi1uvGrUc/m7rJsvV5rRkS1ofF9tm2n/GK59Qbx4W01zNwhZmKxc9IaaXqxqX268IMTJ48g7NUbxKw7aldvTItDqgklosxbKuQQMjLSER3YnuIrLTAIXJ2o6yznye8fO70Wbs0mY3hYlyED3pV4+MH2K/8+cPLshkmvfnP42D8LCnOEQrc+PSfGdBoCu8xm0+Fjy2/cOVFpsUS2erFFaGdEGbG3MD8N03XVn0vvPlXv0pdfLVyz9psjh8/D/eRjh/bu2yGX5wkEwpiu3adNfVci+e/0ZgO7asBj9h/YlZ8v4/Odo9t1fGvGHG/vxlk4z3aJqCoz6bSNckCXDaVliqTvprNZ7GmT1k6dtEajUa7f8pbRVHW8JIfN1elUZ1K+ez1hyWcfne3UfuDBI1+WlVddsvrcha0/Xz80ZMDMd6dvC2neHh6DKMNisVSlRrXyz59GiYm9u6vOfvzHW+/t2H4Y7pw6lbzsm8Vxf3vlu017Fn36dUbm/bnz3rEOETSwq8adOzfhMSOGJ27etGfJF9+WK8sWfvYhaiS2g6hRmjmUHVZz9ZeD8FGPG/WZn0+LoIDIxJGflpTK7qads+41W0y9X3rd3c0H0tC142AoCOWKTNj+6+3jUZE9YYunNKh71xEtw2IQlXjOHHU57YMoFlcd2yGEmqX6zr79O2Nje44bOzEoqFn79p0goBC41NTbDe+q8Sgni8/n9+83OMA/MDIiasH8pTOmz0aNpJ4gVpg4PKrONP0tNzU4IFIgcLV+6+HuK/EIkOVn1DzA3+e/y0IKBWK41ekqTCZjUXEupLbmMcGBbRCVnAQcDf1LxNpMJlNWdmZkRNuaLa1aVb2fD7MyGthV+xk6tO8MpcPbMycfTf4xXyGHihviiBpJvWljIaoGdbU6tVzx4INPX6zZYjYblRVFNd86Of3PEdRQQRgM2qrt3D+28/nUdqQs5qoaGjkQrU4L76RQ+MdhK0JB1Xuo1Woa2FX7GYKDm69e+f0Pe7Zu2Liq4p+fR0REQRuxsbJoO4hCMdds1CFqODuLQoLbjxz6P80LHq+hYDnxqg480+r/6MlqtRWISmaDWSR2qFWgBM4CNput0fyxxpq6+r5I5NLArjpPEhYW/vG8xWaz+e7dW5u/Xzvvo5n79hx3cmqEYT7bVbPQlWM2UjWi2ywoqqgkVyoJ9PZqbv2Cwkfs6tnAjzhxeR7ufvnVjUWrjKx/IyoZdGahmH4Hn9tk7XNwudwWYS3vpt6q2X4v7Q6qroUb2FX7edLTU9Oqt3M4HGhHTpo4rby8DL5QY7AdRLGE68SjqmJ6oXO8Xq/ZfXCRTP6gsOi30//avGx1Yq4sreGf6tA2LvVeyrXrh/IVD1Mu75TnZyDKWCyVLu5cBygR+dVu37mR+fABNARHjXr12rVLMEajUOTfvHV91Zpl0dEdW1enrYFdNX7+95WP5s9KuXBWJs+DJzx4cLevj59U6okag+332s2TZ9KZdRUGZ9fGH0qEIcOpk9Ymn1q9ZtObbDbH1zts4rhlzYLaNvxTf3t5slpTdvTESkulJaJl7Ctxb23bMxfuIwoon6g9vB1kVikxYcLuPVuvXr24Y/uhvn366/U6SNvGTauh2n0xtteUKe9YH9bArhqvjpsEvcakpBVFxYXwmKio6KVLVrIaqSVd72pgV5OL83IqvUKZeH67PK2gSx+X8A6uCDMntir8w1xC2tL1eKgfVz0eOtXfzdPGP3m9U3wtokWVJocav3h2LJY5pA1ZJtSu6m0GeQU6C4SV5U/Ubj62PxKY8IC2nc1dznwXnd72XK2PV8g/3mzMQzk+/rxPfbssZhObY+MPhDHIN8evrO+nCrNLQyIFXB5dl5ihqYba4z2Ge+5fIasviK4uklnTt9vcZTTq64wF1uA09hE99f0OwGDU82z9GlxuvQ1fi9lS+Kh81Ax7LF9O1NZQEN2kThExLsWFFa5eNlpLHA5X4uGPmlrj/g7K/PJeoxqnG0g8l6dUQN0HeWqKVJoyqga3sVKer3QRWSJjyLWGmsDTW0JjZgX+dlNh1Dl4x6VModKWqPqO9UZEU3imJvmUL0MzL+c6cLlYrlAhnTphThAimsgzBREGLacva6GUlSifUDvD2yRKc0t5LO2waU3f3mWy5xikgAJDKjVnX8tTFjjIxclKZcr75x+HtOIOmOCLiCb1fNOpsYOlkTGuF34sLsrSVHKcxF4iOq5DolXqKwo1Fr3e099p4KfN+AIHObiB1p57Xt/Dmzd0ip8iR5d5S5V15wlfyLVYWBwep3qtTi7C8tR0NptlNJgsBpPJYDZojXwBO7y9S8uOXmRlRHz8yQNMfJs7w9dLwzxLFIbyoqrTO9TlJrPJbDbhGESeM5vNYYvEQqGY4xnAc3Fj6mmyGPurRzpJfHnwhQjiryGXoqUTkRuX1oseSHxhxtV2nUmm9ulEIGIXyfSInowGS16G2s3Tdv1JgkgnPs2cjXq6LspTotA3cIgnCSKdBLUUsljo5jlaLlZ2bpc8dki9i+bjdb1m4llcOFhoNFaGtRNL/Wmwqj6MqJQX6v+1W/HaR8Gi+scrSBBpKfVqedo
VpV5j1mmoWhmmUXgF8ssKDCFtRbGDPRu+nCUJIo3BR2fQYR3ESkuls+iZJq5IEAkskHFEAgskiAQWSBAJLJAgElggQSSwQIJIYOE/AAAA//9IOO73AAAABklEQVQDAFPPIzkUheU2AAAAAElFTkSuQmCC",
|
829 |
+
"text/plain": [
|
830 |
+
"<IPython.core.display.Image object>"
|
831 |
+
]
|
832 |
+
},
|
833 |
+
"metadata": {},
|
834 |
+
"output_type": "display_data"
|
835 |
+
}
|
836 |
+
],
|
837 |
+
"source": [
|
838 |
+
"from IPython.display import Image, display\n",
|
839 |
+
"\n",
|
840 |
+
"display(Image(graph.get_graph(xray=True).draw_mermaid_png()))"
|
841 |
+
]
|
842 |
+
},
|
843 |
+
{
|
844 |
+
"cell_type": "code",
|
845 |
+
"execution_count": 24,
|
846 |
+
"id": "988462ef",
|
847 |
+
"metadata": {},
|
848 |
+
"outputs": [],
|
849 |
+
"source": [
|
850 |
+
"question = \"\"\n",
|
851 |
+
"messages = [HumanMessage(content=question)]\n",
|
852 |
+
"messages = graph.invoke({\"messages\": messages})"
|
853 |
+
]
|
854 |
+
},
|
855 |
+
{
|
856 |
+
"cell_type": "code",
|
857 |
+
"execution_count": 26,
|
858 |
+
"id": "ef3ac15c",
|
859 |
+
"metadata": {},
|
860 |
+
"outputs": [
|
861 |
+
{
|
862 |
+
"name": "stdout",
|
863 |
+
"output_type": "stream",
|
864 |
+
"text": [
|
865 |
+
"================================\u001b[1m Human Message \u001b[0m=================================\n",
|
866 |
+
"\n",
|
867 |
+
"\n",
|
868 |
+
"==================================\u001b[1m Ai Message \u001b[0m==================================\n",
|
869 |
+
"\n",
|
870 |
+
"It seems like your message was empty. Could you please provide more details or ask a question? I'm here to help!\n"
|
871 |
+
]
|
872 |
+
}
|
873 |
+
],
|
874 |
+
"source": [
|
875 |
+
"for m in messages['messages']:\n",
|
876 |
+
" m.pretty_print()"
|
877 |
+
]
|
878 |
+
}
|
879 |
+
],
|
880 |
+
"metadata": {
|
881 |
+
"kernelspec": {
|
882 |
+
"display_name": "3.12.9",
|
883 |
+
"language": "python",
|
884 |
+
"name": "python3"
|
885 |
+
},
|
886 |
+
"language_info": {
|
887 |
+
"codemirror_mode": {
|
888 |
+
"name": "ipython",
|
889 |
+
"version": 3
|
890 |
+
},
|
891 |
+
"file_extension": ".py",
|
892 |
+
"mimetype": "text/x-python",
|
893 |
+
"name": "python",
|
894 |
+
"nbconvert_exporter": "python",
|
895 |
+
"pygments_lexer": "ipython3",
|
896 |
+
"version": "3.12.9"
|
897 |
+
}
|
898 |
+
},
|
899 |
+
"nbformat": 4,
|
900 |
+
"nbformat_minor": 5
|
901 |
+
}
|