Update agent.py
agent.py
CHANGED
@@ -8,26 +8,18 @@ from langchain.tools import tool
 from langchain.agents import initialize_agent, AgentType
 from langchain_community.document_loaders import WikipediaLoader
 from langchain.llms.base import LLM
-from pydantic import Field
 
-# ─── 1) Gemini
-
-#
-GENAI_CLIENT = genai.Client(api_key=os.environ["GEMINI_API_KEY"])
-GEMINI_MODEL = "gemini-1.5-pro"  # or whichever you have access to
+# ─── 1) Gemini SDK client ───
+GENAI_CLIENT = genai.Client(api_key=os.environ.get("GEMINI_API_KEY", ""))
+GEMINI_MODEL = "gemini-1.5-pro"  # or your trial model
 
+# ─── 2) LLM wrapper with class-level defaults ───
 class GeminiLLM(LLM):
     """
-    A LangChain-compatible wrapper around Google Gemini
+    A LangChain-compatible LLM wrapper around Google Gemini.
     """
-
-
-    model: str = Field(...)
-
-    def __init__(self, client: genai.Client, model: str, **kwargs):
-        super().__init__(**kwargs)
-        object.__setattr__(self, "client", client)
-        object.__setattr__(self, "model", model)
+    client: genai.Client = GENAI_CLIENT
+    model: str = GEMINI_MODEL
 
     @property
     def _llm_type(self) -> str:
@@ -42,14 +34,13 @@ class GeminiLLM(LLM):
         prompt: str,
         stop: Optional[List[str]] = None,
     ) -> str:
-        # Use the google-genai SDK
         response = self.client.generate_content(
             model=self.model,
             contents=[prompt]
         )
         return response.text
 
-# ───
+# ─── 3) Tools ───
 
 @tool
 def calculator(expr: str) -> str:
@@ -64,39 +55,36 @@ def calculator(expr: str) -> str:
 @tool
 def wiki_search(query: str) -> str:
     """
-
+    Loads up to 2 Wikipedia pages for the query and concatenates their text.
     """
     docs = WikipediaLoader(query=query, load_max_docs=2).load()
-    return "\n\n".join(
+    return "\n\n".join(d.page_content for d in docs)
 
-# ───
+# ─── 4) Agent ───
 
 class BasicAgent:
     """
-
-    with access to calculator and wiki_search tools.
+    Zero-Shot React agent powered by Gemini + two tools.
     """
     def __init__(self):
-        # Ensure your
-        assert "GEMINI_API_KEY"
+        # Ensure your key is present
+        assert os.environ.get("GEMINI_API_KEY"), "❌ GEMINI_API_KEY not set in Secrets"
 
-        #
-        gemini_llm = GeminiLLM(
+        # Instantiate our LLM (defaults are on the class)
+        gemini_llm = GeminiLLM()
 
-        #
+        # Build the agent
         self.agent = initialize_agent(
             tools=[calculator, wiki_search],
             llm=gemini_llm,
             agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
-            verbose=True,
+            verbose=True,
             max_iterations=5,
             early_stopping_method="generate",
         )
 
     def __call__(self, question: str) -> str:
-        """
-        Runs the agent on the question and returns only the final answer line.
-        """
+        # Build the prompt
         prompt = (
             "You have access to two tools:\n"
             " • calculator(expr)\n"
@@ -104,8 +92,7 @@ class BasicAgent:
             "Think internally; output ONLY the final answer.\n\n"
             f"Question: {question}"
         )
-
-
-
-        lines
-        return lines[-1] if lines else raw_output.strip()
+        # Run & extract
+        raw = self.agent.run(prompt)
+        lines = [l.strip() for l in raw.splitlines() if l.strip()]
+        return lines[-1] if lines else raw.strip()
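
A note on the Gemini call inside GeminiLLM._call: every prompt is funneled through a single generate_content call on the shared client. For reference, a minimal standalone sketch of the same round trip with the google-genai SDK (assuming that is the genai package imported at the top of agent.py; in that SDK the call is exposed as client.models.generate_content):

import os
from google import genai

# Hedged sketch, not part of this commit: a direct Gemini round trip.
client = genai.Client(api_key=os.environ["GEMINI_API_KEY"])
response = client.models.generate_content(
    model="gemini-1.5-pro",              # mirrors GEMINI_MODEL above
    contents="Reply with the single word: pong",
)
print(response.text)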
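
The body of calculator sits outside the changed hunks, so it does not appear in this diff. Purely as a hypothetical illustration of the @tool pattern the agent relies on (not the committed implementation), a safe arithmetic tool could look roughly like this:

# Hypothetical calculator tool: evaluates basic arithmetic via the AST, no eval().
import ast
import operator
from langchain.tools import tool

_OPS = {
    ast.Add: operator.add, ast.Sub: operator.sub,
    ast.Mult: operator.mul, ast.Div: operator.truediv,
    ast.Pow: operator.pow, ast.USub: operator.neg,
}

def _eval_node(node):
    # Recursively evaluate a restricted arithmetic expression tree.
    if isinstance(node, ast.Expression):
        return _eval_node(node.body)
    if isinstance(node, ast.Constant) and isinstance(node.value, (int, float)):
        return node.value
    if isinstance(node, ast.BinOp) and type(node.op) in _OPS:
        return _OPS[type(node.op)](_eval_node(node.left), _eval_node(node.right))
    if isinstance(node, ast.UnaryOp) and type(node.op) in _OPS:
        return _OPS[type(node.op)](_eval_node(node.operand))
    raise ValueError("unsupported expression")

@tool
def calculator(expr: str) -> str:
    """Evaluate a basic arithmetic expression (e.g. '12 * 7') and return the result."""
    return str(_eval_node(ast.parse(expr, mode="eval")))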
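
With the class-level defaults in place, BasicAgent() now takes no constructor arguments. A hypothetical smoke test, assuming GEMINI_API_KEY is set in the Space secrets and this file is importable as agent:

# Hypothetical usage, not part of this commit.
from agent import BasicAgent   # assumes the module is named agent.py and is on the path

agent = BasicAgent()                      # raises the assert if GEMINI_API_KEY is missing
print(agent("What is 12 * 7?"))           # expected to route through the calculator tool
print(agent("Who founded Wikipedia?"))    # expected to route through wiki_search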
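
Finally, the new tail of __call__ trims the agent output down to its last non-empty line. A tiny illustration of just that string handling (the raw value here is made up for the example):

# Illustration only: __call__ returns the last non-empty, stripped line of the output.
raw = "Thought: the tool already gave me the product.\n\nFinal Answer: 84\n"
lines = [l.strip() for l in raw.splitlines() if l.strip()]
assert (lines[-1] if lines else raw.strip()) == "Final Answer: 84"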