AC-Angelo93 committed on
Commit
67d8dd6
·
verified ·
1 Parent(s): b9c670e

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +33 -48
agent.py CHANGED
@@ -1,16 +1,30 @@
1
  # agent.py
2
 
3
- import os, requests
 
4
  from langchain.tools import tool
5
  from langchain.agents import initialize_agent, AgentType
6
-
7
  from langchain_community.document_loaders import WikipediaLoader
8
 
9
- # 1) Define your tools
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
 
11
  @tool
12
  def calculator(expr: str) -> str:
13
- """Safely evaluate a math expression."""
14
  try:
15
  return str(eval(expr, {"__builtins__": {}}))
16
  except Exception as e:
@@ -18,62 +32,33 @@ def calculator(expr: str) -> str:
18
 
19
  @tool
20
  def wiki_search(query: str) -> str:
21
- """Fetch up to 2 Wikipedia pages for the query."""
22
  docs = WikipediaLoader(query=query, load_max_docs=2).load()
23
  return "\n\n".join(d.page_content for d in docs)
24
 
25
- # 2) Build your Agent
26
 
27
  class BasicAgent:
28
  def __init__(self):
29
- token = os.environ.get("HF_TOKEN")
30
- assert token, "HF_TOKEN secret is missing!"
31
- # We call the free inference endpoint directly
32
- self.api_url = "https://api-inference.huggingface.co/models/google/flan-t5-large"
33
- self.headers = {"Authorization": f"Bearer {token}"}
34
-
35
- # LangChain’s HF wrapper
36
- from langchain.llms import HuggingFaceEndpoint
37
- self.llm = HuggingFaceEndpoint(
38
- endpoint_url=self.api_url,
39
- headers=self.headers,
40
- model_kwargs={"temperature": 0.0, "max_new_tokens": 200},
41
- )
42
-
43
- # Register tools and initialize a React agent
44
  self.agent = initialize_agent(
45
  [calculator, wiki_search],
46
- self.llm,
 
47
  agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
48
- verbose=True, # see what it’s doing in the logs
49
- max_iterations=5, # let it call up to 5 tools
50
  early_stopping_method="generate"
51
  )
52
 
53
  def __call__(self, question: str) -> str:
54
- # (Optional) Inject 3 hard-coded examples to guide format
55
- EXAMPLES = """
56
- Q: What is 2+2?
57
- A: 4
58
-
59
- Q: If a car goes 60 km/h for 2 hours, how far?
60
- A: 120
61
-
62
- Q: What is the capital of France?
63
- A: Paris
64
- """
65
  prompt = (
66
- f"Answer the following question using the tools below. "
67
- f"First think (internally), then output **only** the final answer—no chain-of-thought.\n\n"
68
- f"Tools:\n"
69
- f" calculator(expr: str) -> str\n"
70
- f" • wiki_search(query: str) -> str\n\n"
71
- f"### Examples ###{EXAMPLES}\n"
72
- f"### New Question ###\n{question}"
73
  )
74
-
75
- # Run the agent
76
- raw = self.agent.run(prompt)
77
-
78
- # Extract the last line as the answer
79
- return raw.splitlines()[-1].strip()
 
1
  # agent.py
2
 
3
+ import os
4
+ import requests
5
  from langchain.tools import tool
6
  from langchain.agents import initialize_agent, AgentType
 
7
  from langchain_community.document_loaders import WikipediaLoader
8
 
9
# ——— 1) Gemini Client Setup ———
from google import genai

# Fail fast with a clear message if the key is absent; a bare
# os.environ["GEMINI_API_KEY"] lookup would raise an opaque KeyError
# at import time instead.
_GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY")
if not _GEMINI_API_KEY:
    raise RuntimeError("GEMINI_API_KEY environment variable is missing!")

# Initialize once at import time
GENAI_CLIENT = genai.Client(api_key=_GEMINI_API_KEY)
GEMINI_MODEL = "gemini-1.5-pro"  # or "gemini-2.0-flash", etc.

def gemini_generate(prompt: str) -> str:
    """Call Google Gemini via the GenAI SDK and return its text reply.

    Args:
        prompt: The complete prompt to send to the model.

    Returns:
        The model's plain-text response.
    """
    # Bug fix: in the google-genai SDK, generate_content lives on
    # client.models, not on the Client object itself.
    response = GENAI_CLIENT.models.generate_content(
        model=GEMINI_MODEL,
        contents=prompt,
    )
    return response.text
23
+
24
+ # ——— 2) Tools ———
25
 
26
@tool
def calculator(expr: str) -> str:
    """Safely evaluate a math expression."""
    # NOTE(review): LangChain's @tool decorator requires a docstring to
    # build the tool description — omitting it raises at import time.
    # SECURITY: eval on model-supplied input is risky even with builtins
    # stripped ({"__builtins__": {}}); consider ast.literal_eval or a
    # dedicated math parser.
    try:
        return str(eval(expr, {"__builtins__": {}}))
    except Exception as e:
        # Return the error as text so the agent can observe and recover.
        return f"Error: {e}"
 
32
 
33
@tool
def wiki_search(query: str) -> str:
    """Fetch up to 2 Wikipedia pages for the query."""
    # NOTE(review): the docstring above is required — LangChain's @tool
    # decorator uses it as the tool description and raises without one.
    docs = WikipediaLoader(query=query, load_max_docs=2).load()
    # Concatenate the page bodies, separated by blank lines.
    return "\n\n".join(d.page_content for d in docs)
37
 
38
+ # ——— 3) Agent Definition ———
39
 
40
  class BasicAgent:
41
  def __init__(self):
42
+ # We’re not using Hugging Face anymore—Gemini handles LLM calls
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
  self.agent = initialize_agent(
44
  [calculator, wiki_search],
45
+ # Wrap our gemini_generate as an LLM
46
+ lambda prompt: gemini_generate(prompt),
47
  agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
48
+ verbose=True,
49
+ max_iterations=5,
50
  early_stopping_method="generate"
51
  )
52
 
53
  def __call__(self, question: str) -> str:
54
+ # Prepend your toy examples or system prompt if you like
 
 
 
 
 
 
 
 
 
 
55
  prompt = (
56
+ "You have two tools:\n"
57
+ " calculator(expr)\n"
58
+ " • wiki_search(query)\n"
59
+ "Use them internally, then OUTPUT ONLY the final answer.\n\n"
60
+ f"Question: {question}"
 
 
61
  )
62
+ result = self.agent.run(prompt)
63
+ # Strip off anything but the last line
64
+ return result.splitlines()[-1].strip()