"""
agent.py – Gemini-smolagents baseline using google-genai SDK
------------------------------------------------------------
Environment variables
---------------------
GOOGLE_API_KEY API key from Google AI Studio
Optional:
GAIA_API_URL GAIA evaluation endpoint (default: official URL)
This file defines:
• GeminiModel – wraps google-genai for smolagents
• gaia_file_reader – custom tool to fetch <file:xyz> attachments
• GeminiAgent – CodeAgent with Python / Search / File tools + Gemini model
"""
import os
import re
import base64
import mimetypes
import requests
import google.genai as genai
from google.genai import types as gtypes
from smolagents import (
CodeAgent,
DuckDuckGoSearchTool,
PythonInterpreterTool,
tool,
)
# --------------------------------------------------------------------------- #
# Constants & helpers
# --------------------------------------------------------------------------- #
# GAIA evaluation endpoint; override with the GAIA_API_URL environment variable.
DEFAULT_API_URL = os.getenv(
    "GAIA_API_URL", "https://agents-course-unit4-scoring.hf.space"
)
# Matches <file:xyz> attachment placeholders embedded in GAIA question prompts;
# group 1 captures the file identifier.
FILE_TAG = re.compile(r"<file:([^>]+)>")
def _download_file(file_id: str) -> bytes:
    """Fetch the raw bytes of a GAIA task attachment.

    Args:
        file_id: Identifier of the attachment on the GAIA server.

    Returns:
        The attachment content as bytes.

    Raises:
        requests.HTTPError: If the server answers with an error status.
    """
    response = requests.get(f"{DEFAULT_API_URL}/files/{file_id}", timeout=30)
    response.raise_for_status()
    return response.content
# --------------------------------------------------------------------------- #
# Model wrapper
# --------------------------------------------------------------------------- #
class GeminiModel:
    """
    Thin adapter around google-genai.Client so it can be used by smolagents.

    Exposes two entry points: `call` for a plain text prompt and
    `call_messages` for the (system, user) message pairs smolagents emits.
    """

    def __init__(
        self,
        model_name: str = "gemini-2.0-flash",
        temperature: float = 0.1,
        max_tokens: int = 128,
    ):
        api_key = os.getenv("GOOGLE_API_KEY")
        if not api_key:
            raise EnvironmentError("GOOGLE_API_KEY is not set.")
        # One client per process is enough
        self.client = genai.Client(api_key=api_key)
        self.model_name = model_name
        self.temperature = temperature
        self.max_tokens = max_tokens

    # ---------- shared SDK call ---------- #
    def _generate(self, contents) -> str:
        """Run one generate_content call and return the stripped text."""
        response = self.client.models.generate_content(
            model=self.model_name,
            contents=contents,
            # BUGFIX: the google-genai SDK takes `config=`; the kwarg
            # `generation_config=` belongs to the legacy google-generativeai
            # package and raises TypeError here.
            config=gtypes.GenerateContentConfig(
                temperature=self.temperature,
                max_output_tokens=self.max_tokens,
            ),
        )
        # response.text is None for blocked or empty candidates; return ""
        # instead of crashing on None.strip().
        return (response.text or "").strip()

    # ---------- Text-only convenience ---------- #
    def call(self, prompt: str, **kwargs) -> str:
        """Generate a completion for a single text prompt."""
        return self._generate(prompt)

    # ---------- smolagents will use this when messages are present ---------- #
    def call_messages(self, messages, **kwargs) -> str:
        """
        `messages` is a list of dictionaries with keys 'role' | 'content'.
        If `content` is already a list[types.Content], we forward it as-is.
        Otherwise we concatenate to a single string prompt.
        """
        sys_msg, user_msg = messages  # CodeAgent always sends two
        if isinstance(user_msg["content"], list):
            # Multimodal path – pass system text first, then structured user parts
            contents = [sys_msg["content"], *user_msg["content"]]
        else:
            # Text prompt path
            contents = f"{sys_msg['content']}\n\n{user_msg['content']}"
        return self._generate(contents)
# --------------------------------------------------------------------------- #
# Custom tool: fetch GAIA attachments
# --------------------------------------------------------------------------- #
@tool
def gaia_file_reader(file_id: str) -> str:
    """
    Download a GAIA attachment and return its contents.
    Args:
        file_id: The identifier that appears inside a <file:...> placeholder
            in the GAIA question prompt.
    Returns:
        A base-64 string for binary files (images, PDF, etc.) or UTF-8 text for
        plain-text files.
    """
    # Best-effort tool: any failure is reported back to the agent as text
    # rather than raised, so the run can continue.
    try:
        payload = _download_file(file_id)
        guessed = mimetypes.guess_type(file_id)[0]
        mime = guessed if guessed else "application/octet-stream"
        texty = mime.startswith("text") or mime == "application/json"
        if texty:
            return payload.decode(errors="ignore")
        return base64.b64encode(payload).decode()
    except Exception as exc:
        return f"ERROR downloading {file_id}: {exc}"
# --------------------------------------------------------------------------- #
# Final agent class
# --------------------------------------------------------------------------- #
class GeminiAgent:
    """GAIA agent: a smolagents CodeAgent driving Gemini, with a direct
    multimodal path for questions that reference <file:...> attachments."""

    def __init__(self):
        # Kept as an attribute because newer smolagents CodeAgent no longer
        # accepts a `system_prompt` kwarg; we prepend it to prompts manually.
        self.system_prompt = (
            "You are a concise, highly accurate assistant. "
            "Unless explicitly required, reply with ONE short sentence. "
            "Use the provided tools if needed. "
            "All answers are graded by exact string match."
        )
        model = GeminiModel()
        tools = [
            PythonInterpreterTool(),
            DuckDuckGoSearchTool(),
            gaia_file_reader,
        ]
        self.agent = CodeAgent(
            model=model,
            tools=tools,
            # any other kwargs (executor_type, additional_authorized_imports…)
            verbosity_level=0,
        )
        print("✅ GeminiAgent ready.")

    def __call__(self, question: str) -> str:
        """Answer one GAIA question and return the trimmed answer string."""
        file_ids = FILE_TAG.findall(question)
        if file_ids:
            # -------- multimodal branch -------- #
            answer = self._answer_with_files(question, file_ids)
        else:
            # -------- text-only branch: prepend system prompt -------- #
            # BUGFIX: use the documented CodeAgent.run() API; calling the
            # agent directly invokes the managed-agent __call__ wrapper,
            # which decorates the answer with report text and would break
            # exact-string-match grading.
            answer = self.agent.run(f"{self.system_prompt}\n\n{question}")
        # Coerce to str (run() may return a non-string final answer) and
        # strip trailing punctuation/whitespace for exact-match grading.
        return str(answer).rstrip(" .\n\r\t")

    def _answer_with_files(self, question: str, file_ids: list[str]) -> str:
        """Bypass the CodeAgent: send question text + attachment parts to Gemini."""
        parts: list[gtypes.Part] = []
        text_part = FILE_TAG.sub("", question).strip()
        if text_part:
            # BUGFIX: google-genai Part.from_text takes keyword-only `text=`;
            # the positional form raises TypeError.
            parts.append(gtypes.Part.from_text(text=text_part))
        for fid in file_ids:
            try:
                blob = _download_file(fid)
                mime = mimetypes.guess_type(fid)[0] or "image/png"
                parts.append(
                    gtypes.Part.from_bytes(data=blob, mime_type=mime)
                )
            except Exception as exc:
                # Surface download failures to the model instead of crashing.
                parts.append(
                    gtypes.Part.from_text(text=f"[FILE {fid} ERROR: {exc}]")
                )
        messages = [
            {"role": "system", "content": self.system_prompt},
            {"role": "user", "content": parts},
        ]
        return self.agent.model.call_messages(messages)