"""
agent.py – Gemini-smolagents baseline using the google-genai SDK
-----------------------------------------------------------------
Environment variables
---------------------
GOOGLE_API_KEY            API key from Google AI Studio
Optional:
    GAIA_API_URL          GAIA evaluation endpoint (default: official URL)

This file defines:
    • GeminiModel      – wraps google-genai for smolagents
    • gaia_file_reader – custom tool to fetch <file:xyz> attachments
    • GeminiAgent      – CodeAgent with Python / Search / File tools + Gemini model
"""

import os
import re
import base64
import mimetypes
import requests
import google.genai as genai
from google.genai import types as gtypes
from smolagents import (
    CodeAgent,
    DuckDuckGoSearchTool,
    PythonInterpreterTool,
    Model,
    tool,
)

# --------------------------------------------------------------------------- #
# Constants & helpers
# --------------------------------------------------------------------------- #
DEFAULT_API_URL = os.getenv(
    "GAIA_API_URL", "https://agents-course-unit4-scoring.hf.space"
)
FILE_TAG = re.compile(r"<file:([^>]+)>")
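# Illustrative example (hypothetical id): FILE_TAG.findall("Transcribe <file:abc123.mp3>") -> ["abc123.mp3"]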


def _download_file(file_id: str) -> bytes:
    """Download the attachment for a GAIA task."""
    url = f"{DEFAULT_API_URL}/files/{file_id}"
    resp = requests.get(url, timeout=30)
    resp.raise_for_status()
    return resp.content


# --------------------------------------------------------------------------- #
# Model wrapper
# --------------------------------------------------------------------------- #
class GeminiModel(Model):
    """
    Thin adapter around google-genai.Client so it can be used by smolagents.
    """

    def __init__(
        self,
        model_name: str = "gemini-2.0-flash",
        temperature: float = 0.1,
        max_tokens: int = 128,
    ):
        api_key = os.getenv("GOOGLE_API_KEY")
        if not api_key:
            raise EnvironmentError("GOOGLE_API_KEY is not set.")
        # One client per process is enough
        self.client = genai.Client(api_key=api_key)
        self.model_name = model_name
        self.temperature = temperature
        self.max_tokens = max_tokens

    # ---------- Text-only convenience ---------- #
    def call(self, prompt: str, **kwargs) -> str:
        response = self.client.models.generate_content(
            model=self.model_name,
            contents=prompt,
            config=gtypes.GenerateContentConfig(
                temperature=self.temperature,
                max_output_tokens=self.max_tokens,
            ),
        )
        return response.text.strip()

    # ---------- smolagents will use this when messages are present ---------- #
    def call_messages(self, messages, **kwargs) -> str:
        """
        `messages` is a list of dictionaries with keys 'role' | 'content'.
        If `content` is already a list[types.Content], we forward it as-is.
        Otherwise we concatenate to a single string prompt.
        """
        sys_msg, user_msg = messages  # CodeAgent always sends two
        if isinstance(user_msg["content"], list):
            # Multimodal path – pass system text first, then structured user parts
            contents = [sys_msg["content"], *user_msg["content"]]
        else:
            # Text prompt path
            contents = f"{sys_msg['content']}\n\n{user_msg['content']}"
        response = self.client.models.generate_content(
            model=self.model_name,
            contents=contents,
            config=gtypes.GenerateContentConfig(
                temperature=self.temperature,
                max_output_tokens=self.max_tokens,
            ),
        )
        return response.text.strip()


# --------------------------------------------------------------------------- #
# Custom tool: fetch GAIA attachments
# --------------------------------------------------------------------------- #
@tool
def gaia_file_reader(file_id: str) -> str:
    """
    Download the attachment referenced in a question as <file:id>.

    Args:
        file_id: Identifier of the GAIA attachment to download.

    Returns:
        Decoded text for utf-8 files (and JSON), or a base64 string for
        binary files (images, PDFs, etc.).
    """
    try:
        raw = _download_file(file_id)
        mime = mimetypes.guess_type(file_id)[0] or "application/octet-stream"
        if mime.startswith("text") or mime in ("application/json",):
            return raw.decode(errors="ignore")
        return base64.b64encode(raw).decode()
    except Exception as exc:
        return f"ERROR downloading {file_id}: {exc}"


# --------------------------------------------------------------------------- #
# Final agent class
# --------------------------------------------------------------------------- #
class GeminiAgent:
    """
    Exposed to `app.py` – instantiated once and then called per question.
    """

    def __init__(self):
        model = GeminiModel()
        tools = [
            PythonInterpreterTool(),  # maths, csv, small image ops
            DuckDuckGoSearchTool(),   # quick web look-ups
            gaia_file_reader,         # our custom file tool
        ]
        self.system_prompt = (
            "You are a concise, highly accurate assistant. "
            "Unless explicitly required, reply with ONE short sentence. "
            "Use the provided tools if needed. "
            "All answers are graded by exact string match."
        )
        self.agent = CodeAgent(
            model=model,
            tools=tools,
            system_prompt=self.system_prompt,
        )
        print("βœ… GeminiAgent (google-genai) initialised.")

    # ----------- Main entry point for app.py ----------- #
    def __call__(self, question: str) -> str:
        file_ids = FILE_TAG.findall(question)
        if file_ids:
            # Build multimodal user content
            parts: list[gtypes.Part] = []
            text_part = FILE_TAG.sub("", question).strip()
            if text_part:
                parts.append(gtypes.Part.from_text(text=text_part))
            for fid in file_ids:
                try:
                    img_bytes = _download_file(fid)
                    mime = mimetypes.guess_type(fid)[0] or "image/png"
                    parts.append(gtypes.Part.from_bytes(data=img_bytes, mime_type=mime))
                except Exception as exc:
                    parts.append(gtypes.Part.from_text(text=f"[FILE {fid} ERROR: {exc}]"))
            messages = [
                {"role": "system", "content": self.system_prompt},
                {"role": "user", "content": parts},
            ]
            answer = self.agent.model.call_messages(messages)
        else:
            answer = self.agent.run(question)
        # Trim trailing punctuation – GAIA scoring is case-/punctuation-sensitive
        return answer.rstrip(" .\n\r\t")
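

# --------------------------------------------------------------------------- #
# Local smoke test (sketch)
# --------------------------------------------------------------------------- #
# Minimal manual check, assuming GOOGLE_API_KEY is set and network access is
# available; the sample question below is illustrative only and is not part
# of the GAIA benchmark.
if __name__ == "__main__":
    agent = GeminiAgent()
    print(agent("What is the capital of France?"))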