import os

import requests
from smolagents import CodeAgent, OpenAIServerModel, tool
# ------------------------
# Constants
# ------------------------
API_URL = "https://agents-course-unit4-scoring.hf.space"
# ------------------------
# Tool definitions
# ------------------------
@tool
def fetch_questions() -> list:
    """Fetch the full list of GAIA evaluation questions."""
    response = requests.get(f"{API_URL}/questions", timeout=15)
    response.raise_for_status()
    return response.json()


@tool
def fetch_random_question() -> dict:
    """Fetch a single random GAIA question."""
    response = requests.get(f"{API_URL}/random-question", timeout=15)
    response.raise_for_status()
    return response.json()


@tool
def fetch_file(task_id: str) -> str:
    """Download the file attached to a GAIA task and return its local path.

    Args:
        task_id: Identifier of the GAIA task whose attachment should be downloaded.
    """
    response = requests.get(f"{API_URL}/files/{task_id}", timeout=15)
    response.raise_for_status()
    # Save to disk and return the path: a string is easier for the agent to
    # pass around than raw bytes.
    local_path = f"{task_id}_file"
    with open(local_path, "wb") as f:
        f.write(response.content)
    return local_path


@tool
def submit_answers(username: str, agent_code: str, answers: list) -> dict:
    """Submit the agent's answers to GAIA and return the scoring result.

    Args:
        username: Hugging Face username used for the leaderboard entry.
        agent_code: Link or reference to the agent's source code.
        answers: List of answer payloads, one per task, as expected by the /submit endpoint.
    """
    payload = {
        "username": username,
        "agent_code": agent_code,
        "answers": answers,
    }
    response = requests.post(f"{API_URL}/submit", json=payload, timeout=60)
    response.raise_for_status()
    return response.json()
# ------------------------
# smolagents setup
# ------------------------
# The @tool-decorated functions above are themselves Tool instances and can be
# passed to the agent directly.
tools = [
    fetch_questions,
    fetch_random_question,
    fetch_file,
    submit_answers,
]
# Prompt applied per question at run time; smolagents agents receive the task
# string through agent.run() rather than a constructor-level prompt template.
PROMPT_TEMPLATE = (
    "Here is a GAIA question: {question}\n"
    "Provide the exact answer, nothing else."
)


def create_agent() -> CodeAgent:
    """
    Factory that returns a configured smolagents CodeAgent.
    Assumes OPENAI_API_KEY is set in the environment for LLM access.
    """
    model = OpenAIServerModel(
        model_id="gpt-3.5-turbo",             # LLM model
        api_key=os.getenv("OPENAI_API_KEY"),  # OpenAI key for the LLM
    )
    return CodeAgent(tools=tools, model=model)
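

# ------------------------
# Example usage (sketch)
# ------------------------
# Minimal run loop, assuming OPENAI_API_KEY is set. GAIA_USERNAME and
# AGENT_CODE_URL are placeholder environment variables for the leaderboard
# submission, and the answer shape ({"task_id", "submitted_answer"}) follows
# the course's scoring Space; adjust both if your setup differs.
if __name__ == "__main__":
    agent = create_agent()

    answers = []
    for item in fetch_questions():
        task_id = item.get("task_id")
        question = item.get("question", "")
        # Ask the agent for the exact answer to this question.
        result = agent.run(PROMPT_TEMPLATE.format(question=question))
        answers.append({"task_id": task_id, "submitted_answer": str(result)})

    username = os.getenv("GAIA_USERNAME")
    agent_code = os.getenv("AGENT_CODE_URL")
    if username and agent_code:
        scoring = submit_answers(
            username=username, agent_code=agent_code, answers=answers
        )
        print(scoring)
    else:
        print(f"Collected {len(answers)} answers; set GAIA_USERNAME and "
              "AGENT_CODE_URL to submit them.")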