import os

import requests
from smolagents import CodeAgent, OpenAIServerModel, tool

# ------------------------
# Constants
# ------------------------
API_URL = "https://agents-course-unit4-scoring.hf.space"


# ------------------------
# Tool definitions
# ------------------------
@tool
def fetch_questions() -> list:
    """
    Fetch the full list of GAIA evaluation questions.
    """
    response = requests.get(f"{API_URL}/questions", timeout=15)
    response.raise_for_status()
    return response.json()


@tool
def fetch_random_question() -> dict:
    """
    Fetch a single random GAIA question.
    """
    response = requests.get(f"{API_URL}/random-question", timeout=15)
    response.raise_for_status()
    return response.json()


@tool
def fetch_file(task_id: str) -> bytes:
    """
    Download the file associated with a given task_id.

    Args:
        task_id: Identifier of the GAIA task whose attached file should be downloaded.
    """
    response = requests.get(f"{API_URL}/files/{task_id}", timeout=15)
    response.raise_for_status()
    return response.content


@tool
def submit_answers(username: str, agent_code: str, answers: list) -> dict:
    """
    Submit the agent's answers to GAIA and return the scoring result.

    Args:
        username: Hugging Face username used for the leaderboard entry.
        agent_code: Link to the agent's source code.
        answers: List of answer payloads, one per task, to be scored.
    """
    payload = {
        "username": username,
        "agent_code": agent_code,
        "answers": answers,
    }
    response = requests.post(f"{API_URL}/submit", json=payload, timeout=60)
    response.raise_for_status()
    return response.json()


# ------------------------
# Agent factory
# ------------------------
def create_agent() -> CodeAgent:
    """
    Factory that returns a configured CodeAgent instance.
    Requires OPENAI_API_KEY in the environment.
    """
    # Initialize the LLM via the OpenAI API
    llm = OpenAIServerModel(
        model_id=os.getenv("OPENAI_MODEL_ID", "gpt-3.5-turbo"),
        api_key=os.getenv("OPENAI_API_KEY"),
    )

    # Create the agent with the defined tools
    agent = CodeAgent(
        tools=[fetch_questions, fetch_random_question, fetch_file, submit_answers],
        model=llm,
    )
    return agent
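

# ------------------------
# Example usage (sketch)
# ------------------------
# A minimal sketch of how the factory above might be exercised end to end,
# assuming the scoring API is reachable and OPENAI_API_KEY is set. The prompt
# wording is illustrative, not prescribed by the course; CodeAgent.run()
# returns the final answer produced by the agent's generated code.
if __name__ == "__main__":
    agent = create_agent()

    # Ask the agent to pull one random question via its tool and answer it.
    result = agent.run(
        "Call fetch_random_question() to get one GAIA question, "
        "then answer it as concisely as possible and return only the answer."
    )
    print(result)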