import os
import gradio as gr
import requests
import pandas as pd

# --- Constants ---
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

# --- Enhanced Agent Definition ---
class GAIAAgent:
    def __init__(self):
        print("GAIAAgent initialized with DeepSeek-R1 via the Novita provider.")
        # Load the hosted model as a callable Gradio interface.
        self.model = gr.load(
            "models/deepseek-ai/DeepSeek-R1",
            provider="novita",
        )

    def format_prompt(self, question: str, file_content: str = None) -> str:
        prompt = (
            "You are a helpful AI agent solving a question from the GAIA benchmark. "
            "Respond only with the final answer."
        )
        if file_content:
            prompt += f"\nAttached File Content:\n{file_content}\n"
        prompt += f"\nQuestion: {question}\nAnswer:"
        return prompt

    def read_file(self, filename: str) -> str:
        filepath = os.path.join("./", filename)
        if filename.endswith(".txt") and os.path.exists(filepath):
            with open(filepath, "r") as file:
                return file.read()[:1000]  # limit to 1000 chars
        return ""

    def __call__(self, question: str, file_name: str = None) -> str:
        file_content = self.read_file(file_name) if file_name else None
        prompt = self.format_prompt(question, file_content)
        try:
            print("Prompt sent to model:", prompt)
            result = self.model(prompt)
            print("Model raw result:", result)
            if not result or not isinstance(result, str):
                return "AGENT ERROR: Empty or invalid response"
            return result.strip().split("Answer:")[-1].strip()
        except Exception as e:
            print(f"Model inference failed: {e}")
            return f"AGENT ERROR: {e}"
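

# --- Evaluation & Submission Logic ---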
def run_and_submit_all(profile: gr.OAuthProfile | None):
    """Fetch all questions, run the GAIAAgent on each, submit the answers, and return the status plus a results table."""
    space_id = os.getenv("SPACE_ID")

    if profile:
        username = f"{profile.username}"
        print(f"User logged in: {username}")
    else:
        print("User not logged in.")
        return "Please Login to Hugging Face with the button.", None

    api_url = DEFAULT_API_URL
    questions_url = f"{api_url}/questions"
    submit_url = f"{api_url}/submit"

    try:
        agent = GAIAAgent()
    except Exception as e:
        return f"Error initializing agent: {e}", None

    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"

    try:
        response = requests.get(questions_url, timeout=15)
        response.raise_for_status()
        questions_data = response.json()
    except Exception as e:
        return f"Error fetching questions: {e}", None

    results_log = []
    answers_payload = []
    for item in questions_data:
        task_id = item.get("task_id")
        question_text = item.get("question")
        file_name = item.get("file_name")
        if not task_id or question_text is None:
            continue
        try:
            submitted_answer = agent(question_text, file_name)
            answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
        except Exception as e:
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": f"AGENT ERROR: {e}"})

    if not answers_payload:
        return "Agent did not produce any answers to submit.", pd.DataFrame(results_log)

    submission_data = {"username": username.strip(), "agent_code": agent_code, "answers": answers_payload}
    try:
        response = requests.post(submit_url, json=submission_data, timeout=60)
        response.raise_for_status()
        result_data = response.json()
        final_status = (
            f"Submission Successful!\n"
            f"User: {result_data.get('username')}\n"
            f"Overall Score: {result_data.get('score', 'N/A')}% "
            f"({result_data.get('correct_count', '?')}/{result_data.get('total_attempted', '?')} correct)\n"
            f"Message: {result_data.get('message', 'No message received.')}"
        )
        return final_status, pd.DataFrame(results_log)
    except Exception as e:
        return f"Submission Failed: {e}", pd.DataFrame(results_log)
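

# --- Gradio Interface ---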
with gr.Blocks() as demo:
    gr.Markdown("# GAIA Agent Evaluation Runner")
    gr.Markdown("""
    **Instructions:**
    1. Log in to your Hugging Face account.
    2. Click the button to run the agent and submit answers.
    3. Your score will be printed below.
    """)

    gr.LoginButton()
    run_button = gr.Button("Run Evaluation & Submit All Answers")
    status_output = gr.Textbox(label="Run Status / Submission Result", lines=5, interactive=False)
    results_table = gr.DataFrame(label="Questions and Agent Answers", wrap=True)

    run_button.click(
        fn=run_and_submit_all,
        outputs=[status_output, results_table]
    )
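

# Launch the Gradio app when this script is run directly (e.g. on a Hugging Face Space).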
if __name__ == "__main__":
    print("Launching GAIA agent app...")
    demo.launch(debug=True, share=False)