"""Basic Agent Evaluation Runner."""

import os

import gradio as gr
import pandas as pd
import requests
from langchain_core.messages import HumanMessage

from agent import build_graph

# Base URL of the scoring service; GET /questions and POST /submit are used below.
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

# Answers produced by run_agent_only(), held in memory until submission.
cached_answers = []
|
|
|
class BasicAgent:
    """A LangGraph-backed agent: builds the graph once, then answers one question per call."""

    def __init__(self):
        print("BasicAgent initialized.")
        self.graph = build_graph()

    def __call__(self, question: str) -> str:
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        messages = [HumanMessage(content=question)]
        result = self.graph.invoke({"messages": messages})
        raw_answer = result["messages"][-1].content
        # The graph is expected to reply with "FINAL ANSWER: <answer>"; strip the prefix.
        if raw_answer.startswith("FINAL ANSWER: "):
            return raw_answer[len("FINAL ANSWER: "):].strip()
        return f"Agent response did not follow FINAL ANSWER format: {raw_answer}"
|
|
|
def run_agent_only(profile: gr.OAuthProfile | None):
    """Run the agent on every question and cache the answers for later submission."""
    global cached_answers
    cached_answers = []
    results_log = []

    if not profile:
        return "Please login first.", None

    try:
        agent = BasicAgent()
    except Exception as e:
        return f"Agent Init Error: {e}", None

    questions_url = f"{DEFAULT_API_URL}/questions"
    try:
        response = requests.get(questions_url, timeout=15)
        response.raise_for_status()
        questions_data = response.json()
    except Exception as e:
        return f"Error fetching questions: {e}", None

    # The system prompt is prepended to every question so the graph replies in
    # the "FINAL ANSWER: ..." format that BasicAgent.__call__ expects.
    with open("system_prompt.txt", "r", encoding="utf-8") as f:
        system_prompt = f.read().strip()

    for item in questions_data:
        task_id = item.get("task_id")
        question = item.get("question")
        file_name = item.get("file_name")

        if not task_id or question is None:
            continue

        try:
            user_message = question
            if file_name:
                user_message += f"\n\nFile to use: {file_name}"

            full_input = system_prompt + "\n\n" + user_message
            answer = agent(full_input)
            cached_answers.append({"task_id": task_id, "submitted_answer": answer})
            results_log.append({"Task ID": task_id, "Question": question, "Submitted Answer": answer})
        except Exception as e:
            results_log.append({"Task ID": task_id, "Question": question, "Submitted Answer": f"AGENT ERROR: {e}"})

    return "Agent finished. Now click 'Submit Cached Answers'.", pd.DataFrame(results_log)
|
|
|
def submit_cached_answers(profile: gr.OAuthProfile | None):
    """Submit the cached answers to the scoring API and report the returned score."""
    global cached_answers
    if not profile or not cached_answers:
        return "No cached answers to submit. Run the agent first.", None

    space_id = os.getenv("SPACE_ID")
    username = profile.username
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"

    payload = {
        "username": username,
        "agent_code": agent_code,
        "answers": cached_answers,
    }

    submit_url = f"{DEFAULT_API_URL}/submit"
    try:
        response = requests.post(submit_url, json=payload, timeout=60)
        response.raise_for_status()
        result = response.json()
        final_status = (
            f"Submission Successful!\nUser: {result.get('username')}\n"
            f"Score: {result.get('score', 'N/A')}% "
            f"({result.get('correct_count', '?')}/{result.get('total_attempted', '?')})"
        )
        return final_status, None
    except Exception as e:
        return f"Submission failed: {e}", None
|
|
|
|
|
with gr.Blocks() as demo:
    gr.Markdown("# Basic Agent Evaluation Runner")
    gr.Markdown("""
**Instructions:**

1. Log in to Hugging Face with the button below.
2. Click 'Run Agent Only' to generate answers to all questions.
3. Click 'Submit Cached Answers' to submit them for scoring.
""")

    gr.LoginButton()

    run_button = gr.Button("🧠 Run Agent Only")
    submit_button = gr.Button("📤 Submit Cached Answers")

    status_output = gr.Textbox(label="Run Status / Submission Result", lines=5, interactive=False)
    results_table = gr.DataFrame(label="Questions and Agent Answers", wrap=True)

    run_button.click(fn=run_agent_only, outputs=[status_output, results_table])
    submit_button.click(fn=submit_cached_answers, outputs=[status_output, results_table])
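
    # Neither .click() handler declares inputs: Gradio passes the gr.OAuthProfile
    # argument (or None when the user is not logged in) automatically, because
    # the callback signatures annotate a parameter with that type; this is what
    # gr.LoginButton relies on.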
|
|
|
if __name__ == "__main__": |
|
print("\n" + "-"*30 + " App Starting " + "-"*30) |
|
space_host_startup = os.getenv("SPACE_HOST") |
|
space_id_startup = os.getenv("SPACE_ID") |
|
|
|
if space_host_startup: |
|
print(f"✅ SPACE_HOST found: {space_host_startup}") |
|
print(f" Runtime URL: https://{space_host_startup}.hf.space") |
|
else: |
|
print("ℹ️ No SPACE_HOST found.") |
|
|
|
if space_id_startup: |
|
print(f"✅ SPACE_ID found: {space_id_startup}") |
|
print(f" Repo URL: https://huggingface.co/spaces/{space_id_startup}") |
|
else: |
|
print("ℹ️ No SPACE_ID found.") |
|
|
|
print("Launching Gradio Interface...") |
|
demo.launch(debug=True, share=False) |
|
|