""" Basic Agent Evaluation Runner"""
import os
import inspect
import gradio as gr
import requests
import pandas as pd
import time
from langchain_core.messages import HumanMessage
from agent import build_graph
import re


# (Keep Constants as is)
# --- Constants ---
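# Base URL of the scoring API that serves the /questions and /submit endpoints used below.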
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

# --- Basic Agent Definition ---
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------


# Module-level cache: answers produced by "Run Agent Only" are stored here
# so that "Submit Cached Answers" can send them in a second step.
cached_answers = []

class BasicAgent:
    """A langgraph agent."""
    def __init__(self):
        print("BasicAgent initialized.")
        self.graph = build_graph()

    def __call__(self, question: str) -> str:
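        """Run the agent graph on a single question and return a reply in "FINAL ANSWER: ..." form."""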
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        messages = [HumanMessage(content=question)]
        result = self.graph.invoke({"messages": messages})
        full_output = result["messages"][-1].content.strip()

        # Enforce strict FINAL ANSWER format parsing
        match = re.search(r"FINAL ANSWER:\s*(.+)", full_output, re.IGNORECASE)
        if match:
            return match.group(0).strip()  # Returns the entire "FINAL ANSWER: xxx"
        else:
            print(" FINAL ANSWER not found in output, returning fallback.")
            return "FINAL ANSWER: unknown"

def run_agent_only(profile: gr.OAuthProfile | None):
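    """Fetch the evaluation questions, run the agent on each, and cache the answers for later submission."""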
    global cached_answers
    cached_answers = []
    results_log = []

    if not profile:
        return "Please login first.", None

    try:
        agent = BasicAgent()
    except Exception as e:
        return f"Agent Init Error: {e}", None

    questions_url = f"{DEFAULT_API_URL}/questions"

    try:
        response = requests.get(questions_url, timeout=15)
        response.raise_for_status()  # surface HTTP errors (4xx/5xx) instead of parsing an error page
        questions_data = response.json()
    except Exception as e:
        return f"Error fetching questions: {e}", None

    # The shared system prompt is expected to live in system_prompt.txt next to this script.
    with open("system_prompt.txt", "r", encoding="utf-8") as f:
        system_prompt = f.read().strip()

    # Each item provides a task_id, the question text, and an optional file_name attachment.
    for item in questions_data:
        task_id = item.get("task_id")
        question = item.get("question")
        file_name = item.get("file_name")

        if not task_id or question is None:
            continue

        try:
            user_message = question
            if file_name:
                user_message += f"\n\nFile to use: {file_name}"

            full_input = system_prompt + "\n\n" + user_message
            answer = agent(full_input)
            cached_answers.append({"task_id": task_id, "submitted_answer": answer})
            results_log.append({"Task ID": task_id, "Question": question, "Submitted Answer": answer})
        except Exception as e:
            results_log.append({"Task ID": task_id, "Question": question, "Submitted Answer": f"AGENT ERROR: {e}"})

    return "Agent finished. Now click 'Submit Cached Answers'", pd.DataFrame(results_log)

def submit_cached_answers(profile: gr.OAuthProfile | None):
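    """Post the cached answers to the scoring endpoint and report the returned score."""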
    global cached_answers
    if not profile or not cached_answers:
        return "No cached answers to submit. Run the agent first.", None

    space_id = os.getenv("SPACE_ID")
    username = profile.username
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"

    payload = {
        "username": username,
        "agent_code": agent_code,
        "answers": cached_answers
    }

    submit_url = f"{DEFAULT_API_URL}/submit"

    try:
        response = requests.post(submit_url, json=payload, timeout=60)
        response.raise_for_status()  # surface HTTP errors before attempting to parse the response
        result = response.json()
        final_status = (
            f"Submission Successful!\nUser: {result.get('username')}\n"
            f"Score: {result.get('score', 'N/A')}% ({result.get('correct_count', '?')}/{result.get('total_attempted', '?')})"
        )
        return final_status, None
    except Exception as e:
        return f"Submission failed: {e}", None

# --- Gradio UI ---
with gr.Blocks() as demo:
    gr.Markdown("# Basic Agent Evaluation Runner")
    gr.Markdown("""
    **Instructions:**
    1. Run the Agent to generate answers to all questions.
    2. Then click 'Submit Cached Answers' to submit them for scoring.
    """)

    # Login is required so submissions can be attributed to the user's Hugging Face account.
    gr.LoginButton()

    run_button = gr.Button("🧠 Run Agent Only")
    submit_button = gr.Button("📤 Submit Cached Answers")

    status_output = gr.Textbox(label="Run Status / Submission Result", lines=5, interactive=False)
    results_table = gr.DataFrame(label="Questions and Agent Answers", wrap=True)

    # Gradio injects the logged-in user's profile automatically because the handlers
    # declare a gr.OAuthProfile parameter; no explicit inputs are needed here.
    run_button.click(fn=run_agent_only, outputs=[status_output, results_table])
    submit_button.click(fn=submit_cached_answers, outputs=[status_output, results_table])

if __name__ == "__main__":
    print("\n" + "-"*30 + " App Starting " + "-"*30)
    space_host_startup = os.getenv("SPACE_HOST")
    space_id_startup = os.getenv("SPACE_ID")

    if space_host_startup:
        print(f"✅ SPACE_HOST found: {space_host_startup}")
        print(f"   Runtime URL: https://{space_host_startup}.hf.space")
    else:
        print("ℹ️  No SPACE_HOST found.")

    if space_id_startup:
        print(f"✅ SPACE_ID found: {space_id_startup}")
        print(f"   Repo URL: https://huggingface.co/spaces/{space_id_startup}")
    else:
        print("ℹ️  No SPACE_ID found.")

    print("Launching Gradio Interface...")
    demo.launch(debug=True, share=False)
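    # Running `python app.py` locally serves the UI on Gradio's default port (7860) unless overridden.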