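"""Basic Agent Evaluation Runner.

A Gradio Space for the Hugging Face Agents Course (Unit 4): fetches questions from
the scoring API, answers them with a minimal smolagents CodeAgent, and submits the
results.
"""
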
import os
import gradio as gr
import requests
import pandas as pd

from tools import FinalAnswerTool
from smolagents import CodeAgent, OpenAIServerModel

# --- Constants ---
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

class BasicAgent:
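    """Thin wrapper around a smolagents CodeAgent that answers one question per call."""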
    def __init__(self):
        # Use GPT-3.5-turbo for quota-friendly operation
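        # No api_key is passed here, so OpenAIServerModel falls back to the OpenAI
        # client's environment lookup; this assumes OPENAI_API_KEY is set as a Space secret.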
        model = OpenAIServerModel(model_id="gpt-3.5-turbo")
        final_tool = FinalAnswerTool()
        self.agent = CodeAgent(
            model=model,
            tools=[final_tool],
            max_steps=3,
            verbosity_level=1
        )

    def __call__(self, question: str) -> str:
        return self.agent.run(question)


def extract_username(profile):
    """
    Extract username from the Hugging Face OAuthProfile or dict.
    """
    if not profile:
        return None
    # Gradio may return the profile as a plain dict; try the common username keys
    if isinstance(profile, dict):
        return profile.get('username') or profile.get('name') or profile.get('login') or profile.get('id')
    # Otherwise, assume object with attributes
    return getattr(profile, 'username', None) or getattr(profile, 'name', None) or getattr(profile, 'login', None)


def run_and_submit_all(profile: gr.OAuthProfile | None):
    """
    Fetch all questions, run the BasicAgent, submit answers, and return status and DataFrame.
    """
    username = extract_username(profile)
    if not username:
        return "Please login to Hugging Face with the login button.", None

    # Fetch questions
    try:
        resp = requests.get(f"{DEFAULT_API_URL}/questions", timeout=15)
        resp.raise_for_status()
        questions = resp.json()
    except Exception as e:
        return f"Error fetching questions: {e}", None

    # Run agent on each question
    agent = BasicAgent()
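    # `results` feeds the UI results table; `payload` becomes the submission body.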
    results = []
    payload = []
    for q in questions:
        tid = q.get('task_id')
        text = q.get('question')
        if not tid or not text:
            continue
        try:
            ans = agent(text)
        except Exception as e:
            ans = f"ERROR: {e}"
        results.append({'Task ID': tid, 'Question': text, 'Answer': ans})
        payload.append({'task_id': tid, 'submitted_answer': ans})

    if not payload:
        return "Agent returned no answers.", pd.DataFrame(results)

    # Prepare and submit
    submission = {
        'username': username,
        'agent_code': f"https://huggingface.co/spaces/{os.getenv('SPACE_ID')}/tree/main",
        'answers': payload
    }
    try:
        sub_resp = requests.post(f"{DEFAULT_API_URL}/submit", json=submission, timeout=60)
        sub_resp.raise_for_status()
        data = sub_resp.json()
        status = (
            f"Submission Successful!\n"
            f"User: {data.get('username')}\n"
            f"Score: {data.get('score')}% ({data.get('correct_count')}/{data.get('total_attempted')})\n"
            f"Message: {data.get('message')}"
        )
    except Exception as e:
        status = f"Submission Failed: {e}"

    return status, pd.DataFrame(results)


def test_random_question(profile: gr.OAuthProfile | None):
    """
    Fetch a random question and return question and agent answer.
    """
    username = extract_username(profile)
    if not username:
        return "Please login to Hugging Face with the login button.", ""
    try:
        resp = requests.get(f"{DEFAULT_API_URL}/random-question", timeout=15)
        resp.raise_for_status()
        q = resp.json()
        question = q.get('question', '')
        # A fresh BasicAgent is built for each test call.
        ans = BasicAgent()(question)
        return question, ans
    except Exception as e:
        return f"Error during test: {e}", ""

# --- Gradio UI ---
with gr.Blocks() as demo:
    gr.Markdown("# Basic Agent Evaluation Runner")
    gr.Markdown(
        """
        **Instructions:**
        1. Clone this space and define your agent's tools in `tools.py`.
        2. Log in with your Hugging Face account.
        3. Use **Run Evaluation & Submit All Answers** or **Test Random Question**.
        """
    )
    gr.LoginButton()
    run_btn = gr.Button("Run Evaluation & Submit All Answers")
    test_btn = gr.Button("Test Random Question")
    status_out = gr.Textbox(label="Status / Result", lines=5, interactive=False)
    table_out = gr.DataFrame(label="Full Results Table", wrap=True)
    question_out = gr.Textbox(label="Random Question", lines=3, interactive=False)
    answer_out = gr.Textbox(label="Agent Answer", lines=3, interactive=False)

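    # Gradio injects the logged-in user's gr.OAuthProfile into parameters annotated
    # with that type, so the click handlers take no explicit inputs.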
    run_btn.click(fn=run_and_submit_all, outputs=[status_out, table_out])
    test_btn.click(fn=test_random_question, outputs=[question_out, answer_out])

if __name__ == "__main__":
    demo.launch(debug=True, share=False)