import os
import inspect

import gradio as gr
import requests
import pandas as pd
from huggingface_hub import hf_hub_download, login
from smolagents import (
    CodeAgent,
    OpenAIServerModel,
    Tool,
    PythonInterpreterTool,
    DuckDuckGoSearchTool,
)

# (Keep Constants as is)
# --- Constants ---
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

# --- Basic Agent Definition ---
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------

# Global variables
HF_DATASET_TOKEN = os.getenv("HF_DATASET_TOKEN")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")


# GAIA dataset file downloading
def get_GAIA_dataset_validation_file(file_name: str):
    """Download a GAIA dataset validation file."""
    response = hf_hub_download(
        repo_id="gaia-benchmark/GAIA",
        filename=f"2023/validation/{file_name}",
        repo_type="dataset",
    )
    return response


def get_GAIA_dataset_test_file(file_name: str):
    """Download a GAIA dataset test file."""
    response = hf_hub_download(
        repo_id="gaia-benchmark/GAIA",
        filename=f"2023/test/{file_name}",
        repo_type="dataset",
    )
    return response


def get_GAIA_dataset_file(file_name: str):
    """Download a GAIA dataset file (the validation split is attempted first, then test)."""
    global HF_DATASET_TOKEN
    login(token=HF_DATASET_TOKEN)
    try:
        response = get_GAIA_dataset_validation_file(file_name)
    except Exception:
        response = get_GAIA_dataset_test_file(file_name)
    return response


class BasicAgent:
    def __init__(self):
        print("Starting the initialization of model.")
        global OPENAI_API_KEY
        model = OpenAIServerModel(
            model_id="gpt-4o-mini-2024-07-18",
            api_key=OPENAI_API_KEY,
        )
        print("Core model has been initialized.")
        self.tools = [
            DuckDuckGoSearchTool()
        ]
        print("Agent tools have been initialized.")
        self.agent = CodeAgent(
            model=model,
            tools=self.tools,
            add_base_tools=True,  # Add basic tools like math
        )
        print("Core agent has been initialized.")

    def __call__(self, question: str) -> str:
        print("#" * 20)
        print(f"ℹ️ Agent received question: {question}")
        print("#" * 20)
        try:
            # Send the question content to the agent
            answer = self.agent.run(question)
            print("#" * 20)
            print(f"✅ Agent returning the answer: {answer}")
            print("#" * 20)
            # Return the answer
            return answer
        except Exception as e:
            print("!" * 20)
            print(f"❗ Error running agent: {str(e)}")
            print("!" * 20)
            return f"AGENT ERROR: {e}"

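
# --- Illustrative sketch: an alternative agent setup (not used by BasicAgent) ---
# PythonInterpreterTool is imported above but never wired in. As a hedged example
# of how the tool list could be extended, a variant constructor might look like
# the helper below; build_extended_agent is a hypothetical name added here for
# illustration only and is not called anywhere in this app.
def build_extended_agent() -> CodeAgent:
    model = OpenAIServerModel(
        model_id="gpt-4o-mini-2024-07-18",
        api_key=OPENAI_API_KEY,
    )
    return CodeAgent(
        model=model,
        tools=[DuckDuckGoSearchTool(), PythonInterpreterTool()],
        add_base_tools=True,  # keep the base tools, as in BasicAgent
    )
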
def run_and_submit_all(profile: gr.OAuthProfile | None):
    """
    Fetches all questions, runs the BasicAgent on them, submits all answers,
    and displays the results.
    """
    # Quick manual check of the GAIA download helper, kept commented out so the
    # full run below is not short-circuited:
    # response = get_GAIA_dataset_file("076c8171-9b3b-49b9-a477-244d2a532826.xlsx")
    # print(response)

    # --- Determine HF Space Runtime URL and Repo URL ---
    space_id = os.getenv("SPACE_ID")  # Get the SPACE_ID for sending link to the code

    if profile:
        username = f"{profile.username}"
        print(f"User logged in: {username}")
    else:
        print("User not logged in.")
        return "Please Login to Hugging Face with the button.", None

    api_url = DEFAULT_API_URL
    questions_url = f"{api_url}/questions"
    submit_url = f"{api_url}/submit"

    # 1. Instantiate Agent (modify this part to create your agent)
    try:
        agent = BasicAgent()
    except Exception as e:
        print(f"Error instantiating agent: {e}")
        return f"Error initializing agent: {e}", None
    # When the app runs as a Hugging Face Space, this link points to your codebase
    # (useful for others, so please keep it public).
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"
    print(agent_code)

    # 2. Fetch Questions
    print(f"Fetching questions from: {questions_url}")
    try:
        response = requests.get(questions_url, timeout=15)
        response.raise_for_status()
        questions_data = response.json()
        if not questions_data:
            print("Fetched questions list is empty.")
            return "Fetched questions list is empty or invalid format.", None
        print(f"Fetched {len(questions_data)} questions.")
    except requests.exceptions.RequestException as e:
        print(f"Error fetching questions: {e}")
        return f"Error fetching questions: {e}", None
    except requests.exceptions.JSONDecodeError as e:
        print(f"Error decoding JSON response from questions endpoint: {e}")
        print(f"Response text: {response.text[:500]}")
        return f"Error decoding server response for questions: {e}", None
    except Exception as e:
        print(f"An unexpected error occurred fetching questions: {e}")
        return f"An unexpected error occurred fetching questions: {e}", None

    # 3. Run your Agent
    results_log = []
    answers_payload = []
    print(f"Running agent on {len(questions_data)} questions...")
    question_index = 1
    for item in questions_data:
        print(f"ℹ️ Handling question #: {question_index}")
        task_id = item.get("task_id")
        question_text = item.get("question")
        if not task_id or question_text is None:
            print(f"⚠️ Skipping item with missing task_id or question: {item}")
            continue
        try:
            # NOTE: file-based tasks are not handled here; see the
            # build_prompt_with_attachment sketch below this function.
            submitted_answer = agent(question_text)
            # submitted_answer = "Placeholder"  # handy when testing the flow without calling the model
            answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
            print(f"✅ Successful handling of question #: {question_index}")
        except Exception as e:
            print(f"❌ Error running agent on task {task_id}: {e}")
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": f"AGENT ERROR: {e}"})
        question_index = question_index + 1

    if not answers_payload:
        print("Agent did not produce any answers to submit.")
        return "Agent did not produce any answers to submit.", pd.DataFrame(results_log)

    # 4. Prepare Submission
    submission_data = {"username": username.strip(), "agent_code": agent_code, "answers": answers_payload}
    status_update = f"Agent finished. Submitting {len(answers_payload)} answers for user '{username}'..."
    print(status_update)

    # 5. Submit
    print(f"Submitting {len(answers_payload)} answers to: {submit_url}")
    try:
        response = requests.post(submit_url, json=submission_data, timeout=60)
        response.raise_for_status()
        result_data = response.json()
        final_status = (
            f"Submission Successful!\n"
            f"User: {result_data.get('username')}\n"
            f"Overall Score: {result_data.get('score', 'N/A')}% "
            f"({result_data.get('correct_count', '?')}/{result_data.get('total_attempted', '?')} correct)\n"
            f"Message: {result_data.get('message', 'No message received.')}"
        )
        print("Submission successful.")
        results_df = pd.DataFrame(results_log)
        return final_status, results_df
    except requests.exceptions.HTTPError as e:
        error_detail = f"Server responded with status {e.response.status_code}."
        try:
            error_json = e.response.json()
            error_detail += f" Detail: {error_json.get('detail', e.response.text)}"
        except requests.exceptions.JSONDecodeError:
            error_detail += f" Response: {e.response.text[:500]}"
        status_message = f"Submission Failed: {error_detail}"
        print(status_message)
        results_df = pd.DataFrame(results_log)
        return status_message, results_df
    except requests.exceptions.Timeout:
        status_message = "Submission Failed: The request timed out."
        print(status_message)
        results_df = pd.DataFrame(results_log)
        return status_message, results_df
    except requests.exceptions.RequestException as e:
        status_message = f"Submission Failed: Network error - {e}"
        print(status_message)
        results_df = pd.DataFrame(results_log)
        return status_message, results_df
    except Exception as e:
        status_message = f"An unexpected error occurred during submission: {e}"
        print(status_message)
        results_df = pd.DataFrame(results_log)
        return status_message, results_df

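
# --- Illustrative sketch: attaching GAIA files to a question prompt ---
# The GAIA download helpers above are currently only exercised by the
# commented-out check in run_and_submit_all. Assuming a question item exposes a
# "file_name" field (an assumption about the payload, not verified here), one
# possible way to surface the attachment to the agent would be:
def build_prompt_with_attachment(item: dict) -> str:
    question_text = item.get("question", "")
    file_name = item.get("file_name")  # hypothetical field name, used for illustration
    if not file_name:
        return question_text
    try:
        local_path = get_GAIA_dataset_file(file_name)
        return f"{question_text}\n\nA file for this task has been downloaded to: {local_path}"
    except Exception as e:
        print(f"⚠️ Could not download attachment {file_name}: {e}")
        return question_text
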
# --- Build Gradio Interface using Blocks ---
with gr.Blocks() as demo:
    gr.Markdown("# Basic Agent Evaluation Runner")
    gr.Markdown(
        """
        # Hugging Face Agents Course - final assignment implementation.
        An OpenAI API key is needed to run this assignment.
        """
    )

    gr.LoginButton()

    run_button = gr.Button("Run Evaluation & Submit All Answers")

    status_output = gr.Textbox(label="Run Status / Submission Result", lines=5, interactive=False)
    # Removed max_rows=10 from DataFrame constructor
    results_table = gr.DataFrame(label="Questions and Agent Answers", wrap=True)

    run_button.click(
        fn=run_and_submit_all,
        outputs=[status_output, results_table]
    )

if __name__ == "__main__":
    print("\n" + "-" * 30 + " App Starting " + "-" * 30)
    # Check for SPACE_HOST and SPACE_ID at startup for information
    space_host_startup = os.getenv("SPACE_HOST")
    space_id_startup = os.getenv("SPACE_ID")  # Get SPACE_ID at startup

    if space_host_startup:
        print(f"✅ SPACE_HOST found: {space_host_startup}")
        print(f"   Runtime URL should be: https://{space_host_startup}.hf.space")
    else:
        print("ℹ️ SPACE_HOST environment variable not found (running locally?).")

    if space_id_startup:  # Print repo URLs if SPACE_ID is found
        print(f"✅ SPACE_ID found: {space_id_startup}")
        print(f"   Repo URL: https://huggingface.co/spaces/{space_id_startup}")
        print(f"   Repo Tree URL: https://huggingface.co/spaces/{space_id_startup}/tree/main")
    else:
        print("ℹ️ SPACE_ID environment variable not found (running locally?). Repo URL cannot be determined.")

    print("-" * (60 + len(" App Starting ")) + "\n")

    print("Launching Gradio Interface for Basic Agent Evaluation...")
    demo.launch(debug=True, share=False)
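
# --- Optional: a quick local check without the Gradio UI (illustrative sketch) ---
# Assuming this file is saved as app.py and OPENAI_API_KEY is set in the
# environment, the agent can be exercised on a single question from a plain
# Python shell, without fetching or submitting anything, e.g.:
#
#   from app import BasicAgent
#   agent = BasicAgent()
#   print(agent("What is 2 + 2?"))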