# Hugging Face Space app — switched to the Qwen backend (commit 58aeeb9).
import os
import gradio as gr
import pandas as pd
import requests
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage
from agent import build_graph
load_dotenv()
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
cached_answers = []
class ChatAgent:
    """Callable wrapper around the LangGraph workflow built by ``agent.build_graph``."""

    def __init__(self):
        print("ChatAgent initialized with Qwen LangGraph workflow.")
        # "huggingface" selects the Qwen-backed endpoint inside build_graph.
        self.graph = build_graph("huggingface")

    def __call__(self, question: str) -> str:
        """Run *question* through the graph and return the final message's text."""
        print(f"Processing question: {question[:60]}...")
        state = self.graph.invoke({"messages": [HumanMessage(content=question)]})
        return state["messages"][-1].content.strip()
def run_agent_only(profile: gr.OAuthProfile | None):
    """Fetch every task from the scoring API, answer each with ChatAgent,
    and cache the answers for later submission.

    Args:
        profile: Logged-in Hugging Face OAuth profile (injected by Gradio),
            or None when the user has not logged in.

    Returns:
        Tuple of (status message, pandas DataFrame of per-task results or None).
    """
    global cached_answers
    cached_answers = []
    results_log = []

    if not profile:
        return "Please login first.", None

    try:
        agent = ChatAgent()
    except Exception as e:
        return f"Agent Init Error: {e}", None

    try:
        response = requests.get(f"{DEFAULT_API_URL}/questions", timeout=15)
        # Fail fast on HTTP errors instead of JSON-decoding an error page.
        response.raise_for_status()
        questions_data = response.json()
    except Exception as e:
        return f"Error fetching questions: {e}", None

    # Guard against an empty or non-list payload before iterating.
    if not isinstance(questions_data, list) or not questions_data:
        return "Error fetching questions: empty or malformed response.", None

    for item in questions_data:
        task_id = item.get("task_id")
        question = item.get("question")
        file_name = item.get("file_name")
        # Skip malformed entries that lack an id or a question.
        if not task_id or question is None:
            continue
        try:
            user_message = question
            if file_name:
                # Surface the attached file name to the agent as context.
                user_message += f"\n\nFile to use: {file_name}"
            answer = agent(user_message)
            cached_answers.append({"task_id": task_id, "submitted_answer": answer})
            results_log.append({"Task ID": task_id, "Question": question, "Submitted Answer": answer})
        except Exception as e:
            # Record the failure per-task so one bad question doesn't abort the run.
            results_log.append({
                "Task ID": task_id,
                "Question": question,
                "Submitted Answer": f"AGENT ERROR: {e}"
            })

    return "Agent finished. Now click 'Submit Cached Answers'", pd.DataFrame(results_log)
def submit_cached_answers(profile: gr.OAuthProfile | None):
    """POST the cached answers to the scoring endpoint and report the score.

    Args:
        profile: Logged-in Hugging Face OAuth profile (injected by Gradio),
            or None when the user has not logged in.

    Returns:
        Tuple of (status message, None) — the second slot clears the table output.
    """
    global cached_answers
    if not profile or not cached_answers:
        return "No cached answers to submit. Run the agent first.", None

    space_id = os.getenv("SPACE_ID")
    username = profile.username
    # Link to this Space's code so the scorer can verify the agent implementation.
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"

    payload = {
        "username": username,
        "agent_code": agent_code,
        "answers": cached_answers
    }

    try:
        response = requests.post(f"{DEFAULT_API_URL}/submit", json=payload, timeout=60)
        # Fail fast on HTTP errors instead of JSON-decoding an error page.
        response.raise_for_status()
        result = response.json()
        final_status = (
            f"Submission Successful!\nUser: {result.get('username')}\n"
            f"Score: {result.get('score', 'N/A')}% "
            f"({result.get('correct_count', '?')}/{result.get('total_attempted', '?')})"
        )
        return final_status, None
    except Exception as e:
        return f"Submission failed: {e}", None
# --- Gradio UI ---
# Declarative layout: components are created in display order inside the Blocks
# context; `demo` is launched from the __main__ guard below.
with gr.Blocks() as demo:
    gr.Markdown("# LangGraph ChatAgent Evaluation")
    gr.Markdown("Run the agent on all tasks, then submit for scoring.")
    # OAuth login button; Gradio injects the resulting gr.OAuthProfile into
    # handlers whose signature annotates a gr.OAuthProfile parameter.
    gr.LoginButton()
    run_button = gr.Button("\U0001F9E0 Run Agent")  # 🧠
    submit_button = gr.Button("\U0001F4E4 Submit Answers")  # 📤
    status_box = gr.Textbox(label="Status", lines=3)
    table = gr.DataFrame(label="Results", wrap=True)
    # No inputs= given: the OAuth profile is supplied automatically via the
    # handlers' type annotations; both handlers return (status, table) pairs.
    run_button.click(fn=run_agent_only, outputs=[status_box, table])
    submit_button.click(fn=submit_cached_answers, outputs=[status_box, table])
if __name__ == "__main__":
    # Startup banner plus Hugging Face Space environment diagnostics.
    rule = "-" * 30
    print("\n" + rule + " App Starting " + rule)

    host = os.getenv("SPACE_HOST")
    repo = os.getenv("SPACE_ID")

    if host:
        print(f"✅ SPACE_HOST found: {host}")
        print(f" Runtime URL: https://{host}.hf.space")
    else:
        print("ℹ️ No SPACE_HOST found.")

    if repo:
        print(f"✅ SPACE_ID found: {repo}")
        print(f" Repo URL: https://huggingface.co/spaces/{repo}")
    else:
        print("ℹ️ No SPACE_ID found.")

    print("Launching Gradio Interface...")
    demo.launch(debug=True, share=False)