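# Gradio front end for the Agents Course Unit 4 evaluation: runs the LangGraph agent
# built in agent.py over the scoring API's questions, then submits the cached answers.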
import os
import gradio as gr
import pandas as pd
import requests
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage
from agent import build_graph
load_dotenv()
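
# Base URL of the scoring API used to fetch questions and submit answers.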
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
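# Answers produced by run_agent_only are cached here so submit_cached_answers can send them later.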
cached_answers = []
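

# Thin wrapper around the LangGraph workflow: each call sends one question through the
# graph and returns the content of the final message as the answer.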
class ChatAgent:
    def __init__(self):
        print("ChatAgent initialized with Qwen LangGraph workflow.")
        self.graph = build_graph("huggingface")  # Uses the Qwen endpoint

    def __call__(self, question: str) -> str:
        print(f"Processing question: {question[:60]}...")
        messages = [HumanMessage(content=question)]
        results = self.graph.invoke({"messages": messages})
        answer = results['messages'][-1].content.strip()
        return answer
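

# Fetch every question from the scoring API, run the agent on each one, cache the
# answers for later submission, and return a status string plus a results table.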
def run_agent_only(profile: gr.OAuthProfile | None):
    global cached_answers
    cached_answers = []
    results_log = []

    if not profile:
        return "Please login first.", None

    try:
        agent = ChatAgent()
    except Exception as e:
        return f"Agent Init Error: {e}", None

    # Fetch the full task list from the scoring API.
    try:
        response = requests.get(f"{DEFAULT_API_URL}/questions", timeout=15)
        response.raise_for_status()  # Surface HTTP errors in the status message below.
        questions_data = response.json()
    except Exception as e:
        return f"Error fetching questions: {e}", None

    # Run the agent on each task; cache answers for submission and log them for display.
    for item in questions_data:
        task_id = item.get("task_id")
        question = item.get("question")
        file_name = item.get("file_name")
        if not task_id or question is None:
            continue
        try:
            user_message = question
            if file_name:
                user_message += f"\n\nFile to use: {file_name}"
            answer = agent(user_message)
            cached_answers.append({"task_id": task_id, "submitted_answer": answer})
            results_log.append({"Task ID": task_id, "Question": question, "Submitted Answer": answer})
        except Exception as e:
            results_log.append({
                "Task ID": task_id,
                "Question": question,
                "Submitted Answer": f"AGENT ERROR: {e}"
            })

    return "Agent finished. Now click 'Submit Cached Answers'", pd.DataFrame(results_log)
def submit_cached_answers(profile: gr.OAuthProfile | None):
    global cached_answers
    if not profile or not cached_answers:
        return "No cached answers to submit. Run the agent first.", None

    space_id = os.getenv("SPACE_ID")
    username = profile.username
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"

    payload = {
        "username": username,
        "agent_code": agent_code,
        "answers": cached_answers
    }

    try:
        response = requests.post(f"{DEFAULT_API_URL}/submit", json=payload, timeout=60)
        response.raise_for_status()  # Surface HTTP errors in the status message below.
        result = response.json()
        final_status = (
            f"Submission Successful!\nUser: {result.get('username')}\n"
            f"Score: {result.get('score', 'N/A')}% "
            f"({result.get('correct_count', '?')}/{result.get('total_attempted', '?')})"
        )
        return final_status, None
    except Exception as e:
        return f"Submission failed: {e}", None
# --- Gradio UI ---
with gr.Blocks() as demo:
    gr.Markdown("# LangGraph ChatAgent Evaluation")
    gr.Markdown("Run the agent on all tasks, then submit for scoring.")

    gr.LoginButton()
    run_button = gr.Button("\U0001F9E0 Run Agent")
    submit_button = gr.Button("\U0001F4E4 Submit Answers")
    status_box = gr.Textbox(label="Status", lines=3)
    table = gr.DataFrame(label="Results", wrap=True)
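
    # Gradio injects the gr.OAuthProfile argument into the handlers automatically based
    # on their type annotations, so only the outputs need to be wired here.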
    run_button.click(fn=run_agent_only, outputs=[status_box, table])
    submit_button.click(fn=submit_cached_answers, outputs=[status_box, table])
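

# Entry point: log Space environment details (useful when running inside a Hugging Face
# Space), then launch the Gradio app.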
if __name__ == "__main__":
    print("\n" + "-"*30 + " App Starting " + "-"*30)
    space_host_startup = os.getenv("SPACE_HOST")
    space_id_startup = os.getenv("SPACE_ID")

    if space_host_startup:
        print(f"✅ SPACE_HOST found: {space_host_startup}")
        print(f"   Runtime URL: https://{space_host_startup}.hf.space")
    else:
        print("ℹ️ No SPACE_HOST found.")

    if space_id_startup:
        print(f"✅ SPACE_ID found: {space_id_startup}")
        print(f"   Repo URL: https://huggingface.co/spaces/{space_id_startup}")
    else:
        print("ℹ️ No SPACE_ID found.")

    print("Launching Gradio Interface...")
    demo.launch(debug=True, share=False)