import os
import gradio as gr
import requests
import pandas as pd
from huggingface_hub import login
from dotenv import load_dotenv

# Load environment variables from a local .env file, if present
load_dotenv()

API_BASE_URL = "https://my-custom-api.hf.space"
MODEL_NAME = os.getenv("MODEL_NAME", "meta-llama/Meta-Llama-3-8B-Instruct")  # or use 'google/gemma-2b-it'
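
# NOTE: call_model() reads HF_TOKEN straight from the environment via
# os.environ["HF_TOKEN"], so the token must be available as a Space secret or
# in the .env file; otherwise the first request raises a KeyError.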
def call_model(prompt):
    headers = {
        "Authorization": f"Bearer {os.environ['HF_TOKEN']}",
        "Content-Type": "application/json"
    }
    payload = {
        "inputs": prompt,
        "parameters": {"max_new_tokens": 512}
    }
    response = requests.post(
        f"https://api-inference.huggingface.co/models/{MODEL_NAME}",
        headers=headers,
        json=payload,
        timeout=60
    )
    result = response.json()
    if isinstance(result, dict) and result.get("error"):
        return f"ERROR: {result['error']}"
    return result[0]["generated_text"] if isinstance(result, list) else result
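
# Note: by default the Inference API's text-generation output echoes the prompt
# before the completion; a possible tweak (not part of the original code) is to
# request only the new tokens with
#     "parameters": {"max_new_tokens": 512, "return_full_text": False}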


def execute_agent_operations(profile: gr.OAuthProfile | None):
    space_id = os.getenv("MY_SPACE_ID")

    if profile:
        username = f"{profile.username}"
        print(f"Logged in as: {username}")
    else:
        print("Not logged in.")
        return "Please log in to Hugging Face.", None

    questions_url = f"{API_BASE_URL}/questions"
    attachments_url = f"{API_BASE_URL}/files/"
    submit_url = f"{API_BASE_URL}/submit"

    # Fetch the questions and, where available, their attachments
    try:
        response = requests.get(questions_url, timeout=15)
        response.raise_for_status()
        questions = response.json()
        if not questions:
            return "No questions were found.", None
        for q in questions:
            task_id = q.get("task_id")
            file_name = q.get("file_name", "")
            if task_id and file_name:
                try:
                    att_response = requests.get(f"{attachments_url}{task_id}", timeout=15)
                    att_response.raise_for_status()
                    q["attachment_b64"] = att_response.text
                except Exception as e:
                    print(f"Error fetching attachment: {e}")
                    q["attachment_b64"] = None
    except Exception as e:
        return f"Error fetching questions: {e}", None
    results_log = []
    answers_payload = []
    for item in questions:
        task_id = item.get("task_id")
        question_text = item.get("question", "")
        attachment = item.get("attachment_b64", "")
        full_prompt = f"{question_text}\n\nAttached file:\n{attachment}" if attachment else question_text
        if not task_id or not full_prompt:
            continue
        try:
            submitted_answer = call_model(full_prompt)
            answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
        except Exception as e:
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": f"ERROR: {e}"})

    if not answers_payload:
        return "The agent produced no answers.", pd.DataFrame(results_log)

    # Submit all answers in a single request
    submission_data = {
        "username": username.strip(),
        "agent_code": f"https://huggingface.co/spaces/{space_id}",
        "answers": answers_payload
    }
    try:
        post_response = requests.post(submit_url, json=submission_data, timeout=60)
        post_response.raise_for_status()
        result_data = post_response.json()
        score = result_data.get("score", "N/A")
        return f"Submission successful!\nScore: {score}", pd.DataFrame(results_log)
    except Exception as e:
        return f"Submission error: {e}", pd.DataFrame(results_log)
# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Agent Evaluator (custom version)")
    gr.LoginButton()
    run_button = gr.Button("Run Evaluation and Submit Answers")
    status_output = gr.Textbox(label="Status", lines=3)
    results_table = gr.DataFrame(label="Agent answers")
    run_button.click(
        fn=execute_agent_operations,
        outputs=[status_output, results_table]
    )

if __name__ == "__main__":
    demo.launch(debug=True)
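
# To run this locally (a sketch, assuming no dependencies beyond the imports
# above): pip install gradio requests pandas python-dotenv huggingface_hub,
# put HF_TOKEN (and optionally MODEL_NAME and MY_SPACE_ID) in a .env file,
# then launch the script, e.g. `python app.py` (app.py being the conventional
# entry-point name for a Gradio Space).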