from smolagents import CodeAgent, InferenceClientModel, GradioUI, tool
from huggingface_hub import HfApi
import requests
from typing import List, Dict

@tool
def leaderboard_search(query: str) -> str:
    """
    Search Hugging Face Spaces specifically in the model benchmarking category.

    Args:
        query: The search query to find relevant model benchmarking spaces
    Returns:
        A formatted string containing search results with space names, descriptions, and additional information
    """
    api_url = "https://huggingface.co/api/spaces"
    search_words = ["arena", "leaderboard", "benchmark"]
    results = []
    try:
        for word in search_words:
            params = {
                "search": query + " " + word,
                "full": True,  # Get full information
            }
            response = requests.get(api_url, params=params)
            response.raise_for_status()  # Surface HTTP errors instead of parsing a bad response
            spaces = response.json()
            if not spaces:
                continue  # Skip if no spaces found for this search word
            for space in spaces:
                # Extract relevant information
                space_id = space.get("id", "Unknown")
                author = space_id.split("/")[0] if "/" in space_id else "Unknown"
                space_name = space_id.split("/")[1] if "/" in space_id else space_id
                likes = space.get("likes", 0)
                # Use card metadata for the title and description when available
                card_data = space.get("cardData") or {}
                title = card_data.get("title") or space_name
                description = card_data.get("short_description") or "No description available"
                # Create formatted result string
                result = f"🏆 **{title}** ({space_id})\n"
                result += f"   👤 Author: {author}\n"
                result += f"   📝 {description}\n"
                result += f"   ❤️ Likes: {likes}\n"
                result += f"   🔗 URL: https://huggingface.co/spaces/{space_id}\n"
                results.append(result)
        if not results:
            return f"No model benchmarking spaces found for query: '{query}'"
        return "\n".join(results)
    except requests.exceptions.RequestException as e:
        return f"Error searching Hugging Face Spaces: {str(e)}"
    except Exception as e:
        return f"Unexpected error: {str(e)}"

@tool
def get_space_content(space_id: str) -> str:
    """
    Get the content of a Hugging Face Space.

    Args:
        space_id: The Hugging Face Space ID (e.g., "open-llm-leaderboard/open_llm_leaderboard")
    Returns:
        The space content or error message
    """
    try:
        # Get the space's README or main content
        readme_url = f"https://huggingface.co/spaces/{space_id}/raw/main/README.md"
        response = requests.get(readme_url)
        if response.status_code == 200:
            return f"Content from {space_id}:\n\n{response.text}"
        else:
            # Fall back to listing whatever files the space exposes
            files_url = f"https://huggingface.co/api/spaces/{space_id}/tree/main"
            files_response = requests.get(files_url)
            if files_response.status_code == 200:
                files = files_response.json()
                return f"Available files in {space_id}:\n" + "\n".join([f"- {file['path']}" for file in files])
            else:
                return f"Space {space_id} exists but couldn't retrieve content"
    except Exception as e:
        return f"Error accessing space {space_id}: {str(e)}"

@tool
def get_file_from_space(space_id: str, file_path: str) -> str:
    """
    Get a specific file from a Hugging Face Space.

    Args:
        space_id: The Hugging Face Space ID
        file_path: Path to the file in the space
    Returns:
        The file content or error message
    """
    try:
        url = f"https://huggingface.co/spaces/{space_id}/raw/main/{file_path}"
        response = requests.get(url)
        if response.status_code == 200:
            return f"Content of {file_path} from {space_id}:\n\n{response.text}"
        else:
            return f"Couldn't retrieve {file_path} from {space_id}"
    except Exception as e:
        return f"Error: {str(e)}"

# Initialize the agent with the leaderboard search and space content tools
model = InferenceClientModel()

agent = CodeAgent(
    tools=[leaderboard_search, get_space_content, get_file_from_space],
    additional_authorized_imports=["json", "requests", "pandas"],
    model=model,
    add_base_tools=False,
description="Your job is to find the best possible model for a given task based on relevant leaderboards or arenas. You will be provided with a task description, and you should use the leaderboard tool to find relevant leaderboards or arenas. If you want to inspect the contents of a particular Space (e.g., README or code), use the space_content_tool. Respond with a list of the top models, including their names, scores, and links to their leaderboard pages.", | |
)

GradioUI(agent).launch()
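
# Headless alternative to the Gradio UI (hypothetical query shown):
#   print(agent.run("Find the best open model for code generation"))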