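"""Hugging Face Space app: a smolagents CodeAgent that searches leaderboard/arena
Spaces on the Hub and inspects their contents to recommend models for a given task,
served through a Gradio UI."""
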
from smolagents import CodeAgent, InferenceClientModel, GradioUI, tool
from huggingface_hub import HfApi
import requests
from typing import List, Dict

@tool
def leaderboard_search(query: str) -> str:
    """
    Search Hugging Face Spaces specifically in the model benchmarking category.

    Args:
        query: The search query to find relevant model benchmarking spaces

    Returns:
        A formatted string containing search results with space names, descriptions, and additional information
    """
    api_url = "https://huggingface.co/api/spaces"
    search_words = ["arena", "leaderboard", "benchmark"]
    results = []

    try:
        for word in search_words:
            params = {
                "search": query + " " + word,
                "full": True,  # Get full information
            }
            response = requests.get(api_url, params=params)
            spaces = response.json()

            if not spaces:
                continue  # Skip if no spaces found for this search word

            for space in spaces:
                # Extract relevant information
                space_id = space.get("id", "Unknown")
                author = space_id.split("/")[0] if "/" in space_id else "Unknown"
                space_name = space_id.split("/")[1] if "/" in space_id else space_id
                likes = space.get("likes", 0)

                # Try to get detailed information if available
                card_data = space.get("cardData") or {}
                title = card_data.get("title") or space_name
                description = card_data.get("short_description") or "No description available"

                # Create formatted result string
                result = f"**{title}** ({space_id})\n"
                result += f"  Author: {author}\n"
                result += f"  Description: {description}\n"
                result += f"  Likes: {likes}\n"
                result += f"  URL: https://huggingface.co/spaces/{space_id}\n"
                results.append(result)

        if not results:
            return f"No model benchmarking spaces found for query: '{query}'"

        return "\n".join(results)
    except requests.exceptions.RequestException as e:
        return f"Error searching Hugging Face Spaces: {str(e)}"
    except Exception as e:
        return f"Unexpected error: {str(e)}"
@tool
def get_space_content(space_id: str) -> str:
    """
    Get the content of a Hugging Face Space.

    Args:
        space_id: The Hugging Face Space ID (e.g., "open-llm-leaderboard/open_llm_leaderboard")

    Returns:
        The space content or error message
    """
    try:
        # Get the space's README or main content
        readme_url = f"https://huggingface.co/spaces/{space_id}/raw/main/README.md"
        response = requests.get(readme_url)

        if response.status_code == 200:
            return f"Content from {space_id}:\n\n{response.text}"
        else:
            # Try to get any available file
            files_url = f"https://huggingface.co/api/spaces/{space_id}/tree/main"
            files_response = requests.get(files_url)
            if files_response.status_code == 200:
                files = files_response.json()
                return f"Available files in {space_id}:\n" + "\n".join(
                    f"- {file['path']}" for file in files
                )
            else:
                return f"Space {space_id} exists but couldn't retrieve content"
    except Exception as e:
        return f"Error accessing space {space_id}: {str(e)}"
@tool
def get_file_from_space(space_id: str, file_path: str) -> str:
    """
    Get a specific file from a Hugging Face Space.

    Args:
        space_id: The Hugging Face Space ID
        file_path: Path to the file in the space

    Returns:
        The file content or error message
    """
    try:
        url = f"https://huggingface.co/spaces/{space_id}/raw/main/{file_path}"
        response = requests.get(url)
        if response.status_code == 200:
            return f"Content of {file_path} from {space_id}:\n\n{response.text}"
        else:
            return f"Couldn't retrieve {file_path} from {space_id}"
    except Exception as e:
        return f"Error: {str(e)}"
# Initialize the agent with the leaderboard search and space content tools
model = InferenceClientModel()

agent = CodeAgent(
    tools=[leaderboard_search, get_space_content, get_file_from_space],
    additional_authorized_imports=["json", "requests", "pandas"],
    model=model,
    add_base_tools=False,
    description=(
        "Your job is to find the best possible model for a given task based on relevant "
        "leaderboards or arenas. You will be provided with a task description, and you "
        "should use the leaderboard_search tool to find relevant leaderboards or arenas. "
        "If you want to inspect the contents of a particular Space (e.g., its README or "
        "code), use the get_space_content and get_file_from_space tools. Respond with a "
        "list of the top models, including their names, scores, and links to their "
        "leaderboard pages."
    ),
)
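
# Optional: exercise the agent directly, without the Gradio UI (illustrative task):
# print(agent.run("Find the best open model for code generation and cite the leaderboard."))
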
GradioUI(agent).launch()