"""Hugging Face Hub configuration for the LLM security leaderboard.

Reads the HF token from the environment, initializes the shared ``HfApi``
client, and exposes the dataset repository IDs and local cache paths used
by the rest of the app.
"""

import logging
import os

from huggingface_hub import HfApi

from app.core.cache import cache_config

logger = logging.getLogger(__name__)

# Organization or user who owns the datasets
HF_ORGANIZATION = "stacklok"

# Get HF token directly from environment. A missing token is not fatal
# (anonymous access still works), so warn and continue.
HF_TOKEN = os.environ.get("HF_TOKEN")
if not HF_TOKEN:
    logger.warning(
        "HF_TOKEN not found in environment variables. Some features may be limited."
    )

# Shared Hub API client; token may be None for anonymous/read-only access.
API = HfApi(token=HF_TOKEN)

# Repository configuration
QUEUE_REPO = f"{HF_ORGANIZATION}/llm-security-leaderboard-requests"
AGGREGATED_REPO = f"{HF_ORGANIZATION}/llm-security-leaderboard-contents"
VOTES_REPO = f"{HF_ORGANIZATION}/llm-security-leaderboard-votes"
OFFICIAL_PROVIDERS_REPO = "open-llm-leaderboard/official-providers"

# Lazy %-style args: formatting is skipped entirely when INFO is disabled.
logger.info("QUEUE_REPO: %s", QUEUE_REPO)
logger.info("AGGREGATED_REPO: %s", AGGREGATED_REPO)
logger.info("VOTES_REPO: %s", VOTES_REPO)
logger.info("OFFICIAL_PROVIDERS_REPO: %s", OFFICIAL_PROVIDERS_REPO)

# File paths from cache config
VOTES_PATH = cache_config.votes_file
EVAL_REQUESTS_PATH = cache_config.eval_requests_file
MODEL_CACHE_DIR = cache_config.models_cache