File size: 3,096 Bytes
e7abd9e
 
 
58582d3
e7abd9e
 
 
 
 
 
 
 
23c96f8
e7abd9e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ccd1d98
e7abd9e
 
 
 
73fea0f
e7abd9e
58582d3
e7abd9e
 
 
ccd1d98
58582d3
e7abd9e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58582d3
e7abd9e
 
 
 
 
58582d3
e7abd9e
 
 
58582d3
 
b7378ca
58582d3
 
 
e7abd9e
 
 
58582d3
 
 
 
 
 
e7abd9e
 
58582d3
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
"""
ASGI entry point for the Open LLM Leaderboard API.
"""

import logging
import logging.config
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware

from app.api.router import router
from app.core.fastapi_cache import setup_cache
from app.core.formatting import LogFormatter
from app.config import hf_config

# Configure logging before anything else: keep uvicorn quiet (WARNING) while
# the application's own "app" logger stays at INFO, all writing to stdout.
LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": True,
    "formatters": {
        "default": {"format": "%(name)s - %(levelname)s - %(message)s"},
    },
    "handlers": {
        "default": {
            "class": "logging.StreamHandler",
            "formatter": "default",
            "stream": "ext://sys.stdout",
        },
    },
    "loggers": {
        # The three uvicorn loggers share one identical quiet configuration.
        **{
            name: {
                "handlers": ["default"],
                "level": "WARNING",
                "propagate": False,
            }
            for name in ("uvicorn", "uvicorn.error", "uvicorn.access")
        },
        "app": {
            "handlers": ["default"],
            "level": "INFO",
            "propagate": False,
        },
    },
    "root": {
        "handlers": ["default"],
        "level": "WARNING",
    },
}

# Apply logging configuration
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("app")

# Create FastAPI application (interactive docs served at /docs)
app = FastAPI(
    title="Open LLM Leaderboard",
    version="1.0.0",
    docs_url="/docs",
)

# Add CORS middleware
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# disallowed for a literal wildcard by the CORS spec; Starlette compensates by
# echoing the request's Origin header instead. Confirm this fully-open,
# credentialed policy is intentional for this deployment.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Add GZIP compression for responses larger than 500 bytes
app.add_middleware(GZipMiddleware, minimum_size=500)

# Include API router: all application endpoints live under the /api prefix
app.include_router(router, prefix="/api")


@app.on_event("startup")
async def startup_event():
    """Log the active Hugging Face configuration and initialize the cache."""
    # NOTE(review): on_event("startup") is deprecated in recent FastAPI
    # releases in favor of lifespan handlers — still functional, but worth
    # migrating when the FastAPI constructor can be touched.
    logger.info("\n")
    logger.info(LogFormatter.section("APPLICATION STARTUP"))

    # Summarize the Hugging Face settings the app will run with.
    logger.info(LogFormatter.section("HUGGING FACE CONFIGURATION"))
    logger.info(LogFormatter.info(f"Organization: {hf_config.HF_ORGANIZATION}"))
    if hf_config.HF_TOKEN:
        # Only the first four characters of the token are ever logged.
        token_status = f"Using token starting with: {hf_config.HF_TOKEN[:4]}..."
    else:
        token_status = "Missing"
    logger.info(LogFormatter.info(f"Token Status: {token_status}"))
    logger.info(LogFormatter.info("Using repositories:"))
    repo_listing = (
        ("Queue", hf_config.QUEUE_REPO),
        ("Aggregated", hf_config.AGGREGATED_REPO),
        ("Votes", hf_config.VOTES_REPO),
        ("Official Providers", hf_config.OFFICIAL_PROVIDERS_REPO),
    )
    for label, repo in repo_listing:
        logger.info(LogFormatter.info(f"  - {label}: {repo}"))

    # Bring up the FastAPI cache layer.
    setup_cache()
    logger.info(
        LogFormatter.success("FastAPI Cache initialized with in-memory backend")
    )