import math
from pathlib import Path

import gradio as gr
import pandas as pd
from gradio_leaderboard import ColumnFilter, Leaderboard

abs_path = Path(__file__).parent

# Any pandas-compatible data
df = pd.read_csv(str(abs_path / "data.csv"))
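
# Columns referenced below, and therefore expected in data.csv:
# "Provider", "URL", "Median Inference Time (in s)", "Price per Image".

# Turn each provider name into a clickable link, stored in a new "Model" column.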
df["Model"] = df.apply(
    lambda row: f'<a target="_blank" href="{row["URL"]}" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">{row["Provider"]}</a>',
    axis=1,
)
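# Put the linked "Model" column first and drop the raw "URL" and "Provider" columns.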
df = df[["Model"] + [col for col in df.columns.tolist() if col not in ["URL", "Provider", "Model"]]]

with gr.Blocks(theme="ParityError/Interstellar") as demo:
    gr.Markdown(
        """
        <h1 style="margin: 0;">InfraBench - A Leaderboard for Inference Providers</h1>
        <br>
        <div style="margin-bottom: 20px;">
        <p>Welcome to InfraBench, the ultimate leaderboard for evaluating inference providers. Our platform focuses on key metrics such as cost, quality, and compression to help you make informed decisions. Whether you're a developer, researcher, or business looking to optimize your inference processes, InfraBench provides the insights you need to choose the best provider for your needs.</p>
        </div>
        """
    )
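    # Two tabs: the leaderboard itself and an "About" page.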
    with gr.Tabs():
        with gr.TabItem("InfraBench Leaderboard"):
            median_inference_time_min = math.floor(float(df["Median Inference Time (in s)"].min()))
            median_inference_time_max = math.ceil(float(df["Median Inference Time (in s)"].max()))
            price_per_image_min = math.floor(float(df["Price per Image"].min()))
            price_per_image_max = math.ceil(float(df["Price per Image"].max()))
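
            # Leaderboard widget: text search on "Model", range filters on latency and
            # price, and markdown cells so the provider links in "Model" render as links.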
            Leaderboard(
                value=df,
                search_columns=["Model"],
                filter_columns=[
                    ColumnFilter(
                        column="Median Inference Time (in s)",
                        type="slider",
                        default=[median_inference_time_min, median_inference_time_max],
                        min=median_inference_time_min,
                        max=median_inference_time_max,
                    ),
                    ColumnFilter(
                        column="Price per Image",
                        type="slider",
                        default=[price_per_image_min, price_per_image_max],
                        min=price_per_image_min,
                        max=price_per_image_max,
                    ),
                ],
                select_columns=df.columns.tolist(),
                datatype="markdown",
            )
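
            # Citation details, shown in an accordion that is open by default.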
            with gr.Accordion("Citation", open=True):
                gr.Markdown(
                    """
                    ```bibtex
                    @misc{InfraBench,
                        title={InfraBench: A Leaderboard for Inference Providers},
                        author={PrunaAI},
                        year={2025},
                        howpublished={\\url{https://huggingface.co/spaces/PrunaAI/InferBench}}
                    }
                    ```
                    """
                )
with gr.TabItem("About"):
gr.Markdown(
"""
# About InfraBench
InfraBench is a leaderboard for inference providers, focusing on cost, quality, and compression.
<h1>🌍 Join the Pruna AI community!</h1>
<p><a rel="nofollow" href="https://twitter.com/PrunaAI"><img alt="Twitter" src="https://img.shields.io/twitter/follow/PrunaAI?style=social"></a>
<a rel="nofollow" href="https://github.com/PrunaAI/pruna"><img alt="GitHub" src="https://img.shields.io/github/stars/prunaai/pruna"></a>
<a rel="nofollow" href="https://www.linkedin.com/company/93832878/admin/feed/posts/?feedType=following"><img alt="LinkedIn" src="https://img.shields.io/badge/LinkedIn-Connect-blue"></a>
<a rel="nofollow" href="https://discord.com/invite/rskEr4BZJx"><img alt="Discord" src="https://img.shields.io/badge/Discord-Join%20Us-blue?style=social&logo=discord"></a>
<a rel="nofollow" href="https://www.reddit.com/r/PrunaAI/"><img alt="Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/PrunaAI?style=social"></a></p>
"""
)

if __name__ == "__main__":
    demo.launch()