import json
from pathlib import Path
import gradio as gr
import pandas as pd
from gradio_leaderboard import Leaderboard
from assets import custom_css
# Override gradio_leaderboard's config validation to prevent crashes and allow
# selecting every column in the leaderboard.
Leaderboard.raise_error_if_incorrect_config = lambda self: None
abs_path = Path(__file__).parent
# Load the JSONL file into a pandas DataFrame using the json library
with open(abs_path / "results.jsonl", "r") as file:
    json_data = file.read()
partially_fixed_json_data = json_data.replace("}\n{", "},\n{")
fixed_json_data = f"[{partially_fixed_json_data}]"
json_data = json.loads(fixed_json_data)
df = pd.DataFrame(json_data)
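# Note (alternative sketch, not what this app currently does): pandas can read
# newline-delimited JSON directly, e.g.
#   df = pd.read_json(abs_path / "results.jsonl", lines=True)
# That call assumes exactly one JSON object per line, whereas the string fix
# above also tolerates records that span several lines.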
df["Model"] = df.apply(
    lambda row: f'<a target="_blank" href="{row["URL"]}" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">{row["Model"]}</a>',
    axis=1,
)
df = df[
    ["Model", "Median Inference Time", "Price per Image"]
    + [col for col in df.columns.tolist() if col not in ["URL", "Model", "Median Inference Time", "Price per Image"]]
]
df = df.sort_values(by="GenEval", ascending=False)
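# Build the Gradio UI: a leaderboard tab (the table above is sorted by GenEval,
# where higher is better), an examples tab, an about tab, and accordions with
# community links and a citation.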
with gr.Blocks("ParityError/Interstellar", fill_width=True, css=custom_css) as demo:
    gr.HTML(
        """
        <div style="text-align: center;">
            <img src="https://huggingface.co/datasets/PrunaAI/documentation-images/resolve/main/inferbench/logo2-cropped.png" style="width: 200px; height: auto; max-width: 100%; margin: 0 auto;">
            <h1>🏋️ InferBench 🏋️</h1>
            <h2>A cost/quality/speed Leaderboard for Inference Providers!</h2>
        </div>
        """
    )
    with gr.Tabs():
        with gr.TabItem("FLUX.1 [dev] Leaderboard"):
            Leaderboard(
                value=df,
                select_columns=df.columns.tolist(),
                datatype=["markdown"] + ["number"] * (len(df.columns.tolist()) - 1),
            )
        with gr.TabItem("FLUX.1 [dev] examples"):
            gr.HTML(
                """
                <iframe src="https://pruna.notion.site/ebd/1d270a039e5f80c6a2a3c00fc0d75ef0" width="100%" height="900" frameborder="0" allowfullscreen />
                """
            )
        with gr.TabItem("About"):
            gr.HTML(
                """
                <iframe src="https://pruna.notion.site/ebd/1d870a039e5f8021aafdd19e844bf2c8" width="100%" height="900" frameborder="0" allowfullscreen />
                """
            )
    with gr.Accordion("🌍 Join the Pruna AI community!", open=False):
        gr.HTML(
            """
            <a rel="nofollow" href="https://twitter.com/PrunaAI"><img alt="Twitter" src="https://img.shields.io/twitter/follow/PrunaAI?style=social"></a>
            <a rel="nofollow" href="https://github.com/PrunaAI/pruna"><img alt="GitHub" src="https://img.shields.io/github/stars/prunaai/pruna"></a>
            <a rel="nofollow" href="https://www.linkedin.com/company/93832878/admin/feed/posts/?feedType=following"><img alt="LinkedIn" src="https://img.shields.io/badge/LinkedIn-Connect-blue"></a>
            <a rel="nofollow" href="https://discord.com/invite/rskEr4BZJx"><img alt="Discord" src="https://img.shields.io/badge/Discord-Join%20Us-blue?style=social&amp;logo=discord"></a>
            <a rel="nofollow" href="https://www.reddit.com/r/PrunaAI/"><img alt="Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/PrunaAI?style=social"></a>
            """
        )
    with gr.Accordion("Citation", open=True):
        gr.Markdown(
            """
            ```bibtex
            @article{InferBench,
                title={InferBench: A Leaderboard for Inference Providers},
                author={PrunaAI},
                year={2025},
                howpublished={\\url{https://huggingface.co/spaces/PrunaAI/InferBench}}
            }
            ```
            """
        )
if __name__ == "__main__":
    demo.launch()
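
# To run this demo locally (assuming gradio, pandas, and gradio_leaderboard are
# installed, and that results.jsonl plus an assets.py exposing custom_css sit
# next to this file):
#   python app.py
# or, for auto-reload during development:
#   gradio app.py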