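# Hugging Face Space: search Hub models by inference and gated status
# using HfApi.list_models, rendering the results as Markdown in Gradio.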
import os
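# On ZeroGPU Spaces the `spaces` package provides the @spaces.GPU decorator;
# outside of Spaces, fall back to a no-op stub so the script also runs locally.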
if os.environ.get("SPACES_ZERO_GPU") is not None:
    import spaces
else:
    class spaces:
        @staticmethod
        def GPU(func):
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)
            return wrapper
import gradio as gr
import subprocess
from huggingface_hub import HfApi

@spaces.GPU
def infer(query: str, sort: str, sort_dir: bool, inference: str, gated: str, appr: list[str]):
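    """List Hub models matching the chosen filters and return them as a Markdown list."""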
    try:
        api = HfApi()
        kwargs = {}
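        # Pass optional filters only when set; `gated` may be True, False, or omitted ("all").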
        if query: kwargs["filter"] = query
        if gated == "gated": kwargs["gated"] = True
        elif gated == "non-gated": kwargs["gated"] = False
        if sort_dir: kwargs["direction"] = -1
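        # list_models returns a lazy, paginated iterator of ModelInfo objects.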
        models = api.list_models(inference=inference, sort=sort, cardData=True, **kwargs)
        md = "### Results:\n"
        for model in models:
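            # model.gated is False, "auto", or "manual"; skip gated models whose
            # approval method is not among the accepted ones.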
            if model.gated and model.gated not in appr: continue
            md += "1. "
            md += f"[{model.id}](https://hf.co/{model.id})"
            md += f" Inference: '{infer}'"
            gated_str = model.gated if model.gated else "false"
            md += f" Gated: '{gated_str}'"
            if model.library_name: md += f" Lib:'{model.library_name}'"
            if model.pipeline_tag: md += f" Pipeline:'{model.pipeline_tag}'"
            if model.last_modified: md += f" LastMod:'{model.last_modified}'"
            if model.likes: md += f" Likes:'{model.likes}'"
            if model.downloads: md += f" DLs:'{model.downloads}'"
            if model.downloads_all_time: md += f" AllDLs:'{model.downloads_all_time}'"
            md += "\n"
        return md
    except Exception as e:
        raise gr.Error(str(e)) from e

with gr.Blocks() as demo:
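    # Search controls; the approval-method filter is hidden but still passed to the handler.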
    query = gr.Textbox(label="Query", value="")
    with gr.Row(equal_height=True):
        infer_status = gr.Radio(label="Inference status", choices=["warm", "cold", "frozen"], value="warm")
        gated_status = gr.Radio(label="Gated status", choices=["gated", "non-gated", "all"], value="non-gated")
        sort = gr.Radio(label="Sort", choices=["last_modified", "likes", "downloads"], value="likes")
        sort_dir = gr.Checkbox(label="Sort in descending order", value=False)
        appr_status = gr.CheckboxGroup(label="Approval method", choices=["auto", "manual"], value=["auto", "manual"], visible=False)
    
    run_button = gr.Button("Search", variant="primary")

    output_md = gr.Markdown("<br><br>")

    run_button.click(infer, [query, sort, sort_dir, infer_status, gated_status, appr_status], [output_md])

demo.launch()