updated interface (#35)
- improved interface (0feb0c292add68ba2ca54dd80128a6b3a7957d2d)
- fixed (53f22e83fd41987e56dc6d97b9630535e5405ff6)
app.py
CHANGED
@@ -68,6 +68,7 @@ with gr.Blocks() as block:
             datatype=DATA_TITLE_TYPE,
             interactive=False,
             visible=True,
+            max_height=2400,
         )

     refresh_button = gr.Button("Refresh")
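For context, `max_height` caps the rendered height of the Gradio dataframe so a long leaderboard scrolls inside the component instead of stretching the whole page. A minimal sketch of the surrounding construction, assuming the component is a `gr.Dataframe` fed from results.csv and that `DATA_TITLE_TYPE` is a per-column datatype list (both inferred from the diff context, not shown in full here):

```python
import gradio as gr
import pandas as pd

# Assumed setup: the Space loads the leaderboard table from results.csv.
df = pd.read_csv("results.csv")
# Assumption: first column is rendered as markdown (hyperlinked names), the rest as numbers.
DATA_TITLE_TYPE = ["markdown"] + ["number"] * (len(df.columns) - 1)

with gr.Blocks() as block:
    data_component = gr.Dataframe(
        value=df,
        datatype=DATA_TITLE_TYPE,
        interactive=False,
        visible=True,
        max_height=2400,  # added in this commit: cap the table at 2400 px and scroll beyond that
    )
    refresh_button = gr.Button("Refresh")

if __name__ == "__main__":
    block.launch()
```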
results.csv
CHANGED
@@ -27,4 +27,4 @@ LLaVE-0.5B,0.894,Self-Reported,59.1,57.4,50.3,59.8,82.9
 UniME(LLaVA-OneVision-7B-LoRA-Res336),8.03,Self-Reported,70.7,66.8,66.6,70.5,90.9
 UniME(LLaVA-1.6-7B-LoRA-LowRes),7.57,Self-Reported,66.6,60.6,52.9,67.9,85.1
 UniME(Phi-3.5-V-LoRA),4.2,Self-Reported,64.2,54.8,55.9,64.5,81.8
-QQMM-embed,8.297,Self-Reported,72.175,70.07,69.52,71.175,87.075
+QQMM-embed,8.297,Self-Reported,72.175,70.07,69.52,71.175,87.075
urls.csv
ADDED
@@ -0,0 +1,24 @@
+Models,URL
+clip-vit-large-patch14,https://huggingface.co/openai/clip-vit-large-patch14
+blip2-opt-2.7b,https://huggingface.co/Salesforce/blip2-opt-2.7b
+siglip-base-patch16-224,https://huggingface.co/google/siglip-base-patch16-224
+open_clip-ViT-L/14,https://github.com/mlfoundations/open_clip
+e5-v,https://huggingface.co/royokong/e5-v
+Magiclens,https://github.com/google-deepmind/magiclens
+MMRet,https://huggingface.co/JUNJIE99/MMRet-large
+VLM2Vec-Phi-3.5-v,https://huggingface.co/TIGER-Lab/VLM2Vec-Full
+VLM2Vec,https://github.com/TIGER-AI-Lab/VLM2Vec
+VLM2Vec (Qwen2-VL-7B-LoRA-HighRes),https://huggingface.co/TIGER-Lab/VLM2Vec-Qwen2VL-7B
+VLM2Vec (Qwen2-VL-2B-LoRA-HighRes),https://huggingface.co/TIGER-Lab/VLM2Vec-Qwen2VL-2B
+UniIR,https://huggingface.co/TIGER-Lab/UniIR
+OpenCLIP-FT,https://doi.org/10.48550/arXiv.2212.07143
+CLIP-FT,https://doi.org/10.48550/arXiv.2103.00020
+mmE5,https://huggingface.co/intfloat/mmE5-mllama-11b-instruct
+gme-Qwen2-VL-2B-Instruct,https://huggingface.co/Alibaba-NLP/gme-Qwen2-VL-2B-Instruct
+MM-Embed,https://huggingface.co/nvidia/MM-Embed
+LLaVE-7B,https://huggingface.co/zhibinlan/LLaVE-7B
+LLaVE-2B,https://huggingface.co/zhibinlan/LLaVE-2B
+LLaVE-0.5B,https://huggingface.co/zhibinlan/LLaVE-0.5B
+UniME(LLaVA-OneVision-7B-LoRA-Res336),https://huggingface.co/DeepGlint-AI/UniME-LLaVA-OneVision-7B
+UniME(LLaVA-1.6-7B-LoRA-LowRes),https://huggingface.co/DeepGlint-AI/UniME-LLaVA-1.6-7B
+UniME(Phi-3.5-V-LoRA),https://huggingface.co/DeepGlint-AI/UniME-Phi3.5-V-4.2B
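urls.csv maps each leaderboard entry name to its model page or paper and is consumed by the utils.py change below. As a hypothetical sanity check, not part of this commit, one could verify that every model listed in results.csv has a matching URL row (the assumption here is only that model names sit in the first column of results.csv):

```python
import pandas as pd

# Hypothetical check: flag leaderboard models that have no entry in urls.csv,
# so name hyperlinking never silently falls back to plain text.
results = pd.read_csv("results.csv")
urls = pd.read_csv("urls.csv")

model_names = set(results.iloc[:, 0])          # first column holds the model names (assumed)
missing = model_names - set(urls["Models"])    # "Models" header comes from urls.csv above
print("Models missing a URL:", sorted(missing) if missing else "none")
```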
utils.py
CHANGED
@@ -101,31 +101,8 @@ Github link: https://github.com/TIGER-AI-Lab/VLM2Vec. \n
 Please send us an email at [email protected], attaching the JSON file. We will review your submission and update the leaderboard accordingly.
 """

-MODEL_URLS = {
-    "clip-vit-large-patch14": "https://huggingface.co/openai/clip-vit-large-patch14",
-    "blip2-opt-2.7b": "https://huggingface.co/Salesforce/blip2-opt-2.7b",
-    "siglip-base-patch16-224": "https://huggingface.co/google/siglip-base-patch16-224",
-    "open_clip-ViT-L/14": "https://github.com/mlfoundations/open_clip",
-    "e5-v": "https://huggingface.co/royokong/e5-v",
-    "Magiclens": "https://github.com/google-deepmind/magiclens",
-    "MMRet": "https://huggingface.co/JUNJIE99/MMRet-large",
-    "VLM2Vec-Phi-3.5-v": "https://huggingface.co/TIGER-Lab/VLM2Vec-Full",
-    "VLM2Vec": "https://github.com/TIGER-AI-Lab/VLM2Vec",
-    "VLM2Vec (Qwen2-VL-7B-LoRA-HighRes)": "https://huggingface.co/TIGER-Lab/VLM2Vec-Qwen2VL-7B",
-    "VLM2Vec (Qwen2-VL-2B-LoRA-HighRes)": "https://huggingface.co/TIGER-Lab/VLM2Vec-Qwen2VL-2B",
-    "UniIR": "https://huggingface.co/TIGER-Lab/UniIR",
-    "OpenCLIP-FT": "https://doi.org/10.48550/arXiv.2212.07143",
-    "CLIP-FT": "https://doi.org/10.48550/arXiv.2103.00020",
-    "mmE5": "https://huggingface.co/intfloat/mmE5-mllama-11b-instruct",
-    "gme-Qwen2-VL-2B-Instruct": "https://huggingface.co/Alibaba-NLP/gme-Qwen2-VL-2B-Instruct",
-    "MM-Embed": "https://huggingface.co/nvidia/MM-Embed",
-    "LLaVE-7B": "https://huggingface.co/zhibinlan/LLaVE-7B",
-    "LLaVE-2B": "https://huggingface.co/zhibinlan/LLaVE-2B",
-    "LLaVE-0.5B": "https://huggingface.co/zhibinlan/LLaVE-0.5B",
-    "UniME(LLaVA-OneVision-7B-LoRA-Res336)": "https://huggingface.co/DeepGlint-AI/UniME-LLaVA-OneVision-7B",
-    "UniME(LLaVA-1.6-7B-LoRA-LowRes)": "https://huggingface.co/DeepGlint-AI/UniME-LLaVA-1.6-7B",
-    "UniME(Phi-3.5-V-LoRA)": "https://huggingface.co/DeepGlint-AI/UniME-Phi3.5-V-4.2B"
-}
+MODEL_URLS = pd.read_csv("urls.csv")
+MODEL_URLS = dict(zip(MODEL_URLS['Models'], MODEL_URLS['URL']))

 def create_hyperlinked_names(df):
     def convert_url(url, model_name):
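The net effect of the utils.py change is that MODEL_URLS is now derived from urls.csv at import time rather than maintained as a hardcoded dict, so adding a model to the leaderboard only requires editing the two CSV files. The bodies of `create_hyperlinked_names` and `convert_url` are not shown in the hunk; the sketch below assumes they follow the common leaderboard pattern of wrapping each model name in a markdown link and falling back to plain text when no URL is known:

```python
import pandas as pd

# After this commit: model URLs are loaded from urls.csv instead of a literal dict.
MODEL_URLS = pd.read_csv("urls.csv")
MODEL_URLS = dict(zip(MODEL_URLS["Models"], MODEL_URLS["URL"]))

def create_hyperlinked_names(df: pd.DataFrame) -> pd.DataFrame:
    """Sketch only: the real helper bodies are not part of this diff."""
    def convert_url(url, model_name):
        # Wrap the name in a markdown link when a URL exists; otherwise keep plain text.
        return f"[{model_name}]({url})" if url else model_name

    df = df.copy()
    # Assumption: the leaderboard's name column is called "Models", matching urls.csv.
    df["Models"] = df["Models"].apply(lambda name: convert_url(MODEL_URLS.get(name), name))
    return df
```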