MINGYISU committed on
Commit 2b86120 · verified · 1 Parent(s): 004eed8
Files changed (4)
  1. .gitignore +2 -0
  2. results.csv +0 -31
  3. urls.csv +0 -26
  4. utils.py +17 -74
.gitignore CHANGED
@@ -11,3 +11,5 @@ eval-results/
 eval-queue-bk/
 eval-results-bk/
 logs/
+.gitignore
+.gradio
results.csv DELETED
@@ -1,31 +0,0 @@
-Models,Model Size(B),Data Source,Overall,Classification,VQA,Retrieval,Grounding
-clip-vit-large-patch14,0.428,TIGER-Lab,37.8,42.8,9.1,53.0,51.8
-blip2-opt-2.7b,3.74,TIGER-Lab,25.2,27.0,4.2,33.9,47.0
-siglip-base-patch16-224,0.203,TIGER-Lab,34.8,40.3,8.4,31.6,59.5
-open_clip-ViT-L/14,0.428,TIGER-Lab,39.7,47.8,10.9,52.3,53.3
-UniIR (BLIP_FF),0.247,TIGER-Lab,42.8,42.1,15.0,60.1,62.2
-UniIR (CLIP_SF),0.428,TIGER-Lab,44.7,44.3,16.2,61.8,65.3
-e5-v,8.36,TIGER-Lab,13.3,21.8,4.9,11.5,19.0
-Magiclens,0.428,TIGER-Lab,27.8,38.8,8.3,35.4,26.0
-CLIP-FT,0.428,TIGER-Lab,45.4,55.2,19.7,53.2,62.2
-OpenCLIP-FT,0.428,TIGER-Lab,47.2,56.0,21.9,55.4,64.1
-VLM2Vec (Phi-3.5-V-FT),4.15,TIGER-Lab,55.9,52.8,50.3,57.8,72.3
-VLM2Vec (Phi-3.5-V-LoRA),4.15,TIGER-Lab,60.1,54.8,54.9,62.3,79.5
-VLM2Vec (LLaVA-1.6-LoRA-LowRes),7.57,TIGER-Lab,55.0,54.7,50.3,56.2,64.0
-VLM2Vec (LLaVA-1.6-LoRA-HighRes),7.57,TIGER-Lab,62.9,61.2,49.9,67.4,86.1
-MMRet-MLLM (LLaVA-1.6),7.57,Self-Reported,44.0,47.2,18.4,56.5,62.2
-MMRet-MLLM (FT),7.57,Self-Reported,64.1,56.0,57.4,69.9,83.6
-mmE5-mllama-11b-instruct,10.6,Self-Reported,69.8,67.6,62.6,71.0,89.6
-mmE5 (w/ 560K synthetic data),10.6,Self-Reported,58.6,60.6,55.7,54.7,72.4
-MM-Embed,8.18,Self-Reported,50.0,48.1,32.3,63.8,57.8
-gme-Qwen2-VL-2B-Instruct,2.21,Self-Reported,55.8,56.9,41.2,67.8,53.4
-VLM2Vec (Qwen2-VL-7B-LoRA-HighRes),8.29,TIGER-Lab,65.8,62.6,57.8,69.9,81.7
-VLM2Vec (Qwen2-VL-2B-LoRA-HighRes),2.21,TIGER-Lab,59.3,59.0,49.4,65.4,73.4
-LLaVE-7B,8.03,Self-Reported,70.3,65.7,65.4,70.9,91.9
-LLaVE-2B,1.95,Self-Reported,65.2,62.1,60.2,65.2,84.9
-LLaVE-0.5B,0.894,Self-Reported,59.1,57.4,50.3,59.8,82.9
-UniME(LLaVA-OneVision-7B-LoRA-Res336),8.03,Self-Reported,70.7,66.8,66.6,70.5,90.9
-UniME(LLaVA-1.6-7B-LoRA-LowRes),7.57,Self-Reported,66.6,60.6,52.9,67.9,85.1
-UniME(Phi-3.5-V-LoRA),4.2,Self-Reported,64.2,54.8,55.9,64.5,81.8
-QQMM-embed,8.297,Self-Reported,72.175,70.07,69.52,71.175,87.075
-B3,8.29,Self-Reported,72.0,70.0,66.5,74.1,84.6
urls.csv DELETED
@@ -1,26 +0,0 @@
-Models,URL
-clip-vit-large-patch14,https://huggingface.co/openai/clip-vit-large-patch14
-blip2-opt-2.7b,https://huggingface.co/Salesforce/blip2-opt-2.7b
-siglip-base-patch16-224,https://huggingface.co/google/siglip-base-patch16-224
-open_clip-ViT-L/14,https://github.com/mlfoundations/open_clip
-e5-v,https://huggingface.co/royokong/e5-v
-Magiclens,https://github.com/google-deepmind/magiclens
-MMRet,https://huggingface.co/JUNJIE99/MMRet-large
-VLM2Vec-Phi-3.5-v,https://huggingface.co/TIGER-Lab/VLM2Vec-Full
-VLM2Vec,https://github.com/TIGER-AI-Lab/VLM2Vec
-VLM2Vec (Qwen2-VL-7B-LoRA-HighRes),https://huggingface.co/TIGER-Lab/VLM2Vec-Qwen2VL-7B
-VLM2Vec (Qwen2-VL-2B-LoRA-HighRes),https://huggingface.co/TIGER-Lab/VLM2Vec-Qwen2VL-2B
-UniIR,https://huggingface.co/TIGER-Lab/UniIR
-OpenCLIP-FT,https://doi.org/10.48550/arXiv.2212.07143
-CLIP-FT,https://doi.org/10.48550/arXiv.2103.00020
-mmE5,https://huggingface.co/intfloat/mmE5-mllama-11b-instruct
-gme-Qwen2-VL-2B-Instruct,https://huggingface.co/Alibaba-NLP/gme-Qwen2-VL-2B-Instruct
-MM-Embed,https://huggingface.co/nvidia/MM-Embed
-LLaVE-7B,https://huggingface.co/zhibinlan/LLaVE-7B
-LLaVE-2B,https://huggingface.co/zhibinlan/LLaVE-2B
-LLaVE-0.5B,https://huggingface.co/zhibinlan/LLaVE-0.5B
-UniME(LLaVA-OneVision-7B-LoRA-Res336),https://huggingface.co/DeepGlint-AI/UniME-LLaVA-OneVision-7B
-UniME(LLaVA-1.6-7B-LoRA-LowRes),https://huggingface.co/DeepGlint-AI/UniME-LLaVA-1.6-7B
-UniME(Phi-3.5-V-LoRA),https://huggingface.co/DeepGlint-AI/UniME-Phi3.5-V-4.2B
-QQMM-embed,https://github.com/QQ-MM/QQMM-embed
-B3,https://huggingface.co/raghavlite/B3_Qwen2_7B
utils.py CHANGED
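Editor's note before the diff: with results.csv and urls.csv removed, the loader below switches from CSV to a single JSON-lines file. A minimal sketch of what get_df()/fetch_data() now presumably consume, assuming each results.jsonl record carries the old results.csv columns plus the model's URL; the record here is rebuilt from the first rows of the deleted files, not taken from this commit.

import io
import pandas as pd

# Hypothetical results.jsonl record: the clip-vit-large-patch14 row from the
# deleted results.csv, merged with its URL from the deleted urls.csv.
record = (
    '{"Models": "clip-vit-large-patch14", "Model Size(B)": 0.428, '
    '"Data Source": "TIGER-Lab", "Overall": 37.8, "Classification": 42.8, '
    '"VQA": 9.1, "Retrieval": 53.0, "Grounding": 51.8, '
    '"URL": "https://huggingface.co/openai/clip-vit-large-patch14"}\n'
)

# Parsed the same way the updated fetch_data() in the diff below does.
df = pd.read_json(io.StringIO(record), orient='records', lines=True)
print(df[["Models", "Overall", "URL"]])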
@@ -25,7 +25,7 @@ DATA_TITLE_TYPE = ['number', 'markdown', 'str', 'markdown', 'number', 'number',
 SUBMISSION_NAME = "MMEB"
 SUBMISSION_URL = os.path.join("https://huggingface.co/spaces/TIGER-Lab/", SUBMISSION_NAME)
 FILE_NAME = "results.csv"
-CSV_DIR = "./results.csv"
+CSV_DIR = "results.csv"
 
 COLUMN_NAMES = MODEL_INFO
 
@@ -103,99 +103,42 @@ Please send us an email at [email protected], attaching the JSON file. We will r
 
 def create_hyperlinked_names(df):
     def convert_url(url, model_name):
-        return f'<a href="{url}">{model_name}</a>'
-
-    def add_link_to_model_name(model_name):
-        if "VLM2Vec (Phi-3.5-V-" in model_name:
-            url = MODEL_URLS["VLM2Vec-Phi-3.5-v"]
-            return convert_url(url, model_name)
-        if "VLM2Vec (LLaVA-1.6-LoRA-" in model_name:
-            url = MODEL_URLS["VLM2Vec"]
-            return convert_url(url, model_name)
-        if "UniIR" in model_name:
-            url = MODEL_URLS["UniIR"]
-            return convert_url(url, model_name)
-        if "mmE5" in model_name:
-            url = MODEL_URLS["mmE5"]
-            return convert_url(url, model_name)
-        if "MMRet" in model_name:
-            url = MODEL_URLS["MMRet"]
-            return convert_url(url, model_name)
-        return convert_url(MODEL_URLS[model_name], model_name) if model_name in MODEL_URLS else model_name
+        return f'<a href="{url}">{model_name}</a>' if url is not None else model_name
+
+    def add_link_to_model_name(row):
+        row['Models'] = convert_url(row['URL'], row['Models'])
+        return row
 
     df = df.copy()
-    df['Models'] = df['Models'].apply(add_link_to_model_name)
+    df = df.apply(add_link_to_model_name, axis=1)
     return df
 
-def fetch_data(url: str) -> pd.DataFrame:
-    # fetch the leaderboard data
-    if url is None:
+def fetch_data(file: str) -> pd.DataFrame:
+    # fetch the leaderboard data from remote
+    if file is None:
         raise ValueError("URL Not Provided")
-    url = f"https://huggingface.co/spaces/TIGER-Lab/MMEB/resolve/main/{url}"
+    url = f"https://huggingface.co/spaces/TIGER-Lab/MMEB/resolve/main/{file}"
     print(f"Fetching data from {url}")
     response = requests.get(url)
    if response.status_code != 200:
         raise requests.HTTPError(f"Failed to fetch data: HTTP status code {response.status_code}")
-    return pd.read_csv(io.StringIO(response.text))
-
-def get_urls(csv: str='urls.csv') -> dict:
-    urls = fetch_data(csv)
-    return dict(zip(urls['Models'], urls['URL']))
-
-MODEL_URLS = get_urls()
+    return pd.read_json(io.StringIO(response.text), orient='records', lines=True)
 
-def get_df(csv="results.csv"):
-    df = fetch_data(csv)
-    df.to_csv(CSV_DIR, index=False) # update local file
+def get_df(file="results.jsonl"):
+    df = fetch_data(file)
+    print(df.columns)
+    print('URL' in df.columns)
+    print(df)
     df['Model Size(B)'] = df['Model Size(B)'].apply(process_model_size)
     df = df.sort_values(by=['Overall'], ascending=False)
     df = create_hyperlinked_names(df)
     df['Rank'] = range(1, len(df) + 1)
     return df
 
-
-def add_new_eval(input_file):
-    if input_file is None:
-        return "Error! Empty file!"
-
-    # Load the input json file
-    upload_data = json.loads(input_file)
-    print("upload_data:\n", upload_data)
-    data_row = [f'{upload_data["Model"]}']
-    for col in ['Overall', 'Model Size(B)'] + TASKS:
-        if not col in upload_data.keys():
-            return f"Error! Missing {col} column!"
-        data_row += [upload_data[col]]
-    if 'URL' in upload_data.keys():
-        MODEL_URLS[upload_data['Model']] = upload_data['URL']
-    print("data_row:\n", data_row)
-    submission_repo = Repository(local_dir=SUBMISSION_NAME, clone_from=SUBMISSION_URL,
-                                 use_auth_token=HF_TOKEN, repo_type="space")
-    submission_repo.git_pull()
-
-    # Track submitted models
-    already_submitted = []
-    with open(CSV_DIR, mode='r') as file:
-        reader = csv.reader(file, delimiter=',')
-        for row in reader:
-            already_submitted.append(row[0])
-    # if not in the existing models list, add it to the csv file
-    if data_row[0] not in already_submitted:
-        with open(CSV_DIR, mode='a', newline='') as file:
-            writer = csv.writer(file)
-            writer.writerow(data_row)
-
-        submission_repo.push_to_hub()
-        print('Submission Successful')
-    else:
-        print('The model already exists in the leaderboard!')
-
 def refresh_data():
     df = get_df()
-    MODEL_URLS = get_urls()
     return df[COLUMN_NAMES]
 
-
 def search_and_filter_models(df, query, min_size, max_size):
     filtered_df = df.copy()
 
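A short hedged sketch of the reworked name linking, for reference: create_hyperlinked_names no longer consults a hard-coded MODEL_URLS table; each row is now expected to carry its own URL column, and a missing URL leaves the plain model name. The snippet reproduces the new helper from the diff above and runs it on two illustrative rows (the second model name is made up).

import pandas as pd

def create_hyperlinked_names(df):
    def convert_url(url, model_name):
        # No URL -> keep the bare model name instead of an empty link.
        return f'<a href="{url}">{model_name}</a>' if url is not None else model_name

    def add_link_to_model_name(row):
        row['Models'] = convert_url(row['URL'], row['Models'])
        return row

    df = df.copy()
    df = df.apply(add_link_to_model_name, axis=1)
    return df

# Illustrative input: one row with a URL (from the old urls.csv), one without.
df = pd.DataFrame([
    {'Models': 'clip-vit-large-patch14',
     'URL': 'https://huggingface.co/openai/clip-vit-large-patch14'},
    {'Models': 'hypothetical-model', 'URL': None},
])
print(create_hyperlinked_names(df)['Models'].tolist())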