Commit
·
3521728
1
Parent(s):
d10da9f
Remove duplicates
Browse files
Only show the most recent model evaluation
backend/app/services/leaderboard.py
CHANGED
@@ -54,7 +54,7 @@ class LeaderboardService:
|
|
54 |
logger.info(LogFormatter.section("FORMATTING LEADERBOARD DATA"))
|
55 |
|
56 |
raw_data = await self.fetch_raw_data()
|
57 |
-
formatted_data = []
|
58 |
type_counts = {}
|
59 |
error_count = 0
|
60 |
|
@@ -65,11 +65,23 @@ class LeaderboardService:
|
|
65 |
for i, item in enumerate(raw_data, 1):
|
66 |
try:
|
67 |
formatted_item = await self.transform_data(item)
|
68 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
69 |
|
70 |
# Count model types
|
71 |
-
|
72 |
-
|
|
|
73 |
|
74 |
except Exception as e:
|
75 |
error_count += 1
|
@@ -104,7 +116,7 @@ class LeaderboardService:
|
|
104 |
for line in LogFormatter.stats(type_stats):
|
105 |
logger.info(line)
|
106 |
|
107 |
-
return formatted_data
|
108 |
|
109 |
except Exception as e:
|
110 |
logger.error(LogFormatter.error("Failed to format leaderboard data", e))
|
@@ -123,7 +135,7 @@ class LeaderboardService:
|
|
123 |
|
124 |
# Create unique ID combining model name, precision, sha and chat template status
|
125 |
|
126 |
-
unique_id = f"{data.get('fullname', 'Unknown')}
|
127 |
|
128 |
evaluations = {
|
129 |
"safetensors": {
|
|
|
54 |
logger.info(LogFormatter.section("FORMATTING LEADERBOARD DATA"))
|
55 |
|
56 |
raw_data = await self.fetch_raw_data()
|
57 |
+
formatted_data = {}
|
58 |
type_counts = {}
|
59 |
error_count = 0
|
60 |
|
|
|
65 |
for i, item in enumerate(raw_data, 1):
|
66 |
try:
|
67 |
formatted_item = await self.transform_data(item)
|
68 |
+
unique_id = formatted_item["id"]
|
69 |
+
|
70 |
+
# Check if entry already exists and keep the one with the latest submission date
|
71 |
+
if unique_id in formatted_data:
|
72 |
+
existing_date = formatted_data[unique_id]["metadata"].get("submission_date")
|
73 |
+
new_date = formatted_item["metadata"].get("submission_date")
|
74 |
+
|
75 |
+
# If new item has a newer submission date (or existing has no date), replace it
|
76 |
+
if not existing_date or (new_date and new_date > existing_date):
|
77 |
+
formatted_data[unique_id] = formatted_item
|
78 |
+
else:
|
79 |
+
formatted_data[unique_id] = formatted_item
|
80 |
|
81 |
# Count model types
|
82 |
+
if unique_id not in formatted_data or formatted_data[unique_id] == formatted_item:
|
83 |
+
model_type = formatted_item["model"]["type"]
|
84 |
+
type_counts[model_type] = type_counts.get(model_type, 0) + 1
|
85 |
|
86 |
except Exception as e:
|
87 |
error_count += 1
|
|
|
116 |
for line in LogFormatter.stats(type_stats):
|
117 |
logger.info(line)
|
118 |
|
119 |
+
return list(formatted_data.values())
|
120 |
|
121 |
except Exception as e:
|
122 |
logger.error(LogFormatter.error("Failed to format leaderboard data", e))
|
|
|
135 |
|
136 |
# Create unique ID combining model name, precision, sha and chat template status
|
137 |
|
138 |
+
unique_id = f"{data.get('fullname', 'Unknown')}"
|
139 |
|
140 |
evaluations = {
|
141 |
"safetensors": {
|
backend/app/services/models.py
CHANGED
@@ -135,89 +135,6 @@ class ModelService(HuggingFaceService):
|
|
135 |
self._init_done = True
|
136 |
logger.info(LogFormatter.success("Initialization complete"))
|
137 |
|
138 |
-
async def _download_and_process_file(
|
139 |
-
self, file: str, session: aiohttp.ClientSession, progress: ProgressTracker
|
140 |
-
) -> Optional[Dict]:
|
141 |
-
"""Download and process a file asynchronously"""
|
142 |
-
try:
|
143 |
-
# Build file URL
|
144 |
-
url = f"https://huggingface.co/datasets/{QUEUE_REPO}/resolve/main/{file}"
|
145 |
-
headers = build_hf_headers(token=self.token)
|
146 |
-
|
147 |
-
# Download file
|
148 |
-
async with session.get(url, headers=headers) as response:
|
149 |
-
if response.status != 200:
|
150 |
-
logger.error(
|
151 |
-
LogFormatter.error(
|
152 |
-
f"Failed to download {file}", f"HTTP {response.status}"
|
153 |
-
)
|
154 |
-
)
|
155 |
-
progress.update()
|
156 |
-
return None
|
157 |
-
|
158 |
-
try:
|
159 |
-
# First read content as text
|
160 |
-
text_content = await response.text()
|
161 |
-
# Then parse JSON
|
162 |
-
content = json.loads(text_content)
|
163 |
-
except json.JSONDecodeError as e:
|
164 |
-
logger.error(
|
165 |
-
LogFormatter.error(f"Failed to decode JSON from {file}", e)
|
166 |
-
)
|
167 |
-
progress.update()
|
168 |
-
return None
|
169 |
-
|
170 |
-
# Get status and determine target status
|
171 |
-
status = content.get("status", "PENDING").upper()
|
172 |
-
target_status = None
|
173 |
-
status_map = {
|
174 |
-
"PENDING": ["PENDING"],
|
175 |
-
"EVALUATING": ["RUNNING"],
|
176 |
-
"FINISHED": ["FINISHED"],
|
177 |
-
}
|
178 |
-
|
179 |
-
for target, source_statuses in status_map.items():
|
180 |
-
if status in source_statuses:
|
181 |
-
target_status = target
|
182 |
-
break
|
183 |
-
|
184 |
-
if not target_status:
|
185 |
-
progress.update()
|
186 |
-
return None
|
187 |
-
|
188 |
-
# Calculate wait time
|
189 |
-
try:
|
190 |
-
submit_time = datetime.fromisoformat(
|
191 |
-
content["submitted_time"].replace("Z", "+00:00")
|
192 |
-
)
|
193 |
-
if submit_time.tzinfo is None:
|
194 |
-
submit_time = submit_time.replace(tzinfo=timezone.utc)
|
195 |
-
current_time = datetime.now(timezone.utc)
|
196 |
-
wait_time = current_time - submit_time
|
197 |
-
|
198 |
-
model_info = {
|
199 |
-
"name": content["model"],
|
200 |
-
"submitter": content.get("sender", "Unknown"),
|
201 |
-
"revision": content["revision"],
|
202 |
-
"wait_time": f"{wait_time.total_seconds():.1f}s",
|
203 |
-
"submission_time": content["submitted_time"],
|
204 |
-
"status": target_status,
|
205 |
-
"precision": content.get("precision", "Unknown"),
|
206 |
-
}
|
207 |
-
|
208 |
-
progress.update()
|
209 |
-
return model_info
|
210 |
-
|
211 |
-
except (ValueError, TypeError) as e:
|
212 |
-
logger.error(LogFormatter.error(f"Failed to process {file}", e))
|
213 |
-
progress.update()
|
214 |
-
return None
|
215 |
-
|
216 |
-
except Exception as e:
|
217 |
-
logger.error(LogFormatter.error(f"Failed to load {file}", e))
|
218 |
-
progress.update()
|
219 |
-
return None
|
220 |
-
|
221 |
async def _refresh_models_cache(self):
|
222 |
"""Refresh the models cache"""
|
223 |
try:
|
@@ -298,12 +215,8 @@ class ModelService(HuggingFaceService):
|
|
298 |
"precision": content.get("precision", "Unknown"),
|
299 |
}
|
300 |
|
301 |
-
# Use
|
302 |
-
key = (
|
303 |
-
content["model"],
|
304 |
-
content["revision"],
|
305 |
-
content.get("precision", "Unknown"),
|
306 |
-
)
|
307 |
if (
|
308 |
key not in model_submissions
|
309 |
or submit_time
|
|
|
135 |
self._init_done = True
|
136 |
logger.info(LogFormatter.success("Initialization complete"))
|
137 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
138 |
async def _refresh_models_cache(self):
|
139 |
"""Refresh the models cache"""
|
140 |
try:
|
|
|
215 |
"precision": content.get("precision", "Unknown"),
|
216 |
}
|
217 |
|
218 |
+
# Use model_id as key to track latest submission
|
219 |
+
key = content["model"]
|
|
|
|
|
|
|
|
|
220 |
if (
|
221 |
key not in model_submissions
|
222 |
or submit_time
|
frontend/src/pages/AddModelPage/components/ModelSubmissionForm/ModelSubmissionForm.js
CHANGED
@@ -300,146 +300,6 @@ function ModelSubmissionForm({ user, isAuthenticated }) {
|
|
300 |
/>
|
301 |
</Grid>
|
302 |
|
303 |
-
<Grid item xs={12} sm={4}>
|
304 |
-
<TextField
|
305 |
-
fullWidth
|
306 |
-
name="revision"
|
307 |
-
label="Revision commit"
|
308 |
-
value={formData.revision}
|
309 |
-
onChange={handleChange}
|
310 |
-
helperText="Default: main"
|
311 |
-
InputProps={{
|
312 |
-
endAdornment: (
|
313 |
-
<InfoIconWithTooltip tooltip={HELP_TEXTS.revision} />
|
314 |
-
),
|
315 |
-
}}
|
316 |
-
/>
|
317 |
-
</Grid>
|
318 |
-
|
319 |
-
{/* Model Configuration */}
|
320 |
-
{/*<Grid item xs={12}>
|
321 |
-
<Stack direction="row" spacing={1} alignItems="center">
|
322 |
-
<Typography variant="h6">Model Configuration</Typography>
|
323 |
-
</Stack>
|
324 |
-
</Grid>
|
325 |
-
|
326 |
-
<Grid item xs={12} sm={6}>
|
327 |
-
<FormControl fullWidth>
|
328 |
-
<InputLabel>Model Type</InputLabel>
|
329 |
-
<Select
|
330 |
-
name="modelType"
|
331 |
-
value={formData.modelType}
|
332 |
-
onChange={handleChange}
|
333 |
-
label="Model Type"
|
334 |
-
endAdornment={
|
335 |
-
<InfoIconWithTooltip
|
336 |
-
tooltip={HELP_TEXTS.modelType}
|
337 |
-
sx={{ mr: 2 }}
|
338 |
-
/>
|
339 |
-
}
|
340 |
-
>
|
341 |
-
{modelTypeOptions.map((type) => (
|
342 |
-
<MenuItem key={type.value} value={type.value}>
|
343 |
-
{type.label}
|
344 |
-
</MenuItem>
|
345 |
-
))}
|
346 |
-
</Select>
|
347 |
-
</FormControl>
|
348 |
-
</Grid>*/}
|
349 |
-
|
350 |
-
|
351 |
-
{/*<Grid item xs={12} sm={6}>
|
352 |
-
<Stack
|
353 |
-
direction="row"
|
354 |
-
spacing={2}
|
355 |
-
alignItems="center"
|
356 |
-
sx={{ height: "100%" }}
|
357 |
-
>
|
358 |
-
<FormControlLabel
|
359 |
-
control={
|
360 |
-
<Switch
|
361 |
-
name="useChatTemplate"
|
362 |
-
checked={formData.useChatTemplate}
|
363 |
-
onChange={handleChange}
|
364 |
-
/>
|
365 |
-
}
|
366 |
-
label="Use Chat Template"
|
367 |
-
/>
|
368 |
-
<InfoIconWithTooltip tooltip={HELP_TEXTS.chatTemplate} />
|
369 |
-
</Stack>
|
370 |
-
</Grid>*/}
|
371 |
-
|
372 |
-
{/*
|
373 |
-
<Grid item xs={12} sm={6}>
|
374 |
-
<FormControl fullWidth>
|
375 |
-
<InputLabel>Precision</InputLabel>
|
376 |
-
<Select
|
377 |
-
name="precision"
|
378 |
-
value={formData.precision}
|
379 |
-
onChange={handleChange}
|
380 |
-
label="Precision"
|
381 |
-
endAdornment={
|
382 |
-
<InfoIconWithTooltip
|
383 |
-
tooltip={HELP_TEXTS.precision}
|
384 |
-
sx={{ mr: 2 }}
|
385 |
-
/>
|
386 |
-
}
|
387 |
-
>
|
388 |
-
{SUBMISSION_PRECISIONS.map((option) => (
|
389 |
-
<MenuItem key={option.value} value={option.value}>
|
390 |
-
{option.label}
|
391 |
-
</MenuItem>
|
392 |
-
))}
|
393 |
-
</Select>
|
394 |
-
</FormControl>
|
395 |
-
</Grid>
|
396 |
-
|
397 |
-
<Grid item xs={12} sm={6}>
|
398 |
-
<FormControl fullWidth>
|
399 |
-
<InputLabel>Weights Type</InputLabel>
|
400 |
-
<Select
|
401 |
-
name="weightsType"
|
402 |
-
value={formData.weightsType}
|
403 |
-
onChange={handleChange}
|
404 |
-
label="Weights Type"
|
405 |
-
endAdornment={
|
406 |
-
<InfoIconWithTooltip
|
407 |
-
tooltip={HELP_TEXTS.weightsType}
|
408 |
-
sx={{ mr: 2 }}
|
409 |
-
/>
|
410 |
-
}
|
411 |
-
>
|
412 |
-
{WEIGHT_TYPES.map((type) => (
|
413 |
-
<MenuItem key={type.value} value={type.value}>
|
414 |
-
{type.label}
|
415 |
-
</MenuItem>
|
416 |
-
))}
|
417 |
-
</Select>
|
418 |
-
</FormControl>
|
419 |
-
</Grid>
|
420 |
-
|
421 |
-
{formData.weightsType !== "Original" && (
|
422 |
-
<Grid item xs={12}>
|
423 |
-
<TextField
|
424 |
-
fullWidth
|
425 |
-
required={
|
426 |
-
formData.weightsType === "Delta" ||
|
427 |
-
formData.weightsType === "Adapter"
|
428 |
-
}
|
429 |
-
name="baseModel"
|
430 |
-
label="Base Model"
|
431 |
-
value={formData.baseModel}
|
432 |
-
onChange={handleChange}
|
433 |
-
InputProps={{
|
434 |
-
endAdornment: (
|
435 |
-
<InfoIconWithTooltip tooltip={HELP_TEXTS.baseModel} />
|
436 |
-
),
|
437 |
-
}}
|
438 |
-
/>
|
439 |
-
</Grid>
|
440 |
-
)}
|
441 |
-
*/}
|
442 |
-
|
443 |
{/* Submit Button */}
|
444 |
<Grid item xs={12}>
|
445 |
<Box
|
|
|
300 |
/>
|
301 |
</Grid>
|
302 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
303 |
{/* Submit Button */}
|
304 |
<Grid item xs={12}>
|
305 |
<Box
|