Update app-backup.py

app-backup.py  CHANGED  (+120 -116)
(The removed side of this diff was captured only in fragments. In outline, the previous get_models_data() fetched a top-3,000 ranking, stored each model's position under a 'rank' key with a 'Not in top 3000' fallback, made individual API calls only for models that fell outside that ranking, and built the chart, cards, and dataframe from those fields. The updated lines follow under their hunk headers.)

@@ -292,97 +292,94 @@ target_models = {
    "sel303/llama3-diverce-ver1.6": "https://huggingface.co/sel303/llama3-diverce-ver1.6"
}

def get_models_data(progress=gr.Progress()):
    """Fetch model data."""
    def normalize_model_id(model_id):
        """Normalize a model ID for ranking lookups."""
        return model_id.strip().lower()

    try:
        progress(0, desc="Fetching global rankings...")

        # Fetch the full model list, sorted by likes
        global_params = {
            'full': 'true',
            'limit': 10000,  # extended to cover the top 10,000
            'sort': 'likes',
            'direction': -1
        }

        global_response = requests.get(
            "https://huggingface.co/api/models",
            headers={'Accept': 'application/json'},
            params=global_params
        )

        if global_response.status_code != 200:
            print(f"Failed to fetch global rankings: {global_response.status_code}")
            return create_error_plot(), "<div>Failed to fetch global ranking data.</div>", pd.DataFrame()

        all_global_models = global_response.json()
        print(f"Fetched {len(all_global_models)} models")

        # Build the global rank map, keyed by normalized model ID
        global_ranks = {}
        for idx, model in enumerate(all_global_models, 1):
            model_id = normalize_model_id(model.get('id', ''))
            global_ranks[model_id] = {
                'rank': idx,
                'downloads': model.get('downloads', 0),
                'likes': model.get('likes', 0),
                'title': model.get('title', 'No Title')
            }

        # Fetch detailed information for each entry in target_models
        filtered_models = []
        for model_id in target_models.keys():
            try:
                # Per-model API call for exact counts
                model_url_api = f"https://huggingface.co/api/models/{model_id}"
                response = requests.get(model_url_api, headers={'Accept': 'application/json'})

                if response.status_code == 200:
                    model_data = response.json()
                    normalized_id = normalize_model_id(model_id)

                    # Combine the global rank with the per-model details
                    rank_info = global_ranks.get(normalized_id, {})
                    model_info = {
                        'id': model_id,
                        'global_rank': rank_info.get('rank', 'Not in top 10000'),
                        'downloads': model_data.get('downloads', 0),
                        'likes': model_data.get('likes', 0),
                        'title': model_data.get('title', 'No Title')
                    }
                    filtered_models.append(model_info)
                    print(f"Model {model_id}: Rank={model_info['global_rank']}, Likes={model_info['likes']}, Downloads={model_info['downloads']}")
                else:
                    print(f"Failed to fetch data for {model_id}: {response.status_code}")
                    filtered_models.append({
                        'id': model_id,
                        'global_rank': 'Not in top 10000',
                        'downloads': 0,
                        'likes': 0,
                        'title': 'No Title'
                    })
            except Exception as e:
                print(f"Error fetching data for {model_id}: {str(e)}")
                filtered_models.append({
                    'id': model_id,
                    'global_rank': 'Not in top 10000',
                    'downloads': 0,
                    'likes': 0,
                    'title': 'No Title'
                })

        # Sort by global rank; unranked models go last
        filtered_models.sort(key=lambda x: float('inf') if isinstance(x['global_rank'], str) else x['global_rank'])

        if not filtered_models:
            return create_error_plot(), "<div>No model data could be found.</div>", pd.DataFrame()

        progress(0.3, desc="Creating visualization...")

@@ -391,62 +388,72 @@ def get_models_data(progress=gr.Progress()):

        # Prepare the data
        ids = [model['id'] for model in filtered_models]
        ranks = [model['global_rank'] for model in filtered_models]
        likes = [model['likes'] for model in filtered_models]
        downloads = [model['downloads'] for model in filtered_models]

        # Keep only the models that actually appear in the ranking
        valid_indices = [i for i, rank in enumerate(ranks) if isinstance(rank, (int, float))]

        if valid_indices:  # build the chart only when ranked models exist
            valid_ids = [ids[i] for i in valid_indices]
            valid_ranks = [ranks[i] for i in valid_indices]
            valid_likes = [likes[i] for i in valid_indices]
            valid_downloads = [downloads[i] for i in valid_indices]

            # Invert the Y-axis values so that rank 1 gets the tallest bar
            y_values = [10001 - r if isinstance(r, (int, float)) else 0 for r in valid_ranks]

            # Bar chart
            fig.add_trace(go.Bar(
                x=valid_ids,
                y=y_values,
                text=[f"Global Rank: {r}<br>Likes: {format(l, ',')}<br>Downloads: {format(d, ',')}"
                      for r, l, d in zip(valid_ranks, valid_likes, valid_downloads)],
                textposition='auto',
                marker_color='rgb(158,202,225)',
                opacity=0.8
            ))

            fig.update_layout(
                title={
                    'text': 'Hugging Face Models Global Rankings (by Likes)',
                    'y': 0.95,
                    'x': 0.5,
                    'xanchor': 'center',
                    'yanchor': 'top'
                },
                xaxis_title='Model ID',
                yaxis_title='Global Rank',
                yaxis=dict(
                    ticktext=[str(i) for i in range(1, 10001, 500)],
                    tickvals=[10001 - i for i in range(1, 10001, 500)],
                    range=[0, 10000]
                ),
                height=800,
                showlegend=False,
                template='plotly_white',
                xaxis_tickangle=-45
            )

        progress(0.6, desc="Creating model cards...")

        # Create the HTML cards
        html_content = """
        <div style='padding: 20px; background: #f5f5f5;'>
            <h2 style='color: #2c3e50;'>Models Global Rankings (by Likes)</h2>
            <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
        """

        for model in filtered_models:
            model_id = model['id']
            global_rank = model['global_rank']
            likes = model['likes']
            downloads = model['downloads']
            title = model.get('title', 'No Title')

            rank_display = f"Global Rank #{global_rank}" if isinstance(global_rank, (int, float)) else global_rank

            html_content += f"""
            <div style='
                background: white;

@@ -455,10 +462,10 @@ def get_models_data(progress=gr.Progress()):

                box-shadow: 0 2px 4px rgba(0,0,0,0.1);
                transition: transform 0.2s;
            '>
                <h3 style='color: #34495e;'>{rank_display} - {model_id}</h3>
                <p style='color: #2c3e50;'>{title}</p>
                <p style='color: #7f8c8d;'>👍 Likes: {format(likes, ',')}</p>
                <p style='color: #7f8c8d;'>⬇️ Downloads: {format(downloads, ',')}</p>
                <a href='{target_models[model_id]}'
                   target='_blank'
                   style='

@@ -478,17 +485,14 @@ def get_models_data(progress=gr.Progress()):

        html_content += "</div></div>"

        # Build the dataframe
        df_data = [{
            'Global Rank': model['global_rank'],
            'Model ID': model['id'],
            'Title': model.get('title', 'No Title'),
            'Likes': format(model['likes'], ','),
            'Downloads': format(model['downloads'], ','),
            'URL': target_models[model['id']]
        } for model in filtered_models]

        df = pd.DataFrame(df_data)
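create_error_plot() is called in the error branches above but is defined elsewhere in app-backup.py and is not touched by this commit. A minimal sketch of such a helper, assuming it only needs to return an empty Plotly figure carrying an error message (the annotation text here is illustrative):

import plotly.graph_objects as go

def create_error_plot():
    # Empty figure with a centered error annotation, used as the chart fallback
    fig = go.Figure()
    fig.add_annotation(text="Failed to load model data",
                       xref="paper", yref="paper", x=0.5, y=0.5,
                       showarrow=False, font=dict(size=16))
    fig.update_layout(template='plotly_white', height=400)
    return fig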
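get_models_data() returns a Plotly figure, an HTML string, and a pandas DataFrame, which map onto Gradio's Plot, HTML, and Dataframe components; the progress argument is injected automatically by Gradio when the function is used as an event handler. A minimal wiring sketch, assuming a Blocks layout with a refresh button (component names are illustrative, not taken from this file):

import gradio as gr

with gr.Blocks() as demo:
    refresh_btn = gr.Button("Refresh rankings")
    plot_out = gr.Plot(label="Global rankings")
    cards_out = gr.HTML()
    table_out = gr.Dataframe(label="Model details")

    # The three return values feed the three output components in order.
    refresh_btn.click(fn=get_models_data, outputs=[plot_out, cards_out, table_out])

demo.launch()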