openfree committed
Commit 99a3580 · verified · 1 Parent(s): 94f7004

Delete app-backup-datasets.py

Files changed (1)
  1. app-backup-datasets.py +0 -1318
app-backup-datasets.py DELETED
@@ -1,1318 +0,0 @@
1
- import gradio as gr
2
- import requests
3
- import pandas as pd
4
- import plotly.graph_objects as go
5
- from datetime import datetime
6
- import os
7
-
8
- HF_TOKEN = os.getenv("HF_TOKEN")
9
-
10
- target_models = {
11
- "openfree/flux-lora-korea-palace": "https://huggingface.co/openfree/flux-lora-korea-palace",
12
- "seawolf2357/hanbok": "https://huggingface.co/seawolf2357/hanbok",
13
- "seawolf2357/ntower": "https://huggingface.co/seawolf2357/ntower",
14
-
15
- "openfree/claude-monet": "https://huggingface.co/openfree/claude-monet",
16
-
17
- "LGAI-EXAONE/EXAONE-3.5-32B-Instruct": "https://huggingface.co/LGAI-EXAONE/EXAONE-3.5-32B-Instruct",
18
- "LGAI-EXAONE/EXAONE-3.5-2.4B-Instruct": "https://huggingface.co/LGAI-EXAONE/EXAONE-3.5-2.4B-Instruct",
19
- "LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct": "https://huggingface.co/LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct",
20
- "ginipick/flux-lora-eric-cat": "https://huggingface.co/ginipick/flux-lora-eric-cat",
21
- "seawolf2357/flux-lora-car-rolls-royce": "https://huggingface.co/seawolf2357/flux-lora-car-rolls-royce",
22
- "moreh/Llama-3-Motif-102B-Instruct": "https://huggingface.co/moreh/Llama-3-Motif-102B-Instruct",
23
- "OnomaAIResearch/Illustrious-xl-early-release-v0": "https://huggingface.co/OnomaAIResearch/Illustrious-xl-early-release-v0",
24
- "upstage/solar-pro-preview-instruct": "https://huggingface.co/upstage/solar-pro-preview-instruct",
25
- "NCSOFT/VARCO-VISION-14B": "https://huggingface.co/NCSOFT/VARCO-VISION-14B",
26
- "NCSOFT/Llama-VARCO-8B-Instruct": "https://huggingface.co/NCSOFT/Llama-VARCO-8B-Instruct",
27
- "NCSOFT/VARCO-VISION-14B-HF": "https://huggingface.co/NCSOFT/VARCO-VISION-14B-HF",
28
- "KAERI-MLP/llama-3.1-Korean-AtomicGPT-Bllossom-8B": "https://huggingface.co/KAERI-MLP/llama-3.1-Korean-AtomicGPT-Bllossom-8B",
29
- "dnotitia/Llama-DNA-1.0-8B-Instruct": "https://huggingface.co/dnotitia/Llama-DNA-1.0-8B-Instruct",
30
- "Bllossom/llama-3.2-Korean-Bllossom-3B": "https://huggingface.co/Bllossom/llama-3.2-Korean-Bllossom-3B",
31
-
32
- "unidocs/llama-3.1-8b-komedic-instruct": "https://huggingface.co/unidocs/llama-3.1-8b-komedic-instruct",
33
- "unidocs/llama-3.2-3b-komedic-instruct": "https://huggingface.co/unidocs/llama-3.2-3b-komedic-instruct",
34
- "etri-lirs/eagle-3b-preview": "https://huggingface.co/etri-lirs/eagle-3b-preview",
35
- "kakaobrain/kogpt": "https://huggingface.co/kakaobrain/kogpt",
36
-
37
- "Saxo/Linkbricks-Horizon-AI-Korean-Gemma-2-sft-dpo-27B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-Gemma-2-sft-dpo-27B",
38
- "AALF/gemma-2-27b-it-SimPO-37K": "https://huggingface.co/AALF/gemma-2-27b-it-SimPO-37K",
39
- "nbeerbower/mistral-nemo-wissenschaft-12B": "https://huggingface.co/nbeerbower/mistral-nemo-wissenschaft-12B",
40
- "Saxo/Linkbricks-Horizon-AI-Korean-Mistral-Nemo-sft-dpo-12B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-Mistral-Nemo-sft-dpo-12B",
41
- "princeton-nlp/gemma-2-9b-it-SimPO": "https://huggingface.co/princeton-nlp/gemma-2-9b-it-SimPO",
42
- "migtissera/Tess-v2.5-Gemma-2-27B-alpha": "https://huggingface.co/migtissera/Tess-v2.5-Gemma-2-27B-alpha",
43
- "DeepMount00/Llama-3.1-8b-Ita": "https://huggingface.co/DeepMount00/Llama-3.1-8b-Ita",
44
- "cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b": "https://huggingface.co/cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b",
45
- "ai-human-lab/EEVE-Korean_Instruct-10.8B-expo": "https://huggingface.co/ai-human-lab/EEVE-Korean_Instruct-10.8B-expo",
46
- "VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct": "https://huggingface.co/VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct",
47
- "Saxo/Linkbricks-Horizon-AI-Korean-llama-3.1-sft-dpo-8B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-llama-3.1-sft-dpo-8B",
48
- "AIDX-ktds/ktdsbaseLM-v0.12-based-on-openchat3.5": "https://huggingface.co/AIDX-ktds/ktdsbaseLM-v0.12-based-on-openchat3.5",
49
- "mlabonne/Daredevil-8B-abliterated": "https://huggingface.co/mlabonne/Daredevil-8B-abliterated",
50
- "ENERGY-DRINK-LOVE/eeve_dpo-v3": "https://huggingface.co/ENERGY-DRINK-LOVE/eeve_dpo-v3",
51
- "migtissera/Trinity-2-Codestral-22B": "https://huggingface.co/migtissera/Trinity-2-Codestral-22B",
52
- "Saxo/Linkbricks-Horizon-AI-Korean-llama3.1-sft-rlhf-dpo-8B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-llama3.1-sft-rlhf-dpo-8B",
53
- "mlabonne/Daredevil-8B-abliterated-dpomix": "https://huggingface.co/mlabonne/Daredevil-8B-abliterated-dpomix",
54
- "yanolja/EEVE-Korean-Instruct-10.8B-v1.0": "https://huggingface.co/yanolja/EEVE-Korean-Instruct-10.8B-v1.0",
55
- "vicgalle/Configurable-Llama-3.1-8B-Instruct": "https://huggingface.co/vicgalle/Configurable-Llama-3.1-8B-Instruct",
56
- "T3Q-LLM/T3Q-LLM1-sft1.0-dpo1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM1-sft1.0-dpo1.0",
57
- "Eurdem/Defne-llama3.1-8B": "https://huggingface.co/Eurdem/Defne-llama3.1-8B",
58
- "BAAI/Infinity-Instruct-7M-Gen-Llama3_1-8B": "https://huggingface.co/BAAI/Infinity-Instruct-7M-Gen-Llama3_1-8B",
59
- "BAAI/Infinity-Instruct-3M-0625-Llama3-8B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Llama3-8B",
60
- "T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0",
61
- "BAAI/Infinity-Instruct-7M-0729-Llama3_1-8B": "https://huggingface.co/BAAI/Infinity-Instruct-7M-0729-Llama3_1-8B",
62
- "mightbe/EEVE-10.8B-Multiturn": "https://huggingface.co/mightbe/EEVE-10.8B-Multiturn",
63
- "hyemijo/omed-llama3.1-8b": "https://huggingface.co/hyemijo/omed-llama3.1-8b",
64
- "yanolja/Bookworm-10.7B-v0.4-DPO": "https://huggingface.co/yanolja/Bookworm-10.7B-v0.4-DPO",
65
- "algograp-Inc/algograpV4": "https://huggingface.co/algograp-Inc/algograpV4",
66
- "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75": "https://huggingface.co/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75",
67
- "chihoonlee10/T3Q-LLM-MG-DPO-v1.0": "https://huggingface.co/chihoonlee10/T3Q-LLM-MG-DPO-v1.0",
68
- "vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B": "https://huggingface.co/vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B",
69
- "RLHFlow/LLaMA3-iterative-DPO-final": "https://huggingface.co/RLHFlow/LLaMA3-iterative-DPO-final",
70
- "SEOKDONG/llama3.1_korean_v0.1_sft_by_aidx": "https://huggingface.co/SEOKDONG/llama3.1_korean_v0.1_sft_by_aidx",
71
- "spow12/Ko-Qwen2-7B-Instruct": "https://huggingface.co/spow12/Ko-Qwen2-7B-Instruct",
72
- "BAAI/Infinity-Instruct-3M-0625-Qwen2-7B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Qwen2-7B",
73
- "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-half": "https://huggingface.co/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-half",
74
- "T3Q-LLM/T3Q-LLM1-CV-v2.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM1-CV-v2.0",
75
- "migtissera/Trinity-2-Codestral-22B-v0.2": "https://huggingface.co/migtissera/Trinity-2-Codestral-22B-v0.2",
76
- "sinjy1203/EEVE-Korean-Instruct-10.8B-v1.0-Grade-Retrieval": "https://huggingface.co/sinjy1203/EEVE-Korean-Instruct-10.8B-v1.0-Grade-Retrieval",
77
- "MaziyarPanahi/Llama-3-8B-Instruct-v0.10": "https://huggingface.co/MaziyarPanahi/Llama-3-8B-Instruct-v0.10",
78
- "MaziyarPanahi/Llama-3-8B-Instruct-v0.9": "https://huggingface.co/MaziyarPanahi/Llama-3-8B-Instruct-v0.9",
79
- "zhengr/MixTAO-7Bx2-MoE-v8.1": "https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-v8.1",
80
- "TIGER-Lab/MAmmoTH2-8B-Plus": "https://huggingface.co/TIGER-Lab/MAmmoTH2-8B-Plus",
81
- "OpenBuddy/openbuddy-qwen1.5-14b-v21.1-32k": "https://huggingface.co/OpenBuddy/openbuddy-qwen1.5-14b-v21.1-32k",
82
- "haoranxu/Llama-3-Instruct-8B-CPO-SimPO": "https://huggingface.co/haoranxu/Llama-3-Instruct-8B-CPO-SimPO",
83
- "Weyaxi/Einstein-v7-Qwen2-7B": "https://huggingface.co/Weyaxi/Einstein-v7-Qwen2-7B",
84
- "DKYoon/kosolar-hermes-test": "https://huggingface.co/DKYoon/kosolar-hermes-test",
85
- "vilm/Quyen-Pro-v0.1": "https://huggingface.co/vilm/Quyen-Pro-v0.1",
86
- "chihoonlee10/T3Q-LLM-MG-v1.0": "https://huggingface.co/chihoonlee10/T3Q-LLM-MG-v1.0",
87
- "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25": "https://huggingface.co/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25",
88
- "ai-human-lab/EEVE-Korean-10.8B-RAFT": "https://huggingface.co/ai-human-lab/EEVE-Korean-10.8B-RAFT",
89
- "princeton-nlp/Llama-3-Base-8B-SFT-RDPO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-RDPO",
90
- "MaziyarPanahi/Llama-3-8B-Instruct-v0.8": "https://huggingface.co/MaziyarPanahi/Llama-3-8B-Instruct-v0.8",
91
- "chihoonlee10/T3Q-ko-solar-dpo-v7.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-dpo-v7.0",
92
- "jondurbin/bagel-8b-v1.0": "https://huggingface.co/jondurbin/bagel-8b-v1.0",
93
- "DeepMount00/Llama-3-8b-Ita": "https://huggingface.co/DeepMount00/Llama-3-8b-Ita",
94
- "VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct": "https://huggingface.co/VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct",
95
- "princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2": "https://huggingface.co/princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2",
96
- "AIDX-ktds/ktdsbaseLM-v0.11-based-on-openchat3.5": "https://huggingface.co/AIDX-ktds/ktdsbaseLM-v0.11-based-on-openchat3.5",
97
- "princeton-nlp/Llama-3-Base-8B-SFT-KTO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-KTO",
98
- "maywell/Mini_Synatra_SFT": "https://huggingface.co/maywell/Mini_Synatra_SFT",
99
- "princeton-nlp/Llama-3-Base-8B-SFT-ORPO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-ORPO",
100
- "princeton-nlp/Llama-3-Instruct-8B-CPO-v0.2": "https://huggingface.co/princeton-nlp/Llama-3-Instruct-8B-CPO-v0.2",
101
- "spow12/Qwen2-7B-ko-Instruct-orpo-ver_2.0_wo_chat": "https://huggingface.co/spow12/Qwen2-7B-ko-Instruct-orpo-ver_2.0_wo_chat",
102
- "princeton-nlp/Llama-3-Base-8B-SFT-DPO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-DPO",
103
- "princeton-nlp/Llama-3-Instruct-8B-ORPO": "https://huggingface.co/princeton-nlp/Llama-3-Instruct-8B-ORPO",
104
- "lcw99/llama-3-10b-it-kor-extented-chang": "https://huggingface.co/lcw99/llama-3-10b-it-kor-extented-chang",
105
- "migtissera/Llama-3-8B-Synthia-v3.5": "https://huggingface.co/migtissera/Llama-3-8B-Synthia-v3.5",
106
- "megastudyedu/M-SOLAR-10.7B-v1.4-dpo": "https://huggingface.co/megastudyedu/M-SOLAR-10.7B-v1.4-dpo",
107
- "T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0",
108
- "maywell/Synatra-10.7B-v0.4": "https://huggingface.co/maywell/Synatra-10.7B-v0.4",
109
- "nlpai-lab/KULLM3": "https://huggingface.co/nlpai-lab/KULLM3",
110
- "abacusai/Llama-3-Smaug-8B": "https://huggingface.co/abacusai/Llama-3-Smaug-8B",
111
- "gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.1": "https://huggingface.co/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.1",
112
- "BAAI/Infinity-Instruct-3M-0625-Mistral-7B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Mistral-7B",
113
- "openchat/openchat_3.5": "https://huggingface.co/openchat/openchat_3.5",
114
- "T3Q-LLM/T3Q-LLM1-v2.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM1-v2.0",
115
- "T3Q-LLM/T3Q-LLM1-CV-v1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM1-CV-v1.0",
116
- "ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.1": "https://huggingface.co/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.1",
117
- "macadeliccc/Samantha-Qwen-2-7B": "https://huggingface.co/macadeliccc/Samantha-Qwen-2-7B",
118
- "openchat/openchat-3.5-0106": "https://huggingface.co/openchat/openchat-3.5-0106",
119
- "NousResearch/Nous-Hermes-2-SOLAR-10.7B": "https://huggingface.co/NousResearch/Nous-Hermes-2-SOLAR-10.7B",
120
- "UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter1": "https://huggingface.co/UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter1",
121
- "MTSAIR/multi_verse_model": "https://huggingface.co/MTSAIR/multi_verse_model",
122
- "gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.0": "https://huggingface.co/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.0",
123
- "VIRNECT/llama-3-Korean-8B": "https://huggingface.co/VIRNECT/llama-3-Korean-8B",
124
- "ENERGY-DRINK-LOVE/SOLAR_merge_DPOv3": "https://huggingface.co/ENERGY-DRINK-LOVE/SOLAR_merge_DPOv3",
125
- "SeaLLMs/SeaLLMs-v3-7B-Chat": "https://huggingface.co/SeaLLMs/SeaLLMs-v3-7B-Chat",
126
- "VIRNECT/llama-3-Korean-8B-V2": "https://huggingface.co/VIRNECT/llama-3-Korean-8B-V2",
127
- "MLP-KTLim/llama-3-Korean-Bllossom-8B": "https://huggingface.co/MLP-KTLim/llama-3-Korean-Bllossom-8B",
128
- "Magpie-Align/Llama-3-8B-Magpie-Align-v0.3": "https://huggingface.co/Magpie-Align/Llama-3-8B-Magpie-Align-v0.3",
129
- "cognitivecomputations/Llama-3-8B-Instruct-abliterated-v2": "https://huggingface.co/cognitivecomputations/Llama-3-8B-Instruct-abliterated-v2",
130
- "SkyOrbis/SKY-Ko-Llama3-8B-lora": "https://huggingface.co/SkyOrbis/SKY-Ko-Llama3-8B-lora",
131
- "4yo1/llama3-eng-ko-8b-sl5": "https://huggingface.co/4yo1/llama3-eng-ko-8b-sl5",
132
- "kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.39": "https://huggingface.co/kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.39",
133
- "ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.2": "https://huggingface.co/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.2",
134
- "lcw99/llama-3-10b-it-kor-extented-chang-pro8": "https://huggingface.co/lcw99/llama-3-10b-it-kor-extented-chang-pro8",
135
- "BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B",
136
- "migtissera/Tess-2.0-Llama-3-8B": "https://huggingface.co/migtissera/Tess-2.0-Llama-3-8B",
137
- "BAAI/Infinity-Instruct-3M-0613-Mistral-7B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0613-Mistral-7B",
138
- "yeonwoo780/cydinfo-llama3-8b-lora-v01": "https://huggingface.co/yeonwoo780/cydinfo-llama3-8b-lora-v01",
139
- "vicgalle/ConfigurableSOLAR-10.7B": "https://huggingface.co/vicgalle/ConfigurableSOLAR-10.7B",
140
- "chihoonlee10/T3Q-ko-solar-jo-v1.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-jo-v1.0",
141
- "Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.4": "https://huggingface.co/Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.4",
142
- "Edentns/DataVortexS-10.7B-dpo-v1.0": "https://huggingface.co/Edentns/DataVortexS-10.7B-dpo-v1.0",
143
- "SJ-Donald/SJ-SOLAR-10.7b-DPO": "https://huggingface.co/SJ-Donald/SJ-SOLAR-10.7b-DPO",
144
- "lemon-mint/gemma-ko-7b-it-v0.40": "https://huggingface.co/lemon-mint/gemma-ko-7b-it-v0.40",
145
- "GyuHyeonWkdWkdMan/naps-llama-3.1-8b-instruct-v0.3": "https://huggingface.co/GyuHyeonWkdWkdMan/naps-llama-3.1-8b-instruct-v0.3",
146
- "hyeogi/SOLAR-10.7B-v1.5": "https://huggingface.co/hyeogi/SOLAR-10.7B-v1.5",
147
- "etri-xainlp/llama3-8b-dpo_v1": "https://huggingface.co/etri-xainlp/llama3-8b-dpo_v1",
148
- "LDCC/LDCC-SOLAR-10.7B": "https://huggingface.co/LDCC/LDCC-SOLAR-10.7B",
149
- "chlee10/T3Q-Llama3-8B-Inst-sft1.0": "https://huggingface.co/chlee10/T3Q-Llama3-8B-Inst-sft1.0",
150
- "lemon-mint/gemma-ko-7b-it-v0.41": "https://huggingface.co/lemon-mint/gemma-ko-7b-it-v0.41",
151
- "chlee10/T3Q-Llama3-8B-sft1.0-dpo1.0": "https://huggingface.co/chlee10/T3Q-Llama3-8B-sft1.0-dpo1.0",
152
- "maywell/Synatra-7B-Instruct-v0.3-pre": "https://huggingface.co/maywell/Synatra-7B-Instruct-v0.3-pre",
153
- "UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter2": "https://huggingface.co/UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter2",
154
- "hwkwon/S-SOLAR-10.7B-v1.4": "https://huggingface.co/hwkwon/S-SOLAR-10.7B-v1.4",
155
- "12thD/ko-Llama-3-8B-sft-v0.3": "https://huggingface.co/12thD/ko-Llama-3-8B-sft-v0.3",
156
- "hkss/hk-SOLAR-10.7B-v1.4": "https://huggingface.co/hkss/hk-SOLAR-10.7B-v1.4",
157
- "lookuss/test-llilu": "https://huggingface.co/lookuss/test-llilu",
158
- "chihoonlee10/T3Q-ko-solar-dpo-v3.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-dpo-v3.0",
159
- "chihoonlee10/T3Q-ko-solar-dpo-v1.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-dpo-v1.0",
160
- "lcw99/llama-3-10b-wiki-240709-f": "https://huggingface.co/lcw99/llama-3-10b-wiki-240709-f",
161
- "Edentns/DataVortexS-10.7B-v0.4": "https://huggingface.co/Edentns/DataVortexS-10.7B-v0.4",
162
- "princeton-nlp/Llama-3-Instruct-8B-KTO": "https://huggingface.co/princeton-nlp/Llama-3-Instruct-8B-KTO",
163
- "spow12/kosolar_4.1_sft": "https://huggingface.co/spow12/kosolar_4.1_sft",
164
- "natong19/Qwen2-7B-Instruct-abliterated": "https://huggingface.co/natong19/Qwen2-7B-Instruct-abliterated",
165
- "megastudyedu/ME-dpo-7B-v1.1": "https://huggingface.co/megastudyedu/ME-dpo-7B-v1.1",
166
- "01-ai/Yi-1.5-9B-Chat-16K": "https://huggingface.co/01-ai/Yi-1.5-9B-Chat-16K",
167
- "Edentns/DataVortexS-10.7B-dpo-v0.1": "https://huggingface.co/Edentns/DataVortexS-10.7B-dpo-v0.1",
168
- "Alphacode-AI/AlphaMist7B-slr-v4-slow": "https://huggingface.co/Alphacode-AI/AlphaMist7B-slr-v4-slow",
169
- "chihoonlee10/T3Q-ko-solar-sft-dpo-v1.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-sft-dpo-v1.0",
170
- "hwkwon/S-SOLAR-10.7B-v1.1": "https://huggingface.co/hwkwon/S-SOLAR-10.7B-v1.1",
171
- "DopeorNope/Dear_My_best_Friends-13B": "https://huggingface.co/DopeorNope/Dear_My_best_Friends-13B",
172
- "GyuHyeonWkdWkdMan/NAPS-llama-3.1-8b-instruct-v0.3.2": "https://huggingface.co/GyuHyeonWkdWkdMan/NAPS-llama-3.1-8b-instruct-v0.3.2",
173
- "PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct": "https://huggingface.co/PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct",
174
- "vicgalle/ConfigurableHermes-7B": "https://huggingface.co/vicgalle/ConfigurableHermes-7B",
175
- "maywell/PiVoT-10.7B-Mistral-v0.2": "https://huggingface.co/maywell/PiVoT-10.7B-Mistral-v0.2",
176
- "failspy/Meta-Llama-3-8B-Instruct-abliterated-v3": "https://huggingface.co/failspy/Meta-Llama-3-8B-Instruct-abliterated-v3",
177
- "lemon-mint/gemma-ko-7b-instruct-v0.50": "https://huggingface.co/lemon-mint/gemma-ko-7b-instruct-v0.50",
178
- "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT": "https://huggingface.co/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT",
179
- "maywell/PiVoT-0.1-early": "https://huggingface.co/maywell/PiVoT-0.1-early",
180
- "hwkwon/S-SOLAR-10.7B-v1.3": "https://huggingface.co/hwkwon/S-SOLAR-10.7B-v1.3",
181
- "werty1248/Llama-3-Ko-8B-Instruct-AOG": "https://huggingface.co/werty1248/Llama-3-Ko-8B-Instruct-AOG",
182
- "Alphacode-AI/AlphaMist7B-slr-v2": "https://huggingface.co/Alphacode-AI/AlphaMist7B-slr-v2",
183
- "maywell/koOpenChat-sft": "https://huggingface.co/maywell/koOpenChat-sft",
184
- "lemon-mint/gemma-7b-openhermes-v0.80": "https://huggingface.co/lemon-mint/gemma-7b-openhermes-v0.80",
185
- "VIRNECT/llama-3-Korean-8B-r-v1": "https://huggingface.co/VIRNECT/llama-3-Korean-8B-r-v1",
186
- "Alphacode-AI/AlphaMist7B-slr-v1": "https://huggingface.co/Alphacode-AI/AlphaMist7B-slr-v1",
187
- "Loyola/Mistral-7b-ITmodel": "https://huggingface.co/Loyola/Mistral-7b-ITmodel",
188
- "VIRNECT/llama-3-Korean-8B-r-v2": "https://huggingface.co/VIRNECT/llama-3-Korean-8B-r-v2",
189
- "NLPark/AnFeng_v3.1-Avocet": "https://huggingface.co/NLPark/AnFeng_v3.1-Avocet",
190
- "maywell/Synatra_TbST11B_EP01": "https://huggingface.co/maywell/Synatra_TbST11B_EP01",
191
- "GritLM/GritLM-7B-KTO": "https://huggingface.co/GritLM/GritLM-7B-KTO",
192
- "01-ai/Yi-34B-Chat": "https://huggingface.co/01-ai/Yi-34B-Chat",
193
- "ValiantLabs/Llama3.1-8B-ShiningValiant2": "https://huggingface.co/ValiantLabs/Llama3.1-8B-ShiningValiant2",
194
- "princeton-nlp/Llama-3-Base-8B-SFT-CPO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-CPO",
195
- "hyokwan/hkcode_llama3_8b": "https://huggingface.co/hyokwan/hkcode_llama3_8b",
196
- "UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3": "https://huggingface.co/UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3",
197
- "yuntaeyang/SOLAR-10.7B-Instructlora_sftt-v1.0": "https://huggingface.co/yuntaeyang/SOLAR-10.7B-Instructlora_sftt-v1.0",
198
- "juungwon/Llama-3-cs-LoRA": "https://huggingface.co/juungwon/Llama-3-cs-LoRA",
199
- "gangyeolkim/llama-3-chat": "https://huggingface.co/gangyeolkim/llama-3-chat",
200
- "mncai/llama2-13b-dpo-v3": "https://huggingface.co/mncai/llama2-13b-dpo-v3",
201
- "maywell/Synatra-Zephyr-7B-v0.01": "https://huggingface.co/maywell/Synatra-Zephyr-7B-v0.01",
202
- "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT": "https://huggingface.co/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT",
203
- "juungwon/Llama-3-constructionsafety-LoRA": "https://huggingface.co/juungwon/Llama-3-constructionsafety-LoRA",
204
- "princeton-nlp/Mistral-7B-Base-SFT-SimPO": "https://huggingface.co/princeton-nlp/Mistral-7B-Base-SFT-SimPO",
205
- "moondriller/solar10B-eugeneparkthebestv2": "https://huggingface.co/moondriller/solar10B-eugeneparkthebestv2",
206
- "chlee10/T3Q-LLM3-Llama3-sft1.0-dpo1.0": "https://huggingface.co/chlee10/T3Q-LLM3-Llama3-sft1.0-dpo1.0",
207
- "Edentns/DataVortexS-10.7B-dpo-v1.7": "https://huggingface.co/Edentns/DataVortexS-10.7B-dpo-v1.7",
208
- "gamzadole/llama3_instruct_tuning_without_pretraing": "https://huggingface.co/gamzadole/llama3_instruct_tuning_without_pretraing",
209
- "saltlux/Ko-Llama3-Luxia-8B": "https://huggingface.co/saltlux/Ko-Llama3-Luxia-8B",
210
- "kimdeokgi/ko-pt-model-test1": "https://huggingface.co/kimdeokgi/ko-pt-model-test1",
211
- "maywell/Synatra-11B-Testbench-2": "https://huggingface.co/maywell/Synatra-11B-Testbench-2",
212
- "Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO": "https://huggingface.co/Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO",
213
- "vicgalle/Configurable-Mistral-7B": "https://huggingface.co/vicgalle/Configurable-Mistral-7B",
214
- "ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT": "https://huggingface.co/ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT",
215
- "beomi/Llama-3-Open-Ko-8B-Instruct-preview": "https://huggingface.co/beomi/Llama-3-Open-Ko-8B-Instruct-preview",
216
- "Edentns/DataVortexS-10.7B-dpo-v1.3": "https://huggingface.co/Edentns/DataVortexS-10.7B-dpo-v1.3",
217
- "spow12/Llama3_ko_4.2_sft": "https://huggingface.co/spow12/Llama3_ko_4.2_sft",
218
- "maywell/Llama-3-Ko-8B-Instruct": "https://huggingface.co/maywell/Llama-3-Ko-8B-Instruct",
219
- "T3Q-LLM/T3Q-LLM3-NC-v1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM3-NC-v1.0",
220
- "ehartford/dolphin-2.2.1-mistral-7b": "https://huggingface.co/ehartford/dolphin-2.2.1-mistral-7b",
221
- "hwkwon/S-SOLAR-10.7B-SFT-v1.3": "https://huggingface.co/hwkwon/S-SOLAR-10.7B-SFT-v1.3",
222
- "sel303/llama3-instruct-diverce-v2.0": "https://huggingface.co/sel303/llama3-instruct-diverce-v2.0",
223
- "4yo1/llama3-eng-ko-8b-sl3": "https://huggingface.co/4yo1/llama3-eng-ko-8b-sl3",
224
- "hkss/hk-SOLAR-10.7B-v1.1": "https://huggingface.co/hkss/hk-SOLAR-10.7B-v1.1",
225
- "Open-Orca/Mistral-7B-OpenOrca": "https://huggingface.co/Open-Orca/Mistral-7B-OpenOrca",
226
- "hyokwan/familidata": "https://huggingface.co/hyokwan/familidata",
227
- "uukuguy/zephyr-7b-alpha-dare-0.85": "https://huggingface.co/uukuguy/zephyr-7b-alpha-dare-0.85",
228
- "gwonny/nox-solar-10.7b-v4-kolon-all-5": "https://huggingface.co/gwonny/nox-solar-10.7b-v4-kolon-all-5",
229
- "shleeeee/mistral-ko-tech-science-v1": "https://huggingface.co/shleeeee/mistral-ko-tech-science-v1",
230
- "Deepnoid/deep-solar-eeve-KorSTS": "https://huggingface.co/Deepnoid/deep-solar-eeve-KorSTS",
231
- "AIdenU/Mistral-7B-v0.2-ko-Y24_v1.0": "https://huggingface.co/AIdenU/Mistral-7B-v0.2-ko-Y24_v1.0",
232
- "tlphams/gollm-tendency-45": "https://huggingface.co/tlphams/gollm-tendency-45",
233
- "realPCH/ko_solra_merge": "https://huggingface.co/realPCH/ko_solra_merge",
234
- "Cartinoe5930/original-KoRAE-13b": "https://huggingface.co/Cartinoe5930/original-KoRAE-13b",
235
- "GAI-LLM/Yi-Ko-6B-dpo-v5": "https://huggingface.co/GAI-LLM/Yi-Ko-6B-dpo-v5",
236
- "Minirecord/Mini_DPO_test02": "https://huggingface.co/Minirecord/Mini_DPO_test02",
237
- "AIJUUD/juud-Mistral-7B-dpo": "https://huggingface.co/AIJUUD/juud-Mistral-7B-dpo",
238
- "gwonny/nox-solar-10.7b-v4-kolon-all-10": "https://huggingface.co/gwonny/nox-solar-10.7b-v4-kolon-all-10",
239
- "jieunhan/TEST_MODEL": "https://huggingface.co/jieunhan/TEST_MODEL",
240
- "etri-xainlp/kor-llama2-13b-dpo": "https://huggingface.co/etri-xainlp/kor-llama2-13b-dpo",
241
- "ifuseok/yi-ko-playtus-instruct-v0.2": "https://huggingface.co/ifuseok/yi-ko-playtus-instruct-v0.2",
242
- "Cartinoe5930/original-KoRAE-13b-3ep": "https://huggingface.co/Cartinoe5930/original-KoRAE-13b-3ep",
243
- "Trofish/KULLM-RLHF": "https://huggingface.co/Trofish/KULLM-RLHF",
244
- "wkshin89/Yi-Ko-6B-Instruct-v1.0": "https://huggingface.co/wkshin89/Yi-Ko-6B-Instruct-v1.0",
245
- "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge": "https://huggingface.co/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge",
246
- "PracticeLLM/Custom-KoLLM-13B-v5": "https://huggingface.co/PracticeLLM/Custom-KoLLM-13B-v5",
247
- "BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B",
248
- "MRAIRR/minillama3_8b_all": "https://huggingface.co/MRAIRR/minillama3_8b_all",
249
- "failspy/Phi-3-medium-4k-instruct-abliterated-v3": "https://huggingface.co/failspy/Phi-3-medium-4k-instruct-abliterated-v3",
250
- "DILAB-HYU/koquality-polyglot-12.8b": "https://huggingface.co/DILAB-HYU/koquality-polyglot-12.8b",
251
- "kyujinpy/Korean-OpenOrca-v3": "https://huggingface.co/kyujinpy/Korean-OpenOrca-v3",
252
- "4yo1/llama3-eng-ko-8b": "https://huggingface.co/4yo1/llama3-eng-ko-8b",
253
- "4yo1/llama3-eng-ko-8": "https://huggingface.co/4yo1/llama3-eng-ko-8",
254
- "4yo1/llama3-eng-ko-8-llama": "https://huggingface.co/4yo1/llama3-eng-ko-8-llama",
255
- "PracticeLLM/Custom-KoLLM-13B-v2": "https://huggingface.co/PracticeLLM/Custom-KoLLM-13B-v2",
256
- "kyujinpy/KOR-Orca-Platypus-13B-v2": "https://huggingface.co/kyujinpy/KOR-Orca-Platypus-13B-v2",
257
- "ghost-x/ghost-7b-alpha": "https://huggingface.co/ghost-x/ghost-7b-alpha",
258
- "HumanF-MarkrAI/pub-llama-13B-v6": "https://huggingface.co/HumanF-MarkrAI/pub-llama-13B-v6",
259
- "nlpai-lab/kullm-polyglot-5.8b-v2": "https://huggingface.co/nlpai-lab/kullm-polyglot-5.8b-v2",
260
- "maywell/Synatra-42dot-1.3B": "https://huggingface.co/maywell/Synatra-42dot-1.3B",
261
- "yhkim9362/gemma-en-ko-7b-v0.1": "https://huggingface.co/yhkim9362/gemma-en-ko-7b-v0.1",
262
- "yhkim9362/gemma-en-ko-7b-v0.2": "https://huggingface.co/yhkim9362/gemma-en-ko-7b-v0.2",
263
- "daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B": "https://huggingface.co/daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B",
264
- "beomi/Yi-Ko-6B": "https://huggingface.co/beomi/Yi-Ko-6B",
265
- "jojo0217/ChatSKKU5.8B": "https://huggingface.co/jojo0217/ChatSKKU5.8B",
266
- "Deepnoid/deep-solar-v2.0.7": "https://huggingface.co/Deepnoid/deep-solar-v2.0.7",
267
- "01-ai/Yi-1.5-9B": "https://huggingface.co/01-ai/Yi-1.5-9B",
268
- "PracticeLLM/Custom-KoLLM-13B-v4": "https://huggingface.co/PracticeLLM/Custom-KoLLM-13B-v4",
269
- "nuebaek/komt_mistral_mss_user_0_max_steps_80": "https://huggingface.co/nuebaek/komt_mistral_mss_user_0_max_steps_80",
270
- "dltjdgh0928/lsh_finetune_v0.11": "https://huggingface.co/dltjdgh0928/lsh_finetune_v0.11",
271
- "shleeeee/mistral-7b-wiki": "https://huggingface.co/shleeeee/mistral-7b-wiki",
272
- "nayohan/polyglot-ko-5.8b-Inst": "https://huggingface.co/nayohan/polyglot-ko-5.8b-Inst",
273
- "ifuseok/sft-solar-10.7b-v1.1": "https://huggingface.co/ifuseok/sft-solar-10.7b-v1.1",
274
- "Junmai/KIT-5.8b": "https://huggingface.co/Junmai/KIT-5.8b",
275
- "heegyu/polyglot-ko-3.8b-chat": "https://huggingface.co/heegyu/polyglot-ko-3.8b-chat",
276
- "etri-xainlp/polyglot-ko-12.8b-instruct": "https://huggingface.co/etri-xainlp/polyglot-ko-12.8b-instruct",
277
- "OpenBuddy/openbuddy-mistral2-7b-v20.3-32k": "https://huggingface.co/OpenBuddy/openbuddy-mistral2-7b-v20.3-32k",
278
- "sh2orc/Llama-3-Korean-8B": "https://huggingface.co/sh2orc/Llama-3-Korean-8B",
279
- "Deepnoid/deep-solar-eeve-v2.0.0": "https://huggingface.co/Deepnoid/deep-solar-eeve-v2.0.0",
280
- "Herry443/Mistral-7B-KNUT-ref": "https://huggingface.co/Herry443/Mistral-7B-KNUT-ref",
281
- "heegyu/polyglot-ko-5.8b-chat": "https://huggingface.co/heegyu/polyglot-ko-5.8b-chat",
282
- "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.5.3": "https://huggingface.co/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.5.3",
283
- "DILAB-HYU/KoQuality-Polyglot-5.8b": "https://huggingface.co/DILAB-HYU/KoQuality-Polyglot-5.8b",
284
- "Byungchae/k2s3_test_0000": "https://huggingface.co/Byungchae/k2s3_test_0000",
285
- "migtissera/Tess-v2.5-Phi-3-medium-128k-14B": "https://huggingface.co/migtissera/Tess-v2.5-Phi-3-medium-128k-14B",
286
- "kyujinpy/Korean-OpenOrca-13B": "https://huggingface.co/kyujinpy/Korean-OpenOrca-13B",
287
- "kyujinpy/KO-Platypus2-13B": "https://huggingface.co/kyujinpy/KO-Platypus2-13B",
288
- "jin05102518/Astral-7B-Instruct-v0.01": "https://huggingface.co/jin05102518/Astral-7B-Instruct-v0.01",
289
- "Byungchae/k2s3_test_0002": "https://huggingface.co/Byungchae/k2s3_test_0002",
290
- "NousResearch/Nous-Hermes-llama-2-7b": "https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b",
291
- "kaist-ai/prometheus-13b-v1.0": "https://huggingface.co/kaist-ai/prometheus-13b-v1.0",
292
- "sel303/llama3-diverce-ver1.0": "https://huggingface.co/sel303/llama3-diverce-ver1.0",
293
- "NousResearch/Nous-Capybara-7B": "https://huggingface.co/NousResearch/Nous-Capybara-7B",
294
- "rrw-x2/KoSOLAR-10.7B-DPO-v1.0": "https://huggingface.co/rrw-x2/KoSOLAR-10.7B-DPO-v1.0",
295
- "Edentns/DataVortexS-10.7B-v0.2": "https://huggingface.co/Edentns/DataVortexS-10.7B-v0.2",
296
- "Jsoo/Llama3-beomi-Open-Ko-8B-Instruct-preview-test6": "https://huggingface.co/Jsoo/Llama3-beomi-Open-Ko-8B-Instruct-preview-test6",
297
- "tlphams/gollm-instruct-all-in-one-v1": "https://huggingface.co/tlphams/gollm-instruct-all-in-one-v1",
298
- "Edentns/DataVortexTL-1.1B-v0.1": "https://huggingface.co/Edentns/DataVortexTL-1.1B-v0.1",
299
- "richard-park/llama3-pre1-ds": "https://huggingface.co/richard-park/llama3-pre1-ds",
300
- "ehartford/samantha-1.1-llama-33b": "https://huggingface.co/ehartford/samantha-1.1-llama-33b",
301
- "heegyu/LIMA-13b-hf": "https://huggingface.co/heegyu/LIMA-13b-hf",
302
- "heegyu/42dot_LLM-PLM-1.3B-mt": "https://huggingface.co/heegyu/42dot_LLM-PLM-1.3B-mt",
303
- "shleeeee/mistral-ko-7b-wiki-neft": "https://huggingface.co/shleeeee/mistral-ko-7b-wiki-neft",
304
- "EleutherAI/polyglot-ko-1.3b": "https://huggingface.co/EleutherAI/polyglot-ko-1.3b",
305
- "kyujinpy/Ko-PlatYi-6B-gu": "https://huggingface.co/kyujinpy/Ko-PlatYi-6B-gu",
306
- "sel303/llama3-diverce-ver1.6": "https://huggingface.co/sel303/llama3-diverce-ver1.6"
307
- }
308
-
309
- def get_korea_models():
310
- """Korea 관련 모델 검색"""
311
- params = {
312
- "search": "korea",
313
- "full": "True",
314
- "config": "True",
315
- "limit": 1000
316
- }
317
-
318
- try:
319
- response = requests.get(
320
- "https://huggingface.co/api/models",
321
- headers={'Accept': 'application/json'},
322
- params=params
323
- )
324
-
325
- if response.status_code == 200:
326
- return response.json()
327
- else:
328
- print(f"Failed to fetch Korea models: {response.status_code}")
329
- return []
330
- except Exception as e:
331
- print(f"Error fetching Korea models: {str(e)}")
332
- return []
333
-
334
- def get_all_models(limit=3000):
335
- """모든 모델과 Korea 관련 모델 가져오기"""
336
- all_models = []
337
- page_size = 1000 # maximum page size per API request
338
-
339
- # Collect data across multiple pages
340
- for offset in range(0, limit, page_size):
341
- params = {
342
- 'limit': min(page_size, limit - offset),
343
- 'full': 'True',
344
- 'config': 'True',
345
- 'offset': offset
346
- }
347
-
348
- response = requests.get(
349
- "https://huggingface.co/api/models",
350
- headers={'Accept': 'application/json'},
351
- params=params
352
- )
353
-
354
- if response.status_code == 200:
355
- all_models.extend(response.json())
356
- print(f"Fetched models {offset+1} to {offset+len(response.json())}")
357
- else:
358
- print(f"Failed to fetch models at offset {offset}: {response.status_code}")
359
- break
360
-
361
- # Expand the Korea search results in the same way
362
- korea_params = {
363
- "search": "korea",
364
- "full": "True",
365
- "config": "True",
366
- "limit": limit
367
- }
368
-
369
- korea_response = requests.get(
370
- "https://huggingface.co/api/models",
371
- headers={'Accept': 'application/json'},
372
- params=korea_params
373
- )
374
-
375
- if korea_response.status_code == 200:
376
- korea_models = korea_response.json()
377
- print(f"Fetched {len(korea_models)} Korea-related models")
378
-
379
- # Add Korea models while removing duplicates
380
- existing_ids = {model.get('id', '') for model in all_models}
381
- for model in korea_models:
382
- if model.get('id', '') not in existing_ids:
383
- all_models.append(model)
384
- existing_ids.add(model.get('id', ''))
385
-
386
- print(f"Total unique models: {len(all_models)}")
387
- return all_models[:limit]
388
-
389
- def get_models_data(progress=gr.Progress()):
390
- def calculate_rank(model_id, all_global_models, korea_models):
391
- # Check global rank
392
- global_rank = next((idx for idx, m in enumerate(all_global_models, 1)
393
- if m.get('id', '').strip() == model_id.strip()), None)
394
-
395
- # Check whether this is a Korea model
396
- is_korea = any(m.get('id', '').strip() == model_id.strip() for m in korea_models)
397
-
398
- if is_korea:
399
- # Check rank among Korea models
400
- korea_rank = next((idx for idx, m in enumerate(korea_models, 1)
401
- if m.get('id', '').strip() == model_id.strip()), None)
402
-
403
- if korea_rank:
404
- return min(global_rank or 3001, korea_rank + 1000), True
405
-
406
- return global_rank if global_rank else 'Not in top 3000', is_korea
407
-
408
- try:
409
- progress(0, desc="Fetching models...")
410
-
411
- if not HF_TOKEN:
412
- fig = create_error_plot()
413
- error_html = """
414
- <div style='padding: 20px; background: #fee; border-radius: 10px; margin: 10px 0;'>
415
- <h3 style='color: #c00;'>⚠️ API authentication required</h3>
416
- <p>No HuggingFace API token is configured. An API token is required for full functionality.</p>
417
- </div>
418
- """
419
- empty_df = pd.DataFrame(columns=['Global Rank', 'Model ID', 'Title', 'Downloads', 'Likes', 'Korea Search', 'URL'])
420
- return fig, error_html, empty_df
421
-
422
- # Fetch both general and Korea-related models (up to rank 3000)
423
- all_global_models = get_all_models(limit=3000)
424
- korea_models = get_korea_models()
425
-
426
- print(f"Total global models fetched: {len(all_global_models)}")
427
- print(f"Total Korea models fetched: {len(korea_models)}")
428
-
429
- # Merge all models (deduplicated)
430
- all_models = all_global_models.copy()
431
- existing_ids = {model.get('id', '') for model in all_global_models}
432
-
433
- added_korea_models = 0
434
- for korea_model in korea_models:
435
- if korea_model.get('id', '') not in existing_ids:
436
- all_models.append(korea_model)
437
- existing_ids.add(korea_model.get('id', ''))
438
- added_korea_models += 1
439
-
440
- print(f"Added {added_korea_models} unique Korea models")
441
- print(f"Total combined models: {len(all_models)}")
442
-
443
- # Create the figure for visualization
444
- fig = go.Figure()
445
-
446
- # Collect rank information
447
- filtered_models = []
448
- for model_id in target_models.keys():
449
- try:
450
- normalized_id = model_id.strip('/')
451
- model_url_api = f"https://huggingface.co/api/models/{normalized_id}"
452
- response = requests.get(
453
- model_url_api,
454
- headers={'Accept': 'application/json'}
455
- )
456
-
457
- if response.status_code == 200:
458
- model_data = response.json()
459
- rank, is_korea = calculate_rank(model_id, all_global_models, korea_models)
460
-
461
- filtered_models.append({
462
- 'id': model_id,
463
- 'global_rank': rank,
464
- 'downloads': model_data.get('downloads', 0),
465
- 'likes': model_data.get('likes', 0),
466
- 'title': model_data.get('title', 'No Title'),
467
- 'is_korea': is_korea
468
- })
469
-
470
- print(f"Model {model_id}: Rank={rank}, Is Korea={is_korea}")
471
- else:
472
- filtered_models.append({
473
- 'id': model_id,
474
- 'global_rank': 'Not in top 3000',
475
- 'downloads': 0,
476
- 'likes': 0,
477
- 'title': 'No Title',
478
- 'is_korea': False
479
- })
480
- except Exception as e:
481
- print(f"Error processing {model_id}: {str(e)}")
482
- continue
483
-
484
- # Sort by rank
485
- filtered_models.sort(key=lambda x: float('inf') if isinstance(x['global_rank'], str) else x['global_rank'])
486
-
487
- # Prepare visualization data
488
- valid_models = [m for m in filtered_models if isinstance(m['global_rank'], (int, float))]
489
-
490
- if valid_models:
491
- ids = [m['id'] for m in valid_models]
492
- ranks = [m['global_rank'] for m in valid_models]
493
-
494
- fig.add_trace(go.Bar(
495
- x=ids,
496
- y=[3001 - r for r in ranks], # extend the Y-axis range up to 3000
497
- text=[f"Rank: #{r}<br>Downloads: {format(m['downloads'], ',')}<br>Likes: {format(m['likes'], ',')}"
498
- for r, m in zip(ranks, valid_models)],
499
- textposition='auto',
500
- marker_color=['rgba(255,0,0,0.6)' if m['is_korea'] else 'rgba(0,0,255,0.6)'
501
- for m in valid_models],
502
- opacity=0.8
503
- ))
504
-
505
- fig.update_layout(
506
- title="HuggingFace Models Global Rankings (Up to #3000)",
507
- xaxis_title="Model ID",
508
- yaxis_title="Global Rank",
509
- yaxis=dict(
510
- ticktext=[f"#{i}" for i in range(1, 3001, 100)],
511
- tickvals=[3001 - i for i in range(1, 3001, 100)],
512
- range=[0, 3000]
513
- ),
514
- height=800,
515
- showlegend=False,
516
- template='plotly_white',
517
- xaxis_tickangle=-45
518
- )
519
-
520
- # Build HTML cards
521
- html_content = """
522
- <div style='padding: 20px; background: #f5f5f5;'>
523
- <h2 style='color: #2c3e50;'>Models Rankings (Up to #3000)</h2>
524
- <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
525
- """
526
-
527
- for model in filtered_models:
528
- rank_display = f"Global Rank #{model['global_rank']}" if isinstance(model['global_rank'], (int, float)) else "Not in top 3000"
529
- korea_badge = "🇰🇷 Korea Search Result" if model['is_korea'] else ""
530
-
531
- html_content += f"""
532
- <div style='
533
- background: white;
534
- padding: 20px;
535
- border-radius: 10px;
536
- box-shadow: 0 2px 4px rgba(0,0,0,0.1);
537
- transition: transform 0.2s;
538
- {f"border: 2px solid #e74c3c;" if model['is_korea'] else ""}
539
- '>
540
- <h3 style='color: #34495e;'>{rank_display}</h3>
541
- <h4 style='color: #2c3e50;'>{model['id']}</h4>
542
- <p style='color: #e74c3c; font-weight: bold;'>{korea_badge}</p>
543
- <p style='color: #7f8c8d;'>⬇️ Downloads: {format(model['downloads'], ',')}</p>
544
- <p style='color: #7f8c8d;'>👍 Likes: {format(model['likes'], ',')}</p>
545
- <a href='{target_models[model['id']]}'
546
- target='_blank'
547
- style='
548
- display: inline-block;
549
- padding: 8px 16px;
550
- background: #3498db;
551
- color: white;
552
- text-decoration: none;
553
- border-radius: 5px;
554
- transition: background 0.3s;
555
- '>
556
- Visit Model 🔗
557
- </a>
558
- </div>
559
- """
560
-
561
- html_content += "</div></div>"
562
-
563
- # Build the DataFrame
564
- df = pd.DataFrame([{
565
- 'Global Rank': f"#{m['global_rank']}" if isinstance(m['global_rank'], (int, float)) else m['global_rank'],
566
- 'Model ID': m['id'],
567
- 'Title': m['title'],
568
- 'Downloads': format(m['downloads'], ','),
569
- 'Likes': format(m['likes'], ','),
570
- 'Korea Search': '🇰🇷' if m['is_korea'] else '',
571
- 'URL': target_models[m['id']]
572
- } for m in filtered_models])
573
-
574
- progress(1.0, desc="Complete!")
575
- return fig, html_content, df
576
-
577
- except Exception as e:
578
- print(f"Error in get_models_data: {str(e)}")
579
- error_fig = create_error_plot()
580
- error_html = f"""
581
- <div style='padding: 20px; background: #fee; border-radius: 10px; margin: 10px 0;'>
582
- <h3 style='color: #c00;'>⚠️ An error occurred</h3>
583
- <p>{str(e)}</p>
584
- </div>
585
- """
586
- empty_df = pd.DataFrame(columns=['Global Rank', 'Model ID', 'Title', 'Downloads', 'Likes', 'Korea Search', 'URL'])
587
- return error_fig, error_html, empty_df
588
-
589
-
590
- # URL list and info for the spaces of interest
591
- target_spaces = {
592
-
593
- "kolaslab/Quantum": "https://huggingface.co/spaces/kolaslab/Quantum",
594
- "openfree/webtoon": "https://huggingface.co/spaces/openfree/webtoon",
595
- "immunobiotech/ChicagoGallery": "https://huggingface.co/spaces/immunobiotech/ChicagoGallery",
596
- "immunobiotech/MetropolitanMuseum": "https://huggingface.co/spaces/immunobiotech/MetropolitanMuseum",
597
- "immunobiotech/opensky": "https://huggingface.co/spaces/immunobiotech/opensky",
598
-
599
- "kolaslab/Audio-Visualizer": "https://huggingface.co/spaces/kolaslab/Audio-Visualizer",
600
- "kolaslab/Radio-Learning": "https://huggingface.co/spaces/kolaslab/Radio-Learning",
601
- "kolaslab/Future-Gallaxy": "https://huggingface.co/spaces/kolaslab/Future-Gallaxy",
602
- "openfree/ProteinGenesis": "https://huggingface.co/spaces/openfree/ProteinGenesis",
603
- "openfree/2025saju": "https://huggingface.co/spaces/openfree/2025saju",
604
- "ginigen/Dokdo-membership": "https://huggingface.co/spaces/ginigen/Dokdo-membership",
605
- "VIDraft/eum": "https://huggingface.co/spaces/VIDraft/eum",
606
- "kolaslab/VisionART": "https://huggingface.co/spaces/kolaslab/VisionART",
607
- "aiqtech/FLUX-military": "https://huggingface.co/spaces/aiqtech/FLUX-military",
608
- "fantaxy/Rolls-Royce": "https://huggingface.co/spaces/fantaxy/Rolls-Royce",
609
- "seawolf2357/flux-korea-hanbok-lora": "https://huggingface.co/spaces/seawolf2357/flux-korea-hanbok-lora",
610
- "seawolf2357/flux-korea-palace-lora": "https://huggingface.co/spaces/seawolf2357/flux-korea-palace-lora",
611
- "aiqcamp/flux-cat-lora": "https://huggingface.co/spaces/aiqcamp/flux-cat-lora",
612
- "gunship999/SexyImages": "https://huggingface.co/spaces/gunship999/SexyImages",
613
- "aiqtech/flux-claude-monet-lora": "https://huggingface.co/spaces/aiqtech/flux-claude-monet-lora",
614
- "ginigen/CANVAS-o3": "https://huggingface.co/spaces/ginigen/CANVAS-o3",
615
- "kolaslab/world-sdr": "https://huggingface.co/spaces/kolaslab/world-sdr",
616
- "seawolf2357/3D-Avatar-Generator": "https://huggingface.co/spaces/seawolf2357/3D-Avatar-Generator",
617
- "fantaxy/playground25": "https://huggingface.co/spaces/fantaxy/playground25",
618
- "openfree/ultpixgen": "https://huggingface.co/spaces/openfree/ultpixgen",
619
- "kolaslab/VISION-NIGHT": "https://huggingface.co/spaces/kolaslab/VISION-NIGHT",
620
- "kolaslab/FLUX-WEB": "https://huggingface.co/spaces/kolaslab/FLUX-WEB",
621
- "seawolf2357/REALVISXL-V5": "https://huggingface.co/spaces/seawolf2357/REALVISXL-V5",
622
- "ginipick/Dokdo-multimodal": "https://huggingface.co/spaces/ginipick/Dokdo-multimodal",
623
- "ginigen/theater": "https://huggingface.co/spaces/ginigen/theater",
624
- "VIDraft/stock": "https://huggingface.co/spaces/VIDraft/stock",
625
- "fantos/flxcontrol": "https://huggingface.co/spaces/fantos/flxcontrol",
626
- "fantos/textcutobject": "https://huggingface.co/spaces/fantos/textcutobject",
627
- "ginipick/FLUX-Prompt-Generator": "https://huggingface.co/spaces/ginipick/FLUX-Prompt-Generator",
628
- "fantaxy/flxloraexp": "https://huggingface.co/spaces/fantaxy/flxloraexp",
629
- "fantos/flxloraexp": "https://huggingface.co/spaces/fantos/flxloraexp",
630
- "seawolf2357/flxloraexp": "https://huggingface.co/spaces/seawolf2357/flxloraexp",
631
- "ginipick/flxloraexp": "https://huggingface.co/spaces/ginipick/flxloraexp",
633
- "ginigen/Dokdo": "https://huggingface.co/spaces/ginigen/Dokdo",
634
- "aiqcamp/imagemagic": "https://huggingface.co/spaces/aiqcamp/imagemagic",
635
- "openfree/ColorRevive": "https://huggingface.co/spaces/openfree/ColorRevive",
636
- "VIDraft/RAGOndevice": "https://huggingface.co/spaces/VIDraft/RAGOndevice",
637
- "gunship999/Radar-Bluetooth": "https://huggingface.co/spaces/gunship999/Radar-Bluetooth",
638
- "gunship999/WiFi-VISION": "https://huggingface.co/spaces/gunship999/WiFi-VISION",
639
- "gunship999/SONAR-Radar": "https://huggingface.co/spaces/gunship999/SONAR-Radar",
640
- "aiqcamp/AudioLlama": "https://huggingface.co/spaces/aiqcamp/AudioLlama",
641
- "ginigen/FLUXllama-Multilingual": "https://huggingface.co/spaces/ginigen/FLUXllama-Multilingual",
642
- "ginipick/ginimedi": "https://huggingface.co/spaces/ginipick/ginimedi",
643
- "ginipick/ginilaw": "https://huggingface.co/spaces/ginipick/ginilaw",
644
- "ginipick/ginipharm": "https://huggingface.co/spaces/ginipick/ginipharm",
645
- "ginipick/FitGen": "https://huggingface.co/spaces/ginipick/FitGen",
646
- "fantaxy/FLUX-Animations": "https://huggingface.co/spaces/fantaxy/FLUX-Animations",
647
- "fantaxy/Remove-Video-Background": "https://huggingface.co/spaces/fantaxy/Remove-Video-Background",
648
- "fantaxy/ofai-flx-logo": "https://huggingface.co/spaces/fantaxy/ofai-flx-logo",
649
- "fantaxy/flx-pulid": "https://huggingface.co/spaces/fantaxy/flx-pulid",
650
- "fantaxy/flx-upscale": "https://huggingface.co/spaces/fantaxy/flx-upscale",
651
- "aiqcamp/Fashion-FLUX": "https://huggingface.co/spaces/aiqcamp/Fashion-FLUX",
652
- "ginipick/StyleGen": "https://huggingface.co/spaces/ginipick/StyleGen",
653
- "openfree/StoryStar": "https://huggingface.co/spaces/openfree/StoryStar",
654
- "fantos/x-mas": "https://huggingface.co/spaces/fantos/x-mas",
655
- "openfree/Korean-Leaderboard": "https://huggingface.co/spaces/openfree/Korean-Leaderboard",
656
- "ginipick/FLUXllama": "https://huggingface.co/spaces/ginipick/FLUXllama",
657
- "ginipick/SORA-3D": "https://huggingface.co/spaces/ginipick/SORA-3D",
658
- "fantaxy/Sound-AI-SFX": "https://huggingface.co/spaces/fantaxy/Sound-AI-SFX",
659
- "fantos/flx8lora": "https://huggingface.co/spaces/fantos/flx8lora",
660
- "ginigen/Canvas": "https://huggingface.co/spaces/ginigen/Canvas",
661
- "fantaxy/erotica": "https://huggingface.co/spaces/fantaxy/erotica",
662
- "ginipick/time-machine": "https://huggingface.co/spaces/ginipick/time-machine",
663
- "aiqcamp/FLUX-VisionReply": "https://huggingface.co/spaces/aiqcamp/FLUX-VisionReply",
664
- "openfree/Tetris-Game": "https://huggingface.co/spaces/openfree/Tetris-Game",
665
- "openfree/everychat": "https://huggingface.co/spaces/openfree/everychat",
666
- "VIDraft/mouse1": "https://huggingface.co/spaces/VIDraft/mouse1",
667
- "kolaslab/alpha-go": "https://huggingface.co/spaces/kolaslab/alpha-go",
668
- "ginipick/text3d": "https://huggingface.co/spaces/ginipick/text3d",
669
- "openfree/trending-board": "https://huggingface.co/spaces/openfree/trending-board",
670
- "cutechicken/tankwar": "https://huggingface.co/spaces/cutechicken/tankwar",
671
- "openfree/game-jewel": "https://huggingface.co/spaces/openfree/game-jewel",
672
- "VIDraft/mouse-chat": "https://huggingface.co/spaces/VIDraft/mouse-chat",
673
- "ginipick/AccDiffusion": "https://huggingface.co/spaces/ginipick/AccDiffusion",
674
- "aiqtech/Particle-Accelerator-Simulation": "https://huggingface.co/spaces/aiqtech/Particle-Accelerator-Simulation",
675
- "openfree/GiniGEN": "https://huggingface.co/spaces/openfree/GiniGEN",
676
- "kolaslab/3DAudio-Spectrum-Analyzer": "https://huggingface.co/spaces/kolaslab/3DAudio-Spectrum-Analyzer",
677
- "openfree/trending-news-24": "https://huggingface.co/spaces/openfree/trending-news-24",
678
- "ginipick/Realtime-FLUX": "https://huggingface.co/spaces/ginipick/Realtime-FLUX",
679
- "VIDraft/prime-number": "https://huggingface.co/spaces/VIDraft/prime-number",
680
- "kolaslab/zombie-game": "https://huggingface.co/spaces/kolaslab/zombie-game",
681
- "fantos/miro-game": "https://huggingface.co/spaces/fantos/miro-game",
682
- "kolaslab/shooting": "https://huggingface.co/spaces/kolaslab/shooting",
683
- "VIDraft/Mouse-Hackathon": "https://huggingface.co/spaces/VIDraft/Mouse-Hackathon",
684
- "aiqmaster/stocksimulation": "https://huggingface.co/spaces/aiqmaster/stocksimulation",
685
- "aiqmaster/assetai": "https://huggingface.co/spaces/aiqmaster/assetai",
686
- "aiqmaster/stockai": "https://huggingface.co/spaces/aiqmaster/stockai",
687
- "cutechicken/TankWar3D": "https://huggingface.co/spaces/cutechicken/TankWar3D",
688
- "kolaslab/RC4-EnDecoder": "https://huggingface.co/spaces/kolaslab/RC4-EnDecoder",
689
- "kolaslab/simulator": "https://huggingface.co/spaces/kolaslab/simulator",
690
- "kolaslab/calculator": "https://huggingface.co/spaces/kolaslab/calculator",
691
- "aiqtech/kofaceid": "https://huggingface.co/spaces/aiqtech/kofaceid",
692
- "fantaxy/fastvideogena": "https://huggingface.co/spaces/fantaxy/fastvideogen",
693
- "fantos/cogvidx": "https://huggingface.co/spaces/fantos/cogvidx",
694
- "fantos/flxfashmodel": "https://huggingface.co/spaces/fantos/flxfashmodel",
695
- "fantos/kolcontrl": "https://huggingface.co/spaces/fantos/kolcontrl",
696
- "fantos/EveryText": "https://huggingface.co/spaces/fantos/EveryText",
697
- "aiqtech/cinevid": "https://huggingface.co/spaces/aiqtech/cinevid",
698
- "aiqtech/FLUX-Ghibli-Studio-LoRA": "https://huggingface.co/spaces/aiqtech/FLUX-Ghibli-Studio-LoRA",
699
- "aiqtech/flxgif": "https://huggingface.co/spaces/aiqtech/flxgif",
700
- "aiqtech/imaginpaint": "https://huggingface.co/spaces/aiqtech/imaginpaint",
701
-
702
-
703
- "upstage/open-ko-llm-leaderboard": "https://huggingface.co/spaces/upstage/open-ko-llm-leaderboard",
704
- "LGAI-EXAONE/EXAONE-3.5-Instruct-Demo": "https://huggingface.co/spaces/LGAI-EXAONE/EXAONE-3.5-Instruct-Demo",
705
- "LeeSangHoon/HierSpeech_TTS": "https://huggingface.co/spaces/LeeSangHoon/HierSpeech_TTS",
706
- "etri-vilab/Ko-LLaVA": "https://huggingface.co/spaces/etri-vilab/Ko-LLaVA",
707
- "etri-vilab/KOALA": "https://huggingface.co/spaces/etri-vilab/KOALA",
708
- "naver-clova-ix/donut-base-finetuned-cord-v2": "https://huggingface.co/spaces/naver-clova-ix/donut-base-finetuned-cord-v2",
709
- "NCSOFT/VARCO_Arena": "https://huggingface.co/spaces/NCSOFT/VARCO_Arena"
710
- }
711
-
712
- def get_spaces_data(sort_type="trending", progress=gr.Progress()):
713
- """스페이스 데이터 가져오기 (trending 또는 modes)"""
714
- url = "https://huggingface.co/api/spaces"
715
- params = {
716
- 'full': 'true',
717
- 'limit': 500
718
- }
719
-
720
- if sort_type == "modes":
721
- params['sort'] = 'likes'
722
-
723
- try:
724
- progress(0, desc=f"Fetching {sort_type} spaces data...")
725
- response = requests.get(url, params=params)
726
- response.raise_for_status()
727
- all_spaces = response.json()
728
-
729
- # Store rank information
730
- space_ranks = {}
731
- for idx, space in enumerate(all_spaces, 1):
732
- space_id = space.get('id', '')
733
- if space_id in target_spaces:
734
- space['rank'] = idx
735
- space_ranks[space_id] = space
736
-
737
- spaces = [space_ranks[space_id] for space_id in space_ranks.keys()]
738
- spaces.sort(key=lambda x: x['rank'])
739
-
740
- progress(0.3, desc="Creating visualization...")
741
-
742
- # Create visualization
743
- fig = go.Figure()
744
-
745
- # Prepare data
746
- ids = [space['id'] for space in spaces]
747
- ranks = [space['rank'] for space in spaces]
748
- likes = [space.get('likes', 0) for space in spaces]
749
- titles = [space.get('cardData', {}).get('title') or space.get('title', 'No Title') for space in spaces]
750
-
751
- # Create bar chart
752
- fig.add_trace(go.Bar(
753
- x=ids,
754
- y=ranks,
755
- text=[f"Rank: {r}<br>Title: {t}<br>Likes: {l}"
756
- for r, t, l in zip(ranks, titles, likes)],
757
- textposition='auto',
758
- marker_color='rgb(158,202,225)',
759
- opacity=0.8
760
- ))
761
-
762
- fig.update_layout(
763
- title={
764
- 'text': f'Hugging Face Spaces {sort_type.title()} Rankings (Top 500)',
765
- 'y':0.95,
766
- 'x':0.5,
767
- 'xanchor': 'center',
768
- 'yanchor': 'top'
769
- },
770
- xaxis_title='Space ID',
771
- yaxis_title='Rank',
772
- yaxis=dict(
773
- autorange='reversed', # invert the Y-axis
774
- tickmode='array',
775
- ticktext=[str(i) for i in range(1, 501, 20)], # show ticks from 1 to 500 in steps of 20
776
- tickvals=[i for i in range(1, 501, 20)],
777
- range=[1, 500] # set the Y-axis range from 1 to 500
778
- ),
779
- height=800,
780
- showlegend=False,
781
- template='plotly_white',
782
- xaxis_tickangle=-45
783
- )
784
-
785
- progress(0.6, desc="Creating space cards...")
786
-
787
- # Build HTML cards
788
- html_content = f"""
789
- <div style='padding: 20px; background: #f5f5f5;'>
790
- <h2 style='color: #2c3e50;'>{sort_type.title()} Rankings</h2>
791
- <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
792
- """
793
-
794
- for space in spaces:
795
- space_id = space['id']
796
- rank = space['rank']
797
- title = space.get('cardData', {}).get('title') or space.get('title', 'No Title')
798
- likes = space.get('likes', 0)
799
-
800
- html_content += f"""
801
- <div style='
802
- background: white;
803
- padding: 20px;
804
- border-radius: 10px;
805
- box-shadow: 0 2px 4px rgba(0,0,0,0.1);
806
- transition: transform 0.2s;
807
- '>
808
- <h3 style='color: #34495e;'>Rank #{rank} - {space_id}</h3>
809
- <h4 style='
810
- color: #2980b9;
811
- margin: 10px 0;
812
- font-size: 1.2em;
813
- font-weight: bold;
814
- text-shadow: 1px 1px 2px rgba(0,0,0,0.1);
815
- background: linear-gradient(to right, #3498db, #2980b9);
816
- -webkit-background-clip: text;
817
- -webkit-text-fill-color: transparent;
818
- padding: 5px 0;
819
- '>{title}</h4>
820
- <p style='color: #7f8c8d; margin-bottom: 10px;'>👍 Likes: {likes}</p>
821
- <a href='{target_spaces[space_id]}'
822
- target='_blank'
823
- style='
824
- display: inline-block;
825
- padding: 8px 16px;
826
- background: #3498db;
827
- color: white;
828
- text-decoration: none;
829
- border-radius: 5px;
830
- transition: background 0.3s;
831
- '>
832
- Visit Space 🔗
833
- </a>
834
- </div>
835
- """
836
-
837
- html_content += "</div></div>"
838
-
839
- # Build the DataFrame
840
- df = pd.DataFrame([{
841
- 'Rank': space['rank'],
842
- 'Space ID': space['id'],
843
- 'Title': space.get('cardData', {}).get('title') or space.get('title', 'No Title'),
844
- 'Likes': space.get('likes', 0),
845
- 'URL': target_spaces[space['id']]
846
- } for space in spaces])
847
-
848
- progress(1.0, desc="Complete!")
849
- return fig, html_content, df
850
-
851
- except Exception as e:
852
- print(f"Error in get_spaces_data: {str(e)}")
853
- error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
854
- error_plot = create_error_plot()
855
- return error_plot, error_html, pd.DataFrame()
856
-
857
-
858
- def create_trend_visualization(spaces_data):
859
- if not spaces_data:
860
- return create_error_plot()
861
-
862
- fig = go.Figure()
863
-
864
- # Prepare rank data
865
- ranks = []
866
- for idx, space in enumerate(spaces_data, 1):
867
- space_id = space.get('id', '')
868
- if space_id in target_spaces:
869
- ranks.append({
870
- 'id': space_id,
871
- 'rank': idx,
872
- 'likes': space.get('likes', 0),
873
- 'title': space.get('title', 'N/A'),
874
- 'views': space.get('views', 0)
875
- })
876
-
877
- if not ranks:
878
- return create_error_plot()
879
-
880
- # Sort by rank
881
- ranks.sort(key=lambda x: x['rank'])
882
-
883
- # Build plot data
884
- ids = [r['id'] for r in ranks]
885
- rank_values = [r['rank'] for r in ranks]
886
- likes = [r['likes'] for r in ranks]
887
- views = [r['views'] for r in ranks]
888
-
889
- # Create bar chart
890
- fig.add_trace(go.Bar(
891
- x=ids,
892
- y=rank_values,
893
- text=[f"Rank: {r}<br>Likes: {l}<br>Views: {v}" for r, l, v in zip(rank_values, likes, views)],
894
- textposition='auto',
895
- marker_color='rgb(158,202,225)',
896
- opacity=0.8
897
- ))
898
-
899
- fig.update_layout(
900
- title={
901
- 'text': 'Current Trending Ranks (All Target Spaces)',
902
- 'y':0.95,
903
- 'x':0.5,
904
- 'xanchor': 'center',
905
- 'yanchor': 'top'
906
- },
907
- xaxis_title='Space ID',
908
- yaxis_title='Trending Rank',
909
- yaxis_autorange='reversed',
910
- height=800,
911
- showlegend=False,
912
- template='plotly_white',
913
- xaxis_tickangle=-45
914
- )
915
-
916
- return fig
917
-
918
- # Fallback function for when no token is available
919
- def get_trending_spaces_without_token():
920
- try:
921
- url = "https://huggingface.co/api/spaces"
922
- params = {
923
- 'sort': 'likes',
924
- 'direction': -1,
925
- 'limit': 500,
926
- 'full': 'true'
927
- }
928
-
929
- response = requests.get(url, params=params)
930
-
931
- if response.status_code == 200:
932
- return response.json()
933
- else:
934
- print(f"API 요청 실패 (토큰 없음): {response.status_code}")
935
- print(f"Response: {response.text}")
936
- return None
937
- except Exception as e:
938
- print(f"API 호출 중 에러 발생 (토큰 없음): {str(e)}")
939
- return None
940
-
941
- # API token setup and function selection
942
- if not HF_TOKEN:
943
- get_trending_spaces = get_trending_spaces_without_token
944
-
945
-
946
-
947
- def create_error_plot():
948
- fig = go.Figure()
949
- fig.add_annotation(
950
- text="데이터를 불러올 수 없습니다.\n(API 인증이 필요합니다)",
951
- xref="paper",
952
- yref="paper",
953
- x=0.5,
954
- y=0.5,
955
- showarrow=False,
956
- font=dict(size=20)
957
- )
958
- fig.update_layout(
959
- title="Error Loading Data",
960
- height=400
961
- )
962
- return fig
963
-
964
-
965
- def create_space_info_html(spaces_data):
966
- if not spaces_data:
967
- return "<div style='padding: 20px;'><h2>데이터를 불러오는데 실패했습니다.</h2></div>"
968
-
969
- html_content = """
970
- <div style='padding: 20px;'>
971
- <h2 style='color: #2c3e50;'>Current Trending Rankings</h2>
972
- <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
973
- """
974
-
975
- # 모든 target spaces를 포함하도록 수정
976
- for space_id in target_spaces.keys():
977
- space_info = next((s for s in spaces_data if s.get('id') == space_id), None)
978
- if space_info:
979
- rank = next((idx for idx, s in enumerate(spaces_data, 1) if s.get('id') == space_id), 'N/A')
980
- html_content += f"""
981
- <div style='
982
- background: white;
983
- padding: 20px;
984
- border-radius: 10px;
985
- box-shadow: 0 2px 4px rgba(0,0,0,0.1);
986
- transition: transform 0.2s;
987
- '>
988
- <h3 style='color: #34495e;'>#{rank} - {space_id}</h3>
989
- <p style='color: #7f8c8d;'>👍 Likes: {space_info.get('likes', 'N/A')}</p>
990
- <p style='color: #7f8c8d;'>👀 Views: {space_info.get('views', 'N/A')}</p>
991
- <p style='color: #2c3e50;'>{space_info.get('title', 'N/A')}</p>
992
- <p style='color: #7f8c8d; font-size: 0.9em;'>{space_info.get('description', 'N/A')[:100]}...</p>
993
- <a href='{target_spaces[space_id]}'
994
- target='_blank'
995
- style='
996
- display: inline-block;
997
- padding: 8px 16px;
998
- background: #3498db;
999
- color: white;
1000
- text-decoration: none;
1001
- border-radius: 5px;
1002
- transition: background 0.3s;
1003
- '>
1004
- Visit Space 🔗
1005
- </a>
1006
- </div>
1007
- """
1008
- else:
1009
- html_content += f"""
1010
- <div style='
1011
- background: #f8f9fa;
1012
- padding: 20px;
1013
- border-radius: 10px;
1014
- box-shadow: 0 2px 4px rgba(0,0,0,0.1);
1015
- '>
1016
- <h3 style='color: #34495e;'>{space_id}</h3>
1017
- <p style='color: #7f8c8d;'>Not in trending</p>
1018
- <a href='{target_spaces[space_id]}'
1019
- target='_blank'
1020
- style='
1021
- display: inline-block;
1022
- padding: 8px 16px;
1023
- background: #95a5a6;
1024
- color: white;
1025
- text-decoration: none;
1026
- border-radius: 5px;
1027
- '>
1028
- Visit Space 🔗
1029
- </a>
1030
- </div>
1031
- """
1032
-
1033
- html_content += "</div></div>"
1034
- return html_content
1035
-
1036
- def create_data_table(spaces_data):
-     if not spaces_data:
-         return pd.DataFrame()
- 
-     rows = []
-     for idx, space in enumerate(spaces_data, 1):
-         space_id = space.get('id', '')
-         if space_id in target_spaces:
-             rows.append({
-                 'Rank': idx,
-                 'Space ID': space_id,
-                 'Likes': space.get('likes', 'N/A'),
-                 'Title': space.get('title', 'N/A'),
-                 'URL': target_spaces[space_id]
-             })
- 
-     return pd.DataFrame(rows)
- 
- def refresh_data():
-     spaces_data = get_trending_spaces()
-     if spaces_data:
-         plot = create_trend_visualization(spaces_data)
-         info = create_space_info_html(spaces_data)
-         df = create_data_table(spaces_data)
-         return plot, info, df
-     else:
-         return create_error_plot(), "<div>API 인증이 필요합니다.</div>", pd.DataFrame()
- 
- 
- 
- def create_registration_bar_chart(data, type_name="Spaces"):
-     try:
-         # Ranking cut-off for what counts as "top"
-         top_limit = 500 if type_name == "Spaces" else 3000
- 
-         # Handle DataFrame input
-         if isinstance(data, pd.DataFrame):
-             if type_name == "Models":
-                 # Keep only models ranked within the top 3000
-                 data = data[data['Global Rank'].apply(lambda x: isinstance(x, (int, float)) or (isinstance(x, str) and x.startswith('#')))]
-                 data = data[data['Global Rank'].apply(lambda x: int(str(x).replace('#', '')) if isinstance(x, str) else x) <= top_limit]
-             elif type_name == "Spaces":
-                 # Keep only spaces ranked within the top 500
-                 data = data[data['Rank'].apply(lambda x: isinstance(x, (int, float))) & (data['Rank'] <= top_limit)]
- 
-             # Pick the ID column and count registrations per creator
-             id_column = 'Space ID' if type_name == "Spaces" else 'Model ID'
-             registrations = data[id_column].apply(lambda x: x.split('/')[0]).value_counts()
-         else:
-             # Handle list-like input
-             registrations = {}
-             for item in data:
-                 if isinstance(item, dict):
-                     rank = item.get('global_rank' if type_name == "Models" else 'rank')
-                     if not isinstance(rank, (int, float)) or rank > top_limit:
-                         continue
-                     creator = item.get('id', '').split('/')[0]
-                     registrations[creator] = registrations.get(creator, 0) + 1
-             registrations = pd.Series(registrations)
- 
-         # Sort creators by registration count
-         registrations = registrations.sort_values(ascending=False)
- 
-         fig = go.Figure(data=[go.Bar(
-             x=registrations.index,
-             y=registrations.values,
-             text=registrations.values,
-             textposition='auto',
-             marker_color='#FF6B6B'
-         )])
- 
-         fig.update_layout(
-             title=f"Korean {type_name} Registrations by Creator (Top {top_limit})",
-             xaxis_title="Creator ID",
-             yaxis_title="Number of Registrations",
-             showlegend=False,
-             height=400,
-             width=700
-         )
- 
-         return fig
-     except Exception as e:
-         print(f"Error in create_registration_bar_chart: {str(e)}")
-         return go.Figure()
- 
- def create_pie_chart(data, total_count, type_name="Spaces"):
-     try:
-         # Ranking cut-off for what counts as "top"
-         top_limit = 500 if type_name == "Spaces" else 3000
- 
-         # Handle DataFrame input
-         if isinstance(data, pd.DataFrame):
-             if type_name == "Models":
-                 # Keep only models ranked within the top 3000
-                 data = data[data['Global Rank'].apply(lambda x: isinstance(x, (int, float)) or (isinstance(x, str) and x.startswith('#')))]
-                 data = data[data['Global Rank'].apply(lambda x: int(str(x).replace('#', '')) if isinstance(x, str) else x) <= top_limit]
-             elif type_name == "Spaces":
-                 # Keep only spaces ranked within the top 500
-                 data = data[data['Rank'].apply(lambda x: isinstance(x, (int, float))) & (data['Rank'] <= top_limit)]
-             korean_count = len(data)
-         else:
-             # Handle list-like input
-             if type_name == "Models":
-                 korean_count = sum(1 for item in data if isinstance(item.get('global_rank'), (int, float)) and item.get('global_rank') <= top_limit)
-             else:
-                 korean_count = sum(1 for item in data if isinstance(item.get('rank'), (int, float)) and item.get('rank') <= top_limit)
- 
-         other_count = total_count - korean_count
- 
-         fig = go.Figure(data=[go.Pie(
-             labels=[f'Korean {type_name} in Top {top_limit}', f'Other {type_name} in Top {top_limit}'],
-             values=[korean_count, other_count],
-             hole=0.3,
-             marker_colors=['#FF6B6B', '#4ECDC4'],
-             textinfo='percent+value',
-             hovertemplate="<b>%{label}</b><br>" +
-                           "Count: %{value}<br>" +
-                           "Percentage: %{percent}<br>"
-         )])
- 
-         fig.update_layout(
-             title=f"Korean vs Other {type_name} Distribution (Top {top_limit})",
-             showlegend=True,
-             height=400,
-             width=500
-         )
- 
-         return fig
-     except Exception as e:
-         print(f"Error in create_pie_chart: {str(e)}")
-         return go.Figure()
- 
- def refresh_all_data():
-     spaces_results = get_spaces_data("trending")
-     models_results = get_models_data()
- 
-     # Build the Spaces charts
-     spaces_pie = create_pie_chart(spaces_results[2], 500, "Spaces")
-     spaces_bar = create_registration_bar_chart(spaces_results[2], "Spaces")
- 
-     # Build the Models charts
-     models_pie = create_pie_chart(models_results[2], 3000, "Models")
-     models_bar = create_registration_bar_chart(models_results[2], "Models")
- 
-     return [
-         spaces_results[0], spaces_results[1], spaces_results[2],
-         spaces_pie, spaces_bar,
-         models_results[0], models_results[1], models_results[2],
-         models_pie, models_bar
-     ]
- 
- 
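- # Note: get_spaces_data() and get_models_data() are defined earlier in this file;
- # each returns a (plot, html, dataframe) triple, which is why refresh_all_data()
- # above indexes its results with [0], [1] and [2].
- 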
- with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css="""
-     #spaces_pie, #models_pie {
-         min-height: 400px;
-         border-radius: 10px;
-         box-shadow: 0 2px 4px rgba(0,0,0,0.1);
-     }
-     #spaces_bar, #models_bar {
-         min-height: 400px;
-         border-radius: 10px;
-         box-shadow: 0 2px 4px rgba(0,0,0,0.1);
-     }
- """) as demo:
- 
-     gr.Markdown("""
-     # 🤗 허깅페이스 '한국(언어) 리더보드'
-     HuggingFace가 제공하는 Spaces와 Models 실시간 인기 순위 반영하여 '한국인(기업/언어)'의 리스트(공개,검색,리더보드 등)만 분석. (c)'한국인공지능진흥협회' / 요청: [email protected]
-     """)
- 
-     # Banner image and caption
-     gr.Markdown("""
-     ### [Hot NEWS] 허깅페이스 선정 12월 'TOP 12'에 한국 'ginipick'의 'FLUXllama'와 'Text3D' 2종이 선정됨
-     """)
-     gr.Image("HF-TOP12.png", show_label=False)
- 
-     # Refresh button
-     refresh_btn = gr.Button("🔄 새로 고침", variant="primary")
- 
- 
- with gr.Tab("Spaces Trending"):
1217
- trending_plot = gr.Plot()
1218
- with gr.Row():
1219
- # 원형 그래프와 막대 그래프를 위한 컨테이너 추가
1220
- with gr.Column(scale=1):
1221
- spaces_pie_chart = gr.Plot(
1222
- label="Korean Spaces Distribution",
1223
- elem_id="spaces_pie"
1224
- )
1225
- with gr.Column(scale=2):
1226
- spaces_bar_chart = gr.Plot(
1227
- label="Registrations by Creator",
1228
- elem_id="spaces_bar"
1229
- )
1230
- trending_info = gr.HTML()
1231
- trending_df = gr.DataFrame(
1232
- headers=["Rank", "Space ID", "Title", "Likes", "URL"],
1233
- datatype=["number", "str", "str", "number", "str"],
1234
- row_count=(10, "dynamic")
1235
- )
1236
-
1237
- with gr.Tab("Models Trending"):
1238
- models_plot = gr.Plot()
1239
- with gr.Row():
1240
- # 원형 그래프와 막대 그래프를 위한 컨테이너 추가
1241
- with gr.Column(scale=1):
1242
- models_pie_chart = gr.Plot(
1243
- label="Korean Models Distribution",
1244
- elem_id="models_pie"
1245
- )
1246
- with gr.Column(scale=2):
1247
- models_bar_chart = gr.Plot(
1248
- label="Registrations by Creator",
1249
- elem_id="models_bar"
1250
- )
1251
- models_info = gr.HTML()
1252
- models_df = gr.DataFrame(
1253
- headers=["Global Rank", "Model ID", "Title", "Downloads", "Likes", "Korea Search", "URL"],
1254
- datatype=["str", "str", "str", "str", "str", "str", "str"],
1255
- row_count=(10, "dynamic")
1256
- )
1257
-
1258
-     # Redefines refresh_all_data from above, adding error handling
-     def refresh_all_data():
-         try:
-             spaces_results = get_spaces_data("trending")
-             models_results = get_models_data()
- 
-             # Build the Spaces charts
-             spaces_pie = create_pie_chart(spaces_results[2], 500, "Spaces")
-             spaces_bar = create_registration_bar_chart(spaces_results[2], "Spaces")
- 
-             # Build the Models charts
-             models_pie = create_pie_chart(models_results[2], 3000, "Models")
-             models_bar = create_registration_bar_chart(models_results[2], "Models")
- 
-             return [
-                 spaces_results[0], spaces_results[1], spaces_results[2],
-                 spaces_pie, spaces_bar,
-                 models_results[0], models_results[1], models_results[2],
-                 models_pie, models_bar
-             ]
-         except Exception as e:
-             print(f"Error in refresh_all_data: {str(e)}")
-             # Return placeholder values on error
-             return [None] * 10
- 
-     # Refresh-button click handler
-     refresh_btn.click(
-         fn=refresh_all_data,
-         outputs=[
-             trending_plot, trending_info, trending_df,
-             spaces_pie_chart, spaces_bar_chart,
-             models_plot, models_info, models_df,
-             models_pie_chart, models_bar_chart
-         ]
-     )
- 
-     # Initial data load
-     try:
-         initial_data = refresh_all_data()
- 
-         # Set initial component values
-         trending_plot.value = initial_data[0]
-         trending_info.value = initial_data[1]
-         trending_df.value = initial_data[2]
-         spaces_pie_chart.value = initial_data[3]
-         spaces_bar_chart.value = initial_data[4]
-         models_plot.value = initial_data[5]
-         models_info.value = initial_data[6]
-         models_df.value = initial_data[7]
-         models_pie_chart.value = initial_data[8]
-         models_bar_chart.value = initial_data[9]
-     except Exception as e:
-         print(f"Error loading initial data: {str(e)}")
-         gr.Warning("초기 데이터 로드 중 오류가 발생했습니다.")
- 
- # Launch the Gradio app
- demo.launch(
-     server_name="0.0.0.0",
-     server_port=7860,
-     share=False,
-     show_error=True
- )