Delete app.py
app.py
DELETED
@@ -1,399 +0,0 @@
import gradio as gr
from huggingface_hub import InferenceClient, HfApi
import os
import requests
from typing import List, Dict, Union, Tuple
import traceback
from PIL import Image
from io import BytesIO
import asyncio
from gradio_client import Client
import time
import threading
import json
import re


HF_TOKEN = os.getenv("HF_TOKEN")
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
hf_api = HfApi(token=HF_TOKEN)

def get_headers():
    if not HF_TOKEN:
        raise ValueError("Hugging Face token not found in environment variables")
    return {"Authorization": f"Bearer {HF_TOKEN}"}

def get_file_content(space_id: str, file_path: str) -> str:
    # Fetch a file's raw contents from the Space repository.
    file_url = f"https://huggingface.co/spaces/{space_id}/raw/main/{file_path}"
    try:
        response = requests.get(file_url, headers=get_headers())
        if response.status_code == 200:
            return response.text
        else:
            return f"File not found or inaccessible: {file_path}"
    except requests.RequestException:
        return f"Error fetching content for file: {file_path}"

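# Illustrative usage sketch (added for clarity; not part of the original file).
# "gradio/hello_world" is a hypothetical public Space ID used only as an example,
# and this helper is never called by the app itself.
def _demo_get_file_content() -> None:
    readme = get_file_content("gradio/hello_world", "README.md")
    print("\n".join(readme.splitlines()[:5]))  # show the first few lines
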
def get_space_structure(space_id: str) -> Dict:
    try:
        files = hf_api.list_repo_files(repo_id=space_id, repo_type="space")

        tree = {"type": "directory", "path": "", "name": space_id, "children": []}
        for file in files:
            path_parts = file.split('/')
            current = tree
            for i, part in enumerate(path_parts):
                if i == len(path_parts) - 1:  # file
                    current["children"].append({"type": "file", "path": file, "name": part})
                else:  # directory
                    found = False
                    for child in current["children"]:
                        if child["type"] == "directory" and child["name"] == part:
                            current = child
                            found = True
                            break
                    if not found:
                        new_dir = {"type": "directory", "path": '/'.join(path_parts[:i+1]), "name": part, "children": []}
                        current["children"].append(new_dir)
                        current = new_dir

        return tree
    except Exception as e:
        print(f"Error in get_space_structure: {str(e)}")
        return {"error": f"API request error: {str(e)}"}

def format_tree_structure(tree_data: Dict, indent: str = "") -> str:
    if "error" in tree_data:
        return tree_data["error"]

    formatted = f"{indent}{'📁' if tree_data.get('type') == 'directory' else '📄'} {tree_data.get('name', 'Unknown')}\n"
    if tree_data.get("type") == "directory":
        for child in sorted(tree_data.get("children", []), key=lambda x: (x.get("type", "") != "directory", x.get("name", ""))):
            formatted += format_tree_structure(child, indent + "  ")
    return formatted

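# Illustrative usage sketch (added for clarity; not part of the original file).
# Assumes HF_TOKEN is set; "gradio/hello_world" is a hypothetical Space ID.
# Builds the directory tree and prints it the same way the "File Structure" box does.
def _demo_print_space_tree() -> None:
    tree = get_space_structure("gradio/hello_world")
    print(format_tree_structure(tree))
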
def summarize_code(app_content: str):
    system_message = "You are an AI assistant that analyzes and summarizes Python code. Summarize the given code concisely in three lines or fewer."
    user_message = f"Summarize the following Python code in three lines or fewer:\n\n{app_content}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        response = hf_client.chat_completion(messages, max_tokens=200, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        return f"Error while generating summary: {str(e)}"

def analyze_code(app_content: str):
    system_message = """You are an AI assistant that analyzes Python code. Analyze the given code and explain the following items:
A. Background and necessity
B. Functional usefulness and value
C. Key strengths
D. Intended audience and target users
E. Expected benefits
Compare it with existing and similar projects in your analysis. Output in Markdown format."""
    user_message = f"Analyze the following Python code:\n\n{app_content}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        response = hf_client.chat_completion(messages, max_tokens=1000, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        return f"Error while generating analysis: {str(e)}"

def explain_usage(app_content: str):
    system_message = "You are an AI assistant that analyzes Python code and explains how to use it. Based on the given code, describe the usage in detail as if walking through the screen. Output in Markdown format."
    user_message = f"Explain how to use the following Python code:\n\n{app_content}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        response = hf_client.chat_completion(messages, max_tokens=800, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        return f"Error while generating usage explanation: {str(e)}"

def adjust_lines_for_code(code_content: str, min_lines: int = 10, max_lines: int = 100) -> int:
    """
    Dynamically adjust the number of display lines based on the code content.

    Parameters:
    - code_content (str): the code text
    - min_lines (int): minimum number of lines
    - max_lines (int): maximum number of lines

    Returns:
    - int: the number of lines to use
    """
    # Count the lines in the code
    num_lines = len(code_content.split('\n'))
    # Clamp: use min_lines if the count is smaller, max_lines if it is larger
    return min(max(num_lines, min_lines), max_lines)

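# Illustrative worked example (added for clarity; not part of the original file):
# a 3-line snippet is clamped up to the minimum, a 500-line file down to the maximum.
def _demo_adjust_lines() -> None:
    assert adjust_lines_for_code("a\nb\nc") == 10                # below min_lines -> 10
    assert adjust_lines_for_code("\n".join(["x"] * 500)) == 100  # above max_lines -> 100
    assert adjust_lines_for_code("\n".join(["x"] * 42)) == 42    # in range -> actual count
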
def analyze_space(url: str, progress=gr.Progress()):
    try:
        space_id = url.split('spaces/')[-1]

        # Validate the Space ID format
        if not re.match(r'^[\w.-]+/[\w.-]+$', space_id):
            raise ValueError(f"Invalid Space ID format: {space_id}")

        progress(0.1, desc="Analyzing file structure...")
        tree_structure = get_space_structure(space_id)
        if "error" in tree_structure:
            raise ValueError(tree_structure["error"])
        tree_view = format_tree_structure(tree_structure)

        progress(0.3, desc="Fetching app.py content...")
        app_content = get_file_content(space_id, "app.py")

        progress(0.5, desc="Summarizing code...")
        summary = summarize_code(app_content)

        progress(0.7, desc="Analyzing code...")
        analysis = analyze_code(app_content)

        progress(0.9, desc="Generating usage explanation...")
        usage = explain_usage(app_content)

        # Compute the line count to size the code viewer
        app_py_lines = adjust_lines_for_code(app_content)

        progress(1.0, desc="Done")
        return app_content, tree_view, tree_structure, space_id, summary, analysis, usage, app_py_lines
    except Exception as e:
        print(f"Error in analyze_space: {str(e)}")
        print(traceback.format_exc())
        return f"An error occurred: {str(e)}", "", None, "", "", "", "", 10

def format_chat_history(chat_history):
    # Convert (user, assistant) tuples into OpenAI-style message dicts.
    formatted = []
    for user_message, assistant_message in chat_history:
        if user_message:
            formatted.append({"role": "user", "content": user_message})
        if assistant_message:
            formatted.append({"role": "assistant", "content": assistant_message})
    return formatted

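# Illustrative usage sketch (added for clarity; not part of the original file).
def _demo_format_chat_history() -> None:
    history = [("Hi", "Hello! How can I help?"), ("What does this Space do?", None)]
    print(format_chat_history(history))
    # -> [{'role': 'user', 'content': 'Hi'},
    #     {'role': 'assistant', 'content': 'Hello! How can I help?'},
    #     {'role': 'user', 'content': 'What does this Space do?'}]
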
def respond(
    message: str,
    chat_history: List[Dict[str, str]],
    system_message: str = "",
    max_tokens: int = 4000,
    temperature: float = 0.7,
    top_p: float = 0.9,
):
    system_prefix = """You must answer in Korean. Based on the given source code, your role is to "explain and guide service usage and handle Q&A". Write very kindly and in detail, using at least 4000 tokens. You explain usage and answer questions based on the code, and you must help the user. Kindly cover anything the user is likely to wonder about. Keep the full code contents confidential, and do not reveal key values, endpoints, or the specific model."""

    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    # The chatbot uses the "messages" format, so the incoming history is already a
    # list of {"role": ..., "content": ...} dicts.
    for msg_dict in chat_history:
        messages.append({"role": msg_dict["role"], "content": msg_dict["content"]})
    messages.append({"role": "user", "content": message})

    try:
        response = ""
        for chunk in hf_client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            if chunk.choices[0].delta.content is not None:
                response += chunk.choices[0].delta.content
                yield chat_history + [
                    {"role": "user", "content": message},
                    {"role": "assistant", "content": response},
                ]
    except Exception as e:
        error_message = f"Error while generating response: {str(e)}"
        yield chat_history + [
            {"role": "user", "content": message},
            {"role": "assistant", "content": error_message},
        ]

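# Illustrative usage sketch (added for clarity; not part of the original file).
# Consumes the streaming generator directly, outside of Gradio; requires HF_TOKEN
# and network access, otherwise the final yield simply contains the error message.
def _demo_respond_stream() -> None:
    history = []
    for history in respond("How do I use this Space?", chat_history=[]):
        pass  # each yield is the full updated message history
    if history:
        print(history[-1]["content"])  # final assistant reply
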
def create_ui():
    try:
        css = """
        footer {visibility: hidden;}
        .output-group {
            border: 1px solid #ddd;
            border-radius: 5px;
            padding: 10px;
            margin-bottom: 20px;
        }
        .scroll-lock {
            overflow-y: auto !important;
            max-height: calc((100vh - 200px) / 5) !important;
        }
        .tree-view-scroll {
            overflow-y: auto !important;
            max-height: calc((100vh - 200px) / 2) !important;
        }
        .full-height {
            height: calc(200em * 1.2) !important;
            overflow-y: auto !important;
        }
        .code-box {
            overflow-x: auto !important;
            overflow-y: auto !important;
            white-space: pre !important;
            word-wrap: normal !important;
            height: 100% !important;
        }
        .code-box > div {
            min-width: 100% !important;
        }
        .code-box > div > textarea {
            word-break: normal !important;
            overflow-wrap: normal !important;
        }
        .tab-nav {
            background-color: #2c3e50;
            border-radius: 5px 5px 0 0;
            overflow: hidden;
        }
        .tab-nav button {
            color: #ecf0f1 !important;
            background-color: #34495e;
            border: none;
            padding: 10px 20px;
            margin: 0;
            transition: background-color 0.3s;
            font-size: 16px;
            font-weight: bold;
        }
        .tab-nav button:hover {
            background-color: #2980b9;
        }
        .tab-nav button.selected {
            color: #2c3e50 !important;
            background-color: #ecf0f1;
        }
        input[type="text"], textarea {
            color: #2c3e50 !important;
            background-color: #ecf0f1 !important;
        }
        """

        with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css) as demo:
            gr.Markdown("# Mouse: HuggingFace")

            with gr.Tabs() as tabs:
                with gr.TabItem("Analysis"):
                    with gr.Row():
                        with gr.Column(scale=6):  # left column, 60%
                            url_input = gr.Textbox(label="HuggingFace Space URL")
                            analyze_button = gr.Button("Analyze")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                summary_output = gr.Markdown(label="Summary (3 lines or fewer)")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                analysis_output = gr.Markdown(label="Analysis")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                usage_output = gr.Markdown(label="Usage")

                            with gr.Group(elem_classes="output-group tree-view-scroll"):  # scrollable tree view
                                tree_view_output = gr.Textbox(label="File Structure (Tree View)", lines=30)

                        with gr.Column(scale=4):  # right column, 40%
                            with gr.Group(elem_classes="output-group full-height"):
                                code_tabs = gr.Tabs()
                                with code_tabs:
                                    app_py_tab = gr.TabItem("app.py")
                                    with app_py_tab:
                                        app_py_content = gr.Code(
                                            language="python",
                                            label="app.py",
                                            lines=200,
                                            elem_classes="full-height code-box"
                                        )
                                    requirements_tab = gr.TabItem("requirements.txt")
                                    with requirements_tab:
                                        requirements_content = gr.Textbox(
                                            label="requirements.txt",
                                            lines=200,
                                            elem_classes="full-height code-box"
                                        )

                with gr.TabItem("AI Coding"):
                    chatbot = gr.Chatbot(label="Conversation", type="messages")
                    msg = gr.Textbox(label="Message")

                    system_message = gr.Textbox(label="System Message", value="")
                    max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens")
                    temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature")
                    top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P")

                    examples = [
                        ["Explain the usage in detail, as if walking through the screen, in at least 4000 tokens"],
                        ["Write 20 detailed FAQ entries. Use at least 4000 tokens."],
                        ["Write a YouTube video script of at least 4000 tokens, focusing on usage, differentiators, features, and strengths"],
                        ["Write an SEO-optimized blog post about this service (including background and necessity, strengths compared with existing similar services, use cases, value, expected benefits, and a conclusion) in at least 4000 tokens"],
                        ["Write at least 4000 tokens centered on innovative, creative invention content, structured like a patent application and covering the technology and business-model aspects to be used in the filing."],
                        ["Continue the previous answer"],
                    ]

                    gr.Examples(examples, inputs=msg)

                    # respond streams the full messages-format history, so only the chatbot is updated.
                    msg.submit(respond, [msg, chatbot, system_message, max_tokens, temperature, top_p], [chatbot])

            space_id_state = gr.State()
            tree_structure_state = gr.State()
            app_py_content_lines = gr.State()

            analyze_button.click(
                analyze_space,
                inputs=[url_input],
                outputs=[app_py_content, tree_view_output, tree_structure_state, space_id_state, summary_output, analysis_output, usage_output, app_py_content_lines]
            ).then(
                lambda space_id: get_file_content(space_id, "requirements.txt"),
                inputs=[space_id_state],
                outputs=[requirements_content]
            )

            # Dynamically set the number of display lines for the code box
            app_py_content.change(lambda lines: gr.update(lines=lines), inputs=[app_py_content_lines], outputs=[app_py_content])

        return demo

    except Exception as e:
        print(f"Error in create_ui: {str(e)}")
        print(traceback.format_exc())
        raise

if __name__ == "__main__":
    try:
        print("Starting HuggingFace Space Analyzer...")
        demo = create_ui()
        print("UI created successfully.")

        print("Configuring Gradio queue...")
        demo.queue()
        print("Gradio queue configured.")

        print("Launching Gradio app...")
        demo.launch(
            server_name="0.0.0.0",
            server_port=7860,
            share=False,
            debug=True,
            show_api=False
        )
        print("Gradio app launched successfully.")
    except Exception as e:
        print(f"Error in main: {str(e)}")
        print("Detailed error information:")
        print(traceback.format_exc())
        raise
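# Local run sketch (added for clarity; not part of the original file), assuming the
# dependencies from requirements.txt are installed and HF_TOKEN holds a valid token:
#   HF_TOKEN=hf_xxx python app.py
# then open http://localhost:7860 (the app binds to 0.0.0.0:7860 as configured above).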