Spaces:
Running
Running
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,397 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
from huggingface_hub import InferenceClient, HfApi
|
3 |
+
import os
|
4 |
+
import requests
|
5 |
+
from typing import List, Dict, Union, Tuple
|
6 |
+
import traceback
|
7 |
+
from PIL import Image
|
8 |
+
from io import BytesIO
|
9 |
+
import asyncio
|
10 |
+
from gradio_client import Client
|
11 |
+
import time
|
12 |
+
import threading
|
13 |
+
import json
|
14 |
+
import re
|
15 |
+
import asyncio
|
16 |
+
|
17 |
+
|
18 |
+
# Shared HuggingFace credentials/clients used by every helper below.
HF_TOKEN = os.getenv("HF_TOKEN")  # may be None; get_headers() enforces presence
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
hf_api = HfApi(token=HF_TOKEN)
|
21 |
+
|
22 |
+
def get_headers():
    """Return the Authorization header dict for HuggingFace API calls.

    Raises:
        ValueError: when the HF_TOKEN environment variable is not set.
    """
    if HF_TOKEN:
        return {"Authorization": f"Bearer {HF_TOKEN}"}
    raise ValueError("Hugging Face token not found in environment variables")
|
26 |
+
|
27 |
+
def get_file_content(space_id: str, file_path: str) -> str:
    """Fetch a file's raw text from a HuggingFace Space repository.

    Args:
        space_id: Space identifier in "owner/name" form.
        file_path: path of the file inside the repo (e.g. "app.py").

    Returns:
        The file content on success, otherwise a human-readable error string
        (this function never raises; the UI renders whatever it returns).
    """
    file_url = f"https://huggingface.co/spaces/{space_id}/raw/main/{file_path}"
    try:
        # FIX: added a timeout — the original call had none, so a stalled
        # connection would hang the Gradio handler indefinitely. A timeout
        # raises requests.Timeout, which the except below already covers.
        response = requests.get(file_url, headers=get_headers(), timeout=30)
        if response.status_code == 200:
            return response.text
        else:
            return f"File not found or inaccessible: {file_path}"
    except requests.RequestException:
        return f"Error fetching content for file: {file_path}"
|
37 |
+
|
38 |
+
def get_space_structure(space_id: str) -> Dict:
    """Build a nested dict describing a Space repo's file tree.

    Each node is {"type": "directory"|"file", "path": ..., "name": ...},
    with directories carrying a "children" list. The root node is the Space
    itself. On any failure a {"error": message} dict is returned instead.
    """
    try:
        files = hf_api.list_repo_files(repo_id=space_id, repo_type="space")

        tree = {"type": "directory", "path": "", "name": space_id, "children": []}
        for file in files:
            path_parts = file.split('/')
            # Walk/extend the tree one path component at a time; `current`
            # always points at the directory node being filled in.
            current = tree
            for i, part in enumerate(path_parts):
                if i == len(path_parts) - 1:  # last component: a file leaf
                    current["children"].append({"type": "file", "path": file, "name": part})
                else:  # intermediate component: a directory
                    # Reuse an existing directory node if one was already
                    # created by an earlier file sharing this prefix.
                    found = False
                    for child in current["children"]:
                        if child["type"] == "directory" and child["name"] == part:
                            current = child
                            found = True
                            break
                    if not found:
                        new_dir = {"type": "directory", "path": '/'.join(path_parts[:i+1]), "name": part, "children": []}
                        current["children"].append(new_dir)
                        current = new_dir

        return tree
    except Exception as e:
        # Keep the UI alive: report errors as data, not exceptions.
        print(f"Error in get_space_structure: {str(e)}")
        return {"error": f"API request error: {str(e)}"}
|
65 |
+
|
66 |
+
def format_tree_structure(tree_data: Dict, indent: str = "") -> str:
    """Render the repo tree (from get_space_structure) as indented text.

    Directories sort before files, each group alphabetically; an
    {"error": ...} node renders as its error message alone.
    """
    if "error" in tree_data:
        return tree_data["error"]

    is_dir = tree_data.get("type") == "directory"
    icon = '๐' if is_dir else '๐'
    pieces = [f"{indent}{icon} {tree_data.get('name', 'Unknown')}\n"]

    if is_dir:
        # Directories first, then files; ties broken by name.
        ordered = sorted(
            tree_data.get("children", []),
            key=lambda node: (node.get("type", "") != "directory", node.get("name", "")),
        )
        for node in ordered:
            pieces.append(format_tree_structure(node, indent + " "))

    return "".join(pieces)
|
75 |
+
|
76 |
+
def summarize_code(app_content: str):
    """Ask the hosted LLM for a summary (<= 3 lines) of the given source.

    Returns the model reply text, or an error string on failure (the UI
    renders the return value either way).
    """
    # Prompts are Korean: "you summarize Python code; keep it to 3 lines".
    system_message = "๋น์ ์ Python ์ฝ๋๋ฅผ ๋ถ์ํ๊ณ ์์ฝํ๋ AI ์กฐ์์๋๋ค. ์ฃผ์ด์ง ์ฝ๋๋ฅผ 3์ค ์ด๋ด๋ก ๊ฐ๊ฒฐํ๊ฒ ์์ฝํด์ฃผ์ธ์."
    user_message = f"๋ค์ Python ์ฝ๋๋ฅผ 3์ค ์ด๋ด๋ก ์์ฝํด์ฃผ์ธ์:\n\n{app_content}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        # 200 tokens is ample for a three-line summary.
        response = hf_client.chat_completion(messages, max_tokens=200, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        # Korean: "error while generating summary".
        return f"์์ฝ ์์ฑ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
90 |
+
|
91 |
+
def analyze_code(app_content: str):
    """Ask the hosted LLM for a structured analysis of the given source.

    The (Korean) prompt requests sections A–E: background/need, usefulness
    and value, distinguishing features, target audience, expected impact —
    output in Markdown. Returns the reply text or an error string.
    """
    system_message = """๋น์ ์ Python ์ฝ๋๋ฅผ ๋ถ์ํ๋ AI ์กฐ์์๋๋ค. ์ฃผ์ด์ง ์ฝ๋๋ฅผ ๋ถ์ํ์ฌ ๋ค์ ํญ๋ชฉ์ ๋ํด ์ค๋ชํด์ฃผ์ธ์:
A. ๋ฐฐ๊ฒฝ ๋ฐ ํ์์ฑ
B. ๊ธฐ๋ฅ์ ํจ์ฉ์ฑ ๋ฐ ๊ฐ์น
C. ํน์ฅ์
D. ์ ์ฉ ๋์ ๋ฐ ํ๊ฒ
E. ๊ธฐ๋ํจ๊ณผ
๊ธฐ์กด ๋ฐ ์ ์ฌ ํ๋ก์ ํธ์ ๋น๊ตํ์ฌ ๋ถ์ํด์ฃผ์ธ์. Markdown ํ์์ผ๋ก ์ถ๋ ฅํ์ธ์."""
    user_message = f"๋ค์ Python ์ฝ๋๋ฅผ ๋ถ์ํด์ฃผ์ธ์:\n\n{app_content}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        # Larger budget than summarize_code: the analysis is multi-section.
        response = hf_client.chat_completion(messages, max_tokens=1000, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        # Korean: "error while generating analysis".
        return f"๋ถ์ ์์ฑ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
111 |
+
|
112 |
+
def explain_usage(app_content: str):
    """Ask the hosted LLM for a usage walkthrough of the given source.

    The (Korean) prompt asks for a screen-by-screen usage description in
    Markdown. Returns the reply text or an error string.
    """
    system_message = "๋น์ ์ Python ์ฝ๋๋ฅผ ๋ถ์ํ์ฌ ์ฌ์ฉ๋ฒ์ ์ค๋ชํ๋ AI ์กฐ์์๋๋ค. ์ฃผ์ด์ง ์ฝ๋๋ฅผ ๋ฐํ์ผ๋ก ๋ง์น ํ๋ฉด์ ๋ณด๋ ๊ฒ์ฒ๋ผ ์ฌ์ฉ๋ฒ์ ์์ธํ ์ค๋ชํด์ฃผ์ธ์. Markdown ํ์์ผ๋ก ์ถ๋ ฅํ์ธ์."
    user_message = f"๋ค์ Python ์ฝ๋์ ์ฌ์ฉ๋ฒ์ ์ค๋ชํด์ฃผ์ธ์:\n\n{app_content}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        response = hf_client.chat_completion(messages, max_tokens=800, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        # Korean: "error while generating usage explanation".
        return f"์ฌ์ฉ๋ฒ ์ค๋ช์์ฑ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
126 |
+
|
127 |
+
def adjust_lines_for_code(code_content: str, min_lines: int = 10, max_lines: int = 100) -> int:
    """Pick a display line count for a code viewer.

    The count tracks the number of lines in ``code_content`` but is clamped
    to the inclusive range [``min_lines``, ``max_lines``].

    Parameters:
        code_content: the code text to be displayed.
        min_lines: lower bound on the returned count.
        max_lines: upper bound on the returned count.

    Returns:
        The clamped line count.
    """
    # Equivalent to len(code_content.split('\n')): newline count plus one.
    line_count = code_content.count('\n') + 1

    # Clamp with explicit guards rather than nested min/max.
    if line_count < min_lines:
        return min_lines
    if line_count > max_lines:
        return max_lines
    return line_count
|
143 |
+
|
144 |
+
def analyze_space(url: str, progress=gr.Progress()):
    """Analyze a HuggingFace Space end to end.

    Fetches the file tree, app.py, and three LLM-generated reports
    (summary, analysis, usage guide) with progress updates for the UI.

    Returns the 8-tuple
    (app.py content, tree text, tree dict, space id, summary, analysis,
     usage, suggested editor line count); on failure the first element
    carries the error message and the rest are blank/defaults.
    """
    try:
        # FIX: strip whitespace and slashes before validating — a pasted URL
        # like ".../spaces/owner/name/" previously failed the format check
        # below even though it identifies a valid Space.
        space_id = url.strip().split('spaces/')[-1].strip('/')

        # Space IDs must look like "owner/name".
        if not re.match(r'^[\w.-]+/[\w.-]+$', space_id):
            raise ValueError(f"Invalid Space ID format: {space_id}")

        progress(0.1, desc="ํ์ผ ๊ตฌ์กฐ ๋ถ์ ์ค...")
        tree_structure = get_space_structure(space_id)
        if "error" in tree_structure:
            raise ValueError(tree_structure["error"])
        tree_view = format_tree_structure(tree_structure)

        progress(0.3, desc="app.py ๋ด์ฉ ๊ฐ์ ธ์ค๋ ์ค...")
        app_content = get_file_content(space_id, "app.py")

        progress(0.5, desc="์ฝ๋ ์์ฝ ์ค...")
        summary = summarize_code(app_content)

        progress(0.7, desc="์ฝ๋ ๋ถ์ ์ค...")
        analysis = analyze_code(app_content)

        progress(0.9, desc="์ฌ์ฉ๋ฒ ์ค๋ช์์ฑ ์ค...")
        usage = explain_usage(app_content)

        # Size the code viewer to the fetched file.
        app_py_lines = adjust_lines_for_code(app_content)

        progress(1.0, desc="์๋ฃ")
        return app_content, tree_view, tree_structure, space_id, summary, analysis, usage, app_py_lines
    except Exception as e:
        print(f"Error in analyze_space: {str(e)}")
        print(traceback.format_exc())
        # Shape must match the success tuple so Gradio outputs stay bound.
        return f"์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}", "", None, "", "", "", "", 10
|
179 |
+
|
180 |
+
|
181 |
+
|
182 |
+
async def respond_stream(message: str, chat_history: List[Dict[str, str]], max_tokens: int, temperature: float, top_p: float):
    """Stream an assistant reply for the chat tab.

    Async generator: yields the accumulated reply text after each streamed
    chunk; on failure yields a single (Korean) error string.
    """
    # Korean system prompt: "you are an AI coding expert specialized in
    # HuggingFace; answer kindly and in detail; mind Gradio specifics and
    # requirements.txt; always be accurate and helpful."
    system_message = """๋น์ ์ ํ๊น
ํ์ด์ค์ ํนํ๋ AI ์ฝ๋ฉ ์ ๋ฌธ๊ฐ์๋๋ค. ์ฌ์ฉ์์ ์ง๋ฌธ์ ์น์ ํ๊ณ ์์ธํ๊ฒ ๋ต๋ณํด์ฃผ์ธ์.
Gradio ํน์ฑ์ ์ ํํ ์ธ์ํ๊ณ Requirements.txt ๋๋ฝ์์ด ์ฝ๋ฉ๊ณผ ์ค๋ฅ๋ฅผ ํด๊ฒฐํด์ผ ํฉ๋๋ค.
ํญ์ ์ ํํ๊ณ ์ ์ฉํ ์ ๋ณด๋ฅผ ์ ๊ณตํ๋๋ก ๋
ธ๋ ฅํ์ธ์."""

    messages = [{"role": "system", "content": system_message}]
    messages.extend(chat_history)
    messages.append({"role": "user", "content": message})

    try:
        # FIX: the original called hf_client.text_generation() with the model
        # name as the *prompt* and the messages list as the second positional
        # argument — text_generation() takes a plain prompt string, and the
        # model is already bound to hf_client. Use the chat endpoint, which
        # accepts the messages list directly.
        stream = hf_client.chat_completion(
            messages,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            stream=True,
        )

        full_response = ""
        for chunk in stream:
            # Each streamed chunk carries an incremental delta; skip empties.
            delta = chunk.choices[0].delta.content
            if not delta:
                continue
            full_response += delta
            yield full_response
    except Exception as e:
        # Korean: "error while generating response".
        yield f"์๋ต ์์ฑ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
209 |
+
|
210 |
+
|
211 |
+
def create_ui():
    """Build and return the Gradio Blocks app (analysis tab + chat tab)."""
    try:
        # Custom CSS: hide the footer, box each output group, cap panel
        # heights, and theme the tab bar / inputs.
        css = """
        footer {visibility: hidden;}
        .output-group {
            border: 1px solid #ddd;
            border-radius: 5px;
            padding: 10px;
            margin-bottom: 20px;
        }
        .scroll-lock {
            overflow-y: auto !important;
            max-height: calc((100vh - 200px) / 5) !important;
        }
        .tree-view-scroll {
            overflow-y: auto !important;
            max-height: calc((100vh - 200px) / 2) !important;
        }
        .full-height {
            height: calc(200em * 1.2) !important;
            overflow-y: auto !important;
        }
        .code-box {
            overflow-x: auto !important;
            overflow-y: auto !important;
            white-space: pre !important;
            word-wrap: normal !important;
            height: 100% !important;
        }
        .code-box > div {
            min-width: 100% !important;
        }
        .code-box > div > textarea {
            word-break: normal !important;
            overflow-wrap: normal !important;
        }
        .tab-nav {
            background-color: #2c3e50;
            border-radius: 5px 5px 0 0;
            overflow: hidden;
        }
        .tab-nav button {
            color: #ecf0f1 !important;
            background-color: #34495e;
            border: none;
            padding: 10px 20px;
            margin: 0;
            transition: background-color 0.3s;
            font-size: 16px;
            font-weight: bold;
        }
        .tab-nav button:hover {
            background-color: #2980b9;
        }
        .tab-nav button.selected {
            color: #2c3e50 !important;
            background-color: #ecf0f1;
        }
        input[type="text"], textarea {
            color: #2c3e50 !important;
            background-color: #ecf0f1 !important;
        }
        """

        with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css) as demo:
            gr.Markdown("# Mouse: HuggingFace")

            with gr.Tabs() as tabs:
                # --- Analysis tab: URL in, reports + file tree + code out ---
                with gr.TabItem("๋ถ์"):
                    with gr.Row():
                        with gr.Column(scale=6):  # left 60%
                            url_input = gr.Textbox(label="HuggingFace Space URL")
                            analyze_button = gr.Button("๋ถ์")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                summary_output = gr.Markdown(label="์์ฝ (3์ค ์ด๋ด)")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                analysis_output = gr.Markdown(label="๋ถ์")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                usage_output = gr.Markdown(label="์ฌ์ฉ๋ฒ")

                            with gr.Group(elem_classes="output-group tree-view-scroll"):
                                tree_view_output = gr.Textbox(label="ํ์ผ ๊ตฌ์กฐ (Tree View)", lines=30)

                        with gr.Column(scale=4):  # right 40%
                            with gr.Group(elem_classes="output-group full-height"):
                                code_tabs = gr.Tabs()
                                with code_tabs:
                                    app_py_tab = gr.TabItem("app.py")
                                    with app_py_tab:
                                        app_py_content = gr.Code(
                                            language="python",
                                            label="app.py",
                                            lines=200,
                                            elem_classes="full-height code-box"
                                        )
                                    requirements_tab = gr.TabItem("requirements.txt")
                                    with requirements_tab:
                                        requirements_content = gr.Textbox(
                                            label="requirements.txt",
                                            lines=200,
                                            elem_classes="full-height code-box"
                                        )

                # --- Chat tab: streaming LLM assistant ---
                with gr.TabItem("AI ์ฝ๋ฉ"):
                    chatbot = gr.Chatbot(label="๋ํ", type='messages')
                    msg = gr.Textbox(label="๋ฉ์์ง")

                    # Generation knobs exist but are hidden from the user.
                    max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens", visible=False)
                    temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature", visible=False)
                    top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P", visible=False)

                    examples = [
                        ["์์ธํ ์ฌ์ฉ ๋ฐฉ๋ฒ์ ๋ง์น ํ๋ฉด์ ๋ณด๋ฉด์ ์ค๋ชํ๋ฏ์ด 4000 ํ ํฐ ์ด์ ์์ธํ ์ค๋ชํ๋ผ"],
                        ["FAQ 20๊ฑด์ ์์ธํ๊ฒ ์์ฑํ๋ผ. 4000ํ ํฐ ์ด์ ์ฌ์ฉํ๋ผ."],
                        ["์ฌ์ฉ ๋ฐฉ๋ฒ๊ณผ ์ฐจ๋ณ์ , ํน์ง, ๊ฐ์ ์ ์ค์ฌ์ผ๋ก 4000 ํ ํฐ ์ด์ ์ ํ๋ธ ์์ ์คํฌ๋ฆฝํธ ํํ๋ก ์์ฑํ๋ผ"],
                        ["๋ณธ ์๋น์ค๋ฅผ SEO ์ต์ ํํ์ฌ ๋ธ๋ก๊ทธ ํฌ์คํธ(๋ฐฐ๊ฒฝ ๋ฐ ํ์์ฑ, ๊ธฐ์กด ์ ์ฌ ์๋น์ค์ ๋น๊ตํ์ฌ ํน์ฅ์ , ํ์ฉ์ฒ, ๊ฐ์น, ๊ธฐ๋ํจ๊ณผ, ๊ฒฐ๋ก ์ ํฌํจ)๋ก 4000 ํ ํฐ ์ด์ ์์ฑํ๋ผ"],
                        ["ํนํ ์ถ์์ ํ์ฉํ ๊ธฐ์ ๋ฐ ๋น์ฆ๋์ค๋ชจ๋ธ ์ธก๋ฉด์ ํฌํจํ์ฌ ํนํ ์ถ์์ ๊ตฌ์ฑ์ ๋ง๊ฒ ํ์ ์ ์ธ ์ฐฝ์ ๋ฐ๋ช๋ด์ฉ์ ์ค์ฌ์ผ๋ก 4000ํ ํฐ ์ด์ ์์ฑํ๋ผ."],
                        ["๊ณ์ ์ด์ด์ ๋ต๋ณํ๋ผ"],
                    ]

                    gr.Examples(examples, inputs=msg)

                    def respond_wrapper(message, chat_history, max_tokens, temperature, top_p):
                        # FIX: the original ran asyncio.run(...__anext__()),
                        # which consumed only the FIRST streamed chunk and
                        # truncated every reply to a single token. Drain the
                        # whole async generator; the last yield is the full
                        # accumulated reply.
                        async def _collect():
                            final = ""
                            async for partial in respond_stream(message, chat_history, max_tokens, temperature, top_p):
                                final = partial
                            return final

                        bot_message = asyncio.run(_collect())
                        chat_history.append({"role": "user", "content": message})
                        chat_history.append({"role": "assistant", "content": bot_message})
                        return "", chat_history

                    msg.submit(respond_wrapper, [msg, chatbot, max_tokens, temperature, top_p], [msg, chatbot])

            # Hidden state threaded between the analyze click and follow-ups.
            space_id_state = gr.State()
            tree_structure_state = gr.State()
            app_py_content_lines = gr.State()

            analyze_button.click(
                analyze_space,
                inputs=[url_input],
                outputs=[app_py_content, tree_view_output, tree_structure_state, space_id_state, summary_output, analysis_output, usage_output, app_py_content_lines]
            ).then(
                # After analysis, fetch requirements.txt for the second tab.
                lambda space_id: get_file_content(space_id, "requirements.txt"),
                inputs=[space_id_state],
                outputs=[requirements_content]
            )

            # Resize the code viewer to the analyzed file's line count.
            app_py_content.change(lambda lines: gr.update(lines=lines), inputs=[app_py_content_lines], outputs=[app_py_content])

            return demo

    except Exception as e:
        print(f"Error in create_ui: {str(e)}")
        print(traceback.format_exc())
        raise
|
373 |
+
|
374 |
+
if __name__ == "__main__":
    # Script entry point: build the UI, enable queuing, and serve it.
    try:
        print("Starting HuggingFace Space Analyzer...")
        app = create_ui()
        print("UI created successfully.")

        print("Configuring Gradio queue...")
        app.queue()
        print("Gradio queue configured.")

        print("Launching Gradio app...")
        app.launch(
            server_name="0.0.0.0",
            server_port=7860,
            share=False,
            debug=True,
            show_api=False,
        )
        print("Gradio app launched successfully.")
    except Exception as err:
        print(f"Error in main: {str(err)}")
        print("Detailed error information:")
        print(traceback.format_exc())
        raise
|