Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -178,14 +178,21 @@ def analyze_space(url: str, progress=gr.Progress()):
|
|
178 |
return f"오류가 발생했습니다: {str(e)}", "", None, "", "", "", "", 10
|
179 |
|
180 |
|
|
|
|
|
|
|
|
|
|
|
|
|
181 |
|
|
|
182 |
def respond_stream(message: str, chat_history: List[Dict[str, str]], max_tokens: int, temperature: float, top_p: float):
|
183 |
system_message = """당신은 허깅페이스에 특화된 AI 코딩 전문가입니다. 사용자의 질문에 친절하고 상세하게 답변해주세요.
|
184 |
Gradio 특성을 정확히 인식하고 Requirements.txt 누락 없이 코딩과 오류를 해결해야 합니다.
|
185 |
항상 정확하고 유용한 정보를 제공하도록 노력하세요."""
|
186 |
|
187 |
messages = [{"role": "system", "content": system_message}]
|
188 |
-
messages.extend(chat_history)
|
189 |
messages.append({"role": "user", "content": message})
|
190 |
|
191 |
try:
|
@@ -205,7 +212,6 @@ def respond_stream(message: str, chat_history: List[Dict[str, str]], max_tokens:
|
|
205 |
except Exception as e:
|
206 |
yield f"응답 생성 중 오류 발생: {str(e)}"
|
207 |
|
208 |
-
|
209 |
def create_ui():
|
210 |
try:
|
211 |
css = """
|
@@ -314,7 +320,7 @@ def create_ui():
|
|
314 |
|
315 |
|
316 |
with gr.TabItem("AI 코딩"):
|
317 |
-
chatbot = gr.Chatbot(label="대화"
|
318 |
msg = gr.Textbox(label="메시지")
|
319 |
|
320 |
max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens", visible=False)
|
@@ -333,17 +339,17 @@ def create_ui():
|
|
333 |
gr.Examples(examples, inputs=msg)
|
334 |
|
335 |
def respond_wrapper(message, chat_history, max_tokens, temperature, top_p):
|
336 |
-
chat_history.append(
|
337 |
bot_message = ""
|
338 |
for partial_response in respond_stream(message, chat_history, max_tokens, temperature, top_p):
|
339 |
bot_message = partial_response
|
340 |
-
chat_history[-1] =
|
341 |
yield "", chat_history
|
342 |
return
|
343 |
|
344 |
msg.submit(respond_wrapper, [msg, chatbot, max_tokens, temperature, top_p], [msg, chatbot])
|
345 |
|
346 |
-
|
347 |
|
348 |
|
349 |
|
|
|
178 |
return f"오류가 발생했습니다: {str(e)}", "", None, "", "", "", "", 10
|
179 |
|
180 |
|
181 |
+
def format_chat_history(chat_history):
|
182 |
+
formatted = []
|
183 |
+
for message in chat_history:
|
184 |
+
if message["role"] in ["user", "assistant"]:
|
185 |
+
formatted.append(message)
|
186 |
+
return formatted
|
187 |
|
188 |
+
|
189 |
def respond_stream(message: str, chat_history: List[Dict[str, str]], max_tokens: int, temperature: float, top_p: float):
|
190 |
system_message = """당신은 허깅페이스에 특화된 AI 코딩 전문가입니다. 사용자의 질문에 친절하고 상세하게 답변해주세요.
|
191 |
Gradio 특성을 정확히 인식하고 Requirements.txt 누락 없이 코딩과 오류를 해결해야 합니다.
|
192 |
항상 정확하고 유용한 정보를 제공하도록 노력하세요."""
|
193 |
|
194 |
messages = [{"role": "system", "content": system_message}]
|
195 |
+
messages.extend(format_chat_history(chat_history))
|
196 |
messages.append({"role": "user", "content": message})
|
197 |
|
198 |
try:
|
|
|
212 |
except Exception as e:
|
213 |
yield f"응답 생성 중 오류 발생: {str(e)}"
|
214 |
|
|
|
215 |
def create_ui():
|
216 |
try:
|
217 |
css = """
|
|
|
320 |
|
321 |
|
322 |
with gr.TabItem("AI 코딩"):
|
323 |
+
chatbot = gr.Chatbot(label="대화")
|
324 |
msg = gr.Textbox(label="메시지")
|
325 |
|
326 |
max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens", visible=False)
|
|
|
339 |
gr.Examples(examples, inputs=msg)
|
340 |
|
341 |
def respond_wrapper(message, chat_history, max_tokens, temperature, top_p):
|
342 |
+
chat_history.append((message, ""))
|
343 |
bot_message = ""
|
344 |
for partial_response in respond_stream(message, chat_history, max_tokens, temperature, top_p):
|
345 |
bot_message = partial_response
|
346 |
+
chat_history[-1] = (message, bot_message)
|
347 |
yield "", chat_history
|
348 |
return
|
349 |
|
350 |
msg.submit(respond_wrapper, [msg, chatbot, max_tokens, temperature, top_p], [msg, chatbot])
|
351 |
|
352 |
+
|
353 |
|
354 |
|
355 |
|