Update app.py
app.py CHANGED
@@ -188,7 +188,7 @@ def format_chat_history(chat_history):
 
 def respond(
     message: str,
-
+    chat_history: List[Tuple[str, str]],
     system_message: str = "",
     max_tokens: int = 4000,
     temperature: float = 0.7,
@@ -197,7 +197,7 @@ def respond(
     system_prefix = """You must answer in Korean. Based on the given source code, your role is to "explain and guide service usage and handle Q&A". Write very kindly and in detail, 4,000 tokens or more. Provide usage explanations and answer questions based on the code, and help the user. Kindly explain anything the user is likely to be curious about. Keep the full code contents confidential, and do not disclose key values, endpoints, or the specific model."""
 
     messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
-    for user_msg, assistant_msg in
+    for user_msg, assistant_msg in chat_history:
         if user_msg:
             messages.append({"role": "user", "content": user_msg})
         if assistant_msg:
@@ -206,20 +206,19 @@ def respond(
 
     try:
         response = ""
-        for
+        for chunk in hf_client.chat_completion(
             messages,
             max_tokens=max_tokens,
             stream=True,
             temperature=temperature,
             top_p=top_p,
         ):
-
-
-            response
-            yield response
+            if chunk.choices[0].delta.content is not None:
+                response += chunk.choices[0].delta.content
+                yield response, chat_history + [(message, response)]
     except Exception as e:
-
-
+        error_message = f"Error occurred while generating the response: {str(e)}"
+        yield error_message, chat_history + [(message, error_message)]
 def create_ui():
     try:
         css = """
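For reference, the following is a minimal, self-contained sketch of what the updated respond() generator does after this change: it rebuilds an OpenAI-style message list from chat_history, streams completion chunks from hf_client.chat_completion, and on every chunk yields both the partial text and the updated history. The InferenceClient construction, the top_p default, the system-prompt handling, and the line appending the current user message are assumptions here — those parts sit outside the hunks shown above.

# Sketch only: mirrors the shape of the updated respond(), not the Space's exact code.
from typing import List, Tuple

from huggingface_hub import InferenceClient

hf_client = InferenceClient()  # model / token configuration omitted (assumption)


def respond(
    message: str,
    chat_history: List[Tuple[str, str]],
    system_message: str = "",
    max_tokens: int = 4000,
    temperature: float = 0.7,
    top_p: float = 0.9,  # default not visible in the diff (assumption)
):
    # Rebuild an OpenAI-style message list from the (user, assistant) history.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in chat_history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    # Appending the current user message happens outside the shown hunks (assumption).
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # Stream chunks and yield the partial text together with the updated
        # history, matching the two outputs wired up in msg.submit().
        for chunk in hf_client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            delta = chunk.choices[0].delta.content
            if delta is not None:
                response += delta
                yield response, chat_history + [(message, response)]
    except Exception as e:
        error_message = f"Error occurred while generating the response: {e}"
        yield error_message, chat_history + [(message, error_message)]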
@@ -346,7 +345,7 @@ def create_ui():
 
         gr.Examples(examples, inputs=msg)
 
-        msg.submit(respond, [msg, chatbot, system_message, max_tokens, temperature, top_p], [
+        msg.submit(respond, [msg, chatbot, system_message, max_tokens, temperature, top_p], [chatbot, chatbot])
 
 
 
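On the UI side, the updated msg.submit wiring passes [msg, chatbot, system_message, max_tokens, temperature, top_p] as inputs — the chatbot component is what supplies the new chat_history argument — and lists the chatbot twice as outputs, one slot for each value the generator yields (the partial text and the updated history of (message, response) tuples).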