hsuwill000 committed on
Commit
b22d7dd
·
verified ·
1 Parent(s): 97328ac

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -10
app.py CHANGED
@@ -1,4 +1,10 @@
1
  import subprocess
 
 
 
 
 
 
2
  # Utility functions
3
  def run_command(command, cwd=None):
4
  """Run a system command."""
@@ -13,17 +19,10 @@ def run_command(command, cwd=None):
13
 
14
  run_command('pip install openai')
15
 
16
- import time
17
- import gradio as gr
18
- from openai import OpenAI
19
- import os
20
- from huggingface_hub import snapshot_download
21
-
22
  # Model configuration
23
  MODEL_ID = "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"
24
  QUANT = "Q5_K_M"
25
 
26
-
27
  def setup_llama_cpp():
28
  """Clone and compile llama.cpp repository."""
29
  if not os.path.exists('llama.cpp'):
@@ -87,7 +86,10 @@ details { border: 1px solid #e0e0e0 !important; border-radius: 8px !important; p
87
 
88
  client = OpenAI(base_url="http://localhost:8080/v1", api_key="no-key-required")
89
 
 
90
  def user(message, history):
 
 
91
  return "", history + [[message, None]]
92
 
93
  class ParserState:
@@ -192,7 +194,8 @@ with gr.Blocks(css=CSS) as demo:
192
  elem_id="chatbot",
193
  height=500,
194
  show_label=False,
195
- render_markdown=True
 
196
  )
197
 
198
  with gr.Row():
@@ -200,7 +203,8 @@ with gr.Blocks(css=CSS) as demo:
200
  label="Message",
201
  placeholder="Type your message...",
202
  container=False,
203
- scale=4
 
204
  )
205
  submit_btn = gr.Button("Send", variant='primary', scale=1)
206
 
@@ -260,4 +264,4 @@ if __name__ == "__main__":
260
  finally:
261
  # Cleanup: terminate the server process when the GUI is closed
262
  server_process.terminate()
263
- server_process.wait()
 
1
  import subprocess
2
+ import time
3
+ import os
4
+ import gradio as gr
5
+ from openai import OpenAI
6
+ from huggingface_hub import snapshot_download
7
+
8
  # Utility functions
9
  def run_command(command, cwd=None):
10
  """Run a system command."""
 
19
 
20
  run_command('pip install openai')
21
 
 
 
 
 
 
 
22
  # Model configuration
23
  MODEL_ID = "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"
24
  QUANT = "Q5_K_M"
25
 
 
26
  def setup_llama_cpp():
27
  """Clone and compile llama.cpp repository."""
28
  if not os.path.exists('llama.cpp'):
 
86
 
87
  client = OpenAI(base_url="http://localhost:8080/v1", api_key="no-key-required")
88
 
89
+ # 修改 user() 確保 message 為字串
90
  def user(message, history):
91
+ if not isinstance(message, str):
92
+ message = str(message)
93
  return "", history + [[message, None]]
94
 
95
  class ParserState:
 
194
  elem_id="chatbot",
195
  height=500,
196
  show_label=False,
197
+ render_markdown=True,
198
+ value=[] # 指定初始值為空列表
199
  )
200
 
201
  with gr.Row():
 
203
  label="Message",
204
  placeholder="Type your message...",
205
  container=False,
206
+ scale=4,
207
+ clear_on_submit=True # 提交後清空輸入框
208
  )
209
  submit_btn = gr.Button("Send", variant='primary', scale=1)
210
 
 
264
  finally:
265
  # Cleanup: terminate the server process when the GUI is closed
266
  server_process.terminate()
267
+ server_process.wait()