hadadrjt committed
Commit 18663e1 · 1 Parent(s): 6d927b2

fixup! ai: Enable gradio default stop button.
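
The diff below swaps the per-yield gr.MultimodalTextbox(...) constructions for gr.update(...), so the existing textbox keeps its identity and only the listed properties (interactivity and the component's built-in submit/stop buttons) change on each yield. A minimal, standalone illustration of the two update payloads used while streaming; the variable names here are illustrative and not from jarvis.py:

import gradio as gr

# While a response is streaming: keep the textbox disabled and show its built-in stop button.
lock_input = gr.update(interactive=False, submit_btn=False, stop_btn=True)
# When streaming ends or is stopped: clear the textbox and restore the submit button.
unlock_input = gr.update(value="", interactive=True, submit_btn=True, stop_btn=False)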

Files changed (1)
  1. jarvis.py +13 -18
jarvis.py CHANGED

@@ -58,10 +58,9 @@ ALLOWED_EXTENSIONS = json.loads(os.getenv("ALLOWED_EXTENSIONS", "[]"))
 ACTIVE_CANDIDATE = None
 
 class SessionWithID(requests.Session):
-    def __init__(self):
+    def __init__(sess):
         super().__init__()
-        self.session_id = str(uuid.uuid4())
-        self.stop_event = asyncio.Event()
+        sess.session_id = str(uuid.uuid4())
 
 def create_session():
     return SessionWithID()
@@ -224,6 +223,7 @@ async def fetch_response_async(host, key, model, msgs, cfg, sid):
     return None
 
 async def chat_with_model_async(history, user_input, model_display, sess, custom_prompt):
+    ensure_stop_event(sess)
     if not get_available_items(LINUX_SERVER_PROVIDER_KEYS, LINUX_SERVER_PROVIDER_KEYS_MARKED) or not get_available_items(LINUX_SERVER_HOSTS, LINUX_SERVER_HOSTS_ATTEMPTS):
         return RESPONSES["RESPONSE_3"]
     if not hasattr(sess, "session_id"):
@@ -256,24 +256,20 @@ async def chat_with_model_async(history, user_input, model_display, sess, custom
     return RESPONSES["RESPONSE_2"]
 
 async def respond_async(multi, history, model_display, sess, custom_prompt):
-    if not hasattr(sess, "stop_event"):
-        sess.stop_event = asyncio.Event()
+    ensure_stop_event(sess)
     sess.stop_event.clear()
     msg_input = {"text": multi.get("text", "").strip(), "files": multi.get("files", [])}
     if not msg_input["text"] and not msg_input["files"]:
-        yield history, gr.MultimodalTextbox(value="", interactive=True, submit_btn=True, stop_btn=False), sess
+        yield history, gr.update(value="", interactive=True, submit_btn=True, stop_btn=False), sess
         return
     inp = ""
     for f in msg_input["files"]:
-        if isinstance(f, dict):
-            fp = f.get("data", f.get("name", ""))
-        else:
-            fp = f
+        fp = f.get("data", f.get("name", "")) if isinstance(f, dict) else f
         inp += f"{Path(fp).name}\n\n{extract_file_content(fp)}\n\n"
     if msg_input["text"]:
         inp += msg_input["text"]
     history.append([inp, RESPONSES["RESPONSE_8"]])
-    yield history, gr.MultimodalTextbox(value="", interactive=False, submit_btn=False, stop_btn=True), sess
+    yield history, gr.update(interactive=False, submit_btn=False, stop_btn=True), sess
     ai = await chat_with_model_async(history, inp, model_display, sess, custom_prompt)
     history[-1][1] = ""
     buffer = []
@@ -281,7 +277,7 @@ async def respond_async(multi, history, model_display, sess, custom_prompt):
     for char in ai:
         if sess.stop_event.is_set():
             history[-1][1] = RESPONSES["RESPONSE_1"]
-            yield history, gr.MultimodalTextbox(value="", interactive=True, submit_btn=True, stop_btn=False), sess
+            yield history, gr.update(value="", interactive=True, submit_btn=True, stop_btn=False), sess
             sess.stop_event.clear()
             return
         buffer.append(char)
@@ -290,12 +286,12 @@ async def respond_async(multi, history, model_display, sess, custom_prompt):
             history[-1][1] += "".join(buffer)
             buffer.clear()
             last_update = current_time
-            yield history, gr.MultimodalTextbox(value="", interactive=False, submit_btn=False, stop_btn=True), sess
+            yield history, gr.update(interactive=False, submit_btn=False, stop_btn=True), sess
         await asyncio.sleep(0.016)
     if buffer:
         history[-1][1] += "".join(buffer)
-        yield history, gr.MultimodalTextbox(value="", interactive=False, submit_btn=False, stop_btn=True), sess
-    yield history, gr.MultimodalTextbox(value="", interactive=True, submit_btn=True, stop_btn=False), sess
+        yield history, gr.update(interactive=False, submit_btn=False, stop_btn=True), sess
+    yield history, gr.update(value="", interactive=True, submit_btn=True, stop_btn=False), sess
 
 def change_model(new):
     visible = new != MODEL_CHOICES[0]
@@ -303,13 +299,12 @@ def change_model(new):
     return [], create_session(), new, default, gr.update(value=default, visible=visible)
 
 def stop_response(history, sess):
-    if not hasattr(sess, "stop_event"):
-        sess.stop_event = asyncio.Event()
+    ensure_stop_event(sess)
     sess.stop_event.set()
     if history:
         history[-1][1] = RESPONSES["RESPONSE_1"]
     new_session = create_session()
-    return history, gr.MultimodalTextbox(value="", interactive=True, submit_btn=True, stop_btn=False), new_session
+    return history, None, new_session
 
 with gr.Blocks(fill_height=True, fill_width=True, title=AI_TYPES["AI_TYPE_4"], head=META_TAGS) as jarvis:
     user_history = gr.State([])
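
The hunks above call ensure_stop_event(sess), whose definition is added elsewhere in this commit and is not visible in the hunks shown. A minimal sketch of what such a helper presumably looks like, assuming it only factors out the hasattr guards removed from respond_async and stop_response:

import asyncio

def ensure_stop_event(sess):
    # Assumed helper (not shown in this diff): lazily attach the per-session stop flag
    # so handlers can rely on sess.stop_event existing, even for sessions created
    # before this change or swapped in by create_session().
    if not hasattr(sess, "stop_event"):
        sess.stop_event = asyncio.Event()

Under that assumption, each handler calls it once before touching sess.stop_event, which is why the inline hasattr guards could be dropped from the handler bodies.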