Spaces:
sierrafr
/
Runtime error

hadadrjt committed on
Commit
2c6eb6a
·
1 Parent(s): 991dd3b

ai: Limit max context memory to the last 3 history entries.

Browse files

* I initially hesitated to implement this setup, but
given the circumstances, it became necessary.
Managing all the contexts was placing excessive strain
on the server, especially considering its limited
specs.

* The explicit `show_api` flag is also no longer required,
as it is enabled by default.

Files changed (1) hide show
  1. jarvis.py +3 -1
jarvis.py CHANGED
@@ -205,6 +205,8 @@ async def respond_async(multi_input, history, selected_model_display, sess):
205
  if message["text"]:
206
  combined_input += message["text"]
207
  history.append([combined_input, ""])
 
 
208
  ai_response = await chat_with_model_async(history, combined_input, selected_model_display, sess)
209
  history[-1][1] = ""
210
  def convert_to_string(data):
@@ -236,4 +238,4 @@ with gr.Blocks(fill_height=True, fill_width=True, title=AI_TYPES["AI_TYPE_4"], h
236
  msg = gr.MultimodalTextbox(show_label=False, placeholder=RESPONSES["RESPONSE_5"], interactive=True, file_count="single", file_types=ALLOWED_EXTENSIONS)
237
  #model_dropdown.change(fn=change_model, inputs=[model_dropdown], outputs=[user_history, user_session, selected_model])
238
  msg.submit(fn=respond_async, inputs=[msg, user_history, selected_model, user_session], outputs=[chatbot, msg, user_session], concurrency_limit=None, api_name=INTERNAL_AI_GET_SERVER)
239
- jarvis.launch(show_api=True, max_file_size="1mb")
 
205
  if message["text"]:
206
  combined_input += message["text"]
207
  history.append([combined_input, ""])
208
+ if len(history) > 3:
209
+ history[:] = history[-3:]
210
  ai_response = await chat_with_model_async(history, combined_input, selected_model_display, sess)
211
  history[-1][1] = ""
212
  def convert_to_string(data):
 
238
  msg = gr.MultimodalTextbox(show_label=False, placeholder=RESPONSES["RESPONSE_5"], interactive=True, file_count="single", file_types=ALLOWED_EXTENSIONS)
239
  #model_dropdown.change(fn=change_model, inputs=[model_dropdown], outputs=[user_history, user_session, selected_model])
240
  msg.submit(fn=respond_async, inputs=[msg, user_history, selected_model, user_session], outputs=[chatbot, msg, user_session], concurrency_limit=None, api_name=INTERNAL_AI_GET_SERVER)
241
+ jarvis.launch(max_file_size="1mb")