ai: Expose welcome messages.
jarvis.py CHANGED

@@ -27,6 +27,8 @@ from openpyxl import load_workbook
 
 os.system("apt-get update -q -y && apt-get install -q -y tesseract-ocr tesseract-ocr-eng tesseract-ocr-ind libleptonica-dev libtesseract-dev")
 
+JARVIS_INIT = json.loads(os.getenv("HELLO", "[]"))
+
 INTERNAL_AI_GET_SERVER = os.getenv("INTERNAL_AI_GET_SERVER")
 INTERNAL_TRAINING_DATA = os.getenv("INTERNAL_TRAINING_DATA")
 
@@ -364,11 +366,14 @@ with gr.Blocks(fill_height=True, fill_width=True, title=AI_TYPES["AI_TYPE_4"], h
     user_session = gr.State(create_session())
     selected_model = gr.State(MODEL_CHOICES[0] if MODEL_CHOICES else "")
     J_A_R_V_I_S = gr.State("")
-    chatbot = gr.Chatbot(label=AI_TYPES["AI_TYPE_1"], show_copy_button=True, scale=1, elem_id=AI_TYPES["AI_TYPE_2"])
+    chatbot = gr.Chatbot(label=AI_TYPES["AI_TYPE_1"], show_copy_button=True, scale=1, elem_id=AI_TYPES["AI_TYPE_2"], examples=JARVIS_INIT)
     msg = gr.MultimodalTextbox(show_label=False, placeholder=RESPONSES["RESPONSE_5"], interactive=True, file_count="single", file_types=ALLOWED_EXTENSIONS)
     with gr.Sidebar(open=False):
         model_radio = gr.Radio(show_label=False, choices=MODEL_CHOICES, value=MODEL_CHOICES[0])
         model_radio.change(fn=change_model, inputs=[model_radio], outputs=[user_history, user_session, selected_model, J_A_R_V_I_S])
+    def on_example_select(evt: gr.SelectData):
+        return evt.value
+    chatbot.example_select(fn=on_example_select, inputs=[], outputs=[msg]).then(fn=respond_async, inputs=[msg, user_history, selected_model, user_session, J_A_R_V_I_S], outputs=[chatbot, msg, user_session])
     msg.submit(fn=respond_async, inputs=[msg, user_history, selected_model, user_session, J_A_R_V_I_S], outputs=[chatbot, msg, user_session], api_name=INTERNAL_AI_GET_SERVER)
     msg.stop(fn=stop_response, inputs=[user_history, user_session], outputs=[chatbot, msg, user_session])
 jarvis.queue(default_concurrency_limit=2).launch(max_file_size="1mb")
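For context, here is a minimal, self-contained sketch of the welcome-message flow this commit adds: the HELLO environment variable is parsed into Chatbot examples, and selecting an example fills the textbox and submits it. The JSON shape assumed for HELLO (a list of example-message dicts with a "text" key) and the echo_respond stand-in for the app's respond_async are assumptions for illustration, not part of the diff.

# Minimal sketch of the welcome-message wiring (assumptions noted inline).
import json
import os

import gradio as gr

# Assumed shape of the HELLO secret: a JSON list of gr.Chatbot example messages,
# e.g. export HELLO='[{"text": "Hello! What can you do?"}, {"text": "Summarize a file"}]'
JARVIS_INIT = json.loads(os.getenv("HELLO", "[]"))

def on_example_select(evt: gr.SelectData):
    # Clicking a welcome message copies its value into the textbox,
    # matching the handler added in the commit.
    return evt.value

def echo_respond(message, history):
    # Hypothetical stand-in for the app's respond_async (which also takes
    # session, model, and stop-flag state); it simply echoes the user text.
    text = message["text"] if isinstance(message, dict) else str(message)
    history = (history or []) + [
        {"role": "user", "content": text},
        {"role": "assistant", "content": f"Echo: {text}"},
    ]
    return history, None

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages", examples=JARVIS_INIT)
    msg = gr.MultimodalTextbox(show_label=False, interactive=True)
    # Selecting an example fills the textbox, then submits it, mirroring the
    # example_select -> then(respond_async) chain in the diff.
    chatbot.example_select(fn=on_example_select, inputs=[], outputs=[msg]).then(
        fn=echo_respond, inputs=[msg, chatbot], outputs=[chatbot, msg]
    )
    msg.submit(fn=echo_respond, inputs=[msg, chatbot], outputs=[chatbot, msg])

if __name__ == "__main__":
    demo.launch()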