import os
import streamlit as st
from langchain_core.messages import AIMessage, HumanMessage
from modules.graph import invoke_our_graph
from modules.st_callable_util import get_streamlit_cb  # Utility function to get a Streamlit callback handler with context
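
# For reference, a minimal sketch of what the handler returned by get_streamlit_cb
# might look like. This is an assumption based on the usual LangChain-to-Streamlit
# streaming pattern, not the actual contents of modules/st_callable_util.py (which
# presumably also attaches the Streamlit script-run context, hence "with context" above).
from langchain_core.callbacks.base import BaseCallbackHandler

class ExampleStreamToContainerHandler(BaseCallbackHandler):
    """Hypothetical sketch only: stream LLM tokens into a Streamlit container."""

    def __init__(self, container):
        self.placeholder = container.empty()  # placeholder element updated token by token
        self.text = ""

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        # Called by LangChain for every streamed token; append and re-render.
        self.text += token
        self.placeholder.markdown(self.text)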

# This section is for debugging purposes only.
import debugpy

# Guard against Streamlit reruns: debugpy.listen() raises if called a second time in the same process.
if not st.session_state.get("_debugpy_listening"):
    debugpy.listen(("localhost", 5678))  # Port the debugger attaches to
    print("Waiting for debugger attach...")
    debugpy.wait_for_client()  # Pause execution until a debugger is attached
    st.session_state["_debugpy_listening"] = True

# Streamlit UI
st.title("Paintrek Medical Assistant")
st.markdown("Chat with an AI-powered health assistant.")

# Initialize the expander state
if "expander_open" not in st.session_state:
    st.session_state.expander_open = True

# Check if the Google API key is set
if not os.getenv('GOOGLE_API_KEY'):
    # If not, display a sidebar input for the user to provide the API key
    st.sidebar.header("GOOGLE_API_KEY Setup")
    api_key = st.sidebar.text_input(label="API Key", type="password", label_visibility="collapsed")
    os.environ["GOOGLE_API_KEY"] = api_key
    # If no key is provided, show an info message and stop execution until a key is entered
    if not api_key:
        st.info("Please enter your GOOGLE_API_KEY in the sidebar.")
        st.stop()

# Capture user input from the chat input box
prompt = st.chat_input()

# Toggle the expander state based on user input
if prompt is not None:
    st.session_state.expander_open = False  # Close the expander once the user starts typing

# Streamlit "magic": the bare string below is rendered as the expander's body
with st.expander(label="Paintrek Bot", expanded=st.session_state.expander_open):
    """
    At any time you can type 'q' or 'quit' to quit.
    """

# Initialize chat messages in session state
if "messages" not in st.session_state:
    st.session_state["messages"] = [AIMessage(content="Welcome to the Paintrek world. I am a health assistant, an interactive clinical recording system. I will ask you questions about your pain and related symptoms and record your responses. I will then store this information securely. At any time, you can type `q` to quit.")]

# Re-render the chat history from session state on every Streamlit rerun
for msg in st.session_state.messages:
    # https://docs.streamlit.io/develop/api-reference/chat/st.chat_message
    # Messages are stored as AIMessage / HumanMessage because that is the format LangGraph expects
    if isinstance(msg, AIMessage):
        st.chat_message("assistant").write(msg.content)
    elif isinstance(msg, HumanMessage):
        st.chat_message("user").write(msg.content)

# Handle user input if provided
if prompt:
    st.session_state.messages.append(HumanMessage(content=prompt))
    st.chat_message("user").write(prompt)

    with st.chat_message("assistant"):
        # Create a new placeholder for streaming messages and other events, bound to this container
        st_callback = get_streamlit_cb(st.container())
        response = invoke_our_graph(st.session_state.messages, [st_callback])
        st.session_state.messages.append(AIMessage(content=response["messages"][-1].content))  # Store the final AI message in session state
        st.write(response["messages"][-1].content)  # Write the final message inside the chat_message context
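
# For reference, a rough, hypothetical sketch of what modules/graph.invoke_our_graph
# might do -- an assumption based on the standard LangGraph pattern, not the actual
# module source. A compiled StateGraph is invoked with the chat history as its state,
# and the callback list is passed through the RunnableConfig so the handler built by
# get_streamlit_cb receives streaming events.
def example_invoke_our_graph(messages, callbacks, compiled_graph):
    # `compiled_graph` stands in for the graph object built in modules/graph.py
    # (its real name there is not shown here). The returned state dict carries the
    # full "messages" list, which is why the app reads response["messages"][-1].content.
    return compiled_graph.invoke({"messages": messages}, config={"callbacks": callbacks})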