Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -101,7 +101,7 @@ def chat_interface():
|
|
101 |
|
102 |
if user_input:
|
103 |
# Add user message to chat history
|
104 |
-
st.session_state.chat_history.append({"role": "user", "content": user_input})
|
105 |
|
106 |
# Prepare the prompt
|
107 |
prompt = prepare_prompt(user_input, st.session_state.chat_history)
|
@@ -132,12 +132,12 @@ def chat_interface():
|
|
132 |
}
|
133 |
|
134 |
# Generate and stream response
|
135 |
-
with st.chat_message("assistant"):
|
136 |
stream = generate_response(watsonx_llm, prompt_data, params)
|
137 |
response = st.write_stream(stream)
|
138 |
|
139 |
# Add AI response to chat history
|
140 |
-
st.session_state.chat_history.append({"role": "assistant", "content": response})
|
141 |
|
142 |
def main():
|
143 |
initialize_session_state()
|
|
|
101 |
|
102 |
if user_input:
|
103 |
# Add user message to chat history
|
104 |
+
st.session_state.chat_history.append({"role": "user", "content": user_input, "avatar": "👤"})
|
105 |
|
106 |
# Prepare the prompt
|
107 |
prompt = prepare_prompt(user_input, st.session_state.chat_history)
|
|
|
132 |
}
|
133 |
|
134 |
# Generate and stream response
|
135 |
+
with st.chat_message("Jimmy", avatar="🤖"):
|
136 |
stream = generate_response(watsonx_llm, prompt_data, params)
|
137 |
response = st.write_stream(stream)
|
138 |
|
139 |
# Add AI response to chat history
|
140 |
+
st.session_state.chat_history.append({"role": "Jimmy", "content": response, "avatar": "🤖"})
|
141 |
|
142 |
def main():
|
143 |
initialize_session_state()
|