KeerthiVM committed on
Commit
9905e70
·
1 Parent(s): ef73894
Files changed (1) hide show
  1. app.py +25 -6
app.py CHANGED
@@ -150,16 +150,35 @@ if prompt := st.chat_input("Ask a follow-up question..."):
150
  #
151
  # st.markdown(assistant_response)
152
  # st.session_state.messages.append({"role": "assistant", "content": assistant_response})
 
 
 
 
 
 
 
 
 
 
 
153
  with st.spinner("Thinking..."):
154
  if len(st.session_state.messages) > 1:
155
- response = llm.invoke([{"role": m["role"], "content": m["content"]} for m in st.session_state.messages])
156
- response = response['result']
 
 
 
 
 
 
 
 
157
  else:
158
- response = rag_chain.invoke(prompt)
159
- response = response['result']
160
 
161
- st.markdown(response)
162
- st.session_state.messages.append({"role": "assistant", "content": response})
163
 
164
  if st.session_state.messages and st.button("πŸ“„ Download Chat as PDF"):
165
  pdf_file = export_chat_to_pdf(st.session_state.messages)
 
150
  #
151
  # st.markdown(assistant_response)
152
  # st.session_state.messages.append({"role": "assistant", "content": assistant_response})
153
+ # with st.spinner("Thinking..."):
154
+ # if len(st.session_state.messages) > 1:
155
+ # response = llm.invoke([{"role": m["role"], "content": m["content"]} for m in st.session_state.messages])
156
+ # response = response.content
157
+ # else:
158
+ # response = rag_chain.invoke(prompt)
159
+ # response = response['result']
160
+ #
161
+ # st.markdown(response)
162
+ # st.session_state.messages.append({"role": "assistant", "content": response})
163
+
164
  with st.spinner("Thinking..."):
165
  if len(st.session_state.messages) > 1:
166
+ conversation_context = "\n".join(
167
+ f"{m['role']}: {m['content']}"
168
+ for m in st.session_state.messages[:-1] # Exclude current prompt
169
+ )
170
+ augmented_prompt = (
171
+ f"Conversation history:\n{conversation_context}\n\n"
172
+ f"Current question: {prompt}"
173
+ )
174
+ response = rag_chain.invoke({"query": augmented_prompt})
175
+ assistant_response = response['result']
176
  else:
177
+ response = rag_chain.invoke({"query": prompt})
178
+ assistant_response = response['result']
179
 
180
+ st.markdown(assistant_response)
181
+ st.session_state.messages.append({"role": "assistant", "content": assistant_response})
182
 
183
  if st.session_state.messages and st.button("πŸ“„ Download Chat as PDF"):
184
  pdf_file = export_chat_to_pdf(st.session_state.messages)