KeerthiVM committed on
Commit
ef73894
Β·
1 Parent(s): ae2bc75
Files changed (1) hide show
  1. app.py +24 -24
app.py CHANGED
@@ -135,31 +135,31 @@ if prompt := st.chat_input("Ask a follow-up question..."):
135
  st.markdown(prompt)
136
 
137
  with st.chat_message("assistant"):
138
- with st.spinner("Thinking..."):
139
- # Convert messages to LangChain format
140
- chat_history = []
141
- for msg in st.session_state.messages[:-1]: # Exclude the current prompt
142
- if msg["role"] == "user":
143
- chat_history.append(HumanMessage(content=msg["content"]))
144
- else:
145
- chat_history.append(AIMessage(content=msg["content"]))
146
-
147
- # Get response
148
- response = llm.invoke([HumanMessage(content=prompt)] + chat_history)
149
- assistant_response = response.content
150
-
151
- st.markdown(assistant_response)
152
- st.session_state.messages.append({"role": "assistant", "content": assistant_response})
153
  # with st.spinner("Thinking..."):
154
- # if len(st.session_state.messages) > 1:
155
- # response = llm.invoke([{"role": m["role"], "content": m["content"]} for m in st.session_state.messages])
156
- # else:
157
- # response = rag_chain.invoke(prompt)
158
- # response = response['result']
159
- #
160
- # st.markdown(response)
161
- # st.session_state.messages.append({"role": "assistant", "content": response})
162
- #
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
163
 
164
  if st.session_state.messages and st.button("πŸ“„ Download Chat as PDF"):
165
  pdf_file = export_chat_to_pdf(st.session_state.messages)
 
135
  st.markdown(prompt)
136
 
137
  with st.chat_message("assistant"):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
138
  # with st.spinner("Thinking..."):
139
+ # Convert messages to LangChain format
140
+ # chat_history = []
141
+ # for msg in st.session_state.messages[:-1]: # Exclude the current prompt
142
+ # if msg["role"] == "user":
143
+ # chat_history.append(HumanMessage(content=msg["content"]))
144
+ # else:
145
+ # chat_history.append(AIMessage(content=msg["content"]))
146
+ #
147
+ # # Get response
148
+ # response = llm.invoke([HumanMessage(content=prompt)] + chat_history)
149
+ # assistant_response = response.content
150
+ #
151
+ # st.markdown(assistant_response)
152
+ # st.session_state.messages.append({"role": "assistant", "content": assistant_response})
153
+ with st.spinner("Thinking..."):
154
+ if len(st.session_state.messages) > 1:
155
+ response = llm.invoke([{"role": m["role"], "content": m["content"]} for m in st.session_state.messages])
156
+ response = response['result']
157
+ else:
158
+ response = rag_chain.invoke(prompt)
159
+ response = response['result']
160
+
161
+ st.markdown(response)
162
+ st.session_state.messages.append({"role": "assistant", "content": response})
163
 
164
  if st.session_state.messages and st.button("πŸ“„ Download Chat as PDF"):
165
  pdf_file = export_chat_to_pdf(st.session_state.messages)