Sbnos committed on
Commit
9cae142
·
verified ·
1 Parent(s): 69cffa4

cgpt check 2

Browse files
Files changed (1) hide show
  1. app.py +13 -11
app.py CHANGED
@@ -1,13 +1,12 @@
1
  import streamlit as st
2
  import os
3
- from langchain_community.vectorstores import Chroma
4
- from langchain_community.embeddings import HuggingFaceBgeEmbeddings
5
- from langchain_community.llms import Together
6
  from langchain.prompts import ChatPromptTemplate, PromptTemplate
7
  from langchain.schema import format_document
8
  from typing import List
9
  from langchain.memory import ConversationBufferMemory
10
- from langchain.schema.runnable import RunnableParallel, RunnablePassthrough, StrOutputParser
11
  from langchain_core.chat_message_histories import StreamlitChatMessageHistory
12
  import time
13
 
@@ -71,7 +70,7 @@ def get_streaming_response(user_query, chat_history):
71
  "user_question": user_query
72
  }
73
 
74
- chain = prompt | llm | StrOutputParser()
75
  return chain.stream(inputs)
76
 
77
  def app():
@@ -117,12 +116,15 @@ def app():
117
  with st.chat_message("assistant"):
118
  with st.spinner("Thinking..."):
119
  chat_history = "\n".join([f"{msg['role']}: {msg['content']}" for msg in chistory])
120
- response_generator = get_streaming_response(user_query, chat_history)
121
- response_text = ""
122
- for response_part in response_generator:
123
- response_text += response_part
124
- st.write(response_text)
125
- st.session_state.messages.append({"role": "assistant", "content": response_text})
 
 
 
126
 
127
  if __name__ == '__main__':
128
  app()
 
1
  import streamlit as st
2
  import os
3
+ from langchain.vectorstores import Chroma
4
+ from langchain.embeddings import HuggingFaceBgeEmbeddings
5
+ from langchain.llms import Together
6
  from langchain.prompts import ChatPromptTemplate, PromptTemplate
7
  from langchain.schema import format_document
8
  from typing import List
9
  from langchain.memory import ConversationBufferMemory
 
10
  from langchain_core.chat_message_histories import StreamlitChatMessageHistory
11
  import time
12
 
 
70
  "user_question": user_query
71
  }
72
 
73
+ chain = prompt | llm
74
  return chain.stream(inputs)
75
 
76
  def app():
 
116
  with st.chat_message("assistant"):
117
  with st.spinner("Thinking..."):
118
  chat_history = "\n".join([f"{msg['role']}: {msg['content']}" for msg in chistory])
119
+ try:
120
+ response_generator = get_streaming_response(user_query, chat_history)
121
+ response_text = ""
122
+ for response_part in response_generator:
123
+ response_text += response_part
124
+ st.write(response_text)
125
+ st.session_state.messages.append({"role": "assistant", "content": response_text})
126
+ except Exception as e:
127
+ st.error(f"An error occurred: {e}")
128
 
129
  if __name__ == '__main__':
130
  app()