SamuelM0422 committed on
Commit
ba99b07
·
verified ·
1 Parent(s): d026311

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +50 -15
app.py CHANGED
@@ -1,4 +1,5 @@
1
  import streamlit as st
 
2
  from langchain_community.document_loaders import PyPDFLoader
3
  from langchain_core.messages import HumanMessage, AIMessageChunk, AIMessage
4
  from langchain_huggingface import HuggingFaceEmbeddings
@@ -13,6 +14,9 @@ from langgraph.errors import GraphRecursionError
13
  from graph import get_graph
14
  from langchain_core.runnables import RunnableConfig
15
 
 
 
 
16
  if 'read_file' not in st.session_state:
17
  st.session_state.read_file = False
18
  st.session_state.retriever = None
@@ -28,24 +32,44 @@ def get_session_by_id(session_id: str) -> BaseChatMessageHistory:
28
  return st.session_state.chat_history[session_id]
29
 
30
  if not st.session_state.read_file:
31
- st.title('πŸ€“ Upload your PDF to talk with it', anchor=False)
32
- file = st.file_uploader('Upload a PDF file', type='pdf')
 
 
 
 
 
 
33
  if file:
34
- with st.status('πŸ€— Booting up the things!', expanded=True):
35
- with st.spinner('πŸ“ Uploading the PDF...', show_time=True):
 
 
 
 
36
  with open('file.pdf', 'wb') as f:
37
  f.write(file.read())
38
  loader = PyPDFLoader('file.pdf')
39
  documents = loader.load_and_split(RecursiveCharacterTextSplitter(chunk_size=2000, chunk_overlap=200))
40
- st.success('πŸ“ File uploaded successfully!!!')
41
- with st.spinner('🧐 Reading the file...', show_time=True):
 
 
 
 
42
  vstore = InMemoryVectorStore.from_documents(documents, HuggingFaceEmbeddings(model_name='all-MiniLM-L6-v2'))
43
  st.session_state.retriever = vstore.as_retriever()
44
- st.success('🧐 File read successfully!!!')
 
 
45
  os.remove('file.pdf')
46
- with st.spinner('😴 Waking up the LLM...', show_time=True):
 
 
47
  st.session_state.graph = get_graph(st.session_state.retriever)
48
- st.success('😁 LLM awakened!!!')
 
 
49
  st.balloons()
50
  placeholder = st.empty()
51
  for _ in range(5, -1, -1):
@@ -57,18 +81,25 @@ if not st.session_state.read_file:
57
  if st.session_state.read_file:
58
 
59
  st.title('πŸ€— DocAI', anchor=False)
60
- st.subheader('Chat with your document!', anchor=False)
 
 
61
 
62
  if st.session_state.first_msg:
63
  st.session_state.first_msg = False
64
- get_session_by_id('chat42').add_message(AIMessage(content='Hello, how are you? How about we talk about the '
65
- 'document you sent me to read?'))
 
 
 
66
 
67
  for msg in get_session_by_id('chat42').messages:
68
  with st.chat_message(name='user' if isinstance(msg, HumanMessage) else 'ai'):
69
  st.write(msg.content)
70
 
71
- prompt = st.chat_input('Try to ask something about your file!')
 
 
72
  if prompt:
73
  with st.chat_message(name='user'):
74
  st.write(prompt)
@@ -102,7 +133,9 @@ if st.session_state.read_file:
102
  for msg in get_message():
103
  full_response += msg
104
  if '<tool>' in full_response:
105
- with tool_placeholder.status('Reading document...', expanded=True):
 
 
106
  if 'tool_message_placeholder' not in placeholders:
107
  placeholders['tool_message_placeholder'] = st.empty()
108
  placeholders['tool_message_placeholder'].write(full_response
@@ -116,7 +149,9 @@ if st.session_state.read_file:
116
  else:
117
  prompt_message_placeholder.write(full_response.replace('$', '\$'))
118
  except GraphRecursionError:
119
- message = 'NΓ£o consegui responder a sua pergunta. πŸ˜₯ Poderia me perguntar outra coisa?'
 
 
120
  full_response = ''
121
  for letter in message:
122
  full_response += letter
 
1
  import streamlit as st
2
+ from language_detection import detect_browser_language
3
  from langchain_community.document_loaders import PyPDFLoader
4
  from langchain_core.messages import HumanMessage, AIMessageChunk, AIMessage
5
  from langchain_huggingface import HuggingFaceEmbeddings
 
14
  from graph import get_graph
15
  from langchain_core.runnables import RunnableConfig
16
 
17
+ if 'language' not in st.session_state:
18
+ st.session_state.language = detect_browser_language()
19
+
20
  if 'read_file' not in st.session_state:
21
  st.session_state.read_file = False
22
  st.session_state.retriever = None
 
32
  return st.session_state.chat_history[session_id]
33
 
34
  if not st.session_state.read_file:
35
+
36
+ st.title('πŸ€“ FaΓ§a o Upload de seu arquivo PDF para que possamos conversar sobre ele'
37
+ if st.session_state.language.startswith("pt")
38
+ else 'πŸ€“ Upload your PDF to talk with it',
39
+ anchor=False)
40
+ file = st.file_uploader('Carregue o seu arquivo PDF'
41
+ if st.session_state.language.startswith("pt")
42
+ else 'Upload a PDF file', type='pdf')
43
  if file:
44
+ with st.status('πŸ€— Deixando tudo pronto!'
45
+ if st.session_state.language.startswith("pt")
46
+ else 'πŸ€— Booting up the things!', expanded=True):
47
+ with st.spinner('πŸ“ Carregando o PDF...'
48
+ if st.session_state.language.startswith("pt")
49
+ else 'πŸ“ Uploading the PDF...', show_time=True):
50
  with open('file.pdf', 'wb') as f:
51
  f.write(file.read())
52
  loader = PyPDFLoader('file.pdf')
53
  documents = loader.load_and_split(RecursiveCharacterTextSplitter(chunk_size=2000, chunk_overlap=200))
54
+ st.success('πŸ“ Arquivo carregado com sucesso!!!'
55
+ if st.session_state.language.startswith("pt")
56
+ else 'πŸ“ File uploaded successfully!!!')
57
+ with st.spinner('🧐 Lendo o arquivo...'
58
+ if st.session_state.language.startswith("pt")
59
+ else '🧐 Reading the file...', show_time=True):
60
  vstore = InMemoryVectorStore.from_documents(documents, HuggingFaceEmbeddings(model_name='all-MiniLM-L6-v2'))
61
  st.session_state.retriever = vstore.as_retriever()
62
+ st.success('🧐 Arquivo lido com sucesso!!!'
63
+ if st.session_state.language.startswith("pt")
64
+ else '🧐 File read successfully!!!')
65
  os.remove('file.pdf')
66
+ with st.spinner('😴 Acordando o LLM...'
67
+ if st.session_state.language.startswith("pt")
68
+ else '😴 Waking up the LLM...', show_time=True):
69
  st.session_state.graph = get_graph(st.session_state.retriever)
70
+ st.success('😁 LLM pronto e operante!!!'
71
+ if st.session_state.language.startswith("pt")
72
+ else '😁 LLM awakened!!!')
73
  st.balloons()
74
  placeholder = st.empty()
75
  for _ in range(5, -1, -1):
 
81
  if st.session_state.read_file:
82
 
83
  st.title('πŸ€— DocAI', anchor=False)
84
+ st.subheader('Converse com seu PDF!'
85
+ if st.session_state.language.startswith("pt")
86
+ else 'Chat with your document!', anchor=False)
87
 
88
  if st.session_state.first_msg:
89
  st.session_state.first_msg = False
90
+ get_session_by_id('chat42').add_message(AIMessage(content='E aΓ­, como vocΓͺ tΓ‘? Que tal a gente conversar sobre o '
91
+ 'arquivo que vocΓͺ me passou pra ler?'
92
+ if st.session_state.language.startswith("pt")
93
+ else 'Hello, how are you? How about we talk about the '
94
+ 'document you sent me to read?'))
95
 
96
  for msg in get_session_by_id('chat42').messages:
97
  with st.chat_message(name='user' if isinstance(msg, HumanMessage) else 'ai'):
98
  st.write(msg.content)
99
 
100
+ prompt = st.chat_input('Tente perguntar algo sobre o seu arquivo!'
101
+ if st.session_state.language.startswith("pt")
102
+ else 'Try to ask something about your file!')
103
  if prompt:
104
  with st.chat_message(name='user'):
105
  st.write(prompt)
 
133
  for msg in get_message():
134
  full_response += msg
135
  if '<tool>' in full_response:
136
+ with tool_placeholder.status('Lendo documento...'
137
+ if st.session_state.language.startswith("pt")
138
+ else 'Reading document...', expanded=True):
139
  if 'tool_message_placeholder' not in placeholders:
140
  placeholders['tool_message_placeholder'] = st.empty()
141
  placeholders['tool_message_placeholder'].write(full_response
 
149
  else:
150
  prompt_message_placeholder.write(full_response.replace('$', '\$'))
151
  except GraphRecursionError:
152
+ message = ('NΓ£o consegui responder a sua pergunta. πŸ˜₯ Poderia me perguntar outra coisa?'
153
+ if st.session_state.language.startswith("pt")
154
+ else "I couldn't answer your question. πŸ˜₯ Could you ask me something else?")
155
  full_response = ''
156
  for letter in message:
157
  full_response += letter