Update app.py
app.py CHANGED
@@ -8,8 +8,9 @@ import numpy as np
 from langchain_community.vectorstores import Chroma
 from langchain_community.document_loaders import UnstructuredPDFLoader, PyPDFLoader
 from langchain.text_splitter import CharacterTextSplitter
-from langchain.chains import RetrievalQAWithSourcesChain
-from langchain.chains import load_qa_with_sources_from_chain_type
+# from langchain.chains import RetrievalQAWithSourcesChain
+# from langchain.chains import load_qa_with_sources_from_chain_type
+from langchain.chains import ConversationalRetrievalChain
 from langchain.schema import Document
 from langchain.memory import ConversationBufferMemory
 from langchain.callbacks.manager import CallbackManager
@@ -76,14 +77,15 @@ def initialize_LLM(llm_option, llm_temperature, max_tokens, top_k, vector_db, pr
     if llm_name != default_llm:
         print(f"Using default LLM {default_llm} for {language}")
         llm_name = default_llm
-
-    qa_chain =
+
+    qa_chain = ConversationalRetrievalChain.from_llm(
         llm=llm_name,
-        chain_type="stuff",
         retriever=vector_db.as_retriever(),
+        chain_type="stuff",
+        memory=memory,
+        return_source_documents=True,
         temperature=llm_temperature,
-
-        max_tokens=max_tokens,
+        verbose=False,
     )
     progress.update(1.0)
     logger.info("LLM chain initialized successfully.")
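For context, a minimal sketch of how a ConversationalRetrievalChain of this shape is typically wired together. This is not the Space's actual code: the `HuggingFaceEndpoint` wrapper, the `repo_id`, the embedding model, and the `vector_db` setup are placeholder assumptions, and the sampling parameters (`temperature`, `max_new_tokens`) are set on the LLM object rather than passed to `from_llm`, since the chain itself does not take them. When `return_source_documents=True` is combined with a memory, the memory usually needs `output_key="answer"` so it knows which chain output to store.

```python
# Sketch only: assumes langchain + langchain_community are installed and
# that a Hugging Face API token is available in the environment.
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
from langchain_community.llms import HuggingFaceEndpoint
from langchain_community.vectorstores import Chroma
from langchain_community.embeddings import HuggingFaceEmbeddings

# Placeholder vector store: the app builds vector_db from the uploaded PDFs.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
vector_db = Chroma(embedding_function=embeddings, persist_directory="./chroma_db")

# Sampling parameters belong on the LLM object, not on the chain.
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.2",  # placeholder model id
    temperature=0.7,
    max_new_tokens=512,
)

# output_key="answer" tells the memory which output to store, since
# return_source_documents=True makes the chain return more than one key.
memory = ConversationBufferMemory(
    memory_key="chat_history",
    output_key="answer",
    return_messages=True,
)

qa_chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=vector_db.as_retriever(),
    chain_type="stuff",
    memory=memory,
    return_source_documents=True,
    verbose=False,
)

# With a memory attached, only the question needs to be passed in;
# the chat history is loaded from the memory automatically.
result = qa_chain.invoke({"question": "What is this document about?"})
print(result["answer"])
```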