# ProductionRAG/app.py
"""
IMPORTS HERE
"""
import chainlit as cl
from qdrant_client import QdrantClient
from qdrant_client.http.models import Distance, VectorParams
from langchain_qdrant import QdrantVectorStore
from operator import itemgetter
from langchain_core.runnables.passthrough import RunnablePassthrough
from langchain_core.runnables.config import RunnableConfig
import uuid
from prompts import chat_prompt
from handle_files import split_file
from models import chat_model, cached_embedder
"""
GLOBAL CODE HERE
"""
# Typical Qdrant Client Set-up: an in-memory instance that lives only for this process
collection_name = f"pdf_to_parse_{uuid.uuid4()}"
client = QdrantClient(":memory:")
client.create_collection(
    collection_name=collection_name,
    # The vector size must match the embedding dimension produced by cached_embedder (1536 here)
    vectors_config=VectorParams(size=1536, distance=Distance.COSINE),
)
# Typical Qdrant Vector Store Set-up
vectorstore = QdrantVectorStore(
    client=client,
    collection_name=collection_name,
    embedding=cached_embedder,
)
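# The collection starts empty; documents from each uploaded PDF are added in on_chat_start below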
### On Chat Start (Session Start) Section ###
@cl.on_chat_start
async def on_chat_start():
    """ SESSION SPECIFIC CODE HERE """
    files = None
    # Wait for the user to upload a file
    while files is None:
        files = await cl.AskFileMessage(
            content="Please upload a PDF file to begin!",
            accept=["application/pdf"],
            max_size_mb=20,
            timeout=180,
        ).send()
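    # AskFileMessage resolves to a list of uploads; only the first file is processed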
    file = files[0]
    msg = cl.Message(
        content=f"Processing `{file.name}`..."
    )
    await msg.send()
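    # Split the uploaded PDF into document chunks (handle_files.split_file)
    # and index them in the in-memory Qdrant collection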
    docs = split_file(file)
    vectorstore.add_documents(docs)
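    # MMR (maximal marginal relevance) retrieval balances relevance with diversity; up to 15 chunks are returned per query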
    retriever = vectorstore.as_retriever(search_type="mmr", search_kwargs={"k": 15})
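    # LCEL RAG chain: pull the "question" from the input dict, retrieve context for it,
    # then feed both context and question through the chat prompt into the chat model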
    retrieval_augmented_qa_chain = (
        {"context": itemgetter("question") | retriever, "question": itemgetter("question")}
        | RunnablePassthrough.assign(context=itemgetter("context"))
        | chat_prompt
        | chat_model
    )
    msg.content = f"Processing `{file.name}` done. You can now ask questions!"
    await msg.update()
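    # Store the chain in the user session so on_message can retrieve it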
    cl.user_session.set("chain", retrieval_augmented_qa_chain)
### Rename Chains ###
@cl.author_rename
def rename(orig_author: str):
    """ RENAME CODE HERE """
    rename_dict = {"ChatOpenAI": "the Generator ...", "VectorStoreRetriever": "the Retriever"}
    return rename_dict.get(orig_author, orig_author)
### On Message Section ###
@cl.on_message
async def main(message: cl.Message):
    """
    MESSAGE CODE HERE
    """
    chain = cl.user_session.get("chain")
    msg = cl.Message(content="")
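    # Stream tokens from the chain as they are generated, forwarding each to the UI message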
    async for stream_response in chain.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()])
    ):
        await msg.stream_token(stream_response.content)
    await msg.send()