docling_rag / app.py
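# Streamlit front end for the docling_rag Space: upload a PDF, index it, then chat with it.
# Launch locally with: streamlit run app.py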
import streamlit as st
import os
from utils.ingestion import DocumentProcessor
from utils.llm import LLMProcessor
from utils.qa import QAEngine
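# The utils package is not shown in this file. From the calls below, the helper classes
# are assumed to expose roughly these (hypothetical) interfaces:
#   DocumentProcessor.process_document(pdf_path: str) -> None   # parse, chunk, and index the PDF
#   LLMProcessor                                                 # wrapper around the underlying language model
#   QAEngine.query(question: str) -> str                         # retrieve relevant chunks and generate an answer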
# Set up Streamlit page
st.set_page_config(page_title="AI-Powered Document Chat", layout="wide")
# Initialize processors
document_processor = DocumentProcessor()
llm_processor = LLMProcessor()
qa_engine = QAEngine()
# Ensure temp directory exists
os.makedirs("temp", exist_ok=True)
# Sidebar - File Upload
st.sidebar.header("Upload a PDF")
uploaded_file = st.sidebar.file_uploader("Choose a PDF file", type=["pdf"])
if uploaded_file:
    pdf_path = os.path.join("temp", uploaded_file.name)
    with open(pdf_path, "wb") as f:
        f.write(uploaded_file.read())
    st.sidebar.success("✅ File uploaded successfully!")

    # Process the document only once per uploaded file, not on every rerun
    if st.session_state.get("processed_file") != uploaded_file.name:
        with st.spinner("Processing document..."):
            document_processor.process_document(pdf_path)
        st.session_state.processed_file = uploaded_file.name
    st.sidebar.success("✅ Document processed successfully!")
# Initialize chat history in session state
if "chat_history" not in st.session_state:
st.session_state.chat_history = []
# Display chat history
st.title("💬 AI-Powered Document Chat")
chat_container = st.container()
with chat_container:
    for message in st.session_state.chat_history:
        role, text = message
        if role == "user":
            st.markdown(f"**🧑‍💻 You:** {text}")
        else:
            st.markdown(f"**🤖 AI:** {text}")
# User Input at the bottom
question = st.text_input("Ask a question:", placeholder="Type your question and press Enter...", key="user_input")
# Answer each question only once: the text_input value persists across reruns,
# so without this guard the st.rerun() below would retrigger the query indefinitely.
if question and question != st.session_state.get("last_question"):
    st.session_state.last_question = question

    # Append user question to history
    st.session_state.chat_history.append(("user", question))

    with st.spinner("Generating answer..."):
        answer = qa_engine.query(question)

    # Append AI answer to history
    st.session_state.chat_history.append(("ai", answer))

    # Rerun the app so the updated chat history is displayed above
    st.rerun()
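# Note: QAEngine.query() is assumed to return a complete answer string. If it streamed
# tokens instead, st.write_stream would be the more natural way to render the response.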