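"""Streamlit app for chatting with an uploaded PDF.

A PDF uploaded in the sidebar is saved to a temp directory and indexed by
DocumentProcessor; questions typed into the chat box are answered by QAEngine,
and the conversation is kept in st.session_state so it survives reruns.
"""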
import streamlit as st
import os
from utils.ingestion import DocumentProcessor
from utils.llm import LLMProcessor
from utils.qa import QAEngine
# Set up Streamlit page
st.set_page_config(page_title="AI-Powered Document Chat", layout="wide")
# Initialize processors
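# (DocumentProcessor ingests the uploaded PDF and QAEngine answers questions
# against it; LLMProcessor is instantiated for the LLM layer but is not called
# directly in this file. All three come from the local utils package.)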
document_processor = DocumentProcessor()
llm_processor = LLMProcessor()
qa_engine = QAEngine()
# Ensure temp directory exists
os.makedirs("temp", exist_ok=True)
# Sidebar - File Upload
st.sidebar.header("Upload a PDF")
uploaded_file = st.sidebar.file_uploader("Choose a PDF file", type=["pdf"])
if uploaded_file:
    pdf_path = os.path.join("temp", uploaded_file.name)
    with open(pdf_path, "wb") as f:
        f.write(uploaded_file.read())
    st.sidebar.success("✅ File uploaded successfully!")

    with st.spinner(""):
        document_processor.process_document(pdf_path)

    st.sidebar.success("✅ Document processed successfully!")
# Initialize chat history in session state
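# (st.session_state persists across Streamlit reruns, so the history survives
# the st.rerun() call made after each answer)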
if "chat_history" not in st.session_state:
st.session_state.chat_history = []
# Display chat history
st.title("π¬ AI-Powered Document Chat")
chat_container = st.container()
with chat_container:
    for message in st.session_state.chat_history:
        role, text = message
        if role == "user":
            st.markdown(f"**🧑‍💻 You:** {text}")
        else:
            st.markdown(f"**🤖 AI:** {text}")
# User Input at the bottom
question = st.text_input("Ask a question:", placeholder="Type your question and press Enter...", key="user_input")
# Only handle a new question: text_input keeps its value across reruns, so an
# unguarded append would re-submit the same question after every st.rerun()
if question and question != st.session_state.get("last_question"):
    st.session_state.last_question = question

    # Append user question to history
    st.session_state.chat_history.append(("user", question))

    with st.spinner(""):
        answer = qa_engine.query(question)

    # Append AI answer to history
    st.session_state.chat_history.append(("ai", answer))

    # Rerun the app to update chat history
    st.rerun()