pradeepsengarr committed
Commit b38d37f · verified · 1 Parent(s): 83e3bd0

Update app.py

Files changed (1): app.py +25 -13
app.py CHANGED
@@ -570,21 +570,32 @@ def qa_llm():
     logging.info("QA model setup complete")
     return qa
 
-def process_answer(user_question):
-    """Generate an answer to the user’s question."""
+def process_answer(user_question, full_text):
+    """Generate an answer to the user’s question or summarize the PDF content."""
     try:
         logging.info("Processing user question")
-        qa = qa_llm()
-
-        tailored_prompt = f"""
-        You are an expert chatbot designed to assist with any topic, providing accurate and detailed answers based on the provided PDFs.
-        Your goal is to deliver the most relevant information and resources based on the question asked.
-        User question: {user_question}
-        """
-
+
+        # Check if the question is related to summarization
+        if "summarize" in user_question.lower() or "summary" in user_question.lower():
+            tailored_prompt = f"""
+            Please provide a summary of the following content extracted from the PDF:
+            {full_text}
+            """
+        else:
+            # Regular Q&A with context from the uploaded PDF
+            tailored_prompt = f"""
+            You are an expert chatbot designed to assist with any topic, providing accurate and detailed answers based on the provided PDFs.
+            Your goal is to deliver the most relevant information and resources based on the question asked.
+            User question: {user_question}
+            Content from the uploaded document: {full_text}
+            """
+
+        # Pass the tailored prompt to the question-answering chain (QA) system
+        qa = qa_llm()  # Call your QA LLM setup
         generated_text = qa({"query": tailored_prompt})
         answer = generated_text['result']
 
+        # If the answer contains certain fallback phrases, return a default message
         if "not provide" in answer or "no information" in answer:
             return "The document does not provide sufficient information to answer your question."
 
@@ -595,6 +606,7 @@ def process_answer(user_question):
         logging.error(f"Error during answer generation: {str(e)}")
         return "Error processing the question."
 
+
 # Streamlit UI Setup
 st.sidebar.header("File Upload")
 uploaded_files = st.sidebar.file_uploader("Upload your PDF files", type=["pdf"], accept_multiple_files=True)
@@ -633,9 +645,9 @@ if uploaded_files:
         else:
             st.warning("Failed to extract text from this PDF.")
 
-        # Generate summary option
-        if st.button("Generate Summary of Document"):
-            st.write("Summary: [Provide the generated summary here]")
+        # # Generate summary option
+        # if st.button("Generate Summary of Document"):
+        #     st.write("Summary: [Provide the generated summary here]")
 
         # Run data ingestion when files are uploaded
         data_ingestion()
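
Note on the call site: this commit changes process_answer to take a second full_text argument and retires the "Generate Summary of Document" button in favor of keyword routing (typing "summarize" or "summary" in the question), but the hunks shown here do not include the updated Streamlit caller. The sketch below is only an assumption of how the new signature could be wired up, not code from this commit; the extract_text_from_pdf helper, the PyPDF2 dependency, and the text_input prompt are placeholders.

# Hypothetical call site for the new process_answer(user_question, full_text) signature.
# Everything here is an illustrative assumption; only process_answer comes from app.py.
import streamlit as st
from PyPDF2 import PdfReader

def extract_text_from_pdf(uploaded_file) -> str:
    # Assumed helper: pull the raw text out of one uploaded PDF.
    reader = PdfReader(uploaded_file)
    return "\n".join(page.extract_text() or "" for page in reader.pages)

uploaded_files = st.sidebar.file_uploader("Upload your PDF files", type=["pdf"], accept_multiple_files=True)

if uploaded_files:
    # Concatenate the text of every uploaded PDF so both the Q&A and the
    # summarization branches of process_answer see the whole document.
    full_text = "\n".join(extract_text_from_pdf(f) for f in uploaded_files)

    user_question = st.text_input("Ask a question, or type 'summarize' for a summary:")
    if user_question:
        # "summarize"/"summary" in the question triggers the summarization prompt;
        # anything else takes the regular Q&A path with full_text as context.
        st.write(process_answer(user_question, full_text))

Routing on the words "summarize"/"summary" lets a single text box serve both use cases, which is why the dedicated summary button is commented out in the last hunk.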