pradeepsengarr committed
Commit 7c797e6 · verified · 1 Parent(s): 38fe9c5

Update app.py

Files changed (1)
  1. app.py +27 -12
app.py CHANGED
@@ -315,23 +315,38 @@ def qa_llm():
     return RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever, return_source_documents=True)
 
 def process_answer(user_question):
+    """Generate an answer to the user’s question using a general RAG-based prompt."""
     try:
-        qa = qa_llm()
-        prompt = f"""
-        You are a helpful and accurate RAG-based chatbot. Your role is to analyze the content from uploaded PDF documents and
-        provide informative and detailed answers to any questions asked by the user. Use the uploaded knowledge to answer precisely.
-
-        Question: {user_question}
-        """
-        output = qa({"query": prompt})
-        return output['result']
+        logging.info("Processing user question")
+        qa = qa_llm()  # Set up the retrieval-based QA chain
+
+        # Generalized, flexible prompt for any kind of PDF (resume, legal doc, etc.)
+        tailored_prompt = f"""
+        You are an intelligent and helpful AI assistant that provides answers strictly based on the provided document contents.
+        If the question cannot be answered using the documents, say: 'The document does not contain this information.'
+        Otherwise, respond clearly and concisely with relevant and factual details from the PDF.
+
+        Question: {user_question}
+        """
+
+        generated_text = qa({"query": tailored_prompt})
+        answer = generated_text['result']
+
+        # Add a safeguard for uncertain or hallucinated answers
+        if "not provide" in answer.lower() or "no information" in answer.lower() or len(answer.strip()) < 10:
+            return "The document does not contain this information."
+
+        logging.info("Answer generated successfully")
+        return answer
+
     except Exception as e:
-        logging.error(f"QA failed: {e}")
-        return " Could not generate a valid answer."
+        logging.error(f"Error during answer generation: {str(e)}")
+        return "Sorry, something went wrong while processing your question."
+
 
 # ---------------- STREAMLIT UI ---------------- #
 
-# Sidebar Upload
+# Sidebar Upload
 st.sidebar.header("📤 Upload PDF Files")
 uploaded_files = st.sidebar.file_uploader("Select one or more PDF files", type="pdf", accept_multiple_files=True)
 
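
For context, process_answer() now returns a plain string on both the success and the error path, so the Streamlit side only needs to render whatever comes back. The caller below is a minimal illustrative sketch, not code from this commit: the widget labels and button flow are assumptions, and only process_answer is taken from the diff above.

# Illustrative sketch only: how the updated process_answer() could be driven
# from the Streamlit main area. Widget labels and the button flow are
# assumptions, not part of the committed app.py.
import streamlit as st

st.title("📄 PDF Question Answering")

user_question = st.text_input("Ask a question about the uploaded PDFs")

if st.button("Get Answer") and user_question:
    with st.spinner("Searching the documents..."):
        answer = process_answer(user_question)  # function changed in this commit
    st.write(answer)

One small caveat: depending on the LangChain version the app pins, calling the chain directly as qa({"query": ...}) may emit a deprecation warning in favour of qa.invoke({"query": ...}); the answer is still returned under the 'result' key either way.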