Update app.py
app.py (CHANGED)
@@ -8,10 +8,10 @@ from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain.chains import RetrievalQA
 from tempfile import NamedTemporaryFile
 
-# Load Groq API Key securely
+# Load Groq API Key securely
 os.environ["GROQ_API_KEY"] = os.getenv("GROQ_API_KEY")
 
-#
+# Function to process PDFs
 def process_pdfs(files):
     all_docs = []
     for file in files:
@@ -31,32 +31,34 @@ def process_pdfs(files):
     qa_chain = RetrievalQA.from_chain_type(llm=llm, retriever=retriever)
     return qa_chain
 
-# Global
+# Global variable
 qa_chain = None
 
-# Upload
+# Upload handler
 def upload_pdfs(files):
     global qa_chain
     qa_chain = process_pdfs(files)
     return "✅ PDFs uploaded and processed. Now ask your questions."
 
-#
+# Question handler
 def ask_question(query):
     if qa_chain is None:
         return "❌ Please upload Kaggle notebooks/competition PDFs first."
-
-    return result
-
-# Gradio UI
-upload = gr.File(file_types=[".pdf"], file_count="multiple", label="Upload Kaggle PDFs")
-btn_upload = gr.Button("Process PDFs")
-question = gr.Textbox(label="Ask a question about uploaded notebooks")
-answer = gr.Textbox(label="Assistant Answer")
+    return qa_chain.run(query)
 
+# ✅ Gradio UI (fixed)
 with gr.Blocks() as app:
     gr.Markdown("## 🤖 Kaggle Study Assistant\nUpload PDFs from Kaggle and ask intelligent questions.")
-
+
+    with gr.Row():
+        upload = gr.File(file_types=[".pdf"], file_count="multiple", label="Upload Kaggle PDFs")
+        btn_upload = gr.Button("📥 Process PDFs")
+
+    upload_output = gr.Textbox(label="Upload Status")
     btn_upload.click(fn=upload_pdfs, inputs=upload, outputs=upload_output)
+
+    question = gr.Textbox(label="Ask a question about uploaded notebooks")
+    answer = gr.Textbox(label="Assistant Answer", interactive=False)
     question.submit(fn=ask_question, inputs=question, outputs=answer)
 
 app.launch()
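
The hunk above elides lines 18-30 of process_pdfs, the part where the uploaded PDFs are indexed and the llm and retriever passed to RetrievalQA.from_chain_type are built. A minimal sketch of how that middle section is commonly wired is shown below; the PyPDFLoader / FAISS / HuggingFace embedding choices, the Groq model name, and the chunking parameters are assumptions for illustration, not taken from this commit.

# Hypothetical reconstruction of the elided middle of process_pdfs (not shown
# in this diff): load each uploaded PDF, split it into chunks, index it, and
# build the Groq-backed QA chain. Loader, embeddings, vector store, model
# name, and chunk sizes are assumptions.
from langchain.chains import RetrievalQA
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_groq import ChatGroq

def process_pdfs(files):
    all_docs = []
    for file in files:
        # Gradio passes either a filepath string or an object with a .name
        # attribute, depending on the Gradio version; handle both.
        path = file if isinstance(file, str) else file.name
        all_docs.extend(PyPDFLoader(path).load())

    # Split pages into overlapping chunks so retrieval stays focused.
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    chunks = splitter.split_documents(all_docs)

    # Embed the chunks and index them in an in-memory FAISS store.
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    retriever = FAISS.from_documents(chunks, embeddings).as_retriever()

    # Groq-hosted chat model; reads GROQ_API_KEY from the environment.
    # The model name is a placeholder.
    llm = ChatGroq(model="llama3-8b-8192", temperature=0)

    qa_chain = RetrievalQA.from_chain_type(llm=llm, retriever=retriever)
    return qa_chain

Note that qa_chain.run(query), as used in ask_question, returns the answer text directly; newer LangChain releases deprecate .run in favor of qa_chain.invoke({"query": query})["result"].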