# Bit_gpt_0.2.8 / app.py
import gradio as gr
from transformers import pipeline
import PyPDF2
import json
# 📌 Step 1: Extract text from PDF
def read_pdf(file_path):
    try:
        with open(file_path, "rb") as file:
            reader = PyPDF2.PdfReader(file)
            text = "\n".join([page.extract_text() for page in reader.pages if page.extract_text()])
        return text
    except Exception as e:
        return f"Error loading syllabus: {str(e)}"

syllabus_text = read_pdf("Syllabus.pdf")
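# Note: if Syllabus.pdf cannot be read, syllabus_text holds the error string
# instead of syllabus text, so the parser below will simply find no subjects.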

# 📌 Step 2: Extract subjects and topics
def extract_subjects_and_topics(text):
    subjects = {}
    current_subject = None
    for line in text.split("\n"):
        line = line.strip()
        if line.isupper():  # Assuming subject names are in uppercase
            current_subject = line
            subjects[current_subject] = []
        elif current_subject and line:
            subjects[current_subject].append(line)
    return subjects

subjects_data = extract_subjects_and_topics(syllabus_text)
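# Illustrative shape of subjects_data (actual keys and values depend on Syllabus.pdf):
# {
#     "MATHEMATICS": ["Limits and continuity", "Differentiation", ...],
#     "PHYSICS": ["Kinematics", "Laws of motion", ...]
# }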

# 📌 Step 3: Convert to JSON format for easy searching
subjects_json = json.dumps(subjects_data, indent=4)
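# Note: subjects_json is not referenced elsewhere in this script; it is kept as a
# ready-made JSON dump of the parsed syllabus (e.g. for logging or export).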

# 📌 Load AI Model for Chatbot
# blenderbot-400M-distill is a sequence-to-sequence model, so it is loaded with
# the "text2text-generation" pipeline rather than "text-generation".
chatbot = pipeline("text2text-generation", model="facebook/blenderbot-400M-distill")

# 📌 Step 4: Chat Function
def chat_response(message):
    message = message.lower()

    # If user asks for subjects
    if "subjects" in message:
        return "📚 Available Subjects:\n\n" + "\n".join(subjects_data.keys())

    # If user asks for topics under a subject
    for subject, topics in subjects_data.items():
        if subject.lower() in message:
            return f"📖 Topics under {subject}:\n\n" + "\n".join(topics)

    # Otherwise, fall back to the chatbot model
    response = chatbot(message, max_length=100, do_sample=True)
    return response[0]['generated_text']
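# Illustrative behaviour of chat_response:
#   a message containing "subjects"       -> list of parsed subject headings
#   a message containing a subject name   -> topics stored under that subject
#   anything else                         -> free-form reply from the BlenderBot model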

# 📌 Step 5: Create Gradio Interface
iface = gr.Interface(
    fn=chat_response,
    inputs="text",
    outputs="text",
    title="Bit GPT 0.2.8",
    description="Ask me about syllabus subjects, topics, or general questions!"
)

# 📌 Step 6: Launch App
if __name__ == "__main__":
    iface.launch()