import gradio as gr
from transformers import T5Tokenizer, T5ForConditionalGeneration

# Load the end-to-end question-generation model and its tokenizer.
# T5Tokenizer is already the slow, SentencePiece-based tokenizer (it requires the
# sentencepiece package), so no use_fast flag is needed to avoid fast-tokenizer issues.
model_name = "valhalla/t5-base-e2e-qg"
tokenizer = T5Tokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name)
def generate_questions(text):
    # The e2e-qg checkpoint expects the passage prefixed with "generate questions: ".
    input_text = f"generate questions: {text}"
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    # Deterministic beam search; the output is a single decoded string.
    outputs = model.generate(input_ids, max_length=256, num_beams=4, do_sample=False)
    questions = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return questions
gr.Interface(
    fn=generate_questions,
    inputs=gr.Textbox(label="Enter a paragraph", lines=8),
    outputs="text",
    title="Question Generator (T5)",
).launch()
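
The decoded output is returned as one string. For the valhalla e2e-qg checkpoints the individual questions are typically joined by a literal "<sep>" marker (as in the upstream question_generation pipeline); treat that separator as an assumption about this checkpoint. A minimal post-processing sketch that shows one question per line:

def split_questions(raw_output: str) -> str:
    # Assumes the model emits questions joined by "<sep>", e.g.
    # "Who wrote X?<sep>When was it published?<sep>"
    questions = [q.strip() for q in raw_output.split("<sep>") if q.strip()]
    return "\n".join(questions)

# Example usage: wrap the generator so the Gradio output box lists questions line by line.
# gr.Interface(fn=lambda text: split_questions(generate_questions(text)), ...)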