Spaces:
Sleeping
Sleeping
File size: 853 Bytes
272a28c ef543ab 272a28c ef543ab 272a28c ef543ab 272a28c ef543ab 272a28c ef543ab 272a28c ef543ab |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 |
import gradio as gr
from transformers import T5Tokenizer, T5ForConditionalGeneration
# Model checkpoint: end-to-end question generation fine-tune of T5-base.
MODEL_NAME = "valhalla/t5-base-e2e-qg"

# use_fast=False forces the slow SentencePiece-backed tokenizer, which
# avoids fast-tokenizer conversion issues for this checkpoint.
tokenizer = T5Tokenizer.from_pretrained(MODEL_NAME, use_fast=False)
model = T5ForConditionalGeneration.from_pretrained(MODEL_NAME)
def generate_questions(text):
    """Generate questions from a paragraph with the T5 e2e-qg model.

    Args:
        text: Source paragraph typed by the user; may be empty.

    Returns:
        The generated questions, one per line. Empty string for
        empty/whitespace-only input.
    """
    # Guard: don't run the model on empty input.
    text = (text or "").strip()
    if not text:
        return ""
    # The e2e-qg checkpoint is trained with this task prefix.
    input_text = f"generate questions: {text}"
    # truncation=True keeps long paragraphs within T5's 512-token
    # input budget instead of overflowing the position embeddings.
    input_ids = tokenizer.encode(
        input_text, return_tensors="pt", truncation=True, max_length=512
    )
    outputs = model.generate(input_ids, max_length=256, num_beams=4, do_sample=False)
    decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # The model emits multiple questions joined by the literal "<sep>"
    # marker; split them and present one question per line.
    questions = [q.strip() for q in decoded.split("<sep>") if q.strip()]
    return "\n".join(questions)
# Wire the generator into a simple web UI: one paragraph box in,
# plain text (the generated questions) out.
demo = gr.Interface(
    fn=generate_questions,
    inputs=gr.Textbox(label="Enter a paragraph", lines=8),
    outputs="text",
    title="π Question Generator (T5)",
)
demo.launch()
|