import gradio as gr
from transformers import pipeline

# ✅ text2text-generation is the right pipeline task for T5 sequence-to-sequence models
question_generator = pipeline("text2text-generation", model="valhalla/t5-base-e2e-qg")
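# The "e2e-qg" checkpoints are end-to-end question-generation fine-tunes of T5,
# so the pipeline takes a raw passage and returns generated question text.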

def generate_questions(text):
    # Roughly split the text into sentence-sized chunks so each can yield its own question
    chunks = text.split(". ")
    questions = []
    for chunk in chunks:
        if len(chunk.strip()) > 0:
            # Generate one question (capped at 64 tokens) for this chunk
            result = question_generator(chunk, max_length=64)[0]['generated_text']
            questions.append(f"❓ {result}")
    return "\n".join(questions)

# Gradio interface
interface = gr.Interface(
    fn=generate_questions,
    inputs=gr.Textbox(lines=15, placeholder="Paste your long text here..."),
    outputs="text",
    title="🔍 Generate Questions from Text",
    description="Uses a T5 model to generate questions from a multi-sentence paragraph."
)

interface.launch()