import gradio as gr
from transformers import pipeline

# Question-generation pipeline backed by a T5 model fine-tuned for QG.
question_gen = pipeline("text2text-generation", model="valhalla/t5-small-qg-hl")

def generate_questions(input_text, num_questions, question_type):
    highlighted_text = input_text
    # Pick a task prefix based on the requested question type.
    if question_type == "mcq":
        prefix = "generate questions:"
    elif question_type == "subjective":
        prefix = "generate descriptive questions:"
    else:
        prefix = "generate questions:"
    prompt = f"{prefix} {highlighted_text}"
    num_questions = int(num_questions)
    # Beam search needs at least as many beams as returned sequences.
    questions = question_gen(
        prompt,
        max_length=64,
        num_beams=max(num_questions, 4),
        num_return_sequences=num_questions,
    )
    # Join the generated questions into one string for the Textbox output.
    return "\n".join(q["generated_text"] for q in questions)

with gr.Blocks() as demo:
    gr.Markdown("# AI Mock Test Generator")
    input_text = gr.Textbox(lines=10, label="Paste text or content here")
    num_questions = gr.Slider(minimum=1, maximum=10, value=5, step=1, label="Number of Questions")
    question_type = gr.Radio(choices=["mcq", "subjective", "mixed"], value="mixed", label="Question Type")
    output = gr.Textbox(label="Generated Questions", lines=10)
    btn = gr.Button("Generate")
    btn.click(fn=generate_questions, inputs=[input_text, num_questions, question_type], outputs=output)

demo.launch()
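For a quick sanity check outside the Gradio UI, the pipeline can be called directly. A minimal sketch, assuming the "-hl" model follows the usual highlight-style prompt of the valhalla question-generation models (the intended answer span wrapped in <hl> tokens); the sample sentence and file name below are made up for illustration:

# quick_test.py - standalone sketch, not part of app.py
from transformers import pipeline

qg = pipeline("text2text-generation", model="valhalla/t5-small-qg-hl")

# Assumption: the "-hl" variant expects the answer span highlighted with <hl> tokens.
sample = "generate question: <hl> Gradio <hl> is used to build the web interface for this app."
print(qg(sample, max_length=64)[0]["generated_text"])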