Update app.py
app.py
CHANGED
@@ -2,11 +2,21 @@ import gradio as gr
 from transformers import pipeline
 
 # Load the question generation model
-question_gen = pipeline("text2text-generation", model="
+question_gen = pipeline("text2text-generation", model="valhalla/t5-base-qg-hl")
 
 # Function to generate questions
 def generate_questions(text, num_questions, question_type):
-
+    # Highlight the answer in the context using <hl> tags
+    # For simplicity, we'll highlight the first sentence
+    sentences = text.strip().split('.')
+    if len(sentences) > 1:
+        answer = sentences[0].strip()
+        context = '. '.join(sentences[1:]).strip()
+    else:
+        answer = text.strip()
+        context = text.strip()
+
+    prompt = f"generate question: <hl> {answer} <hl> {context}"
     results = question_gen(prompt, max_length=128, num_return_sequences=num_questions)
     return "\n\n".join([f"{i+1}. {r['generated_text']}" for i, r in enumerate(results)])
 
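For context, a minimal sketch of how the highlighted prompt format introduced in this commit can be exercised outside the Gradio app, assuming the same valhalla/t5-base-qg-hl checkpoint (the answer/context strings below are illustrative, not from the app):

    from transformers import pipeline

    # Same checkpoint as in app.py
    question_gen = pipeline("text2text-generation", model="valhalla/t5-base-qg-hl")

    # Mirror the prompt construction from generate_questions():
    # the answer span is wrapped in <hl> tags, followed by the remaining context.
    answer = "The Eiffel Tower was completed in 1889"
    context = "It was built as the entrance to the World's Fair in Paris."
    prompt = f"generate question: <hl> {answer} <hl> {context}"

    results = question_gen(prompt, max_length=128, num_return_sequences=1)
    print(results[0]["generated_text"])

Requesting a single sequence avoids the sampling/beam settings that num_return_sequences > 1 would otherwise require.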