Update app.py
app.py
CHANGED
@@ -3,9 +3,8 @@ import gradio as gr
 from transformers import T5ForConditionalGeneration, T5Tokenizer, pipeline
 
 # -----------------------------
-# 1. Load the
-#
-# b) For passage generation (using GPT-2 for dynamic passage creation)
+# 1. Load the Model & Slow Tokenizer
+# Using the "valhalla/t5-base-qg-hl" model for improved question quality.
 # -----------------------------
 tokenizer = T5Tokenizer.from_pretrained("valhalla/t5-base-qg-hl", use_fast=False)
 model = T5ForConditionalGeneration.from_pretrained("valhalla/t5-base-qg-hl")
@@ -15,54 +14,47 @@ qg_pipeline = pipeline(
     tokenizer=tokenizer
 )
 
-# Pipeline for text generation (passage generation)
-tg_pipeline = pipeline(
-    "text-generation",
-    model="gpt2",
-    max_length=100,
-    do_sample=True
-)
-
 # -----------------------------
-# 2.
-# The generated passage should include <hl> tokens to mark the key fact (answer).
+# 2. Define Fixed Passages by Difficulty
 # -----------------------------
-[... remaining removed lines of the old passage-generation code are not rendered in this view ...]
+passages = {
+    "easy": [
+        "What is the capital of France? <hl>Paris<hl>",
+        "What type of animal is a cat? <hl>Feline<hl>",
+        "What liquid is essential for all known forms of life? <hl>Water<hl>",
+        "In which direction does the sun rise? <hl>East<hl>",
+        "What quality are dogs best known for? <hl>Loyalty<hl>",
+        "Which planet do humans live on? <hl>Earth<hl>",
+        "Which fruit is commonly red and grows on trees? <hl>Apple<hl>",
+        "What device is used to tell time? <hl>Clock<hl>",
+        "Which object in the sky shines at night? <hl>Moon<hl>",
+        "What appears in the sky after rain? <hl>Rainbow<hl>"
+    ],
+    "medium": [
+        "In which country was Albert Einstein born? <hl>Germany<hl>",
+        "What is the process by which plants convert sunlight into energy? <hl>Photosynthesis<hl>",
+        "Which explorer sailed across the Atlantic Ocean in 1492? <hl>Columbus<hl>",
+        "Which rainforest is the largest in the world? <hl>Amazon<hl>",
+        "Which ocean is the largest and deepest on Earth? <hl>Pacific<hl>",
+        "Which gas is essential for human breathing? <hl>Oxygen<hl>",
+        "Which galaxy contains our solar system? <hl>Milky<hl>",
+        "What structures in Egypt were built as tombs for pharaohs? <hl>Pyramids<hl>",
+        "Which revolution transformed manufacturing in the 18th and 19th centuries? <hl>Industrial<hl>",
+        "What system of government allows citizens to elect their leaders? <hl>Democracy<hl>"
+    ],
+    "hard": [
+        "Which historical period was a European cultural and intellectual revival? <hl>Renaissance<hl>",
+        "Which branch of physics describes nature at the smallest scales? <hl>Quantum<hl>",
+        "What document established the framework of the U.S. government? <hl>Constitution<hl>",
+        "Which empire was the continuation of the Roman Empire in the East? <hl>Byzantine<hl>",
+        "Which fundamental particle gives mass to others? <hl>Higgs<hl>",
+        "Which principle states that position and momentum cannot both be precisely known? <hl>Heisenberg<hl>",
+        "What was the period of political tension between the U.S. and the Soviet Union called? <hl>Cold<hl>",
+        "Which collection of essays supported the ratification of the U.S. Constitution? <hl>Federalist<hl>",
+        "Which theoretical framework aims to unify quantum mechanics and relativity? <hl>String<hl>",
+        "Which field of computer science focuses on creating intelligent machines? <hl>Artificial<hl>"
+    ]
+}
 
 # -----------------------------
 # 3. Session State Initialization
@@ -84,34 +76,35 @@ def adjust_difficulty(state):
     idx = diff_order.index(state["difficulty"])
     if state["score"] >= 2 and idx < len(diff_order) - 1:
         state["difficulty"] = diff_order[idx + 1]
-        state["score"] = 0  #
+        state["score"] = 0  # Reset score when leveling up
         return "Level up! Difficulty increased to: " + state["difficulty"]
     elif state["score"] <= -2 and idx > 0:
         state["difficulty"] = diff_order[idx - 1]
-        state["score"] = 0  #
+        state["score"] = 0  # Reset score when leveling down
        return "Don't worry! Difficulty decreased to: " + state["difficulty"]
     else:
         return f"Keep going! Current level: {state['difficulty']} (Score: {state['score']})"
 
 # -----------------------------
-# 5. Generate a Question from a
+# 5. Generate a Question from a Fixed Passage
 # -----------------------------
 def generate_question(state):
-    #
-    passage =
-    state["passage"] = passage
+    # Select a random passage from the fixed list based on the current difficulty.
+    passage = random.choice(passages[state["difficulty"]])
 
-    # Extract the answer from
+    # Extract the answer from text between <hl> tokens.
     parts = passage.split("<hl>")
     if len(parts) >= 3:
         answer = parts[1].strip()
     else:
         answer = "N/A"
 
-    # Use the
+    # Use the QG pipeline to generate a question from the fixed passage.
     result = qg_pipeline(passage, max_length=64)
     question_text = result[0]["generated_text"]
 
+    # Update state with the chosen passage, generated question, and correct answer.
+    state["passage"] = passage
     state["question"] = question_text
     state["answer"] = answer
     return question_text
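For readers skimming the diff, the key convention is the <hl> highlight marker: the expected answer sits between the two markers, and generate_question() recovers it by splitting the passage. A minimal, self-contained sketch of that extraction step, using one of the fixed passages added above (no model download needed for this part):

# Sketch of the <hl> answer-extraction convention used in generate_question().
passage = "What is the capital of France? <hl>Paris<hl>"
parts = passage.split("<hl>")            # ["What is the capital of France? ", "Paris", ""]
answer = parts[1].strip() if len(parts) >= 3 else "N/A"
print(answer)                            # -> Paris

The same passage is then passed unchanged to qg_pipeline; the valhalla/t5-base-qg-hl model is trained to generate a question whose answer is the highlighted span.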
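A hedged end-to-end usage sketch follows. The session-state layout below is an assumption inferred from the keys this diff touches (difficulty, score, passage, question, answer); the actual initialization lives in section 3 of app.py, which the diff does not show, and the snippet assumes the app.py definitions above are already in scope (e.g., run in the same interpreter session).

# Hypothetical smoke test; assumes passages, qg_pipeline, generate_question, and
# adjust_difficulty from app.py are in scope, and that the session state is a
# plain dict with the keys used above -- both are assumptions, not shown here.
state = {"difficulty": "easy", "score": 0, "passage": "", "question": "", "answer": ""}

print(generate_question(state))              # question generated from a random easy passage
print("expected answer:", state["answer"])   # the <hl>-highlighted span

state["score"] = 2                           # simulate two correct answers in a row
print(adjust_difficulty(state))              # reports a level-up and resets the score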