Update app.py
app.py CHANGED
@@ -10,8 +10,7 @@ def generate_question(description):
     prompt = f"Only generate a factual and relevant question about this memory: {description}"
     inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
     outputs = model.generate(**inputs, max_new_tokens=64)
-
-    return question
+    return tokenizer.decode(outputs[0], skip_special_tokens=True)
 
 interface = gr.Interface(
     fn=generate_question,
@@ -19,4 +18,5 @@ interface = gr.Interface(
     outputs=gr.Textbox(label="Generated Question"),
 )
 
-
+demo = gr.Interface(fn=generate_question, inputs="text", outputs="text")
+demo.launch()
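For context, here is a minimal sketch of app.py as it stands after this commit. Only the function body, the demo interface, and demo.launch() come from the diff; the imports, the checkpoint name (google/flan-t5-base), and the AutoTokenizer/AutoModelForSeq2SeqLM loading are assumptions, since lines 1-9 of the file are not part of the hunks. The earlier interface = gr.Interface(...) block kept by the diff is omitted here because its inputs= argument is not visible in the shown lines.

import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Assumed model setup -- the actual checkpoint used by this Space is not shown in the diff.
model_name = "google/flan-t5-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

def generate_question(description):
    # Build the prompt, tokenize it, and generate up to 64 new tokens.
    prompt = f"Only generate a factual and relevant question about this memory: {description}"
    inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
    outputs = model.generate(**inputs, max_new_tokens=64)
    # Decode the first generated sequence back to plain text. This replaces the old
    # `return question`, which referenced a variable that was never defined.
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

demo = gr.Interface(fn=generate_question, inputs="text", outputs="text")
demo.launch()

After this change the Space launches demo (plain text in and out) rather than the earlier interface definition, and generate_question returns the decoded model output instead of the previously undefined question variable.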