Luis Oala committed
Commit 5fee423 · 1 Parent(s): 78cf1f0
Update app.py
app.py CHANGED
@@ -1,18 +1,25 @@
 import gradio as gr
 from gradio import mix
 
-title = "trustworthy artificial intelligence workshop - content generator"
+#title = "trustworthy artificial intelligence workshop - content generator"
 description = "based on the gpt2 demo interface by <a href='https://huggingface.co/spaces/docs-demos/gpt2/tree/main'>ahsen khaliq</a>"
 
-io1 = gr.Interface.load("huggingface/distilgpt2")
+#io1 = gr.Interface.load("huggingface/distilgpt2")
+
+from transformers import pipeline, set_seed
+generator = pipeline('text-generation', model='gpt2')
+set_seed(42)
+
 
 io2 = gr.Interface.load("huggingface/gpt2-large")
+#TODO: 1) seed 2) output lenght
 
-io3 = gr.Interface.load("huggingface/gpt2-medium")
+#io3 = gr.Interface.load("huggingface/gpt2-medium")
 
-io4 = gr.Interface.load("huggingface/gpt2-xl")
+#io4 = gr.Interface.load("huggingface/gpt2-xl")
 
 def inference(text, model):
+    """
     if model == "gpt2-large":
         outtext = io2(text)
     elif model == "gpt2-medium":
@@ -21,15 +28,21 @@ def inference(text, model):
         outtext = io4(text)
     else:
         outtext = io1(text)
+    """
+    #outtext = io2(text)
+    outtext = generator(text, max_length=30, num_return_sequences=5)
     return outtext
 
 
 
 gr.Interface(
     inference,
-    [gr.inputs.Textbox(label="Input"
+    [gr.inputs.Textbox(label="Input", placeholder="trustworthy artificial intelligence")]
+    #,gr.inputs.Dropdown(choices=["distilgpt2","gpt2-medium","gpt2-large","gpt2-xl"], type="value", default="gpt2-medium", label="model")
     ],
-    gr.outputs.Textbox(label="
-    title=title,
-    description=description,
-    cache_examples=True).launch(enable_queue=True)
+    gr.outputs.Textbox(label="gpt-2 proposal"),
+    #title=title,
+    #description=description,
+    cache_examples=True).launch(enable_queue=True)
+
+#TODO: add credits at bottom