Update app.py
app.py CHANGED
@@ -1,18 +1,12 @@
 import gradio as gr
-
 from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration
-# Looking at import statements that start with `from transformers import`,
-# in many cases they use AutoTokenizer / AutoModel, e.g.
-# tokenizer = AutoTokenizer.from_pretrained("some model name or other")
-# PreTrainedTokenizerFast : https://huggingface.co/docs/transformers/main_classes/tokenizer
-# BART is an example of an encoder-decoder model

 model_name = "ainize/kobart-news"
 tokenizer = PreTrainedTokenizerFast.from_pretrained(model_name)
 model = BartForConditionalGeneration.from_pretrained(model_name)

 # Takes the original text and returns a summary
-def summ(txt):
+def summ(input_text):  # parameter renamed from txt to input_text
     input_ids = tokenizer.encode(input_text, return_tensors="pt")
     summary_text_ids = model.generate(
         input_ids=input_ids,
@@ -28,4 +22,4 @@ interface = gr.Interface(summ,
                          [gr.Textbox(label="original text")],
                          [gr.Textbox(label="summary")])

-interface.launch()
+interface.launch()
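For reference, a minimal sketch of the complete app.py after this commit. The diff only shows the first lines of the model.generate() call and of the gr.Interface() call, so the generation arguments (num_beams, max_length, and so on), the decode step, and the return statement below are assumptions added for illustration, not part of the commit.

import gradio as gr
from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration

# Load the Korean news summarization model (a KoBART fine-tuned on news articles)
model_name = "ainize/kobart-news"
tokenizer = PreTrainedTokenizerFast.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)

# Takes the original text and returns a summary
def summ(input_text):
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    summary_text_ids = model.generate(
        input_ids=input_ids,
        # Assumed, typical beam-search settings; the actual values
        # are not visible in the diff above.
        num_beams=4,
        max_length=142,
        min_length=32,
        length_penalty=2.0,
        no_repeat_ngram_size=3,
    )
    # Decode the generated token ids back into text
    return tokenizer.decode(summary_text_ids[0], skip_special_tokens=True)

# Gradio UI: one textbox in, one textbox out (labels taken from the diff)
interface = gr.Interface(summ,
                         [gr.Textbox(label="original text")],
                         [gr.Textbox(label="summary")])

interface.launch()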