# app.py — Streamlit demo (Hugging Face Spaces, commit e573614).
# Generates funny answers to user questions with a fine-tuned T5 model.
import streamlit as st
from transformers import T5Tokenizer, T5ForConditionalGeneration
@st.cache(allow_output_mutation=True)
def _load_tokenizer():
    """Load the fine-tuned T5 tokenizer once and reuse it across reruns.

    Streamlit re-executes the whole script on every interaction, so an
    uncached module-level ``from_pretrained`` call would reload the
    tokenizer from disk each time the user types.
    """
    return T5Tokenizer.from_pretrained("jokes-tokenizer")


tokenizer = _load_tokenizer()
@st.cache(allow_output_mutation=True)
def load_model():
    """Load the fine-tuned T5 joke model once and cache it across reruns.

    ``allow_output_mutation=True`` stops the legacy ``st.cache`` from
    hashing the large model object on every rerun and from warning when
    ``generate()`` mutates internal state — the documented pattern for
    caching models with this API.

    Returns:
        T5ForConditionalGeneration: the generation model.
    """
    return T5ForConditionalGeneration.from_pretrained("jokes-model")


model = load_model()
def infer(input_ids):
    """Generate an answer for the encoded prompt and return it as plain text.

    Args:
        input_ids: encoded prompt tensor produced by the tokenizer.

    Returns:
        str: the decoded generated sequence, special tokens stripped.
    """
    sequences = model.generate(input_ids=input_ids)
    best = sequences[0]
    return tokenizer.decode(best, skip_special_tokens=True)
st.title("Stupid jokes with transformers")
st.write("Write a question you want to see a funny answer for.")

sent = st.text_area("Text", height=100)
if sent:
    # Truncate long prompts so the encoder input stays within the length
    # the model was fine-tuned on.  (The original code also defined an
    # unused ``max_target_length = 32``; generation length is left to the
    # model's defaults, as before.)
    max_source_length = 64
    prefix = "Answer the following question in a funny way: "
    input_ids = tokenizer(
        prefix + sent,
        max_length=max_source_length,
        truncation=True,
        return_tensors="pt",
    ).input_ids
    generated_sequence = infer(input_ids)
    st.write(generated_sequence)