# --- Hugging Face Spaces file-viewer residue (kept as comments) ---
# sairamn's picture
# Initial commit
# db7f031
# raw | history blame
# 940 Bytes
import streamlit as st
from transformers import BartTokenizer, TFBartForConditionalGeneration
# --- Model setup -------------------------------------------------------
# Tokenizer and weights both come from the BART-large checkpoint
# fine-tuned on CNN/DailyMail for abstractive summarization.
model_path = 'facebook/bart-large-cnn'
tokenizer_path = 'facebook/bart-large-cnn'

# Instantiate the tokenizer first, then the TensorFlow generation model;
# both are downloaded/cached by the transformers hub machinery.
tokenizer = BartTokenizer.from_pretrained(tokenizer_path)
model = TFBartForConditionalGeneration.from_pretrained(model_path)
def summarize_text(text):
    """Summarize *text* with the BART CNN/DailyMail model.

    Args:
        text: The input document as a plain string. Inputs longer than
            1024 tokens are truncated to the model's context window.

    Returns:
        A summary string of roughly 40-150 tokens, decoded without
        special tokens.
    """
    # BART is not a prefix-conditioned model (unlike T5), so the text is
    # encoded as-is; the previous 'summarize: ' prefix was leaking a T5
    # convention into the document and degrading the input.
    inputs = tokenizer.encode(text, return_tensors='tf', max_length=1024, truncation=True)
    summary_ids = model.generate(
        inputs,
        max_length=150,      # upper bound on summary length (tokens)
        min_length=40,       # avoid degenerate one-line summaries
        length_penalty=2.0,  # >1.0 favors longer sequences within the bounds
        num_beams=4,         # beam search for higher-quality output
        early_stopping=True, # stop once all beams emit EOS
    )
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)
# --- Streamlit UI ------------------------------------------------------
st.title("Text Summarization")
text = st.text_area("Enter text to summarize", height=200)
if st.button("Summarize"):
    if text.strip():
        summary = summarize_text(text)
        st.write("Summary:")
        st.write(summary)
    else:
        # Guard against empty input: with min_length=40 the model would
        # otherwise be forced to generate text from an empty prompt.
        st.warning("Please enter some text to summarize.")