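"""Streamlit app for dialogue summarization with a few-shot prompt.

A few DialogSum examples are packed into a prompt and sent to a model hosted on
the Hugging Face Hub. Run locally with: streamlit run app.py
"""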
import streamlit as st
from langchain.prompts import FewShotPromptTemplate, PromptTemplate
from langchain.llms import HuggingFaceHub
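# Note: HuggingFaceHub authenticates via the HUGGINGFACEHUB_API_TOKEN environment
# variable (or the huggingfacehub_api_token argument).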
from datasets import load_dataset
# Load just a few training examples; a handful is enough for few-shot prompting
# and keeps the prompt within the model's context window.
dataset = load_dataset("knkarthick/dialogsum", split="train[:3]")
# Build (input, output) example pairs: each record has a 'dialogue' and its 'summary'
examples = [
    {
        "input": dialogue['dialogue'],   # the conversation text
        "output": dialogue['summary'],   # the reference summary
    }
    for dialogue in dataset
]
# How each example is rendered inside the prompt
example_prompt = PromptTemplate(
    input_variables=["input", "output"],
    template="Dialogue: {input}\nSummary: {output}",
)
# String-based few-shot prompt: instructions, the worked examples, then the new text
few_shot_prompt = FewShotPromptTemplate(
    examples=examples,
    example_prompt=example_prompt,
    prefix="You are a helpful summarizer. Here are a few examples:",
    suffix="Now summarize this: {input}",
    input_variables=["input"],
)
# Streamlit UI
st.title("📝 Text Summarizer using Few-Shot Prompt")
input_text = st.text_area("Enter the text you want to summarize:")
if st.button("Summarize"):
    if input_text.strip():
        # Format the full few-shot prompt with the user's text
        formatted_message = few_shot_prompt.format(input=input_text)

        with st.expander("🔍 Prompt Preview"):
            st.markdown(f"**Formatted Prompt:** {formatted_message}")
        # Load the model from the Hugging Face Hub (swap in any hosted model you prefer)
        model = HuggingFaceHub(
            repo_id="google/pegasus-xsum",
            model_kwargs={"temperature": 0.7},
        )
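        # Note: the first request may take a few seconds while the hosted model spins up.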
        # Generate the summary
        summary = model(formatted_message)

        st.success("✅ Summary:")
        st.write(summary)
    else:
        st.warning("Please enter some text!")