import streamlit as st
from langchain.prompts import FewShotPromptTemplate, PromptTemplate
from langchain.prompts.example_selector import LengthBasedExampleSelector
from langchain.llms import HuggingFaceHub
from datasets import load_dataset
# Load dataset (you can use any summarization dataset here)
def load_examples():
    # Using 'knkarthick/dialogsum' as an example dataset
    dataset = load_dataset("knkarthick/dialogsum", split="train[:5]")  # Load a small subset for testing
    examples = []
    for example in dataset:
        examples.append({
            "input": example["dialogue"],
            "output": example["summary"]
        })
    return examples

# Load few-shot examples from the dataset
examples = load_examples()
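# Each example is a dict of the form {"input": <dialogue text>, "output": <reference summary>},
# which matches the variables used by the prompt template below.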
# Build the few-shot prompt (FewShotPromptTemplate, since HuggingFaceHub is a plain text-completion LLM)
example_prompt = PromptTemplate(
    input_variables=["input", "output"],
    template="Dialogue:\n{input}\nSummary:\n{output}",
)
few_shot_prompt = FewShotPromptTemplate(
    example_selector=LengthBasedExampleSelector(
        examples=examples, example_prompt=example_prompt, max_length=1000
    ),
    example_prompt=example_prompt,
    prefix="You are a helpful assistant that summarizes dialogues. Examples:",
    suffix="Now summarize this:\n{input}",
    input_variables=["input"],
)
# Set up the Hugging Face Hub model (you can replace it with any other hosted model)
llm = HuggingFaceHub(repo_id="t5-small", task="summarization")
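# Note: HuggingFaceHub calls the hosted Inference API, so HUGGINGFACEHUB_API_TOKEN must be
# available in the environment (e.g. as a Space secret) for requests to succeed.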
# Streamlit UI setup
st.title("Dialogue Summarizer using Few-Shot Prompt + T5")
input_text = st.text_area("Paste your conversation here:")
if st.button("Generate Summary"):
    if input_text.strip():
        # Render the full prompt (prefix + selected examples + suffix)
        prompt_text = few_shot_prompt.format(input=input_text)
        with st.expander("Generated Prompt"):
            st.code(prompt_text)
        # Get the summary from the Hugging Face model (returned as a plain string)
        summary = llm(prompt_text)
        st.success("Summary:")
        st.write(summary)
    else:
        st.warning("Please enter some text.")
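# To run this locally (assuming the script is saved as app.py and streamlit, langchain,
# datasets, and huggingface_hub are installed): streamlit run app.py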