Spaces:
Sleeping
Sleeping
File size: 1,999 Bytes
52d4ec9 50a4735 43d6abb 50a4735 52d4ec9 43d6abb 793bade 43d6abb 793bade 43d6abb 793bade 50a4735 793bade 52d4ec9 793bade 52d4ec9 793bade 52d4ec9 43d6abb 50a4735 43d6abb 52d4ec9 793bade 52d4ec9 50a4735 793bade 52d4ec9 793bade 52d4ec9 43d6abb 793bade 43d6abb 52d4ec9 793bade |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 |
import streamlit as st
from langchain.prompts import FewShotChatMessagePromptTemplate
from langchain.prompts.example_selector import LengthBasedExampleSelector
from langchain.llms import HuggingFaceHub
from datasets import load_dataset
# Load dataset (you can use any summarization dataset here)
@st.cache_data
def load_examples():
    """Load a small set of dialogue/summary pairs to use as few-shot examples.

    Returns:
        list[dict]: each item has an ``"input"`` key (the dialogue text) and
        an ``"output"`` key (its reference summary), the shape consumed by
        the few-shot prompt template built below.
    """
    # Using 'knkarthick/dialogsum' as an example dataset; only the first
    # five training rows are pulled to keep startup cheap for testing.
    dataset = load_dataset("knkarthick/dialogsum", split="train[:5]")
    # Re-shape each dataset row into the {"input": ..., "output": ...} dicts
    # the prompt template expects.
    return [
        {"input": example["dialogue"], "output": example["summary"]}
        for example in dataset
    ]
# Load few-shot examples from the dataset
examples = load_examples()
# Create FewShotChatMessagePromptTemplate
# NOTE(review): in LangChain, `examples` and `example_selector` are normally
# mutually exclusive (the selector already wraps the examples), and
# `prefix`/`suffix` are options of the string-based FewShotPromptTemplate,
# not the chat variant — verify this keyword set against the installed
# langchain version; it may raise a validation error at import time.
# The selector caps the total example length at ~1000 tokens/words so the
# prompt stays within model limits.
example_prompt = FewShotChatMessagePromptTemplate.from_examples(
examples=examples,
example_selector=LengthBasedExampleSelector(examples=examples, max_length=1000),
input_variables=["input"],
prefix="You are a helpful assistant that summarizes dialogues. Examples:",
suffix="Now summarize this:\n{input}"
)
# Set up Hugging Face model (you can replace it with any other available model)
# NOTE(review): HuggingFaceHub reads the HUGGINGFACEHUB_API_TOKEN env var and
# calls the hosted inference API; its `llm(...)` call returns a plain string —
# confirm, since the call site below indexes the result like a dict.
llm = HuggingFaceHub(repo_id="t5-small", task="summarization")
# Streamlit UI setup
# NOTE(review): the original title/label strings contained mojibake
# ("π", "β") from a bad encoding pass; restored with plausible emoji —
# confirm against the upstream app.
st.title("📝 Dialogue Summarizer using Few-Shot Prompt + T5")
input_text = st.text_area("📋 Paste your conversation here:")

if st.button("Generate Summary"):
    if input_text.strip():
        # Create the prompt using FewShotChatMessagePromptTemplate
        messages = example_prompt.format_messages(input=input_text)
        # Show the fully rendered prompt so users can inspect what the
        # model actually receives.
        with st.expander("📄 Generated Prompt"):
            for msg in messages:
                st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
        # Get the summary using the Hugging Face model.
        # NOTE(review): only the first message's content is sent — the other
        # few-shot messages are dropped; confirm whether the full prompt
        # should be joined and sent instead.
        response = llm(messages[0].content)
        st.success("✅ Summary:")
        # Fix: LangChain's LLM __call__ returns a plain string, not a dict,
        # so the original `response['text']` raised TypeError.
        st.write(response)
    else:
        st.warning("Please enter some text.")