zoya23 commited on
Commit
490b63e
·
verified ·
1 Parent(s): 43d6abb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +32 -36
app.py CHANGED
@@ -1,54 +1,50 @@
1
  import streamlit as st
2
  from langchain.prompts import FewShotChatMessagePromptTemplate
3
- from langchain.prompts.example_selector import LengthBasedExampleSelector
4
  from langchain.llms import HuggingFaceHub
5
  from datasets import load_dataset
6
 
7
- # Load dataset (you can use any summarization dataset here)
8
- @st.cache_data
9
- def load_examples():
10
- # Using 'knkarthick/dialogsum' as an example dataset
11
- dataset = load_dataset("knkarthick/dialogsum", split="train[:5]") # Load a subset for testing
12
- examples = []
13
- for example in dataset:
14
- examples.append({
15
- "input": example["dialogue"],
16
- "output": example["summary"]
17
- })
18
- return examples
19
-
20
- # Load few-shot examples from the dataset
21
- examples = load_examples()
22
 
23
  # Create FewShotChatMessagePromptTemplate
24
- example_prompt = FewShotChatMessagePromptTemplate.from_examples(
25
  examples=examples,
26
- example_selector=LengthBasedExampleSelector(examples=examples, max_length=1000),
27
  input_variables=["input"],
28
- prefix="You are a helpful assistant that summarizes dialogues. Examples:",
29
- suffix="Now summarize this:\n{input}"
30
  )
31
 
32
- # Set up Hugging Face model (you can replace it with any other available model)
33
- llm = HuggingFaceHub(repo_id="t5-small", task="summarization")
34
-
35
- # Streamlit UI setup
36
- st.title("πŸ“ Dialogue Summarizer using Few-Shot Prompt + T5")
37
 
38
- input_text = st.text_area("πŸ“ Paste your conversation here:")
39
 
40
- if st.button("Generate Summary"):
41
  if input_text.strip():
42
- # Create the prompt using FewShotChatMessagePromptTemplate
43
- messages = example_prompt.format_messages(input=input_text)
 
 
 
44
 
45
- with st.expander("πŸ“‹ Generated Prompt"):
46
- for msg in messages:
47
- st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
 
 
48
 
49
- # Get the summary using the Hugging Face model
50
- response = llm(messages[0].content)
51
  st.success("βœ… Summary:")
52
- st.write(response['text'])
53
  else:
54
- st.warning("Please enter some text.")
 
import streamlit as st
from langchain.prompts import ChatPromptTemplate, FewShotChatMessagePromptTemplate
from langchain.llms import HuggingFaceHub
from datasets import load_dataset


# Cache the example loading so the dataset is fetched once per session,
# not on every Streamlit rerun (the previous revision had this cache and
# this revision dropped it — restored here).
@st.cache_data
def load_examples():
    """Load a small slice of the dialogsum dataset as few-shot examples.

    Returns:
        list[dict]: items of the form {"input": dialogue, "output": summary}.
    """
    dataset = load_dataset("knkarthick/dialogsum", split="train[:1%]")
    return [
        {
            "input": row["dialogue"],  # conversation text
            "output": row["summary"],  # reference summary
        }
        for row in dataset
    ]


# Cache the model handle: constructing HuggingFaceHub inside the button
# handler recreated the client on every click and every rerun.
@st.cache_resource
def load_model():
    """Return a HuggingFaceHub LLM client used for summarization."""
    # NOTE(review): HuggingFaceHub reads HUGGINGFACEHUB_API_TOKEN from the
    # environment unless a token is passed explicitly — confirm deployment
    # configuration provides it.
    return HuggingFaceHub(
        repo_id="google/pegasus-xsum",
        model_kwargs={"temperature": 0.7},
    )


examples = load_examples()

# Bug fix: FewShotChatMessagePromptTemplate requires an `example_prompt`
# (the chat template applied to each example) and does NOT accept
# `input_variables`, `prefix`, or `suffix` — the original constructor call
# raised a validation error before the UI could run.
example_prompt = ChatPromptTemplate.from_messages(
    [("human", "{input}"), ("ai", "{output}")]
)
few_shot_prompt = FewShotChatMessagePromptTemplate(
    example_prompt=example_prompt,
    examples=examples,
)
# The former prefix/suffix text lives in an outer chat template instead.
final_prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful summarizer. Here are a few examples:"),
        few_shot_prompt,
        ("human", "Now summarize this: {input}"),
    ]
)

# Streamlit UI (emoji literals repaired from mojibake in the original).
st.title("📝 Text Summarizer using Few-Shot Prompt")

input_text = st.text_area("Enter the text you want to summarize:")

if st.button("Summarize"):
    if input_text.strip():
        # Render the full few-shot prompt for the user's input.
        formatted_message = final_prompt.format(input=input_text)

        with st.expander("🔍 Prompt Preview"):
            st.markdown(f"**Formatted Prompt:** {formatted_message}")

        model = load_model()

        # HuggingFaceHub's __call__ returns the generated text as a string.
        summary = model(formatted_message)
        st.success("✅ Summary:")
        st.write(summary)
    else:
        st.warning("Please enter some text!")