Copain22 committed on
Commit
7d8a35f
Β·
verified Β·
1 Parent(s): fa1027d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +85 -49
app.py CHANGED
@@ -1,4 +1,5 @@
1
  import os
 
2
  import streamlit as st
3
  from langchain_community.embeddings import HuggingFaceEmbeddings
4
  from langchain_community.vectorstores import FAISS
@@ -8,76 +9,111 @@ from langchain.memory import ConversationBufferMemory
8
  from langchain.text_splitter import RecursiveCharacterTextSplitter
9
  from langchain_community.document_loaders import PyMuPDFLoader
10
 
11
- # ──────────────────── 1. Setup ────────────────────
 
12
  st.title("β˜• CafΓ© Eleven Ordering Assistant")
13
- st.caption("Powered by LangChain & Streamlit")
14
 
15
- # Load documents
16
- @st.cache_resource
17
- def load_docs():
18
- loader = PyMuPDFLoader("menu.pdf") # Change to your PDF filename
19
- return loader.load()
 
20
 
21
- # Initialize components
 
 
 
 
22
  @st.cache_resource
23
- def init_chain():
24
- # Embeddings
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  embeddings = HuggingFaceEmbeddings(
26
  model_name="sentence-transformers/all-mpnet-base-v2"
27
  )
 
28
 
29
- # Text splitting
30
- text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
31
- documents = text_splitter.split_documents(load_docs())
32
-
33
- # Vectorstore
34
- vectorstore = FAISS.from_documents(documents, embeddings)
35
-
36
- # LLM (using free inference API)
37
  llm = HuggingFaceHub(
38
  repo_id="meta-llama/Llama-2-7b-chat-hf",
39
  huggingfacehub_api_token=os.environ["HF_TOKEN"],
40
  model_kwargs={
41
  "temperature": 0.2,
42
- "max_new_tokens": 256
43
  }
44
  )
45
 
46
- # Memory
47
- memory = ConversationBufferMemory(
48
- memory_key="chat_history",
49
- return_messages=True,
50
- output_key='answer'
51
- )
52
-
53
- # Chain
54
  return ConversationalRetrievalChain.from_llm(
55
  llm=llm,
56
  retriever=vectorstore.as_retriever(),
57
- memory=memory,
58
- return_source_documents=True
 
 
 
 
 
 
 
 
 
 
 
 
59
  )
60
 
61
- # ──────────────────── 2. Chat Interface ────────────────────
62
- if "messages" not in st.session_state:
63
- st.session_state.messages = [
64
- {"role": "assistant", "content": "Hi! Welcome to CafΓ© Eleven. What would you like to order today?"}
65
- ]
66
-
67
- for message in st.session_state.messages:
68
- with st.chat_message(message["role"]):
69
- st.markdown(message["content"])
70
-
71
  if prompt := st.chat_input("Your order..."):
72
  st.session_state.messages.append({"role": "user", "content": prompt})
73
- with st.chat_message("user"):
74
- st.markdown(prompt)
75
 
76
- with st.chat_message("assistant"):
77
- chain = init_chain()
78
- result = chain({"question": prompt})
79
- response = result["answer"]
80
-
81
- # Display response
82
- st.markdown(response)
83
- st.session_state.messages.append({"role": "assistant", "content": response})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import os
2
+ from pathlib import Path
3
  import streamlit as st
4
  from langchain_community.embeddings import HuggingFaceEmbeddings
5
  from langchain_community.vectorstores import FAISS
 
9
  from langchain.text_splitter import RecursiveCharacterTextSplitter
10
  from langchain_community.document_loaders import PyMuPDFLoader
11
 
12
# App config
st.set_page_config(page_title="Café Eleven", page_icon="☕")
st.title("☕ Café Eleven Ordering Assistant")

# Seed the conversation with a greeting the first time the page runs.
if "messages" not in st.session_state:
    greeting = {
        "role": "assistant",
        "content": "Hi! Welcome to Café Eleven. What would you like to order today?"
    }
    st.session_state.messages = [greeting]

# Replay the full transcript on every Streamlit rerun.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.write(entry["content"])
26
+
27
+ # Chat functions
28
@st.cache_resource
def load_chain():
    """Build the retrieval-QA chain: load menu PDFs, index them, wire up the LLM.

    Cached by Streamlit so PDFs are parsed and embedded only once per
    session; the cache is cleared explicitly when new menus are uploaded.

    Returns:
        A ConversationalRetrievalChain ready to answer ordering questions.
    """
    # Local import: PromptTemplate is only needed inside this function and
    # is not in the file-level import block.
    from langchain.prompts import PromptTemplate

    # Load all PDFs in the working directory (uploaded menus land here).
    pdf_files = [str(p) for p in Path(".").glob("*.pdf")]
    if not pdf_files:
        st.error("No PDF files found! Please upload menu PDFs.")
        st.stop()

    # Load and split documents into overlapping chunks for retrieval.
    docs = []
    for pdf in pdf_files:
        loader = PyMuPDFLoader(pdf)
        docs.extend(loader.load())

    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000,
        chunk_overlap=200
    )
    splits = text_splitter.split_documents(docs)

    # Setup vectorstore over the chunks.
    embeddings = HuggingFaceEmbeddings(
        model_name="sentence-transformers/all-mpnet-base-v2"
    )
    vectorstore = FAISS.from_documents(splits, embeddings)

    # Setup LLM. Fail with a clear message when the token is missing
    # instead of surfacing a bare KeyError to the user.
    hf_token = os.environ.get("HF_TOKEN")
    if not hf_token:
        st.error("HF_TOKEN environment variable is not set.")
        st.stop()

    llm = HuggingFaceHub(
        repo_id="meta-llama/Llama-2-7b-chat-hf",
        huggingfacehub_api_token=hf_token,
        model_kwargs={
            "temperature": 0.2,
            # BUG FIX: use max_new_tokens (bounds only the generated
            # continuation); max_length also counts the prompt and can
            # truncate answers to nothing on long retrieved contexts.
            "max_new_tokens": 256
        }
    )

    # BUG FIX: condense_question_prompt must be a PromptTemplate, not a raw
    # string — ConversationalRetrievalChain calls .format() on it, so a
    # plain str raises at the first question.
    condense_prompt = PromptTemplate(
        input_variables=["chat_history", "question"],
        template="""
You are a friendly café assistant. Your job is to:
1. Greet customers warmly
2. Help them place orders
3. Suggest menu items
4. Never make up items not in the menu
Current conversation:
{chat_history}
Question: {question}
""",
    )

    # Create chain with conversational memory and the custom prompt.
    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=ConversationBufferMemory(
            memory_key="chat_history",
            return_messages=True
        ),
        condense_question_prompt=condense_prompt,
    )
83
 
84
# Chat input: handle one user turn per rerun.
if prompt := st.chat_input("Your order..."):
    st.session_state.messages.append({"role": "user", "content": prompt})
    st.chat_message("user").write(prompt)

    with st.spinner("Preparing your order..."):
        try:
            qa_chain = load_chain()
            result = qa_chain({"question": prompt})
            response = result["answer"]

            # Light post-processing keyed off the user's wording.
            lowered = prompt.lower()
            if "menu" in lowered:
                response = "Here are our offerings:\n" + response
            elif "thank" in lowered:
                response = "You're welcome! " + response

            st.session_state.messages.append({"role": "assistant", "content": response})
            st.chat_message("assistant").write(response)

        except Exception as e:
            st.error(f"Sorry, something went wrong: {str(e)}")
105
+
106
# PDF upload section
with st.sidebar:
    st.subheader("Menu Management")
    uploaded_files = st.file_uploader(
        "Upload PDF menus",
        type="pdf",
        accept_multiple_files=True
    )
    if uploaded_files:
        for file in uploaded_files:
            # SECURITY FIX: strip any directory components from the
            # client-supplied filename so an upload named e.g.
            # "../../x.pdf" cannot escape the working directory.
            safe_name = Path(file.name).name
            with open(safe_name, "wb") as f:
                f.write(file.getbuffer())
        st.success(f"Uploaded {len(uploaded_files)} menu(s)")
        st.cache_resource.clear()  # Refresh the vectorstore