pdoshi committed
Commit eb93a33 · 1 Parent(s): 46425b1

chatbot history added

Files changed (1):
llm_setup/llm_setup.py  +23 -3
llm_setup/llm_setup.py CHANGED
@@ -44,17 +44,23 @@ class LLMService:
         Initializes the conversational RAG chain.
         """
         # Initialize RAG (Retrieval-Augmented Generation) chain
-        prompt = ChatPromptTemplate(input_variables=['context', 'question'], messages=[HumanMessagePromptTemplate(
-            prompt=PromptTemplate(input_variables=['context', 'question'], template=self.system_prompt))])
+        prompt = ChatPromptTemplate(input_variables=['history', 'context', 'question'], messages=[HumanMessagePromptTemplate(
+            prompt=PromptTemplate(input_variables=['history', 'context', 'question'], template="{history}\nContext: {context}\nQuestion: {question}"))])
 
         # Initialize conversational RAG chain
         self._conversational_rag_chain = (
-            {"context": self._web_retriever | format_documents, "question": RunnablePassthrough()}
+            {"context": self._web_retriever | format_documents, "question": RunnablePassthrough(), "history": self.get_chat_history}
             | prompt
             | self.llm
             | StrOutputParser()
         )
 
+    def get_chat_history(self):
+        """
+        Retrieves the last 3 chat messages formatted as a string.
+        """
+        return "\n".join(self.chat_history) if self.chat_history else "No prior conversation."
+
     def conversational_rag_chain(self):
         """
         Returns the initialized conversational RAG chain.
@@ -64,6 +70,20 @@ class LLMService:
         """
         return self._conversational_rag_chain
 
+    def update_chat_history(self, user_input: str, llm_response: str):
+        """
+        Updates the chat history with the latest question and response.
+        """
+        self.chat_history.append(f"User: {user_input}\nAI: {llm_response}")
+
+    def ask_question(self, question: str):
+        """
+        Processes a user question using the conversational RAG chain and updates history.
+        """
+        response = self._conversational_rag_chain.invoke(question)
+        self.update_chat_history(question, response)
+        return response
+
     def get_llm(self) -> ChatGoogleGenerativeAI:
         """
         Returns the LLM instance.
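
To make the new data flow concrete, here is a small standalone sketch (not part of the commit) of how the history string produced by get_chat_history() slots into the added prompt template; the example strings, the local chat_history list, and the direct call to PromptTemplate.format() are illustrative only:

# Illustrative sketch, assuming langchain-core is installed; mirrors the commit's
# "{history}\nContext: {context}\nQuestion: {question}" template outside the chain.
from langchain_core.prompts import PromptTemplate

template = "{history}\nContext: {context}\nQuestion: {question}"
prompt = PromptTemplate(input_variables=["history", "context", "question"], template=template)

chat_history = []  # in the real class this list lives on the LLMService instance
chat_history.append("User: What is this repo?\nAI: A RAG chatbot over web documents.")  # example turn

filled = prompt.format(
    history="\n".join(chat_history) if chat_history else "No prior conversation.",
    context="<retrieved documents would go here>",
    question="Which retriever does it use?",
)
print(filled)  # previous exchange appears above the retrieved context and new question

In the chain itself, the "history" slot is filled by self.get_chat_history while the retriever and RunnablePassthrough() supply context and question; ask_question() then appends each exchange via update_chat_history(), so the next turn's prompt already contains it. The diff does not show where self.chat_history is first created, so it is presumably initialized (e.g., as an empty list) in __init__.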