VishnuRamDebyez committed on
Commit
b04e992
·
verified ·
1 Parent(s): 7310486

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +32 -11
app.py CHANGED
@@ -115,25 +115,46 @@ class QASystem:
115
  # Conversation-state graph over the shared message history; nodes added below.
  graph_builder = StateGraph(MessagesState)
116
 
117
  def query_or_respond(state: MessagesState):
118
- response = llm.invoke(state["messages"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
119
  return {"messages": [response]}
120
 
121
  def generate(state: MessagesState):
122
- recent_tools = [m for m in reversed(state["messages"]) if m.type == "tool"][::-1]
123
-
 
 
124
  system_prompt = (
125
- "You are a senior legal assistant with knowledge in the Indian legal and judiciary system.indtroduce yourself for the first quiestion"
126
- " Provide direct concise summarized answers in 5 sentences based on the following context:\n\n"
127
- f"{' '.join(m.content for m in recent_tools)}"
128
  )
129
- messages = [SystemMessage(content=system_prompt)] + [
130
- m for m in state["messages"]
131
- if m.type in ("human", "system") or (m.type == "ai" and not m.tool_calls)
132
- ]
133
-
134
  response = llm.invoke(messages)
135
  return {"messages": [response]}
136
 
 
137
  graph_builder.add_node("query_or_respond", query_or_respond)
138
  graph_builder.add_node("generate", generate)
139
 
 
115
  # Conversation-state graph over the shared message history; nodes added below.
  graph_builder = StateGraph(MessagesState)
116
 
117
  def query_or_respond(state: MessagesState):
118
+ retrieved_docs = [m for m in state["messages"] if m.type == "tool"]
119
+
120
+ if retrieved_docs:
121
+ context = ' '.join(m.content for m in retrieved_docs)
122
+ else:
123
+ context = "Legal knowledge system. Use Indian judiciary references."
124
+
125
+ system_prompt = (
126
+ "You are a senior legal assistant with expertise in Indian law. "
127
+ "Always provide legally accurate responses with references to Indian judiciary principles. "
128
+ "If the user query is not legal-specific, still respond from a legal perspective."
129
+ f"\n\nContext:\n{context}"
130
+ )
131
+
132
+ messages = [SystemMessage(content=system_prompt)] + state["messages"]
133
+
134
+ logger.info(f"Sending to LLM: {[m.content for m in messages]}") # Debugging log
135
+
136
+ response = llm.invoke(messages)
137
  return {"messages": [response]}
138
 
139
  def generate(state: MessagesState):
140
+ retrieved_docs = [m for m in reversed(state["messages"]) if m.type == "tool"][::-1]
141
+
142
+ context = ' '.join(m.content for m in retrieved_docs) if retrieved_docs else "Legal knowledge system."
143
+
144
  system_prompt = (
145
+ "You are a senior legal assistant specializing in Indian judiciary matters. "
146
+ "Your responses MUST be legally accurate, concise (5 sentences max), and reference Indian laws when applicable."
147
+ f"\n\nContext:\n{context}"
148
  )
149
+
150
+ messages = [SystemMessage(content=system_prompt)] + state["messages"]
151
+
152
+ logger.info(f"Sending to LLM: {[m.content for m in messages]}") # Debugging log
153
+
154
  response = llm.invoke(messages)
155
  return {"messages": [response]}
156
 
157
+
158
  graph_builder.add_node("query_or_respond", query_or_respond)
159
  graph_builder.add_node("generate", generate)
160