Update app.py
Browse files
app.py
CHANGED
@@ -53,9 +53,10 @@ Format your responses as follows:
|
|
53 |
4. Offer additional context or related information when applicable.
|
54 |
5. Conclude with suggestions for next steps or related topics the user might explore further.
|
55 |
If a user’s query is unclear or falls outside the scope of AutoGen, politely ask for clarification or direct them to more appropriate resources.
|
56 |
-
IMPORTANT: Be concise with your code generation to adhere to
|
|
|
57 |
|
58 |
-
# LangGraph system prompt (for LangGraph’s codebase)
|
59 |
langgraph_system_prompt = """
|
60 |
You are an AI assistant specializing in the LangGraph framework. Your role is to help users build, understand, and troubleshoot their multi-agent AI applications by providing accurate and helpful information from the LangGraph documentation, source code, and examples.
|
61 |
You have access to a powerful tool called `retriever_tool` that functions as a search engine for LangGraph’s resources. This tool is essential for retrieving up-to-date information to answer user queries accurately. Use it extensively to ensure your responses reflect the latest details from LangGraph.
|
@@ -88,24 +89,25 @@ When responding to user queries:
|
|
88 |
- Conclude with suggestions for next steps or related topics to explore further.
|
89 |
If a user’s query is unclear or falls outside the scope of LangGraph, politely ask for clarification or direct them to more appropriate resources.
|
90 |
Always use the `retriever_tool` frequently—even for queries you think you know well—since LangGraph’s resources are continuously updated.
|
91 |
-
|
92 |
-
IMPORTANT: Be concise with your code generation to adhere to langgraph's framework:
|
93 |
"""
|
94 |
|
95 |
def convert_namespace(ns: str) -> str:
    """
    Convert the UI namespace option to the actual namespace used.
    If the user selects "autogen", return "lmsys" instead.
    """
    # The "autogen" UI label maps to the "lmsys" Pinecone namespace;
    # every other option is already the real namespace.
    if ns == "autogen":
        return "lmsys"
    return ns
|
101 |
|
102 |
def get_description(actual_namespace: str, top_k: int) -> str:
    """
    Generate a dynamic description for a retriever tool based on
    the actual namespace and top_k value.

    Args:
        actual_namespace: The resolved Pinecone namespace (e.g. "lmsys",
            "llm-cli", or a LangGraph namespace such as "langgraph-main").
        top_k: Number of documents the retriever returns per query.

    Returns:
        A human-readable description string for the retriever tool.
    """
    # "lmsys" is the actual namespace backing the "autogen" UI option
    # (see convert_namespace), so it describes AutoGen's codebase.
    if actual_namespace == "lmsys":
        return f"Search and return information from AutoGen's codebase using namespace '{actual_namespace}' with top_k = {top_k}."
    # "llm-cli" is a selectable UI option and needs its own description;
    # without this branch it would be mislabeled as LangGraph documentation.
    elif actual_namespace == "llm-cli":
        return f"Search and return information using the LLM CLI interface with namespace '{actual_namespace}' with top_k = {top_k}."
    else:
        return f"Search and return information from LangGraph's documentation using namespace '{actual_namespace}' with top_k = {top_k}."
|
111 |
|
@@ -114,9 +116,10 @@ def init_agent(namespace1: str, top_k1: int, namespace2: str, top_k2: int):
|
|
114 |
"""
|
115 |
Initialize the LangGraph agent with up to two Pinecone retriever tools.
|
116 |
Only add a retriever tool if a non-empty namespace is provided.
|
117 |
-
Choose the system prompt based on the
|
118 |
-
-
|
119 |
-
-
|
|
|
120 |
"""
|
121 |
# Retrieve API keys from environment variables
|
122 |
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
|
@@ -181,11 +184,15 @@ def init_agent(namespace1: str, top_k1: int, namespace2: str, top_k2: int):
|
|
181 |
tools.append(retriever_tool2)
|
182 |
|
183 |
# --- Choose the System Prompt Based on Namespace Selections ---
|
184 |
-
|
185 |
-
if
|
186 |
prompt = autogen_system_prompt
|
187 |
-
|
188 |
prompt = langgraph_system_prompt
|
|
|
|
|
|
|
|
|
189 |
|
190 |
# --- Chat Model ---
|
191 |
model = ChatOpenAI(model_name="o3-mini-2025-01-31", openai_api_key=OPENAI_API_KEY)
|
@@ -202,10 +209,8 @@ namespace_options = ["langgraph-main", "autogen", "llm-cli", ""]
|
|
202 |
namespace1 = st.sidebar.selectbox("Select namespace for Retriever Tool 1:", namespace_options, index=0)
|
203 |
top_k1 = st.sidebar.slider("Select top_k for Retriever Tool 1:", min_value=1, max_value=4, value=1, step=1)
|
204 |
|
205 |
-
|
206 |
-
|
207 |
-
namespace_options2 = ["autogen", ""]
|
208 |
-
# Dropdown and slider for Retriever Tool 2 (empty option available)
|
209 |
namespace2 = st.sidebar.selectbox("Select namespace for Retriever Tool 2:", namespace_options2, index=0)
|
210 |
top_k2 = st.sidebar.slider("Select top_k for Retriever Tool 2:", min_value=1, max_value=4, value=1, step=1)
|
211 |
|
@@ -238,7 +243,6 @@ with st.form("chat_form", clear_on_submit=True):
|
|
238 |
st.session_state.chat_history.append(("user", user_input))
|
239 |
# No need to force a rerun—Streamlit re-runs automatically on widget interaction.
|
240 |
|
241 |
-
|
242 |
# --- Generate Assistant Response ---
|
243 |
if st.session_state.chat_history and st.session_state.chat_history[-1][0] == "user":
|
244 |
inputs = {"messages": st.session_state.chat_history}
|
@@ -260,7 +264,7 @@ if st.session_state.chat_history and st.session_state.chat_history[-1][0] == "us
|
|
260 |
|
261 |
if isinstance(message, tuple):
|
262 |
# This is a tool-related message.
|
263 |
-
#
|
264 |
role, text = message
|
265 |
if "call_" in text.lower():
|
266 |
tool_calls_text += text + "\n\n"
|
@@ -290,5 +294,3 @@ if st.session_state.chat_history and st.session_state.chat_history[-1][0] == "us
|
|
290 |
f"**Final Answer:**\n\n{final_answer_text}"
|
291 |
)
|
292 |
st.session_state.chat_history.append(("assistant", combined_response))
|
293 |
-
|
294 |
-
|
|
|
53 |
4. Offer additional context or related information when applicable.
|
54 |
5. Conclude with suggestions for next steps or related topics the user might explore further.
|
55 |
If a user’s query is unclear or falls outside the scope of AutoGen, politely ask for clarification or direct them to more appropriate resources.
|
56 |
+
IMPORTANT: Be concise with your code generation to adhere to AutoGen's framework.
|
57 |
+
"""
|
58 |
|
59 |
+
# LangGraph system prompt (for LangGraph’s codebase)
|
60 |
langgraph_system_prompt = """
|
61 |
You are an AI assistant specializing in the LangGraph framework. Your role is to help users build, understand, and troubleshoot their multi-agent AI applications by providing accurate and helpful information from the LangGraph documentation, source code, and examples.
|
62 |
You have access to a powerful tool called `retriever_tool` that functions as a search engine for LangGraph’s resources. This tool is essential for retrieving up-to-date information to answer user queries accurately. Use it extensively to ensure your responses reflect the latest details from LangGraph.
|
|
|
89 |
- Conclude with suggestions for next steps or related topics to explore further.
|
90 |
If a user’s query is unclear or falls outside the scope of LangGraph, politely ask for clarification or direct them to more appropriate resources.
|
91 |
Always use the `retriever_tool` frequently—even for queries you think you know well—since LangGraph’s resources are continuously updated.
|
92 |
+
IMPORTANT: Be concise with your code generation to adhere to LangGraph's framework.
|
|
|
93 |
"""
|
94 |
|
95 |
def convert_namespace(ns: str) -> str:
    """
    Convert the UI namespace option to the actual namespace used.
    If the user selects "autogen", return "lmsys" instead.
    Otherwise, return the option as-is.
    """
    # Table of UI labels that differ from their backing namespace;
    # anything not listed passes through unchanged.
    aliases = {"autogen": "lmsys"}
    return aliases.get(ns, ns)
|
102 |
|
103 |
def get_description(actual_namespace: str, top_k: int) -> str:
    """
    Generate a dynamic description for a retriever tool based on the actual namespace and top_k value.
    """
    # Per-namespace description templates; anything unrecognized is
    # treated as a LangGraph documentation namespace.
    templates = {
        "lmsys": "Search and return information from AutoGen's codebase using namespace '{ns}' with top_k = {k}.",
        "llm-cli": "Search and return information using the LLM CLI interface with namespace '{ns}' with top_k = {k}.",
    }
    default = "Search and return information from LangGraph's documentation using namespace '{ns}' with top_k = {k}."
    template = templates.get(actual_namespace, default)
    return template.format(ns=actual_namespace, k=top_k)
|
113 |
|
|
|
116 |
"""
|
117 |
Initialize the LangGraph agent with up to two Pinecone retriever tools.
|
118 |
Only add a retriever tool if a non-empty namespace is provided.
|
119 |
+
Choose the system prompt based on the active namespaces:
|
120 |
+
- If either dropdown is set to "autogen", use autogen_system_prompt.
|
121 |
+
- Else if any non-empty namespace is "langgraph-main", use langgraph_system_prompt.
|
122 |
+
- Else if the only active namespace is "llm-cli", use an empty string as the prompt.
|
123 |
"""
|
124 |
# Retrieve API keys from environment variables
|
125 |
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
|
|
|
184 |
tools.append(retriever_tool2)
|
185 |
|
186 |
# --- Choose the System Prompt Based on Namespace Selections ---
|
187 |
+
active_ns = [ns for ns in [namespace1, namespace2] if ns != ""]
|
188 |
+
if "autogen" in active_ns:
|
189 |
prompt = autogen_system_prompt
|
190 |
+
elif any(ns == "langgraph-main" for ns in active_ns):
|
191 |
prompt = langgraph_system_prompt
|
192 |
+
elif active_ns and all(ns == "llm-cli" for ns in active_ns):
|
193 |
+
prompt = "" # Empty system prompt when only llm-cli is active.
|
194 |
+
else:
|
195 |
+
prompt = ""
|
196 |
|
197 |
# --- Chat Model ---
|
198 |
model = ChatOpenAI(model_name="o3-mini-2025-01-31", openai_api_key=OPENAI_API_KEY)
|
|
|
209 |
namespace1 = st.sidebar.selectbox("Select namespace for Retriever Tool 1:", namespace_options, index=0)
|
210 |
top_k1 = st.sidebar.slider("Select top_k for Retriever Tool 1:", min_value=1, max_value=4, value=1, step=1)
|
211 |
|
212 |
+
# For Retriever Tool 2, we limit the options (if desired) or leave the same.
|
213 |
+
namespace_options2 = ["autogen", "", "llm-cli"]
|
|
|
|
|
214 |
namespace2 = st.sidebar.selectbox("Select namespace for Retriever Tool 2:", namespace_options2, index=0)
|
215 |
top_k2 = st.sidebar.slider("Select top_k for Retriever Tool 2:", min_value=1, max_value=4, value=1, step=1)
|
216 |
|
|
|
243 |
st.session_state.chat_history.append(("user", user_input))
|
244 |
# No need to force a rerun—Streamlit re-runs automatically on widget interaction.
|
245 |
|
|
|
246 |
# --- Generate Assistant Response ---
|
247 |
if st.session_state.chat_history and st.session_state.chat_history[-1][0] == "user":
|
248 |
inputs = {"messages": st.session_state.chat_history}
|
|
|
264 |
|
265 |
if isinstance(message, tuple):
|
266 |
# This is a tool-related message.
|
267 |
+
# Use a simple heuristic: if the text contains "call_" (case-insensitive), treat it as a tool call.
|
268 |
role, text = message
|
269 |
if "call_" in text.lower():
|
270 |
tool_calls_text += text + "\n\n"
|
|
|
294 |
f"**Final Answer:**\n\n{final_answer_text}"
|
295 |
)
|
296 |
st.session_state.chat_history.append(("assistant", combined_response))
|
|
|
|