Jofthomas committed on
Commit 10e9b7d · verified · 1 Parent(s): db9a838

Create app.py

Files changed (1): app.py +92 -0
app.py ADDED
@@ -0,0 +1,92 @@
import os
import gradio as gr
from gradio import ChatMessage
import requests
from typing import Dict, List
from langchain_core.messages import HumanMessage
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI
from langgraph.checkpoint.memory import MemorySaver
from langgraph.prebuilt import create_react_agent

# Weather and location tools
@tool
def get_lat_lng(location_description: str) -> dict[str, float]:
    """Get the latitude and longitude of a location."""
    return {"lat": 51.1, "lng": -0.1}  # London coordinates as dummy response

@tool
def get_weather(lat: float, lng: float) -> dict[str, str]:
    """Get the weather at a location."""
    return {"temperature": "21°C", "description": "Sunny"}  # Dummy response


def stream_from_agent(message: str, history: List[Dict[str, str]]) -> gr.ChatMessage:
    """Process messages through the LangChain agent with visible reasoning."""

    # Initialize the agent
    llm = ChatOpenAI(temperature=0, model="gpt-4")
    memory = MemorySaver()
    tools = [get_lat_lng, get_weather]
    agent_executor = create_react_agent(llm, tools, checkpointer=memory)

    # Add message to history
    past_messages = [HumanMessage(content=message)]
    for h in history:
        if h["role"] == "user":
            past_messages.append(HumanMessage(content=h["content"]))

    messages_to_display = []
    final_response = None

    for chunk in agent_executor.stream(
        {"messages": past_messages},
        config={"configurable": {"thread_id": "abc123"}}
    ):
        # Handle agent's actions and tool usage
        if chunk.get("agent"):
            for msg in chunk["agent"]["messages"]:
                if msg.content:
                    final_response = msg.content

                # Handle tool calls
                for tool_call in msg.tool_calls:
                    tool_message = ChatMessage(
                        content=f"Parameters: {tool_call['args']}",
                        metadata={
                            "title": f"🛠️ Using {tool_call['name']}",
                            "id": tool_call["id"],
                        }
                    )
                    messages_to_display.append(tool_message)
                    yield messages_to_display

        # Handle tool responses
        if chunk.get("tools"):
            for tool_response in chunk["tools"]["messages"]:
                # Find the corresponding tool message
                for msg in messages_to_display:
                    if msg.metadata.get("id") == tool_response.tool_call_id:
                        msg.content += f"\nResult: {tool_response.content}"
                        yield messages_to_display

    # Add the final response as a regular message
    if final_response:
        messages_to_display.append(ChatMessage(content=final_response))
        yield messages_to_display

# Create the Gradio interface
demo = gr.ChatInterface(
    fn=stream_from_agent,
    type="messages",
    title="🌤️ Weather Assistant",
    description="Ask about the weather anywhere! Watch as I gather the information step by step.",
    examples=[
        "What's the weather like in Tokyo?",
        "Is it sunny in Paris right now?",
        "Should I bring an umbrella in New York today?"
    ],

)

demo.launch()
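
Note that the two tools in the committed file return hardcoded dummy values, and the requests import is never used. If live data is wanted, the tools could be rewritten against the free Open-Meteo geocoding and forecast endpoints, as in the sketch below. This is not part of the commit, and it assumes those endpoints and their JSON fields ("results", "latitude", "longitude", "current_weather") behave as their public documentation describes; drop-in replacements would go in place of the dummy definitions above.

import requests  # already imported in app.py
from langchain_core.tools import tool  # already imported in app.py

@tool
def get_lat_lng(location_description: str) -> dict[str, float]:
    """Get the latitude and longitude of a location (Open-Meteo geocoding, assumed endpoint)."""
    resp = requests.get(
        "https://geocoding-api.open-meteo.com/v1/search",
        params={"name": location_description, "count": 1},
        timeout=10,
    )
    resp.raise_for_status()
    result = resp.json()["results"][0]  # take the best match for the description
    return {"lat": result["latitude"], "lng": result["longitude"]}

@tool
def get_weather(lat: float, lng: float) -> dict[str, str]:
    """Get the current weather at a location (Open-Meteo forecast, assumed endpoint)."""
    resp = requests.get(
        "https://api.open-meteo.com/v1/forecast",
        params={"latitude": lat, "longitude": lng, "current_weather": "true"},
        timeout=10,
    )
    resp.raise_for_status()
    current = resp.json()["current_weather"]
    return {
        "temperature": f"{current['temperature']}°C",
        "windspeed": f"{current['windspeed']} km/h",
    }

Running the app as committed also assumes an OPENAI_API_KEY is available in the environment for ChatOpenAI, with the gradio, langchain-openai, and langgraph packages installed.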