thewimo committed
Commit 71d4e8a · verified · 1 Parent(s): 81917a3

Create agents.py

Files changed (1): agents.py (+162, -0)
agents.py ADDED
@@ -0,0 +1,162 @@
+from smolagents import CodeAgent, HfApiModel, DuckDuckGoSearchTool, PythonInterpreterTool, VisitWebpageTool, WikipediaSearchTool
+from smolagents import LiteLLMModel
+import os
+from mcp import StdioServerParameters
+from custom_mcp import create_mcp_client
+
+def run_orchestrator(prompt):
+    """
+    Runs the orchestrator agent with the given prompt and returns the answer.
+
+    Args:
+        prompt (str): The prompt to run with the orchestrator
+
+    Returns:
+        str: The answer from the orchestrator
+    """
+    '''
+    model = LiteLLMModel(
+        model_id="openrouter/google/gemini-2.5-flash-preview",
+        api_base="https://openrouter.ai/api/v1",
+        api_key="your_api_key_here",
+        num_ctx=1000000,
+    )
+    '''
+
+    model = HfApiModel("Qwen/Qwen2.5-Coder-32B-Instruct")
+
+    # Store active clients to clean up later
+    active_clients = []
+
+    def create_client_safely(name, params):
+        """Create an MCP client and handle errors gracefully"""
+        try:
+            client = create_mcp_client(params)
+            active_clients.append(client)
+            print(f"✅ Successfully connected to {name} MCP server")
+            return client
+        except Exception as e:
+            print(f"❌ Error connecting to {name} MCP server: {e}")
+            return None
+
+    # Define MCP parameters
+    wikipedia_params = StdioServerParameters(
+        command="npx",
+        args=["-y", "wikipedia-mcp"],
+        env=os.environ
+    )
+
+    fetch_webpage_params = StdioServerParameters(
+        command="uvx",
+        args=["mcp-server-fetch"],
+        env=os.environ
+    )
+
+    youtube_transcript_params = StdioServerParameters(
+        command="uvx",
+        args=["--from", "git+https://github.com/jkawamoto/mcp-youtube-transcript", "mcp-youtube-transcript"],
+        env=os.environ
+    )
+
+    code_reasoning_params = StdioServerParameters(
+        command="npx",
+        args=["-y", "@mettamatt/code-reasoning"],
+        env=os.environ
+    )
+
+    sequential_thinking_params = StdioServerParameters(
+        command="npx",
+        args=["-y", "@modelcontextprotocol/server-sequential-thinking"],
+        env=os.environ
+    )
+
+    # Connect to MCP servers safely
+    wikipedia_client = create_client_safely("Wikipedia", wikipedia_params)
+    youtube_transcript_client = create_client_safely("YouTube Transcript", youtube_transcript_params)
+    code_reasoning_client = create_client_safely("Code Reasoning", code_reasoning_params)
+    sequential_thinking_client = create_client_safely("Sequential Thinking", sequential_thinking_params)
+    fetch_webpage_client = create_client_safely("Fetch Webpage", fetch_webpage_params)
+
+    answer = None
+    try:
+        # Only create agents for services that connected successfully
+        agents = []
+
+        if sequential_thinking_client:
+            reasoning_agent = CodeAgent(
+                tools=[*sequential_thinking_client.get_tools()],
+                model=model,
+                name="reasoning_agent",
+                description="Analyzes complex problems using logical reasoning."
+            )
+            agents.append(reasoning_agent)
+
+        if code_reasoning_client:
+            code_agent = CodeAgent(
+                tools=[PythonInterpreterTool(), *code_reasoning_client.get_tools()],
+                model=model,
+                additional_authorized_imports=['pandas', 'csv', 'numpy', 'requests', 'matplotlib', 'seaborn', 'plotly', 'scipy', 'sklearn'],
+                name="code_agent",
+                description="Writes and executes Python code to solve computational and data-analysis problems, and reasons about the code and its results."
+            )
+            agents.append(code_agent)
+
+        if youtube_transcript_client:
+            youtube_agent = CodeAgent(
+                tools=[*youtube_transcript_client.get_tools()],
+                model=model,
+                name="youtube_agent",
+                description="Fetches the transcript of a YouTube video."
+            )
+            agents.append(youtube_agent)
+
+        if wikipedia_client:
+            wikipedia_agent = CodeAgent(
+                tools=[*wikipedia_client.get_tools()],
+                model=model,
+                name="wikipedia_agent",
+                description="Searches Wikipedia for information."
+            )
+            agents.append(wikipedia_agent)
+
+        # Create the orchestrator agent if we have any working agents
+        if agents:
+            if fetch_webpage_client:
+                search_agent = CodeAgent(
+                    tools=[DuckDuckGoSearchTool(), *fetch_webpage_client.get_tools()],
+                    model=model,
+                    name="search_agent",
+                    description="Performs web searches to find factual information online."
+                )
+            else:
+                search_agent = CodeAgent(
+                    tools=[VisitWebpageTool(), DuckDuckGoSearchTool()],
+                    model=model,
+                    name="search_agent",
+                    description="Performs web searches to find factual information online."
+                )
+
+            manager_agent = CodeAgent(
+                tools=[],
+                model=model,
+                managed_agents=[search_agent, *agents],
+                name="orchestrator",
+                description="Coordinates specialized agents to solve complex problems."
+            )
+
+            # Run the orchestrator with the given prompt and get the answer
+            answer = manager_agent.run(prompt)
+        else:
+            print("❌ No agents were created - all MCP servers failed to connect")
+            answer = "Error: No agents were available to process the question."
+
+    finally:
+        # Clean up all active clients
+        for client in active_clients:
+            try:
+                client.disconnect()
+            except Exception:
+                pass
+
+    return answer
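
A minimal usage sketch, assuming the custom_mcp module and the MCP server commands above are available in the local environment; the question string is purely illustrative:

from agents import run_orchestrator

# Hypothetical example prompt; any question string works here
answer = run_orchestrator("In which year was the first Wikipedia article published?")
print(answer)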