import os

from mcp import StdioServerParameters
from smolagents import (
    CodeAgent,
    DuckDuckGoSearchTool,
    HfApiModel,
    LiteLLMModel,
    PythonInterpreterTool,
    VisitWebpageTool,
)

from custom_mcp import create_mcp_client
def run_orchestrator(prompt):
    """
    Runs the orchestrator agent with the given prompt and returns the answer.

    Args:
        prompt (str): The prompt to run with the orchestrator.

    Returns:
        str: The answer from the orchestrator.
    """
    # Alternative model via OpenRouter/LiteLLM (uncomment to use instead of HfApiModel):
    # model = LiteLLMModel(
    #     model_id="openrouter/google/gemini-2.5-flash-preview",
    #     api_base="https://openrouter.ai/api/v1",
    #     api_key="your_api_key_here",
    #     num_ctx=1000000,
    # )
    model = HfApiModel("Qwen/Qwen2.5-Coder-32B-Instruct")

    # Store active clients to clean up later
    active_clients = []
    def create_client_safely(name, params):
        """Create an MCP client and handle errors gracefully."""
        try:
            client = create_mcp_client(params)
            active_clients.append(client)
            print(f"✅ Successfully connected to {name} MCP server")
            return client
        except Exception as e:
            print(f"❌ Error connecting to {name} MCP server: {e}")
            return None
    # Define MCP server parameters
    wikipedia_params = StdioServerParameters(
        command="npx",
        args=["-y", "wikipedia-mcp"],
        env=os.environ,
    )
    fetch_webpage_params = StdioServerParameters(
        command="uvx",
        args=["mcp-server-fetch"],
        env=os.environ,
    )
    youtube_transcript_params = StdioServerParameters(
        command="uvx",
        args=["--from", "git+https://github.com/jkawamoto/mcp-youtube-transcript", "mcp-youtube-transcript"],
        env=os.environ,
    )
    code_reasoning_params = StdioServerParameters(
        command="npx",
        args=["-y", "@mettamatt/code-reasoning"],
        env=os.environ,
    )
    sequential_thinking_params = StdioServerParameters(
        command="npx",
        args=["-y", "@modelcontextprotocol/server-sequential-thinking"],
        env=os.environ,
    )
    # Connect to MCP servers safely
    wikipedia_client = create_client_safely("Wikipedia", wikipedia_params)
    youtube_transcript_client = create_client_safely("YouTube Transcript", youtube_transcript_params)
    code_reasoning_client = create_client_safely("Code Reasoning", code_reasoning_params)
    sequential_thinking_client = create_client_safely("Sequential Thinking", sequential_thinking_params)
    fetch_webpage_client = create_client_safely("Fetch Webpage", fetch_webpage_params)
    answer = None
    try:
        # Only create agents for services that connected successfully
        agents = []

        if sequential_thinking_client:
            reasoning_agent = CodeAgent(
                tools=[*sequential_thinking_client.get_tools()],
                model=model,
                name="reasoning_agent",
                description="Analyzes complex problems using logical reasoning.",
            )
            agents.append(reasoning_agent)
        if code_reasoning_client:
            code_agent = CodeAgent(
                tools=[PythonInterpreterTool(), *code_reasoning_client.get_tools()],
                model=model,
                additional_authorized_imports=['pandas', 'csv', 'numpy', 'requests', 'matplotlib', 'seaborn', 'plotly', 'scipy', 'sklearn'],
                name="code_agent",
                description="Writes and executes Python code to solve computational and data analysis problems, and reasons about the code and its results.",
            )
            agents.append(code_agent)
        if youtube_transcript_client:
            youtube_agent = CodeAgent(
                tools=[*youtube_transcript_client.get_tools()],
                model=model,
                name="youtube_agent",
                description="Fetches the transcript of a YouTube video.",
            )
            agents.append(youtube_agent)
        if wikipedia_client:
            wikipedia_agent = CodeAgent(
                tools=[*wikipedia_client.get_tools()],
                model=model,
                name="wikipedia_agent",
                description="Searches Wikipedia for information.",
            )
            agents.append(wikipedia_agent)
        # Create the orchestrator agent if we have any working agents
        if agents:
            if fetch_webpage_client:
                search_agent = CodeAgent(
                    tools=[DuckDuckGoSearchTool(), *fetch_webpage_client.get_tools()],
                    model=model,
                    name="search_agent",
                    description="Performs web searches to find factual information online.",
                )
            else:
                # Fall back to the built-in VisitWebpageTool when the fetch MCP server is unavailable
                search_agent = CodeAgent(
                    tools=[VisitWebpageTool(), DuckDuckGoSearchTool()],
                    model=model,
                    name="search_agent",
                    description="Performs web searches to find factual information online.",
                )
            manager_agent = CodeAgent(
                tools=[],
                model=model,
                managed_agents=[search_agent, *agents],
                name="orchestrator",
                description="Coordinates specialized agents to solve complex problems.",
            )
            # Run the orchestrator with the given prompt and get the answer
            answer = manager_agent.run(prompt)
        else:
            print("❌ No agents were created - all MCP servers failed to connect")
            answer = "Error: No agents were available to process the question."
    finally:
        # Clean up all active clients
        for client in active_clients:
            try:
                client.disconnect()
            except Exception:
                pass
    return answer
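

# Minimal usage sketch (an assumption for illustration, not part of the original file):
# it assumes the required MCP server binaries (npx/uvx) and Hugging Face credentials
# are available in the environment, and the example question is purely hypothetical.
if __name__ == "__main__":
    example_prompt = "What year was the Eiffel Tower completed?"
    result = run_orchestrator(example_prompt)
    print(f"Orchestrator answer: {result}")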