File size: 6,017 Bytes
71d4e8a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
from smolagents import CodeAgent, HfApiModel,  DuckDuckGoSearchTool, PythonInterpreterTool, VisitWebpageTool, WikipediaSearchTool
from smolagents import LiteLLMModel
import os
from smolagents import CodeAgent
from mcp import StdioServerParameters
from custom_mcp import create_mcp_client

def run_orchestrator(prompt):
    """
    Run the multi-agent orchestrator on a prompt and return its answer.

    Spawns a set of MCP tool servers (Wikipedia, webpage fetch, YouTube
    transcripts, code reasoning, sequential thinking), builds one specialized
    CodeAgent per server that connected successfully, then delegates the
    prompt to a manager ("orchestrator") agent that coordinates them.
    Servers that fail to start are skipped gracefully rather than aborting
    the whole run.

    Args:
        prompt (str): The question/task for the orchestrator to solve.

    Returns:
        str: The answer from the orchestrator, or an error message if no
            MCP server connected and therefore no agents could be created.
    """
    model = HfApiModel("Qwen/Qwen2.5-Coder-32B-Instruct")

    # Every successfully created client is tracked here so that the
    # `finally` block can disconnect all of them, even on error paths.
    active_clients = []

    def create_client_safely(name, params):
        """Create an MCP client; return None (and log) if the server fails."""
        try:
            client = create_mcp_client(params)
        except Exception as e:
            # A single unreachable server should not abort the whole run.
            print(f"❌ Error connecting to {name} MCP server: {e}")
            return None
        active_clients.append(client)
        print(f"✅ Successfully connected to {name} MCP server")
        return client

    # (display name, launcher command, launcher args) for each MCP server.
    # All servers inherit the current environment (API keys, PATH, etc.).
    server_specs = {
        "wikipedia": ("Wikipedia", "npx", ["-y", "wikipedia-mcp"]),
        "fetch_webpage": ("Fetch Webpage", "uvx", ["mcp-server-fetch"]),
        "youtube_transcript": (
            "YouTube Transcript",
            "uvx",
            ["--from", "git+https://github.com/jkawamoto/mcp-youtube-transcript",
             "mcp-youtube-transcript"],
        ),
        "code_reasoning": ("Code Reasoning", "npx", ["-y", "@mettamatt/code-reasoning"]),
        "sequential_thinking": (
            "Sequential Thinking",
            "npx",
            ["-y", "@modelcontextprotocol/server-sequential-thinking"],
        ),
    }

    # Connect to each MCP server; failed connections yield None entries.
    clients = {
        key: create_client_safely(
            name,
            StdioServerParameters(command=command, args=args, env=os.environ),
        )
        for key, (name, command, args) in server_specs.items()
    }

    answer = None
    try:
        # Only create agents for services that connected successfully.
        agents = []

        if clients["sequential_thinking"]:
            agents.append(CodeAgent(
                tools=[*clients["sequential_thinking"].get_tools()],
                model=model,
                name="reasoning_agent",
                description="Analyzes complex problems using logical reasoning."
            ))

        if clients["code_reasoning"]:
            agents.append(CodeAgent(
                tools=[PythonInterpreterTool(), *clients["code_reasoning"].get_tools()],
                model=model,
                additional_authorized_imports=['pandas', 'csv', 'numpy', 'requests',
                                               'matplotlib', 'seaborn', 'plotly',
                                               'scipy', 'sklearn'],
                name="code_agent",
                description="Writes and executes Python code to solve computational and data analysis problems. Also reason about the code and the results."
            ))

        if clients["youtube_transcript"]:
            agents.append(CodeAgent(
                tools=[*clients["youtube_transcript"].get_tools()],
                model=model,
                name="youtube_agent",
                description="Gets the transcript of a youtube video."
            ))

        if clients["wikipedia"]:
            agents.append(CodeAgent(
                tools=[*clients["wikipedia"].get_tools()],
                model=model,
                name="wikipedia_agent",
                description="Searches Wikipedia for information."
            ))

        if agents:
            # The search agent prefers the MCP fetch tool; falls back to the
            # built-in VisitWebpageTool when that server is unavailable.
            if clients["fetch_webpage"]:
                search_tools = [DuckDuckGoSearchTool(),
                                *clients["fetch_webpage"].get_tools()]
            else:
                search_tools = [VisitWebpageTool(), DuckDuckGoSearchTool()]
            search_agent = CodeAgent(
                tools=search_tools,
                model=model,
                name="search_agent",
                description="Performs web searches to find factual information online."
            )

            manager_agent = CodeAgent(
                tools=[],
                model=model,
                managed_agents=[search_agent, *agents],
                name="orchestrator",
                description="Coordinates specialized agents to solve complex problems."
            )

            # Run the orchestrator with the given prompt and get the answer.
            answer = manager_agent.run(prompt)
        else:
            print("❌ No agents were created - all MCP servers failed to connect")
            answer = "Error: No agents were available to process the question."

    finally:
        # Best-effort cleanup: disconnect every client we managed to open.
        for client in active_clients:
            try:
                client.disconnect()
            except Exception as e:
                # Narrowed from a bare `except:` — never swallow
                # KeyboardInterrupt/SystemExit during cleanup.
                print(f"⚠️ Error disconnecting MCP client: {e}")

    return answer