Update tests.py
tests.py CHANGED
@@ -1,10 +1,6 @@
 from mcp.server.fastmcp import FastMCP
-import random
 import time
 from litellm import completion
-import shlex
-from subprocess import Popen, PIPE
-from threading import Timer
 import os
 import glob
 import http.client
@@ -14,7 +10,6 @@ import shutil
 from google import genai
 import pexpect
 
-
 client = genai.Client(api_key="AIzaSyDtP05TyoIy9j0uPL7_wLEhgQEE75AZQSc")
 source_dir = "/app/uploads/temp"
 destination_dir = "/app/code_interpreter"
@@ -33,74 +28,33 @@ from bs4 import BeautifulSoup # For parsing HTML
 
 Parent=pexpect.spawn('bash')
 
-def download_all_files(base_url, files_endpoint, download_directory):
-    """Downloads all files listed on the server's /upload page."""
-    global downloaded_files
-
-    # Create the download directory if it doesn't exist
-    if not os.path.exists(download_directory):
-        os.makedirs(download_directory)
 
-    try:
-        # 1. Get the HTML of the /upload page
-        files_url = f"{base_url}{files_endpoint}"
-        response = requests.get(files_url)
-        response.raise_for_status() # Check for HTTP errors
-
-        # 2. Parse the HTML using BeautifulSoup
-        soup = BeautifulSoup(response.content, "html.parser")
-
-        # 3. Find all the <a> (anchor) tags, which represent the links to the files
-        # This assumes the file links are inside <a> tags as shown in the server code
-        file_links = soup.find_all("a")
-
-        # 4. Iterate through the links and download the files
-        for link in file_links:
-            try:
-                file_url = link.get("href") # Extract the href attribute (the URL)
-                if file_url:
-                    # Construct the full file URL if the href is relative
-                    if not file_url.startswith("http"):
-                        file_url = f"{base_url}{file_url}" # Relative URLs
-
-                    filename = os.path.basename(file_url) # Extract the filename from the URL
-                    file_path = os.path.join(download_directory, filename)
-                    if filename in downloaded_files:
-                        pass
-                    else:
-                        downloaded_files.append(filename)
-                        print(f"Downloading: {filename} from {file_url}")
-
-                        # Download the file
-                        file_response = requests.get(file_url, stream=True) # Use stream=True for large files
-                        file_response.raise_for_status() # Check for HTTP errors
-
-                        with open(file_path, "wb") as file: # Open in binary write mode
-                            for chunk in file_response.iter_content(chunk_size=8192): # Iterate and write in chunks (good for large files)
-                                if chunk: # filter out keep-alive new chunks
-                                    file.write(chunk)
-
-                        print(f"Downloaded: {filename} to {file_path}")
-
-            except requests.exceptions.RequestException as e:
-                print(f"Error downloading {link.get('href')}: {e}")
-            except OSError as e: #Handles potential issues with file permissions or disk space.
-                print(f"Error saving {filename}: {e}")
-
-    except requests.exceptions.RequestException as e:
-        print(f"Error getting file list from server: {e}")
-    except Exception as e: # Catch all other potential errors
-        print(f"An unexpected error occurred: {e}")
 
 def transfer_files():
-
-
-
-
-
-
-
-
+    try:
+        for item in os.listdir(source_dir):
+            item_path = os.path.join(source_dir, item)
+            if os.path.isdir(item_path): # Check if it's a directory
+                for filename in os.listdir(item_path):
+                    source_file_path = os.path.join(item_path, filename)
+                    destination_file_path = os.path.join(destination_dir, filename)
+                    if not os.path.exists(destination_file_path):
+                        shutil.move(source_file_path, destination_file_path)
+    except:
+        pass
+def transfer_files2():
+    try:
+        for item in os.listdir("/app/uploads"):
+            if "temp" not in item:
+                item_path = os.path.join(source_dir, item)
+                if os.path.isdir(item_path): # Check if it's a directory
+                    for filename in os.listdir(item_path):
+                        source_file_path = os.path.join(item_path, filename)
+                        destination_file_path = os.path.join(destination_dir, filename.split("__")[1])
+                        if not os.path.exists(destination_file_path):
+                            shutil.move(source_file_path, destination_file_path)
+    except:
+        pass
 def upload_file(file_path, upload_url):
     """Uploads a file to the specified server endpoint."""
 
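Note on the two helpers introduced above: transfer_files() moves files out of /app/uploads/temp under their original names, while transfer_files2() walks the other /app/uploads subfolders and strips everything before the first "__" from each filename. A minimal sketch of that renaming rule, using a made-up upload name for illustration:

import os

# Hypothetical upload name of the form "<upload_id>__<original_name>"
uploaded = "a1b2c3__report.xlsx"

# transfer_files2() keeps only the part after the first "__"
restored = uploaded.split("__")[1]

print(os.path.join("/app/code_interpreter", restored))  # /app/code_interpreter/report.xlsx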
@@ -149,7 +103,7 @@ def run(cmd, timeout_sec,forever_cmd):
 
         Parent.sendline(command)
         Parent.readline().decode()
-        return str(Parent.readline().decode())
+        return str(Parent.readline().decode()) + "[INFO] The opened ports can be externally accessed at https://suitable-liked-ibex.ngrok-free.app/ "
     t=time.time()
     child = pexpect.spawn("bash")
     output=""
@@ -161,7 +115,7 @@ def run(cmd, timeout_sec,forever_cmd):
 
     child.sendline(command)
 
-    while (not child.eof() ) and (time.time()-t<
+    while (not child.eof() ) and (time.time()-t<300):
        x=child.readline().decode()
        output=output+x
        print(x)
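The change above pins the read loop to a 300-second budget. The pattern in run() is: spawn a bash child with pexpect, send the command, then keep reading lines until EOF or the deadline passes. A standalone sketch of the same bounded-read loop, assuming pexpect is installed (the echo command is just a placeholder):

import time
import pexpect

child = pexpect.spawn("bash")          # interactive shell, as in run()
t = time.time()
child.sendline("echo hello; exit")     # placeholder command

output = ""
# Read until the shell exits or the 300-second budget is spent
while (not child.eof()) and (time.time() - t < 300):
    line = child.readline().decode()
    output += line
    print(line, end="")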
@@ -176,7 +130,7 @@ def run(cmd, timeout_sec,forever_cmd):
 @mcp.tool()
 def analyse_audio(audiopath,query) -> dict:
     """Ask another AI model about audios.The AI model can listen to the audio and give answers.Eg-query:Generate detailed minutes of meeting from the audio clip,audiopath='/app/code_interpreter/<audioname>'.Note:The audios are automatically present in the /app/code_interpreter directory."""
-
+    transfer_files2()
     myfile = client.files.upload(file=audiopath)
 
     response = client.models.generate_content(
@@ -188,7 +142,7 @@ def analyse_audio(audiopath,query) -> dict:
 @mcp.tool()
 def analyse_video(videopath,query) -> dict:
     """Ask another AI model about videos.The AI model can see the videos and give answers.Eg-query:Create a very detailed transcript and summary of the video,videopath='/app/code_interpreter/<videoname>'Note:The videos are automatically present in the /app/code_interpreter directory."""
-
+    transfer_files2()
     video_file = client.files.upload(file=videopath)
 
     while video_file.state.name == "PROCESSING":
@@ -209,7 +163,7 @@ def analyse_video(videopath,query) -> dict:
 @mcp.tool()
 def analyse_images(imagepath,query) -> dict:
     """Ask another AI model about images.The AI model can see the images and give answers.Eg-query:Who is the person in this image?,imagepath='/app/code_interpreter/<imagename>'.Note:The images are automatically present in the /app/code_interpreter directory."""
-
+    transfer_files2()
     video_file = client.files.upload(file=imagepath)
 
 
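The three analyse_* tools now stage uploads via transfer_files2() before handing the file to Gemini. Usage follows the pattern in their docstrings; a small sketch below, with placeholder filenames:

analyse_audio(audiopath="/app/code_interpreter/meeting.mp3",
              query="Generate detailed minutes of meeting from the audio clip")

analyse_images(imagepath="/app/code_interpreter/photo.png",
               query="Who is the person in this image?")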
@@ -219,12 +173,26 @@ def analyse_images(imagepath,query) -> dict:
     )
     return {"Output":str(response.text)}
 
+
+# @mcp.tool()
+# def generate_images(imagepath,query) -> dict:
+#     """Ask another AI model to generate images based on the query and the image path.Set image path as an empty string , if you dont want to edit images , but rather generate images.Eg-query:Generate a cartoon version of this image,imagepath='/app/code_interpreter/<imagename>'.Note:The images are automatically present in the /app/code_interpreter directory."""
+#     transfer_files2()
+#     video_file = client.files.upload(file=imagepath)
+
+
+#     response = client.models.generate_content(
+#         model='gemini-2.0-flash',
+#         contents=[query, video_file]
+#     )
+#     return {"Output":str(response.text)}
+
 @mcp.tool()
 def create_code_files(filename: str, code: str) -> dict:
     """Create code files by passing the the filename as well the entire code to write.The file is created by default in the /app/code_interpreter directory.Note:All user uploaded files that you might need to work upon are stored in the /app/code_interpreter directory."""
     global destination_dir
-    download_all_files("https://opengpt-4ik5.onrender.com", "/upload", "/app/code_interpreter")
     transfer_files()
+    transfer_files2()
     f = open(os.path.join(destination_dir, filename), "w")
     f.write(code)
     f.close()
@@ -253,7 +221,7 @@ def run_code(python_packages:str,filename: str, code: str,start_cmd:str,forever_
     stdot=run(
         f"{command} --break-system-packages {package_names}", timeout_sec=300,forever_cmd= 'false'
     )
-
+    transfer_files2()
     transfer_files()
     f = open(os.path.join(destination_dir, filename), "w")
     f.write(code)
@@ -275,7 +243,28 @@ def run_code(python_packages:str,filename: str, code: str,start_cmd:str,forever_
 
 @mcp.tool()
 def run_code_files(start_cmd:str,forever_cmd:str) -> dict:
-    """
+    """Executes a shell command to run code files from /app/code_interpreter.
+
+    Runs the given `start_cmd`. The execution behavior depends on `forever_cmd`.
+    Any server/website started should use port 1337.
+
+    Args:
+        start_cmd (str): The shell command to execute the code.
+                         (e.g., ``python /app/code_interpreter/app.py``).
+                         Files must be in ``/app/code_interpreter``.
+        forever_cmd (str): Execution mode.
+                           - ``'true'``: Runs indefinitely (for servers/websites).
+                           - ``'false'``: Runs up to 300s, captures output.
+
+    Returns:
+        dict: A dictionary containing:
+              - ``'output'`` (str): Captured stdout (mainly when forever_cmd='false').
+              - ``'Files_download_link'`` (Any): Links/identifiers for downloadable files.
+
+    Notes:
+        - After execution, embed a download link (or display images/gifs/videos directly in markdown format) in your response.
+
+    """
     global files_list
 
     stdot=run(start_cmd, 300,forever_cmd)
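Following the new docstring, a caller passes the shell command that launches the file plus the execution mode. A hedged usage sketch (server.py is a hypothetical script name; app.py comes from the docstring example):

# One-shot script: runs within the 300 s window and returns captured output
result = run_code_files(start_cmd="python /app/code_interpreter/app.py", forever_cmd="false")
print(result["output"])

# Long-running server on port 1337, exposed through the ngrok tunnel
run_code_files(start_cmd="python /app/code_interpreter/server.py", forever_cmd="true")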
@@ -294,9 +283,39 @@ def run_code_files(start_cmd:str,forever_cmd:str) -> dict:
 
 @mcp.tool()
 def run_shell_command(cmd:str,forever_cmd:str) -> dict:
-    """
-
+    """Executes a shell command in a sandboxed Alpine Linux environment.
+
+    Runs the provided `cmd` string within a bash shell. Commands are executed
+    relative to the `/app/code_interpreter/` working directory by default.
+    The execution behavior (indefinite run vs. timeout) is controlled by
+    the `forever_cmd` parameter.
+
+    Important Environment Notes:
+        - The execution environment is **Alpine Linux**. Commands should be
+          compatible (e.g., `apk add` instead of `apt-get install`).
+        - `sudo` is not available and not required.
+        - Standard bash features like `&&`, `||`, pipes (`|`), etc., are supported.
+        - When installing python packages , add an argument --break-system-packages to the pip install command.
+
+    Args:
+        cmd (str): The shell command to execute.
+                   Example: ``mkdir test_dir && ls -l``
+        forever_cmd (str): Determines the execution mode.
+                           - ``'true'``: Runs the command indefinitely. Suitable
+                             for starting servers or long-running processes.
+                             Output capture might be limited.
+                           - ``'false'``: Runs the command until completion or
+                             a 300-second timeout, whichever comes first.
+                             Captures standard output.
+
+    Returns:
+        dict: A dictionary containing the execution results:
+              - ``'output'`` (str): The captured standard output (stdout) and potentially
+                standard error (stderr) from the command.
+    """
     transfer_files()
+    transfer_files2()
+    output=run(cmd, 300,forever_cmd)
     return {"output":output}
 
 
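Likewise for run_shell_command: the docstring spells out that the box is Alpine and that pip needs --break-system-packages. A small usage sketch along those lines (the package and directory names are only illustrative):

# Quick, bounded command: finishes well inside the 300 s limit
print(run_shell_command(cmd="mkdir test_dir && ls -l", forever_cmd="false")["output"])

# Alpine uses apk rather than apt-get, and pip needs the extra flag
run_shell_command(cmd="apk add ffmpeg", forever_cmd="false")
run_shell_command(cmd="pip install requests --break-system-packages", forever_cmd="false")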
@@ -318,7 +337,7 @@ def install_python_packages(python_packages:str) -> dict:
 
 @mcp.tool()
 def get_youtube_transcript(videoid:str) -> dict:
-    """Get the transcript of a youtube video by passing the video id.
+    """Get the transcript of a youtube video by passing the video id.Eg videoid=ZacjOVVgoLY"""
     conn = http.client.HTTPSConnection("youtube-transcript3.p.rapidapi.com")
     headers = {
         'x-rapidapi-key': "2a155d4498mshd52b7d6b7a2ff86p10cdd0jsn6252e0f2f529",
@@ -334,7 +353,7 @@ def get_youtube_transcript(videoid:str) -> dict:
 def read_excel_file(filename) -> dict:
     """Reads the contents of an excel file.Returns a dict with key :value pair = cell location:cell content.Always run this command first , when working with excels.The excel file is automatically present in the /app/code_interpreter directory. """
     global destination_dir
-
+    transfer_files2()
 
     workbook = openpyxl.load_workbook(os.path.join(destination_dir, filename))
 
@@ -385,50 +404,11 @@ def scrape_websites(url_list:list,query:str) -> list:
     return {"website_content":Output}
 
 
-@mcp.tool()
-def deepthinking1(query:str,info:str) -> dict:
-    """Ask another intelligent AI about the query.Ask the question defined by the query string and what you know about the question as well as provide your own knowledge and ideas about the question through the info string."""
-    response = completion(
-        model="groq/deepseek-r1-distill-llama-70b",
-        messages=[
-            {"role": "user", "content": f"{query}.Here is what i Know about the query:{info}"}
-        ],
-        stream=False
-    )
-
-
-    return {"response":str(response.choices[0].message.content)}
-
-@mcp.tool()
-def deepthinking2(query:str,info:str) -> dict:
-    """Ask another intelligent AI about the query.Ask the question defined by the query string and what you know about the question as well as provide your own knowledge and ideas about the question through the info string."""
-    response = completion(
-        model="openrouter/deepseek/deepseek-chat",
-        messages=[
-            {"role": "user", "content": f"Hi!"}],
-        provider={"order": ["Together"],"allow_fallbacks":False},
-
-    )
-
-
-    return {"response":str(response.choices[0].message.content)}
-
-@mcp.tool()
-def deepthinking3(query:str,info:str) -> dict:
-    """Ask another intelligent AI about the query.Ask the question defined by the query string and what you know about the question as well as provide your own knowledge and ideas about the question through the info string."""
-    response = completion(
-        model="gemini/gemini-2.0-flash-thinking-exp-01-21",
-        messages=[
-            {"role": "user", "content": f"{query}.Here is what i Know about the query:{info}"}
-        ],
-    )
-
 
-    return {"response":str(response.choices[0].message.content)}
 
 if __name__ == "__main__":
     # Initialize and run the server
     Ngrok=pexpect.spawn('bash')
-    Ngrok.sendline("ngrok http 1337")
+    Ngrok.sendline("ngrok http --url=suitable-liked-ibex.ngrok-free.app 1337")
     Ngrok.readline().decode()
     mcp.run(transport='stdio')
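The startup block now requests the reserved ngrok domain instead of a random one. To confirm the tunnel actually came up, one option (an assumption about the ngrok agent, which normally exposes a local inspection API on port 4040; nothing in this diff depends on it) is to poll that API with the http.client module already imported in this file:

import http.client
import json

# Assumes the default ngrok agent inspection API at 127.0.0.1:4040
conn = http.client.HTTPConnection("127.0.0.1", 4040)
conn.request("GET", "/api/tunnels")
tunnels = json.loads(conn.getresponse().read().decode())

for t in tunnels.get("tunnels", []):
    print(t.get("public_url"))  # expected to include https://suitable-liked-ibex.ngrok-free.app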