# compute_pool/app.py
# Author: Oscar Wang
import gradio as gr
import os
import subprocess
import tempfile
import shutil
from zipfile import ZipFile
import logging
import json
import threading
import psutil
# Module-level logging setup.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Registry of donated hosts: host -> {"cpu_count": int, "usage": float}.
connected_cpus = {}


def donate_cpu(data):
    """Register a host that donates CPUs to the shared pool.

    Args:
        data: Mapping with keys 'host' (str) and 'cpu_count' (int).

    Returns:
        dict: Acknowledgement with "status" and a human-readable "message".
    """
    donor = data['host']
    count = data['cpu_count']
    connected_cpus[donor] = {"cpu_count": count, "usage": 0.0}
    logger.info(f"CPU donated by {donor} with {count} CPUs.")
    return {"status": "success", "message": f"CPU donated by {donor}"}
def update_cpu_usage(data):
    """Record the latest CPU-usage reading for a registered host.

    Readings from hosts that never donated are ignored silently; the
    call still reports success either way.

    Args:
        data: Mapping with keys 'host' (str) and 'usage' (number, percent).

    Returns:
        dict: {"status": "success"} unconditionally.
    """
    reporter = data['host']
    reading = data['usage']
    entry = connected_cpus.get(reporter)
    if entry is not None:
        entry['usage'] = reading
        logger.info(f"Updated CPU usage for {reporter}: {reading}%")
    return {"status": "success"}
def run_script(script_name, folder_path):
    """Run *script_name* under MPI inside *folder_path* and capture its output.

    The MPI world size is the total number of CPUs donated across all
    connected hosts. If nothing has been donated yet we launch a single
    rank instead of asking mpiexec for 0 processes (which is an error).

    Args:
        script_name: File name of the Python script, relative to folder_path.
        folder_path: Working directory holding the script and its data.

    Returns:
        str: Combined stdout/stderr of the run; if the launch itself fails
        (e.g. mpiexec is not installed), the exception text instead.
    """
    try:
        total_cpus = sum(cpu['cpu_count'] for cpu in connected_cpus.values())
        # Guard against an empty pool: mpiexec rejects -n 0.
        ranks = max(total_cpus, 1)
        with tempfile.TemporaryFile(mode='w+t') as output_log:
            result = subprocess.run(
                ['mpiexec', '-n', str(ranks), 'python', script_name],
                cwd=folder_path,
                stdout=output_log,
                stderr=subprocess.STDOUT,
            )
            output_log.seek(0)
            log_output = output_log.read()
        if result.returncode != 0:
            # Surface failed runs explicitly instead of discarding the code.
            log_output += f"\n[mpiexec exited with code {result.returncode}]"
    except Exception as e:
        log_output = str(e)
    return log_output
def handle_upload(folder, script_name):
    """Stage uploaded files, execute the named script, and zip the results.

    Args:
        folder: Uploaded files. Modern Gradio `gr.File(file_count="multiple")`
            passes a list of temp-file paths (or objects exposing a `.name`
            path); legacy callers may pass a mapping of file name -> file-like
            object. Both shapes are accepted.
        script_name: Name of the Python script to run inside the upload.

    Returns:
        tuple[str, str]: (captured log output, path to a zip of the staged
        folder, including any files the script created).
    """
    # Temporary workspace; the zip must outlive this call so the UI can
    # serve it, hence no cleanup here.
    temp_dir = tempfile.mkdtemp()
    folder_path = os.path.join(temp_dir, 'uploaded_folder')
    os.makedirs(folder_path, exist_ok=True)
    _stage_files(folder, folder_path)
    # Run the script with the staged files as its working directory.
    log_output = run_script(script_name, folder_path)
    # Zip the whole folder so outputs created by the script are included.
    zip_path = os.path.join(temp_dir, 'output_folder.zip')
    with ZipFile(zip_path, 'w') as zipf:
        for root, _, files in os.walk(folder_path):
            for file in files:
                full = os.path.join(root, file)
                zipf.write(full, os.path.relpath(full, folder_path))
    return log_output, zip_path


def _stage_files(folder, folder_path):
    """Copy uploaded files into folder_path; accepts a mapping, paths, or file objects."""
    if folder is None:
        return
    if hasattr(folder, 'items'):
        # Legacy shape: mapping of file name -> file-like object.
        for file_name, file_obj in folder.items():
            with open(os.path.join(folder_path, file_name), 'wb') as f:
                f.write(file_obj.read())
        return
    for item in folder:
        # gr.File with file_count="multiple" yields temp-file paths, or
        # wrapper objects exposing the path via `.name`.
        src = item if isinstance(item, str) else getattr(item, 'name', None)
        if src:
            shutil.copy(src, os.path.join(folder_path, os.path.basename(src)))
def get_cpu_info():
    """Render the donated-CPU registry as one human-readable line per host."""
    lines = (
        f"{host}: {stats['cpu_count']} CPUs, {stats['usage']}% usage"
        for host, stats in connected_cpus.items()
    )
    return "\n".join(lines)
def gradio_interface():
    """Build and launch the Gradio UI plus the donate_cpu API endpoint.

    NOTE(review): the original indentation was lost; folder/script inputs
    are assumed to share the Row — confirm against the intended layout.
    """
    with gr.Blocks() as demo:
        gr.Markdown("## Python Script Executor with Distributed Computing")
        with gr.Row():
            folder = gr.File(label="Upload Folder", file_count="multiple", file_types=['file'])
            script_name = gr.Textbox(label="Python Script Name")
        log_output = gr.Textbox(label="Log Output", interactive=False)
        output_folder = gr.File(label="Download Output Folder")
        cpu_info = gr.Textbox(label="Connected CPUs Info", interactive=False)
        run_button = gr.Button("Run Script")
        refresh_button = gr.Button("Refresh CPU Info")
        run_button.click(fn=handle_upload, inputs=[folder, script_name], outputs=[log_output, output_folder])
        refres_button = refresh_button  # alias kept local; no behavior change
        refresh_button.click(fn=get_cpu_info, inputs=[], outputs=[cpu_info])
        # gr.Blocks has no .api() method — the original `demo.api(...)`
        # raised AttributeError at startup. Expose donate_cpu as a named
        # API endpoint via hidden components instead, reachable by clients
        # through the Gradio API under the name "donate_cpu".
        donate_in = gr.JSON(visible=False)
        donate_out = gr.JSON(visible=False)
        donate_btn = gr.Button("Donate CPU", visible=False)
        donate_btn.click(fn=donate_cpu, inputs=donate_in, outputs=donate_out, api_name="donate_cpu")
    demo.launch()


if __name__ == "__main__":
    gradio_interface()