import os
import subprocess
import tempfile
import logging
import multiprocessing
from zipfile import ZipFile

import psutil
from flask import Flask, request, jsonify, render_template, send_file
from werkzeug.utils import secure_filename

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Initialize Flask app
app = Flask(__name__)

# Registry of hosts offering CPUs; the local host is always present.
# psutil.cpu_count(logical=False) can return None, hence the fallback to 1.
connected_cpus = {
    "localhost": {"cpu_count": psutil.cpu_count(logical=False) or 1, "usage": 0.0}
}


# Endpoint to donate CPU resources
@app.route('/donate_cpu', methods=['POST'])
def donate_cpu_handler():
    data = request.get_json()
    host = data['host']
    cpu_count = data['cpu_count']
    connected_cpus[host] = {"cpu_count": cpu_count, "usage": 0.0}
    logger.info(f"CPU donated by {host} with {cpu_count} CPUs.")
    return jsonify({"status": "success", "message": f"CPU donated by {host}"})


# Endpoint to update CPU usage
@app.route('/update_cpu_usage', methods=['POST'])
def update_cpu_usage_handler():
    data = request.get_json()
    host = data['host']
    usage = data['usage']
    if host in connected_cpus:
        connected_cpus[host]['usage'] = usage
        logger.info(f"Updated CPU usage for {host}: {usage}%")
    return jsonify({"status": "success"})


# Worker for run_script(); defined at module level because multiprocessing
# pickles the task function to send it to pool workers, and nested functions
# cannot be pickled.
def run_script_worker(args):
    _cpu_id, script_path, folder_path = args
    # Capture the script's stdout/stderr in a temporary file.
    with tempfile.TemporaryFile(mode='w+t') as output_log:
        try:
            subprocess.run(
                ['python', script_path],
                cwd=folder_path,
                stdout=output_log,
                stderr=subprocess.STDOUT,
            )
            output_log.seek(0)
            return output_log.read()
        except Exception as e:
            return str(e)


# Run the provided Python script once per advertised CPU using multiprocessing.
# Note: remote hosts only contribute their advertised CPU count; every copy of
# the script still executes on this machine.
def run_script(script_content, folder_path):
    script_path = os.path.join(folder_path, 'user_script.py')
    with open(script_path, 'w') as script_file:
        script_file.write(script_content)

    # Collect all available CPUs, including the local host's.
    total_cpus = sum(cpu['cpu_count'] for cpu in connected_cpus.values())

    with multiprocessing.Pool(total_cpus) as pool:
        log_outputs = pool.map(
            run_script_worker,
            [(cpu_id, script_path, folder_path) for cpu_id in range(total_cpus)],
        )
    return '\n'.join(log_outputs)


# Endpoint to handle file uploads and script execution
@app.route('/upload', methods=['POST'])
def handle_upload():
    if 'file' not in request.files or 'script_content' not in request.form:
        return jsonify({"status": "error",
                        "message": "File or script content not provided"}), 400

    files = request.files.getlist('file')
    script_content = request.form['script_content']

    # Create a temporary directory to store the uploaded files
    temp_dir = tempfile.mkdtemp()
    folder_path = os.path.join(temp_dir, 'uploaded_folder')
    os.makedirs(folder_path, exist_ok=True)
    for file_obj in files:
        # secure_filename() guards against path traversal in user-supplied names.
        file_path = os.path.join(folder_path, secure_filename(file_obj.filename))
        file_obj.save(file_path)

    # Run the script
    log_output = run_script(script_content, folder_path)

    # Zip the entire folder (including any new files created by the script).
    # The archive goes into the system temp directory, where download_file()
    # looks for it; mkstemp() gives each upload a unique name so concurrent
    # requests do not overwrite each other.
    fd, zip_path = tempfile.mkstemp(prefix='output_', suffix='.zip')
    os.close(fd)
    with ZipFile(zip_path, 'w') as zipf:
        for root, _, filenames in os.walk(folder_path):
            for filename in filenames:
                abs_path = os.path.join(root, filename)
                zipf.write(abs_path, os.path.relpath(abs_path, folder_path))

    return jsonify({
        "status": "success",
        "log_output": log_output,
        "download_url": f"/download/{os.path.basename(zip_path)}",
    })


# Endpoint to download the zipped results
@app.route('/download/<filename>')
def download_file(filename):
    # secure_filename() keeps the lookup inside the temp directory.
    return send_file(os.path.join(tempfile.gettempdir(), secure_filename(filename)),
                     as_attachment=True)

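# ---------------------------------------------------------------------------
# Example donor client (a sketch, not wired into the app). It shows how a
# remote machine could call /donate_cpu and /update_cpu_usage. The `requests`
# dependency, the default server URL, and the reporting interval are
# illustrative assumptions; run it from a separate process, e.g.
#   python -c "from app import example_donor_client; example_donor_client()"
# ---------------------------------------------------------------------------
def example_donor_client(server_url="http://localhost:7860", interval=5.0):
    import socket

    import requests  # assumed installed; the server itself does not need it

    host = socket.gethostname()
    # Advertise this machine's physical CPU count to the server.
    requests.post(
        f"{server_url}/donate_cpu",
        json={"host": host, "cpu_count": psutil.cpu_count(logical=False) or 1},
    )
    # Report CPU utilisation at a fixed interval until interrupted;
    # cpu_percent(interval=...) blocks for that long and returns a percentage.
    while True:
        usage = psutil.cpu_percent(interval=interval)
        requests.post(
            f"{server_url}/update_cpu_usage",
            json={"host": host, "usage": usage},
        )
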
# Endpoint to get connected CPUs information
@app.route('/cpu_info', methods=['GET'])
def get_cpu_info():
    info = []
    for host, data in connected_cpus.items():
        info.append(f"{host}: {data['cpu_count']} CPUs, {data['usage']}% usage")
    return jsonify({"status": "success", "cpu_info": "\n".join(info)})


# Main interface
@app.route('/')
def index():
    return render_template('index.html')


if __name__ == "__main__":
    app.run(host='0.0.0.0', port=7860, threaded=True)
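
# ---------------------------------------------------------------------------
# Example request against /upload (a sketch, not part of the app). It assumes
# the server is running locally on port 7860, that `requests` is installed,
# and that "data.csv" is a placeholder file name. Run from a separate shell:
#
#   import requests
#   with open("data.csv", "rb") as f:
#       resp = requests.post(
#           "http://localhost:7860/upload",
#           files={"file": ("data.csv", f)},
#           data={"script_content": "print('hello from user_script.py')"},
#       )
#   print(resp.json()["log_output"])
# ---------------------------------------------------------------------------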