import os
import subprocess
import tempfile
import shutil
from zipfile import ZipFile
import logging
from flask import Flask, request, jsonify, render_template, send_file

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Initialize Flask app
app = Flask(__name__)

# Registry of donated CPUs, keyed by host name
connected_cpus = {}


# Endpoint to donate CPU resources
@app.route('/donate_cpu', methods=['POST'])
def donate_cpu_handler():
    data = request.get_json()
    host = data['host']
    cpu_count = data['cpu_count']
    connected_cpus[host] = {"cpu_count": cpu_count, "usage": 0.0}
    logger.info(f"CPU donated by {host} with {cpu_count} CPUs.")
    return jsonify({"status": "success", "message": f"CPU donated by {host}"})


# Endpoint to update CPU usage
@app.route('/update_cpu_usage', methods=['POST'])
def update_cpu_usage_handler():
    data = request.get_json()
    host = data['host']
    usage = data['usage']
    if host in connected_cpus:
        connected_cpus[host]['usage'] = usage
        logger.info(f"Updated CPU usage for {host}: {usage}%")
    return jsonify({"status": "success"})


# Run the provided Python script under MPI, using all donated CPUs
def run_script(script_name, folder_path):
    total_cpus = sum(cpu['cpu_count'] for cpu in connected_cpus.values())
    if total_cpus == 0:
        return "No donated CPUs available; connect at least one donor before running a script."
    try:
        result = subprocess.run(
            ['mpiexec', '-n', str(total_cpus), 'python', script_name],
            cwd=folder_path,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
        )
        log_output = result.stdout
    except Exception as e:
        log_output = str(e)
    return log_output


# Handle file uploads and script execution
@app.route('/upload', methods=['POST'])
def handle_upload():
    if 'file' not in request.files or 'script_name' not in request.form:
        return jsonify({"status": "error", "message": "File or script name not provided"}), 400

    files = request.files.getlist('file')
    script_name = request.form['script_name']

    # Create a temporary directory to store the uploaded folder contents
    temp_dir = tempfile.mkdtemp()
    folder_path = os.path.join(temp_dir, 'uploaded_folder')
    os.makedirs(folder_path, exist_ok=True)
    for file_obj in files:
        file_path = os.path.join(folder_path, file_obj.filename)
        # Preserve any sub-directory structure contained in the uploaded file names
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        file_obj.save(file_path)

    # Run the script
    log_output = run_script(script_name, folder_path)

    # Zip the entire folder (including any new files created by the script).
    # The archive is written to the system temp directory under a unique name
    # so the /download/<filename> endpoint can locate it later.
    zip_name = f"{os.path.basename(temp_dir)}_output_folder.zip"
    zip_path = os.path.join(tempfile.gettempdir(), zip_name)
    with ZipFile(zip_path, 'w') as zipf:
        for root, _, filenames in os.walk(folder_path):
            for filename in filenames:
                full_path = os.path.join(root, filename)
                zipf.write(full_path, os.path.relpath(full_path, folder_path))

    # Remove the working directory now that its contents are archived
    shutil.rmtree(temp_dir, ignore_errors=True)

    return jsonify({
        "status": "success",
        "log_output": log_output,
        "download_url": f"/download/{zip_name}",
    })


# Download a previously generated result archive
@app.route('/download/<filename>')
def download_file(filename):
    # Only allow plain file names to avoid path traversal outside the temp directory
    safe_name = os.path.basename(filename)
    return send_file(os.path.join(tempfile.gettempdir(), safe_name), as_attachment=True)


# Endpoint to get connected CPUs information
@app.route('/cpu_info', methods=['GET'])
def get_cpu_info():
    info = []
    for host, data in connected_cpus.items():
        info.append(f"{host}: {data['cpu_count']} CPUs, {data['usage']}% usage")
    return jsonify({"status": "success", "cpu_info": "\n".join(info)})


# Main interface
@app.route('/')
def index():
    return render_template('index.html')


if __name__ == "__main__":
    app.run(host='0.0.0.0', port=7860, threaded=True)
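# ---------------------------------------------------------------------------
# Illustrative donor-client sketch (not part of the server above). It assumes
# the server is reachable at http://localhost:7860 and that the `requests` and
# `psutil` libraries are installed; the host label "donor-node-1" and the
# 5-second reporting interval are made-up values for the example.
#
#   import psutil
#   import requests
#
#   SERVER = "http://localhost:7860"   # assumed server address
#   HOST = "donor-node-1"              # hypothetical donor identifier
#
#   # Register this machine's CPUs with the /donate_cpu endpoint
#   requests.post(f"{SERVER}/donate_cpu",
#                 json={"host": HOST, "cpu_count": psutil.cpu_count()})
#
#   # Periodically report CPU usage to /update_cpu_usage
#   while True:
#       usage = psutil.cpu_percent(interval=5.0)
#       requests.post(f"{SERVER}/update_cpu_usage",
#                     json={"host": HOST, "usage": usage})
#
# A job folder can then be submitted to /upload, e.g. with curl (the file
# names below are placeholders):
#
#   curl -F "file=@solver.py" -F "file=@input.dat" -F "script_name=solver.py" \
#        http://localhost:7860/upload
# ---------------------------------------------------------------------------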