from multiprocessing import Pool, cpu_count
import os
import numpy as np
import json
import sys

sys.path.append("/home/judson/Neural-Networks-in-GNC/inverted_pendulum/analysis")
from simulation import run_simulation
from data_processing import get_controller_files
from analysis_conditions import analysis_conditions

# Loss function names (originals and their mirrored reciprocals)
loss_functions = [
    "one_fifth", "one_fourth", "one_third", "one_half",
    "one", "two", "three", "four", "five"
]

# Simulation parameters
epoch_range = (0, 1000)  # Start and end epoch
epoch_step = 1           # Interval between epochs
dt = 0.02                # Time step for simulation
num_steps = 500          # Number of simulation steps

# Compute the time array once (same for all epochs)
time_values = list(np.arange(num_steps) * dt)

# Directory to save results (do not delete if it exists)
output_dir = "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/analysis/base_loss"


def default_converter(o):
    """Convert numpy types to native Python types for JSON serialization."""
    if isinstance(o, np.integer):
        return int(o)
    if isinstance(o, np.floating):
        return float(o)
    if isinstance(o, np.ndarray):
        return o.tolist()
    raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable")


# Guard the sweep so worker processes do not re-run it when multiprocessing
# uses the "spawn" start method (the default on Windows and macOS).
if __name__ == "__main__":
    os.makedirs(output_dir, exist_ok=True)

    # Run simulations for each condition and save a separate JSON file per loss function
    for condition_name, initial_condition in analysis_conditions.items():
        print(f"Running condition: {condition_name}")

        # Create a folder for this condition
        condition_dir = os.path.join(output_dir, condition_name)
        os.makedirs(condition_dir, exist_ok=True)

        # Create a data subdirectory to store individual loss function JSON files
        data_dir = os.path.join(condition_dir, "data")
        os.makedirs(data_dir, exist_ok=True)

        # Process each loss function separately
        for loss_function in loss_functions:
            print(f"  Processing loss function: {loss_function}")

            # Directory holding the controller checkpoints for this loss function
            directory = f"/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss/{loss_function}/controllers"
            controllers = get_controller_files(directory, epoch_range, epoch_step)

            # One task tuple per controller, in the argument order run_simulation expects
            tasks = [(c, initial_condition, directory, dt, num_steps) for c in controllers]

            with Pool(min(cpu_count(), 16)) as pool:
                results = pool.map(run_simulation, tasks)

            results.sort(key=lambda x: x[0])  # Sort by epoch (assumed to be x[0])
            epochs, state_histories, _ = zip(*results)

            # Extract theta from each state history (first state component)
            theta_over_epochs = [
                [float(state[0]) for state in history] for history in state_histories
            ]
            epochs = [float(ep) for ep in epochs]

            # Collect the results for this loss function
            result_data = {
                "epochs": epochs,
                "theta_over_epochs": theta_over_epochs,
                "time": time_values  # Single time array shared by all epochs
            }

            # Save results to a JSON file named <loss_function>.json in the data directory
            output_path = os.path.join(data_dir, f"{loss_function}.json")
            with open(output_path, "w") as f:
                json.dump(result_data, f, default=default_converter, indent=2)

            print(f"  Saved results for {loss_function} to {output_path}.")
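
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the pipeline): when debugging a single
# controller, run_simulation can be called serially with the same task tuple
# the parallel path builds above, bypassing the Pool. The 3-tuple unpacking
# mirrors the `epochs, state_histories, _ = zip(*results)` line; the printed
# fields are assumptions about the result layout, not a documented interface,
# and the function is defined here but never called.
# ---------------------------------------------------------------------------
def debug_single_run(controller_file, initial_condition, directory):
    """Run one simulation serially, without the Pool, for inspection."""
    task = (controller_file, initial_condition, directory, dt, num_steps)
    epoch, state_history, extra = run_simulation(task)
    print(f"Epoch {epoch}: {len(state_history)} steps, "
          f"initial theta = {float(state_history[0][0]):.4f}")
    return epoch, state_history, extra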
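
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the pipeline): how a downstream script
# might load one of the JSON files written above and plot theta(t) for a few
# epochs. The schema (epochs / theta_over_epochs / time) matches exactly what
# this script writes; matplotlib is an assumed extra dependency, and the
# function is defined but never called here.
# ---------------------------------------------------------------------------
def plot_theta_from_json(json_path, epoch_indices=(0, -1)):
    """Plot theta vs. time for selected epochs from a saved result file."""
    import matplotlib.pyplot as plt  # assumed available in the analysis env

    with open(json_path) as f:
        data = json.load(f)

    for idx in epoch_indices:
        plt.plot(
            data["time"],
            data["theta_over_epochs"][idx],
            label=f"epoch {data['epochs'][idx]:.0f}",
        )

    plt.xlabel("Time (s)")
    plt.ylabel("Theta (rad)")
    plt.title(os.path.basename(json_path))
    plt.legend()
    plt.show()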