# Inverted-Pendulum-Neural-Ne.../analysis/base_loss_learning_rate_sweep/generate_data.py
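"""Sweep simulations for the base-loss learning-rate study.

For every analysis condition, simulate the inverted pendulum with each base
loss function's controllers across training epochs 0-200 and save the theta
trajectories (plus the shared time axis) to per-loss JSON files.
"""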
import json
import os
import sys
from multiprocessing import Pool, cpu_count

import numpy as np

# Make the analysis package importable before the local imports below.
sys.path.append("/home/judson/Neural-Networks-in-GNC/inverted_pendulum/analysis")
from simulation import run_simulation
from data_processing import get_controller_files
from analysis_conditions import analysis_conditions
from best_base_loss_learning_rate_sweep import data  # sweep-results dictionary
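# NOTE (assumption): `data` maps each base loss function name to its
# learning-rate-schedule entries, so data[loss]["constant"]["path"] points at
# the training-run directory for the constant-rate schedule (see its use below).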
# Loss function names to sweep (currently just the base losses).
base_loss_functions = list(data.keys())
loss_functions_all = base_loss_functions
# Simulation parameters
epoch_range = (0, 200)
epoch_step = 1   # evaluate every epoch
dt = 0.02        # time step (seconds)
num_steps = 500  # simulation steps (10 s of simulated time)

# Compute the time array once; it is identical for every epoch.
time_values = list(np.arange(num_steps) * dt)
# Directory to save results (created if missing, never cleared).
output_dir = "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/analysis/base_loss_learning_rate_sweep"
os.makedirs(output_dir, exist_ok=True)


def default_converter(o):
    """Convert numpy scalars and arrays to native Python types for JSON."""
    if isinstance(o, np.integer):
        return int(o)
    if isinstance(o, np.floating):
        return float(o)
    if isinstance(o, np.ndarray):
        return o.tolist()
    raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable")
# The __main__ guard keeps spawn-based multiprocessing from re-importing and
# re-running this sweep in every worker process.
if __name__ == "__main__":
    # Loop over each condition from analysis_conditions.
    for condition_name, initial_condition in analysis_conditions.items():
        print(f"Running condition: {condition_name}")

        # Create a folder for this condition.
        condition_dir = os.path.join(output_dir, condition_name)
        os.makedirs(condition_dir, exist_ok=True)

        # Create a data subfolder for the per-loss-function JSON files.
        data_dir = os.path.join(condition_dir, "data")
        os.makedirs(data_dir, exist_ok=True)
        # Process each loss function.
        for loss_function in loss_functions_all:
            print(f" Processing loss function: {loss_function}")

            # Use the "constant" learning-rate entry from the dictionary as the base path.
            base_path = data[loss_function]["constant"]["path"]
            # Controllers are assumed to be stored under the "controllers" subfolder.
            directory = os.path.join(base_path, "controllers")
            controllers = get_controller_files(directory, epoch_range, epoch_step)
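            # NOTE (assumption): get_controller_files returns one checkpoint
            # file per epoch in epoch_range, sampled every epoch_step epochs.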
            # One task per controller checkpoint, simulated in parallel
            # (capped at 16 worker processes).
            tasks = [(c, initial_condition, directory, dt, num_steps) for c in controllers]
            with Pool(min(cpu_count(), 16)) as pool:
                results = pool.map(run_simulation, tasks)
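            # NOTE (assumption): run_simulation takes one task tuple and returns
            # (epoch, state_history, ...), where state_history is a sequence of
            # state vectors with theta as the first component; the sort and
            # unpacking below rely on that ordering.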
            results.sort(key=lambda x: x[0])  # sort by epoch
            epochs, state_histories, _ = zip(*results)

            # Extract theta (the first state component) from each state history.
            theta_over_epochs = [[float(state[0]) for state in history] for history in state_histories]
            epochs = [float(ep) for ep in epochs]
            result_data = {
                "epochs": epochs,
                "theta_over_epochs": theta_over_epochs,
                "time": time_values,  # the common time array
            }
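            # Resulting JSON layout (illustrative):
            # {
            #   "epochs": [0.0, 1.0, ...],
            #   "theta_over_epochs": [[theta at t0, theta at t1, ...], ...],
            #   "time": [0.0, 0.02, ...]
            # }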
            output_path = os.path.join(data_dir, f"{loss_function}.json")
            with open(output_path, "w") as f:
                json.dump(result_data, f, default=default_converter, indent=2)
            print(f" Saved results for {loss_function} to {output_path}.")