Moved training files to a separate folder in preparation for making training_files a submodule

judsonupchurch 2025-05-13 01:40:59 +00:00
parent 1f00ca4da4
commit d7422cd99b
1168 changed files with 41 additions and 120053 deletions

View File

@@ -42,7 +42,7 @@ for condition_name, initial_condition in analysis_conditions.items():
for loss_function in loss_functions:
print(f" Processing loss function: {loss_function}")
# Build directory for controller files
directory = f"/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss/{loss_function}/controllers"
directory = f"/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss/{loss_function}/controllers"
controllers = get_controller_files(directory, epoch_range, epoch_step)
tasks = [(c, initial_condition, directory, dt, num_steps) for c in controllers]
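The only change in these analysis scripts is the directory prefix; get_controller_files and the task construction are untouched. As a rough sketch of what that helper presumably does (its implementation is not part of this diff, so the body below is an assumption), it would enumerate checkpoints named like controller_1000.pth under the new training_files layout and keep those on the requested epoch grid:

import os
import re

def get_controller_files(directory, epoch_range, epoch_step):
    # Assumed sketch, not the repository's implementation: collect
    # controller_<epoch>.pth files whose epoch lies in epoch_range
    # (treated here as an inclusive (start, stop) tuple) and falls on
    # the epoch_step grid.
    pattern = re.compile(r"controller_(\d+)\.pth")
    wanted = set(range(epoch_range[0], epoch_range[1] + 1, epoch_step))
    names = []
    for name in sorted(os.listdir(directory)):
        match = pattern.fullmatch(name)
        if match and int(match.group(1)) in wanted:
            names.append(name)
    return names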

View File

@@ -1 +1 @@
data = {'one': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one/lr_0.100', 'csv_loss': 0.07867201417684555, 'constant_loss': 2.5390186309814453}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one/lr_0.100', 'csv_loss': 0.07867201417684555, 'constant_loss': 2.5390186309814453}}, 'one_fourth': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one_fourth/lr_0.300', 'csv_loss': 0.08876045793294907, 'constant_loss': 2.5319466590881348}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one_fourth/lr_0.250', 'csv_loss': 0.09172269701957703, 'constant_loss': 2.5288496017456055}}, 'four': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/four/lr_0.200', 'csv_loss': 0.1293140947818756, 'constant_loss': 2.9976892471313477}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/four/lr_0.200', 'csv_loss': 0.1293140947818756, 'constant_loss': 2.9976892471313477}}, 'one_fifth': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one_fifth/lr_0.250', 'csv_loss': 0.08940213173627853, 'constant_loss': 2.5548017024993896}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one_fifth/lr_0.100', 'csv_loss': 0.09396396577358246, 'constant_loss': 2.5306591987609863}}, 'three': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/three/lr_0.100', 'csv_loss': 0.09003690630197525, 'constant_loss': 2.5991272926330566}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/three/lr_0.100', 'csv_loss': 0.09003690630197525, 'constant_loss': 2.5991272926330566}}, 'five': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/five/lr_0.200', 'csv_loss': 0.2009778916835785, 'constant_loss': 3.64280366897583}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/five/lr_0.125', 'csv_loss': 0.20845991373062134, 'constant_loss': 3.589925527572632}}, 'one_third': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one_third/lr_0.250', 'csv_loss': 0.0854221060872078, 'constant_loss': 2.506823778152466}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one_third/lr_0.250', 'csv_loss': 0.0854221060872078, 'constant_loss': 2.506823778152466}}, 'two': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/two/lr_0.100', 'csv_loss': 0.07678339630365372, 'constant_loss': 2.5585412979125977}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/two/lr_0.100', 'csv_loss': 0.07678339630365372, 'constant_loss': 2.5585412979125977}}, 'one_half': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one_half/lr_0.200', 'csv_loss': 0.08620432019233704, 'constant_loss': 2.541421890258789}, 'constant': {'path': 
'/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/base_loss_learning_rate_sweep/one_half/lr_0.200', 'csv_loss': 0.08620432019233704, 'constant_loss': 2.541421890258789}}}
data = {'one': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one/lr_0.100', 'csv_loss': 0.07867201417684555, 'constant_loss': 2.5390186309814453}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one/lr_0.100', 'csv_loss': 0.07867201417684555, 'constant_loss': 2.5390186309814453}}, 'one_fourth': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one_fourth/lr_0.300', 'csv_loss': 0.08876045793294907, 'constant_loss': 2.5319466590881348}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one_fourth/lr_0.250', 'csv_loss': 0.09172269701957703, 'constant_loss': 2.5288496017456055}}, 'four': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/four/lr_0.200', 'csv_loss': 0.1293140947818756, 'constant_loss': 2.9976892471313477}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/four/lr_0.200', 'csv_loss': 0.1293140947818756, 'constant_loss': 2.9976892471313477}}, 'one_fifth': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one_fifth/lr_0.250', 'csv_loss': 0.08940213173627853, 'constant_loss': 2.5548017024993896}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one_fifth/lr_0.100', 'csv_loss': 0.09396396577358246, 'constant_loss': 2.5306591987609863}}, 'three': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/three/lr_0.100', 'csv_loss': 0.09003690630197525, 'constant_loss': 2.5991272926330566}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/three/lr_0.100', 'csv_loss': 0.09003690630197525, 'constant_loss': 2.5991272926330566}}, 'five': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/five/lr_0.200', 'csv_loss': 0.2009778916835785, 'constant_loss': 3.64280366897583}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/five/lr_0.125', 'csv_loss': 0.20845991373062134, 'constant_loss': 3.589925527572632}}, 'one_third': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one_third/lr_0.250', 'csv_loss': 0.0854221060872078, 'constant_loss': 2.506823778152466}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one_third/lr_0.250', 'csv_loss': 0.0854221060872078, 'constant_loss': 2.506823778152466}}, 'two': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/two/lr_0.100', 'csv_loss': 0.07678339630365372, 'constant_loss': 2.5585412979125977}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/two/lr_0.100', 'csv_loss': 0.07678339630365372, 'constant_loss': 2.5585412979125977}}, 
'one_half': {'csv': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one_half/lr_0.200', 'csv_loss': 0.08620432019233704, 'constant_loss': 2.541421890258789}, 'constant': {'path': '/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/base_loss_learning_rate_sweep/one_half/lr_0.200', 'csv_loss': 0.08620432019233704, 'constant_loss': 2.541421890258789}}}

View File

@@ -14,7 +14,7 @@ from PendulumController import PendulumController
# List of controller file names to validate.
# Replace these paths with your actual controller file paths.
controller_file_names = [
"/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting/constant/controllers/controller_1000.pth",
"/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting/constant/controllers/controller_1000.pth",
]
# Constants for simulation

View File

@@ -48,7 +48,7 @@ for condition_name, initial_condition in analysis_conditions.items():
for loss_function in loss_functions:
print(f" Processing loss function: {loss_function}")
# Build directory for controller files
directory = f"/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting/{loss_function}/controllers"
directory = f"/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting/{loss_function}/controllers"
controllers = get_controller_files(directory, epoch_range, epoch_step)
tasks = [(c, initial_condition, directory, dt, num_steps) for c in controllers]

View File

@@ -1,204 +1,204 @@
data = {
"inverse": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse/lr_0.250",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse/lr_0.250",
"csv_loss": 0.531498372554779,
"constant_loss": 2.5503664016723633
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse/lr_0.250",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse/lr_0.250",
"csv_loss": 0.531498372554779,
"constant_loss": 2.5503664016723633
}
},
"linear_mirrored": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/linear_mirrored/lr_0.125",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/linear_mirrored/lr_0.125",
"csv_loss": 2.3770766258239746,
"constant_loss": 2.552375078201294
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/linear_mirrored/lr_0.125",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/linear_mirrored/lr_0.125",
"csv_loss": 2.3770766258239746,
"constant_loss": 2.552375078201294
}
},
"inverse_squared_mirrored": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_squared_mirrored/lr_0.160",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_squared_mirrored/lr_0.160",
"csv_loss": 0.033845994621515274,
"constant_loss": 2.7603342533111572
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_squared_mirrored/lr_0.160",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_squared_mirrored/lr_0.160",
"csv_loss": 0.033845994621515274,
"constant_loss": 2.7603342533111572
}
},
"cubic_mirrored": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/cubic_mirrored/lr_0.080",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/cubic_mirrored/lr_0.080",
"csv_loss": 2.0769901275634766,
"constant_loss": 2.563471555709839
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/cubic_mirrored/lr_0.080",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/cubic_mirrored/lr_0.080",
"csv_loss": 2.0769901275634766,
"constant_loss": 2.563471555709839
}
},
"quadratic": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/quadratic/lr_0.200",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/quadratic/lr_0.200",
"csv_loss": 0.06192325800657272,
"constant_loss": 3.025479316711426
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/quadratic/lr_0.080",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/quadratic/lr_0.080",
"csv_loss": 0.14040324091911316,
"constant_loss": 2.982274055480957
}
},
"inverse_squared": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_squared/lr_0.200",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_squared/lr_0.200",
"csv_loss": 1.1794205904006958,
"constant_loss": 2.5662319660186768
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_squared/lr_0.200",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_squared/lr_0.200",
"csv_loss": 1.1794205904006958,
"constant_loss": 2.5662319660186768
}
},
"quadratic_mirrored": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/quadratic_mirrored/lr_0.125",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/quadratic_mirrored/lr_0.125",
"csv_loss": 2.218207836151123,
"constant_loss": 2.555176258087158
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/quadratic_mirrored/lr_0.125",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/quadratic_mirrored/lr_0.125",
"csv_loss": 2.218207836151123,
"constant_loss": 2.555176258087158
}
},
"square_root": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/square_root/lr_0.250",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/square_root/lr_0.250",
"csv_loss": 0.6526519656181335,
"constant_loss": 2.5856597423553467
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/square_root/lr_0.250",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/square_root/lr_0.250",
"csv_loss": 0.6526519656181335,
"constant_loss": 2.5856597423553467
}
},
"inverse_cubed_mirrored": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_cubed_mirrored/lr_0.200",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_cubed_mirrored/lr_0.200",
"csv_loss": 0.03754603490233421,
"constant_loss": 2.9996697902679443
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_cubed_mirrored/lr_0.200",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_cubed_mirrored/lr_0.200",
"csv_loss": 0.03754603490233421,
"constant_loss": 2.9996697902679443
}
},
"cubic_root_mirrored": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/cubic_root_mirrored/lr_0.250",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/cubic_root_mirrored/lr_0.250",
"csv_loss": 2.47979474067688,
"constant_loss": 2.5389654636383057
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/cubic_root_mirrored/lr_0.250",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/cubic_root_mirrored/lr_0.250",
"csv_loss": 2.47979474067688,
"constant_loss": 2.5389654636383057
}
},
"inverse_mirrored": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_mirrored/lr_0.160",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_mirrored/lr_0.160",
"csv_loss": 0.032234687358140945,
"constant_loss": 2.942859649658203
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_mirrored/lr_0.160",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_mirrored/lr_0.160",
"csv_loss": 0.032234687358140945,
"constant_loss": 2.942859649658203
}
},
"inverse_cubed": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_cubed/lr_0.200",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_cubed/lr_0.200",
"csv_loss": 1.4481265544891357,
"constant_loss": 2.557009696960449
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/inverse_cubed/lr_0.200",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/inverse_cubed/lr_0.200",
"csv_loss": 1.4481265544891357,
"constant_loss": 2.557009696960449
}
},
"cubic_root": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/cubic_root/lr_0.250",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/cubic_root/lr_0.250",
"csv_loss": 1.0203485488891602,
"constant_loss": 2.609311819076538
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/cubic_root/lr_0.250",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/cubic_root/lr_0.250",
"csv_loss": 1.0203485488891602,
"constant_loss": 2.609311819076538
}
},
"square_root_mirrored": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/square_root_mirrored/lr_0.160",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/square_root_mirrored/lr_0.160",
"csv_loss": 2.4792795181274414,
"constant_loss": 2.5693373680114746
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/square_root_mirrored/lr_0.160",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/square_root_mirrored/lr_0.160",
"csv_loss": 2.4792795181274414,
"constant_loss": 2.5693373680114746
}
},
"linear": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/linear/lr_0.125",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/linear/lr_0.125",
"csv_loss": 0.2883843183517456,
"constant_loss": 3.05281400680542
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/linear/lr_0.080",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/linear/lr_0.080",
"csv_loss": 0.28867313265800476,
"constant_loss": 2.9585072994232178
}
},
"constant": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/constant/lr_0.160",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/constant/lr_0.160",
"csv_loss": 2.608083486557007,
"constant_loss": 2.606748342514038
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/constant/lr_0.160",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/constant/lr_0.160",
"csv_loss": 2.608083486557007,
"constant_loss": 2.606748342514038
}
},
"cubic": {
"csv": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/cubic/lr_0.160",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/cubic/lr_0.160",
"csv_loss": 0.04065453261137009,
"constant_loss": 3.101959228515625
},
"constant": {
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/time_weighting_learning_rate_sweep/cubic/lr_0.300",
"path": "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/training_files/time_weighting_learning_rate_sweep/cubic/lr_0.300",
"csv_loss": 0.049555618315935135,
"constant_loss": 3.0432639122009277
}

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0.0001
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def current_loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(loss_fn(theta, desired_theta))
Specific Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
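As a quick sanity check on the normalized loss logged above, the standalone snippet below (not part of the deleted file) re-runs the same formula for the exponent-5 case and confirms the documented bounds: min_val at zero error and 1 at an error of 2π.

import math
import torch

def normalized_loss(theta, desired_theta, exponent, min_val=0.01, delta=1):
    # Same formula as in the training log above: maps the error
    # |theta - desired_theta| on [0, 2*pi] into [min_val, 1].
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)

zero_error = normalized_loss(torch.tensor(0.0), torch.tensor(0.0), exponent=5)
full_error = normalized_loss(torch.tensor(2 * math.pi), torch.tensor(0.0), exponent=5)
print(zero_error.item())  # ~0.01 (min_val)
print(full_error.item())  # ~1.0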

File diff suppressed because it is too large

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0.0001
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def current_loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(loss_fn(theta, desired_theta))
Specific Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

File diff suppressed because it is too large

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0.0001
Loss Function Name: one
Loss Function Exponent: 1
Current Loss Function (wrapper) Source Code:
def current_loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(loss_fn(theta, desired_theta))
Specific Loss Function Source Code:
def abs_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=1, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

File diff suppressed because it is too large

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0.0001
Loss Function Name: one_fifth
Loss Function Exponent: 0.2
Current Loss Function (wrapper) Source Code:
def current_loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(loss_fn(theta, desired_theta))
Specific Loss Function Source Code:
def one_fifth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=1/5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

File diff suppressed because it is too large

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0.0001
Loss Function Name: one_fourth
Loss Function Exponent: 0.25
Current Loss Function (wrapper) Source Code:
def current_loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(loss_fn(theta, desired_theta))
Specific Loss Function Source Code:
def one_fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=1/4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

File diff suppressed because it is too large

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0.0001
Loss Function Name: one_half
Loss Function Exponent: 0.5
Current Loss Function (wrapper) Source Code:
def current_loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(loss_fn(theta, desired_theta))
Specific Loss Function Source Code:
def one_half_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=1/2, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

File diff suppressed because it is too large

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0.0001
Loss Function Name: one_third
Loss Function Exponent: 0.3333333333333333
Current Loss Function (wrapper) Source Code:
def current_loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(loss_fn(theta, desired_theta))
Specific Loss Function Source Code:
def one_third_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=1/3, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

File diff suppressed because it is too large

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0.0001
Loss Function Name: three
Loss Function Exponent: 3
Current Loss Function (wrapper) Source Code:
def current_loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(loss_fn(theta, desired_theta))
Specific Loss Function Source Code:
def three_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=3, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

File diff suppressed because it is too large

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0.0001
Loss Function Name: two
Loss Function Exponent: 2
Current Loss Function (wrapper) Source Code:
def current_loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(loss_fn(theta, desired_theta))
Specific Loss Function Source Code:
def square_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=2, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

File diff suppressed because it is too large

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.0025
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,7770.62841796875
2,6718.59423828125
3,5794.0087890625
4,4935.95947265625
5,4327.53271484375
6,3802.377197265625
7,3329.73046875
8,2931.816650390625
9,2564.612060546875
10,2215.23388671875
11,1976.3067626953125
12,1704.258544921875
13,1438.279052734375
14,1257.5654296875
15,1139.8956298828125
16,995.1824340820312
17,815.4264526367188
18,727.8626098632812
19,632.2618408203125
20,605.2363891601562
21,508.25048828125
22,416.4864196777344
23,366.84027099609375
24,369.57958984375
25,295.07867431640625
26,248.3028564453125
27,223.56149291992188
28,239.32118225097656
29,202.33860778808594
30,194.6480255126953
31,167.9254150390625
32,156.02577209472656
33,151.82904052734375
34,151.10589599609375
35,144.50389099121094
36,144.8226776123047
37,145.7972869873047
38,148.10838317871094
39,152.98716735839844
40,154.3490753173828
41,156.31729125976562
42,156.1006622314453
43,154.32318115234375
44,149.095947265625
45,141.8406524658203
46,132.90611267089844
47,130.16073608398438
48,128.01268005371094
49,127.04995727539062
50,126.7177963256836
51,126.15620422363281
52,125.43661499023438
53,124.47113800048828
54,122.81539916992188
55,123.9261245727539
56,124.15579223632812
57,124.08218383789062
58,123.82746887207031
59,123.41194152832031
60,122.7802963256836
61,121.62992095947266
62,122.0086441040039
63,122.00167083740234
64,121.75686645507812
65,121.33285522460938
66,120.52906036376953
67,121.29076385498047
68,121.7078857421875
69,121.7892074584961
70,121.73064422607422
71,121.57801055908203
72,121.35053253173828
73,121.05697631835938
74,120.69910430908203
75,120.26854705810547
76,119.73797607421875
77,119.03357696533203
78,117.8167724609375
79,116.83917236328125
80,116.34820556640625
81,115.63579559326172
82,114.36708068847656
83,115.11323547363281
84,115.37007904052734
85,115.34087371826172
86,115.01927185058594
87,114.36756896972656
88,114.035400390625
89,114.01355743408203
90,114.08613586425781
91,113.75379180908203
92,113.07562255859375
93,111.48456573486328
94,111.98307037353516
95,113.6790542602539
96,113.85575103759766
97,113.80955505371094
98,113.60377502441406
99,112.89239501953125
100,113.32893371582031
101,114.0166244506836
102,114.2397689819336
103,114.04379272460938
104,112.6296157836914
105,107.9374771118164
106,107.7266845703125
107,107.7040786743164
108,108.00822448730469
109,108.39141845703125
110,108.71395111083984
111,108.90518951416016
112,108.95988464355469
113,108.91028594970703
114,108.78828430175781
115,108.61094665527344
116,108.3825454711914
117,108.09669494628906
118,107.74114990234375
119,107.30583190917969
120,106.79058837890625
121,106.1917953491211
122,110.83098602294922
123,105.59488677978516
124,105.57504272460938
125,105.48200225830078
126,105.35533142089844
127,105.2125015258789
128,105.0588150024414
129,104.88456726074219
130,106.81533813476562
131,106.51081848144531
132,109.82728576660156
133,115.5789794921875
134,128.6304473876953
135,131.7053680419922
136,121.98052215576172
137,118.72258758544922
138,117.06177520751953
139,116.03680419921875
140,115.83661651611328
141,121.03598022460938
142,120.2808837890625
143,119.34547424316406
144,118.78247833251953
145,118.43366241455078
146,118.12821960449219
147,117.74102020263672
148,117.023193359375
149,118.93740844726562
150,119.89824676513672
151,120.37339782714844
152,120.54845428466797
153,120.5039291381836
154,120.2508773803711
155,119.74303436279297
156,118.87834930419922
157,116.06034088134766
158,116.97679901123047
159,117.20667266845703
160,117.27363586425781
161,117.27104187011719
162,117.23406982421875
163,117.1791000366211
164,117.11459350585938
165,117.04459381103516
166,116.971923828125
167,116.8958511352539
168,116.815185546875
169,116.7279281616211
170,116.63368225097656
171,116.53009033203125
172,116.41732788085938
173,116.29596710205078
174,116.16725158691406
175,116.0330581665039
176,115.89540100097656
177,115.7552719116211
178,115.61406707763672
179,115.47228240966797
180,115.32965850830078
181,115.18595123291016
182,115.04006958007812
183,114.89013671875
184,114.73365783691406
185,114.56776428222656
186,114.38726043701172
187,114.18212127685547
188,113.93012237548828
189,113.56849670410156
190,113.1717300415039
191,114.64246368408203
192,115.82766723632812
193,110.32905578613281
194,116.33597564697266
195,111.70674896240234
196,112.64942932128906
197,113.70431518554688
198,114.73318481445312
199,115.6131820678711
200,116.27291870117188

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.005
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
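For reference, a minimal sanity-check sketch (not part of the original training files) of the boundary behavior documented in normalized_loss above; it assumes that function is in scope exactly as printed:
import math
import torch
# error = 0 should map to min_val, and error = 2π should map to 1 (for min_val = 0.01)
theta = torch.tensor([0.0, 2 * math.pi])
desired_theta = torch.zeros(2)
print(normalized_loss(theta, desired_theta, exponent=5))  # expected ≈ tensor([0.0100, 1.0000])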

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,6725.52001953125
2,4997.44384765625
3,3804.835693359375
4,2935.503173828125
5,2222.25732421875
6,1693.8331298828125
7,1256.9798583984375
8,963.8367309570312
9,665.0695190429688
10,535.7227783203125
11,364.3943786621094
12,252.67892456054688
13,221.32859802246094
14,194.2473602294922
15,135.03506469726562
16,110.41338348388672
17,121.97980499267578
18,109.03631591796875
19,108.22425079345703
20,109.557861328125
21,104.28150939941406
22,86.65518951416016
23,76.22408294677734
24,73.43879699707031
25,71.68223571777344
26,68.87044525146484
27,62.73584747314453
28,62.54648971557617
29,60.324642181396484
30,55.01743698120117
31,53.166526794433594
32,50.682369232177734
33,48.591407775878906
34,46.70733642578125
35,45.11572265625
36,42.944496154785156
37,40.58065414428711
38,39.167545318603516
39,38.011505126953125
40,36.439659118652344
41,36.005401611328125
42,35.49430847167969
43,35.04253387451172
44,34.596492767333984
45,32.74248123168945
46,32.25679397583008
47,31.893898010253906
48,31.575838088989258
49,31.29216957092285
50,31.034502029418945
51,30.795312881469727
52,30.56829071044922
53,30.349084854125977
54,30.13558006286621
55,29.928409576416016
56,29.730928421020508
57,29.550201416015625
58,29.39569091796875
59,29.28091812133789
60,29.154844284057617
61,28.832595825195312
62,28.593297958374023
63,28.365314483642578
64,28.122602462768555
65,27.836380004882812
66,27.429363250732422
67,26.822507858276367
68,26.477489471435547
69,25.864377975463867
70,25.378442764282227
71,23.984514236450195
72,23.57914161682129
73,23.373628616333008
74,23.214035034179688
75,23.080238342285156
76,22.963762283325195
77,22.859521865844727
78,22.763877868652344
79,22.67420196533203
80,22.587997436523438
81,22.503862380981445
82,22.420806884765625
83,22.338428497314453
84,22.256061553955078
85,22.172527313232422
86,22.08760643005371
87,22.0006103515625
88,21.91148567199707
89,21.82109260559082
90,21.730979919433594
91,21.642606735229492
92,21.556926727294922
93,21.474634170532227
94,21.395732879638672
95,21.319725036621094
96,21.246471405029297
97,21.17557144165039
98,21.10727882385254
99,21.041545867919922
100,20.97849464416504
101,20.918176651000977
102,20.860525131225586
103,20.805519104003906
104,20.753093719482422
105,20.703218460083008
106,20.655841827392578
107,20.610776901245117
108,20.567886352539062
109,20.527023315429688
110,20.487937927246094
111,20.450258255004883
112,20.413429260253906
113,20.376502990722656
114,20.33868408203125
115,20.300275802612305
116,20.262203216552734
117,20.225011825561523
118,20.188913345336914
119,20.15406036376953
120,20.120267868041992
121,20.087509155273438
122,20.055627822875977
123,20.024587631225586
124,19.994253158569336
125,19.964513778686523
126,19.935354232788086
127,19.906675338745117
128,19.878501892089844
129,19.850749969482422
130,19.823453903198242
131,19.796541213989258
132,19.77004623413086
133,19.743968963623047
134,19.718332290649414
135,19.693124771118164
136,19.668338775634766
137,19.64396858215332
138,19.620038986206055
139,19.59649658203125
140,19.573381423950195
141,19.550628662109375
142,19.528242111206055
143,19.506250381469727
144,19.48459815979004
145,19.463274002075195
146,19.442277908325195
147,19.421581268310547
148,19.401185989379883
149,19.381067276000977
150,19.361270904541016
151,19.341718673706055
152,19.322460174560547
153,19.30341911315918
154,19.284698486328125
155,19.26616859436035
156,19.247894287109375
157,19.229854583740234
158,19.2120418548584
159,19.194433212280273
160,19.17705726623535
161,19.15989112854004
162,19.1429443359375
163,19.12616539001465
164,19.10961151123047
165,19.093191146850586
166,19.07701301574707
167,19.060972213745117
168,19.045129776000977
169,19.029451370239258
170,19.013916015625
171,18.99856185913086
172,18.98333168029785
173,18.96823501586914
174,18.953290939331055
175,18.938474655151367
176,18.923778533935547
177,18.909225463867188
178,18.894758224487305
179,18.880416870117188
180,18.866165161132812
181,18.852020263671875
182,18.83796501159668
183,18.824012756347656
184,18.81011390686035
185,18.79629898071289
186,18.782527923583984
187,18.768795013427734
188,18.7551212310791
189,18.74149513244629
190,18.72787857055664
191,18.71425437927246
192,18.70063018798828
193,18.68698501586914
194,18.673267364501953
195,18.659528732299805
196,18.645681381225586
197,18.631698608398438
198,18.61760139465332
199,18.603317260742188
200,18.58875274658203
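As an aside, a minimal sketch (not part of the repository) for inspecting one of these Epoch,Loss histories; the file name below is hypothetical and should be replaced with the actual CSV path:
import pandas as pd
import matplotlib.pyplot as plt
df = pd.read_csv("loss_history.csv")  # hypothetical path; columns are Epoch, Loss
plt.plot(df["Epoch"], df["Loss"])
plt.yscale("log")  # the losses fall by several orders of magnitude over training
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.show()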

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.01
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,5059.482421875
2,3010.3740234375
3,1816.048583984375
4,1021.2877197265625
5,545.4293823242188
6,291.2381591796875
7,230.19659423828125
8,125.45794677734375
9,134.42971801757812
10,117.55680847167969
11,102.60958099365234
12,76.38494873046875
13,68.34687805175781
14,62.285850524902344
15,52.418697357177734
16,46.43372344970703
17,44.03841018676758
18,43.49372100830078
19,42.42775344848633
20,40.21412658691406
21,40.499664306640625
22,34.71615219116211
23,32.54283905029297
24,27.556747436523438
25,24.67312240600586
26,23.636560440063477
27,22.539194107055664
28,20.3195858001709
29,18.445499420166016
30,15.775135040283203
31,14.405683517456055
32,13.408025741577148
33,12.5646390914917
34,11.871582984924316
35,11.312950134277344
36,10.84416389465332
37,10.433754920959473
38,9.928411483764648
39,9.642745018005371
40,9.40228271484375
41,9.211402893066406
42,9.064362525939941
43,8.936352729797363
44,8.75970458984375
45,8.682718276977539
46,8.610445022583008
47,8.548540115356445
48,8.481241226196289
49,8.38536262512207
50,8.219938278198242
51,7.891648292541504
52,7.5890936851501465
53,7.376401901245117
54,6.950267791748047
55,6.586104393005371
56,6.322219371795654
57,6.213955402374268
58,6.054443359375
59,5.882267951965332
60,5.800445556640625
61,5.714313983917236
62,5.6333489418029785
63,5.557476043701172
64,5.517022132873535
65,5.424365997314453
66,5.364746570587158
67,5.305666446685791
68,5.246865272521973
69,5.18797492980957
70,5.129032611846924
71,5.070919513702393
72,5.01667594909668
73,4.888418197631836
74,4.858117580413818
75,4.824503421783447
76,4.788944721221924
77,4.752417087554932
78,4.715500831604004
79,4.678471088409424
80,4.6414289474487305
81,4.604058742523193
82,4.569658279418945
83,4.531286239624023
84,4.4958882331848145
85,4.4591498374938965
86,4.421026706695557
87,4.38107442855835
88,4.34315299987793
89,4.300421714782715
90,4.258784294128418
91,4.216139316558838
92,4.173788547515869
93,4.132620811462402
94,4.092691898345947
95,4.0533270835876465
96,4.0136308670043945
97,3.972959518432617
98,3.859011650085449
99,3.9553351402282715
100,3.9903764724731445
101,4.018263339996338
102,3.9151854515075684
103,3.882251739501953
104,3.7630834579467773
105,3.7000489234924316
106,3.6120121479034424
107,3.475703477859497
108,3.497345447540283
109,3.395029067993164
110,3.279219150543213
111,3.223318099975586
112,3.2120800018310547
113,3.209965467453003
114,3.107729434967041
115,3.0968451499938965
116,3.078746795654297
117,3.0428404808044434
118,2.9915549755096436
119,2.9698710441589355
120,2.9466629028320312
121,2.918043613433838
122,2.883138656616211
123,2.8782236576080322
124,2.873089551925659
125,2.86930513381958
126,2.86629581451416
127,2.863724708557129
128,2.861400842666626
129,2.8591976165771484
130,2.8570375442504883
131,2.854865312576294
132,2.8526415824890137
133,2.850334882736206
134,2.847916841506958
135,2.845351457595825
136,2.8425850868225098
137,2.839533805847168
138,2.8360393047332764
139,2.8318018913269043
140,2.826171636581421
141,2.817456007003784
142,2.800262689590454
143,2.759161949157715
144,2.7409002780914307
145,2.733471393585205
146,2.7278575897216797
147,2.7232511043548584
148,2.719332456588745
149,2.715935468673706
150,2.7129509449005127
151,2.7102749347686768
152,2.7075414657592773
153,2.7018003463745117
154,2.666679620742798
155,2.6628167629241943
156,2.662264823913574
157,2.6619603633880615
158,2.6614482402801514
159,2.6607513427734375
160,2.659938335418701
161,2.65906023979187
162,2.6581509113311768
163,2.657233476638794
164,2.6563141345977783
165,2.655404567718506
166,2.6545050144195557
167,2.653618812561035
168,2.6527457237243652
169,2.651887893676758
170,2.651043176651001
171,2.650209903717041
172,2.6493892669677734
173,2.6485791206359863
174,2.6477811336517334
175,2.6469900608062744
176,2.646209955215454
177,2.6454365253448486
178,2.6446681022644043
179,2.6439082622528076
180,2.643153667449951
181,2.6424012184143066
182,2.6416542530059814
183,2.6409099102020264
184,2.6401679515838623
185,2.639427661895752
186,2.6386876106262207
187,2.637948751449585
188,2.6372103691101074
189,2.636469841003418
190,2.635728597640991
191,2.634984016418457
192,2.634237289428711
193,2.633486032485962
194,2.632730722427368
195,2.631969451904297
196,2.631201982498169
197,2.6304264068603516
198,2.6296417713165283
199,2.6288468837738037
200,2.628040313720703

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.02
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,3088.385009765625
2,1144.3016357421875
3,318.79901123046875
4,140.68502807617188
5,152.70545959472656
6,75.14848327636719
7,73.43697357177734
8,66.32743072509766
9,50.1694450378418
10,33.105369567871094
11,25.02813148498535
12,22.472965240478516
13,18.639480590820312
14,15.920618057250977
15,13.850238800048828
16,13.115677833557129
17,12.236470222473145
18,11.852234840393066
19,11.72622299194336
20,11.202983856201172
21,11.095417976379395
22,11.030611038208008
23,10.815608024597168
24,10.718470573425293
25,10.56022834777832
26,10.564657211303711
27,10.425938606262207
28,10.180031776428223
29,10.043537139892578
30,9.891477584838867
31,9.733704566955566
32,9.577387809753418
33,9.462329864501953
34,9.409721374511719
35,9.363308906555176
36,9.301348686218262
37,9.235352516174316
38,9.170696258544922
39,9.110300064086914
40,9.056041717529297
41,9.00932788848877
42,8.970462799072266
43,8.939508438110352
44,8.91582202911377
45,8.898208618164062
46,8.885146141052246
47,8.87503433227539
48,8.866385459899902
49,8.85802173614502
50,8.849024772644043
51,8.838545799255371
52,8.832979202270508
53,8.856711387634277
54,8.841376304626465
55,8.813333511352539
56,8.778741836547852
57,8.743232727050781
58,8.709452629089355
59,8.677081108093262
60,8.644346237182617
61,8.621743202209473
62,8.610654830932617
63,8.603785514831543
64,8.598999977111816
65,8.594789505004883
66,8.590073585510254
67,8.584161758422852
68,8.576689720153809
69,8.56757640838623
70,8.556938171386719
71,8.545018196105957
72,8.532170295715332
73,8.518823623657227
74,8.505425453186035
75,8.492317199707031
76,8.479771614074707
77,8.467961311340332
78,8.456985473632812
79,8.446765899658203
80,8.437174797058105
81,8.428035736083984
82,8.419135093688965
83,8.410305976867676
84,8.401407241821289
85,8.392420768737793
86,8.383390426635742
87,8.374467849731445
88,8.365741729736328
89,8.357316970825195
90,8.349235534667969
91,8.341489791870117
92,8.334039688110352
93,8.326807022094727
94,8.319735527038574
95,8.312769889831543
96,8.30587100982666
97,8.299010276794434
98,8.292182922363281
99,8.285400390625
100,8.278678894042969
101,8.27203369140625
102,8.265478134155273
103,8.259018898010254
104,8.25264835357666
105,8.24635124206543
106,8.240104675292969
107,8.233864784240723
108,8.227606773376465
109,8.221291542053223
110,8.214883804321289
111,8.208340644836426
112,8.201622009277344
113,8.194698333740234
114,8.187500953674316
115,8.17996597290039
116,8.1719970703125
117,8.163481712341309
118,8.1542329788208
119,8.144001007080078
120,8.13238525390625
121,8.118897438049316
122,8.103504180908203
123,8.099246978759766
124,8.115421295166016
125,8.134553909301758
126,8.147979736328125
127,8.157594680786133
128,8.163418769836426
129,8.165081024169922
130,8.162735939025879
131,8.157225608825684
132,8.149883270263672
133,8.142067909240723
134,8.134814262390137
135,8.128766059875488
136,8.124173164367676
137,8.120987892150879
138,8.118855476379395
139,8.117076873779297
140,8.114785194396973
141,8.111310005187988
142,8.106478691101074
143,8.100659370422363
144,8.094478607177734
145,8.088531494140625
146,8.083104133605957
147,8.078246116638184
148,8.073824882507324
149,8.06963062286377
150,8.065448760986328
151,8.06110668182373
152,8.05648136138916
153,8.051547050476074
154,8.046350479125977
155,8.040987968444824
156,8.035585403442383
157,8.030244827270508
158,8.025039672851562
159,8.019987106323242
160,8.015042304992676
161,8.01014518737793
162,8.005194664001465
163,8.000125885009766
164,7.994886875152588
165,7.989474296569824
166,7.9839277267456055
167,7.978292465209961
168,7.9726057052612305
169,7.966889381408691
170,7.961117267608643
171,7.955262660980225
172,7.9492669105529785
173,7.943070411682129
174,7.936622142791748
175,7.929859161376953
176,7.922729969024658
177,7.915169715881348
178,7.907082557678223
179,7.898306846618652
180,7.8885908126831055
181,7.877531051635742
182,7.864451885223389
183,7.848282814025879
184,7.827654838562012
185,7.820808410644531
186,7.830437660217285
187,7.848799705505371
188,7.861062526702881
189,7.866879463195801
190,7.861657619476318
191,7.848287105560303
192,7.83420467376709
193,7.8231635093688965
194,7.8167314529418945
195,7.815674781799316
196,7.817105293273926
197,7.812990188598633
198,7.801965236663818
199,7.791414737701416
200,7.784646511077881

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.04
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,1289.341552734375
2,169.74205017089844
3,80.52271270751953
4,43.68131637573242
5,20.937349319458008
6,9.95045280456543
7,5.791399002075195
8,3.4011170864105225
9,2.2140963077545166
10,1.3589996099472046
11,0.8892810940742493
12,0.7924511432647705
13,0.6559172868728638
14,0.5640803575515747
15,0.5065086483955383
16,0.46587052941322327
17,0.43497705459594727
18,0.40961483120918274
19,0.390272319316864
20,0.375575989484787
21,0.36431777477264404
22,0.3530777394771576
23,0.34686341881752014
24,0.3405311703681946
25,0.32142800092697144
26,0.30399277806282043
27,0.2940421402454376
28,0.2890926003456116
29,0.286124587059021
30,0.28330594301223755
31,0.2808089852333069
32,0.27851444482803345
33,0.2765269875526428
34,0.27496540546417236
35,0.2740345299243927
36,0.27290958166122437
37,0.27196675539016724
38,0.2715482711791992
39,0.2710689604282379
40,0.27050724625587463
41,0.2699800729751587
42,0.2695007622241974
43,0.2690669298171997
44,0.2686779201030731
45,0.26833468675613403
46,0.26803550124168396
47,0.26777052879333496
48,0.2675304710865021
49,0.2673094570636749
50,0.26710379123687744
51,0.26691052317619324
52,0.26672792434692383
53,0.2665548324584961
54,0.2663900554180145
55,0.26623332500457764
56,0.26608356833457947
57,0.26594021916389465
58,0.26580268144607544
59,0.26567041873931885
60,0.2655431628227234
61,0.2654201090335846
62,0.26530104875564575
63,0.26518580317497253
64,0.2650740444660187
65,0.264965295791626
66,0.2648593783378601
67,0.26475611329078674
68,0.26465508341789246
69,0.2645563781261444
70,0.26445940136909485
71,0.2643643617630005
72,0.2642710506916046
73,0.2641793191432953
74,0.2640889883041382
75,0.2639997899532318
76,0.26391205191612244
77,0.2638251781463623
78,0.2637394070625305
79,0.2636547088623047
80,0.2635708153247833
81,0.26348769664764404
82,0.26340535283088684
83,0.26332375407218933
84,0.26324278116226196
85,0.2631625533103943
86,0.2630827724933624
87,0.26300349831581116
88,0.2629247307777405
89,0.2628464102745056
90,0.2627684772014618
91,0.26269087195396423
92,0.2626136839389801
93,0.2625367343425751
94,0.2624601423740387
95,0.2623837888240814
96,0.26230767369270325
97,0.2622319161891937
98,0.2621561586856842
99,0.26208069920539856
100,0.26200541853904724
101,0.26193028688430786
102,0.26185524463653564
103,0.2617804706096649
104,0.26170584559440613
105,0.26163122057914734
106,0.2615566849708557
107,0.2614821791648865
108,0.2614077627658844
109,0.2613334655761719
110,0.26125913858413696
111,0.26118481159210205
112,0.26111048460006714
113,0.26103606820106506
114,0.260961651802063
115,0.26088714599609375
116,0.26081234216690063
117,0.26073750853538513
118,0.26066240668296814
119,0.26058709621429443
120,0.260511577129364
121,0.2604355812072754
122,0.2603593170642853
123,0.2602824866771698
124,0.26020514965057373
125,0.26012709736824036
126,0.2600480616092682
127,0.25996801257133484
128,0.2598864734172821
129,0.2598028779029846
130,0.2597167193889618
131,0.25962668657302856
132,0.25953125953674316
133,0.25942733883857727
134,0.2593097686767578
135,0.25917068123817444
136,0.2590011656284332
137,0.2588154375553131
138,0.2586815357208252
139,0.2586136758327484
140,0.2585947513580322
141,0.2585603594779968
142,0.2585074305534363
143,0.2584458887577057
144,0.25838011503219604
145,0.25831231474876404
146,0.2582433223724365
147,0.25817352533340454
148,0.25810328125953674
149,0.25803276896476746
150,0.2579619884490967
151,0.25789105892181396
152,0.257820188999176
153,0.257749080657959
154,0.2576778829097748
155,0.2576064467430115
156,0.25753504037857056
157,0.2574634552001953
158,0.25739187002182007
159,0.25732001662254333
160,0.2572482228279114
161,0.2571762204170227
162,0.2571040689945221
163,0.2570318579673767
164,0.25695955753326416
165,0.2568871080875397
166,0.25681447982788086
167,0.2567417621612549
168,0.25666895508766174
169,0.2565959692001343
170,0.25652286410331726
171,0.2564496099948883
172,0.2563762366771698
173,0.25630292296409607
174,0.2562292814254761
175,0.2561556398868561
176,0.2560817301273346
177,0.2560078501701355
178,0.2559337913990021
179,0.25585949420928955
180,0.2557850778102875
181,0.25571057200431824
182,0.2556358873844147
183,0.2555610239505768
184,0.25548604130744934
185,0.2554108798503876
186,0.25533556938171387
187,0.255260169506073
188,0.255184531211853
189,0.2551087439060211
190,0.25503283739089966
191,0.25495678186416626
192,0.25488048791885376
193,0.25480416417121887
194,0.25472769141197205
195,0.2546510696411133
196,0.2545742690563202
197,0.25449734926223755
198,0.2544202208518982
199,0.2543429732322693
200,0.25426557660102844

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.05
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,751.8114013671875
2,130.89430236816406
3,89.82174682617188
4,63.68391418457031
5,43.99032211303711
6,31.095996856689453
7,25.97844123840332
8,23.500003814697266
9,24.296039581298828
10,25.648801803588867
11,22.16640853881836
12,20.829294204711914
13,20.836965560913086
14,19.963972091674805
15,21.25447654724121
16,19.66006851196289
17,21.987844467163086
18,19.279869079589844
19,18.898744583129883
20,19.826873779296875
21,19.9918212890625
22,19.450101852416992
23,18.81133460998535
24,18.70695686340332
25,18.14211082458496
26,17.82823944091797
27,17.536941528320312
28,17.246450424194336
29,16.960630416870117
30,16.723360061645508
31,16.592172622680664
32,16.580820083618164
33,16.574913024902344
34,16.361085891723633
35,16.14403533935547
36,16.023208618164062
37,15.987268447875977
38,16.022022247314453
39,16.006994247436523
40,15.889321327209473
41,16.001598358154297
42,15.413629531860352
43,15.293877601623535
44,14.868927955627441
45,14.334311485290527
46,15.300267219543457
47,13.76059627532959
48,13.92603588104248
49,12.991423606872559
50,14.897271156311035
51,14.650100708007812
52,13.899643898010254
53,13.451325416564941
54,12.87199592590332
55,12.015827178955078
56,11.035057067871094
57,10.162378311157227
58,9.626203536987305
59,9.77097225189209
60,8.695050239562988
61,9.787149429321289
62,10.72116756439209
63,8.866284370422363
64,4.62868595123291
65,4.122593402862549
66,3.6860625743865967
67,3.4153401851654053
68,3.203277349472046
69,2.9480788707733154
70,2.772449493408203
71,2.5858964920043945
72,2.3219501972198486
73,1.173472285270691
74,0.9639328122138977
75,0.9020412564277649
76,0.8489874005317688
77,0.7789888381958008
78,0.7139028310775757
79,0.6817778944969177
80,0.6499652862548828
81,0.617756724357605
82,0.5936900973320007
83,0.5775876045227051
84,0.5651906728744507
85,0.5547771453857422
86,0.5457246899604797
87,0.5377205014228821
88,0.5305731892585754
89,0.5241448283195496
90,0.5183328986167908
91,0.5130601525306702
92,0.5082560181617737
93,0.5038605332374573
94,0.49981173872947693
95,0.49594882130622864
96,0.4921724498271942
97,0.48903343081474304
98,0.4861673414707184
99,0.4835188686847687
100,0.481064110994339
101,0.4787884056568146
102,0.47666478157043457
103,0.4746760427951813
104,0.47280797362327576
105,0.47104790806770325
106,0.469384104013443
107,0.46780672669410706
108,0.4663074314594269
109,0.4648783802986145
110,0.4635125696659088
111,0.4622034728527069
112,0.46094557642936707
113,0.4597340226173401
114,0.4585638642311096
115,0.45743075013160706
116,0.45633068680763245
117,0.455260306596756
118,0.4542166590690613
119,0.453197181224823
120,0.45219916105270386
121,0.4512207508087158
122,0.4502596855163574
123,0.4493138790130615
124,0.4483817517757416
125,0.44746142625808716
126,0.4465518593788147
127,0.4456513226032257
128,0.44475871324539185
129,0.44387298822402954
130,0.4429936408996582
131,0.442119836807251
132,0.4412505328655243
133,0.4403845965862274
134,0.4395216703414917
135,0.4386606514453888
136,0.4378011226654053
137,0.4369426965713501
138,0.43608441948890686
139,0.43522512912750244
140,0.434364914894104
141,0.43350234627723694
142,0.43263688683509827
143,0.4317673146724701
144,0.43089258670806885
145,0.430011510848999
146,0.4291227459907532
147,0.4282253086566925
148,0.4273178279399872
149,0.4263983964920044
150,0.4254654347896576
151,0.4245169460773468
152,0.4235500693321228
153,0.42256081104278564
154,0.421546071767807
155,0.4205014109611511
156,0.4194214344024658
157,0.41830214858055115
158,0.4171343743801117
159,0.41590481996536255
160,0.4145958721637726
161,0.41318830847740173
162,0.41165217757225037
163,0.4099459648132324
164,0.408006489276886
165,0.4057483673095703
166,0.40303128957748413
167,0.39963987469673157
168,0.3951798975467682
169,0.3890199661254883
170,0.3805030584335327
171,0.3696395456790924
172,0.35881492495536804
173,0.3038080036640167
174,0.27980467677116394
175,0.2760487496852875
176,0.2734012305736542
177,0.27130740880966187
178,0.26955366134643555
179,0.26803523302078247
180,0.26669636368751526
181,0.26550835371017456
182,0.2644420862197876
183,0.263476699590683
184,0.262599378824234
185,0.2618001699447632
186,0.2610713541507721
187,0.260405957698822
188,0.259795218706131
189,0.25923264026641846
190,0.2587129771709442
191,0.25823110342025757
192,0.2577832341194153
193,0.25736480951309204
194,0.256972998380661
195,0.2566049098968506
196,0.25625842809677124
197,0.25593122839927673
198,0.2556215226650238
199,0.2553277611732483
200,0.2550479769706726

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.08
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
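Editor's note: the short sketch below is illustrative and not part of the original training log. It restates normalized_loss exactly as above so it runs standalone, then checks the two endpoints named in the docstring, i.e. that an error of 0 returns min_val and an error of 2π returns 1 for the exponent-5 loss used by this run.
import math
import torch

# normalized_loss as defined above (repeated here so the snippet runs standalone)
def normalized_loss(theta, desired_theta, exponent, min_val=0.01, delta=1):
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)

# Endpoint check for the exponent-5 ("five") loss: error 0 -> min_val, error 2*pi -> 1
theta = torch.tensor([0.0, 2 * math.pi])
desired_theta = torch.zeros(2)
print(normalized_loss(theta, desired_theta, exponent=5))
# expected output: tensor([0.0100, 1.0000])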
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,213.75233459472656
2,144.91659545898438
3,84.52567291259766
4,14.287747383117676
5,11.56564998626709
6,5.72916841506958
7,6.569018363952637
8,6.6359543800354
9,9.952731132507324
10,11.137259483337402
11,11.742608070373535
12,7.280147552490234
13,6.2738566398620605
14,6.074666976928711
15,5.9280548095703125
16,5.809908390045166
17,5.698787689208984
18,5.58311653137207
19,5.45709753036499
20,5.315023899078369
21,5.156440258026123
22,4.9358086585998535
23,4.7533698081970215
24,4.514986991882324
25,4.157462120056152
26,3.3027751445770264
27,3.014774799346924
28,2.828134536743164
29,2.6535589694976807
30,2.4275925159454346
31,1.224988579750061
32,1.0326144695281982
33,0.9040429592132568
34,0.837325930595398
35,0.7802968621253967
36,0.7260206937789917
37,0.5953264832496643
38,0.5632900595664978
39,0.5375853776931763
40,0.5147020816802979
41,0.49307960271835327
42,0.47000372409820557
43,0.4359363317489624
44,0.3894558846950531
45,0.37590739130973816
46,0.36737021803855896
47,0.36043238639831543
48,0.3544873595237732
49,0.3492850363254547
50,0.3446817994117737
51,0.3405757546424866
52,0.33688613772392273
53,0.3335525691509247
54,0.3305256962776184
55,0.3277673125267029
56,0.32524824142456055
57,0.32293397188186646
58,0.3208073377609253
59,0.3188490867614746
60,0.3170395493507385
61,0.3153652250766754
62,0.31380632519721985
63,0.3123517334461212
64,0.31098899245262146
65,0.30970969796180725
66,0.3085029721260071
67,0.3073629140853882
68,0.30627936124801636
69,0.3052476942539215
70,0.3042643070220947
71,0.3033238649368286
72,0.30242201685905457
73,0.3015551269054413
74,0.3007185757160187
75,0.2999109923839569
76,0.2991292178630829
77,0.29836970567703247
78,0.29763063788414
79,0.29691189527511597
80,0.2962109446525574
81,0.2955252230167389
82,0.29485437273979187
83,0.294197142124176
84,0.293551504611969
85,0.29291680455207825
86,0.29229286313056946
87,0.29167768359184265
88,0.29107043147087097
89,0.29047027230262756
90,0.2898769974708557
91,0.28929048776626587
92,0.2887097895145416
93,0.2881343364715576
94,0.28756362199783325
95,0.28699785470962524
96,0.28643614053726196
97,0.2858782708644867
98,0.2853240370750427
99,0.2847738265991211
100,0.2842266857624054
101,0.28368210792541504
102,0.2831399738788605
103,0.2826009690761566
104,0.28206512331962585
105,0.28153112530708313
106,0.28099873661994934
107,0.28046849370002747
108,0.2799402177333832
109,0.27941322326660156
110,0.278888076543808
111,0.2783638536930084
112,0.27784091234207153
113,0.2773183584213257
114,0.2767961919307709
115,0.27627477049827576
116,0.2757546305656433
117,0.27523505687713623
118,0.27471596002578735
119,0.2741974890232086
120,0.27368006110191345
121,0.27316227555274963
122,0.27264121174812317
123,0.2721223831176758
124,0.2716078758239746
125,0.27109768986701965
126,0.27059072256088257
127,0.27008548378944397
128,0.26957958936691284
129,0.269069105386734
130,0.26854315400123596
131,0.26795637607574463
132,0.2667262852191925
133,0.2658275365829468
134,0.26519909501075745
135,0.26457369327545166
136,0.26396873593330383
137,0.26340386271476746
138,0.2628770172595978
139,0.26237374544143677
140,0.2618885338306427
141,0.26142144203186035
142,0.26097092032432556
143,0.26053521037101746
144,0.26011255383491516
145,0.25970011949539185
146,0.2592967748641968
147,0.25890031456947327
148,0.25850993394851685
149,0.25812387466430664
150,0.25774142146110535
151,0.25736165046691895
152,0.256981760263443
153,0.256600022315979
154,0.25621405243873596
155,0.2558208405971527
156,0.2554178237915039
157,0.2550014853477478
158,0.25456923246383667
159,0.2541246712207794
160,0.2536747455596924
161,0.2532365620136261
162,0.25284162163734436
163,0.25247806310653687
164,0.25211435556411743
165,0.25173473358154297
166,0.2513110041618347
167,0.2507563829421997
168,0.25014299154281616
169,0.2501814365386963
170,0.2501217722892761
171,0.24999010562896729
172,0.24982847273349762
173,0.24965296685695648
174,0.24947039783000946
175,0.24928438663482666
176,0.24909627437591553
177,0.24890722334384918
178,0.24871771037578583
179,0.24852831661701202
180,0.2483389526605606
181,0.24814994633197784
182,0.24796098470687866
183,0.24777227640151978
184,0.24758365750312805
185,0.24739517271518707
186,0.24720735847949982
187,0.24702028930187225
188,0.24683350324630737
189,0.24664688110351562
190,0.24646036326885223
191,0.24627414345741272
192,0.24608822166919708
193,0.24590298533439636
194,0.24571843445301056
195,0.24553422629833221
196,0.24535034596920013
197,0.245167076587677
198,0.24498440325260162
199,0.2448023110628128
200,0.24462100863456726

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,100.43730926513672
2,96.93311309814453
3,89.14500427246094
4,16.861927032470703
5,6.2114176750183105
6,9.358972549438477
7,4.834885597229004
8,5.445126533508301
9,8.018295288085938
10,7.717260837554932
11,8.220857620239258
12,8.761358261108398
13,11.86694049835205
14,8.257389068603516
15,8.167379379272461
16,7.9784135818481445
17,7.780478477478027
18,7.464355945587158
19,7.343413352966309
20,7.376784324645996
21,7.297830104827881
22,7.13732385635376
23,7.109685897827148
24,7.030025005340576
25,6.798166751861572
26,6.609936237335205
27,6.644596099853516
28,6.62838077545166
29,6.435997009277344
30,6.391785621643066
31,6.423974990844727
32,6.252047061920166
33,6.118603706359863
34,6.134428977966309
35,6.014663219451904
36,5.922581672668457
37,5.935040473937988
38,5.789401054382324
39,5.7784552574157715
40,5.686356544494629
41,5.6193108558654785
42,5.561056613922119
43,5.4459943771362305
44,5.4203901290893555
45,5.370626926422119
46,5.343603610992432
47,5.662219047546387
48,5.549167156219482
49,6.598168849945068
50,5.829538345336914
51,6.1988091468811035
52,6.1207146644592285
53,5.266809463500977
54,6.596978664398193
55,6.610266208648682
56,5.96524715423584
57,6.112117290496826
58,5.745918273925781
59,5.308660507202148
60,5.324173927307129
61,5.464718818664551
62,5.423999786376953
63,5.597532749176025
64,5.491841793060303
65,5.571497440338135
66,5.510494232177734
67,5.525457859039307
68,5.578790664672852
69,5.486977577209473
70,5.637714862823486
71,5.647332191467285
72,5.562199592590332
73,5.6814799308776855
74,5.481347560882568
75,5.489483833312988
76,5.390561580657959
77,5.346833229064941
78,5.345126152038574
79,5.304792404174805
80,5.242499351501465
81,5.252551078796387
82,5.173880100250244
83,5.17497444152832
84,5.146238327026367
85,5.095942974090576
86,5.073659896850586
87,5.0636467933654785
88,5.031016826629639
89,5.002904415130615
90,4.988232612609863
91,4.967968940734863
92,4.927240371704102
93,4.880908966064453
94,4.84502649307251
95,4.813478469848633
96,4.7735466957092285
97,4.7547287940979
98,4.7235846519470215
99,4.695253372192383
100,4.66761589050293
101,4.631405353546143
102,4.601306915283203
103,4.559226989746094
104,4.5140380859375
105,4.4657511711120605
106,4.394598960876465
107,4.199791431427002
108,3.578441858291626
109,3.502746343612671
110,3.124742269515991
111,2.6868035793304443
112,2.3495395183563232
113,1.6503485441207886
114,1.3978327512741089
115,2.0735440254211426
116,2.037876844406128
117,1.96929931640625
118,1.8223828077316284
119,0.888815701007843
120,0.733945369720459
121,0.6840583682060242
122,0.6493815779685974
123,0.6209799647331238
124,0.597114622592926
125,0.5767983794212341
126,0.5591034293174744
127,0.5435870885848999
128,0.5298057794570923
129,0.5173324942588806
130,0.5059248208999634
131,0.4954167604446411
132,0.4856302738189697
133,0.47635146975517273
134,0.46737954020500183
135,0.45856526494026184
136,0.4496859014034271
137,0.44047781825065613
138,0.43099892139434814
139,0.4213165044784546
140,0.41163626313209534
141,0.4023117125034332
142,0.39364612102508545
143,0.3854162395000458
144,0.3773326873779297
145,0.3681708872318268
146,0.35411253571510315
147,0.33172619342803955
148,0.3144674301147461
149,0.3061102330684662
150,0.30091962218284607
151,0.29698294401168823
152,0.29382845759391785
153,0.2913872003555298
154,0.28941747546195984
155,0.28778699040412903
156,0.2863949239253998
157,0.28517863154411316
158,0.2841072082519531
159,0.28314682841300964
160,0.28228095173835754
161,0.2814924418926239
162,0.28076842427253723
163,0.2801024317741394
164,0.2794859707355499
165,0.2789105474948883
166,0.2783709466457367
167,0.27786359190940857
168,0.27738362550735474
169,0.2769278287887573
170,0.27649322152137756
171,0.2760770320892334
172,0.27567800879478455
173,0.2752940058708191
174,0.2749234139919281
175,0.27456581592559814
176,0.2742190957069397
177,0.2738820016384125
178,0.27355334162712097
179,0.27323243021965027
180,0.27291837334632874
181,0.2726115882396698
182,0.272310733795166
183,0.27201494574546814
184,0.2717238664627075
185,0.2714369595050812
186,0.2711535692214966
187,0.2708740532398224
188,0.27059799432754517
189,0.27032479643821716
190,0.27005380392074585
191,0.2697846591472626
192,0.26951709389686584
193,0.2692510783672333
194,0.26898691058158875
195,0.2687237560749054
196,0.26846104860305786
197,0.26819872856140137
198,0.2679362893104553
199,0.2676732838153839
200,0.267410010099411

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.125
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,95.41421508789062
2,9.637655258178711
3,2.501572370529175
4,2.2572340965270996
5,2.5352203845977783
6,2.6880838871002197
7,2.7544963359832764
8,2.744837999343872
9,2.7000091075897217
10,2.6246747970581055
11,2.4920523166656494
12,2.3269684314727783
13,2.1334569454193115
14,1.8318654298782349
15,1.207951545715332
16,0.9834228754043579
17,0.8784857988357544
18,0.809000551700592
19,0.7551987767219543
20,0.7101470232009888
21,0.6710558533668518
22,0.6355773210525513
23,0.6024050712585449
24,0.5707368850708008
25,0.5396313667297363
26,0.5083053708076477
27,0.47475922107696533
28,0.4371282756328583
29,0.4041551947593689
30,0.3802357017993927
31,0.362160861492157
32,0.34823542833328247
33,0.33688151836395264
34,0.32754936814308167
35,0.31982263922691345
36,0.3132243752479553
37,0.3075513243675232
38,0.3026157021522522
39,0.2982998490333557
40,0.29447075724601746
41,0.2910462021827698
42,0.28796643018722534
43,0.28517985343933105
44,0.28264713287353516
45,0.2803274691104889
46,0.2781863212585449
47,0.27620476484298706
48,0.27436304092407227
49,0.2726420760154724
50,0.27102431654930115
51,0.26950252056121826
52,0.26807183027267456
53,0.2667229473590851
54,0.2654457986354828
55,0.26423582434654236
56,0.2630847692489624
57,0.26198649406433105
58,0.2609376907348633
59,0.25993213057518005
60,0.25896966457366943
61,0.2580448091030121
62,0.2571549713611603
63,0.2562972903251648
64,0.2554702162742615
65,0.25466760993003845
66,0.2538895606994629
67,0.25313419103622437
68,0.2523998022079468
69,0.2516857385635376
70,0.25099021196365356
71,0.25031396746635437
72,0.24965296685695648
73,0.24900583922863007
74,0.24837158620357513
75,0.2477491945028305
76,0.2471380978822708
77,0.2465362399816513
78,0.24594222009181976
79,0.24535711109638214
80,0.24478262662887573
81,0.2442171275615692
82,0.24366053938865662
83,0.24311400949954987
84,0.2425750195980072
85,0.2420436292886734
86,0.2415219247341156
87,0.24101051688194275
88,0.24050457775592804
89,0.24000443518161774
90,0.2395101934671402
91,0.23902547359466553
92,0.23854941129684448
93,0.23808063566684723
94,0.23761539161205292
95,0.2371557205915451
96,0.23670300841331482
97,0.23626047372817993
98,0.23582488298416138
99,0.23539681732654572
100,0.234974205493927
101,0.23455804586410522
102,0.2341471016407013
103,0.23374170064926147
104,0.23334072530269623
105,0.23294417560100555
106,0.23255202174186707
107,0.23216494917869568
108,0.23178164660930634
109,0.23140251636505127
110,0.23102767765522003
111,0.2306574434041977
112,0.2302904576063156
113,0.2299269735813141
114,0.22956769168376923
115,0.22921140491962433
116,0.2288571149110794
117,0.22850479185581207
118,0.22815434634685516
119,0.22780588269233704
120,0.22746078670024872
121,0.22711844742298126
122,0.22677931189537048
123,0.22644400596618652
124,0.22611108422279358
125,0.2257806658744812
126,0.22545281052589417
127,0.22512859106063843
128,0.22480659186840057
129,0.22448676824569702
130,0.22416941821575165
131,0.22385519742965698
132,0.22354330122470856
133,0.22323349118232727
134,0.22292596101760864
135,0.22262100875377655
136,0.22231870889663696
137,0.22201837599277496
138,0.22172003984451294
139,0.2214239239692688
140,0.22112996876239777
141,0.22083812952041626
142,0.22054846584796906
143,0.22026120126247406
144,0.21997550129890442
145,0.2196919322013855
146,0.21941110491752625
147,0.21913203597068787
148,0.21885502338409424
149,0.218580961227417
150,0.21830910444259644
151,0.2180401086807251
152,0.21777303516864777
153,0.21750803291797638
154,0.21724630892276764
155,0.21698610484600067
156,0.2167278528213501
157,0.21647123992443085
158,0.21621641516685486
159,0.21596387028694153
160,0.21571239829063416
161,0.21546289324760437
162,0.21521472930908203
163,0.21496836841106415
164,0.2147236168384552
165,0.2144802212715149
166,0.21423733234405518
167,0.21399334073066711
168,0.21374747157096863
169,0.21349436044692993
170,0.21321013569831848
171,0.21281449496746063
172,0.21257062256336212
173,0.21238145232200623
174,0.21218504011631012
175,0.21199607849121094
176,0.21183383464813232
177,0.21167518198490143
178,0.2115168273448944
179,0.21135972440242767
180,0.21120436489582062
181,0.21105122566223145
182,0.21090054512023926
183,0.21075186133384705
184,0.2106052041053772
185,0.21046045422554016
186,0.2103177160024643
187,0.2101764976978302
188,0.21003688871860504
189,0.2098984569311142
190,0.20976176857948303
191,0.2096264660358429
192,0.20949237048625946
193,0.20935948193073273
194,0.2092277854681015
195,0.20909738540649414
196,0.20896796882152557
197,0.20883971452713013
198,0.20871224999427795
199,0.2085854709148407
200,0.20845991373062134

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.16
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,53.14285659790039
2,8.485350608825684
3,4.047055244445801
4,4.3218560218811035
5,11.188774108886719
6,169230544.0
7,7.47413444519043
8,104.8115234375
9,97289.515625
10,55537.1796875
11,16373.12890625
12,7047.0087890625
13,3719.85791015625
14,2398.782470703125
15,1655.3016357421875
16,1262.3192138671875
17,1046.24072265625
18,864.222900390625
19,736.0709838867188
20,661.6405639648438
21,591.5280151367188
22,523.3145751953125
23,483.91607666015625
24,453.1597595214844
25,427.9263610839844
26,406.2738037109375
27,386.4601745605469
28,366.847900390625
29,349.10198974609375
30,335.71240234375
31,324.9439697265625
32,315.41522216796875
33,306.9287414550781
34,299.3354797363281
35,292.5215148925781
36,286.3797607421875
37,280.8400573730469
38,275.8109130859375
39,271.21514892578125
40,266.9905090332031
41,263.1186828613281
42,259.5670471191406
43,256.2671203613281
44,253.18511962890625
45,250.30577087402344
46,247.60382080078125
47,245.04502868652344
48,242.6671142578125
49,240.43145751953125
50,238.3055877685547
51,236.28091430664062
52,234.33877563476562
53,232.4892120361328
54,230.72315979003906
55,229.021728515625
56,227.37222290039062
57,225.77609252929688
58,224.21893310546875
59,222.6989288330078
60,221.21461486816406
61,219.77049255371094
62,218.36122131347656
63,216.98497009277344
64,215.6407470703125
65,214.32313537597656
66,213.0328369140625
67,211.7731170654297
68,210.54345703125
69,209.34364318847656
70,208.17645263671875
71,207.0418243408203
72,205.93911743164062
73,204.8711395263672
74,203.8440399169922
75,202.83505249023438
76,201.8434295654297
77,200.8687744140625
78,199.9086151123047
79,198.9639129638672
80,198.03208923339844
81,197.11354064941406
82,196.2053680419922
83,195.30914306640625
84,194.42245483398438
85,193.54763793945312
86,192.6814727783203
87,191.8249969482422
88,190.9762725830078
89,190.13815307617188
90,189.30726623535156
91,188.48464965820312
92,187.66995239257812
93,186.86358642578125
94,186.0642547607422
95,185.27232360839844
96,184.4871368408203
97,183.70884704589844
98,182.93748474121094
99,182.17227172851562
100,181.41390991210938
101,180.66070556640625
102,179.9159698486328
103,179.1767120361328
104,178.44412231445312
105,177.7157745361328
106,176.99464416503906
107,176.28028869628906
108,175.5712890625
109,174.86798095703125
110,174.1688232421875
111,173.47544860839844
112,172.7867431640625
113,172.10333251953125
114,171.42430114746094
115,170.75015258789062
116,170.08148193359375
117,169.41793823242188
118,168.75991821289062
119,168.1067352294922
120,167.4604034423828
121,166.81764221191406
122,166.1800994873047
123,165.54652404785156
124,164.91824340820312
125,164.294189453125
126,163.67312622070312
127,163.0565185546875
128,162.443115234375
129,161.83541870117188
130,161.2306671142578
131,160.62896728515625
132,160.0313720703125
133,159.43797302246094
134,158.8486785888672
135,158.26145935058594
136,157.6769256591797
137,157.0966796875
138,156.52029418945312
139,155.94744873046875
140,155.3775634765625
141,154.81069946289062
142,154.24832153320312
143,153.68838500976562
144,153.1308135986328
145,152.57681274414062
146,152.02664184570312
147,151.47885131835938
148,150.9330291748047
149,150.3901824951172
150,149.84988403320312
151,149.31214904785156
152,148.77688598632812
153,148.25265502929688
154,147.735595703125
155,147.22531127929688
156,146.71868896484375
157,146.21725463867188
158,145.71971130371094
159,145.22613525390625
160,144.73741149902344
161,144.25096130371094
162,143.76693725585938
163,143.28530883789062
164,142.8070068359375
165,142.33102416992188
166,141.85716247558594
167,141.38552856445312
168,140.91590881347656
169,140.4486846923828
170,139.98268127441406
171,139.5181884765625
172,139.0554962158203
173,138.59500122070312
174,138.135498046875
175,137.6772003173828
176,137.22055053710938
177,136.76617431640625
178,136.31605529785156
179,135.86781311035156
180,135.42324829101562
181,134.98043823242188
182,134.54022216796875
183,134.10147094726562
184,133.66441345214844
185,133.2293243408203
186,132.7957000732422
187,132.3634033203125
188,131.93292236328125
189,131.503662109375
190,131.07534790039062
191,130.6481475830078
192,130.22186279296875
193,129.79640197753906
194,129.37232971191406
195,128.94927978515625
196,128.52684020996094
197,128.10629272460938
198,127.68697357177734
199,127.2684097290039
200,126.85059356689453

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.2
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
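All runs in this sweep share the normalized loss logged above. As a quick sanity check of its documented boundary behaviour, a minimal standalone sketch (not part of the repository; it only re-uses the definitions printed in this log) evaluates the exponent-5 loss at zero error and at the maximum 2π error:

import math
import torch

def normalized_loss(theta, desired_theta, exponent, min_val=0.01, delta=1):
    # Same formula as logged above: maps |theta - desired_theta| on [0, 2*pi] onto [min_val, 1].
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)

theta = torch.tensor([0.0, 2 * math.pi])   # zero error and maximum (2*pi) error
desired_theta = torch.zeros_like(theta)
print(normalized_loss(theta, desired_theta, exponent=5))  # expected: approximately [0.01, 1.0]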

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,30.786584854125977
2,1.681357979774475
3,1.2378876209259033
4,0.6146263480186462
5,0.5548313856124878
6,0.5137755274772644
7,0.4818757176399231
8,0.4549444317817688
9,0.4294472634792328
10,0.4036228358745575
11,0.37501829862594604
12,0.3448524475097656
13,0.32043248414993286
14,0.3036501705646515
15,0.2926180958747864
16,0.284405916929245
17,0.2779163122177124
18,0.2725856304168701
19,0.2679991126060486
20,0.264038622379303
21,0.2606087625026703
22,0.2576262652873993
23,0.2550634443759918
24,0.25279951095581055
25,0.25077083706855774
26,0.24894018471240997
27,0.24727317690849304
28,0.24574851989746094
29,0.2443452626466751
30,0.2430475354194641
31,0.24184155464172363
32,0.2407168447971344
33,0.23966296017169952
34,0.2386685162782669
35,0.23772922158241272
36,0.2368369996547699
37,0.23598597943782806
38,0.23517362773418427
39,0.2343970686197281
40,0.23365278542041779
41,0.23293717205524445
42,0.23224800825119019
43,0.2315862476825714
44,0.2309478223323822
45,0.23033185303211212
46,0.22973735630512238
47,0.22916392982006073
48,0.2286134660243988
49,0.2280857115983963
50,0.2275809943675995
51,0.22709544003009796
52,0.22663012146949768
53,0.22618259489536285
54,0.22575268149375916
55,0.2253376692533493
56,0.22493690252304077
57,0.22454865276813507
58,0.22417187690734863
59,0.2238062173128128
60,0.22345033288002014
61,0.22310476005077362
62,0.22276675701141357
63,0.2224380522966385
64,0.22211626172065735
65,0.22180162370204926
66,0.22149285674095154
67,0.22119003534317017
68,0.22089259326457977
69,0.22059887647628784
70,0.2203090786933899
71,0.22002220153808594
72,0.21974101662635803
73,0.21946243941783905
74,0.21918843686580658
75,0.21891681849956512
76,0.21864725649356842
77,0.21838174760341644
78,0.21812690794467926
79,0.21787941455841064
80,0.2176399528980255
81,0.21740728616714478
82,0.21718090772628784
83,0.21695969998836517
84,0.21674367785453796
85,0.21653267741203308
86,0.2163265496492386
87,0.2161242961883545
88,0.21592600643634796
89,0.21573171019554138
90,0.21554075181484222
91,0.21535338461399078
92,0.21516874432563782
93,0.2149873673915863
94,0.21480873227119446
95,0.2146330177783966
96,0.21445977687835693
97,0.21428894996643066
98,0.21411976218223572
99,0.2139524519443512
100,0.21378664672374725
101,0.21362271904945374
102,0.21346044540405273
103,0.21330036222934723
104,0.2131413072347641
105,0.21298328042030334
106,0.21282383799552917
107,0.21266339719295502
108,0.21250155568122864
109,0.2123381346464157
110,0.21217478811740875
111,0.2120106816291809
112,0.21184618771076202
113,0.2116815447807312
114,0.211516872048378
115,0.21135272085666656
116,0.21118858456611633
117,0.2110253870487213
118,0.21086272597312927
119,0.2107010781764984
120,0.21053968369960785
121,0.21037939190864563
122,0.21021981537342072
123,0.21006116271018982
124,0.2099035680294037
125,0.2097470760345459
126,0.20959144830703735
127,0.2094370275735855
128,0.2092839777469635
129,0.20913194119930267
130,0.20898133516311646
131,0.20883159339427948
132,0.20868340134620667
133,0.2085360735654831
134,0.20838990807533264
135,0.20824511349201202
136,0.20810116827487946
137,0.2079588919878006
138,0.2078179568052292
139,0.20767800509929657
140,0.20753930509090424
141,0.20740143954753876
142,0.2072649598121643
143,0.20712950825691223
144,0.20699545741081238
145,0.20686306059360504
146,0.20673134922981262
147,0.20660097897052765
148,0.2064712792634964
149,0.2063431292772293
150,0.20621612668037415
151,0.20609070360660553
152,0.2059663087129593
153,0.2058427929878235
154,0.20572030544281006
155,0.20559877157211304
156,0.20547857880592346
157,0.2053588181734085
158,0.2052401453256607
159,0.20512184500694275
160,0.20500479638576508
161,0.204888254404068
162,0.20477284491062164
163,0.20465780794620514
164,0.20454367995262146
165,0.20443008840084076
166,0.20431707799434662
167,0.20420458912849426
168,0.20409280061721802
169,0.20398202538490295
170,0.20387178659439087
171,0.20376236736774445
172,0.20365354418754578
173,0.20354589819908142
174,0.2034396380186081
175,0.20333422720432281
176,0.20323002338409424
177,0.20312710106372833
178,0.20302511751651764
179,0.20292435586452484
180,0.20282457768917084
181,0.2027253359556198
182,0.20262713730335236
183,0.2025294303894043
184,0.20243266224861145
185,0.20233644545078278
186,0.20224088430404663
187,0.2021460384130478
188,0.20205163955688477
189,0.20195838809013367
190,0.20186589658260345
191,0.20177403092384338
192,0.20168283581733704
193,0.20159222185611725
194,0.20150259137153625
195,0.20141348242759705
196,0.20132528245449066
197,0.2012374997138977
198,0.20115014910697937
199,0.20106373727321625
200,0.2009778916835785

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.25
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,9.433856964111328
2,2.5299301147460938
3,3.142794370651245
4,3.465233564376831
5,3.6106789112091064
6,3.592188835144043
7,3.4246888160705566
8,3.159924030303955
9,2.843125104904175
10,2.502859354019165
11,2.1569066047668457
12,1.8219718933105469
13,1.519317865371704
14,1.2742528915405273
15,1.0896445512771606
16,0.9504628777503967
17,0.8406355381011963
18,0.7525423765182495
19,0.6805659532546997
20,0.6205721497535706
21,0.5700027346611023
22,0.5271663665771484
23,0.49078914523124695
24,0.4597688317298889
25,0.43347999453544617
26,0.41160839796066284
27,0.39356037974357605
28,0.3781180679798126
29,0.3649483621120453
30,0.35368606448173523
31,0.34402111172676086
32,0.33566585183143616
33,0.328347772359848
34,0.32193684577941895
35,0.3162860572338104
36,0.3112390339374542
37,0.3067215085029602
38,0.30259305238723755
39,0.29889318346977234
40,0.29553481936454773
41,0.29249563813209534
42,0.28975072503089905
43,0.28725308179855347
44,0.28496432304382324
45,0.2828529477119446
46,0.2808634042739868
47,0.2789967358112335
48,0.27724477648735046
49,0.27560004591941833
50,0.27405259013175964
51,0.27259358763694763
52,0.2712130546569824
53,0.2699030935764313
54,0.268657922744751
55,0.26746630668640137
56,0.2663276791572571
57,0.2652367353439331
58,0.26419228315353394
59,0.2631900906562805
60,0.2622276842594147
61,0.2613007426261902
62,0.2604082524776459
63,0.25954726338386536
64,0.25871461629867554
65,0.2579105496406555
66,0.2571322023868561
67,0.2563781440258026
68,0.2556454539299011
69,0.25493475794792175
70,0.2542436420917511
71,0.2535701394081116
72,0.2529148757457733
73,0.2522762715816498
74,0.2516525387763977
75,0.25104472041130066
76,0.25045084953308105
77,0.24986939132213593
78,0.24930095672607422
79,0.24874401092529297
80,0.24819909036159515
81,0.24766623973846436
82,0.24714381992816925
83,0.24663110077381134
84,0.24612957239151
85,0.24563758075237274
86,0.24515391886234283
87,0.24467957019805908
88,0.2442140281200409
89,0.24375654757022858
90,0.24330686032772064
91,0.242864727973938
92,0.24243015050888062
93,0.24200324714183807
94,0.2415838986635208
95,0.24117112159729004
96,0.24076442420482635
97,0.24036407470703125
98,0.2399684637784958
99,0.2395772635936737
100,0.23919102549552917
101,0.23880943655967712
102,0.23843106627464294
103,0.23805592954158783
104,0.2376854121685028
105,0.23731909692287445
106,0.2369561493396759
107,0.2365967035293579
108,0.2362401932477951
109,0.2358865588903427
110,0.2355341762304306
111,0.23518341779708862
112,0.23483504354953766
113,0.23449067771434784
114,0.23415066301822662
115,0.2338140308856964
116,0.2334805130958557
117,0.23315145075321198
118,0.23282645642757416
119,0.23250454664230347
120,0.23218612372875214
121,0.23187173902988434
122,0.23156072199344635
123,0.23125240206718445
124,0.23094765841960907
125,0.230645552277565
126,0.23034662008285522
127,0.2300506979227066
128,0.22975826263427734
129,0.22946886718273163
130,0.22918203473091125
131,0.22889865934848785
132,0.22861698269844055
133,0.22833673655986786
134,0.2280593067407608
135,0.2277848720550537
136,0.2275126576423645
137,0.2272426337003708
138,0.226975679397583
139,0.22671128809452057
140,0.22644953429698944
141,0.2261902093887329
142,0.22593337297439575
143,0.22567854821681976
144,0.22542645037174225
145,0.22517727315425873
146,0.22493018209934235
147,0.2246852070093155
148,0.22444269061088562
149,0.2242031842470169
150,0.22396551072597504
151,0.22372961044311523
152,0.22349587082862854
153,0.22326447069644928
154,0.2230348289012909
155,0.22280701994895935
156,0.22258184850215912
157,0.2223588228225708
158,0.22213812172412872
159,0.22191961109638214
160,0.221703439950943
161,0.22148913145065308
162,0.22127680480480194
163,0.2210668921470642
164,0.22085902094841003
165,0.22065305709838867
166,0.2204488217830658
167,0.2202477604150772
168,0.2200484722852707
169,0.2198508232831955
170,0.21965506672859192
171,0.2194613665342331
172,0.21926961839199066
173,0.21907933056354523
174,0.21889100968837738
175,0.2187042087316513
176,0.2185191512107849
177,0.21833576261997223
178,0.2181544005870819
179,0.21797414124011993
180,0.2177950143814087
181,0.21761807799339294
182,0.21744289994239807
183,0.21726882457733154
184,0.21709583699703217
185,0.2169242948293686
186,0.21675477921962738
187,0.21658632159233093
188,0.21641886234283447
189,0.21625308692455292
190,0.21608847379684448
191,0.2159251868724823
192,0.2157626450061798
193,0.21560148894786835
194,0.21544122695922852
195,0.2152819186449051
196,0.2151239961385727
197,0.21496735513210297
198,0.21481168270111084
199,0.21465693414211273
200,0.2145029902458191

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.3
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,47 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,6.646026134490967
2,0.6921998858451843
3,0.5801647901535034
4,0.5539664030075073
5,0.5352448225021362
6,0.516973078250885
7,0.49768322706222534
8,0.47827211022377014
9,0.45960789918899536
10,0.44195565581321716
11,0.425246924161911
12,0.4094034433364868
13,0.3944760262966156
14,0.3805064260959625
15,0.3674965798854828
16,0.35542601346969604
17,0.3442697525024414
18,0.33402371406555176
19,0.3246816098690033
20,0.31616491079330444
21,0.30842363834381104
22,0.3014298379421234
23,0.29512548446655273
24,0.2894197404384613
25,0.28433334827423096
26,0.279852956533432
27,0.2758702337741852
28,0.2723054885864258
29,0.2691437005996704
30,0.26628929376602173
31,0.2636895775794983
32,0.2613082230091095
33,0.2591358721256256
34,0.25713443756103516
35,546871.1875
36,2501842176.0
37,4865546752.0
38,4806522880.0
39,4581817856.0
40,4479803392.0
41,4458418688.0
42,nan
43,nan
44,nan
45,nan
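Note the divergence in this run: after epoch 34 the loss jumps from roughly 0.257 to about 5.5e5 and then to nan, a pattern repeated in this sweep for every learning rate of 0.3 and above. A small post-processing sketch that finds the last epoch with a finite recorded loss in one of these Epoch,Loss CSVs (the file path and helper name are hypothetical, not part of the repository):

import csv
import math

def last_finite_epoch(path):
    # Returns (epoch, loss) for the last row whose loss is still finite, or None.
    last = None
    with open(path) as f:
        for row in csv.DictReader(f):
            loss = float(row["Loss"])
            if math.isfinite(loss):
                last = (int(row["Epoch"]), loss)
    return last

print(last_finite_epoch("loss_history.csv"))  # hypothetical filename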

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.4
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,32 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,2.362724542617798
2,2.392826795578003
3,2.2736353874206543
4,1.9006738662719727
5,1.394894003868103
6,0.9551928043365479
7,0.7115665674209595
8,0.5500522255897522
9,0.45250430703163147
10,0.38881200551986694
11,0.34427231550216675
12,0.31002360582351685
13,0.2798919677734375
14,0.255569726228714
15,14.465670585632324
16,99704.53125
17,963.663818359375
18,1015.0718383789062
19,975.5271606445312
20,928.832275390625
21,921.9561767578125
22,968.0391845703125
23,908.3063354492188
24,907.8488159179688
25,891.1127319335938
26,879.9981079101562
27,nan
28,nan
29,nan
30,nan

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.5
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,17 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,2.133845090866089
2,2.3031303882598877
3,1.9755902290344238
4,1.4275221824645996
5,0.941654622554779
6,0.8686284422874451
7,0.6571183800697327
8,0.6279698014259338
9,4022965248.0
10,4916342272.0
11,4916342272.0
12,4916342272.0
13,4916342272.0
14,4916342272.0
15,4916342272.0

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.6
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,15 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,2.6557106971740723
2,2.378608226776123
3,1.9489076137542725
4,1.388096809387207
5,0.9401108026504517
6,0.6727041006088257
7,2153934848.0
8,4916342272.0
9,4916342272.0
10,4916342272.0
11,4916342272.0
12,4916342272.0
13,4916342272.0

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.7
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,11 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,2.9760751724243164
2,2.6220152378082275
3,2.1410739421844482
4,1.5488426685333252
5,1.053941249847412
6,nan
7,nan
8,nan
9,nan

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.8
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,11 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,3.3278462886810303
2,2.7362890243530273
3,2.1506998538970947
4,1.5317654609680176
5,2.213674783706665
6,nan
7,nan
8,nan
9,nan

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.9
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,10 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,3.765124559402466
2,2.760067939758301
3,2.0699892044067383
4,1.454463005065918
5,nan
6,nan
7,nan
8,nan

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 1
Weight Decay: 0
Loss Function Name: five
Loss Function Exponent: 5
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def five_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=5, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,9 +0,0 @@
Epoch,Loss
0,9022.2587890625
1,4.307502746582031
2,2.659949779510498
3,1.898849606513977
4,nan
5,nan
6,nan
7,nan

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.0025
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]  # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                         / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,997.6203002929688
2,891.60498046875
3,792.384521484375
4,699.4647827148438
5,632.1798095703125
6,569.7451171875
7,513.91748046875
8,464.5724182128906
9,416.0574035644531
10,373.46307373046875
11,338.5057067871094
12,292.76019287109375
13,264.6914367675781
14,246.33180236816406
15,215.95249938964844
16,170.56326293945312
17,152.89686584472656
18,151.65560913085938
19,147.84130859375
20,132.6151885986328
21,106.1927719116211
22,83.6707992553711
23,73.62540435791016
24,72.54122924804688
25,62.63677215576172
26,63.071067810058594
27,54.98676300048828
28,51.694881439208984
29,42.68155288696289
30,42.36272048950195
31,42.05136489868164
32,41.26948547363281
33,39.32139587402344
34,39.47389602661133
35,39.981895446777344
36,41.160247802734375
37,42.251258850097656
38,40.95119094848633
39,38.07765579223633
40,36.905296325683594
41,35.187828063964844
42,35.23639678955078
43,37.2154655456543
44,36.79240798950195
45,34.37611770629883
46,33.77314758300781
47,33.56240463256836
48,33.355770111083984
49,32.858489990234375
50,36.315799713134766
51,34.758338928222656
52,35.0203857421875
53,35.225364685058594
54,35.16332244873047
55,34.92576599121094
56,34.686973571777344
57,34.509300231933594
58,34.39813995361328
59,34.343021392822266
60,34.32703399658203
61,34.332740783691406
62,34.34229278564453
63,34.34109878540039
64,34.32158279418945
65,34.287845611572266
66,34.251495361328125
67,34.22208786010742
68,34.19154357910156
69,34.13505554199219
70,34.065643310546875
71,34.006343841552734
72,33.959083557128906
73,33.91813659667969
74,33.874229431152344
75,33.80865478515625
76,33.63778305053711
77,31.760251998901367
78,31.786039352416992
79,31.786176681518555
80,31.76287841796875
81,31.71902847290039
82,31.659523010253906
83,31.590347290039062
84,31.516937255859375
85,31.44348907470703
86,31.373315811157227
87,31.308292388916016
88,31.249252319335938
89,31.1966609954834
90,31.150043487548828
91,31.108301162719727
92,31.06918716430664
93,31.021446228027344
94,32.83933639526367
95,31.007394790649414
96,31.02997398376465
97,31.052167892456055
98,31.032508850097656
99,31.001428604125977
100,30.98272705078125
101,30.973342895507812
102,30.967823028564453
103,30.96207618713379
104,30.953495025634766
105,30.940954208374023
106,30.924341201782227
107,30.904329299926758
108,30.88216209411621
109,30.859107971191406
110,30.8365478515625
111,30.815479278564453
112,30.79663848876953
113,30.779943466186523
114,30.765037536621094
115,30.750843048095703
116,30.73650550842285
117,30.721729278564453
118,30.706817626953125
119,30.69219398498535
120,30.678071975708008
121,30.664165496826172
122,30.649667739868164
123,30.633665084838867
124,30.615341186523438
125,30.593040466308594
126,30.56435775756836
127,30.52349090576172
128,30.44930076599121
129,30.4921875
130,30.823917388916016
131,31.503942489624023
132,32.437129974365234
133,33.56292724609375
134,35.21110916137695
135,32.644065856933594
136,32.01215744018555
137,31.906841278076172
138,31.555864334106445
139,31.15477752685547
140,31.122241973876953
141,31.224224090576172
142,31.729516983032227
143,31.429323196411133
144,31.475847244262695
145,31.560016632080078
146,31.65003776550293
147,31.727834701538086
148,31.78221893310547
149,31.806644439697266
150,31.798969268798828
151,31.760112762451172
152,31.693601608276367
153,31.604965209960938
154,31.50230598449707
155,31.394044876098633
156,31.287628173828125
157,31.18984603881836
158,31.10569953918457
159,31.037321090698242
160,30.98438262939453
161,30.944093704223633
162,30.911590576171875
163,30.880977630615234
164,30.846569061279297
165,30.806386947631836
166,30.7630672454834
167,30.721088409423828
168,30.68382453918457
169,30.652652740478516
170,30.627349853515625
171,30.606979370117188
172,30.590471267700195
173,30.577104568481445
174,30.5659236907959
175,30.55608558654785
176,30.54660987854004
177,30.536556243896484
178,30.52549934387207
179,30.513559341430664
180,30.50126075744629
181,30.489152908325195
182,30.47825813293457
183,30.469242095947266
184,30.462278366088867
185,30.45747184753418
186,30.4539794921875
187,30.450651168823242
188,30.446460723876953
189,30.440563201904297
190,30.433095932006836
191,30.424863815307617
192,30.41676902770996
193,30.40935707092285
194,30.4028263092041
195,30.39715003967285
196,30.39190673828125
197,30.386859893798828
198,30.381738662719727
199,30.376243591308594
200,30.37030601501465

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.005
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
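The sketch below is a minimal, standalone check (not part of the deleted files above) of the endpoint behavior stated in the normalized_loss docstring: an error of 0 should map to min_val and an error of 2π should map to 1. The function body is copied from the source listed above; only the small driver at the end is new.

    import math
    import torch

    def normalized_loss(theta, desired_theta, exponent, min_val=0.01, delta=1.0):
        error = torch.abs(theta - desired_theta)
        numerator = (error + delta) ** exponent - delta ** exponent
        denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
        return min_val + (1 - min_val) * (numerator / denominator)

    theta = torch.tensor([0.0, 2 * math.pi])  # errors of 0 and 2*pi against a zero target
    desired_theta = torch.zeros(2)
    print(normalized_loss(theta, desired_theta, exponent=4))
    # expected (up to floating-point error): tensor([0.0100, 1.0000])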

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,892.8590087890625
2,698.3191528320312
3,570.3776245117188
4,476.5622863769531
5,380.6253967285156
6,305.2460632324219
7,252.75254821777344
8,190.08253479003906
9,148.2053680419922
10,137.71133422851562
11,87.31128692626953
12,69.86664581298828
13,68.2911605834961
14,55.73902130126953
15,42.16077423095703
16,45.81683349609375
17,30.689926147460938
18,33.03388977050781
19,37.50985336303711
20,39.31280517578125
21,40.91934585571289
22,37.541080474853516
23,25.80324363708496
24,25.349077224731445
25,26.116735458374023
26,23.89807891845703
27,24.020008087158203
28,23.74786376953125
29,21.63871192932129
30,21.43347930908203
31,20.521602630615234
32,19.806589126586914
33,18.716327667236328
34,18.2205810546875
35,17.85334587097168
36,17.59905242919922
37,17.431428909301758
38,17.315353393554688
39,17.12322998046875
40,16.512434005737305
41,16.426111221313477
42,16.37677764892578
43,16.352676391601562
44,16.342693328857422
45,16.33089256286621
46,16.28411102294922
47,16.118549346923828
48,15.43973445892334
49,15.269601821899414
50,14.699694633483887
51,14.50003433227539
52,14.53187084197998
53,12.874610900878906
54,12.370750427246094
55,12.284062385559082
56,12.245644569396973
57,12.21902084350586
58,12.199322700500488
59,12.180035591125488
60,12.014286994934082
61,11.787736892700195
62,11.723413467407227
63,11.649492263793945
64,11.540241241455078
65,11.39716911315918
66,11.24026870727539
67,11.086420059204102
68,10.940152168273926
69,10.798566818237305
70,10.65703010559082
71,10.500772476196289
72,10.298833847045898
73,10.177816390991211
74,10.07143497467041
75,9.983602523803711
76,9.909494400024414
77,9.843381881713867
78,9.781015396118164
79,9.719908714294434
80,9.658785820007324
81,9.59704303741455
82,9.53458309173584
83,9.471447944641113
84,9.407761573791504
85,9.343637466430664
86,9.279317855834961
87,9.214919090270996
88,9.150506019592285
89,9.086077690124512
90,9.021574020385742
91,8.95689868927002
92,8.891914367675781
93,8.8264741897583
94,8.760379791259766
95,8.69336223602295
96,8.625228881835938
97,8.555773735046387
98,8.484535217285156
99,8.411334991455078
100,8.336674690246582
101,8.263079643249512
102,8.202085494995117
103,8.188687324523926
104,8.081355094909668
105,8.020689964294434
106,7.97079610824585
107,7.921357154846191
108,7.946727275848389
109,7.912241458892822
110,7.872714996337891
111,7.8333516120910645
112,7.794707298278809
113,7.756875038146973
114,7.719827651977539
115,7.683511257171631
116,7.6478962898254395
117,7.612988471984863
118,7.578941822052002
119,7.546220302581787
120,7.515961170196533
121,7.491048812866211
122,7.466612815856934
123,7.433409214019775
124,7.408999919891357
125,7.386017799377441
126,7.363176345825195
127,7.340090751647949
128,7.316715240478516
129,7.293026924133301
130,7.269060134887695
131,7.244910717010498
132,7.2207932472229
133,7.197107791900635
134,7.174717903137207
135,7.155032634735107
136,7.135009288787842
137,7.110218524932861
138,7.088065147399902
139,7.066854953765869
140,7.045318126678467
141,7.022775650024414
142,6.998859882354736
143,6.97316312789917
144,6.945396423339844
145,6.916000843048096
146,6.893879413604736
147,6.8827223777771
148,6.887653350830078
149,6.8826165199279785
150,6.876326084136963
151,6.870234966278076
152,6.864363670349121
153,6.858572959899902
154,6.852811813354492
155,6.8471221923828125
156,6.841633319854736
157,6.83650016784668
158,6.831880569458008
159,6.827744483947754
160,6.823757171630859
161,6.819786071777344
162,6.815982341766357
163,6.812315464019775
164,6.808562278747559
165,6.804400444030762
166,6.797222137451172
167,6.701035499572754
168,6.696537017822266
169,6.694585800170898
170,6.69248104095459
171,6.689929008483887
172,6.686838150024414
173,6.6828742027282715
174,6.676235675811768
175,6.748085021972656
176,6.679407596588135
177,6.681601524353027
178,6.68332052230835
179,6.684406280517578
180,6.684821128845215
181,6.684565544128418
182,6.683947563171387
183,6.683539867401123
184,6.683383464813232
185,6.681382179260254
186,6.676774024963379
187,6.60246467590332
188,6.707942962646484
189,6.65845251083374
190,6.695793151855469
191,6.731545448303223
192,6.762855052947998
193,6.789050579071045
194,6.809808254241943
195,6.824751377105713
196,6.833522796630859
197,6.836491584777832
198,6.837913513183594
199,6.87576150894165
200,6.828190326690674

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.01
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
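As an aside (not from the deleted files), the rows listed under "Training Cases" follow the layout [theta0, omega0, alpha0, desired_theta]; a hypothetical way to stack a few of them into a batch tensor for simulation would be:

    import torch

    training_cases = [  # [theta0, omega0, alpha0, desired_theta], values copied from the list above
        [0.5235987901687622, 0.0, 0.0, 0.0],
        [0.0, 0.0, 0.0, 6.2831854820251465],
        [1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293],
    ]
    initial_states = torch.tensor(training_cases)  # shape: [batch_size, 4]
    theta0, omega0, alpha0, desired_theta = initial_states.unbind(dim=1)
    print(initial_states.shape)  # torch.Size([3, 4])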

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,709.073974609375
2,495.8191223144531
3,341.93377685546875
4,222.57504272460938
5,139.26637268066406
6,74.83617401123047
7,53.98528289794922
8,38.16682052612305
9,27.8438777923584
10,23.520118713378906
11,22.530841827392578
12,19.72782325744629
13,17.38500213623047
14,14.899020195007324
15,14.787221908569336
16,10.725893020629883
17,9.341848373413086
18,8.074580192565918
19,7.725108623504639
20,7.458004474639893
21,6.513791084289551
22,6.004675388336182
23,5.53383731842041
24,5.154810905456543
25,4.863739967346191
26,4.55768346786499
27,4.2035627365112305
28,4.016801357269287
29,3.918255567550659
30,3.719083547592163
31,3.2709083557128906
32,2.9124770164489746
33,2.701054096221924
34,2.493518352508545
35,2.4270212650299072
36,2.3753654956817627
37,2.3193585872650146
38,2.225131034851074
39,2.1728315353393555
40,2.090550184249878
41,2.053687572479248
42,2.02075457572937
43,1.9915975332260132
44,1.9656181335449219
45,1.9419896602630615
46,1.9166289567947388
47,1.8342567682266235
48,1.8140790462493896
49,1.7414928674697876
50,1.7183444499969482
51,1.6922093629837036
52,1.656137228012085
53,1.564334511756897
54,1.5517386198043823
55,1.5408828258514404
56,1.5310156345367432
57,1.5218757390975952
58,1.5132888555526733
59,1.5051143169403076
60,1.497243046760559
61,1.4895693063735962
62,1.4819893836975098
63,1.4744113683700562
64,1.4667556285858154
65,1.4589650630950928
66,1.4509804248809814
67,1.442771077156067
68,1.4343284368515015
69,1.425663948059082
70,1.4167985916137695
71,1.4077568054199219
72,1.39854097366333
73,1.3891140222549438
74,1.3793730735778809
75,1.3691059350967407
76,1.3579398393630981
77,1.3455252647399902
78,1.331896185874939
79,1.3189271688461304
80,1.3150438070297241
81,1.3551465272903442
82,1.3525034189224243
83,1.3481627702713013
84,1.3436731100082397
85,1.3391659259796143
86,1.3346431255340576
87,1.330066204071045
88,1.3254109621047974
89,1.3206435441970825
90,1.3157306909561157
91,1.3106341361999512
92,1.305282711982727
93,1.2995468378067017
94,1.2931654453277588
95,1.285457968711853
96,1.27390718460083
97,1.241784930229187
98,1.2112523317337036
99,1.197953224182129
100,1.1868702173233032
101,1.1793657541275024
102,1.174189805984497
103,1.1699280738830566
104,1.1660338640213013
105,1.1622047424316406
106,1.1581628322601318
107,1.1534538269042969
108,1.1470062732696533
109,1.13511323928833
110,1.1066371202468872
111,1.09613835811615
112,1.0915520191192627
113,1.0877546072006226
114,1.0836739540100098
115,1.0782710313796997
116,1.069420576095581
117,1.051637887954712
118,1.0333954095840454
119,1.0285404920578003
120,1.025154948234558
121,1.0219345092773438
122,1.0183597803115845
123,1.0139137506484985
124,1.0078046321868896
125,0.998563289642334
126,0.9844892621040344
127,0.9735323190689087
128,0.9691516160964966
129,0.9641470313072205
130,0.9285060167312622
131,0.9258484840393066
132,0.9253765344619751
133,0.9247449636459351
134,0.9239556193351746
135,0.9230836033821106
136,0.9221816658973694
137,0.9212821125984192
138,0.9204072952270508
139,0.9195725917816162
140,0.9187923669815063
141,0.9180744886398315
142,0.9174267649650574
143,0.9168514609336853
144,0.9163491725921631
145,0.9159142374992371
146,0.9155377149581909
147,0.9152078032493591
148,0.914908230304718
149,0.9146233201026917
150,0.9143359661102295
151,0.9140377044677734
152,0.9137194752693176
153,0.9133816361427307
154,0.9130290150642395
155,0.912667453289032
156,0.9123040437698364
157,0.9119446873664856
158,0.9115933775901794
159,0.9112532138824463
160,0.910923182964325
161,0.9106049537658691
162,0.9102975130081177
163,0.9099986553192139
164,0.9097082614898682
165,0.9094234704971313
166,0.9091455936431885
167,0.9088707566261292
168,0.9086005091667175
169,0.9083322882652283
170,0.9080663919448853
171,0.9078022837638855
172,0.9075387120246887
173,0.9072778820991516
174,0.907016396522522
175,0.9067560434341431
176,0.9064957499504089
177,0.9062370657920837
178,0.905977725982666
179,0.9057194590568542
180,0.9054611921310425
181,0.9052043557167053
182,0.9049480557441711
183,0.9046922326087952
184,0.9044370651245117
185,0.9041823744773865
186,0.9039294123649597
187,0.9036769270896912
188,0.9034252762794495
189,0.9031746983528137
190,0.902924656867981
191,0.9026755094528198
192,0.9024269580841064
193,0.9021785259246826
194,0.9019310474395752
195,0.9016833901405334
196,0.901436448097229
197,0.9011895656585693
198,0.9009423851966858
199,0.9006948471069336
200,0.9004477262496948

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.02
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
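For reference (not part of the deleted files), the wrapper loss_fn above assumes a state trajectory of shape [batch_size, t_points, state_dim] with theta in column 0 and desired_theta in column 3; a minimal shape check with a dummy random trajectory:

    import torch

    batch_size, t_points, state_dim = 22, 1000, 4              # 22 training cases, 1000 time points
    state_traj = torch.randn(batch_size, t_points, state_dim)  # dummy trajectory, random values

    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    print(theta.shape, desired_theta.shape)  # torch.Size([22, 1000]) torch.Size([22, 1000])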

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,493.4759521484375
2,231.8644256591797
3,74.98271942138672
4,34.731101989746094
5,40.91058349609375
6,23.155973434448242
7,17.50538444519043
8,13.624457359313965
9,9.656967163085938
10,8.718084335327148
11,5.820390701293945
12,4.073130130767822
13,2.8194162845611572
14,2.123771905899048
15,1.8268510103225708
16,1.6188355684280396
17,1.4526609182357788
18,1.2036542892456055
19,1.1610134840011597
20,1.1221102476119995
21,1.111304521560669
22,1.059097170829773
23,1.0939955711364746
24,1.0240399837493896
25,0.9261047840118408
26,0.7364070415496826
27,0.7431311011314392
28,0.7233320474624634
29,0.7144321203231812
30,0.7045190930366516
31,0.6955621242523193
32,0.685667097568512
33,0.6775506138801575
34,0.6531374454498291
35,0.62312251329422
36,0.6128858327865601
37,0.6042770743370056
38,0.5960917472839355
39,0.5880089402198792
40,0.5804252028465271
41,0.5773665904998779
42,0.5167471766471863
43,0.498338907957077
44,0.4847373068332672
45,0.47214746475219727
46,0.4642353653907776
47,0.448978066444397
48,0.444645494222641
49,0.440988153219223
50,0.4376847445964813
51,0.43460506200790405
52,0.4317190945148468
53,0.42901602387428284
54,0.42648711800575256
55,0.42412224411964417
56,0.4219107925891876
57,0.4198407232761383
58,0.4179006516933441
59,0.4160793721675873
60,0.4143664836883545
61,0.41275227069854736
62,0.4112277626991272
63,0.40978431701660156
64,0.40841400623321533
65,0.40710973739624023
66,0.4058656692504883
67,0.4046768844127655
68,0.40353813767433167
69,0.4024451673030853
70,0.4013938307762146
71,0.40038156509399414
72,0.3994040787220001
73,0.3984582722187042
74,0.3975414037704468
75,0.3966507017612457
76,0.39578351378440857
77,0.3949383497238159
78,0.39411264657974243
79,0.3933041989803314
80,0.39251118898391724
81,0.39173153042793274
82,0.3909638226032257
83,0.3902067244052887
84,0.38945937156677246
85,0.3887210190296173
86,0.3879912197589874
87,0.38727036118507385
88,0.38655948638916016
89,0.3858600854873657
90,0.38517510890960693
91,0.3845070004463196
92,0.3838585615158081
93,0.3832317590713501
94,0.38262757658958435
95,0.3820451498031616
96,0.38148272037506104
97,0.38093799352645874
98,0.3804081976413727
99,0.3798912763595581
100,0.37938645482063293
101,0.37833061814308167
102,0.37949138879776
103,0.3789972960948944
104,0.37853196263313293
105,0.3780079782009125
106,0.3799726665019989
107,0.3797537386417389
108,0.37934213876724243
109,0.3789252042770386
110,0.3785046935081482
111,0.3780812621116638
112,0.3776549994945526
113,0.3772265315055847
114,0.3767963945865631
115,0.376365065574646
116,0.37593284249305725
117,0.37550023198127747
118,0.37506720423698425
119,0.3746345341205597
120,0.37420201301574707
121,0.37377023696899414
122,0.3733391761779785
123,0.3729095757007599
124,0.3724810779094696
125,0.37205418944358826
126,0.3716287314891815
127,0.3712047338485718
128,0.37078243494033813
129,0.37036195397377014
130,0.3699432611465454
131,0.36952653527259827
132,0.36911168694496155
133,0.3686988055706024
134,0.36828792095184326
135,0.367879182100296
136,0.36747244000434875
137,0.3670676648616791
138,0.3666650950908661
139,0.3662647008895874
140,0.3658662438392639
141,0.36546987295150757
142,0.3650754392147064
143,0.36468300223350525
144,0.3642923831939697
145,0.36390364170074463
146,0.36351659893989563
147,0.36313116550445557
148,0.3627473711967468
149,0.36236515641212463
150,0.3619844317436218
151,0.3616049289703369
152,0.36122655868530273
153,0.360848993062973
154,0.3604722023010254
155,0.3600959777832031
156,0.35971975326538086
157,0.35934358835220337
158,0.3589669167995453
159,0.3585893213748932
160,0.3582102358341217
161,0.3578289747238159
162,0.3574448227882385
163,0.35705697536468506
164,0.3566639721393585
165,0.3562643229961395
166,0.35585635900497437
167,0.35543739795684814
168,0.35500410199165344
169,0.35455214977264404
170,0.35407522320747375
171,0.35356444120407104
172,0.35300639271736145
173,0.35237836837768555
174,0.3516445457935333
175,0.3507348895072937
176,0.3495071828365326
177,0.34760916233062744
178,0.3438992500305176
179,0.33222779631614685
180,0.3015848398208618
181,0.3012305200099945
182,0.30088508129119873
183,0.30004850029945374
184,0.2983735203742981
185,0.29803985357284546
186,0.29748770594596863
187,0.2976519465446472
188,0.2999178171157837
189,0.3013850450515747
190,0.3018149435520172
191,0.3014383912086487
192,0.30032309889793396
193,0.2984141409397125
194,0.2959742844104767
195,0.29493051767349243
196,0.29480159282684326
197,0.2946833074092865
198,0.29449525475502014
199,0.29426562786102295
200,0.29401499032974243

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.04
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
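A hypothetical sketch (not from the deleted files) of how the hyperparameters listed above could be wired up. Only the learning rate, weight decay, checkpoint path, and time grid come from the text; the optimizer choice (Adam) and the assumption that the checkpoint deserializes to a full nn.Module are assumptions.

    import torch

    controller = torch.load(
        "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth"
    )  # assumed to deserialize to an nn.Module
    optimizer = torch.optim.Adam(controller.parameters(), lr=0.04, weight_decay=0)  # Adam is an assumption
    t = torch.linspace(0, 10, 1000)  # Time Span: 0 to 10, Points: 1000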

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,253.33795166015625
2,45.80424499511719
3,25.825145721435547
4,16.4755916595459
5,9.410402297973633
6,3.9728331565856934
7,1.800004482269287
8,1.3182741403579712
9,1.1249911785125732
10,1.0045852661132812
11,0.9323318600654602
12,0.8709807991981506
13,0.6613484025001526
14,0.590243935585022
15,0.5436501502990723
16,0.4970184564590454
17,0.47161486744880676
18,0.4561002254486084
19,0.4436229467391968
20,0.43597620725631714
21,0.42784276604652405
22,0.41964825987815857
23,0.41253232955932617
24,0.40620720386505127
25,0.40055468678474426
26,0.3952847421169281
27,0.390365868806839
28,0.385780930519104
29,0.3814968466758728
30,0.3774702250957489
31,0.37366336584091187
32,0.37004172801971436
33,0.36657091975212097
34,0.3632228672504425
35,0.3599718511104584
36,0.35679373145103455
37,0.3536655306816101
38,0.35055750608444214
39,0.34745293855667114
40,0.3443319499492645
41,0.341170072555542
42,0.33794111013412476
43,0.3346177339553833
44,0.3311908543109894
45,0.3276398181915283
46,0.32393792271614075
47,0.32005396485328674
48,0.31597650051116943
49,0.311715692281723
50,0.30731064081192017
51,0.30287235975265503
52,0.2982768416404724
53,0.2933236062526703
54,0.28700676560401917
55,0.2744968831539154
56,0.2504507899284363
57,0.24037237465381622
58,0.23697751760482788
59,0.23493534326553345
60,0.23335000872612
61,0.23200269043445587
62,0.23080512881278992
63,0.229712575674057
64,0.228705033659935
65,0.22776801884174347
66,0.2268800437450409
67,0.22603340446949005
68,0.22522996366024017
69,0.22446167469024658
70,0.22372108697891235
71,0.22301088273525238
72,0.22233040630817413
73,0.22167536616325378
74,0.22104525566101074
75,0.2204381823539734
76,0.21985098719596863
77,0.21927841007709503
78,0.21871766448020935
79,0.21816550195217133
80,0.21761885285377502
81,0.217072993516922
82,0.21652279794216156
83,0.21596549451351166
84,0.21540065109729767
85,0.21483899652957916
86,0.2143053114414215
87,0.21380974352359772
88,0.21335522830486298
89,0.2129397690296173
90,0.2125503420829773
91,0.2121739238500595
92,0.21180400252342224
93,0.21143756806850433
94,0.21107426285743713
95,0.21071504056453705
96,0.21035990118980408
97,0.21000896394252777
98,0.20966216921806335
99,0.20931969583034515
100,0.20898093283176422
101,0.2086455076932907
102,0.2083137035369873
103,0.2079847753047943
104,0.20765823125839233
105,0.20733380317687988
106,0.20701144635677338
107,0.2066904902458191
108,0.2063707858324051
109,0.20605218410491943
110,0.20573419332504272
111,0.2054162621498108
112,0.20509827136993408
113,0.2047804445028305
114,0.2044622302055359
115,0.20414365828037262
116,0.20382466912269592
117,0.20350508391857147
118,0.20318488776683807
119,0.20286431908607483
120,0.2025442272424698
121,0.2022247016429901
122,0.2019062489271164
123,0.20158982276916504
124,0.20127613842487335
125,0.20096565783023834
126,0.20065905153751373
127,0.20035697519779205
128,0.2000596970319748
129,0.19976764917373657
130,0.19948124885559082
131,0.19920064508914948
132,0.19892551004886627
133,0.19865578413009644
134,0.19839173555374146
135,0.19813266396522522
136,0.1978784203529358
137,0.19762872159481049
138,0.19738328456878662
139,0.1971416175365448
140,0.19690367579460144
141,0.19666928052902222
142,0.19643785059452057
143,0.19620905816555023
144,0.1959826946258545
145,0.1957588940858841
146,0.1955370157957077
147,0.19531691074371338
148,0.1950983703136444
149,0.1948813945055008
150,0.19466543197631836
151,0.1944502592086792
152,0.19423557817935944
153,0.19402137398719788
154,0.19380685687065125
155,0.19359144568443298
156,0.19337455928325653
157,0.19315576553344727
158,0.19293376803398132
159,0.19270721077919006
160,0.19247455894947052
161,0.19223438203334808
162,0.19198423624038696
163,0.19172175228595734
164,0.19144487380981445
165,0.19115254282951355
166,0.19084583222866058
167,0.19053016602993011
168,0.19021639227867126
169,0.18991774320602417
170,0.18964335322380066
171,0.18939486145973206
172,0.1891680359840393
173,0.18895632028579712
174,0.18875430524349213
175,0.1885584592819214
176,0.1883668154478073
177,0.18817731738090515
178,0.18798872828483582
179,0.1877996027469635
180,0.18760891258716583
181,0.1874145269393921
182,0.18721400201320648
183,0.18700382113456726
184,0.18677860498428345
185,0.18652983009815216
186,0.18624606728553772
187,0.18591801822185516
188,0.18555717170238495
189,0.18522000312805176
190,0.1849629431962967
191,0.1847710907459259
192,0.18460716307163239
193,0.1844528317451477
194,0.1843007504940033
195,0.18414883315563202
196,0.18399596214294434
197,0.1838420033454895
198,0.18368744850158691
199,0.18353231251239777
200,0.1833767145872116

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.05
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
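For reference, a minimal sanity check of the normalized_loss mapping documented above (a sketch only: it assumes torch and math are imported and normalized_loss is defined exactly as shown; the expected values follow from the formula, not from this log):
import math
import torch

# Endpoint check implied by the docstring: error = 0 -> min_val, error = 2*pi -> 1.
at_zero = normalized_loss(torch.tensor(0.0), torch.tensor(0.0), exponent=4)
at_two_pi = normalized_loss(torch.tensor(2 * math.pi), torch.tensor(0.0), exponent=4)
print(at_zero.item())    # ~0.01 (min_val)
print(at_two_pi.item())  # ~1.0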

View File

@@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,168.8037567138672
2,37.446983337402344
3,15.201050758361816
4,6.206844329833984
5,1.900891900062561
6,1.2928937673568726
7,0.53959721326828
8,0.39234036207199097
9,0.3335712254047394
10,0.303561270236969
11,0.29242849349975586
12,0.2858237624168396
13,0.2743768095970154
14,0.2610970139503479
15,0.23385579884052277
16,0.2145366668701172
17,0.2070813626050949
18,0.20200279355049133
19,0.19795219600200653
20,0.19437052309513092
21,0.18969160318374634
22,0.18701449036598206
23,0.18504182994365692
24,0.183305025100708
25,0.18177363276481628
26,0.18041014671325684
27,0.17918525636196136
28,0.1780770868062973
29,0.17706739902496338
30,0.17614296078681946
31,0.17529171705245972
32,0.17450518906116486
33,0.17377537488937378
34,0.17309515178203583
35,0.17245927453041077
36,0.17186272144317627
37,0.17130157351493835
38,0.17077195644378662
39,0.17027027904987335
40,0.16979408264160156
41,0.16934029757976532
42,0.1689072549343109
43,0.16849283874034882
44,0.16809505224227905
45,0.1677125096321106
46,0.16734285652637482
47,0.1669856160879135
48,0.16663943231105804
49,0.16630303859710693
50,0.1659761220216751
51,0.16565768420696259
52,0.16534684598445892
53,0.1650434285402298
54,0.16474653780460358
55,0.16445572674274445
56,0.16417063772678375
57,0.1638907790184021
58,0.16361567378044128
59,0.16334481537342072
60,0.1630783975124359
61,0.16281579434871674
62,0.1625569611787796
63,0.1623014211654663
64,0.16204912960529327
65,0.16179987788200378
66,0.16155338287353516
67,0.16130955517292023
68,0.1610681116580963
69,0.16082876920700073
70,0.16059139370918274
71,0.16035576164722443
72,0.16012166440486908
73,0.15988866984844208
74,0.15965639054775238
75,0.15942464768886566
76,0.1591930389404297
77,0.15896111726760864
78,0.15872836112976074
79,0.15849418938159943
80,0.15825818479061127
81,0.15801993012428284
82,0.1577788144350052
83,0.15753473341464996
84,0.1572876274585724
85,0.15703721344470978
86,0.15678274631500244
87,0.15652316808700562
88,0.15625640749931335
89,0.15597723424434662
90,0.15567433834075928
91,0.1553262621164322
92,0.154906764626503
93,0.1544761061668396
94,0.15419036149978638
95,0.15401294827461243
96,0.15387068688869476
97,0.1537317931652069
98,0.15358847379684448
99,0.15344171226024628
100,0.15329362452030182
101,0.153145432472229
102,0.15299825370311737
103,0.15285244584083557
104,0.1527082920074463
105,0.15256595611572266
106,0.1524258702993393
107,0.15228796005249023
108,0.15215222537517548
109,0.15201859176158905
110,0.1518874317407608
111,0.15175843238830566
112,0.15163178741931915
113,0.15150722861289978
114,0.15138481557369232
115,0.1512645035982132
116,0.15114623308181763
117,0.1510297805070877
118,0.1509149819612503
119,0.15080200135707855
120,0.15069064497947693
121,0.15058085322380066
122,0.1504722386598587
123,0.15036490559577942
124,0.15025866031646729
125,0.1501535177230835
126,0.15004925429821014
127,0.14994587004184723
128,0.14984329044818878
129,0.1497412621974945
130,0.14963987469673157
131,0.14953893423080444
132,0.14943856000900269
133,0.149338498711586
134,0.14923866093158722
135,0.14913934469223022
136,0.14904026687145233
137,0.14894142746925354
138,0.14884281158447266
139,0.14874443411827087
140,0.1486465334892273
141,0.14854909479618073
142,0.14845190942287445
143,0.1483551263809204
144,0.14825879037380219
145,0.14816325902938843
146,0.14806832373142242
147,0.14797411859035492
148,0.1478807032108307
149,0.14778819680213928
150,0.14769680798053741
151,0.1476065218448639
152,0.14751732349395752
153,0.1474292278289795
154,0.14734241366386414
155,0.14725685119628906
156,0.14717264473438263
157,0.14708977937698364
158,0.14700815081596375
159,0.14692777395248413
160,0.14684851467609406
161,0.14677031338214874
162,0.14669303596019745
163,0.14661668241024017
164,0.14654117822647095
165,0.14646634459495544
166,0.14639215171337128
167,0.14631864428520203
168,0.14624586701393127
169,0.14617347717285156
170,0.14610148966312408
171,0.14602980017662048
172,0.14595860242843628
173,0.14588791131973267
174,0.14581742882728577
175,0.14574715495109558
176,0.14567704498767853
177,0.14560718834400177
178,0.14553765952587128
179,0.14546822011470795
180,0.14539891481399536
181,0.14532974362373352
182,0.14526067674160004
183,0.1451919674873352
184,0.14512339234352112
185,0.1450549215078354
186,0.14498651027679443
187,0.14491812884807587
188,0.144849956035614
189,0.14478208124637604
190,0.14471425116062164
191,0.14464643597602844
192,0.14457865059375763
193,0.1445109248161316
194,0.14444345235824585
195,0.1443759948015213
196,0.14430856704711914
197,0.14424116909503937
198,0.14417381584644318
199,0.1441066712141037
200,0.14403969049453735
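Each per-run loss history in this commit uses this same two-column Epoch,Loss CSV layout. A minimal way to inspect one (a sketch; pandas is not part of the training code, and "loss_history.csv" is a placeholder for the actual file path):
import pandas as pd

history = pd.read_csv("loss_history.csv")  # columns: Epoch, Loss
print(history["Loss"].iloc[0])   # initial loss (epoch 0)
print(history["Loss"].iloc[-1])  # final loss (epoch 200)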

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.08
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,55.68273162841797
2,9.872925758361816
3,1.963661551475525
4,0.9787135124206543
5,0.7916083335876465
6,0.4721930921077728
7,0.42302462458610535
8,0.3919275104999542
9,0.36464861035346985
10,0.3468245267868042
11,0.33433470129966736
12,0.32403257489204407
13,0.3147295415401459
14,0.3060360848903656
15,0.29778382182121277
16,0.28982260823249817
17,0.2820647656917572
18,0.27448129653930664
19,0.26704537868499756
20,0.2596017122268677
21,0.2515541613101959
22,0.24218204617500305
23,0.2301894724369049
24,0.2158924639225006
25,0.20453421771526337
26,0.19752803444862366
27,0.1927802562713623
28,0.1891675740480423
29,0.1861967146396637
30,0.18366797268390656
31,0.18144959211349487
32,0.1794964224100113
33,0.17777636647224426
34,0.17623917758464813
35,0.17485374212265015
36,0.1735938936471939
37,0.17243877053260803
38,0.17137150466442108
39,0.1703803688287735
40,0.16945573687553406
41,0.1685897260904312
42,0.16777782142162323
43,0.1670132428407669
44,0.16629073023796082
45,0.16560514271259308
46,0.1649521440267563
47,0.16432896256446838
48,0.1637326180934906
49,0.1631588190793991
50,0.16260544955730438
51,0.16207125782966614
52,0.16155461966991425
53,0.16105377674102783
54,0.1605674922466278
55,0.1600952297449112
56,0.15963557362556458
57,0.15918754041194916
58,0.1587504744529724
59,0.1583232581615448
60,0.15790531039237976
61,0.15749642252922058
62,0.15709561109542847
63,0.1567022055387497
64,0.15631519258022308
65,0.15593452751636505
66,0.15556052327156067
67,0.15519264340400696
68,0.15483036637306213
69,0.15447358787059784
70,0.15412193536758423
71,0.15377554297447205
72,0.1534338891506195
73,0.15309658646583557
74,0.15276409685611725
75,0.1524360477924347
76,0.1521124392747879
77,0.15179307758808136
78,0.15147767961025238
79,0.1511661559343338
80,0.15085995197296143
81,0.1505594551563263
82,0.1502632051706314
83,0.1499709188938141
84,0.14968234300613403
85,0.1493975669145584
86,0.14911627769470215
87,0.14883846044540405
88,0.14856404066085815
89,0.14829295873641968
90,0.1480255275964737
91,0.14776113629341125
92,0.1474996954202652
93,0.14724165201187134
94,0.14698658883571625
95,0.1467345654964447
96,0.146484836935997
97,0.1462372988462448
98,0.14599229395389557
99,0.14574968814849854
100,0.14550957083702087
101,0.14527229964733124
102,0.1450374275445938
103,0.1448051929473877
104,0.14457577466964722
105,0.14434877038002014
106,0.1441236287355423
107,0.14390061795711517
108,0.14367987215518951
109,0.14346177875995636
110,0.14324592053890228
111,0.14303214848041534
112,0.14282050728797913
113,0.1426113247871399
114,0.14240434765815735
115,0.1421993225812912
116,0.1419966220855713
117,0.1417957991361618
118,0.14159731566905975
119,0.14140087366104126
120,0.14120642840862274
121,0.14101392030715942
122,0.1408233791589737
123,0.14063473045825958
124,0.14044782519340515
125,0.1402626931667328
126,0.14007918536663055
127,0.1398979276418686
128,0.1397186666727066
129,0.13954110443592072
130,0.13936544954776764
131,0.1391913741827011
132,0.13901928067207336
133,0.13884882628917694
134,0.1386798620223999
135,0.13851258158683777
136,0.1383471041917801
137,0.13818322122097015
138,0.13802076876163483
139,0.13785958290100098
140,0.13770020008087158
141,0.13754227757453918
142,0.13738587498664856
143,0.13723087310791016
144,0.137077197432518
145,0.13692516088485718
146,0.1367741972208023
147,0.13662445545196533
148,0.13647589087486267
149,0.13632865250110626
150,0.13618281483650208
151,0.13603803515434265
152,0.1358942985534668
153,0.13575167953968048
154,0.13561028242111206
155,0.1354701817035675
156,0.13533125817775726
157,0.13519375026226044
158,0.1350572407245636
159,0.13492171466350555
160,0.13478708267211914
161,0.13465379178524017
162,0.13452289998531342
163,0.13439339399337769
164,0.13426481187343597
165,0.1341373473405838
166,0.13401108980178833
167,0.13388602435588837
168,0.13376210629940033
169,0.1336398869752884
170,0.13351888954639435
171,0.1333991289138794
172,0.13328048586845398
173,0.13316282629966736
174,0.13304634392261505
175,0.13293097913265228
176,0.13281670212745667
177,0.13270321488380432
178,0.13259081542491913
179,0.13247935473918915
180,0.13236908614635468
181,0.13225960731506348
182,0.13215100765228271
183,0.13204315304756165
184,0.13193675875663757
185,0.1318313479423523
186,0.13172699511051178
187,0.13162346184253693
188,0.13152112066745758
189,0.13141986727714539
190,0.1313198059797287
191,0.1312207281589508
192,0.1311229020357132
193,0.13102643191814423
194,0.13093139231204987
195,0.1308375895023346
196,0.1307452768087387
197,0.13065454363822937
198,0.13056552410125732
199,0.13047799468040466
200,0.13039186596870422

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,46.96703338623047
2,215.8382568359375
3,4.094510078430176
4,1.8183082342147827
5,1.04926598072052
6,0.6929357051849365
7,0.6246272921562195
8,0.5809897184371948
9,0.5476904511451721
10,0.5209845304489136
11,0.49953627586364746
12,0.48101580142974854
13,0.4639415442943573
14,0.4483540952205658
15,0.43413591384887695
16,0.421217679977417
17,0.40988436341285706
18,0.4000408947467804
19,0.3915589153766632
20,0.38419127464294434
21,0.3777240216732025
22,0.37203750014305115
23,0.3669728636741638
24,0.3623976707458496
25,0.35819607973098755
26,0.3543192148208618
27,0.3507266640663147
28,0.3474043905735016
29,0.34432777762413025
30,0.34145602583885193
31,0.3387616276741028
32,0.3362254202365875
33,0.3338276147842407
34,0.3315518796443939
35,0.3293822407722473
36,0.3273060917854309
37,0.3253139555454254
38,0.32339656352996826
39,0.3215467631816864
40,0.3197569251060486
41,0.31802159547805786
42,0.316334992647171
43,0.3146916925907135
44,0.31308823823928833
45,0.31151989102363586
46,0.30998361110687256
47,0.3084765374660492
48,0.3069947063922882
49,0.30553609132766724
50,0.30409857630729675
51,0.3026788532733917
52,0.30127477645874023
53,0.29988518357276917
54,0.29850825667381287
55,0.2971433401107788
56,0.29578885436058044
57,0.29444408416748047
58,0.2931082546710968
59,0.2917797267436981
60,0.2904578149318695
61,0.28914275765419006
62,0.28783300518989563
63,0.286528080701828
64,0.2852277457714081
65,0.2839314043521881
66,0.282638818025589
67,0.28134945034980774
68,0.2800632119178772
69,0.2787805497646332
70,0.27750059962272644
71,0.2762226164340973
72,0.274946928024292
73,0.2736726999282837
74,0.27239856123924255
75,0.27112266421318054
76,0.26984456181526184
77,0.2685655653476715
78,0.26728516817092896
79,0.2660021185874939
80,0.2647096812725067
81,0.26340290904045105
82,0.2620942294597626
83,0.26078423857688904
84,0.259472519159317
85,0.25816041231155396
86,0.2568482458591461
87,0.2555355727672577
88,0.25422364473342896
89,0.25291234254837036
90,0.2516021728515625
91,0.2502938210964203
92,0.24898703396320343
93,0.24768275022506714
94,0.2463807314634323
95,0.24508239328861237
96,0.24378810822963715
97,0.24249695241451263
98,0.2412099391222
99,0.23992682993412018
100,0.23864810168743134
101,0.23737378418445587
102,0.23610378801822662
103,0.23483656346797943
104,0.23357287049293518
105,0.23231376707553864
106,0.23105895519256592
107,0.2298084795475006
108,0.2285614311695099
109,0.22731627523899078
110,0.22607190907001495
111,0.2248329222202301
112,0.2236006110906601
113,0.22237347066402435
114,0.22115246951580048
115,0.21993736922740936
116,0.21872633695602417
117,0.2175176441669464
118,0.21631740033626556
119,0.2151271104812622
120,0.2139468491077423
121,0.2127772569656372
122,0.21161945164203644
123,0.2104736566543579
124,0.2093401700258255
125,0.20821693539619446
126,0.2071075737476349
127,0.20601247251033783
128,0.20493170619010925
129,0.20386061072349548
130,0.20280055701732635
131,0.20175863802433014
132,0.20073530077934265
133,0.19973139464855194
134,0.19874787330627441
135,0.19778387248516083
136,0.1968403309583664
137,0.1959167867898941
138,0.19501352310180664
139,0.19413042068481445
140,0.19326692819595337
141,0.1924235224723816
142,0.19159796833992004
143,0.19079041481018066
144,0.19000135362148285
145,0.18922974169254303
146,0.1884726583957672
147,0.18772822618484497
148,0.1869969218969345
149,0.18627792596817017
150,0.18557040393352509
151,0.18487697839736938
152,0.18419566750526428
153,0.18352806568145752
154,0.1828753799200058
155,0.18223674595355988
156,0.18161168694496155
157,0.18100093305110931
158,0.18040356040000916
159,0.17981959879398346
160,0.1792488396167755
161,0.1786903738975525
162,0.17814412713050842
163,0.17760956287384033
164,0.17708641290664673
165,0.1765754669904709
166,0.17607606947422028
167,0.17558734118938446
168,0.1751091331243515
169,0.17464062571525574
170,0.17418168485164642
171,0.17373202741146088
172,0.17329108715057373
173,0.17285823822021484
174,0.17243355512619019
175,0.17201721668243408
176,0.1716080754995346
177,0.17120610177516937
178,0.1708107888698578
179,0.17042167484760284
180,0.17003877460956573
181,0.1696620136499405
182,0.16929090023040771
183,0.16892534494400024
184,0.16856539249420166
185,0.16821065545082092
186,0.16786113381385803
187,0.1675170511007309
188,0.16717761754989624
189,0.1668427586555481
190,0.16651247441768646
191,0.1661866009235382
192,0.16586506366729736
193,0.16554787755012512
194,0.16523492336273193
195,0.1649257242679596
196,0.16462019085884094
197,0.16431882977485657
198,0.16402101516723633
199,0.1637265682220459
200,0.16343551874160767

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.125
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,25.416610717773438
2,12.460535049438477
3,3.301673412322998
4,2.035637140274048
5,1.9072792530059814
6,1.8045721054077148
7,1.2616894245147705
8,1.383488416671753
9,1.3897730112075806
10,1.327128291130066
11,0.5599731802940369
12,0.4427085518836975
13,0.35550200939178467
14,0.32063814997673035
15,0.2932041883468628
16,0.27575379610061646
17,0.2632509469985962
18,0.2566462755203247
19,0.2506917417049408
20,0.2465008944272995
21,0.2448531985282898
22,0.25037911534309387
23,0.257413387298584
24,0.24231451749801636
25,0.23797905445098877
26,0.23620785772800446
27,0.23359674215316772
28,0.22942906618118286
29,0.22509388625621796
30,0.22137677669525146
31,0.21825669705867767
32,0.2156987488269806
33,0.21356263756752014
34,0.21173253655433655
35,0.2101239562034607
36,0.20868539810180664
37,0.20737895369529724
38,0.206176295876503
39,0.20505648851394653
40,0.20400212705135345
41,0.203004390001297
42,0.20205388963222504
43,0.20114275813102722
44,0.20026379823684692
45,0.1994129717350006
46,0.19858486950397491
47,0.19777712225914001
48,0.19698774814605713
49,0.19621434807777405
50,0.19545570015907288
51,0.19470971822738647
52,0.1939750462770462
53,0.19325202703475952
54,0.192539244890213
55,0.1918366551399231
56,0.19114284217357635
57,0.1904570311307907
58,0.18978029489517212
59,0.18911223113536835
60,0.18845421075820923
61,0.1878041923046112
62,0.18716353178024292
63,0.18653032183647156
64,0.1859055757522583
65,0.1852886825799942
66,0.1846802532672882
67,0.18407940864562988
68,0.1834857016801834
69,0.18289890885353088
70,0.18231847882270813
71,0.18174611032009125
72,0.18118013441562653
73,0.18062226474285126
74,0.18007032573223114
75,0.17952516674995422
76,0.17898808419704437
77,0.17845849692821503
78,0.17793715000152588
79,0.17742258310317993
80,0.17691533267498016
81,0.17641441524028778
82,0.17591972649097443
83,0.1754307895898819
84,0.1749468892812729
85,0.1744685024023056
86,0.17399463057518005
87,0.17352597415447235
88,0.17306190729141235
89,0.1726016104221344
90,0.17214526236057281
91,0.17169179022312164
92,0.17124241590499878
93,0.1707962155342102
94,0.17035266757011414
95,0.1699114441871643
96,0.16947190463542938
97,0.16903504729270935
98,0.1685996651649475
99,0.1681666523218155
100,0.16773536801338196
101,0.16730543971061707
102,0.1668776422739029
103,0.16645094752311707
104,0.16602490842342377
105,0.16559816896915436
106,0.1651712954044342
107,0.16474437713623047
108,0.16431687772274017
109,0.16388902068138123
110,0.1634603589773178
111,0.16303059458732605
112,0.16259914636611938
113,0.16216568648815155
114,0.16172975301742554
115,0.16129212081432343
116,0.16085131466388702
117,0.16040660440921783
118,0.15995921194553375
119,0.15950998663902283
120,0.15906113386154175
121,0.15861767530441284
122,0.1581854522228241
123,0.1577717661857605
124,0.15738065540790558
125,0.15701137483119965
126,0.15666204690933228
127,0.1563282310962677
128,0.1560068428516388
129,0.15569107234477997
130,0.15537510812282562
131,0.1550557017326355
132,0.1547318994998932
133,0.15440315008163452
134,0.15407121181488037
135,0.15373623371124268
136,0.15340030193328857
137,0.15306580066680908
138,0.15273365378379822
139,0.15240582823753357
140,0.15208397805690765
141,0.1517682820558548
142,0.15145832300186157
143,0.15115343034267426
144,0.1508520245552063
145,0.15055416524410248
146,0.15025962889194489
147,0.14996697008609772
148,0.1496761590242386
149,0.14938677847385406
150,0.14909863471984863
151,0.1488126963376999
152,0.1485283076763153
153,0.14824539422988892
154,0.14796419441699982
155,0.14768461883068085
156,0.1474074423313141
157,0.14713236689567566
158,0.146859809756279
159,0.14658954739570618
160,0.1463223248720169
161,0.14605723321437836
162,0.14579488337039948
163,0.1455349326133728
164,0.14527782797813416
165,0.1450224667787552
166,0.14476902782917023
167,0.14451739192008972
168,0.14426788687705994
169,0.14401759207248688
170,0.14376720786094666
171,0.14351677894592285
172,0.14326635003089905
173,0.14301647245883942
174,0.1427665799856186
175,0.14251752197742462
176,0.14226900041103363
177,0.14202111959457397
178,0.14177405834197998
179,0.14152806997299194
180,0.14128316938877106
181,0.14103861153125763
182,0.14079482853412628
183,0.14055180549621582
184,0.1403103470802307
185,0.1400698870420456
186,0.13983117043972015
187,0.13959400355815887
188,0.1393582820892334
189,0.1391250044107437
190,0.13889408111572266
191,0.13866513967514038
192,0.13843762874603271
193,0.1382121741771698
194,0.1379885971546173
195,0.13776741921901703
196,0.13754916191101074
197,0.13733389973640442
198,0.13712115585803986
199,0.13691113889217377
200,0.13670364022254944

View File

@@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.16
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
    return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
    """
    Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
    to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
    a shift 'delta' is added.
    The loss is given by:
        loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
                                           / ((2π + delta)^exponent - delta^exponent) )
    so that:
        - When error = 0: loss = min_val
        - When error = 2π: loss = 1
    """
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,17.944499969482422
2,2.47705340385437
3,0.484020471572876
4,0.24442158639431
5,0.2179873287677765
6,0.2021477073431015
7,0.19135542213916779
8,0.185320183634758
9,0.18135514855384827
10,0.17861361801624298
11,0.17687556147575378
12,0.1755312979221344
13,0.17443758249282837
14,0.1735261231660843
15,0.17275245487689972
16,0.17208434641361237
17,0.1714925915002823
18,0.17095743119716644
19,0.17046169936656952
20,0.16999274492263794
21,0.16954094171524048
22,0.1691007912158966
23,0.16866475343704224
24,0.16822782158851624
25,0.16779044270515442
26,0.1673501431941986
27,0.1669057011604309
28,0.16645866632461548
29,0.16600798070430756
30,0.16555380821228027
31,0.1650974452495575
32,0.16464105248451233
33,0.1641860455274582
34,0.16373208165168762
35,0.16328014433383942
36,0.16283167898654938
37,0.16238801181316376
38,0.1619482785463333
39,0.161519393324852
40,0.16110296547412872
41,0.1607038825750351
42,0.16031385958194733
43,0.15993204712867737
44,0.15955865383148193
45,0.15919306874275208
46,0.15883561968803406
47,0.15848466753959656
48,0.1581413447856903
49,0.15780547261238098
50,0.15747694671154022
51,0.15715625882148743
52,0.1568424552679062
53,0.15653584897518158
54,0.15623602271080017
55,0.15594322979450226
56,0.15565718710422516
57,0.15537692606449127
58,0.15510223805904388
59,0.15483269095420837
60,0.15456868708133698
61,0.1543092429637909
62,0.15405379235744476
63,0.15380138158798218
64,0.1535535752773285
65,0.15331099927425385
66,0.15307343006134033
67,0.15284107625484467
68,0.1526138037443161
69,0.15239115059375763
70,0.15217235684394836
71,0.1519571989774704
72,0.15174569189548492
73,0.15153814852237701
74,0.15133386850357056
75,0.15113313496112823
76,0.15093545615673065
77,0.15074068307876587
78,0.15054841339588165
79,0.1503596305847168
80,0.15017245709896088
81,0.14998677372932434
82,0.149802103638649
83,0.14961914718151093
84,0.14943701028823853
85,0.14925558865070343
86,0.1490735560655594
87,0.14889158308506012
88,0.1487090289592743
89,0.14852556586265564
90,0.14834226667881012
91,0.14815910160541534
92,0.14797575771808624
93,0.14779269695281982
94,0.1476098895072937
95,0.1474272906780243
96,0.1472456455230713
97,0.14706560969352722
98,0.14688701927661896
99,0.1467091143131256
100,0.1465328186750412
101,0.1463572233915329
102,0.14618365466594696
103,0.146011620759964
104,0.14584070444107056
105,0.14567124843597412
106,0.14550313353538513
107,0.14533625543117523
108,0.14517037570476532
109,0.14500568807125092
110,0.14484179019927979
111,0.14467979967594147
112,0.1445179581642151
113,0.14435721933841705
114,0.14419730007648468
115,0.14403806626796722
116,0.14387960731983185
117,0.14372208714485168
118,0.14356476068496704
119,0.14340879023075104
120,0.14325331151485443
121,0.14309942722320557
122,0.14294670522212982
123,0.14279527962207794
124,0.1426447480916977
125,0.1424955427646637
126,0.14234718680381775
127,0.1422002613544464
128,0.14205405116081238
129,0.14190930128097534
130,0.14176583290100098
131,0.14162404835224152
132,0.14148327708244324
133,0.14134378731250763
134,0.14120493829250336
135,0.14106784760951996
136,0.14093123376369476
137,0.14079616963863373
138,0.14066173136234283
139,0.14052800834178925
140,0.14039483666419983
141,0.14026221632957458
142,0.1401299089193344
143,0.13999822735786438
144,0.1398668885231018
145,0.13973625004291534
146,0.1396058350801468
147,0.1394759565591812
148,0.13934676349163055
149,0.13921797275543213
150,0.13909010589122772
151,0.13896234333515167
152,0.13883568346500397
153,0.13870911300182343
154,0.13858340680599213
155,0.1384579837322235
156,0.13833338022232056
157,0.13820935785770416
158,0.13808588683605194
159,0.13796310126781464
160,0.13784083724021912
161,0.13771948218345642
162,0.13759863376617432
163,0.13747896254062653
164,0.1373596042394638
165,0.13724073767662048
166,0.13712146878242493
167,0.13700245320796967
168,0.13688287138938904
169,0.13676390051841736
170,0.13664504885673523
171,0.13652671873569489
172,0.13640792667865753
173,0.1362890750169754
174,0.13617002964019775
175,0.13605129718780518
176,0.13593222200870514
177,0.1358131468296051
178,0.1356937438249588
179,0.13557419180870056
180,0.13545438647270203
181,0.13533441722393036
182,0.13521386682987213
183,0.1350928097963333
184,0.13497158885002136
185,0.13484954833984375
186,0.13472768664360046
187,0.13460521399974823
188,0.1344829797744751
189,0.13436035811901093
190,0.13423778116703033
191,0.1341153383255005
192,0.1339929699897766
193,0.13387112319469452
194,0.13374927639961243
195,0.1336275041103363
196,0.13350629806518555
197,0.13338521122932434
198,0.13326533138751984
199,0.13314510881900787
200,0.13302506506443024

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.2
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
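A minimal endpoint check for the normalized loss logged above (a sketch, assuming only the math and torch imports and the same defaults min_val=0.01, delta=1): with exponent=4, the loss should come out to min_val at zero error and to 1 at an error of 2π, matching the docstring.
import math
import torch

def normalized_loss(theta, desired_theta, exponent, min_val=0.01, delta=1):
    # Same mapping as in the training log above: error in [0, 2π] -> loss in [min_val, 1].
    error = torch.abs(theta - desired_theta)
    numerator = (error + delta) ** exponent - delta ** exponent
    denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (numerator / denominator)

zero = torch.tensor(0.0)
print(normalized_loss(zero, zero, exponent=4).item())                       # ~0.01 (min_val)
print(normalized_loss(torch.tensor(2 * math.pi), zero, exponent=4).item())  # ~1.0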

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,10.523073196411133
2,1.0487546920776367
3,0.8225482702255249
4,0.35030558705329895
5,0.264265239238739
6,0.19754540920257568
7,0.19256769120693207
8,0.19020164012908936
9,0.1887328028678894
10,0.1883114129304886
11,0.1884777545928955
12,0.1881391555070877
13,0.18750280141830444
14,0.18667548894882202
15,0.18571577966213226
16,0.1846942901611328
17,0.18363597989082336
18,0.182551771402359
19,0.18145303428173065
20,0.18034888803958893
21,0.1792447566986084
22,0.17814350128173828
23,0.1770513355731964
24,0.1759728342294693
25,0.174910306930542
26,0.17386794090270996
27,0.1728464514017105
28,0.17184147238731384
29,0.17085741460323334
30,0.1698959320783615
31,0.16895736753940582
32,0.16804185509681702
33,0.1671496033668518
34,0.16628000140190125
35,0.1654323786497116
36,0.1646023988723755
37,0.16378958523273468
38,0.16299575567245483
39,0.16222333908081055
40,0.1614740490913391
41,0.16074825823307037
42,0.16003820300102234
43,0.1593407243490219
44,0.1586601734161377
45,0.15799680352210999
46,0.1573510318994522
47,0.15672443807125092
48,0.15611961483955383
49,0.15554890036582947
50,0.15500310063362122
51,0.15447908639907837
52,0.15397536754608154
53,0.1534905880689621
54,0.15302379429340363
55,0.15257319808006287
56,0.1521410048007965
57,0.1517259031534195
58,0.1513252854347229
59,0.15093742311000824
60,0.15056192874908447
61,0.15019799768924713
62,0.1498444825410843
63,0.1495017558336258
64,0.1491684764623642
65,0.14884807169437408
66,0.14854004979133606
67,0.14824135601520538
68,0.1479509472846985
69,0.14766792953014374
70,0.147392138838768
71,0.1471225768327713
72,0.14685960114002228
73,0.14660316705703735
74,0.14635297656059265
75,0.14610791206359863
76,0.14586761593818665
77,0.1456320732831955
78,0.14540094137191772
79,0.14517422020435333
80,0.1449516862630844
81,0.1447330117225647
82,0.14451785385608673
83,0.14430657029151917
84,0.1440984606742859
85,0.14389431476593018
86,0.14369367063045502
87,0.14349649846553802
88,0.14330197870731354
89,0.14311029016971588
90,0.14292126893997192
91,0.14273469150066376
92,0.14255082607269287
93,0.14236922562122345
94,0.14218996465206146
95,0.1420130878686905
96,0.14183850586414337
97,0.1416657716035843
98,0.14149489998817444
99,0.14132565259933472
100,0.14115816354751587
101,0.14099234342575073
102,0.1408277153968811
103,0.14066441357135773
104,0.14050231873989105
105,0.14034222066402435
106,0.14018329977989197
107,0.14002615213394165
108,0.13986995816230774
109,0.1397157460451126
110,0.13956262171268463
111,0.13941112160682678
112,0.13926084339618683
113,0.13911210000514984
114,0.1389646977186203
115,0.13881848752498627
116,0.13867396116256714
117,0.1385304480791092
118,0.1383887231349945
119,0.1382482647895813
120,0.13810965418815613
121,0.1379726231098175
122,0.13783687353134155
123,0.1377028375864029
124,0.1375698745250702
125,0.13743841648101807
126,0.13730791211128235
127,0.13717877864837646
128,0.13705067336559296
129,0.13692374527454376
130,0.1367981731891632
131,0.1366734355688095
132,0.13654987514019012
133,0.1364271491765976
134,0.1363050788640976
135,0.1361834853887558
136,0.13606294989585876
137,0.13594330847263336
138,0.1358238011598587
139,0.13570469617843628
140,0.13558584451675415
141,0.13546763360500336
142,0.1353498250246048
143,0.1352328658103943
144,0.1351166069507599
145,0.13500121235847473
146,0.13488689064979553
147,0.1347731649875641
148,0.13466036319732666
149,0.13454805314540863
150,0.1344371885061264
151,0.13432690501213074
152,0.1342175006866455
153,0.13410913944244385
154,0.1340017467737198
155,0.13389530777931213
156,0.133789524435997
157,0.13368487358093262
158,0.13358107209205627
159,0.13347816467285156
160,0.1333763301372528
161,0.13327497243881226
162,0.13317418098449707
163,0.13307379186153412
164,0.13297398388385773
165,0.1328737586736679
166,0.1327730268239975
167,0.1326727569103241
168,0.13257251679897308
169,0.1324726790189743
170,0.1323729306459427
171,0.1322728991508484
172,0.13217271864414215
173,0.13207149505615234
174,0.13196925818920135
175,0.13186684250831604
176,0.1317642778158188
177,0.13166141510009766
178,0.13155800104141235
179,0.1314544528722763
180,0.1313505470752716
181,0.1312459409236908
182,0.13114115595817566
183,0.1310356855392456
184,0.1309296190738678
185,0.13082298636436462
186,0.13071583211421967
187,0.13060900568962097
188,0.13050265610218048
189,0.13039728999137878
190,0.1302928775548935
191,0.1301892250776291
192,0.13008688390254974
193,0.1299852579832077
194,0.12988491356372833
195,0.12978608906269073
196,0.12968870997428894
197,0.12959307432174683
198,0.12949855625629425
199,0.12940570712089539
200,0.1293140947818756

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.25
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,4.735718250274658
2,1.1212695837020874
3,1.0804215669631958
4,0.9687809944152832
5,0.664789617061615
6,0.4559202194213867
7,0.3644809424877167
8,0.30109620094299316
9,0.25103065371513367
10,0.2182338833808899
11,0.20096541941165924
12,0.1904691457748413
13,0.1832406371831894
14,0.17790460586547852
15,0.17378538846969604
16,0.1705252081155777
17,0.1678985208272934
18,0.1657484918832779
19,0.1639525592327118
20,0.16242505609989166
21,0.1611054688692093
22,0.15997186303138733
23,0.15900669991970062
24,0.15817378461360931
25,0.15744054317474365
26,0.15679077804088593
27,0.15620975196361542
28,0.15568807721138
29,0.15521635115146637
30,0.15478578209877014
31,0.15439066290855408
32,0.1540256291627884
33,0.1536850482225418
34,0.15336422622203827
35,0.1530608981847763
36,0.1527712196111679
37,0.15249258279800415
38,0.1522228717803955
39,0.1519608199596405
40,0.1517060250043869
41,0.1514573097229004
42,0.15121351182460785
43,0.15097670257091522
44,0.15074703097343445
45,0.15052597224712372
46,0.15030896663665771
47,0.1500956118106842
48,0.14988499879837036
49,0.14967674016952515
50,0.14947077631950378
51,0.14926649630069733
52,0.14906525611877441
53,0.14886687695980072
54,0.14867021143436432
55,0.14847566187381744
56,0.14828374981880188
57,0.14809352159500122
58,0.14790494740009308
59,0.14771825075149536
60,0.1475338190793991
61,0.14735087752342224
62,0.14716871082782745
63,0.14698754251003265
64,0.14680695533752441
65,0.1466272473335266
66,0.14644865691661835
67,0.14627180993556976
68,0.14609526097774506
69,0.1459190398454666
70,0.14574378728866577
71,0.14556874334812164
72,0.14539436995983124
73,0.1452210396528244
74,0.1450481414794922
75,0.14487643539905548
76,0.14470753073692322
77,0.1445402204990387
78,0.14437434077262878
79,0.14421044290065765
80,0.14404833316802979
81,0.14388754963874817
82,0.14372828602790833
83,0.14357063174247742
84,0.14341437816619873
85,0.14325998723506927
86,0.14310836791992188
87,0.14295753836631775
88,0.14280842244625092
89,0.14266178011894226
90,0.14251698553562164
91,0.1423739790916443
92,0.142233744263649
93,0.14209556579589844
94,0.14195914566516876
95,0.1418246626853943
96,0.14169295132160187
97,0.14156298339366913
98,0.14143547415733337
99,0.14130999147891998
100,0.14118598401546478
101,0.141063392162323
102,0.14094212651252747
103,0.1408226490020752
104,0.1407041847705841
105,0.140586718916893
106,0.14047104120254517
107,0.14035674929618835
108,0.14024321734905243
109,0.14013051986694336
110,0.14001908898353577
111,0.13990870118141174
112,0.13979917764663696
113,0.1396905481815338
114,0.13958308100700378
115,0.13947640359401703
116,0.13937024772167206
117,0.13926489651203156
118,0.1391608566045761
119,0.13905777037143707
120,0.13895466923713684
121,0.13885191082954407
122,0.13874773681163788
123,0.13864268362522125
124,0.1385367065668106
125,0.1384298950433731
126,0.13832204043865204
127,0.13821221888065338
128,0.13809983432292938
129,0.1379866898059845
130,0.13787369430065155
131,0.13776060938835144
132,0.13764768838882446
133,0.13753435015678406
134,0.13742102682590485
135,0.13730810582637787
136,0.1371951550245285
137,0.13708244264125824
138,0.13696986436843872
139,0.1368577927350998
140,0.1367463320493698
141,0.13663510978221893
142,0.13652357459068298
143,0.13641045987606049
144,0.13629834353923798
145,0.13618749380111694
146,0.1360766738653183
147,0.13596634566783905
148,0.13585664331912994
149,0.1357475370168686
150,0.13563914597034454
151,0.13552996516227722
152,0.13542059063911438
153,0.13531316816806793
154,0.13520589470863342
155,0.13509878516197205
156,0.13499261438846588
157,0.13488854467868805
158,0.1347859501838684
159,0.1346846967935562
160,0.1345834732055664
161,0.13448265194892883
162,0.13438403606414795
163,0.1342869997024536
164,0.13419078290462494
165,0.1340949684381485
166,0.13400046527385712
167,0.1339072436094284
168,0.13381564617156982
169,0.13372503221035004
170,0.13363485038280487
171,0.1335456520318985
172,0.13345743715763092
173,0.13337014615535736
174,0.1332840770483017
175,0.13319852948188782
176,0.13311375677585602
177,0.1330300122499466
178,0.13294754922389984
179,0.1328665018081665
180,0.1327866017818451
181,0.13270726799964905
182,0.13262872397899628
183,0.1325518637895584
184,0.13247625529766083
185,0.13240094482898712
186,0.13232623040676117
187,0.13225337862968445
188,0.1321813017129898
189,0.1321110725402832
190,0.13204190135002136
191,0.1319749802350998
192,0.13190904259681702
193,0.13184469938278198
194,0.13178123533725739
195,0.13171963393688202
196,0.13165932893753052
197,0.13159993290901184
198,0.13154126703739166
199,0.13148349523544312
200,0.13142673671245575

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.3
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,202 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,2.4868006706237793
2,1.484640121459961
3,1.7096853256225586
4,1.775104284286499
5,1.6283886432647705
6,1.4071059226989746
7,1.179467797279358
8,0.9630540013313293
9,0.7795644402503967
10,0.6399717926979065
11,0.5351330041885376
12,0.45858535170555115
13,0.40104109048843384
14,0.35629913210868835
15,0.32037678360939026
16,0.2911825478076935
17,0.26756322383880615
18,0.24803200364112854
19,0.23176150023937225
20,0.2182576060295105
21,0.20708318054676056
22,0.1860899031162262
23,0.20702745020389557
24,0.2633511424064636
25,0.3418039381504059
26,0.43667033314704895
27,4.755534648895264
28,29.954187393188477
29,2.504987955093384
30,1.2452938556671143
31,1.1811820268630981
32,1.23821222782135
33,1.493226408958435
34,1.491268277168274
35,1.348840594291687
36,1.3181228637695312
37,1.4254050254821777
38,1.4296796321868896
39,1.311000108718872
40,1.2047390937805176
41,1.554042100906372
42,1.4558824300765991
43,1.762275218963623
44,2.4600048065185547
45,3.2620596885681152
46,3.060593366622925
47,5.07982873916626
48,4.794989109039307
49,4.253286361694336
50,4.180200099945068
51,3.853510618209839
52,3.646782398223877
53,3.3116555213928223
54,3.248952865600586
55,3.0244901180267334
56,2.785672664642334
57,2.552765369415283
58,2.5126535892486572
59,1.7299529314041138
60,1.810992956161499
61,1.6813421249389648
62,2.373183488845825
63,1.8572572469711304
64,1.490265130996704
65,1.2071611881256104
66,1.0321846008300781
67,0.8953080773353577
68,0.7624420523643494
69,0.6590163707733154
70,0.5752772688865662
71,0.5047502517700195
72,0.44731876254081726
73,0.4040822982788086
74,0.37129536271095276
75,0.34268179535865784
76,0.32143375277519226
77,0.3045608699321747
78,0.2937592566013336
79,0.3098408579826355
80,0.30327191948890686
81,0.2762918472290039
82,0.2588250935077667
83,0.25327014923095703
84,0.2487478256225586
85,0.24490554630756378
86,0.2415083944797516
87,0.23874922096729279
88,0.23610429465770721
89,0.2335314005613327
90,0.23100532591342926
91,0.2285926342010498
92,0.22634120285511017
93,0.22421899437904358
94,0.22224673628807068
95,0.22036027908325195
96,0.21853965520858765
97,0.2168452888727188
98,0.21525098383426666
99,0.21378274261951447
100,0.2123057246208191
101,0.21090421080589294
102,0.20945139229297638
103,0.20808307826519012
104,0.20677639544010162
105,0.20548024773597717
106,0.20427177846431732
107,0.20310868322849274
108,0.2020944207906723
109,0.20108744502067566
110,0.20010478794574738
111,0.1991451531648636
112,0.19820468127727509
113,0.19726260006427765
114,0.19636264443397522
115,0.1954805552959442
116,0.19456946849822998
117,0.1936563104391098
118,0.19273792207241058
119,0.19179967045783997
120,0.19086560606956482
121,0.1898810863494873
122,0.18887849152088165
123,0.18788135051727295
124,0.1870756596326828
125,0.18631485104560852
126,0.18553754687309265
127,0.18471305072307587
128,0.18383847177028656
129,0.18288269639015198
130,0.18213161826133728
131,0.18164408206939697
132,0.18112273514270782
133,0.18054257333278656
134,0.17988277971744537
135,0.17918159067630768
136,0.1784527450799942
137,0.1776980310678482
138,0.17701567709445953
139,0.17636188864707947
140,0.17572693526744843
141,0.1751149743795395
142,0.17449145019054413
143,0.1738605797290802
144,0.17329174280166626
145,0.17266027629375458
146,0.17202697694301605
147,0.17143873870372772
148,0.1708587408065796
149,0.17026464641094208
150,0.16967269778251648
151,0.16906973719596863
152,0.16848166286945343
153,0.167888343334198
154,0.1672850400209427
155,0.16668175160884857
156,0.1660834401845932
157,0.1655428558588028
158,0.1649676114320755
159,0.16443903744220734
160,0.16393868625164032
161,0.16343052685260773
162,0.16290929913520813
163,0.16239312291145325
164,0.16184574365615845
165,0.16130831837654114
166,0.16077281534671783
167,0.16025584936141968
168,0.15976187586784363
169,0.1592087298631668
170,0.15869498252868652
171,0.15818679332733154
172,0.15768297016620636
173,0.1571572870016098
174,0.15662449598312378
175,0.15607595443725586
176,0.15556268393993378
177,0.15505540370941162
178,0.15450039505958557
179,0.1539914309978485
180,0.15351274609565735
181,0.15303245186805725
182,0.1525363326072693
183,0.15202881395816803
184,0.15154071152210236
185,0.1510714292526245
186,0.1505616307258606
187,0.15005679428577423
188,0.14956863224506378
189,0.14911112189292908
190,0.14865951240062714
191,0.14824065566062927
192,0.14778512716293335
193,0.14734184741973877
194,0.1469673365354538
195,0.14652669429779053
196,0.14614005386829376
197,0.14572632312774658
198,0.14531955122947693
199,0.14494429528713226
200,0.1445523053407669

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.4
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,36 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,1.301098108291626
2,1.2400227785110474
3,0.4081510901451111
4,0.2377406358718872
5,0.16962634027004242
6,0.15084166824817657
7,0.14229442179203033
8,0.13815578818321228
9,0.13637560606002808
10,0.13574624061584473
11,0.1355505883693695
12,0.13551384210586548
13,0.13551504909992218
14,0.1354827880859375
15,0.13595089316368103
16,0.1362253725528717
17,0.13604842126369476
18,345.6454772949219
19,3698461.5
20,0.8034685850143433
21,0.5383837819099426
22,0.602199137210846
23,0.6575281620025635
24,0.7057322859764099
25,2.505042552947998
26,340.17291259765625
27,14390.5888671875
28,41889896.0
29,41895364.0
30,41895364.0
31,41895364.0
32,41895364.0
33,41895364.0
34,41895364.0

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.5
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,16 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,0.9015329480171204
2,0.5356652140617371
3,0.3600023090839386
4,0.2878721058368683
5,0.23070190846920013
6,0.1845388561487198
7,0.16549819707870483
8,0.15569016337394714
9,0.14944268763065338
10,0.14969000220298767
11,nan
12,nan
13,nan
14,nan

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.6
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,13 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,0.7408900260925293
2,0.5513259172439575
3,0.32663169503211975
4,0.21077245473861694
5,0.17094270884990692
6,0.1536352038383484
7,0.14601771533489227
8,nan
9,nan
10,nan
11,nan

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.7
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

View File

@ -1,11 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,0.7520824074745178
2,0.3878028988838196
3,0.31070250272750854
4,0.26424312591552734
5,0.23525479435920715
6,nan
7,nan
8,nan
9,nan

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.8
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
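For context on the loss_fn wrapper recorded in these config dumps: state_traj is indexed as [batch, time, state], with theta in channel 0 and desired_theta in channel 3, and the batch dimension covers the 22 training cases listed above. A minimal self-contained sketch (shapes assumed from the comments, not taken from the repository) of evaluating the wrapped fourth-power loss on a dummy trajectory:

import math
import torch

def normalized_loss(theta, desired_theta, exponent=4, min_val=0.01, delta=1):
    # Fourth-power normalized loss, as in the dumped source above.
    error = torch.abs(theta - desired_theta)
    num = (error + delta) ** exponent - delta ** exponent
    den = (2 * math.pi + delta) ** exponent - delta ** exponent
    return min_val + (1 - min_val) * (num / den)

base_loss_fn = normalized_loss

def loss_fn(state_traj):
    theta = state_traj[:, :, 0]          # [batch_size, t_points]
    desired_theta = state_traj[:, :, 3]  # [batch_size, t_points]
    return torch.mean(base_loss_fn(theta, desired_theta))

# Dummy batch: 22 training cases x 1000 time points x 4 state channels
# (theta, omega, alpha, desired_theta), matching the config above.
state_traj = torch.zeros(22, 1000, 4)
print(loss_fn(state_traj))  # tensor(0.0100): loss floors at min_val when theta == desired_theta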

View File

@ -1,39 +0,0 @@
Epoch,Loss
0,1117.280029296875
1,0.9854243397712708
2,0.3447346091270447
3,3.1059296131134033
4,2.4283204078674316
5,0.8883350491523743
6,0.5983593463897705
7,0.3887821435928345
8,0.26367807388305664
9,0.21425235271453857
10,0.19075612723827362
11,0.18758946657180786
12,0.19773417711257935
13,0.5665905475616455
14,1.0985172986984253
15,0.6045243144035339
16,0.4887182116508484
17,0.3533113896846771
18,0.3073558509349823
19,0.2879619002342224
20,0.27278897166252136
21,0.26235079765319824
22,0.25458696484565735
23,0.24976076185703278
24,0.24581611156463623
25,0.24198336899280548
26,0.401131272315979
27,7.404294967651367
28,0.43706411123275757
29,0.38758718967437744
30,0.3742772042751312
31,0.3644647002220154
32,0.3567458689212799
33,0.3482499420642853
34,nan
35,nan
36,nan
37,nan

View File

@ -1,63 +0,0 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.9
Weight Decay: 0
Loss Function Name: four
Loss Function Exponent: 4
Current Loss Function (wrapper) Source Code:
def loss_fn(state_traj):
theta = state_traj[:, :, 0] # [batch_size, t_points]
desired_theta = state_traj[:, :, 3] # [batch_size, t_points]
return torch.mean(base_loss_fn(theta, desired_theta))
Specific Base Loss Function Source Code:
def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor, min_val: float = 0.01) -> torch.Tensor:
return normalized_loss(theta, desired_theta, exponent=4, min_val=min_val)
Normalized Loss Function Source Code:
def normalized_loss(theta: torch.Tensor, desired_theta: torch.Tensor, exponent: float, min_val: float = 0.01, delta: float = 1) -> torch.Tensor:
"""
Computes a normalized loss that maps the error (|theta - desired_theta|) on [0, 2π]
to the range [min_val, 1]. To avoid an infinite gradient at error=0 for exponents < 1,
a shift 'delta' is added.
The loss is given by:
loss = min_val + (1 - min_val) * ( ((error + delta)^exponent - delta^exponent)
/ ((2π + delta)^exponent - delta^exponent) )
so that:
- When error = 0: loss = min_val
- When error = 2π: loss = 1
"""
error = torch.abs(theta - desired_theta)
numerator = (error + delta) ** exponent - delta ** exponent
denominator = (2 * math.pi + delta) ** exponent - delta ** exponent
return min_val + (1 - min_val) * (numerator / denominator)
Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]

Some files were not shown because too many files have changed in this diff.