Finished re-training the max-normalized time weighting (with a minimum weight of 0.01) and the time_weight_learning_sweep. Started work on base loss function training.
This commit is contained in:
parent
eb71ab0de9
commit
a401ca3f59
BIN  analysis/__pycache__/time_weighting_functions.cpython-310.pyc  Normal file
Binary file not shown.
@@ -1,29 +0,0 @@
-import numpy as np
-import matplotlib.pyplot as plt
-
-# Define time span
-t_start, t_end, t_points = 0, 10, 1000
-t_span = np.linspace(t_start, t_end, t_points)
-
-# Define normalized weight functions
-weight_functions = {
-    'constant': lambda t: np.ones_like(t) / np.ones_like(t).mean(),
-    'linear': lambda t: ((t+1) / (t+1).max()) / ((t+1) / (t+1).max()).mean(),
-    'quadratic': lambda t: ((t+1)**2 / ((t+1)**2).max()) / ((t+1)**2 / ((t+1)**2).max()).mean(),
-    'cubic': lambda t: ((t+1)**3 / ((t+1)**3).max()) / ((t+1)**3 / ((t+1)**3).max()).mean(),
-    'inverse': lambda t: ((t+1)**-1 / ((t+1)**-1).max()) / ((t+1)**-1 / ((t+1)**-1).max()).mean(),
-    'inverse_squared': lambda t: ((t+1)**-2 / ((t+1)**-2).max()) / ((t+1)**-2 / ((t+1)**-2).max()).mean(),
-    'inverse_cubed': lambda t: ((t+1)**-3 / ((t+1)**-3).max()) / ((t+1)**-3 / ((t+1)**-3).max()).mean()
-}
-
-# Plot all weight functions
-plt.figure(figsize=(10, 6))
-for name, func in weight_functions.items():
-    plt.plot(t_span, func(t_span), label=name)
-
-plt.xlabel("Time (s)")
-plt.ylabel("Weight Value")
-plt.title("Average Normalized Weight Values")
-plt.legend()
-plt.grid(True)
-plt.savefig("average_normalized_weights.png")
Binary file not shown. (image removed; before: 56 KiB)
@@ -1,30 +1,18 @@
-import numpy as np
+import torch
 import matplotlib.pyplot as plt
 
 # Define time span
 t_start, t_end, t_points = 0, 10, 1000
-t_span = np.linspace(t_start, t_end, t_points)
+t_span = torch.linspace(t_start, t_end, t_points)
 
 # Define weight functions
-weight_functions = {
-    'constant': lambda t: np.ones_like(t),
-    'linear': lambda t: (t+1) / (t+1).max(),
-    'quadratic': lambda t: (t+1)**2 / ((t+1)**2).max(),
-    'cubic': lambda t: (t+1)**3 / ((t+1)**3).max(),
-    'inverse': lambda t: (t+1)**-1 / ((t+1)**-1).max(),
-    'inverse_squared': lambda t: (t+1)**-2 / ((t+1)**-2).max(),
-    'inverse_cubed': lambda t: (t+1)**-3 / ((t+1)**-3).max(),
-    'linear_mirrored': lambda t: ((-t+10)+1) / ((-t+10)+1).max(),
-    'quadratic_mirrored': lambda t: ((-t+10)+1)**2 / (((-t+10)+1)**2).max(),
-    'cubic_mirrored': lambda t: ((-t+10)+1)**3 / (((-t+10)+1)**3).max(),
-    'inverse_mirrored': lambda t: ((-t+10)+1)**-1 / (((-t+10)+1)**-1).max(),
-    'inverse_squared_mirrored': lambda t: ((-t+10)+1)**-2 / (((-t+10)+1)**-2).max(),
-    'inverse_cubed_mirrored': lambda t: ((-t+10)+1)**-3 / (((-t+10)+1)**-3).max()
-}
+from time_weighting_functions import weight_functions
 
 # Plot all weight functions
 plt.figure(figsize=(10, 6))
 for name, func in weight_functions.items():
+    y_vals = func(t_span)
+    print(f"{name}: {y_vals[0]:.3f} and {y_vals[-1]:.3f}")
     plt.plot(t_span, func(t_span), label=name)
 
 plt.xlabel("Time (s)")
Binary file not shown. (image changed; before: 146 KiB, after: 184 KiB)
107  analysis/time_weighting_functions.py  Normal file
@@ -0,0 +1,107 @@
import torch
from typing import Union, List

def constant(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    return torch.ones_like(t_span)

def linear(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**1) * t_span**1

def quadratic(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**2) * t_span**2

def cubic(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**3) * t_span**3

def square_root(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**(1/2)) * t_span**(1/2)

def cubic_root(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**(1/3)) * t_span**(1/3)

def inverse(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return (((1/min_val)**(1/1) - 1) * 1/t_max * t_span + 1)**-1

def inverse_squared(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return (((1/min_val)**(1/2) - 1) * 1/t_max * t_span + 1)**-2

def inverse_cubed(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return (((1/min_val)**(1/3) - 1) * 1/t_max * t_span + 1)**-3

def linear_mirrored(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**1) * (-t_span + t_max)**1

def quadratic_mirrored(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**2) * (-t_span + t_max)**2

def cubic_mirrored(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**3) * (-t_span + t_max)**3

def square_root_mirrored(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**(1/2)) * (-t_span + t_max)**(1/2)

def cubic_root_mirrored(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return min_val + ((1 - min_val) / (t_max)**(1/3)) * (-t_span + t_max)**(1/3)

def inverse_mirrored(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return (((1/min_val)**(1/1) - 1) * 1/t_max * (-t_span + t_max) + 1)**-1

def inverse_squared_mirrored(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return (((1/min_val)**(1/2) - 1) * 1/t_max * (-t_span + t_max) + 1)**-2

def inverse_cubed_mirrored(t_span: Union[torch.Tensor, List[float]], t_max: float = None, min_val: float = 0.01) -> torch.Tensor:
    t_span = t_span.clone().detach() if isinstance(t_span, torch.Tensor) else torch.tensor(t_span)
    t_max = t_max if t_max is not None else t_span[-1]
    return (((1/min_val)**(1/3) - 1) * 1/t_max * (-t_span + t_max) + 1)**-3

# Dictionary to store function references
weight_functions = {
    'constant': constant,
    'linear': linear,
    'quadratic': quadratic,
    'cubic': cubic,
    'square_root': square_root,
    'cubic_root': cubic_root,
    'inverse': inverse,
    'inverse_squared': inverse_squared,
    'inverse_cubed': inverse_cubed,
    'linear_mirrored': linear_mirrored,
    'quadratic_mirrored': quadratic_mirrored,
    'cubic_mirrored': cubic_mirrored,
    'square_root_mirrored': square_root_mirrored,
    'cubic_root_mirrored': cubic_root_mirrored,
    'inverse_mirrored': inverse_mirrored,
    'inverse_squared_mirrored': inverse_squared_mirrored,
    'inverse_cubed_mirrored': inverse_cubed_mirrored,
}
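A quick sanity sketch (editorial note, not part of the commit), assuming analysis/time_weighting_functions.py is importable: every non-constant family maps t in [0, t_max] onto [min_val, 1], so the 0.01 minimum weight from the commit message holds at one endpoint and the weight reaches 1 at the other; the mirrored variants swap which endpoint is which.

import torch
from time_weighting_functions import weight_functions

t_span = torch.linspace(0, 10, 1000)
for name, fn in weight_functions.items():
    w = fn(t_span)  # defaults: t_max = t_span[-1], min_val = 0.01
    print(f"{name}: w(0) = {w[0]:.3f}, w(t_max) = {w[-1]:.3f}")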
BIN  training/__pycache__/base_loss_functions.cpython-310.pyc  Normal file
Binary file not shown.
BIN  training/__pycache__/time_weighting_functions.cpython-310.pyc  Normal file
Binary file not shown.
83  training/base_loss_functions.py  Normal file
@@ -0,0 +1,83 @@
import torch
from typing import Union, List

def one_ninth_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the error loss raised to the power of 1/9: |theta - desired_theta|^(1/9)
    """
    return torch.abs(theta - desired_theta) ** (1/9)

def one_eighth_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the error loss raised to the power of 1/8: |theta - desired_theta|^(1/8)
    """
    return torch.abs(theta - desired_theta) ** (1/8)

def one_fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the error loss raised to the power of 1/4: |theta - desired_theta|^(1/4)
    """
    return torch.abs(theta - desired_theta) ** (1/4)

def one_third_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the error loss raised to the power of 1/3: |theta - desired_theta|^(1/3)
    """
    return torch.abs(theta - desired_theta) ** (1/3)

def one_half_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the error loss raised to the power of 1/2: |theta - desired_theta|^(1/2)
    """
    return torch.abs(theta - desired_theta) ** (1/2)

def abs_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the absolute error loss: |theta - desired_theta| (exponent 1)
    """
    return torch.abs(theta - desired_theta)

def square_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the squared error loss: |theta - desired_theta|^2
    """
    return torch.abs(theta - desired_theta) ** 2

def cube_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the cubed error loss: |theta - desired_theta|^3
    """
    return torch.abs(theta - desired_theta) ** 3

def fourth_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the error loss raised to the power of 4: |theta - desired_theta|^4
    """
    return torch.abs(theta - desired_theta) ** 4

def eight_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the error loss raised to the power of 8: |theta - desired_theta|^8
    """
    return torch.abs(theta - desired_theta) ** 8

def nine_loss(theta: torch.Tensor, desired_theta: torch.Tensor) -> torch.Tensor:
    """
    Computes the error loss raised to the power of 9: |theta - desired_theta|^9
    """
    return torch.abs(theta - desired_theta) ** 9

# Dictionary to store function references along with their corresponding exponent.
base_loss_functions = {
    'one_ninth': (1/9, one_ninth_loss),
    'one_eighth': (1/8, one_eighth_loss),
    'one_fourth': (1/4, one_fourth_loss),
    'one_third': (1/3, one_third_loss),
    'one_half': (1/2, one_half_loss),
    'abs': (1, abs_loss),
    'square': (2, square_loss),
    'cube': (3, cube_loss),
    'four': (4, fourth_loss),
    'eight': (8, eight_loss),
    'nine': (9, nine_loss)
}
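A minimal usage sketch (editorial note, not part of the commit) of the dictionary's (exponent, callable) layout on dummy tensors; exponents below 1 steepen the penalty near zero error and flatten its growth for large errors, while exponents above 1 do the opposite.

import torch
from base_loss_functions import base_loss_functions

theta = torch.tensor([0.1, 0.5, 1.0])
desired_theta = torch.zeros(3)
for name, (exponent, loss_fn) in base_loss_functions.items():
    print(f"{name} (exponent {exponent}): {loss_fn(theta, desired_theta).mean().item():.4f}")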
110  training/base_loss_training.py  Normal file
@@ -0,0 +1,110 @@
import torch
import torch.optim as optim
from torchdiffeq import odeint
import os
import shutil
import csv
import inspect

from PendulumController import PendulumController
from PendulumDynamics import PendulumDynamics
from initial_conditions import initial_conditions
from base_loss_functions import base_loss_functions  # Import the base loss functions

# Device setup
device = torch.device("cpu")
base_controller_path = "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth"

# Initial conditions (theta0, omega0, alpha0, desired_theta)
state_0 = torch.tensor(initial_conditions, dtype=torch.float32, device=device)

# Pendulum constants
m = 10.0
g = 9.81
R = 1.0

# Time grid settings
t_start, t_end, t_points = 0, 10, 1000
t_span = torch.linspace(t_start, t_end, t_points, device=device)

# Directory for storing results
output_dir = "base_loss_training"
os.makedirs(output_dir, exist_ok=True)

# Optimizer hyperparameters
learning_rate = 1e-1
weight_decay = 1e-4

# Training parameters
num_epochs = 1000

# Iterate over the base loss functions.
# Each entry in base_loss_functions is a tuple: (exponent, loss_fn)
for name, (exponent, loss_fn) in base_loss_functions.items():

    # Create a wrapper loss function that applies the base loss function
    # to the extracted theta and desired_theta from the state trajectory,
    # and then reduces it to a scalar.
    def current_loss_fn(state_traj):
        theta = state_traj[:, :, 0]          # [t_points, batch_size] (odeint returns time-first)
        desired_theta = state_traj[:, :, 3]  # [t_points, batch_size]
        return torch.mean(loss_fn(theta, desired_theta))

    # Initialize the controller and load the base parameters.
    controller = PendulumController().to(device)
    controller.load_state_dict(torch.load(base_controller_path))
    pendulum_dynamics = PendulumDynamics(controller, m, R, g).to(device)
    print(f"Loaded base controller from {base_controller_path} for loss '{name}' (exponent {exponent})")

    optimizer = optim.Adam(controller.parameters(), lr=learning_rate, weight_decay=weight_decay)

    # Set up directories for saving models and logs for this loss function.
    function_output_dir = os.path.join(output_dir, name)
    controllers_dir = os.path.join(function_output_dir, "controllers")
    if os.path.exists(controllers_dir):
        shutil.rmtree(controllers_dir)
    os.makedirs(controllers_dir, exist_ok=True)

    config_file = os.path.join(function_output_dir, "training_config.txt")
    log_file = os.path.join(function_output_dir, "training_log.csv")

    # Save configuration details including the loss function's exponent and source code.
    with open(config_file, "w") as f:
        f.write(f"Base controller path: {base_controller_path}\n")
        f.write(f"Time Span: {t_start} to {t_end}, Points: {t_points}\n")
        f.write(f"Learning Rate: {learning_rate}\n")
        f.write(f"Weight Decay: {weight_decay}\n")
        f.write(f"\nLoss Function Name: {name}\n")
        f.write(f"Loss Function Exponent: {exponent}\n")
        f.write("\nLoss Function Source Code:\n")
        f.write(inspect.getsource(loss_fn))
        f.write("\nTraining Cases:\n")
        f.write("[theta0, omega0, alpha0, desired_theta]\n")
        for case in state_0.cpu().numpy():
            f.write(f"{case.tolist()}\n")

    # Create log file with header.
    with open(log_file, "w", newline="") as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(["Epoch", "Loss"])

    # Begin training loop.
    for epoch in range(num_epochs + 1):
        optimizer.zero_grad()
        state_traj = odeint(pendulum_dynamics, state_0, t_span, method='rk4')
        loss = current_loss_fn(state_traj)
        loss.backward()

        # Save the model at this epoch (before the optimizer step).
        model_file = os.path.join(controllers_dir, f"controller_{epoch}.pth")
        torch.save(controller.state_dict(), model_file)
        print(f"{model_file} saved with loss: {loss.item()}")

        optimizer.step()

        # Log the training progress.
        with open(log_file, "a", newline="") as csvfile:
            csv_writer = csv.writer(csvfile)
            csv_writer.writerow([epoch, loss.item()])

print("Training complete. Models and logs are saved under respective directories for each loss function.")
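A minimal sketch (editorial note, not part of the commit) for reloading one of the saved checkpoints and rolling out a trajectory for inspection. The checkpoint path is an example composed from the script's own output_dir/name/controllers/controller_{epoch}.pth layout, not a file guaranteed to exist.

import torch
from torchdiffeq import odeint
from PendulumController import PendulumController
from PendulumDynamics import PendulumDynamics

controller = PendulumController()
controller.load_state_dict(torch.load("base_loss_training/abs/controllers/controller_1000.pth"))
controller.eval()
dynamics = PendulumDynamics(controller, 10.0, 1.0, 9.81)  # m, R, g as in the training script
state_0 = torch.tensor([[0.5, 0.0, 0.0, 0.0]])            # theta0, omega0, alpha0, desired_theta
traj = odeint(dynamics, state_0, torch.linspace(0, 10, 1000), method='rk4')
print(traj.shape)  # torch.Size([1000, 1, 4]): time, batch, state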
149  training/different_theta_powers_training.py  Normal file
@@ -0,0 +1,149 @@
import torch
import torch.optim as optim
from torchdiffeq import odeint
import numpy as np
import os
import shutil
import csv
import inspect

from PendulumController import PendulumController
from PendulumDynamics import PendulumDynamics

# Device setup
device = torch.device("cpu")
base_controller_path = "/home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth"

# Initial conditions (theta0, omega0, alpha0, desired_theta)
from initial_conditions import initial_conditions
state_0 = torch.tensor(initial_conditions, dtype=torch.float32, device=device)

# Constants
m = 10.0
g = 9.81
R = 1.0

# Time grid
t_start, t_end, t_points = 0, 10, 1000
t_span = torch.linspace(t_start, t_end, t_points, device=device)

# Specify directory for storing results
output_dir = "loss_function_powers"
os.makedirs(output_dir, exist_ok=True)

# Optimizer values
learning_rate = 1e-1
weight_decay = 1e-4

# Training parameters
num_epochs = 1000

# Wrap a (theta, desired_theta) loss into a loss over the full state trajectory
def make_loss_fn(loss_fn):
    def loss_fn_wrapper(state_traj, t_span):
        theta = state_traj[:, :, 0]          # Extract theta [t_points, batch_size]
        desired_theta = state_traj[:, :, 3]  # Extract desired theta

        # Compute loss using the provided function
        return loss_fn(theta, desired_theta)

    return loss_fn_wrapper


# Define different loss functions based on transformations of theta
loss_functions = {
    'abs_theta': {
        'function': lambda theta, desired_theta: torch.mean(torch.abs(theta - desired_theta)),
        'description': 'Loss is the absolute difference between theta and desired theta'
    },
    'theta_squared': {
        'function': lambda theta, desired_theta: torch.mean(torch.abs((theta - desired_theta) ** 2)),
        'description': 'Loss is the squared difference'
    },
    'theta_cubed': {
        'function': lambda theta, desired_theta: torch.mean(torch.abs((theta - desired_theta) ** 3)),
        'description': 'Loss is the cubed difference'
    },
    'neg_abs_theta': {
        'function': lambda theta, desired_theta: torch.mean(torch.abs(-torch.abs(theta - desired_theta))),
        'description': 'Loss is the absolute negative absolute difference'
    },
    'inverse_theta': {
        'function': lambda theta, desired_theta: torch.mean(torch.abs(1 / (torch.abs(theta - desired_theta) + 1e-6))),
        'description': 'Loss is the inverse absolute difference'
    },
    'inverse_theta_squared': {
        'function': lambda theta, desired_theta: torch.mean(torch.abs(1 / ((theta - desired_theta) ** 2 + 1e-6))),
        'description': 'Loss is the inverse squared difference'
    },
    'inverse_theta_cubed': {
        'function': lambda theta, desired_theta: torch.mean(torch.abs(1 / ((theta - desired_theta) ** 3 + 1e-6))),
        'description': 'Loss is the inverse cubed difference'
    },
}


# Training loop for each loss function
for name, loss_info in loss_functions.items():
    controller = PendulumController().to(device)
    controller.load_state_dict(torch.load(base_controller_path))
    pendulum_dynamics = PendulumDynamics(controller, m, R, g).to(device)
    print(f"Loaded {base_controller_path} as base controller")

    optimizer = optim.Adam(controller.parameters(), lr=learning_rate, weight_decay=weight_decay)
    loss_fn = make_loss_fn(loss_info['function'])

    # File paths
    function_output_dir = os.path.join(output_dir, name)
    controllers_dir = os.path.join(function_output_dir, "controllers")

    # Check if controllers directory exists and remove it
    if os.path.exists(controllers_dir):
        shutil.rmtree(controllers_dir)
    os.makedirs(controllers_dir, exist_ok=True)

    config_file = os.path.join(function_output_dir, "training_config.txt")
    log_file = os.path.join(function_output_dir, "training_log.csv")

    # Overwrite configuration and log files
    with open(config_file, "w") as f:
        f.write(f"Base controller path: {base_controller_path}\n")
        f.write(f"Time Span: {t_start} to {t_end}, Points: {t_points}\n")
        f.write(f"Learning Rate: {learning_rate}\n")
        f.write(f"Weight Decay: {weight_decay}\n")
        f.write("\nLoss Function:\n")
        f.write(inspect.getsource(loss_fn))
        f.write(f"\nLoss Description: {loss_info['description']}\n")
        f.write("\nTraining Cases:\n")
        f.write("[theta0, omega0, alpha0, desired_theta]\n")
        for case in state_0.cpu().numpy():
            f.write(f"{case.tolist()}\n")

    with open(log_file, "w", newline="") as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(["Epoch", "Loss"])

    # Training loop
    for epoch in range(0, num_epochs+1):
        optimizer.zero_grad()
        state_traj = odeint(pendulum_dynamics, state_0, t_span, method='rk4')
        loss = loss_fn(state_traj, t_span)
        loss.backward()

        # Save the model before training on this epoch
        # Therefore, controller_{epoch} represents the controller after {epoch} training iterations
        model_file = os.path.join(controllers_dir, f"controller_{epoch}.pth")
        torch.save(controller.state_dict(), model_file)
        print(f"{model_file} saved with loss: {loss.item()}")

        # Update the weights and biases
        optimizer.step()

        # Logging
        with open(log_file, "a", newline="") as csvfile:
            csv_writer = csv.writer(csvfile)
            csv_writer.writerow([epoch, loss.item()])

print("Training complete. Models and logs are saved under respective directories for each loss function.")
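A sanity sketch (editorial note, not part of the commit; assumes it runs in the same module, where loss_functions and make_loss_fn are in scope) that exercises each wrapped loss on a dummy trajectory before paying for an ODE solve. Worth noting when reading the resulting logs: the inverse_* losses shrink as |theta - desired_theta| grows, so minimizing them drives the error up rather than down.

import torch

dummy_traj = torch.zeros(1000, 2, 4)  # [t_points, batch, (theta, omega, alpha, desired_theta)]
dummy_traj[:, :, 0] = 0.3             # constant 0.3 rad error
for name, info in loss_functions.items():
    fn = make_loss_fn(info['function'])
    print(f"{name}: {fn(dummy_traj, None).item():.4f}")  # t_span is unused by the wrapper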
@@ -1,4 +1,3 @@
-import torch
 from torch import pi
 
 initial_conditions = [
57700  training/nohup.out
File diff suppressed because it is too large.
17  training/old/PendulumController.py  Normal file
@@ -0,0 +1,17 @@
import torch
import torch.nn as nn

class PendulumController(nn.Module):
    def __init__(self):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(4, 64),
            nn.ReLU(),
            nn.Linear(64, 64),
            nn.ReLU(),
            nn.Linear(64, 1)
        )

    def forward(self, x):
        raw_torque = self.net(x)
        return torch.clamp(raw_torque, -250, 250)
26  training/old/PendulumDynamics.py  Normal file
@@ -0,0 +1,26 @@
import torch
import torch.nn as nn

class PendulumDynamics(nn.Module):
    def __init__(self, controller, m: float = 1, R: float = 1, g: float = 9.81):
        super().__init__()
        self.controller = controller
        self.m: float = m
        self.R: float = R
        self.g: float = g

    def forward(self, t, state):
        # Get the current values from the state
        theta, omega, alpha, desired_theta = state[:, 0], state[:, 1], state[:, 2], state[:, 3]

        # Make the input stack for the controller
        input = torch.stack([theta, omega, alpha, desired_theta], dim=1)

        # Get the torque (the output of the neural network)
        tau = self.controller(input).squeeze(-1)

        # Relax alpha
        alpha_desired = (self.g / self.R) * torch.sin(theta) + tau / (self.m * self.R**2)
        dalpha = alpha_desired - alpha

        return torch.stack([omega, alpha, dalpha, torch.zeros_like(desired_theta)], dim=1)
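A one-call sketch (editorial note, not part of the commit; assumes both old/ modules are importable together) of the state layout the dynamics expect. Note alpha is relaxed toward the torque-driven target via dalpha = alpha_desired - alpha rather than being set directly.

import torch
from PendulumController import PendulumController
from PendulumDynamics import PendulumDynamics

dynamics = PendulumDynamics(PendulumController(), m=10.0, R=1.0, g=9.81)
state = torch.tensor([[0.5, 0.0, 0.0, 0.0]])  # one batch row: theta, omega, alpha, desired_theta
dstate = dynamics(torch.tensor(0.0), state)   # returns [omega, alpha, alpha_desired - alpha, 0]
print(dstate)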
@@ -54,38 +54,35 @@ def make_loss_fn(weight_fn):
     return loss_fn
 
 # Define and store weight functions with descriptions, normalized by average weight
-# weight_functions = {
-#     'constant': {
-#         'function': lambda t: torch.ones_like(t) / torch.ones_like(t).mean(),
-#         'description': 'Constant weight: All weights are 1, normalized by the average (remains 1)'
-#     },
-#     'linear': {
-#         'function': lambda t: ((t+1) / (t+1).max()) / ((t+1) / (t+1).max()).mean(),
-#         'description': 'Linear weight: Weights increase linearly from 0 to 1, normalized by the average weight'
-#     },
-#     'quadratic': {
-#         'function': lambda t: ((t+1)**2 / ((t+1)**2).max()) / ((t+1)**2 / ((t+1)**2).max()).mean(),
-#         'description': 'Quadratic weight: Weights increase quadratically from 0 to 1, normalized by the average weight'
-#     },
-#     'cubic': {
-#         'function': lambda t: ((t+1)**3 / ((t+1)**3).max()) / ((t+1)**3 / ((t+1)**3).max()).mean(),
-#         'description': 'Quadratic weight: Weights increase cubically from 0 to 1, normalized by the average weight'
-#     },
-#     'inverse': {
-#         'function': lambda t: ((t+1)**-1 / ((t+1)**-1).max()) / ((t+1)**-1 / ((t+1)**-1).max()).mean(),
-#         'description': 'Inverse weight: Weights decrease inversely, normalized by the average weight'
-#     },
-#     'inverse_squared': {
-#         'function': lambda t: ((t+1)**-2 / ((t+1)**-2).max()) / ((t+1)**-2 / ((t+1)**-2).max()).mean(),
-#         'description': 'Inverse squared weight: Weights decrease inversely squared, normalized by the average weight'
-#     },
-#     'inverse_cubed': {
-#         'function': lambda t: ((t+1)**-3 / ((t+1)**-3).max()) / ((t+1)**-3 / ((t+1)**-3).max()).mean(),
-#         'description': 'Inverse cubed weight: Weights decrease inversely cubed, normalized by the average weight'
-#     }
-# }
 
 weight_functions = {
+    'constant': {
+        'function': lambda t: torch.ones_like(t) / torch.ones_like(t).mean(),
+        'description': 'Constant weight: All weights are 1, normalized by the average (remains 1)'
+    },
+    'linear': {
+        'function': lambda t: ((t+1) / (t+1).max()) / ((t+1) / (t+1).max()).mean(),
+        'description': 'Linear weight: Weights increase linearly from 0 to 1, normalized by the average weight'
+    },
+    'quadratic': {
+        'function': lambda t: ((t+1)**2 / ((t+1)**2).max()) / ((t+1)**2 / ((t+1)**2).max()).mean(),
+        'description': 'Quadratic weight: Weights increase quadratically from 0 to 1, normalized by the average weight'
+    },
+    'cubic': {
+        'function': lambda t: ((t+1)**3 / ((t+1)**3).max()) / ((t+1)**3 / ((t+1)**3).max()).mean(),
+        'description': 'Cubic weight: Weights increase cubically from 0 to 1, normalized by the average weight'
+    },
+    'inverse': {
+        'function': lambda t: ((t+1)**-1 / ((t+1)**-1).max()) / ((t+1)**-1 / ((t+1)**-1).max()).mean(),
+        'description': 'Inverse weight: Weights decrease inversely, normalized by the average weight'
+    },
+    'inverse_squared': {
+        'function': lambda t: ((t+1)**-2 / ((t+1)**-2).max()) / ((t+1)**-2 / ((t+1)**-2).max()).mean(),
+        'description': 'Inverse squared weight: Weights decrease inversely squared, normalized by the average weight'
+    },
+    'inverse_cubed': {
+        'function': lambda t: ((t+1)**-3 / ((t+1)**-3).max()) / ((t+1)**-3 / ((t+1)**-3).max()).mean(),
+        'description': 'Inverse cubed weight: Weights decrease inversely cubed, normalized by the average weight'
+    },
     'linear_mirrored': {
         'function': lambda t: (((-t+10)+1) / ((-t+10)+1).max()) / (((-t+10)+1) / ((-t+10)+1).max()).mean(),
         'description': 'Linear mirrored weight: Weights decrease linearly from 0 to 1, normalized by the average weight'
27  training/old/initial_conditions.py  Normal file
@@ -0,0 +1,27 @@
import torch
from torch import pi

initial_conditions = [
    [1/6 * pi, 0.0, 0.0, 0.0],
    [-1/6 * pi, 0.0, 0.0, 0.0],
    [2/3 * pi, 0.0, 0.0, 0.0],
    [-2/3 * pi, 0.0, 0.0, 0.0],
    [0.0, 1/3 * pi, 0.0, 0.0],
    [0.0, -1/3 * pi, 0.0, 0.0],
    [0.0, 2 * pi, 0.0, 0.0],
    [0.0, -2 * pi, 0.0, 0.0],
    [0.0, 0.0, 0.0, 2 * pi],
    [0.0, 0.0, 0.0, -2 * pi],
    [0.0, 0.0, 0.0, 1/2 * pi],
    [0.0, 0.0, 0.0, -1/2 * pi],
    [0.0, 0.0, 0.0, 1/3 * pi],
    [0.0, 0.0, 0.0, -1/3 * pi],
    [1/4 * pi, 1 * pi, 0.0, 0.0],
    [-1/4 * pi, -1 * pi, 0.0, 0.0],
    [1/2 * pi, -1 * pi, 0.0, 1/3 * pi],
    [-1/2 * pi, 1 * pi, 0.0, -1/3 * pi],
    [1/4 * pi, 1 * pi, 0.0, 2 * pi],
    [-1/4 * pi, -1 * pi, 0.0, 2 * pi],
    [1/2 * pi, -1 * pi, 0.0, 4 * pi],
    [-1/2 * pi, 1 * pi, 0.0, -4 * pi],
]
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.01
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
@@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,929.5012817382812
2,760.7491455078125
3,719.2329711914062
4,628.367919921875
5,497.7966003417969
6,438.876220703125
7,345.08258056640625
8,249.2259521484375
9,232.88772583007812
10,195.8839111328125
11,153.7075653076172
12,162.1884307861328
13,156.3458709716797
14,146.13671875
15,139.6178741455078
16,137.17164611816406
17,127.34637451171875
18,124.68274688720703
19,120.20781707763672
20,119.68919372558594
21,118.0666732788086
22,112.44929504394531
23,111.71492004394531
24,107.95944213867188
25,103.55692291259766
26,104.09497833251953
27,104.35210418701172
28,103.973388671875
29,108.83365631103516
30,106.27936553955078
31,97.43461608886719
32,91.98582458496094
33,85.0150375366211
34,81.576416015625
35,78.11054229736328
36,74.8123779296875
37,71.9703369140625
38,69.01492309570312
39,66.16272735595703
40,63.46235275268555
41,69.29388427734375
42,66.73934173583984
43,65.28871154785156
44,64.34028625488281
45,63.76631164550781
46,63.3863639831543
47,63.1524772644043
48,63.009090423583984
49,62.83045959472656
50,61.66761779785156
51,62.12139129638672
52,62.43665313720703
53,61.677330017089844
54,61.29457473754883
55,57.714046478271484
56,52.49813461303711
57,43.35012435913086
58,39.513614654541016
59,38.22907257080078
60,37.408077239990234
61,36.67443084716797
62,35.95701217651367
63,35.44706726074219
64,35.04279327392578
65,34.68356704711914
66,34.38911819458008
67,34.61005401611328
68,34.5826301574707
69,34.29618453979492
70,34.02177429199219
71,33.777122497558594
72,33.156009674072266
73,32.17774200439453
74,31.8585262298584
75,31.68084144592285
76,31.552824020385742
77,31.268096923828125
78,31.126066207885742
79,30.796327590942383
80,29.9853515625
81,30.022335052490234
82,30.004865646362305
83,30.024503707885742
84,30.350528717041016
85,30.382726669311523
86,30.38064193725586
87,30.345109939575195
88,30.2956600189209
89,30.249067306518555
90,30.427114486694336
91,30.257610321044922
92,30.033601760864258
93,30.09646224975586
94,30.13431167602539
95,30.13173484802246
96,30.09049415588379
97,30.030664443969727
98,29.978221893310547
99,29.949363708496094
100,29.94510841369629
101,29.95542335510254
102,29.963903427124023
103,29.95401954650879
104,29.91761589050293
105,29.857656478881836
106,29.783775329589844
107,29.705184936523438
108,29.62762451171875
109,29.553142547607422
110,29.481164932250977
111,29.409881591796875
112,29.336000442504883
113,29.254844665527344
114,29.159570693969727
115,29.039579391479492
116,28.878250122070312
117,28.632080078125
118,27.398883819580078
119,27.31397247314453
120,27.296682357788086
121,27.295808792114258
122,27.296131134033203
123,27.287403106689453
124,27.262155532836914
125,27.215559005737305
126,27.1453857421875
127,27.056438446044922
128,26.970943450927734
129,26.889707565307617
130,26.791837692260742
131,26.66192626953125
132,26.499778747558594
133,26.349170684814453
134,26.233783721923828
135,26.13144874572754
136,26.02330207824707
137,25.897985458374023
138,25.744298934936523
139,25.593557357788086
140,25.54541778564453
141,25.556602478027344
142,25.545976638793945
143,25.517065048217773
144,25.476497650146484
145,25.424301147460938
146,25.359004974365234
147,25.29531478881836
148,25.28548240661621
149,25.30689811706543
150,25.302318572998047
151,25.271909713745117
152,25.219276428222656
153,25.18211555480957
154,25.183395385742188
155,25.1825008392334
156,25.167064666748047
157,25.137479782104492
158,25.101871490478516
159,25.076181411743164
160,25.07478904724121
161,25.06527328491211
162,25.037206649780273
163,25.007558822631836
164,24.996585845947266
165,24.987852096557617
166,24.972766876220703
167,24.95099449157715
168,24.92755126953125
169,24.913040161132812
170,24.904504776000977
171,24.887758255004883
172,24.86594581604004
173,24.851192474365234
174,24.84052276611328
175,24.82664680480957
176,24.80894660949707
177,24.791629791259766
178,24.779516220092773
179,24.767210006713867
180,24.750703811645508
181,24.734315872192383
182,24.721378326416016
183,24.708040237426758
184,24.691965103149414
185,24.67508316040039
186,24.660314559936523
187,24.64565658569336
188,24.628795623779297
189,24.61121368408203
190,24.594440460205078
191,24.577133178710938
192,24.558361053466797
193,24.53976058959961
194,24.52371597290039
195,24.5109806060791
196,24.50069236755371
197,24.495590209960938
198,24.492685317993164
199,24.475553512573242
200,24.458232879638672
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.02
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
@@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,848.4541015625
2,585.0303344726562
3,357.997314453125
4,222.19033813476562
5,166.72950744628906
6,168.06515502929688
7,149.70050048828125
8,133.18544006347656
9,123.33529663085938
10,112.84333038330078
11,104.2765121459961
12,90.75269317626953
13,77.59008026123047
14,62.69501876831055
15,52.964908599853516
16,47.52260208129883
17,41.17879867553711
18,34.73335647583008
19,32.59225082397461
20,26.943431854248047
21,25.71811866760254
22,24.074312210083008
23,21.00414276123047
24,20.200172424316406
25,18.574867248535156
26,16.756731033325195
27,16.277568817138672
28,16.236690521240234
29,14.540406227111816
30,13.96481990814209
31,13.146289825439453
32,12.308053970336914
33,11.763128280639648
34,11.230616569519043
35,10.92441463470459
36,11.060528755187988
37,11.085006713867188
38,11.011038780212402
39,10.908602714538574
40,10.785636901855469
41,10.577847480773926
42,10.587580680847168
43,10.458529472351074
44,10.573966026306152
45,10.447955131530762
46,10.454204559326172
47,10.36115837097168
48,10.225329399108887
49,10.246866226196289
50,10.265070915222168
51,10.250393867492676
52,10.218449592590332
53,10.165895462036133
54,10.004765510559082
55,10.001520156860352
56,9.96434497833252
57,9.87012767791748
58,9.775923728942871
59,9.761712074279785
60,9.766066551208496
61,9.767330169677734
62,9.749612808227539
63,9.717607498168945
64,9.671631813049316
65,9.609047889709473
66,9.521329879760742
67,9.341747283935547
68,9.420880317687988
69,9.473664283752441
70,9.477346420288086
71,9.449597358703613
72,9.353282928466797
73,9.322884559631348
74,9.15274715423584
75,9.3987398147583
76,9.465538024902344
77,9.506175994873047
78,9.523160934448242
79,9.530499458312988
80,9.530074119567871
81,9.523048400878906
82,9.510130882263184
83,9.491634368896484
84,9.467304229736328
85,9.434998512268066
86,9.37868595123291
87,9.398971557617188
88,9.382739067077637
89,9.349221229553223
90,9.310247421264648
91,9.26604175567627
92,9.10019302368164
93,9.072464942932129
94,9.008045196533203
95,9.061010360717773
96,9.136478424072266
97,9.16582202911377
98,9.173788070678711
99,9.171384811401367
100,9.162890434265137
101,9.149582862854004
102,9.127596855163574
103,9.00387954711914
104,9.084137916564941
105,9.08071231842041
106,9.068263053894043
107,9.05156135559082
108,9.031661987304688
109,9.009000778198242
110,8.983818054199219
111,8.95622730255127
112,8.92620849609375
113,8.893411636352539
114,8.856374740600586
115,8.804402351379395
116,8.795841217041016
117,8.779950141906738
118,8.751302719116211
119,8.720248222351074
120,8.68759822845459
121,8.653236389160156
122,8.616758346557617
123,8.577445983886719
124,8.534051895141602
125,8.484238624572754
126,8.42344856262207
127,8.344918251037598
128,8.217096328735352
129,8.157134056091309
130,8.455394744873047
131,8.602924346923828
132,8.67727279663086
133,8.704824447631836
134,8.682988166809082
135,8.496933937072754
136,8.317636489868164
137,8.348366737365723
138,8.366308212280273
139,8.372465133666992
140,8.370376586914062
141,8.362154006958008
142,8.34916877746582
143,8.332263946533203
144,8.312002182006836
145,8.288758277893066
146,8.262754440307617
147,8.234086036682129
148,8.202699661254883
149,8.168371200561523
150,8.130666732788086
151,8.08890151977539
152,8.041812896728516
153,7.9888153076171875
154,7.932823181152344
155,7.876833915710449
156,7.779386520385742
157,7.75201940536499
158,7.717170238494873
159,7.6829681396484375
160,7.651601791381836
161,7.515256881713867
162,7.587852478027344
163,7.577576160430908
164,7.560608863830566
165,7.539144039154053
166,7.514283180236816
167,7.486752033233643
168,7.457034111022949
169,7.42548942565918
170,7.392409801483154
171,7.358027935028076
172,7.322559833526611
173,7.286151885986328
174,7.248970985412598
175,7.211185455322266
176,7.172971725463867
177,7.13458251953125
178,7.096227169036865
179,7.058074474334717
180,7.020253658294678
181,6.982621669769287
182,6.944700241088867
183,6.905762195587158
184,6.864636421203613
185,6.819009780883789
186,6.742674827575684
187,6.766718864440918
188,6.739691257476807
189,6.681982040405273
190,6.753336429595947
191,6.8020100593566895
192,6.815769195556641
193,6.806506156921387
194,6.787343978881836
195,6.765802383422852
196,6.742506504058838
197,6.7116851806640625
198,6.6643524169921875
199,6.608691215515137
200,6.475497245788574
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.04
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
@@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,701.9725341796875
2,264.0076904296875
3,159.40110778808594
4,126.43462371826172
5,92.0433120727539
6,74.90762329101562
7,67.69001770019531
8,60.541873931884766
9,48.747215270996094
10,45.35858917236328
11,46.4842414855957
12,40.74010467529297
13,39.21887969970703
14,39.1895751953125
15,36.91448974609375
16,29.01195526123047
17,25.2808837890625
18,19.1278076171875
19,18.45102310180664
20,17.770423889160156
21,18.373388290405273
22,16.749378204345703
23,16.542858123779297
24,16.642562866210938
25,15.759243965148926
26,15.941630363464355
27,15.951122283935547
28,15.904982566833496
29,15.810111999511719
30,15.635967254638672
31,16.531970977783203
32,16.44236946105957
33,16.097810745239258
34,16.66484832763672
35,16.582366943359375
36,17.218414306640625
37,17.34698486328125
38,17.035799026489258
39,17.000234603881836
40,17.03762435913086
41,17.069860458374023
42,17.03567123413086
43,16.875198364257812
44,16.454540252685547
45,15.30571460723877
46,14.484301567077637
47,14.928971290588379
48,15.227804183959961
49,15.299188613891602
50,15.255221366882324
51,15.136775970458984
52,14.960775375366211
53,14.729593276977539
54,14.436092376708984
55,14.060531616210938
56,13.573005676269531
57,12.97873306274414
58,12.422508239746094
59,12.276296615600586
60,12.742033958435059
61,12.347969055175781
62,12.179417610168457
63,12.147784233093262
64,12.13644027709961
65,12.125748634338379
66,12.105073928833008
67,12.069154739379883
68,12.01697826385498
69,11.948393821716309
70,11.864931106567383
71,11.773849487304688
72,11.701828002929688
73,11.681047439575195
74,11.67745590209961
75,11.655662536621094
76,11.62376880645752
77,11.585184097290039
78,11.536943435668945
79,11.479323387145996
80,11.413002967834473
81,11.339367866516113
82,11.263132095336914
83,11.196410179138184
84,11.154114723205566
85,11.133700370788574
86,11.122450828552246
87,11.111432075500488
88,11.09665298461914
89,11.076613426208496
90,11.050999641418457
91,11.020368576049805
92,10.986037254333496
93,10.950230598449707
94,10.916117668151855
95,10.887368202209473
96,10.866628646850586
97,10.852522850036621
98,10.84048843383789
99,10.826284408569336
100,10.808121681213379
101,10.786215782165527
102,10.762040138244629
103,10.737741470336914
104,10.715201377868652
105,10.6950101852417
106,10.676193237304688
107,10.657072067260742
108,10.636316299438477
109,10.613516807556152
110,10.589245796203613
111,10.564495086669922
112,10.540307998657227
113,10.517278671264648
114,10.495426177978516
115,10.47428035736084
116,10.45315170288086
117,10.431408882141113
118,10.4087553024292
119,10.385278701782227
120,10.361358642578125
121,10.337419509887695
122,10.313730239868164
123,10.290315628051758
124,10.266986846923828
125,10.243412971496582
126,10.219423294067383
127,10.194990158081055
128,10.170079231262207
129,10.144745826721191
130,10.119159698486328
131,10.093442916870117
132,10.067682266235352
133,10.04179573059082
134,10.015642166137695
135,9.989130973815918
136,9.962286949157715
137,9.935125350952148
138,9.907644271850586
139,9.879822731018066
140,9.851569175720215
141,9.822710037231445
142,9.793022155761719
143,9.76225757598877
144,9.730083465576172
145,9.696220397949219
146,9.66089916229248
147,9.625883102416992
148,9.594341278076172
149,9.564790725708008
150,9.5355224609375
151,9.505996704101562
152,9.4760103225708
153,9.44546127319336
154,9.414304733276367
155,9.382354736328125
156,9.349360466003418
157,9.315010070800781
158,9.278667449951172
159,9.23918342590332
160,9.194244384765625
161,9.137089729309082
162,8.930317878723145
163,8.911437034606934
164,8.884065628051758
165,8.855144500732422
166,8.826417922973633
167,8.798074722290039
168,8.77006721496582
169,8.742522239685059
170,8.715385437011719
171,8.688645362854004
172,8.662213325500488
173,8.63603687286377
174,8.610076904296875
175,8.584427833557129
176,8.559142112731934
177,8.534276008605957
178,8.50981616973877
179,8.48571491241455
180,8.46186351776123
181,8.43826961517334
182,8.414827346801758
183,8.391619682312012
184,8.368552207946777
185,8.345622062683105
186,8.322759628295898
187,8.299935340881348
188,8.277108192443848
189,8.2542142868042
190,8.231208801269531
191,8.208024024963379
192,8.18459415435791
193,8.160835266113281
194,8.136693000793457
195,8.11206340789795
196,8.086776733398438
197,8.0606107711792
198,8.03337574005127
199,8.004700660705566
200,7.974150657653809
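The Epoch,Loss tables in these new files are easiest to read as curves. A minimal sketch for plotting one of them (the file name loss_history.csv is a placeholder; this diff view does not show the actual CSV paths):

import numpy as np
import matplotlib.pyplot as plt

# Placeholder path; substitute the run's actual CSV file.
epochs, losses = np.loadtxt("loss_history.csv", delimiter=",", skiprows=1, unpack=True)

plt.figure(figsize=(10, 6))
plt.semilogy(epochs, losses)  # losses fall from ~1088 to ~3, so a log scale helps
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.title("Training Loss vs. Epoch")
plt.grid(True)
plt.savefig("loss_history.png")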
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.05
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
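The case values are float32 printouts of simple fractions of pi: 0.5235987901687622 is pi/6, 0.7853981852531433 is pi/4, 1.0471975803375244 is pi/3, 1.5707963705062866 is pi/2, 2.094395160675049 is 2pi/3, 3.1415927410125732 is pi, 6.2831854820251465 is 2pi, and 12.566370964050293 is 4pi. A sketch of how such a case table could be generated (the variable name is an assumption):

import math
import torch

pi = math.pi
# Columns: [theta0, omega0, alpha0, desired_theta]. Casting to float32
# reproduces the printed values exactly.
training_cases = torch.tensor([
    [pi / 6, 0.0, 0.0, 0.0],
    [-pi / 6, 0.0, 0.0, 0.0],
    [2 * pi / 3, 0.0, 0.0, 0.0],
    [-2 * pi / 3, 0.0, 0.0, 0.0],
    [0.0, pi / 3, 0.0, 0.0],
    [0.0, 0.0, 0.0, 2 * pi],
    [pi / 4, pi, 0.0, 0.0],
    [pi / 2, -pi, 0.0, pi / 3],
    # ... remaining sign-mirrored and scaled cases as listed above
], dtype=torch.float32)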
@@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,644.8335571289062
2,215.26113891601562
3,128.4003448486328
4,83.97894287109375
5,46.74564743041992
6,38.07755661010742
7,29.783950805664062
8,27.605365753173828
9,24.226465225219727
10,24.657800674438477
11,25.81980323791504
12,27.723743438720703
13,28.50939178466797
14,28.479114532470703
15,27.975360870361328
16,27.167585372924805
17,26.238357543945312
18,25.436201095581055
19,24.742801666259766
20,24.102548599243164
21,23.43455696105957
22,22.674776077270508
23,20.805768966674805
24,19.99936294555664
25,19.256731033325195
26,15.360869407653809
27,14.673091888427734
28,14.179522514343262
29,13.689996719360352
30,13.191728591918945
31,12.630029678344727
32,11.739797592163086
33,10.139701843261719
34,9.716131210327148
35,9.348210334777832
36,9.01505184173584
37,8.707425117492676
38,8.410775184631348
39,8.118062973022461
40,7.822728157043457
41,7.514976501464844
42,7.188068389892578
43,6.83927059173584
44,6.478740215301514
45,6.136829376220703
46,5.821681976318359
47,5.541790008544922
48,5.275959014892578
49,5.048756122589111
50,4.975151062011719
51,4.896034240722656
52,4.81827974319458
53,4.744032382965088
54,4.673309803009033
55,4.605315208435059
56,4.539967060089111
57,4.476802349090576
58,4.415337562561035
59,4.355484962463379
60,4.297171115875244
61,4.23993444442749
62,4.183554172515869
63,4.127974987030029
64,4.072920322418213
65,4.018280506134033
66,3.9640462398529053
67,3.910109043121338
68,3.856426954269409
69,3.8028271198272705
70,3.7491157054901123
71,3.6950912475585938
72,3.640612840652466
73,3.584808349609375
74,3.528102159500122
75,3.4718668460845947
76,3.417095184326172
77,3.3649251461029053
78,3.316835880279541
79,3.273427963256836
80,3.2342891693115234
81,3.1955134868621826
82,3.1419663429260254
83,3.20237135887146
84,3.249563455581665
85,3.2764530181884766
86,3.286076307296753
87,3.2826080322265625
88,3.2665212154388428
89,3.2349460124969482
90,3.1874094009399414
91,3.1095824241638184
92,3.137726306915283
93,3.149625062942505
94,3.1463003158569336
95,3.13478684425354
96,3.1171982288360596
97,3.092816114425659
98,3.055276870727539
99,3.024710178375244
100,3.0551295280456543
101,3.0515856742858887
102,3.0148844718933105
103,3.02044677734375
104,3.0339386463165283
105,3.0258543491363525
106,2.997575521469116
107,2.981809377670288
108,2.9945714473724365
109,2.960761785507202
110,2.9637069702148438
111,2.9638519287109375
112,2.944010019302368
113,2.9376676082611084
114,2.941311836242676
115,2.922370672225952
116,2.9276344776153564
117,2.9230570793151855
118,2.910398483276367
119,2.914470911026001
120,2.9052014350891113
121,2.899569511413574
122,2.899691343307495
123,2.8897809982299805
124,2.8870503902435303
125,2.883997917175293
126,2.875964403152466
127,2.8759713172912598
128,2.869760036468506
129,2.8663084506988525
130,2.864819288253784
131,2.8590943813323975
132,2.858699321746826
133,2.854114055633545
134,2.8517744541168213
135,2.8493831157684326
136,2.845494270324707
137,2.8441381454467773
138,2.8398468494415283
139,2.838564872741699
140,2.834953546524048
141,2.833138942718506
142,2.8306643962860107
143,2.8283536434173584
144,2.8267295360565186
145,2.824315071105957
146,2.8230884075164795
147,2.8207955360412598
148,2.8198399543762207
149,2.817699670791626
150,2.8167099952697754
151,2.814849853515625
152,2.813981771469116
153,2.8121609687805176
154,2.8110880851745605
155,2.8093085289001465
156,2.8082404136657715
157,2.8065898418426514
158,2.805633306503296
159,2.8042190074920654
160,2.80330753326416
161,2.801994562149048
162,2.8009207248687744
163,2.79971981048584
164,2.7985475063323975
165,2.7975475788116455
166,2.7964365482330322
167,2.795574426651001
168,2.794424533843994
169,2.7933738231658936
170,2.7923145294189453
171,2.7912774085998535
172,2.7904186248779297
173,2.7894043922424316
174,2.788450241088867
175,2.7874832153320312
176,2.786539316177368
177,2.785719394683838
178,2.784792423248291
179,2.7838704586029053
180,2.7830042839050293
181,2.7821247577667236
182,2.7812938690185547
183,2.7804317474365234
184,2.7795510292053223
185,2.778747081756592
186,2.7779335975646973
187,2.777086019515991
188,2.7762763500213623
189,2.775479316711426
190,2.77467942237854
191,2.7738873958587646
192,2.773098945617676
193,2.7723257541656494
194,2.771559953689575
195,2.770782947540283
196,2.7700226306915283
197,2.7692785263061523
198,2.768521308898926
199,2.767770528793335
200,2.7670392990112305
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.08
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
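These config files differ only in their Learning Rate line; the rates visible in this part of the diff are 0.05, 0.08, 0.1, 0.125, 0.16, 0.2, and 0.25, so the runs look like the output of a loop over optimizer settings. A runnable sketch of such a sweep driver (train_one_lr, the optimizer choice, and the file names are stand-ins, not code from this commit; the real loop integrates the pendulum dynamics under the neural controller):

import torch

def train_one_lr(lr, epochs=200):
    # Stand-in model and loss; the actual run trains the pendulum controller.
    model = torch.nn.Linear(4, 1)
    optimizer = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=0)
    losses = []
    for _ in range(epochs + 1):  # the logs record epochs 0 through 200
        optimizer.zero_grad()
        loss = model(torch.randn(8, 4)).pow(2).mean()
        loss.backward()
        losses.append(loss.item())
        optimizer.step()
    return losses

for lr in [0.05, 0.08, 0.1, 0.125, 0.16, 0.2, 0.25]:
    losses = train_one_lr(lr)
    with open(f"lr_{lr}_loss.csv", "w") as f:  # placeholder file names
        f.write("Epoch,Loss\n")
        f.writelines(f"{e},{l}\n" for e, l in enumerate(losses))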
@@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,481.686279296875
2,181.51026916503906
3,355.3207092285156
4,122.40864562988281
5,60.330623626708984
6,50.91759490966797
7,37.912994384765625
8,33.112186431884766
9,28.84535026550293
10,26.272972106933594
11,23.49559211730957
12,21.08464813232422
13,18.613008499145508
14,17.03631591796875
15,16.266324996948242
16,15.668737411499023
17,14.981435775756836
18,13.404241561889648
19,11.934287071228027
20,11.188985824584961
21,9.877406120300293
22,8.8971529006958
23,8.62330436706543
24,8.392090797424316
25,8.258062362670898
26,8.22425651550293
27,8.564380645751953
28,8.833946228027344
29,8.667107582092285
30,8.438118934631348
31,8.214349746704102
32,7.960752010345459
33,7.633885860443115
34,7.222818851470947
35,6.820206165313721
36,6.43884801864624
37,6.140181064605713
38,5.897441387176514
39,5.695476531982422
40,5.519347667694092
41,5.361562252044678
42,5.223142147064209
43,5.111656188964844
44,5.02293586730957
45,4.940958023071289
46,4.857954978942871
47,4.7718634605407715
48,4.682060241699219
49,4.588162422180176
50,4.489864826202393
51,4.389747142791748
52,4.296416282653809
53,4.223264694213867
54,4.170172691345215
55,4.12441873550415
56,4.081700325012207
57,4.041499137878418
58,3.9991700649261475
59,3.950439214706421
60,3.8954970836639404
61,3.836456298828125
62,3.7764170169830322
63,3.718632459640503
64,3.6660656929016113
65,3.618180513381958
66,3.572890281677246
67,3.530268430709839
68,3.488551139831543
69,3.4476544857025146
70,3.4081103801727295
71,3.3707573413848877
72,3.3357975482940674
73,3.3026270866394043
74,3.2701234817504883
75,3.2370574474334717
76,3.202831745147705
77,3.1679775714874268
78,3.1346850395202637
79,3.105441093444824
80,3.0802769660949707
81,3.0570485591888428
82,3.0333969593048096
83,3.0083813667297363
84,2.98229718208313
85,2.956451416015625
86,2.932147741317749
87,2.910200595855713
88,2.8916757106781006
89,2.876786470413208
90,2.8648440837860107
91,2.855344533920288
92,2.847583532333374
93,2.841017484664917
94,2.8352744579315186
95,2.830068826675415
96,2.82525897026062
97,2.820791482925415
98,2.8166685104370117
99,2.812885284423828
100,2.8094534873962402
101,2.8064448833465576
102,2.803892135620117
103,2.8016836643218994
104,2.7995994091033936
105,2.797438621520996
106,2.795107126235962
107,2.7926275730133057
108,2.7900969982147217
109,2.7876229286193848
110,2.7852766513824463
111,2.7830710411071777
112,2.7809808254241943
113,2.7789688110351562
114,2.777012586593628
115,2.7751083374023438
116,2.7732582092285156
117,2.7714569568634033
118,2.769692897796631
119,2.7679555416107178
120,2.766244411468506
121,2.7645716667175293
122,2.762967586517334
123,2.761441707611084
124,2.759993553161621
125,2.7586171627044678
126,2.7573001384735107
127,2.7560253143310547
128,2.7547781467437744
129,2.753549814224243
130,2.7523365020751953
131,2.7511441707611084
132,2.7499773502349854
133,2.748842239379883
134,2.74773907661438
135,2.7466633319854736
136,2.7456111907958984
137,2.7445733547210693
138,2.7435455322265625
139,2.7425248622894287
140,2.741511821746826
141,2.740504741668701
142,2.739506483078003
143,2.7385175228118896
144,2.7375380992889404
145,2.7365682125091553
146,2.735607147216797
147,2.734656572341919
148,2.7337160110473633
149,2.7327868938446045
150,2.731867551803589
151,2.730954885482788
152,2.730048894882202
153,2.729149580001831
154,2.728256940841675
155,2.72737193107605
156,2.7264957427978516
157,2.7256274223327637
158,2.72476863861084
159,2.723917007446289
160,2.7230725288391113
161,2.722234010696411
162,2.7214012145996094
163,2.720574140548706
164,2.719752550125122
165,2.7189362049102783
166,2.718125104904175
167,2.7173190116882324
168,2.7165169715881348
169,2.7157201766967773
170,2.714927911758423
171,2.7141404151916504
172,2.7133569717407227
173,2.712578058242798
174,2.711803913116455
175,2.711033582687378
176,2.710268020629883
177,2.7095067501068115
178,2.708749532699585
179,2.7079970836639404
180,2.7072479724884033
181,2.706502676010132
182,2.7057619094848633
183,2.7050249576568604
184,2.7042922973632812
185,2.7035627365112305
186,2.7028379440307617
187,2.702116012573242
188,2.701399087905884
189,2.70068621635437
190,2.699977159500122
191,2.6992716789245605
192,2.6985700130462646
193,2.697871685028076
194,2.6971774101257324
195,2.6964871883392334
196,2.6958022117614746
197,2.6951215267181396
198,2.6944448947906494
199,2.6937716007232666
200,2.6931018829345703
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.1
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
@@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,469.2649230957031
2,326.39837646484375
3,213.38052368164062
4,50.33772277832031
5,29.021835327148438
6,25.483436584472656
7,31.197837829589844
8,31.746530532836914
9,31.48188591003418
10,31.22617530822754
11,31.1026611328125
12,30.768352508544922
13,30.411008834838867
14,29.993667602539062
15,29.517818450927734
16,28.948278427124023
17,28.161788940429688
18,26.979352951049805
19,26.17275047302246
20,25.484460830688477
21,24.835721969604492
22,24.191198348999023
23,23.523496627807617
24,22.81980323791504
25,22.155441284179688
26,21.573843002319336
27,20.940412521362305
28,20.22989845275879
29,19.461017608642578
30,18.64518928527832
31,17.78531265258789
32,16.921890258789062
33,16.067153930664062
34,15.233980178833008
35,14.445463180541992
36,13.704170227050781
37,13.012430191040039
38,12.367860794067383
39,11.769373893737793
40,11.210058212280273
41,10.685431480407715
42,10.193699836730957
43,9.727736473083496
44,9.281412124633789
45,8.85403823852539
46,8.441299438476562
47,8.038119316101074
48,7.6389007568359375
49,7.241733551025391
50,6.854685306549072
51,6.486843109130859
52,6.1454758644104
53,5.838573455810547
54,5.568541049957275
55,5.331871509552002
56,5.121151447296143
57,4.925756454467773
58,4.739625453948975
59,4.560769081115723
60,4.389829635620117
61,4.229304790496826
62,4.083106994628906
63,3.9540810585021973
64,3.8425064086914062
65,3.7474048137664795
66,3.668313503265381
67,3.6057379245758057
68,3.5602753162384033
69,3.5308995246887207
70,3.514744281768799
71,3.5086121559143066
72,3.5091707706451416
73,3.5134897232055664
74,3.519155263900757
75,3.5244719982147217
76,3.5282037258148193
77,3.5291249752044678
78,3.526078462600708
79,3.517765522003174
80,3.5036728382110596
81,3.4844303131103516
82,3.461963415145874
83,3.4379873275756836
84,3.4133715629577637
85,3.3891208171844482
86,3.365788698196411
87,3.3436439037323
88,3.3228886127471924
89,3.3037564754486084
90,3.2864136695861816
91,3.270982265472412
92,3.2574620246887207
93,3.2457728385925293
94,3.235722064971924
95,3.227010726928711
96,3.2192482948303223
97,3.211989402770996
98,3.204843282699585
99,3.197505235671997
100,3.1897997856140137
101,3.181614398956299
102,3.172982692718506
103,3.1640524864196777
104,3.1548635959625244
105,3.1454713344573975
106,3.1358983516693115
107,3.1261637210845947
108,3.1163058280944824
109,3.106369972229004
110,3.096465587615967
111,3.0867152214050293
112,3.0772151947021484
113,3.0680408477783203
114,3.059187173843384
115,3.0506298542022705
116,3.042323589324951
117,3.0342187881469727
118,3.0262749195098877
119,3.0184576511383057
120,3.010756492614746
121,3.0031373500823975
122,2.995593309402466
123,2.988088846206665
124,2.9806129932403564
125,2.973134994506836
126,2.965670585632324
127,2.9582111835479736
128,2.9507784843444824
129,2.943403720855713
130,2.9360909461975098
131,2.9288642406463623
132,2.9217276573181152
133,2.914686679840088
134,2.907742738723755
135,2.900902509689331
136,2.8941538333892822
137,2.88749623298645
138,2.8809375762939453
139,2.874469041824341
140,2.868089199066162
141,2.8617947101593018
142,2.8556067943573
143,2.849527597427368
144,2.843579053878784
145,2.8377885818481445
146,2.8321847915649414
147,2.826768398284912
148,2.821537733078003
149,2.816493034362793
150,2.8116228580474854
151,2.806945323944092
152,2.802446126937866
153,2.798109769821167
154,2.7939236164093018
155,2.7898664474487305
156,2.785916805267334
157,2.782067060470581
158,2.7783143520355225
159,2.7746522426605225
160,2.771038770675659
161,2.767500638961792
162,2.7640469074249268
163,2.760666608810425
164,2.757354736328125
165,2.7541048526763916
166,2.7509143352508545
167,2.7477827072143555
168,2.7447049617767334
169,2.7416677474975586
170,2.738690137863159
171,2.7357540130615234
172,2.7328598499298096
173,2.7300024032592773
174,2.727181911468506
175,2.7243926525115967
176,2.721630334854126
177,2.718885898590088
178,2.71616268157959
179,2.713449239730835
180,2.710740327835083
181,2.7080299854278564
182,2.705319881439209
183,2.7026026248931885
184,2.699883460998535
185,2.6971538066864014
186,2.6944162845611572
187,2.6916680335998535
188,2.6889026165008545
189,2.6861231327056885
190,2.6833181381225586
191,2.6804773807525635
192,2.6775612831115723
193,2.6745879650115967
194,2.6716082096099854
195,2.6686434745788574
196,2.6657087802886963
197,2.662813663482666
198,2.659982442855835
199,2.6572070121765137
200,2.654489755630493
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.125
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
@@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,481.4687194824219
2,96.23600006103516
3,46.30360412597656
4,42.67131423950195
5,22.463510513305664
6,16.559965133666992
7,14.444090843200684
8,13.130633354187012
9,11.771244049072266
10,10.568934440612793
11,9.973428726196289
12,9.366987228393555
13,8.804798126220703
14,8.27409553527832
15,7.755214691162109
16,7.259061813354492
17,6.791775703430176
18,6.366294860839844
19,5.982115745544434
20,5.6443047523498535
21,5.381455421447754
22,5.193507671356201
23,5.0417962074279785
24,4.908753395080566
25,4.789371967315674
26,4.680208683013916
27,4.579638957977295
28,4.486494541168213
29,4.399327754974365
30,4.317553997039795
31,4.240206241607666
32,4.166420936584473
33,4.095785617828369
34,4.027955055236816
35,3.962388515472412
36,3.898862600326538
37,3.836763858795166
38,3.7760908603668213
39,3.7168807983398438
40,3.658808946609497
41,3.6020450592041016
42,3.546821117401123
43,3.4929871559143066
44,3.4405100345611572
45,3.3894295692443848
46,3.339735746383667
47,3.291717767715454
48,3.24536395072937
49,3.2008402347564697
50,3.1579949855804443
51,3.117474317550659
52,3.079694986343384
53,3.0453310012817383
54,3.0150771141052246
55,2.989461898803711
56,2.968668222427368
57,2.9526264667510986
58,2.940664529800415
59,2.9317538738250732
60,2.9254977703094482
61,2.9206764698028564
62,2.9161529541015625
63,2.9112279415130615
64,2.9060800075531006
65,2.9006295204162598
66,2.8947958946228027
67,2.8885629177093506
68,2.8819518089294434
69,2.8750643730163574
70,2.8680176734924316
71,2.860836982727051
72,2.853696823120117
73,2.847111463546753
74,2.8412044048309326
75,2.83565354347229
76,2.8303771018981934
77,2.825225353240967
78,2.8200435638427734
79,2.8147404193878174
80,2.809227705001831
81,2.8034520149230957
82,2.7973809242248535
83,2.791006565093994
84,2.784371852874756
85,2.777538776397705
86,2.7705130577087402
87,2.763420343399048
88,2.7563579082489014
89,2.749314546585083
90,2.742288589477539
91,2.73541522026062
92,2.7287261486053467
93,2.7222392559051514
94,2.716036081314087
95,2.7100298404693604
96,2.7042503356933594
97,2.6986300945281982
98,2.6931324005126953
99,2.687795877456665
100,2.6827449798583984
101,2.677870988845825
102,2.673168659210205
103,2.6686530113220215
104,2.664296865463257
105,2.660098075866699
106,2.6560823917388916
107,2.652400493621826
108,2.64913272857666
109,2.646261215209961
110,2.6437904834747314
111,2.641684055328369
112,2.6399388313293457
113,2.638502597808838
114,2.6373291015625
115,2.6363699436187744
116,2.635528087615967
117,2.6347572803497314
118,2.6341447830200195
119,2.633460760116577
120,2.632854700088501
121,2.632253646850586
122,2.63169264793396
123,2.6311864852905273
124,2.6307148933410645
125,2.630263090133667
126,2.629851818084717
127,2.629472494125366
128,2.6291165351867676
129,2.6287875175476074
130,2.6284642219543457
131,2.6281540393829346
132,2.627856492996216
133,2.627563953399658
134,2.627284526824951
135,2.6270041465759277
136,2.6267237663269043
137,2.6264452934265137
138,2.6261661052703857
139,2.625885009765625
140,2.625603437423706
141,2.6253230571746826
142,2.6250407695770264
143,2.6247570514678955
144,2.6244759559631348
145,2.624196767807007
146,2.6239166259765625
147,2.6236376762390137
148,2.6233632564544678
149,2.623093366622925
150,2.622823715209961
151,2.622556209564209
152,2.622292995452881
153,2.6220321655273438
154,2.6217737197875977
155,2.6215174198150635
156,2.621264934539795
157,2.621016263961792
158,2.6207706928253174
159,2.620528221130371
160,2.620288372039795
161,2.6200501918792725
162,2.6198153495788574
163,2.6195836067199707
164,2.619354486465454
165,2.619126319885254
166,2.6189000606536865
167,2.6186749935150146
168,2.6184520721435547
169,2.6182305812835693
170,2.6180107593536377
171,2.6177918910980225
172,2.61757493019104
173,2.617359161376953
174,2.617144823074341
175,2.6169333457946777
176,2.616722822189331
177,2.6165122985839844
178,2.6163036823272705
179,2.616096019744873
180,2.615889310836792
181,2.6156835556030273
182,2.615478992462158
183,2.6152756214141846
184,2.6150732040405273
185,2.6148719787597656
186,2.6146717071533203
187,2.614471673965454
188,2.614272117614746
189,2.614072561264038
190,2.6138744354248047
191,2.6136767864227295
192,2.6134798526763916
193,2.613284111022949
194,2.6130881309509277
195,2.6128928661346436
196,2.6126976013183594
197,2.6125032901763916
198,2.6123099327087402
199,2.612117052078247
200,2.611924171447754
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.16
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
@@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,553.6963500976562
2,49.08141326904297
3,46.070884704589844
4,46.076011657714844
5,51.14850616455078
6,44.729087829589844
7,39.44825744628906
8,35.0629768371582
9,28.447023391723633
10,23.73651885986328
11,20.84491539001465
12,18.04330062866211
13,15.992259979248047
14,14.384294509887695
15,13.096992492675781
16,12.36588191986084
17,12.033136367797852
18,11.882885932922363
19,11.748250961303711
20,11.487778663635254
21,11.079614639282227
22,10.560137748718262
23,9.991604804992676
24,9.441896438598633
25,8.951383590698242
26,8.533259391784668
27,8.188826560974121
28,7.9125518798828125
29,7.69334077835083
30,7.5136542320251465
31,7.357945442199707
32,7.215080738067627
33,7.0778608322143555
34,6.940879821777344
35,6.800954341888428
36,6.656413555145264
37,6.506791114807129
38,6.352499485015869
39,6.194592475891113
40,6.0344014167785645
41,5.8734822273254395
42,5.713569641113281
43,5.556545257568359
44,5.4044318199157715
45,5.259422779083252
46,5.122798919677734
47,4.994835376739502
48,4.875750541687012
49,4.7653279304504395
50,4.662687301635742
51,4.566817283630371
52,4.4768900871276855
53,4.392213821411133
54,4.3114728927612305
55,4.233184337615967
56,4.156534671783447
57,4.081078052520752
58,4.006781101226807
59,3.9344382286071777
60,3.8644073009490967
61,3.7971014976501465
62,3.7331008911132812
63,3.6725046634674072
64,3.6153955459594727
65,3.5623679161071777
66,3.5130515098571777
67,3.4666190147399902
68,3.422694206237793
69,3.380918264389038
70,3.34086012840271
71,3.302433967590332
72,3.2654566764831543
73,3.229963541030884
74,3.195887804031372
75,3.1632513999938965
76,3.132321834564209
77,3.10307240486145
78,3.0756163597106934
79,3.049919843673706
80,3.025923728942871
81,3.0035178661346436
82,2.9826807975769043
83,2.963444232940674
84,2.9456756114959717
85,2.9293460845947266
86,2.914433002471924
87,2.9008066654205322
88,2.8883070945739746
89,2.876821994781494
90,2.866337537765503
91,2.8568198680877686
92,2.8481810092926025
93,2.840397357940674
94,2.8333535194396973
95,2.8269832134246826
96,2.8211967945098877
97,2.8158717155456543
98,2.8109676837921143
99,2.8064279556274414
100,2.8022096157073975
101,2.798267364501953
102,2.794576406478882
103,2.791100025177002
104,2.7878060340881348
105,2.7846758365631104
106,2.7817060947418213
107,2.778864622116089
108,2.7761218547821045
109,2.77345871925354
110,2.770862579345703
111,2.768308162689209
112,2.7657721042633057
113,2.7632362842559814
114,2.7607133388519287
115,2.758206367492676
116,2.755723714828491
117,2.753251314163208
118,2.750777244567871
119,2.7483010292053223
120,2.745819330215454
121,2.7433362007141113
122,2.7408525943756104
123,2.738375663757324
124,2.735898733139038
125,2.7334342002868652
126,2.730978012084961
127,2.7285351753234863
128,2.726107597351074
129,2.723715305328369
130,2.721348285675049
131,2.7189998626708984
132,2.7166740894317627
133,2.714364767074585
134,2.7120742797851562
135,2.70979642868042
136,2.707547664642334
137,2.7053351402282715
138,2.703160524368286
139,2.701014757156372
140,2.6989171504974365
141,2.6968579292297363
142,2.6948509216308594
143,2.692901372909546
144,2.6910157203674316
145,2.689157247543335
146,2.687323808670044
147,2.685511350631714
148,2.6837263107299805
149,2.6819632053375244
150,2.680239200592041
151,2.6785459518432617
152,2.676875114440918
153,2.6752192974090576
154,2.673585891723633
155,2.6719651222229004
156,2.670372247695923
157,2.6687915325164795
158,2.6672275066375732
159,2.665682792663574
160,2.6641595363616943
161,2.6626577377319336
162,2.6611926555633545
163,2.6597442626953125
164,2.6583008766174316
165,2.656860113143921
166,2.6554226875305176
167,2.6539745330810547
168,2.652524709701538
169,2.651076078414917
170,2.6496360301971436
171,2.6482040882110596
172,2.6467795372009277
173,2.6453614234924316
174,2.6439476013183594
175,2.6425416469573975
176,2.6411399841308594
177,2.639739751815796
178,2.6383423805236816
179,2.636948823928833
180,2.6355528831481934
181,2.634155035018921
182,2.6327567100524902
183,2.6313607692718506
184,2.6299684047698975
185,2.6285769939422607
186,2.627183437347412
187,2.625798225402832
188,2.6244194507598877
189,2.623044729232788
190,2.62166690826416
191,2.6202921867370605
192,2.618919849395752
193,2.617548942565918
194,2.616185426712036
195,2.614823579788208
196,2.613467216491699
197,2.612114667892456
198,2.610764980316162
199,2.609421968460083
200,2.608083486557007
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.2
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
@@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,638.138916015625
2,126.95638275146484
3,25.703227996826172
4,17.30948257446289
5,14.884600639343262
6,10.920865058898926
7,9.254437446594238
8,7.943879127502441
9,6.872048377990723
10,6.092459678649902
11,5.6081719398498535
12,5.297410011291504
13,5.066235065460205
14,4.879238605499268
15,4.723977565765381
16,4.591020584106445
17,4.4769816398620605
18,4.377741813659668
19,4.291906833648682
20,4.218208312988281
21,4.155269145965576
22,4.10132360458374
23,4.055699348449707
24,4.017193794250488
25,3.9850821495056152
26,3.9585351943969727
27,3.936767339706421
28,3.9188969135284424
29,3.9041504859924316
30,3.8918404579162598
31,3.881377935409546
32,3.8722589015960693
33,3.864015579223633
34,3.8562610149383545
35,3.8487372398376465
36,3.841275453567505
37,3.833747148513794
38,3.826080322265625
39,3.8182294368743896
40,3.810185670852661
41,3.8019659519195557
42,3.793586015701294
43,3.7850866317749023
44,3.7764933109283447
45,3.767857551574707
46,3.7592031955718994
47,3.7505853176116943
48,3.7419936656951904
49,3.7334494590759277
50,3.7249693870544434
51,3.716557502746582
52,3.7081964015960693
53,3.6998841762542725
54,3.69161319732666
55,3.6833760738372803
56,3.6751856803894043
57,3.6670145988464355
58,3.6588683128356934
59,3.650744676589966
60,3.6426455974578857
61,3.634566307067871
62,3.626511812210083
63,3.6184847354888916
64,3.610483407974243
65,3.6025326251983643
66,3.594625949859619
67,3.5867767333984375
68,3.578998565673828
69,3.5712890625
70,3.5636508464813232
71,3.556091547012329
72,3.548602342605591
73,3.5411875247955322
74,3.53383731842041
75,3.526548385620117
76,3.519317388534546
77,3.512122869491577
78,3.504964590072632
79,3.497835874557495
80,3.4907400608062744
81,3.4836747646331787
82,3.476620674133301
83,3.4695959091186523
84,3.462590217590332
85,3.4555938243865967
86,3.4486193656921387
87,3.441661834716797
88,3.434725761413574
89,3.4278018474578857
90,3.420893430709839
91,3.4140071868896484
92,3.407135009765625
93,3.4002761840820312
94,3.3934319019317627
95,3.386613368988037
96,3.3798131942749023
97,3.3730344772338867
98,3.3662712574005127
99,3.359532594680786
100,3.3528053760528564
101,3.346094846725464
102,3.3393967151641846
103,3.3327183723449707
104,3.326056718826294
105,3.3194069862365723
106,3.312777280807495
107,3.3061654567718506
108,3.299579620361328
109,3.2930171489715576
110,3.2864811420440674
111,3.2799644470214844
112,3.2734663486480713
113,3.266991138458252
114,3.260540723800659
115,3.254108190536499
116,3.2476890087127686
117,3.2412848472595215
118,3.234895706176758
119,3.2285244464874268
120,3.2221744060516357
121,3.2158422470092773
122,3.2095162868499756
123,3.203117847442627
124,3.1966679096221924
125,3.1901097297668457
126,3.1834967136383057
127,3.176847219467163
128,3.1701719760894775
129,3.163466691970825
130,3.1567556858062744
131,3.1500422954559326
132,3.143339157104492
133,3.136652708053589
134,3.1299829483032227
135,3.1233255863189697
136,3.116701602935791
137,3.110090970993042
138,3.103492259979248
139,3.0969111919403076
140,3.090346574783325
141,3.083801031112671
142,3.0772898197174072
143,3.0708088874816895
144,3.064385414123535
145,3.0579960346221924
146,3.051602363586426
147,3.0452427864074707
148,3.0389111042022705
149,3.0326015949249268
150,3.0263562202453613
151,3.020143747329712
152,3.0139853954315186
153,3.007880687713623
154,3.0018320083618164
155,2.9958269596099854
156,2.9898507595062256
157,2.9839470386505127
158,2.9780802726745605
159,2.972240686416626
160,2.966510057449341
161,2.960875988006592
162,2.9553921222686768
163,2.9501094818115234
164,2.945035934448242
165,2.9401001930236816
166,2.9353580474853516
167,2.9308011531829834
168,2.9264252185821533
169,2.9222323894500732
170,2.9182190895080566
171,2.914371967315674
172,2.910691499710083
173,2.9071621894836426
174,2.903806447982788
175,2.9006123542785645
176,2.8975651264190674
177,2.8946545124053955
178,2.8918910026550293
179,2.889240264892578
180,2.886709690093994
181,2.884303569793701
182,2.8820247650146484
183,2.8798563480377197
184,2.877760887145996
185,2.8757522106170654
186,2.873821973800659
187,2.8719937801361084
188,2.8702189922332764
189,2.8685176372528076
190,2.866842269897461
191,2.8651974201202393
192,2.8636062145233154
193,2.8620219230651855
194,2.860455274581909
195,2.8589398860931396
196,2.8574321269989014
197,2.8559417724609375
198,2.8544905185699463
199,2.8530449867248535
200,2.8516249656677246
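Reading the tail of each loss file gives a quick numeric summary of the sweep without plotting. A sketch (placeholder file names again; the actual CSV paths are not shown in this diff):

import numpy as np

# Placeholder paths; substitute the real per-run CSV files.
for lr in [0.05, 0.08, 0.1, 0.125, 0.16, 0.2, 0.25]:
    losses = np.loadtxt(f"lr_{lr}_loss.csv", delimiter=",", skiprows=1)[:, 1]
    print(f"lr={lr}: final loss {losses[-1]:.4f}, best loss {losses.min():.4f}")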
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.25
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Constant weight: All weights are 1

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
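For context, this is the first config of the constant-weight sweep. A minimal sketch of how the logged loss function could be exercised, assuming the constant weight function described above and a dummy trajectory (weight_fn, the tensor shapes, and the dummy data are illustrative assumptions here, not the repository's training harness):

import torch

# Constant weight: all weights are 1 (matches the Weight Description above)
def weight_fn(t):
    return torch.ones_like(t)

def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]          # pendulum angle over time, per case
    desired_theta = state_traj[:, :, 3]  # commanded angle, carried in the state
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)        # one weight per time step, broadcast over cases
    return torch.mean(weights * (theta - desired_theta) ** 2)

# Illustrative call: 1000 time points, 22 training cases, 4 state components
t_span = torch.linspace(0, 10, 1000)
state_traj = torch.zeros(1000, 22, 4)
print(loss_fn(state_traj, t_span))  # tensor(0.) for the all-zero trajectory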
@ -0,0 +1,202 @@
Epoch,Loss
0,1087.9490966796875
1,1172.7674560546875
2,100.25691986083984
3,119402.2890625
4,133.49362182617188
5,59.150943756103516
6,52.43195343017578
7,54.74357223510742
8,60.726932525634766
9,63.98731231689453
10,64.91996765136719
11,63.94999313354492
12,62.00520324707031
13,58.6434440612793
14,53.395198822021484
15,46.27482223510742
16,40.115535736083984
17,36.4989013671875
18,33.63146209716797
19,31.102542877197266
20,28.929868698120117
21,26.843608856201172
22,25.418996810913086
23,24.649375915527344
24,24.061758041381836
25,23.3399658203125
26,22.555253982543945
27,21.701343536376953
28,20.815927505493164
29,19.92568588256836
30,19.006975173950195
31,18.06807518005371
32,17.145551681518555
33,16.292030334472656
34,15.532393455505371
35,14.863213539123535
36,14.26880931854248
37,13.735945701599121
38,13.256166458129883
39,12.821511268615723
40,12.426076889038086
41,12.06234359741211
42,11.727265357971191
43,11.416626930236816
44,11.131233215332031
45,10.866288185119629
46,10.621492385864258
47,10.391637802124023
48,10.174287796020508
49,9.965923309326172
50,9.76606559753418
51,9.573927879333496
52,9.389328002929688
53,9.210346221923828
54,9.037010192871094
55,8.868074417114258
56,8.70367431640625
57,8.543137550354004
58,8.385336875915527
59,8.230120658874512
60,8.077096939086914
61,7.925225257873535
62,7.7738261222839355
63,7.623720169067383
64,7.475256443023682
65,7.329106330871582
66,7.182750701904297
67,7.031722068786621
68,6.880136966705322
69,6.727870941162109
70,6.576241493225098
71,6.42543888092041
72,6.277204513549805
73,6.132631301879883
74,5.991875171661377
75,5.855222702026367
76,5.723228931427002
77,5.593400955200195
78,5.467288017272949
79,5.346905708312988
80,5.2328925132751465
81,5.125042915344238
82,5.021970748901367
83,4.923079013824463
84,4.8285136222839355
85,4.7386698722839355
86,4.653127670288086
87,4.571839809417725
88,4.494254112243652
89,4.4201788902282715
90,4.349775314331055
91,4.2830986976623535
92,4.219934940338135
93,4.160295009613037
94,4.103320121765137
95,4.048985958099365
96,3.9969615936279297
97,3.9467976093292236
98,3.898562431335449
99,3.8514668941497803
100,3.806110382080078
101,3.762852668762207
102,3.7217857837677
103,3.6830904483795166
104,3.6466803550720215
105,3.6124815940856934
106,3.580472230911255
107,3.550790548324585
108,3.5235748291015625
109,3.4986579418182373
110,3.475843667984009
111,3.4549434185028076
112,3.43583345413208
113,3.4183998107910156
114,3.402480363845825
115,3.387868642807007
116,3.374448776245117
117,3.362361431121826
118,3.351301908493042
119,3.3412811756134033
120,3.3322646617889404
121,3.324073076248169
122,3.316577672958374
123,3.309654712677002
124,3.3031740188598633
125,3.2971014976501465
126,3.2913944721221924
127,3.285975217819214
128,3.2807655334472656
129,3.2757186889648438
130,3.270798921585083
131,3.2659785747528076
132,3.261248826980591
133,3.2565717697143555
134,3.2519161701202393
135,3.2472610473632812
136,3.2425618171691895
137,3.2378664016723633
138,3.2331740856170654
139,3.2284865379333496
140,3.223806858062744
141,3.2191410064697266
142,3.214459180831909
143,3.2097601890563965
144,3.20505690574646
145,3.2003579139709473
146,3.1956534385681152
147,3.1909520626068115
148,3.1862666606903076
149,3.1815884113311768
150,3.1769208908081055
151,3.172271728515625
152,3.1676464080810547
153,3.163057804107666
154,3.1585025787353516
155,3.1539885997772217
156,3.1495325565338135
157,3.1451261043548584
158,3.140794038772583
159,3.1365439891815186
160,3.132366418838501
161,3.128267288208008
162,3.124246835708618
163,3.120305061340332
164,3.1164445877075195
165,3.1126551628112793
166,3.1089229583740234
167,3.105269193649292
168,3.101688861846924
169,3.0981662273406982
170,3.094719171524048
171,3.091362476348877
172,3.088073253631592
173,3.0848498344421387
174,3.0816967487335205
175,3.078617811203003
176,3.0756008625030518
177,3.0726370811462402
178,3.0697410106658936
179,3.0669026374816895
180,3.0641438961029053
181,3.0614750385284424
182,3.0588855743408203
183,3.0564775466918945
184,3.054225444793701
185,3.0520174503326416
186,3.0498387813568115
187,3.04767107963562
188,3.0455057621002197
189,3.043341636657715
190,3.041179656982422
191,3.039034128189087
192,3.0368995666503906
193,3.034780502319336
194,3.0326836109161377
195,3.0306124687194824
196,3.028563976287842
197,3.0265390872955322
198,3.024538993835449
199,3.022556781768799
200,3.020589590072632
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.3
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,42 @@
Epoch,Loss
0,1087.9490966796875
1,2825.916259765625
2,45.81127166748047
3,167.4499053955078
4,39.7562255859375
5,31.285991668701172
6,28.110328674316406
7,26.23940086364746
8,24.85036849975586
9,23.69524383544922
10,22.691848754882812
11,21.820920944213867
12,21.053966522216797
13,20.368806838989258
14,19.758020401000977
15,19.222614288330078
16,18.762393951416016
17,18.373872756958008
18,18.048959732055664
19,17.780969619750977
20,17.5612735748291
21,17.3798828125
22,17.223751068115234
23,17.082439422607422
24,64.6371078491211
25,12853.6376953125
26,29927.26953125
27,35657.75
28,40116.0234375
29,43236.8984375
30,43042.046875
31,45883.66796875
32,44355.76171875
33,105720.296875
34,185412.984375
35,196373.640625
36,196373.640625
37,196373.640625
38,196373.640625
39,196373.640625
40,196373.640625
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.4
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,44 @@
Epoch,Loss
0,1087.9490966796875
1,36838.27734375
2,44.573402404785156
3,105924.5390625
4,41.59011459350586
5,36.11030578613281
6,35.55046081542969
7,37.81698989868164
8,41.02517318725586
9,42.91999053955078
10,45.7131233215332
11,41.85399627685547
12,40.049232482910156
13,38.6837043762207
14,37.556556701660156
15,36.00605773925781
16,33.6727409362793
17,31.685684204101562
18,37.23391342163086
19,37.71848678588867
20,33.69709014892578
21,31.826297760009766
22,31.2726993560791
23,31.138790130615234
24,31.04647445678711
25,30.865480422973633
26,30.721553802490234
27,30.514291763305664
28,30.227590560913086
29,29.922489166259766
30,29.53095245361328
31,29.23976707458496
32,28.990644454956055
33,28.765079498291016
34,28.542724609375
35,28.22769546508789
36,27.828588485717773
37,24.716533660888672
38,25.0787410736084
39,nan
40,nan
41,nan
42,nan
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.5
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,17 @@
Epoch,Loss
0,1087.9490966796875
1,97624.71875
2,193010.671875
3,193945.328125
4,184485.71875
5,139493.53125
6,74732.3828125
7,81682.4296875
8,27178.181640625
9,9615.1865234375
10,213.50404357910156
11,27.556537628173828
12,nan
13,nan
14,nan
15,nan
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.6
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,11 @@
Epoch,Loss
0,1087.9490966796875
1,139986.984375
2,195732.21875
3,196348.1875
4,196373.640625
5,196373.640625
6,196373.640625
7,196373.640625
8,196373.640625
9,196373.640625
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.7
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,10 @@
Epoch,Loss
0,1087.9490966796875
1,165115.203125
2,196340.71875
3,196373.640625
4,196373.640625
5,196373.640625
6,196373.640625
7,196373.640625
8,196373.640625
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.8
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,10 @@
Epoch,Loss
0,1087.9490966796875
1,178371.703125
2,196373.4375
3,196373.640625
4,196373.640625
5,196373.640625
6,196373.640625
7,196373.640625
8,196373.640625
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.9
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,9 @@
Epoch,Loss
0,1087.9490966796875
1,188662.65625
2,196373.640625
3,196373.640625
4,196373.640625
5,196373.640625
6,196373.640625
7,196373.640625
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 1
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,10 @@
Epoch,Loss
0,1087.9490966796875
1,192997.171875
2,196373.40625
3,196373.640625
4,196373.640625
5,196373.640625
6,196373.640625
7,196373.640625
8,196373.640625
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 16
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,8 @@
Epoch,Loss
0,1087.9490966796875
1,197062.375
2,197062.375
3,197062.375
4,197062.375
5,197062.375
6,197062.375
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 2
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,8 @@
Epoch,Loss
0,1087.9490966796875
1,197062.375
2,197062.375
3,197062.375
4,197062.375
5,197062.375
6,197062.375
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 4
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,8 @@
Epoch,Loss
0,1087.9490966796875
1,197062.375
2,197062.375
3,197062.375
4,197062.375
5,197062.375
6,197062.375
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 8
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Constant weight: All weights are 1
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,8 @@
Epoch,Loss
0,1087.9490966796875
1,197062.375
2,197062.375
3,197062.375
4,197062.375
5,197062.375
6,197062.375
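Taken together, the constant-weight sweep shows a clear pattern: learning rates at or below 0.25 decrease the loss steadily, rates from 0.3 to 0.5 train briefly and then blow up (loss pinning near 2e5 or going to nan), and rates of 0.6 and above diverge within a handful of epochs. The progressively shorter logs suggest each run was cut off once its loss went to nan or stuck at the diverged plateau; a minimal guard of that kind might look like the helper below (a hypothetical sketch, not code from this repository):

# Hypothetical early-stop guard for a sweep run: halt once the loss is nan
# or has been stuck at the same diverged value for `patience` epochs.
def should_stop(loss_history, initial_loss, patience=3):
    last = loss_history[-1]
    if last != last:  # nan never compares equal to itself
        return True
    plateaued = len(loss_history) > patience and all(
        l == last for l in loss_history[-patience:]
    )
    return plateaued and last > initial_loss  # stop only on a diverged plateau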
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.01
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Quadratic weight: Weights increase quadratically, normalized by max
Training Cases: identical to the LR 0.25 run above
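This run switches to a time-increasing weight. A minimal sketch of a max-normalized quadratic weight function consistent with the description above, assuming the (t+1)-style ramp used by the weight functions elsewhere in this repository and a small floor so early weights never vanish (the exact floor value used here, 0.01, is an assumption):

import torch

def weight_fn(t):
    # Quadratic ramp in time, normalized so the largest weight is 1,
    # then floored so the earliest time steps keep a small influence.
    w = (t + 1) ** 2
    w = w / w.max()
    return torch.clamp(w, min=0.01)

For t in [0, 10] this ramps from about 1/121 ≈ 0.008 (clamped up to 0.01) at t = 0 to exactly 1 at t = 10, so late-trajectory tracking error dominates the loss.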
@ -0,0 +1,202 @@
Epoch,Loss
0,618.634765625
1,512.5068969726562
2,418.2740478515625
3,356.53851318359375
4,296.678466796875
5,226.89950561523438
6,179.27964782714844
7,108.04243469238281
8,93.19664001464844
9,80.423828125
10,76.92301940917969
11,65.71626281738281
12,69.39179229736328
13,59.06550979614258
14,56.84078598022461
15,50.234397888183594
16,47.69053268432617
17,46.15516662597656
18,44.57769775390625
19,46.01506805419922
20,42.49873733520508
21,33.65034866333008
22,28.672056198120117
23,23.744226455688477
24,20.466888427734375
25,16.260761260986328
26,14.576375961303711
27,13.816559791564941
28,12.563608169555664
29,12.783113479614258
30,12.036373138427734
31,12.50173282623291
32,12.821029663085938
33,11.885549545288086
34,11.783653259277344
35,11.066816329956055
36,10.851370811462402
37,10.661981582641602
38,10.143963813781738
39,10.138947486877441
40,10.044787406921387
41,9.787169456481934
42,10.170178413391113
43,9.873494148254395
44,9.739782333374023
45,9.611645698547363
46,9.438860893249512
47,9.201300621032715
48,8.927261352539062
49,8.705388069152832
50,8.59840202331543
51,8.558561325073242
52,8.221025466918945
53,6.999885082244873
54,6.352283954620361
55,6.305906295776367
56,6.760247230529785
57,6.968291282653809
58,6.984772205352783
59,6.860345363616943
60,6.610193252563477
61,6.577552318572998
62,6.550919532775879
63,6.5334858894348145
64,6.514031887054443
65,6.484747886657715
66,6.442281246185303
67,6.380998611450195
68,6.289003372192383
69,6.206309795379639
70,6.186569690704346
71,6.145936012268066
72,6.096487045288086
73,6.050929069519043
74,6.0140910148620605
75,5.98677396774292
76,5.967842102050781
77,5.955361366271973
78,5.947484493255615
79,5.942018508911133
80,5.936336040496826
81,5.927982330322266
82,5.915496349334717
83,5.898886680603027
84,5.879325866699219
85,5.858447074890137
86,5.837739944458008
87,5.818169593811035
88,5.800340175628662
89,5.784473419189453
90,5.77069616317749
91,5.7588629722595215
92,5.748737335205078
93,5.739949703216553
94,5.732027053833008
95,5.724431991577148
96,5.716646194458008
97,5.708223819732666
98,5.698890686035156
99,5.688602447509766
100,5.677513599395752
101,5.665926933288574
102,5.654216766357422
103,5.642754077911377
104,5.631827354431152
105,5.621591091156006
106,5.612072944641113
107,5.6031951904296875
108,5.594830513000488
109,5.586812496185303
110,5.578970432281494
111,5.571152687072754
112,5.563292026519775
113,5.555334091186523
114,5.547294616699219
115,5.539220333099365
116,5.531171798706055
117,5.523221015930176
118,5.515440464019775
119,5.507876396179199
120,5.500539779663086
121,5.493409633636475
122,5.486453056335449
123,5.4796319007873535
124,5.472895622253418
125,5.466193199157715
126,5.459502220153809
127,5.452808380126953
128,5.44610595703125
129,5.439391613006592
130,5.432681560516357
131,5.425978183746338
132,5.41928768157959
133,5.412608623504639
134,5.405939102172852
135,5.3992838859558105
136,5.3926215171813965
137,5.385936737060547
138,5.379227161407471
139,5.372498989105225
140,5.365703105926514
141,5.358867645263672
142,5.351985931396484
143,5.345057010650635
144,5.338069438934326
145,5.331023216247559
146,5.323905944824219
147,5.316718578338623
148,5.309450626373291
149,5.302096366882324
150,5.294659614562988
151,5.287112712860107
152,5.279477119445801
153,5.271742820739746
154,5.263896465301514
155,5.255940914154053
156,5.247891426086426
157,5.239718437194824
158,5.231440544128418
159,5.223037242889404
160,5.2145233154296875
161,5.205879211425781
162,5.197113513946533
163,5.188205242156982
164,5.179158687591553
165,5.169968128204346
166,5.160633087158203
167,5.151153564453125
168,5.141519069671631
169,5.131739139556885
170,5.121806621551514
171,5.1117262840271
172,5.101496696472168
173,5.091122150421143
174,5.080606460571289
175,5.0699663162231445
176,5.059206485748291
177,5.048330783843994
178,5.037352085113525
179,5.026280403137207
180,5.015138626098633
181,5.003931045532227
182,4.992681980133057
183,4.981400489807129
184,4.970108509063721
185,4.958832263946533
186,4.947584629058838
187,4.936384677886963
188,4.92525577545166
189,4.9142165184021
190,4.903319358825684
191,4.892570495605469
192,4.882018089294434
193,4.871712684631348
194,4.861731052398682
195,4.8521952629089355
196,4.843234539031982
197,4.834994792938232
198,4.8276567459106445
199,4.821413040161133
200,4.816390037536621
@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.02
Weight Decay: 0
Loss Function: identical to the LR 0.25 run above
Weight Description: Quadratic weight: Weights increase quadratically, normalized by max
Training Cases: identical to the LR 0.25 run above
@ -0,0 +1,202 @@
Epoch,Loss
0,618.634765625
1,457.2260437011719
2,294.4682312011719
3,150.26934814453125
4,87.86925506591797
5,87.49060821533203
6,79.6585922241211
7,70.69172668457031
8,67.6927261352539
9,58.88420867919922
10,51.81982421875
11,47.86821746826172
12,42.56962966918945
13,41.756282806396484
14,40.5606803894043
15,36.02946472167969
16,35.94963073730469
17,31.59294891357422
18,28.988901138305664
19,23.564542770385742
20,20.328468322753906
21,20.751220703125
22,20.90888023376465
23,20.37900733947754
24,19.194168090820312
25,17.428382873535156
26,15.198305130004883
27,14.53298282623291
28,15.081659317016602
29,15.677597045898438
30,14.444275856018066
31,14.155533790588379
32,14.122387886047363
33,14.434682846069336
34,14.615585327148438
35,14.220343589782715
36,14.484125137329102
37,14.63113021850586
38,14.572996139526367
39,14.353352546691895
40,14.171430587768555
41,14.269484519958496
42,13.981660842895508
43,13.731574058532715
44,13.58879280090332
45,13.35403823852539
46,13.04866886138916
47,12.793230056762695
48,12.740480422973633
49,12.925248146057129
50,12.973424911499023
51,12.824810981750488
52,12.67618465423584
53,12.563031196594238
54,12.470541000366211
55,12.3731689453125
56,12.253230094909668
57,11.685256958007812
58,10.541313171386719
59,10.725662231445312
60,10.913461685180664
61,10.371492385864258
62,10.165139198303223
63,9.972925186157227
64,9.842660903930664
65,9.80553150177002
66,9.744610786437988
67,10.370219230651855
68,9.265556335449219
69,9.229262351989746
70,9.350346565246582
71,9.472978591918945
72,9.444009780883789
73,9.34517765045166
74,9.32608413696289
75,9.381977081298828
76,9.442815780639648
77,9.465365409851074
78,9.43093204498291
79,9.335206031799316
80,9.200751304626465
81,9.09628677368164
82,9.064566612243652
83,9.054361343383789
84,8.982239723205566
85,8.844391822814941
86,8.740495681762695
87,8.855620384216309
88,8.591736793518066
89,8.747834205627441
90,8.711812973022461
91,8.543971061706543
92,8.466073036193848
93,8.430896759033203
94,8.461553573608398
95,8.427389144897461
96,8.367480278015137
97,8.390375137329102
98,8.391266822814941
99,8.32000732421875
100,8.308785438537598
101,8.307022094726562
102,8.28654956817627
103,8.264345169067383
104,8.240246772766113
105,8.151167869567871
106,8.132182121276855
107,8.174140930175781
108,8.15860652923584
109,8.111069679260254
110,8.075194358825684
111,8.073436737060547
112,8.015433311462402
113,8.73005485534668
114,8.976434707641602
115,8.553707122802734
116,8.604686737060547
117,8.807379722595215
118,8.939229011535645
119,8.931767463684082
120,8.78708267211914
121,8.568319320678711
122,8.370025634765625
123,8.28404712677002
124,8.269047737121582
125,8.199615478515625
126,8.246225357055664
127,8.338099479675293
128,8.365686416625977
129,8.355802536010742
130,8.333215713500977
131,8.297627449035645
132,8.246842384338379
133,8.185032844543457
134,8.128602981567383
135,8.113287925720215
136,8.126509666442871
137,8.135257720947266
138,8.12671184539795
139,8.11308479309082
140,8.10457992553711
141,8.083026885986328
142,8.042129516601562
143,8.021602630615234
144,8.032041549682617
145,8.02796459197998
146,7.991859436035156
147,7.9547834396362305
148,7.942188262939453
149,7.940114974975586
150,7.9366960525512695
151,7.920051097869873
152,7.898233890533447
153,7.91605806350708
154,7.888813018798828
155,7.888662815093994
156,7.876744270324707
157,7.877063274383545
158,7.864513397216797
159,7.853992938995361
160,7.850217819213867
161,7.834463119506836
162,7.826395034790039
163,7.817248344421387
164,7.805809497833252
165,7.8009352684021
166,7.789071083068848
167,7.780850887298584
168,7.7727251052856445
169,7.760769844055176
170,7.752568244934082
171,7.739583969116211
172,7.729412078857422
173,7.717775344848633
174,7.705650806427002
175,7.6954345703125
176,7.684081077575684
177,7.67674446105957
178,7.666431903839111
179,7.658679008483887
180,7.6476149559021
181,7.638239860534668
182,7.626233100891113
183,7.6154632568359375
184,7.60280704498291
185,7.591740131378174
186,7.5797200202941895
187,7.569161415100098
188,7.5570807456970215
189,7.545845031738281
190,7.533291816711426
191,7.521530628204346
192,7.508481502532959
193,7.495509147644043
194,7.481839656829834
195,7.468157768249512
196,7.454339981079102
197,7.43977689743042
198,7.425281047821045
199,7.409961700439453
200,7.394648551940918
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.04
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Quadratic weight: Weights increase quadratically, normalized by max

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
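For context, a minimal sketch of the weight_fn referenced by loss_fn above, assuming the quadratic, max-normalized form the weight description states. Only loss_fn itself appears in the log, so the name weight_fn and the +1 time offset are assumptions:

import torch

# Assumed form: quadratic weight normalized by its maximum, so the weight at
# the final time equals 1 and early tracking errors count for less.
def weight_fn(t_span):
    w = (t_span + 1) ** 2
    return w / w.max()

# Over the logged span (0 to 10 s, 1000 points) the weights run from
# (0+1)^2 / (10+1)^2 ~= 0.008 up to 1.0; loss_fn then reshapes them to
# (t_points, 1) so they broadcast over the batch of training cases.
t_span = torch.linspace(0, 10, 1000)
weights = weight_fn(t_span).view(-1, 1)  # shape (1000, 1)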
@@ -0,0 +1,202 @@
Epoch,Loss
0,618.634765625
1,358.09967041015625
2,92.27668762207031
3,77.0481948852539
4,54.688392639160156
5,48.95355224609375
6,31.422712326049805
7,29.814496994018555
8,29.130786895751953
9,26.820755004882812
10,25.682769775390625
11,21.164920806884766
12,21.212703704833984
13,20.535888671875
14,21.002750396728516
15,21.16645050048828
16,24.990877151489258
17,20.385900497436523
18,20.349010467529297
19,21.548460006713867
20,20.38816261291504
21,20.32730484008789
22,23.402429580688477
23,25.815576553344727
24,27.374675750732422
25,24.019874572753906
26,20.947362899780273
27,19.967166900634766
28,20.48531723022461
29,19.696857452392578
30,18.35830307006836
31,17.78197479248047
32,17.28527069091797
33,16.481258392333984
34,15.587751388549805
35,15.384886741638184
36,15.633015632629395
37,16.207876205444336
38,16.068439483642578
39,15.760542869567871
40,15.562320709228516
41,15.388416290283203
42,15.153722763061523
43,14.683268547058105
44,13.478330612182617
45,12.819177627563477
46,12.928230285644531
47,13.078646659851074
48,13.159015655517578
49,13.130351066589355
50,12.972328186035156
51,12.680214881896973
52,12.331439971923828
53,12.132861137390137
54,12.106900215148926
55,12.151511192321777
56,12.207792282104492
57,12.218520164489746
58,12.153889656066895
59,12.027456283569336
60,11.866910934448242
61,11.9298734664917
62,11.833691596984863
63,11.765508651733398
64,11.622321128845215
65,11.738268852233887
66,11.816873550415039
67,11.75278091430664
68,11.62586498260498
69,11.525328636169434
70,11.454194068908691
71,11.391156196594238
72,11.316991806030273
73,11.223878860473633
74,11.123824119567871
75,11.045502662658691
76,10.97950267791748
77,10.940563201904297
78,10.80444049835205
79,10.846137046813965
80,10.760048866271973
81,10.736858367919922
82,10.734830856323242
83,10.648832321166992
84,10.591168403625488
85,10.592988967895508
86,10.5894193649292
87,10.553829193115234
88,10.495099067687988
89,10.456531524658203
90,10.39877700805664
91,10.366183280944824
92,10.267182350158691
93,10.351202011108398
94,10.303343772888184
95,10.285795211791992
96,10.263039588928223
97,10.299613952636719
98,10.256726264953613
99,10.198566436767578
100,10.201037406921387
101,10.185620307922363
102,10.12901496887207
103,10.095890998840332
104,10.103973388671875
105,10.043774604797363
106,10.027207374572754
107,10.018889427185059
108,9.972893714904785
109,9.94866943359375
110,9.936760902404785
111,9.890128135681152
112,9.875396728515625
113,9.85168743133545
114,9.81449031829834
115,9.79965877532959
116,9.771160125732422
117,9.735275268554688
118,9.697542190551758
119,9.66472339630127
120,9.70080852508545
121,9.597423553466797
122,9.787515640258789
123,9.782074928283691
124,9.592306137084961
125,9.857139587402344
126,9.734759330749512
127,9.958001136779785
128,9.570645332336426
129,9.810420036315918
130,9.525829315185547
131,9.659528732299805
132,9.610642433166504
133,9.492512702941895
134,9.59126091003418
135,9.45970630645752
136,9.43966007232666
137,9.453958511352539
138,9.351001739501953
139,9.423657417297363
140,9.315823554992676
141,9.387983322143555
142,9.267569541931152
143,9.372727394104004
144,9.367060661315918
145,9.31779670715332
146,9.245911598205566
147,9.301057815551758
148,9.22503662109375
149,9.328397750854492
150,9.148051261901855
151,9.27135181427002
152,9.122589111328125
153,9.157631874084473
154,9.113636016845703
155,9.052767753601074
156,9.116308212280273
157,9.001723289489746
158,9.116778373718262
159,8.95185375213623
160,9.175684928894043
161,10.139451026916504
162,9.721688270568848
163,8.983831405639648
164,11.030898094177246
165,10.12893009185791
166,9.96270751953125
167,10.315241813659668
168,9.929939270019531
169,9.611710548400879
170,9.673215866088867
171,9.701788902282715
172,9.58718204498291
173,9.548637390136719
174,9.56892204284668
175,9.707029342651367
176,9.621599197387695
177,9.647336959838867
178,9.403302192687988
179,9.298757553100586
180,9.379677772521973
181,9.39447021484375
182,9.332596778869629
183,9.132309913635254
184,8.942119598388672
185,8.761064529418945
186,9.211796760559082
187,9.171370506286621
188,8.662088394165039
189,8.686671257019043
190,8.651787757873535
191,8.638099670410156
192,8.672558784484863
193,8.638679504394531
194,8.550908088684082
195,8.5366849899292
196,8.52907657623291
197,8.489187240600586
198,8.421223640441895
199,8.39641284942627
200,8.397357940673828
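Each run in the sweep logs an Epoch,Loss table like the one above. A minimal sketch for plotting one run on a log scale; the CSV filename is hypothetical, since the sweep's output paths are not shown in this diff:

import numpy as np
import matplotlib.pyplot as plt

# Hypothetical filename; any of the Epoch,Loss logs above matches this format.
epochs, losses = np.loadtxt("loss_lr_0.04.csv", delimiter=",", skiprows=1, unpack=True)

plt.figure(figsize=(10, 6))
plt.semilogy(epochs, losses)  # log scale: this run falls from ~619 to ~8.4
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.title("Training Loss, lr=0.04")
plt.grid(True)
plt.savefig("loss_lr_0.04.png")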
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.05
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Quadratic weight: Weights increase quadratically, normalized by max

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
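The training cases are rows of [theta0, omega0, alpha0, desired_theta]. A sketch of how such a list might be batched for the rollout that produces state_traj; the tensor layout is an assumption inferred from the indexing in loss_fn:

import torch

# Three of the 22 cases above, stacked into an (n_cases, 4) tensor.
training_cases = torch.tensor([
    [0.5235987901687622, 0.0, 0.0, 0.0],  # 30 deg initial offset, regulate to 0
    [0.0, 6.2831854820251465, 0.0, 0.0],  # 2*pi rad/s initial spin
    [0.0, 0.0, 0.0, 1.5707963705062866],  # step command to pi/2
])

# A rollout over t_span would then yield state_traj of shape
# (t_points, n_cases, 4), consistent with theta = state_traj[:, :, 0] and
# desired_theta = state_traj[:, :, 3] in loss_fn, with the (t_points, 1)
# weights broadcasting across the case dimension.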
@@ -0,0 +1,202 @@
Epoch,Loss
0,618.634765625
1,321.7766418457031
2,94.66898345947266
3,52.99771499633789
4,34.460628509521484
5,15.112871170043945
6,11.031822204589844
7,8.822002410888672
8,5.261537075042725
9,3.8453757762908936
10,2.913972854614258
11,3.044447422027588
12,2.9120566844940186
13,2.7660698890686035
14,2.5967488288879395
15,2.5515990257263184
16,2.406590461730957
17,2.2655930519104004
18,2.2174527645111084
19,2.147364377975464
20,2.071032762527466
21,1.9788775444030762
22,1.8006500005722046
23,1.712820291519165
24,1.6395798921585083
25,1.6000370979309082
26,1.5945934057235718
27,1.576403021812439
28,1.5456539392471313
29,1.505415678024292
30,1.4548964500427246
31,1.3889611959457397
32,1.2936322689056396
33,1.1240094900131226
34,0.8581461310386658
35,0.7276880741119385
36,0.6626058220863342
37,0.6120113730430603
38,0.5709574818611145
39,0.5382158160209656
40,0.5101480484008789
41,0.48418474197387695
42,0.4625280201435089
43,0.44639497995376587
44,0.43177369236946106
45,0.41359296441078186
46,0.391738623380661
47,0.36960142850875854
48,0.35168078541755676
49,0.3391108214855194
50,0.3297618627548218
51,0.32077065110206604
52,0.31085601449012756
53,0.29945528507232666
54,0.2864932715892792
55,0.27271798253059387
56,0.26020491123199463
57,0.24999116361141205
58,0.2417416125535965
59,0.23356789350509644
60,0.22345070540905
61,0.21089839935302734
62,0.19665925204753876
63,0.18188656866550446
64,0.1672874391078949
65,0.15328507125377655
66,0.14072933793067932
67,0.13305748999118805
68,0.1298307180404663
69,0.1286945641040802
70,0.12890808284282684
71,0.13054677844047546
72,0.13316243886947632
73,0.13528332114219666
74,0.13547192513942719
75,0.13359470665454865
76,0.1306418627500534
77,0.12778067588806152
78,0.12530772387981415
79,0.122781902551651
80,0.11974873393774033
81,0.11625165492296219
82,0.11282520741224289
83,0.11014677584171295
84,0.10857890546321869
85,0.1080014631152153
86,0.10786277055740356
87,0.10765348374843597
88,0.107293501496315
89,0.10696996748447418
90,0.10678070783615112
91,0.10657951980829239
92,0.10613757371902466
93,0.10533472895622253
94,0.10423466563224792
95,0.10304559022188187
96,0.10198061168193817
97,0.10111872106790543
98,0.1004212349653244
99,0.09982679039239883
100,0.09929343312978745
101,0.09882328659296036
102,0.09844270348548889
103,0.09815369546413422
104,0.09791020303964615
105,0.09764356911182404
106,0.09730883687734604
107,0.09690766036510468
108,0.09647495299577713
109,0.09604500234127045
110,0.09562782198190689
111,0.09521264582872391
112,0.09478937089443207
113,0.09436561167240143
114,0.09396271407604218
115,0.09359800070524216
116,0.09327208250761032
117,0.0929718017578125
118,0.09268113970756531
119,0.09239138662815094
120,0.0921025201678276
121,0.09181789308786392
122,0.09153783321380615
123,0.09125769138336182
124,0.0909714326262474
125,0.09067727625370026
126,0.09037893265485764
127,0.09008336812257767
128,0.08979589492082596
129,0.08951734751462936
130,0.08924560993909836
131,0.08897840231657028
132,0.08871591836214066
133,0.08845949172973633
134,0.08820980042219162
135,0.08796560019254684
136,0.08772407472133636
137,0.08748286217451096
138,0.08724135160446167
139,0.0870002880692482
140,0.08676107227802277
141,0.08652393519878387
142,0.08628863096237183
143,0.08605487644672394
144,0.0858229398727417
145,0.08559362590312958
146,0.08536790311336517
147,0.0851457267999649
148,0.08492659032344818
149,0.08470979332923889
150,0.08449476212263107
151,0.08428158611059189
152,0.08407024294137955
153,0.08386079221963882
154,0.08365292847156525
155,0.08344639837741852
156,0.08324127644300461
157,0.0830373764038086
158,0.08283528685569763
159,0.08263520896434784
160,0.08243700861930847
161,0.08224057406187057
162,0.08204568922519684
163,0.0818525180220604
164,0.0816609114408493
165,0.08147081732749939
166,0.08128223568201065
167,0.08109483867883682
168,0.08090858906507492
169,0.0807233676314354
170,0.080539271235466
171,0.08035635948181152
172,0.08017465472221375
173,0.07999400794506073
174,0.07981448620557785
175,0.07963612675666809
176,0.07945891469717026
177,0.0792829841375351
178,0.07910815626382828
179,0.07893437147140503
180,0.07876163721084595
181,0.07858990132808685
182,0.07841918617486954
183,0.07824946939945221
184,0.07808071374893188
185,0.077912837266922
186,0.07774580270051956
187,0.07757966965436935
188,0.07741446793079376
189,0.07725026458501816
190,0.07708704471588135
191,0.07692476361989975
192,0.07676338404417038
193,0.07660288363695145
194,0.07644329220056534
195,0.07628464698791504
196,0.07612685859203339
197,0.07596984505653381
198,0.0758136510848999
199,0.07565821707248688
200,0.07550347596406937
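Side by side, the two completed runs above diverge sharply: lr=0.04 plateaus near a loss of 8.4 by epoch 200, while lr=0.05 drops below 0.08. Extending the single-run sketch above to overlay the sweep curves (filenames again hypothetical):

import numpy as np
import matplotlib.pyplot as plt

# Hypothetical per-run log paths, keyed by learning rate.
runs = {0.04: "loss_lr_0.04.csv", 0.05: "loss_lr_0.05.csv"}

plt.figure(figsize=(10, 6))
for lr, path in runs.items():
    epochs, losses = np.loadtxt(path, delimiter=",", skiprows=1, unpack=True)
    plt.semilogy(epochs, losses, label=f"lr={lr}")
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.title("Learning Rate Sweep")
plt.legend()
plt.grid(True)
plt.savefig("lr_sweep.png")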
@@ -0,0 +1,39 @@
Base controller path: /home/judson/Neural-Networks-in-GNC/inverted_pendulum/training/controller_base.pth
Time Span: 0 to 10, Points: 1000
Learning Rate: 0.08
Weight Decay: 0

Loss Function:
def loss_fn(state_traj, t_span):
    theta = state_traj[:, :, 0]
    desired_theta = state_traj[:, :, 3]
    weights = weight_fn(t_span)
    weights = weights.view(-1, 1)
    return torch.mean(weights * (theta - desired_theta) ** 2)

Weight Description: Quadratic weight: Weights increase quadratically, normalized by max

Training Cases:
[theta0, omega0, alpha0, desired_theta]
[0.5235987901687622, 0.0, 0.0, 0.0]
[-0.5235987901687622, 0.0, 0.0, 0.0]
[2.094395160675049, 0.0, 0.0, 0.0]
[-2.094395160675049, 0.0, 0.0, 0.0]
[0.0, 1.0471975803375244, 0.0, 0.0]
[0.0, -1.0471975803375244, 0.0, 0.0]
[0.0, 6.2831854820251465, 0.0, 0.0]
[0.0, -6.2831854820251465, 0.0, 0.0]
[0.0, 0.0, 0.0, 6.2831854820251465]
[0.0, 0.0, 0.0, -6.2831854820251465]
[0.0, 0.0, 0.0, 1.5707963705062866]
[0.0, 0.0, 0.0, -1.5707963705062866]
[0.0, 0.0, 0.0, 1.0471975803375244]
[0.0, 0.0, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 0.0]
[-0.7853981852531433, -3.1415927410125732, 0.0, 0.0]
[1.5707963705062866, -3.1415927410125732, 0.0, 1.0471975803375244]
[-1.5707963705062866, 3.1415927410125732, 0.0, -1.0471975803375244]
[0.7853981852531433, 3.1415927410125732, 0.0, 6.2831854820251465]
[-0.7853981852531433, -3.1415927410125732, 0.0, 6.2831854820251465]
[1.5707963705062866, -3.1415927410125732, 0.0, 12.566370964050293]
[-1.5707963705062866, 3.1415927410125732, 0.0, -12.566370964050293]
Some files were not shown because too many files have changed in this diff.