"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"plt.scatter(X[:, 0], X[:, 1], c=y, cmap='brg')\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "9xpEuZM9Fjme"
},
"source": [
"\n",
"\n",
"The neural network will not be aware of the color differences, as the data have no class encodings.\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "m9vYNDymFjme"
},
"source": [
"\n",
"DENSE LAYER CLASS \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "_bHtuDc1Fjme",
"outputId": "0e533278-4141-4e66-cb18-7e6c49a338dc"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n",
" [-1.0475188e-04 1.1395361e-04 -4.7983500e-05]\n",
" [-2.7414842e-04 3.1729150e-04 -8.6921798e-05]\n",
" [-4.2188365e-04 5.2666257e-04 -5.5912682e-05]\n",
" [-5.7707680e-04 7.1401405e-04 -8.9430439e-05]]\n"
]
}
],
"source": [
"import numpy as np\n",
"import nnfs\n",
"from nnfs.datasets import spiral_data\n",
"nnfs.init()\n",
"# Dense layer\n",
"class Layer_Dense:\n",
" # Layer initialization\n",
" def __init__(self, n_inputs, n_neurons):\n",
" # Initialize weights and biases\n",
" self.weights = 0.01 * np.random.randn(n_inputs, n_neurons)\n",
" self.biases = np.zeros((1, n_neurons))\n",
"\n",
" # Forward pass\n",
" def forward(self, inputs):\n",
" # Calculate output values from inputs, weights and biases\n",
" self.output = np.dot(inputs, self.weights) + self.biases\n",
"\n",
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"# Create Dense layer with 2 input features and 3 output values\n",
"dense1 = Layer_Dense(2, 3)\n",
"# Perform a forward pass of our training data through this layer\n",
"dense1.forward(X)\n",
"\n",
"\n",
"# Let's see output of the first few samples:\n",
"print(dense1.output[:5])\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "gFxBpF1ZFjme"
},
"source": [
"\n",
"ACTIVATION FUNCTION: RELU \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "rGjvDHswFjme",
"outputId": "13209c8a-ec91-456b-b459-d02c32fce102"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[0. 2. 0. 3.3 0. 1.1 2.2 0. ]\n"
]
}
],
"source": [
"import numpy as np\n",
"inputs = [0, 2, -1, 3.3, -2.7, 1.1, 2.2, -100]\n",
"output = np.maximum(0, inputs)\n",
"print(output)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "niT8Hh_5Fjme"
},
"outputs": [],
"source": [
"# ReLU activation\n",
"class Activation_ReLU:\n",
" # Forward pass\n",
" def forward(self, inputs):\n",
" # Calculate output values from input\n",
" self.output = np.maximum(0, inputs)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "ZzQlDPPfFjme",
"outputId": "023f5b81-cba9-41f0-e1cc-fdc486d63b99"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[0. 0. 0.]\n",
" [0. 0. 0.]\n",
" [0. 0. 0.]\n",
" [0. 0. 0.]\n",
" [0. 0. 0.]]\n"
]
}
],
"source": [
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"# Create Dense layer with 2 input features and 3 output values\n",
"dense1 = Layer_Dense(2, 3)\n",
"# Create ReLU activation (to be used with Dense layer):\n",
"activation1 = Activation_ReLU()\n",
"# Make a forward pass of our training data through this layer\n",
"dense1.forward(X)\n",
"# Forward pass through activation func.\n",
"# Takes in output from previous layer\n",
"activation1.forward(dense1.output)\n",
"# Let's see output of the first few samples:\n",
"print(activation1.output[:5])"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "vT3iHO6rFjme"
},
"source": [
"\n",
"ACTIVATION FUNCTION: SOFTMAX \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "fvhMk3zGFjme",
"outputId": "868e3bd4-6153-445c-ac23-1ded36133c00"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"45\n",
"[12 15 18]\n",
"(3,)\n",
"[ 6 15 24]\n",
"(3,)\n",
"[[12 15 18]]\n",
"(1, 3)\n",
"[[ 6]\n",
" [15]\n",
" [24]]\n",
"(3, 1)\n",
"[7 8 9]\n",
"[3 6 9]\n"
]
}
],
"source": [
"### TRY THESE EXERCISES FOR YOURSELF!\n",
"\n",
"A = [[1, 2, 3], [4, 5, 6], [7, 8,9]]\n",
"print(np.sum(A))\n",
"\n",
"print(np.sum(A, axis = 0))\n",
"print(np.sum(A, axis = 0).shape)\n",
"\n",
"print(np.sum(A, axis = 1))\n",
"print(np.sum(A, axis = 1).shape)\n",
"\n",
"print(np.sum(A, axis = 0,keepdims = True))\n",
"print(np.sum(A, axis = 0,keepdims = True).shape)\n",
"\n",
"print(np.sum(A, axis = 1,keepdims = True))\n",
"print(np.sum(A, axis = 1,keepdims = True).shape)\n",
"\n",
"print(np.max(A, axis = 0))\n",
"print(np.max(A, axis = 1))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "4Ux1-5ZUFjmi",
"outputId": "7de739d1-deca-48ed-fe79-3fe5f8e32565"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[0.06414769 0.17437149 0.47399085 0.28748998]\n",
" [0.04517666 0.90739747 0.00224921 0.04517666]\n",
" [0.00522984 0.34875873 0.63547983 0.0105316 ]]\n"
]
},
{
"data": {
"text/plain": [
"array([1., 1., 1.])"
]
},
"execution_count": 85,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"inputs = [[1, 2, 3, 2.5],\n",
" [2., 5., -1., 2],\n",
" [-1.5, 2.7, 3.3, -0.8]]\n",
"\n",
"# Get unnormalized probabilities\n",
"exp_values = np.exp(inputs - np.max(inputs, axis=1,keepdims=True))\n",
" # Normalize them for each sample\n",
"probabilities = exp_values / np.sum(exp_values, axis=1,keepdims=True)\n",
"print(probabilities)\n",
"np.sum(probabilities, axis = 1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "kRdUvsWAFjml"
},
"outputs": [],
"source": [
"# Softmax activation\n",
"class Activation_Softmax:\n",
" # Forward pass\n",
" def forward(self, inputs):\n",
" # Get unnormalized probabilities\n",
" exp_values = np.exp(inputs - np.max(inputs, axis=1, keepdims=True))\n",
" # Normalize them for each sample\n",
" probabilities = exp_values / np.sum(exp_values, axis=1,keepdims=True)\n",
" self.output = probabilities"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "RzQG-tdnFjml"
},
"source": [
"\n",
"ONE FORWARD PASS (WITHOUT LOSS) \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Sha3z7fPFjml",
"outputId": "a38ee32e-abad-4752-8ed7-a6614931aca7"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[0.33333334 0.33333334 0.33333334]\n",
" [0.33333364 0.3333334 0.3333329 ]\n",
" [0.33333385 0.3333335 0.33333266]\n",
" [0.33333433 0.3333336 0.33333206]\n",
" [0.33333462 0.33333373 0.33333164]]\n"
]
}
],
"source": [
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"# Create Dense layer with 2 input features and 3 output values\n",
"dense1 = Layer_Dense(2, 3)\n",
"# Create ReLU activation (to be used with Dense layer):\n",
"activation1 = Activation_ReLU()\n",
"# Create second Dense layer with 3 input features (as we take output\n",
"# of previous layer here) and 3 output values\n",
"dense2 = Layer_Dense(3, 3)\n",
"# Create Softmax activation (to be used with Dense layer):\n",
"activation2 = Activation_Softmax()\n",
"\n",
"# Make a forward pass of our training data through this layer\n",
"dense1.forward(X)\n",
"\n",
"# Make a forward pass through activation function\n",
"# it takes the output of first dense layer here\n",
"activation1.forward(dense1.output)\n",
"# Make a forward pass through second Dense layer\n",
"# it takes outputs of activation function of first layer as inputs\n",
"dense2.forward(activation1.output)\n",
"# Make a forward pass through activation function\n",
"# it takes the output of second dense layer here\n",
"activation2.forward(dense2.output)\n",
"# Let's see output of the first few samples:\n",
"print(activation2.output[:5])"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "In5zGYb4Fjml"
},
"source": [
"\n",
"CALCULATING NETWORK ERROR WITH LOSS \n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "aQnZKLOOFjml"
},
"source": [
"\n",
"CROSS ENTROPY LOSS BUILDING BLOCKS IN PYTHON\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "D8AgvY5hFjml",
"outputId": "2e395228-3098-4f22-d753-2b8b176a095b"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[0.7 0.5 0.9]\n"
]
}
],
"source": [
"softmax_outputs = np.array([[0.7, 0.1, 0.2],\n",
" [0.1, 0.5, 0.4],\n",
" [0.02, 0.9, 0.08]])\n",
"class_targets = [0, 1, 1]\n",
"print(softmax_outputs[range(len(softmax_outputs)), class_targets])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Blvy02fjFjmm",
"outputId": "4d35fb6e-eb49-43ef-af0f-c1bc4faf2216"
},
"outputs": [
{
"data": {
"text/plain": [
"range(0, 3)"
]
},
"execution_count": 89,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"range(len(softmax_outputs))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "nyIehvEcFjmm",
"outputId": "d0b7eec5-55ea-4409-b867-9d8ae4f64065"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[0.35667494 0.69314718 0.10536052]\n",
"0.38506088005216804\n"
]
}
],
"source": [
"print(-np.log(softmax_outputs[\n",
" range(len(softmax_outputs)), class_targets\n",
"]))\n",
"neg_log = -np.log(softmax_outputs[\n",
" range(len(softmax_outputs)), class_targets\n",
" ])\n",
"average_loss = np.mean(neg_log)\n",
"print(average_loss)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "NxZXtcNdFjmm"
},
"source": [
"\n",
"IF DATA IS ONE HOT ENCODED, HOW TO EXTRACT THE RELEVANT PREDICTIONS\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "oIHjRQnpFjmm",
"outputId": "bac7a3a7-8796-47cc-f75e-c4a46f88ef8e"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[0.35667494 0.69314718 0.10536052]\n",
"0.38506088005216804\n"
]
}
],
"source": [
"y_true_check = np.array([\n",
" [1, 0, 0],\n",
" [0, 1, 0],\n",
" [0, 1, 0]\n",
"])\n",
"\n",
"y_pred_clipped_check = np.array([\n",
" [0.7, 0.2, 0.1],\n",
" [0.1, 0.5, 0.4],\n",
" [0.02, 0.9, 0.08]\n",
"])\n",
"\n",
"A = y_true_check*y_pred_clipped_check\n",
"B = np.sum(A, axis = 1)\n",
"C = - np.log(B)\n",
"\n",
"print(C)\n",
"print(np.mean(C))\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "nCWwEIPZFjmm"
},
"source": [
"\n",
"IMPLEMENTING THE LOSS CLASS\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Nw_ZQP13Fjmm"
},
"outputs": [],
"source": [
"# Common loss class\n",
"class Loss:\n",
" # Calculates the data and regularization losses\n",
" # given model output and ground truth values\n",
" def calculate(self, output, y):\n",
" # Calculate sample losses\n",
" sample_losses = self.forward(output, y)\n",
" # Calculate mean loss\n",
" data_loss = np.mean(sample_losses)\n",
" # Return loss\n",
" return data_loss"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "vJ1fW8VLFjmm"
},
"source": [
"\n",
"IMPLEMENTING THE CATEGORICAL CROSS ENTROPY CLASS\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "bkfGuRE0Fjmm"
},
"outputs": [],
"source": [
"# Cross-entropy loss\n",
"class Loss_CategoricalCrossentropy(Loss):\n",
" # Forward pass\n",
" def forward(self, y_pred, y_true):\n",
" # Number of samples in a batch\n",
" samples = len(y_pred)\n",
" # Clip data to prevent division by 0\n",
" # Clip both sides to not drag mean towards any value\n",
" y_pred_clipped = np.clip(y_pred, 1e-7, 1 - 1e-7)\n",
" # Probabilities for target values -\n",
" # only if categorical labels\n",
" if len(y_true.shape) == 1:\n",
" correct_confidences = y_pred_clipped[\n",
" range(samples),\n",
" y_true\n",
" ]\n",
" # Mask values - only for one-hot encoded labels\n",
" elif len(y_true.shape) == 2:\n",
" correct_confidences = np.sum(\n",
" y_pred_clipped*y_true,\n",
" axis=1\n",
" )\n",
" # Losses\n",
" negative_log_likelihoods = -np.log(correct_confidences)\n",
" return negative_log_likelihoods"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Q9YEVg4dFjmm",
"outputId": "3fa4ae1f-ffbf-480e-a1f0-61bd4482febb"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.38506088005216804\n"
]
}
],
"source": [
"softmax_outputs = np.array([[0.7, 0.1, 0.2],\n",
" [0.1, 0.5, 0.4],\n",
" [0.02, 0.9, 0.08]])\n",
"class_targets = np.array([[1, 0, 0],\n",
" [0, 1, 0],\n",
" [0, 1, 0]])\n",
"loss_function = Loss_CategoricalCrossentropy()\n",
"loss = loss_function.calculate(softmax_outputs, class_targets)\n",
"print(loss)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "llVv_pJDFjmm"
},
"source": [
"\n",
"FULL CODE UP TO THIS POINT\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "ocqJJcMsFjmm",
"outputId": "9f6418fe-8a42-4b1f-d5d7-efdd017815da"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[0.33333334 0.33333334 0.33333334]\n",
" [0.3333341 0.33333302 0.3333329 ]\n",
" [0.3333341 0.33333302 0.33333296]\n",
" [0.3333341 0.333333 0.33333293]\n",
" [0.3333364 0.33333203 0.33333158]]\n",
"loss: 1.0986193\n",
"acc: 0.28\n"
]
}
],
"source": [
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"# Create Dense layer with 2 input features and 3 output values\n",
"dense1 = Layer_Dense(2, 3)\n",
"# Create ReLU activation (to be used with Dense layer):\n",
"activation1 = Activation_ReLU()\n",
"# Create second Dense layer with 3 input features (as we take output\n",
"# of previous layer here) and 3 output values\n",
"dense2 = Layer_Dense(3, 3)\n",
"# Create Softmax activation (to be used with Dense layer):\n",
"activation2 = Activation_Softmax()\n",
"# Create loss function\n",
"loss_function = Loss_CategoricalCrossentropy()\n",
"\n",
"\n",
"# Perform a forward pass of our training data through this layer\n",
"dense1.forward(X)\n",
"# Perform a forward pass through activation function\n",
"# it takes the output of first dense layer here\n",
"activation1.forward(dense1.output)\n",
"\n",
"# Perform a forward pass through second Dense layer\n",
"# it takes outputs of activation function of first layer as inputs\n",
"dense2.forward(activation1.output)\n",
"# Perform a forward pass through activation function\n",
"# it takes the output of second dense layer here\n",
"activation2.forward(dense2.output)\n",
"# Let's see output of the first few samples:\n",
"print(activation2.output[:5])\n",
"# Perform a forward pass through activation function\n",
"# it takes the output of second dense layer here and returns loss\n",
"loss = loss_function.calculate(activation2.output, y)\n",
"# Print loss value\n",
"print('loss:', loss)\n",
"\n",
"# Calculate accuracy from output of activation2 and targets\n",
"# calculate values along first axis\n",
"predictions = np.argmax(activation2.output, axis=1)\n",
"if len(y.shape) == 2:\n",
" y = np.argmax(y, axis=1)\n",
"accuracy = np.mean(predictions == y)\n",
"# Print accuracy\n",
"print('acc:', accuracy)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "NC3lHEW0Fjmn"
},
"source": [
"\n",
"INTRODUCING ACCURACY \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Hxxm7ePwFjmn",
"outputId": "32f1df40-341a-497d-fa9a-a4a4f283b516"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"acc: 1.0\n"
]
}
],
"source": [
"import numpy as np\n",
"# Probabilities of 3 samples\n",
"softmax_outputs = np.array([[0.7, 0.2, 0.1],\n",
" [0.1, 0.5, 0.4],\n",
" [0.02, 0.9, 0.08]])\n",
"# Target (ground-truth) labels for 3 samples\n",
"class_targets = np.array([0, 1, 1])\n",
"# Calculate values along second axis (axis of index 1)\n",
"predictions = np.argmax(softmax_outputs, axis=1)\n",
"# If targets are one-hot encoded - convert them\n",
"if len(class_targets.shape) == 2:\n",
" class_targets = np.argmax(class_targets, axis=1)\n",
"# True evaluates to 1; False to 0\n",
"accuracy = np.mean(predictions == class_targets)\n",
"print('acc:', accuracy)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "28USj38zFjmn"
},
"source": [
"\n",
"THE NEED FOR OPTIMIZATION \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "YlIycjcjFjmn",
"outputId": "bf133cf2-dea4-49f5-cb61-3122dd15cf5b"
},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/H5lhTAAAACXBIWXMAAA9hAAAPYQGoP6dpAADChklEQVR4nOydd5gTVRfG35nJFnrvHQQUERAERDpipVjoKiAoSLX7SVMEC4JYULEhVUVAFEUECyAiKFVAigLSe++wJbnv98clu5vdZGbSt9xfnnlgk1vOnWT3npx7ikaSUCgUCoVCoYgSerQFUCgUCoVCkbNRyohCoVAoFIqoopQRhUKhUCgUUUUpIwqFQqFQKKKKUkYUCoVCoVBEFaWMKBQKhUKhiCpKGVEoFAqFQhFVlDKiUCgUCoUiqjiiLYAdhBA4fPgw8uXLB03Toi2OQqFQKBQKG5DEhQsXULp0aei6b/tHllBGDh8+jHLlykVbDIVCoVAoFAFw4MABlC1b1ufrWUIZyZcvHwC5mPz580dZGoVCoVAoFHY4f/48ypUrl7KP+yJLKCPuo5n8+fMrZUShUCgUiiyGlYuFcmBVKBQKhUIRVZQyolAoFAqFIqooZUShUCgUCkVUUcqIQqFQKBSKqKKUEYVCoVAoFFFFKSMKhUKhUCiiilJGFAqFQqFQRBWljCgUCoVCoYgqWSLpmSJzsH8/MGMGcPAgULgw0LUrUKtWtKVSKBQKRVZHKSMKS4QAnn0WeOcdQNflRQJjxgDt2wNffAHkzRttKRUKhUKRVVHHNApLRoyQiggJuFxAcjLgdMrXfvgB6NxZvqZQKBQKRSAoZURhyqlTwPjxvpUNlwtYtAhYsyaycikUCoUi+6CUEYUpX3+dagXxhcMBfPZZZORRKBQKRfZDKSMKU44fBwzDvI0Qsp1CoVAoFIGglBGFKaVKyaMYM3RdtlMoFAqFIhCUMqIwpUMHIDbWvI3TCfTsGRl5FAqFQpH9UMqIwpSCBYFhw3y/ruvA/fcDdetGTCRFNod//QU+9hh4c0OwZUvw7bfBM2eiLZZCoQgjKs+IwpIXXpDWjzFjpH+IYch/XS6Z+OzTT6MtoSI7QBJ45hngnbelV7Tbc3r5b8DoUeAPC6Hdckt0hVQoFGFBIzN/hojz58+jQIECOHfuHPLnzx9tcXIsx48DM2cCBw4ARYoAXboAVapEWypFdoETJgBPPen9RV0H8uQB/t0OTTkoKRRZBrv7t1JGFApF1KHTCZQrCxw75ruRYQAjXoA2cmTkBFMoFEFhd/9WPiMKhSL6rF1rrogA8lxwzuzIyKNQKCKKUkYUCkX0uXDBXrvzNtspFIoshVJGFJmCCxeAceOkD4phAAUKAI8+CmzdGm3JFBHBjvORYQDVq4VfFoVCEXGUMqKIOidPAg0aAEOHArt3y0id8+eB6dOBG28EFi6MtoSKcKNVqQI0a2ae7tflAh7rFzmhFApFxFDKiCLqPPoosHOnVELS4nTKq2NH4MSJ6MimiCBvvyMz7HlTSHQdaHUrcN99ERdLoVCEH6WMKKLKvn3A/Pm+U86TQGIiMGVKZOVSRB7txhuBZb8BNWt6vhATIzXW77+H5lCpkRSK7Ij6zVZElWXLpMJhhhDA4sXA889HRCRFFNHq1wc2bATXr5cOQ/HxwK23QitSJNqiKRSKMKKUEUVUsSrC5yY5ObxyKDIXWr16QL160RZDoVBECHVMo4gqN91k3cYwgJtvDr8sCoVCoYgOShlRRJVataSiYRZEQQJ9+0ZOJoVCoVBEFqWMKKLO1KlA/vyyNlpa9KufznfeASpXjrhYCoVCoYgQShlRRJ1rrwXWrQO6dZOBE27q1QO+/RYYPDhqoikUOZb92I/X8BoGYiBexIv4F/9GWyRFNkYVylNkKs6fBw4fBvLlA8qUibY0CkXOwwUXnsWzmIAJ0K8+CMIJJzqjM6ZhGnIhV7TFVGQRwloob+LEiahYsS
Li4+PRsGFDrFmzxmfbFi1aQNO0DFebNm0CmVqRzcmfX1pKlCKiUESHYRiGCZgAgnDBhWQkwwknAGAu5uIhPBRlCRXZEb+VkdmzZ+Ppp5/GyJEj8ddff6F27dq44447cPz4ca/tv/nmGxw5ciTl2rJlCwzDQKdOnYIWXqFQKBSh4ziO4y28BcK7wVxA4Bt8g43YGFnBFNkev5WRt956C3369EGvXr1Qo0YNfPTRR8idOzem+EiRWbhwYZQsWTLl+uWXX5A7d26ljESYjRuByZOBGTOA/fujLY1CociMfIWvICBM2zjgwGf4LEISKXIKfiU9S0pKwvr16zF06NCU53RdR+vWrfHnn3/aGmPy5Mno2rUr8uTJ47NNYmIiEhMTU34+f/68P2Iq0rB1K9CrF7B2bepzmiZLfEyaBBQuHJo5vvsOuHQJqFYN6NQJyJ07YzshgKNHZZRMiRJSDoVCkXk4hmMwYJgqJARxDMciKJUiJ+CXZeTkyZNwuVwoUaKEx/MlSpTA0aNHLfuvWbMGW7ZswaOPPmrabsyYMShQoEDKVa5cOX/EVFxl506gcWPgr788nyel8tC8OXDxYuDjnzkD3HWXLCXy4ovAG28ADz8MlCwpK+66SUoCxo0DypeXviClSsmK8e++65mBVQhg5Upg3jzgzz8zFs5TKBThpQRKpPiH+EKDhpIoGSGJFDmFiIb2Tp48GTfccAMaNGhg2m7o0KE4d+5cynXgwIEISZi9eOEFaa3wlnLd5ZIWjU8/DWzs5GTgzjuBX35JHc+dsv3CBamUfPWVVETatgWGDgUOHUrtv3cv8OSTQJcusu/nnwMVKwJNmgD33w/ccgtwzTVyDIVCERk6ozMMmGQgBOCEE93RPUISKXIKfikjRYsWhWEYOHbM00R37NgxlCxprilfunQJs2bNwiOPPGI5T1xcHPLnz+9xKfzjzBng668Bp/mXHHz4YWDjf/cdsGaNeW2Z556TCcsWL85o5SDl9fXXQI8eQPfuQHqdc88eoHNnmRQtO7N9u1TWunUD+ve3VzxQoQgHxVAMz+JZn6/r0NEJnVAbtSMolSIn4JcyEhsbi3r16mHJkiUpzwkhsGTJEjRq1Mi071dffYXExEQ89JAKC4sEBw5YKyKktFAEwpQp5incAWDfPuDNN6031i+/NH990CBpbcluuFzAwIEylPmNN4A5c6SlqmVLaRk6eTLaEipyIq/iVTyLZ1NyjMQgJsVa0hVdMQMzoiyhIjvi9zHN008/jUmTJmH69On4559/0L9/f1y6dAm9evUCAPTo0cPDwdXN5MmTce+996KIKgUeEewak0z8iE05cMBexV0fEd8eWCkrly8DM2fakysr8cILqZYpl0taj9wK5Nq1wN13K78ZReTRoeMNvIEDOIAxGIP+6I+RGIkd2IEv8AXiER9tERXZEL+iaQCgS5cuOHHiBF588UUcPXoUderUwY8//pji1Lp//37ouqeOs337dqxYsQI///xzaKRWWFKhgixCt2WL7w3N4QC6dg1s/BIlgG3bIrdZTpsGPPZYZOaKBGfOAG+95VsRc7mkQvLLL8Add0RWNoUCAEqjNP6H/0VbDEUOQaWDz8bMnSvDbL2haUBsLLBpE1C9uv9jT58unVQjhcMhN/C8eSM3Zzixc/8MA3jwQc/IJIVCochKhDUdvCJr0LEjMH68VDzS+nfoOpArl3RCDUQRAWQUTLVqGSvthgunE1i0KDJzRYJTp1KrEvvC5ZLtFAqFIrujlJFszjPPADt2AE8/DTRrBtx6K/D669K5NBjzf3w8sHQpUKOG/NnhkJfVBpuevn3t+62cOePf2JmZsmWtj7gcDkCl2FEoFDmBCH2vVUSTa66RScdCTZkyMs380qUyUdmlS9IKYycUNyZGpqbv0gU4fVoeKVlRoULQImca2rUDChQAzp3z3cbplNlzFQqFIrujfEYUIeWvv4B69czbaBpQty6wbp38eflymQ3WjNKlZU0dq3
DirMSHHwIDBnh/Tddlyv6vvlJp8xUKRdZF+YwoosKNN0pfEqsNNO03/qZNgfbtzY943noreykigExw9t57qcdUMTHyHui6dG794guliCgU2YWN2Ihe6IWCKIhcyIW6qItP8SmSkBRt0TIFyjKiCDlffy2dZ73hcEh/ib//BvLlS30+IQEYPFge8QghN2SXSxbye/ddGVWSljNnZFKwIkVCU+wvmly8CHzzjUxAV7Ag0KGDPAJTKBTZg5mYie7oDh16Su0fHToEBFqgBRZiIXIhV5SlDA9292+ljCjCwpQpMrtoYqJUQEjpA1GrFvD997JonjeOHAHmz5cZVytVkr4VsbGpr//1FzBqFLBggVRaNE0mB3vxRcCi5JFCoVBEnB3YgRqoARe8Z4nUoWMwBuMdvBNZwSKEUkYUUefcOVkAb9s2IC5OKhYtWgR+9PDrr7I4n8vlmf3VMOSYCxaoBGEKhSJz8SSexERMNK2GnBu5cRRHkQ/5fLbJqihlRJGtSEqSxzunTnkPidV1mQL/8GGZQ0WhUCgyA9VRHTuww7LdL/gFrdE6AhJFFuXAqshWfPstcOKE79wcQgBnz8roE0V04KFD4KhR4N13gW3uBseNA6NQ7Y8uF/jdd2DbNmD1amCD+uD774PZsdqiItOTjGRb7cwsJzkBlWdEkSVYs0ZGmySb/F7HxMh2PXpETi6FhNOmAX37SK3QrTH+9BPw0khw9hxo7dpFRo5Ll4B2bYFlyzxfWLcOGD4M/G05tDp1IiKLQgEADdAAB3DAVNnQoeMG3BBBqTIfyjKiyBLYDevNbuG/WQEuWQI80lt6KKc1XQkhPZg73A9u3BgZYR7rm1ERcXPhAnBzQzA7pfJVZHoGYqCpIuKAA+3RHmWQs0PolDKiyBK0bGluFQHk6y1bRkYeRRpee9V3khi3S9pbb4ZdDB48CHz5pXmjpCSg18Nhl0WhcNMUTfEUngIAaPD03nfAgWIohnfxbjREy1QoZUSRJbj9dqBKFd+WD8OQDq5t20ZWrpwOz5yRYU4u72GLAKTFZM4chN1X/ocfUpUfMxYtAhMSwiuLQpGGN/EmPsEnqIzKKc/FIhYP4SGsxVqUgypCpZQRRZZA16UTa4ECGRUSw5AJ1L7/PnJVhBVXOX/eXrukJGvTVrBcvmyvXXIysHNneGVRKNKgQUMf9MFO7MR2bMcmbMIJnMBUTM3xxzNulDKiyDLUrCkztz79tMy8CgCFCgFPPAFs2gQov8QoULy4LOFso52WNntdOKhe3X5bf8tLKxQhQIOGaqiGWqiF/FBpKtKi8owosizutPHh4PRp4LPPZMK2XLlkwrZWrVStGG+wb19g2lR5HOMNwwCGDYc2alR45XC5gLx5pNOsGYUKAUeOhl85UigUKs+IIvsTLkXko4+AUqWkBWbqVGDiRKB1a6B2bVk5WJGOESO8n58BqcWIHn887GJohgEMH2HRSAMGP64UkUxIMpJxGqdt5+VQZC+UMqJQpGHWLFlNNylJWl6Sk1O/8P/zj0xnf/FiVEXMdGjlywMr/wDq3XT1iTTmo2bNgBUrobnP1cIty4gRQNeuvhvceiswbFhEZFHYYzu2oxd6IS/yogiKIC/yohd6YTu2R1s0RQRRxzQKxVWEAK65Btizx3cbTQM++ADo1y9ycmUluHEjsGqVNFs1awbt2mujI8d33wGjRwEbNsgnqlYFBj8OPPYYtJiYqMikyMgqrMKtuBVJSPLIxeGAA3GIw2Isxs24OYoSKoJF1aZRZFucThk58+WXwMmTQIUKQK9eQNOmwfl0rF4N3Gzxd0/TZHXgVasCn0cROeh0Ai4XtLi4aIuiSEcyklEe5XECJ7xWtDVgoBiKYT/2IwZKgcyqKJ8RRbbk6FGgbl3g/vuBb76RKS4+/xxo3hxo3x64ciXwsU+csG5DAseOBT5HVockuGwZ2KkTWKE8WKUyOHAguG1btEXziuZwKEUkkzIf83EUR70qIgDgggtHcR
TzMT/CkimigVJGFFkGIYC77pK+G0Bqni23T8fChcEdn5QqZd1G14EyOTQtAEngqSeBVi2B774FDhyQZ1qTPgFq3SDr0ygUNlmJlZYWjxjEYCVWRkgiRTRRyogiy7B4MbBxo+8IUiFkOO7Bg4GNX7euTFVhdtQjBNC7d2DjZ3k+/RR492ra6rRvgrsmzSO9wXXroiObIttCZHpPAkUIUMqIIsswd651hlVNA+bNC2x8TQPGjvX9usMB1KgBdOsW2PhZGZLAuLHmmpphAO9OiJxQ6TiKo3gX72IERmAiJuIEbJy7KaLGLbjFMow3GclogiYRkkgRTZQyosgynDvnWRTWG7ou2wXKPfdIH5R8+eTPMTGpClCjRsDSpTIJWo5j1y55mfm7O53Ad99FTib3tHDiKTyFsiiLp/AUxmEcHsfjKI3SGIZhELD40Ciiwj24ByVREga8F5wyYKAkSqI92kdYMkU0UJU8FFmGypWto2WcTtkuGB54ALj3XmDOHOmf4s7AWq9ecONmaayymrpJSgqvHF54Ak/gQ3yYYs53Kx8CAmMwBgQxBmMiLpfCnBjE4Bt8g9Zo7TW0Nxax+AbfhCSSZg/24BROofTVhyLzoUJ7FVmGnTuBatXM2+TPLyNucqT1Iozw4kWgeDHArNqtrgO1akH7a4P/4//7r9T+zpyRsdoPPACteHHLfnuwB1VQxdSvwAEHDuAASqKk33Ipws+/+BdjMRYzMRNJSEIsYvEAHsDzeB7XIrg8NYuwCCMxEmuxNuW51miNV/EqGqBBsKIrbKBCexXZjqpVgSefNG/z1ltKEQkHWt68QPce5k47QgADB/k1Li9dAjt3AmpcB7w8GvhgIvDsM0DZMuDo0bD6rvQZPoNu8WdMQGAmZvollyJyXItrMRVTcQEXcBIncQEXMBVTg1ZEZmAG2qAN1mO9x/O/4lc0QRMsxdKgxleEFqWMKFI4d04mrPz3X2vfjGjx5puyzEn6PbFgQWDaNOCRR6IhVQ5h1CgZ/+xNIdF1oGVLoHt328ORBDp1kgljABmrnZwsP3xOJ/DSSHOPYgCHcMhSGTFg4BAO2ZZLER1iEYsiKIJYBF836DROoy/6gmAGnyHX1cdDeMjjaEgRXZQyosChQzKDafHiMrz1uuuAKlWADz8091eMBrNny3TsaeXSdeDsWfM07org0UqWBFatBjp08FRI8uQBnnwK+GGhfwXo/vgD+HGRueb7ysvghQs+Xy6KopahnwICRVHUvlyKLM90TEcSfPsvCQgcwREsxMIISqUwQzmw5nAOHpTpzU+c8EwdsXcvMGAAsHUr8N57waVZDxXr1wMPPZRx73L/PGqU9Cl54IHIy5ZT0EqVAr6cBR4/DmzeLJWSm26ClieP/4PNmCH7+0ocAwCXL8sInYce8vpyN3TDa3jNdBoBga4wKZ6Xg9iGbZiGaTiAAyiMwuiGbmiMxtCQCX7BQ8gGbIAO3Wd2V0D6Em3AhkwbrZOEJCzCIuzDPhREQbRDOxRCoWiLFTaUMpLDefLJjIpIWiZOBDp2lNVqo83bb0sriK8v0roOjBkj84BkBuUpO6MVLy4r4AbD8WPmigggc5ccOeLz5ZqoiQ7ogHmY5zWEV4eOnuiJSqgUnKxZnGQkox/6YQqmwAEHBAR06PgAH6A5muNbfIuCKBhtMUNGDGIsFSyCmbbmzWf4DE/jaZzESejQISAQhzgMwiC8jtfhyIZbtzqmycEcOSIThJntBw6HPBbJDFjJKgSwZUvgGVgVEaZoMessdi4XUKKEaZMZmIF7cA8A+W3XgJHyx7oruuJDfBgScbMyz+AZTMVUADIvi4BI8ZdYgRW4F/dmq0ynt+N2S38QF1y4DbdFSCL7fIbP0AM9cBInAaSGqiciEW/hLfRF32iKFzZUaG8O5uefgTvusG5XoYI8tok2DkdqPRoz/vkHiFLleoUfcPlyoEVz80bx8cDRY9Bs/N5vxEZ8js9xHMdRCqXQAz1wPa4Pkb
RZl6M4inIoZ7k5/47fs0220yQkoSIq4jiOez2qccCBeqiHVchc5beTkIRSKIXTOG3abiM2ojZqR0iq4FChvQpLYmxaKP3xSQwn11xjffwSHw+ULRsZeRRB0rQp0KqVPIrxxZChthQRAKiDOhiP8ZiBGRiLsUoRuco3+MbUdwKQm3N2Cn+ORSwWYiHyI3+GDK86dJRFWXyFr6IknW9+wA+WiogDDkzG5AhJFDmUMpKDqV9fBkKYYRiyUm5mYMAA89cdDhlZmjdvZORRBIemacC8b1PNcw6H1JANQzoAPT8EeOGFqMqYHTiFUz5TrrsREDiFUxGSKDLUQR1swRYMwRCURVnEIQ55kRe5kAsXcAGDMTjT5RrZgz2W75UTTuxB9gsdVMpIDiZvXqBfP/l334yBAyMjjxV9+gA33eT9i7TDARQrBrz0UsTFUgSBli8ftAU/ABs2As8+B/R+BHj5FWDffmhjxkiFRREUZVHW0jKiQ0c5lIuQRJGjNErjFbyC/uiPRCQiAQm4hEs4hVP4AT/gVtyK5/F8pvGXKYiClu+VASNbORu7UT4jOYxt22RE5eHDcvPu2BF48UVg8WLPSBWHQ+bymDEjc4XKXrgAPPWUlCv5asFPTQPuvlvmRSmX/f6eKhRBcR7nURIlcQVXTNttxmbURM0ISRU5FmIh2qCNaZuZmIluiH457hM4gdIobenfMx/z0Q7tIiRVcNjdv5UykkNISpKWBXdqB1Ju4k4n0KYN0L49MGmSdP6Mi5PVa594AqidSX2kTp0C/vxTyl+3LlC+fLQlUigyL+MxHs/hOZ+v34pbsRiLIyhR5GiFVliO5T4tDjp01EZt/IW/IiyZdwZjMCZioldrjQMOVEd1bMImy+OczIJSRhQe9O0LTJ7sPUeHYQBt2wLffhtxsRQKRQQgiEfwSEp4b3o0aPgcn+MBZCIzaAhIRjLiEGfrGOYkTqIIikRAKnOSkYwe6IFZmAUHHHDCCQMGXHChBmrgZ/yMMigTbTFto5QRRQr79wMVK1qndt+wAahTJxISKRShhadOAVOnAr8uBZwumVa4b19o6twOgMxRURqlcQZnfG7MuZEbR3AE+ZH5/8aewzlswzYYMFALtRCPeK/truAKciO3rTEP4zBKoVQoxQyKtViLyZiMvdiLQiiELuiCtmib5RKe2d2/s9aqFAExZ448kjFTRhwOYObMyCsj588D06cDn38OnD4tlaY+fYD77rMfeuwPQgB//w1cvAhUqgSUyTpfMBQ+4KJFQKeOQEJCqulv6RLg9THgBx9C69MnugJmAuZhnmXI6BVcwRf4Av3RP0JS+c8pnMIQDMFn+AyJSAQAFEABDMAAjMRIxCHOo3084lEJlSyjT4qiKIqhWNjkDoT6Vx85BRVNkwM4edI8lUPadpFk+3ZZlO+JJ4C1a4H//gN+/RXo0gVo3lwqKqGCBD7+WCogN94oU1yUKyf9ZbZtC908isjCLVuA++4FrlzxPIN0ueT1WF+prORwNmKjZepzAwY2YmNkBAqA0ziNW3ALpmJqiiICSCvJWIxFW7RFMpI9+mjQMAiDTCs7GzAwAAOynMUhu6GUkRxAmTLWmUvJyFoJkpNleoljx+TcbquNW841a4BHHgndfMOGyTDm/ftTnyOBn34CGjYENm0K3VyKCPL2W1IJ8WX2MwxgjHkhvZxALGJt+U3EIpNkOPTCaIzGLuzy6ogqILAYizEN01KeW43V6IZuGImRPtduwMD1uN7UuVcRGZTPSA7gxAmgdGnrmmQ7d8osp5Hgq6+Azp3N22gasHu3PLoJhk2bzI+fDENG5KxZE9w82QWeOSPNRTExQO3a0OLirDuFS5adO2XM9rJfpcLRrDnQvz+0q/n+mTePrOxrxdFjsrhfDmUlVtpK9b4ACyzDYKNBAhJQFEVxCZd8ttGhoyZqYhM24WN8jP7oDwOGzzDZXMiF3uiN1/BalvCTyaqodPCKFIoVA4YM8f26pslom0gpIg
Awf7710ZGmAQsWBD/XRx+Z12NzueQxUU63jvD4cbB3L6BUSaBpE+DmhkCpkuALL4BJSZGXZ9Ik4LprgffeBTZulG/Qhx8A19cA338fJO0pIkBoz/yyILfgFtyIG30eRRgwUAVVcCfujLBk9liFVaaKCCCtI1uwBeuxHv3RHwR9KiK90RvHcAzv432liGQSlDKSQxg9WiY3i4mRm3xMjExyZhjA4MHAxImRlefyZe9hxmnRNOkKECx//WVtFQKkY2tOhSdOAI1uBj77TCalcXP2rDzmuP8+0M5NDJU8y5YB/R6TH5K0Z4xOp7SQPD5YVnq0U4goNhYoWTJssmYFNGiYh3kojdIZ/Cd06CiKoliABRHNXXEcx/ESXkI5lEMsYlEapTEcw3EERzzaCQjblWpjEIP38b7lOuZhXqY+ksqJKGUkh6BpwKhRwJEjUvF4/nng7beBgweBCROsK7mHmuuus05D73KFpvpuvPeovwxE8TQi+rz4onSo8eZcJASwcKEMt4oU498w/4AYBjBuLND3MfN2DgfQ7QFoqmARKqACNmETxmIsqqM68iM/qqAKRmM0NmMzrkXkSl3vxE7UQi28gldwEAeRjGQcwRGMxVjURE1swZaUtouxGDux03JMAwbuwl1YhEWWGUzP4Az+Rg7+9pEJUT4jiqiwZw9QpYpvv0NNA0qUAA4cCF5RGjcOGDrU3BLjcKSmyM9p8OJFoHgxGRrrC10H6taFtmZt+OVJSgJy57I2nQHA/gNAyxbAvn0ZzV+GAeTPD6xdB61y5XCIqggAgrge12MHdnh1RjVgoCzKYhd2wYCBwRiMj/CRpYIBAL/hN9yH+yzDmAHgD/yBRmgU0BoU9lE+I4pMTaVKwMiR3l/TdamMTJoUGotN795Arly+v0AbBtCjR85URABIzdBMEQGkYrBli3mbUJE2X4gVsbHA7yuAW2/N+FqdOsCKlUoRyWT8il/xD/7xmZ7dBRf2YR9+wA8AgMuw5xd0G25DMzRDfdS3PKaJRWxELUEKawJSRiZOnIiKFSsiPj4eDRs2xBqLMISzZ89i4MCBKFWqFOLi4lCtWjUsXLgwIIEV2YcXXwQ++CDjcf511wGLFskU9aGgaFHg++/lMUxap1m3ctKoEfDuu6GZK9okJUkLj1/+mnbPp8KRhc4b+fLJN82KAgWBwoWhlSwJbdGPwPYdwMefAB98CKxbD23tOmjXXRd2cRX+sRiLLXN6xCAmpVbOdbgOAtbK6UN4CAAwCINMK9864MADeACFUMgPqRXhxm9lZPbs2Xj66acxcuRI/PXXX6hduzbuuOMOHD9+3Gv7pKQk3Hbbbdi7dy/mzp2L7du3Y9KkSSijUl/meDQN6N9fHsX89pusjbN+PbB5M3D77aGdq2VLWQTw2WeBChWAIkVkfpEZM4AlS4A8eUI7X6Q5dAgYNAgoVEjmiylQALjtNmDpUhudr7nGutKgwwG0CZF2aIGmaUC//ubhVoYh072nMZ1pVatC69MHWr9+0OrWjYCkikBwwgkNmq12ANATPS0tHfmRHx3REQDQBm3QG729tjNgoBzKYSzG+im1IuzQTxo0aMCBAwem/OxyuVi6dGmOGTPGa/sPP/yQlStXZlJSkr9TpXDu3DkC4Llz5wIeQ5GRbdvIQYPIKlXIcuXITp3IZctIIaz7/vUXOWIE+eST5Pvvk6dPh1/erERyMvnNN/KetmxJPvww+dtv9u6tv+zaRZYoQToc7vRx8jIMUtPIqVOtxxDvvkuhwfelaxSrVpmPIQTF779TjBkjr+XLKQJcsDhzhqJ6NYoYR0ZZYhwUlStRnDwZ0NiK6DKLswgbj0mclNJnAif4bKdR4xf8wmMOF12cwAksx3Ip7XIxF/uxH4/zeEqbkzzJ8zwf0fXnNOzu334pI4mJiTQMg/PmzfN4vkePHmzfvr3XPnfddRcffPBB9unTh8WLF+f111/PV199lU6n0+c8CQkJPHfuXMp14MABpYyEmO
nTSV2XG5Z783JvZk884XvTPHWKvPXW1PYxMXLDi4sj3303okvwm0uXyEOHyMuXwzvPkSPkDTekKgRp723btqGfv2nTjIpIeqVk/37zMYTLRfHoI3KzN/SMCsCAARQmv7Ni2zaKmtfLtg5DXhooalxHsWVLQOsSx49TdOrkKY+uUdx3L8WRIwGNqYg+CUxgERahRs2ncpGP+XiRFz36DeRA6tQztL+TdzKRiV7nctHFf/gPN3ETL/ACSfIiL3I0R7MES6SMcTNv5lzODfvacyJhUUYOHTpEAPzjjz88nn/uuefYoEEDr32qV6/OuLg49u7dm+vWreOsWbNYuHBhvvTSSz7nGTlyJAFkuJQyEhqmT/e9cbmvTz7J2C85maxf31OBSX99+mnk12PFhg1kx46pcsfEkA89RP7zT+jncrnIG2/0fY90nezZM3Tzbd1q/V4aBvnCC9ZjCSEoPvuMIm8eT0XErVg0aUzh5XdQ7NtHUaRwarv0fQsXotizJ+A1ikOHKL7+Wl4HDgQ8jiLz8BN/ooMOGjQ8FAvj6mMe53m0/4pf+bSM6NTZgR0oaG2FO8/zrMu6GZQa988v0MYvisIvMo0yUrVqVZYrV87DEvLmm2+yZMmSPudRlpHw8euv0pJhtnlpmjy6SW8d+eYb642veHEyiBO5kPPLL2RsbEbLgcNB5s5Nrl4d2vns3CNNs7ZU2OXTT63nA8hWrazHEkJQ1Kvr/WjErVjck9ECKgYN8t3HfazSv39oFpzN2c/9fJNvcjiH8yN+xNPMvuefq7iKd/AODwtJS7bkb/zNo52TTpZmactjnRVcYTnnIA7KoAClf6SfXxEcdpURvxxYixYtCsMwcOzYMY/njx07hpI+MhyWKlUK1apVg5HGGe26667D0aNHkeQjxXRcXBzy58/vcSmCx+kEunXzndvDDQns2iWvtEyfbp3C/fhxm06TEeDKFVn/xunMmILC6QQSE4EOHayLCNrl3Dl7xf00DZg3LzRzWiWOSzunJcuWmaerdbmA+fPB7dtTnqLLBUybap7i1ukEpk8Dk5N9t8nhJCIRfdAHFVABz+E5jMM49Ed/lEIpjMEYCOECf/oJfOYZcPBgcMoU8JJ5evTMTkM0xI/4EUdwBJuwCYdxGEuxFM3QzKPdEizBYRw2HcsBBz7Fp6ZtLuACJmOyZaTN+3jf/iIUIcMvZSQ2Nhb16tXDkiVLUp4TQmDJkiVo1Mh78pjGjRvjv//+g0iTN2DHjh0oVaoUYmNVOt5IsmABcPSo/fbpU08cPmxv4/ZnjnAyZw5w5ozvlBUul8xAa6fCvMsl82r5SlIKyJT7Z89aj6XroSuV0sS69hl0HWjRwsZgX39tndjFMDw1qQsXADub4pUrUlvzAk+eBNetA//9V9abyYE8jIcxBVNAEAICyUgGQSQiEcMwDOPeKgHcdaes0/PJx8CjjwClS4HffBNt0YOmBEqgFmqhFEoBADZgA4ZhGAZgAMZhHDbBumiUE07swi7TNluwBVdgXl/CCSd+w2/2hVeEDn9NLrNmzWJcXBynTZvGbdu2sW/fvixYsCCPHj1KkuzevTuHDBmS0n7//v3Mly8fBw0axO3bt3PBggUsXrw4X3nllZCbeRTmjBgh/SXsmPXj48nz6ZzM27Uz9xdxX4sWRWd96XnsMXPHTrf/yNChvsdITCRfe40sVSq1T5ky5NixnsdRV66Q+fPbu7cA+fnnoVvnbbf5XqemyTXa8fcUD/c0P27RQBEbQzF8eGqfpCTrPu4jnitXPOfbsYOiY0dPX5NqVSmmTcso27p1FL0epqhQgaJCeYoe3SlCfcYWJTZwg+URRK5L4Nn8XiKcDJ1i6dJoLyEknOVZ3sk7CYIOOhjDGOrUfTq6pvf5uIf3mI7/J/+0HAcEi7N4ZBacQwiLz4ib9957j+XLl2dsbCwbNGjAVWlC/po3b86e6Tz0/vjjDzZs2JBxcXGsXLmyZTRNepQyEhpGjbLenN0bWJ
8+Gft/9ZV131y55MacGejXz3q9Dgc5bJj3/omJcqPXde/36O67UxWSbdvsKyL588vIHm/znTzpv8/NoUNkhQoZFUWHQz731Vf2xhGvvOLdCTX9lU5ZEN26WvuMdOrk2WfbNopCBTP20zX578iRqW3Hj08dJ+2YGij8+FKTWXmST9JBh+kGqbnAyb283FtDp2jSONpLCBpBweZsbunPYfb4kl+aznGBF5ibuU3HcNDBLuwSoVXnDMKqjEQapYyEhpUr7W2WhQt7/yadlCQjRbxtzmmvRx6J/Nq88cUX9tb744/e+7/1lrmzr6alhjPv3GlfGZk0yXOev/8mH3ww1WoVHy+Vwf/+s7/Wkyel5atIkVRFpHNncs0a+2OIgwe9h/WmvfLlpbjoGXIpNmyQFhNvfXVNKg7r1nn2adLYWvHZtIli0SJr5ejbb+0vMhPSiZ28hqx6bJJJ4OgRJvdg375oLyMoFnNxwEqIgw5ew2t8hvem5Qk+Yanw/M7fI7DinINSRhQZEEIqE2bWgvh4cu9e32McOyZzilhtuGvXhlb25GTy66/Je+6R4cX33CN/Tk723SchgSxWzLfyZBhk5coyHDc9QpAVK1pbkK65RrZ1uWTiOKv7UqeO5zxLl8r76S3aJ39+mVzOH4SQVhc/DI+e/UePNt/4fcRuix9/lIqKrqXmGdE1GSb8ww+ebbdssVYwrkbgiFtbmSst2cAy0J/9bVlGPuhncr/80TozIT3Z07ZVxH2v3O2rsir30uSPVhou8AIbsEEG5c891miODvNKcx5KGVF4ZfdusmzZjBu0rtvb/JYssd5wHQ6yb9/QyXzyJHnTTakKRNp/b7pJvu6LlStlCK+3zb5gQXLTJu/9Tp2yb+lwfyytLCmAVD7cXL5MFipkrixVquRdWQoXQgiKCRNk3pC0m12Z0hQWji7i/HmKDz+keOhBigcfoJg4kSK94xFJ8fnn1sqIBoqb6tlrp8Fr/pOswu/83XoDTgKPFYueZeQ8z/NDfshO7MT7eB9f5as8yqMhG9/tK2L20KnzWT7LQRzEu3gXu7AL53Iuk+jfueYlXuLrfJ1lWTZl7GZsxu/4XcjWo0hFKSMKn5w8Sb78Mlm+vMzBUbIkOWQIaSef1Mcf29ugmzUzH+fAAXL4cLJaNekQeuut5Ny53r/Rt2zp23HWMOTrZvz7L/noo9LqA5B58sg0+GZ5uM6eta+MuE8tnE6yWzdPZcmt+ADk6697zjFtmr3xfR0jhRORmEgxYwZF27YUhQvLY5hyZSleeoni+PHgxp49256CcXND+8pIkDJFE0HBVmxFQ3i3DGgu8OnxJpahZk0DmtdFF3/hLxzAAezBHhzN0TzAjH8EfuNvLMAC1KilOJTq1BnDGE7hlGCXT5LsxV6W1iHQ2i/EHwQFz/EcLzPMKZlzOEoZUYQFO34Yui7Tnvti2TJprUi7Ybv/f+edng6w69bZ27DTuSR4xeUiL1ywZ2kQgqxZ09zSoesZj11cLnL2bLJJE6n85M0rj5SWLcs4R58+5kdmRXGcnYy5nH3/lxT//mstdAgRf/8tlZD0RyQOg6JUSYqdOwMf+9gx6wgcQ6d4+WWK0qWsFZGiRUxT1WcFzvIsb3PdmmIFMZLkvyD42IdgksPLut1HYr/5n6TrEA+xDuukHHu4M5/q1DmKo1Kyme7kTuZiLp8+LRo1/sjgteVf+aulIpKXeXmJXjy/FZkapYwowsLJk9KaYqUc+CrOduyYtEz4OprQdfLxx1PbDxsWXERMMEyZYr3Ozz4LfHxfykhenOdkPMxEpNuwWzSn2L49dAv0gUhOluGzvnw1HIasQxNE1T/Ro4fv8XWNIncuiiNHZISPmVOtw6Cwk+s+CyAouKpvbT7xjsbu08Ghr4Lbq5ooYQULUHzn/9FCAhN4La81tURM4ASSMmOpWTudOm/hLSFZeyu2MvUbGc/xQc+jiDxKGVGEjcGDzf0cypTxXQzutdeso3Hi4+UxCS
kVE6vcKDExUqZQI4SMDEp/7OL+/2OPBVeF19sxTRyu8A80ZBJ81XkpTLFrV+gW6QUxb569o5Fffw18jvPnKRo0kIqHO5zX7bgaH0dxNVmNuHCBok5t33Vvrq+Rpf1F0mPr3t95B8X06RQBVlz8gl9YWiEKszATmMBCLGTZFgQP8VDQaz/P82zP9gSlQ6k7z4iDDr7CV2zVnlFkPsKSDl6hAIDx44F77pH/dyfsdKcbL1ECWLwYyJXLe9/5831nRHWTkCAzkwPANddYZ311uWS7UKNpwKRJwJdfAg0ayEymug7cfLPM7vrhhzbTrPugc2egUCHPlO49MR0NsAYOzcuiXS7gwnlg+LCUp3jkCDhvHvjNN+DBg4ELk5bFi60zsTocsl2AaPnyAb/9Bnz0MVC7NpA3L1C8ONCvP7Dpb2h33ikb5skDLPgB6N0biItPHSA2FujZE/h9BbRsVC5Cu/deYNhw+UPa98Bdh+Hxx4GFi6D16AHN1y+ZBZ/jc+gWybdP4zSWYAnOw16q4LM4G5AsacmHfPgO32ELtmAERmAgBuItvIUjOILhGA4NQfyyKTI/EVKOgkJZRiLLsWPSafKnn8jTPup0CSGL7nXrJiNaWreWlX7TpaDIQO3a9nxAZs2S7U+dsj4Wio01j6gJFS5XcJYQb/z6q7QEuY9rNqAWk6GZfzOOcVBs307RpYunxUDXKO67l+Lw4aBkEn372svE+vzzobkJ3mQ4fJjimWfkUYR7vs6dKD6dRLFkCYWvD2Y2QfzyC0W7thT588mQ6TvvpPjhh6COxtzUYz1b1o7pnM7yLG/ZzqCRrQv6KYJDHdMo/Ob4cfKBBzz9GOLiZCRKqG59jx72ssA6HGS+fFKep54yb/vmm6GRLRzs3Sujhu68k2zfnnzvvYz3cvNm8qGH5HHTBeS2d0RSsYLvo4tKFSlOnAhYZvHxx55HJ76u2bODvDs+5t+5k6JE8Yzri3FQxMVSWIQXibNnKb79lmLmTIoNG8IiY1bmPt5nK6fHYi7mq3zVNCGbgw52YifrSRU5FqWMKPzi1CmZwMubomAYMlmaldXDDn/8Yc8yklYp0TTy4YdlAjOPaJOiMtQ4szJhgvSPcfuYaJq88ucnvQVAJCaSziJF7SkjVk6dzz4bsNziwgWZrMyXQmLoFMWKUiRaZ7z0e24hKOrV9e3cauhSNi9/C0RiIsXTT1PkivfsU69ulk8KFkrmc76lIlKGZeikk6d5mpVYyasTq0GDeZiHW7k12ktSZGKUMqLwi2efNS+Cp+sZ82QESv/+/ikk7vk3bSIXLpSROgsX+l/DJZJ8/bX5WnLnlgno0iP69zc9InG5lQ0rZaVgAQqz9LQWiG++kRu/t9De2BiKn38O4u6YzLt6tfXadI3i7bcpJk2SidEKFqAoW4aiSmXvCpShSwUl1GmBsyhOOtmETUytIzM5M6X9QR5kczYnKKNn3JaS6qzOdbQRU6/I0ShlRGGbpCSyQAFrhaB8+dDMJwQ5fjxZvLh/FpJ+/WSytFOnQiNHuBCCvPZa8/UYBvn00176/vOP3OztHJNYXXZK9ZqtY9kyihbNPZWAO+6gSFMYM9SI8eOt6+PoWmqGWKu2aZWoWxqFTe6sxAVe4HzOZyM28ohcAcF8zMepnOq132Zu5gRO4Jt8k7/xNxXdorCF3f1bI8loOc/a5fz58yhQoADOnTuH/NnIcz6zcPAgUK6cvbZXrgDx8dbt7JCcDKxdC6xYATz/vHV7TZNbOQA0aQIMHQrcfXdoZAklX3wBPPSQdbsSJYCjRzM+zwUL4OrQEUx2wgEZVZNMB2I0J17hcAzVxsCARUgSAJw9F5JIEx4+DJw4AZQoAa1kyaDHM51r/Hhg6BDrEKq0HwZ/aN8eqFQZeOABaPXrByZkFiURiRiO4fgIH+ESLqU8fx2uw224DTfhJnRAB+RG7ihKqchu2N2/lTKiwKlTQNGi1u10HUhKSo0yDBU//wzccYd/fQxD7l
cTJshox8zCgQNA1apAYqJ121y5gMuXMz4vBHBLxcO44+Ak3MEfEYdErEUDfIj++Bu1MR9tcQd+Qozm9D6wYQBNmkD7dVlQa4kGXLkSaNokfBO447OdTvmhmz0nW4UG+8IJJ+7G3ViCJRDpFFkDBoqiKNZiLcrB5rcSRUBcwiXMwiysxmro0NEczXE/7kcc4qItWtiwvX9HwEoTNOqYJvw0aGCejMwwzFO8B8ORI+b+KmaXppH//BMeubxx5Qo5fTp5220yTLl9e/Lbb1Nr6gwdan8t113nfY5ly8z7NcVvdGqa9B/xdS1c6HMNYvt2isGDpZ9F4UIUTZrIyJOrPibC5ZLp4FetCioqJxCEEDK7qx2/mGAvh0Fxx+0hCZfN7MzgDFOHVQcdfJAPRlvMbM1CLmR+5k+5326n4GIsxj/5Z7TFCxvKZ0ThF2YOl+4riISblnTubC/k15svyRNPhE+utOzfT1atmuqE6lbSALJxYxmye8019mWfMMH7PHbS0PfGp0zW0jmYxjikP4WvgUmK776TPilpnWTdfhe33y4r9lYo77lhd+lM4c3bNkyILVukkpTekddhhMaXJv0VRh+YzEJDNjQN0XVvkKeYyR2ysihruIYOOqhR8xmVtJOB13vKzChlROE3r7ySusGn3ew1jZw4MbxzHzpEli0bmEJSt254ZSNlwrOaNX3LZxiyIF7p0vZkLl9eFu3zhh3FUNfJXk12UDz1FEWdOhS1a0lrx7ZtPtcg9u6VeTr83dAdhnQYDaI4nr+I3bsp+vRJDdM1dIr27WQa9FAqIjEOinDUEogC53iOEzmRbdiGt/JWPs2nuZ2yllFe5rUM5wXBtVQRR+GgHduZRi856OAADoi2mGFBKSOKgFi7Vub0qFJFfsvv14/8++/IzH3kCNm3r8xI6q91xCIPVtAsWmRPloYN7R3TmOXiOn9ehv5ajTHFz+rtYsiQwI8/HAbF7bcHdQ8DQSQkyGysaZLciPr1Q3eM4zAoHugW8XWFmj/5JwuxELWrD/cGB4KjOIpFWdSWMrKZm6O6jrVcy8f4GG/lrezIjpzFWUxk6PPZRJKzPGtplQLBPMyTLSOUlDKiyLJcuEBu3UquWCGtMnYVkgAqqdvmscesrTa6LjOpmrUxDNLOnj58uO+1GwZZrhx5yc9q6qLm9cFt3LoW0eMan+v4+2+ZJj0UCkmMI6gEcZmBgzzIfMxnuuE1ZVPT6rsgWI7l6KIrKmtIZjJ7sqeHEuVeTxVW4R7uiYpcoWAP99hSBEFkecXLG6pQniLLkjcvUKMG0Lgx0KGDZyE5X7hcwJAh4ZPp4kWpCphhGEClSjJIw5vMhiEjaN56y3q+UaNkbTggtV6ae8wyZYAlS4Dc/kZgJib52SEdJPD332BSEjhnDtizB9i5EzhyJLh/f3Bj+4F2ww3AqtVAmzbBVSoEZFTNww+HRK5o8RE+wmVczhAl40aDhl3YBcL8A/w//M+ygF64GIZhmIEZAGTkD4CU9ezGblRGZcQiFjfiRkzGZCQjOSpyBkJRFIUDFoUnARREQcQiNgISZVIipBwFhbKM5FxOnJD+FXaPbHbsCI8co0bZO3754gsZcfP44xmPmxo1kllk/WHdOmmVadVK+qR8/jmZkBDYGkS3rtYF8Kyujz9OdXB1O5Q6DOnTMWZMYIIFgThyhOLPPylmzaIoXizV2pFWNjNLT48eEZc51FRhFVvful/n63TQ4eG74LZC9Gf/qB0RnOEZxjPe1hrc1pLWbM0EBviLEAW6sIupz4hBg8/wmWiLGRZU0jNFtuHhh4Hp0+21XbIEaNUq9DIcOABUrChzgKTFgBMuGAA0FCggk5i5k8KdOwf89huQkADUrCmtPdGEy5cDLZoHPkB8PJA/v0xM4ysp2aRPoT3ySOBzBAETE4GvvwYWLwZcTuDGukCPHsCHHwKvvCyT5DgcqbL37QtMeBdaTExA813ABezCLsQiFtVRHQZCnIDHJsVQDCdx0r
LdMixDGZTBh/gQC7AASUhCfdTHAAxAczSHhiCtTAHyJb7EA3jArz46dDyH5/A6Xg+TVKFlK7aiPuojCUlwwfN3x4CBAiiATdiEsigbJQnDh8ozko04d4786Sfy++/JffuiLU3kufNO+5aRjRvDJ8eoUXKOAjjDERjNgyhFoYFXEMvP8AAXvPxX+CYPAUIIigH9vVsJ3OG9ZsXxmjaxjsQpX47CnXQlEyFOn6b46COKESNkXZuDBwMe6ziPsy/7enybL8MyfItvRcXnwk7YLgju5d6QzXmUR/kzf+YSLuF5ng9qrA/5oW2firSPfMzH9/k+u7IrO7ETx3EcTzCyeXH84Q/+wbIsSxCMYUyKVaoaq2XrYoPKgTUbcOUK+eSTZK5cqZutppF33+29yFp25cEHzROyua8SJWRdmHAhBDnplaP8z7iGSfCsieI0HPJ44JtvwidACBBCULzzDkWZ0p4RJR06yCMYXzlIWrWkuLGOvaOcP/6I9jLDxjEeYyVW8mly78meET/umMqpppu2QYO38taQzHWYh9mZnT3Wn5u5+QSf4GVeDmjMH/ljQMpI2vW5C/jFMpaf8/OQrDUcOOnkfM7ncA7nC3yBi7k4W0bQpEUpI1mcpCSydWvvm7BhkEWLkntD90UnU7NggT2ryKefhl8W0a4tXb78EHRN5vE4coROp4zu+eorGRXkik6Qgk+E00nx118UK1dSHD2a+vw//1AMHEhRsoSshtuwgcztkZTkmQzN7PrhhyiuLLz0Yi/LqJT5nB9RmRKYwAZs4FVBMmgwnvFcz/VBz3OUR1mO5byuX6fOZmwWkB+Hk06WZumgFJK0D40aF3Nx0OtVhAaljGRxpk+3DmXt2jW6MrpcMv/GwIFk796yEm84soc7nTKxmZkDaSRSYIjdu62PKQyd6zu8wjJlPOWrUIGcPTv8MoYT0byZvSq5kUpMEwZEcjLFiRMUV65keO0MzzCWsZbf0m/jbQHPf4InOJqjWZ7lGc94lmEZDudwHuZh037neI5d2CXDcc11vI6rGJoMs4/xMVMnTI0aP+JHAY09l3O9ZicN1FLSgi1CsmZF8ChlxIITJ8jPPiM/+ohcujTzfXO1qhXjVkhOnoyOfLt3y9oqbjkcDilvTAz5/vuhn+/4cXlP3JYhTUvNw9G7d2TePzvZP10a+Ata+XzPJk8Ov5zhQnz2maUiJm6sE20xA0Ls308xaBBFntypa7n/foo1a1La/Mk/bW2GhVgoIBn+438sxVIZFAqDBouwiK2EZAd4gFM5lR/xI67gCr+OABKZyC/5Je/knazFWryTd6YkHbvIi5YRLxo11mRNjzH/4l8cyqEcwAF8g2/wKI/6mJ38il+xJEumjBWsUnKMx2yvXRE+lDLigytXZFbR9AmsKlUif/45BMKGiHz57B1NpPlbGTHOn5fhtmZJwGbODP28QkjFsV8/sls3WZQuXKG8XuefMsXWMcVSNPd5X3LnlvcvKyISEynq3+Q9XFbX5Aa+OOuZx8X27RRFi2QMe4656gf07bckyXVcZ2sTLM7i/stAwZqs6fMIyKDB8izPZCb7NeZZnrXly3GMx3gDbyCYGj7r/rc2a/N3/m5r7TGMISmzjt7O2wnK8OEYxlCnTgcdHMVRPpWkZCbzB/7A9/geb+WtQSkm7lT4iuiilBEvuFxkmzbeLQ66Lr9xL1kSIqGDpHhxe8rI5ihkb37/fevMqJUqZT5rU7CIrVstFZEkzeCrGOrzvmgaOWmSyRwuF8XChbI4XZMmFJ06yuJ2mSRCRZw5I2vEuK0H7g28VElLXxHhdFLs2iWvZPubajgRQlDUvdF3PhJdo8idi+LUKSYykYVZ2HQDdNDBh/mw33L8yl9tbbDf8lvLsS7wAkdzdIqVAQSbszl/oPf3R1CwMRv7VIQcdLA+69uSLw/z0EUXm7Kp6ZHOG3zD+r2h4If8kJVZOaVfLuZiHuaxlMNBB8/yrN/vQ05jB3fwCT7BSqzE0i
zNu3k3F3BBSJ1qlTLiBav6IrpO1qgR3ogMu3iz3qS/ypePzoZfv769NO2rV0detnAjmjU1TaSVrOmsgD0+70lMDPmMj9xG4uxZGT7rjnBJ+2/9myiidSbnBbFzp4zKef11ivnzTZULkZREMWYMRelSqfeqRHGKUaMoAs3gFiLE6tXW1i5do3jrLZLkKI4y/aauUQvIWXQ4h1s6xjro4CAOMh3nLM+yNmt7PeoBwbEcm6HPKq6ypWiUZ3nTtTvoYDd248/82XKsfMzHS7RXz0BQ8D/+x63cyou8yJf4kmXRuS7s4vd7kNOYy7l0XH2k/5w8yAfpZGi+ACllxAv33WevKuzaTFC4cuvW1Iq5vuQMh2+GHexmRF2wwHqsnTvJ554jb7tN+oTUrk1WrkzWqUOOHi2L52UmxH//URQrmtGkf9Wxs7/+oaWfz4sv+hi7zd2+FR2HQdG8eUTXGgpEUhLFXXd5d3w1dIqWLaKqkIi33rJ2ytU1ig73kySTmMR7eA9BeGz47vLwH/PjgOT4H//HGMZYKiOP8THTcfqwj+lGDWaszPs8n7elCN3Nu03baNS4hmvYnd0tZQDB2QzMo/skT/qM6jFoMA/zcBt9V69WkP/y35TPrK/38jW+FpK5VG0aL+zcKUtRWLF7d/hlsaJGDWD2bJkw0kiT2NFdp2TQIGDAgOjIVq6cvXoxpUubv/7660C1asCbbwK//AKsWQNs2iTv/8aNwEsvAddcAyxfHgqpM3LkCPDGG8ATT8haMNu3W/fRqlQB1v8F9OqdmmoVABo3xuaxi/Ch6Gfa3+kE7rkn4/PcsgVYuNB3ZlOXC1j+G7h2rbWQmYlPPgF++jFj6lpAPrd8OTBhQuTlcmOnto2mpbSLQQy+xtf4Al+gIRoiN3KjAAqgC7pgFVahL/oGJEZd1LWst+KCC3VR1+frZ3EWMzAjQ4bPtDjgwERM9HjuEi5ZZl/VoKEKquAZPJMyTtoxdeiYgimoj/o4giOmMrjHO4Zjpm18UQRFsAIrUB/1AchsrO7st5VRGb/hN1yH6wIaO6fg/gwQ9Po6QbyFtyJbAygkqk+YCZVlpEkTe8cLixaFSPAQsHs3+b//yeOjqlVlOO/y5dGVafJk8/unadbHXTNm2LOu6DqZNy951LcTvt+4XNL51TBSI4DcYcOdO9uvhisuX6bYsyfl+EQIGYLsy/pmGKQv44Z46SXrujExDornngvNTQgQcekSxaFDFJetnSKFEBTVq1mHQ5crS2HzvFFs3iytGePGUSxeTBHkmapYs8beMc3bbwc1jxWJTGRRFjX9ppqHeXiBF3yOsZRLLa0RIFiBFTz6TeAESydRjRrfpzTFLudydmVXVmAFVmEVDuRAD0tEd3a3tLSA4BzOCfq+/cW/+Cbf5FiO5VIujXoCsY3cyMEczLt5N7uxG7/hN345HUeK8ixv67OymsGftatjGi/YcbwsUEBG3Ch8c/kyef313jddd8it2RGNEOQ119hTDN0Kyauvhk7+F14wn+uee3z3TUoi58whH3mE7NGDfPtt8tSp1NcPHSKrV08dK+2/tWrJEGWv9+Tpp2X2U7NNMTaGol+/0N0IPxAbN1J07pR6jBQbQ9H9IYptvs3h4tIlW9FHQgPFMfMwTHHoEEWL5qnHO245rqlCsSq4PBqiXl3fiqDbgfX06aDmsMNP/ClDITv30YNOnXM517S/XWWkPMt79DvFU5b5U+IYxzM8Y2sdv/AXSxnyM79tn5GsgJNO9mEfgqnFB93vY3VW5z5mrjoexVnc1mflN/4W9FxKGfE6jkwZbpY8K5SbXnbm2DH5Ld/9jT8mJlWZs0rutXmzPSUk7VW3bmjkPnWKjI0NzG9o40amJDNzOFLzncTFySR1bq5cIadNI1u0IK+9lrz1VlnN18w1QkycaCuhmhg3LjQ3wg/E4sUU8XHeQ1/z5PapDIgrV+wrIybOueLsWYoqlb0rDA5DKgv+lkNOO/6OHbLir6/Q3v
mRy6j6J//kbbzNY0Noxmb8lb9a9j3DM4xjnOnm4qCD3dk9Q993+a5pP7dVxA4uuticzU39Rt7iW/7clkzPMA7zaV1y0MGqrMpEJkZbzBRas7WlX49BwzQvjF2UMuKDLVvIUqU8v7G6v+H375/9wlHDzYYN0tF02DBZ3t6OVWnFCv+VkerVQyPvxx9bW2QcDvLxxz37HT5MFirkW5HVNHLhwsDlEqdPyw3f6pgmlOdVduS6coWicGHfTp4OQx6z+Ag9Fg0bmjuI6hrF9TVMj1vE2LHmYzgMinvaB7fOgwcpnniCIl/e1DE7d6JYty6ocQPlCI9wAzfwIP0r6PcoH7XcZHyZ3qdwikc4MAiWYilO4zS/ZJjP+WzIhh7juGvHOOjgq3w16scpoeQsz1omhAPBWZwVbVFT+IbfWCqtHdghJHMpZcSES5fIKVPIdu3Ili2lErJhQ0iGVtjgwAH/FBGHg+zYMTRzjx5tHVGl69J3JC3Dh5tb1HRdhjwHg3j7bXNl5JVXgpsgEJlsZJ0VGnxaD8SsWdZ9LYoKiSqV7fl1+DoD82e9TqdUDBMzz7dYfzjDM6zJml6PekBYRkgkM5mLuZhf8Asu4ZIM4Z3neZ4/8Ad+w2+8JhUby7Epykfa+TVqLMqi/If/hHS9mYEv+aWlIqJT5728N9qipuCiix3Ywas1x6DBYizGPdwTkrmUMqLI1Nx+u/nmnv4KVWJPu5aRwYM9+5UrZ0/OYKspiw8/pChS2HOjLVhA5vSIQgIc0a+fPV+WIUO89xeC4vHBqdaGtEdOGigefcRyXZYWI/e1cWM4bkGW4zzPcyRHshiLpWwwTdiE3/G7gMdMYAL7sV+GY6BmbMYt3EJSOpNafdt+gA+EapmZhg/5oaUyAsrEc5FCUHAd1/En/pTy/qQnmckcyZEsyIIZlKZQKSKkUkYUmZwNG8hcuawVEk0jH3ggdInoTp4MzGckb157ykgo0vOLhASZSOzjjynmzfNatC1SiP797UX5DB3qewwh5DpaNJdtHQZFk8YUs2fbUrBEieL2lJF9mctJMNq46OJJnjSNwLHDCq5gURb1usEaNJiP+biN29ibvW3lK4lEzRhx9REJFnGRpSLioIO92Csi8nzJL1mFVTzmr83a/Jne650kMIEruIJLuMSyIGMgKGUkk3P8ODl1Kvnee9LXIJNk+44oq1ZJB09fG3v+/DJBWKjvzfDh5sct7dpl7FO1qj1l5KB/R/yZHjFzpj1FwGY8vBDC2hJy8aJMO3+1nXj6adOst8LQKRo0CHqt/iCSkihWrqT4+WeKYM1hISKJSVzFVfyVv/rta+KLn/mzLUfHO3knq7KqLQvBIoYvd8L3/J6t2ZoxjKGDDjZiI87kTLoYPmfAZCZn8LXx9ljBFWGTwc37fJ9gxno+bp8dO+UEQo1SRjIpCQnSR8Xtt+A+Mihdmpw3L9rSRR4hpEPrxImyZsuSJeR338mihTZSWQSEyyVzt7jrEcXEpL4fHTp4zzMydqx5FWXDIFu3Do+80UQkJMhIEzMH1sqVbOcJMZ3rm28obrkldeyyZShee41i2zbpWGrmxGpRF8cvOdaupXikN0WtGyhuqkfx4osUV7VMIQTF+PEZrTW3tgoqoicYXHTxDb7hEa6pUWMbtgmqWJw794kdBUOjxkqsFFVl5Hk+n6Icpd2EQbA7u4dVIZnN2T7Xq1NnB3YIu6XmGI+ZZvHVqLEwC0c8qkcpI5kQIWRKem+bmjs/x3eBH+sq/OTgQfL118mBA6UFxiRlBs+cIStW9O786k6cFmS6i0yLWLlShvB6C30tWDAkvhrixRc9fUnSWT3E4sWpvjSGnlolODbG0gHWtgxCUAwZkrq2tApXfJw8Ohs4wLdSlic3RYQ94QVFSn4LbxaLAiwQsNOo2Qbr7dGWbS2PaWIYw5MMfY2leZxnKd8H/CDk86blS36Z4qfjVoIcdHAAB0REARjLsRkch709Ak3DHyhKGcmE/PqrtX
9EhQoqvDizsn8/2bBhqiXEnVulZMnQOdhmVsT27RR9HqXIFS8337x5KAYNotizJ/ixly0zPwJyGBRPPSWTqH36KUW3rrKa8WuvhTTUWUyaZB6tY3ZU5Jbz5ptDJo8dlnGZ6cZj0OCtvDWgse3Uy0n7+IyfmWZyNWgEVNHYDs3YzPQ4SaPGa3hN2K0TSUzid/yOb/NtTuZkHmfwEV526cEelkdqMYzhi/RRHCtM2N2/NZKMXPL5wDh//jwKFCiAc+fOIX/+/NEWJ2AeekjWm7Gqj7N0KdCyZWRkUvjP2rWylk5yMlCnDtCmTWrNoOwOhQAuXwZy54Zmp0CRnTE7dgTmf2f+i5E3L3DkKLQ8eUIyZwYZSKDqNcCePfK7gTfcdWys/mT+vRlazZqhFdAHXdEVX+NrOGH+R+U//IcqqOLX2CMwAmMx1nJsACiO4jiIg/gYH2MwBsOA4VGfRoeO63E9lmM5CqKgX3JY4YILMYgBYb2VbcM2VEd16Mh+Zdkew2OYgimm75cBAy/jZQzF0IjJZXf/zn7vSCbmv//sFerbuzfsoiiCoH59YNgwYORIWfQupygiAICTJ2Vlw8qVwDy5wYoVwNGjwRMnAh9z+W/WvxgXLwJbtwY+hxX//isrNJopGm4jphXhlDMdG7DBlrKwFf7L1BqtbY0NAM/iWcQgBoMwCIuxGLfhtpTie6VRGqMxGiuxMuSKCICUr/12qIEaKIMyeBkv4yIuhlyWaNIGbSzfLxdcaIM2EZLIP5QyEkEKFrTXrkCBsIqhUAQEt28Hat0AvPIysH8/cOWK/Hf0KKDWDeCOHQEObNM4G04j7qVLoRsrbTXnMBMPe3PFIc7vsZujOa7H9R4Ver3xCB5JqeYLALfiVizCIiQiERdxEQdxEMMxHPmQz/bcl3EZe7EXZ3DGsq0DDtyAGywrD7s5iqN4CS+hCZrgHM7Zlimz0wZtUAVVUioYp8cBB1qiJWqhVoQls4dSRiJIbKx1mzx5gNtvD78skeLYMWDePGDuXGXxySpw3z5w82bwTOpGQCGA9u2A06cBV7ry8EJIi8k97WU7f2nWzNq8lCcPEM6jj0qVAMP7H/EUNBubXXw80KJFSESywz24x+fm4yYP8qAxGvs9tgYN3+JbFEVRr8casYjFFEzBJEzy+noMYpAHeWwrCQCwG7vRC71QCIVQCZVQGIVxG27DMiwz7fc4HrdtHQEAAYEt2IIhGGK7T2bHgIFFWIQSKAHt6gNAyntTHdUxC7OiKaI5kXBgCZas5MAqhCzGdvas5/PHj1unIQfIAQOiI3eoOXOGfOghzzVrGnn33dIRVJH5EF9/TXFjHc9omW5dKXbupFi0yF6+kZ9+8n/eX3+1TvXe+BaKv/4K/aLTytGls7WTanyc7xBjQ6dIX9QozBzgAcYz3qfjqE6dz/LZoOY4zuN8iS+xHMsxlrEszdIcxmE8xEN00cWlXMqP+TE/5+dBRcps4RYWZMEMETkGDWrU+AW/8NnXSSfv4T2mDrTeHvGM51me9TluVuQMz/Btvs06rMMyLMOGbMhP+SkvM0y5EixQ0TQRJjGRHD+eLF8+dfOtVUtWbxVCJjczy1Ph3qyffz7aKwmeCxfk2r1lVzUMWajw0KHU9gkJspLv5s3mlW0V4SOlLk76jdZhUBQqSNGzp3Va+BgHxXPPBTb/iBGp8/mKVNFA0aQJRdoPTwgRu3bJooBpZNhYC5zeHfyyq8ajbevLEOO8eTzldP//zjspovABXsRFjGOcRySF+/9t2CZsYaU/8SdWZMUM0RoDOdDvOQUFb+ANptEgMYwxzd6azGSO4ziWYRm/FJLf+Fuwt0JhglJGIkhCAtmqVUZlw/3zY4+RQ4akhoL6uhwOsmfPaK8meMaNM1e8HA6yXz+Z1GzYMLJgwdTXChYkhw4NX8IzRUbEf/9J64NZyGqRItZWg9gYiiefDFyOuXMpbmlkrfBUvYbi/PkQ3oFUNu2Zz2dmlePdC8AShz
03LYeQoannD22neOEFimurU5QtS3Fbayl7FNMo7+ZuPsfnWImVWJIl2Yqt+BW/ylDoLlT8wl9SLBberDH38l6/wmj/5J+WSoNOnWM4xnIsJ53cxV22sqIqZST8KGUkgrz8srXVo2dP6zYOB/lscBbVTEHFitbHUblykY0aeb8nuk42aUJGsSRLwIjLl+Xmfjj0NR7ChXjuOWtFw+41fXrw8jRpbJ5tVdco3nknBCtPJZGJfIAPpFoVBOSV7mHQ4C28JeJZLDMTgoLVWd0ywdYSLrE95jt8x3I8jZpflW8f4AOWSdiy4zFNZsPu/h2QA+vEiRNRsWJFxMfHo2HDhlizZo3PttOmTYOmaR5XfAS9zcON0wm895704fOFYQD//GNvrAcfDJ1s0cDlsueoeuUKsGqV9/smBPDHH/K+ZhV45Ag4YABQtIjMV1GmNHhTPXDu3GiLZs2a1RmdUv1F02S4WKdOQQ3DffuAlSvNf6EAYNKkoOZJz0AMTHHuc8EFaIA3v0sXXPgDf2AmZoZ0/mBx766RYDVWYzu2Q8D3e+SAA5/gE9tj2s374U9+kMEYbJlz42E8jAJQ4YuZAb+VkdmzZ+Ppp5/GyJEj8ddff6F27dq44447cPz4cZ998ufPjyNHjqRc+/btC0rozMS+fYDJ0gHIv/N//QX06+fbId8wZM6KOnVCLmJE0XUgzmYUoVmkphDAyy8DHToA3bsDX30lk4xlRnjgAFD/JuDTSVLLcrNxI9C5E/j661GTzRYxMcH1Nwz5xk+fAS1XLtOm3L8ffOcd8KWXwGnTwIvpcj0cOmQ9HwkcOhiEwJ7sx35MxmTTzTUtOnR8gA9CNn+gCAh8js/RAA0QgxjEIQ534k78iB9xHMfxNb7GLMzCDgQYcu2DXdhl2cYJp1/zNkdzW/e/OZrbHvNm3IxX8SoAZIg40qGjJmpiLMbaHk8RZvw1uTRo0IADBw5M+dnlcrF06dIcM8b7Wd7UqVNZoEABf6fxIDMf0+zaZa+aa0yMdHJ99NHUIxn3BZD33uu9QFtWpGtX88ghq+Oq9E69bkfYihXJ7YHX/Qobom0b62OOKBVRs4MYO9b8WMTqatmCYvly8zmuXKF4uKdnTRldk/Vc3nsvtd2//9qbs0rlkK3/Db5hq6ZH2kchFgrZ/IHgpJOd2TnFlyLFr8IlfTh04bmelmzJndwZkrnnc77l/dGosQVb+DVuEzbxeayiU2ce5gnoSGU+57Mpm6aMVYIlOJqjeYEX/B5L4T9h8RlJTEykYRicl668bI8ePdi+fXuvfaZOnUrDMFi+fHmWLVuW7du355YtW0znSUhI4Llz51KuAwcOZFplJDmZLFHCfEM1DLJ589Q+O3aQI0aQvXrJ6rEhqDNmmz/+ILt1I8uUIcuWJR98MPQF3tas8U/hsHsZhpQ7M30MxN695s6fbqfLxx6Ltqg+ESdOSKXAah3ersLWm7IQguK+e80Vng8+SG1bu5a5LIZOMWpUyNY/hEP8qsECgmVZNmTzB8JbfMuvMFaDBouwCPdwT9BzX+Il5mVeS2XkI37k17j7uI9lWTZDRI2DDsYylj/yx6DlPs3TYa3eq8hIWJSRQ4cOEQD/+OMPj+efe+45NmjQwGufP/74g9OnT+eGDRu4bNkytm3blvnz5+eBAwd8zjNy5EgCyHBlRmWEJF95xXrz/eabaEtJjh6dapVJ6zQLkD4MWwEzc6YcO214r3uusWPJ+vUDU1g0jXz//dDKGgzi66/tbdo31Iy2qKaIH3+UOTTSV+a1uiqUtx77jz+sxylYgOJqCJX47jvf7RwGRdEiIS2Q9z7f98syYtAIOndHMLjoYjmW80t5cm/qD/LBkMgwkiN9KkMGDZZiqYAsD8d5nCM4IqX6bTzj+TAf5mZuDoncisiTaZSR9CQlJbFKlSocMWKEzzZZyTJCyuOX226TG6WmZTyOGDhQ5hqJJvPmWW/0CxaEds49e2TelLp1Zd6R/v3JtWvJt9
+2tiaZKSONGoVWzmAQ335rb9OuUyfaoloi/vmHon9/mVckNsZelVob4V+ib197Ss6cOal9Pv1UymDoch53/7JlKP7+O6TrPsETti0jOnXmYi7u5u6QyuAPu7nbb0UkrUJyhmeClsFFF/uwT8qY7nsDgqVZmtu4zbT/GZ7hQi7kfM7nPu7z2iaRiWGvsqsIP5nmmMYbHTt2ZNeuXW23z8w+I24SE8l33iErV07dOOvWJb/4IvqKCEnecou5JcIwyBb+HfH6zcWLUpFIq7AFclWrFl45/UEcP2690TqMgJOBRRPx/vvmRyW54il2W2/K4s47rBURQ6d46y3PfidOULzxBkX3hyh696b46iuKpKSwrPUlvmS+jQvph5Gbuf0KWQ0HO7kzYGUEBDcxdP5La7mWfdmXzdiMbdmWUzjFNNPnRV7kAA5gHONS5NGosQ3bcC/3hkyuUCCuPhTBEbY8Iw0aNOCgQYNSfna5XCxTpoxPB9b0OJ1OVq9enU899ZTtObOCMuJGCOnTcPFitCVJ5cIF+xt9OHN7PP6496ys/vqN3H57+GQMBNGzh7kVwWFQ7NoVbTH9RghB8fTTqX4vaRWH3Lkpfv7Z3jhVr7FnPZoxI8wrSiPTxYsUH39M0bgxxTVV6GrZnC9tuJ+xIpaaC4xJBDWXvEocBpv8Br7+agEe2xViB6sASGQiC7FQwMrIf/wvKnInMIGN2dhrllUHHSzBEjxA38f3kSCJSfyEn/AG3kCdOmMYw7ZsG3UFNCsTNmVk1qxZjIuL47Rp07ht2zb27duXBQsW5NGrZ7jdu3fnkCFDUtqPGjWKP/30E3ft2sX169eza9eujI+P59atW0O+mOyEEOTSpWSnTmSlSuS115LPPUfa+CKagZMn7W/2obrFQpArV5KjRkln3ZkzZaIzK0WjYEFry8lXX4VGxlAhzpyhqFM7o4NmjEM+9/nn0RYxKMTq1RS9e8k1NmxA8fLLFEeO2Ou7ZYs9RSQ+jmLDBopZsyjmzAlbynfyqtNx5UrSSdbtKHv1vTtVqQA/ekzjiy+B7w4Cj5RIp1RWrhQ264w/DOMwGi7/6rBo1FiDNaL2bf8jfmTqdOuggw/z4ajIRkpl6XbeTu3qI61cIDie46MmW1YmrBlY33vvPZYvX56xsbFs0KABV6UJx2jevDl7pslp/uSTT6a0LVGiBO+++27+5WfBq5ymjLhcZN++GZ1NDUOGCKc7JbM1XvHi1opI6dKhOVLas4e88cZU+a3S4Ke9ihWTx0neFBLDkMdNycn+yXP+PDllCvnSS+SECeTBg8GvMT3i4kV5pFC5Uurm2q0rxZo1oZ8siyCEoLj9NnvKSLFiGa1JXbtSnDoVWplcLoqa1/vvqJv2mj07pDIFwkVe5E3nr6WRnPE4yVvmWPfjc0ZeMU5gAs/xHG/gDZYRQLGM5TlG5+/8CI6wdGReyZVRkS0ro9LBZ2HeesvcgdPhIP/9178xX3rJ/IhE18lXXw1e9pMnZciwnQrF3q6yZcn581MdXA0jVTnp2NF/y80770iLjKZJpUjX5dWnT/iK8gmXiyIzOApFGTF0qP0N3lvYr8OQioNFHRqxa5eMwPnxR+u2P/4YuBLilqmbfX+3cHJBXODQKaVY6FTqZln0OGgkg7ozjcVByG/2oxi6cGg7LOIitmIrv6w3IPg3Q+ugbIcEJrAgC5rK5aCDXdgl4rJldZQykkVJTpZVbc02bIeDHDzYv3HPnzevpFu3bmj8XOzU6TFbV5cuqffhu+9kGPB77wV2PPXuu+bKlx8+1CkIQf7zD/nnn9LCIi5coPjpJ4r58yn27PF/wGyK2LgxuE0/rZIydqz3ObZvl0Xq0rbPnYviqad8Vs8VAwcGZxXRQNG+XThvnV+IQ4eYULMqt1cF/7tGo1MHD5TT+MIosMHGON6YWJP92C+kTqt2GM/xBGFahdfXI1TJ2dzs4i6+ylf5FJ/iG3yDh5mxbtR6rrclWxEWCalsOQGljG
RR1q+3t3GXKeP/2GfPymq58fGp4+TKRQ4YIJWVUFC+fGCKiPuySORpm4sXybx5redbt87+mHPmkDVqyH6xSOAb2jO8bOT23Khuv51ix47QLCKTIf75R1an/f57CovfRdGvX/CbvvuqWDHj+Dt3UhQu5N1x2NAp7riDwst5nujdOzi5YhwUzzwTsnsaCsSlSxSTJlHccgtFpYoUjRpRfPIJRZDfLjZzM5/kk2zHduzGbpzLuUyitb/MOq7zWwEBpU9LNVYLmU9LAhPYgz2oUaNBgzGMoU6dBg0+xaf4A3/gbM7mOq7jaq62JWNBFvQ5n6DgL/yFvdmb9/JeDuZgruf6kKwlK6OUkSzKihX2Nu3ChQOf4+xZ8rff5MYf6lsaF+e/AuK2pKTxew6aL76wZ4lJExhmyjvvpB6T6XByAe5iMnwcLRQuRLEztN/uoonYvJmiaRPPdeaKp3jySd8WiIYNQqOIuC+XZ9ZMcd+91krFzJkZ5Ro3LrjU9xoo/vknLPc5s+CiiwM5MOVoIq2FoyqrWmZxfZgPW1bL9fWYzMkeY+3lXo7iKPZiLz7Fp/gn/7StrHRjN9vJ7K7jdZYyGzR4J+/0OtdJnuTNvNnjnrn/7cquObrKs1JGsijHjlmHv+o62bRptCX1TsmS9hSQfPlS/1+rFvn556HNxzJ2rL0w4nvusR5r/37Po6fOmGXtV3DfvaFbTBQRW7dS5M/n2wJx910UTmfGfs2ahk4RyRUvx9y3j+LFFynatbXu4zAommX8JRHHjlHExAQui40kb1kds5wrDjpYmZWZQN8OVxVZMSBF5Hk+n6JouOjic3wuxarhuPoAZZ2d0zxtuobN3OzX3HZT6y9gxsyQgoK38BafR1I6dT7GzFsOItwoZSQL07Gj9Ub65ZfRltI7Q4ZYO8o2aSKTxO3dSx49GrgScu4cOWkSOXSoTGeftojep5/as4z07m09z4sveq7pVzRjEiyykxq67fDXzIy44w7rTKxjx1KMHEnx3HMUkydLP5rXX7e2QDgMihvrmLeLcVA8+ADFq69S6FezsdqtoVPU+/m+eOcd7+3dob7PPUdRrarna4UKSqtKNndMvsiLzMM8lpuyWVROIKnq06bXP8uzbMu2PtsaNNiYjU1rzPyP/wvYOuPr0ZzNvVpllnKpZV+DhldflZyAUkayMLt3k0WKeN/UdZ286y7/w1sjxcGDZKFC3mV3p8tfvDj4ed5/3zNKxj3f/ffLJG8nT9oLKbYjS/v2nn2Ooai9zfD33zOMJVwuip9/lmG/jW6maNeO4ssvKRIznxlX7Ntnf+N3GKmVePPmoXjvPfmvr/4OQ+YumTfP95i6JtsNHx6YFaOc72J2Yvp0igrlPdtXr0bx7bfydSFkTZ2ZMykWLfJ5HJXd+JpfW26sOnXezbt9jnE9r/d7o3cnFfuAHzCe8bb6/MJffMrQgz0Ccp719dCosRG916F4jI9ZKj46db7H97z2z+4oZSSLs2sX2aaNZ76NPHlk4rNMuG95sGGDzFnitj44HHIduXKRs2YFP/6kSb6VC8MgW7WSuVWeecZ3AjXDkKnp7XzR7djR85hmL8rZ2wzTeceKS5ekpcH9jd9tQdFAcW11inAkQAkCsWRJ4McZGijuuD2jMuL+uUljiguykJoYP97znqRVbr78kqJMaf/njnFQPPmk+fpcLooVK2R9oTVrsr3Vww6TOdnW5nwzb/baP4EJtiwraTfpSqxEF12cxmm2+1klSHuWz4ZUGQHB8vReFLIzO1v6pjjo4GiODsl7lNWwu387oMiUVK4MLFgA7N8PbNkCxMYCjRoBefJEWzJr6tQB9u4F5s0DFi8GnE6gbl2ge3egQIHgxk5KAoYM8f26ywUsXQosWQKMHQtcvAh8/DHgcABCALou5bn5ZmD+fEDTrOe84w5g7tzUn+fhfgzgRMRoTt+dSpUCatf2fK5vH2DxL/L/zqt9hZD/7toF3HUnuHETNF23FioS5M4dXP
+ff854g0mg/T3A9OnA11+DM2cCJ08At7YGcucCDh2Sb1br24C+fYEDB4DDh/2bV9PkGz1woHkzXQcaN/ZzUdmbCqhg2cYBByqjstfXdmEXLuGSrbn0q49JmAQBgSEw+cVOhxNOHMdxn68/hIcwHuNtj2eFBg3FUdzra+VRHjp0CAif/Z1w2rq3OZoIKUdBkRMtIwrvzJ9vzxfkgQdS+/zzj6we3LWrrBz822/++alcvCiPzdzWkWuwg1cQSydMjjDSF33bu9fekcfChSG6U8EjkpIoShQPzjri6ypdytMy5PZLadXSIyRVvPuuf+M6DJn99vvvo3jnsi5OOm35fPzKX732/5f/2rY01GbtlHF+4S9+WSkcdLAP+5iuJR/zhcwqolHzecyylVst++dhHl7ghWDemiyL3f07k3wFU4SCM2eA9euBbdtSv3BnN+x8SXY65RdqN9deC7z+OvDll8AHHwDNmtmziLjJkwf44Qcgb17AMID/UBUd8TWStFg4YaQ2dFw1ND72GPDkk56DfPed9aQOB/D11/YFCzNaTAzw7HPhGfzIEfmv+4Pqcsl/ly8HHn00td2ff9obLzYWuKEW8PwQYPsOaG3bhk7WABEQ+AW/4HE8jj7og7fxNk7hVFjnZHIyuG4duHIleOKE3/0NGHgH70CD98+qDh3t0A7N0dzr61VQBSVQwnQOHTqaoik2YiNaoAUA4AiO+CWnE070RE/TNlZy2MUBB8qhHHqgh9fXa6AG+qCPz3sGAK/hNeRF3pDIk22JkHIUFMoyYs6BA+RDD3k6bFaoQE6cGNpw2czA3LnWlhHDkAUGQ82BA+SwYTKxW/785O3X7uPaO4bReX1NiqpVKbp0oVi2zKvvgXjlFeu8GIZO0aVz6AUPAuFyUfR7LKNPRzgvXUvJZivuvMNenx49wrP+kyelM+6zz8qIHpsJ7fZyL2uyZsq3eAcdKVVgJ3Ji6OV0uWQEU1pLlsOg6NKZYu9ev8ebwzksxmIEkeIPYdBgH/bhFZqX9n6Nr1mGyv5Mz4rPP/JH21YKnTrbsI1lvpFu7GYrosYtaxEWSXmf0oYSX8truZvmKaCTmcxn+AxjGJMSjgyC+ZgvxzquulEOrDmEfftkbg9ftWAGDsxeCsmlS545SnxdV4MiMg3iyy/tHTEMHRr6uRMTpZL0ww8BZYcVQkhHzwcflLVi6t8kk46FSxkxdIoJE+TcnTvZO976+OPQ3jMhpPIRGyPliY1JPUrq2oXi0iWffS/wAiuyoulG+AW/CK2sPXp4v08OQyooASgkSUzit/yWb/JNfsJPeIT2QtWTmJQSmpvWsdO9Qb/IF732KcqitpSRruzKS/R9/90s53LbCo47lwkIlmIpdmZnDuAALuIi0xDi9JzkSX7KTzmWYzmTM23Jmd1RykgO4Z57rIvSLVsWbSlDy9ix5laRunUzX+izuHKFomAB6031v/9CN6fLRTFmDEWRwp5zNGtKsWFDcGNfvixzcYTDWhLjoLhatVF88YU9JS7EOV3EuHHmylK7tj6jbz7kh5aWAXcESUhkXbjQ+n7ef39I5rJLMpM5iZM8wnxzMzfrsA5ncZbXtPIf8SNT60VJluQ2brMtg6BIySTrz8Od2O0iQ1CsS6GUkZzAwYO+Q1fTOnN2zlyW/6ARghwxQjqUGkZq+DAgw3WPH/d/zMRE8quvyBdeIF95RdYICrnc06ebbxohzO4phKDo28f35p07F0WQixQHDsikZe4Nz21FMHT7+Ul8XVdjwEVCAkXFCr4Trxk6xaOPhuKWpa7rwgWKPLmtZfzzT6/9G7KhrYyeq7gqNPK2a2udmM7QKQ5HNunWLu5ieZandvWR1lLSkA15hmcy9BnHcR5HWm5rRSM24jEe81sGQcF3+S5zM7dfColGjR/xoxDcBYVSRnIAixZZH1cApJc6YyFHCNJLVnBbnDhBbtok0677w4EDskpwr17kE0+Qv/8e2JHUggVk0aLyXqVNoNakCR
nqv9/iyy9TI0ncV/78FK+9FtI8F2LFCmtrQv36wc8jBMXSpbJOTd++FG++SbFnD0X9+hkzq/pTE+a5VMVM7NhBUa6sfN6t5LgtMnffRXHF3IfB7zV99pk9602/fl77202HPp/zQyNvmTL27mkosg3axEknq7Kqz6Mqgwbbsq3Xvsd5nOM5no/xMT7DZ7iSK4MqnreKq/y2jmjU2JANA55TkYpSRnIAixfbU0aqVQufDD//TN55Z+oGft110nHWTmK2v/+Wx0xprTsNG5KRjG5dtkzK7s3C5HCQVavKjK6hRCQnU/z0E8Wnn8oMpCb+BwHP0f0he0comwIvLS8uX6Y4dEgqIA0byMRt7dtRLFggrQujR1OULJE61411KD7/nKJDB2u5csVTnDmTOtfFi/J+3XorRd0bKTp1kvfQFZqjDo91jRtnbWnQQHFPe6/9m7CJrQJt6+hHyWgzeatUtqeMeMkIHC7sJjDbzu3WgwVJoIX7KjIC3+JyAEoZyQFcuEDmzm0dWWKRiDJg3L4b3lK/Fy5MPv44+eyz8khl5UpPq8Xq1TIja/q+ui4Vg8mTfc8bSm6+2TO7avpL02Tq+XAgzp6lmDiRol8/iieeoPjxx5BtrqJ2Lb+OQ2yP63TK8vQ1r/dtcdFA0eZuecTidMridKdTC5uJBx6wd4wzY0ZI7oW/iKlT7VlGfBwPTeEUy2/d1Vk9qG/7HvI+8YS18lSwoKUFyUWXV18Of5nACbaOqXTqHM/xQc9nxU28yW9FRKfOxmwcdtlyAkoZySE8/bT5ZupwkAEEUFiycqU9q4ympSoc9erJoxghyCpVzAvqxcTICsbhZMcOe/LXrh36ucWMGdJvQ9fkxubeTCqU94h4EWfPUnz8McXzz8ujnO32vkmKRo3sKSN+hB0Jp1NGt9gZ19ApBvT3Po6dcN00ETX+cpZn+TpfZ0VWpE6d+Zmfj/Ex/sN/7K3zzBlpmbGS0Ydn+GVeZg3W8PltXKPG7/hdQGvzKu/27fIz5EvB0zWKF17w2f93/s72bJ8ibwVW4DiOCygS5HN+bnvDj1SK9OZs7rcyAoKf8tOwy5YTUMpIDuHKFbJ161SrQlolxDACqwXjcpGrVpE//CB9Oby5MnTpYh3F400xqlzZXhZVXSdfey34+2PGsmX25C7ivfhrwIj5862/df/2G8WECXJT1DXP0NL770+p6+JzjrFjrX00csVT+PE7JSZO9M8xNS6W4uTJjOP07WvvCGnePH9vLY/yKKuxWoZjEgcdjGMcf+JP9tb64ou+5XIYFC1bmPr4HOMxNmVTgtI/IoYxBGXeiZmc6fe6LOWdM0fe0/T1fTTIMOwk7xaPT/iJR16MtJaBuqzLc7T/+XDRxUqs5NeGP4dzQnULfPI237ZlqUn7WbmW16qw3BChlJEcRFISOW0a2aABmTevdMbs00f6ZPjLjBkyYVrazfiGG8if0v0NL1LEP0UkraWhfXtzq4i7XceOIbk9PtmyxZ7M114bujmFEBQ31LTeiB0mm7XDoLi1lelmKE6coMifz7dCYugUTz3ln9zVqvofJTMz48Yr/vzTul+RwgFVMr6bd/u0SOjUmZu5eYqnrNfrclE891xq5eC0G/0dt1OcPWtLnnVcx5f4Ep/n85zGaWHd4MS2bRQDB1KULUNRtIhMrf/VVz6P/rZxm+kmbdDgo7QfqbSGa2xv+Bo1FmZhJjL8VT9P8zQLsqCpH4870RkoI3fs5lRRWKOUEYXfvPWWbyuFppFpv6gWKhS4MlKqlPnREiCVla5dw7teIcgaNczDo3WdHDMmhHNu3erfZm52WURHiN9/p8iX11MhcX9bbtfWr81enDwZmIyfZjR1CyEoOnYwt9xMmeL3vd3JndaboND4Jt+0v+79+ylefpni0UelcrIuNE6nmYFBHGTp2BnLWFvKG0ku4iK/lJG5nBvmFaaymqtZgAW8JmGrxVp8kk9yBEdwNVeHzJdHIVHKiMIvjhwxt1Zomr
SGuPevu++2tm74uuwqMl72sZDzzTfmClGJEqSXk4aAEb/+GhpFJMZB0f0h6/kOH6YYNYqiZk2KihUp7rqL4ttv/XaUFSdOBCbnihXex0tIkMc1DiPVb0aDTAwX4Bs/iZOst0EB3u28M6DxsxvX8lpbisOP/NHWeJu52dZ48YznAi4I8+oycpzHOYZjeCNvZBVWYVu25ff8PmTJ5xTeUYXyFH4xdarcgn1BAqdOyXpvADB4cGptM38pVQpo3jy1rlx6dB0oXBjo1i2w8f3hvvuATz4BYmLkvA5HqlzlygHLlgFFioRwwlKlQjOO0wkcOGjZTCtVCtqLL0LbvBnanj3QFi6Eds890HQ/f/WLFAEqVbJfYVDXgerVgVtu8S5XXBy0jz8G9h8A3p8IjH4ZmPklcPgItEce8U+2qzjhBEw+w3JiIHn7loDGz26YlbwPpN31uB61Udu0YBwATMREtEEbW2OGCoLYju3Ygi1wwokiKIJbcAsaoAF0qG0wM6DeBQUAYOtW630mJkZWBAaAO+4ABg3yfx5dB3r1AmbOBMqXlz+nxTCA3LmB77+X/0aCPn1kEdk33gAefBDo3Rv49ltg505Z8TeUaNWrA3XrBj+QYQAlQlOV1A6apgGPP2Gvsa7L66OPZT+zcUuVgta/P7QhQ6B17QotV66AZayP+rDYB2E4gfrzD4Onwls9NyvQGI3hgI9vBFcxYOBG3GhrPA0a3sAb0K4+0uOAAzfgBjyAB/yS8wIuYBd2BVzxWECgH/qhKZpiNmZjMzZjDdZgBEagCqpgOZYHNK4ixETIUhMU6pgm/Dz6qHV0jGGQr7+e2kcI8pNPyDx57B27GAZZpgzpTjlx5ozMVVK5MhkXRxYvLkOVrxZszbaIn38OzVHN999HVu7kZIp27eSxipkja90bI5pgKy03bs9LI9n3EYHuBPeWB8Xs2VGRLzOxjutMj1McdLALu/g97nf8jkVYJGUMt29Ga7bmCZ6wNcYGbmAHdsiQxr01W3MZl/klz+t83ffngTrzMA8P8ZDf61TYQ/mMKPzi22/tKRRbtmTsm5AgE6vlz5+xfdq6MdWqkTt3Rn5tmRExcWLgSojDoLjpJopA8+8HI3dyMsU771BUrpQqT9VrKAYPlpEbGzdGXKa0bO52AwucAR1JGZUQEJzY/6rM06dHVc7MwmiOTtmU094vgwYrsRKP8mhA4yYykXM4hy/xJb7O17mZm233fZ/v+4zycde5+ZJf2pbDrRj5ehg0OJIjA1qnwhq7+7dGmnkKZA7Onz+PAgUK4Ny5c8ifP3+0xcmWOJ3ySGLfPvn/9BgG0KoV8PPPvsdITAS2bJH9S5cG5s8HNm8G4uKAu+8Gbrst47FMToY//QTcdy+QkJD6pGFIPe6dd4Bjx4Exr8nzM/dxh9MJNG4CzJsHrWjRaIgNAKDbiUjTgMKFLY9jIgX79sF/v0/FSyNcmNMZcMbI5xusAka8CrT94WrDNWuh3XRT1OTMTMzFXLyO17Ee6wEA+ZAPj+ARDMdwFEVwnzFedeKx8iNxsxzL0RzNLdvFIQ6HcAhFYO7Q9Tt+RzM0sxzvOlyHbdhmS0aFf9jdv5Uyokhh506gZUvg8GH5MymVByGA2rWBxYuB9PsfCaxeLZ1A//0XKFAAuP9+4IEHgDx5wi8zCWzcCBw7Jl0o6tSx72OZGeC5c8CMGfLmOpOBuvWAvn2hlSsnXz94EJg2DdizB8ifH+jUCWjUKNNs/pkNrl8P1JdKxrn8wOHSQIFzQOkjVxvoOlCzJrBho7qH6TiBE7iCKyiBEohDXMDjEMQszMK7eBdrsAYaNNyCW/AknsR9uM9UMbkH9+AH/AAXzL3j3f4pz+AZ03Y/4SfciTstZS6HctiP/ZbtFP5je/8Ov5EmeNQxjXf275d1X268UebL6NGD9FHV3DZnz5ITJsjU7eXLk02byoRq3spaOJ1kz56pxzHuEGCALFnS+5FOKJk3Tx79pD0WuvZaeeSkyL
mIxwd7P94ydJl1dtWqaIuYbXHRxZ7smeHox+03MpiDfebxEBQpmWqtHho1dmInS3l2cZflWAaNFF+UX/gLjzG8dSj2ci+XcAlXczWTmRzWuTIDymckm/P992RsrGeuD7dC8NRT3lO4uzlzhpwzh5w6lVyxwrytGcOH+04YZhhSIQl1xVtSyjtggO98KIDMJKvImYiVK33Xlnn0UdPMtYrg+ISfWG7+s+i9RoWTTluKiFvR6Up7WRFbsEWGdPfpH3GMS/m/23H3MA+H8tbwb/7N1mztMW9JluQ7fCdbJ1pTykg2Zvt2WUjOLHPoBx9k7JeUJBWV+HjPttWqkUuX+p5v3Tryvfdk9Vp3xfkLF6yjaDSN/Oij0K5dCLJfP2tH29y5w6MIZQUSEsi9e8NfaJC8mk31jz9kHZ333qNIYw4TTqeswzNkCMXQoRQLFwZdlVgcO0bx6qsUTZtQNKhP0b8/hftDSVL89x9FfJy5A/AnnwQlg8I7goLX8lrTFPM6dTZkQ59jVGM123VkPqK9Py6buIm5mdtSIUn7cNDBciwXsrTwG7iBeZjHpwxP8smQzJMZUcpINmbQIOsw3PLlZcE7N0KQHTp4V2B0XY6XXiH59195XONWLNx9mzSRIb1WCoGmka1ahXbtdqN+NE3KmJM4flxGNeXNm3of6tcn5waQdVscOiQVjJEjKT791GstFvH336l1dgw9Ndy3ZQuK7+dTlCsrf46NSc2wWqkixV9/BbQ+8dNPFHlye6aRd4/7wgsUFy7IuixW0UgVygetFCkycoInbG/2vmrSTOAEW/3zMz8v0P63jQ3cwEZs5DGG1ZGQgw4+xsdCcm8asIGlMrSWa0MyV2ZDKSPZmJIl7W3IaQvlLVli3lbXpd+J24K9b59M/+4t5bvDYT+le716oV17y5bmFiH3FRNDPvFEaOfOzBw+LAscpn+/8mkX+AgmcXWjwRTPPEPxyy/mBfYSEyn6PSY3fEOXioSuyWOPceNS+oodOygK5E+tdZM+9FjXvNeecRgy5fuuXX6tT/z3n5TBrJ7Nba3th0evXx/M7c5x2DlGOMqjtpWRK/TihEbyCq+wAitY9g/UkrCFWziHczif83kDbzAtngfK1PX+KD3e2MRNlutx0MHe7B3UPJkVlQ4+G3Plir12ixcDs2fLaJePP/adfh2QETPbtgFr18qfx4wBzp3znvLd6ZSvWeFwANWq2ZPVLitWyG3WCiGA+PjQzp2Zefxx4NAhz/erE+bgIEvhY/RF7T8/gpgwAbj9NuCGmuDu3d4HeuQRGRolhLySk+UNT0gAnv8fUKI4WLIEcEsj4OJF7x8Ql0v2EV7SiLtcwKVLwPjx/i3w/fflB8/bmIAMoVqyxP5458/7N38OZAmWoC3aIg5xiEEM6qIupmKqTLvvhWIohjIoYzqmBg3X4lrEw/svZyxikYQkyzEWYZG9RaTjelyPTuiEdmiHHdhhmeo+AQnYh30BzeVmK7ZatnHCiU3YFNQ8WZ4IKUdBoSwjnjRsaF31Nv0VF2ev3RdfSJ+DXLnsHYVYtfn8c/KZZ8jq1clKlchOneRxUKA+hDEx9tccpSSgEefQoYyfh9vwE53Q6ISXTKkxDnmE4k6FexWxcWNoMsPauXLn8itpmyhVMrTzZ/c0v0Hizlqa9mjBbUVox3ZMYpLPfmbWBo0aP+SHPuf1x7pymZeDWmM+5rM1z04Gl6nxa35ta57GbBzUPJkVZRnJxgwY4PsLoi8SE+21y5dP5rKyY31xWyi8pWvQNKBRI+Dhh2X+ru3bZaqMefNk8rQBA7xbOJKTZTG+ceOAiRNlEra0NGxoL3HajTcCjRtbt8sObNiQ8fPwKoaB0KBrXm6y0ymTyUye7Pn89Onm5rNQcuWKtKzY5dKl0M3dqhW0ihVNm5AEFywAb7sNjI8D42LBZk3BuXNBO6a5LMxyLMcQDAEAj3wfbivCAizAOIzz2vdJPIlmaOaz+Fwe5EEykpGABK+vGz
Bsy+lPW2+0QzvT2jwaNFyDa1AFVYKapwVaIBaxpm106GiP9kHNk+WJiGoUJMoy4klSkvSd8Nc6YnXlzUtevEieP2/P6uG2jqT3U4iPl/lHDMN8nAkTPNf17beyPg2Q2lfTyC5dUiNjvvrKWqZChcgDByL+tkSNhQs9138NdtizDlx3ncc4onMnc5+MUF7xcf5ZRurVDY1sDsPSX0QIQfHss6nt0/bVQPHoI9k6PPh+3k8HHabf4ouzuE/rSAIT+DJfZn7m92oZ0aixERvxIi9m6CsoeB2vs4zIuZk3B73ONVxjGbnzMT8Oeh6S7M/+Pi1GOnXmZV7bdXuyGsqBNZtz+bKMqkkfpmtXifCmVLzwQur4d93l3XnV1/XCC+TkyeTs2TKPSf/+1hE/ZcvKxGkk+cMPnhE7aS/DIJs1k22FkEX9fI15zTXkiez5O+2Tkyc9j68a43d7G3PhQh7jiH79UqNTwnnFOCh69/JrjWLSpNDM/bF1OKiYO9d6nE8/9Uv+rITd44st9J3VcAd3mEaPGDR8RqpM4iTLuWczNIUOp3Iqdeoeypf7/4/zca+Ouxd5kSu4gr/xN57maS+jZuQyL6fkGEl7XwwazM3cfhf/y0ooZSSHcPYsuWAB+c03csMO1CpSrhx5JE1I/e+/27e8OBxk376ecpUoYa/vxo1Swaha1VqRcmdWFUIqPjVqpL5WpoysKJyQELl7n5no1StVeQzYMrJ8eWg2fHeIr7fKvoYuw3P/+cev9YmEBIpGN3uP3rErz/PP25urSWNzK4yuUdS4LttaR/Iwjy1lZCM3+hzjKT5lK9HYGZ7J0FdQsDd7p1gN0m7cIPgEnwhpkrCt3Mr+7M9KrMRyLMeO7Mhf+WuGOS7zMp/m08zLvCkyxTKWvdjLllUjmcmcxVlswRYsxVK8htdwOIdzP/eHbC2ZEaWM5CDWriUffNDaEmFmPTEM6WSa9hZ/+aU9hcThkNaKtBQoYE+WVatkCnurdoZBtm3rOYcQ5KlTMrlXTk8bcfasLAvgfr9WoT6TYbKhGjrF+PEeYwghKFo0D2zDT3vliqd44w2KIoXlz7Ex8tJAUawYxYoVAa1RnD9P0bOn/8c1tWtRfPGFLeVBJCXZHzebmuDsZCzNy7y8xEs+x7iO19lSaBZxkdf+goKzOZuN2ZgGDTroYAu24DzOi0q20gQmsCmbej1qMWiwKqvyFE9FXK6sgFJGcgjTpknlwq4iYnbpOjl2rOf4M2bY6ztpkme/Ro2sFRmHQx6pzJ5tb44aNSJ3X7MiFy+Sb7who5Za42c6NY1OzUc0TflyFGfOZBhDnDkjk5a52wXip2HoFH/9RXHlCsWMGfL4p39/ipkzKUJguhIff2xPjmuvpTh61C8LhkhIsL/OI6HJzpnZsIr+MGjwCT5hOkY1VrOljHzP7y3lEVcf0eQ9vmfqX2LQ4NN8OqoyZlaUMpID2Lo19E6sFSt6zuF0St8OX/4jmkbmyyc3wrR8/rm1ItKli2z788/2rDnNmgV+r1wu6eh5771SqbnlFulA6yWxaLYgOZkUc76iyJ8v1Trh9ge5oSbF7t0++wohZH2XQYMoHuhG0aypf8pIjIOiR/ewrc1WCHKMg6Jbt8DGv+5a70dMaa/SpbJtFldBwYf5MEFk2IANGryBN/AszX9xurO7pROsRo0HmDU8za3S3IMyK2wCc+g5sQlKGckBDBhgzyLijyOqpmXMAfLHHzLvSPpxDEPO/8MPGWVLTibvvNO7smQYZLFisn4KSSYmymyvVnJ9HKBje0IC2aaN571wO8sWK+aZqTa7IS5epJg8meKJJyj+9z+KpUsD8nUQU6dSVK5kXyHJkzsMq0kjj53ommXLAhv7gw/MlRFDp3j11RCvKHPhoovv831WYqWUzbYAC/B//B/P0frv8CqusrSutGVb7uVebud2r74jmQUXXbasPCC4i/5lFs4JKGUkB1Cpkj0Fo0kT8rPP7LXNk8f7XFu2kF27pi
o/uk62b0+uXu1bvoQE8tlnPQvqaRrZrh2Z/ov522+bK1PlygVe+K5/f98WJMOQzrbpLTuKjAiXi2LePPtHNWF08BTTp/lWRnSNonv3gOcXSUkUd93lfXxDlw6uV7ynM89uuOjiHu7hDu7w+1v/UA71aV3JzdwZLCfVWC1kUTKhRFBY1rFxPw7yYLTFzXQoZSQH4M7JYXUtXizbW2VudThkVIYZFy6Qe/b4d7xx8aKUYeFCcr8Px3EhyOHDUxWEtPlLKlUidwaYBPHECXtZW7NxpGZIERcvWlfF1TWKa64JnwxffCGVAl91cYYO8SuHidc5kpJkdeC0mV+LFpEF+S4Hl/kzpyAoOJ3TPZxZ4xjHXMxluqEP53Dbc7jo4kIu5GAOZl/25Xt8LyxWlnZsZ3rspFFjDdaIum9LZkQpI9mYy5fJ+++3p4g4HOTRo7Lf/PnmxyAxMdE/svjvP3LIEPK++8iHHpIVZ5O851ayxRdf2DuaatMmdGvI7ohHHzHPR6JrFG++GZ65d+wwj/bRNYoBA0I3X3Iyxc6dFNu3UwTzQYwQJ3mS7/JdPsNn+DJf5nZuj7ZIFBTcwz3cxm1swRaWvhcg+Bt/sxx3J3eyOqsTlBV4YxhDjRrjGc9pnBbSNSzjMkuZQz1ndkEpI9mYDh3sh9y6nUTdvPuu7JvW/0PXZfK0+fOjs55w8skn9pS2YJxjcxriwAGKkiV8Wybq1KYI07mXeOop69DjXPFeI4WyM4KCr/JVxjCGOnXGMCYlPLcjO3rNdhppdnCHraMOjRo7sZPpWKd5mmVYxtRa8R2/C6n87/JdgvCaIO0ZPqOsIj5QtWmyKVu2AF9/bV2bxjCAUqWAt9/2fH7wYGDHDuDpp4GmTYGWLYHRo4G9e4F27cImdtSoXt26jcMB1KgRflmyC1rZssCfq2SRobQ4HEDXbsCvy6DlyROeyX/60Xul4LQkJMhS1TmIN/EmhmM4kpEMAYFkJKfUlZmHeeiMziAYVRnXYI2tdgSxHMtN20zBFBzBEZ8VhDVoGI7hIV3zYAzGeqzHQ3gIZVAGJVES7dEeS7EU4zEeGrwU6VLYJkJVsRShYuZM+Tff6f13MIV69WTBuZIlM75WpYosRJcTaNoUuOYaYPdu3wqc0wn07RtZucINz58HzpwBihSBljdvyMfXKlYEfvoZ3LULWL9ear9NmkArUSLkc3lg9cH3t1024CIu4iW85PN1F1xYiIX4A3+gMaJXPdJX8bxAmIZpKYX7vEEQW7AF27AN1+P6kM1bF3UxFVNDNp4iFWUZyQS4XMDmzcC6dXL/MOPECevxHA6gWTPvikh2hgkJ4Gefgb0eBrs/BE6YAJw9g48/lpV+fVX77d9fVvnNDnDtWvDee4DChYBKFYHChcAHuoFbt4ZlPq1KFWidO0Pr0CH8iggA3HyzdWVhTQPq1Am/LJmE+ZiPSzCvauyAA9MxPUISeacxGtuyHmjQ0AqtTNscwzFbcx7HcVvtFNFHKSNRRAh5jFK+PFCrFlC/PlCiBNCzp6zw7o3SpaWXg9W4pUuHXt7MDFevBiqUB3r2AL74Apg1C3j6KaBMabQ8Pgu//JLxKKZAAeDVV4H334+OzKGGP/4INL4F+OGHVDOQ0wnMnQs0qA/++Wd0BQwF/QeYWz0cDqB9e2hlykROpihzBEdgwDBt44QTR3AkQhJ5pzzKoz3aWyokBDEIg0zblEZpW4pNaeSwP4RZmICUkYkTJ6JixYqIj49Hw4YNsWaNvbPAWbNmQdM03HvvvYFMm60ggT59pO9GWsUjOVkexdSvDxw6lLFf9+7WR+a6DnTrFlp5MxuktBIdOQK4du0FbmsNnDolX3Q65U0ipf/Agw+guViKv/+W1qevvgJ+/BE4ehQYNsy3xSQrwcuXga5d5LrTf0CcTiApCejcCbT68GRytJtvBp4fcvWHdJuRwwEULw68+17kBUsH9+8Hhw0Da9cCr70WfPBBcOXKsMxVHMVT/EN84Y
ADxVE8LPP7wyf4BJVR2bTNa3gNt+AW0zaP4BHT13XoqId6qA4bTmOKzIG/nrGzZs1ibGwsp0yZwq1bt7JPnz4sWLAgjx07Ztpvz549LFOmDJs2bcp77rnHrzmzYzTNokXm0R2GkTESxk2fPuYVbm0WJ82SeKvY+1GeJ+k0TCIsHAZF8+wdLiMmT7aXjCwbhEwJISimTaO4trpnBM2jj1AcOhRt8Si++44iLtYz6scdCv344JAngzvHc4xnvGWUyq/8NaTzBsoZnuEIjvCofguCdVmXP9BLOmcvnOd5VmEVr9E02tXHz/w5zCtR2CFsob0NGjTgwIEDU352uVwsXbo0x4wZ47OP0+nkLbfcwk8//ZQ9e/ZUyghlFlKrVO4Oh6xIm57kZHLgQBmSq+syP4iuy/ZDhmTfCrZCkH37puYGcd+n4yhibyM+fDjaSwgbok8f89wfGmR9muH2E0pldoQQFLt3U2zdSnH+fLTFIUmKf/+V99ksnfz774d83tEc7VMJMWiwBVtkytDTy7zM8zzPRCb63fcAD7A+66eE2LqzpBZkQX7Nr8MgrSIQ7O7ffkXTJCUlYf369Rg6dGjKc7quo3Xr1vjT5Dx69OjRKF68OB555BH8/vvvlvMkJiYiMTEx5efz58/7I2aWYN06a4d/pxPYtk1antPicEg/h6FDgTlzgOPHpY9Ily4Z22YXnE5g/Hjgk0/kz2n9ZgrirL1BTp+W8c7ZETtnTWT2OJO6iqZpQKVK0RbDk/ffT/0+4YtxY8H+/aGF8L0YgRG4hEt4A29AgwYdOgjCCSduxa2YgzmZMvQ0F3IF3LcsymL11cdCLEQiEnEDbkBHdEQ84kMopSfu+xqDmLDNkRPxSxk5efIkXC4XSqTzmi9RogT+/fdfr31WrFiByZMnY+PGjbbnGTNmDEaNGuWPaFmO2Fh77WJMPu9lygBPPRUaeULBqVPAvHny3zJlgHvvBYKNKiWBiROB116T/iHeOIJSKIeD5uNoOrTsHF7UogXwycfmbZxOoHnziIiTY/nma+tvGQcOyIRBtWqFbFoNGl7H6xiEQZiBGdiLvSiEQuiMzqiHeiGbJ7OhQcPNVx/hZhVW4U28ie/wHZKRjLIoiwEYgEEYhHzIF/b5szth/Zp04cIFdO/eHZMmTULRokVt9xs6dCjOnTuXch04cCCMUkaHNm2sIxQLFJD5QjI7Lhfwv/9J60yfPsDw4dLRtmRJGS1kFf1jxpAhMlGbL0UEACbjUbhM8vcl08B8tkPnfkWwfn1gcjidwDffAPfdJ6NLO3QA5s+3diaOGPffL81iho+oCsMAqlXLmKhMEVquXAltOz8pi7IYhmH4BJ9gLMZma0UkknyGz3ALbsG3+BbJSAYAHMRBjMAINEIjnMbpKEuYDfDn7CcxMZGGYXDevHkez/fo0YPt27fP0H7Dhg0EQMMwUi5N06hpGg3D4H///Wdr3uzoM7J1q2dKdm/1UrLK8f6AAeYOtePHBzbupk32UrkXxknuQ1kmIqPPRBIMXkQu1sJGOhzSr8ZfH84TJ8i6dVMdi9P+e/PNZGbJPC7+/JMib56MviMOg6JIYYqtW6MtolfEtm0U8+dT/PorRaL/vgO25hCCYvVq6fg6Zw7FqVPhmadRI9/VhNO+H8ePh2X+9CQzmbM4i83YjEVZlOVZnk/xKf5He397FeRu7k5Jre/LJ6cbu0VbzExLWB1YBw0alPKzy+VimTJlvDqwXrlyhZs3b/a47rnnHrZq1YqbN29mos0/PNlRGSHJzz5LdTxNG0UDkG3bBlcgLhQ4nbLI3qlT0nnUGzt2WCsL8fFkIG/dwIHWTr7uqwL2cDVukgqIZjBRkxvyHpRnI6z0UPJy57avQAhBNm3qW3E0DPKOO/xfW7gQO3dS9O9PkSe33PgKFqB4+mmKAweiLVoGxOrVFI1u9tyoixahGDuWIoRe2GLlSoobanrOExdLMXAAxZUrIZuHJMVnn5krIjEOis7mdV
dCRQIT2IqtMmyeOnXGMc525EpO53/8n6ky4nagPcIj0RY1UxI2ZWTWrFmMi4vjtGnTuG3bNvbt25cFCxbk0aulYbt3784hQ4b47K+iaTzZuJHs3ZssWpTMl49s3FhWmg2yAnpQXLpEjh5NliiRuunWri2Vp/RKybBh5hYetwIwaZL/cjRpYk8RSTvPa/eu5vMYw+F4mXdgETW4vLZ75x17Mvz5p725N23yf31pEVeuUMyaRfHyyxRvv02xe3dw4wlBkZAQ8jDSUCH++IMiPs530btBA60HsTPPqlVyHm/WCkOnuPuu0Co+SUkUrVt7n89todq1K2TzmdGarU030DjG8QAzn5Ka2ajHepZh0yD4Db+JtqiZkrBW7X3vvfdYvnx5xsbGskGDBly1alXKa82bN2fPnj199lXKSObm4kWyQYOMVYHdPz/xhKdC8tBD1spITAz5wgv+y3LbbebHP+kVjNtvJ8eOta5orOtkJy9fTleulOupUUMey7z4Itmvn7V1xjDIkSMDveOk+PxzikIFU745uwydLk3jphqd+cG4C8xuEclCCGmpsDrOWLs2+LkaNrSeJ8S5V8SVKxRPPCFzn6Sd5/bbKXbuDOlcvpjKqZabp06dLzCAX0wTBAUP8iB3c3dA4bqZkTqsY0sZmcu50RY1UxJWZSTSKGUkcjz7rLVysXBhavvBg603a10n33rLf1nee8+eMlKlCvn++/JY6803/VdGhCAHDZKvpT8yc19WytbTTwd2v8VXX/ncJJM0g4u1lnToLg4eLPPLRAuxfTvFa69RPP88xUcfUZw9G/hYq1db54SJcVA8+ohnPyEoFi6kuKc9RZXKFLVqUbz0ks/8MWLrVut5HAZF2zYBr8V0nefOSXm//TZi1hCSdNHFEixhawOtzdohmVNQcBInsTqrp4xdgAX4P/6PZxn4ZyUzMIiDvCZXS/vQqHEv90Zb1EyJUkYUfnP5Mpk/v7UV4K67UvusXGmtLOg6GYjLwtmzZKFCvpULTSO7d/fsY+dYJf0xzYQJ/h0HeRvvgw/8X59wOinKl7PcMO/EQmqazLwbacSFCxQdO6Zu3O6EXrniKd5+O7Axp02zl6Tu5oapfZKSKDqlkSOtMpE3D8WyZRnnmT/f3jzXVAn09kSVy7zMn/kz53EetzLVOfkP/mFLEQHB6qwetByCggM4IGVTTju+QYPX83qe4Zmg54kWW7nV9B466GBbto22mJkWpYwo/GbDBnubb758qX2EIFu08G090HXykUd8TumTbdvIn34ip02TClJahcQ9V/Pm8lgpLUJI/xZf8qR3YHU6yVKlglNG4uMDi6gRS5dabpSJMDgHHVLm2rHD/3kCRQhBcVtr334dGijeeYfivfcoalwnFZX8+Sgefphi40bf486eba0g6BrFra1S+wwb5jurqaFLheSq31pKHxv3V2igqFc3bPcwHDjp5CiOYgEW8NgUG7Ih13AN53GebWUkFFEgC7nQdA6DBgcyND5A0WI8x9N9tJV+bWVYRvnemKCUEYXfbNxob/PNn9+z3+nTqc6mDofc8N3HHZ06kQkJ9mVYupSsV89zvvLlyQ4dpC9HuXJky5bkrFm+o43+/ZcsUiTj8ZG30N5164JTRABpWQkEMX26rc1yDeqlyD9iRGBzBSTfkiX2jjl0zVNRiHHIa84c7+OeOiWjWayUkXffle0vXqTIl9e8vaFTvPyy5zwJCdJh1Krf66+H/V6GCkHBh/lwBguEe6OMZzw/5ae2lZHlXB60THfzbstok9zMzQu8EII7ED2+43dsxEYpa8rDPBzEQTzKo9adczBKGVH4TUICWbCg+cbrcMi6OukRQioSjz5K3nef9MFYt86/+RculBYNX8cy/vid7N8v/Vny5Em1pnTqlFGm5cvtKRx58pCFC3s+V7RoYFFCbsT331tu9snQ+QPuTLn3vXoFPp/f8vXsYV3vxkyZiI2h2LvX+9gDB/h2LHUYFIULpfiliB9/DN
jCIV5/3VyRKlSQ4sSJsN7HUPI7fzfd9HXqvJE3siIrWioiLdkyJPVqCrGQLcVnDdeE4A5EnxM8wX3cxwT68S0rB6OUEUVADBtm7QC6eHHo501OlqHEZg6ruk4ePOj/uCdP+rbOHD5s7SRrGDLXSEIC+f33smrwwoXB54ERV67IPCAWm+yD+CxFGXn22eDm9Eu+W1sFpoik3ex9hPmLK1dkWK27XVpLRaGCFGtSNy4xb569+a7N6P8gXC6p+LgtNm5FSdcoChf2mCcr8CAftHSmBMGxHGv6enEW5yVeColMRVjEljKynutDMp8ia2F3/84+VbMUIeGFF4CmTQFNk5cbd5bxESOAW29NfT4xETh8GLh4Mbh5f/gBOHZMbv++0DRgyhT/xnU4gCJFgLg476+XKgW0a2deP87lAvr3l2O0bQv07g3cdZd53SA7aPHxwPARPl9PpgM7WBVz0RGATEn/4IPBzekXJUr4Ti9vB5cL+OlHry9p8fHA/O+Bb78Dbr8DqFwZqHMj8MqrwPYd0OrXT2183XXWczkcwA0Za71oug7t/YnA6jVAz55Aw4YyJf47E4Dduz3nyQJswRY4YVH7BsAarEFRpJbg0KHDgHwv66M+NmIjciN3SGRqjdZwWJQ5K4iCuB7Xh2Q+RTYlQspRUCjLSGRJSCDffpusXDnVOtC0KZm2CsDevWTfvtJ50+0Y2qaNjK4JhFdftRci3KVLKFboyYIF5vPGxclMtOFACEExdCiFrjFZN5is6Slp7TejBstif4p1pm2EHfbFggXBWUY0UNxQMzSyNG1i7kirgSIcJrtMRmM2tmWFSO9oCYJFWIQLuCAkRzNpWcmVlrIMZxapbaEIOcoykoU4eBB46SWgUydZYO7LL4GkpOjJExcHPPkk8N9/wKVL0vqxfLmswgsA//wD3HijtFIkJMjnSODHH4FmzYC5cwObUwjzNroOxIehMvjUqeaWkaQk4JNPQj8vAGiaBu2114D/dgHPD8W6Cp0wAz3RTl+IG/XNOOooB0BaYr78Mjwy+OTOO4H6DQK3jjgc0hIRCt57X7753mTRNODBh3JEEcD7cB90G/VNBTL+Mp3FWbyDd6BB89IjcG7BLRiHcQCQYn0BZEVdDRpaoRVewAshnVOR/dBIM8N45uD8+fMoUKAAzp07h/z580dbnJAyfjzw/PPy76kQclN0uYAyZeTmXrNmtCX0hJSKyJYt3ivWahoQGwscOiSPR+zy77/2rPFffQV07Gh/XCtOnwaKFbNWhMqWlZXfI8G2bcD06fL4q1gxeTQTrerNPHUKuP8+4PffU8tMO62PCVJY/xe0G2+UY5HAsmXAkiXyw3PTTf9v787Do6iyh49/q7uzgIQkCCQBg0hkV2Akkh8gqGMEhEEBFQRkGzcEEYjKMjriO6hEDIgsgoM6biCLso0iogFmANlEERRk32QIGJYkgiTprvv+USQQSG9Zuro758PTD6Ryu+rkGlMnt+49F+69F83D511q+3YY/jT85z+XDkZGwoiR8MILaKV5pBQgTnOaBBLIJrvYhMMTP/MzTWhSqjgUim/5ljnMIZNMalGLRjTi3/yblazEgYPGNGYYw3iERwihlM80RcDy+P7ti2Ga0grWxzQffuh60mSNGsbkS3+ycaP7lScWS8l26u3UyfWGdHXqGJNGT50yzt+tm1L33WeUgC/pgoidOz1bTWO1luz8/k4/ftzYD+f29kpv29bYVG/37qJtdN3YS2bkSKUntXJe7+PK12U18vXdu5XetMmliaShIca/Y2oqfdUq72Les8d4hJSeXuYb3QWCDWqDilSRRR7FeDKpteCRyWRVgnLIl8lSWeoudVfhdS3KUnj9XqqX+kP9oRzKu/1+dqqdKkWlqPvUfaqv6qsWqUUqX5lYcliUGVlN4+ccDqVuuMH9Tf2118yOtKjJk92vttE0pXr08P7cmZlGwbKCr/3yfoiJMRKHpUuVqlTJOKZpxstiUSo0VCknZS
1cOn7cs2SkWjXvz+3v9KVLr95ErqBuiJN11Hq9G9wnIVaL0j94/9J7MjKUHhtT/DJhq8WoOeLtOvAK7qQ6qSaoCeoWdYtqoBqo+9R96jZ1W7H1Ry7/Y1VWlapKV1elg+rgtK6IRVnUIOX5+nOHcqhhaliRhKrg3A1UAymxHgRkzoif++EHOHjQdRtdh48+8k08nnI1t+JyWgkeS197LWzcaMxFSUqCWrWgWTN47TXj0cX583D//cY8FV2/lCroOuTnQ+/esGGD59fTdVi5EqpUcd3OZjPm8gQTtWMHPHC/MSHm8mdUDofRqc+koJYsKfqevDz337RgnO+u5EsfT58OmZnFP97RdeOa/++lEn0dFVUNajCGMWxlK7vZzRKW0IlObueDOHDQjKtXHXlqC1sKH8MUR0fnfd7nMIc9Ot/LvMw0pgEUrhIqOPcBDnAXd5FLbonjFYFDkhGTnD5dtu18pV079/MrwJjIWhLh4TBoEHz7rTHv5Mcf4dlnoVo1eP11o01xs5yUMhKg1FTPrqPr8PDDxmrPc+ect7NYjMm1Tz/t/dfi16a8YfztbMqYxQKvvlr0mNXqeTZ6+Vrq994tfoJRAYcDvvgClZnp2blFsf7KX10mIxYsxBNPBzqU+BrzmOd2Ga+GxkIWuj3XOc7xOq87/bwdO/vZz6eUYEa8CDiSjJikTh33bSwWuP768o/FG7fcAq1aXZrLeCVNg0qVoH//sr2u3Q6ffeZ67qTdDp9/boyguDNr1qXVKa6mcEdHw9dfG2UwgsrCha47U9fhuy2oY8cKD2lWKyTf7Xp1jcUCLVqgVb9U44KTJ93Ho5Rn7YRTccTxJm8CXJWUWC/++ZAPi6x48dZp3P92ZMXqUbuVrOR3XBcosmBhLnM9jk8ELklGTNKwIdx6q+tfNHUdHnvMdzF56pNPoHr1q+9JNpvxWrgQoqLK9prnz3u2iEPX3RdgUwreeMP1oyRNg4QEYwVN69bexRoQPMnYAA4eRB06hMq9OFT+7LOuRzl0HZ4bVfRYtWqeXevyBEaUyFCGspCFNKbo0rT2tGcta7mDO0p1/njiUbhegGnHTjzxbs91hjNu2+jonOKUx/GJwCXJiInS0oxkpLibotUKLVoY8yD8Tb16xpyXlJRLSUdICPTqBVu2QOfOZX/NKlWMVZzuVK5sjGa48ttvRg0VVyMiSsH+/c5HgAJe3bru22gatG8H9W6AGtVRI0YY35Rpk4zPX945Bf8eMxYeeqjoeQYMdD2aYrVCcjJazZqexy+ceoAH+Imf2MlO1rGOwxxmFatIovQ1XwYwwOl8kQIhhNCLXm7PdT3uh31t2KhHsA1LiuJIMmKi9u2NMuhxccbHlz+S79jRKMdQHkW+ykJsLEycaMxpycmBP/6Ajz+G5s3L53oWCzz6qOt7ms1mzDdxV7bC1S/2V/L/KjwlNPhJ9/M/Lv/if/8dZkyHVrcahU82bYY+fY2COHFx0L0HrPkP2quvol2ZXQ8bBhERzguWAbw4rnRfj4+o/HyUJ5OmTKah0ZjGtKUtdfDgmbCHEkhgCENczk15kRephvvRsDu4g+u4zmUbO3Ye5VGv4xQByEere0olGJf2Xs5uN0qSp6YqNWWKUleUeRAXHT+uVGxs8WXjrVZjF90jR9yfx+FQqnZt98uqbyqbSuZ+Sc/OVvpNTd2XWL/yFWJT+kPe1+TXf/hB6dfVNs4RGmKcx6IpPaKK0pcsKYevsOzoFy4ofepUpTdscGk5cseOSl+50uzQTGFXdvWselaFqBClKU3ZlE1pSlOVVCU1QU3wqtz8p+pTlzVRuqguZV6+XviWp/dvqcAapI4fh9mzYelSY9SiRQsYPPjSJniB6sAB4xfzjRsvbean60aF0rlzoUEDz87z2mvwt7+5Xhk0e7YxGhOs1KlTMPgJWLSo6CiIprkeErJa4eivaLGx3l0vPx+WLYNVq4wJQImJ0KcP2jXXlPArKH/qjz/gnk5GBVq41C9Wqz
HElvoa2qhRzk8QxDLJZBGL+I3fqE1tetCDqnj/83kBC3iKp/iN37BiRUfHgoWBDGQ60wnHT4eHhUc8vX9LMhKEvvkG7rvP2FOm4JGEzWb8/H/0UXj7bc9XaPqrH36AdeuMe0ObNsZ9zRu5ucZ+L//5T9GEpCBR694dFiwo3aa1gUIdPWrcbO12+PsLntW9/2I52j33lH9wJlOjR8OkNNdZ6/pv0YJylrPv5JPPcpazj31EEEFXuhJHnNlhiTIgyUgFdeQINGpkFAZz9l924kR47jnfxuWPcnNh8mSYNs0YSQJjXufw4cY0h4qQiFxJNWoIe/a4b7jiK7QOJa9XEQjUH39AXCxkZztvZLPBAw+gzfX1LoZCBAZP79/Bulagwpo1yyiq6SrFfP11Y1deD/cn82tHjhi7BJ85Y9Ru6dnTs1U3YNTlGjsWRo0yNqWzWIy5mIE+alQqyXcbz8JcraMOCzPWpQe7n35ynYiA0U/p6b6JR4ggVpF/7AalTz91v1rkt9/gu+98E095uXDBWDlTt64xyvPaa/DEE8Yqn0mTvFsFY7VCfLyxMKRCJyIAQ4a4/gayWmHAADR366eDgaerZgJgdY2/+YM/OMlJ8sk3OxThJyr6j96g46q0eUna+auHH4YPPyy6N41SRpLy7LPw5ptmRxiYtCZNYNbbxuSZy+uIFMwWbvEneD3No3Op8+dRs2ahWt6CurYa6sYE1LhxqIJnYv6uSRP3a+ttNmPSkvDIRjbSjW5UoQoxxBBJJEMZylE8mKckgprMGQkyHToYixVc/XKraXDokGcl6f3Rli1GSXpXqlSBjAzw44Uafk2tXw+TJxn19fPzjUp3Q4bC4MFolSu7f/+pU3DnHfDzzxcPXLYKJSICvv4GrWXLcou/rKghQ2D2P13/D/XVSrS77/ZdUAFqEYvoSU+AIoXTbNiIJJL1rKchDc0KT5QTT+/fMjISZJ580v0oe4cOgZuIALz/vvvKqL//DldsOhswsrKMScY33mhMz6hZE0aO9GzDXAClFOrMGVROTolj0Nq2RftsEVzIhXw72r79aCkpHiUiAPTvB7t2XSrdUsDhMKrkdb7HmCDq7159FRo3vno2c8HzvBEjITn56veJIk5xir70RUe/qoKrHTtnOUtv/LDctPAZSUaCzL33wl/+UvzcB6vVKJc+ebLv4ypLx46536fGajXaBZrjx42aKWPHGuXo8/KMOT7Tp8PNN8P69c7fq3JzUZMmwQ114dpqEFkVldgSNXcuJR0A1TTN2CDPC2rvXvjyS+dZscNhfFELFpQoJl/SoqJg3XoYNbroPgNNmsAHH8KkSVdXnBVX+Rf/Io88p/vaOHDwAz+whS3lcn2FIossssl2u7eOMIckI0HGajV2tx050kg8Lte2LWzYYPwcDWTVq7sfGdH1wNx3rW9fOHz46jmRdrtRvO4vfyl+vo/KzTWKc40eZSwxKrBtGzzcF0aOKHFC4rWvvnJfWc9igRVf+iaeUtKqVkV75RXIOAGHj8DxDPhxO1q/fpKIeOhbvnWbBFiwsB4X2XYJ2LEzgxk0pCFRRBFJJDdxE+/wDjoy8difSDIShEJDjU34MjJg+XKjwObu3UaBr6ZNzY6u9Pr0cT8yEhpqFC4LJD//DKtXO//adN14hDNnTjGfnDAB/vvfq7OYgo+nTjXmf/hCbq5n+94U7ARcRpTdjlqyBDVyJGr4cGNEqAyvoYWEoMXHo8XESBLiJVd72ZSknSfs2HmABxjGMPaxr/D4LnbxGI8xgAGSkPgRSUaCWESEUWW0e3fPy6QHgjvvhNtuc16UTNOMOiqBtvo0Pd39PVzTjAq7l1P5+fDWDNdLTK1WIyHxhebN3a8vt1jg5mZldkn1/ffG7sI9uht9MWumMSJUuzaqDOuAFAz3n+d8mZ2zIriN29y20dE9auept3iLZSwr3OymQMG/P+ZjPuKjMrueKB1JRkTA0TRji5PbLv7cstmMe21BcjJkCLzyin
nxlVS+ByUXlCpm5GT/fsjMdP1GhwPWrytxbF7585+NAjCuMiul4LHHyuRy6tAh+POdl8ro5udf6syzZ6BLZ9QPP5TqGrnkMolJ3MANRBHFNVxDEknMZ77MQfDAQAYSTrjTkQ8rVm7lVlpSNiusFIopTHHZxoLFbRvhO5KMiIAUHW080li/3lhB1Ls3jBkDe/cakz0DsZR7y5bu62dpGtxyi2/iKSnNYjEmd1qtzueOpE1Cu8719vEemzwZzp8vfjRG143jL79c4tNf4AId6MAoRnGYw4XHv+M7HuIhRlExN8rzRjTRzGc+VqzYrij8bcVKdarzCWVXUj+TTA5y0GWiqKOzjW3kUraPC0XJSDIiApamGfWmpk6Fjz4y7jc33mh2VCV3++1Qv77rRMpiKWYn4YQEuPZa1ye3WqFt2Q2Bu5WYaMyUdjZptoyqliql4IP3XU8icjhg6RKUu9LuTrzCK6xj3VXzCwo+TiONLwmMybhm6kpXNrGJ+7m/MCGpSlWGM5zv+Z4EEkyOUJhJkhEh/ISmGZNTw8Kcl7V46y2j5H2R94WEGAXJXD0WcTjg6afLNmBX/v532LHD+eeffQa1eXPpr5Ofb9QtcUfX4fRpr0+fRx4zmOFyoqMVK1Px0XycAHcLtzCPeZznPGc5yxnOMIlJ1KJWmV6nOtW5gRtcToi1YKEFLQgjrEyvLUpGkhEh/Mitt8KmTdC1a9HcIjHRWAzjdJrF2LHQvv3VCUnBx08/bawL9gF17hz8823Xox82G0yfVvqLhYSAJ1WZrVb3o0fF2MteznDGZRsHDtaytvBjdf48av581OTJqA8+QJ096/V1g10IIUQSiaWcbkEaGsMZ7rKNjs4IRpTL9YX3JBkRws/cdBMsXgwnTsCPPxp1RzZtgi5dnL9HCw+HL1fAaxPh+usvfaJFC/h4DrwxxXfLUbdvN0rgulJGu91qmgYDBrouPGOzQbfuaBERXp/f08mpBe3U9OlQKw56P2TUfBk0EOLiUC+8gJIN9XxqKEO5l3vRLv4pUPDvfhf/CP8ge9MIEWSUUkZBEpsNrUoV319//Xpo58H8lJgYtOMZpb/e4cPwpxbG45orJ7FaLMboycZNaM2be33uXHKJJZaznHXaxoqVu7iLFW92hpEjnJ/s6afRpsgOjlfaxjZmMIPVrAagPe15iqe4hdLP1LZj523e5k3eZC97AWhKU0YykkEMKreRGXGJp/dvSUaEEGVKZWVBbIzromY2G3Ttaux/UxbX/PFH6HafMYwUEmIczM+HGjVgwUK0228v8bmf53lSSXU5b+Tz8wvpXKO/USbXlb370BJkomaBN3iDFFKwYcOOMQm54N8TmchzPFcm11EosslGQ6Mqcg/xJdkoTwhhCi0yEvr1c70syG6Hp4aV3TWbN4d9++Hfn8PwEca55y+AX4+VKhEBeIEXaEObq36LLvh4BCO4Z14WXLjg+kRWq7HLowDgK74ihRSAwkTk8n+PYhSfUzZVgzU0IomURMSPSTIihEDpOionB+WucqqnXptolP11tizomWfR7ryzbK51kWa1onXpgjZxItqkSWgPPmisNCqlSlTia75mAhO4jku1UVrQgjnMYTKT0Q4ddr9hEsDhQ6WOp7w5cPAZn5FMMtdxHQ1pyAu8wK/8WqbXmchErDhPWK1YmcjEMr2m8F+SjAhRganDh1HDhkFkVeNV5RrUXwehdu0q1Xm16GhY/y088yxERV36RNOmRkG0iYF1kwknnFGM4ghHOMUpsshiK1vpQx9jQmRUlPsS+JoGkVG+CLfEcsmlK115gAdYwxqOcYw97CGVVBrQoHBeR2ld4AKrWIUD531WsErpd9xMhhZBQeaMCFFBqZ9+gtvbGxM/Ly8aZrMZrxVfobVvX/rr5OcbpdrDwqBmzaDcZE4dPmzsjePux+l/16Ld5sPic14awQimMa3Y+TEWLIQTzgEOEENMqa6TRRZRRHnU9jd+ozoBuAW3AGTOiBDCBa
UUPHA/ZGdfXb3Uboe8POjRHeVuQqYHtJAQtDp1gnq3W+366415Ms4Kz1mtxmZKbdv6NjAvZJHF27ztdKKujs4FLvAu75b6WlWp6lFCcy3XEk2A7XgpSkSSESEqolWrYM8e548WCiqWLljg27gC2ay34d57jX8XzB8p+DvxVliy1K+Tsf/yXy7gehKujs5Slpb6WhoaQxjicmmtFStP8qTLeSUieEgyIkRFtHat+wmXNpvRTnhECw+HzxbBtxtg0F+hcxfo2xdWfAXr16NVq2Z2iC65S0QK/EHpR8sARjKSJjQpNtmwYqU+9XmWZ8vkWsL/eTD9WwgRdDydKub/U8r8iqZp8H//Z7wCTFOaum1jw0ZzvC8eV5wIIljLWlJIYQ5zyCMPMErF96EPk5lMJJFlci3h/yQZEaIiat3a9U63YHy+TRvfxCNM14QmtKY1m9nsdJWLHTtP8mSZXTOKKN7jPdJI4zu+A4zN9MpqwmoeeaxlLVlkUY96NKe5y83zhHlkNY0QFZDSdbgxAY4eLX7eiMUCVarAsf+hXXON7wMUptjOdtrQhgtcKDYhGcxgZjLzquN27GxmMznkUI961Ke+L8J1SqFII41UUjnNpd2am9GMqUzldkpXCE94TlbTCCGc0iwWWPgpVK589dwRm81Y/bFgoSQiFUwzmrGRjdxJ0YJ0NanJ67zODGYUOa5QTGUq13EdbWlLJzrRgAa0o13hSIcZnuEZRjGqSCIC8BM/kUwy6ZR+k0ZRtmRkRIgKTO3ZA6kTYO5cYzmv1Qrde8CYMWi3lH6jMhG4DnGIvezlGq7hVm4lhKur2T7Hc6SRdtVxK1ZCCGENa0giyRfhFtrOdpfzWixYuJ7r2cc+2SjPB2SjPCGEx9SFC3DmDERGolWubHY4IgD8yI+0oIXTz1uw0IQmbGe7T+dpDGMYs5hVZL+b4qxi1VUjQKLsletjmhkzZlC3bl3Cw8NJSkpi8+bNTtsuWrSIxMREoqKiuOaaa2jRogUfffRRSS4rhCgnWng4WlycJCLCY2/zNjYXayB0dH7iJ7awxYdRwU52uk1EAH7hFx9EIzzldTIyf/58UlJSGDduHN9//z3NmzenY8eOnDx5stj21apV4/nnn2fDhg1s376dQYMGMWjQIL766qtSBy+EEMIc29nu0U3/Z372QTSXXMM1Ho3EVEYSb3/idTIyefJkHnvsMQYNGkSTJk2YNWsWlStX5r333iu2/R133EH37t1p3LgxCQkJDB8+nGbNmrFu3bpSBy+EEMIclans0U0/nHAfRHNJN7qhcD37wIaNe7jHRxEJT3iVjOTl5bF161aSk5MvncBiITk5mQ0bNrh9v1KK9PR0du/eTXsXG3Dl5uaSnZ1d5CWEEMJ/dKWr2zY2bCST7LZdWXqIh4gl1mkZeQsWBjCAmtT0aVzCNa+SkczMTBwOBzExRTc4iomJISMjw+n7srKyqFKlCqGhoXTp0oVp06Zx9913O20/YcIEIiMjC1/x8fHehCmEEKKc9ac/UUQ5XZFScNOvQQ2fxlWZyqxkJdWoVmTkpiA5uYu7mMY0n8Yk3PPJuqaIiAi2bdvGli1beOWVV0hJSWHNmjVO248dO5asrKzC19GjR30RphBCCA9FEsmXfEkVqhRJSApu+u1pz1SmmhLbzdzMHvYwmcm0ohUNaUgnOrGUpXzJl1SikilxCee8KgdfvXp1rFYrJ06cKHL8xIkTxMbGOn2fxWLhxhtvBKBFixbs2rWLCRMmcMcddxTbPiwsjLCwMG9CE0II4WNJJLGHPcxmNp/wCTnkUJ/6DGYw3enucrVNeYsiihEX/wj/59XISGhoKC1btiQ9/VL1Ol3XSU9Pp3Xr1h6fR9d1cnNzvbm0EEIIPxRDDC/wAj/zM0c4QjrpPMiDpiYiIvB4/d2SkpLCgAEDSExMpFWrVkyZMoVz584xaNAgAPr370/t2rWZMGECYMz/SExMJCEhgdzcXJYvX85HH33EzJlX72
8ghBBCiIrH62SkV69e/Pbbb7z44otkZGTQokULVqxYUTip9ciRI1gslwZczp07x5AhQ/j111+pVKkSjRo14uOPP6ZXr15l91UIIYQQImBJOXghhBBClAvZtVcIIYQQAUFmGPmZP/6ARYtg925jd/f77oPGjc2OSgghhCg/koz4kblzYcgQyMqCkBDQdRg7Fu65B+bMgehosyMUQlQ0JzjBQhZykpPEEktPelKd6maHJYKMzBnxE4sXQ48exX/OaoWWLWHdOiNJEaKsqdxcWLIEduyAsDC45x60xESzwxImcuBgFKOYylR0dKxYsWPHho3neI7xjHdafVWIAp7evyUZ8QO6DjfeCIcOgav/GgsWwIMP+iwsUUGof/8bBg2E06cvDck5HNC6NSz8FK1WLbNDFCZ4mqeZznSnm86NZjSppPo4KhFoZAJrANm4EQ4edJ2IWK3w7ru+i0lUDGr1aujeDc6cMQ7k5xuJCMCWLXDnHajffzcrPGGSQxxymYgApJHGcY77MCoRzCQZ8QPHjrlv43DAkSPlH4uoYMaMNv4uLhO222HfPvjgA9/GJEz3MR+7fQSjUMxlro8iEsFOkhE/UN2DuWCaBjVlx2tRhtSePcboh667bvjuO74JSPiNYxxzm4xYsXIMD36TEsIDkoz4gXbt4GIBW6eUgv79fROPqCD+9z/3bZSCX38t/1iEX6lOdZePaAB0dFlVI8qMJCN+wGaDf/zD9ecTEuChh3wXk6gAPBmSA6hRo3zjEH6nN72xY3fZRkfnIeSHkigbkoz4iccfh9RUY6KqxWL8bbtYBaZBA1i1yiiCJkSZadoUmjQxngE6Y7HAgIE+C0mY7zzn+ZEfuYmbnLbR0HiYh6lHPR9GJoKZLO31M8ePw7/+dakCa7ducPfdxj1BiLKmFi+G+10UuKlRA376Ga1aNd8GJkwxk5mMZjQ55GDBgk7x84lqUIOd7JTHNMItqTMihPCIeucdeGqosazXajUO2u1Qrx58sRytYUNzAxQ+MZOZDGGIR22tWOlBDxawoJyjEoFOkhEhhMfU6dPGEt6CCqydO0PnzmgFyYkIauc5Tyyx5JDj8Xs0NPaznxu4oRwjE4HO0/u37E0jhDAew4wcaXYYwiRLWOJVIgJGMrKMZQxneDlFJSoSmYkghBAV3BGOYPPyd1MLFs5xrpwiEhWNJCNCCFHBRRONA4dX77FjpwENyikiUdFIMiKEEBVcd7pjxbv5QddyLfdybzlFJCoaSUaEEKKCq0lNnuRJNFzUnLlIu/hnJjMJJdQH0YmKQJIRIYQQTGIS/egHUDh/pLj9aW7gBhazmAd50KfxieAmq2mEEEIQQggf8AEppPAv/sURjhBNNPdzPxoaZzlLHerQhjYejaAI4Q1JRoQQQhRqTnOmMMXsMEQFI8mIEEIEEYViM5t5l3fZz36iiaYnPelGN5njIfyWJCNCCBEk8slnAAP4hE+wYcOOHQsWPuMz6lOfb/iGOtQxO0whriITWIUQIkg8x3PMYx5g1AEBCje7O8hBkkkmn3zT4hPCGUlGhBAiCJziFDOZiaL47cbs2NnLXpay1MeRCeGeJCNCCBEEvuAL8shz2caKlYUs9FFEQnhOkhEhhAgCWWQVWxfkcg4cnOWsbwISwgsygVWIAKe2bYOlS+HcOWjYEHr1QqtSxeywhI/Vo17h/BBnbNhIIMFHEQnhOUlGhAhQKjMTevWC1avAZgNNA7sdRgxHvTkV7a9/NTtE4UMd6UhNanKSk07b2LHzCI/4MCohPCOPaYQIQCovDzp0gP/+xzhgt0N+PihljJA8+ghq/nxzgxQ+ZcPGNKY5/byGxgAG0JKWPoxKCM9IMiJEIPrsM9j2AzhcbPs+ZjRKdz1sL4JLT3qygAXEEgtQWLY9nHCe4Rne4R0zwxPCKXlMI0Qgev9fYLGAq2Tj8GFYvx7atfNdXMJ0D/Ig3enO13zNIQ4RSSSd6UwUUWaHJoRTkowIEYiOHXOdiB
Q4frz8YxF+x4aNe7jH7DCE8Jg8phEiEMXEGCMj7tSsWf6xCCFEKUkyIkQg6j/A/chIrVryiEYIERAkGREiEPXqZdQUsbl40jr+ZTSr1XcxCSFECUkyIkQA0sLDIX0VNGtmHLDZICTEeHQTEgKT30AbNMjcIIUQwkMygVWIAKXVqoXa8h2sXQtLllyqwNq/P1r16maHJ4QQHpNkRIgApmkatG9vvIQQIkDJYxohhBBCmEqSESGEEEKYSpIRIYQQQphKkhEhhBBCmEqSESGEEEKYSpIRIYQQQphKkhEhhBBCmEqSESGEEEKYSpIRIYQQQphKkhEhhBBCmKpEyciMGTOoW7cu4eHhJCUlsXnzZqdtZ8+eTbt27YiOjiY6Oprk5GSX7YUQQghRsXidjMyfP5+UlBTGjRvH999/T/PmzenYsSMnT54stv2aNWvo3bs3q1evZsOGDcTHx9OhQweOHTtW6uCFEEIIEfg0pZTy5g1JSUnceuutTJ8+HQBd14mPj2fYsGGMGTPG7fsdDgfR0dFMnz6d/v37e3TN7OxsIiMjycrKomrVqt6EK4QQIgDsYQ8f8zEnOUkMMTzMw9SnvtlhiVLy9P7t1a69eXl5bN26lbFjxxYes1gsJCcns2HDBo/Ocf78efLz86lWrZo3lxZCCBGEcsnlcR7nQz7EihULFnR0/sE/GMhA3uZtQgk1O0xRzrxKRjIzM3E4HMTExBQ5HhMTwy+//OLROUaPHk2tWrVITk522iY3N5fc3NzCj7Ozs70JUwghRIB4gif4mI8BcFz8U6AgQXmHd8wKT/iIT1fTpKamMm/ePBYvXkx4eLjTdhMmTCAyMrLwFR8f78MohRBC+MJe9vIBH6CjF/t5HZ33eI+DHPRxZMLXvEpGqlevjtVq5cSJE0WOnzhxgtjYWJfvTUtLIzU1lZUrV9KsWTOXbceOHUtWVlbh6+jRo96EKYQQIgDMZS5WrC7bWLAwl7k+ikiYxatkJDQ0lJYtW5Kenl54TNd10tPTad26tdP3TZw4kfHjx7NixQoSExPdXicsLIyqVasWeQkhhAguJzmJxc1tyIKFkxS/WlMED6/mjACkpKQwYMAAEhMTadWqFVOmTOHcuXMMGjQIgP79+1O7dm0mTJgAwGuvvcaLL77I3LlzqVu3LhkZGQBUqVKFKlWqlOGXIoQQIpDEEef0EU0BHZ044nwUkTCL13NGevXqRVpaGi+++CItWrRg27ZtrFixonBS65EjRzh+/Hhh+5kzZ5KXl8cDDzxAXFxc4SstLa3svgohhBABpy99PUpG+tLXRxEJs3hdZ8QMUmdECCGC02AG80/+ieLqW5GGxhCGMJ3pJkQmykK51BkRQgghytJ0pmPFykxmYrn4p2B57xCG8AZvmByh8AUZGRFCCGG6oxzlEz7hBCeIJZbe9OY6rjM7LFFKMjIihBAiYMQTzyhGmR2GMIlPi54JIYQQQlxJkhEhhBBCmEqSESGEEEKYSpIRIYQQQphKkhEhhBBCmEqSESGEEEKYSpIRIYQQQphKkhEhhBBCmEqSESGEEEKYKiAqsBZUrM/OzjY5EiGEEEJ4quC+7W7nmYBIRnJycgCIj483ORIhhBBCeCsnJ4fIyEinnw+IjfJ0Xed///sfERERaJpmdjguZWdnEx8fz9GjR2VTPw9If3lH+stz0lfekf7yjvSXZ5RS5OTkUKtWLSwW5zNDAmJkxGKxcN11gbV7Y9WqVeUb1AvSX96R/vKc9JV3pL+8I/3lnqsRkQIygVUIIYQQppJkRAghhBCmkmSkjIWFhTFu3DjCwsLMDiUgSH95R/rLc9JX3pH+8o70V9kKiAmsQgghhAheMjIihBBCCFNJMiKEEEIIU0kyIoQQQghTSTIihBBCCFNJMlIGTp8+Td++falatSpRUVE88sgj/P777y7bDxs2jIYNG1KpUiXq1KnD008/TVZWlg+j9p0ZM2ZQt25dwsPDSUpKYvPmzS7bL1y4kEaNGh
EeHs7NN9/M8uXLfRSp+bzpq9mzZ9OuXTuio6OJjo4mOTnZbd8GG2+/twrMmzcPTdPo1q1b+QboZ7ztr7NnzzJ06FDi4uIICwujQYMGFeb/R2/7asqUKYU/0+Pj4xk5ciQXLlzwUbRBQIlS69Spk2revLnauHGjWrt2rbrxxhtV7969nbbfsWOH6tGjh1q2bJnat2+fSk9PV/Xr11f333+/D6P2jXnz5qnQ0FD13nvvqZ9//lk99thjKioqSp04caLY9uvXr1dWq1VNnDhR7dy5U73wwgsqJCRE7dixw8eR+563fdWnTx81Y8YM9cMPP6hdu3apgQMHqsjISPXrr7/6OHJzeNtfBQ4ePKhq166t2rVrp+677z7fBOsHvO2v3NxclZiYqDp37qzWrVunDh48qNasWaO2bdvm48h9z9u+mjNnjgoLC1Nz5sxRBw8eVF999ZWKi4tTI0eO9HHkgUuSkVLauXOnAtSWLVsKj3355ZdK0zR17Ngxj8+zYMECFRoaqvLz88sjTNO0atVKDR06tPBjh8OhatWqpSZMmFBs+549e6ouXboUOZaUlKSeeOKJco3TH3jbV1ey2+0qIiJCffDBB+UVol8pSX/Z7XbVpk0b9c4776gBAwZUqGTE2/6aOXOmqlevnsrLy/NViH7D274aOnSo+vOf/1zkWEpKimrbtm25xhlM5DFNKW3YsIGoqCgSExMLjyUnJ2OxWNi0aZPH58nKyqJq1arYbAGxXZBH8vLy2Lp1K8nJyYXHLBYLycnJbNiwodj3bNiwoUh7gI4dOzptHyxK0ldXOn/+PPn5+VSrVq28wvQbJe2vf/zjH9SsWZNHHnnEF2H6jZL017Jly2jdujVDhw4lJiaGm266iVdffRWHw+GrsE1Rkr5q06YNW7duLXyUc+DAAZYvX07nzp19EnMwCJ47n0kyMjKoWbNmkWM2m41q1aqRkZHh0TkyMzMZP348jz/+eHmEaJrMzEwcDgcxMTFFjsfExPDLL78U+56MjIxi23val4GqJH11pdGjR1OrVq2rkrlgVJL+WrduHe+++y7btm3zQYT+pST9deDAAVatWkXfvn1Zvnw5+/btY8iQIeTn5zNu3DhfhG2KkvRVnz59yMzM5LbbbkMphd1uZ/Dgwfztb3/zRchBQUZGnBgzZgyaprl8eXqTcCU7O5suXbrQpEkTXnrppdIHLiqk1NRU5s2bx+LFiwkPDzc7HL+Tk5NDv379mD17NtWrVzc7nICg6zo1a9bkn//8Jy1btqRXr148//zzzJo1y+zQ/M6aNWt49dVXeeutt/j+++9ZtGgRX3zxBePHjzc7tIAhIyNOPPPMMwwcONBlm3r16hEbG8vJkyeLHLfb7Zw+fZrY2FiX78/JyaFTp05ERESwePFiQkJCShu2X6levTpWq5UTJ04UOX7ixAmnfRMbG+tV+2BRkr4qkJaWRmpqKt988w3NmjUrzzD9hrf9tX//fg4dOkTXrl0Lj+m6Dhgjmbt37yYhIaF8gzZRSb6/4uLiCAkJwWq1Fh5r3LgxGRkZ5OXlERoaWq4xm6UkffX3v/+dfv368eijjwJw8803c+7cOR5//HGef/55LBb5vd8d6SEnatSoQaNGjVy+QkNDad26NWfPnmXr1q2F7121ahW6rpOUlOT0/NnZ2XTo0IHQ0FCWLVsWlL/NhoaG0rJlS9LT0wuP6bpOeno6rVu3LvY9rVu3LtIe4Ouvv3baPliUpK8AJk6cyPjx41mxYkWReUvBztv+atSoETt27GDbtm2Fr3vvvZc777yTbdu2ER8f78vwfa4k319t27Zl3759hUkbwJ49e4iLiwvaRARK1lfnz5+/KuEoSOKUbP/mGbNn0AaDTp06qT/96U9q06ZNat26dap+/fpFlvb++uuvqmHDhmrTpk1KKaWysrJUUlKSuvnmm9W+ffvU8ePHC192u92sL6NczJs3T4WFhan3339f7dy5Uz3++OMqKipKZWRkKKWU6t
evnxozZkxh+/Xr1yubzabS0tLUrl271Lhx4yrU0l5v+io1NVWFhoaqTz/9tMj3UE5Ojllfgk95219XqmirabztryNHjqiIiAj11FNPqd27d6vPP/9c1axZU7388stmfQk+421fjRs3TkVERKhPPvlEHThwQK1cuVIlJCSonj17mvUlBBxJRsrAqVOnVO/evVWVKlVU1apV1aBBg4rcEA4ePKgAtXr1aqWUUqtXr1ZAsa+DBw+a80WUo2nTpqk6deqo0NBQ1apVK7Vx48bCz91+++1qwIABRdovWLBANWjQQIWGhqqmTZuqL774wscRm8ebvrr++uuL/R4aN26c7wM3ibffW5eraMmIUt7317fffquSkpJUWFiYqlevnnrllVeC7hcmZ7zpq/z8fPXSSy+phIQEFR4eruLj49WQIUPUmTNnfB94gNKUkjEkIYQQQphH5owIIYQQwlSSjAghhBDCVJKMCCGEEMJUkowIIYQQwlSSjAghhBDCVJKMCCGEEMJUkowIIYQQwlSSjAghhBDCVJKMCCGEEMJUkowIIYQQwlSSjAghhBDCVJKMCCGEEMJU/x+bkTGFvdt/sgAAAABJRU5ErkJggg==",
"text/plain": [
""
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"#SIMPLER DATASET\n",
"import matplotlib.pyplot as plt\n",
"import nnfs\n",
"from nnfs.datasets import vertical_data\n",
"nnfs.init()\n",
"X, y = vertical_data(samples=100, classes=3)\n",
"plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap='brg')\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "dPzWOMqSFjmn"
},
"source": [
"\n",
"STRATEGY 1: RANDOMLY SELECT WEIGHTS AND BIASES - DOES NOT WORK!\n",
""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "_klS09rqFjmn",
"outputId": "a012ca91-3393-4ab1-9443-3e81828ba998"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"New set of weights found, iteration: 0 loss: 1.1016203 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 1 loss: 1.1002508 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 2 loss: 1.0992025 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 3 loss: 1.0986239 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 10 loss: 1.0984299 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 22 loss: 1.0976521 acc: 0.36333333333333334\n",
"New set of weights found, iteration: 150 loss: 1.0974255 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 874 loss: 1.0972673 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 894 loss: 1.096895 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 1036 loss: 1.095428 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 88633 loss: 1.0952065 acc: 0.3333333333333333\n"
]
}
],
"source": [
"# STRATEGY 1: draw a completely NEW random parameter set every iteration and\n",
"# keep the best one seen so far. This is a blind search of parameter space;\n",
"# as the printed log shows, it barely reduces the loss even after 100k tries.\n",
"\n",
"# Create dataset\n",
"X, y = vertical_data(samples=100, classes=3)\n",
"\n",
"# Create model: 2 inputs -> Dense(3) -> ReLU -> Dense(3) -> Softmax\n",
"dense1 = Layer_Dense(2, 3)  # first dense layer, 2 inputs\n",
"activation1 = Activation_ReLU()\n",
"dense2 = Layer_Dense(3, 3)  # second dense layer, 3 inputs, 3 outputs\n",
"activation2 = Activation_Softmax()\n",
"\n",
"# Create loss function\n",
"loss_function = Loss_CategoricalCrossentropy()\n",
"\n",
"# Helper variables: track the best (lowest-loss) parameters seen so far.\n",
"lowest_loss = float('inf')  # sentinel: any real loss beats it on iteration 0\n",
"best_dense1_weights = dense1.weights.copy()\n",
"best_dense1_biases = dense1.biases.copy()\n",
"best_dense2_weights = dense2.weights.copy()\n",
"best_dense2_biases = dense2.biases.copy()\n",
"\n",
"for iteration in range(100000):\n",
"    # Generate a brand-new random parameter set (NOT a perturbation of the best)\n",
"    dense1.weights = 0.05 * np.random.randn(2, 3)\n",
"    dense1.biases = 0.05 * np.random.randn(1, 3)\n",
"    dense2.weights = 0.05 * np.random.randn(3, 3)\n",
"    dense2.biases = 0.05 * np.random.randn(1, 3)\n",
"\n",
"    # Forward pass of the training data through the whole model\n",
"    dense1.forward(X)\n",
"    activation1.forward(dense1.output)\n",
"    dense2.forward(activation1.output)\n",
"    activation2.forward(dense2.output)\n",
"\n",
"    # Mean categorical cross-entropy loss from the softmax confidences\n",
"    loss = loss_function.calculate(activation2.output, y)\n",
"\n",
"    # Accuracy: fraction of samples whose highest-confidence class matches y\n",
"    predictions = np.argmax(activation2.output, axis=1)\n",
"    accuracy = np.mean(predictions == y)\n",
"\n",
"    # If this random set is the best so far, report it and keep a copy aside\n",
"    if loss < lowest_loss:\n",
"        print('New set of weights found, iteration:', iteration,'loss:', loss, 'acc:', accuracy)\n",
"        best_dense1_weights = dense1.weights.copy()\n",
"        best_dense1_biases = dense1.biases.copy()\n",
"        best_dense2_weights = dense2.weights.copy()\n",
"        best_dense2_biases = dense2.biases.copy()\n",
"        lowest_loss = loss"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "pSEJ6av5Fjmn"
},
"source": [
"\n",
"STRATEGY 2: RANDOMLY ADJUST WEIGHTS AND BIASES - WORKS!\n",
""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "3_gxk2XdFjmn",
"outputId": "e2a285c4-d44d-4c3a-c61b-a7e7d533f85b"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"New set of weights found, iteration: 0 loss: 1.1008747 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 3 loss: 1.1005714 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 4 loss: 1.099462 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 9 loss: 1.0994359 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 10 loss: 1.09855 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 13 loss: 1.098517 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 14 loss: 1.0938607 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 15 loss: 1.0920315 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 17 loss: 1.091391 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 19 loss: 1.0910357 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 20 loss: 1.0898421 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 21 loss: 1.0843327 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 26 loss: 1.0835577 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 27 loss: 1.0823517 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 28 loss: 1.0778279 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 31 loss: 1.076321 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 35 loss: 1.0729524 acc: 0.3\n",
"New set of weights found, iteration: 36 loss: 1.0699975 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 47 loss: 1.0631136 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 52 loss: 1.062574 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 53 loss: 1.0624933 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 54 loss: 1.0592291 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 57 loss: 1.0574998 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 58 loss: 1.0493913 acc: 0.38\n",
"New set of weights found, iteration: 64 loss: 1.0450443 acc: 0.65\n",
"New set of weights found, iteration: 68 loss: 1.0377563 acc: 0.65\n",
"New set of weights found, iteration: 70 loss: 1.0333902 acc: 0.66\n",
"New set of weights found, iteration: 71 loss: 1.0315365 acc: 0.63\n",
"New set of weights found, iteration: 74 loss: 1.0283976 acc: 0.6033333333333334\n",
"New set of weights found, iteration: 75 loss: 1.0214951 acc: 0.6133333333333333\n",
"New set of weights found, iteration: 82 loss: 1.0101981 acc: 0.6466666666666666\n",
"New set of weights found, iteration: 84 loss: 1.0100108 acc: 0.5733333333333334\n",
"New set of weights found, iteration: 90 loss: 1.0035288 acc: 0.7133333333333334\n",
"New set of weights found, iteration: 92 loss: 1.0008268 acc: 0.65\n",
"New set of weights found, iteration: 95 loss: 0.99175525 acc: 0.6\n",
"New set of weights found, iteration: 98 loss: 0.9812336 acc: 0.66\n",
"New set of weights found, iteration: 101 loss: 0.97777444 acc: 0.58\n",
"New set of weights found, iteration: 106 loss: 0.977026 acc: 0.67\n",
"New set of weights found, iteration: 110 loss: 0.97546613 acc: 0.6766666666666666\n",
"New set of weights found, iteration: 111 loss: 0.9656759 acc: 0.9066666666666666\n",
"New set of weights found, iteration: 115 loss: 0.96165353 acc: 0.8733333333333333\n",
"New set of weights found, iteration: 116 loss: 0.94877195 acc: 0.9\n",
"New set of weights found, iteration: 122 loss: 0.93169373 acc: 0.8766666666666667\n",
"New set of weights found, iteration: 127 loss: 0.9187146 acc: 0.81\n",
"New set of weights found, iteration: 131 loss: 0.91708404 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 136 loss: 0.9127953 acc: 0.6933333333333334\n",
"New set of weights found, iteration: 139 loss: 0.90787965 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 141 loss: 0.9043988 acc: 0.66\n",
"New set of weights found, iteration: 146 loss: 0.90065634 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 147 loss: 0.89855623 acc: 0.6533333333333333\n",
"New set of weights found, iteration: 149 loss: 0.8932796 acc: 0.69\n",
"New set of weights found, iteration: 152 loss: 0.87403554 acc: 0.84\n",
"New set of weights found, iteration: 153 loss: 0.8707889 acc: 0.7666666666666667\n",
"New set of weights found, iteration: 156 loss: 0.8667261 acc: 0.88\n",
"New set of weights found, iteration: 158 loss: 0.8490861 acc: 0.87\n",
"New set of weights found, iteration: 162 loss: 0.84764665 acc: 0.8333333333333334\n",
"New set of weights found, iteration: 165 loss: 0.8428589 acc: 0.8033333333333333\n",
"New set of weights found, iteration: 166 loss: 0.8239612 acc: 0.66\n",
"New set of weights found, iteration: 167 loss: 0.7897708 acc: 0.7\n",
"New set of weights found, iteration: 171 loss: 0.7849403 acc: 0.6733333333333333\n",
"New set of weights found, iteration: 172 loss: 0.782472 acc: 0.6633333333333333\n",
"New set of weights found, iteration: 173 loss: 0.7803537 acc: 0.6633333333333333\n",
"New set of weights found, iteration: 175 loss: 0.7747049 acc: 0.67\n",
"New set of weights found, iteration: 178 loss: 0.77419984 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 179 loss: 0.75924987 acc: 0.7233333333333334\n",
"New set of weights found, iteration: 183 loss: 0.7520049 acc: 0.67\n",
"New set of weights found, iteration: 187 loss: 0.7399888 acc: 0.7366666666666667\n",
"New set of weights found, iteration: 188 loss: 0.73904145 acc: 0.8333333333333334\n",
"New set of weights found, iteration: 190 loss: 0.7320137 acc: 0.8266666666666667\n",
"New set of weights found, iteration: 195 loss: 0.712903 acc: 0.69\n",
"New set of weights found, iteration: 198 loss: 0.70528257 acc: 0.7266666666666667\n",
"New set of weights found, iteration: 199 loss: 0.70251924 acc: 0.6966666666666667\n",
"New set of weights found, iteration: 200 loss: 0.69334394 acc: 0.72\n",
"New set of weights found, iteration: 201 loss: 0.6803275 acc: 0.79\n",
"New set of weights found, iteration: 205 loss: 0.6770639 acc: 0.7666666666666667\n",
"New set of weights found, iteration: 206 loss: 0.66997415 acc: 0.8733333333333333\n",
"New set of weights found, iteration: 207 loss: 0.6599941 acc: 0.8066666666666666\n",
"New set of weights found, iteration: 208 loss: 0.65543926 acc: 0.7766666666666666\n",
"New set of weights found, iteration: 209 loss: 0.62252766 acc: 0.8033333333333333\n",
"New set of weights found, iteration: 210 loss: 0.61185175 acc: 0.7766666666666666\n",
"New set of weights found, iteration: 211 loss: 0.599142 acc: 0.8766666666666667\n",
"New set of weights found, iteration: 213 loss: 0.5920534 acc: 0.8766666666666667\n",
"New set of weights found, iteration: 222 loss: 0.5918576 acc: 0.8533333333333334\n",
"New set of weights found, iteration: 229 loss: 0.57633215 acc: 0.8533333333333334\n",
"New set of weights found, iteration: 234 loss: 0.55926883 acc: 0.8933333333333333\n",
"New set of weights found, iteration: 239 loss: 0.5579059 acc: 0.83\n",
"New set of weights found, iteration: 243 loss: 0.55486155 acc: 0.8466666666666667\n",
"New set of weights found, iteration: 246 loss: 0.5507333 acc: 0.8866666666666667\n",
"New set of weights found, iteration: 247 loss: 0.545519 acc: 0.8833333333333333\n",
"New set of weights found, iteration: 248 loss: 0.5346363 acc: 0.86\n",
"New set of weights found, iteration: 264 loss: 0.5342746 acc: 0.83\n",
"New set of weights found, iteration: 265 loss: 0.5253426 acc: 0.8866666666666667\n",
"New set of weights found, iteration: 267 loss: 0.51280546 acc: 0.9066666666666666\n",
"New set of weights found, iteration: 269 loss: 0.50337905 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 276 loss: 0.5024293 acc: 0.89\n",
"New set of weights found, iteration: 278 loss: 0.491296 acc: 0.9133333333333333\n",
"New set of weights found, iteration: 283 loss: 0.48956776 acc: 0.9033333333333333\n",
"New set of weights found, iteration: 284 loss: 0.48599878 acc: 0.93\n",
"New set of weights found, iteration: 289 loss: 0.48009065 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 290 loss: 0.47792414 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 291 loss: 0.46914592 acc: 0.9133333333333333\n",
"New set of weights found, iteration: 298 loss: 0.46764258 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 300 loss: 0.45708776 acc: 0.8933333333333333\n",
"New set of weights found, iteration: 301 loss: 0.45304757 acc: 0.8966666666666666\n",
"New set of weights found, iteration: 304 loss: 0.45023417 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 308 loss: 0.44732147 acc: 0.8833333333333333\n",
"New set of weights found, iteration: 309 loss: 0.44072458 acc: 0.88\n",
"New set of weights found, iteration: 313 loss: 0.4319237 acc: 0.8933333333333333\n",
"New set of weights found, iteration: 318 loss: 0.42440805 acc: 0.92\n",
"New set of weights found, iteration: 320 loss: 0.41862 acc: 0.92\n",
"New set of weights found, iteration: 324 loss: 0.41297048 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 326 loss: 0.41245985 acc: 0.9\n",
"New set of weights found, iteration: 328 loss: 0.40676734 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 329 loss: 0.40640786 acc: 0.93\n",
"New set of weights found, iteration: 335 loss: 0.39842996 acc: 0.93\n",
"New set of weights found, iteration: 338 loss: 0.39804557 acc: 0.94\n",
"New set of weights found, iteration: 344 loss: 0.39646825 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 347 loss: 0.38945505 acc: 0.93\n",
"New set of weights found, iteration: 348 loss: 0.3871968 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 351 loss: 0.3779128 acc: 0.9133333333333333\n",
"New set of weights found, iteration: 354 loss: 0.37311223 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 355 loss: 0.37276617 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 356 loss: 0.36852098 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 360 loss: 0.35868368 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 362 loss: 0.35691482 acc: 0.93\n",
"New set of weights found, iteration: 367 loss: 0.3429358 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 368 loss: 0.33983532 acc: 0.93\n",
"New set of weights found, iteration: 375 loss: 0.33676398 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 378 loss: 0.33293056 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 379 loss: 0.32861033 acc: 0.93\n",
"New set of weights found, iteration: 380 loss: 0.32313567 acc: 0.93\n",
"New set of weights found, iteration: 383 loss: 0.32154855 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 386 loss: 0.32056552 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 392 loss: 0.32019016 acc: 0.93\n",
"New set of weights found, iteration: 394 loss: 0.31832498 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 400 loss: 0.315471 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 401 loss: 0.3055538 acc: 0.93\n",
"New set of weights found, iteration: 407 loss: 0.3045038 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 410 loss: 0.30153093 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 417 loss: 0.29838136 acc: 0.92\n",
"New set of weights found, iteration: 420 loss: 0.29762506 acc: 0.93\n",
"New set of weights found, iteration: 428 loss: 0.29648978 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 434 loss: 0.29306 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 435 loss: 0.29192674 acc: 0.93\n",
"New set of weights found, iteration: 440 loss: 0.28488693 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 444 loss: 0.28334972 acc: 0.93\n",
"New set of weights found, iteration: 448 loss: 0.28309777 acc: 0.93\n",
"New set of weights found, iteration: 449 loss: 0.28108674 acc: 0.93\n",
"New set of weights found, iteration: 453 loss: 0.27773702 acc: 0.94\n",
"New set of weights found, iteration: 461 loss: 0.27312914 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 467 loss: 0.2729932 acc: 0.93\n",
"New set of weights found, iteration: 472 loss: 0.27115387 acc: 0.93\n",
"New set of weights found, iteration: 473 loss: 0.26995963 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 475 loss: 0.26944205 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 477 loss: 0.268688 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 479 loss: 0.26242334 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 493 loss: 0.26207444 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 495 loss: 0.25908068 acc: 0.93\n",
"New set of weights found, iteration: 496 loss: 0.2590734 acc: 0.93\n",
"New set of weights found, iteration: 497 loss: 0.2582146 acc: 0.94\n",
"New set of weights found, iteration: 503 loss: 0.25407296 acc: 0.93\n",
"New set of weights found, iteration: 505 loss: 0.25202662 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 506 loss: 0.24973544 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 512 loss: 0.24969201 acc: 0.93\n",
"New set of weights found, iteration: 517 loss: 0.24860601 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 519 loss: 0.24686712 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 529 loss: 0.24602742 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 535 loss: 0.2445193 acc: 0.93\n",
"New set of weights found, iteration: 536 loss: 0.24081425 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 538 loss: 0.24043368 acc: 0.93\n",
"New set of weights found, iteration: 546 loss: 0.23582341 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 550 loss: 0.23467208 acc: 0.93\n",
"New set of weights found, iteration: 552 loss: 0.23393926 acc: 0.93\n",
"New set of weights found, iteration: 557 loss: 0.22943695 acc: 0.93\n",
"New set of weights found, iteration: 564 loss: 0.22614151 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 570 loss: 0.22351934 acc: 0.93\n",
"New set of weights found, iteration: 582 loss: 0.2227324 acc: 0.93\n",
"New set of weights found, iteration: 585 loss: 0.21830775 acc: 0.93\n",
"New set of weights found, iteration: 597 loss: 0.2167703 acc: 0.93\n",
"New set of weights found, iteration: 610 loss: 0.21555844 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 625 loss: 0.21515213 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 629 loss: 0.21122937 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 631 loss: 0.21094893 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 637 loss: 0.21082413 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 659 loss: 0.21066985 acc: 0.93\n",
"New set of weights found, iteration: 661 loss: 0.20929192 acc: 0.93\n",
"New set of weights found, iteration: 684 loss: 0.20894809 acc: 0.93\n",
"New set of weights found, iteration: 686 loss: 0.20748574 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 689 loss: 0.205267 acc: 0.93\n",
"New set of weights found, iteration: 708 loss: 0.20465927 acc: 0.93\n",
"New set of weights found, iteration: 712 loss: 0.20393105 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 729 loss: 0.20358147 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 733 loss: 0.20317748 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 739 loss: 0.20279907 acc: 0.94\n",
"New set of weights found, iteration: 740 loss: 0.20236613 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 746 loss: 0.2021528 acc: 0.93\n",
"New set of weights found, iteration: 748 loss: 0.20100321 acc: 0.93\n",
"New set of weights found, iteration: 754 loss: 0.1993275 acc: 0.93\n",
"New set of weights found, iteration: 757 loss: 0.19792211 acc: 0.93\n",
"New set of weights found, iteration: 773 loss: 0.19783711 acc: 0.93\n",
"New set of weights found, iteration: 775 loss: 0.19740187 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 776 loss: 0.19721994 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 777 loss: 0.1961473 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 779 loss: 0.19598241 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 784 loss: 0.19526182 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 785 loss: 0.19456321 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 791 loss: 0.1943641 acc: 0.93\n",
"New set of weights found, iteration: 799 loss: 0.19334234 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 806 loss: 0.19311096 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 809 loss: 0.19277118 acc: 0.93\n",
"New set of weights found, iteration: 811 loss: 0.19264112 acc: 0.93\n",
"New set of weights found, iteration: 815 loss: 0.190967 acc: 0.93\n",
"New set of weights found, iteration: 890 loss: 0.18890005 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 892 loss: 0.18846218 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 893 loss: 0.18713883 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 903 loss: 0.1869782 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 909 loss: 0.18646298 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 917 loss: 0.18598829 acc: 0.93\n",
"New set of weights found, iteration: 927 loss: 0.18594755 acc: 0.93\n",
"New set of weights found, iteration: 932 loss: 0.18573302 acc: 0.93\n",
"New set of weights found, iteration: 942 loss: 0.18538505 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 946 loss: 0.18424016 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 959 loss: 0.1835666 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 969 loss: 0.1830834 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 974 loss: 0.18286093 acc: 0.93\n",
"New set of weights found, iteration: 983 loss: 0.1820501 acc: 0.93\n",
"New set of weights found, iteration: 985 loss: 0.18183175 acc: 0.93\n",
"New set of weights found, iteration: 993 loss: 0.18173474 acc: 0.93\n",
"New set of weights found, iteration: 995 loss: 0.18133913 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1001 loss: 0.18080577 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1006 loss: 0.1802216 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1025 loss: 0.17995203 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1042 loss: 0.17973644 acc: 0.93\n",
"New set of weights found, iteration: 1061 loss: 0.17944387 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1082 loss: 0.17943431 acc: 0.93\n",
"New set of weights found, iteration: 1083 loss: 0.17871827 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1093 loss: 0.17833588 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1123 loss: 0.1782659 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1144 loss: 0.17753273 acc: 0.93\n",
"New set of weights found, iteration: 1154 loss: 0.17727007 acc: 0.93\n",
"New set of weights found, iteration: 1165 loss: 0.17724434 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1166 loss: 0.17719936 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1171 loss: 0.17672187 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1176 loss: 0.17671774 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1212 loss: 0.17666677 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1223 loss: 0.17643958 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1275 loss: 0.1762153 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1303 loss: 0.17542142 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1378 loss: 0.17535225 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1382 loss: 0.17529377 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1404 loss: 0.17457567 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1457 loss: 0.17447841 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1461 loss: 0.17424846 acc: 0.93\n",
"New set of weights found, iteration: 1477 loss: 0.1737377 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1548 loss: 0.17335981 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1580 loss: 0.1733058 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1587 loss: 0.17321306 acc: 0.93\n",
"New set of weights found, iteration: 1596 loss: 0.17308939 acc: 0.93\n",
"New set of weights found, iteration: 1614 loss: 0.17301595 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1644 loss: 0.17284796 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1647 loss: 0.17258313 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1662 loss: 0.1723816 acc: 0.93\n",
"New set of weights found, iteration: 1706 loss: 0.17223743 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 1719 loss: 0.1719875 acc: 0.93\n",
"New set of weights found, iteration: 1978 loss: 0.17198084 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 2001 loss: 0.17195481 acc: 0.93\n",
"New set of weights found, iteration: 2026 loss: 0.1717552 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 2031 loss: 0.17165588 acc: 0.93\n",
"New set of weights found, iteration: 2066 loss: 0.17161626 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 2074 loss: 0.17140518 acc: 0.93\n",
"New set of weights found, iteration: 2441 loss: 0.17138883 acc: 0.93\n",
"New set of weights found, iteration: 2636 loss: 0.17134853 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 2718 loss: 0.17130204 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 2775 loss: 0.17122428 acc: 0.93\n",
"New set of weights found, iteration: 2829 loss: 0.1711681 acc: 0.93\n",
"New set of weights found, iteration: 2874 loss: 0.17114341 acc: 0.93\n",
"New set of weights found, iteration: 2978 loss: 0.17100853 acc: 0.93\n",
"New set of weights found, iteration: 3138 loss: 0.17093514 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 3293 loss: 0.17080545 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 3486 loss: 0.1707664 acc: 0.93\n",
"New set of weights found, iteration: 3681 loss: 0.17070974 acc: 0.93\n",
"New set of weights found, iteration: 4038 loss: 0.17066137 acc: 0.93\n",
"New set of weights found, iteration: 4090 loss: 0.17065905 acc: 0.93\n",
"New set of weights found, iteration: 4116 loss: 0.17063397 acc: 0.93\n",
"New set of weights found, iteration: 4258 loss: 0.17062972 acc: 0.93\n",
"New set of weights found, iteration: 5960 loss: 0.17062148 acc: 0.93\n",
"New set of weights found, iteration: 6352 loss: 0.17058212 acc: 0.93\n",
"New set of weights found, iteration: 8192 loss: 0.17057395 acc: 0.93\n",
"New set of weights found, iteration: 8254 loss: 0.17056267 acc: 0.93\n",
"New set of weights found, iteration: 9463 loss: 0.17056097 acc: 0.93\n"
]
}
],
"source": [
"# Create dataset\n",
"X, y = vertical_data(samples=100, classes=3)\n",
"# Create model\n",
"dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs\n",
"activation1 = Activation_ReLU()\n",
"dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 outputs\n",
"activation2 = Activation_Softmax()\n",
"# Create loss function\n",
"loss_function = Loss_CategoricalCrossentropy()\n",
"# Helper variables\n",
"lowest_loss = 9999999 # some initial value\n",
"best_dense1_weights = dense1.weights.copy()\n",
"best_dense1_biases = dense1.biases.copy()\n",
"best_dense2_weights = dense2.weights.copy()\n",
"best_dense2_biases = dense2.biases.copy()\n",
"for iteration in range(10000):\n",
" # Update weights with some small random values\n",
" dense1.weights += 0.05 * np.random.randn(2, 3)\n",
" dense1.biases += 0.05 * np.random.randn(1, 3)\n",
" dense2.weights += 0.05 * np.random.randn(3, 3)\n",
" dense2.biases += 0.05 * np.random.randn(1, 3)\n",
" # Perform a forward pass of our training data through this layer\n",
" dense1.forward(X)\n",
" activation1.forward(dense1.output)\n",
" dense2.forward(activation1.output)\n",
" activation2.forward(dense2.output)\n",
" # Perform a forward pass through activation function\n",
" # it takes the output of second dense layer here and returns loss\n",
" loss = loss_function.calculate(activation2.output, y)\n",
" # Calculate accuracy from output of activation2 and targets\n",
" # calculate values along first axis\n",
" predictions = np.argmax(activation2.output, axis=1)\n",
" accuracy = np.mean(predictions == y)\n",
" # If loss is smaller - print and save weights and biases aside\n",
" if loss < lowest_loss:\n",
" print('New set of weights found, iteration:', iteration,'loss:', loss, 'acc:', accuracy)\n",
" best_dense1_weights = dense1.weights.copy()\n",
" best_dense1_biases = dense1.biases.copy()\n",
" best_dense2_weights = dense2.weights.copy()\n",
" best_dense2_biases = dense2.biases.copy()\n",
" lowest_loss = loss\n",
" # Revert weights and biases\n",
" else:\n",
" dense1.weights = best_dense1_weights.copy()\n",
" dense1.biases = best_dense1_biases.copy()\n",
" dense2.weights = best_dense2_weights.copy()\n",
" dense2.biases = best_dense2_biases.copy()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "2hJxmRLrFjmo"
},
"source": [
"\n",
"STRATEGY 2: FOR SPIRAL DATASET - DOES NOT WORK!\n",
"
"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "TtnQHR0PFjmo",
"outputId": "9fb9bb12-2c59-4f08-e419-b19fad1e595e"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"New set of weights found, iteration: 0 loss: 1.0991902 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 1 loss: 1.0988214 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 6 loss: 1.0982754 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 14 loss: 1.0978643 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 23 loss: 1.0972557 acc: 0.35\n",
"New set of weights found, iteration: 37 loss: 1.0970367 acc: 0.35\n",
"New set of weights found, iteration: 40 loss: 1.0958394 acc: 0.37\n",
"New set of weights found, iteration: 41 loss: 1.0950066 acc: 0.4066666666666667\n",
"New set of weights found, iteration: 44 loss: 1.0948946 acc: 0.37333333333333335\n",
"New set of weights found, iteration: 53 loss: 1.0943341 acc: 0.3933333333333333\n",
"New set of weights found, iteration: 58 loss: 1.0942849 acc: 0.36666666666666664\n",
"New set of weights found, iteration: 59 loss: 1.0941792 acc: 0.35\n",
"New set of weights found, iteration: 60 loss: 1.0926698 acc: 0.38666666666666666\n",
"New set of weights found, iteration: 61 loss: 1.0913428 acc: 0.3933333333333333\n",
"New set of weights found, iteration: 63 loss: 1.0910325 acc: 0.4066666666666667\n",
"New set of weights found, iteration: 65 loss: 1.0902771 acc: 0.3933333333333333\n",
"New set of weights found, iteration: 75 loss: 1.0892566 acc: 0.39\n",
"New set of weights found, iteration: 88 loss: 1.0892477 acc: 0.39666666666666667\n",
"New set of weights found, iteration: 90 loss: 1.0862132 acc: 0.39\n",
"New set of weights found, iteration: 95 loss: 1.0858525 acc: 0.38333333333333336\n",
"New set of weights found, iteration: 99 loss: 1.0850619 acc: 0.44\n",
"New set of weights found, iteration: 103 loss: 1.0833515 acc: 0.4166666666666667\n",
"New set of weights found, iteration: 108 loss: 1.0818787 acc: 0.41333333333333333\n",
"New set of weights found, iteration: 111 loss: 1.0813941 acc: 0.39\n",
"New set of weights found, iteration: 113 loss: 1.0787892 acc: 0.38666666666666666\n",
"New set of weights found, iteration: 114 loss: 1.0777413 acc: 0.36333333333333334\n",
"New set of weights found, iteration: 116 loss: 1.0772293 acc: 0.38\n",
"New set of weights found, iteration: 121 loss: 1.0759072 acc: 0.3933333333333333\n",
"New set of weights found, iteration: 132 loss: 1.075869 acc: 0.43\n",
"New set of weights found, iteration: 134 loss: 1.075571 acc: 0.39666666666666667\n",
"New set of weights found, iteration: 143 loss: 1.0749155 acc: 0.4066666666666667\n",
"New set of weights found, iteration: 148 loss: 1.0747138 acc: 0.4066666666666667\n",
"New set of weights found, iteration: 156 loss: 1.0740647 acc: 0.42\n",
"New set of weights found, iteration: 183 loss: 1.0736305 acc: 0.42333333333333334\n",
"New set of weights found, iteration: 184 loss: 1.0735968 acc: 0.4266666666666667\n",
"New set of weights found, iteration: 196 loss: 1.0734725 acc: 0.4266666666666667\n",
"New set of weights found, iteration: 197 loss: 1.0726037 acc: 0.44\n",
"New set of weights found, iteration: 198 loss: 1.0724471 acc: 0.4533333333333333\n",
"New set of weights found, iteration: 213 loss: 1.0718489 acc: 0.38666666666666666\n",
"New set of weights found, iteration: 225 loss: 1.07182 acc: 0.4166666666666667\n",
"New set of weights found, iteration: 228 loss: 1.0711939 acc: 0.41\n",
"New set of weights found, iteration: 276 loss: 1.0709915 acc: 0.42\n",
"New set of weights found, iteration: 395 loss: 1.0707066 acc: 0.42333333333333334\n",
"New set of weights found, iteration: 471 loss: 1.070367 acc: 0.42\n",
"New set of weights found, iteration: 1317 loss: 1.0702449 acc: 0.43333333333333335\n",
"New set of weights found, iteration: 1857 loss: 1.0702258 acc: 0.43333333333333335\n",
"New set of weights found, iteration: 2198 loss: 1.070218 acc: 0.4266666666666667\n",
"New set of weights found, iteration: 2315 loss: 1.0701423 acc: 0.44333333333333336\n",
"New set of weights found, iteration: 5115 loss: 1.0700669 acc: 0.43333333333333335\n",
"New set of weights found, iteration: 5942 loss: 1.069947 acc: 0.44333333333333336\n"
]
}
],
"source": [
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)# Create model\n",
"dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs\n",
"activation1 = Activation_ReLU()\n",
"dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 outputs\n",
"activation2 = Activation_Softmax()\n",
"# Create loss function\n",
"loss_function = Loss_CategoricalCrossentropy()\n",
"# Helper variables\n",
"lowest_loss = 9999999 # some initial value\n",
"best_dense1_weights = dense1.weights.copy()\n",
"best_dense1_biases = dense1.biases.copy()\n",
"best_dense2_weights = dense2.weights.copy()\n",
"best_dense2_biases = dense2.biases.copy()\n",
"for iteration in range(10000):\n",
" # Update weights with some small random values\n",
" dense1.weights += 0.05 * np.random.randn(2, 3)\n",
" dense1.biases += 0.05 * np.random.randn(1, 3)\n",
" dense2.weights += 0.05 * np.random.randn(3, 3)\n",
" dense2.biases += 0.05 * np.random.randn(1, 3)\n",
" # Perform a forward pass of our training data through this layer\n",
" dense1.forward(X)\n",
" activation1.forward(dense1.output)\n",
" dense2.forward(activation1.output)\n",
" activation2.forward(dense2.output)\n",
" # Perform a forward pass through activation function\n",
" # it takes the output of second dense layer here and returns loss\n",
" loss = loss_function.calculate(activation2.output, y)\n",
" # Calculate accuracy from output of activation2 and targets\n",
" # calculate values along first axis\n",
" predictions = np.argmax(activation2.output, axis=1)\n",
" accuracy = np.mean(predictions == y)\n",
" # If loss is smaller - print and save weights and biases aside\n",
" if loss < lowest_loss:\n",
" print('New set of weights found, iteration:', iteration,'loss:', loss, 'acc:', accuracy)\n",
" best_dense1_weights = dense1.weights.copy()\n",
" best_dense1_biases = dense1.biases.copy()\n",
" best_dense2_weights = dense2.weights.copy()\n",
" best_dense2_biases = dense2.biases.copy()\n",
" lowest_loss = loss\n",
" # Revert weights and biases\n",
" else:\n",
" dense1.weights = best_dense1_weights.copy()\n",
" dense1.biases = best_dense1_biases.copy()\n",
" dense2.weights = best_dense2_weights.copy()\n",
" dense2.biases = best_dense2_biases.copy()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "shaawiNMFjmo"
},
"source": [
"\n",
"BACKPROPAGATION \n",
"
"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "QOQ4rGwpFjmo"
},
"source": [
"\n",
"GRADIENTS OF THE LOSS WITH RESPECT TO WEIGHTS\n",
"
"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "OltKYL03Fjmo"
},
"source": [
""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "PX3ZDMbJFjmo",
"outputId": "a9bbfedd-430e-4aa0-cf2e-a1871cfb8cc2"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[ 0.5 0.5 0.5]\n",
" [20.1 20.1 20.1]\n",
" [10.9 10.9 10.9]\n",
" [ 4.1 4.1 4.1]]\n"
]
}
],
"source": [
"import numpy as np\n",
"# Passed-in gradient from the next layer\n",
"# for the purpose of this example we're going to use\n",
"# an array of an incremental gradient values\n",
"dvalues = np.array([[1., 1., 1.],\n",
" [2., 2., 2.],\n",
" [3., 3., 3.]])\n",
"# We have 3 sets of inputs - samples\n",
"inputs = np.array([[1, 2, 3, 2.5],\n",
" [2., 5., -1., 2],\n",
" [-1.5, 2.7, 3.3, -0.8]])\n",
"# sum weights of given input\n",
"# and multiply by the passed-in gradient for this neuron\n",
"dweights = np.dot(inputs.T, dvalues)\n",
"print(dweights)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "11Jzrm4-Fjmo"
},
"source": [
"\n",
"GRADIENTS OF THE LOSS WITH RESPECT TO BIASES\n",
"
"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "OT8ehIYWFjmo"
},
"source": [
""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "nhGHvU8HFjmo",
"outputId": "cdef2cae-dfe6-4470-c06e-39abb3442fe1"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[6. 6. 6.]]\n"
]
}
],
"source": [
"import numpy as np\n",
"# Passed-in gradient from the next layer\n",
"# for the purpose of this example we're going to use\n",
"# an array of an incremental gradient values\n",
"dvalues = np.array([[1., 1., 1.],\n",
" [2., 2., 2.],\n",
" [3., 3., 3.]])\n",
"# One bias for each neuron\n",
"# biases are the row vector with a shape (1, neurons)\n",
"biases = np.array([[2, 3, 0.5]])\n",
"# dbiases - sum values, do this over samples (first axis), keepdims\n",
"# since this by default will produce a plain list -\n",
"# we explained this in the chapter 4\n",
"dbiases = np.sum(dvalues, axis=0, keepdims=True)\n",
"print(dbiases)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "1dBu2U50Fjmo"
},
"source": [
"\n",
"GRADIENTS OF THE LOSS WITH RESPECT TO INPUTS\n",
"
"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "S6CoYaxNFjmr"
},
"source": [
""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "K1hy-OyrFjms",
"outputId": "7427ef16-a3d0-4ea4-cb56-657c6f55c471"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[ 0.44 -0.38 -0.07 1.37]\n",
" [ 0.88 -0.76 -0.14 2.74]\n",
" [ 1.32 -1.14 -0.21 4.11]]\n"
]
}
],
"source": [
"import numpy as np\n",
"# Passed-in gradient from the next layer\n",
"# for the purpose of this example we're going to use\n",
"# an array of an incremental gradient values\n",
"dvalues = np.array([[1., 1., 1.],\n",
" [2., 2., 2.],\n",
" [3., 3., 3.]])\n",
"# We have 3 sets of weights - one set for each neuron\n",
"# we have 4 inputs, thus 4 weights\n",
"# recall that we keep weights transposed\n",
"weights = np.array([[0.2, 0.8, -0.5, 1],\n",
" [0.5, -0.91, 0.26, -0.5],\n",
" [-0.26, -0.27, 0.17, 0.87]]).T\n",
"# sum weights of given input\n",
"# and multiply by the passed-in gradient for this neuron\n",
"dinputs = np.dot(dvalues, weights.T)\n",
"print(dinputs)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "wd-g9Sg6Fjms"
},
"source": [
"\n",
"ADDING THE \"BACKWARD\" METHOD IN THE LAYER-DENSE CLASS\n",
"
"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "obOwiDVWFjms"
},
"outputs": [],
"source": [
"# class Layer_Dense:\n",
"# ...\n",
"# # Backward pass\n",
"# def backward(self, dvalues):\n",
"# # Gradients on parameters\n",
"# self.dweights = np.dot(self.inputs.T, dvalues)\n",
"# self.dbiases = np.sum(dvalues, axis=0, keepdims=True)\n",
"# # Gradient on values\n",
"# self.dinputs = np.dot(dvalues, self.weights.T)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "UcDJcQHiFjms"
},
"source": [
"\n",
"ADDING THE \"BACKWARD\" METHOD IN THE RELU ACTIVATION CLASS\n",
"
"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "D6Th9Qw-Fjms"
},
"outputs": [],
"source": [
"# # ReLU activation\n",
"# class Activation_ReLU:\n",
"# # Forward pass\n",
"# def forward(self, inputs):\n",
"# # Remember input values\n",
"# self.inputs = inputs\n",
"# self.output = np.maximum(0, inputs)\n",
"# # Backward pass\n",
"# def backward(self, dvalues):\n",
"# # Since we need to modify the original variable,\n",
"# # let's make a copy of the values first\n",
"# self.dinputs = dvalues.copy()\n",
"# # Zero gradient where input values were negative\n",
"# self.dinputs[self.inputs <= 0] = 0"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "VXsxCajzFjms"
},
"source": [
"\n",
"LOSS FUNCTION BACKPROPAGATION\n",
"
"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "piEwnKVRFjms"
},
"outputs": [],
"source": [
"# Common loss class\n",
"class Loss:\n",
" # Calculates the data and regularization losses\n",
" # given model output and ground truth values\n",
" def calculate(self, output, y):\n",
" # Calculate sample losses\n",
" sample_losses = self.forward(output, y)\n",
" # Calculate mean loss\n",
" data_loss = np.mean(sample_losses)\n",
" # Return loss\n",
" return data_loss"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "DxVLILbuFjms"
},
"outputs": [],
"source": [
"class Loss_CategoricalCrossentropy(Loss):\n",
" # Forward pass\n",
" def forward(self, y_pred, y_true):\n",
" # Number of samples in a batch\n",
" samples = len(y_pred)\n",
"\n",
" # Clip data to prevent division by 0\n",
" # Clip both sides to not drag mean towards any value\n",
" y_pred_clipped = np.clip(y_pred, 1e-7, 1 - 1e-7)\n",
"\n",
" # Probabilities for target values -\n",
" # only if categorical labels\n",
" if len(y_true.shape) == 1:\n",
" correct_confidences = y_pred_clipped[\n",
" range(samples),\n",
" y_true\n",
" ]\n",
" # Mask values - only for one-hot encoded labels\n",
" elif len(y_true.shape) == 2:\n",
" correct_confidences = np.sum(\n",
" y_pred_clipped * y_true,\n",
" axis=1\n",
" )\n",
"\n",
" # Losses\n",
" negative_log_likelihoods = -np.log(correct_confidences)\n",
" return negative_log_likelihoods\n",
"\n",
" # Backward pass\n",
" def backward(self, dvalues, y_true):\n",
" # Number of samples\n",
" samples = len(dvalues)\n",
" # Number of labels in every sample\n",
" # We'll use the first sample to count them\n",
" labels = len(dvalues[0])\n",
"\n",
" # If labels are sparse, turn them into one-hot vector\n",
" if len(y_true.shape) == 1:\n",
" y_true = np.eye(labels)[y_true]\n",
"\n",
" # Calculate gradient\n",
" self.dinputs = -y_true / dvalues\n",
" # Normalize gradient\n",
" self.dinputs = self.dinputs / samples\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "YygD9joxFjms"
},
"source": [
"\n",
"Softmax classifier - combined Softmax activation and cross-entropy loss for faster backward step\n",
"
\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "2k1LfT_3Fjms"
},
"outputs": [],
"source": [
"# Softmax classifier - combined Softmax activation\n",
"# and cross-entropy loss for faster backward step\n",
"class Activation_Softmax_Loss_CategoricalCrossentropy:\n",
" # Creates activation and loss function objects\n",
" def __init__(self):\n",
" self.activation = Activation_Softmax()\n",
" self.loss = Loss_CategoricalCrossentropy()\n",
"\n",
" # Forward pass\n",
" def forward(self, inputs, y_true):\n",
" # Output layer's activation function\n",
" self.activation.forward(inputs)\n",
" # Set the output\n",
" self.output = self.activation.output\n",
" # Calculate and return loss value\n",
" return self.loss.calculate(self.output, y_true)\n",
"\n",
" # Backward pass\n",
" def backward(self, dvalues, y_true):\n",
" # Number of samples\n",
" samples = len(dvalues)\n",
" # If labels are one-hot encoded,\n",
" # turn them into discrete values\n",
" if len(y_true.shape) == 2:\n",
" y_true = np.argmax(y_true, axis=1)\n",
" # Copy so we can safely modify\n",
" self.dinputs = dvalues.copy()\n",
" # Calculate gradient\n",
" self.dinputs[range(samples), y_true] -= 1\n",
" # Normalize gradient\n",
" self.dinputs = self.dinputs / samples\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "ftANOmqyFjms",
"outputId": "6f8bc7ac-3ee9-4e36-d3a7-4f5f2cc00bdd"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Gradients: combined loss and activation:\n",
"[[-0.1 0.03333333 0.06666667]\n",
" [ 0.03333333 -0.16666667 0.13333333]\n",
" [ 0.00666667 -0.03333333 0.02666667]]\n"
]
}
],
"source": [
"# Quick check of the combined backward pass on a toy batch:\n",
"# 3 samples, 3 classes, sparse (non one-hot) targets.\n",
"softmax_outputs = np.array([[0.7, 0.1, 0.2],\n",
"                            [0.1, 0.5, 0.4],\n",
"                            [0.02, 0.9, 0.08]])\n",
"class_targets = np.array([0, 1, 1])\n",
"softmax_loss = Activation_Softmax_Loss_CategoricalCrossentropy()\n",
"# backward() takes the softmax outputs themselves as dvalues\n",
"softmax_loss.backward(softmax_outputs, class_targets)\n",
"dvalues1 = softmax_loss.dinputs\n",
"print('Gradients: combined loss and activation:')\n",
"print(dvalues1)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "6MZYl0XMFjmt"
},
"source": [
"\n",
"ALL CLASSES TOGETHER \n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "hmRI3JiSFjmt"
},
"source": [
"\n",
"\n",
"CREATING LAYERS: FORWARD AND BACKWARD PASS\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "4KkiOXfWFjmt"
},
"outputs": [],
"source": [
"# Dense (fully-connected) layer\n",
"class Layer_Dense:\n",
"    # Layer initialization\n",
"    def __init__(self, n_inputs, n_neurons):\n",
"        # Initialize weights with small random values (the 0.01 factor\n",
"        # keeps initial outputs small) and biases with zeros\n",
"        self.weights = 0.01 * np.random.randn(n_inputs, n_neurons)\n",
"        self.biases = np.zeros((1, n_neurons))\n",
"\n",
"    # Forward pass\n",
"    def forward(self, inputs):\n",
"        # Remember input values (needed by backward() for dweights)\n",
"        self.inputs = inputs\n",
"        # Calculate output values from input ones, weights and biases\n",
"        self.output = np.dot(inputs, self.weights) + self.biases\n",
"\n",
"    # Backward pass\n",
"    def backward(self, dvalues):\n",
"        # Gradients on parameters: dW = inputs^T . dvalues,\n",
"        # db = column-wise sum of dvalues (bias broadcasts over rows)\n",
"        self.dweights = np.dot(self.inputs.T, dvalues)\n",
"        self.dbiases = np.sum(dvalues, axis=0, keepdims=True)\n",
"        # Gradient on values, passed back to the previous layer:\n",
"        # dX = dvalues . W^T\n",
"        self.dinputs = np.dot(dvalues, self.weights.T)\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "DB42nEQnFjmt"
},
"source": [
"\n",
"\n",
"RELU ACTIVATION: FORWARD AND BACKWARD PASS\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "oJJrADlcFjmt"
},
"outputs": [],
"source": [
"# ReLU activation\n",
"class Activation_ReLU:\n",
"    # Forward pass\n",
"    def forward(self, inputs):\n",
"        # Remember input values (backward() needs them to know where\n",
"        # the activation was clipped to zero)\n",
"        self.inputs = inputs\n",
"        # Calculate output values from inputs\n",
"        self.output = np.maximum(0, inputs)\n",
"\n",
"    # Backward pass\n",
"    def backward(self, dvalues):\n",
"        # Since we need to modify the original variable,\n",
"        # let's make a copy of values first\n",
"        self.dinputs = dvalues.copy()\n",
"        # Zero gradient where input values were negative:\n",
"        # ReLU derivative is 0 for inputs <= 0 and 1 otherwise\n",
"        self.dinputs[self.inputs <= 0] = 0\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "82a7lCIQFjmt"
},
"source": [
"\n",
"\n",
"SOFTMAX ACTIVATION: FORWARD PASS\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "4-kY3NeDFjmt"
},
"outputs": [],
"source": [
"# Softmax activation\n",
"class Activation_Softmax:\n",
"    # Forward pass\n",
"    def forward(self, inputs):\n",
"        # Remember input values, for consistency with Layer_Dense and\n",
"        # Activation_ReLU in this section, which both store their inputs\n",
"        self.inputs = inputs\n",
"        # Get unnormalized probabilities; subtracting the row-wise max\n",
"        # prevents overflow in np.exp without changing the result\n",
"        exp_values = np.exp(inputs - np.max(inputs, axis=1, keepdims=True))\n",
"        # Normalize them for each sample so each row sums to 1\n",
"        probabilities = exp_values / np.sum(exp_values, axis=1, keepdims=True)\n",
"        self.output = probabilities"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "NROH2PwUFjmt"
},
"source": [
"\n",
"LOSS CLASS\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "3pxeeZjbFjmt"
},
"outputs": [],
"source": [
"# Common loss class\n",
"class Loss:\n",
"    # Calculates the data and regularization losses\n",
"    # given model output and ground truth values\n",
"    # (only the data loss is computed here; regularization\n",
"    # is not implemented at this stage)\n",
"    def calculate(self, output, y):\n",
"        # Calculate per-sample losses via the subclass forward()\n",
"        sample_losses = self.forward(output, y)\n",
"        # Calculate mean loss over the batch\n",
"        data_loss = np.mean(sample_losses)\n",
"        # Return loss\n",
"        return data_loss"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "JUnaSdwBFjmt"
},
"source": [
"\n",
"\n",
"CATEGORICAL CROSS ENTROPY LOSS: FORWARD AND BACKWARD PASS\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "u8Q55WE_Fjmt"
},
"outputs": [],
"source": [
"# Categorical cross-entropy loss: -log of the predicted probability\n",
"# assigned to the correct class of each sample\n",
"class Loss_CategoricalCrossentropy(Loss):\n",
"    # Forward pass\n",
"    def forward(self, y_pred, y_true):\n",
"        # Number of samples in a batch\n",
"        samples = len(y_pred)\n",
"\n",
"        # Clip data to prevent division by 0 (log(0) is undefined)\n",
"        # Clip both sides to not drag mean towards any value\n",
"        y_pred_clipped = np.clip(y_pred, 1e-7, 1 - 1e-7)\n",
"\n",
"        # Probabilities for target values -\n",
"        # only if categorical labels (sparse, shape (samples,))\n",
"        if len(y_true.shape) == 1:\n",
"            correct_confidences = y_pred_clipped[\n",
"                range(samples),\n",
"                y_true\n",
"            ]\n",
"        # Mask values - only for one-hot encoded labels\n",
"        elif len(y_true.shape) == 2:\n",
"            correct_confidences = np.sum(\n",
"                y_pred_clipped * y_true,\n",
"                axis=1\n",
"            )\n",
"\n",
"        # Losses: negative log-likelihood per sample\n",
"        negative_log_likelihoods = -np.log(correct_confidences)\n",
"        return negative_log_likelihoods\n",
"\n",
"    # Backward pass\n",
"    def backward(self, dvalues, y_true):\n",
"        # Number of samples\n",
"        samples = len(dvalues)\n",
"        # Number of labels in every sample\n",
"        # We'll use the first sample to count them\n",
"        labels = len(dvalues[0])\n",
"\n",
"        # If labels are sparse, turn them into one-hot vector\n",
"        if len(y_true.shape) == 1:\n",
"            y_true = np.eye(labels)[y_true]\n",
"\n",
"        # Calculate gradient: d(-log(y_hat))/d(y_hat) = -y / y_hat\n",
"        # NOTE(review): dvalues are not clipped here, so a zero predicted\n",
"        # probability at a true-class position would divide by zero\n",
"        self.dinputs = -y_true / dvalues\n",
"        # Normalize gradient by the number of samples\n",
"        self.dinputs = self.dinputs / samples"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "hbib1JllFjmt"
},
"source": [
"\n",
"\n",
"COMBINED SOFTMAX ACTIVATION AND CATEGORICAL CROSS ENTROPY FOR LAST LAYER: FORWARD AND BACKWARD PASS\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "wTdI_gu9Fjmt"
},
"outputs": [],
"source": [
"# Softmax classifier - combined Softmax activation\n",
"# and cross-entropy loss for faster backward step.\n",
"# Combining the two allows backward() to use the simplified gradient\n",
"# (softmax_output - one_hot_target) instead of chaining the two\n",
"# separate Jacobians.\n",
"class Activation_Softmax_Loss_CategoricalCrossentropy:\n",
"    # Creates activation and loss function objects\n",
"    def __init__(self):\n",
"        self.activation = Activation_Softmax()\n",
"        self.loss = Loss_CategoricalCrossentropy()\n",
"\n",
"    # Forward pass: run softmax, store probabilities in self.output,\n",
"    # and return the mean categorical cross-entropy loss\n",
"    def forward(self, inputs, y_true):\n",
"        # Output layer's activation function\n",
"        self.activation.forward(inputs)\n",
"        # Set the output\n",
"        self.output = self.activation.output\n",
"        # Calculate and return loss value\n",
"        return self.loss.calculate(self.output, y_true)\n",
"\n",
"    # Backward pass; dvalues here are the softmax outputs themselves\n",
"    # (the simplified gradient only holds for softmax + CE combined)\n",
"    def backward(self, dvalues, y_true):\n",
"        # Number of samples\n",
"        samples = len(dvalues)\n",
"        # If labels are one-hot encoded,\n",
"        # turn them into discrete values\n",
"        if len(y_true.shape) == 2:\n",
"            y_true = np.argmax(y_true, axis=1)\n",
"        # Copy so we can safely modify\n",
"        self.dinputs = dvalues.copy()\n",
"        # Calculate gradient: predicted probability minus 1 at the\n",
"        # true-class position, i.e. y_hat - y (one-hot)\n",
"        self.dinputs[range(samples), y_true] -= 1\n",
"        # Normalize gradient so its magnitude is independent of batch size\n",
"        self.dinputs = self.dinputs / samples"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "STEbc7_DFjmt"
},
"source": [
"\n",
"\n",
"DATASET\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "7tHQAI-bFjmu",
"outputId": "1e3b7a17-42a8-4faa-8eb7-b72572046230"
},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjgAAAGdCAYAAAAfTAk2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/H5lhTAAAACXBIWXMAAA9hAAAPYQGoP6dpAADWa0lEQVR4nOydZ5gTVReA30myhbZL70VAwEIHaZ8KKlJEAREbKk1RQFDBhgWwI0XBAqIUUVGqICIKCFIUERFQqoCC9F52aVuSOd+PS3Y3bNrupu1y3zzzJJncufdMMpk5c+4phogIGo1Go9FoNHkIS7gF0Gg0Go1Gowk0WsHRaDQajUaT59AKjkaj0Wg0mjyHVnA0Go1Go9HkObSCo9FoNBqNJs+hFRyNRqPRaDR5Dq3gaDQajUajyXNoBUej0Wg0Gk2ewxZuAcKBaZocPHiQQoUKYRhGuMXRaDQajUbjByLCmTNnKFu2LBaLdxvNZangHDx4kAoVKoRbDI1Go9FoNNlg3759lC9f3muboCo4K1euZOTIkaxbt45Dhw4xd+5cOnbs6HWb5cuXM3DgQLZs2UKFChV4+eWX6d69u0ubsWPHMnLkSA4fPkydOnX44IMPaNSokd9yFSpUCFBfUFxcXFZ3S6PRaDQaTRhITEykQoUKaddxbwRVwTl37hx16tShZ8+edOrUyWf73bt3065dO3r37s2XX37J0qVLeeSRRyhTpgytW7cGYMaMGQwcOJDx48fTuHFjxowZQ+vWrdm+fTslS5b0Sy7ntFRcXJxWcDQajUajyWX4415ihKrYpmEYPi04zz//PAsWLGDz5s1p6+677z5Onz7NwoULAWjcuDHXXXcdH374IaD8aSpUqED//v0ZNGiQX7IkJiYSHx9PQkKCVnA0Go1Go8klZOX6HVFRVKtXr6Zly5Yu61q3bs3q1asBSElJYd26dS5tLBYLLVu2TGvjjuTkZBITE10WjUaj0Wg0eZeIUnAOHz5MqVKlXNaVKlWKxMRELly4wPHjx3E4HG7bHD582GO/w4YNIz4+Pm3RDsYajUaj0eRtIkrBCRYvvPACCQkJacu+ffvCLZJGo9FoNJogElFh4qVLl+bIkSMu644cOUJcXBz58uXDarVitVrdtildurTHfmNiYoiJiQmKzBqNRqPRaCKPiLLgNG3alKVLl7qs+/HHH2natCkA0dHRNGjQwKWNaZosXbo0rY1Go9FoNBpNUBWcs2fP8ueff/Lnn38CKgz8zz//ZO/evYCaOuratWta+969e7Nr1y6ee+45/v77b8aNG8fMmTMZMGBAWpuBAwcyYcIEPvvsM7Zt20afPn04d+4cPXr0COauaDQajUajyUUEdYrqjz/+4Kabbkp7P3DgQAC6devGlClTOHToUJqyA1C5cmUWLFjAgAEDeO+99yhfvjwTJ05My4EDcO+993Ls2DGGDBnC4cOHqVu3LgsXLszkeKzRaDQajebyJWR5cCIJnQdHo8mdiAj88gvs2gVFi8Ktt2LExoZbLI1GEyKycv2OKCdjjUaj8YQsWwaP9oJ//01fGR+PvPIqPPGELpyr0Whc0AqORqOJeGTVKmjdCkzT9YOEBBjwFCQnw3PPhUU2jUYTmURUFJVGo9G4ZdDzSrm5VMFx8spQJCEhtDJpNJqIRis4Go0mopH//oNVqzwrN6AsOF9/HTKZNBpN5KMVHI1GE9l4KcOShtUKhw4FXxaNRpNr0AqORqOJbMqW9d3G4YBy5YIvi0ajyTVoBUej0UQ0RsWK0Ly5stJ4IjYW7rordEJpNJqIRys4Go0m8hkxEmw2sHg4Zb09HKNQodDKpNFoIhqt4Gg0mojHuO46WLYcatZ0/aBkSZgwEaN//7DIpdFoIhedB0ej0eQKjCZNkA1/wp9/pmcyvuEGDJs+jWk0mszoM4NGo8k1GIaB1K0Lp0/DihWwfDly441w8806k7FGo3FBKzgajSbXILt2QYf2sGWL8s
kBeP01uOoq5Jt5GNWrh1dAjUYTMWgfHI1GkyuQhARofiNs365W2O1qAdi5E1o0R06eDJ+AGo0motAKjkajyR18+ikcPJiu1GTE4YCjR2HixNDLpdFoIhKt4Gg0mtzBtK9AxPPnpglffRU6eTQaTUSjFRyNRpM7OHnKd5tTfrTRaDSXBVrB0Wg0EY8sXQpnEr03slrhqhqhEegyRVJSkAkTkAb1kbhCSMUKyAsvIAcPhls0jSYTWsHRaHI5Yrcjc+Ygd9yO1KmNtLsNmTkTSU0Nt2gBQd59F25tCcePe2/ocEDvPqER6jJELlyAVrdC78dULqKzZ2H/fhg1EmrVRDZtCreIGo0LWsHRaMKMJCQgo0cjDRsgla9AWrdCvv4acTh8b3vunLr4d74LFi6ETZtg8WK4714VVXTmTAj2IHjIhg3wzNPqjWl6bmgY0KkTdOgQGsEuR155BX75RflBZfSFcjggMRE63Yl4+400mhCjFRyNJozIf/9B7VrqIr5+PezZAz/9BHd3hrs6+bbCPPUk/Pyzeu1UiJzPv/8OfXoHTfaQMG5ser4bT8TEwGuvw/QZGJ5qVWlyhFy4AB+P96xkOhzw77+wZEloBdNovKDPBhpNmBAR6HQnHDqU+Y4YYP58eOMNz9sfOwaffeb9ojNjRu72j1i92n1YeEaKFcd46SVdsiGY7NyprDTesNngt99CI49G4wdawdFowsWvvypfBk8XcBH48AMkOdnz9r4u/g5HuoUnNxId47tNrB9tNDnDavXdRiTN2iZnziCTJiHPP4+89RayY0eQBdRoMqMVHI0mXKxY4fvCceoU/P23+8/89XfIzX4Rt9/u/Tuy2eCO9qGT53KlRg0oU8Z7G4cDbr0V+eorKFMaHu0FY0bD0CFwVQ3kwQeQpKTQyKvRoBUcjSZ85LQ4ZKNG4MvnxDCgSZOcjRNOHntM+di420/DUMrP44+HXq7LDMNmg2ee9dzAZoNmzeDECXjoQTh/Xll0UlPTp1ynT4dHHg6NwBoNWsHRaMJH8+bpJ39PFCkCV13l9iOjXDkVOeTJwmGzwW3tMCpXzqGg4cMoVw4WfA/587sqhBYLxMbCnLkYV14ZPgEvJ558Eh59VL12+js5Fc9q1WD218pa40lxv5hpWnbuDL6sGg1awdFowkfTplCvnucoIcOA/k9gxHjxMRn/MVx9tWrrvLA4X1etCpMmBV7uEGM0bw7/7YERI+G226BtW3jjTdj9H0bbtuEW77LBsFjgo/Hwyyp44EFlGWzTBr6YCus3KH+wtWu9T4larTB7duiE1lzWGCLeirvkTRITE4mPjychIYG4uLhwi6O5jJH//oMWzWHfvvRIKqtVWXY6dICZszCiorz3ce6ciqaaOEElXitbFno+DD16YBQqFPyd0GgA2boVal7rvVFUFAx8GmPYsNAIpclzZOX6reMqNZowYlxxBbJxE0yZAlO/gJMnoVp1NRXQoYNfeV2MAgWgb1+1aDThokIFiI6GlBTPbVJToXr10MmkuazRFhxtwdFoNJqAID17wNSp7tMXGIbypTp8RCnlGk02yMr1W/vgaDQajSYwvPkWlC6d2a/MaYmcMFErN5qQoRUcjUaj0QQEo0wZWPM7dO2qwvudNG4MCxdh3Hdf+ITTXHboKSo9RaXRaDQBR86cgQMHIC4Oo2zZcIujySPoKSqNRqPRhBWjUCGMq66KKOVGzp9Hxo1DGjZAypZFrmuIjB+violq8hw6ikqj0Wg0eR45fhxuagFbt15cIXDkMKzvCx+PR35ahlGkSDhF1ASYkFhwxo4dyxVXXEFsbCyNGzfm999/99i2RYsWGIaRaWnXrl1am+7du2f6vE2bNqHYFY1Go9HkRh7tpeq6iaTnnHK+3rwZ+vQJr3yagBN0BWfGjBkMHDiQoUOHsn79eurUqUPr1q05evSo2/Zz5szh0KFDacvmzZuxWq3cfffdLu3atGnj0m7atGnB3hWNRqPR5EJkzx6YN8
9zaRSHA2bPQg4eDK1gmqASdAXn3XffpVevXvTo0YNrrrmG8ePHkz9/fiZPnuy2fdGiRSldunTa8uOPP5I/f/5MCk5MTIxLuyLatKjRaDQad6xenW618YRpwm+/hUYeTUgIqoKTkpLCunXraNmyZfqAFgstW7Zk9erVfvUxadIk7rvvPgpckjth+fLllCxZkho1atCnTx9OnDjhsY/k5GQSExNdFo1Go9FcJngqAJrddppcQVAVnOPHj+NwOChVqpTL+lKlSnH48GGf2//+++9s3ryZRx55xGV9mzZt+Pzzz1m6dCnDhw9nxYoVtG3bFocH8+OwYcOIj49PWypUqJD9ndJoNBpN7uKGG9KTDXrCZoP//S808mhCQkSHiU+aNIlatWrRqFEjl/X33Xcf7du3p1atWnTs2JHvvvuOtWvXsnz5crf9vPDCCyQkJKQt+/btC4H0Go1Go4kEjLJl4d57VSFbd1it8OCDGCVLhlYwTVAJqoJTvHhxrFYrR44ccVl/5MgRSpcu7XXbc+fOMX36dB5++GGf41SpUoXixYvzzz//uP08JiaGuLg4l0Wj0Wg0lxEfjYeGDdVrpzXHqfA0aQLvfxAeuTRBI6gKTnR0NA0aNGDp0qVp60zTZOnSpTRt2tTrtrNmzSI5OZkHH3zQ5zj79+/nxIkTlClTJscyazQajSbvYcTFwYqV8OVXcPPNcNXV6nnadPhpGUbBguEWURNggl6qYcaMGXTr1o2PP/6YRo0aMWbMGGbOnMnff/9NqVKl6Nq1K+XKlWPYsGEu291www2UK1eO6dOnu6w/e/Ysr776KnfddRelS5fm33//5bnnnuPMmTNs2rSJmIz1TzygSzVoNBqNRpP7yMr1O+iZjO+9916OHTvGkCFDOHz4MHXr1mXhwoVpjsd79+7Fconz1/bt2/nll19YvHhxpv6sVisbN27ks88+4/Tp05QtW5ZWrVrx+uuv+6XcaDQajUajyfvoYpvagqPRaDQaTa5AF9vUaDSaAJOQAMeOqXxwGo0m8tEKjkaj0Xhh3jwVZFO4MJQsCRUqwPDhkJISbsk0Go03tIKj0Wg0Hhg9Gjp2hLVr09cdPAgvvgh33AGpqWETTaPR+EArOBqNRuOGf/+Fp59Wry+dljJN+PFHmDgx9HJpNBr/0AqORqPRuGHCBN/Z/T/8MDSyaDSarBP0MHGNJtjI33/D55/D4cNQpgx07YpRo0a4xdLkEkRgwwbYtQuKFlVli6KiYPNm8FDeLm27v/9Wz7m9RmNCAnzxBaxYofaleXN46CHQQaaa3IxWcDS5FnE4oH8/GD9eFcpzMuwtpG9feP8DDF+34JrLmtWroXdv2LgxfV3JkvDmm1CwoMrk703JiY3N/crNypXKn+jMmfR9mT0bXnoJvvsOrr8+vPJpNNlFn/01uZehQ+Hjj9Vruz19ARg3Dl57LXyyaSKetWvhppuUpSYjR49Cr16QL5935cZmg7vvDq6MwWbfPmjbFs6eVZYo01SLiFJ42rSBAwfCLaVGkz20gqPJlUhiIox+V52JPfHOKOTcudAJpclVPPOM0oc95bWZPh2qVnU1DjoxDLUMHBhcGYPNRx9BcrL778A0ISkp/R5Co8ltaAVHkzv56Se4cMF7m3PnYPnykIijyV3s2aOmZrxZaJKToW9fqF5dvbfZ1GIYavrq22+hdu3QyBss5s71/h04HKqNRpMb0T44mtzJ+fOBbae5rDh0yHcbq1VZMDZtgsWLYcECldyvQQPo0kUpObmd5OTAtNFoIhGt4GhyJzVr+tfu2muDK4cmV3Kx1q9XHA4oXVqFirdpo5ackpQEs2Yp593kZKhbV/n7lCuX876zQ6NGyg/H6bp2KTYbXHddaGXSaAKFLrap4yBzLdKkCaz7w72N3WqFJk0wfv4l9IJpcgX/+x/89ptnH5zYWJV5ID4+MOPt3AktW8LevUppMk11mBqGyrnTvXtgxskKP/8MN97ovc2qVdCsWWjk0W
h8oYttai4PJk9WiTou9QK12SAuHiZOCo9cmlzBiBFK0fCUSeC11wKn3CQnK+XGGZHkVKocDmU96dlTKRuh5oYbVDg4KGXLifP10KFaudHkXrSCo8m1GNdcA3+sg27dICZGrYyNhe49YN26yzLZ35kzyhrw9NPq4rRpU7glilz+9z9VbqFyZdf18fHw3nsqyipQfP21stx4cui1WmHkSM/bm5j8xm/MYx5/8AdC4Azvb7wBc+ZA06bpCl+zZvDNN/DKKwEbRqMJOXqKSk9R5QkkJQUSEyE+HiMqKtzihIUZM5Ql4MIFZcQyTXVBbd8evvoKChQIt4SRiYiahnFmMm7ZUunJgeSBB9Tv4y1iyWpVTsyXWpTmMY8BDGA3u9PW1aAGH/IhLWkZUDmdliWdH1MTqWTl+q0VHK3gaPIAP/2kLsyQOTWQ1Qq33abCmjXh4e67lRXH19k2NdV1xnUOc+hMZwAXq40FCwYGP/ADt3JrMETWaCIS7YOj0VxmvPKKuut2dwF1OGD+fPjrr+z3L6LyxgwfDqNGwZ9/Zr+vy5H69b2XdDAMuOYaV+XGjp1+9APINCVlYiII/ekf0OkqjSYvoRUcjSaXc/y4clD1VVZg9uzs9b9jB9SqpQowvvQSDBoE9eqp90eOZK/P7HCBC0xiEjdzM3Wowz3cw4/8mCsu8D17ujrxXooIPPmk67qf+IlDHPK4fyYm29nOH/wRQEk1oUaOHUPefBOpVQupVBFp2waZNw/xFN6n8RudB0ejyeWcPeu7jWEoB+SscvSoirQ5cUK9z6hE/for3HwzrFsXeJ+VSznIQW7iJnawAwsWTEy2spVZzKILXficz7HiRYMIM6VKqYL3DzygLG3OvDPOcPHOneHhh123OYB/RaD2s5/r0MlqciOyZQvc1AJOnkx3gDp4EBYtgvvuQ76YiuFNM9Z4RVtwNJpcTunSkD+/9zZ2OxQq5J8ylJGPPlLKjTvrkN0OW7eqxHXBpjOd+Zd/AWW5ADWFAzCNaQxnePCFyCH33aeUwo4dITpaKTc1a8LEifD59BS+ts6kO93pQhfe4R3ykc+vfkvhR9bCMGNiso997GVv2u93uSMOB7S/A06dck3G5PyzzZgBY8aERba8gnYy1k7GmjzAE0+oAurepqlAWVp69FChwUWL+u63alUVXeQJiwVuvRUWLsyavFlhLWtpRCOvbYpRjIMcJJro4AkSYESUZe1f/uVWbmU3u7FhS1MAbNiIIYYzuDe9GRhUohL/8i+WCL1XNTEZxzhGMYo97AGgHOUYwACe4qmItroFG/nuO6XgeKNcOfhvj7biZEA7GWs0lxlDh6p8Lr7Og0lJ8MknKgfM6dO++z11yvvnpql8gILJEpb4bHOCE2xjW1DlOMc5jnM8YBYIw4AkkriFW9jHPkBZpcyLj1RSOYtnk5sgvMu7EavcCEJvetOf/uxlb9r6AxzgWZ7lQR68vK05P/8MvlJaHDigEihpskVk/jM0Gk2WKFZMlR3o3dv3dJXDocoGjBjhu9+qVb3nRLHZoFq1rMmaVbaz3a925zgXlPGXsYxbuZWCFKQEJShNaV7lVc6T80Kus5jFHvakTbdlRBCsWGlEI4pRzOWz0pRmJjO5kztzLEOw+ImfmMAEIHMUmCBMZzrzmR8O0XIX3sLvNF7RCo5Gk0coVgw+/FBZVAYM8K6YOBwwfrzvKa3evT3XagLlh9OrV/bk9Rd/LRQGgb8QfMmX3MItLGNZ2rpjHOM1XuMmbsqxkjOf+V73z46dTWziIAeZz3wmMpEf+IF97ONu7s7R2MFmPOOxeYljsWJlHONCKFF4kD17kCFDkPvuRR55BFm0SEVI3XSTSnzkjYoV1aLJFjqKSqPJY+TLp4IynBE6njh1SiV/LlLEc5uHHoLPPlOZfi/tyzCgSxd1ng4m1anuV7s4AutPd4ITPMzDCIIDV03QxOQP/mAkIxnK0GyPkUSSz2maZJKJIorbuT3b44SDzWx2a5
ly4sDBFraEUKLQI++8A88/p/4sIupPOXmSKtH+3QJl/ty1y/OdxoCBGDqtdLbR35xGkwfxp0ikxeJ+Oss0VbSVaapon4UL4amnoGDB9DbFisHrryvlJ9gW9La09dmmHOW4iqsCOu4XfEEqnu+wTUzGMjZHfiR1qOPV0daChZrUDIp1Ktj4o3AWolAIJAkPMmsWPPtMes0U00zPD7Bhg8oN8O18KFHC9U/kzPbYvTv07x9yufMSWsHRaPIg99yTfi51h80Gd96ZXqMUVPqNJ5+EwoVVSHnBgmqK6uhReOcdOHwYfv8d1q9XbV96ybdTcyCoQx1u4RavUzmDGORXRM4mNvE4j9OEJrSgBWMYw2lOu227mc0+p8eOcYxT+PDE9kIvenlNVGhi0p/ceZG7j/u8KmYWLHShSwglCh0iAm+87ln7t9vh55WQkABbtsLIUdCwIVSrDrffDj8shEmTtfUmp8hlSEJCggCSkJAQblEiErvYxRQz3GJocoBpitxyi4jVKqJs4+mLYYhERYmsW5fefvdukdKlM7e32USKFBHZsiVsuyIiIsfluDSUhoIgVrEKgtjEJggyQAb4dbyOkBEu2yGIIYYUl+Lyl/yVqX1/6e/S1tPjrJzN0b59LB+77JdTLkMM6SSdxC72HPUfLk7JKSktpV32y/mwmlYpcKGo9Hv9sLz1lsjOneGWNrCYBw6IaeB9ibKJ+dJL4RY115GV67dWDzUApJDC+7xPNaphw0YssdzP/axnfVDHlZ07lQNer17qeefOzG1ME9m2DVm/HklMDKo8uZUzZ2DKFHjzTZU47vRpmDsX2l6c3bFa0yNSixaFBQtUfSQnjz4Kx45ldgWw25WfTteuodgLzxSjGKtZzTzmcTd305rWPMZj/MmfvMu7PqdwFrCA53gOwMUvRBBOcYpWtOICF1y2uZM7vfqQWLHSkpYUIGdl2h/lUZaylFu4JW0/qlGNsYxlJjNzba6YwhRmBSuoTGUAoi4+AMx95TjXaDkfv1aKwYOVK0rXrpCcHE6JA0hSku82huFfO032CYHCFXFoC44rSZIkN8vNaXeNzrss28XHfJkf8DFNh0PMJ/qrOxmbVd3N2Kzqfb/HxXQ4VLvJk8WsUjn9ric2Rsxej4h5/HjAZYp0HA6RI0dETpxwXT92rEi+fMoyY7Op55gYkTffVJaczZvV65dfFpk+XSQpyXX7f//NbOVxt6xfH7p9DTTNpblbS0LGxxSZ4rKNKaY0lsYetzPEkCWyJKBypkqqXJALAe0z3NjFLgtkgTwjz8id/z4tdPhGsKZmOr4sFpFu3cItbWAwk5LEjI/3bcX54otwi5rryMr1Wys4GnlL3hKLWDyexAtIAUmUxICOab7yivc//pAhYr7+unptMVw/s1nFvKqGmKdOBVSmSCU1VWTUKJEKFdIvBnXrKmVlyhTvSsnIkb77/+Yb/xScTz8N+q4GhVRJdVHc3T2sYpUH5cFM2x6TY9JYGguC2/9IBakgy2RZ6HcqF2KaItWrKwXc0zFmGCK7doVb0sBgPvts+k3bpYvVImbRImJeyFvKbCjQU1QavzEx+YAPPEaCCMJ5zvMlXwZsTDl3DkaN9N5o1EgYMvjiBpc4YToc8M8/MNJHH3kAh0MFWzz7LOzbl75+40ZV26hfP+/bv/YanPeRqiWffyWP/G4XaTi1EV9t3E1HFac4q1lNe9q7/Y8c4ACtaa0revtg9264+mpVmd5bcSCLJftV7yOOIUOgbr3MCalsNrVMm44R7Cq1lzlawbnMOcEJDnHIaxsrVjawIXCDLlsG53xknb1wwXemuo/Hq2iFPMyXX8K8eZkvCs6cNL6KZ54547tO1PXXq6gpb0RHq5pTWcGXUhEqooiiDnW8RkQJQlOauv1sN7s9Ztw1MXHgYAhDAiJrXiQhAZo3V/ckvrBYlM9XXsAoWBCWL4fXXlc1pUA5wnW+G9b8jtGqVVjluxwIiYIzduxYrrjiCmJjY2ncuD
G///67x7ZTpkzBMAyXJfYSLVdEGDJkCGXKlCFfvny0bNmSnW6cUzW+8bc4YQwxvhv5iy/lxokv5eXkSf/7yqV8+KF3Pc8fTp70/nn+/PD0054jWg0D+vTxrzjnSU4ylKGUpSwWLBSlKE/zNAc4kHXBA8gABni0UlqwkJ/8dCWzJ7UgPM3TXpU1Bw4WspCT+PiiL1OmTIH9+31nzQaV2DfYpT9CiVGgAMaLL2Ls2w8XkuBCEsZXX2HUqRNu0S4Lgq7gzJgxg4EDBzJ06FDWr19PnTp1aN26NUePHvW4TVxcHIcOHUpb9uzZ4/L5iBEjeP/99xk/fjxr1qyhQIECtG7dmiTtkZ5l4omnGc18posPaBbVa6/1r52vK3tUlCqPnYfZutV7NmJ/qFLFd5vBg+Gxx9Rrm01FXTnzjd1/v3+zgYc5zHVcx5u8mWYVPMUp3uM96lLX75pSwaArXemFqimRMSrJho0oopjDHApT2GUbQXiMx/iGb3z2L4hWcDzw5Ze+71VAKdKFCqkp2byIEROj89qEmmA7BDVq1Egef/zxtPcOh0PKli0rw4YNc9v+008/lfj4eI/9maYppUuXlpEZvCdPnz4tMTExMm3aNL9k0k7GrsyX+R6dL21ik1pSSxziCOiYZtOmnh3wbFYxr77adw6JB7oEVKZIpEQJ/xyAPS2xsSJDhojs3+/feFu2iDz7rMj994sMGCCyYYP/snaSTh4jjqxilQbSIFvfQaAwxZS5MldulpuliBSR0lJa+kpf+Vv+dtt+gSzwmQMn4/8k0I74eYVq1fw7Vg1DZMaMcEuriXQiJooqOTlZrFarzJ0712V9165dpX379m63+fTTT8VqtUrFihWlfPny0r59e9m8eXPa5//++68AsuGSM++NN94oTzzxhNs+k5KSJCEhIW3Zt2+fVnAu4QP5QCxiEatYxRAj7UJ1tVwt+8XPq2MWMLduVVEEUbbMikuRwmJu3izmrS3dK0FWi5j5YsXctCngckUajz+uQr+9XRiiotwn9MsYfhsVJeKn/p8tDsgBj5F4GR9/yB/BEyLAtJN2PkPLncqNuwgsjeKOO7wfnyASHy+yeHG4JdXkBiImiur48eM4HA5KlSrlsr5UqVIcPnzY7TY1atRg8uTJzJs3j6lTp2KaJs2aNWP//v0Aadtlpc9hw4YRHx+ftlSoUCGnu5bn6Ec/drGLF3iBO7mT+7mfOcxhIxspR7mAjiVr18KYMXDVVVC+fPpcSHQ0PPggrP0D49pr4es5cEd79VnGOZNSpWDhIoyaNQMqVyTy5JNqJs6dZdtmg0qVlM92ixae+zBN5dvwwAOwbl1w5NzIRr9qMq0jSAIEgQ1syFRk0x1xxPEar4VAotzJY4/59r+ZNCnrTuwajS8irpp406ZNado0PZqhWbNmXH311Xz88ce8/vrr2erzhRdeYODAgWnvExMTtZLjhkpU4nWy9x37g5gm9Hscxo9XV2e7XSkuDgfUrQsLvscoUyatvVGoEMyZg2zfDvPnq8iq2rWhXTsMW8QdukGhWjUVBXXnncpZ2FmUGJTS8/jj0KwZLFmiil8O8RLMY7HA6NEwdWrg5QyLs3qQ+JEfeZu3OchBn23jiGMNa9Ky9Woyc9ttKqXBjBmZfXEMAzp0UMe3RhNogmrBKV68OFarlSNHjrisP3LkCKVLl/arj6ioKOrVq8c/F2MMndtlpc+YmBji4uJcFk0YGD1aKTeQXgnSeWu3aRP07OF2M6NGDYxnnsEYPBijQ4fLRrlxcuONqpr3paSmwnPPqTtkEVi92ntlb7td6YnBoAlNfFaPtmKlFZERGruYxbSjHUUpSklK0oMe/MVffMzHtKIVK1jhsw8Dg5d4iSu5MgQS514MQynVb74JJUumry9eHF59FWbNynmkoEbjjqAeVtHR0TRo0IClS5emrTNNk6VLl7pYabzhcDjYtGkTZS7e2VeuXJnSpUu79JmYmMiaNWv87lMTeiQ1FUaO8N
zA4YBFi5CtW0MnVC5h48Z0y0zGO2Dn6wkT4NtvVR0fX9EqqanBkTE/+RnAAI81oSxYeIiHKEMZt5+Hkpd4ida0ZhGLOMUpjnGMqUylPvXpQx8An1NTVqwUoQg96RkKkXM9Viu88IIKF9+6FbZsURXpBw9On3nWaAJN0PXmgQMHMmHCBD777DO2bdtGnz59OHfuHD16qLv1rl278sILL6S1f+2111i8eDG7du1i/fr1PPjgg+zZs4dHHnkEAMMweOqpp3jjjTf49ttv2bRpE127dqVs2bJ07Ngx2LujyS5btoCX1ACAuo3LkJVOLlxAJk9GWjRHrr4Kue02ZM4cxJ+EGkHi3DllhGraFKpWhVtuUaZ3u+eajDnGOaPnCasVPvgAGjdWr721u+66wMvnZDCD6UY3QIVfZ3xuRSvGMS54g/vJ93zPW7wFuCoxduyYmH4nJyxJSX7iJ4pTPChy5lWiolRG42uuSS/+qtEEi6Drzvfeey/Hjh1jyJAhHD58mLp167Jw4cI0J+G9e/diyWCfPHXqFL169eLw4cMUKVKEBg0a8Ouvv3LNNdektXnuuec4d+4cjz76KKdPn+b6669n4cKFmRICaiKIlBTfbQwjzcQgx47BTS3U7Z7Fojxl//kHFv4At7ZC5s0LeZrzw4dVRlZnTkkR2LMHfvpJKTrz5wennMEff3hXoBwO2LBBVREf4cNI9sQTgZfPiRUrn/Ipj/M4k5nMPvZRkpI8xEM0p7nPit+hYAxjsGL1y3nYE1OZyt3c7bffkSZ3kpICp09DXFyeT7eVdwlBVFfEofPghB4zMVGFdvuqrrt0qWrfulXmEPKMYeIDBoR8H26+2XPItsUi8uSTwRn3xht95xApW1a1/fTT9KriGWUDkT59VMHDy5mCUtDv3DbuHjESI6Zc5l9iFvhH/pGn5Wm5Vq6Vq+Vq6S29ZaNsDLdYmTBNkTlzRG66SaRgQZU/yhnabrOpvFBbtoRbSo1IBOXBiVS0ghMezL59vSf3q15NTNMU8++/fStCBfKLmRi6xGqbN/tWMvLlE3EnkmmKrFghMnGiyKxZImfOZG3s0aO9V2C22UT6909vv2qVSOfOIvnzi0RHizRrphKoXe7KjYhInMRlW7mxiU3ulXvDvQu5hm/kG4mSKJdcQjaxiSGGTJAJ4RYvDdMU6d3b9Wbg0sVqVf+nNWvCLa1GKzg+0ApOeDATEsRs0EBMi6GWjMpN0SJiblR3duZHH7l+7mlZtixkso8b513JcC7Ll7tut2KFSNWqrm0KFBB54w3/FY6TJ0WKFXOfLM1iEYmJEdm+PfD7nBe5V+4Vm9i8KjLuEhYaYohNbLJW1oZ7F4JCiqTIbJktnaST3Cg3Sk/pKatkVbatVf/JfxIlUWKI4fY7NsSImKSPX33lX6Zlq1VlZdY3CuElYhL9aTQZMeLiYOVKGPUOXFlNJfYrWRKefAr+/AujVi3V0N8K4SGsJJ6dodauVcnLdu92XX/uHLz8svecNRkpUgSWLlVhtaCcha1W5bKUL5/y/alePevyXY48xVMe/W8sWChAgbScNjZsWLFiYBBDDDOZSUMahlLckHCSkzSlKZ3pzDzmsZKVfM7n/I//0YtefiVwvJTxjPfqtG3Fynu8x2Y2M5jB9Kc/7/EeJziR093JMqNH+xem7nAo/7uffw6+TJrAYIiE8CoRISQmJhIfH09CQoLOieMF2b8fvvoKDh2CMmXggQcwygU2q7HbcTdvhtq1vDeKjYVDhzHi44MuD8DmzVDLD5GOHFFOiaCUm2XLPGdxtdlU2OwlSbk9cuECzJypkvo5HCrB30MPQYi+gjzDZCbTi14YGGnKjlO5+Z7vaUITFrCA7/iOZJKpT3260Y0iFAmz5MGhDW1YwhKPit8whjGIQVnq8zqu4w/+8NomlliSSMKGLe23sGFjDGPSwvUDTXIy/PADHDigTmlt20LBgv4XtDUMGDsW+gRHPI0fZOn6HXR7UgSip6i8Y5
qmmIMGKWdeq0XM6Kj01y++KGYIbLRm8+benYz79A66DJfSooV3J+OMfjCHDvk2eVssIu+9F/Ld0IjIDtkhA2WgNJEmcoPcIMNkmByVo+EWK+RskS0+fY+KS3FJkZQs9dtAGuTImXuWzAr4vk6ZIlKkSHphTxCJi/NdJ+vSZcqUgIumyQJ6ikqTM4YNg+Fvq9saZyEj5+thb8HIkcGX4csvoWJF19S8Tjty06YwclTwZbiEr76CypWVSE6xnHlnmjeH4cPT2x475rs/q1VZfDShpxrVeId3WM1qVrKSQQyiBCXCLVbIWcQiLD7SoR3nOBvZmKV+b+ZmrHhJyuQFA4OhDPU4vZUdvvoKuneHU6fUe+e8RWKisob6m0nZZlOlJzS5A63gaFyQc+fg7WHeG731FnLhQlDlMMqVg/UbYPQYVaeqbFlo0hSmfAZLf8IoUCCo47ujTBlYvx4+/BAaNlSFLm+8EaZNg0WLXHPglC7tvWwCqNw25csHV2aNxht27H7lJ0rFNQW2iclSlvIBHzCZyRzBVVPvTe9s5z0ShK1s5R/+ydb2l+JwwDPPeG/jzxSVYUDv3lDi8tODcy3aB0f74Lgg8+dDh/a+Gy74HqNt2+ALlIu54w413+/JByc6Wrk3FS0aWrk0GicrWEELWnhtE0ssRziSVmtsNat5kAfZxS4MDATBho3HeIzRjCYKlaJ4FrPoQhdAKVKgHLft2LFg8em8vJ711KNeDvcQli+Hm27yr62z9q+7dV27qmSaOgNzeMnK9VtXAdG4cuZMYNtdxgwbppyMk5LcKzlvvqmVG014uZEbuZqr2cEOt07GVqz0pGeacrOJTdzCLSSTDJA2jWTHzjjGcYYzfMZnANzN3dSiFh/yIQtZiAMHLWhBdarzIi96lSuKqIBVaPdnuhjUFPOhQyr6UUQ57xcvrqysDzwA114bEHE0IURbcLQFxwVZtw6u8yMUdsOfGHXqBF+gXM6ffyqz9po16etKloTXXlNVwDWacLOVrdzADSSQkKbkOKeXGtCAn/iJQhQClNLyDd+kWWTcsYUtXMM1Hj9PJpmylOUUp9z62diw0YUuaYpSTlmzBpo08d1u+XLlS6eJbLJy/dYKjlZwXBARqF9PxUW7MztYrVC3LsZa7yGgGle2blWltOLjVXi3NnNrIomDHOQDPuAzPuMUp7iCK+hNbx7lUfKhnMvOc5444rzW8bJh4zme403e9Dre93xPBzogiEt/NmyUoQxrWBOwyvMicNVV6v/nztfGMFQ8w65d/jsba8JHVq7f+ufUuGAYBkyarJK6XFrC2mpVnrQTJ4VHuFzMNddA+/bqDjEnyo2JyQxmcCM3UpSilKc8AxjAbnb73lij8UBZyjKMYRzkIBe4wDa28SRPpik3gIuFxxvHOe6zzW3cxgpWcAu3pFmL8pGPXvRiLWsDptyAUmDGjVPPlyowzojIceO0cpMX0T+pJhNG/frw2xpo3yH9X2+1Qsc7Yc3vemoqCzizZwQCBw4e4AHu4z5WsYpTnOIAB/iAD6hJTX7hl8AMpNG4oShFicV7WW1BqEQlv/prRjMWsYiTnGQPezjJScYxjlL4mfkyC9xyC/z4I9Ss6br+mmtUIIAO/c6b6CkqPUXlFUlIgOPHoXjxkGUNzgssWgSjRql5fdNUqXsGDoQ77/QdPu6J93mfp3jKrd+CBQuFKcx+9rvcdWs0geQxHmMykz364FiwsIc9lCcy8x+IqNl3Zybj2rWz/3/UhAc9RaUJGEZ8PEbVqlq5yQIjR0KbNiqCym5XCs5vv8Fdd8Hzz2evT0EYwxiPn5uYnOQkM5mZvQE0Gj8YwhCKUcxjEr8hDIlY5QaUMlOrlvp/1qmjlZu8jlZwNJoAsmEDPPecep3RR9v5euRIVUsqq5zkJLvZ7TW7qw0bv/Jr1jvXaPykHOVYwxra0tYlkV9pSjOOcQzBzwqyGk0I0HlwNJoAMm6c8s22e4iitdng/fehZcus9etv2n
ub/ktrgkwlKjGf+exnP9vZTgEK0JCG+tjTRBz6iNRoAsjvv3tWbkB9tnZt1vstTGHqUIdNbPKYAdaOnZZkUXPSaLJJ+YsPjSZS0VNUIeI855nMZO7hHjrRiWEM4yhHwy2WJsDExASmjTue53mPyo0VK5WpzB3ckb3ONRqNJo+hFZwQsIlNVKEKD/MwX/M13/ANL/MyFajA13wdbvE0AaRDB+/5NGw26Ngxe33fz/28zMuqn4vGV+PioxSl+IEf9DSBRqPRXESHiQc5TPwMZ6hKVU5yMlOSLAMDK1bWspa61A2qHJrQcPQoVKsGZ89mzppqGKrA5ubNcOWV2R9jPev5iI/YyEYKUpDOdOZBHkxLp6/RaDSBQETgl1/g11/VndvNN2M0aBBWmXSxzQhiKlM5znG30S/OdaMZHbC6K5rwUrKkyoHTti0kJLh+li8fzJ2bM+UGoD71mcCEnHWi0Wg0XpB//oG7OsGmTSrRK4DDgTRtCrNmY5QtG14B/UArOEFmPvO9fm7HzjzmhUgaTSho0gT27IGpU+Gnn5Ql5/rroXt3XT1co9FEPnLiBDS/Mb0Ue8acF2vXws03Ies3YOTPHx4B/UQrOEEmmWSvuUsAUkgJkTSaULB2rbLipKRAt24qDbzVvyhvTRAR01RmtdhYjHzZy/YsqanwzTfw5Zdw7ChUvRIefhhuvFHVcdNo8gIffwxHjrivTmq3w44dMG2aOvYjGO1kHGQa0tBrDhMrVmpTm+EM5yquoihFqUtdxjKW85wPoaShQf76C/nyS2TuXCQxMdziBJSjR+GGG6BRI3jlFRg2TBXYrNjwKI8deJUqVKEIRahPfT7mY5JJDrfIlwVy4QLy5ptQvhwUKwoF8iOtWiHLl2etn5MnVSn4e++BBd/B6tUwfRrc1AIeeghx+C5EGSzk+HFk6FCkQgUkNgapVBF5/XXk1KmwyaTJxXzxhXvlxolhwJdTQydPdpHLkISEBAEkISEh6GP9K/+KRSyCl0dxKe7Sxrj4qCf15LScDrqMocDcvFnMhg3FNEhf8ucT86WXxLTbwy1ejklJEaldW8Rmc5bXvLhcuUM4VEqwWzP9vs2kmZyVsx77PCWn5G15W6pLdYmTOKkhNWSUjJJESQzhnuVuzPPnxbz+f2JaLa7Hns0qpsUQ84sv/O/rtrZqu4z9OBeLIearr/rfl2mK+dtvYn7yiZhffCHmsWPZ2T3V1549YlaskFk2q0XMqlXEPHgw231rLk/MUiXdH+cZl1o1wyJbVq7fWsEJAeNlvCCIVdIvck6FprSUdlmf8WEVq3STbiGR0R9M0xTzjz/E/P57MTdv9n+7f/4Rs0hh9xcHiyFmn95BlDo0zJp1iWKDCJjCn7WFFJvH3/cJecJtfwfloFSRKpmUY4tYpIbUkKNyNMR7mDsx33gjs3KTcYmJFvP4cd/9bNvm+4RfpLCYFy747uvPP8WsXct12+goMfv3EzM5Oev7ePNNYkbZ3MsUZRPzjtuz3KfGFbtd5LvvRN55R+Tjj0UOHQq3RMHFbNbU+/8myiZmpzvDIptWcHwQagVHRGS5LJfb5XaJkiixiEXqS315S97yatlBEJvYIuJiZs6bJ2b1aq4Hef16Yv7yi+9te3T3fAJ2Ltu3h2AvgkfnziJW6yUKTrNffP6++SW/W4tMK2nlVfG9U8JzcslNmKYpZtky3o87q0XMUaN89/XBB0oZ96XkrF7tvZ9//hEzPs6zsv9Al6ztoz+Kl8UQ87//stSvJp2lS0XKllX/aatVxDDU8+OPK8ttILHbRZYsEfn8c5HFi0VSUwPbv7+Yn33m+7j64YewyJaV67f2wQkRzWnOfOaTTDKppLKOdRSnuM/t7NhZz/oQSOgZmTUL7uwI//zj+sFffylv+lWrPG+bnAxffeW9foHNBp9/Hhhhw8TJk66BBgA0XQ12797F5znPFra4rPuHf1jM4kx5k5w4cPAN37CPfTkROe+TkACHDnlvYxiwZb
Pvvux2/0pPezvOAYa9BefPuzlYUHrxV18hf/3lexwnf/zhu40IrA/vOSS38vvvqvL44cPqvcOhvk6HQ9Wd69s3a/0lJalTXbt2yp3r4YfVGABffw0VK6o6dV27QqtWUKECTJ8e2H3yi/vvh1taus9aahhwz73QunXo5coiWsEJMQYGlotfu78FFP1tFwwkNRX690s3TGTENNU//cknPHeQmKjCiXzh60IU4dSoofQ0FxxWMHzn0bz09/2d331uIwhryUZRq8uJmBjfSolhQKwfEVWNG3t3ugSIiYVatTx+LHa7ir7ypex/8YVveZxER/vXLirK/z41aQwdqn52dz+9CEycmPm+zxMHDkDt2iqycuFC5aP++efq0GrXDu6+Gw4edN3m8GGla8yYkfN9yQpGVBTMnw8Dn4ZCGRKIFi0KQ1+BqVNzRdSgVnDCyE3chIH3gyQf+WhEoxBJ5IbFi1V4kCdME9avRzZ7uAuOj/ddfEkEckHSKG88+qib69aSlmD1flEsQhFqU9tlXbAqh5/lLJ/wCd3pTk96MpWpJJGUpT5yE0a+fNDyVu8x+nY73Hmn786aNIE6ddxosRexWqF7N4z4eM99nD0LyT4i50RU+Lm/tGjhWSYnFgu8PQzp0wfZsMH/vi9zTp5U6R68BcdZrSpa2hciKqJy92713qkwOc8Z33+f+f4xIwMG+DYOBhojNhZjxAg4dBjW/gHr1sOBgxhDhmD4OuYiBK3ghJHKVKYjHT1e0CxY6E1v4ghuOQmv7N3rX7t97qdLjOhoeOAB7ydhh0PZZHMxdevCwIGXrNxcC5bcAqnuf18DgwEMIAZXBbA5zX0qOdFEcz3XA/A3f7Oc5exgh8f2K1lJecrTm95MZSpf8AUP8RCVqcxfZGFKJIyICLJyJTJhAjJ9OnL6tO+NXnzRs+XFZoN69eCWW3x2YxgGzJwFxYq5KkyGoZa69WDESO+dFCoEBQr4GgjK+V+h2yhZEnr08F4AzTRVqv1JE6FBfWTQIJWCX+OV06e9Kx2gvvYTJ3z3tWqVmiXMrpJy6BBkMatBwDDy58do0ACjXj2M7FYKDhdB9wiKQMLhZOyJU3JKrpPrXKKsnM/tpJ0kSVJY5TNnzfLtbGYg5tq1nvvYvVvMokU9h9g+0T+EexQ8TFNk3DiRihXTHY0LVj4qxQ/Ucomcs4mKqrpf7he7pIfIb5Nt8pg8JsWleFobdw+LWKS39Jaf5CepJ/VcPmskjeQXcXX83i27Jb/kd5uuwCpWKSbF5IScCPXXlSXMVavErHal63GTL1alGXA4vG/75ZcqWspqUc7uTof3+vXEPHw4a3IcPizm4MEqLLtQQTGvvVbM998X8/x5/7bv3z/gDvfm+fNitm2bHt3iz/910qQsjRHpnJEzskSWyCJZFLCgjLNnRaKj3UVHpi8Wi8jo0b77evllNykksrhkIaNBnkZHUfkgkhQcEZEUSZEZMkNuk9ukntSTTtJJFsgCcYj3E3coMM+dU1Ef3iI0ql0ppml672fbNpWPJOO2cYXEfPVVnxeo3IbDIbJ9u8jmzSLnz4skSZJ8KV9KW2krDaWh3Cf3yRJZIqakf2eLZJFEO2LE6sis2BhiuCi+raW1zJN5YhWr2zDyKImSFbIire+n5WmPEVnObUaJ70iicGFu2KCUGU9hq6VLqXwyXpQM8/hxMd95R8wePcTs21fMxYtDetyZR46IuXatUtTKlPasiDz1VPb6N00xf/xRzIceVIqbr6iq6tV8/mdzA8mSLM/IM5Jf8rtEnj4kD8kpOZXj/rt3966YREWJHPVDn3rhBdU2JwrO0qU53p08QcQpOB9++KFUqlRJYmJipFGjRrJmzRqPbT/55BO5/vrrpXDhwlK4cGG55ZZbMrXv1q2bAC5L69at/ZYn0hScSMd8/33vJ8tvvvG/ry1blFVowQIxz50LotS5h9VbT4vlfAHB7jkh5BVyhXSWzrJAFkiqpMoVckWa4uNOYblGrk
lToCpJJZ/h6s2kWZi/Bc+Yd9zu2fqXcalXT8xTp8Itrgvmzp1iduzgqpzVvFbMhg1cw86LFBZz2LCAKB3moEH+WXL27g3AHoYPhzikg3TwaJmsLbW9JtL0hz17REqUcJMC4uLy7rv+9fPNNzlTbsqXVyHkmghTcKZPny7R0dEyefJk2bJli/Tq1UsKFy4sR44ccdu+S5cuMnbsWNmwYYNs27ZNunfvLvHx8bJ///60Nt26dZM2bdrIoUOH0paTJ0/6LVOoFZzTclpmyAyZJJNklaxyuXPPLZjvv59uyXGemEuWEHPGjHCLlqv5+2+R2Kc/EBzulRXno4JUSNtmuSz3qbAgyB/yh4iIlJSSPtvWkTph+ga8Y5486V/+GQOlBN13X7hFTsPcsUPMokXcZxi2GGJ++KFS9H/6Scwk/6aizd27xVy6VMz16z0qQ+Yzz6jEgb6+r127Arm7IWeRLPJ6TBtiyHvyXo7H2bVL5I47VP4bp8JRqZLIZ5/530dqqlJSPClKzr4zjuF8bxgiX3+d493IM0SUgtOoUSN5/PHH0947HA4pW7asDBs2zK/t7Xa7FCpUSD7LcDR169ZNOnTokG2ZQqXg2MUuL8lLEiuxLn+8q+VqWSOerViRinn+vJizZ4s5bpyY8+eLGegsV5chHTuKGF88KKR6nkJyPrbIFhERmSJT/FJwZstsEfGeNBBRJv3u0j2cX4NHzJ07/VNuMio5Bw6EW2wRETHb3+G9tIOfmY9FLpY6ueUW1z6urOr2BsMvv7kSxcUMVxa5AHGP3OPVV80QQ66VawM23oEDIitWiKxfr6ahs8q6dSLx8a5KjsWinlu2FJkzR6RyZVcFp1IltV6TTsQoOMnJyWK1WmXu3Lku67t27Srt27f3q4/ExESJjY2V+fPnp63r1q2bxMfHS4kSJaR69erSu3dvOe4l3XpSUpIkJCSkLfv27QuJgvOEPOF2GsEiFskn+WSjbAzq+JrI5tixiye4yd0Fu3cLDoI8Jco/4zv5zi8FZ7ksFxGRb+Vbn23Ximcn8XBiJiT47zjrXGbPDrfYYh465J/lado0331t3eo5+7GBmBMnurZPSVF+Pp58lqwWMV95JVi7HjIaSSOfx3WcxLnd9i/5Sx6Xx6W5NJc75A75XD6XC+KfspkT9u4VefZZkTJlRAoWFKlfX2TixPSMyA6HyC+/iMycKfLzz9lTpPI6EaPgHDhwQAD59ddfXdY/++yz0qhRI7/66NOnj1SpUkUuZLjTmTZtmsybN082btwoc+fOlauvvlquu+46sXuYpBw6dGgmn51gKzi7ZbdHHwlEzRF3kk5BG18T+WzcePFO7b6v/FJYrpQrRUQ5LReRIl7blpWyaRFappjSS3ql3dVmVLQRZKgMDeO34Bvz/vuypuREgD3f/O03/2R97DHfffnyQSpYQMwzZzKPX6ig6/fmVHha3ZqtmleRRntp79UyiSBVpIrLNqaYMlgGC5Iezej8H1SVqrJH9oRpbzT+kmcUnGHDhkmRIkXkr7/+8tru33//FUCWLFni9vNwWHDelDd9/vksYpEE0Y7OlysHD15UcKKTPBbkzPjI6IczTsZ5bfu5fO4ylimmTJJJUlNqprVpJI1klswK2f6aJ0+KuXKluvj66XMicnGaKq6QfxaRKFuWQ7+Dgbl9u38KTv163vs5fNj3flsMMadMybztrl1iPvWUqgxdIL+Y9eqKOWFCnplani2zfZ5f35A3XLb5Qr7w2N4mNqkpNSMielXjmYipRVW8eHGsVitHjhxxWX/kyBFKly7tddtRo0bx9ttvs3jxYmrXru21bZUqVShevDj/eMiZHRMTQ1xcnMsSbI5wJK0kgydMTE7gR5YoTZ6kTBmViNbqiIFv71B2RQ/YsNGMZmnv+9CH93mfAqjEcc7EgHHE8Qmf8BAPuWxvYNCTnmxiE+c4xwUusIY1dKZzwPfrUuT0aeSRh6FMaWh+IzRtAuXKIm+9hfgqfwAqy1
lKiu+sa1Yr3Hc/RqlSgRE8A7J9O/L440jZMkixYkirW5Fvv/WcMK9aNd9J/QD+/df75/v3+95vmw327Mm02qhcGWP0aIzDRzDOnsNYvwHjkUdUGv48QAc60IQmbpNi2rBRjnL0oU/aOkEYxjCP2ePt2NnMZn7ip6DJrAkxwda2GjVqJP369Ut773A4pFy5cl6djIcPHy5xcXGy2kdlXif79u0TwzBk3rx5frUPhZPxcBnuNnzx0juGM3LGd2eaPMuvv6o8G0bDP3xacFbKykzbn5Wz8qV8Ke/IOzJdpst58S/hXKgwz5wRs3Ytz9WzH+7pNTTaTEpSDrGe/Emc/RiI2bSJmImZK7PneB9++EElCsw43ePcn759PEcz1arp24IT795HJK2P3bt992G1iPnhhwHf79zAaTktd8ldmdwBrpfr5T9xraB+SA75/I/ZxCbPyDNh2pvci3n+vJhTpoj56KNi9umjglGC5MQeMVNUIipMPCYmRqZMmSJbt26VRx99VAoXLiyHL5qRH3roIRk0aFBa+7fffluio6Nl9uzZLmHgZy7OMZ85c0aeeeYZWb16tezevVuWLFki9evXl2rVqkmSn2bvUCg4B+SAVwXHJjbpIl2CNr4m97BkiQoh5ZkR6uhISZ/atJrq9SuSO51CzREjvCsnBmL+9pvn7b/6yvcFPjpKzM8/D4pfiXnypJre8TZNNHWq+21fftn7djarmO1u8z7+nj2+vz+bVUwPaTcuF3bLbpkkk+Rj+Vj+EvcuDftkn08FJ0qi5El5MrTCB4A//xT58kuVbycIOr5XzFWrxCxeLP2/6ExRcEUlMbdtC/h4EaXgiIh88MEHUrFiRYmOjpZGjRrJbxlOaM2bN5du3bqlva9UqZJbh+ChQ4eKiMj58+elVatWUqJECYmKipJKlSpJr1690hQmfwhVmPhQGer2T2QVqxSWwrJTdgZ1fE3w2b5dpH9/Fc5ZpoxIp04iy5ZlvR+7XWTRIpF+8xZLnf1tJcaMlRiJkZbSUhbIgkCLHTLMqlV9+8z06uV5+wED/Mvpsnt3cOR/913vSorVImbDhu633bvXt+yLF3sf/9lnfSs4rVoFY9fzHHaxSykp5VPJ+VK+DLeofrNxo0iDBq6h5fnziwweHJoILHP3buXk7u4YtVlVNN/p0wEdM+IUnEgjVAqOKaaMltFSVIq6/IFukBtkq2wN6tia4DN/vkq/njGVu/P14MHhli4y8Es5aXlLevuNG5WJu24dMRtdJ+YN1/sXQRWk3DfmPXf7VjAMxPQQwWnOmuVa/8qp1BmI+dprvsevWMH32C1aBHq38yxvypseLesWsUhxKR72+n/+smOHSFyc++SBhiGSwTMkaJhPP+09ws9iiPn++wEdM2KcjC93DAye4ikOcYilLGUe8/ibv1nJSq7m6nCLp8kBR47A3Xer6sAZKwQ7X7/+Onz/fc7GOM1pVrGKtawlhZScdRYuihTx/rnVCiVKACDvvQd1asPECfDXX7B2rSrD7KsEs2HA5s0BEtiNfL6wWJQMbjA6d4YNf0LPh6FcOShVCu64A35ahjF4sO++z53z3ebsGd9tvCDnziG//YasWYNcuJCjviKdZ3iGVrQCcAkCsWEjlljmMpcYckfF7Ndeg/PnweHI/JkIfPihbx/2HDN7lnsBMvL17CAL4Rmt4ISAaKK5mZtpT3tqUCOssohpIuvXI8uWIXv3Bm+clBRk1izkxReRV19F1q0L2ljhYNIk74E9ViuMHp29vk9zmkd4hFKU4nqupxGNKEc5RjACEz+ijiKJrt28KwkOB3R5AFm2DAY8pdZlVGh8RRA56dgB2bUr22J65JaW4C3Sy2qFFjdhWDyfSo1rr8UYPx5j336MQ4cxvp6D0aKFf+Nfe633789mg5q1/OvrEiQpCXnmGShdCpo1VdFtZcogL72EpORShdoH0UTzLd8ykYnUoQ6xxFKc4vSmN3/xF9dzfbhF9IsLF2DGDO+6v9
UKX3wRZEF8KeAicPZskIXwOv7lx+VabNP88ksxK1/haj5s3VrM7dsDO86KFSr3htPpzGmSv/kmMb1knM5NtG3ru0BebGzW+z0jZ6SW1PKYQ+kx8Z0YLpIw9+9XDojuzNg2q5j/ayam3e5/QU1vjrYDBwZe/nPnVM01b7L98EPAx00bf9o03/vuxUnbY78pKWLecrP76TeLIWaH9iGttq7JGmk5tLwsUVEijz4aXDnMm1p4/29E2cTs2SOgY+opKk0m5KOP4MEH4L//MqwUWLoEmjZBPOQQyvI4W7dCm9Zw/LhakZqafpuxciXcdht+5T6JcDzMSGS5zaWMYxxb2IID92bfj/mYdeQea5hRrhys/Bmuvjglm3E65/bbYcH3GFYrLFni29TtDYcDvpmbc4EvwcifHxYugvh41x/UZlPPI0dhtGkT8HHTuOce6Nw588HkfP/0MxiNG2e935kz4aef3FunRODbb+G777LeryYkFCkCvtIZmSaULRtkQfo+7v1/a7fDY72DLIRntIJzGSCnT8PTA91/6HDAmTPw4ouBGWzkSHVQuztxOhyw9ndYvDgwY4WRW25R12pP2GyqTVYZz3iv01A2bExkYtY7DiPG1VfDXxvh519g1Dvw/gewYyfG3G8w4uNVsrzU1JwPlJyc8z7cYNSrBzv/UbI3bw6NGsGjj8HGTRhPPx2UMdPGtljgq2kwchSUL5/+wVVXweRPYcSI7HX8ycfeD2CrVflCaSKS2Fjo0iVdz3aHacJDD3n+PCB06qQEAVcl3HlsDXoBo1GjIAvhGUPE30nuvENiYiLx8fEkJCSEJKtxuJGPP4a+fbz7M1itcOQoRtGi2R9HBArkh6Qkz41sNnjwQYzJn2Z7nEjg5EmoVEk5+XkySP30E9x0k+u6AxxgHvM4wxlqUIN2tCOK9FuxKKKw492pthWtWMSinO5CxCAvvADD3/bd0GLx/GXbbNCuHcbcbwIqWyQhpgmHD6v/asmSGNkxETr7Kl8ODh703qjGVRjbtmV7DE1w2bULGjRQ96fujCgDBsC77wZfDjFN+OgjGDM63au5dm147nm4//4cHafuyMr124v+p8kz7NmjLgDe7pIdDnXCy4GCg8PhXblxtklMzP4YAUYENmxQM3dFi8L113u/K3JStCgsWAC33aZ22XmCsdmUAWv0aFflJoUUnuAJJjABQbBixY6dkpTkcz6nNa0BiCfea/kOK1aKUzwHexxZyP79MNIPK0R0tPLq9oTdDv36B06wbOJwqOi5339Xx0Lr1tC4cfamKy/FsFgCN+dQrJh3BccwoGSJwIylCQpVqqggw0cegdWr09cXKgTPPRc4o7wvDIsFHn8c6dtX3flZrRiFC4dmcB9oBedyoFgx//wbihXL0TCGzYZUqAD79nluZLHAldVyNE6gWLUK+vSBTZvS15UuDW+/Dd26+d7+xhth506YMAHmz1czJM2aQd++6gYmI73pzRSmICgrmtNKc4xj3M7trGQlTWlKV7ryPu979MFx4KALXbK1vxHJV1/5d/X/7HPYvRtefEFZMJzHs/P1s89hZGdOMICsWwd33qkO/6gopTy/8go0aQJz56pjK2J4qCsMet6zRUwEHgz2/IYmp1xzDfz6K2zZAlu3qvJnLVpA/vyhl8UwjBxfQwJOQN2bcwmXWxSVuW+f92RlNquYLZoHZqxhw3wnRtsZ/gzOv/0mEh0tYrG4j0AYPz5wY+2QHV4zp1rFKq1EZaPdK3uliBRxG0VlFas0k2ZiF/dJ5XIj5pNP+pcM8J9/VPvvvxezZUt1zFotKhHg1197rWcVCv77z3PSNZtN5JprRIJQSSLbmKdOqSSC7pIoRtnErFFdzHPnwi2mRpMJHUWlccEoXx7693d/p2wYannjzcAM1r8/1KuXOXeHc+xXX8O48srAjJUDnn5a3fh7uoF95hn/cqz5wwxmuK147MSBg8Us5gQnqEAFVrKSaigrlwVLWvXjdrTje7732l
euo2xZ73lmnCQkAGC0bYvx44+QkgqpdoyVP2N06hTwef6sMmaMOl7cGUrtdnV3PWdOyMXyiFG4MKxYCXXrqhVWa7pjaJOmKhFhOMwAlxHbt8PgwfDoo/Dqq64BrprAoKeoLhdGvaP8GMaMUb44TofN0qVh8qcY//tfQIYxChRAli2HoUNVFMaZi1lWnRlcH3ggIOPkhF271PSUN86ehXnz0gMEcsJJTmLB4nHayckpTlGMYtSkJlvZys/8zO/8zn72E0ccFanIWc4ST3zOhYoUunSBFwZ5b2MYMHky1K+PnD+vQsoTEqBaNaRx47ArN6Bm2rzNAlssMH063HdfzsaR8+dVhrelF8PqmzSFbt2y5fNgXHEF/L4WWbsWfv5Zfc833YThVHo0QcFuh8cfh08+UX5ahqFOxa++Cs8+q6bII+CQzhuEwKIUcVxuU1QZMY8dE3PSJDFHjxbzu+9cStqbpinm2rVqGmDz5pyPtW+fmDfdlDmJ2B23i3nsWI77zy4//+w7SZbVKjJ8eGDGe1/eF0MMr9NU0RItZ+SMy3YbZINcJVcJQtr2FrFIN+km5+V8YIQLI6ZpirlmjZhFi/ieoipZQsy33xYzrpDr+muuFvPXX8O9KxIb6/uYat48Z2OYGzakJ9B0TtFZDDELFBBz0aKA7Icm+Dz7rKoV5ek4GTEi3BJGNrrYpg8uZwXHE+bcuWJWu9L14tGgvpirVmWvv/Pnxaxdy3MG25rXhm2Of8cO3xcjEPnss8CMd1yOS7REe1RubGKT7tLdZZt/5B+Jkzi3vjgWsUh7aS+mhNbvxHQ4xFy0SBXYe+oplWU3m44l5r59YjZu5H+m4pho9+utFjHzxYq5bl2A9zZr1Kzp/aJls4k8loMk1ObJk2IWK+r+/2S1iBkbE/CM5JrAc+qUSEyM9/NO4cIiSbmj3mdY0AqOD7SC44o5fXq6deXSE2dMdLbukM2JE71fsCyGmJ98EoS98Y/rrvPsYAwi+fKJJCYGbrwP5AMXS0xG5aaklJS9stelfS/pJTaxebX6rJbVgRPQB+bu3WJee026E6rTMbhUSTFXZ00O8+xZMa+s6l+VcOdx6KtMQ5s2Qdpz/xg3zruCAyJ//JH9/s133sn8/7zUMTgU5aM1OWL6dP9urpYsCbekkYt2Mtb4jaSkQL/H1aTvpYkATVNNGD/1ZNY7/vRT75lSAT6dnPV+A8SoUV6LQPPGGyqfRKDoRz++4iuqUjVtnQULHejA7/xOBSqkrTcxmcpUrwn/bNj4ki8DJ6AX5Px5uPkm2LFDrbDb03MqnTgBrW5Fdu9Ob+9wIMuXI9OnIz//nLk0x9SpyhHKV5VwJ6bp3SnB4YDFi5CjR7OwV4GlZ0+44QbPh/yAASopW7aZ9433RJ12O8yNIC9mjVv8DVy4tN3x4yq/0g8/qFQzGv/QCs7lzg8/qIuUp5OnacLatUhWM5oeOuQ9OkZEtQkTN96oThiVKrmuj4+HDz5QF6RAcz/3s4MdbGITv/IrBznIbGZTCVchkkjiAhe89mVicoxjgRfSHdOmqRAPdwqJw6FKG3/wAQAyaxZUvkIpRF3uh+Y3QtUqyPz56dt85adi5ozwq1HDe0VtUMdTGBWcmBhYuBCef14dQ04qVYJx4+Cdd3I4gK8Emv620YSVa6/1r90116jnM2eU8ly2LLRrpxKLlikDjz0WuCjPvIyOorrc2bfPvfXGXTtnwUR/qFQR9vznWcmxWKBiJfefhYhbb1WZxX/+WeWQK1ZMrYuNDd6YBgY1qem1TT7yUYQinOKUxzYWLJkUo6Axe5b3MgkOB0yfhjRpAvfdm/nzvXuhYwfkm3kYd9zhXaHOyBVXwBNPquNzoA+N0zBUpF4YyZcP3npLBRDu2qUiZKpW9W3I9IsGDVXKbU9WL6sV6tfP8TCSkKCi1M6fh1q1dERVgGnUCGrWhG3b3EfdWa3q5uvKK1Xi0FtvhT/+cG2bkgITJ8Lff6ufylfRzcsZbc
G53ClRwr+LTYkspm1/+BHvFhzTVDnGw4zFouondu+uotiDqdz4i4FBL3p5zXdjx04PeoRGoMRE37lqzp3zPJXpPL4GPKXqlVWv7t0iYxgq/PmffzGefFKFkntrb7WqOlRZPUaDREyMuheoVi1Ayg2olNvepvQcjhyVqhC7HRk0CMqUhrs7Q7euUL8ecl1DZMuWbPerccUw4PPPlTJ8aUkYm01Z/z7+WL2fNg3WrHGvCJkmrFwZWbmVIhGt4Fzu3H47FCzo+XPnFEFW7+TuvlvVLXB3YbJaoWlTuNfN3b4GgGd5lvKUx+bByDqAAVzFVaER5tqa3gt0OWskHT7suY2IMmusXg29HvWeNEYE+vWDEyeQ995TDlE33Oi+rdWqNIo33/JvX3IpRq1aMPxiza6M/ymnBtW7t9LQs0uf3qom2KXTXH/+CTdcj+zalf2+NS7Uq6esMvfem259iYlR5WHWr1eKMagSMN4UZItFtdF4Ifg+z5GHjqJyxRw92nu00/z52ev3zBkxH3nYNRV/lE3Mnj3EPHPGdweXOQfloHSWzmIRS1rkVHEpLiNlZEhDxM0//vAd6dSrl38RUbNmqfw399ztPirIahGz1a3qmIyOUu+dz+7CxRvUFzMn4Um5DHPBAjFvapH+3dWvJ+bnn+eoVIW5ebPviMf69dLKZWgCx4ULIocOuQ8Lr1DBd7RVtWqhlzncZOX6bYj4Mz+Rt8hKufXLARGB99+HIYOVV5vTJ6dkSfhwLEbnzjnr//hxVV4ZoFEjjOJ5pxp2KDjMYbawhXzkoyENiSY65DLIoEEwYnhmfy2LRXk+9ukL7W7z3dHPv2D873+I3a4cVt5/Lz0sJC5eTcVcdRX06O5+e8OAtrepOcVq1TDq1MnpruVK5GKdESMADhgyaBC8+45/UW1PPgnvvKsqSGuCynXXqQKunq7QFgv8739qqupyIivXb63gaAUnDTl/XoUWHT2qwj9atQrICTS3YJqwfLmqEVOokLpuFy3q2saOnW/5lvnMJ4kk6lCHHvSgFJ4dXHexi9/4DQsWbuAGylEuuDsSBEREOQ8Mf1t5N4IK5+j/hCrsZRhQoTwcOeK+A8NQTsM7/3G5OEpKiiqFLKIcV2JjoXo15f3tCYsF9uzFKJf7vsdIRHp0hy+/9D9s/9XXMAYPDqpMGhV916+fdxfJyZOhR4hc8SIFreD4QCs4mkv5+Wc1B757d7qRIiYGnnoK3nxTuT3sYQ+taMUOdmDDholyvLViZTKTeZAHXfo8ylF60pPv+R5B/c0sWLiHe/iYj4kj9x17IqJ8bex2KFsWI4M/iEyfrkLDL8WZw2bOXIwOHdLbHzmicuLs2QPFiytn4qQkqF3LuxCGAR98iNG3bw73RZW0slqV4XLZMuUa1Lixcju7XJAXX4RRI/1XcAoWhEOHMQoUCK5glznnzkHDhrBzZ2aXNatVRWP99ltkBEaEkixdv4M3Uxa5aB8cTUbWrVPp091lNjYMkX79RFIlVapLdY/ZhQ0xZKWsTOvzjJyRGlLDbakFq1ilqTSVFEkJ414HB3P6dDHLlXX14ahYQcy5c13bDR+u/LGcPjbOEgRNm/j244myifn229mW0W4Xef99kSpVPPs23HKLyP79Ofwycgnm33/7XzLDuXz3XbjFviw4elSkbdvM56T27UVOnAi3dOFB++D4QFtwNBlp1w4WLfIc2GMY8NHhufQu2cljH1astKUt81EJ7d7jPQYwIM1y444ZzOAe7smR7JGIOBywYoWy9JQrBzfc4DotNXEiPNorZ4PM/cbFGuQkIUHlCPn0UzXTWrEi9OqlrHOxsWoa8r77YPZs76Z/mw3Kl1dRLUWKKMveH3+oqJcbb8w8dZnbkf79YOxY/zeYPgPjnrx37EYq//wDv/yizkU33giVK4dbovChp6h8oBUcjZOTJ9XsiLd/gdUKddf15K86X3gtn2DBQjLJ2LBRhzpsYpNHBceKlVu5lR/4Iae7kKsQh0
MlgTx4MGcd/bsL45Kz/MGDqlzCf/+lp+1xzo41bKiSon37LTz0kH9DWCzw3HMqUnrRovRjJDpapXB69101jZkXENNU4fgjhqskf77YtBnD37S8Gk0Aycr1W7vCa7KNHDqEjBqFPPUUMmwYsmdPuEXKMidP+s5zaLHAWUdSms+NJ0zMNAXoEIe8Wm8cODjAgSzLG+mICDJzJnLD9UiB/EjheKRbV+TPP1WDdetyrtxYLMq78hIeeEAlTc6Yk9Bp2F+/XvlTffih/8n3TBOGD1clGDIeIykpMH48dO7sX47M3IBhsWAMGQJHj3nPUGixqDoCbw9DmjVF2rdHZsxQzuIaTaQR3NmyyET74OQM0zTFfPVV5TeR0YfCYoj51JNiOhzhFtFvEhJEbDbvuSYsFpHbf37bJR+NOx+cqlI1rd+6UjdT5fBL/XDaSbsw7nngMU1TzId7plf4zugzE2UTc/ZsMZcuzbq/h7ulVSuXsTdv9p0zJCpKpEABEar/LYx5Qth8jbDlauH9fkKNbX5Veb50Wbo0TF92EDE3bRKzcHzmau82a3pOK+dnzvxE9eqJefx4uEXXXAboauKa4PLBB/DKUOW0YpqqsrTDoc75770Hr7wSbgnTEIFff1V+GG3bqpDKn35Kv/OOi1NJl30l6n27Rg+vpRMAnuCJtNeP4L0MhQMHPenp937kCr78Mt2yktGhyW5X7x/oohxavFUG9wfDgBjXXECrV/veLDUVzHunwZZroe84uHYrXLMNeo+HzTWhS9aqs9tsMGVK+vtt21Ro74cfqrJRuRWjZk1Y+4eKanOmiYiOhlq10yOtnM9Oc9mmjfDgA6EXNsSsYAUd6EChi492tGMJS8ItlsYTIVC4Ig5twck+ZnKymMWLeb+7zhcrZgR8t6mpIl26qDttp5XG+dymjcj586rdP/+IFCkiYrW6v0t3BuxMlaliiOESSWVcfLSVti5RUefknNSSWm6jqCxikRbSQlIlNQzfSvAwGzZIv6N3t1gtYr71lpi3tXW18GRnufoqMRcvTht70iQ/LC41tolhtwoONzY1E8FuEa7ekiULzk03qUiXW29Nj3AxDPW6cWORPXvC+IMEAPP8eTEPHhTz9Gkxixbx/bts2RJukYPGaBktCC7/f+frYTIs3OJdNmTl+q0VHE2WMH/6yb8L0MyZ4RZVBg1Kv9i4m3bq2TO97d9/q9DgjG3KlBH5+GPXPn+Wn+UOuSNtuupKuVLek/fchnyfkBNyr9zrMrUVJVHyiDwiZ+VskPc+tJgOh3flxpnyv9OdYv77r5glimeeAsnK4hzrww9FROTffz3/1mnhtR/2E4vDfZg/gpBiEz7s67dyY7OJ3H+/SM2a7qc5bTaVbv/kSe/f3ZEjImPGiDz3nMi774ocPBiCHyyLmL/95t9v8t574RY1KKyX9Z6Pm4uPVbIq3GJeFmgFxwdawck+5rx5/l2AJk0Kq5xnzojkz+/7AnXokOt2u3aJLFwosmqVsgB5wi52SRI3BWTccEAOyFyZK/NknhyTYznYq8jFNE3XmmOeLoD33qPa79kjZo/u6bWlLIaYhQqmv/ZX0bFYxNy5U0REOnTwbIUDEbbV8HmRYnu1LFlwnnvOt//WiBGevjOR119Xx6HFonyELBa1Dy+9pD6PFMxVq/xTcN55J9yiBoWH5WGPObCclpz75X6/+nI4RFauFJkxQ2TFCvVe4z/aB0cTPPxN8XpViCpde+DXX31Hu9rtKnQ4I5UrQ+vWqhC6N78cK1Zi8C9GuCxl6UhH2tOe4uTNOlyGYUCrVu6rxzsxTWjVWrWvWBFj8qdw4iTs2g0nT8HBQzDqHahWHQoUUIloChXyPrDFgI8/BlTuG2clZgDq/AnDBsH4x+C54RCV6ntHLCb0mAyrmsG+8rCuPvT7AAqczdT0tttUUJi3qCzTdPXTych778Hgweo4dLqymaZyV3rzTRXBFTHUrOk7Za5pQpMmoZEnxPzKr15TRNixs4
pVPvuZPx+qVFG5bO69F5o3V+ecuXMDKa0mjRAoXBGHtuB4xvzzTzHnzBFz2TIxPZgwzBtv8OxDYbMq/4gw337On+/fHXiYDU15CnPZMs939zarmKVKinnuXNb6zJ/Pt+XglptFRGUeLldxh/zvhVZS6MRF3ycHQopF+dc4DPXscYrKKhwqme6PI6htHIawrbpQ8nDacXPXXaoSdK1avo+xEiUy71dSkvL78rZdoUIiWfy6gorZt6/3/32d2mH/3weLWlLLp/UvYxSlO7791tVHK23q9OK62bNDtDO5nIiz4IwdO5YrrriC2NhYGjduzO/OytIemDVrFldddRWxsbHUqlWL77//3uVzEWHIkCGUKVOGfPny0bJlS3bu3BnMXYh45ORJZORI5LqGSI0aSOe7kCVLEGe4kK/t16xB6teDenXhrk5w801QsQLy6aeZG380XtWjudTEYbWqaItPp6g7+jBSt65/wToNGwZdlGxjYrKYxTzFU/ShD5OYxDnOhVssjxgtWsD4j5VJw2nJMQy1FC0KPy7ByJ8/a51G+1E5/d9/kT//5I2Rc7nwRw1WvbGYM0UuRnFZgCgTrCZYRD27+0sIYHNAieNgoNqB2sYiUHUXfN4Vm01ZimbPVkn+fJVvctYYTRtGVN2zvn3h1Cnv2545Az/+6Hv3Q8bbb0P9Bum/qROrFYoVg5mzwv6/Dxa3c7vXKEobNtrRjvnMpze96UEP3ud9TqF+ZNOE/v1V20tPyU5V58knPWdT12STYGtb06dPl+joaJk8ebJs2bJFevXqJYULF5YjR464bb9q1SqxWq0yYsQI2bp1q7z88ssSFRUlmzZtSmvz9ttvS3x8vHzzzTfy119/Sfv27aVy5cpy4cIFv2TKaxYcc8sWdXec0cnT6cD5aC+fd1Xm2rVixsZ4dhK96Mjpss3OnWLef79rPoyOHcT8669g7WaW6dDBc44bm02kWbNwS+iZfbJPakrNNMfkKIkSBImTOFksi313EEbMf/8V87nnxLzlZjHb3SbmuHFiJiZmr6/u3X07I1stkhyFlNtriCXVx322eclzmpXm4rOPx68n/k6TbdAg/6yERYuKjB0rsn27ckh2+ub4s+3nnwfqVwkM5vnzYn74oZi1aimfqUoVxRw8WMzDh8MtWlDZI3skn+Rzm9vKEEOiJEqqSJU0fxyb2MQQQ/JJPpkjc2TlSv9+77yYVynQRJSTcaNGjeTxxx9Pe+9wOKRs2bIybJj7sLp77rlH2rVzTYDWuHFjeeyxx0REOTOWLl1aRo4cmfb56dOnJSYmRqZNm+aXTHlJwTHtdjErX+E97HbcOO99tGjuffsC+T1eoMzERHVBO306GLuXIw4dUgUVL72YWK0ipUqpyJtIJFmSPRb2tIhFoiVaNskm3x3lAcy//lIKjg/H45mdfboPe1Z4nIsfj4kyUURE9u3zHbXlTqn26gTtZvnttzD/AJo0fpQfJb/kd4mKtIhFYiRGykk5t/9XQwyxilXeXPy7X7/3F1+Eey8jn4iZokpJSWHdunW0bNkybZ3FYqFly5as9pCZa/Xq1S7tAVq3bp3Wfvfu3Rw+fNilTXx8PI0bN/bYZ3JyMomJiS5LnmHBAlV8x1ulyHff8ThVJXv3qsKI3myj58/DnDnuuy9UCKNKFYz4+CwKHnxKl1YFEocOVTUfLRYoWRKefVbVF6pSJdwSumcuc9nBDrdOjebFxzu8EwbJso+cOYOsX49s3qzqUfmJUbs2zP5azQd5mf5Y3gKislMtwMiw+NVcNfzyS/9LPjhx5jv0B6sVrr0WGjXK2hi5DbHbkQULkLFjkenT1XEiguzahWzciCQkhFvENFrSkt3s5g3eoCUtuYVbeIVX+JAPOcABt/9XQTAw+LHOSL/GKFMm0FJf3gRVwTl+/DgOh4NSpUq5rC9VqhSHDx92u83hw4e9tnc+Z6XPYcOGER8fn7ZUqFAhW/sTkSxfnp5t1B0i8O+/cOiQ+8/9qQ
tks8GB3Fk3qUgRGDIE9u9XF5cjR2DYMKX8RCpzmON1vt+OnVnMCqFE2UcSEpC+faFUSWjYAGrXgiqVkY8+8ts/zGjfHvbth/YdPI8TAtcPA4PmNMc0Yc2azL4UgcLpyjZpUs6TPkcyMm8eVKwAd9wOT/SHLvdDieJQtgxcWRXq1oFSJZEePZAjR8ItLgAlKckLvMCP/MgSljCYwaxghc//66oS31C+gnj9PcuWhRYtAi/z5cxlESb+wgsvkJCQkLbs27cv3CIFDn/Psp7aXaIousXhiGyNII9xlrM48H6rf4ELIZIm+8jZs9D8RpjwCSQlpX+wbx883hcGDfKvnz//VGa4bVs9XvFv+BlS/fBHzi5WrLSjHfxblWuvVWG9GYt6BgrDgDZtVJqDxo0D33+kIIsXQ6c71R0HpJ+fUlLS1znffzkVmjRGjh4NvaB+cJ7zPgvxphqpjH7Pe5vRo71nWdBknaAqOMWLF8dqtXLkEu37yJEjlPZwwSxdurTX9s7nrPQZExNDXFycy5JnuOEGlUDDG1dc4dH2aVSuDE2bere3x8RAp07Zl1GTJa7lWq93hAYGNfAzH1E4+eAD2LzZ87zMyBHIli0eNxcRZOBAqF8PPh4PO3Z4VNQ7zYHSh8DqI6opuxgYnLafofHbc9jxT+A1G6tVKTZHj8J336kowLyKiMBzzzrf+N7Ablcm2NdeC65g2aQWtbB4uZQaGFSnOp3vtDJ7tpouz0jZsjBjBtxzT5AFvQwJqoITHR1NgwYNWLp0ado60zRZunQpTZs2dbtN06ZNXdoD/Pjjj2ntK1euTOnSpV3aJCYmsmbNGo995mnat1cJ0byp/k8NwPCmwAwfoRQcT22GvoJRuHCOxNT4Ty96+bwj7Ee/EEmTAz76yLuZw2ZT8zCeGDwYxoxWr304r0Snwne3Q6Ezlyg5AZpGsmPnV8svnJhwF+bnD4AlsPG8Dgc8/TQUz5t5IF35+2/YuDFrc3wOB3w6GcloCYwQfBXWBeiPihHv1An27IGlS+GLL1Si0b17tXITNILr76zCxGNiYmTKlCmydetWefTRR6Vw4cJy+GJY4UMPPSSDBg1Ka79q1Sqx2WwyatQo2bZtmwwdOtRtmHjhwoVl3rx5snHjRunQocPlHSb+119iFivqGubtjIrq2lVMP3KBm0uWiFm1imt0Sny8mKNH59nkXZHMSBmZFqVxaRTVrXKr29pXkYTpcPhXaqFjB/fbr1yZtZINF5cDZZAhryBXbUXK7kUKJiIWezYjrDw9HIYwcFSWoqG8LYahisJeLn8zc8WKLP+uaUuEVi/9WD4WhEzFdQ0xpJW0ivj/a24iosLERUQ++OADqVixokRHR0ujRo3ktwyxj82bN5du3bq5tJ85c6ZUr15doqOj5dprr5UFCxa4fG6apgwePFhKlSolMTExcsstt8j27dv9lievKTgiIuaRI2K+/rqY114jZsUKYt52m5jz52dJOTFNU518Jk8W85tvxHSW287DbNwo8vDDqrBmiRIqd06k5KKYI3PkOrku7WRZRsrIW/KWJEtyuEXzi7TaUp6WKJuY3btl3i41VcySJbN/EcywnI5DXn4NKXEkkAoOwt7yguHIpKzExaljyV/lplgxVY/Kbvf/e3U4RBYtEunXT6RXL5VjJzedysxdu7L/m/qqXBpGFskiuUluSjtOKkpFGSWjtHITYCJOwYk08qKCo8k6M2eqvCQZkwE6X7/ySrilS+eEnJBDckgckruq8pmPPeY7Sd/ChZm387egaxYWx+uvyVk5K3axyzJZJm2kjUtF+E7SKeuKToU9mfIrtWihlJVBg1T+pYw5mDIeW7//LrJunUhyFnXVAwdEatdO789mUxag/PlFvvkmQD9cCDCb3+g999ali80qZqtW4RbbL87LeTktp8WUy8QkF2K0guMDreBo9u5V1Zu9JWtbtCjcUuZuzJ07lRXH3YXMZhXzf83cTp+ar77qWzHKyoXx1VfdWj
JTJVUuiJrWfkfeyTS94PNRbl/68VLkhFBzo4z/dn9a/1u2iPTpI1KxokjZsiJ3362qSJtm9qajUlNFrrnGfXZuw1Dr167Ner/hwFy3TtUZ80fJsRjqePjll3CLrYkAIibRn0YTqXz8sfJ/9eTnaLWqas+a7GNceSUs/Sk9bMRmS3eGb90avlvg3vk9JsY/B1SbzXeimF9XYwwZ4rZGkg0bsagK2dWo5jM0Pw3TgH+rwIFyUHkXzLgHjpaETbXpfUd5/sf/WM5yrrkGxo1TTqX790PnzvD88yptVUwMtG2rnE395bvvYOtW9/WvnF/XiBH+9xdOjPr1YcVKuO461w8KFFDPVmt6fq/CheHrORj/+19IZfSH4xxnJCO5i7u4h3uYyMSIrhd3uWGIBCtdVeSSmJhIfHw8CQkJeStkXOM3zZvDypXe2xQqBHkp6XW4EIcDFi2C9evVlb1dO4xrrvHcfssWqFXTe6eGoaL/nOHGl2KxwN13Y0yb7peMduxUoAJHUM46vig4/WHOrq8GL74FBc+pQp3OoS8Gp85lLu1pjwg89RS8/74SyxlYZrWq4KD33oMnnvAtY7du8NVX3gt8RkVBcnJ4EwRKQgJMnw47d0J8PNxzD0YNz2kNZPt2lY29WDFo0EAlJp0zB86ehauvhk6dMGJiQrcDfjKf+dzDPaSQgqAyFpuYlKAEi1hEPeqFW8Q8SZau30G3J0UgeopKc/PNvh1ACxcOt5SXL+Ztbb1PX7ytatmZEyaoQrEWQ8zoqPSprbs7++0kbxe77Jbd8ql8KhaxuC2o6IyIcRcp46ltcSkuyZIs333nO4pqy5bMcp09K7J4sch336naV507+1f/KiWMPq3mlClq6sn5ezh/wwe6iJmUFD7BAswW2SJREuX2WLGKVYpKUTklp8ItZp5ET1Fp8hRimsiFC36n9veHW2/1ntvQZoNWrQI2nCarfDVNJaCE9Kkom029f3kwPPc8AMYjj8CBgzDmPejdBwa9ABs3YcychZEvn9chHDgYxSgqUpHKVKYHPShM4bR6U5dSiEJp2/lCEI5znO/4jvff956mympVKYOc2O3w0ksqyXirVnD77VCxosqZ6M0yYxiqvpq3yi3BRMaNhR7d4cIFpWulpqbnL5o+HR7tFR7BgsAYxqRpNJfiwMEpTvEZn4VBMk1G9BRVEKao7NhZwAJWsjKtfs1t3OY1O60mM7JlCwx/W6X5TE1VZ/zefWDgQIxChXLU97FjULmyOhe7y0VnGCpdfpMmORpGkwNEBH76CaZNg4TTUKUqPPIIRrVqOe7bxOQBHmAGM/yaksoONmy8yZsML/YcJ096b3vddfD770ov6NpVFfO89MyccXrLHYYB77wDAwbkXPasIH/9BY89Br+v8d7QMGDHToyqVUMjWBApSUmOccxrm5u5maVkwclK4xdZuX7bQiTTZcNmNnM7t7OHPUShbqXe4R0qU5kFLOBqrg6zhKFBRODcOYiKytb8uaxcCW1aq9tZp9PBkSPwxuvw9Wxk5c85yq5cooQqxN6unauSY7Op1x9/rJWb7ODAgQWLRytIVjAMA265RS0BZj7zmY5//jnZxYGDeOKJ9qNGVqzydWbNGpg61X0b01Q6gki6/44Ti0X5lfXtm3O5s4Js2QLX/0/9iXxhsSjfmmc9+E3lIlLwXbo+N9SLy+voKaoAcoxjtKAF+9kPQOrFB8Be9tKCFpzEx61cLkdSUpB33oEqlSGuEOTPh7Rpgyxf7n8fqalw7z2q0N6lHpUOB2zbBi+/nGNZmzdX/oyvvqpmQxo2hH79VPeP+M6+rrmIickkJlGb2tiwEUUU7WjHClaEWzSPfMRHQbeoWrHSkY507Jg+u+YOi0VVXAH49FPvbUWgaFHIGFBUpgy88Qb88IPy4Q4pL76gCqn6U3nUYskzXvv1qe/1+LFhoxGNQiiRxi3BdQeKTILlZPymvJkptX7Gh0UsMlJGBnTMSMJMThbz1paZU+zbrG
rdlCn+9fP1175zY+TPJ+aZM0HeI40vHOKQ++X+tOM7o6OlIYZMlInhFtEtFaRC1hP7ZeFhiCFPypMiIrJ1q+ecS1arSJEiIsePK7natvXtRGyxqLaJiSLHjqnMxuHAPHYs6+U0Pv88PMIGmLky1+fvv1W2hlvMPIl2Mg4T05jmtUiiick0poVQohAzdqxK7HGp84DDodY92gs5fNh3P3/95dtT8sIF2L07+7JqAsJUpqYd0xmPfQcOBOFRHmUPe8Ilnkd8FTPNMgKYFiyipuf60pdRjAJUpPOcOcq6YrGoaSbnUriwiqAvVkx1U6qUdwsOKAsOqDQGxYt7d5YPKkeO+JevCNTOxsWpZEB5gA50oDe9AVwqiTutOu/x3mXjjhDJaAUngCTi2/x6hjMhkCRMfPiB988dDmWD90VsrH8mb6fjgiZsvM/7Lif4SzEwmMCEEErkmxWs4AAHAteh3YBz+WH8Y1heGMG8P/fwIR9iy+DiePvtsG8fDBumpqPuvFMlAfzvP9dcd127es9zY7VCz56BEz1HlCjhX8IdpzY3abLPyLbcgoHBOMbxJV/SgAYYGFix0pKW/MiPadXDA0ESSSwxf+Kl1d9xQ9fdVKwItWrBW2+pYAmNF0JgUYo4gjVFdZvc5jVHhk1s0lE6BnTMSMG8cMG3edpqEfP++3z3tXGj79Tt1a7UVc4jgCiJ8jld01bahltMF9pIm6yXZfD22FZdqLEtbfqoVq3sVwY3TTVNlbGGVcZaViVLihw8GNjvIyeYrVr5LrfQpLGYP/0UblGDikMcAa895RCHDJNhEm/Gux5vC28VrtglFosqEOwuh1JeRk9RhYk+9PGaI8OOnT70CaFEISQqyret3GKBWN93cEatWiqPvafkISLw0stu0+/nBbZvh7lzYfFi5b8ZyTgjBT1hYKSVQ4gEUkllMYt95rK5hmuoRS3KUc57h6YBFoHtKlOvacKmTbBuXfbkMwz4+mvo3j3z4d+wIaxapZyKI4a33lJzap7++088gbH6N4ybbgqtXCEmUJGDGXmGZ3iBF0gwElw/uPknWN0Us/QBTp6EO+5wjajTpKMVnADSjnY8yIOZ1jsP/J705FZuDbVYIcGwWlXMtbeMZnY7dOjgX4dfTUsPFXGeQK1WdQV47XWMbt1yLnSEsX073HADXHUVdOqkyjWVLg1vv+2/q0OoaU97l6mYSxGEO7gjhBJ5x47dp/+NFSt1qctGNtKLXl73D4tA9Z1Q2dUfbOfO7MuYLx9MmqTqV33+OUycCBs2wOrVcOWV2e83GBgNGsCPS+DS3DYFCsDrb8DoMWGRK7ezgx2MZrT7D6McUOwEDHobhwN27VJ+XJrM6Dw4AeAc5/iQDxnPePawh1hisWHjLGcBqEQlnuZp+tI34Fp+RPHc8yq5jDtsNnV2btfOr66M+Hhk2XJYtkwl+ktMgKpXQs+eGFWqBE7mCGHXLhWqfmkUbUICvPACnDwZmYUUn+EZZjELAwPBVQuzYqUUpbiXe8MkXWZiiaUylfmP/zLJ60QQ6lAHUPlO/PrPxiS7vA1E/tDSpeGhh3LeT7Axrr8e+Xu7Mi/t3Kl2vnVrjIIFwy1awEkiiWMcI5544gheHcPP+AwrVs+Wxig79JwMA0YTZbGxciXcdlvQxMm9hGDKLOIIpA/OaTktdaVupvBwm9ikgBSQ7+Q7cUiY4jjDgDltmpgx0crfxmZNrw10zdVi7t0bbvEilu7dlY+Ft3pFu3eHW0r3zJJZEi3RaXWcnP4t5aW8bJNt4RYvE+/Kux7rTTkfTaWpzJAZMlNm+vbBSSgkxJ5P+63i40UuXAj3XmoCyQE5II/KoxIrsYKolAh3yB3yh/wRlPG6Slexic33sVf4pNhsIoMGBUWMiET74ISQF3mRTWzKZPa2YyeJJB7n8TBJFh6M++5TtYGGj4D774fuPeDb+ao+UIUK4RYvIklK8l0l2mKBL74InUxZoTOd2cc+3uRNOtGJe7iHL/
iCnezkKq4Kt3iZ6Ec/2tAG4+LDHb/zO/dyL7OZTUlKeo4Us1thQi9ISvcte/llHeCXl9jHPhrSkMlMJgnlFGdi8j3f04xmLGd5wMcsSUnfjZJi4GxB7HZo0SLgIuQJdC2qHNiSz3KWEpRIO+g98QM/0IY22R5Hk7c5eBDK+fBltVhU/pNq1ZQpulcvlTNFkz1SSeUjPmIMY9iN53xKBgZP8ASf8AmppGLnohZ6Me8N6xpga/0TZmJBLBZ4+mlVGPO//9Tvdc89qgCmJvfSmc7MY176b58BCxZKU5q97A1oZuyNbEybJnVLqg0+fwjrY5OpWlVlXw9bPqQQk5Xrt1ZwcqDgrGMdDWnotY0NG0MZysvkvLRAJCEisGKF8rlJSYH69eGee/JMnovs4nCo3BQxMVCkiH/bnD+vEr6lpvrX3mKB/Pnh+++VU7LGO0kkcYITxBNPQVz9Qj7jM7rT3eO2BgZXcAULWMA7vMM0pnGe81zBFTx0tg9xn/fj+N78lCmjfr/BgyE5Ob2mmWnCww+rnDfhqvKtyT5HOEJZyvp0TF/AAm4jsE4wXenKVKZm9hWzW+FcAWi4jjLnrmTFCnXjc7mQpet3cGfLIpNA+eBskk0+50gtYpERMiJAkkcG5sGDYjZsoHxromxiRkep10UKi7lkSbjFCwvnz4sMHaryUjh9MZo2FZk/37/tu3Tx7oPjLl1/oULpKf41mTkgB+QxeUzySb60/2JH6SgbZENam8fkMb98HY7JsbRt3PnUzZzp3X+qb99Q7LEm0PwsP/s8NqxiDUoJnhRJkSflyUy5pqL/riXXdvlT3n1X5NSpgA8b8WTl+q0tODmw4DhwUIlKPrOibmYz13JttscBZVJPIIFCFCKGUFfUS0fsdqhXV8U0X+o0YrGo29S1f2DUrBkW+cJBUhLceiv8+qtrAmaLRb0fO9Z3left21WekwsX/M9pYbHA8OHwzDPe25mYXrMNX8ppTjOVqWxlKwUoQCc60YQmuSoCcC97aUxjjnPcZWrBihUbNn7kR27gBh7ncT7hE7fTDxnZzW5SSKE4xSlKUZfPRFRo/86dnsP5rVbYuxfKls3xrmlCyAY2UJ/6Xts4sxo7SzcEmhOcYBGLuMAFalKTRjTKVf/FQKMtOD4IZBTVB/KBV82+jbTJUf8H5IA8Lo9LfskvCBIlUfKAPBC26BSfhTCjbGJ2756zMez2XJWlePhw95lnM1pb9u3z3c+GDSK1a/tvxQGR1q3d93VCTshgGSylpJQgSLzEy5PypOwV75FsX8lXEiuxYoghURKVZt1oLs3lpJzM+pcTJjpKR4/Zii1ikYpSURzikDkyx+vduSGGxEt8Wl+GGHK73O5iBdq82ffvZBgiY8eG7/vQZA+HOKSSVPJppT8gB8It6mVDVq7fWsHJIaaY8rQ8LQhpFwPncxNpkqOLwn/yn5SW0plO1M4Q9LWyNsfyZxXzgS6+U7Pnz5f1fi9cEHPkSDErV1Z9xMaI+dCDYm7cGIS9CCwVK3q/uFmtIq+84l9fpiny++8iU6aIREf7vnDeckvmPg7LYakiVdweN0WkiMcqxz/JTx7Dp61ilebSPODp6IPBQTnoMwwcQRbKQkmVVLlCrvBauuHSFBBWsUo+ySerZbWIiKxe7ft3stlEhg0L8xejyRafyqdej41H5dFwi3hZocPEQ4iBwShGsZGN9KUvt3M793EfC1jAKlZRBD89Td3Qj34c41imZE/OEPQHeAAhxDOMZ876nkNJSlJOyH4i58/DrS1h0PPw38WIluRkmD4dGl2HLFmSA4GDS3Kymnrwhghs3epff4ahii926wY33ug9MbTF4t7JuD/92cMet8dNIom0ohW96c1ABrKSlWnH0Ou87nEqy4GDFaxgNasBOMYxtrCF4xz3KN9xjjOCETSnOc1oxtM8zQ52+PgGcs4Odvj8XxgYbGUrNmwsZCElKekSNu7MXmxgZHIwdeAgyUym7ZHufDhWKFLEdw
SL3Q7Vq2d/nzThozvdGclIbNiwYCGKqLTjowtd+AAfRYY14SP4+lbkEaxim4Fkr+z16y50pawMqVzmc895t+BYDDGrVctany+/rBIDeirQWbSImOfPB2mPcobD4ds52GYT6dEj630vWOB9yiMqSmT/ftdtDsmhTBYHTxaZjJbGHbLD5zY2sclD8pC0kTZpx6YhhrSTdvKX/OUix6/yq8RJnIssVrGKRSzyiXySg2/cN2tlrc99QZDe0jttm0RJlLEyVm6Wm+U6uU7ukrv86oNmqyQmRqRePWWp8/RbFS8ukpwc1N3WBJnDclhGykjpK31lsAz2aAnVBBc9ReWD3KDgLJJFPk+uhhgyTsaFVC5z506lxHhTcMaM8b+/lBQxixX1XYn8s8+CuFc5o3Nn30rOd99lr++XX06f5sqoMNlsIl9/nbm9P8eNO2UnXuL9aud8XLo+v+RPy+p6Uk5KvMR7VLQMMWSVrMrBN+4du9iltJT2uT/VpJrHKbdv5Bv/vr3uk9N+l7i4zMeB1aqW7P7+Go3GFT1FlQfIT36fbQTxq10gMa68Eoa9rd5cape3WNS8Su8sRBMcPKgKLXkjKkpVG4xQnn9ePbsrbm6zQZ060CabeR5ffx1+/FEl9ytRQkXh9Oypvo5OnTK3z06EnQMHCST41c7EzDT15cBBMsn0ohcAk5hEAgkec4dYsfIu72ZZTn+xYqU5zX2228lOtrHN7WeFKOTfYIkqisNiUQn+7r1X/eZObrhBlVPzswSbJhdhx85JTpJCSrhF0XgiBApXxJEbLDjJkizFpbjXu0eb2OSIHAmLfObMmWLWrZNuYSlRXMyhQ8XMYhEe89Ah39abKJuYzz8fpD0JDN9+K1KgQPrUkfNOvmFDkcOHQyfHBbnglzUmWI9pMk1iJMZnu4JSUExT5Nw55VgdSA7JISkhJfyS91f51W0fyZIsxaSY963P5hcKJrpYbPbuFUlIENm2LbS/eygwT54U8513xGzcSMyrrxLz7rvFXLo0V0U8BoJDckiekCekoBQUJD2yVU9ZhQY9ReWD3KDgiIiMltEeT64WsUhfCX/2MPPIETH37hUzNTV725ummHVqe/bBcS4//xxgyQPPmTMiH38s0qePyIABIsuWuV6816wRuftukXz5lALUsKHIF18oP55A8pq85pf/VjAeznQGvh625HxSsKBSCgoWFOnf379Qel/8J/9JWSnrlwyGGHJIDnns63153/PWJsKQVzJNRW7ZkvN9iETMbdvELF3KdXraWUi3T+/LRsnZJ/uknJRzG6GYX/LLGlkTbhHzPFrB8UFuUXBMMeUFeSGtQrPt4gNBukgXSZa84bVozpzp3XrTpHGuP4F++aXKh5PRR8OZO6dr18AqOXaxyyPySMCUFkMMaSpN/Wrrj4MzqVZh6c2ZHLFLlBDZuTP7+/27/O63gmURi7SX9l772ypbpZk0S1doUi2C3RDsFmH4s4LhcNmHqCjlGB7s00pKisiJE+o5FJh2u5hVq3gPLhg/PjTChJmO0tFj5murWOVKuTJXpFLIzWgFxwe5RcFxskt2yVAZKj2khzwnz8lGiazcMOb27WKOGSPmiBHZNlmbI0aou0ObVT077w7r1hEzl9v6Dx5UFz9vTsiB9qH2lrsjO4+v5WuJk7icKzfOx+3fus0XdP312dvfFEmRMlLGb8tVnMTJP/KPx/5Gy2gxxEi/mJkXl8SCwk1LvP6WsbEi/fqJZHG21if//SfSq5fq3zlOr14iu3cHdpxLMb/91rt11WKIeWXVXH8T4osDcsCv42uZLAu3qHkareD4ILcpOJGKefq0mB3ap4dzO+/wqlcT888/s97fv/+K+cILYna6U8yuD6kTq90eBMlDy+uv+850XL9+4MYzxZRr5Bq/TsbeEtw5P68m1cQhDhkuw722ddZ88vl4+zmvCkJ2pnlmySz/lStBvpVvPfb1vXzvectUq7C/rBCd5DO5X3y8+0i37PD33yJFi2
aO0rLZRIoUUT4/wcIcMCC93py35UDezua7RJb4PK4sYpH35f1wi5qn0VFUmqAjpgl33K6qiYMquuRMALhrF9zUAtmzJ0t9GlWqYLz1FsbXczA++xzjjjswvGW6yyWsXetao+pSTBP+/FNdsgJBAglsZSuC7w6b0pTv+Z5nUAWtMia4MzAoQxm+53ssWHiWZxnCEJeEZxYs2LAxkIG+I7jEgB9bwqDhXptt3OjffmZkNauJwr9y3YUoREtaevx8BCOw4uG4szmg3EHoPNvnOAkJcNdd8NZbfonllZ49VX+Xln+z2yExUX0eNPw9MAN1AEco/kSsmpghj2yNJMQ0kUWLkPvuRZo0Rjp2RObMUTUMwyLQZYi24OQc84cffEc+PflkuMWMCDp39m7BAVWWIVCclJM+7zRtYpNu0s1lu9/kN+ku3aWhNJSb5Wb5SD6SM3ImU/9H5Ii8J+/JIBkkH8gHsl7WS3EpLhbTjymqJr/6tHzMnZv1fX5Wns1UddndwxBDnpFnPPaTKqk+LV9Wsco9SQ9Jw4a+f1fnsmFD1vfJiT+1rkBk06bsj+ENc/Zs39abKyqJGWhv+QgjVVKlpJT0eWx4c1zPy5hJSWK2v8PVAd1p1W/WVMwAXW8jZorqxIkT0qVLFylUqJDEx8dLz5495cyZzCfMjO379esn1atXl9jYWKlQoYL0799fTp8+7So0ZFqmTZvmt1xawck5ZteHfNekKlI43GJGBJMne78w2WwiHTsGbjxTTKkhNXxeqKfK1ICM10W6iMXh3vESQfmuCMKY/j4VgthYkUv+7n7xo/zoW7kS5Ea5Uc6L56zYyZLssw+LWOR+uV/KlfNP8bDZRB7NQbmi6dP9GycLp8AsYaakiFm+nOf/exaTe+Zm3pP3vB4XvaRXuEUMG+bAgZ6jYW1WMe+5JyDjRIyC06ZNG6lTp4789ttv8vPPP8uVV14p999/v8f2mzZtkk6dOsm3334r//zzjyxdulSqVasmd911l6vQIJ9++qkcOnQobbmQBY8+reDkHLPdbb7v6qyWPO946A/nzomULu09lf8vvwR2zAkywetdZkkpKUmSlONxTstpsXpTbpwKztT7BUwxDM8XaMMQefbZ7MnhEIdcK9d6jHBBkK7S1a/Iw5pS06tyaIghY2SM1KsnXvcn49KgQfb2S0TlWPJnjG++yf4YvjD//FOVTMmo5DhfP/BAnrfeODHFlBflRbeRrffKvQH5T+VGzIQEMfPn8+2MvmdPjseKCAVn69atAsjatekVr3/44QcxDEMOZMEZbebMmRIdHS2pGfKsADI3O3bsi2gFJ+eY/fqlmyE9LZUqhVvMiGHzZpEyZdIv5JCexn/KlMCPZ4opj8ljguBy0beIRQpL4YBVot+Uus233SQ5SoVVX3JBtlpdQ+e7dhXJZjolERHZLbvlCrkiTQnJuO9Py9N+h+9OlIlelZv8kl9OySl57z3/FZzsRoeJqPxK+fN77z9/fpHExOyP4Q/moUNivvKKmFfVELNcOTFbtxLzm2/c3sSYDkeevrlxRrZ2k27yjDwjG2RDuEUKK+aiRb5veA3EDMDJLiIUnEmTJknhwq5TFKmpqWK1WmXOnDl+9zNhwgQpXry4yzpAypYtK8WKFZPrrrtOJk2a5PXPlJSUJAkJCWnLvn37tIKTQ8wNG3xbb956K9xiRhTnzolMmiTSqZPIbbeJDBmiMt8GC1NMWSgL5Q65QypKRblarpahMlQOysGAjTH1x8O+FZxUq/D8sEwX5TvuELn3XpGnnsqZj0pGzsk5mSSTpJW0ksbSWHpKT/ldfs9SHw5xSFfpmmbtcu6HTWwSJVGyQBaIiFIoqlb1reQYhsi77+ZsvwYP9jyOYYi89FLO+g8EpmmKOX26mI0bq/+/1SJmy5Zi/vBDuEXTBBnz++/9U3AmTszxWBGh4Lz55ptSvXr1TOtLlCgh48b5VyDy2LFjUrFiRXnxxRdd1r/22mvyyy+/yPr16+
Xtt9+WmJgYee+99zz2M3ToULd+O1rByRlm/36e51tr1RQz2LeUmrAzdqwIP7VQSoynh8MQKu3OZL0ZNSrc0nvGIQ6ZITPkerleCkpBKSbF5FF5VLaIawz7gQMijRt7Vm6sVpFixUROnsyZPHa7SO/e6T49zgVEHntMfR5OTNMU88kn0m9uLp3GGjkyvAJqgop5+LBvi76BmJs353isoCo4zz//vFtlIeOybdu2HCs4CQkJ0qhRI2nTpo2k+EjZOXjwYClfvrzHz7UFJziYpqlq05QqmX4Ax8aI+cjDYp46FW7xNJdgF7tMkAlSW2qLRSyST/LJ/XK/rJN12e5z+nQR/vezkGJTWX7dKTcfPeb24v/69B0yX+bLMlmW67NyT5yYnoDPYkn3typVSiQbKaE8snWryKBBIt26qeetEVL+yFywwPfF7a+/wi2mJoiYXbp4dkSPsol5440BGScrCo4hIpKVsPJjx45x4sQJr22qVKnC1KlTefrppzl16lTaervdTmxsLLNmzeLOO+/0uP2ZM2do3bo1+fPn57vvviM2NtbreAsWLOD2228nKSmJmBjf1ZQTExOJj48nISGBuLg4n+2Dwfnzqvp0vnxhGT6gSGqqSl6SkgJXX41RuHC4RdJcgh07d3M33/ANBgaC+ts7897MYhYd6Zjlfs+ehVKl4Hzz72FKdyh5DFKtYDXBtMBHfWDAaHBkKLFd42/45DG4cWXaquIUZzCD6U9/DNyUZc8FJCbC1KmwahVYrdCyJdxzD/g4feUJ5Lbb4MfF6bmwLsVmg4cfwfjoo9AKpgkZcvIktGgOW7ZcXHFRtbBYoHx5+PkXjAoVcjxOlq7fAVGp3OB0Mv7jjz/S1i1atMink3FCQoI0adJEmjdvLufOnfNrrDfeeEOKFCnit2zhcjI2TZHPPxepUyf9LrZ+fZGvvgp8RWVN7mLPHpE331RFJ996K/C+OR/IBx4jgwwxJFZi5YScyFbfw4dfPJ5tKUKHucKzw4XHPhJKHcpsuanyj3CysBh291Nar8grgd1xTUgwS5bwbcEJZLpuTURinj2ryvZcc42Y8XGqhMebb4p5InvnFndEhA+OiAoTr1evnqxZs0Z++eUXqVatmkuY+P79+6VGjRqyZo2qwJqQkCCNGzeWWrVqyT///OMSBm6/OMn87bffyoQJE2TTpk2yc+dOGTdunOTPn1+GDBnit1zhUHBMU+SJJ9JN2M4TvvP1c8+FTBRNGLHbRb77TuTBB0XathXp21dFDxmGmtaIilLPhiHy/POBUXxNMeVKudJn6PO7kj1PWNMUGTYsfYrGOT2TL5/aH8NQzxaLCF88IIbdcyi3VawBc4JOThaZM0f5+kyenHM/mEs5fFjlnvn88+CWSsgNmBXK+1Zw/tcs3GJq8gARo+CcOHFC7r//filYsKDExcVJjx49XBL97d69WwBZtmyZiIgsW7bMo1/P7osV5X744QepW7euFCxYUAoUKCB16tSR8ePHiyMLeRjCoeD8+KPvUNKVK0MmjiYMnD4t0qyZqxLgK/HdsGE5H/ecnPMZ6WQVqzwgD+R4/z77TCkUM2eqYpOnT4uMH6/8Rd4Yc0ZspvecORaxyAgZkdbn/v0iY8aIDB2q+vbTqCuzZyvn3ozfdUyMilzLacqW8+dFHn44c12om25SlrjLEZ9pI6wWMYcPD7eYmjxAxCg4kUo4FJyOHTOfEDMuNpsKmdXkXW6/3XOyP09LXJy6oOaEZEn2mdXYZtqkh/TIct+JiSJLlogsWiRy7Jj3trtkl09FK0qi5Al5QlJT1XSdxaIWZzX2QoVEpvpIwPzDD95DtwcPzvJupuFwiLRp414xtdlEypf3/T3kRczt21WAgcVwH1VZpLCYx4+HW0xNHkAX24xA1q/PXCgvI3Y7bNgQOnk0oeXvv+G77zz7YHoiMRGWLcvZ2NFEczM3ey4gCdgNO1X/bud3n8nJ8PTTysG4ZUto3RrKlIHu3VVRSHcUpS
gWvJ9yTExKUpKnn4YPP1SFSE0TUlPV52fOwEMPqe/SHSLw/PPKgd8Tw4eDjzgJjyxZAgsXui+earfDoUNK7ssNo3p1mPuNipowDLVYLOo5vjAsWoxRrFi4xQwaduwkkoiJl6q6mpCjFZwQkd+PArP+tNFkDdm2DXn8caRKZaRiReTBB5Dffgu5HAsXqvN9djh7NufjP8/zOPCgXaVa4d8qvNqgPX/84bsv01RVsseMgQsX0tfb7SqK6OabVZTgpcQTT3vae1W0TExuPfoAY8emB2G448UX3X++Y4cK6PNWvT01FebO9fy5Nz7/XAUEecLhgEmTstd3bsdo0wb27oNR78Cdd6qD5KPxsGcPxnXXhVu8oLCJTXShC/nJTzzxlKAEL/Iipzjle2NN0NEKTojo3FmFjnrCYlHnA03gkNmzoU5tmPAJ/Pcf7N8HM2dCs6bIO++EVJaUFO9WBW9cfXXOx7+VWxmR+BE4LGC3Ks82x8W//4Hy0GoxZnIUL77ou68ffoAFC9wrEQ6HskROmeJ+21d5lWii3So5BgaP8zhrZ13hVbkRgU2b4J9/Mn/mj2XGas2+BefQIe+WWICjR7PXd17AKFoUY8AAjNlfY8yYifHooxgFCoRbrKCwkpU0ohGzmEUqysR4kpOMYARNaMIJsnmQaQKGVnBCRO/eykLjTsmxWiEuDnr1Cr1ceRXZswce6KKuuBmvSM7Xzz6D/PxzyORp0CDr01NWK1x3HdSqFRgZYj/rjXHlLnh7EHx3O3x9FzwwFWpsh11VcTjgxx/h8OHM2yYkKOvFO+/Aa6/5tkZ98on79bWpzTKWcSVXuspGLC/wAmMYw6lT/lm7Tp/OvK5iRd/b2e1QqZLvdu4oX967BQfUVJ0mb2PHzr3cSwop2HHVeB04+Jd/eZ7nwySdxomPv6omUJQrB4sXw+23q7tHq1Xd0dvtULQofP+98mfQBIjx45WJwZMpwGaD99+HG24IiTg33QRXXgm7d/un6FitSiEO5HTHkSNgO1CJ1MFveG139CiULq1eiyiflVdfhaQkJZcv+UVg717PnzemMdvYxq/8yja2UZCCtKUt8cQDULWqbysJwJ490LChq2WsfHnlE7RsmWc54+OhY0ff/buje3el6HnCYtE3KpcDC1jAYdzcCVzEgYOpTOVd3iWO8CST1WgLTkhp0kSd+CdNggcfVMuUKeknak0AWbHC+5XYboflOfTezQIWC8yeDQULZrYAWCzqouu0WthscPfdsHZt4Kw3oJRsX4qDYaQrNwAjRsALLyjlBvy3QpUs6WMcDP7H/3iER7iP+9KUG1DuG4ULe5/SMwz1HdWvDwcPun42erTydb3UWursb+zY7GcXbtFCyefOwmSzQeXK8Pjj2etbk3vYyMa0LOCeSCaZf3Azj6oJGVrBCTH580PPnkqx+fRT6NYtb5RriDgsfji8ZNcpJpvUqQN//QV9+6oLuMUCV1wBb70F+/erKZfdu+HkSZg2DWrUCOz499zjfXrFaoXbbktXTs6eVdNRWcVigR49sicjKOXDOcXlaarKaZjbvBlatXJV3GrWhF9/hRtvdN2mRg3lXPzAA9mXzTBg+nR44glXJckw1Hf3yy/qt40E5L//kGefVQ72pUshrVsh336LZK06j/cxUlIQbx7deZR85Esrd+KrnSaMBD9q/f/tnXd8VMXagJ+zuyTUEJASAkqvAgJ6QVCBK1FAVECv0lRAiiheGxbwXkFFARVF8UNApFhARARsNAuIaAxFQC8EBKQqBCGQEErI7r7fH+MuLNmabMtmnv2dX7LnzJl5Z8/ZnffMvCX6iFSqBk34sI8e7TnxmyP5W98+viuKMZxpFdxkvS5dWuSXX86X/fDDwGL2OGLB1KwZnKjBy5eLXHmlf+0uWeK+jr17RVavVv0KdjqUEydUVOrFi6MvwJ/9++/FXqa063fA8f/gQWIvxIdhP3NG7BMnir12rfPfpTvvEPv69UHsQXSzXbb7jO
lUV+qKXXQOnmCjA/35QCs4sY/94EGxlyrpPvCYgdr/d4qQ4oTdLvLWWyKVKrkqCFdeKbLxoqTib77pPWCeu619++Dm0bLbzwf587SZzSJ33x28Nos69lOnxF6xgooe7EnBf+edgtV95ozY21+Xv+4SFrV99lmQexO99JSeYhb3OdUQZJbMirSIMYkO9Kcp9hjVq8MniyAuztUYw2JR6wlTp2G0bh05ASOEYcD99yu7lW++gUWLVNyYDRuUPcuF1KrlPRaNgxdegOnTlev2d99BEBIGO7HZzgf584TdDqdOBa/NIs+HH8Lx456DARkGTHqtYHW/8opKl35x3Varulh9+yAnTxas7iLGu7zL9VwPgAUL5r9fBgYv8AIDKcQ6rSYoaC8qTcxidO2K7PgNpk5VkfasedC+AzzwAMbll0davIhSooQKyOeNLl2UPc5ff7lXdMxmaN0a/vMf9f74cRUjx25XRvPB8Aq0WKBOHWWb5EnZMpmgSZPCtxUz/PCD+uA8WZSLwLZtSE4ORtmyflcrNhu8NcWz4iSiIjzOmwf33VcAwYsW5SjHClbwIz8yn/mc4AR1qctABlKTAsYh0AQVreBoYhrjsstg/Hi1aQLCYlHe9rffrh76L1QwzGalJE2erKIZjxgBs2apFA6Oc3v1UmkLCmt0O3w4PP645+MiMHhw4dqIKfwNmR2okf3RoyrWgDcsFti8ObB6I4ANG8tZzkIWkkMOjWnMYAZzGX4EUroAhzfgNVwTIkk1hUEvURUxdrGLMYzhPu7jOZ5jD3sCrmMPe3iER6hKVUpRiuY0ZxrTOMe5EEisKcr07KliNF084dWunfIYatkSbr1VLVE5lBtQkwfz56v4PxemcygIw4dDhw75x23H+0mTCh64Lybp1Ml7PACTCa68KqAIw2KzwZrv/CtcUB/8MHGEI1zFVdzMzXzAB3zCJ4xjHLWpzSQmRVo8TTAJg01Q1FEUjYxtYpMH5UFBELOYpYSUELOYxRBDHpPHxCY2v+pJkzQpK2XFIhanMZzx96ujdJQzcibEPdEURex2ka1bRVatEtm9+/z+xYu9GwAbhsjUqYVv/+xZkbFjRapWPV9327Yixcim1W/sZ8+KvXqydy/CefPEPmOG2FtcIfa4EmJPKCf2AQPEfqEbnaO+jAyxt2rlua6Lt2++iUCv/cMudmkjbVx+/y5+fSKfRFpMjRcCGb8NkSAGRSgiZGdnU758ebKyskhIKBpRJkczmhd4wWPshRd4gf/wH691WLFSk5pkkOE28aIJE0/zNGMZGxSZNbHPLbcouxtPAQANQ8X/efxxZcgcFwddu6oZmYKEIbLZ1EpJfHz0xJuJRuTXX6HT9SpsuuMn3mGXM+ppZRH+xeeua48OA/wln2J07arqEVHTdRs3+I4SabGoyJQbNmKEOcaUv6xlLdfhOXq5CRMtackG/Mg6q4kIgYzfWsEpAgrOSU6SRBKncZOi+W8SSOAQhyiN55Tki1nMbdzmta0KVOAwh4kjrsDyaooPzZursdIbjjG0RAn112pVHluffw7JyeGRszgix4+raKKffAKnT6n1xGH3KyPkx0e4t9o2DBWN9M9DGOXKqXxtHdrnL+eOZs1g+QqMKE7G9RRP8Rqv5csfdTFHOEJlKodJKk0gBDJ+axucIsA3fONVuQHIJpvVrPZaZh3rKEEJr2WOc5x97AtURE0xJTnZt02rYxzNyzs/CfDLLypnlC8XcE3BMSpUwHjsMYwffsDYtBlj1myVvfXNyZ5d0hyeUHPnqveff+47uyjA+x/Az5uiWrkBOMtZDHzPLp3lbBik0YQareAUAU7hX5APX+UsWPwKL+5LCdJoHAwY4Nlr2BtWK6Snq/QJ775bsDo0BeDECdi713sZs1klQgOVhMyf5aYOHTAuTv4VhbSkJXl416orUpEkkryW0RQNtIJTBGiCf0E+Lsd7bJcudPE6NWtgUJe6OoaDxm9uu03Fwino2LZnj1KS+vXTSk
5YKOHnw0vc30vUV1zhe5rtkkuCE/QoDNzJnZSnPCYPQ58JEw/wgH7IixG0glMEaElLWtEKM+5HETNm2tHOpyLUjnZcxVUes+AKwlM85dcUrkYDahxcuVK5k1/8oB+Inen8+fDOO8GVTZMfo2xZuOYa7+uKVqvKHArQuzeUK+f5YppMcP8DGP4qThGmNKWZz3zMmPP9Dpow0YY2jGJUhKTzzKZNaqZzwQIVUFPjH1rBKSLMYQ5lKJPvS2nBQgIJzGCGzzoMDD7lU+pRD8D5FOOo83EeZzA6YpomMMqXh48/Visf770Hc+bAjh3QqJH/So5hwBtvhFJKjZMnn/I8XWaxQP36TgXHKFMG5n2opugutsUxmaBNGxg5MsQCB5cudGEd67iDO5wzNTWpyXjG8w3feHXUCDdbt6qo4K1aqZnOXr2gWjUVWNOXU5tGe1EVCS8qBzvZyfM8z0d8RB55xBFHX/ryDM9Qhzp+13OOcyxiEfOZTxZZNKIRQxlKS1qGUHpNceP//g8eesi/fFYOcnPPr44UB+T4cfjqK2XY26wZxpVXhqfdSZOUJ5XJpHzvTSal9NSqBd98i1G7tmv5jRvhpZdgyWI1staoAQ8Mh4cfxihVKiwyhwJBnL+l0caePUqxOXkyfxgGw4D+/ZWTXHFDu4n7oKgqOA7OcIYTnKACFShJdEcN1RRfzp1T+ay++84/+xqTSZ0TaltV+eMPNd1UoQI0bhyRmC1itcKokUoLvDAEdMuWMOddjGbNQi/Drl0wYwb8+guUKQM9esK//oURH+/5HLsdzp3DiPJoxbHAkCFqNtTbTM0vvyjv/OKEVnB8UNQVHI2mqHD2LEyYAK+9pp5EPWE2K7fx5ctDJ4vs2AGPPaoacfzsNW4M48ZjdO8euobdyTJ4kHr8vvjn12yGsmVVsLy6dcMqkyZ6sFqV6dNZL97qFgs8+ii8/HL45IoGdBwcjUYTFZQsCc8+C4cOqZg5nmZnbDZ44onQySG//QZtr1YW0RcqFdu3Q88eyAcfhK7xi2XZulVlJnX3bGmzwalTMO7FsMmjiT5ycrwrN6Bun8OHwyNPUUUrOBqNJuSUKQOrVikDSThvfGw2q6WpadNUjsiQ8cTj7o0ZHErG8AeQ096DaQaN99/3HjzPaoW5cxEdBbHYUq6cCijtDcOA6tXDI09RRSs4Go0mLDRooLyrZs2CHj2Ufc6TT8KuXXDffaFrVw4fhi++8JwwC5Tys2hR6IS4kCMZvsucOwfZ2aGXRROVmM3Ka8qXHty/f9hEKpL4EYNbo9FogkPp0jBwoNrCxoEDvl25LBblthIOkqv7lqdUKdD2gcWaUaNU+IXjx90bGj/wgArFoPGMnsHRaDSxTcWKvsvYbCoibzi45x7vs0kWC9zTv8gEzytqZJPNVKbSn/4MYhALWMA5zkVarHzUqAE//QTXXZT8vFw5eO45ePPNyMhVlNBeVPopSaOJeeSqK2HzZu8B7g4cxAhTygF55GGYPNm9HImJsPFnjEsvDYssxYmVrOR2bucUpzBhwsDAipXLuIwVrKAR0TklsnOnCvpXqpRSeHzZ58QygYzfeolKowkBkpur3JGPHFGPYjfcgOFPVmZNaBg3Hrp2UZaZFz/TGQY88mjYlBsAXpsElavAKy+72tpcdx28PUMrNyEgnXRu4RasWBEEG+dn0f7gD67nen7jN8pSNoJSuqd+fbVpAkPP4OgZHE2QkXfeUdazJy5IGlO1Kkx+E+OOO0Lf/pkzkJamDFWbN8dICn1mZNmwAdasUW/at8e46qqQtxkosmgRDB0CmZnKitNuV38ffQzGjYtINmw5cwbWrlWRjC+/HKNevcDOF4Hvv1fu7zYbXH01dOumlWk3DGMYM5npMeGwgcE0pjGUoWGWTBMIOtCfD7SCowkV8s47ahB1h2HAJ4swevQITds2G4wdC69POj8rYDbDv/6llKvKlYPf5oEDcOcdSqFyJHC021WOogUfR91MhOTmwu
efw++/q6Wgnj1D8rmEAzlwALrfqpbeLBZ1f+XlwaWXwuIlGK1aRVrEqKIylTnKUY/HDQw605llLAujVJpA0QqOD7SCowkFkpsLydU8p/s1DJXrZ+cuDG/ZnAvStgjcO1Blu3QXHbdOHUhbh5GYGLw2s7OhxRVw8GB+Nw+LRQ20mzZj6O9Y0JEzZ6B5M9i3L/9n74iG/MuvUadgRpJylCOHHK9lruM61rAmTBJpCkLURDLOzMykX79+JCQkkJiYyKBBg8jJ8X6DdezYEcMwXLZhw4a5lNm/fz/dunWjdOnSVKlShSeeeAKrTq0aNk5wgnWsYwtbXNaxiz0rVnhWbkApHnv2wLp1wW973Tp4913P0XF371Z5j4LJnDnuB1hQ+/buVTJpgs9HH6lr6u6zt9nUDN4DDyC+wuEWI5rTHJOXIc+CxWfC4exs+OADlXpkwQI4cybYUmqCSUgVnH79+rF161a++uorvvjiC9asWcPQob7XN4cMGcKhQ4ec28sXJNuw2Wx069aNc+fO8eOPP/Luu+8yZ84cRo8eHcquaIBMMhnMYJJIog1taEELLuMypjAFodhNBObnyJHglguEWbO8RwWz2+Ht6cFt84P3fZd5/73gtqlRfDT//JKgO0Tgyy+gVk3k55/DJ1cU8yAPYsdz1lcrVu7DfcRJEXj1VUhKgrvvVmlFevVSkbnnzAmRwJrCIyFi27ZtAsj69eud+5YtWyaGYcgff/zh8bwOHTrIww8/7PH40qVLxWQyyeHDh537pk6dKgkJCZKbm+uXbFlZWQJIVlaWX+U1IlmSJY2lsZjFLLh5PSlPRlrEiGNftkzsBr63jRuD33bXLr7bNZuC22btWr7brF0rqG0WZey7dol9wQKxL1ki9uPHC1dXm9b+3WsWs9grVhB7RkZwOlGEsYlN+kgfQRBDDOdvl+M37WV52eO5kyaJKDXH/TZ/fvj6UdwJZPwO2QxOamoqiYmJXHWBN0VKSgomk4m0tDSv586dO5dKlSrRtGlTRo0axekLcsSkpqbSrFkzql7g0tm5c2eys7PZunWr2/pyc3PJzs522TSBMYlJ/MZvHpekXuZl0kkPs1RRRkqKesTzhMkEl18OLb1PgxeIKlW8z+CAfwHvAqFePc/ZM0EdC9ArKBaRgweRLp2hfj3odSf07AHVkpBHHkbOFTDAXJMmvq83qOWqrCx4552CtRNDmDDxAR8wlak0oIFzf1va8hmf8QTus72ePg2+FgieespziKVIIdnZyCefIO++i6xbp+z0ihkhU3AOHz5MlSpVXPZZLBYqVqzIYS8pUPv27csHH3zAqlWrGDVqFO+//z533XWXS71VL4pX4Xjvqd7x48dTvnx553apNrwLmGlM82pvY8HCTGaGUaLow7BYYPKbypjYkU3SgcmktjcmY1x8LBj0u8u9PYYDsxkGBDk/wpCh3iPy2mwwNIRJpooAcvQoXNMOvv3W9UBurrKJ6tunYAPP0Pu8X+8Lsdvhk4WBtxGDmDAxjGFsZzs55HCGM3zP99zCLR7PWbZMpSrzxr59oTGtKwhityOjR0O1JLjjXzBwAFzdBq5ojmzcGGnxwkrACs7IkSPzGQFfvG3fvr3AAg0dOpTOnTvTrFkz+vXrx3vvvcfixYvZvXt3gescNWoUWVlZzu3AgQMFrqs4YsXKYTwrpQA2bPzO7wHVe4YzzGEOAxhAf/ozgxmc4lRhRI04xr/+BYsWQ+3argcaN4YVKzGuvz40DXfqBJ1S3NtlmM1QoQI88khw27ztNuja1X2bJhPcdBP07BncNosab7wBf/7pXhmx21WCzx9+8Ls6+fZbpEd3uPWWwMLZ5hTt71UoKEMZSlLSZ7ljx/yr76hnD/TwMuIxeGFsfgvo9HTo0B7Zti0yckWAgKNBjRgxggEDBngtU6dOHZKSkjhykTGl1WolMzOTpAACj7Vp0waAXbt2UbduXZKSklh3kaqckaGy83qqNz4+nvj4eL/b1Lhixk
xpSnOa017LVMT/JZCNbKQrXfmLvzCjljne4z2e4ik+53Ou4ZpCyx0pjO7dkVtvVY90jkjGLVqEZubG0abJhCxZAsPug3nzXL2pmjeHD+djJCcHt02zGVm0GJ59Fqa+dT72TkIC3P8APPdcRILnRRUz3/Gdd2rOHLj2Wp9VyZgxMPZ5dY5DYTKZfK+NWCyhWRYtJtSqFdxyoUT27HGfAgTUfXjunPq+LlgQVrkiRqgMgRxGxhs2bHDuW7FihU8j44tZu3atALJlyxYROW9knHGB0dz06dMlISFBzp4961ed2sg4cIbIELGIxa2BseP1jXzjV11H5IhUkApuDZZNYpIyUkb2y/4Q9yh2sR84IPaZM8U+darYLzDyD2mbp06JPS1NbadOhaXNooDdbPJtCNy1i+96Vqzwz6jY0/bdd2HobWxitYpUry5iGO4NjE0mkZYtIy2lwj52rDIs92V4np0daVELTCDjd8gUHBGRLl26SMuWLSUtLU3Wrl0r9evXlz59+jiPHzx4UBo2bChpaWkiIrJr1y55/vnnZcOGDbJnzx759NNPpU6dOtK+fXvnOVarVZo2bSo33nijbN68WZYvXy6VK1eWUaNG+S2XVnACZ6fslLJS1q1SYhazdJSOYhe7X3WNk3FiEpNHRcksZnlang5xjzSa0GNPqup9sClhEfu99/qup9tN3gcuk+H0lLvQa85uIPbHHgtDT2Obzz9XiszFSo7JJBIfL5KaGmkJFfYHHxR7XAnfCu/evZEWtcBEjYJz7Ngx6dOnj5QtW1YSEhJk4MCBcvLkSefxPXv2CCCrVq0SEZH9+/dL+/btpWLFihIfHy/16tWTJ554Il9H9u7dK127dpVSpUpJpUqVZMSIEZKXl+e3XFrBKRjrZb3UklpOJcShpPSUnpIl/n+WV8qVXmeCEKS+1A9hTzSa8GB/+mnfT9R+zK7YE8r5HrQaNhR7s6bn37e4Quzvvy92u38PHtHCWTkreeL/73m4WLlSpHlzVwWnXTuRn36KtGTnsY8f7/t+K2ERe05OpEUtMIGM3zpVgw4jHxB27HzFV2xiE/HE041uLi6X/tCEJj5dymtQgwNoY3BN0Ub++gtatYSMjPyGxiYT3Hyzyhvlwz5LKlaAEye8N3b99Rhff4OcPAmGgVE2+rJie8KOnXd4h9d5nXTSMTC4nut5kie5kRsjLZ4TEWWr6zCti7YoCHLgANSq6T6iOSh7rF69MN7/ILyCBZGoSdWgiT1MmOhMZ0Yykkd5NGDlBqAVrbB4sW/3J2S6RlMUMCpXhh9+hPbtXQ/ExcGwYfDRAv+Mzzt39h73xmSCG5QiYJQrV+SUm3704z7uYzvKA1cQVrOaznRmClMiLOF5DEOFIOrYMfqUG0DlHhs5yv1BsxnKlIExz4ZVpkiiZ3D0DE7Y+ZEffXpJrWBFVD25aTSFRX77DTZuVMpNx44Yl1zi/7k//aTi6bj7uTaZoFQp+H1PkcyM/j7vcw/3eDxuwsRv/EZd6haqnZwcmDsXvv5aORS1bQsDB0KlSoWqNuoQR16JcS+6zvpdcw1MfxujSZOIyRYMdDZxH2gFJ/I8zdOMZzwmTM78MI7/H+RBJjMZg9C5VWs0RQ2ZORPuG6oUmgvdxEuVgi++xOjQIbICFpDWtGYjGz3miTJj5jEe42VednvcHzZsgC5dIDNTzcKIqL/x8bBwoQrZFGtIbi6sWaM0u0aNMBo3jrRIQUErOD7QCk50sIAFTGQi61kPwBVcwQhGcBd3aeVGo3GD/PYbTJ0KP/4AlhJqZB48GOOi6O5FiVKU4izes56nkMJXfFWg+o8dU8tJJ0/mD0lkGFCiBPzyCzRsWKDqNWEmkPE74EB/Gk2wuPPv11nOIgilKBX0Ng5ykLd5m+Usx4qV9rTnfu6nIfrXTFP0MBo0gEmTIi1GUIkjzquCY2AU6rdh1iwVg9JdPEQRtf/NN1XmDE1soY2MNRGnJCVDotysZCX1qc84xrGe9W
xiE1OYQhOaMIc5QW8vEogIsmYNMm4cMn48kpZWbJLqiQjy2WdIl85IjepIo4bImDHIoUORFk0TAD3p6dXpQBC6073A9X/6qfdgz1YrLF5c4Oo1UYxeotJLVDHJn/xJPeo5Z4cuxsBgAxtoRasISBccZNcuuK0n/O9/57N622zQujUs/ASjRo3IClgI5MQJ+OAD2LZNeX7cdhtcfbXT40jsdhgwAD54X/XdsfZgNkO5cvD1Nxitiu61LU5sYQtXcRU2bPm+q2bMVKUqO9lJaQLIvXUB//iHssHxxiWXRFEuKY1XtJu4ptjzNm+TS65b5QbUD+cbvBFmqYKHHDsGHdqDI7GtzXZ+kP/5Z/hnR+RU0UywKPPmQXI1ePghlcvpjdeVB1Gn65XiAzBlilJuwNWwwmZTxha33Izk5YVbdE0BuIIr+IRPKElJDAxMmJwzOskk8y3fFli5AaXve/OwN5uVEqSJPbSCo4lJVrLSo1cGqAzpy1keRomCzNtvuw8eB2rf778rn9gihnz7Ldx9F5w9qwwk8vLO9/H77+G2nmr2ZtJrykLUHTYbHDoES5aETW5N4biVW/mDP3iVV+lFL/rSlw/5kF3sKrS93P33u/+aOLDZ4MEHC9WEJkrRCo4mJvGm3ARSJmr54H3fWaTnFsFopS+MVa7P7rDZYPVqWLYM9u71HK0VlGvM99+HQkJNiKhABR7lUeYxj3d5l970Jo64QtfbtCm89pr6/8Lk9o7b7KGHYtNNXKMVHE2M0p72mDF7PG7BQnvaezwe9Rw/7v24iAr6UYSQEyeUAnOxL++FWCyw9Ev/KvQnQrCmWPDoo7ByJXTqpG4hk0ktXc2fD6+/rm+VWEUrOJqYZBjDvMbSsWLlYR4Oo0RBpn59zzMdoB5V69cPnzzB4PRp32UMQ81cNWjgfVTKy4N//jN4smmKPDfcACtWwLlzaskqNRV69dLKTSyjFRxNTFKHOrzHey4Gi4Dz/5d4qWjP4Ay9z/sSlc2myhQlKleGxETvZaxWuLwpjHjc8xKV2Qy1aqlElhrNRRiGVmqKC1rB0cQsfejDJjZxD/eQTDJVqUpPerKGNTzJk5EWr3D06gU3dnY/i2MYcGcvlaAxShG7HZk7F2l7NVKmNFLpEhg+HHre5moocTHx8XDXXTB4sCoPri4yhqF8fr9ciuHNdUaj0cQ8Og6OjoOjKaJIbi489xy89RZkZ6mdl1wCjzwKI0dieFMUAmnn0CHYskUZ7rZti1G64C67AGKzQb++sGCBUtAcM1EO44jkZDhwwNUWx2xW5d59D+Ouu1Q9IirXzrSpKtZ+uXJKsRs4EKNChULJqNFoohOdi8oHWsHRxBJy9qyKh2MY0LgxRlzhPU8A5K+/1CzJok/OKyFly8K/H4LnnivwDIm89Rb8+0HvmbEHD4E5syHrb8WtY0f4z38xOnUqWGc0Gk1MoHNRaTTFCKNkSWjRIqh1SlYWXHsN7NnjauuTkwMTxsPevcgHHzgjC/tdr4gK3OcJu10ZG9evD0f+gr/+gjJlMMqXL1hHNGFFEP7iLwShClWiKmnukSNqsvO99+DECahTB4YNg3vugSA9E2iiDG2Do9Fo8vN//we7d7uPkCYCH85TbiiBcuoU7NzpPYaNyQTr12GUKIGRnKyVmyKAILzN2zSkIVWpShJJ1KMeU5gSFfGmduyAZs1g7Filsx8/Dps2wZAhkJICZ85EWkJNKNAKjkajyc+Mt717aVksKk1zoPhjF2QYYNaTy0UFQRjOcO7jPnaxy7l/D3t4kAcZxCCPKVPCIp+oVGbHjrne0o7/f/gB/vvfyMimCS1awdFoNPnxlZHbaoX9+wOu1ihVCq65xruiY7VCly4B162JDKtYxVSmArgoMo7/5zCHZSyLiGyg7NC3bfMcP9JuV5lPimjqNo0XtIKj0WjyU7Gi9+NmMyRVLVjdTz7lebQxm6FmTejevWB1a8LONKa5xJq6GDNm3uKtMErkSmqq74nDnB
xITw+PPJrwoRUcjUaTnwEDvY8KNhvcdXeBqjZuuQVefkW9cXhiOYyVq1aFFSsxSpQoUN2a8LOFLVjxnM3Sho1f+CWMErliNns3+bqwnCa20AqORqPJz8MPQ6VKrkH0HJhMcH0nZZ1ZQIzHH4dt6TD8QejQAbp2hbdnwI7fMBo0KITgmnBTjnI+y5SlbBgkcU9Kiu+8tJUqweWXh0ceTfjQlnwazQUIElWurZHCSEpCvl8Ld/WDdevOHzCZoG9fmDoNw1suLH/aaNQIJk0qpKSaSHMnd7KJTR69pUyY6E3vMEt1npYtoX17+PFH906BhqGScWpX8dhDB/rTgf6KPYc5zOu8zixmcZSjVKEKgxnMIzxCJSpFWryII5s3KyUnLg5uuAGjevVIi6SJIo5xjEY04jjHseFqW2XGTAIJpJNOVQposxUEMjLg+uuVsbEjeLbFohSeu+6COXP0ElVRQUcy9oFWcDQO9rCHdrTjL/5y+XE2Y6Y61fmBH6hBjQhKqNFEP1vZSle6coADlEDZT+WRRzLJfMmXtKBFZAUEcnNVdpC5c+HoURVLcuhQFSRbJ98sOmgFxwdawQk+Z87ARx/Bl1+qH5Irr1T5EEP9sH+Oc+xkJ4LQgAbEEdg8c3vak0qqWyNJCxZu4AaWsjRY4mo0MYsVK5/xGatZjSC0pz096OFUeDSaYKAVHB9oBSe4pKcrQ74//zw//WsyqW3mTBUKPdhYsTKe8bzBGxzjGACXcAkP8zCjGOXVbdXBNrZxOd4tCw0MdrOb2tQOitwaTSjIJpuv+IoccmhMY/7BP7QtmSYm0bmoNGHjzBml3GRkqPcObwW7XW0DBkC9etCuXfDatGOnN71ZxCKXwGLHOMYYxrCJTSxkISYfToI/87PPtgRhE5u0glOEOcc51rGO05ymMY25lEsjLVLQsGPnWZ5lIhM5w/l8A01pyhzmcCVXRlA6jSayaDdxTaH46CM1c+Mtbturrwa3zc/5nE/4xG34d0FYzGI+4zOf9fi7nBVPfMAyaiKPILzO61SnOtdxHZ3pTE1qcgu3sJ/AozBHI4/yKC/wgotyA5BOOu1pzza2RUgyjSbyaAVHUyiWLlVLUZ6wWuGLL/wLtOUv05mOGc8uD2bMTGOaz3qu53qf9gGlKMV1XBewjJrI8x/+w6M8ylGOOvcJwjKW0YY2/MmfEZSu8OxhD2/ypltF34aNc5zjOZ6LgGQaTXSgFRxNoTh3zncQLXexJwrDdrbnc0e9EBs2drDDZz2VqMRgBntcyjIweJAHSUDbaRU1fud3JjDB7TEbNo5ylPGMD7NUwWUuc70uw1qx8gmfcJKTYZRKo4ketIKjKRStWnmPH2EywRVXBNcNsyLe8yQZGFSggl91TWISN3MzgNMw2fG3F714kRcLIWnh+ZM/eZVXeZIneZ3XySAjovIUFd7lXZ+D/yxmkUdeGKUKLhlk+LQzs2Ejk8wwSaTRRBdawdEUisGDvS9R2e3w0EPBbbMf/Xx6iNzFXX7VFU88S1jCGtYwgAF0oxv3ci+ppDKPeRFzcRWEp3maS7nUqdyMYAQ1qMFYxrpdltCc5wAHfN4jpzlNFllhkij4VKe6x+jBDixYdLBKTbElpApOZmYm/fr1IyEhgcTERAYNGkROTo7H8nv37sUwDLfbxx9/7Czn7vj8+fND2RWNB5KTYfZsNUNzYdoih9LTt2/w3cTv5V6qU92tHY4FC9Wpzr3c63d9BgbXcR0zmMEXfMF0pnM1V0fUzXYc4xjPeOx/v/LIw44dK1ZGM5rJTI6YbEWBylT2WaYEJfzKoxSt3MVdXhVdCxZ605sylAmjVBpN9BBSBadfv35s3bqVr776ii+++II1a9YwdOhQj+UvvfRSDh065LI999xzlC1blq5du7qUnT17tku5Hj16hLIrGi/066fyvPToAfHxSrlp0UKFP3//fe8zPAWhPOX5ju9oQhNA/ZA7lpUa05jv+I5EEoPbaBg5xSmf9iHP8z
znOBcmiYoe/ejnNcO1Y/Avyh5yNajBSEa6PWbGTFnKMoYxYZZKo4keQhboLz09nSZNmrB+/XquuuoqAJYvX85NN93EwYMHSU5O9queli1b0qpVK2bOnHleaMNg8eLFBVZqdKC/0CISntDngvA93/Md3wEqKnF72hf5AGeLWcxt3Oaz3Fd8RQoFz+gd6/SjH/OZn28Zx4yZkpRkAxtoRKMISRccBOFVXuVFXuQEJ5z7r+VapjPd+RCg0cQKgYzfIZvBSU1NJTEx0ancAKSkpGAymUhLS/Orjo0bN7J582YGDRqU79jw4cOpVKkSrVu3ZtasWXjT03Jzc8nOznbZNKEjXHldDAza055n/n51oEORV24Av71estH3sTdmM5thDHPO7jnujbrUZTWri7xyA6pPj/M4hznMSlayiEWkk873fK+VG02xJ2SRjA8fPkyVKlVcG7NYqFixIocPH/arjpkzZ9K4cWPaXRQG9/nnn+f666+ndOnSrFy5kgceeICcnBwe8mDNOn78eJ57TseD0MAGNjCXuRzjGLWpzQAGRF2U4gY0CGq54koccUxhCmMYwzKWcZrTNKUp13JtVCrCgrCLXZzkJLWo5dNb8ELiiecGbiCXXJawhAUsoBzl6ElPalErdEIXktxcFSdr/36oXBluvRX0pLomaEiAPPXUUwJ43dLT0+XFF1+UBg0a5Du/cuXK8tZbb/ls5/Tp01K+fHmZOHGiz7LPPPOM1KhRw+Pxs2fPSlZWlnM7cOCAAJKVleWzbk1scFpOSw/pIQhiEYuY/34ZYsgz8ozYxR5pEZ3YxS5NpImYxSy4eZnFLP+Qf0RaTE0QWSgLpYk0cV5ji1ikr/SVg3LQ7zo+l8+lolQUBCkhJcQkJjHEkP7SX87K2RBKXzDmzhWpUEEEREwm9bdUKZGXXhKxR8/XURNlZGVl+T1+BzyDM2LECAYMGOC1TJ06dUhKSuLIkSMu+61WK5mZmSQlJflsZ+HChZw+fZp7/HDBadOmDWPHjiU3N5f4+PxGg/Hx8W73a4oPwxjmTN9wsfHpWMZSjWrcz/2REC0fBgazmMU/+SfnOOcS1NBhPzKDGRGUUBNM3uZt7uM+l1klK1YWsIDv+I71rKca1bzWsZa19KCH097owvg+7/M+eeQxl7mh6UABWLxYOSc4cAQLPXMGnnpKLXM/8URkZNPEEKHSsrZt2yaAbNiwwblvxYoVYhiG/PHHHz7P79Chg9x+++1+tfXCCy9IhQoV/JYtEA1QU/TZJ/vEEMPtbIjjlSzJYhVrQPXult3ypDwpbaWtXCvXyvPyvBySQ0GTe7NslpvlZqfsJjFJT+kpW2WrS9+ekCfkMrlMLpFLpIN0kI/kI7GJLWhyaEJHpmRKvMR7vC8tYpHBMthnPZ2kk8cZP8crXdLD0CPf2O0ideuKGIaatXG3lSkjcvJkpCXVRCOBjN8hU3BERLp06SItW7aUtLQ0Wbt2rdSvX1/69OnjPH7w4EFp2LChpKWluZy3c+dOMQxDli1blq/Ozz77TGbMmCG//vqr7Ny5U9566y0pXbq0jB492m+5tIJTvHhL3vKp4CDIBtngu7K/mStznctcjvNNYpLSUlq+lq+DKv9ROSrpki7H5JjL/lRJlbJS1kUGx/+9pXfACpsm/Lwpb/q8N0tKScmRHI91HJWjPu9ts5jlWXk2jD3zzIYNnhWbC7cPP4y0pJpoJJDxO6RxcObOnUujRo3o1KkTN910E9deey1vv/2283heXh47duzg9OnTLufNmjWLGjVqcOONN+ars0SJEkyZMoW2bdvSokULpk+fzmuvvcaYMTreg8Y9pzntM6Q9kC8jsyc2s5m7uRvb3y8Hduyc4Qy3cAuHOFRgeS/mEi6hEY1cjE5zyeVWbuUMZ1xkcPz/ER8xhSlBk0ETGnaz2+nl5YmznPWaosOfaMwmTC5u5JHk2DHfZQwDjh71XU6j8UbIvKgAKlasyLx58zwer1Wrllv37nHjxj
Fu3Di353Tp0oUuXboETUZN7NOMZl6Tc4IK/NaQhn7VN5nJmDC5DZMvCLnkMoMZjGZ0geT1h4Us5C/+8lpmEpP4N/+OSo8hjaICFXymWwAV3NITSSRRkpKc5azHMlas1KVugWQMNjVr+i4jArVqhVwUTYyjc1FpYp4UUqhFLY+zOGbM3M7tfoX3B1jKUq9Rcu3YWc7yAsnqLz/yo9c8WYKwl70c4YjHMprIcyd3elW+zZjpRCcu4RKPZUpTmn708zoTFEcc/ejn8Xg4adgQrr7ac4Rzw4AqVaBz5/DKpYk9tIKjiXlMmPiQD4knPt8gYMZMDWrwOq/7XZ+v2SDI76kVbNzl4SpMOU1kaEQj+tLXrfLtmHl7lmd91jOWsVSlar7721HH67xOBSoUXuAgMXkyxMWB+aLb0xEkdOpUKBGZPLeaGEIrOJpiwdVczQY20JvezpmPBBJ4mIdZz3qSOB+64NQpmDEDevWCf/0LXn3V1W7gOq7z+rRsxsx1XBeyvgB0opOLK/DFGBhczuVen/w14SU3Fz77DGbOhJUrwfq3DjyLWdzN3RgYmDA5788KVGAJS7iWa33WXY1qpJHGndzpcm82pjELWcgwhoWkTwXlH/+A779XMzkX0qQJfPkl3OY7U4lG45OQ5aKKZnQuquJNHnmc4hTlKJdvhmPjRjU1fuyYmkJ35NUqWRI+/hhuuglWs5p/8k+P9ZswsZ3t1Kd+yPpgxUpDGrKPfR5nlN7jPe7m7pDJoPGft9+GkSPh+PHz+6pVgylToGdP9X4ve1nMYk5ykkY0ojvdC5QMNJNM9rKXcpSjHvWi3gZr1y44cEBFMr788vCletEUTQIZv7WCoxUczd9kZkK9epCdDbaLdAbDUFPmW7ZAo0bwIi/yX/6LBYtzOcqCBRs2JmTMxvxBf3JyoGlTuOUWNR0fbH7jNzrSkcOo1CeCOOV5kieZwISoH9yKA2+/Dffdl3+/YyD/7DO4+ebwyqTRFFW0guMDreBoHOTlwZ49arZm8WL1lG334NRiscDQoeqpG2AVq5jMZNawBhMmbrB25egzD/PVhCsxmZR9QV4eVKoE8+bBDTcEX/5ssnmf91nAAk5ykiu4gmEMow1tgt+YJmDOnoWkJMjy4MltGEqp3rFDz1xoNP6gFRwfaAVHk5cH48fDm2+ej7cRFwfnznk/LykJDnkIcdOjB3z+eX4FyaHs/PgjXHWVf/JZrbB0Kfz2G5QvD927K8+SWOKvv+D99+H336FCBejdWy1RxBJLlpxfgvLG+vX+3xsaTXEmkPE7pHFwNJpoxGZTRoxLl7oqI76UG29lfv4ZPv3U/TG7XT2djx3rucyFLF8OAwZARoZSjOx2eOABGD4cJk5UM0lFncmT4fHH1bUwm5Wt0wsvKCVnzhwoaqnjrFZlHLtmjbrWHTooe60Mz/H5XPC3nEaj8Z8Y+KnUaAJjwQL44ovAzzOb4cor3R/76COleFg9eIfbbKrNU6egTBnPbaxdq+wxHIqXwxbIalVKwblz8NZbgcseTXz4ITz88Pn3FyqZCxYog+7Zs8MvlzdOcYp5f7+Oc5zGNGYoQ+lIR7b+z+Dmm2HfvvOuza++CrVr+58wskaN0Mmu0RRX9BKVXqIqdrRvDz/84NnWxhuff+7eIHToUDXzkOfZcxuAw4ehalXPxzt2VErOxUbODgwD9u6Fyy7zU+AoQwQaNIDdu9X/7oi2Ph7gAP/kn+xmNwaGizF3nzMDWVnzHU5kmvJdM7MZLrlEKaeZme7rNpnUstyWLdoGR6Pxh0DGbx0HR1PsSE8PTLlxRFwdPhy6dXNfpm5dz0qJg7JloWJFz8czMuC777zXYzLB/Pne24lm0tOVW7C3xyrDULYr0YAg9KAH+9jnfA/nAzl+WHI2mfdMcnvNbDZl39W1q/u6TSa1TZ6slRuNJhRoBUdT7CjvOa0PoAabxEQ1+BiGMv6cN08ZJHsaiPr39xx6HtTT/KBB3qOz+pOE0GTyr1
y0kpPju4zJ5F+5cPAjP/IzP3uNTC2PvQpm98ftdqXULVyYP7dS48Yq4F/HjsGTV6PRnEcrOJpiR79++UPEX8yECWppwWqFtDTo08f7U3ZSErzyivr/4nJms1pu+e9/vbeZnOzbgNhqVbYdRZU6dfzrY+PG4ZHHF9/wjfds3waQfAjq7fJYJDsbbr9dLcv9+KOandq4EX79Ff7pOV6kRqMpJFrB0RQ77r9fzeK4U3IsFpXtuF8/pah4m5W5mEceUTM9DRqc3xcXB3ffDampKh6ONxITVWoIbwpAfLzyNCqqVKoEd9zhvY+VK0dP4Dt/8o4BYHZfzmI57/puMkHbtsrlv1UrvSyl0YQareBoih1JScrWxWHEWqLE+QG3SRNYvVrZyxSEPn3UksT27eopPSNDeQR5Myy+kHHjICHB8wzTa68pRagoM3Gi+jw8KTkmk/IU82XTFA6u4RrfiVMzK8BO92k5rFalUGs0mvCjFRxNsaRpU9i5U8UuefJJGDUKVq2CzZvVDE5hMAxo2FA9pQeqjNSurZbEunZ1fcKvW1e5V8fCYJmcrALbDRrkXsk5cgQefRT69i2Yp1swSSGFetTzmJXdJCaar3kQ8lxzcTiu3b33wo03hlpKjUbjDu0mrt3ENVHKH3+oKL8JCdC8eewtaWzYoLJKe+Ojj+DOO8Mjjye2sY0OdCCTTOwojcuECTt2utCFxfZPmf5mHK+9Bvv3q3Nq1YIRI1SAxkCWOTUajXd0qgYfaAVHo4k8gwbBe+95Do5oNsM116jlxEiTQQZTmcoHfEAWWdSnPvdzP33o4zRCttuVUmoYapZKKzYaTfDRCo4PtIKj0agZlEWLVHTlyy9X9kPlyoWv/X/8Q8ngjYoVi7ZbvEajCS46F5VGo/FIVpby1vr6a2UDYxhqFuXRR+Hdd9WxcFCunGrb2yNW6dLBbdNmU30tarmuNBpN4OhJVI0mhtmyRS0DffwxHD+ulInbblMG1aAG+7w8tf/MGejVC77/Pjyy+VKkzGYlTzBITVXu2XFxKtdV3brwxhv+JVjVaDRFE71EpZeoNDFIerqKrrx+/fl98fFKufnwQ8/nmc3QqROsWBF6GU+ehEaNlCv9xS7hJhOUKgX/+1/+CMCB8tFHyiPLMM634zDYTklRSVDj4jyfr9Foogedi0qjKcbs2wfXXgs//+y6PzdXKTfevLFsNpU+4OTJ0MoIaolq1arzbvklSpxPZVGhgpKjsMrNsWNK0RNxVaJE1Pb11yoFh0ajiT20DY5GE2NMmKDSA3gKlOfPnO3p0+ExOG7QAHbsUPGIvv5aLZldc41avipZsvD1OzK8e+qziFJwHnss9tzwNZrijlZwNJoYwmZThsKeXK/9ITERLrkkaCL5xGJR9jHduwe/7i1bfCsu+/YpT7KCRq/WaDTRiV6i0mhiiNOnlbFwQTGbYdgw3wkxiwrx8b4VHMPQNjgaTSyiFRyNJoYoU0Zt3nAEoLs4EJ3ZrLJ4jxoVGtkiwS23eJ/NMpvhhhu0gqPRxCJawdFoYgiTCQYO9JysE1TE3WnT4Oqrz+9LSFDZ0NeuVf/HCt26KU8tTzNSdjs89VR4ZdJoNOFBu4nH0q+5RoNKF9CqFWRm5p+9MAwYMgSmT1fvjx5Vy1pJSbE7i3HggEp4uX27UnQcCTxNJpgxAwYMiKh4Go0mAHQkY42mGFO9Ovz0EwweDN9+e35/2bIqAeTo0ef3VaoUfvnCzaWXwq+/qng3n36qbJSaNVO5sJKSIi2dRqMJFXoGR8/gaGKYnTtVsLySJaF9e9/2ORqNRhPN6BkcjUYDQP36atNoNJrihjYy1mg0Go1GE3OETMF58cUXadeuHaVLlyYxMdGvc0SE0aNHU61aNUqVKkVKSgo7d+50KZOZmUm/fv1ISEggMTGRQYMGkZOTE4IeaDQajUajKaqETME5d+4cd9xxB/fff7/f57z88stMnjyZadOmkZaWRpkyZejcuTNnz551lunXrx
9bt27lq6++4osvvmDNmjUMHTo0FF3QaDQajUZTRAm5kfGcOXN45JFHOHHihNdyIkJycjIjRozg8ccfByArK4uqVasyZ84cevfuTXp6Ok2aNGH9+vVcddVVACxfvpybbrqJgwcPkpyc7JdM2shYo9FoNJqiR5HMJr5nzx4OHz5MSkqKc1/58uVp06YNqampAKSmppKYmOhUbgBSUlIwmUykpaWFXWaNRqPRaDTRSdR4UR0+fBiAqlWruuyvWrWq89jhw4epUqWKy3GLxULFihWdZdyRm5tLbm6u8312dnawxNZoNBqNRhOFBDSDM3LkSAzD8Lpt3749VLIWmPHjx1O+fHnndumll0ZaJI1Go9FoNCEkoBmcESNGMMBHXPM6deoUSJCkv0OKZmRkUK1aNef+jIwMWrRo4Sxz5MgRl/OsViuZmZnO890xatQoHnvsMef77OxsreRoNBqNRhPDBKTgVK5cmcqVK4dEkNq1a5OUlMQ333zjVGiys7NJS0tzemK1bduWEydOsHHjRq688koAvv32W+x2O23atPFYd3x8PPHx8SGRW6PRaDQaTfQRMhuc/fv3k5mZyf79+7HZbGzevBmAevXqUbZsWQAaNWrE+PHj6dmzJ4Zh8Mgjj/DCCy9Qv359ateuzTPPPENycjI9evQAoHHjxnTp0oUhQ4Ywbdo08vLyePDBB+ndu7ffHlSgPLZA2+JoNBqNRlOUcIzbfjmAS4jo37+/APm2VatWOcsAMnv2bOd7u90uzzzzjFStWlXi4+OlU6dOsmPHDpd6jx07Jn369JGyZctKQkKCDBw4UE6ePBmQbAcOHHArm970pje96U1veov+7cCBAz7H+mKZbNNut/Pnn39Srlw5DMMocD0OW54DBw7EdDwd3c/YoTj0EXQ/Yw3dz9iiMP0UEU6ePElycjImk3c/qahxEw8nJpOJGjVqBK2+hISEmL4ZHeh+xg7FoY+g+xlr6H7GFgXtZ/ny5f0qFzWB/jQajUaj0WiChVZwNBqNRqPRxBxawSkE8fHxjBkzJuZd0HU/Y4fi0EfQ/Yw1dD9ji3D1s1gaGWs0Go1Go4lt9AyORqPRaDSamEMrOBqNRqPRaGIOreBoNBqNRqOJObSCo9FoNBqNJubQCo4XXnzxRdq1a0fp0qVJTEz06xwRYfTo0VSrVo1SpUqRkpLCzp07XcpkZmbSr18/EhISSExMZNCgQeTk5ISgB/4RqDx79+7FMAy328cff+ws5+74/Pnzw9EltxTkc+/YsWO+PgwbNsylzP79++nWrRulS5emSpUqPPHEE1it1lB2xSuB9jMzM5N///vfNGzYkFKlSnHZZZfx0EMPkZWV5VIu0tdzypQp1KpVi5IlS9KmTRvWrVvntfzHH39Mo0aNKFmyJM2aNWPp0qUux/35rkaCQPo5Y8YMrrvuOipUqECFChVISUnJV37AgAH5rluXLl1C3Q2fBNLPOXPm5OtDyZIlXcpE4/UMpI/ufmsMw6Bbt27OMtF4LdesWcMtt9xCcnIyhmGwZMkSn+esXr2aVq1aER8fT7169ZgzZ06+MoF+390SUBKnYsbo0aPltddek8cee0zKly/v1zkTJkyQ8uXLy5IlS2TLli1y6623Su3ateXMmTPOMl26dJErrrhCfvrpJ/n++++lXr160qdPnxD1wjeBymO1WuXQoUMu23PPPSdly5Z1yQsGKtfYheUu/BzCTUE+9w4dOsiQIUNc+pCVleU8brVapWnTppKSkiKbNm2SpUuXSqVKlWTUqFGh7o5HAu3nr7/+Krfddpt89tlnsmvXLvnmm2+kfv36cvvtt7uUi+T1nD9/vsTFxcmsWbNk69atMmTIEElMTJSMjAy35X/44Qcxm83y8ssvy7Zt2+S///2vlChRQn799VdnGX++q+Em0H727dtXpkyZIps2bZL09HQZMGCAlC9fXg4ePOgs079/f+nSpYvLdcvMzAxXl9wSaD9nz54tCQkJLn04fPiwS5
lou56B9vHYsWMu/fvf//4nZrPZJV9jNF7LpUuXyn/+8x9ZtGiRALJ48WKv5X///XcpXbq0PPbYY7Jt2zZ58803xWw2y/Lly51lAv3sPKEVHD+YPXu2XwqO3W6XpKQkeeWVV5z7Tpw4IfHx8fLhhx+KiMi2bdsEkPXr1zvLLFu2TAzDkD/++CPosvsiWPK0aNFC7r33Xpd9/tzs4aKg/ezQoYM8/PDDHo8vXbpUTCaTy4/t1KlTJSEhQXJzc4MieyAE63ouWLBA4uLiJC8vz7kvktezdevWMnz4cOd7m80mycnJMn78eLfl77zzTunWrZvLvjZt2sh9990nIv59VyNBoP28GKvVKuXKlZN3333Xua9///7SvXv3YItaKALtp6/f4Gi8noW9lpMmTZJy5cpJTk6Oc180XssL8ec34sknn5TLL7/cZV+vXr2kc+fOzveF/ewc6CWqILJnzx4OHz5MSkqKc1/58uVp06YNqampAKSmppKYmMhVV13lLJOSkoLJZCItLS3sMgdDno0bN7J582YGDRqU79jw4cOpVKkSrVu3ZtasWf6luA8Bhenn3LlzqVSpEk2bNmXUqFGcPn3apd5mzZpRtWpV577OnTuTnZ3N1q1bg98RHwTr/srKyiIhIQGLxTVdXSSu57lz59i4caPL98pkMpGSkuL8Xl1MamqqS3lQ18VR3p/vargpSD8v5vTp0+Tl5VGxYkWX/atXr6ZKlSo0bNiQ+++/n2PHjgVV9kAoaD9zcnKoWbMml156Kd27d3f5fkXb9QzGtZw5cya9e/emTJkyLvuj6VoWBF/fzWB8dg6KZbLNUHH48GEAl8HO8d5x7PDhw1SpUsXluMVioWLFis4y4SQY8sycOZPGjRvTrl07l/3PP/88119/PaVLl2blypU88MAD5OTk8NBDDwVNfn8paD/79u1LzZo1SU5O5pdffuGpp55ix44dLFq0yFmvu+vtOBZugnE9jx49ytixYxk6dKjL/khdz6NHj2Kz2dx+ztu3b3d7jqfrcuH30LHPU5lwU5B+XsxTTz1FcnKyy+DQpUsXbrvtNmrXrs3u3bt5+umn6dq1K6mpqZjN5qD2wR8K0s+GDRsya9YsmjdvTlZWFhMnTqRdu3Zs3bqVGjVqRN31LOy1XLduHf/73/+YOXOmy/5ou5YFwdN3Mzs7mzNnznD8+PFCfw8cFDsFZ+TIkbz00ktey6Snp9OoUaMwSRQa/O1nYTlz5gzz5s3jmWeeyXfswn0tW7bk1KlTvPLKK0EdEEPdzwsH+WbNmlGtWjU6derE7t27qVu3boHrDZRwXc/s7Gy6detGkyZNePbZZ12OheN6agrOhAkTmD9/PqtXr3YxwO3du7fz/2bNmtG8eXPq1q3L6tWr6dSpUyREDZi2bdvStm1b5/t27drRuHFjpk+fztixYyMoWWiYOXMmzZo1o3Xr1i77Y+FahpNip+CMGDGCAQMGeC1Tp06dAtWdlJQEQEZGBtWqVXPuz8jIoEWLFs4yR44ccTnParWSmZnpPD8Y+NvPwsqzcOFCTp8+zT333OOzbJs2bRg7diy5ublBy0ESrn46aNOmDQC7du2ibt26JCUl5bPuz8jIAChy1/PkyZN06dKFcuXKsXjxYkqUKOG1fCiupzsqVaqE2Wx2fq4OMjIyPPYpKSnJa3l/vqvhpiD9dDBx4kQmTJjA119/TfPmzb2WrVOnDpUqVWLXrl0RGRQL008HJUqUoGXLluzatQuIvutZmD6eOnWK+fPn8/zzz/tsJ9LXsiB4+m4mJCRQqlQpzGZzoe8PJwFZ7BRTAjUynjhxonNfVlaWWyPjDRs2OMusWLEi4kbGBZWnQ4cO+bxtPPHCCy9IhQoVCixrYQjW57527VoBZMuWLSJy3sj4Quv+6dOnS0JCgpw9ezZ4HfCTgvYzKytLrr76aunQoYOcOnXKr7bCeT1bt24tDz74oPO9zWaT6tWrezUyvvnmm132tW3bNp+Rsbfvai
QItJ8iIi+99JIkJCRIamqqX20cOHBADMOQTz/9tNDyFpSC9PNCrFarNGzYUB599FERic7rWdA+zp49W+Lj4+Xo0aM+24iGa3kh+Glk3LRpU5d9ffr0yWdkXJj7wylPQKWLGfv27ZNNmzY5XaA3bdokmzZtcnGFbtiwoSxatMj5fsKECZKYmCiffvqp/PLLL9K9e3e3buItW7aUtLQ0Wbt2rdSvXz/ibuLe5Dl48KA0bNhQ0tLSXM7buXOnGIYhy5Yty1fnZ599JjNmzJBff/1Vdu7cKW+99ZaULl1aRo8eHfL+eCLQfu7atUuef/552bBhg+zZs0c+/fRTqVOnjrRv3955jsNN/MYbb5TNmzfL8uXLpXLlyhF3Ew+kn1lZWdKmTRtp1qyZ7Nq1y8UF1Wq1ikjkr+f8+fMlPj5e5syZI9u2bZOhQ4dKYmKi03vt7rvvlpEjRzrL//DDD2KxWGTixImSnp4uY8aMcesm7uu7Gm4C7eeECRMkLi5OFi5c6HLdHL9RJ0+elMcff1xSU1Nlz5498vXXX0urVq2kfv36EVHAHQTaz+eee05WrFghu3fvlo0bN0rv3r2lZMmSsnXrVmeZaLuegfbRwbXXXiu9evXKtz9ar+XJkyedYyMgr732mmzatEn27dsnIiIjR46Uu+++21ne4Sb+xBNPSHp6ukyZMsWtm7i3z85ftILjhf79+wuQb1u1apWzDH/HBnFgt9vlmWeekapVq0p8fLx06tRJduzY4VLvsWPHpE+fPlK2bFlJSEiQgQMHuihN4caXPHv27MnXbxGRUaNGyaWXXio2my1fncuWLZMWLVpI2bJlpUyZMnLFFVfItGnT3JYNF4H2c//+/dK+fXupWLGixMfHS7169eSJJ55wiYMjIrJ3717p2rWrlCpVSipVqiQjRoxwca8ON4H2c9WqVW7vc0D27NkjItFxPd9880257LLLJC4uTlq3bi0//fST81iHDh2kf//+LuUXLFggDRo0kLi4OLn88svlyy+/dDnuz3c1EgTSz5o1a7q9bmPGjBERkdOnT8uNN94olStXlhIlSkjNmjVlyJAhAQ8UoSCQfj7yyCPOslWrVpWbbrpJfv75Z5f6ovF6BnrPbt++XQBZuXJlvrqi9Vp6+v1w9K1///7SoUOHfOe0aNFC4uLipE6dOi5jqANvn52/GCIR8tvVaDQajUajCRE6Do5Go9FoNJqYQys4Go1Go9FoYg6t4Gg0Go1Go4k5tIKj0Wg0Go0m5tAKjkaj0Wg0mphDKzgajUaj0WhiDq3gaDQajUajiTm0gqPRaDQajSbm0AqORqPRaDSamEMrOBqNRqPRaGIOreBoNBqNRqOJObSCo9FoNBqNJub4fzbnzhIGzgH6AAAAAElFTkSuQmCC",
"text/plain": [
""
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from nnfs.datasets import spiral_data\n",
"import numpy as np\n",
"import nnfs\n",
"nnfs.init()\n",
"import matplotlib.pyplot as plt\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"plt.scatter(X[:, 0], X[:, 1], c=y, cmap='brg')\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "KOZ15RsfFjmu"
},
"source": [
"\n",
"FULL CODE UPTO THIS POINT: FORWARD AND BACKWARD PASS \n",
"
"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "MvVmaqA1Fjmu",
"outputId": "5d6b274f-adaa-44e5-f248-bd2f74d4ee1c"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[0.33333334 0.33333334 0.33333334]\n",
" [0.33333334 0.33333334 0.33333334]\n",
" [0.33333334 0.3333333 0.33333334]\n",
" [0.3333335 0.33333302 0.33333355]\n",
" [0.33333334 0.33333334 0.33333334]]\n",
"loss: 1.0986118\n",
"acc: 0.33666666666666667\n",
"[[ 7.7209341e-05 -1.0590541e-04 -8.3512554e-05]\n",
" [ 2.8525142e-04 6.1521467e-05 -9.9994701e-05]]\n",
"[[ 0.00036935 -0.00025332 0.00021116]]\n",
"[[ 1.04255480e-04 1.59160336e-05 -1.20171506e-04]\n",
" [-4.91499777e-05 1.94195833e-04 -1.45045851e-04]\n",
" [ 3.61476232e-05 1.08254273e-04 -1.44401885e-04]]\n",
"[[ 1.0388438e-05 -1.0533840e-05 3.9814040e-08]]\n"
]
}
],
"source": [
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"# Create Dense layer with 2 input features and 3 output values\n",
"dense1 = Layer_Dense(2, 3)\n",
"# Create ReLU activation (to be used with Dense layer):\n",
"activation1 = Activation_ReLU()\n",
"# Create second Dense layer with 3 input features (as we take output\n",
"# of previous layer here) and 3 output values (output values)\n",
"dense2 = Layer_Dense(3, 3)\n",
"# Create Softmax classifier’s combined loss and activation\n",
"loss_activation = Activation_Softmax_Loss_CategoricalCrossentropy()\n",
"# Perform a forward pass of our training data through this layer\n",
"dense1.forward(X)\n",
"# Perform a forward pass through activation function\n",
"# takes the output of first dense layer here\n",
"activation1.forward(dense1.output)\n",
"# Perform a forward pass through second Dense layer\n",
"# takes outputs of activation function of first layer as inputs\n",
"dense2.forward(activation1.output)\n",
"# Perform a forward pass through the activation/loss function\n",
"# takes the output of second dense layer here and returns loss\n",
"loss = loss_activation.forward(dense2.output, y)\n",
"\n",
"# Let’s see output of the first few samples:\n",
"print(loss_activation.output[:5])\n",
"# Print loss value\n",
"print('loss:', loss)\n",
"# Calculate accuracy from output of activation2 and targets\n",
"# calculate values along first axis\n",
"predictions = np.argmax(loss_activation.output, axis=1)\n",
"if len(y.shape) == 2:\n",
" y = np.argmax(y, axis=1)\n",
"accuracy = np.mean(predictions == y)\n",
"# Print accuracy\n",
"print('acc:', accuracy)\n",
"# Backward pass\n",
"loss_activation.backward(loss_activation.output, y)\n",
"dense2.backward(loss_activation.dinputs)\n",
"activation1.backward(dense2.dinputs)\n",
"dense1.backward(activation1.dinputs)\n",
"# Print gradients\n",
"print(dense1.dweights)\n",
"print(dense1.dbiases)\n",
"print(dense2.dweights)\n",
"print(dense2.dbiases)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "yDgOzov1Fjmu",
"outputId": "7d32d783-2485-4ecf-e4ce-cae23205e516"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 1, Loss: 36.0\n",
"Iteration 2, Loss: 33.872397424621624\n",
"Iteration 3, Loss: 31.87054345809546\n",
"Iteration 4, Loss: 29.98699091998773\n",
"Iteration 5, Loss: 28.214761511794592\n",
"Iteration 6, Loss: 26.54726775906168\n",
"Iteration 7, Loss: 24.978326552541866\n",
"Iteration 8, Loss: 23.5021050739742\n",
"Iteration 9, Loss: 22.11313179151597\n",
"Iteration 10, Loss: 20.806246424284897\n",
"Iteration 11, Loss: 19.576596334671486\n",
"Iteration 12, Loss: 18.41961908608719\n",
"Iteration 13, Loss: 17.33101994032309\n",
"Iteration 14, Loss: 16.306757070164853\n",
"Iteration 15, Loss: 15.343027506224132\n",
"Iteration 16, Loss: 14.436253786815284\n",
"Iteration 17, Loss: 13.583071280700132\n",
"Iteration 18, Loss: 12.780312744165439\n",
"Iteration 19, Loss: 12.024995767388878\n",
"Iteration 20, Loss: 11.314319082257104\n",
"Iteration 21, Loss: 10.64564263994962\n",
"Iteration 22, Loss: 10.016485041642266\n",
"Iteration 23, Loss: 9.424510031713222\n",
"Iteration 24, Loss: 8.867521365009814\n",
"Iteration 25, Loss: 8.34345204094211\n",
"Iteration 26, Loss: 7.850353118483743\n",
"Iteration 27, Loss: 7.386397874602818\n",
"Iteration 28, Loss: 6.94986173712617\n",
"Iteration 29, Loss: 6.539124434950737\n",
"Iteration 30, Loss: 6.1526621719118015\n",
"Iteration 31, Loss: 5.789039869058961\n",
"Iteration 32, Loss: 5.446907999417336\n",
"Iteration 33, Loss: 5.124995576577539\n",
"Iteration 34, Loss: 4.822108497170647\n",
"Iteration 35, Loss: 4.537121521071987\n",
"Iteration 36, Loss: 4.268978030723312\n",
"Iteration 37, Loss: 4.01668121563854\n",
"Iteration 38, Loss: 3.7792956126389763\n",
"Iteration 39, Loss: 3.5559389510643094\n",
"Iteration 40, Loss: 3.345782865003274\n",
"Iteration 41, Loss: 3.1480471758404285\n",
"Iteration 42, Loss: 2.961997679823884\n",
"Iteration 43, Loss: 2.78694359065541\n",
"Iteration 44, Loss: 2.622235303237792\n",
"Iteration 45, Loss: 2.467261121418954\n",
"Iteration 46, Loss: 2.321446092335641\n",
"Iteration 47, Loss: 2.184248486806066\n",
"Iteration 48, Loss: 2.0551593804914616\n",
"Iteration 49, Loss: 1.9336995852420789\n",
"Iteration 50, Loss: 1.8194178573235094\n",
"Iteration 51, Loss: 1.7118903069357754\n",
"Iteration 52, Loss: 1.6107175940030252\n",
"Iteration 53, Loss: 1.5155241897377694\n",
"Iteration 54, Loss: 1.4259567411109748\n",
"Iteration 55, Loss: 1.3416826255281136\n",
"Iteration 56, Loss: 1.262389208248047\n",
"Iteration 57, Loss: 1.1877819791340551\n",
"Iteration 58, Loss: 1.1175840765571434\n",
"Iteration 59, Loss: 1.0515348500680068\n",
"Iteration 60, Loss: 0.9893891461492582\n",
"Iteration 61, Loss: 0.930916260625565\n",
"Iteration 62, Loss: 0.875899078709395\n",
"Iteration 63, Loss: 0.8241334819517507\n",
"Iteration 64, Loss: 0.7754271861095672\n",
"Iteration 65, Loss: 0.7295994320679934\n",
"Iteration 66, Loss: 0.6864801042040583\n",
"Iteration 67, Loss: 0.6459091389617334\n",
"Iteration 68, Loss: 0.6077358933180028\n",
"Iteration 69, Loss: 0.5718187120029812\n",
"Iteration 70, Loss: 0.5380242202642829\n",
"Iteration 71, Loss: 0.5062269967452033\n",
"Iteration 72, Loss: 0.4763089781884024\n",
"Iteration 73, Loss: 0.4481591180173807\n",
"Iteration 74, Loss: 0.42167291418136477\n",
"Iteration 75, Loss: 0.3967520449790852\n",
"Iteration 76, Loss: 0.3733039992368791\n",
"Iteration 77, Loss: 0.3512417316144445\n",
"Iteration 78, Loss: 0.33048334753976116\n",
"Iteration 79, Loss: 0.31095177724411444\n",
"Iteration 80, Loss: 0.2925745286179104\n",
"Iteration 81, Loss: 0.2752833763568879\n",
"Iteration 82, Loss: 0.25901412505149535\n",
"Iteration 83, Loss: 0.2437063914735247\n",
"Iteration 84, Loss: 0.22930333977371198\n",
"Iteration 85, Loss: 0.21575151284725816\n",
"Iteration 86, Loss: 0.2030006012946216\n",
"Iteration 87, Loss: 0.19100326852350488\n",
"Iteration 88, Loss: 0.17971497196649536\n",
"Iteration 89, Loss: 0.1690938194815031\n",
"Iteration 90, Loss: 0.1591003719214838\n",
"Iteration 91, Loss: 0.14969754273736763\n",
"Iteration 92, Loss: 0.14085041966208015\n",
"Iteration 93, Loss: 0.13252615564761738\n",
"Iteration 94, Loss: 0.1246938532452423\n",
"Iteration 95, Loss: 0.11732446503349986\n",
"Iteration 96, Loss: 0.11039058885430607\n",
"Iteration 97, Loss: 0.10386649785129919\n",
"Iteration 98, Loss: 0.09772798570124883\n",
"Iteration 99, Loss: 0.09195226348280558\n",
"Iteration 100, Loss: 0.0865178816583512\n",
"Iteration 101, Loss: 0.08140467291758889\n",
"Iteration 102, Loss: 0.07659366262828358\n",
"Iteration 103, Loss: 0.07206697005843195\n",
"Iteration 104, Loss: 0.06780781192053903\n",
"Iteration 105, Loss: 0.06380037696069592\n",
"Iteration 106, Loss: 0.06002977345222309\n",
"Iteration 107, Loss: 0.0564820075507719\n",
"Iteration 108, Loss: 0.05314393144118542\n",
"Iteration 109, Loss: 0.050003114234231524\n",
"Iteration 110, Loss: 0.04704793686603195\n",
"Iteration 111, Loss: 0.04426740148833972\n",
"Iteration 112, Loss: 0.04165120020443161\n",
"Iteration 113, Loss: 0.03918961375201954\n",
"Iteration 114, Loss: 0.0368735034129829\n",
"Iteration 115, Loss: 0.034694277992582755\n",
"Iteration 116, Loss: 0.032643851730490094\n",
"Iteration 117, Loss: 0.03071459534999028\n",
"Iteration 118, Loss: 0.028899363239415818\n",
"Iteration 119, Loss: 0.027191414181739672\n",
"Iteration 120, Loss: 0.02558439994540113\n",
"Iteration 121, Loss: 0.024072362337913877\n",
"Iteration 122, Loss: 0.022649683089386127\n",
"Iteration 123, Loss: 0.021311092099735786\n",
"Iteration 124, Loss: 0.02005160424149179\n",
"Iteration 125, Loss: 0.01886655505507656\n",
"Iteration 126, Loss: 0.017751540667355833\n",
"Iteration 127, Loss: 0.016702427744061103\n",
"Iteration 128, Loss: 0.01571531497821091\n",
"Iteration 129, Loss: 0.014786535770396103\n",
"Iteration 130, Loss: 0.013912651762769943\n",
"Iteration 131, Loss: 0.013090418519936803\n",
"Iteration 132, Loss: 0.012316768931710837\n",
"Iteration 133, Loss: 0.011588849600126475\n",
"Iteration 134, Loss: 0.010903943586632107\n",
"Iteration 135, Loss: 0.010259526183227799\n",
"Iteration 136, Loss: 0.009653186757193668\n",
"Iteration 137, Loss: 0.009082688171817357\n",
"Iteration 138, Loss: 0.008545899068542421\n",
"Iteration 139, Loss: 0.00804083320361364\n",
"Iteration 140, Loss: 0.007565618804557518\n",
"Iteration 141, Loss: 0.007118492429622391\n",
"Iteration 142, Loss: 0.006697793120481266\n",
"Iteration 143, Loss: 0.0063019473730584336\n",
"Iteration 144, Loss: 0.005929501997799936\n",
"Iteration 145, Loss: 0.005579070290327091\n",
"Iteration 146, Loss: 0.005249347396309216\n",
"Iteration 147, Loss: 0.004939114136252681\n",
"Iteration 148, Loss: 0.004647215154254898\n",
"Iteration 149, Loss: 0.00437256400626425\n",
"Iteration 150, Loss: 0.004114139259196158\n",
"Iteration 151, Loss: 0.0038709956233987848\n",
"Iteration 152, Loss: 0.0036422222163822442\n",
"Iteration 153, Loss: 0.0034269635873455254\n",
"Iteration 154, Loss: 0.0032244300300798123\n",
"Iteration 155, Loss: 0.003033866206344064\n",
"Iteration 156, Loss: 0.0028545694817259646\n",
"Iteration 157, Loss: 0.0026858615040063873\n",
"Iteration 158, Loss: 0.002527124440860861\n",
"Iteration 159, Loss: 0.002377772426750458\n",
"Iteration 160, Loss: 0.0022372501846465924\n",
"Iteration 161, Loss: 0.002105026221950533\n",
"Iteration 162, Loss: 0.0019806188966821317\n",
"Iteration 163, Loss: 0.001863566163059441\n",
"Iteration 164, Loss: 0.0017534302886055876\n",
"Iteration 165, Loss: 0.0016498016244949178\n",
"Iteration 166, Loss: 0.0015522968336895225\n",
"Iteration 167, Loss: 0.0014605572212372654\n",
"Iteration 168, Loss: 0.0013742383231737623\n",
"Iteration 169, Loss: 0.0012930183418168389\n",
"Iteration 170, Loss: 0.0012166008279945002\n",
"Iteration 171, Loss: 0.0011447005613673634\n",
"Iteration 172, Loss: 0.0010770513341135804\n",
"Iteration 173, Loss: 0.001013397095948145\n",
"Iteration 174, Loss: 0.0009535029620325111\n",
"Iteration 175, Loss: 0.0008971534673183893\n",
"Iteration 176, Loss: 0.0008441301639000644\n",
"Iteration 177, Loss: 0.0007942435095401501\n",
"Iteration 178, Loss: 0.0007473036766382048\n",
"Iteration 179, Loss: 0.0007031374518087182\n",
"Iteration 180, Loss: 0.0006615806720993984\n",
"Iteration 181, Loss: 0.0006224808039162045\n",
"Iteration 182, Loss: 0.0005856932236775429\n",
"Iteration 183, Loss: 0.0005510780772974099\n",
"Iteration 184, Loss: 0.0005185112321657664\n",
"Iteration 185, Loss: 0.00048786689510026934\n",
"Iteration 186, Loss: 0.00045903387854597503\n",
"Iteration 187, Loss: 0.00043190420223823955\n",
"Iteration 188, Loss: 0.000406378034681195\n",
"Iteration 189, Loss: 0.00038236074013664776\n",
"Iteration 190, Loss: 0.0003597649139507893\n",
"Iteration 191, Loss: 0.0003385032407062897\n",
"Iteration 192, Loss: 0.00031849748027454767\n",
"Iteration 193, Loss: 0.00029967346881992795\n",
"Iteration 194, Loss: 0.0002819629431575354\n",
"Iteration 195, Loss: 0.0002652991815966534\n",
"Iteration 196, Loss: 0.00024961903501571355\n",
"Iteration 197, Loss: 0.00023486641976601822\n",
"Iteration 198, Loss: 0.00022098629075865584\n",
"Iteration 199, Loss: 0.00020792651372860275\n",
"Iteration 200, Loss: 0.00019563773612380077\n",
"Final weights: [-3.3990955 -0.20180899 0.80271349]\n",
"Final bias: 0.6009044964517248\n"
]
}
],
"source": [
"import numpy as np\n",
"\n",
"# Initial parameters\n",
"weights = np.array([-3.0, -1.0, 2.0])\n",
"bias = 1.0\n",
"inputs = np.array([1.0, -2.0, 3.0])\n",
"target_output = 0.0\n",
"learning_rate = 0.001\n",
"\n",
"def relu(x):\n",
" return np.maximum(0, x)\n",
"\n",
"def relu_derivative(x):\n",
" return np.where(x > 0, 1.0, 0.0)\n",
"\n",
"for iteration in range(200):\n",
" # Forward pass\n",
" linear_output = np.dot(weights, inputs) + bias\n",
" output = relu(linear_output)\n",
" loss = (output - target_output) ** 2\n",
"\n",
" # Backward pass\n",
" dloss_doutput = 2 * (output - target_output)\n",
" doutput_dlinear = relu_derivative(linear_output)\n",
" dlinear_dweights = inputs\n",
" dlinear_dbias = 1.0\n",
"\n",
" dloss_dlinear = dloss_doutput * doutput_dlinear\n",
" dloss_dweights = dloss_dlinear * dlinear_dweights\n",
" dloss_dbias = dloss_dlinear * dlinear_dbias\n",
"\n",
" # Update weights and bias\n",
" weights -= learning_rate * dloss_dweights\n",
" bias -= learning_rate * dloss_dbias\n",
"\n",
" # Print the loss for this iteration\n",
" print(f\"Iteration {iteration + 1}, Loss: {loss}\")\n",
"\n",
"print(\"Final weights:\", weights)\n",
"print(\"Final bias:\", bias)\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "ZV_fcx05Fjmu"
},
"source": [
"\n",
"OPTIMIZERS GRADIENT DESCENT \n",
"
"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "lCy3lHCsFjmu"
},
"outputs": [],
"source": [
"# SGD optimizer\n",
"class Optimizer_SGD:\n",
" # Initialize optimizer - set settings,\n",
" # learning rate of 1. is default for this optimizer\n",
" def __init__(self, learning_rate=0.5):\n",
" self.learning_rate = learning_rate\n",
" # Update parameters\n",
" def update_params(self, layer):\n",
" layer.weights += -self.learning_rate * layer.dweights\n",
" layer.biases += -self.learning_rate * layer.dbiases"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "oXmtcdZeFjmu",
"outputId": "daa697a4-64b9-4833-d38b-82bd7848d970"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch: 0, acc: 0.337, loss: 1.099\n",
"epoch: 100, acc: 0.403, loss: 1.097\n",
"epoch: 200, acc: 0.407, loss: 1.084\n",
"epoch: 300, acc: 0.403, loss: 1.077\n",
"epoch: 400, acc: 0.403, loss: 1.075\n",
"epoch: 500, acc: 0.403, loss: 1.074\n",
"epoch: 600, acc: 0.397, loss: 1.073\n",
"epoch: 700, acc: 0.397, loss: 1.073\n",
"epoch: 800, acc: 0.400, loss: 1.072\n",
"epoch: 900, acc: 0.410, loss: 1.071\n",
"epoch: 1000, acc: 0.410, loss: 1.070\n",
"epoch: 1100, acc: 0.410, loss: 1.068\n",
"epoch: 1200, acc: 0.410, loss: 1.066\n",
"epoch: 1300, acc: 0.413, loss: 1.063\n",
"epoch: 1400, acc: 0.410, loss: 1.058\n",
"epoch: 1500, acc: 0.430, loss: 1.053\n",
"epoch: 1600, acc: 0.443, loss: 1.046\n",
"epoch: 1700, acc: 0.447, loss: 1.037\n",
"epoch: 1800, acc: 0.427, loss: 1.052\n",
"epoch: 1900, acc: 0.417, loss: 1.053\n",
"epoch: 2000, acc: 0.407, loss: 1.051\n",
"epoch: 2100, acc: 0.410, loss: 1.048\n",
"epoch: 2200, acc: 0.413, loss: 1.044\n",
"epoch: 2300, acc: 0.420, loss: 1.041\n",
"epoch: 2400, acc: 0.413, loss: 1.037\n",
"epoch: 2500, acc: 0.417, loss: 1.033\n",
"epoch: 2600, acc: 0.427, loss: 1.027\n",
"epoch: 2700, acc: 0.437, loss: 1.026\n",
"epoch: 2800, acc: 0.440, loss: 1.020\n",
"epoch: 2900, acc: 0.460, loss: 1.016\n",
"epoch: 3000, acc: 0.470, loss: 1.013\n",
"epoch: 3100, acc: 0.477, loss: 1.009\n",
"epoch: 3200, acc: 0.477, loss: 1.010\n",
"epoch: 3300, acc: 0.463, loss: 1.002\n",
"epoch: 3400, acc: 0.437, loss: 0.996\n",
"epoch: 3500, acc: 0.420, loss: 1.026\n",
"epoch: 3600, acc: 0.473, loss: 0.982\n",
"epoch: 3700, acc: 0.450, loss: 0.975\n",
"epoch: 3800, acc: 0.480, loss: 0.993\n",
"epoch: 3900, acc: 0.493, loss: 0.965\n",
"epoch: 4000, acc: 0.447, loss: 0.953\n",
"epoch: 4100, acc: 0.440, loss: 0.963\n",
"epoch: 4200, acc: 0.530, loss: 0.947\n",
"epoch: 4300, acc: 0.490, loss: 0.929\n",
"epoch: 4400, acc: 0.470, loss: 0.948\n",
"epoch: 4500, acc: 0.537, loss: 0.917\n",
"epoch: 4600, acc: 0.453, loss: 0.904\n",
"epoch: 4700, acc: 0.477, loss: 0.934\n",
"epoch: 4800, acc: 0.507, loss: 0.903\n",
"epoch: 4900, acc: 0.483, loss: 0.922\n",
"epoch: 5000, acc: 0.467, loss: 0.877\n",
"epoch: 5100, acc: 0.453, loss: 0.911\n",
"epoch: 5200, acc: 0.523, loss: 0.885\n",
"epoch: 5300, acc: 0.510, loss: 0.918\n",
"epoch: 5400, acc: 0.480, loss: 0.874\n",
"epoch: 5500, acc: 0.510, loss: 0.877\n",
"epoch: 5600, acc: 0.480, loss: 0.897\n",
"epoch: 5700, acc: 0.490, loss: 0.860\n",
"epoch: 5800, acc: 0.557, loss: 0.886\n",
"epoch: 5900, acc: 0.500, loss: 0.879\n",
"epoch: 6000, acc: 0.493, loss: 0.861\n",
"epoch: 6100, acc: 0.470, loss: 0.917\n",
"epoch: 6200, acc: 0.503, loss: 0.872\n",
"epoch: 6300, acc: 0.563, loss: 0.870\n",
"epoch: 6400, acc: 0.470, loss: 0.894\n",
"epoch: 6500, acc: 0.500, loss: 0.864\n",
"epoch: 6600, acc: 0.567, loss: 0.877\n",
"epoch: 6700, acc: 0.480, loss: 0.872\n",
"epoch: 6800, acc: 0.513, loss: 0.848\n",
"epoch: 6900, acc: 0.550, loss: 0.849\n",
"epoch: 7000, acc: 0.573, loss: 0.856\n",
"epoch: 7100, acc: 0.520, loss: 0.840\n",
"epoch: 7200, acc: 0.540, loss: 0.827\n",
"epoch: 7300, acc: 0.610, loss: 0.820\n",
"epoch: 7400, acc: 0.550, loss: 0.810\n",
"epoch: 7500, acc: 0.547, loss: 0.809\n",
"epoch: 7600, acc: 0.627, loss: 0.825\n",
"epoch: 7700, acc: 0.553, loss: 0.809\n",
"epoch: 7800, acc: 0.580, loss: 0.801\n",
"epoch: 7900, acc: 0.607, loss: 0.798\n",
"epoch: 8000, acc: 0.533, loss: 0.832\n",
"epoch: 8100, acc: 0.577, loss: 0.834\n",
"epoch: 8200, acc: 0.513, loss: 0.848\n",
"epoch: 8300, acc: 0.567, loss: 0.799\n",
"epoch: 8400, acc: 0.570, loss: 0.790\n",
"epoch: 8500, acc: 0.603, loss: 0.789\n",
"epoch: 8600, acc: 0.613, loss: 0.806\n",
"epoch: 8700, acc: 0.573, loss: 0.788\n",
"epoch: 8800, acc: 0.603, loss: 0.783\n",
"epoch: 8900, acc: 0.587, loss: 0.827\n",
"epoch: 9000, acc: 0.537, loss: 0.841\n",
"epoch: 9100, acc: 0.610, loss: 0.788\n",
"epoch: 9200, acc: 0.580, loss: 0.767\n",
"epoch: 9300, acc: 0.597, loss: 0.776\n",
"epoch: 9400, acc: 0.613, loss: 0.794\n",
"epoch: 9500, acc: 0.553, loss: 0.799\n",
"epoch: 9600, acc: 0.610, loss: 0.783\n",
"epoch: 9700, acc: 0.583, loss: 0.755\n",
"epoch: 9800, acc: 0.593, loss: 0.779\n",
"epoch: 9900, acc: 0.613, loss: 0.794\n",
"epoch: 10000, acc: 0.573, loss: 0.768\n"
]
}
],
"source": [
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"\n",
"# Create Dense layer with 2 input features and 64 output values\n",
"dense1 = Layer_Dense(2, 64)\n",
"\n",
"# Create ReLU activation (to be used with Dense layer)\n",
"activation1 = Activation_ReLU()\n",
"\n",
"# Create second Dense layer with 64 input features (as we take output\n",
"# of previous layer here) and 3 output values (output values)\n",
"dense2 = Layer_Dense(64, 3)\n",
"\n",
"# Create Softmax classifier's combined loss and activation\n",
"loss_activation = Activation_Softmax_Loss_CategoricalCrossentropy()\n",
"\n",
"# Create optimizer\n",
"optimizer = Optimizer_SGD()\n",
"\n",
"# Train in loop\n",
"for epoch in range(10001):\n",
" # Perform a forward pass of our training data through this layer\n",
" dense1.forward(X)\n",
"\n",
" # Perform a forward pass through activation function\n",
" # takes the output of first dense layer here\n",
" activation1.forward(dense1.output)\n",
"\n",
" # Perform a forward pass through second Dense layer\n",
" # takes outputs of activation function of first layer as inputs\n",
" dense2.forward(activation1.output)\n",
"\n",
" # Perform a forward pass through the activation/loss function\n",
" # takes the output of second dense layer here and returns loss\n",
" loss = loss_activation.forward(dense2.output, y)\n",
"\n",
" # Calculate accuracy from output of activation2 and targets\n",
" # calculate values along first axis\n",
" predictions = np.argmax(loss_activation.output, axis=1)\n",
" if len(y.shape) == 2:\n",
" y = np.argmax(y, axis=1)\n",
" accuracy = np.mean(predictions == y)\n",
"\n",
" if not epoch % 100:\n",
" print(f'epoch: {epoch}, ' +\n",
" f'acc: {accuracy:.3f}, ' +\n",
" f'loss: {loss:.3f}')\n",
"\n",
" # Backward pass\n",
" loss_activation.backward(loss_activation.output, y)\n",
" dense2.backward(loss_activation.dinputs)\n",
" activation1.backward(dense2.dinputs)\n",
" dense1.backward(activation1.dinputs)\n",
"\n",
" # Update weights and biases\n",
" optimizer.update_params(dense1)\n",
" optimizer.update_params(dense2)\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "lXLjUFnmFjmx"
},
"source": [
"\n",
"OPTIMIZERS: LEARNING RATE DECAY \n",
"
"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "y99yd9jrFjmx"
},
"outputs": [],
"source": [
"class Optimizer_SGD:\n",
" # Initialize optimizer - set settings,\n",
" # learning rate of 1. is default for this optimizer\n",
" def __init__(self, learning_rate=1., decay=0.):\n",
" self.learning_rate = learning_rate\n",
" self.current_learning_rate = learning_rate\n",
" self.decay = decay\n",
" self.iterations = 0\n",
"\n",
" # Call once before any parameter updates\n",
" def pre_update_params(self):\n",
" if self.decay:\n",
" self.current_learning_rate = self.learning_rate * \\\n",
" (1. / (1. + self.decay * self.iterations))\n",
"\n",
" # Update parameters\n",
" def update_params(self, layer):\n",
" layer.weights += -self.current_learning_rate * layer.dweights\n",
" layer.biases += -self.current_learning_rate * layer.dbiases\n",
"\n",
" # Call once after any parameter updates\n",
" def post_update_params(self):\n",
" self.iterations += 1\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "fXewScxsFjmx",
"outputId": "f55251a7-a85b-4fd3-b75e-c314e4a9e01a"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch: 0, acc: 0.287, loss: 1.099, lr: 1.0\n",
"epoch: 100, acc: 0.410, loss: 1.083, lr: 0.9099181073703367\n",
"epoch: 200, acc: 0.447, loss: 1.067, lr: 0.8340283569641367\n",
"epoch: 300, acc: 0.443, loss: 1.065, lr: 0.7698229407236336\n",
"epoch: 400, acc: 0.433, loss: 1.064, lr: 0.7147962830593281\n",
"epoch: 500, acc: 0.437, loss: 1.063, lr: 0.66711140760507\n",
"epoch: 600, acc: 0.423, loss: 1.062, lr: 0.6253908692933083\n",
"epoch: 700, acc: 0.430, loss: 1.061, lr: 0.5885815185403178\n",
"epoch: 800, acc: 0.427, loss: 1.060, lr: 0.5558643690939411\n",
"epoch: 900, acc: 0.430, loss: 1.058, lr: 0.526592943654555\n",
"epoch: 1000, acc: 0.443, loss: 1.056, lr: 0.5002501250625312\n",
"epoch: 1100, acc: 0.447, loss: 1.054, lr: 0.4764173415912339\n",
"epoch: 1200, acc: 0.450, loss: 1.052, lr: 0.45475216007276037\n",
"epoch: 1300, acc: 0.450, loss: 1.050, lr: 0.43497172683775553\n",
"epoch: 1400, acc: 0.460, loss: 1.048, lr: 0.4168403501458941\n",
"epoch: 1500, acc: 0.453, loss: 1.045, lr: 0.4001600640256102\n",
"epoch: 1600, acc: 0.447, loss: 1.042, lr: 0.3847633705271258\n",
"epoch: 1700, acc: 0.447, loss: 1.038, lr: 0.3705075954057058\n",
"epoch: 1800, acc: 0.447, loss: 1.034, lr: 0.35727045373347627\n",
"epoch: 1900, acc: 0.467, loss: 1.028, lr: 0.3449465332873405\n",
"epoch: 2000, acc: 0.477, loss: 1.022, lr: 0.33344448149383127\n",
"epoch: 2100, acc: 0.500, loss: 1.015, lr: 0.32268473701193934\n",
"epoch: 2200, acc: 0.510, loss: 1.007, lr: 0.31259768677711786\n",
"epoch: 2300, acc: 0.520, loss: 1.000, lr: 0.3031221582297666\n",
"epoch: 2400, acc: 0.537, loss: 0.992, lr: 0.29420417769932333\n",
"epoch: 2500, acc: 0.547, loss: 0.984, lr: 0.2857959416976279\n",
"epoch: 2600, acc: 0.547, loss: 0.977, lr: 0.2778549597110308\n",
"epoch: 2700, acc: 0.543, loss: 0.969, lr: 0.2703433360367667\n",
"epoch: 2800, acc: 0.550, loss: 0.963, lr: 0.26322716504343247\n",
"epoch: 2900, acc: 0.563, loss: 0.956, lr: 0.25647601949217746\n",
"epoch: 3000, acc: 0.567, loss: 0.951, lr: 0.25006251562890724\n",
"epoch: 3100, acc: 0.570, loss: 0.945, lr: 0.2439619419370578\n",
"epoch: 3200, acc: 0.577, loss: 0.940, lr: 0.23815194093831865\n",
"epoch: 3300, acc: 0.577, loss: 0.935, lr: 0.23261223540358225\n",
"epoch: 3400, acc: 0.580, loss: 0.930, lr: 0.22732439190725165\n",
"epoch: 3500, acc: 0.580, loss: 0.925, lr: 0.22227161591464767\n",
"epoch: 3600, acc: 0.580, loss: 0.920, lr: 0.21743857360295715\n",
"epoch: 3700, acc: 0.590, loss: 0.916, lr: 0.21281123643328367\n",
"epoch: 3800, acc: 0.593, loss: 0.911, lr: 0.20837674515524068\n",
"epoch: 3900, acc: 0.593, loss: 0.907, lr: 0.20412329046744235\n",
"epoch: 4000, acc: 0.590, loss: 0.902, lr: 0.2000400080016003\n",
"epoch: 4100, acc: 0.587, loss: 0.898, lr: 0.19611688566385566\n",
"epoch: 4200, acc: 0.587, loss: 0.893, lr: 0.19234468166955185\n",
"epoch: 4300, acc: 0.583, loss: 0.889, lr: 0.18871485185884126\n",
"epoch: 4400, acc: 0.587, loss: 0.884, lr: 0.18521948508983144\n",
"epoch: 4500, acc: 0.590, loss: 0.880, lr: 0.18185124568103292\n",
"epoch: 4600, acc: 0.590, loss: 0.875, lr: 0.1786033220217896\n",
"epoch: 4700, acc: 0.597, loss: 0.871, lr: 0.1754693805930865\n",
"epoch: 4800, acc: 0.613, loss: 0.866, lr: 0.17244352474564578\n",
"epoch: 4900, acc: 0.617, loss: 0.861, lr: 0.16952025767079165\n",
"epoch: 5000, acc: 0.620, loss: 0.856, lr: 0.16669444907484582\n",
"epoch: 5100, acc: 0.610, loss: 0.851, lr: 0.16396130513198884\n",
"epoch: 5200, acc: 0.617, loss: 0.847, lr: 0.16131634134537828\n",
"epoch: 5300, acc: 0.623, loss: 0.842, lr: 0.15875535799333226\n",
"epoch: 5400, acc: 0.627, loss: 0.837, lr: 0.1562744178777934\n",
"epoch: 5500, acc: 0.627, loss: 0.832, lr: 0.15386982612709646\n",
"epoch: 5600, acc: 0.633, loss: 0.828, lr: 0.15153811183512653\n",
"epoch: 5700, acc: 0.627, loss: 0.822, lr: 0.14927601134497687\n",
"epoch: 5800, acc: 0.633, loss: 0.817, lr: 0.14708045300779526\n",
"epoch: 5900, acc: 0.637, loss: 0.813, lr: 0.14494854326714016\n",
"epoch: 6000, acc: 0.640, loss: 0.808, lr: 0.1428775539362766\n",
"epoch: 6100, acc: 0.643, loss: 0.803, lr: 0.1408649105507818\n",
"epoch: 6200, acc: 0.653, loss: 0.798, lr: 0.13890818169190167\n",
"epoch: 6300, acc: 0.653, loss: 0.793, lr: 0.13700506918755992\n",
"epoch: 6400, acc: 0.657, loss: 0.788, lr: 0.13515339910798757\n",
"epoch: 6500, acc: 0.653, loss: 0.783, lr: 0.13335111348179757\n",
"epoch: 6600, acc: 0.657, loss: 0.779, lr: 0.13159626266614027\n",
"epoch: 6700, acc: 0.660, loss: 0.774, lr: 0.12988699831146902\n",
"epoch: 6800, acc: 0.667, loss: 0.769, lr: 0.12822156686754713\n",
"epoch: 6900, acc: 0.667, loss: 0.764, lr: 0.126598303582732\n",
"epoch: 7000, acc: 0.670, loss: 0.760, lr: 0.12501562695336915\n",
"epoch: 7100, acc: 0.667, loss: 0.755, lr: 0.12347203358439313\n",
"epoch: 7200, acc: 0.667, loss: 0.751, lr: 0.12196609342602757\n",
"epoch: 7300, acc: 0.670, loss: 0.747, lr: 0.12049644535486204\n",
"epoch: 7400, acc: 0.677, loss: 0.742, lr: 0.11906179307060363\n",
"epoch: 7500, acc: 0.683, loss: 0.738, lr: 0.11766090128250381\n",
"epoch: 7600, acc: 0.677, loss: 0.734, lr: 0.11629259216187929\n",
"epoch: 7700, acc: 0.680, loss: 0.730, lr: 0.11495574203931487\n",
"epoch: 7800, acc: 0.683, loss: 0.726, lr: 0.11364927832708263\n",
"epoch: 7900, acc: 0.680, loss: 0.722, lr: 0.11237217664906168\n",
"epoch: 8000, acc: 0.677, loss: 0.718, lr: 0.11112345816201799\n",
"epoch: 8100, acc: 0.680, loss: 0.715, lr: 0.10990218705352237\n",
"epoch: 8200, acc: 0.680, loss: 0.711, lr: 0.10870746820306555\n",
"epoch: 8300, acc: 0.680, loss: 0.708, lr: 0.1075384449940854\n",
"epoch: 8400, acc: 0.680, loss: 0.704, lr: 0.10639429726566654\n",
"epoch: 8500, acc: 0.687, loss: 0.701, lr: 0.10527423939362038\n",
"epoch: 8600, acc: 0.687, loss: 0.698, lr: 0.10417751849150952\n",
"epoch: 8700, acc: 0.687, loss: 0.694, lr: 0.10310341272296113\n",
"epoch: 8800, acc: 0.693, loss: 0.691, lr: 0.1020512297173181\n",
"epoch: 8900, acc: 0.690, loss: 0.688, lr: 0.10102030508132134\n",
"epoch: 9000, acc: 0.697, loss: 0.685, lr: 0.1000100010001\n",
"epoch: 9100, acc: 0.700, loss: 0.682, lr: 0.09901970492127933\n",
"epoch: 9200, acc: 0.700, loss: 0.679, lr: 0.09804882831650162\n",
"epoch: 9300, acc: 0.703, loss: 0.676, lr: 0.09709680551509856\n",
"epoch: 9400, acc: 0.703, loss: 0.673, lr: 0.09616309260505818\n",
"epoch: 9500, acc: 0.707, loss: 0.670, lr: 0.09524716639679968\n",
"epoch: 9600, acc: 0.697, loss: 0.666, lr: 0.09434852344560807\n",
"epoch: 9700, acc: 0.717, loss: 0.662, lr: 0.09346667912889055\n",
"epoch: 9800, acc: 0.713, loss: 0.658, lr: 0.09260116677470137\n",
"epoch: 9900, acc: 0.713, loss: 0.656, lr: 0.09175153683824203\n",
"epoch: 10000, acc: 0.717, loss: 0.653, lr: 0.09091735612328393\n"
]
}
],
"source": [
"import numpy as np\n",
"\n",
"# Assuming the necessary classes (Layer_Dense, Activation_ReLU,\n",
"# Activation_Softmax_Loss_CategoricalCrossentropy, and spiral_data) are defined elsewhere\n",
"\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"\n",
"# Create Dense layer with 2 input features and 64 output values\n",
"dense1 = Layer_Dense(2, 64)\n",
"\n",
"# Create ReLU activation (to be used with Dense layer)\n",
"activation1 = Activation_ReLU()\n",
"\n",
"# Create second Dense layer with 64 input features (as we take output\n",
"# of previous layer here) and 3 output values (output values)\n",
"dense2 = Layer_Dense(64, 3)\n",
"\n",
"# Create Softmax classifier's combined loss and activation\n",
"loss_activation = Activation_Softmax_Loss_CategoricalCrossentropy()\n",
"\n",
"# Create optimizer\n",
"optimizer = Optimizer_SGD(decay=1e-3)\n",
"\n",
"# Train in loop\n",
"for epoch in range(10001):\n",
" # Perform a forward pass of our training data through this layer\n",
" dense1.forward(X)\n",
"\n",
" # Perform a forward pass through activation function\n",
" # takes the output of first dense layer here\n",
" activation1.forward(dense1.output)\n",
"\n",
" # Perform a forward pass through second Dense layer\n",
" # takes outputs of activation function of first layer as inputs\n",
" dense2.forward(activation1.output)\n",
"\n",
" # Perform a forward pass through the activation/loss function\n",
" # takes the output of second dense layer here and returns loss\n",
" loss = loss_activation.forward(dense2.output, y)\n",
"\n",
" # Calculate accuracy from output of activation2 and targets\n",
" # calculate values along first axis\n",
" predictions = np.argmax(loss_activation.output, axis=1)\n",
" if len(y.shape) == 2:\n",
" y = np.argmax(y, axis=1)\n",
" accuracy = np.mean(predictions == y)\n",
"\n",
" if not epoch % 100:\n",
" print(f'epoch: {epoch}, ' +\n",
" f'acc: {accuracy:.3f}, ' +\n",
" f'loss: {loss:.3f}, ' +\n",
" f'lr: {optimizer.current_learning_rate}')\n",
"\n",
" # Backward pass\n",
" loss_activation.backward(loss_activation.output, y)\n",
" dense2.backward(loss_activation.dinputs)\n",
" activation1.backward(dense2.dinputs)\n",
" dense1.backward(activation1.dinputs)\n",
"\n",
" # Update weights and biases\n",
" optimizer.pre_update_params()\n",
" optimizer.update_params(dense1)\n",
" optimizer.update_params(dense2)\n",
" optimizer.post_update_params()\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "7nloxILOFjmx"
},
"source": [
"\n",
"OPTIMIZERS: MOMENTUM \n",
"
"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "nMYZmJkfFjmx"
},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"class Optimizer_SGD:\n",
" # Initialize optimizer - set settings,\n",
" # learning rate of 1. is default for this optimizer\n",
" def __init__(self, learning_rate=1., decay=0., momentum=0.):\n",
" self.learning_rate = learning_rate\n",
" self.current_learning_rate = learning_rate\n",
" self.decay = decay\n",
" self.iterations = 0\n",
" self.momentum = momentum\n",
"\n",
" # Call once before any parameter updates\n",
" def pre_update_params(self):\n",
" if self.decay:\n",
" self.current_learning_rate = self.learning_rate * \\\n",
" (1. / (1. + self.decay * self.iterations))\n",
"\n",
" # Update parameters\n",
" def update_params(self, layer):\n",
" # If we use momentum\n",
" if self.momentum:\n",
" # If layer does not contain momentum arrays, create them\n",
" # filled with zeros\n",
" if not hasattr(layer, 'weight_momentums'):\n",
" layer.weight_momentums = np.zeros_like(layer.weights)\n",
" layer.bias_momentums = np.zeros_like(layer.biases)\n",
"\n",
" # Build weight updates with momentum - take previous\n",
" # updates multiplied by retain factor and update with\n",
" # current gradients\n",
" weight_updates = self.momentum * layer.weight_momentums - \\\n",
" self.current_learning_rate * layer.dweights\n",
" layer.weight_momentums = weight_updates\n",
"\n",
" # Build bias updates\n",
" bias_updates = self.momentum * layer.bias_momentums - \\\n",
" self.current_learning_rate * layer.dbiases\n",
" layer.bias_momentums = bias_updates\n",
"\n",
" # Vanilla SGD updates (as before momentum update)\n",
" else:\n",
" weight_updates = -self.current_learning_rate * layer.dweights\n",
" bias_updates = -self.current_learning_rate * layer.dbiases\n",
"\n",
" # Update weights and biases using either\n",
" # vanilla or momentum updates\n",
" layer.weights += weight_updates\n",
" layer.biases += bias_updates\n",
"\n",
" # Call once after any parameter updates\n",
" def post_update_params(self):\n",
" self.iterations += 1\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "iGJ6XL-cFjmy",
"outputId": "ab7f1d7d-954d-4838-e2cc-46f62c18d28c"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch: 0, acc: 0.320, loss: 1.099, lr: 1.0\n",
"epoch: 100, acc: 0.423, loss: 1.030, lr: 0.9099181073703367\n",
"epoch: 200, acc: 0.430, loss: 0.966, lr: 0.8340283569641367\n",
"epoch: 300, acc: 0.717, loss: 0.703, lr: 0.7698229407236336\n",
"epoch: 400, acc: 0.763, loss: 0.494, lr: 0.7147962830593281\n",
"epoch: 500, acc: 0.800, loss: 0.451, lr: 0.66711140760507\n",
"epoch: 600, acc: 0.870, loss: 0.347, lr: 0.6253908692933083\n",
"epoch: 700, acc: 0.580, loss: 1.827, lr: 0.5885815185403178\n",
"epoch: 800, acc: 0.837, loss: 0.343, lr: 0.5558643690939411\n",
"epoch: 900, acc: 0.870, loss: 0.290, lr: 0.526592943654555\n",
"epoch: 1000, acc: 0.897, loss: 0.263, lr: 0.5002501250625312\n",
"epoch: 1100, acc: 0.900, loss: 0.245, lr: 0.4764173415912339\n",
"epoch: 1200, acc: 0.910, loss: 0.233, lr: 0.45475216007276037\n",
"epoch: 1300, acc: 0.917, loss: 0.225, lr: 0.43497172683775553\n",
"epoch: 1400, acc: 0.923, loss: 0.218, lr: 0.4168403501458941\n",
"epoch: 1500, acc: 0.917, loss: 0.212, lr: 0.4001600640256102\n",
"epoch: 1600, acc: 0.923, loss: 0.206, lr: 0.3847633705271258\n",
"epoch: 1700, acc: 0.913, loss: 0.198, lr: 0.3705075954057058\n",
"epoch: 1800, acc: 0.920, loss: 0.192, lr: 0.35727045373347627\n",
"epoch: 1900, acc: 0.910, loss: 0.186, lr: 0.3449465332873405\n",
"epoch: 2000, acc: 0.913, loss: 0.181, lr: 0.33344448149383127\n",
"epoch: 2100, acc: 0.923, loss: 0.176, lr: 0.32268473701193934\n",
"epoch: 2200, acc: 0.917, loss: 0.173, lr: 0.31259768677711786\n",
"epoch: 2300, acc: 0.917, loss: 0.170, lr: 0.3031221582297666\n",
"epoch: 2400, acc: 0.923, loss: 0.167, lr: 0.29420417769932333\n",
"epoch: 2500, acc: 0.927, loss: 0.164, lr: 0.2857959416976279\n",
"epoch: 2600, acc: 0.923, loss: 0.162, lr: 0.2778549597110308\n",
"epoch: 2700, acc: 0.923, loss: 0.160, lr: 0.2703433360367667\n",
"epoch: 2800, acc: 0.927, loss: 0.159, lr: 0.26322716504343247\n",
"epoch: 2900, acc: 0.927, loss: 0.157, lr: 0.25647601949217746\n",
"epoch: 3000, acc: 0.930, loss: 0.156, lr: 0.25006251562890724\n",
"epoch: 3100, acc: 0.930, loss: 0.154, lr: 0.2439619419370578\n",
"epoch: 3200, acc: 0.930, loss: 0.153, lr: 0.23815194093831865\n",
"epoch: 3300, acc: 0.930, loss: 0.152, lr: 0.23261223540358225\n",
"epoch: 3400, acc: 0.930, loss: 0.151, lr: 0.22732439190725165\n",
"epoch: 3500, acc: 0.930, loss: 0.149, lr: 0.22227161591464767\n",
"epoch: 3600, acc: 0.933, loss: 0.148, lr: 0.21743857360295715\n",
"epoch: 3700, acc: 0.937, loss: 0.147, lr: 0.21281123643328367\n",
"epoch: 3800, acc: 0.937, loss: 0.146, lr: 0.20837674515524068\n",
"epoch: 3900, acc: 0.940, loss: 0.146, lr: 0.20412329046744235\n",
"epoch: 4000, acc: 0.940, loss: 0.145, lr: 0.2000400080016003\n",
"epoch: 4100, acc: 0.940, loss: 0.144, lr: 0.19611688566385566\n",
"epoch: 4200, acc: 0.937, loss: 0.143, lr: 0.19234468166955185\n",
"epoch: 4300, acc: 0.940, loss: 0.143, lr: 0.18871485185884126\n",
"epoch: 4400, acc: 0.937, loss: 0.142, lr: 0.18521948508983144\n",
"epoch: 4500, acc: 0.937, loss: 0.142, lr: 0.18185124568103292\n",
"epoch: 4600, acc: 0.940, loss: 0.141, lr: 0.1786033220217896\n",
"epoch: 4700, acc: 0.940, loss: 0.141, lr: 0.1754693805930865\n",
"epoch: 4800, acc: 0.940, loss: 0.140, lr: 0.17244352474564578\n",
"epoch: 4900, acc: 0.940, loss: 0.139, lr: 0.16952025767079165\n",
"epoch: 5000, acc: 0.940, loss: 0.139, lr: 0.16669444907484582\n",
"epoch: 5100, acc: 0.940, loss: 0.138, lr: 0.16396130513198884\n",
"epoch: 5200, acc: 0.943, loss: 0.138, lr: 0.16131634134537828\n",
"epoch: 5300, acc: 0.943, loss: 0.138, lr: 0.15875535799333226\n",
"epoch: 5400, acc: 0.943, loss: 0.137, lr: 0.1562744178777934\n",
"epoch: 5500, acc: 0.943, loss: 0.137, lr: 0.15386982612709646\n",
"epoch: 5600, acc: 0.943, loss: 0.136, lr: 0.15153811183512653\n",
"epoch: 5700, acc: 0.943, loss: 0.136, lr: 0.14927601134497687\n",
"epoch: 5800, acc: 0.943, loss: 0.136, lr: 0.14708045300779526\n",
"epoch: 5900, acc: 0.943, loss: 0.135, lr: 0.14494854326714016\n",
"epoch: 6000, acc: 0.943, loss: 0.135, lr: 0.1428775539362766\n",
"epoch: 6100, acc: 0.943, loss: 0.135, lr: 0.1408649105507818\n",
"epoch: 6200, acc: 0.943, loss: 0.134, lr: 0.13890818169190167\n",
"epoch: 6300, acc: 0.943, loss: 0.134, lr: 0.13700506918755992\n",
"epoch: 6400, acc: 0.947, loss: 0.134, lr: 0.13515339910798757\n",
"epoch: 6500, acc: 0.947, loss: 0.133, lr: 0.13335111348179757\n",
"epoch: 6600, acc: 0.950, loss: 0.133, lr: 0.13159626266614027\n",
"epoch: 6700, acc: 0.947, loss: 0.133, lr: 0.12988699831146902\n",
"epoch: 6800, acc: 0.950, loss: 0.133, lr: 0.12822156686754713\n",
"epoch: 6900, acc: 0.950, loss: 0.132, lr: 0.126598303582732\n",
"epoch: 7000, acc: 0.950, loss: 0.132, lr: 0.12501562695336915\n",
"epoch: 7100, acc: 0.950, loss: 0.132, lr: 0.12347203358439313\n",
"epoch: 7200, acc: 0.950, loss: 0.132, lr: 0.12196609342602757\n",
"epoch: 7300, acc: 0.950, loss: 0.131, lr: 0.12049644535486204\n",
"epoch: 7400, acc: 0.950, loss: 0.131, lr: 0.11906179307060363\n",
"epoch: 7500, acc: 0.950, loss: 0.131, lr: 0.11766090128250381\n",
"epoch: 7600, acc: 0.950, loss: 0.131, lr: 0.11629259216187929\n",
"epoch: 7700, acc: 0.950, loss: 0.131, lr: 0.11495574203931487\n",
"epoch: 7800, acc: 0.950, loss: 0.130, lr: 0.11364927832708263\n",
"epoch: 7900, acc: 0.950, loss: 0.130, lr: 0.11237217664906168\n",
"epoch: 8000, acc: 0.950, loss: 0.130, lr: 0.11112345816201799\n",
"epoch: 8100, acc: 0.950, loss: 0.130, lr: 0.10990218705352237\n",
"epoch: 8200, acc: 0.953, loss: 0.130, lr: 0.10870746820306555\n",
"epoch: 8300, acc: 0.950, loss: 0.130, lr: 0.1075384449940854\n",
"epoch: 8400, acc: 0.953, loss: 0.130, lr: 0.10639429726566654\n",
"epoch: 8500, acc: 0.953, loss: 0.129, lr: 0.10527423939362038\n",
"epoch: 8600, acc: 0.953, loss: 0.129, lr: 0.10417751849150952\n",
"epoch: 8700, acc: 0.953, loss: 0.129, lr: 0.10310341272296113\n",
"epoch: 8800, acc: 0.953, loss: 0.129, lr: 0.1020512297173181\n",
"epoch: 8900, acc: 0.950, loss: 0.129, lr: 0.10102030508132134\n",
"epoch: 9000, acc: 0.953, loss: 0.129, lr: 0.1000100010001\n",
"epoch: 9100, acc: 0.953, loss: 0.129, lr: 0.09901970492127933\n",
"epoch: 9200, acc: 0.953, loss: 0.128, lr: 0.09804882831650162\n",
"epoch: 9300, acc: 0.953, loss: 0.128, lr: 0.09709680551509856\n",
"epoch: 9400, acc: 0.953, loss: 0.128, lr: 0.09616309260505818\n",
"epoch: 9500, acc: 0.953, loss: 0.128, lr: 0.09524716639679968\n",
"epoch: 9600, acc: 0.953, loss: 0.128, lr: 0.09434852344560807\n",
"epoch: 9700, acc: 0.953, loss: 0.128, lr: 0.09346667912889055\n",
"epoch: 9800, acc: 0.953, loss: 0.128, lr: 0.09260116677470137\n",
"epoch: 9900, acc: 0.953, loss: 0.128, lr: 0.09175153683824203\n",
"epoch: 10000, acc: 0.953, loss: 0.128, lr: 0.09091735612328393\n"
]
}
],
"source": [
"import numpy as np\n",
"\n",
"# Assuming the necessary classes (Layer_Dense, Activation_ReLU,\n",
"# Activation_Softmax_Loss_CategoricalCrossentropy, Optimizer_SGD, and spiral_data) are defined elsewhere\n",
"\n",
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"\n",
"# Create Dense layer with 2 input features and 64 output values\n",
"dense1 = Layer_Dense(2, 64)\n",
"\n",
"# Create ReLU activation (to be used with Dense layer)\n",
"activation1 = Activation_ReLU()\n",
"\n",
"# Create second Dense layer with 64 input features (as we take output\n",
"# of previous layer here) and 3 output values (output values)\n",
"dense2 = Layer_Dense(64, 3)\n",
"\n",
"# Create Softmax classifier's combined loss and activation\n",
"loss_activation = Activation_Softmax_Loss_CategoricalCrossentropy()\n",
"\n",
"# Create optimizer\n",
"optimizer = Optimizer_SGD(decay=1e-3, momentum=0.9)\n",
"\n",
"# Train in loop\n",
"for epoch in range(10001):\n",
" # Perform a forward pass of our training data through this layer\n",
" dense1.forward(X)\n",
"\n",
" # Perform a forward pass through activation function\n",
" # takes the output of first dense layer here\n",
" activation1.forward(dense1.output)\n",
"\n",
" # Perform a forward pass through second Dense layer\n",
" # takes outputs of activation function of first layer as inputs\n",
" dense2.forward(activation1.output)\n",
"\n",
" # Perform a forward pass through the activation/loss function\n",
" # takes the output of second dense layer here and returns loss\n",
" loss = loss_activation.forward(dense2.output, y)\n",
"\n",
" # Calculate accuracy from output of activation2 and targets\n",
" # calculate values along first axis\n",
" predictions = np.argmax(loss_activation.output, axis=1)\n",
" if len(y.shape) == 2:\n",
" y = np.argmax(y, axis=1)\n",
" accuracy = np.mean(predictions == y)\n",
"\n",
" if not epoch % 100:\n",
" print(f'epoch: {epoch}, ' +\n",
" f'acc: {accuracy:.3f}, ' +\n",
" f'loss: {loss:.3f}, ' +\n",
" f'lr: {optimizer.current_learning_rate}')\n",
"\n",
" # Backward pass\n",
" loss_activation.backward(loss_activation.output, y)\n",
" dense2.backward(loss_activation.dinputs)\n",
" activation1.backward(dense2.dinputs)\n",
" dense1.backward(activation1.dinputs)\n",
"\n",
" # Update weights and biases\n",
" optimizer.pre_update_params()\n",
" optimizer.update_params(dense1)\n",
" optimizer.update_params(dense2)\n",
" optimizer.post_update_params()\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "3kAX2D2BFjmy"
},
"source": [
"\n",
"OPTIMIZERS: ADAGRAD \n",
"
"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "cZpH377mFjmy"
},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"# Adagrad optimizer\n",
"class Optimizer_Adagrad:\n",
" # Initialize optimizer - set settings\n",
" def __init__(self, learning_rate=1., decay=0., epsilon=1e-7):\n",
" self.learning_rate = learning_rate\n",
" self.current_learning_rate = learning_rate\n",
" self.decay = decay\n",
" self.iterations = 0\n",
" self.epsilon = epsilon\n",
"\n",
" # Call once before any parameter updates\n",
" def pre_update_params(self):\n",
" if self.decay:\n",
" self.current_learning_rate = self.learning_rate * \\\n",
" (1. / (1. + self.decay * self.iterations))\n",
"\n",
" # Update parameters\n",
" def update_params(self, layer):\n",
" # If layer does not contain cache arrays, create them filled with zeros\n",
" if not hasattr(layer, 'weight_cache'):\n",
" layer.weight_cache = np.zeros_like(layer.weights)\n",
" layer.bias_cache = np.zeros_like(layer.biases)\n",
"\n",
" # Update cache with squared current gradients\n",
" layer.weight_cache += layer.dweights**2\n",
" layer.bias_cache += layer.dbiases**2\n",
"\n",
" # Vanilla SGD parameter update + normalization with square rooted cache\n",
" layer.weights += -self.current_learning_rate * \\\n",
" layer.dweights / \\\n",
" (np.sqrt(layer.weight_cache) + self.epsilon)\n",
" layer.biases += -self.current_learning_rate * \\\n",
" layer.dbiases / \\\n",
" (np.sqrt(layer.bias_cache) + self.epsilon)\n",
"\n",
" # Call once after any parameter updates\n",
" def post_update_params(self):\n",
" self.iterations += 1\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "PmBRgb8OFjmy",
"outputId": "cebb6205-93bd-4c0e-ee9c-b6c1ae86c6de"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch: 0, acc: 0.303, loss: 1.099, lr: 1.0\n",
"epoch: 100, acc: 0.490, loss: 1.006, lr: 0.9901970492127933\n",
"epoch: 200, acc: 0.477, loss: 0.972, lr: 0.9804882831650161\n",
"epoch: 300, acc: 0.497, loss: 0.946, lr: 0.9709680551509855\n",
"epoch: 400, acc: 0.510, loss: 0.927, lr: 0.9616309260505818\n",
"epoch: 500, acc: 0.520, loss: 0.902, lr: 0.9524716639679969\n",
"epoch: 600, acc: 0.550, loss: 0.871, lr: 0.9434852344560807\n",
"epoch: 700, acc: 0.610, loss: 0.825, lr: 0.9346667912889054\n",
"epoch: 800, acc: 0.650, loss: 0.788, lr: 0.9260116677470135\n",
"epoch: 900, acc: 0.667, loss: 0.757, lr: 0.9175153683824203\n",
"epoch: 1000, acc: 0.640, loss: 0.733, lr: 0.9091735612328392\n",
"epoch: 1100, acc: 0.630, loss: 0.706, lr: 0.9009820704567978\n",
"epoch: 1200, acc: 0.700, loss: 0.692, lr: 0.892936869363336\n",
"epoch: 1300, acc: 0.723, loss: 0.660, lr: 0.8850340738118416\n",
"epoch: 1400, acc: 0.640, loss: 0.647, lr: 0.8772699359592947\n",
"epoch: 1500, acc: 0.670, loss: 0.638, lr: 0.8696408383337683\n",
"epoch: 1600, acc: 0.713, loss: 0.611, lr: 0.8621432882145013\n",
"epoch: 1700, acc: 0.733, loss: 0.591, lr: 0.8547739123001966\n",
"epoch: 1800, acc: 0.737, loss: 0.575, lr: 0.8475294516484448\n",
"epoch: 1900, acc: 0.733, loss: 0.562, lr: 0.8404067568703253\n",
"epoch: 2000, acc: 0.737, loss: 0.551, lr: 0.8334027835652972\n",
"epoch: 2100, acc: 0.737, loss: 0.537, lr: 0.8265145879824779\n",
"epoch: 2200, acc: 0.740, loss: 0.528, lr: 0.8197393228953193\n",
"epoch: 2300, acc: 0.753, loss: 0.520, lr: 0.8130742336775347\n",
"epoch: 2400, acc: 0.750, loss: 0.507, lr: 0.8065166545689169\n",
"epoch: 2500, acc: 0.747, loss: 0.501, lr: 0.8000640051204096\n",
"epoch: 2600, acc: 0.757, loss: 0.492, lr: 0.7937137868084768\n",
"epoch: 2700, acc: 0.777, loss: 0.486, lr: 0.7874635798094338\n",
"epoch: 2800, acc: 0.793, loss: 0.477, lr: 0.7813110399249941\n",
"epoch: 2900, acc: 0.813, loss: 0.466, lr: 0.7752538956508256\n",
"epoch: 3000, acc: 0.803, loss: 0.446, lr: 0.7692899453804138\n",
"epoch: 3100, acc: 0.807, loss: 0.437, lr: 0.7634170547370028\n",
"epoch: 3200, acc: 0.810, loss: 0.425, lr: 0.7576331540268202\n",
"epoch: 3300, acc: 0.830, loss: 0.417, lr: 0.7519362358072035\n",
"epoch: 3400, acc: 0.830, loss: 0.412, lr: 0.7463243525636241\n",
"epoch: 3500, acc: 0.843, loss: 0.406, lr: 0.7407956144899621\n",
"epoch: 3600, acc: 0.860, loss: 0.396, lr: 0.735348187366718\n",
"epoch: 3700, acc: 0.850, loss: 0.392, lr: 0.7299802905321557\n",
"epoch: 3800, acc: 0.857, loss: 0.386, lr: 0.7246901949416624\n",
"epoch: 3900, acc: 0.863, loss: 0.382, lr: 0.7194762213108857\n",
"epoch: 4000, acc: 0.863, loss: 0.377, lr: 0.7143367383384527\n",
"epoch: 4100, acc: 0.867, loss: 0.373, lr: 0.7092701610043266\n",
"epoch: 4200, acc: 0.877, loss: 0.368, lr: 0.7042749489400663\n",
"epoch: 4300, acc: 0.870, loss: 0.365, lr: 0.6993496048674733\n",
"epoch: 4400, acc: 0.870, loss: 0.361, lr: 0.6944926731022988\n",
"epoch: 4500, acc: 0.873, loss: 0.357, lr: 0.6897027381198704\n",
"epoch: 4600, acc: 0.870, loss: 0.354, lr: 0.6849784231796698\n",
"epoch: 4700, acc: 0.873, loss: 0.350, lr: 0.6803183890060548\n",
"epoch: 4800, acc: 0.870, loss: 0.347, lr: 0.6757213325224677\n",
"epoch: 4900, acc: 0.863, loss: 0.345, lr: 0.6711859856366199\n",
"epoch: 5000, acc: 0.863, loss: 0.342, lr: 0.6667111140742716\n",
"epoch: 5100, acc: 0.863, loss: 0.339, lr: 0.6622955162593549\n",
"epoch: 5200, acc: 0.860, loss: 0.337, lr: 0.6579380222383051\n",
"epoch: 5300, acc: 0.857, loss: 0.335, lr: 0.6536374926465782\n",
"epoch: 5400, acc: 0.860, loss: 0.332, lr: 0.649392817715436\n",
"epoch: 5500, acc: 0.863, loss: 0.329, lr: 0.6452029163171817\n",
"epoch: 5600, acc: 0.863, loss: 0.328, lr: 0.6410667350471184\n",
"epoch: 5700, acc: 0.860, loss: 0.326, lr: 0.6369832473405949\n",
"epoch: 5800, acc: 0.867, loss: 0.324, lr: 0.6329514526235838\n",
"epoch: 5900, acc: 0.867, loss: 0.322, lr: 0.6289703754953141\n",
"epoch: 6000, acc: 0.873, loss: 0.318, lr: 0.6250390649415589\n",
"epoch: 6100, acc: 0.880, loss: 0.314, lr: 0.6211565935772407\n",
"epoch: 6200, acc: 0.883, loss: 0.312, lr: 0.6173220569170937\n",
"epoch: 6300, acc: 0.883, loss: 0.310, lr: 0.6135345726731701\n",
"epoch: 6400, acc: 0.883, loss: 0.308, lr: 0.6097932800780536\n",
"epoch: 6500, acc: 0.883, loss: 0.307, lr: 0.6060973392326807\n",
"epoch: 6600, acc: 0.880, loss: 0.305, lr: 0.6024459304777396\n",
"epoch: 6700, acc: 0.883, loss: 0.304, lr: 0.5988382537876519\n",
"epoch: 6800, acc: 0.883, loss: 0.303, lr: 0.5952735281862016\n",
"epoch: 6900, acc: 0.883, loss: 0.302, lr: 0.5917509911829102\n",
"epoch: 7000, acc: 0.887, loss: 0.300, lr: 0.5882698982293076\n",
"epoch: 7100, acc: 0.887, loss: 0.299, lr: 0.5848295221942803\n",
"epoch: 7200, acc: 0.887, loss: 0.298, lr: 0.5814291528577243\n",
"epoch: 7300, acc: 0.887, loss: 0.297, lr: 0.5780680964217585\n",
"epoch: 7400, acc: 0.887, loss: 0.296, lr: 0.5747456750387954\n",
"epoch: 7500, acc: 0.887, loss: 0.295, lr: 0.5714612263557918\n",
"epoch: 7600, acc: 0.887, loss: 0.294, lr: 0.5682141030740383\n",
"epoch: 7700, acc: 0.887, loss: 0.294, lr: 0.5650036725238714\n",
"epoch: 7800, acc: 0.887, loss: 0.293, lr: 0.5618293162537221\n",
"epoch: 7900, acc: 0.887, loss: 0.292, lr: 0.5586904296329404\n",
"epoch: 8000, acc: 0.887, loss: 0.291, lr: 0.5555864214678593\n",
"epoch: 8100, acc: 0.887, loss: 0.290, lr: 0.5525167136305873\n",
"epoch: 8200, acc: 0.887, loss: 0.289, lr: 0.5494807407000385\n",
"epoch: 8300, acc: 0.887, loss: 0.289, lr: 0.5464779496147331\n",
"epoch: 8400, acc: 0.887, loss: 0.288, lr: 0.5435077993369205\n",
"epoch: 8500, acc: 0.890, loss: 0.286, lr: 0.5405697605275961\n",
"epoch: 8600, acc: 0.890, loss: 0.286, lr: 0.5376633152320017\n",
"epoch: 8700, acc: 0.890, loss: 0.285, lr: 0.5347879565752179\n",
"epoch: 8800, acc: 0.890, loss: 0.284, lr: 0.5319431884674717\n",
"epoch: 8900, acc: 0.890, loss: 0.283, lr: 0.5291285253188\n",
"epoch: 9000, acc: 0.890, loss: 0.283, lr: 0.5263434917627243\n",
"epoch: 9100, acc: 0.887, loss: 0.279, lr: 0.5235876223886068\n",
"epoch: 9200, acc: 0.893, loss: 0.277, lr: 0.5208604614823689\n",
"epoch: 9300, acc: 0.893, loss: 0.276, lr: 0.5181615627752734\n",
"epoch: 9400, acc: 0.893, loss: 0.276, lr: 0.5154904892004742\n",
"epoch: 9500, acc: 0.893, loss: 0.275, lr: 0.5128468126570593\n",
"epoch: 9600, acc: 0.893, loss: 0.274, lr: 0.5102301137813153\n",
"epoch: 9700, acc: 0.893, loss: 0.274, lr: 0.5076399817249606\n",
"epoch: 9800, acc: 0.893, loss: 0.273, lr: 0.5050760139400979\n",
"epoch: 9900, acc: 0.893, loss: 0.273, lr: 0.5025378159706518\n",
"epoch: 10000, acc: 0.893, loss: 0.272, lr: 0.5000250012500626\n"
]
}
],
"source": [
"import numpy as np\n",
"\n",
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"\n",
"# Create Dense layer with 2 input features and 64 output values\n",
"dense1 = Layer_Dense(2, 64)\n",
"\n",
"# Create ReLU activation (to be used with Dense layer)\n",
"activation1 = Activation_ReLU()\n",
"\n",
"# Create second Dense layer with 64 input features (as we take output of previous layer here) and 3 output values (output values)\n",
"dense2 = Layer_Dense(64, 3)\n",
"\n",
"# Create Softmax classifier's combined loss and activation\n",
"loss_activation = Activation_Softmax_Loss_CategoricalCrossentropy()\n",
"\n",
"# Create optimizer\n",
"# optimizer = Optimizer_SGD(decay=8e-8, momentum=0.9)\n",
"optimizer = Optimizer_Adagrad(decay=1e-4)\n",
"\n",
"# Train in loop\n",
"for epoch in range(10001):\n",
" # Perform a forward pass of our training data through this layer\n",
" dense1.forward(X)\n",
"\n",
" # Perform a forward pass through activation function\n",
" # takes the output of first dense layer here\n",
" activation1.forward(dense1.output)\n",
"\n",
" # Perform a forward pass through second Dense layer\n",
" # takes outputs of activation function of first layer as inputs\n",
" dense2.forward(activation1.output)\n",
"\n",
" # Perform a forward pass through the activation/loss function\n",
" # takes the output of second dense layer here and returns loss\n",
" loss = loss_activation.forward(dense2.output, y)\n",
"\n",
" # Calculate accuracy from the softmax output and targets:\n",
" # argmax over axis 1 (the class axis) gives the predicted class per sample\n",
" predictions = np.argmax(loss_activation.output, axis=1)\n",
" # if targets are one-hot encoded, convert them to sparse class labels\n",
" if len(y.shape) == 2:\n",
" y = np.argmax(y, axis=1)\n",
" accuracy = np.mean(predictions == y)\n",
"\n",
" # report training metrics every 100 epochs\n",
" if not epoch % 100:\n",
" print(f'epoch: {epoch}, ' +\n",
" f'acc: {accuracy:.3f}, ' +\n",
" f'loss: {loss:.3f}, ' +\n",
" f'lr: {optimizer.current_learning_rate}')\n",
"\n",
" # Backward pass\n",
" loss_activation.backward(loss_activation.output, y)\n",
" dense2.backward(loss_activation.dinputs)\n",
" activation1.backward(dense2.dinputs)\n",
" dense1.backward(activation1.dinputs)\n",
"\n",
" # Update weights and biases\n",
" optimizer.pre_update_params()\n",
" optimizer.update_params(dense1)\n",
" optimizer.update_params(dense2)\n",
" optimizer.post_update_params()\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "_8hEKQBrFjmy"
},
"source": [
"\n",
"OPTIMIZERS: RMSPROP \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "4VkLC_ukFjmy"
},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"# RMSprop optimizer: keeps an exponential moving average (decay rate rho)\n",
"# of squared gradients per parameter and divides each update by its\n",
"# square root, giving every parameter an adaptive learning rate.\n",
"class Optimizer_RMSprop:\n",
" # Initialize optimizer - set settings\n",
" # learning_rate: global step size\n",
" # decay: 1/t learning-rate decay factor (0 disables decay)\n",
" # epsilon: small constant that prevents division by zero\n",
" # rho: cache memory decay rate (EMA coefficient for squared gradients)\n",
" def __init__(self, learning_rate=0.001, decay=0., epsilon=1e-7, rho=0.9):\n",
" self.learning_rate = learning_rate\n",
" self.current_learning_rate = learning_rate\n",
" self.decay = decay\n",
" self.iterations = 0\n",
" self.epsilon = epsilon\n",
" self.rho = rho\n",
"\n",
" # Call once before any parameter updates\n",
" def pre_update_params(self):\n",
" if self.decay:\n",
" self.current_learning_rate = self.learning_rate * \\\n",
" (1. / (1. + self.decay * self.iterations))\n",
"\n",
" # Update parameters\n",
" def update_params(self, layer):\n",
" # If layer does not contain cache arrays,\n",
" # create them filled with zeros\n",
" if not hasattr(layer, 'weight_cache'):\n",
" layer.weight_cache = np.zeros_like(layer.weights)\n",
" layer.bias_cache = np.zeros_like(layer.biases)\n",
"\n",
" # Update cache with squared current gradients\n",
" layer.weight_cache = self.rho * layer.weight_cache + \\\n",
" (1 - self.rho) * layer.dweights**2\n",
" layer.bias_cache = self.rho * layer.bias_cache + \\\n",
" (1 - self.rho) * layer.dbiases**2\n",
"\n",
" # Vanilla SGD parameter update + normalization\n",
" # with square rooted cache\n",
" layer.weights += -self.current_learning_rate * \\\n",
" layer.dweights / \\\n",
" (np.sqrt(layer.weight_cache) + self.epsilon)\n",
" layer.biases += -self.current_learning_rate * \\\n",
" layer.dbiases / \\\n",
" (np.sqrt(layer.bias_cache) + self.epsilon)\n",
"\n",
" # Call once after any parameter updates\n",
" def post_update_params(self):\n",
" self.iterations += 1\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "-TwxlA-yFjmy",
"outputId": "8f5695a6-e8f9-4b04-dc8d-bf028f8d2d4a"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch: 0, acc: 0.263, loss: 1.099, lr: 0.02\n",
"epoch: 100, acc: 0.490, loss: 1.017, lr: 0.01998021958261321\n",
"epoch: 200, acc: 0.550, loss: 0.962, lr: 0.019960279044701046\n",
"epoch: 300, acc: 0.557, loss: 0.902, lr: 0.019940378268975763\n",
"epoch: 400, acc: 0.650, loss: 0.845, lr: 0.01992051713662487\n",
"epoch: 500, acc: 0.610, loss: 0.853, lr: 0.01990069552930875\n",
"epoch: 600, acc: 0.643, loss: 0.773, lr: 0.019880913329158343\n",
"epoch: 700, acc: 0.630, loss: 0.785, lr: 0.019861170418772778\n",
"epoch: 800, acc: 0.683, loss: 0.715, lr: 0.019841466681217078\n",
"epoch: 900, acc: 0.707, loss: 0.697, lr: 0.01982180200001982\n",
"epoch: 1000, acc: 0.703, loss: 0.694, lr: 0.019802176259170884\n",
"epoch: 1100, acc: 0.667, loss: 0.702, lr: 0.01978258934311912\n",
"epoch: 1200, acc: 0.717, loss: 0.672, lr: 0.01976304113677013\n",
"epoch: 1300, acc: 0.747, loss: 0.630, lr: 0.019743531525483964\n",
"epoch: 1400, acc: 0.730, loss: 0.618, lr: 0.01972406039507293\n",
"epoch: 1500, acc: 0.743, loss: 0.614, lr: 0.019704627631799327\n",
"epoch: 1600, acc: 0.743, loss: 0.603, lr: 0.019685233122373254\n",
"epoch: 1700, acc: 0.740, loss: 0.593, lr: 0.019665876753950384\n",
"epoch: 1800, acc: 0.767, loss: 0.595, lr: 0.01964655841412981\n",
"epoch: 1900, acc: 0.770, loss: 0.578, lr: 0.019627277990951823\n",
"epoch: 2000, acc: 0.750, loss: 0.576, lr: 0.019608035372895814\n",
"epoch: 2100, acc: 0.757, loss: 0.559, lr: 0.01958883044887805\n",
"epoch: 2200, acc: 0.780, loss: 0.565, lr: 0.019569663108249594\n",
"epoch: 2300, acc: 0.777, loss: 0.552, lr: 0.01955053324079414\n",
"epoch: 2400, acc: 0.770, loss: 0.560, lr: 0.019531440736725945\n",
"epoch: 2500, acc: 0.770, loss: 0.539, lr: 0.019512385486687673\n",
"epoch: 2600, acc: 0.783, loss: 0.546, lr: 0.019493367381748363\n",
"epoch: 2700, acc: 0.783, loss: 0.533, lr: 0.019474386313401298\n",
"epoch: 2800, acc: 0.737, loss: 0.609, lr: 0.019455442173562\n",
"epoch: 2900, acc: 0.783, loss: 0.522, lr: 0.019436534854566128\n",
"epoch: 3000, acc: 0.763, loss: 0.547, lr: 0.01941766424916747\n",
"epoch: 3100, acc: 0.803, loss: 0.513, lr: 0.019398830250535893\n",
"epoch: 3200, acc: 0.803, loss: 0.509, lr: 0.019380032752255354\n",
"epoch: 3300, acc: 0.787, loss: 0.530, lr: 0.01936127164832186\n",
"epoch: 3400, acc: 0.810, loss: 0.495, lr: 0.01934254683314152\n",
"epoch: 3500, acc: 0.803, loss: 0.504, lr: 0.019323858201528515\n",
"epoch: 3600, acc: 0.817, loss: 0.491, lr: 0.019305205648703173\n",
"epoch: 3700, acc: 0.820, loss: 0.484, lr: 0.01928658907028997\n",
"epoch: 3800, acc: 0.800, loss: 0.476, lr: 0.01926800836231563\n",
"epoch: 3900, acc: 0.783, loss: 0.499, lr: 0.019249463421207133\n",
"epoch: 4000, acc: 0.797, loss: 0.482, lr: 0.019230954143789846\n",
"epoch: 4100, acc: 0.797, loss: 0.477, lr: 0.019212480427285565\n",
"epoch: 4200, acc: 0.793, loss: 0.472, lr: 0.019194042169310647\n",
"epoch: 4300, acc: 0.803, loss: 0.463, lr: 0.019175639267874092\n",
"epoch: 4400, acc: 0.780, loss: 0.489, lr: 0.019157271621375684\n",
"epoch: 4500, acc: 0.790, loss: 0.462, lr: 0.0191389391286041\n",
"epoch: 4600, acc: 0.800, loss: 0.459, lr: 0.019120641688735073\n",
"epoch: 4700, acc: 0.810, loss: 0.464, lr: 0.019102379201329525\n",
"epoch: 4800, acc: 0.823, loss: 0.438, lr: 0.01908415156633174\n",
"epoch: 4900, acc: 0.793, loss: 0.455, lr: 0.01906595868406753\n",
"epoch: 5000, acc: 0.803, loss: 0.446, lr: 0.01904780045524243\n",
"epoch: 5100, acc: 0.800, loss: 0.441, lr: 0.019029676780939874\n",
"epoch: 5200, acc: 0.753, loss: 0.541, lr: 0.019011587562619416\n",
"epoch: 5300, acc: 0.823, loss: 0.428, lr: 0.01899353270211493\n",
"epoch: 5400, acc: 0.807, loss: 0.435, lr: 0.018975512101632844\n",
"epoch: 5500, acc: 0.807, loss: 0.429, lr: 0.018957525663750367\n",
"epoch: 5600, acc: 0.810, loss: 0.426, lr: 0.018939573291413745\n",
"epoch: 5700, acc: 0.813, loss: 0.416, lr: 0.018921654887936498\n",
"epoch: 5800, acc: 0.823, loss: 0.414, lr: 0.018903770356997706\n",
"epoch: 5900, acc: 0.830, loss: 0.396, lr: 0.018885919602640248\n",
"epoch: 6000, acc: 0.810, loss: 0.411, lr: 0.018868102529269144\n",
"epoch: 6100, acc: 0.813, loss: 0.402, lr: 0.018850319041649778\n",
"epoch: 6200, acc: 0.813, loss: 0.399, lr: 0.018832569044906263\n",
"epoch: 6300, acc: 0.813, loss: 0.397, lr: 0.018814852444519702\n",
"epoch: 6400, acc: 0.850, loss: 0.381, lr: 0.018797169146326564\n",
"epoch: 6500, acc: 0.857, loss: 0.393, lr: 0.01877951905651696\n",
"epoch: 6600, acc: 0.857, loss: 0.392, lr: 0.018761902081633034\n",
"epoch: 6700, acc: 0.857, loss: 0.389, lr: 0.018744318128567278\n",
"epoch: 6800, acc: 0.860, loss: 0.368, lr: 0.018726767104560903\n",
"epoch: 6900, acc: 0.823, loss: 0.391, lr: 0.018709248917202218\n",
"epoch: 7000, acc: 0.823, loss: 0.388, lr: 0.018691763474424996\n",
"epoch: 7100, acc: 0.820, loss: 0.385, lr: 0.018674310684506857\n",
"epoch: 7200, acc: 0.817, loss: 0.422, lr: 0.01865689045606769\n",
"epoch: 7300, acc: 0.843, loss: 0.394, lr: 0.01863950269806802\n",
"epoch: 7400, acc: 0.853, loss: 0.383, lr: 0.018622147319807447\n",
"epoch: 7500, acc: 0.850, loss: 0.383, lr: 0.018604824230923075\n",
"epoch: 7600, acc: 0.747, loss: 0.540, lr: 0.01858753334138793\n",
"epoch: 7700, acc: 0.840, loss: 0.424, lr: 0.018570274561509396\n",
"epoch: 7800, acc: 0.860, loss: 0.378, lr: 0.018553047801927663\n",
"epoch: 7900, acc: 0.863, loss: 0.378, lr: 0.018535852973614212\n",
"epoch: 8000, acc: 0.787, loss: 0.511, lr: 0.01851868998787026\n",
"epoch: 8100, acc: 0.823, loss: 0.379, lr: 0.018501558756325222\n",
"epoch: 8200, acc: 0.830, loss: 0.375, lr: 0.01848445919093522\n",
"epoch: 8300, acc: 0.823, loss: 0.371, lr: 0.018467391203981567\n",
"epoch: 8400, acc: 0.833, loss: 0.381, lr: 0.018450354708069265\n",
"epoch: 8500, acc: 0.853, loss: 0.375, lr: 0.018433349616125496\n",
"epoch: 8600, acc: 0.857, loss: 0.372, lr: 0.018416375841398172\n",
"epoch: 8700, acc: 0.867, loss: 0.370, lr: 0.01839943329745444\n",
"epoch: 8800, acc: 0.873, loss: 0.348, lr: 0.01838252189817921\n",
"epoch: 8900, acc: 0.830, loss: 0.369, lr: 0.018365641557773718\n",
"epoch: 9000, acc: 0.830, loss: 0.369, lr: 0.018348792190754044\n",
"epoch: 9100, acc: 0.843, loss: 0.358, lr: 0.0183319737119497\n",
"epoch: 9200, acc: 0.880, loss: 0.346, lr: 0.018315186036502167\n",
"epoch: 9300, acc: 0.867, loss: 0.365, lr: 0.018298429079863496\n",
"epoch: 9400, acc: 0.860, loss: 0.367, lr: 0.018281702757794862\n",
"epoch: 9500, acc: 0.817, loss: 0.443, lr: 0.018265006986365174\n",
"epoch: 9600, acc: 0.820, loss: 0.369, lr: 0.018248341681949654\n",
"epoch: 9700, acc: 0.827, loss: 0.363, lr: 0.018231706761228456\n",
"epoch: 9800, acc: 0.827, loss: 0.362, lr: 0.018215102141185255\n",
"epoch: 9900, acc: 0.873, loss: 0.349, lr: 0.018198527739105907\n",
"epoch: 10000, acc: 0.870, loss: 0.360, lr: 0.018181983472577025\n"
]
}
],
"source": [
"import numpy as np\n",
"\n",
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"\n",
"# Create Dense layer with 2 input features and 64 output values\n",
"dense1 = Layer_Dense(2, 64)\n",
"\n",
"# Create ReLU activation (to be used with Dense layer)\n",
"activation1 = Activation_ReLU()\n",
"\n",
"# Create second Dense layer with 64 input features (as we take output of previous layer here) and 3 output values (output values)\n",
"dense2 = Layer_Dense(64, 3)\n",
"\n",
"# Create Softmax classifier's combined loss and activation\n",
"loss_activation = Activation_Softmax_Loss_CategoricalCrossentropy()\n",
"\n",
"# Create optimizer\n",
"# optimizer = Optimizer_SGD(decay=8e-8, momentum=0.9)\n",
"#optimizer = Optimizer_Adagrad(decay=1e-4)\n",
"#optimizer = Optimizer_RMSprop(decay=1e-4)\n",
"optimizer = Optimizer_RMSprop(learning_rate=0.02, decay=1e-5,rho=0.999)\n",
"\n",
"# Train in loop\n",
"for epoch in range(10001):\n",
" # Perform a forward pass of our training data through this layer\n",
" dense1.forward(X)\n",
"\n",
" # Perform a forward pass through activation function\n",
" # takes the output of first dense layer here\n",
" activation1.forward(dense1.output)\n",
"\n",
" # Perform a forward pass through second Dense layer\n",
" # takes outputs of activation function of first layer as inputs\n",
" dense2.forward(activation1.output)\n",
"\n",
" # Perform a forward pass through the activation/loss function\n",
" # takes the output of second dense layer here and returns loss\n",
" loss = loss_activation.forward(dense2.output, y)\n",
"\n",
" # Calculate accuracy from the softmax output and targets:\n",
" # argmax over axis 1 (the class axis) gives the predicted class per sample\n",
" predictions = np.argmax(loss_activation.output, axis=1)\n",
" # if targets are one-hot encoded, convert them to sparse class labels\n",
" if len(y.shape) == 2:\n",
" y = np.argmax(y, axis=1)\n",
" accuracy = np.mean(predictions == y)\n",
"\n",
" # report training metrics every 100 epochs\n",
" if not epoch % 100:\n",
" print(f'epoch: {epoch}, ' +\n",
" f'acc: {accuracy:.3f}, ' +\n",
" f'loss: {loss:.3f}, ' +\n",
" f'lr: {optimizer.current_learning_rate}')\n",
"\n",
" # Backward pass\n",
" loss_activation.backward(loss_activation.output, y)\n",
" dense2.backward(loss_activation.dinputs)\n",
" activation1.backward(dense2.dinputs)\n",
" dense1.backward(activation1.dinputs)\n",
"\n",
" # Update weights and biases\n",
" optimizer.pre_update_params()\n",
" optimizer.update_params(dense1)\n",
" optimizer.update_params(dense2)\n",
" optimizer.post_update_params()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "hunJrmJNFjmy"
},
"source": [
"\n",
"OPTIMIZERS: ADAM \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "4vZtNOVeFjmy"
},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"# Adam optimizer: combines momentum (first moment of the gradients) with\n",
"# per-parameter adaptive learning rates (second moment, squared-gradient\n",
"# cache), applying bias correction to both running averages.\n",
"class Optimizer_Adam:\n",
" # Initialize optimizer - set settings\n",
" # learning_rate: global step size\n",
" # decay: 1/t learning-rate decay factor (0 disables decay)\n",
" # epsilon: small constant that prevents division by zero\n",
" # beta_1: EMA coefficient for gradient momentums (first moment)\n",
" # beta_2: EMA coefficient for squared-gradient cache (second moment)\n",
" def __init__(self, learning_rate=0.001, decay=0., epsilon=1e-7, beta_1=0.9, beta_2=0.999):\n",
" self.learning_rate = learning_rate\n",
" self.current_learning_rate = learning_rate\n",
" self.decay = decay\n",
" self.iterations = 0\n",
" self.epsilon = epsilon\n",
" self.beta_1 = beta_1\n",
" self.beta_2 = beta_2\n",
"\n",
" # Call once before any parameter updates\n",
" def pre_update_params(self):\n",
" if self.decay:\n",
" self.current_learning_rate = self.learning_rate * (1. / (1. + self.decay * self.iterations))\n",
"\n",
" # Update parameters\n",
" def update_params(self, layer):\n",
" # If layer does not contain cache arrays, create them filled with zeros\n",
" if not hasattr(layer, 'weight_cache'):\n",
" layer.weight_momentums = np.zeros_like(layer.weights)\n",
" layer.weight_cache = np.zeros_like(layer.weights)\n",
" layer.bias_momentums = np.zeros_like(layer.biases)\n",
" layer.bias_cache = np.zeros_like(layer.biases)\n",
"\n",
" # Update momentum with current gradients\n",
" layer.weight_momentums = self.beta_1 * layer.weight_momentums + (1 - self.beta_1) * layer.dweights\n",
" layer.bias_momentums = self.beta_1 * layer.bias_momentums + (1 - self.beta_1) * layer.dbiases\n",
"\n",
" # Get bias-corrected momentum\n",
" # self.iterations is 0 at first pass and we need to start with 1 here\n",
" weight_momentums_corrected = layer.weight_momentums / (1 - self.beta_1 ** (self.iterations + 1))\n",
" bias_momentums_corrected = layer.bias_momentums / (1 - self.beta_1 ** (self.iterations + 1))\n",
"\n",
" # Update cache with squared current gradients\n",
" layer.weight_cache = self.beta_2 * layer.weight_cache + (1 - self.beta_2) * layer.dweights**2\n",
" layer.bias_cache = self.beta_2 * layer.bias_cache + (1 - self.beta_2) * layer.dbiases**2\n",
"\n",
" # Get bias-corrected cache (correction counteracts zero-initialization)\n",
" weight_cache_corrected = layer.weight_cache / (1 - self.beta_2 ** (self.iterations + 1))\n",
" bias_cache_corrected = layer.bias_cache / (1 - self.beta_2 ** (self.iterations + 1))\n",
"\n",
" # Vanilla SGD parameter update + normalization with square rooted cache\n",
" layer.weights += -self.current_learning_rate * weight_momentums_corrected / (np.sqrt(weight_cache_corrected) + self.epsilon)\n",
" layer.biases += -self.current_learning_rate * bias_momentums_corrected / (np.sqrt(bias_cache_corrected) + self.epsilon)\n",
"\n",
" # Call once after any parameter updates\n",
" def post_update_params(self):\n",
" self.iterations += 1\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "0jPa07gQFjmy",
"outputId": "a0e17fa7-3600-4184-a911-97ef141ef8bf"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch: 0, acc: 0.310, loss: 1.099, lr: 1.0\n",
"epoch: 100, acc: 0.537, loss: 1.003, lr: 0.9999920800627259\n",
"epoch: 200, acc: 0.487, loss: 0.899, lr: 0.9999840802534423\n",
"epoch: 300, acc: 0.687, loss: 0.662, lr: 0.9999760805721527\n",
"epoch: 400, acc: 0.723, loss: 0.566, lr: 0.9999680810188538\n",
"epoch: 500, acc: 0.760, loss: 0.518, lr: 0.9999600815935427\n",
"epoch: 600, acc: 0.767, loss: 0.496, lr: 0.9999520822962162\n",
"epoch: 700, acc: 0.770, loss: 0.466, lr: 0.9999440831268714\n",
"epoch: 800, acc: 0.787, loss: 0.419, lr: 0.9999360840855052\n",
"epoch: 900, acc: 0.620, loss: 1.111, lr: 0.9999280851721145\n",
"epoch: 1000, acc: 0.883, loss: 0.281, lr: 0.999920086386696\n",
"epoch: 1100, acc: 0.740, loss: 0.561, lr: 0.9999120877292469\n",
"epoch: 1200, acc: 0.877, loss: 0.256, lr: 0.999904089199764\n",
"epoch: 1300, acc: 0.873, loss: 0.236, lr: 0.9998960907982444\n",
"epoch: 1400, acc: 0.567, loss: 2.495, lr: 0.9998880925246847\n",
"epoch: 1500, acc: 0.790, loss: 0.445, lr: 0.9998800943790821\n",
"epoch: 1600, acc: 0.803, loss: 0.435, lr: 0.9998720963614335\n",
"epoch: 1700, acc: 0.800, loss: 0.431, lr: 0.9998640984717357\n",
"epoch: 1800, acc: 0.803, loss: 0.429, lr: 0.9998561007099859\n",
"epoch: 1900, acc: 0.803, loss: 0.426, lr: 0.9998481030761807\n",
"epoch: 2000, acc: 0.807, loss: 0.424, lr: 0.9998401055703172\n",
"epoch: 2100, acc: 0.810, loss: 0.422, lr: 0.9998321081923923\n",
"epoch: 2200, acc: 0.810, loss: 0.420, lr: 0.999824110942403\n",
"epoch: 2300, acc: 0.793, loss: 0.401, lr: 0.9998161138203462\n",
"epoch: 2400, acc: 0.803, loss: 0.399, lr: 0.9998081168262187\n",
"epoch: 2500, acc: 0.807, loss: 0.397, lr: 0.9998001199600176\n",
"epoch: 2600, acc: 0.810, loss: 0.395, lr: 0.9997921232217397\n",
"epoch: 2700, acc: 0.823, loss: 0.385, lr: 0.999784126611382\n",
"epoch: 2800, acc: 0.820, loss: 0.383, lr: 0.9997761301289415\n",
"epoch: 2900, acc: 0.820, loss: 0.380, lr: 0.999768133774415\n",
"epoch: 3000, acc: 0.813, loss: 0.372, lr: 0.9997601375477995\n",
"epoch: 3100, acc: 0.813, loss: 0.370, lr: 0.9997521414490919\n",
"epoch: 3200, acc: 0.813, loss: 0.369, lr: 0.9997441454782892\n",
"epoch: 3300, acc: 0.813, loss: 0.368, lr: 0.9997361496353881\n",
"epoch: 3400, acc: 0.813, loss: 0.366, lr: 0.9997281539203858\n",
"epoch: 3500, acc: 0.813, loss: 0.365, lr: 0.9997201583332792\n",
"epoch: 3600, acc: 0.813, loss: 0.365, lr: 0.9997121628740652\n",
"epoch: 3700, acc: 0.813, loss: 0.364, lr: 0.9997041675427408\n",
"epoch: 3800, acc: 0.813, loss: 0.363, lr: 0.9996961723393027\n",
"epoch: 3900, acc: 0.810, loss: 0.363, lr: 0.999688177263748\n",
"epoch: 4000, acc: 0.813, loss: 0.362, lr: 0.9996801823160735\n",
"epoch: 4100, acc: 0.813, loss: 0.361, lr: 0.9996721874962763\n",
"epoch: 4200, acc: 0.810, loss: 0.360, lr: 0.9996641928043533\n",
"epoch: 4300, acc: 0.813, loss: 0.360, lr: 0.9996561982403013\n",
"epoch: 4400, acc: 0.810, loss: 0.359, lr: 0.9996482038041173\n",
"epoch: 4500, acc: 0.810, loss: 0.359, lr: 0.9996402094957983\n",
"epoch: 4600, acc: 0.810, loss: 0.359, lr: 0.9996322153153412\n",
"epoch: 4700, acc: 0.813, loss: 0.358, lr: 0.999624221262743\n",
"epoch: 4800, acc: 0.807, loss: 0.358, lr: 0.9996162273380004\n",
"epoch: 4900, acc: 0.810, loss: 0.357, lr: 0.9996082335411106\n",
"epoch: 5000, acc: 0.807, loss: 0.357, lr: 0.9996002398720704\n",
"epoch: 5100, acc: 0.813, loss: 0.356, lr: 0.9995922463308767\n",
"epoch: 5200, acc: 0.807, loss: 0.357, lr: 0.9995842529175265\n",
"epoch: 5300, acc: 0.807, loss: 0.370, lr: 0.9995762596320168\n",
"epoch: 5400, acc: 0.797, loss: 0.397, lr: 0.9995682664743444\n",
"epoch: 5500, acc: 0.803, loss: 0.351, lr: 0.9995602734445063\n",
"epoch: 5600, acc: 0.807, loss: 0.348, lr: 0.9995522805424993\n",
"epoch: 5700, acc: 0.810, loss: 0.354, lr: 0.9995442877683206\n",
"epoch: 5800, acc: 0.803, loss: 0.347, lr: 0.999536295121967\n",
"epoch: 5900, acc: 0.800, loss: 0.369, lr: 0.9995283026034353\n",
"epoch: 6000, acc: 0.803, loss: 0.348, lr: 0.9995203102127226\n",
"epoch: 6100, acc: 0.807, loss: 0.345, lr: 0.9995123179498259\n",
"epoch: 6200, acc: 0.810, loss: 0.346, lr: 0.9995043258147419\n",
"epoch: 6300, acc: 0.803, loss: 0.346, lr: 0.9994963338074676\n",
"epoch: 6400, acc: 0.807, loss: 0.346, lr: 0.9994883419280001\n",
"epoch: 6500, acc: 0.803, loss: 0.347, lr: 0.9994803501763364\n",
"epoch: 6600, acc: 0.800, loss: 0.359, lr: 0.9994723585524731\n",
"epoch: 6700, acc: 0.813, loss: 0.343, lr: 0.9994643670564072\n",
"epoch: 6800, acc: 0.810, loss: 0.342, lr: 0.9994563756881358\n",
"epoch: 6900, acc: 0.810, loss: 0.341, lr: 0.9994483844476557\n",
"epoch: 7000, acc: 0.813, loss: 0.342, lr: 0.9994403933349639\n",
"epoch: 7100, acc: 0.807, loss: 0.342, lr: 0.9994324023500574\n",
"epoch: 7200, acc: 0.807, loss: 0.348, lr: 0.999424411492933\n",
"epoch: 7300, acc: 0.803, loss: 0.342, lr: 0.9994164207635877\n",
"epoch: 7400, acc: 0.810, loss: 0.343, lr: 0.9994084301620185\n",
"epoch: 7500, acc: 0.810, loss: 0.340, lr: 0.9994004396882222\n",
"epoch: 7600, acc: 0.810, loss: 0.340, lr: 0.999392449342196\n",
"epoch: 7700, acc: 0.803, loss: 0.343, lr: 0.9993844591239364\n",
"epoch: 7800, acc: 0.800, loss: 0.345, lr: 0.9993764690334407\n",
"epoch: 7900, acc: 0.803, loss: 0.342, lr: 0.9993684790707056\n",
"epoch: 8000, acc: 0.807, loss: 0.364, lr: 0.9993604892357283\n",
"epoch: 8100, acc: 0.810, loss: 0.347, lr: 0.9993524995285055\n",
"epoch: 8200, acc: 0.777, loss: 0.452, lr: 0.9993445099490342\n",
"epoch: 8300, acc: 0.807, loss: 0.346, lr: 0.9993365204973114\n",
"epoch: 8400, acc: 0.810, loss: 0.342, lr: 0.999328531173334\n",
"epoch: 8500, acc: 0.807, loss: 0.359, lr: 0.9993205419770989\n",
"epoch: 8600, acc: 0.847, loss: 0.284, lr: 0.9993125529086031\n",
"epoch: 8700, acc: 0.863, loss: 0.272, lr: 0.9993045639678434\n",
"epoch: 8800, acc: 0.867, loss: 0.283, lr: 0.999296575154817\n",
"epoch: 8900, acc: 0.863, loss: 0.285, lr: 0.9992885864695206\n",
"epoch: 9000, acc: 0.860, loss: 0.265, lr: 0.9992805979119511\n",
"epoch: 9100, acc: 0.863, loss: 0.266, lr: 0.9992726094821057\n",
"epoch: 9200, acc: 0.867, loss: 0.263, lr: 0.9992646211799814\n",
"epoch: 9300, acc: 0.867, loss: 0.259, lr: 0.9992566330055745\n",
"epoch: 9400, acc: 0.867, loss: 0.269, lr: 0.9992486449588827\n",
"epoch: 9500, acc: 0.873, loss: 0.249, lr: 0.9992406570399023\n",
"epoch: 9600, acc: 0.877, loss: 0.246, lr: 0.9992326692486306\n",
"epoch: 9700, acc: 0.877, loss: 0.245, lr: 0.9992246815850646\n",
"epoch: 9800, acc: 0.877, loss: 0.244, lr: 0.999216694049201\n",
"epoch: 9900, acc: 0.877, loss: 0.243, lr: 0.9992087066410369\n",
"epoch: 10000, acc: 0.873, loss: 0.244, lr: 0.9992007193605691\n"
]
}
],
"source": [
"import numpy as np\n",
"\n",
"# Create dataset\n",
"X, y = spiral_data(samples=100, classes=3)\n",
"\n",
"# Create Dense layer with 2 input features and 64 output values\n",
"dense1 = Layer_Dense(2, 64)\n",
"\n",
"# Create ReLU activation (to be used with Dense layer)\n",
"activation1 = Activation_ReLU()\n",
"\n",
"# Create second Dense layer with 64 input features (as we take output of previous layer here) and 3 output values (output values)\n",
"dense2 = Layer_Dense(64, 3)\n",
"\n",
"# Create Softmax classifier's combined loss and activation\n",
"loss_activation = Activation_Softmax_Loss_CategoricalCrossentropy()\n",
"\n",
"# Create optimizer\n",
"#optimizer = Optimizer_SGD(decay=8e-8, momentum=0.9)\n",
"#optimizer = Optimizer_Adagrad(decay=1e-4)\n",
"#optimizer = Optimizer_RMSprop(decay=1e-4)\n",
"#optimizer = Optimizer_RMSprop(learning_rate=0.02, decay=1e-5,rho=0.999)\n",
"\n",
"optimizer = Optimizer_Adam(learning_rate=0.02, decay=1e-5)\n",
"\n",
"# Train in loop\n",
"for epoch in range(10001):\n",
" # Perform a forward pass of our training data through this layer\n",
" dense1.forward(X)\n",
"\n",
" # Perform a forward pass through activation function\n",
" # takes the output of first dense layer here\n",
" activation1.forward(dense1.output)\n",
"\n",
" # Perform a forward pass through second Dense layer\n",
" # takes outputs of activation function of first layer as inputs\n",
" dense2.forward(activation1.output)\n",
"\n",
" # Perform a forward pass through the activation/loss function\n",
" # takes the output of second dense layer here and returns loss\n",
" loss = loss_activation.forward(dense2.output, y)\n",
"\n",
" # Calculate accuracy from the softmax output and targets:\n",
" # argmax over axis 1 (the class axis) gives the predicted class per sample\n",
" predictions = np.argmax(loss_activation.output, axis=1)\n",
" # if targets are one-hot encoded, convert them to sparse class labels\n",
" if len(y.shape) == 2:\n",
" y = np.argmax(y, axis=1)\n",
" accuracy = np.mean(predictions == y)\n",
"\n",
" # report training metrics every 100 epochs\n",
" if not epoch % 100:\n",
" print(f'epoch: {epoch}, ' +\n",
" f'acc: {accuracy:.3f}, ' +\n",
" f'loss: {loss:.3f}, ' +\n",
" f'lr: {optimizer.current_learning_rate}')\n",
"\n",
" # Backward pass\n",
" loss_activation.backward(loss_activation.output, y)\n",
" dense2.backward(loss_activation.dinputs)\n",
" activation1.backward(dense2.dinputs)\n",
" dense1.backward(activation1.dinputs)\n",
"\n",
" # Update weights and biases\n",
" optimizer.pre_update_params()\n",
" optimizer.update_params(dense1)\n",
" optimizer.update_params(dense2)\n",
" optimizer.post_update_params()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "idw4i9VtFjmz"
},
"outputs": [],
"source": [
"#ADAM: 0.957\n",
"#RMSPROP: 0.717\n",
"#MOMENTUM: 0.873"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "9e9f8aRZFjmz"
},
"outputs": [],
"source": []
}
],
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.2"
}
},
"nbformat": 4,
"nbformat_minor": 0
}