1075 lines
99 KiB
Plaintext
1075 lines
99 KiB
Plaintext
{
|
|
"cells": [
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"# Previous Class Definitions\n",
|
|
"The previously defined Layer_Dense, Activation_ReLU, and Activation_Softmax"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 5,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# imports\n",
|
|
"import matplotlib.pyplot as plt\n",
|
|
"import numpy as np\n",
|
|
"import nnfs\n",
|
|
"from nnfs.datasets import spiral_data, vertical_data\n",
|
|
"nnfs.init()"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 6,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"class Layer_Dense:\n",
|
|
" def __init__(self, n_inputs, n_neurons):\n",
|
|
" # Initialize the weights and biases\n",
|
|
" self.weights = 0.01 * np.random.randn(n_inputs, n_neurons) # Normal distribution of weights\n",
|
|
" self.biases = np.zeros((1, n_neurons))\n",
|
|
"\n",
|
|
" def forward(self, inputs):\n",
|
|
" # Calculate the output values from inputs, weights, and biases\n",
|
|
" self.output = np.dot(inputs, self.weights) + self.biases # Weights are already transposed\n",
|
|
"\n",
|
|
"class Activation_ReLU:\n",
|
|
" def forward(self, inputs):\n",
|
|
" self.output = np.maximum(0, inputs)\n",
|
|
" \n",
|
|
"class Activation_Softmax:\n",
|
|
" def forward(self, inputs):\n",
|
|
" # Get the unnormalized probabilities\n",
|
|
" # Subtract max from the row to prevent larger numbers\n",
|
|
" exp_values = np.exp(inputs - np.max(inputs, axis=1, keepdims=True))\n",
|
|
"\n",
|
|
" # Normalize the probabilities with element wise division\n",
|
|
" probabilities = exp_values / np.sum(exp_values, axis=1,keepdims=True)\n",
|
|
" self.output = probabilities"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"# Forward Pass with No Loss Consideration\n",
|
|
"2 input neural network with 2 layers of 3 neurons each. ReLU activation in the first layer with Softmax in the second layer to normalize the outputs."
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 4,
|
|
"metadata": {},
|
|
"outputs": [
|
|
{
|
|
"name": "stdout",
|
|
"output_type": "stream",
|
|
"text": [
|
|
"[[0.33333334 0.33333334 0.33333334]\n",
|
|
" [0.33333316 0.3333332 0.33333364]\n",
|
|
" [0.33333287 0.3333329 0.33333418]\n",
|
|
" [0.3333326 0.33333263 0.33333477]\n",
|
|
" [0.33333233 0.3333324 0.33333528]]\n"
|
|
]
|
|
}
|
|
],
|
|
"source": [
|
|
"# Create dataset\n",
|
|
"X, y = spiral_data(samples=100, classes=3)\n",
|
|
"# Create Dense layer with 2 input features and 3 output values\n",
|
|
"dense1 = Layer_Dense(2, 3)\n",
|
|
"# Create ReLU activation (to be used with Dense layer):\n",
|
|
"activation1 = Activation_ReLU()\n",
|
|
"# Create second Dense layer with 3 input features (as we take output\n",
|
|
"# of previous layer here) and 3 output values\n",
|
|
"dense2 = Layer_Dense(3, 3)\n",
|
|
"# Create Softmax activation (to be used with Dense layer):\n",
|
|
"activation2 = Activation_Softmax()\n",
|
|
"\n",
|
|
"# Make a forward pass of our training data through this layer\n",
|
|
"dense1.forward(X)\n",
|
|
"\n",
|
|
"# Make a forward pass through activation function\n",
|
|
"# it takes the output of first dense layer here\n",
|
|
"activation1.forward(dense1.output)\n",
|
|
"# Make a forward pass through second Dense layer\n",
|
|
"# it takes outputs of activation function of first layer as inputs\n",
|
|
"dense2.forward(activation1.output)\n",
|
|
"# Make a forward pass through activation function\n",
|
|
"# it takes the output of second dense layer here\n",
|
|
"activation2.forward(dense2.output)\n",
|
|
"# Let's see output of the first few samples:\n",
|
|
"print(activation2.output[:5])"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"# Calculating Network Error with Categorical Cross Entropy Loss\n",
|
|
"loss = negative sum of the expected output * log(neural network output)\n",
|
|
"loss = - sum(expected_i * log(nn_output_i)) for all i in outputs\n",
|
|
"\n",
|
|
"In the classification case, incorrect outputs do not end up mattering as the expected_i for the wrong class is 0.\n"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 6,
|
|
"metadata": {},
|
|
"outputs": [
|
|
{
|
|
"name": "stdout",
|
|
"output_type": "stream",
|
|
"text": [
|
|
"Losses: [0.35667494 0.69314718 0.10536052]\n",
|
|
"Average Loss: 0.38506088005216804\n"
|
|
]
|
|
}
|
|
],
|
|
"source": [
|
|
"nn_outputs = np.array([\n",
|
|
" [0.7, 0.1, 0.2],\n",
|
|
" [0.1, 0.5, 0.4],\n",
|
|
" [0.02, 0.9, 0.08]])\n",
|
|
"class_targets = [0, 1, 1]\n",
|
|
"losses = -np.log(nn_outputs[range(len(nn_outputs)), class_targets])\n",
|
|
"print(f\"Losses: {losses}\")\n",
|
|
"print(f\"Average Loss: {np.average(losses)}\")"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"## Loss with One Hot Encoding\n",
|
|
"Classification typically has the expected output to be all zero except for the class the inputs belong too. This leads to simplfiying the cross entropy loss calculation."
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 7,
|
|
"metadata": {},
|
|
"outputs": [
|
|
{
|
|
"name": "stdout",
|
|
"output_type": "stream",
|
|
"text": [
|
|
"Losses: [0.35667494 0.69314718 0.10536052]\n",
|
|
"Average Loss: 0.38506088005216804\n"
|
|
]
|
|
}
|
|
],
|
|
"source": [
|
|
"true_output = np.array([\n",
|
|
" [1, 0, 0],\n",
|
|
" [0, 1, 0],\n",
|
|
" [0, 1, 0]\n",
|
|
"])\n",
|
|
"\n",
|
|
"nn_output = np.array([\n",
|
|
" [0.7, 0.2, 0.1],\n",
|
|
" [0.1, 0.5, 0.4],\n",
|
|
" [0.02, 0.9, 0.08]\n",
|
|
"])\n",
|
|
"\n",
|
|
"# Element by element multiplication \"erases\" the output terms corresponding with 0\n",
|
|
"A = true_output*nn_output\n",
|
|
"\n",
|
|
"# Sum the columns (ie, sum every element in row 0, then row 1, etc) because each row is a batch of output\n",
|
|
"B = np.sum(A, axis = 1)\n",
|
|
"\n",
|
|
"# Get the cross entropy loss\n",
|
|
"C = -np.log(B)\n",
|
|
"\n",
|
|
"print(f\"Losses: {C}\")\n",
|
|
"print(f\"Average Loss: {np.mean(C)}\")\n"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"## Implementing the Loss Class"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 2,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# Base class for Loss functions\n",
|
|
"class Loss:\n",
|
|
" '''Calculates the data and regularization losses given\n",
|
|
" model output and ground truth values'''\n",
|
|
" def calculate(self, output, y):\n",
|
|
" sample_losses = self.forward(output, y)\n",
|
|
" data_loss = np.average(sample_losses)\n",
|
|
" return data_loss"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"## Implementing the Categorical Cross Entropy Loss Class"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 3,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"class Loss_CategoricalCrossEntropy(Loss):\n",
|
|
" def forward(self, y_pred, y_true):\n",
|
|
" '''y_pred is the neural network output\n",
|
|
" y_true is the ideal output of the neural network'''\n",
|
|
" samples = len(y_pred)\n",
|
|
" # Bound the predicted values \n",
|
|
" y_pred_clipped = np.clip(y_pred, 1e-7, 1-1e-7)\n",
|
|
" \n",
|
|
" if len(y_true.shape) == 1: # Categorically labeled\n",
|
|
" correct_confidences = y_pred_clipped[range(samples), y_true]\n",
|
|
" elif len(y_true.shape) == 2: # One hot encoded\n",
|
|
" correct_confidences = np.sum(y_pred_clipped*y_true, axis=1)\n",
|
|
"\n",
|
|
" # Calculate the losses\n",
|
|
" negative_log_likelihoods = -np.log(correct_confidences)\n",
|
|
" return negative_log_likelihoods"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 17,
|
|
"metadata": {},
|
|
"outputs": [
|
|
{
|
|
"name": "stdout",
|
|
"output_type": "stream",
|
|
"text": [
|
|
"Losses: 0.38506088005216804\n",
|
|
"Average Loss: 0.38506088005216804\n"
|
|
]
|
|
}
|
|
],
|
|
"source": [
|
|
"nn_outputs = np.array([\n",
|
|
" [0.7, 0.1, 0.2],\n",
|
|
" [0.1, 0.5, 0.4],\n",
|
|
" [0.02, 0.9, 0.08]])\n",
|
|
"class_targets = np.array([\n",
|
|
" [1, 0, 0],\n",
|
|
" [0, 1, 0],\n",
|
|
" [0, 1, 0]])\n",
|
|
"\n",
|
|
"loss_function = Loss_CategoricalCrossEntropy()\n",
|
|
"losses = loss_function.calculate(nn_outputs, class_targets)\n",
|
|
"print(f\"Losses: {losses}\")\n",
|
|
"print(f\"Average Loss: {np.average(losses)}\")"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"# Introducing Accuracy\n",
|
|
"In the simple example, if the highest value in the outputs align with the correct classification, then that accuracy is 1. Even if it was 51% red and 49% blue, and the true output is red, it would be considered fully accurate."
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 18,
|
|
"metadata": {},
|
|
"outputs": [
|
|
{
|
|
"name": "stdout",
|
|
"output_type": "stream",
|
|
"text": [
|
|
"Losses: 0.38506088005216804\n",
|
|
"Average Loss: 0.38506088005216804\n",
|
|
"Accuracy: 1.0\n"
|
|
]
|
|
}
|
|
],
|
|
"source": [
|
|
"nn_outputs = np.array([\n",
|
|
" [0.7, 0.1, 0.2],\n",
|
|
" [0.1, 0.5, 0.4],\n",
|
|
" [0.02, 0.9, 0.08]])\n",
|
|
"class_targets = np.array([\n",
|
|
" [1, 0, 0],\n",
|
|
" [0, 1, 0],\n",
|
|
" [0, 1, 0]])\n",
|
|
"\n",
|
|
"# Calculate the losses\n",
|
|
"loss_function = Loss_CategoricalCrossEntropy()\n",
|
|
"losses = loss_function.calculate(nn_outputs, class_targets)\n",
|
|
"print(f\"Losses: {losses}\")\n",
|
|
"print(f\"Average Loss: {np.average(losses)}\")\n",
|
|
"\n",
|
|
"# Calculate the accuracy\n",
|
|
"predictions = np.argmax(nn_outputs, axis=1)\n",
|
|
"# If targets are one-hot encoded - convert them\n",
|
|
"if len(class_targets.shape) == 2:\n",
|
|
" class_targets = np.argmax(class_targets, axis=1)\n",
|
|
"# True evaluates to 1; False to 0\n",
|
|
"accuracy = np.mean(predictions == class_targets)\n",
|
|
"print(f\"Accuracy: {accuracy}\")"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"# The Need for Optimization"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 5,
|
|
"metadata": {},
|
|
"outputs": [
|
|
{
|
|
"data": {
|
|
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAB1H0lEQVR4nO2dd5gT1RrG35NkN8kkWXovUhUQBakiUkQR9SKKoiJIERVQUUGxK3avlauigoqAHVEQG4qgKAqCUkWKdCkivWxv571/nM1usplJ2U022WV+eeaBTWbOfJOFd8585yuCJExMTExMyj+WeBtgYmJiYhIdTEE3MTExqSCYgm5iYmJSQTAF3cTExKSCYAq6iYmJSQXBFq8TV69enY0aNYrX6U1MTEzKJStXrjxEsobeZ3ET9EaNGmHFihXxOr2JiYlJuUQI8bfRZ6bLxcTExKSCYAq6iYmJSQXBFHQTExOTCoIp6CYmJiYVhLgtipoAJPDbb8Dq1UDDhsCFFwI28zdiYmJSQkz5iBNpaUCfPsDatYCUSshTUoAffwSaNYu3dSYmJuUR0+USJ8aOBVauBNLTgcxMIDUV+OcfoG9fNXM3MTExiRRT0ONAfj7w/vtAdrb/+ySwZw+wbl187DIxMSnfmIIeB3JygLw8/c9sNuDw4bK1x8TEpGJgCnoccDqN/eTZ2UC7dmVrj4k+3L4dvOlGsHkzsFs38LPP4m2SiUlQTEGPE5MmKWH3RdOA++4DKlWKj00mRXDjRqDdWcA77wDbtgFLfgGGDgEnPBxv00xMDDEFPU707g189x3QowdQpQrQqhXw1lvAhAnxtswEAHD3eLVS7esbS08HXngB3L8/fnaZmATBDFuMI+eeq8IUS8OuXcCCBWp237cv4PFExTSTRYv0w42SktQv7ZprytwkE5NQmIJeTiGBe+4BXn0VsFoBi0VFz3z0EdCvX7ytqwA4HCqetDhCAG532dtjYhIGpsulnDJ3LjB5MpCVpTwBqalARgYwcCDw77/xtq4CMGwYYLcHvi8EcMEFZW+PiUkYmIJeTnn5ZSXkxSGBmTPL3p4KxxNPAmeeqWbjQiiflssFfDYXQk/oTUwSANPlUk45eFD//aws489Mwke4XOCvy4Dvvwd+/RWoVQu45hqIypXjbZqJiSGmoJdT+vQBtm5VSUq+uN0qcsak9AiLRYUj9e4db1NMTMLCdLmUU8aPV+JttRa953AArVubLl6T6LIO69Af/VELtdAarTEDM0CYBYcSkbAEXQhxkRDiLyHEViHEfTqf3y2EWFOw/SmEyBdCVI2+uSZe6tYFVq0Crr1WxbHXrQvcfTfwww8q4sXEJBqswip0QRd8js9xAAewHusxBmMwDuPibZqJDoIhSvsJIawANgPoDWAPgN8BXEtyg8H+lwIYR7JXsHE7dOjAitgkevVqVeO8bl3gootU2LKJSXnlPJyHH/FjwPt22LEd21EXdcveqJMcIcRKkh30PgtnLtcJwFaS20nmAJgJ4LIg+18L4KPIzSzfZGWpBhXnnguMGwcMHgzUrw/8+Wf4Y8yZA7RtC1StCnTtqmbbgKqX/sUXKpflmmuAr75S7wGq9su6dar0rolJtPkVv+q+n4xk/IJfytgak1CEsyhaD8Bun5/3AOist6MQQgNwEYAxBp+PBDASABo2bBiRoYnOQw8BP/+shN1Laipw8cXA33+HdoO8/DLwwAMqlhwAli4FLr0UePddtX3/fVGY4tdfq9l/9+7qvACQmwt07AjMmgXUrh396yst6enApk1AzZpAgwbxtsYkXNxwIxvZup9VQZUytsYkFOHM0IXOe0Z+mksBLCF5RO9Dkm+S7ECyQ40aNcK1sVzw1lv+Yu7l+HElzsHIylLC7BVzLxkZwKhR/mIOqL9//bXKFE1NVVtWFrBkiYpwSaQGGSTw+ONAjRpAr17AqacCPXsCBw7E2zKTcLgJN8EBR8D7dthxHs6Lg0UmwQhH0PcA8J1T1Qdg9IA/ECehuwXQT/IBVE5KqPrm69cbz+APH9YfOysrsEGGlMCWLSqLNFGYOhV49lmVRX/ihLJ76VIVdplINx4TfR7BI+iKrnDBBTvs8MCDyqiMb/ANbGbUc8IRjqD/DqC5EKKxECIZSrS/KL6TEKISgB4APo+uieUDoxrm2dlAZ10HVRFVqyqXSTQggeeei85Y0eDJJwOfPHJz1Y1n1ar42GQSPg44sBAL8QN+wHN4Dm/hLfyDf9ABumtyJnEmpKCTzIPyic8HsBHALJLrhRCjhRCjfXbtD+A7kgZz1YrN//6nssN90TTg5ptD+7QbNwZOP90/pjwUQs8RVsCaNeGPE2v27dN/32oFtm8vW1tMSk4ndMLtuB3X4Bo44Qx9gElcCCtimeQ8kqeSbEryqYL3ppCc4rPPDJIDY2VootO1q6qq2rs3ULmy8hW/8gowcWJ4x8+eDTRsqMrfOp3BBTspCTjtNOPPgx1b1jRpov9+bq6qAW9ikigQxGEcRhrS4m1KiTFTUKJIx46qacXRo8BffwE33BC+uDZsqFL5P/1U3QTOPFN/P6sVGDlSlRfRi3G3WIDLfIJKo+XKKSlPPRX45GK3A126qKcSE5NE4Ht8j+Zojrqoi6qoiktwCf5FOSxbSjIuW/v27WlizBdfkJpGKq940eZ2k8eOqX2mTyedTtJiUZ9pGlm7Nrl7N/nqq2StWqQQZI0a5EsvkVLG51pmzFA2OByk3U5edx2ZmhofW0xMirOSK6lRI3xeNtrYmI2Zy9x4mxcAgBU00FVzhp6gXHqp6i9qtwMpKcoVU6UKMG9eUc/R4cOBZcuAG29UcemPPw5s3Ah88IEKady/X90GDh5UMe5PPBGfaxk2TNVo37EDOHIEeO+9+PaIoDcrKxZjZ2aCRiFPJgnJk3gSmfBvZpKHPBzCIXyNr+NkVQkxUvpYb+YMPTwOHiRnzybnzydzckLvn5VFejyBM3uAdLnIjIzY25yoyM8/pzy1OaUAZaUUygcfpMyNzgxM7thBeU4XSotQW4P6lIsWRWVsk9jSiI38Zufel6DgE3wi3uYFAHOGXn6pXh244gpVViCcujB79hSVBSiOxaKyVk9G+OWXwKBrVbwkoILiX/ofMHxY6cdOTQXOaqsWNrz3zz17gF7ngT//XOrxTWJLMzTTfd8NN5rAYFU/QTEFvYJRo4Z/o3pfcnJU6v1JyT1366fizpkD7tpVurHfeUelBOsx5LrSjW0Scx7AA9Dgv3IvIOCAA1fgijhZVTJMQa9gpKQAV14Z2A7Tbld++aonYVFjksDmzfofJicDa9eW7gTfzDP+bNcusHgXEpOE4jych9fwGiqhEjzwQIOGFmiBxVisW/YgkTEFvQLy5puqborTqRZQnU5V42XatMjGWbUKuPxylfh0wQXAokUxMTfmCCHUirIe+flAvXqlO0GDIIXmEikpwMSQ4RiOAziARViEVViFDdiAFmgRb7MixhT0CojLpaJh1q8HPv5YldedP19FyoTLwoVAt26qbO/OnapAWN++wPTpMTM7t
owdGxgQb7UCjRoDZ51VurEffND4s44dIZKTSzd+BWEHdmAgBqIyKqMWauFe3It0JE5EUDKS0R7tcRqCZO0lOKagV2ByctQ63RtvAIsXh18MiwRGj1YuZt9jMjKULpYnDwJJMDMTuO9+YOBA1aevUiV112vVCvjmGzWDLwWiQQNVgaw4bjfw3vulGrui4K3/8gk+wXEcxwEcwCt4BT3RExKxCyM92TAFvYLy4otq4vnUU8DzzwOXXAL07688DKE4fBjYvVv/M1LN+MsDnPoWUK8ukOIBalQHTmkEbN4CfPAh8MsSiLV/QNSvH5VzibvvATZuAq4dBJxzjgr637ETonnzqIxf3nkezyMVqX7inYUsbMImzMf8OFpWsTDrX1ZAtmxR9dV967Onpys3ygcfAEOHBj/eGaT2Un5+ZK6beMHJk4G7xxdFthw/Djz7DHD4EMRLL0f9fOlIh+W0hnB+8EHUx64ILMAC5CKwDkUa0rAYi3ExLo6DVRUPc4ZeAfnoI/3QxfR05X4JhculFkFtxW73QgCNGqnCY4kM8/OBCQ/rhym++SZ47FjUzrUGa9AJnVAZlZGCFPRGb/yNkzTYPwg1oR8v64DD8DOTyDEFvQKSkWHsWimucUa8/XZR9UeLRbmDq1dXVSETniNHgDSDinl2u6qcFgSS4MqV4Jw54LZthvvtwi50R3f8jt+RV/BahEXohE7lumJfLBiHcXDBFfC+BRYMwqCwxliN1RiO4eiO7ngYD5fP4lkxxhT0Cki/foEBHYBaDxwYZoHj2rWV7r3/vnIHv/kmsGsX0KI8RHJVqgRYDIrLZ2cHDVPkvn1AmzZAzx7AiOuBM1qD/fuDxdtDAXgJLyEL/n0H85GPdKTjA5iuF18uxaW4A3fADjvccMMDD1xwYRZmoRZqhTz+PbyHc3Eu3sN7+Bk/43k8j1ZohS3YUgbWlyOMagLEejuZarn88gt5zjmqGmKDBqryYX5+7M4nJXnWWYG1XOrVI48fj915Ewl52xhKzanqtng3ezLlhRcGP65DB0qb1f84zUl5x+0B+3ZhF90aICB4Pa+P1aXFDEnJHIZRMKgU7OEevsN3+Ak/YRrTwjomnel00aVba6UP+8TU3kQEZi2X+LF4sWp6sXSpcnfs3q0qH95yS+zO+fPP+l6FY8cC+5BWWF54URWGtzuKsqvO6aoC8w3gX38BGzcE+qsyM4GpU5Vv3ofmaA6Lzn8hBxzlKpY5C1m4HbfDDTfssKMFWuBbfBuTc9VDPQzFUAzAAF0XjB5LsARWBD5xEcRCLAQNe9ZHjwVYgC7ogqqoinZoh88TtdOmkdLHejtZZujt2+tXPnQ4yD17YnPOfv30z+l0khMnxuaciYrcs4dy4ULKrVtD77toEWXlSv6zc++WZKNM859RruGagDraIOimm//y3xhdUfTpwz500OF3DU46+T2/j7dpJMnv+T1TmKL7JJTEJErGttD/R/wo4PesUePrfD2m5zUC5gw9fhj190xOBn77LTbnNKqomJl58lVbFPXqQZx/PkTTpqF3PuMM/1hPX+rWDViYaIM2eBfvojIqwwMP3HCjDurgW3wbll84EfgTf2IxFgesBWQiE/fj/jhZ5c+5OBcCgclfFljQF311P4sWEhJjMRYZ8I8myEAG7sN9yEZiPfKagh5jUlL035dSVUaMBd266ZfadbtV6zcTfUS1asDomwNXlJ1O4PkXdDNKr8SVOIAD+A7f4Uf8iD3Yg67oWkYWl55VWKXrzgCU2CcCyUjGTMyEBg02n9QZr8sllgvQ+7APJ3BC9zOC+AvBI6bKGlPQY8CcOUDbtqoelNOpZuO+CKGqHp5zTmzOP358YKPppCSgVi2VLWoShBdfBB57HKhVW9V6Oe004MOPIK66yvCQJCThbJyN9miv61NPZBqggeEMtwZiNOMoARfhIizHcr+bD0GkIhUjMRLLsTwm5/XAg3zoxwDnIhdVkVjlS8vXv75ywKRJwJAhqiLrsWPAP/+oNbakJDVD9njU0/v8+Sq+OxaccopahD3vPHUOux24+mrVrs6sExUcYbFA3HUXxL59ELl5EBs3Qfh23daBGRng2rXgv+UvLroHehiKUn3UL5MFx3D5CT/pPk1kIhPP4bmYnDMFKbgQFyIJ/o+8NtjQDu1QH9EpHREtTEGPIllZKoKlePJOfj5QubIS+zlzlB+7tPHc69erSJn//Ad44QV18/Dl9NNVhcS8POU7f/99lRikx4YNwOefA5s2lc6mkw2S4BOPAzVrAN27AY0bgRdfDB45Em/TwsYCC27BLbqz9DVYg2VYVma2/Igf0Q3dUAVVcCbOxCzM8vt8G7YF+LIBNVPfiq0xs2s6pqMlWsINN5xwwgMPTsEpAfYlBEarpbHeKmKUy+rVxv08HQ7yn3+ic54PPlAx7VZrUfRKzZrkrl2RjXPsGNmtmzo+JUX92asXeeJEdOys6MjXXqN0uwJj3bueE2/TIqIHe+hGkAgK3sE7ysSGr/iVbiTJM3yGJHmcxzmcw5nEpAA7rbTyRt4YU/skJX/iT5zMyVzABcxnDBNJQoAgUS6moEeRnTuVcOsJenIymZpa+nOkpSkxLz6+1UpeeWVkY/Xrp+zyHcduJ6+6qvR2liXyxAnKN9+kHDuWcupUymh80b7j795NecMIylo1KRudQvnE45SZmZT16uqHOLo0yj//jKoNscRI0C20cCzHxvz8kpK1WVvXBo0aV3AFq7KqbogoCLro4mZujrmdiYIp6GVI585FM2fvlpRE9u8fnfG/+krNpvVuGnZ7+OMcPKj2Nxrn2LHo2Btr5IYNlNWrFc2UPW7KGjUoN0fnP7jct4+yRnUVh+6bOdrtXEqL0Bf0SimUc+dG5fxlwTRO083E1KjxV/4alXOs5moO4AA2Z3P2ZV8u4ZLCz2Zxlq5Qg2AKU9iMzSgodD9vy7ZcxmVRsbG8YAp6GbJ7N9mkiXK9OJ2k2022bUsePhyd8b/+OjqCvmGDsk1vHJeL3LYtOvbGGtmmTaCwWgRlp07RGf/u8cqNUly0PW7KWrX0BV1zUv71V1TOXxbkMIc92dNP1F10cSRHRmX87/gdNWp+oqxR40zOJEk2ZVNDQXfQwWQm636WzGT+wyj5McsRwQQ9rEVRIcRFQoi/hBBbhRD3GezTUwixRgixXgjxU/S8/OWL+vVVPfLZs4H//U+1glu1KnrNmXv21K+kaLWqTPdwadzY+DObTV1HosPdu4HNfwW2YiKBP9aC+/eX/iTzvtFv0ZSWBnTqGBizbrcD3bpBJHqNYR+SkIQFWIAZmIErcSUGYzDmYi6mYEqpxyaIURiFDGT4RcxkIAO34BYcw7Gg5YZroqZf7LkvVlgTLrEn7hgpvXcDYAWwDUATAMkA1gJoVWyfygA2AGhY8HPNUONW1Bl6cXJyyJUryU2bVNEsI6Qks7KC7+Pl44+VH91mUzNqTSNr1468lMB//xvoj9e0kpcHyM9X6wilfRrZskW5qNxuslo18q679Ncf5ObNyl9tNEveubN0hpCUPbrrj++wU774IuXrr1NW
rUKpaZROB+V11wWUCDiZ+Yf/BJQV8L7cdHMFV9BOu+7ngoLzOZ/1WE/384ZsGPO0/0QEpXG5AOgCYL7Pz/cDuL/YPrcAeDLUWL7bySDo77yjRMnlUkJ52mnk+vX+++Tnk08/TVapQlosZJ065NSpocfeuJG84w7y8svJl18uWRVFKcnnnlMRMoCqBBnOufX45BN1U9E0tdDauze5b1/k4+zaRVaurL4LX1dS+/aBFSplfj5lXYOFyQb1KYvdHeXChZRt26pqitWqUk6YQJkTvLqg/PTTwEgW7w1j7161T24u5a5dUV+MrQgc5VFDl4mTTm7jNg7jMN3olRZsQbLIZWOlld6oFiedvIN38JyC1xROYTaz43y1ZUNpBX0AgKk+Pw8B8GqxfV4C8BqAHwGsBDA01LgVXdDvvjvQNy2EmnH6TuDuvFN/lvzaa7G1b98+skcPJZYeD1mpEvnGGyUba/78wGgZm41s1izyMsFjxqhF5OLfndtNzpsXuL+cN0/NjIsLrttFuXp10X4//BBYTldzUl4dPKRHSkl5xx3qHJpT+c41J+Wnn0Z2YTEml7lczuVcwRVxDanT4zyeVyjGvrPv03k6SfJH/kgLLQGCfgEvKBzjT/7JYRzGs3gWh3Io27GdX9SLRo3n8tyYl/9NBEor6FfpCPqkYvu8CmAZABeA6gC2ADhVZ6yRAFYAWNGwYcMy/ArKlvnz/WeYxRccZ8xQ+x07ZhzmWK0amZcXG/vy88kWLYpcNr43ks8/j2ysY8fU4q/eNXg85DffRDZey5b6YwHkgw/qHyPbnKk/Sz/t1MJZuuzQXn8fpyO8SozbtlFOnkz5zjuUR49GdlEx5it+xaqsSg89dNPNWqzFRf98RHnnOMpe51GOG0u5fXvc7NvFXazP+vTQU+hqqcZq/Jk/8wRP8AJeYDiD/5OB4Z8zOVM3KsdFFz/mx3G4wrIlmKCHsyi6B0ADn5/rA/hHZ59vSaaTPARgMYA2xQci+SbJDiQ71IhVZaoE4MknVfEtPdLTAW9Xs7/+Mk7FT08HDh+OjX0//QTs2RPYdzQjA3j00cD9t25VjaUbNADatQPee69oHfKpp1Qmqh7Z2cDGjZHZVsugSKHTqf8ZDx82TnHds0cZDwDr1unvk5QErFihxpISnDIZbHEaWLMGeOWV4IYNAADRpImqxvjdd8A1V4OvvAKmpkZyaTFhEzbhalyNIziCVKQiDWnYj/3o67kWe+dMAhYtAl57DTjzDPCXX+JiYwM0wDZsw1t4Cw/jYYzCKGjQcAEuQHVUx4/4Ufc4AaGbqfoJPkE60gPeT0c6PoZxvfuTAiOl924AbAC2A2iMokXR04vt0xLA9wX7agD+BNA62LgV2eVSr57xLDM5WS1qkmoR02iGbrGQsZoIvvGGfnISoEIifdm4Ub3nG1vvcinXSKhrdTjIL79U+61bR95+O3nNNeT06WRmpr5tX3yhxtd7sjl4MHB/+e+/aoFSb/btcRcm+Mg6tfX3SfFQ/vST2ueGEf6LrBahxli3jvLpp9Rn3hBJl0bZuBFltOJRS8jNvDnAnQGC9kxwwqPFrrVJ44B1hbJmKZfSSafujLz4K4Up/JJfBoxxHa8zPOY6XheHqypbUNo4dACXANgMFe3yYMF7owGM9tnnbqhIlz8BjA01ZkUW9EsuMRa56tXJbJ+1m9699fezWpUARkpODvnCC2Tz5mTduuTIkYHRLz/9ZByD3qGD/76XXaZ8/3pivXOnWsQ1utaqVZXb6I03lFvGe1NwuchTTzW+YT30kBrf41E3E7dbubH0kFIq14qeWNeqSVngxJfPPhsYEWMRKvMzP59y61Z9X7xFUJ7fS/8zezLluLGR/5KiyHk8z1DcrvlIZyF3x45SnU9S8g/+wRVcwVzmRnz8hbwwLDEHwRqsoesTX8iFhi6XRGnKEUtKLeix2CqyoC9frj8D1jTy77/99120yNjfrmmBYYxpaSqssHNnsmdP8sMPixYepST79PH3adts6ibiW0dGSvKMMwIXHzVNJS75YpTE5F0LuOOOwAVR7wLwb7+pWbXeU0hyMjk2iBbu26dq1nz2GZmREfz7lr/8ohZBfbM5LYLy1lsLo1hkXp6agTsdKpPT46Zs2oRyyxb1+fTp+tEs3hBFo/DIunWCGxdj7uf9umF/Whr44lid9YIQBX9ymMOZnMlhHMbxHM/1LArLWs7lbMiGdNFFDz2syqqcy7kR2VuXdXXFW1DQRhs9Ba9arMU1XKM7hqTkrbyVGjVaCl4aNd7CW06KMEZT0OPA/PkqYzQpSW2XXEIeOBC43zffqAgTPdEUwn9hNDVVLRr6CrbLpWqvSEn+/LO+uyIpSQmvLwcPkhdfrKJcNE2J/rvvBtpnNANPSVFie/gw2bix/w3M4SCfUTWVOGNG4NNAX3zBJTib/1jrUl7al3LFilJ/33LVKuU+8c0adWmUF/XxczPI3bspP/uMculS//fnzlXH64l2lcrGgt6gfqltLw17uVe1Z8svEkdLHljtAHikcjFbW7UMOlYqU9mGbeimmyBoo41OOjmFU3iQBwsXNf1uHNS4lmvDtrcbu+kKuoceTuEUzuRMLuRC5jF0RMBv/I13F7x+429h21DeMQU9TkhJHjpEpqcb72M0gwXI00/33/f55/UjSlwuJeaPPabvHgGUC0aPI0fIHTuMI2omTNC3LyWlyA+emkpOmkRedBE5ZAi5dGnR8dOm+d9k7sBEpqKYn9qlUf74o/rOcnMpDx8udJWEi3zmmcCwRG/44sKFoY/PylLCrRdv/sD9+i4Xh53y/vsisjMW/ME/2Hmdm7Yc0JYD9vwB3NysmJ2VUihXrQo6zkN8SDcJyEEHJ3CCru/bSiuHcmjYts7n/IAiWxZaWId1ToqQw2hgCnqCcOAAOWWKcpn4JhiNHx/oonE6yQUL/I/v0MF4Jn/VVeQDDxiHEJa0tElWliqp63Ip943LpWbcixbp75+fr+rZeIt77dtXdENwIZVp0BFdAcrTW1GOH68E2J6sCmJNDr8Jr+zYQX9cAcpgvh3fMZYuVU2iUzxKwF0a5YW9KYcPL5qhe58A3C5lc0kyumKA/OknnqjhYJrmc902K2XtWpRPP02p93hYjIZsqDt79sZ4G/m6O7NzRLZO5VRWYiV66KGTTrZlW25l6NBRE4Up6AnAhx8qsdU05eZwOskbblCzeCnVwmHTpkoszzlHLVwWp2tXfbH2+qSdTv0ZustFvvdeyW2XklyyRGWVTptmnJX66afKReN0Knsuvpjcv18t0moa2UP8yCOoZCy8xWfYLo1yyuTwbOx1nv6YSTbKCRPCv9aMDMqZMylffZVy0SIliL6+eatFzeSnTaNMS6M8elT9+cADlHXrqDIA1w2OStmBSJHffUfZ+vSip57bb6MMtQDhg1GKvZNOXstrdRcik5jEMRwTsa3ZzOYqruI2lpMqcAmEKehxZu9eY1fJhx+GP8477+j7yItHxwjhf+MYMSK8GjGl4YcfAl0zNptKYMrPJ3/9lXzwP6uZbnPrCm++kcjXrBFWqJ3
8+GP9RU2ng3LTphJdk3z0UeMs1L59KV0uyuQkJfg2q//MuHo1ymh1NInU7tzcEoUn3sE7dFPwHXRwAzewJmsGZHS66eZ2xi9p6WTEFPQ48+KLxrXHu3QJf5y8PNWUwuUy9pV7XTDeqJU77ohdxqmX9HTjhV23u8h1JKWkbNqU+cXK3WYJG7NEkr6gJydRhtFCSUpJOWyomplaLcpt43RQvvxSia9Lnt3Z+GnCd9aut1mEaoAxYEBhnHsi8wbfYBVWCRBzjRonUD3hbOd29mRPJhW82rBNhVuMTGUqd3FXiUIyywpT0OPMww8bi2/L4IEHAUip/NdjxgSfqXs3TVOx6NFASnLhQnLoUHLQIHLuXHWzCGZLcrIqHuYl74/1PGCpwWPwMAtJPA431+M0pkNnJiyg/Nk+dySZm0v5/vuUfS5U2/vvq/f276f8+muVmv/YoyruvJTp7rJfv+CiHc7mdX88/3ypbIkl7/CdgIVKQcHarK0b153KVB7l0bI3NIakMpXX8lraaadGjVVZlZMZnruvrDEFPc78+KN+Ik9ysiriVVIaNAhP1JOTyX//Ld01SKl8/r4uH7dbRbYYZZ16byi+RbX++IOs6sriVfiYd+F5XoDvKJDP+biAmaJYIwmXRvngA0U25OVR9r7A37Xi0ihPaVgUyeFxUzZrGpWORXLBAuPY9Eg3hz2shclYIyn5C3/hJE7iF/yCOczhKTwlYGbudbVs4ZZ4m1wmnM/zA+L5NWr8gB/E27QATEGPM1KSF17o70dPSlJla/fvL/m4778fXEx9tyuvLJ0f/aef9P33oc5fs6a/y2fzZv1jKuEo51suZL7DoSJNHHbK0aP8Z+dGpWz1ZsUN6vsdW1LkYwV+dLdL3Swcdv0ORqG2FA/lB/EVh+M8zo7sSBdddNBBDz2GvTxBsBIr8Qt+EXU7ZnM2z+bZbMiGHMiB3MANUT9HJGzgBsNyBE3YJK626WEKegKQk6N86aedpmbWY8aUrF54cd5/X41nlG3qu0D5evhRgAGMHm3st/d49N8Xglyzxn8cKdV3UHxfi4Xs3p2Uf/9NuWQJ5aFDATbIK6+MTEC//oryvfcorx1IedttlGvDT4DxO+/evZRvv63cO0ePUnbrFrmop3goZ80q0fmjxTAOC5iFCgrdWjDeGapetcPS8Cgf9YuWsdBCK62sz/oczMFxEffZnK2Ss3ReVlrL3J5QmIJewZFSJfncc09wYW/QoOTnGDnSX9A1pNGOTAKq+YReHP099+iP9ccfqqGHd8bvdpO1aqkEp6DXOXBg+ALqcqlFSY+7KPJEc1JOmlTyL8Frx+HDlD17qJl7qMVRX9dQGIu7R3iEG7iB6QySjVYCcplr2BkomckBCUXJTGYXRrBiHwaHeMiwe5FXPF10cTmXR/W8oVjLtQFrCN5XA5biP02MMAW9FGzerFqgXX65SghKsFLYfuTk+FdFLL4lJZV87O+/VwLcAb9xBc5iNmzMQhLnWS/hJy/t4a+/qgSkypVVqOL06cFdPMePqySrO+9U5QHCCZeW33wTvk87yaYiZIq/73RQRuPRiKTcvp3y++8pb7hB3SzsySrMsnatohuJPVl95i2xaUA60wsX5Tz0UKPGB/hA1JpVpDPdcCaewhQO4RA66WQKU+igg73Zm4epX0kyn/n8ml9zCIdwOIdzAReEVUPlC35hOBP2fdVhHTZiIzZgA97FuwztiCZn8+yAzkoaNb7BEnZ9iSGmoJeQzz9XM09vEStNUz7h4gW2ooWUSuhCdEULSuXKxoLepBTuQCnJsZdu5QnhH0eeI6zMr1+f+//O5Guvkc8+S4bIMC+FDZJy6BAl6hahNqfDPwbcK9p68ePemXJJWzMFsy07u7BkgczOpvzgA8oRIygfeohyW+jkmct5ecDsVaPGp/hU2DYc4RH+wB+4jut0P2/BFroC6qSTJ3iCqUzlCq7gHho3p81nPq/gFX5uExddHMIhIUV9ERfp1oMJ9kpmMhuzMU8w9NNNaTjMw+zDPoU3VBddfJJPJmSxrwoj6FKqBJWPPgrszRltsrL0Kw1aLCoWPNp88QXZqJHydTsc5PXX6zdGDsWYMca+7rlzS2ejHDWSedZAF0O2080Rye/S6VT2axo5cGDk7efCskFKykWL1ILp6FGUP/6oolHOPEPZ43FT3nmncrfoCbrmpJwcn3A0eeyYSsPv1JGyd2/KOXMopeQe7jF0RVRm5ZCzdEnJB/gAHXSwEitRo8bWbM2d3Om33wIuCHAtBLtppDOd0ziNN/Em/pf/5T7u4xzOMSxdu4Aq4WAP9/A1vsZJnORnQx7zWIM1IhJ07w3nJb5Uym8/PPZxH9dybdRdXtGkQgj63r2qWJXLpRbhnE5VSzxY4avSsHChcelYmy26mZcLFwZmktrtZLdukY915IgqIeBbGlcI5TYqLbJtG0MXx0Tc4We/y6XKBASMkZdHOX++ihcvYQanoX0+dxB51136C5c6JWSllJS5sU0kkYcPq9rrvk8ObhflzaP5E39iJVbSFbMkJvE4g9eLeZNvBoishRY2ZuOAm8ESLmEv9mJVVmVd1uWFvJDv8T1m0r/jyF7uZT3WKxzXQQdddAWt6TKcwzmRE+mgg0466Sh4Pc7HC8ddzuWsxEqFFR3DffVir+j9MuLIaq7mRE7kdE7nMR4r0RgVQtA7dQr0Dzsc5I03luAbCYMFC4wF3WqNrqB37qx/HpeLNKosK6XqJrR+vaqzsmJF0Yw4M1P5pQcPJseNI6Olm3LAAP/ytAVbmnDyVrwSYH+7dsWOX79edQ5K8RQ1W77yisKa5dFEHj6sOvT4FtVyaZRPPFG0T34+5X//S1mtalGo4zszom4LScr77tW/wWhO7tn0g+EMvQqrhJyhN2VT3WPddHMRFwXs/zN/ppvuwnO66WZDNuQ+Fq0t9GM/XZ+7XmkA7+s//I9u+J9GjT/z58KxU5nKd/gOx3AMXXQVPjUEG/tqXh2130U8yGUu+7M/NWpMZjJdBa/5NOjcEoRyL+hbthhXEXQ4/DsARYvMTP1wPItF1TaPJkbdg1wu8u23A/f//nvV+s3bWMJbu6VOHSXuJSE/n/zuO1WCd/JkNdMvjly2jFILrAt+THhYBYcD7G/a1OfY/HzK+vX1XSCPPFIyo0MgU1MpX3lFVUwcPIjy55/9Px83LrDOuUujfOvN6NvSrKn+001yEuWzz3IAB+j60J/lsyHHNorQcNPNd/iO3755zGNN1gzY10Ybr+AVhfvYaNMd0zvr1jvXf/gf3ZuAoOBgDta1/QRPcCqn8hE+wpf4kuENQe/GVJ54iS/p/p5cdEW8PlDuBX3JEuNaIcnJsYs8+eQTJZQ2GwtD8apXJ6PdQP3UU/WvzeNR7hhfjBJzvJvbrd9IIxhpaeoJyHtj0TR1M/nhh8B9j7z6PrO0FGZrKcz3uJlVsx67O5brRtTceWfRcXLRIuMGEjWqR/ydBUPm5akmFgMHUg4bpiJRij1SyaNHjRdOa9TQrccujxyhvPcelZnapDHlo49QpqWFZ9PprfTP5XRQvv
wyM5jB4Rxe6Npw083H+FhYi3Id2VFXfPWaTyzlUsOFSRttzGMec5kbUITL+/LQww7sELAoegWv4FW8SvcYEOzDPmF9T5M4iQ46qFErdNs8wkfCOjZcfuJPbMd2tNDCFKZwPMczi1lRPUdxmrO57vfippvvMbJSqOVe0I8fN56h168f20qCGzaQt96qUtyfeUY1rIg2elUULRbVCai4rtxyS9ENRm9zOlWZ20i480794mEpKWpxmFTf8QMPqP2qubN4gWsJz3Wt4oLvJC+7zP8m482C9S03IGfNMhb05FLEUxZD5uYGlgdwuyhHj/Lfz1v7XM8ehz0gsUmmpqqWdb4NqZ0Ota4QxiOinDhRvwGH00Hp0/T1BE9wK7dGJDALuCBgZuugg+fz/IB9f+APhqGDFloKm0wE6yyUylR+zI95KS/l5byccziH+cznDM7Qnb1r1PgaXwv7evZyLydzMidxEndwR9jHhcNSLg2YKTvp5EW8KKrnKU4t1tL9Ph10cBIjy40o94JOkg8+GCh6mkbOnBnRMAmJlOSjjyoxrlRJXdeZZ6omzMXp3t1YzL1bpMW4qlY1fkL4sqDp+mef6af+u1zqieD111Wf0saNVYXH4qHe8u+//cXQdzs7sgYJwZDvvqsfq+7SKH1aKcm//zaeoWvOAJGWr7yi34bO7aIMowayzM6m7NmzKD49OUmdpzTpuz58xa/YlE1ppZVOOjmKo3QjNTKYYbgg2ZVdC/dbx3VMYUphbLa3b+d7fI+buZlTOIUf8kOmUoViHeZhtmRLCgq/MZOYxGZsxjSG9yQTa3qwh+61a9T4B/+I2XkHc7DuU4+TzoizcSuEoEupfLsNGyo3S+vWKtSvInH8uGolt3Gj8T5jxwY2dy4usFOnRnZeIxeOx1NUr71bN/19NC38kgLyhhH6Putivu3SYNjowiIoizVWlb16BS5Uak7KW28NHPeC8w0jfOTgQeHZlp9P+e23qgzBhIejUkCsOOlMD9mPczqnU6NWKL5JTKKHngD3zC7u4jiOY2d25iAO4jzO45k8k4KCFlropLNwYe9qXq27qFmFVXiEOgsyEZDPfKYzPSox4UbuJhddfJs6C1YRspIreSWvZDM2Y1/25VKqScQ2bmMlVvJbY9CocRDD+7fjS4UQ9FiRl6dm+X36qEzHadNis8gaLXbsMF5EtVrJ2rWVTzwS+vbVj113OIpm2s2bG99Ewl3TlPn5lP+bSNmwgRLy7t38Zs3RQJ7X01jQb/PvrCMPH6Y891xlS+VKasZ+1QDVXzQ3V/nhH32Ucvp0FY2jN67NSnnH7ZHZmJ9PuXgx5UcfFYq6lJJy4ULKEddTDh+msmKj7EuUlPyO33EER/AyXsbu7M6zeBbHcExAzHpxtnKr4eKrRs0wQsVFl2GiUyhymcsH+AA99NBKK+uwDqdzeonG8tKETXTt9NDDb/hNqcb29kv13igFBTVqnEVVw2c7t3MYh7Eu67IlW3IyJ5coE9gUdAPy84saRvjOcM85J7FFfdkylV6flFQkxDab8vMXC7EOi40blb/c1zfvcimfuZdRo/R99263io5JFOS0afouF7eLcvFi/WM2bVICWpACLPfvV1EpXp+/x602PReN5oyo6Jfctk0tqHrcRb1Lr+hPOeS6QL//lVdG3CzbiHzm8ypeVbiYKSjooosjOCKsme/lvFxXCL2CbrSImsIU/sJfSmTzDbxBNxGqNKI+mZMNa7+XpqmFpDQsQ1yN1UI+NUWCKegGfPON/mzX5VILlYnOP/8o/3V6ugqzLA3bt5M33UQ2a6Z6l372mf/nO3Yo/77vTN5uJzt2jE1GaEmROTlq5l9cHIcPC3vGKy+/XL/olncW73QoIY+wI5KUkvLU5qqjku+49mT9ujNuF2Vp03sLmMu5hhmeek0simNUXtbrX6/O6rqfuegqUdblfu43jM2vwRr8hyVr7ycpeTtvp4MOpjCFHnrYkA1LXeUxWLavm+4SP6XoYQq6ASNGBIq5d7vwwnhbl3j89VfRE021auT48ZG7d8oCmZND+c4M5dqxWpQ416tL+XHoFXSZlaUvrl6Bnfc15aRJlK+/Trl3b2R2/fZb0aJouFv//iX9Gvzoz/6Ggnw9rw95fLAaLHba+Spf9XM3eMX8Fb5i/H0UvPT4gT8YZs96z9mJnbiVW0v0fezjPn7Oz7mES6Limz/MwwHFvbwvJ51R7bsaTNBtOIn55x/jz5KTy86OYPzxB/DJJ0B+PnDFFUCHDvGz5dRTgc8/j9/5jeDBg8DcuUBWFnDRRRDNm4PvvAscOABIqbZ//gFGjAA1F0TfvsaD5eaqe7oeFguQUglizJiSGbp/vxojEvLzSnauYuQit0SfeRmEQZiGabr7dkd33IJb0Bmd8SgexWqsRiM0wgN4AP/BfwL234EduA23YT7mAwD+g/9gEiahARoU7tMADZCDHEN7spGNFViBruiKHdgBJ5whr8GX2qiNfugX0THBqIqqOBtnYwmWIB/5he8LCDRHczRG46idKyhGSh/rrSxn6JmZqvTtGWeQrVqRTz9NnjihYqWNZughqp2WCffdp0IZrVYVl+7tDyolmZurImJ++KH07pbyjHz/vaKOQl53yOBB+jHfApRnnhF6zDNaG8+YO3agXL26ZLb+84+fH/7PVuC04eC8i8CcJIMngij9Q/yQHxq6XL7iVyGPP8IjbMEWAf7n2qxdGCo5giNCRrQc4iFWZ3U/n7uFFtZkzYDaJt3YLWg5AK87I9LEnFixi7tYj/UKn2bcdLMGa/Av/hXV8+Bkdrnk5pJnn+2fmORwqOxMh0NfzG02soTNbaLG0qX64YQul7ohVaumFjJTUlR44fvvx89WmZ2tsihjmeGld96dOymdOsJts+rWnPEmDYUc9+efVeSL0RgpHspQ3Tj0xqXk2+/0YKMdgiIPFPmgPQP0HAdrHUriug5OfzG/sHfUioblMpe92Csgw/NSXhp2pEUOc/gxP+atvJXjOT4gnj2ZyWzFVkEXF5/m04bp/S/zZb99D/EQu7EbnXQaLrqC4IN8sFTfTTTJZjZnciYf5sN8l+8yg2EU+o+QUgs6gIsA/AVgK4D7dD7vCeA4gDUF24RQY5aVoM+apb/w6Z356gm6w6Gf1FOWjBplXAZXz26nU4Vf/vlnbDNnfZHp6ZQ33aRmnfZkyvr1KGd+VDYnJymfejLyVnBNGhtfy969hX1I5bp1lJ076Yt6clJACGQ4PM2nqUn90D9IsF5GVeZd1pey738oZ86MagXIPObxG37DG3kju7ALL+ElnMmZhZmhkTKKo3TrvXjo4VzONTzuIl5kKMzeWjLF2cIt7MZuAUlL3lnwu3y3RNdQXimVoAOwAtgGoAmAZABrAbQqtk9PAF+FGst3KytBHzRIXxQBlSFZXBwtlsAqgSUhN1ctIpa0CfR11xnbbST0NpuawTdsqOrGxxp5UZ/AUD6XRvn557E/OUl5152RiblLo5z6lv8YaWmUw4aqLFbNSVm9GuWbqgGGHD0qqOslEtKZbhjH7SuGP/LHqH0/XnZwB
0/hKfRID7VcB505NqbkarRIC5OYxAEcwH/5b+iBfGjFVobXcR/vMzzuZt5sWMXxLhrXeP6dvwd8f15XTSxmwYlMMEEPZ4WmE4CtJLeTzAEwE8BlpXbelxGVKxuvQ51zDtCgAeDxAFar+rNmTbUIWRqmT1fjtG8PNGwInH++Wg+LhCuuANzuwPdtNuM1u7w8ID0d2LUL6N078nNGAjduBBYvVguRvmRkAPffH7sT+3JBb/0vSQ+bDRh/NzDiBv/3B1wJzJoFZGcDmZnA4cPAuHHghx8CTZsBTp3FNiGAZs0iMvUv/AVbiBgEAYGDOBjRuOHQD/2wm7uRKlKRYctCZlIeTlgzIIVELnIxF3PRGZ2RhazQgxVQH/V133fCafgZANyKW2GHPeD9JCRhNEYbHtcBHfAxPkYd1IEGDXbY0R7tsRRLI14QrdAYKb13AzAAwFSfn4cAeLXYPj0BHIaavX8D4HSDsUYCWAFgRcOGDcvkbvbbb8a+6O++UzPpuXOVX3rWrNInFH31VeD5bDaVCBSJKyQvj+zRw38shyN42n9xt9FT4Xcvixj58cdlUmwrqA35+SrT02gB1MdvLl94PvD4v/4yrufSpDHlgQP6YYYujfL33yOydTd3B22QDKpQvN3cHa2vhyS5gRtCPhmUZHHxO36nO66bbr8eoLnM5af8lFfzag7lUC7iIs7kTLrpZorPK5ibxpd85nMLt3AvIwsZrUiglC6Xq3QEfVKxfVIAuAv+fgmALaHGLcsol8cfVz7mpCQlrk6nqokSjsAeOqR6ZOrVB9ejfXt9gXW79cvRBiM7WzVS7tRJReh465+Huw0dGtn5IkEuX27csLlB2XVKl5mZlE8/pSohVq6knxBk0BhaTp5sfBOwWtQ+S5ZQ1i5oylEpRf354Qfh25eVRfnqq5Qd2rPH724m5ekv7ml5Do5khFXVwuBn/syU/PD6eN7KwBo2wXiBLxQm6KQwhVVZlT/xp8LPc5jDHuwRkJ06hmOYwQx+x++4gAuiVrp2IzfyKl7FmqzJFmzBKZwStSbbiURpBb0LgPk+P98P4P4Qx+wEUD3YPmWdWLR5syp/+9RT4fUjzcwkhwxR2ZApKerPG24I3cDZqEmz06nEOVzy89VTQ/XqymeekmK8iKu3aRr58svBz7F2LfnWW6qiYqRNg6SUKryvuIC6tKhVEIwUmZNDefFFRTcae7KKgpkxI3BfKSlbtQw+q69SRbWNe/YZyl+XquiXrPDFR+bmUnY9p7Ag2b81wTZrBF2poJYGWvJASy7YcAf48l3JzDt8MJpfB0lVjteZH/zJAFRlXJ9n4FNMKI7yKL/kl/ye3xcusEpK/s2/+QJf0J3Fa9S4nMujep3ruZ5uuv2iYTRqYSVNlTdKK+g2ANsBNEbRoujpxfapDUAU/L0TgF3en422RMgUDcbw4YE12J1OVRs9GB06RGeGfsstwRtZ+G7F3TBCqLBGo8YfWVmq65Kmqc3jUTH568LITs7NVTH8Ml9Srl1L2aG9cnlUSlEz4QceKPPwRV+klKpx9IMPUD73XED/0ML9xoyJbDF1wJWB55k3T5UJ6HUe5WuvBjS7kLNmBTzF5AtweUfww4HgmjPL5kb4eO6jdKUFF3QXXdzPEq7g+/Ajf2RjNg7qXrLQEnQBNBibuZmLuIgH6X/z68d+ulEwDjq4hVtKfV2JRKkEXR2PSwBshop2ebDgvdEARhf8fQyA9QVivwzAOaHGTGRBP3rUOEbd6QzemPrrrwOFOCmJbNkyfB/6gQPG59ez5847yTZt1HmSklQtlmCVWb0JS8XHqlfPuC5LVhY5Zow6rod1MbfamjM3ya5mwR3aK/E6diy8C4wxMjWVcupUyvHjVaXEYr8wuWOHse/caNOclH8U1cuWt9/uL9YujbJlC8oTRe3E5DXXRHaOCQ/H5vug5HsLr2fLjYIpx8Amm0FHBphyDEzJdrAaqwWNrjnIg3yYD7Md27EP+/BLfqmbLr+Jm8Ly1wsK3smidlYHeCDkzWQ/97MLu9BJJyuxEh108EJeyHEcx5f5smGNd72yuFu4hSM5ku3YjoM4iKu4KsJvNL6UWtBjsSWyoP/5p34/Ue9iaqgWdDNmqBmyy6VcNb17Rxa+uHChccu94pvHU9RF6cgRVVM9FFWqGI/100/6x/Tvr8T8NGxkKorVNLdaKGvVDLsdWyyRGzeq0EOv2HrcyratRTU/5PTpxv7/YII+aRLlnDmUgwYZ++qferLoPDfeYJycVHzzuCnnzYvtdzN3ruqwVCmFqee25XfLnuRiLg6aCLSP+1iLtWin3U8kx3N8wL4jOVI3JFHP5fIrf+VaruVZPIvJTKaddp7JM7mSK3Xt6MAOhlmjwZ4G3HRzDucUjrOMy+iiqzCG3tu441N+WvovuIwwBT1CTpwwbnnncoWXap+bS27dSh4sgVt048bg7paUFOXCqVs3eLz5wYNqVl27ttr37ruV4Bu1sEtJIWfPDhxn27aiJ4a3MILZsAYKkttFWayjtdy1Sy08vvkm5b+RxTmXFNmmTaCIWi1K5KtXU/1Ar7028iJZHrc6NtSNoFXLIlt++UW/y5Gev/6stlErlRtNRnO0bgKRg46A9nBn8IyQYm6hhcM4jKM5WtdF4qEnIIJlHdeFNfM3unlksug/bGu21t2vCquUqnxuWWIKegm4/fZAUdU0/xrhsaRDh0Dh9UbnLFlCrlwZvGztiRPkKaf4R8bY7cr107GjvqA7HKReAcG5c5XYA+RKtDUWptuLGj3Ixx9XM1aXpjang/K18PtKloSgbeWKz6T13k+yqexQvVZ5yUmBZW/1tmK1YuS996jZvc2qjndpqtbMRRep81RKUf78E5F1fi8rarO2rgA66eTr9Pf5N2CDkAJ7Ps9nC7YwnMnbaQ9I5f+aXxu6VEK9urN74ThHedRwlu+hhyu4oky+09ISTNBP6mqLwZg4UVVcfP119bPFAowdCzz2WNmc/8svgYsvBrZsUUlPOTnApZcCzz4bXiXIadOAgwfVcV6ys4Hdu4Fx44D161UOkBdNA66/HqhbN3CsU05R1R4BYANa4Uz+AauQ/jtpGnDaqQAALl4MPPtMYNLR3ePB7t0hWrcO4xsoAVlZ6ssKZz89kpNVeUtZcG1Wq7oui0V9WVLqH+fF6QSuH+H3lnjmWfC6IcDs2YDMB/pfAdG2bWgbEwS9JCAAsMACBxx+7x3CoaBjadDQAz3wHJ7zq0joSzaysRIr/d47A2cgE5kRWF1ENVQr/HuwxC4JaXit5QojpY/1lugzdC9ZWeTu3YEJR8ePqzDIDh1UAtBHH8Wm0cPq1ap3aqS1oC64wNhlc+215PLl5Pnnq5l306aqL2iwRdu2bdUTQ1usCvShC1BWqVy4KCoHXqPvO7ZZKcfeUdKvIiQyP5+yTu3IXCnBtuQk1U1o167wXDJdz4ksrDE7m/Krryg/+IByt3FC0U/8ie3YjhZamMIUjuf4Esduyy1bKG+6kfL0VpR9+1IaLZoU8Bgf0/VRO+jgIR7y27cmaxrOlO208z2+x8EcHHRG
ncxkXf98ZVaOeHbuoouz6e9DPI/n6T4dNGKjqNRFLwtgulyiy7FjSgR9I1FcLlU3JlEYNEi/5ovVShbrlRwW//5bVLVyoPY594sazEh2M9+lUZ52KuWaNYX7yl69jIXvusERn1seP075yiuUA66kvPtuvwXOgH2/+kq5NcJxj4SzpXgof/9dfxHUu/W7VPUfjaCYlly6lLJqlaJkJaeD8rYxASGfS7k0wH/spJN92Cfy73HFCnXjsfmsgbi0wto1emQwg53ZudDlkcxkOunk+wws73k/79cVf40aj/IoSfIu3qXrk/cV4eK+eZK8mldHJOZuutmXfQNav+3kTtZircJkJyedTGEKf2dkmb/xxBT0KPP44/phhS6XKjWQCPzyi/7CqtOponiMyM8n588nb75ZhUMWL/29ZYuKhDl8II/yjz8oN28OECH50kv6i4EeN+UH4WdZkgULq7VrFY2XnKT+/sUXxsesWkV5zdWUrU+n7NIldGmAYJvVouqY6/nVBVTYZoRZWTItTQl58bHcroAkqB7soStYGjWu4RqDMxict3Mn/WtwuwJCO33JYx6/5Je8g3fwCT5h2FA6k5k8l+cGCHYVVuFqriapShEYtbOrwRqG4ZOruTqshVErrWzJlvyMnxlmiaYxjW/zbd7KW/kyX/YrVVAeMAU9yrRuHSiUgKrUOGFCycddvVolNJ17rmrvZvQUvm8fOW6cquneubOqha7nLvnvf9VCqKapm43DETxbNTdXJRx5yw17m2o8+mhk1yFPnKBs1Mi/tK3DrjJLIyyWIy+7zH9G6d0qpYTl3pBZWZSDrlU3giSb2iwi/Bm8RVD++Sfl6NGBNwaHnfLxxyL7ckjK9983jrJpc6bfvilMMRT0qZwa/jmzs42vuVIKZaR1KQx4mA/rzsCrszqzqX73b/NtOuigm2666GIyk3kX7wrp8viIH4UUdFA1Za7ImIIeZYyyQZOSVLp+SXj/ff0ZdaVKZIMGqj767t0qCqVGDf/sUJdLlSXQY88eld7/9tsqYSkY06apsfRm9eFkkfoiDx2iHDeWslpVJeyak7JtW8pvvy3aZ9cuysWLKQ2C9KWUxq6OSimUCxcan19KlbLvcfsLmUUoe2pUVzNTb+Pn6tWMZ+ijR6s0/jvHqacDzancJI8/5vd0so3b+Cyf5RN8Imiyipw40biOe53afvs2ZVNd0fLQw3kMP25d5uUZ90pN8VAuW+a3/2Zu5hN8gg/yQS7jsrD8y/fxPkOR9dDDL1j0VHWIh/gu3+U0TuM+BtbZ0WMWZwXtbep9JbFsisPFC1PQo8ybb+oLn8OhYraNSE9XSUcPPaRa3HknqxkZ+k04fDebTSUrXXedfhy500luKF3jcnbtqn9uq1XZHCny7bf13R0DriyqpV65EqXDQTlsWIDrQkqpPzv3Cvp33xmf+9VJwWPAnQ7K2bMpv/mGcv9+ZZPRvhdcUDRuVpZqhFHM1omcSAcdTGISrbRSo8YbeIOuEMpff9WPZ7daKK+52m/fyZwc4GoQFKzN2hHHTcsBV+rfIOvW8YuBf47P0UEHbbQVFtQaxEFBC139wB90W9z5PlG8xbcMjw+HmZwZlqCfw3NKdZ5ExxT0KON1TbhcauHRW8Hxf/8zPmbDBiXIXuF2u8n69dWse+HCojjvUKJulHBkt5MvvVS66zJ68hBCJSVFgszJUWJtJJLFhVpzUo4dGzjOJRfruwo8bsoM/cYGUkrKmjWCu1KSbJSPP150jJHf3+mgfCS4H83IL+zK1/yyFP3sO79X4M3O46YsdleWlLydtxdWNfTQw4ZsyA2M/O4t9++nbNKkyN3j0gJm5+u5Xv9a6OJMzjQc+xpeE1RknXTyTwZZvAmDIzwSsgSxNwu1ImMKegyQkly0SC0cPvKI6k4UjJYtA6NOrFYVOrhoUXiC7hV1vfddLuVaKQ3/+59+hqzLpXqcRoJcty68LEnfzaUF+Njltm3KHeIVP5tV7TfTWFxkVlZoH7nVQvlw0WOHPHZM3QSKu2eqVA6Z5Xof76MtXz9R5sIjnYxtfOghypo11fVc1MevVkxx9nEfP+fnXMIlpQqvkzk5qu7OPXdTvv46ZbEKbg/wAcMolF7sZThuH/YxFFkrrbyMl5XYZl/e4Bt00FGYZWqjjVZa6aGH5/P8chWtUlJMQY8zW7YYz6yTk1WKvlF9leJbrVrG0SuHDoW2JRjp6aruuu/43nDMSAsoyt27jX22wdwgOo5+eeiQqnl+4YWUo0ZShnDoSymNfeK+N49iTSrkjh1KWJNs6sZxXk/KTZtCXuutvNVQzM5eaY9r9clIuYN3GF5LJ+rfnEhyCqcYRqEM4ADd3qUneIJTOIU38kY+w2e4jUH8lQXcy3uZzGRaCl422vgcnyvVNZc3TEGPM6tXGxf7sttVjPe334buRuStcX7hhUXuHodDifknn0TH1vR08rXXVKRNnz7kp5+WPGFKtjkzMkGvWqWwSXNpkS++aPyE4HZRXm9cJ1vm5EQUjfPNwffpPqHjZkgHX7g3KWjcfKKxkAt1feFOOvkCXzA8LoMZbMmWfi4RBx1sz/a6Yv423w54EkhiEh+jcdTQd/zO0LbSunPKE6agx5ncXOMZeNOmRbPfHTvIVq2UUHubWVgsRb7zK65QremkVLHgjz6q/OY6zXj8OHRIxceHinKJNoa1VWzWwNm7S6N86X/RO7eUlI9MUON6k2mqV1ez/M8+i+qsOW/vbvb6XlDzqTnuyACbbQaP13RQBlspTzAkJS/hJX6zbQcdPJWn8gSD15s5zuMczuGFvv7BHMzjDCz/+Tt/96ve6Pty0MFlXKYzOtmf/XWPsdJa4vrq5RFT0KPIsWOqgmGVKmph8+qrjdPyfcvZfvSRvyvDG+PtjbzLzCS7dy+aedvtarvjDvK558gVJagblJ5O9uqlbg5OpxrvmmtUVE1ZIY8do+zRXbkxHA7lk375ZdWAIsWjwveqV1OZoDFwTcj0dMr16ylL648KQXbbVnz1ZrDtKrDlevDRh8GjlUDZvFm5crmQqg/o23ybndmZbdiG/+V/Q4o5Sd7O2+miq9C/7aKLPdgjYIY+hEN0hdn7uoH6Mbjd2d3wmJZsyUEcxA/4QWG8e6Ss5mrexts4hEP4CT9J2OqLpqBHiZwcNYP2rWBosajoFd91s5UrVe2T4g0nliwh+/YlmzdX9V+GDyeffFKFOk6YoJ996naTJSkz/u+/+gutdrtqrVfWyBMnKHfu9EuPl3l5SvANfDrr15MffED+/HPkPny/c2dlUX70EeVdd1JOeoXyyBHKPXsoH3uMcuhQyilTKFNTwxtrzx7Ku+6i7NiBcsAAyl9+Ue8bhT0+d3L4d1dwhWG7ueIJUEbZr97XFbxC9xwTOdEwy9Tbes5NN8/kmUxleL9PL8/xOWrU/MbpyI7MYBnOfsLEFPQoMWuWfry43U7ef7/a5++/A/3lvi3hcnPJiy8uimNPTlazZ6MoF4+nZP7xPn30x/PaG27T63iQkaHsdzrV9Vd1ZfG2OrN45JH/qb6eOuouZ0xX/T+
tFsqGDSinvqUaSO/Zo7JWfUP1NKfK8vSm87tdqqhXkAJZZEHzjMqVipKCLEKNN3SI8brAKQ3L3QzdiKM8GlCQy8vdvNuvn6fv61ye67fvY3zMcF8LLXyP7+me4wRPsDEbM5nJQW8IDjo4geGnbO/gDt1wSCedfJbPhv8FlRGmoEeJW24xFskqVdRMvGtX/cVN74Lm66+H3yvUG2Xy2GPkq6+SH34Y3mw9Pd04vNE7ZrB6LvFm9Oiip5UzsJYHUI3HhIeZlmQlvl3O9ptRy5d1Ysi94YcWEV7XIJuV8tK+Qe2SF/YOvwORd7MnU5aky0kCsYVb2JVdmcQkJjOZrdk6wM89nuN1G1aAgYk++7nfcF9BEdAv1JcjPMJ7eS8bsRFrsIZhiOUpPCXs65vIiYY+/RZsEdF3VRaYgh4lnnxSzW6DCbBehUPvdt11qvdnuGLudenY7UrgPJ7ANnHHjqkbxaBBSvj37iUPHw4eMWO3qwYYiUheXlEsvEA+d6Ee81BMJB12yptvJhlGAlMkW5LNMMomaNZqKEHXWbSQGzdSXn6ZynitW5fysUcjrnNTFpzgCVZn9YAZtZtuvzDDZVym63IRFBzCIX6x84d4yHCWncIUw0XR4rzKVw1dMA3YIOxrfJ7PGza+aM7m4X9ZZYQp6FFi1y7j1nShNodD3RBOPTX8Y5KT9WfaKSlqFr51K1m9etGM325Xs+8ffySbNTMe97bb4v1NGpOWVnTNXbCEx+DRF0pNI1mQeBRpf9Bgs3SDErhSSuOKi8G2y/oFjrVlixJy39m+5qTsfUHCuWde42u6oYJJTOIYjvHb9ybepDvT1ajxARa1+spilmHGp5NObmeIpr0F7ORO3XGK11Tfzu2cy7lcxVWGza31bgyRum7KClPQo8jnnyvR9HiCuzX0Ztrbtilfe7BZvs2mSgJ07qyKcBn51T/+mDzvPDVu8c9r1yYXLNC/+VxyiZoFJypSko0aKVsvxtc8Ap0ys15XSn6+WlQtidDqjXdez+C2DR4UWbKUx02ps1ghh1ynn8nqdlEGaxIbB27gDbrCC4Jd2MVvX0lp2IbOQYdfmdphHBYg/jbaAsYMxaN8lBq1QheORo2N2ZhHeITZzOYADigsm+Cii23ZVrcY2HiO97txadR4Kk/VDbuMN6agR5nly5WoRyLoycmqYuLhw0qwjfZLSSG9pTWqVdPfx+kkX3nF+Pxut4q0WbWK7N9f9RY991yVvFSc3Fzlwlm4sGzDGYPx2WfqqaMqDjEdBv0/O3Yo3F9eNSByUU+yFYmqw67cNiGqm8kDByibNA7e7MI7Xr26lHv26I9Tv57+cclJlM8/H82vstQ8z+d1Z69WWnk9/ZOzspltuNhZiZW4kEXVMdOYxl7sRSed9NBDF11szdZhV1705Uf+yIEcyPN5Pl/hK4URLmM5NsB2G23syI664yzgAg7gAJ7P8/kaX2MaSxBeVgaYgh5lWrUKX8iLC3F+voowqVnTWNC9rtQrr9SfgTudSrCNBN3jUTedUCxcSFatqs6ZkqJuBO++G9vvLly++UatNzxpfZhpFh+XijeyZMmSwn3l8eOU551X0JA6DPeL00H58MOUw4dRdu9Gef99lP/8E5ZdMiuL8rJ+xoujrVpSfvutYSgmScq2bfSPdbsop08v5TcXXQ7yoG6FQ40a19G/BEM+8w192k46eRfv4mzO9gsFXMd1/IgfhV2iN1zymW9Y/VGjxk0MXdIhUTEFPYps3x7cZeJwGC+MWiwqgYhUMemaVpQRKoT62VdQN21S4uwr6ppW1OquUyf981Stqmbewdi7V78EsKaVLIkpVkgpKWfOpGx3lupc1O9SylX6tcblhg2Uc+ZQTnhYiWOwRcwRI0pu0yef6Au620X5+eehj58xQ9/v73FTJuBq9W/8jY3YiBo1uulmDdbgV/xKd9/RHB2yIqKNNr7Dd4Kecwu3cCqn8lN+WqJY8HSm6/YO9T4t/MDoNPSIB6agR4m0NOMSs17BfvZZY8E/7TT/8TZsIIcOVTP+fv1U27jibNqkslFr1FAJSZMmFfnA165VM2tvopPVqgQ5SHe2Qp54Qt9Oi4UcHHnbz4RDpqerMEM9X7XTQVnCTiTy4EGV4ap3kxh4TViLmlJKyltuVna4XWq8SimUixaVyKZIOMETfIpPsQ3bsBM7cSqnhpURKSm5gRu4hmsC+nT6ks50nsfzqBW8jMITk5ikWxkxn/kczuF00EEXXfTQwxSm8CcGb2atZ6+RP99Oe9DQyETHFPQoMXp08Nl5kyZqUe/BBwNjzZ1OfR92admzh7zvPrJnT3LkSJVdGQ4jRhhfR5fI1qUSFrlqlX6BLreLMlQBHKMxn39ev2mH1UI5wrjgl+5YO3ZQTp+unirKYAEjlak8laf6zaA1aryEl0S94/0qrlJlhYM0hB7EwK7qRlUbPfRE7NOeyZkBY2nUAqJzyhumoIdJXl7RYmJxF6iUwUMWHQ5V19y77/TpKkTR4yHPOUeFEsYbuXEj5XPPUU6cyI+f3aHrcklOJu+5J96WUrV8mzaNsus5lB3aq+qJJaiBID/6qGgGnOJRNc9L8cuQI0YYu3ES/E74Al/Q9XG76eb3/D7q5/uYHxvGd4Pg2Tw74JgWbKG7r4cefsDIGoyT5BzOYXM2p6BgDdbgM3wmaOel8kCpBR3ARQD+ArAVwH1B9usIIB/AgFBjJpqgf/edWqh0u9VWqxbp2zc3N9fYN26zlb65RCyRUqp+mJpTRVI47Mx3OvhYpecLffheP36lSmrWH1d78/NVpyLf2bXmpGx9etDu9IbjZWWp3qW//lrq8rzytdf0/d/JSZS3xWbmJ/fvVze3t98O2WwjGJ3YyVBc7+Ad0TO4gM3cHFTQ9Sok1mIt3X3ttPMVvlJiW6L9BBJPSiXoAKwAtgFoAiAZwFoArQz2+wHAvPIm6Fu36qfju1yqNouX1q2NZ+eHDxuPHw82blT1UGw28tLkb5lhCxShPKeT485fQ5tN+c579Sp9X9JoIL/9tqj2iu/m0ihfnRRf244fVw2mi/vmPe6YlMmVr71W5Gt3u9Tf/zexRGP1Yi9dsbTRxgf5YJQtV/RjP91zatS4m4G1c67m1bqhjxq1oI23TyZKK+hdAMz3+fl+APfr7DcWwK0AZpQ3QR83Tj9VPjlZ+ae9fP99oNtF05TPPFGQUi2KalrRE8Wn6K/vIrBZKceOZX5+YiUbyZtvNnZrnHtu6AFibd+WLZRdu6pZuT2Z8ozWfn05o3ae1av1/fUujTKcuNRifMyPDRtElKRHaThkM5ujOdov4uR0nm7YkGITN9FDj99iqpNOXspLS3R+Sclt3MZN3FTuXS1eggm6BaGpB2C3z897Ct4rRAhRD0B/AFOCDSSEGCmEWCGEWHHw4MEwTl02bNwI5OYGvp+TA2zaVPRzr17AwoXAeecBVaoALVsCU6YATzwRPVsOHgTGjQNOOQU49VTgueeA7Ozwjt21S9l05ZVARoa65QBAZRzTPyA/H3/9dgyZmYDVGnr81auBSZOAjz5S48cMl8vYIJ
crhicOD9GsGcQvvwD7DwB79kL8sQ6ic+fon+itN9U/wuJkZQFvBP2vpssADMBluAwaNAgIJCEJDjjwGB5DS7SMgsGBJCMZkzEZuchFFrKQi1z8iT9xOk7X3f80nIblWI7LcBkqozIaoAEmYAJmY3bE516LtWiFVmiN1miP9miIhliIhaW9pMTGSOlZNPO+CsBUn5+HAJhUbJ9PAJxd8PcZKGcz9Ece0a9F7nSS//1v2dlx5IjKIvWtt+50qizPUG3gpFThj74+ce92K15hKgKjPY7DzWvts9mggX899+Lk5JCXXqpm/Q6HWmNISdEPs4wGcs0a/Zmp20X56aexOan33Nu3Uz76COXoUSrePCewfVrIMXJzKT/9lPK66yhvuSWgd2nY41x5hfGTyn8uKdmYlFzGZXyQD/IJPsFH+SgbsiE1ajybZ/NH/liicRONwzzMSqyk67qJ1dNIWYFYu1wA7ACws2BLA3AAwOXBxk0kQf/3X7UY6LvoKYQqiRvtyqdffKESgmrWJC+4QCUYeXniCeMmF/PmBR935Ur9RCGA1JDGTWjOdFGURp8GJ5eiM63Ipc0WGHuek0Pu3q3KATz1lH6ET6VKRYlSoZBpaSpE75FHKGfPDimU8rFHlagn2VQSj9tFed3gmBavkjNnqnN665173JRnnhFRso/MzKQ8p0vRwqnVosZ8/PHI7Zk6VX8B1qVRvvZqxOMV5wJeECB4Djr4LWMQX1vGTORE3fBHK628iTfF27xSUVpBtwHYDqAxihZFTw+yf7mboZPkunWqIJa3y9A550R/gfDll/Xj07/5Rn0eLGnp9tuDj/3VV0pgjY5PwTE+Zn2Emy3N+Sda8k48Tzsy/ewg1Uz/qadUuKWmqfeNbhQej6q7Egq5di1l1SrM97iZL8ATFg+3Wpqwb8d/g87y5fr1lBMmUN57L+WyZWGLuczKovz6ayXQ4ab0Hzum/1TgsFPeXVS5Tx45ohYq77qT8sMPKbOy/Md56SX9cTQn5ebNYdlSOFZGBuWpzYtuMALq740bh91hyYi3+FaA2HlfkdYAl5T8lt/ycl7O7uzOF/liWC3rYskIjjC8Pr1wyfJEqQRdHY9LAGyGinZ5sOC90QBG6+xbLgXdy/HjsakVnpGh3+3INyGpd2/9z5OSVEPoYOzebZz0ZLEosX/8cePOSHa7GufZZ40FXC8KaNq04HZJKSmbNAkQuCzY+Bn60en0r+9eWuTixZSVKxfFnTvslPfdp4R4+nTVu1TnTi0//NA4A7RWTbXP8uWUKSlF4ZQet+qS5HPTkG3O1B8jOYmyBP47eeSICjmtU1uVPrj99oj7o67net7O29mf/fkaX2MqU1mP9QwFD0RAH9BgFK9U6KSzsOJhvHiFr+jO0G208WbeHDe7okGpBT0WW6IKeqxYtsxYTJOTlf987lx9MXU6VeldPQ4fVu4Sr9+9eDGv5GRVhz07W92o9FwnFouqypiXR1auHJ6Ye8M1t24Nft3yjz8M65VnIYl2ZDJa/xTksWP64Y72ZLV53CrsT3NSXn+9XwEtOWOG/rEClFWrqNh4vSqJSTa/muey9en6YyTZKJ98MjoXGgHv8T066SyMMnHRxQZsEDQ+PIlJYcdtb+Zm3dotdtp5H+8LPUCMOMqjrMIqAaUHXHRxC7fEza5oYAp6ArBpk3HrueRkNYOXUjWfcDrVjNnpVKI5dar+mHl5gU2rfUW6detA3/u0acoOr/A7HKpJxs6d5KFDwUsb+K4xaBp5fRiZ7nLpUjVb1hG5bNjoxglaraX/fskgPme9ze2ifKeoQJT85x/9ErxJNsoR11P+/rux4CcnFa4JyP/+19jlsm6dkekx4TiP62aG2mjTraDofV3Fq8I+R7D2bY3ZOIZXF5oN3MD2bE877XTQwWZsxp/5c1xtigbBBN0WtXAZk6CcdhrQuLEKkZSy6P2kJKBfP+DoUWDGDBWl9vjj6jOXC+jfH6hdW3/Mb78Fdu8OjGyzWICBA4EPPgg85vrrgTZtgFdfBXbuBM4/Hxg1CqheHcjLU/bohUkmJQEXXgj8/jtQtSowdixw001hXPhZZwGSuh9tQgukwYMqKWGMEw7796uQvnBITwcmvQIMHQoAEHXqgA8+BDzz36KYTIcDSEkBHn8C2LFDfbF6SFn05Y0ZA3zwvtrfO47LBYy4AaJ161JeYGQsxELYdP6L5yEPaUjTPcYDD2ZgRtjnsMEGAWH4WTxpiZZYgRXYj/3IRS7qoZ6hrRUGI6WP9ZaIM/SVK5X7omNHtQi5Y0d0x9+yhaxTRy0m2u3Kp966NfnJJ2rG650du93Krx4qwubxx43LETQu4eSofXv98Vwu9QRREuTbbzPfpTGvYLaaAwtTobErfqbTqbo4RQP5ww/Gs2i9rWmTomOzsihTUykXLqTs14+ycycVkVPwS5CZmZQeAx+7T7MNsmAx8403KHv3phwwQGW+xqG13Kf8NOhMvPirPdv7dRUKh9/5u+4M3Uknn2bJKlqaBAemyyU0H3/s74pISlLCalB6u8Tk5Chf+UsvqczTzEx933pSkiqtG4zp040XMHv2VO6WW25R2a7hVGGUUj9s0utiKc13IRct4olz+3CbrRln2a5hR20dnU7yssuKGnpENF5+PuVXXylf+M2jlWtHShUy6DToclTcTTJmDOW//6pmzclJyr1yRmvKn/Ufy+WMGWpB1FsLPcmmbiAljDOPNcd4zLDhRPGXlVaO5Miwx17FVTydp9NBB220UVAU+unddLMTO+nWMV/GZbyr4BVuM2gTf0xBD0F2tvGCZadOsT33t98an9sbSmhEaqp+qKKmqW4/3qgam02NNTFECZDjx/XtANR5vvuu9Nebl6fGmTGj5GGhMjeX8uKLi2bj3i5Gd92p6qCPH69aygUT9GrVKHfupGzaJLClnEsz9HfLJUso+1+uhP+mGym3JPYC2zRO81sUDVbO9kyeGdaY//JfpjAl4PgkJvFqXs05nBNQY11ScjRHF9ZIFxTUqHEkR1aowlllgSnoIViyxFhUrVayBAX+wuaLL4zPnZQU+viVK8l69ZQbJyVFCfcVV+gvwDocquOSHv/8QzZoYOzCcTjUomkiIN991zjhpmC2LOfONVyMlQKUf/+t6pDrhSpaLZTXXhvnq4wea7iGIzmSF/Ni9md/3Vm7oGB/9g9rvMf5uG5kSzKTDSNbFnKhbh0ZF11cwAXRvNwKTzBBD6eWS4UnOdl/odIXIcKrc1JSunfXryMjBNC7d+jj27VTNVwWLABmz1brgtu26ddakVLto8eYMcC+fUq+i+N0ArffDlSrFtqeMmH6NLWoWZzMTOD558EFC4BatYD8fP3ja9WCaNgQWLsWSE0N/FxKYMXv0bU5jrRBG7yBNzAP8/A23tZdrHTCibtxd1jjrcZqZCFw8TkHOViFVbrHzMAMpCPwd5aOdEzH9LDOaxIaU9ChRNHjCXzfagUuuACw22N37kqVgBdeADRNiTigbjCVKgEvvRTeGBYL0LmzstXjMS7mlZ+v/xkJfPGFCtQojhDArbcCzzwTn
i1lQq6OoYC6kM/nAlcNAHpfoEJ3kpL899E04J571d9POQVwu/XHato0auaGgiSYlgbq/QKiTBVUwUIsRF3UhRtupCAFHngwGZPRBV3CGuMMnAE7Av9TJCMZbdBG95hMZBqOF+wzkwgxmrrHeksklwup3C5ud1HijculIlJ27Qrv+P37yVmz1EJkSRb5fvmFHDBARdjcc49q4lxSJkzQjyfXNHLNmsD98/MDE5K8W0oKuSDBnojl5Mn6reX0Yr/btlV/VkpRi6V3jy+MOJGpqSppSM9149vdJJbXMmeOyjZNsin7Ro0sUROPSMlnPldwBX/mz8xkmAV5CtjLvXTTHeA+cdPNndype8wszjJ0uXzMj6NxSScNMH3o4XHgAPn88+SoUSqZJ9z/VxMmKB+z149duXJ8W84dO0Y2beqfFepykTcFqUnUq5e+/9zliu0aQkmQWVkqrDBUEpHVoioebt+uFjOPHg0ca+1aVRvF7VKi73FTGmVyRfs6vvkmMAnJ6aDsfUGZnL80LOdyNmMzOumkRo2N2Zi/0LgwTy5z2YM9/NLxNWrsxm5hNak2KSKYoAvqOU3LgA4dOnDFihVxOXc0mTsXuO66QJeu262SfipXjodVyjX85pvAJ58oV0vHjsDgwcA55xS5dnzZuBHo0kW5oXNylBvH4QBefx0YNqzs7Q8Fs7NVYfaPPgI2bgD27NHfscs5EEuWBB+LBNatA9LSgHbtIByOGFisc952ZwFr1gR+4HQCy38r80SkSCGIndgJCYkmaBIyaScXuXgP72EGZoAghmM4hmAIkpFcRhZXDIQQK0l20P3MFPTS0a0b8Msvge+7XMCLL6oszHixZw/Qsydw4IASdSGAFi3UAmqVKoH7790LvPIKsHgx0KSJarTRQfefTWLBt98Gxt4ReFe124Fx4yCe/q/+cWvWAD/8oBYsrrgCQu9LiSHUnPqZrR4PMHkKxKBBZWpPLDiMw3gP72ELtqADOuAaXAMNWrzNKtcEE3TT5VJKmjbV9z0LQT72WHxt69w5sOFFcjJ55ZXxtSsrS8Wif/21iqUvLTI9nbJBff94cotQseg65XNlXh7lVVcpX7k9WblbXBrll1+W3phI7G7cSN9V5HEbJjeVJ5ZzOT30FIZJuulmHdbhLoa5MGWiC8ywxdjRo4d+WKPLBZx9dtnb4+Xvv1VUXvHIvZwc4Kuv9KP+yoJ584CaNYEBA4Brr1XRhdN1ota4cyd4881gy5bg+b3Ar782HFNoGrD8N+Cyy1RUi9WqitQsWw5Rp07gAW++Acz7WsV25uSoLyMjAxh4DXj0aBSvNgT33aeibnyxWoG6dYGuXcvOjhhAEAMwAKlILYxiSUMaDuAAbsSNJR43E5lYjdXY7dcV06QQI6WP9VZRZujbtqnFUN8FRbtdNasI1TYulqxcaZyw5HCQ+/aVvU1//62f8KRppG/2vNy4US1Q+s643a6wuv5IKf3K4uru06qVcQXGEiyISilV6YE5cyj//juy4x54QC2EVq6knhLanUUZbmhVArOaq3UjYbwZpemMfKX9OT5HF11MYQoddPBcnst9jMM/5DgDc4YeO5o0AZYtAy6+WC0iVq4M3HwzsGiRcXG+sqBlS+NkqSpV1My4rJk6VT/WPSsLePllnzfG36VWdX13Tk8H/vs0GKK5uBACItQXf+K4/vu5ucBxg88M4PbtwKnNgT4XAtcPB1qcBg4fBholNRW39amnkPvPLnzx8714ZdtYLF75P6BB/YhsSESykQ1LEHnJR+jvx5f38T4exaNIRzpO4ASykIVlWIbzcT6I+KwDJiRGSh/rraLM0BOZ//1Pv+XdrFnxsWfoUP0nBkA1wvZiWFwrxUMZBePljTdQ2qz6ceurV4c/jpSqFozVEhjH/tRTYY2xhVtYh3XooYd22ummm2fxLB7l0ZJdXIKQzWxWZmXdGfpZPCvi8U7lqbpjuekOGi5ZEYE5Qz85GTsWeOcd4IwzVFnvTp2Azz8HrroqPvZ0767WFopjtwPnnVfsDT2E0B8gUu68y6DGgQZEEir4668qhKj4o1BGBvDySyEPJ4h+6Id/8S9SkYpsZCMNaViP9bgFt4RvRwKSjGS8gTegQSucqdtggwsuvIE3Ih5vDwzCUgFsx/YS21nRMAW9gjNgAPDHH8qTsHx5ePVhYsW116rmGDafUiIWi1oXvPVWnx2HDNEXdSHUYmdpWbhQ1VcoTk62WjEOl337jP1qYSyubsRG/I2/A1wGOcjBbMxGdnYq+PbbalH4oj7grFlhuXIShatxNRZjMa7G1WiHdrgJN+EP/IGO6BjxWM3QTPd9CYlWaFVaUysORlP3WG+my+XkZN8+cuBAtXBss5H/+Y9q/OGLPHGC8qy2ReVxNadasPz++6jYIM/raZxdOiKMvnrecXbsoHQYuIfOahvy+F/4i24ZWhC0SRuP9Wrvnw3rdqnSvXFolhEN1nEdB3MwW7EV+7M/l3N52Md+yS8Dmj7baWcXdomhxYkJzNR/k/KGzMuj/Ppryoceonz11Yg73Qcd+/LL9EXYZqUcNy6ysQYPDqwrozkpwyiAk8Y03c70INjsWE390gZuV1hjR5t85pcqRf8H/kCNGi200FuuV6PGT/hJ2GO8z/dZi7XooIN22nk1r+YxHiuxTeUVU9BNKiRy927KZ5+lvPtuynnzQoYrFh43b56+WGpOyrVrI7MhN5fy8ccoq1dXN4S2bSgj6ATyAl8IEHWNGr+5t43xU8So8DsLlZaDPMhreS2TmUxBwbN5NldyZURjSEo2YzPdG1c1VovoRpHPfP7Df5jGtEgvpcJgCrpJhUN+8okSYIe9KLuyy9mUGYFtzwKOlZLyjjvU8clJagyng/L558vA8kBmczbbsR1rsAbP5/lcwiWUF11kXHDstjFlYlcuc9mczZnEJD8RdtPNv/hX2OP8y391+456x1rLyG6iJzvBBN2s5WJS7uCxY0C9uqqSmC8OB3DPPRCPPhbeOBs3Al9+qVZpr7gColGjqNtaUjhrFnDDiMCUXk0DFn4PUQZpyHMwB8MwDGlI83vfCiuGYiimYVpY4xzDMdRCLeQgJ+AzDRrWYq3hoqdJIMFquZhRLibljy+/1K+3kJUFTAtPZABAtGwJcc89EHfemVBiDkCFJ/XpUxSm6Q0HumlkmYg5AKzAigAxB1RS0K/4NexxKqMyuqALrPD/nQkINERDU8yjSGAvKhOTRCcz0zgNVq96YTlEWCzgJ5+qapCffqJq1Ay+DqJz5zKzoSEaQoOGDAT2MzwFp0Q01rt4F2fjbKQiFWlIgwsu2GHHp/g0rOMzkIGZmImlWIpmaIbhGI7aqB2RDScDpsvFpNzBnTuBVi0DxdtmA4YMhXj77bjYVdE4hmM4BafgBE74va9Bw5f4Er3QCzuxE3fhLszDPFhhxRW4Ai/iRdRAjYDxspCF2ZiNdViH5miOa3AN3DBoAejDXuxFJ3TCcRxHOtLhgANWWPE1vkYP9Ija9ZYXzHroJhUO3nM3MHlykY85OVmlw65aDVG//NdCSRSWYzkux+VI
RzoEBPKQh+fxPG7BLTiEQ2iJljiCI5BQT0w22FAf9bEBG+CEMyo29EM/zMO8gPovNVAD+7AvwJVT0Sm1D10IcZEQ4i8hxFYhxH06n18mhPhDCLFGCLFCCHFuaY02MQnKs88BH3yoMkfPOFN141j3pynmUaYzOmMv9mI+5mM2ZuMADhSWJXgdryMNaYViDgB5yMNBHMRMzIzK+fOQh2/wjW4xryxk4Tf8FpXzVBRC+tCFEFYArwHoDWAPgN+FEF+Q3OCz2/cAviBJIcSZAGYBaBELg01MAFWpEP36qc0kplhgQRd0CXj/e3yPLASuWaQjHT/iR1yP60t9boJ+NwxfBIRu5MzJTDgz9E4AtpLcTjIHwEwAl/nuQKoo/4IfXYBZz9LEpKLTEA11S+QmIxkN0CAq50hCku7NBFBi3xllt0hcHghH0OsBfu1B9hS854cQor8QYhOArwGM0BtICDGywCWz4mCIutYmJiaJzW24DQ4ENtS2wlqqrkTFmYIpSEFKYTNpCyzQoGEyJuue/2QmHEHXa+UdMAMn+RnJFgAuB/CE3kAk3yTZgWSHGjUCV8FNTEzKD53QCRMxEU44kVLwcsGFD/EhGqFR1M7TGq3xJ/7ErbgVZ+NsDMRALMZiDMbgqJ2johBOHPoewO/5qT6Af4x2JrlYCNFUCFGd5KHSGmhiYpK4jMIoDMRALMIi2GBDL/SCBi30gRHSAA0wEROjPm5FIxxB/x1AcyFEYwB7AQwEMMh3ByFEMwDbChZF2wFIBnA42saamJgkHpVQCZfj8nibYYIwBJ1knhBiDID5AKwAppFcL4QYXfD5FABXAhgqhMgFkAngGsYrwD0K/Pkn8MorwJYtwLnnquYLtc2kNBMTHMERCAhUQZV4m2Kig5lYVIzZs4GhQ4HsbCA/XzXOcThUt7GWLeNtnUlxuG0b8MksIDML6NsXomPk3XBMQrMWazEcw7EBKlr5DJyBGZiB1oigZZ9JVDAzRcMkJweoUQM44Z/pDCGAbt2An36Kj10m+nDiROChh4D8PHX3dTiAK64AZrwDYdQaziRi9mEfWqCFXwkAAYEUpGALtuim+ZvEDrPaYpgsX67/PgksWaIE3yQx4KZNwMMPAVmZQG6uKtaVkQF89hnwaXgFn0zCYzImIxvZfu8RRDay8SbejJNVJnqYgu6D1arfDB5Qs3ShF8BpEh8+eF8JeXHS04E3Iu8qb2LM7/g9QNABlXq/Aon1lH2yYwq6D5066TeDt1qBCy5QFUxNEoS0NCAvz+Cz1LK1pQIjIeGBxzAj9HScHgerTIwwBd0Hmw34+GPVU8BuV++5XEC1aqqwn0kC0ffSouYPvjidwFVXl709FZDt2I4maIKv8bVuPRWCGIVRcbDMxAizwUUxzj8f2LgReOstFbbYtauKeklJibdlJn706gV0765WqjMKGjA4HEC9esAoU2RKC0H8B//Bbuw2LI5lhRUHcCBqdVtMSo8p6Do0aAA8/ni8rTAJhhAC/PwLYPp04K03VbOLgQOBMbdBeDzxNq/cswZrsAd7DMUcAHKQg/fxPtqjfRlaZhIMU9BNyi3CZgNuukltJlHlIA6GbBwhIZGJzKD7mJQtpg/dxMQkgHZopxvZ4osLLlyJK8vIIpNwMAXdxMQkgOqojltwi2GhLRdc6ImeOB/nl7FlJsEwXS4mJichJ3ACczAHB3EQXdEVXdAFolil7BfwApqhGZ7Dc9iP/aiJmnDDjeqojhtxI67FtbrhjCbxwxR0E5OTjJ/wE/qib2G2px12dEInfINvYIe9cD8BgZsLXiblA/P2amJyEpGFLFyGy5CGNKQjHXnIQzrSsQzL8DSejrd5JqXEFHQTk5OI+ZivG4qYiUyzLksFwBR0k4SBJPjee2CH9mCTxuCoUeDff8fbrArFcRwHDXq4pyGtjK0xiTamoJskDjePBm65GVi1Cti5E5g+DTirLbh9e7wtqzD0RE/kIbAGjoBAL/SKg0Um0cQUdJOEgFu3Au++q6olesnLU8XpJ0yIn2EVjIZoiBtxI1woqoNjhRVuuPEMnomjZSbRwBR0k8Tghx8AvaYUUgLzvy17eyowr+AVTMEUnIWzUB/1MQiDsAqr0BJmS67yjhm2aJIYeDyqTrEebnfZ2lLBERC4ruBlUrEwZ+gmiUHfvmo2XhynExhpVk80MQkHU9BNEgLh8QCffApomtqsVlXv/NxuwF13xds8E5NygelyMUkYxEUXgbt2q56gR46oeuddukCYvf9MTMLCFHSThEJUrQqMHBlvM0xMyiWmy8XExMSkgmAKuomJiUkFwRR0ExMTkwqCKegmJiYmFYSwBF0IcZEQ4i8hxFYhxH06nw8WQvxRsC0VQrSJvqkmJiYmJsEIKehCCCuA1wBcDKAVgGuFEK2K7bYDQA+SZwJ4AjDrcJqYmASyBVswGqPRER0xFEPxB/6It0kVinDCFjsB2EpyOwAIIWYCuAzABu8OJJf67L8MQP1oGmliYlL+WYqluBAXIhvZyEMeVmM1ZmM2PsbH6Iu+8TavQhCOy6UegN0+P+8peM+IGwB8o/eBEGKkEGKFEGLFwYMHw7fSxMSk3HMDbijskgQA+chHBjIwAiOQj/w4W1cxCEfQ9dL0dCvkCyHOgxL0e/U+J/kmyQ4kO9SoUSN8K01MTMo1B3AAO7BD97NMZGJD0QO/SSkIx+WyB0ADn5/rA/in+E5CiDMBTAVwMcnD0THPxMSkIpCEJMNOSRLSrzm1SckJZ4b+O4DmQojGQohkAAMBfOG7gxCiIYA5AIaQ3Bx9M01MTMozVVAFHdABFh3JqYu6aI7mcbCq4hFS0EnmARgDYD6AjQBmkVwvhBgthBhdsNsEANUAvC6EWCOEWBEzi01MTMol7+JdVEO1wm5JGjSkIAWzMAtC17NrEimC1H8MijUdOnTgihWm7puYnEykIQ0f4AOswRq0QAsMxVBUQZV4m1WuEEKsJNlB7zOz2qKJiUmZ4YYbo2A2LIkVZuq/iYmJSQXBFHQTExOTCoIp6CYmJiYVBFPQTUxMTCoIpqCbmJiYVBDiFrYohDgI4O84nLo6gENxOG8sMK8lMTGvJTGpKNdyCknd2ilxE/R4IYRYYRTDWd4wryUxMa8lMalI12KE6XIxMTExqSCYgm5iYmJSQTgZBb0idVMyryUxMa8lMalI16LLSedDNzExMamonIwzdBMTE5MKiSnoJiYmJhWECi3oQoiqQogFQogtBX8G1OkUQjQQQiwSQmwUQqwXQtwRD1uNEEJcJIT4SwixVQhxn87nQgjxSsHnfwgh2sXDznAI41oGF1zDH0KIpUKINvGwMxxCXYvPfh2FEPlCiAFlaV+khHM9QoieBf0O1gshfiprG8MljH9nlYQQXwoh1hZcy/XxsDMmkKywG4DnANxX8Pf7ADyrs08dAO0K/u4BsBlAq3jbXmCPFcA2AE0AJANYW9w2AJdANeUWAM4GsDzedpfiWs4BUKXg7xeX52vx2e8HAPMADIi33aX83VQGsAFAw4Kfa8bb7lJcywNeLQBQA8ARAMnxtj0aW4W
eoQO4DMA7BX9/B8DlxXcguY/kqoK/p0J1ZapXVgaGoBOArSS3k8wBMBPqmny5DMC7VCwDUFkIUaesDQ2DkNdCcinJowU/LoPqX5uIhPN7AYDbAMwGcKAsjSsB4VzPIABzSO4CAJKJek3hXAsBeIQQAoAbStDzytbM2FDRBb0WyX2AEm4ANYPtLIRoBOAsAMtjb1pY1AOw2+fnPQi82YSzTyIQqZ03QD15JCIhr0UIUQ9AfwBTytCukhLO7+ZUAFWEED8KIVYKIYaWmXWREc61vAqgJVSz+3UA7iApy8a82FLuOxYJIRYCqK3z0YMRjuOGmk2NJXkiGrZFAb1Gi8XjTMPZJxEI204hxHlQgn5uTC0qOeFcy0sA7iWZryaCCU0412MD0B7A+QCcAH4VQixj4jWFD+da+gBYA6AXgKYAFgghfk6g//clptwLOskLjD4TQuwXQtQhua/ADaH7mCiESIIS8w9IzomRqSVhD4AGPj/Xh5pVRLpPIhCWnUKIMwFMBXAxycNlZFukhHMtHQDMLBDz6gAuEULkkZxbJhZGRrj/zg6RTAeQLoRYDKAN1JpTIhHOtVwP4BkqJ/pWIcQOAC0A/FY2JsaOiu5y+QLAsIK/DwPwefEdCvxobwPYSHJiGdoWDr8DaC6EaCyESAYwEOqafPkCwNCCaJezARz3upkSjJDXIoRoCGAOgCEJOPPzJeS1kGxMshHJRgA+BXBLgoo5EN6/s88BdBNC2IQQGoDOUOtNiUY417IL6kkDQohaAE4DsL1MrYwR5X6GHoJnAMwSQtwA9Uu8CgCEEHUBTCV5CYCuAIYAWCeEWFNw3AMk58XBXj9I5gkhxgCYD7V6P43keiHE6ILPp0BFUFwCYCuADKjZR8IR5rVMAFANwOsFM9s8JmB1vDCvpdwQzvWQ3CiE+BbAHwAk1P+fP+NntT5h/m6eADBDCLEOykVzL8mKUFbXTP03MTExqShUdJeLiYmJyUmDKegmJiYmFQRT0E1MTEwqCKagm5iYmFQQTEE3MTExqSCYgm5iYmJSQTAF3cTExKSC8H+xIoT1SLzDJwAAAABJRU5ErkJggg==",
|
|
"text/plain": [
|
|
"<Figure size 432x288 with 1 Axes>"
|
|
]
|
|
},
|
|
"metadata": {
|
|
"needs_background": "light"
|
|
},
|
|
"output_type": "display_data"
|
|
}
|
|
],
|
|
"source": [
|
|
"#SIMPLER DATASET\n",
|
|
"nnfs.init()\n",
|
|
"X, y = vertical_data(samples=100, classes=3)\n",
|
|
"plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap='brg')\n",
|
|
"plt.show()"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"# Test Strategy 1: Randomly Select Weights and Biases\n",
|
|
"For a large number of tests, randomly set weights and biases and look at accuracy."
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {},
|
|
"outputs": [
|
|
{
|
|
"name": "stdout",
|
|
"output_type": "stream",
|
|
"text": [
|
|
"New set of weights found, iteration: 0 loss: 1.0986564 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 3 loss: 1.098138 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 117 loss: 1.0980115 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 124 loss: 1.0977516 acc: 0.6\n",
|
|
"New set of weights found, iteration: 165 loss: 1.097571 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 552 loss: 1.0974693 acc: 0.34\n",
|
|
"New set of weights found, iteration: 778 loss: 1.0968257 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 4307 loss: 1.0965533 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 4615 loss: 1.0964499 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 9450 loss: 1.0964295 acc: 0.3333333333333333\n"
|
|
]
|
|
}
|
|
],
|
|
"source": [
|
|
"# Create dataset\n",
|
|
"X, y = vertical_data(samples=100, classes=3)\n",
|
|
"\n",
|
|
"# Create model\n",
|
|
"dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs\n",
|
|
"activation1 = Activation_ReLU()\n",
|
|
"dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 outputs\n",
|
|
"activation2 = Activation_Softmax()\n",
|
|
"\n",
|
|
"# Create loss function\n",
|
|
"loss_function = Loss_CategoricalCrossEntropy()\n",
|
|
"\n",
|
|
"# Helper variables\n",
|
|
"lowest_loss = 9999999 # some initial value\n",
|
|
"best_dense1_weights = dense1.weights.copy()\n",
|
|
"best_dense1_biases = dense1.biases.copy()\n",
|
|
"best_dense2_weights = dense2.weights.copy()\n",
|
|
"best_dense2_biases = dense2.biases.copy()\n",
|
|
"\n",
|
|
"for iteration in range(10000):\n",
|
|
" # Generate a new set of weights for iteration\n",
|
|
" dense1.weights = 0.05 * np.random.randn(2, 3)\n",
|
|
" dense1.biases = 0.05 * np.random.randn(1, 3)\n",
|
|
" dense2.weights = 0.05 * np.random.randn(3, 3)\n",
|
|
" dense2.biases = 0.05 * np.random.randn(1, 3)\n",
|
|
" \n",
|
|
" # Perform a forward pass of the training data through this layer\n",
|
|
" dense1.forward(X)\n",
|
|
" activation1.forward(dense1.output)\n",
|
|
" dense2.forward(activation1.output)\n",
|
|
" activation2.forward(dense2.output)\n",
|
|
"\n",
|
|
" # Perform a forward pass through activation function\n",
|
|
" # it takes the output of second dense layer here and returns loss\n",
|
|
" loss = loss_function.calculate(activation2.output, y)\n",
|
|
"\n",
|
|
" # Calculate accuracy from output of activation2 and targets\n",
|
|
" # calculate values along first axis\n",
|
|
" predictions = np.argmax(activation2.output, axis=1)\n",
|
|
" accuracy = np.mean(predictions == y)\n",
|
|
"\n",
|
|
" # If loss is smaller - print and save weights and biases aside\n",
|
|
" if loss < lowest_loss:\n",
|
|
" print('New set of weights found, iteration:', iteration,'loss:', loss, 'acc:', accuracy)\n",
|
|
" best_dense1_weights = dense1.weights.copy()\n",
|
|
" best_dense1_biases = dense1.biases.copy()\n",
|
|
" best_dense2_weights = dense2.weights.copy()\n",
|
|
" best_dense2_biases = dense2.biases.copy()\n",
|
|
" lowest_loss = loss"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"# Test Strategy 2: Randomly Adjust Weights and Biases\n",
|
|
"For a large number of tests with a starting weight and bias, update the weights and biases by some small, random value. If the new accuracy is higher, keep the weights and biases. If the new accuracy is lower, revert back to the last weights and biases."
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 8,
|
|
"metadata": {},
|
|
"outputs": [
|
|
{
|
|
"name": "stdout",
|
|
"output_type": "stream",
|
|
"text": [
|
|
"New set of weights found, iteration: 0 loss: 1.1004413 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 1 loss: 1.1003714 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 2 loss: 1.0999109 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 6 loss: 1.098478 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 7 loss: 1.0979133 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 10 loss: 1.0962688 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 11 loss: 1.0956886 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 18 loss: 1.0933328 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 27 loss: 1.0928771 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 35 loss: 1.0894114 acc: 0.64\n",
|
|
"New set of weights found, iteration: 38 loss: 1.0819336 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 42 loss: 1.0804778 acc: 0.5866666666666667\n",
|
|
"New set of weights found, iteration: 48 loss: 1.0791433 acc: 0.6\n",
|
|
"New set of weights found, iteration: 50 loss: 1.076686 acc: 0.66\n",
|
|
"New set of weights found, iteration: 57 loss: 1.0729789 acc: 0.6333333333333333\n",
|
|
"New set of weights found, iteration: 62 loss: 1.0626912 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 64 loss: 1.062278 acc: 0.6266666666666667\n",
|
|
"New set of weights found, iteration: 66 loss: 1.0609949 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 71 loss: 1.0521731 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 76 loss: 1.0494336 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 79 loss: 1.0357945 acc: 0.39\n",
|
|
"New set of weights found, iteration: 81 loss: 1.0338205 acc: 0.39\n",
|
|
"New set of weights found, iteration: 83 loss: 1.0324905 acc: 0.3433333333333333\n",
|
|
"New set of weights found, iteration: 84 loss: 1.0297213 acc: 0.39666666666666667\n",
|
|
"New set of weights found, iteration: 86 loss: 1.0251579 acc: 0.5866666666666667\n",
|
|
"New set of weights found, iteration: 91 loss: 1.021905 acc: 0.64\n",
|
|
"New set of weights found, iteration: 94 loss: 1.0156672 acc: 0.44333333333333336\n",
|
|
"New set of weights found, iteration: 97 loss: 1.0146924 acc: 0.6366666666666667\n",
|
|
"New set of weights found, iteration: 101 loss: 1.0104957 acc: 0.6566666666666666\n",
|
|
"New set of weights found, iteration: 104 loss: 1.0020038 acc: 0.6433333333333333\n",
|
|
"New set of weights found, iteration: 105 loss: 0.99834555 acc: 0.5933333333333334\n",
|
|
"New set of weights found, iteration: 106 loss: 0.9966272 acc: 0.6166666666666667\n",
|
|
"New set of weights found, iteration: 119 loss: 0.99438596 acc: 0.65\n",
|
|
"New set of weights found, iteration: 125 loss: 0.993829 acc: 0.6633333333333333\n",
|
|
"New set of weights found, iteration: 127 loss: 0.98677754 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 130 loss: 0.98621744 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 132 loss: 0.9835827 acc: 0.6533333333333333\n",
|
|
"New set of weights found, iteration: 135 loss: 0.97278476 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 140 loss: 0.96625113 acc: 0.6533333333333333\n",
|
|
"New set of weights found, iteration: 141 loss: 0.9602833 acc: 0.6266666666666667\n",
|
|
"New set of weights found, iteration: 142 loss: 0.960012 acc: 0.6633333333333333\n",
|
|
"New set of weights found, iteration: 148 loss: 0.9562915 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 149 loss: 0.9523678 acc: 0.6633333333333333\n",
|
|
"New set of weights found, iteration: 152 loss: 0.95160383 acc: 0.6266666666666667\n",
|
|
"New set of weights found, iteration: 153 loss: 0.9451301 acc: 0.6433333333333333\n",
|
|
"New set of weights found, iteration: 154 loss: 0.9373847 acc: 0.3566666666666667\n",
|
|
"New set of weights found, iteration: 161 loss: 0.9350953 acc: 0.39666666666666667\n",
|
|
"New set of weights found, iteration: 165 loss: 0.93221325 acc: 0.3566666666666667\n",
|
|
"New set of weights found, iteration: 170 loss: 0.9305483 acc: 0.39\n",
|
|
"New set of weights found, iteration: 172 loss: 0.9251562 acc: 0.5133333333333333\n",
|
|
"New set of weights found, iteration: 175 loss: 0.92369926 acc: 0.5133333333333333\n",
|
|
"New set of weights found, iteration: 178 loss: 0.9234428 acc: 0.5466666666666666\n",
|
|
"New set of weights found, iteration: 179 loss: 0.922442 acc: 0.45\n",
|
|
"New set of weights found, iteration: 181 loss: 0.9147015 acc: 0.3566666666666667\n",
|
|
"New set of weights found, iteration: 183 loss: 0.90779805 acc: 0.7166666666666667\n",
|
|
"New set of weights found, iteration: 184 loss: 0.9027177 acc: 0.7266666666666667\n",
|
|
"New set of weights found, iteration: 187 loss: 0.89574933 acc: 0.69\n",
|
|
"New set of weights found, iteration: 193 loss: 0.875872 acc: 0.49333333333333335\n",
|
|
"New set of weights found, iteration: 196 loss: 0.8563512 acc: 0.77\n",
|
|
"New set of weights found, iteration: 211 loss: 0.8544111 acc: 0.73\n",
|
|
"New set of weights found, iteration: 213 loss: 0.8524884 acc: 0.7533333333333333\n",
|
|
"New set of weights found, iteration: 216 loss: 0.8505173 acc: 0.66\n",
|
|
"New set of weights found, iteration: 221 loss: 0.8498888 acc: 0.6566666666666666\n",
|
|
"New set of weights found, iteration: 229 loss: 0.8474012 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 231 loss: 0.8292791 acc: 0.6966666666666667\n",
|
|
"New set of weights found, iteration: 237 loss: 0.8153417 acc: 0.73\n",
|
|
"New set of weights found, iteration: 239 loss: 0.8059437 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 240 loss: 0.8048912 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 242 loss: 0.80074865 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 245 loss: 0.7959438 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 246 loss: 0.7937235 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 247 loss: 0.7921869 acc: 0.7133333333333334\n",
|
|
"New set of weights found, iteration: 248 loss: 0.7904797 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 249 loss: 0.7902046 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 251 loss: 0.7724022 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 255 loss: 0.7669011 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 256 loss: 0.75499004 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 257 loss: 0.74510765 acc: 0.7166666666666667\n",
|
|
"New set of weights found, iteration: 259 loss: 0.73144156 acc: 0.7033333333333334\n",
|
|
"New set of weights found, iteration: 260 loss: 0.729851 acc: 0.7033333333333334\n",
|
|
"New set of weights found, iteration: 264 loss: 0.72137403 acc: 0.6666666666666666\n",
|
|
"New set of weights found, iteration: 265 loss: 0.7202212 acc: 0.77\n",
|
|
"New set of weights found, iteration: 267 loss: 0.71849644 acc: 0.7966666666666666\n",
|
|
"New set of weights found, iteration: 271 loss: 0.7111821 acc: 0.8466666666666667\n",
|
|
"New set of weights found, iteration: 272 loss: 0.6996752 acc: 0.8233333333333334\n",
|
|
"New set of weights found, iteration: 280 loss: 0.69436175 acc: 0.85\n",
|
|
"New set of weights found, iteration: 286 loss: 0.6916804 acc: 0.8233333333333334\n",
|
|
"New set of weights found, iteration: 295 loss: 0.69114727 acc: 0.85\n",
|
|
"New set of weights found, iteration: 296 loss: 0.6862494 acc: 0.84\n",
|
|
"New set of weights found, iteration: 300 loss: 0.6840637 acc: 0.8366666666666667\n",
|
|
"New set of weights found, iteration: 301 loss: 0.66635275 acc: 0.8866666666666667\n",
|
|
"New set of weights found, iteration: 316 loss: 0.66121036 acc: 0.8333333333333334\n",
|
|
"New set of weights found, iteration: 318 loss: 0.6531562 acc: 0.8266666666666667\n",
|
|
"New set of weights found, iteration: 319 loss: 0.65067077 acc: 0.8133333333333334\n",
|
|
"New set of weights found, iteration: 323 loss: 0.64174473 acc: 0.9\n",
|
|
"New set of weights found, iteration: 327 loss: 0.64135444 acc: 0.8766666666666667\n",
|
|
"New set of weights found, iteration: 330 loss: 0.634816 acc: 0.9\n",
|
|
"New set of weights found, iteration: 331 loss: 0.63439554 acc: 0.83\n",
|
|
"New set of weights found, iteration: 337 loss: 0.61878663 acc: 0.8766666666666667\n",
|
|
"New set of weights found, iteration: 338 loss: 0.61758184 acc: 0.8366666666666667\n",
|
|
"New set of weights found, iteration: 340 loss: 0.6141628 acc: 0.84\n",
|
|
"New set of weights found, iteration: 350 loss: 0.60532016 acc: 0.8333333333333334\n",
|
|
"New set of weights found, iteration: 353 loss: 0.5960431 acc: 0.9\n",
|
|
"New set of weights found, iteration: 354 loss: 0.5949759 acc: 0.8966666666666666\n",
|
|
"New set of weights found, iteration: 356 loss: 0.5906401 acc: 0.8433333333333334\n",
|
|
"New set of weights found, iteration: 359 loss: 0.5798522 acc: 0.8466666666666667\n",
|
|
"New set of weights found, iteration: 360 loss: 0.5785292 acc: 0.86\n",
|
|
"New set of weights found, iteration: 362 loss: 0.57752687 acc: 0.86\n",
|
|
"New set of weights found, iteration: 363 loss: 0.5740031 acc: 0.8633333333333333\n",
|
|
"New set of weights found, iteration: 364 loss: 0.57001764 acc: 0.8466666666666667\n",
|
|
"New set of weights found, iteration: 366 loss: 0.5677138 acc: 0.8333333333333334\n",
|
|
"New set of weights found, iteration: 367 loss: 0.5671378 acc: 0.8433333333333334\n",
|
|
"New set of weights found, iteration: 368 loss: 0.5650589 acc: 0.8333333333333334\n",
|
|
"New set of weights found, iteration: 369 loss: 0.55381805 acc: 0.8933333333333333\n",
|
|
"New set of weights found, iteration: 378 loss: 0.553261 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 379 loss: 0.55210435 acc: 0.9066666666666666\n",
|
|
"New set of weights found, iteration: 380 loss: 0.55189973 acc: 0.85\n",
|
|
"New set of weights found, iteration: 381 loss: 0.54758984 acc: 0.8433333333333334\n",
|
|
"New set of weights found, iteration: 384 loss: 0.53211606 acc: 0.85\n",
|
|
"New set of weights found, iteration: 404 loss: 0.5225475 acc: 0.9\n",
|
|
"New set of weights found, iteration: 409 loss: 0.5161618 acc: 0.93\n",
|
|
"New set of weights found, iteration: 410 loss: 0.5132672 acc: 0.92\n",
|
|
"New set of weights found, iteration: 425 loss: 0.5113815 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 443 loss: 0.51136065 acc: 0.8933333333333333\n",
|
|
"New set of weights found, iteration: 444 loss: 0.50422305 acc: 0.8866666666666667\n",
|
|
"New set of weights found, iteration: 445 loss: 0.5036243 acc: 0.9\n",
|
|
"New set of weights found, iteration: 448 loss: 0.501374 acc: 0.89\n",
|
|
"New set of weights found, iteration: 449 loss: 0.49592137 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 453 loss: 0.4913305 acc: 0.9066666666666666\n",
|
|
"New set of weights found, iteration: 454 loss: 0.49017328 acc: 0.8933333333333333\n",
|
|
"New set of weights found, iteration: 455 loss: 0.47922668 acc: 0.92\n",
|
|
"New set of weights found, iteration: 462 loss: 0.47773126 acc: 0.92\n",
|
|
"New set of weights found, iteration: 465 loss: 0.4757397 acc: 0.9133333333333333\n",
|
|
"New set of weights found, iteration: 466 loss: 0.46511835 acc: 0.9133333333333333\n",
|
|
"New set of weights found, iteration: 468 loss: 0.45972347 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 474 loss: 0.45767045 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 475 loss: 0.4531248 acc: 0.9133333333333333\n",
|
|
"New set of weights found, iteration: 477 loss: 0.45079032 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 493 loss: 0.44731975 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 501 loss: 0.44255528 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 504 loss: 0.43813455 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 509 loss: 0.43588492 acc: 0.9133333333333333\n",
|
|
"New set of weights found, iteration: 514 loss: 0.4349694 acc: 0.91\n",
|
|
"New set of weights found, iteration: 522 loss: 0.42528915 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 525 loss: 0.4251014 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 527 loss: 0.42076018 acc: 0.9133333333333333\n",
|
|
"New set of weights found, iteration: 529 loss: 0.42038155 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 532 loss: 0.42007548 acc: 0.93\n",
|
|
"New set of weights found, iteration: 537 loss: 0.417006 acc: 0.93\n",
|
|
"New set of weights found, iteration: 541 loss: 0.41556618 acc: 0.93\n",
|
|
"New set of weights found, iteration: 545 loss: 0.41544887 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 547 loss: 0.413486 acc: 0.92\n",
|
|
"New set of weights found, iteration: 552 loss: 0.4105945 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 554 loss: 0.41028866 acc: 0.93\n",
|
|
"New set of weights found, iteration: 558 loss: 0.40889078 acc: 0.91\n",
|
|
"New set of weights found, iteration: 567 loss: 0.4067462 acc: 0.9033333333333333\n",
|
|
"New set of weights found, iteration: 572 loss: 0.40230143 acc: 0.93\n",
|
|
"New set of weights found, iteration: 584 loss: 0.4005359 acc: 0.9366666666666666\n",
|
|
"New set of weights found, iteration: 589 loss: 0.39260605 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 590 loss: 0.3811324 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 596 loss: 0.38065374 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 597 loss: 0.37342408 acc: 0.93\n",
|
|
"New set of weights found, iteration: 602 loss: 0.37175146 acc: 0.93\n",
|
|
"New set of weights found, iteration: 606 loss: 0.37065893 acc: 0.93\n",
|
|
"New set of weights found, iteration: 613 loss: 0.3700704 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 615 loss: 0.3681412 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 617 loss: 0.3666133 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 620 loss: 0.3613251 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 622 loss: 0.35900766 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 629 loss: 0.35549423 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 637 loss: 0.35331622 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 640 loss: 0.35213044 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 642 loss: 0.34433836 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 648 loss: 0.34404942 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 651 loss: 0.33547923 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 653 loss: 0.3346703 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 655 loss: 0.3344787 acc: 0.92\n",
|
|
"New set of weights found, iteration: 658 loss: 0.33275664 acc: 0.92\n",
|
|
"New set of weights found, iteration: 660 loss: 0.33186576 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 661 loss: 0.32966954 acc: 0.92\n",
|
|
"New set of weights found, iteration: 666 loss: 0.32630792 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 689 loss: 0.32379147 acc: 0.93\n",
|
|
"New set of weights found, iteration: 692 loss: 0.32295424 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 694 loss: 0.3228055 acc: 0.9366666666666666\n",
|
|
"New set of weights found, iteration: 700 loss: 0.3196157 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 709 loss: 0.3168142 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 710 loss: 0.31664997 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 716 loss: 0.31424063 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 717 loss: 0.31154537 acc: 0.9133333333333333\n",
|
|
"New set of weights found, iteration: 719 loss: 0.31104082 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 722 loss: 0.30798188 acc: 0.92\n",
|
|
"New set of weights found, iteration: 726 loss: 0.30246973 acc: 0.93\n",
|
|
"New set of weights found, iteration: 732 loss: 0.3008993 acc: 0.9366666666666666\n",
|
|
"New set of weights found, iteration: 735 loss: 0.2962119 acc: 0.92\n",
|
|
"New set of weights found, iteration: 737 loss: 0.29480347 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 739 loss: 0.28837058 acc: 0.93\n",
|
|
"New set of weights found, iteration: 740 loss: 0.287716 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 758 loss: 0.28682068 acc: 0.92\n",
|
|
"New set of weights found, iteration: 768 loss: 0.2837636 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 774 loss: 0.28291276 acc: 0.9366666666666666\n",
|
|
"New set of weights found, iteration: 792 loss: 0.28240937 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 803 loss: 0.28232166 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 818 loss: 0.28129843 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 822 loss: 0.27883106 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 836 loss: 0.2774233 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 839 loss: 0.27680957 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 844 loss: 0.27618676 acc: 0.93\n",
|
|
"New set of weights found, iteration: 846 loss: 0.27285808 acc: 0.92\n",
|
|
"New set of weights found, iteration: 848 loss: 0.26968768 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 858 loss: 0.26904428 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 884 loss: 0.268827 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 889 loss: 0.26809755 acc: 0.92\n",
|
|
"New set of weights found, iteration: 897 loss: 0.26714522 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 898 loss: 0.26399252 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 900 loss: 0.26366332 acc: 0.93\n",
|
|
"New set of weights found, iteration: 908 loss: 0.26240122 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 925 loss: 0.26182953 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 937 loss: 0.26113904 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 942 loss: 0.2595808 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 945 loss: 0.25693676 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 948 loss: 0.25630182 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 953 loss: 0.2554938 acc: 0.9366666666666666\n",
|
|
"New set of weights found, iteration: 955 loss: 0.25241867 acc: 0.9366666666666666\n",
|
|
"New set of weights found, iteration: 956 loss: 0.24921493 acc: 0.93\n",
|
|
"New set of weights found, iteration: 959 loss: 0.24579681 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 960 loss: 0.24326381 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 963 loss: 0.24075015 acc: 0.93\n",
|
|
"New set of weights found, iteration: 968 loss: 0.24000326 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 973 loss: 0.2382134 acc: 0.92\n",
|
|
"New set of weights found, iteration: 991 loss: 0.23589782 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 992 loss: 0.23532195 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1003 loss: 0.23462674 acc: 0.9133333333333333\n",
|
|
"New set of weights found, iteration: 1006 loss: 0.2343067 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 1011 loss: 0.23375021 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1017 loss: 0.23200704 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1027 loss: 0.23154141 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1038 loss: 0.23015086 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1042 loss: 0.2283845 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1044 loss: 0.22817597 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 1045 loss: 0.22744556 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1048 loss: 0.22593918 acc: 0.9333333333333333\n",
|
|
"New set of weights found, iteration: 1052 loss: 0.22136909 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1058 loss: 0.22063312 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1062 loss: 0.21697378 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1063 loss: 0.2154923 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1081 loss: 0.21519203 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1088 loss: 0.21324503 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1089 loss: 0.21294884 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1107 loss: 0.21196988 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1123 loss: 0.21161827 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1132 loss: 0.20942998 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1134 loss: 0.20683934 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1152 loss: 0.20673288 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1158 loss: 0.20602302 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1196 loss: 0.20581451 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1211 loss: 0.20252144 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1219 loss: 0.20165876 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1222 loss: 0.20161158 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1227 loss: 0.20160003 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1238 loss: 0.2002937 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1242 loss: 0.19846326 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1253 loss: 0.19829592 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1258 loss: 0.19702826 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1269 loss: 0.19549341 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1274 loss: 0.1936545 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1295 loss: 0.19341804 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1310 loss: 0.19328523 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1340 loss: 0.19306006 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1343 loss: 0.1926579 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1369 loss: 0.19204491 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1370 loss: 0.19107494 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1386 loss: 0.19073413 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1416 loss: 0.19026798 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1420 loss: 0.19009912 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1430 loss: 0.1890326 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1461 loss: 0.1889198 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1482 loss: 0.18784311 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 1483 loss: 0.18726717 acc: 0.9266666666666666\n",
|
|
"New set of weights found, iteration: 1497 loss: 0.18642144 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1504 loss: 0.18634032 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1535 loss: 0.18609515 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1572 loss: 0.18588851 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1601 loss: 0.18577391 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1605 loss: 0.18567057 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1610 loss: 0.18486346 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1616 loss: 0.18467394 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1622 loss: 0.18462817 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1636 loss: 0.18456662 acc: 0.9166666666666666\n",
|
|
"New set of weights found, iteration: 1704 loss: 0.18404384 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1706 loss: 0.1837163 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1780 loss: 0.18321306 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1796 loss: 0.18313819 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1797 loss: 0.18233562 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1858 loss: 0.18162519 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1879 loss: 0.18157774 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1916 loss: 0.18105999 acc: 0.92\n",
|
|
"New set of weights found, iteration: 1928 loss: 0.18020274 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1974 loss: 0.1798982 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 1994 loss: 0.17920457 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2006 loss: 0.17876633 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2088 loss: 0.17826015 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2273 loss: 0.1780754 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2278 loss: 0.17789875 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2349 loss: 0.17731167 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2439 loss: 0.1767629 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2580 loss: 0.17660397 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2648 loss: 0.1765489 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2873 loss: 0.17626359 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 2975 loss: 0.17622189 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3036 loss: 0.17601877 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3158 loss: 0.17592181 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3175 loss: 0.17590967 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3240 loss: 0.17587931 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3251 loss: 0.17557663 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3252 loss: 0.17530455 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3293 loss: 0.17516907 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3308 loss: 0.17511182 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3338 loss: 0.17484583 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3360 loss: 0.17471306 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3393 loss: 0.17469047 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3395 loss: 0.1743806 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3642 loss: 0.17433934 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3895 loss: 0.17422049 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 3922 loss: 0.17404793 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 4161 loss: 0.17401835 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 4186 loss: 0.17398895 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 4194 loss: 0.17370409 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 4392 loss: 0.17358227 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 5189 loss: 0.17355132 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 5918 loss: 0.17352708 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 6323 loss: 0.17347664 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 6503 loss: 0.17335322 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 6539 loss: 0.17328598 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 7013 loss: 0.17325447 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 7724 loss: 0.17321768 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 7816 loss: 0.17312418 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 7890 loss: 0.17311428 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 8234 loss: 0.1730845 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 8588 loss: 0.173055 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 8671 loss: 0.17292134 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 8800 loss: 0.17289625 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 8953 loss: 0.1728954 acc: 0.9233333333333333\n",
|
|
"New set of weights found, iteration: 8970 loss: 0.17283815 acc: 0.9233333333333333\n"
]
}
],
"source": [
"# Create dataset\n",
|
|
"X, y = vertical_data(samples=100, classes=3)\n",
|
|
"\n",
|
|
"# Create model\n",
|
|
"dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs\n",
|
|
"activation1 = Activation_ReLU()\n",
|
|
"dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 outputs\n",
|
|
"activation2 = Activation_Softmax()\n",
|
|
"\n",
|
|
"# Create loss function\n",
|
|
"loss_function = Loss_CategoricalCrossEntropy()\n",
|
|
"\n",
|
|
"# Helper variables\n",
|
|
"lowest_loss = 9999999 # some initial value\n",
|
|
"best_dense1_weights = dense1.weights.copy()\n",
|
|
"best_dense1_biases = dense1.biases.copy()\n",
|
|
"best_dense2_weights = dense2.weights.copy()\n",
|
|
"best_dense2_biases = dense2.biases.copy()\n",
|
|
"for iteration in range(10000):\n",
|
|
" # Update weights with some small random values\n",
|
|
" dense1.weights += 0.05 * np.random.randn(2, 3)\n",
|
|
" dense1.biases += 0.05 * np.random.randn(1, 3)\n",
|
|
" dense2.weights += 0.05 * np.random.randn(3, 3)\n",
|
|
" dense2.biases += 0.05 * np.random.randn(1, 3)\n",
|
|
"\n",
|
|
" # Perform a forward pass of our training data through this layer\n",
|
|
" dense1.forward(X)\n",
|
|
" activation1.forward(dense1.output)\n",
|
|
" dense2.forward(activation1.output)\n",
|
|
" activation2.forward(dense2.output)\n",
|
|
"\n",
|
|
" # Perform a forward pass through activation function\n",
|
|
" # it takes the output of second dense layer here and returns loss\n",
|
|
" loss = loss_function.calculate(activation2.output, y)\n",
|
|
"\n",
|
|
" # Calculate accuracy from output of activation2 and targets\n",
|
|
" # calculate values along first axis\n",
|
|
" predictions = np.argmax(activation2.output, axis=1)\n",
|
|
" accuracy = np.mean(predictions == y)\n",
|
|
"\n",
|
|
" # If loss is smaller - print and save weights and biases aside\n",
|
|
" if loss < lowest_loss:\n",
|
|
" print('New set of weights found, iteration:', iteration,'loss:', loss, 'acc:', accuracy)\n",
|
|
" best_dense1_weights = dense1.weights.copy()\n",
|
|
" best_dense1_biases = dense1.biases.copy()\n",
|
|
" best_dense2_weights = dense2.weights.copy()\n",
|
|
" best_dense2_biases = dense2.biases.copy()\n",
|
|
" lowest_loss = loss\n",
|
|
" # Revert weights and biases\n",
|
|
" else:\n",
|
|
" dense1.weights = best_dense1_weights.copy()\n",
|
|
" dense1.biases = best_dense1_biases.copy()\n",
|
|
" dense2.weights = best_dense2_weights.copy()\n",
|
|
" dense2.biases = best_dense2_biases.copy()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Test Strategy 2 on Spiral Dataset"
]
},
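{
"cell_type": "markdown",
"metadata": {},
"source": [
"Note on the starting loss: with the small random initial weights, the softmax outputs are close to uniform, so each of the 3 classes gets a predicted probability of about $\\tfrac{1}{3}$ and the categorical cross-entropy starts near $-\\ln(\\tfrac{1}{3}) = \\ln 3 \\approx 1.0986$, with accuracy around $\\tfrac{1}{3}$. That matches the iteration-0 line printed below."
]
},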
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"New set of weights found, iteration: 0 loss: 1.0986983 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 42 loss: 1.0984432 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 48 loss: 1.0983725 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 54 loss: 1.097728 acc: 0.38666666666666666\n",
|
|
"New set of weights found, iteration: 55 loss: 1.0976882 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 56 loss: 1.0973428 acc: 0.38333333333333336\n",
|
|
"New set of weights found, iteration: 57 loss: 1.0970833 acc: 0.3333333333333333\n",
|
|
"New set of weights found, iteration: 64 loss: 1.0964628 acc: 0.3566666666666667\n",
|
|
"New set of weights found, iteration: 65 loss: 1.0957834 acc: 0.34\n",
|
|
"New set of weights found, iteration: 84 loss: 1.0957702 acc: 0.34\n",
|
|
"New set of weights found, iteration: 90 loss: 1.0955024 acc: 0.3566666666666667\n",
|
|
"New set of weights found, iteration: 95 loss: 1.0942755 acc: 0.39\n",
|
|
"New set of weights found, iteration: 100 loss: 1.0938662 acc: 0.3466666666666667\n",
|
|
"New set of weights found, iteration: 101 loss: 1.091843 acc: 0.33666666666666667\n",
|
|
"New set of weights found, iteration: 104 loss: 1.0912626 acc: 0.34\n",
|
|
"New set of weights found, iteration: 105 loss: 1.0882009 acc: 0.36666666666666664\n",
|
|
"New set of weights found, iteration: 106 loss: 1.0867509 acc: 0.41\n",
|
|
"New set of weights found, iteration: 110 loss: 1.0861986 acc: 0.38333333333333336\n",
|
|
"New set of weights found, iteration: 111 loss: 1.0858816 acc: 0.3433333333333333\n",
|
|
"New set of weights found, iteration: 114 loss: 1.0845512 acc: 0.32666666666666666\n",
|
|
"New set of weights found, iteration: 115 loss: 1.0842649 acc: 0.3433333333333333\n",
|
|
"New set of weights found, iteration: 124 loss: 1.0840762 acc: 0.32666666666666666\n",
|
|
"New set of weights found, iteration: 125 loss: 1.0813359 acc: 0.39\n",
|
|
"New set of weights found, iteration: 133 loss: 1.0780971 acc: 0.37\n",
|
|
"New set of weights found, iteration: 137 loss: 1.077851 acc: 0.38666666666666666\n",
|
|
"New set of weights found, iteration: 138 loss: 1.0777876 acc: 0.4066666666666667\n",
|
|
"New set of weights found, iteration: 143 loss: 1.0771211 acc: 0.39666666666666667\n",
|
|
"New set of weights found, iteration: 144 loss: 1.0768937 acc: 0.38333333333333336\n",
|
|
"New set of weights found, iteration: 146 loss: 1.0742698 acc: 0.38333333333333336\n",
|
|
"New set of weights found, iteration: 148 loss: 1.0733455 acc: 0.41\n",
|
|
"New set of weights found, iteration: 162 loss: 1.0730222 acc: 0.4033333333333333\n",
|
|
"New set of weights found, iteration: 179 loss: 1.0726937 acc: 0.4166666666666667\n",
|
|
"New set of weights found, iteration: 191 loss: 1.0725039 acc: 0.42\n",
|
|
"New set of weights found, iteration: 222 loss: 1.0716708 acc: 0.4033333333333333\n",
|
|
"New set of weights found, iteration: 253 loss: 1.0708596 acc: 0.39\n",
|
|
"New set of weights found, iteration: 272 loss: 1.0706216 acc: 0.4066666666666667\n",
|
|
"New set of weights found, iteration: 290 loss: 1.0698603 acc: 0.44\n",
|
|
"New set of weights found, iteration: 300 loss: 1.0697052 acc: 0.4166666666666667\n",
|
|
"New set of weights found, iteration: 325 loss: 1.069674 acc: 0.43666666666666665\n",
|
|
"New set of weights found, iteration: 381 loss: 1.0691994 acc: 0.3933333333333333\n",
|
|
"New set of weights found, iteration: 398 loss: 1.0687411 acc: 0.4266666666666667\n",
|
|
"New set of weights found, iteration: 406 loss: 1.0684437 acc: 0.43\n",
|
|
"New set of weights found, iteration: 550 loss: 1.0684316 acc: 0.43333333333333335\n",
|
|
"New set of weights found, iteration: 570 loss: 1.0684133 acc: 0.41\n",
|
|
"New set of weights found, iteration: 594 loss: 1.068293 acc: 0.4\n",
|
|
"New set of weights found, iteration: 596 loss: 1.0681537 acc: 0.4066666666666667\n",
|
|
"New set of weights found, iteration: 597 loss: 1.0677991 acc: 0.42\n",
|
|
"New set of weights found, iteration: 642 loss: 1.0676459 acc: 0.39666666666666667\n",
|
|
"New set of weights found, iteration: 661 loss: 1.0675713 acc: 0.3933333333333333\n",
|
|
"New set of weights found, iteration: 681 loss: 1.0674102 acc: 0.38333333333333336\n",
|
|
"New set of weights found, iteration: 695 loss: 1.0673658 acc: 0.4033333333333333\n",
|
|
"New set of weights found, iteration: 701 loss: 1.0666102 acc: 0.4066666666666667\n",
|
|
"New set of weights found, iteration: 719 loss: 1.0663345 acc: 0.42\n",
|
|
"New set of weights found, iteration: 737 loss: 1.066033 acc: 0.4033333333333333\n",
|
|
"New set of weights found, iteration: 752 loss: 1.0657896 acc: 0.4266666666666667\n",
|
|
"New set of weights found, iteration: 903 loss: 1.0655118 acc: 0.4166666666666667\n",
|
|
"New set of weights found, iteration: 981 loss: 1.065493 acc: 0.41333333333333333\n",
|
|
"New set of weights found, iteration: 1006 loss: 1.0654801 acc: 0.41\n",
|
|
"New set of weights found, iteration: 1048 loss: 1.0651859 acc: 0.39666666666666667\n",
|
|
"New set of weights found, iteration: 1175 loss: 1.064625 acc: 0.4266666666666667\n",
|
|
"New set of weights found, iteration: 1209 loss: 1.0643268 acc: 0.41333333333333333\n",
|
|
"New set of weights found, iteration: 1245 loss: 1.0643263 acc: 0.43666666666666665\n",
|
|
"New set of weights found, iteration: 1302 loss: 1.0640283 acc: 0.4\n",
|
|
"New set of weights found, iteration: 1303 loss: 1.0634205 acc: 0.44333333333333336\n",
|
|
"New set of weights found, iteration: 1352 loss: 1.0630084 acc: 0.43\n",
|
|
"New set of weights found, iteration: 1577 loss: 1.0626279 acc: 0.42333333333333334\n",
|
|
"New set of weights found, iteration: 1594 loss: 1.0625374 acc: 0.43333333333333335\n",
|
|
"New set of weights found, iteration: 1600 loss: 1.0623267 acc: 0.44333333333333336\n",
|
|
"New set of weights found, iteration: 1794 loss: 1.0622777 acc: 0.41\n",
|
|
"New set of weights found, iteration: 1851 loss: 1.0618818 acc: 0.43333333333333335\n",
|
|
"New set of weights found, iteration: 1877 loss: 1.0616083 acc: 0.43333333333333335\n",
|
|
"New set of weights found, iteration: 1958 loss: 1.0614555 acc: 0.43666666666666665\n",
|
|
"New set of weights found, iteration: 1998 loss: 1.0613961 acc: 0.4066666666666667\n",
|
|
"New set of weights found, iteration: 2031 loss: 1.0606906 acc: 0.46\n",
|
|
"New set of weights found, iteration: 2130 loss: 1.0606595 acc: 0.43\n",
|
|
"New set of weights found, iteration: 2431 loss: 1.06059 acc: 0.4066666666666667\n",
|
|
"New set of weights found, iteration: 3294 loss: 1.0603732 acc: 0.4\n",
|
|
"New set of weights found, iteration: 3492 loss: 1.0603614 acc: 0.4266666666666667\n",
|
|
"New set of weights found, iteration: 3662 loss: 1.0598251 acc: 0.4\n",
|
|
"New set of weights found, iteration: 3756 loss: 1.0595479 acc: 0.39\n",
|
|
"New set of weights found, iteration: 3769 loss: 1.0593852 acc: 0.42333333333333334\n",
|
|
"New set of weights found, iteration: 3875 loss: 1.0583456 acc: 0.4033333333333333\n",
|
|
"New set of weights found, iteration: 3981 loss: 1.0582583 acc: 0.42333333333333334\n",
|
|
"New set of weights found, iteration: 4146 loss: 1.0579673 acc: 0.4166666666666667\n",
|
|
"New set of weights found, iteration: 4153 loss: 1.0578284 acc: 0.4166666666666667\n",
|
|
"New set of weights found, iteration: 4301 loss: 1.0575745 acc: 0.41\n",
|
|
"New set of weights found, iteration: 4405 loss: 1.057048 acc: 0.43333333333333335\n",
|
|
"New set of weights found, iteration: 4498 loss: 1.056719 acc: 0.4066666666666667\n",
|
|
"New set of weights found, iteration: 4594 loss: 1.0565504 acc: 0.43666666666666665\n",
|
|
"New set of weights found, iteration: 5092 loss: 1.0562842 acc: 0.4266666666666667\n",
|
|
"New set of weights found, iteration: 5117 loss: 1.0557985 acc: 0.4\n",
|
|
"New set of weights found, iteration: 5497 loss: 1.0555316 acc: 0.44\n",
|
|
"New set of weights found, iteration: 6021 loss: 1.0554525 acc: 0.3933333333333333\n",
|
|
"New set of weights found, iteration: 6154 loss: 1.0551611 acc: 0.4033333333333333\n",
|
|
"New set of weights found, iteration: 6168 loss: 1.0548483 acc: 0.42\n",
|
|
"New set of weights found, iteration: 6210 loss: 1.0546328 acc: 0.44666666666666666\n",
|
|
"New set of weights found, iteration: 6233 loss: 1.0541582 acc: 0.44\n",
|
|
"New set of weights found, iteration: 6323 loss: 1.0541245 acc: 0.4533333333333333\n",
|
|
"New set of weights found, iteration: 6386 loss: 1.0537696 acc: 0.4633333333333333\n",
|
|
"New set of weights found, iteration: 6702 loss: 1.0534701 acc: 0.4533333333333333\n",
|
|
"New set of weights found, iteration: 6997 loss: 1.0533447 acc: 0.4166666666666667\n",
|
|
"New set of weights found, iteration: 7101 loss: 1.0529538 acc: 0.41\n",
|
|
"New set of weights found, iteration: 7182 loss: 1.0524737 acc: 0.42\n",
|
|
"New set of weights found, iteration: 7476 loss: 1.0522219 acc: 0.44333333333333336\n",
|
|
"New set of weights found, iteration: 7719 loss: 1.0521553 acc: 0.44666666666666666\n",
|
|
"New set of weights found, iteration: 7858 loss: 1.0520765 acc: 0.4266666666666667\n",
|
|
"New set of weights found, iteration: 7877 loss: 1.0507878 acc: 0.41\n",
|
|
"New set of weights found, iteration: 7953 loss: 1.0506427 acc: 0.41333333333333333\n",
|
|
"New set of weights found, iteration: 8026 loss: 1.0503834 acc: 0.42\n",
|
|
"New set of weights found, iteration: 8763 loss: 1.0503162 acc: 0.41333333333333333\n",
|
|
"New set of weights found, iteration: 9308 loss: 1.0501956 acc: 0.41\n",
|
|
"New set of weights found, iteration: 9399 loss: 1.0493395 acc: 0.4066666666666667\n",
|
|
"New set of weights found, iteration: 9529 loss: 1.0491025 acc: 0.4166666666666667\n",
|
|
"New set of weights found, iteration: 9822 loss: 1.0488548 acc: 0.4533333333333333\n"
]
}
],
"source": [
"# Create dataset\n",
|
|
"X, y = spiral_data(samples=100, classes=3)\n",
|
|
"\n",
|
|
"# Create model\n",
|
|
"dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs\n",
|
|
"activation1 = Activation_ReLU()\n",
|
|
"dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 outputs\n",
|
|
"activation2 = Activation_Softmax()\n",
|
|
"\n",
|
|
"# Create loss function\n",
|
|
"loss_function = Loss_CategoricalCrossEntropy()\n",
|
|
"\n",
|
|
"# Helper variables\n",
|
|
"lowest_loss = 9999999 # some initial value\n",
|
|
"best_dense1_weights = dense1.weights.copy()\n",
|
|
"best_dense1_biases = dense1.biases.copy()\n",
|
|
"best_dense2_weights = dense2.weights.copy()\n",
|
|
"best_dense2_biases = dense2.biases.copy()\n",
|
|
"for iteration in range(10000):\n",
|
|
" # Update weights with some small random values\n",
|
|
" dense1.weights += 0.05 * np.random.randn(2, 3)\n",
|
|
" dense1.biases += 0.05 * np.random.randn(1, 3)\n",
|
|
" dense2.weights += 0.05 * np.random.randn(3, 3)\n",
|
|
" dense2.biases += 0.05 * np.random.randn(1, 3)\n",
|
|
"\n",
|
|
" # Perform a forward pass of our training data through this layer\n",
|
|
" dense1.forward(X)\n",
|
|
" activation1.forward(dense1.output)\n",
|
|
" dense2.forward(activation1.output)\n",
|
|
" activation2.forward(dense2.output)\n",
|
|
"\n",
|
|
" # Perform a forward pass through activation function\n",
|
|
" # it takes the output of second dense layer here and returns loss\n",
|
|
" loss = loss_function.calculate(activation2.output, y)\n",
|
|
"\n",
|
|
" # Calculate accuracy from output of activation2 and targets\n",
|
|
" # calculate values along first axis\n",
|
|
" predictions = np.argmax(activation2.output, axis=1)\n",
|
|
" accuracy = np.mean(predictions == y)\n",
|
|
"\n",
|
|
" # If loss is smaller - print and save weights and biases aside\n",
|
|
" if loss < lowest_loss:\n",
|
|
" print('New set of weights found, iteration:', iteration,'loss:', loss, 'acc:', accuracy)\n",
|
|
" best_dense1_weights = dense1.weights.copy()\n",
|
|
" best_dense1_biases = dense1.biases.copy()\n",
|
|
" best_dense2_weights = dense2.weights.copy()\n",
|
|
" best_dense2_biases = dense2.biases.copy()\n",
|
|
" lowest_loss = loss\n",
|
|
" # Revert weights and biases\n",
|
|
" else:\n",
|
|
" dense1.weights = best_dense1_weights.copy()\n",
|
|
" dense1.biases = best_dense1_biases.copy()\n",
|
|
" dense2.weights = best_dense2_weights.copy()\n",
|
|
" dense2.biases = best_dense2_biases.copy()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
}