From eea0464cd3e8a8d7a2a69d3a36a1bed9c923927a Mon Sep 17 00:00:00 2001 From: judsonupchurch Date: Wed, 4 Dec 2024 22:25:12 +0000 Subject: [PATCH] Lecture 12 --- lecture07/notes_07.ipynb | 197 +++++++------------ lecture12/handout_12.ipynb | 48 +++++ lecture12/notes_12.ipynb | 387 +++++++++++++++++++++++++++++++++++++ lecture12/notes_12.pdf | Bin 0 -> 52330 bytes lecture12/notes_12.py | 130 +++++++++++++ 5 files changed, 634 insertions(+), 128 deletions(-) create mode 100644 lecture12/notes_12.ipynb create mode 100644 lecture12/notes_12.pdf create mode 100644 lecture12/notes_12.py diff --git a/lecture07/notes_07.ipynb b/lecture07/notes_07.ipynb index 676f3fb..dc7152b 100644 --- a/lecture07/notes_07.ipynb +++ b/lecture07/notes_07.ipynb @@ -5,7 +5,7 @@ "metadata": {}, "source": [ "# Previous Class Definitions\n", - "The previously defined Layer_Dense, Activation_ReLU, and Activation_Softmax" + "The previously defined Layer_Dense, Activation_ReLU, and Activation_Softmax classes." ] }, { @@ -389,12 +389,12 @@ ], "source": [ "# Create dataset\n", - "X, y = vertical_data(samples=100, classes=3)\n", + "X, Y = vertical_data(samples=100, classes=3)\n", "\n", "# Create model\n", - "dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs\n", + "dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs, 3 neurons\n", "activation1 = Activation_ReLU()\n", - "dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 outputs\n", + "dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 neurons\n", "activation2 = Activation_Softmax()\n", "\n", "# Create loss function\n", @@ -422,12 +422,12 @@ "\n", " # Perform a forward pass through activation function\n", " # it takes the output of second dense layer here and returns loss\n", - " loss = loss_function.calculate(activation2.output, y)\n", + " loss = loss_function.calculate(activation2.output, Y)\n", "\n", " # Calculate accuracy from output of activation2 and targets\n", " # calculate values along first axis\n", " predictions 
= np.argmax(activation2.output, axis=1)\n", - " accuracy = np.mean(predictions == y)\n", + " accuracy = np.mean(predictions == Y)\n", "\n", " # If loss is smaller - print and save weights and biases aside\n", " if loss < lowest_loss:\n", @@ -449,7 +449,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -804,7 +804,7 @@ ], "source": [ "# Create dataset\n", - "X, y = vertical_data(samples=100, classes=3)\n", + "X, Y = vertical_data(samples=100, classes=3)\n", "\n", "# Create model\n", "dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs\n", @@ -821,6 +821,7 @@ "best_dense1_biases = dense1.biases.copy()\n", "best_dense2_weights = dense2.weights.copy()\n", "best_dense2_biases = dense2.biases.copy()\n", + "\n", "for iteration in range(10000):\n", " # Update weights with some small random values\n", " dense1.weights += 0.05 * np.random.randn(2, 3)\n", @@ -836,12 +837,12 @@ "\n", " # Perform a forward pass through activation function\n", " # it takes the output of second dense layer here and returns loss\n", - " loss = loss_function.calculate(activation2.output, y)\n", + " loss = loss_function.calculate(activation2.output, Y)\n", "\n", " # Calculate accuracy from output of activation2 and targets\n", " # calculate values along first axis\n", " predictions = np.argmax(activation2.output, axis=1)\n", - " accuracy = np.mean(predictions == y)\n", + " accuracy = np.mean(predictions == Y)\n", "\n", " # If loss is smaller - print and save weights and biases aside\n", " if loss < lowest_loss:\n", @@ -868,133 +869,72 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "New set of weights found, iteration: 0 loss: 1.0986983 acc: 0.3333333333333333\n", - "New set of weights found, iteration: 42 loss: 1.0984432 acc: 0.3333333333333333\n", - "New set of weights found, iteration: 48 loss: 1.0983725 acc: 
0.3333333333333333\n", - "New set of weights found, iteration: 54 loss: 1.097728 acc: 0.38666666666666666\n", - "New set of weights found, iteration: 55 loss: 1.0976882 acc: 0.3333333333333333\n", - "New set of weights found, iteration: 56 loss: 1.0973428 acc: 0.38333333333333336\n", - "New set of weights found, iteration: 57 loss: 1.0970833 acc: 0.3333333333333333\n", - "New set of weights found, iteration: 64 loss: 1.0964628 acc: 0.3566666666666667\n", - "New set of weights found, iteration: 65 loss: 1.0957834 acc: 0.34\n", - "New set of weights found, iteration: 84 loss: 1.0957702 acc: 0.34\n", - "New set of weights found, iteration: 90 loss: 1.0955024 acc: 0.3566666666666667\n", - "New set of weights found, iteration: 95 loss: 1.0942755 acc: 0.39\n", - "New set of weights found, iteration: 100 loss: 1.0938662 acc: 0.3466666666666667\n", - "New set of weights found, iteration: 101 loss: 1.091843 acc: 0.33666666666666667\n", - "New set of weights found, iteration: 104 loss: 1.0912626 acc: 0.34\n", - "New set of weights found, iteration: 105 loss: 1.0882009 acc: 0.36666666666666664\n", - "New set of weights found, iteration: 106 loss: 1.0867509 acc: 0.41\n", - "New set of weights found, iteration: 110 loss: 1.0861986 acc: 0.38333333333333336\n", - "New set of weights found, iteration: 111 loss: 1.0858816 acc: 0.3433333333333333\n", - "New set of weights found, iteration: 114 loss: 1.0845512 acc: 0.32666666666666666\n", - "New set of weights found, iteration: 115 loss: 1.0842649 acc: 0.3433333333333333\n", - "New set of weights found, iteration: 124 loss: 1.0840762 acc: 0.32666666666666666\n", - "New set of weights found, iteration: 125 loss: 1.0813359 acc: 0.39\n", - "New set of weights found, iteration: 133 loss: 1.0780971 acc: 0.37\n", - "New set of weights found, iteration: 137 loss: 1.077851 acc: 0.38666666666666666\n", - "New set of weights found, iteration: 138 loss: 1.0777876 acc: 0.4066666666666667\n", - "New set of weights found, iteration: 143 loss: 
1.0771211 acc: 0.39666666666666667\n", - "New set of weights found, iteration: 144 loss: 1.0768937 acc: 0.38333333333333336\n", - "New set of weights found, iteration: 146 loss: 1.0742698 acc: 0.38333333333333336\n", - "New set of weights found, iteration: 148 loss: 1.0733455 acc: 0.41\n", - "New set of weights found, iteration: 162 loss: 1.0730222 acc: 0.4033333333333333\n", - "New set of weights found, iteration: 179 loss: 1.0726937 acc: 0.4166666666666667\n", - "New set of weights found, iteration: 191 loss: 1.0725039 acc: 0.42\n", - "New set of weights found, iteration: 222 loss: 1.0716708 acc: 0.4033333333333333\n", - "New set of weights found, iteration: 253 loss: 1.0708596 acc: 0.39\n", - "New set of weights found, iteration: 272 loss: 1.0706216 acc: 0.4066666666666667\n", - "New set of weights found, iteration: 290 loss: 1.0698603 acc: 0.44\n", - "New set of weights found, iteration: 300 loss: 1.0697052 acc: 0.4166666666666667\n", - "New set of weights found, iteration: 325 loss: 1.069674 acc: 0.43666666666666665\n", - "New set of weights found, iteration: 381 loss: 1.0691994 acc: 0.3933333333333333\n", - "New set of weights found, iteration: 398 loss: 1.0687411 acc: 0.4266666666666667\n", - "New set of weights found, iteration: 406 loss: 1.0684437 acc: 0.43\n", - "New set of weights found, iteration: 550 loss: 1.0684316 acc: 0.43333333333333335\n", - "New set of weights found, iteration: 570 loss: 1.0684133 acc: 0.41\n", - "New set of weights found, iteration: 594 loss: 1.068293 acc: 0.4\n", - "New set of weights found, iteration: 596 loss: 1.0681537 acc: 0.4066666666666667\n", - "New set of weights found, iteration: 597 loss: 1.0677991 acc: 0.42\n", - "New set of weights found, iteration: 642 loss: 1.0676459 acc: 0.39666666666666667\n", - "New set of weights found, iteration: 661 loss: 1.0675713 acc: 0.3933333333333333\n", - "New set of weights found, iteration: 681 loss: 1.0674102 acc: 0.38333333333333336\n", - "New set of weights found, iteration: 695 
loss: 1.0673658 acc: 0.4033333333333333\n", - "New set of weights found, iteration: 701 loss: 1.0666102 acc: 0.4066666666666667\n", - "New set of weights found, iteration: 719 loss: 1.0663345 acc: 0.42\n", - "New set of weights found, iteration: 737 loss: 1.066033 acc: 0.4033333333333333\n", - "New set of weights found, iteration: 752 loss: 1.0657896 acc: 0.4266666666666667\n", - "New set of weights found, iteration: 903 loss: 1.0655118 acc: 0.4166666666666667\n", - "New set of weights found, iteration: 981 loss: 1.065493 acc: 0.41333333333333333\n", - "New set of weights found, iteration: 1006 loss: 1.0654801 acc: 0.41\n", - "New set of weights found, iteration: 1048 loss: 1.0651859 acc: 0.39666666666666667\n", - "New set of weights found, iteration: 1175 loss: 1.064625 acc: 0.4266666666666667\n", - "New set of weights found, iteration: 1209 loss: 1.0643268 acc: 0.41333333333333333\n", - "New set of weights found, iteration: 1245 loss: 1.0643263 acc: 0.43666666666666665\n", - "New set of weights found, iteration: 1302 loss: 1.0640283 acc: 0.4\n", - "New set of weights found, iteration: 1303 loss: 1.0634205 acc: 0.44333333333333336\n", - "New set of weights found, iteration: 1352 loss: 1.0630084 acc: 0.43\n", - "New set of weights found, iteration: 1577 loss: 1.0626279 acc: 0.42333333333333334\n", - "New set of weights found, iteration: 1594 loss: 1.0625374 acc: 0.43333333333333335\n", - "New set of weights found, iteration: 1600 loss: 1.0623267 acc: 0.44333333333333336\n", - "New set of weights found, iteration: 1794 loss: 1.0622777 acc: 0.41\n", - "New set of weights found, iteration: 1851 loss: 1.0618818 acc: 0.43333333333333335\n", - "New set of weights found, iteration: 1877 loss: 1.0616083 acc: 0.43333333333333335\n", - "New set of weights found, iteration: 1958 loss: 1.0614555 acc: 0.43666666666666665\n", - "New set of weights found, iteration: 1998 loss: 1.0613961 acc: 0.4066666666666667\n", - "New set of weights found, iteration: 2031 loss: 1.0606906 acc: 
0.46\n", - "New set of weights found, iteration: 2130 loss: 1.0606595 acc: 0.43\n", - "New set of weights found, iteration: 2431 loss: 1.06059 acc: 0.4066666666666667\n", - "New set of weights found, iteration: 3294 loss: 1.0603732 acc: 0.4\n", - "New set of weights found, iteration: 3492 loss: 1.0603614 acc: 0.4266666666666667\n", - "New set of weights found, iteration: 3662 loss: 1.0598251 acc: 0.4\n", - "New set of weights found, iteration: 3756 loss: 1.0595479 acc: 0.39\n", - "New set of weights found, iteration: 3769 loss: 1.0593852 acc: 0.42333333333333334\n", - "New set of weights found, iteration: 3875 loss: 1.0583456 acc: 0.4033333333333333\n", - "New set of weights found, iteration: 3981 loss: 1.0582583 acc: 0.42333333333333334\n", - "New set of weights found, iteration: 4146 loss: 1.0579673 acc: 0.4166666666666667\n", - "New set of weights found, iteration: 4153 loss: 1.0578284 acc: 0.4166666666666667\n", - "New set of weights found, iteration: 4301 loss: 1.0575745 acc: 0.41\n", - "New set of weights found, iteration: 4405 loss: 1.057048 acc: 0.43333333333333335\n", - "New set of weights found, iteration: 4498 loss: 1.056719 acc: 0.4066666666666667\n", - "New set of weights found, iteration: 4594 loss: 1.0565504 acc: 0.43666666666666665\n", - "New set of weights found, iteration: 5092 loss: 1.0562842 acc: 0.4266666666666667\n", - "New set of weights found, iteration: 5117 loss: 1.0557985 acc: 0.4\n", - "New set of weights found, iteration: 5497 loss: 1.0555316 acc: 0.44\n", - "New set of weights found, iteration: 6021 loss: 1.0554525 acc: 0.3933333333333333\n", - "New set of weights found, iteration: 6154 loss: 1.0551611 acc: 0.4033333333333333\n", - "New set of weights found, iteration: 6168 loss: 1.0548483 acc: 0.42\n", - "New set of weights found, iteration: 6210 loss: 1.0546328 acc: 0.44666666666666666\n", - "New set of weights found, iteration: 6233 loss: 1.0541582 acc: 0.44\n", - "New set of weights found, iteration: 6323 loss: 1.0541245 acc: 
0.4533333333333333\n", - "New set of weights found, iteration: 6386 loss: 1.0537696 acc: 0.4633333333333333\n", - "New set of weights found, iteration: 6702 loss: 1.0534701 acc: 0.4533333333333333\n", - "New set of weights found, iteration: 6997 loss: 1.0533447 acc: 0.4166666666666667\n", - "New set of weights found, iteration: 7101 loss: 1.0529538 acc: 0.41\n", - "New set of weights found, iteration: 7182 loss: 1.0524737 acc: 0.42\n", - "New set of weights found, iteration: 7476 loss: 1.0522219 acc: 0.44333333333333336\n", - "New set of weights found, iteration: 7719 loss: 1.0521553 acc: 0.44666666666666666\n", - "New set of weights found, iteration: 7858 loss: 1.0520765 acc: 0.4266666666666667\n", - "New set of weights found, iteration: 7877 loss: 1.0507878 acc: 0.41\n", - "New set of weights found, iteration: 7953 loss: 1.0506427 acc: 0.41333333333333333\n", - "New set of weights found, iteration: 8026 loss: 1.0503834 acc: 0.42\n", - "New set of weights found, iteration: 8763 loss: 1.0503162 acc: 0.41333333333333333\n", - "New set of weights found, iteration: 9308 loss: 1.0501956 acc: 0.41\n", - "New set of weights found, iteration: 9399 loss: 1.0493395 acc: 0.4066666666666667\n", - "New set of weights found, iteration: 9529 loss: 1.0491025 acc: 0.4166666666666667\n", - "New set of weights found, iteration: 9822 loss: 1.0488548 acc: 0.4533333333333333\n" + "New set of weights found, iteration: 0 loss: 1.0990145 acc: 0.3333333333333333\n", + "New set of weights found, iteration: 2 loss: 1.0988153 acc: 0.33666666666666667\n", + "New set of weights found, iteration: 6 loss: 1.0986578 acc: 0.3333333333333333\n", + "New set of weights found, iteration: 10 loss: 1.0986433 acc: 0.3333333333333333\n", + "New set of weights found, iteration: 18 loss: 1.098215 acc: 0.3333333333333333\n", + "New set of weights found, iteration: 37 loss: 1.0981511 acc: 0.3333333333333333\n", + "New set of weights found, iteration: 39 loss: 1.0980632 acc: 0.37666666666666665\n", + "New set 
of weights found, iteration: 41 loss: 1.0969162 acc: 0.3566666666666667\n", + "New set of weights found, iteration: 42 loss: 1.096638 acc: 0.3333333333333333\n", + "New set of weights found, iteration: 44 loss: 1.0957416 acc: 0.3433333333333333\n", + "New set of weights found, iteration: 59 loss: 1.0945884 acc: 0.3333333333333333\n", + "New set of weights found, iteration: 60 loss: 1.0926462 acc: 0.3566666666666667\n", + "New set of weights found, iteration: 61 loss: 1.0917169 acc: 0.35333333333333333\n", + "New set of weights found, iteration: 63 loss: 1.0914608 acc: 0.3566666666666667\n", + "New set of weights found, iteration: 66 loss: 1.0909171 acc: 0.38333333333333336\n", + "New set of weights found, iteration: 67 loss: 1.0909047 acc: 0.38\n", + "New set of weights found, iteration: 69 loss: 1.0905784 acc: 0.3933333333333333\n", + "New set of weights found, iteration: 70 loss: 1.0899522 acc: 0.42\n", + "New set of weights found, iteration: 77 loss: 1.0879942 acc: 0.39666666666666667\n", + "New set of weights found, iteration: 79 loss: 1.0872517 acc: 0.4066666666666667\n", + "New set of weights found, iteration: 81 loss: 1.0858692 acc: 0.3933333333333333\n", + "New set of weights found, iteration: 85 loss: 1.0848513 acc: 0.37333333333333335\n", + "New set of weights found, iteration: 95 loss: 1.0845288 acc: 0.36333333333333334\n", + "New set of weights found, iteration: 99 loss: 1.0844362 acc: 0.36333333333333334\n", + "New set of weights found, iteration: 105 loss: 1.0842372 acc: 0.3933333333333333\n", + "New set of weights found, iteration: 107 loss: 1.0820792 acc: 0.39666666666666667\n", + "New set of weights found, iteration: 108 loss: 1.0811542 acc: 0.37666666666666665\n", + "New set of weights found, iteration: 109 loss: 1.079949 acc: 0.39666666666666667\n", + "New set of weights found, iteration: 121 loss: 1.0790045 acc: 0.3933333333333333\n", + "New set of weights found, iteration: 122 loss: 1.0788108 acc: 0.4066666666666667\n", + "New set of weights 
found, iteration: 124 loss: 1.0781832 acc: 0.4166666666666667\n", + "New set of weights found, iteration: 128 loss: 1.0773427 acc: 0.4266666666666667\n", + "New set of weights found, iteration: 138 loss: 1.0766457 acc: 0.4166666666666667\n", + "New set of weights found, iteration: 140 loss: 1.0765723 acc: 0.4266666666666667\n", + "New set of weights found, iteration: 177 loss: 1.0755141 acc: 0.4033333333333333\n", + "New set of weights found, iteration: 181 loss: 1.0748584 acc: 0.43666666666666665\n", + "New set of weights found, iteration: 185 loss: 1.0729859 acc: 0.4033333333333333\n", + "New set of weights found, iteration: 195 loss: 1.072569 acc: 0.4166666666666667\n", + "New set of weights found, iteration: 205 loss: 1.0725149 acc: 0.41\n", + "New set of weights found, iteration: 207 loss: 1.0712609 acc: 0.4166666666666667\n", + "New set of weights found, iteration: 230 loss: 1.0707617 acc: 0.44\n", + "New set of weights found, iteration: 260 loss: 1.0705951 acc: 0.4166666666666667\n", + "New set of weights found, iteration: 263 loss: 1.0704489 acc: 0.38666666666666666\n", + "New set of weights found, iteration: 278 loss: 1.0694778 acc: 0.4166666666666667\n", + "New set of weights found, iteration: 339 loss: 1.0694373 acc: 0.42\n", + "New set of weights found, iteration: 344 loss: 1.0693406 acc: 0.41333333333333333\n", + "New set of weights found, iteration: 378 loss: 1.0693176 acc: 0.4066666666666667\n", + "New set of weights found, iteration: 385 loss: 1.0691156 acc: 0.42\n", + "New set of weights found, iteration: 387 loss: 1.0687498 acc: 0.4266666666666667\n", + "New set of weights found, iteration: 418 loss: 1.0683544 acc: 0.41\n", + "New set of weights found, iteration: 476 loss: 1.0682718 acc: 0.43\n", + "New set of weights found, iteration: 523 loss: 1.0680497 acc: 0.41\n", + "New set of weights found, iteration: 655 loss: 1.0680199 acc: 0.43\n" ] } ], "source": [ "# Create dataset\n", - "X, y = spiral_data(samples=100, classes=3)\n", + "X, Y = 
spiral_data(samples=100, classes=3)\n", "\n", "# Create model\n", "dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs\n", @@ -1011,6 +951,7 @@ "best_dense1_biases = dense1.biases.copy()\n", "best_dense2_weights = dense2.weights.copy()\n", "best_dense2_biases = dense2.biases.copy()\n", + "\n", "for iteration in range(10000):\n", " # Update weights with some small random values\n", " dense1.weights += 0.05 * np.random.randn(2, 3)\n", @@ -1026,12 +967,12 @@ "\n", " # Perform a forward pass through activation function\n", " # it takes the output of second dense layer here and returns loss\n", - " loss = loss_function.calculate(activation2.output, y)\n", + " loss = loss_function.calculate(activation2.output, Y)\n", "\n", " # Calculate accuracy from output of activation2 and targets\n", " # calculate values along first axis\n", " predictions = np.argmax(activation2.output, axis=1)\n", - " accuracy = np.mean(predictions == y)\n", + " accuracy = np.mean(predictions == Y)\n", "\n", " # If loss is smaller - print and save weights and biases aside\n", " if loss < lowest_loss:\n", diff --git a/lecture12/handout_12.ipynb b/lecture12/handout_12.ipynb index 43d9bc4..4099331 100644 --- a/lecture12/handout_12.ipynb +++ b/lecture12/handout_12.ipynb @@ -1,5 +1,53 @@ { "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "\n", + "# Initial parameters\n", + "weights = np.array([-3.0, -1.0, 2.0])\n", + "bias = 1.0\n", + "inputs = np.array([1.0, -2.0, 3.0])\n", + "target_output = 0.0\n", + "learning_rate = 0.001\n", + "\n", + "def relu(x):\n", + " return np.maximum(0, x)\n", + "\n", + "def relu_derivative(x):\n", + " return np.where(x > 0, 1.0, 0.0)\n", + "\n", + "for iteration in range(200):\n", + " # Forward pass\n", + " linear_output = np.dot(weights, inputs) + bias\n", + " output = relu(linear_output)\n", + " loss = (output - target_output) ** 2\n", + "\n", + " # Backward pass\n", + " 
dloss_doutput = 2 * (output - target_output)\n", + " doutput_dlinear = relu_derivative(linear_output)\n", + " dlinear_dweights = inputs\n", + " dlinear_dbias = 1.0\n", + "\n", + " dloss_dlinear = dloss_doutput * doutput_dlinear\n", + " dloss_dweights = dloss_dlinear * dlinear_dweights\n", + " dloss_dbias = dloss_dlinear * dlinear_dbias\n", + "\n", + " # Update weights and bias\n", + " weights -= learning_rate * dloss_dweights\n", + " bias -= learning_rate * dloss_dbias\n", + "\n", + " # Print the loss for this iteration\n", + " print(f\"Iteration {iteration + 1}, Loss: {loss}\")\n", + "\n", + "print(\"Final weights:\", weights)\n", + "print(\"Final bias:\", bias)\n" + ] + }, { "cell_type": "code", "execution_count": 17, diff --git a/lecture12/notes_12.ipynb b/lecture12/notes_12.ipynb new file mode 100644 index 0000000..80b8958 --- /dev/null +++ b/lecture12/notes_12.ipynb @@ -0,0 +1,387 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Previous Class Definitions\n", + "The previously defined Layer_Dense, Activation_ReLU, Activation_Softmax, Loss, and Loss_CategoricalCrossEntropy classes." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "# imports\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import nnfs\n", + "from nnfs.datasets import spiral_data, vertical_data\n", + "nnfs.init()" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "class Layer_Dense:\n", + " def __init__(self, n_inputs, n_neurons):\n", + " # Initialize the weights and biases\n", + " self.weights = 0.01 * np.random.randn(n_inputs, n_neurons) # Normal distribution of weights\n", + " self.biases = np.zeros((1, n_neurons))\n", + "\n", + " def forward(self, inputs):\n", + " # Calculate the output values from inputs, weights, and biases\n", + " self.output = np.dot(inputs, self.weights) + self.biases # Weights are already transposed\n", + "\n", + "class Activation_ReLU:\n", + " def forward(self, inputs):\n", + " self.output = np.maximum(0, inputs)\n", + " \n", + "class Activation_Softmax:\n", + " def forward(self, inputs):\n", + " # Get the unnormalized probabilities\n", + " # Subtract max from the row to prevent larger numbers\n", + " exp_values = np.exp(inputs - np.max(inputs, axis=1, keepdims=True))\n", + "\n", + " # Normalize the probabilities with element wise division\n", + " probabilities = exp_values / np.sum(exp_values, axis=1,keepdims=True)\n", + " self.output = probabilities\n", + "\n", + "# Base class for Loss functions\n", + "class Loss:\n", + " '''Calculates the data and regularization losses given\n", + " model output and ground truth values'''\n", + " def calculate(self, output, y):\n", + " sample_losses = self.forward(output, y)\n", + " data_loss = np.average(sample_losses)\n", + " return data_loss\n", + "\n", + "class Loss_CategoricalCrossEntropy(Loss):\n", + " def forward(self, y_pred, y_true):\n", + " '''y_pred is the neural network output\n", + " y_true is the ideal output of the neural network'''\n", + " samples = 
len(y_pred)\n", + " # Bound the predicted values \n", + " y_pred_clipped = np.clip(y_pred, 1e-7, 1-1e-7)\n", + " \n", + " if len(y_true.shape) == 1: # Categorically labeled\n", + " correct_confidences = y_pred_clipped[range(samples), y_true]\n", + " elif len(y_true.shape) == 2: # One hot encoded\n", + " correct_confidences = np.sum(y_pred_clipped*y_true, axis=1)\n", + "\n", + " # Calculate the losses\n", + " negative_log_likelihoods = -np.log(correct_confidences)\n", + " return negative_log_likelihoods" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Backpropagation of a Single Neuron\n", + "Backpropagation helps us find the gradient of the neural network with respect to each of the parameters (weights and biases) of each neuron.\n", + "\n", + "Imagine a layer that has 3 inputs and 1 neuron. There are 3 inputs (x0, x1, x2), three weights (w0, w1, w2), 1 bias (b0), and 1 output (z). There is a ReLU activation layer after the neuron output going into a square loss function (loss = z^2).\n", + "\n", + "Loss = (ReLU(sum(mul(x0, w0), mul(x1, w1), mul(x2, w2(, b0)))))^2\n", + "\n", + "$\\frac{\\delta Loss()}{\\delta w0} = \\frac{\\delta Loss()}{\\delta ReLU()} * \\frac{\\delta ReLU()}{\\delta sum()} * \\frac{\\delta sum()}{\\delta mul(x0, w0)} * \\frac{\\delta mul(x0, w0)}{\\delta w0}$\n", + "\n", + "$\\frac{\\delta Loss()}{\\delta ReLU()} = 2 * ReLU(sum(...))$\n", + "\n", + "$\\frac{\\delta ReLU()}{\\delta sum()}$ = 0 if sum(...) is less than 0 and 1 if sum(...) is greater than 0\n", + "\n", + "$\\frac{\\delta sum()}{\\delta mul(x0, w0)} = 1$\n", + "\n", + "$\\frac{\\delta mul(x0, w0)}{\\delta w0} = x0$\n", + "\n", + "This is repeated for w0, w1, w2, b0.\n", + "\n", + "We then use numerical differentiation to approximate the gradient. 
Then, we update the parameters using small step sizes, such that $w0[i+1] = w0[i] - step*\\frac{\\delta Loss()}{\\delta w0}$\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Iteration 1, Loss: 36.0\n", + "Iteration 2, Loss: 33.872399999999985\n", + "Iteration 3, Loss: 31.870541159999995\n", + "Iteration 4, Loss: 29.98699217744401\n", + "Iteration 5, Loss: 28.21476093975706\n", + "Iteration 6, Loss: 26.54726856821742\n", + "Iteration 7, Loss: 24.978324995835766\n", + "Iteration 8, Loss: 23.502105988581878\n", + "Iteration 9, Loss: 22.113131524656684\n", + "Iteration 10, Loss: 20.80624545154949\n", + "Iteration 11, Loss: 19.576596345362915\n", + "Iteration 12, Loss: 18.419619501351963\n", + "Iteration 13, Loss: 17.331019988822064\n", + "Iteration 14, Loss: 16.306756707482677\n", + "Iteration 15, Loss: 15.343027386070442\n", + "Iteration 16, Loss: 14.43625446755368\n", + "Iteration 17, Loss: 13.583071828521266\n", + "Iteration 18, Loss: 12.780312283455652\n", + "Iteration 19, Loss: 12.024995827503426\n", + "Iteration 20, Loss: 11.314318574097976\n", + "Iteration 21, Loss: 10.645642346368787\n", + "Iteration 22, Loss: 10.016484883698395\n", + "Iteration 23, Loss: 9.424510627071816\n", + "Iteration 24, Loss: 8.867522049011871\n", + "Iteration 25, Loss: 8.34345149591527\n", + "Iteration 26, Loss: 7.850353512506679\n", + "Iteration 27, Loss: 7.386397619917536\n", + "Iteration 28, Loss: 6.949861520580408\n", + "Iteration 29, Loss: 6.539124704714106\n", + "Iteration 30, Loss: 6.152662434665503\n", + "Iteration 31, Loss: 5.789040084776769\n", + "Iteration 32, Loss: 5.446907815766464\n", + "Iteration 33, Loss: 5.124995563854669\n", + "Iteration 34, Loss: 4.822108326030859\n", + "Iteration 35, Loss: 4.537121723962434\n", + "Iteration 36, Loss: 4.268977830076255\n", + "Iteration 37, Loss: 4.016681240318748\n", + "Iteration 38, Loss: 3.7792953790159096\n", + 
"Iteration 39, Loss: 3.55593902211607\n", + "Iteration 40, Loss: 3.345783025909011\n", + "Iteration 41, Loss: 3.148047249077789\n", + "Iteration 42, Loss: 2.9619976566572896\n", + "Iteration 43, Loss: 2.786943595148845\n", + "Iteration 44, Loss: 2.622235228675548\n", + "Iteration 45, Loss: 2.4672611266608238\n", + "Iteration 46, Loss: 2.3214459940751673\n", + "Iteration 47, Loss: 2.1842485358253243\n", + "Iteration 48, Loss: 2.055159447358047\n", + "Iteration 49, Loss: 1.9336995240191863\n", + "Iteration 50, Loss: 1.8194178821496518\n", + "Iteration 51, Loss: 1.7118902853146072\n", + "Iteration 52, Loss: 1.6107175694525138\n", + "Iteration 53, Loss: 1.5155241610978685\n", + "Iteration 54, Loss: 1.4259566831769857\n", + "Iteration 55, Loss: 1.3416826432012259\n", + "Iteration 56, Loss: 1.2623891989880334\n", + "Iteration 57, Loss: 1.18778199732784\n", + "Iteration 58, Loss: 1.1175840812857638\n", + "Iteration 59, Loss: 1.0515348620817762\n", + "Iteration 60, Loss: 0.9893891517327436\n", + "Iteration 61, Loss: 0.930916252865338\n", + "Iteration 62, Loss: 0.8758991023209965\n", + "Iteration 63, Loss: 0.8241334653738256\n", + "Iteration 64, Loss: 0.775427177570232\n", + "Iteration 65, Loss: 0.7295994313758314\n", + "Iteration 66, Loss: 0.6864801049815188\n", + "Iteration 67, Loss: 0.6459091307771113\n", + "Iteration 68, Loss: 0.6077359011481849\n", + "Iteration 69, Loss: 0.5718187093903269\n", + "Iteration 70, Loss: 0.538024223665358\n", + "Iteration 71, Loss: 0.5062269920467352\n", + "Iteration 72, Loss: 0.4763089768167732\n", + "Iteration 73, Loss: 0.44815911628690125\n", + "Iteration 74, Loss: 0.4216729125143454\n", + "Iteration 75, Loss: 0.3967520433847474\n", + "Iteration 76, Loss: 0.3733039976207088\n", + "Iteration 77, Loss: 0.35124173136132447\n", + "Iteration 78, Loss: 0.3304833450378703\n", + "Iteration 79, Loss: 0.3109517793461324\n", + "Iteration 80, Loss: 0.29257452918677557\n", + "Iteration 81, Loss: 0.275283374511837\n", + "Iteration 82, Loss: 
0.2590141270781873\n", + "Iteration 83, Loss: 0.24370639216786646\n", + "Iteration 84, Loss: 0.22930334439074573\n", + "Iteration 85, Loss: 0.21575151673725296\n", + "Iteration 86, Loss: 0.20300060209808138\n", + "Iteration 87, Loss: 0.1910032665140845\n", + "Iteration 88, Loss: 0.17971497346310233\n", + "Iteration 89, Loss: 0.16909381853143318\n", + "Iteration 90, Loss: 0.159100373856225\n", + "Iteration 91, Loss: 0.14969754176132244\n", + "Iteration 92, Loss: 0.1408504170432283\n", + "Iteration 93, Loss: 0.13252615739597354\n", + "Iteration 94, Loss: 0.1246938614938715\n", + "Iteration 95, Loss: 0.11732445427958361\n", + "Iteration 96, Loss: 0.11039057903166032\n", + "Iteration 97, Loss: 0.10386649581088914\n", + "Iteration 98, Loss: 0.09772798590846545\n", + "Iteration 99, Loss: 0.09195226194127527\n", + "Iteration 100, Loss: 0.08651788326054573\n", + "Iteration 101, Loss: 0.08140467635984756\n", + "Iteration 102, Loss: 0.07659365998698067\n", + "Iteration 103, Loss: 0.07206697468175016\n", + "Iteration 104, Loss: 0.06780781647805846\n", + "Iteration 105, Loss: 0.06380037452420505\n", + "Iteration 106, Loss: 0.060029772389824425\n", + "Iteration 107, Loss: 0.05648201284158581\n", + "Iteration 108, Loss: 0.05314392588264792\n", + "Iteration 109, Loss: 0.05000311986298341\n", + "Iteration 110, Loss: 0.04704793547908098\n", + "Iteration 111, Loss: 0.044267402492267266\n", + "Iteration 112, Loss: 0.04165119900497416\n", + "Iteration 113, Loss: 0.03918961314378044\n", + "Iteration 114, Loss: 0.03687350700698295\n", + "Iteration 115, Loss: 0.03469428274287037\n", + "Iteration 116, Loss: 0.032643850632766785\n", + "Iteration 117, Loss: 0.030714599060370343\n", + "Iteration 118, Loss: 0.028899366255902458\n", + "Iteration 119, Loss: 0.027191413710178605\n", + "Iteration 120, Loss: 0.025584401159906987\n", + "Iteration 121, Loss: 0.02407236305135653\n", + "Iteration 122, Loss: 0.02264968639502141\n", + "Iteration 123, Loss: 0.02131108992907558\n", + "Iteration 124, Loss: 
0.020051604514267202\n", + "Iteration 125, Loss: 0.018866554687474092\n", + "Iteration 126, Loss: 0.01775154130544445\n", + "Iteration 127, Loss: 0.01670242521429262\n", + "Iteration 128, Loss: 0.015715311884128023\n", + "Iteration 129, Loss: 0.014786536951776045\n", + "Iteration 130, Loss: 0.01391265261792606\n", + "Iteration 131, Loss: 0.013090414848206555\n", + "Iteration 132, Loss: 0.01231677133067759\n", + "Iteration 133, Loss: 0.011588850145034609\n", + "Iteration 134, Loss: 0.01090394910146302\n", + "Iteration 135, Loss: 0.010259525709566512\n", + "Iteration 136, Loss: 0.00965318774013127\n", + "Iteration 137, Loss: 0.009082684344689475\n", + "Iteration 138, Loss: 0.008545897699918257\n", + "Iteration 139, Loss: 0.008040835145853137\n", + "Iteration 140, Loss: 0.00756562178873318\n", + "Iteration 141, Loss: 0.0071184935410191314\n", + "Iteration 142, Loss: 0.006697790572744897\n", + "Iteration 143, Loss: 0.0063019511498957235\n", + "Iteration 144, Loss: 0.0059295058369368625\n", + "Iteration 145, Loss: 0.005579072041973895\n", + "Iteration 146, Loss: 0.005249348884293221\n", + "Iteration 147, Loss: 0.004939112365231496\n", + "Iteration 148, Loss: 0.0046472108244463226\n", + "Iteration 149, Loss: 0.004372560664721515\n", + "Iteration 150, Loss: 0.004114142329436494\n", + "Iteration 151, Loss: 0.0038709965177668067\n", + "Iteration 152, Loss: 0.003642220623566796\n", + "Iteration 153, Loss: 0.003426965384714043\n", + "Iteration 154, Loss: 0.0032244317304774253\n", + "Iteration 155, Loss: 0.003033867815206219\n", + "Iteration 156, Loss: 0.0028545662273275238\n", + "Iteration 157, Loss: 0.002685861363292454\n", + "Iteration 158, Loss: 0.002527126956721865\n", + "Iteration 159, Loss: 0.0023777737535795648\n", + "Iteration 160, Loss: 0.002237247324743051\n", + "Iteration 161, Loss: 0.0021050260078507234\n", + "Iteration 162, Loss: 0.001980618970786757\n", + "Iteration 163, Loss: 0.001863564389613244\n", + "Iteration 164, Loss: 0.0017534277341871227\n", + 
"Iteration 165, Loss: 0.001649800155096659\n", + "Iteration 166, Loss: 0.0015522969659304577\n", + "Iteration 167, Loss: 0.0014605562152439574\n", + "Iteration 168, Loss: 0.001374237342923055\n", + "Iteration 169, Loss: 0.0012930199159562866\n", + "Iteration 170, Loss: 0.0012166024389232565\n", + "Iteration 171, Loss: 0.0011447012347829103\n", + "Iteration 172, Loss: 0.0010770493918072343\n", + "Iteration 173, Loss: 0.0010133957727514104\n", + "Iteration 174, Loss: 0.0009535040825818146\n", + "Iteration 175, Loss: 0.0008971519913012098\n", + "Iteration 176, Loss: 0.0008441303086153165\n", + "Iteration 177, Loss: 0.0007942422073761319\n", + "Iteration 178, Loss: 0.0007473024929202092\n", + "Iteration 179, Loss: 0.0007031369155886454\n", + "Iteration 180, Loss: 0.0006615815238773228\n", + "Iteration 181, Loss: 0.0006224820558161947\n", + "Iteration 182, Loss: 0.0005856933663174615\n", + "Iteration 183, Loss: 0.0005510788883681067\n", + "Iteration 184, Loss: 0.0005185101260655349\n", + "Iteration 185, Loss: 0.0004878661776150635\n", + "Iteration 186, Loss: 0.00045903328651800607\n", + "Iteration 187, Loss: 0.0004319044192847727\n", + "Iteration 188, Loss: 0.00040637886810505474\n", + "Iteration 189, Loss: 0.0003823618770000461\n", + "Iteration 190, Loss: 0.00035976429006934636\n", + "Iteration 191, Loss: 0.00033850222052625716\n", + "Iteration 192, Loss: 0.0003184967392931672\n", + "Iteration 193, Loss: 0.0002996735820009388\n", + "Iteration 194, Loss: 0.000281962873304691\n", + "Iteration 195, Loss: 0.0002652988674923804\n", + "Iteration 196, Loss: 0.0002496197044235683\n", + "Iteration 197, Loss: 0.00023486717989213552\n", + "Iteration 198, Loss: 0.00022098652956051033\n", + "Iteration 199, Loss: 0.0002079262256634926\n", + "Iteration 200, Loss: 0.00019563778572677975\n", + "Final weights: [-3.3990955 -0.20180899 0.80271349]\n", + "Final bias: 0.6009044964039992\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "\n", + "# Initial parameters\n", + "weights = 
np.array([-3.0, -1.0, 2.0])\n", + "bias = 1.0\n", + "inputs = np.array([1.0, -2.0, 3.0])\n", + "target_output = 0.0\n", + "learning_rate = 0.001\n", + "\n", + "def relu(x):\n", + " return np.maximum(0, x)\n", + "\n", + "def relu_derivative(x):\n", + " return np.where(x > 0, 1.0, 0.0)\n", + "\n", + "for iteration in range(200):\n", + " # Forward pass\n", + " linear_output = np.dot(weights, inputs) + bias\n", + " output = relu(linear_output)\n", + " loss = (output - target_output) ** 2\n", + "\n", + " # Backward pass to calculate gradient\n", + " dloss_doutput = 2 * (output - target_output)\n", + " doutput_dlinear = relu_derivative(linear_output)\n", + " dlinear_dweights = inputs\n", + " dlinear_dbias = 1.0\n", + "\n", + " dloss_dlinear = dloss_doutput * doutput_dlinear\n", + " dloss_dweights = dloss_dlinear * dlinear_dweights\n", + " dloss_dbias = dloss_dlinear * dlinear_dbias\n", + "\n", + " # Update weights and bias\n", + " weights -= learning_rate * dloss_dweights\n", + " bias -= learning_rate * dloss_dbias\n", + "\n", + " # Print the loss for this iteration\n", + " print(f\"Iteration {iteration + 1}, Loss: {loss}\")\n", + "\n", + "print(\"Final weights:\", weights)\n", + "print(\"Final bias:\", bias)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/lecture12/notes_12.pdf b/lecture12/notes_12.pdf new file mode 100644 index 0000000000000000000000000000000000000000..48d64276f845d786a4fe291533c2eb1443c0d8e7 GIT binary patch literal 52330 zcma&NQ*xBh*{ww;b`+wR!5ZQD*dwr!go+qOIGIJtxWv&Z-j_WSMk;5n;NqYi4+ zHS0I8YpuD+6~!dznHkt&$&c@@Utw8Th?$5TjICk$_!uRuY+cQq86|9uT+PJHOdL$j 
z80F0DEnF>$S=hO_1q5JST%FB~>|i}NS9D|(NI4Pv&NP;Z8YFI^klBJEI1r9Gg!h8+ zH?J=y3Y*s_2%m2wmW#=%OuMUWk&vf5s-qc_(8-gCsJ-4fqN0WcnYm%U-yWYS`S^#= zY`*w7=hExHy)y+q=_n2|&?OWYZ+{&p!;qGRV90GT_2&2klU)1`x1ne4_I0H@4$t7` z@4S8AKTpyx_82cqDy5+@W0RCf~W}6-e#nSrKJnJZK)3?WkiCPf&kiJ+K^hg*h3VPy%CqBi$-ki=;k~ffDy;nvo zd^6Wq2=A+zq-dOp)!J5(F1^kXJ;QWJq!fb<&?eF8MNkek*W<%Sp7FSsqQV>($gH*>t6$`Cu zYAr1B^&r?vibRh!E5vstW$ColP%2#2!14oStzr@yO)4=2$`t$<>P4yr%P#z~1T3l{ zFKe}RBh(QFWfbVMrIx!`>7nfp+-gfje5g$9u5aZEp6Qnp#Msrj^(rlqX2IZnj1;Ki zyx>wZ+EI(;)r6F~S{;j!X(<=SRI$-z*b#o@H+osDIlO)+riu3b4!iS;7dpe78jt$cOsf?`#WiN5(Ziz$XU`2@SL= zp=(?yZ=hv9@1+-EbPN;REEiP5Eksumc~pv!T0ka=6>G0CbFRjAkIs+rmt`0huwZIa zvh2$(t<=wAi{(`+K`?`V4JkzP=)iecZ4f)C!*Z;V#|taaq4$KmEk08328X=UY{=sY zS`lFrSI_FE;A6xq9ls>d* z0fsiJfzda+9%!>tX3?`Gi7mC7^C$DiEF)nZK!V)CeI?jOD8k?9ms+za&Qj>J=U#J6 zr1}|Kl*R}CU-va;@LOsY?p-okqo}_0+T!2{Ef~(GdmG-)HEOhc-S{UInp3+zcx zNJ~jIrh2&nK$%Uv5;Zuu2FnBsO(zAcnv5)M)kia0oU(P@7w ziUgwuIn`!45>!jtn|(()Zga2vZ~sW$p^=lb z1Xdoe>dhvoF=#&{dE{-i(Q4>boF_~XyLW-}TqirSA_W9$Ye;7|e7av6o0+if)EPGV zf<6MquJ&NUh$?#@2V~GE>8g4zql$wh`J=-vkb+4LvY8raJb3 z)S(Ih4`|po)jPT{q#?#aSrf#S#YQQ}#B~IqrBQxdCZSuoGm^D#sA5Pf`DK}moSYB) zN}Y#4az6%dXR)FSCzuaqb@pz&1PX?y@bNT7yqZk<#s~@o-1~$I`uo0`MLv7XD#>IQw(DbC+XYImTkB>-TF)o6`p$Ln?82zk6I^u-gY3{ zglg~y)kddF)iN(EP=R>ew(M}uS|jb;sK4)BLT(q8b0kX{8e&!wf;#Vc<5i7f&k5V^NTg(Vb)v{l6{M*4E-L9g9_CsG8dm7In$ zi}$OBnI3pBb83yw&qZgp9)0Hxylbbv#47dctvRxo9xXdP%pDu<4}HWdWkEO^upB>! 
z)P*MetA{cWP6w`eH(cU^HYM$;m=n`x5QPN}ik;uJ+?Pq1wj3gw_Pk~gZ4QE9&F2xa zS+9;|)L$!<+X~EEJk}5BZ|0nb<@k&I%tAbNgi{Ir)=?*4!&k=Gs0{ zxfFk>FUmA^ckzg6qGVJ=w7*C`%s~CLWz@9;>C{__7;W1SR4H7wcbspz$mMh8^wiOH zUBlQY^S8Nv1LPxQT7w_0nUA`U8d)~6>u+TwO&nh+A?mnvFGAb!KkGW&NcdGxF^p;P zkZ$&-lEZ;4Xur%)qNX;MHZ8aM%9%yMYUyeHaqZ@Y(Rnk8Io*EABGM5%A=vps^@8P= z`t|wd_WBml53=g_DG6(4Z~FiF#DA{-$62y){40ye&BFD+E#{1ltOHp)%0CwKkqDb% z4@n3)81NUg&#sAc7)rX>3GtHn693~xMZ8H$9m%K6LV_cBC#`E^#+YAFJ!$#gPbnre zz#0bkYwF}$!}A?}jq&N7k1u_%H$fM1l;;579oTowcuyIFP9fg@(RXZ)k_vlp_2O`M zQ2zbl{C>F=gF1iR|4KcNL2Xs=X5+=S3VoIuH^ph)FyxcUhbp5TUdm)tYUSrMwbi6~ z8>Tv1R``a;0achHZ4z)sKV_xWr+Q66)XNRzU$K|bm#jNj3_aw*3Z~UnEJl+FPbr9K-Q2FVM6zZb zWD$o_RE0IdHnklM8NsY>FvVe)h*OM{~!yZ7zNvD-#;FralQftZz>_cSHC?l`{VlZ{sO8L z6QXIdB(0wESWFZi*y79o^+;CA&rg)&?pWTM@&~~e-yd&B+u$I3eed;o6r4JDD5C&V zmfm9TjkJd!(^_D#m%+do&-d+NjVQf8fS`p00-I$@)ghb7oIDofx?fur-}_m$tDy(&)Y8T1AJ8J!-wmT7Nzts1;==I++)^506CHg$>S zWlCd#{x+-vW&B7=wR||wt#?HVZY@&2ricmRkOOVr{k6Yp;5LA@Y-y7^MV6a*DZgyP z9e%FJv|sL^h@e6R>M5Z9DxtW7M><^(v_--97QcVW+R{tNK$lr(9-4iM&t_p|k--gW z&q4IUVR2)435|ZXL1}*3yuzA;dSZJ(c&E|9h`siWc@qU6i^Zh?m14KWU7z&!p*Lmm zvHW_OA+`b-Z>4_r3x2gxE5;@^b?~$`a&};pn*sa{y^kIls$b3hd+BZlMjf3ZE|~a* zp!OHrE@0(~c{PogxY{wU&LHVWRhgp9JuOy`_qm%7cUDQu)s=;xtvP>kT~rOk)arPM zJ32rAa5x0_$Q-3YanmMK?y-H=DZ&9qByEx>Pvt6Y2Y%lTBLLn=B*Tj8g3i_)$R19j&?A=Lz7Q zJC-stiLXs4>X5>#d4G*$aAvyD?xw?Vd;-a8vW^#AzO=u*$z$45MhJKdo_d;ke~wJk ztA32u-KpO|~h9n!b0 zb0HlY(bF1&M=kI-t16gc!-US&A`_Ue0#}Kpc5pX|?K0}bY}XXGg;%E+Tt!uq`jpMC zJrHZ-?pSJu%=rED5HCNnFZFfmCZ~EYFi(EwMCo1RvzOlN>k-L)=g{X(N8L=#+UU=9 z=Uq(e<&fSS>xjy;GM5!^AUHWh$hX3=y6_EP$%&$oa(`N4lop20SPf3T-y01n&d&1f zOZ>#OlF4fzaS6DuBajPDt`g`@ExaDmaF{*c0o1DAxxf~i)Sqs4+cd%+R0kc-sv+*u zTX?rh{n*Dv`Cg^zTRrqeHYs1)t^Hk`3EKy@tR{D9ZzAut))LO3n;6$U9km)~PyOo- z)?b%#gmV+#s$*9J`&F}XQl~JN;4W>{eqHo35ej5U4go8jzXH~pHd&`NS^nk75}IVv9Ajt2Lojd1 zG$kE@`&x%7mBmv20BK-3&GB!Ld#zJgW(rYkwNUqT_O={RNHu*=2Y5X%3n{h5Pylh8 z^P1KAAF|XCJI+&8lsb%TQujDs0nS1y&jP2@Z)9;CllDTY%Xdw)g3@{|B+@QQ#};LF 
z4KqDI`fpa)F2U}5mdRWky&EC7@d=8qo!=Kn9o5*jI*y&`ml?$H*KYM4nU2mX4>y&? zr8$`|!|G_U(ZtO>20uHxNd1l8F77e6SJ=T!9+A~j9^!z9c3URvP~*Vy+nrxd+%Z1~ zFgBov3565bSl&7U*glBJ(tXO}qH%SNkOmTA$(7E*J%o{evVswXC?Wd`Al?(vxLkr1 zCu|fM|NbIVsFizaR@{OrSof-8eEKK$yeQkRLg8fowpUWezGTnEhLtA!<*n=jI6Za* z$n*=~`Oa34r`<{|%pPA^#=y91E^rhMaPN4jsj1q8BtI?c07ut4X@9D&mJGy361|y+GF7jM@M3bdD4Q`i|%Na3z2qMkUZ}%j|@0 ziyO^HI=d+c)-0BIU!%x%?^n2)z^`z2A!?dUJe^cZfsQa38iaU!~9WFy|1l+^(QUAngo~I5Ds9dP6>;g>#K8AQT8}DgX`I+SiZ0lji=JT zT2wjGkL-r)z&F3pkB6bw=W)FQA&kUPJv2thF_PLPXEH^PZzg&)>cQ`6Z0c^HgQ@Nn z6L4J(C{2A_nmuLj^2sT;9!Pbp#Gm%%ylp|@)nCONjn`1i9PMQNE?(u!&SF8oo#0#V zl$bJDSGZ#Pd;VilRE4{gLr$%)+;8SSWua^S2jB1EYKKpL!6DpZ7aPHBgq-ff<eeddJw=&=p>}Vn&M6#3O2<0kOAxQnYUkOp`0Po9dcDf~5n9kn41}PB zPWHw9rQN;_kG;SxSg+}ijvJgz-{TmZ&ic-{m9ko7DYap(Q1cVT-)x)=RqN+l!R1eG z-VO%n*)LBi8HAD@l=iC{N_s!(3fg9@nr2!92l}J9s2E+Ti&DJ7MoKBqJ;2%f2cGg6 zse?Lv3srA`By^{w{b(?hwez^&u)>rmB{%eYX#pZ#>WiCBS5zr%cR0t~g@n`B#gqu_ zVpnv{aa<@tH7JGo@Qa^FLH;(S?_~d+>sdNtE4(NJ~YMzBKU0~KzFRp z0DK+rEX8==l)m#XNhzDm*P%S?#4kX9CS0G5VOemAFo3s8$ZC6^c`Y@uVf8LsM(Ct? zYSdH9mXzd6Z(glY4V6&&t8yF9#7TogDHJ=S0>$;_&`rr_4?)X>K=Aml9?SXuVjdkg zuVb;tq`T}6OGkpC%V^1? 
z5$>e;mm9&s@n&W?FZ?pBdEtadl>-qOXR^u%Jey`csCD05(a~;0Ow@=3GD%istmDuG z4N?;!woOU5STs0s)U*tXy*g8`k_%HP?F8mXUdMspG9j!`T*rXNBq-K$4 zW_>1}Wrv2UOt zF1jDOl6$>}CI3Nc+|K27`<#g98!GyT^^>#m=0IkQquUtfk7)!Qg0|g##mAh_oEvI` zn+#e1T^Ozi1BkVI*t4KK^%vi<{L5dZ5!2+SKq>JvbyUzHGxCGMvziI0`(WOv1kTJ= zU7GsNMie5yiN{wuBU>je?64%z`XNmYp`dfR__^v2-&CZ@5!?<~vse81+^~>g={XFk z5JMA;Y!$}-{;w=^<(JiYxkdQQ;GYKi^4gbkrEVEXe6&x9jS@A*XHIn@7pf;o@s^HP z=`4r5yGq3e$3YZ{`Tj}nHU5iT`jQCyXqMIZvH0a^W!eS?^2A`YN>=FZP}j5Sc;W7x#TCi=|@$SrKV~1SuQH%}wWP zo}<8!<$})EH-zjUBqrP?JSdBTC-dd0j<^D{x@EcCzBoJ>ook3j4JuCv-AGKvrB@aV z{nKLorDCC{`nqlpre9iJ3lJIK&mQVs%sdz!p)M+-9^sFnr7+cb~`U>NgyZ3)03UU$y|0zuz_b$m8u%~7^@ek6U!58RJnEM*O6Sd6N3;jYSbXPKT-dsMK|v|vlv zrY5mGu3^IPtjysFJ{z!APz!`_ou32^NVgoKFJ+tsFgXFRBGzcTR0}9``(el%_aWRN z0dnKtC7q`9qDW0B?uz{4_F#kKRVgYH+eu1%q(hOp`Ia-GnPGW~{u&w4@n)>M^!S~Nh6KIpK=oE(0Y{>!A_Ik2*Z3a^Ozl6w-Yna0$AbU0$ zQ!EGG>N2U}r(mJ*M;rgBV}`Wv#K!=d1TeiAGHW}8+vnUa()LjM#Ss%!`~#0rf8hND zbBf!cUQ51Z;&IlPLBD!oln_lW(;l^%*5c6&8+^a%7)&0sW~z0%{ULuB`ytymQw85x zl|*<#c%vhY7W1a2vdz;)96ewpV`Su%@m+Wo|M29L``wsWjwks?4VIh?TjszShXO9b zwoWzh88+-Gpg<9Ok0!;r?wO9U`v$WlQlsq!cKsOz$!ra7Bm zCmN=v*lePT&L|wmx^A zPsa@J`UZ8uN>Xc$m^S~Ej4EJBd_~#{Tm)eV7zRx7Zx1O~$xr-#+(eNxHEyT|Q#Aqf zHQcN2BH2}rQAcUZO79UV=2=Rw5mXgIb(67;Y2S?y?xI$9qH2m_?L*{T$Oub{;;hv| z`N>FSQk;64gq743>EG+!`x+G!iHuB>9JIq5TJ=wE;<++nbvObna-r4=JJD4NxSq7v z>hJ^*qm_;sl6a2N!d{XRbbqf(qi3Yo(n;Ji!yUQbosg|P%%r6=^)3%$Q)e4nQMOJB ze$v6h;L2&+@WO~xWkB}9n{E#YscK)C+D1@ z)u}n+)lM}d!MflNe1Xm(gL5Boo2?^5<2glP0A+F>!9EaAfIY?H{*+nxHF%kJFTCa2 zO=fr;FJmbs77xM}FX6@ROi0@G(&dYxtWDRYF`poA4H;2ldq>eP348Z-L9l5u%oXg) zrN0w@^))xURrxU5NcvAh8a=5QQmRiO-KoX?&1wR9b1H6V=pBe7vehTUUHz^anKV9z z>XyCt>iH%=ftE?6829LCTiWllNcEK`Br;qzwUpS>fA5&o!b}*D4#=ZGY^{PENQNo> z?MetlT;pl3f6Wt%chYDXF@wK}tEVA`U!9f;DB}G=Y6+;BFgk5?h(hhggd5!wpc2%g z>$n#P02cXfw0qSkoWN9Ju7-_(z0QeBMD{-44DZGiYp{9>0{0XP-XYGq#ri{X@!saP zmeXF19h(>-4U@cm42H-lcL<5(hs#EwdQ`;8acPA%Quu9eE>fjuJ(MoWElons&t~}5 zCb$E|T@-GY*h=t7CK%GqThD_YewPXZ8{Vm(B9z=h`he7>y80j`))G4i)>H^S_9J<7 
zlh>I>v5>KYOV}61w9Q3LEENK0Y@3CDc^m{w0~W~|(q1s~opLwQ_I~gD@z>`N_3++(z$2%er|VzhI(7M6co8)S&W2 z&cI=}EN;C?XZih;=m30;UK|EcEA%ND*jxw;Ymx3)GDoM1(lOzCkrBY`_bzC#Kjl54 z#*4JTwOmh5F?yf=4Yq2gPxF6WD(k-*UtnfoX8PYQwN|@3ewzz<`V-?lXlE(%I1(HY z5&jik*L}b=0oMc&l~%6(jeYungTL=KzpsQBt88p7;5)~{C;QAN!=m}>%q!2q zVXSxOc1OUx-{03MW-Fh~KM$+3`I6b+Al}4lc-(I*pB-2sShpi!{p`@Q_38M~Q|9p9 zW-#X5Hs(?8g4^Al@Xh}PHx{3)~~s~uEkfi@2=B$PI{M8?wK95-P)aItYsqqt?X z;0VBo$s7*Gmd!6GRVaxatg-@aRnz z&0GPX2?oXSR2|5iHDh_Zy$6Q`$Q!J~Vkgl8w~BW<#H2L|c1!q0&^B7!OvYeS;LNtF+{ zl)#0Uf~wm<;-?Hg5K7x|; zKrY7q9CSAQMK_dxahKEI`Umd_!ffv+CWjf=sfp0vsf+d1I)_B8*X-kXB`jFwWPxb#abWCB>!`C_;hP=K2SE&wKh3$FEvEjk?;I1bQEDO^n(APb4| zhYbo%*wthQ`G-LrWuK=7oVqmOwDg*k%RvB>0A-&&j^Vnx-7nlY*V;*f5J;GxV}@5K z@RqepHQKGwihc5fOkPG|qK4QYtIt6vHG#Bxv*boIAu?MjK7_UQKW zyrH{1I-axbSvKrd%SMV?Pb!<_%*2j49oFmXO6A=2G$R(edev%8`)o>F=llTM>Tu~` zok!>enChNnHhdf|vm^V^fD!jkP)&JL5NtH$YMrK=?vvSOLlYv+Q>#=Tuc}}Kx590o zG&cn!yHy5jS}6n5g_SW9=}fdJ9DiiEtXA6PK>*?$Jr5a(rY!5vzcl4B!YSSqY3|ae*!R{;#(@+^;beQAtBR6SrQ17MD9O1}# zO@)$98JTo%7!<#z!&0+GJKd<0-=m1;d}4ID#2v7%{?D6I30P1=gXg%#gh%Zn0Jf?O zsK(~}#AlJJF`LV!GhV2Gbo=X2nrVAheUXU>0Y?~##)}5qp3v906q>p3Nviu_%lX`H z1L5F|uQEuQ_G1%i%g#5>R>a6b@gabht90q|Opa%GyUReF^berT=RM_is-9(<3Iq1r}$FSOB$F;!Z&r+)?%D zQ*N*Lrd_CTDJbU?M?DN~so9TdzyN=*xS0M8x>MNzTbenwotW;f?*NNCh`Xx$mP$g% zu_)D{mEd*5-I?U4AUy3i(y+08{gicCIw8>ZG>I^_wyI|_ON?N%vPRT-fhze<8{VZZ|{x6I@)Bp2~f`K z_W32DhhAmt8QQKhrP*(9^|NiR_1f&>5LiFgPhhCJMlRE>b`7 z!SS<6o)^r}!unU}hQHwJH59R1asJ8CXl4zcO&>#le+Gg*DJwsK2Iwh+7!6uVa2D)+ z!1PGRY+3!T!zt>}zb?Hd8iclh+78n8xdWVyv+M`?Epo%WdVKx_C$T=kMt+C;6AOpD z1x*U@>F$q$;l%);z(xw6uhc&60vgmETzrBPnKGXkRwm7_c^{zwFVUFt2kbzdKcra6 zai-%!(v**E{dEIhq5Y+zjN%xP3{qV5dqcJ0*21i zs!(Vd*4_s)&T;grMkjX%M=VZR8ioM}s zXTk$QVcP@!xlgE42(;!44i}CX! 
zO^d;SbF6X=yOF>J$Uc*_ipRLL3X zpqLw$(Zd6-<>xgkdI-y>9)rnDSY{;EHm|%gDg1l|VU}@Tsvu|mxJwqz_olRW$S-8W z$=Mh|gp>I=4do$vjRs@AWT+mTQi6yRcG2_YNz3+fcU}x}piu#%XtnY0_vsUbD6VbZ zL{wA7mSP+@p%Qs{^MrOl#^BMY+5+wckH-QWS}#B{RH=8)JUoY$V%nJa-^4S-2@#j= z*4>bg{6RW?4E9+N2dsNmpZ7o#ozMCh)5ntVR9tn?)4PIq=h)%LJRDbbrTtyByNEn= zBOGC~>X*MNSx1UTT+A`I>DU^Kmf-9|-1AdEeg}3* zhI&DPI1|3Oqs~~9jMZzd`115muDUU+UQC00!o`$T-X(8y2B3R#v(xS^uL3Yb-?4Nj z`^63#=u(J3cF9%l_S-~f&E>N9@0!0uH{pxR+2iBGeJu!50`+E4e)bNIRgY+(J03VR8mf)g^p@~~v{m-sqNBf=^=S>KP@wgNh8uL*{-vQ>RC$pJP zC5-3wK!YQbXYsu=K=1wbN{6wG49^jy$2BcbI3q>zGXtFB5J~_SRQ9F?z*YEnv2r08 zsUlgJnz0L<^lr2Gux*d7A9>zHWE%KMNV&f?MK>Uf<>>3P$H9dgEOXsqjB$fRH}GY@ z9RcD+gwl(XGi#(|RE&%dA8!PLXz=S?>I+xK0&$NNI04fmE+|?!pUbXdXth0d?@BBB z0%$)!J4l!jOgL}<33_YNujaiRj^_9)aFyXE4zd+U_nOc!_oMgPg6Ny=!s0qgdRW;T z+(9*76Od>D5WSfs&+mVmzIWg*AT4(RNH@Q-_%JVPWJbKZNCkBFj^faXLA?7D{0XnrBR&^Ec}}^G8GoJOy=2=f&8%FGj!3;I1G+vLHiO z{OOmb2Mkm()Dfq${jk)intk>%J{h@HVtmOGI9=BJYPrOU(wwTJq|EvVeb2xe?|_D# zp1pR&q7F9dpEp`9_bef%d;LCT#x68~;EAv1Q`C}9{{5A|&4T7&Cc8~X9%2jA;ufOA zQ{AnRLQ{ywo(av#pen!ghK7&qSSdeRw4MbHTQKa|coj+ICV_5MqT{pzoF;# z%BvgU^xcj4(DI!ye3}1wZf4LwO{LJuBhUAOG3r>-vpa&ZwXD%0jp6ULgKg&XY^uEyoFa@8M+69-i^; zlFV&yE$|+=`F!-bNq=l(rEu9%t}EEbXdLk6_3aLQ%P>eA#yDgdrjQRpZm1AUSkP93 z4T4kEv~5i0yU9L88I&`Xl^dgIMIe$k$KP@@ff`SDgd?Olm7h|QYS3d+EqhEW5h!Vv z8MGkvb^*6M8L?le#s?499-*<~$c(y6Pmsm9>5e{p$V>~P!v}51DntuVLJ|rjhL83s zS-Z-*lE3H)Xm`#nK=-G;QKDOa_0To&FIqWB+=<_8#j2scXePp9Kt@>oz#ROVFzMb>ANdoxt`(5cEEF}@gF#>uYtnQv< zWw(P14?s-gBYQM2@uc4i)*u(an!NAJLrcubXluUr0sKY;=mNWbnHjrzX@cxo47I-s zwO)$J0Cg~ur`nF#)da?i%s1EWYhB3;DTod#Yir$+k&@qg5zc~dS9Y7*%u1L5VPMt} zv+oo+aI)25d)l``BtK)gbK^)RCW};r&2K;kjL1^wxqX!PXmL1jLFVrnIM1mNu`@c5 zEp4jPGHW;KkvlO5Wxxk>k-_``)$m12>#E+l;S@U|?xT{}#V6=?7 zG{HVt*K_l-A|jyF{~rkgtJw$%oJbDA!*)3zTGgOK1px8k_S?T>FhqE>D#Kzh0QFGB z^cWTjgxX59qi*L%%fdv%+K73b)kP>sj>JHdG4{%H4pJ~nFVC`R#=J&#`!kx`J2pu_ zhZfv59)6?;p`3DT-QK@a?KmKI>Lx_xX9wmCF{_hmoJQ9!_4GfE;sBOjnxU?(T_8|X 
z+cYi3ierAxY#Dmo{7B8oon?XDF_;%3l%qOBMi_&G@%Pk&&XLlIGrey0FF45J*XmQT9D#rImqj&PWjfTBSHpu z*-IS!v9tA5XZCy7<#A{|-=5Xb*plH<2m?WE2JAQ&qc;XX7ivBNZ4foS4n20z z`Jt&d^y+kJds=pqUTNo8$^xty5+i9ENf1 zbp`ee4o*QtdJ>&D4&T`Q$`n_4^O-gb5im5p{8lGdiBo|&&3zQYnsuUNUNikU{TT&P zFga;s?Sd1fbDJ07+u7cal=b!^{Ht5PYy&}|-K+06Hr)77=;P`w(HDs9W(V?r!57=V z!51sXe?~@CUi)Dt_#GcuJz@rOr8IDG9M~3+E{r>M*T{!#p2siQH)gxi@#GRIXc;o` zK1yYG!CBsdd=Kt7RAfHB>3iqxMa@m&1HKEzH>IurNFlcUw=R;Z&2!6^h^bAV55>frB|R4KbqUrXC6p~dkQ@=3~LWY=k3Ecm}7L)UjoP8BExhd6Mvs} z%B70*CYeV5N!r!B4nk;6LJ4inOenFzb0@8DCUw-0n7UQ5?^pQo&TxFL?l-GPJwiOz zFDaPk4`MZl{`>!`#>x6`RAb>{{@*=EFX^YvKdOb_zG3jV$0pbN+aiPNEX5CWrBWDK ztf{kzn&44#KkhNM$snPv-;LMXUbuD8#wbsq4E^_xNF&v4_(c zanF032{W{ll*zbC8tS&=`>{NiKh}Jc8Qng`>b{h^VWvWKhyZOXt+xj+;(XRrVLu${ zSE-Glg$W3C$;6lN2^q3X7 zUF^0*?tgAC&wZM$Wi~$^Q03R;o;~#WoNXs69~{}la@0-M3yM^P|CbQL`LF&77ViJZ zAbLrI_96fCb@{>)a?7HTd8%ZftwP+4wcD#&kP<1vf25MZI-YMK;V%!RJ@9#_ zr;3)Z0S5~>S_c5P>eS)gSa}lNMk_aDC&gNP%px84AxI;Cu^d1rgc5Rt8!rU@3QmtSTe)=!Lt%#7Wu_Zl7C zxnn9kCNOi6yL=;guq1gt-jtiRk;XTy27K3%#zb3mYOqD2D0ZK4Q3)e?Z87mm&9WLU zTMs=%M`^__OJ`QpY^z?NwWDLZHI2>DvqV35Vk-Vi8UL#VD^_;4|0v_dpD`g!$dLQp z{Ul=yvVVE)g+o`{6xZ8i?X=dj*-bPmn8kd~`weFByfuXnq|L))aX9yCJ(_SbfTi74 z1wkuu=vnVKg|x;RUF?g0!6>q<8(8+L=u?kQfyxAvX&K!g0))?kxA{PowX{W~)9M?B z)AWbcGdab$`3DsgxEhbk!wsb68`GUeHaBb+1(Os1c?Axg=o=Y_rA;Nc#kiiv4RXe5 znZJsEH93K9+T zCjd7}h?$~6En&juVUyJHuEZsN+4TGL;Nnpcmz2?mdCU!V1F)Hk7z@- z4nO~a7-x(W9g7<2gZzF}fm%!&PZPBQDiVOtAIgDbAw6$rY*V!LJMUNV*k9PI{yYW~ z+`=_*HKf`xT6P!naI>y8iA5W9wem8$hc6JR{T9UkBGbQ`*=OV8{12H@lx(Vnki+&q zF@WpJ(Kml?+K@w4X~FG%7=MZatzDC0-BRPc#{2ubETjrWK=XN7EuOF9of@e*>$1f@ z=8$I6D4fo9FE7|~{6viv#%9HpquOjJ%I@|apRh+pWcmA6`J+5F{QS}DJp%N1!~5J> zIs=N2-nyS(x?f~)7ZyPlZqt)OjdxSFrmTwmj}SvNYe{|_a@X?y{35Ymy>2MqI@vMU zOTOJ!+P;dm^K9l%IO2RdrbrU=BAk?NS%as~9qsyI&`l~{ZR+pbSanL7Fy7j~+A6&H zHSfFP50kFo;+bs9f}{%0A~zTG=t5kITq#6NlDVoWD_mifF)369C^SzCq7e&Lj$%g$ z&NB+ue+Ntok&lxs1`;-mdL%5*bViiPWMfvLW7K;G{q@2d#*3u%jwOBV)i8-iey|`=kD?s9W~Nz 
z8oCwa`hp$%FSWw?Z#okj=YJgU&`a5~{-;((Z~dZsZK=g~`su-cSc`(qaF<#U4V;r< zWT|nSk~jpo_7`GFO6`|7Klrg8WvFj?Ujn5|ZjCHo*rH5syM@vZaJ5mPc%B3%O3&Zj zjibM>J8^!sY_cEapURQ=`O!`_`Fn0%Lkb>m96{yQ3I6o_c;7x&cXbuJJF_GEk=g$A z<(_%o^E4)0qaKBP;ak%$cR+Eoy5m>?JlWlwS33Wc-u6=idRhb!j1lqt$`?yR7ne&P z*5h?`#mP<+IZ{ZeX?mmiZOv*RF470Q?Ns7&=W5&gsX6pujX1}WjRiMBm#iCto+mx8 zLZ%X&C(K?URs#B?{E&{_S-k*(#Epzx7!sWS34D>5iV9C!CvljYf=eX_ViujYffXtk z>j~7Daehgc_t6!x*8&-68jKYqZ|S*Ep)eftPnwv8af5UF1Z5nzDzM-rKBtx&DL`WY z!+8z00b@*V7SamQK$rtrYN=Rd7M?TmPvcF<(>kYq+-xHB)u!Tl+^j>H&{FI+ zhN$439xrkap^NVo8f?U>;H@KA;i=$Ny;Q6n_Br*uX_Q^0(=W+0I50za^)tXU);iwf zx3B*(ZG+Ly&v4bG(_6lWG{hAUV5e+fGT|AUj;k7)i) zHVL8imEVa}mLe)!W#^@wx)QP^a9V%&`x&pYJ*lE7h**0#fh@eT4|JSvjOGSyD-chI zy4jP9wY@q&55>|qJ%y>Nv|r?I3^>isW)}T6{8A92HeUWx`fc^}-M-XXfdyo}oV2bk z9T{OsDJ|giMo3DMn9epII$c&y%yeQ@r&2ydD+*^#l7UpRI z_Q5A74b3Z|2cK3j*D7U)a8xjx#Sfvkh|)W=MU=$~qtc+3D4G~UK^Q|(tm<+_gSs*Z zUk!jmb%G537&2V|Wv6f-7m^~`40a@$0DnO)kMncA5y}u}`O#cBqI4|9Y*!eJCS5^Q zLR*MBEPN;yZ3QEehw3squ3{`d0v>_gOyw@r6-}^Tw;mLACe{TDLvvi2n5o?zb~uBh zVbg;$2UmEziU{|oE>7puN!CVmE_pECK|Cy3v&2poJl z#q)DGJySW5U6{w{EZ2}EG2A^lsU|vaRMDVUr52wq{A~N@~6zO^sG#u3P@G@(~m9BVMoWNx`hl zHGs9uNB_7yjYSp}J+uHoMo*5X2s$ZjQxQsRDN{yoj8Y*4kNzGxQUGA4*d`{AWJi-l zv0xA$k7bz$Wu_2-%2z_%)Q)!QrV~QZKMwlmWGsd5SVib#;skgqc73X28j~>jsEuSd z$7rbbm}hNhb0`dZ%mLj2P%$dhk3ncVwbYLD#m)MItv=vLTQCl1Yz(oLVA?8NTRgyJ z@4}wPtlxk?f1>&SYRJX(uQ<-}9|^-xWy`*p3EA(A_G4X5EyxaDB03Qoi~5hbj@=@X zO}>KV>2iup889f`&m zy;7}3loHQ42q1seHag;o)3z#N;n3Kpgl0SP^v8sQUz7MVWfb zRN6Pp_Lty(++$qNJPl;nFJjwm(lEgD`eXgmBy%c9=rs0K&-76T>zi9v;?(dVrhnz_ zjL+njYsuKA1RQJCS@^6CdT>={5Lc*D7(2bX(Yte5Hto%OZmAQ|!e?z0K8nDI*Z-m9 zQKn8s0PzUl?Eg{rPEn#n%d+Td+qP}Hd$n!bwry**ZQHhO+qSLSpJ%_f#~t_l)mP4s zs+yS*85z05mqder{$IX`|Dmo?bT)8uw=<>_6c-Y)wQ*AVeWn-uzgUL+YHt4HXSY_e zwL&t+?!M@u+E6!%L0stHf=bKXU58-{AA}p<)bow2iX_I8XxUzFuU=_Vl`y8StJJ=! 
z)*K0OT;v?R%nYn!2hH$7Utqw82f`GmPZOCV=NA_9d%5j(i5TeTs8!m($V4#)sel!Xrx>7PWF>%KN>HnHP#RoxiLqFAd@~J*)S)IZ%PA zls-^`6u&{{7We@4sd3=g0$7Tuc7DTeN7U0hcWmO7r<-i5_uTINuiqUj(A(oY?fP-S zRqsvnf=(}|S#4SII%EU}8`O9fOyh=VU?%n-FJ>5Q>WekKy4r72rUUDG=5frNp{JBl zQ}LH0WZSaFQgPLzIpwpoQnS^2_K#m_t~=$NxXmOSD|JPXO*{ve-M!c9I&>M+n% z>sIZDz(VyppQJ`L4G+UB?b7si&)2_UCohh3&){uiw$9yF>ve`^~G`K)?P4JRbc{X9bMnByyZ}DYD^dWp)C-mZ*?PTPu z|E4ZKM8@5!yX*6!4{glDCKQIT&Q&h0bVX@Mq79sgwM3c?>hdUfjU}WX<_OMjepSd( zpUkdr4^&+wO@+wsd}T6TEQ!buNTr>-qrydM;JQq`B*#0%N&-_AMI`ax?;2qOkMA>b zvdK@Fmvc1P8V+0yhG^F1mTtZQMV$*-j*7zxbqi(Y_p7^ua~KPcp*kb*EE*#ztx@-0 z?oDHRkpRf7(-cD_l8Q8H{Et=k5}kfH$3Z-WUDrj;=cUa|lcF3Ek@s_1!*dHff@Gy& zf~iF7I)9rREq2ROHNcAG0A*lr!!yFWfZkt*c@F65WfBK&^pf?5nL1l?tT3FhYo9Et zzJ!5JqRaecZMQDex3Gx@1TrrepYF78U0j%c1pv6=F{Ma=hk);AeJssKOsOoJxGN+G)La=f*e1}v%7nRZNV z(Z3Qwj1Y*4|54EkA}S3x;kwTbw{SHaOj#*pn4;BD;J;d{d!sU1A9I)Q_|u?pdf2}L zg+@y?1Zc(p%<`cF-6YD{m}f`OGeWG+Qj4jvbaK+u>a+mP!E zY}T(j<{pS#*l<$r<0`PZ3YWI+9~3evr;g!;5Mx@*7@rmj%pV3-rzX4NhWC#w;Y+(N z+l_j8>$m2ALrQ-@M))rm{6CP)0IvTf?EgUB|Lgmf znf*UFlOknLDMeN6?v;6+#B~mftg|K0nivdWbNLe}X>4S?zkSe>UX!YMEwl1BL%ihY z$duqTq4)64kP13+I>iYTGTMtA%r^P2sS8WB;*(OFn~UrxF0{DGRaxO5h99;)nV-{R zDL0u9S#&+0u>XXG^w|H2UMw)aMvK_IN3BA$j+nEo_n#WrN3o-hQpEKOVWOoG?6Zfn z^8%h7#wXl|^|?8@MJ+x9ijNqe>pW|)w{*dx+~4ITSw(pnpqs_^gbTy=*-=Ac`9}{A z8^qM|*qJTV>~kNB(yCPHdai9UO2bqgq?j7W&n?W#&bx1j z6rey20b5a`Mw-`Rta4MMZ1^4NXOIvq2p|Lt2VX_@ZmQwQ>>a%(5O6}!+DrI^sl|LN zNx86vualacMZg008GFMo11Gf^z%-WUGCniJqc5*Up+asGCXT>sQy+z0I2To%_2p=U zP3x4kJpp?L2MsQW$_dj|D$zJ&U2!BXC0T0=n;hes?+Do1f-ZqA(nreC+5mTV!6=z- zi0?tK-F1Rvn1L|1Y3ACsbb#<723@?97(hoUT+xDL*#}QczxkMoer;>BG$|=3!Ts6YP$`z#YP9;&DBo1vqft#+N?f{x-3|dR za)S*5DDxYr^WHB7i}yEj(*Ld!E{76n0t|%z8n5=w5eV3f%UdEA;%o$Z^ukeR-)@PNUe5Uc$@5lUDkQsZ-L`H~c z_Egs(7g!`KK#eI2RJ39i79&%~wTI#Fw@&1+I;A-X{BND_f(9$An-r#Q*DaAs+%5Ua zKh=x~M^2e1_oDYNF%ZT+XzfA?l81Q(SSN=ub_&_mzFElP>}<5%TD0$*X3TM!W9u`dd5hn@e=-qM2RQ0stkLFIkA zc@mWeV_@1jX%=P1Xq?P!vf^C8da&IfR=_^!M$c8uZx*H#%-mm$2C&2|RffCAKCi^| 
zXWR)A<%=#Ka4B?A_re0Y;^)c1eJ79iNbEJERLS`ejCJz3F_^0il01$Ar0|>0-v1#t z(5mOHg)69jkwmeW*yF`z-j4IG^rA0|6R)(iSYo{8dQ#P~Y6wPJ`eU5}q!mJ!J4k`x zeR6>7YggZ;r(S7s0EDt+vY}{jcN?>uLUXB0O#!E$`lxHq$ddL}(KS zIh-&>D!b{PxYUAI&6iy6^u*PTKRC4g`sTYarPGyGoXgW$K!h@SIS;T*dk_MI99_(5 zZzQmmmK*`*Ho3g#vLh3JDW}L%UAjAAaEfK2b2}@9-g&ZmaGKa+uMX?k@$kiTMjQEb z+SV(Et{lxLR|66iHPj1Rp_Fct?U9XMpteH*6Vo>;{H^4gqp95 z4v1PUUnR)b4}p*(2l@P9_0vu^CX&_+)%huQM{$L^AVe75?M)6p1~KGGZyQikPc>JWIx+)nCttAHAP4Ee=v$XT&zvPMkCpv}kKk|X4o}L~&yrP{R zyq=yJyq?{kKk$SAC;$K09RC9a{%>;-6FvQZ=tFi0U>NFYF~Y=i@7rz+V@LqS4ArlHviO|hQD z$4r#f%ub`qvOlnw=Jl2&rizgRhZ6Hu&F2x7r~;Mc`^wRvQT3H;(#^ya&7H^67lo1t zinb<=#x9N8=k?6KXbil@e9+=VZA?`-ioeLSM?HbU9m z7GP*)fcfbGyZ!6-J=5TeUiF_zE;WaGUzY<{J`!6@y!}lX1s#`MLqGwSygHGOwLYFj zQD;KvP$m%j>(~Ox{Gn9DY`M@Xjp6n!BgDS6q!`%|w}YG&Zns0jqbqekWMCDj^~M3A z$T-3LJG)f9=&IHD!%A&HDO$okjST$!2O)mF89uY9Z3ei8ks&hq!%q5vt4UL*K zt*_3{l+8~9inBH=A4OFu~#=GGNO4+!WEf;ib=(&s=z)L=9G!L zlcwnVp!_e*Rj?}CV_jEzDo=$u_PZ7Q^GNwd9z94gc27eLj6WGl>)&&B3%>o!cxTq` z?ZG+MidwI#tS2lDlEe{jSScF>G0+~6wA7r5pKA8>#SblV)Qj3E2#x&ifE&?L@z4uO zPO%nk!DW-=42T#0@x8s4u|BA$Ys}CojlRQUv_m5bHfIw&v-5JU;nzKwEO?g-3$jBe zOxQP9E#EU(>Mlk5tat=3IzFAb%&jtc%ZB;}^zj_CWm2V~h178ER_Z2=jS5r8W)m_p zilu6y6V{M-iWwzjV;^yhuRe#cpf=qu?mrBO6cc zDO}QnLkuNRAjkKHnD>Qt7nKIUb4?E3lr`FNc1$}UYf?A47^qgtDK1E7f1F}aTL zwA1-;q|^CmEOG-NC>=o}5m{C6BiP9D6|P43Kp%D{xzDY$q{BD1Y(=0NH4RdTvJj|)@i7BBFe_zTEy4yk>E0)aGQl}qs+=h z;_c^UR;9!)4LKm>5?@$eJTtTS(ZS*f(o}B-OT@*GPqctzKK16l1&rX=_|nxs^Fr&I z0RR-pF)!+~BlZe!8j7FK?`U&q;X%1n;dySp!hF`)zQ5-4-gRBPh)@B)9)pbj#X9Em zyCKxXDQ!%%rmEXuOcI=l7nHpt6_@UwvdOa#nN`J;V6twGLL5%Ft}|XRX0Xyz*GEtBgrjpS>J&5o9zODRBuacP ze$EK5o5|_BYOXQ3yWDeiixeb&hU%`-fqH1rjRFmsE9f)4a95cod+>+*eVI8Gw!!8T z`Wre~mCA&O-xHbrkN?XP@1uz!+QNcUr+>lDnFQOV;R{(_tFr{3{{rK6r!epaKjASA z05&d}(3*)b#bh~&S`_11)w#&oMG58eW38aH8aaa6LF}tF8eq*uh$5UZ^gF(smn}?A z%^!pF6*pKW=BK>P$ctNCLCz zhk(zJJr-=>Hg^ou(|bH*@~S@^Eb%{xQjmH9Jwr}Tog~Ic52)fWpKWR`KYUqrMkW&s z2cFDP9Uml8dUf1?z=B6=6e=>_6);MP%jC)6a?1GxeC=2HeTnK^<%H(nzezN;}!qz3?(@(2|Fm|V`W~e5|R4leQXlSmzMT5Vg 
zkxVuSVfQ`o^F^JmNriSUyC{xl`aOnTv_1=q3v)`8!HK2(3EEkI4Wk*wgl!nd#RB-y>2gj4Ld3DmltNcQp@z z?S~i(0@w6+XrQ;i?0Ve&nnxob9n#|GJNx$))Vm1k2*bAn$cV6Sk)tLr$xR|Pw}g+J zWcg#{3$)N9>i3B*z@R6;2{U+35ajL&6&|4MM{|8Q%>N5%;xE5ICu_y8*=Z&p~O9z56fR^P7;B}W@ASSTFFs)&f(gVIue6cl zJ4KfBMt^1VfZ`Nwcpr>rQ@anf(TSX`iai<@}dtdUYVvYI>1^+5R~dhG8^S!?7URu)0GPXNX7p4~+tT0BR(DjUy`kZg|41(=ub-4x zrhPfu!E&xqztjC&&${zpFv0)m1N>i1!vDW`V^@w|0a@<6OwSE|z8CNh7B$fMe~JA+ zsI>nr-e9F?`j0cC)ggZI#&QmPJio9~h+0-1)0~E7)xOS<1L89WjOD68KP7x} z>`{E}7@;RVIJ6jipY}y{(tOEsGc8I5Tt~Z-<_?aB`_7B>?0Wahd-n_DC#a7rt?Tqx zj^p=xw_o)ExQV=F-Y3u{e>y7G>1^GxQd3^*DXHn)0CNj-iTZOpQ z^KhSl{RR7m1A;|9asKmgT16GwMV9&dZ*y{IgQa3+J=BA;xWv(r@g8H%V#;-l!j;Tw z2t^V9yf>83Q4leyqGJ?-B;R%*)3lD!sGt(d34x3u%NT`7wY%*DQ=HV53KiNZng60< zP$kHu5he|f8_mNPF?wt-mPshrS|$>@S-|s=xs-nn6|R;w|76r#U18QxRH)GEw3`el zs2T?-U?LaAf{rKpS`1(ID^`?^x2jyTN@FBB;7S!3VVyzNi;#ff*M@SD98#2|V ztx{XTvZ6T-Y!9FsT&IFHLu>x4>g63&F}z@qorKy5v+i%!+hD+(M0*(ctoth!60Db} zhK~AgtPY$SF>=`8w`v^RuLY(?knC3gE(q!re2u|6HgSyRL8a>jQ3M9NI$mdivd;^x zUGiz>p~9r4MA$;18w%a`hs}o74ite?fX3xzcevdqxCqs9$~;9sdVQEx7W_4lc1gc{ zoT_ym5FV%^(h^Z(=i>Na=OYmatI^)n=GK-Gj~Dre=l;`_xP9-9n@@d%_kkJ-AYg+*pc1e1jA&(eMN^rc^JJ5I>dc_l<|x0(`k{hE6k*la61F zRQIY9T(q62m}qG9uXjZVK1?=`A|Ja-zJP>?)z3`hUq+>BpATeP?-pVqqM)an;^E-g z;xRF5G9SB%i%mBGP_43 zr5$R0zWNZQ8dxYl6eg*|`9?p`l6A;cC6kfI?pr9XY}kM|^rxJ|&J`3fbsnCI_St)8 zC*Kn+rufUo*u@}Q)VW@94=mhr#ZHOg(Iz@WUQhwvqt8t4-fi{afVE?6tca~ZVC#^Y zU1G?<&|q|E)s2n8xm=9Vm`TeWGbX|FGFl^jEpGJy#K%ZhH5K}{R@0jTS*KnIf7bL& zhwAfRLpb}yiXaCF;%%R(K`5^-qy%#}6w!kvLJprMdP}T@pDd6TA8LhUXPd9g06`xg zV3NsAZ1Gqh>q8k0Tr#K)F$8|Dz>9T*v_wq3a^gHb9T10ziL-_)Q^k z`$w{@_W&iU9b^@|XAHD0;>!#`MPlJxd}SEsGf>Q~I0|P$Kns}ldbsQ$yuPUqT7Lc; zX6C$faR$IG+;Z$x3-r`~s(ye?Dp2#kMcXx@xqzE zm0{hIa4`}?$^)$P3aa$)`r0pVW&_IpS-Zn~H$o>rBPJ*6{4D%P)SeDKdAE7vXeCoO!OB$Dm|9WgSDShc7w&Y&Pox zeN|Ns$?~K+i&`-g_%Xcm0qxe^F0N~&pX0N8$ew`43 zNHt^;n1)c^9GzjyJHU4>`=|CWAQj5~n#CKj2j`>*#yuiKX8j5j!%>tlHi&ZmcVx|< z3U`pzlYO-7m{NaS6MjLs$y1-RA1OnZ5S@gwl95m;u(QQwzhYJrBztHlP4QJ~){MVX 
z2hnhI;D_|i+`eSKX~6cUPUw&osY!N?z(tXJ8*i>OAElsosZ%tO&8i3XbUV~h~XdkQ#K7~ zzMGESZ5Nif5&BR0-4y-27+!>BGJhzXeq#0$f#LCVLB`Rq zJ?+&|N=;@-J-^pz+kZUos?lbi={IBcd74dSZUOZ;5TH!AXqcgLyYpC?T}Al>8-G@tLd5$!bw4b`1-c>Z8EdOpg)ZCZrXJW+ciFTtC#4sqqUVXu5#8__UZ+IsN2AH>EkYQUeFVUz^A!EBdWz2hN>!1ok_xr&bU7SX}THjHT_rpT1 z%F7*08KpP=i5>h7Kf`BA;~%*bift?DH5;EPnuy$x@fG$qT8mQ~O$5ULYKbS%v#!L$ zLB~RF2)w`Lb|wfXK&4?!l?pK^FX~rNO)-Lygcw`zgJ&r)(M=mvB@Qj&6X9{s7ghODn+QU(qIHlV3a0a#1rZ$1_J6a&RSSE zb4E*1N>kC5QSq z;KznZGXJMS2d&%v?dntKnDLOtN!Xss1MT`(xdD z`XyxY-h81=nY0wOkZB_wj_$p@gE1n{bOG`<4$`_^Idh5-=IYCj8lAZ^Y0Qgec_5Epe zX1D*!iPR}_`=hii+}sQAf@D40d;yTr`qLLquSAo0+d=dMF$k|V~-(-eZ%$j zBa}oP-j=hk)0uF8`>c&K^cjGpn^)JfGL-I%r8{`X7gPH~3klZ$16Pa~{lojLtL}2t z(^z@8)2&?6Y_|YPX{ZxTpe8zAWGG5D~#DIiY2T6&-7 zTJA;IqR`FDBASmeL|`{2(Hywo9C}LR8MBO?3yY)lw#@X@t=AQWohwoueYKy)@a{;! zW=f3pPj&^Rg<;dZ6Ba$`F|@)oz)s?ys_vgBJcci0FO?$ck^yUVXT0AKn|Py?*U0OG z{bMFJ%ozsuK0YAHFYp7p=gZ;WB2OW2Z zozu~}-SG`XbF)Q`n?OcOICIQ&29D+8=#faYWLPn5*@e}*>ES}1Fa`1kn>8K#ATcvg zd)kR43;>1-Aasp+j_g`+C9$)He2=>z2pIo;R@7pF9%v@j4Su}~3n zvrJ+_1^Q?=-f}`mC+U#iwe(WQs@FlJW6~_x=ODsf#=2Y|2~H9POBPOlb(2Q9)7}29 zv@#;nK<|sYvO5$jF8Jw)S~xVl3B!E@hC@Ayo2dc@yNYYhbE9o~(gx|25J%TJ$Hmpv z#l<<-MMpb3N3(Leho2S#XlwZhk1pj&$M4*qXF{e(Be4{|>-xk9z#fq1#hDEbK+Nu0 zjP!{x1t$LhsCQRxB?pm-lo^~QF5a5ijvGqT(ItmyUA=v}oK#A4vym&CWaG%B?TJ=8 zUNG6gaXZ=*mcQr!P>z8Obw1CGGR{ge6NE!rXdaKB!k3 zD#RX+j%A*}^5zLpeJ!4yolXvz%qiOKIVVufOnq%_c$oksqDME+BH*P7i7H<+u5L_Z z-oO+`fl8^jBk&9le!v7a?clu%sAvz$M6HhFR(Cg*VTsmKn>(-XT{s!5D52X5x=EYc z%zA4i1v85%Q;1a^>cs)_mQU`BO%@Jyi@+rMxjK z48adgPtC8TcNkk9awJ4M#@P|7;j6k}RKtmYPWvlb6i_Jy>V6=OU(t#`qNep1NYQ43 zf-~_7^15g@cOxbj2M(@^6|17PPI;PUB=?8mh{1sNGL$x65kcCV#7WIjFHbjbAY>n< z=wxj9{h#M7t!?|J*ohJHL%db*ehw(~^m1UMl17R2%CtU=#?_V z6a!59i~@Gpg~?Uc;m~1ntRIMDz>Ig+-;{Icwlg{!92Pxu9T^#A6l-QcL*t~to!^r% zA@@ajza~j%VC*-$u%^8J(d=>c4tC2U7NQW5d7CFC%ZClb_KXs;Cf#&vY3#@9+94$> z(Ep9PoP(Zo5(*|bmS)7cxY&=cGNHBI z*zuJ=Q`a^MBJb6K5?G!$5PyTd4~_hN%Y!{6H4|qArkN0M)`Y-SpR`yW7HN!0dPm-> 
zExv8#bD#SOt~NEF#IHBOsu?pSdX>)J8jhkyQ(KFMs-ZRf*ygB74AIOB5rGsNu|u2L zRZj7sPpQ^*0A}_nW=CY6E-DN(QWSGw9K}DiD%vwMM=G9p-~}%A;yeVbz$i*o+M-fGEK# zVL5zAede*pPe?`&ocraHqrMJ zUYe(0G^B~>kVzFP{CSfP?HKm<7E|&J+G5rU=-!Q6woNN*XIf;(ADA&K1qI#ihjIXJ z%s_ei=32#Xq@P6sX=W3SMRrvT?_k=eNp-f|?W`P$OrMfz=lv7 z%P=o_|Mp+FL9u^E`M4X{MFoTX;E(X3#}WgbfWYq$^8ZfzF1A4DG_{t9J_}tAbl?QRZ%K>VQ~pLXF=zr=!+(0IEAoojnGoRCm=cTuHUmd4 zE4IK$(*2%bAtS}F(La@#qr{i|+ZJWC=O#i&ytFIRN7A9QCFePpBX`=(ZXH;mGkq8x4o zAb}@3dzo&o8ri*qFwI9Sp^Xt0zcJvzF>3n6*n5zJaVI5iH%OZrs8 z=fY<>tC=W&9XiVk9E$PnV!U)Q^6hdTZe6s=ly|{oz%#De=!CDXo3;bjhs)^6V+_~k z_*ZEYF2nD_o!9zHkej$jhZ0xn!V{cMT;EL2FXJh=(9~lpcQ)HNM@izyUWcd9$3yLG z1g^4P=VUx2j3?pSE0B69rw3<5*s^znmSBrBnM}1IWa~drs&`5E&yhN;Gt_>?HodxD z{tl;Dq;(;e8@1D-#Z$!_glvbxiV1N8CP55hpD`uU0!YZ;v?sQ`xj%M@!3}2gPQ9Kn zI6R*vWb))y^6{0CveASr91&d@jkG96g>m4bd2&H~oeOVccOk%kf0hgQ^H(esgKnj_ z?e-^o3C^LGs0ZU5L&(>1+5XJut^^`)v&j*YzT@q1-P$LxXsb(qT(HQG8L4?&C|g^* zFM!m@xT-Q6K_Slmn8z^P5`mO`LzjPzz`73mjr#r~z&YRY$GzmZpQY4?8I) zXXpWwjqaZZ7nQWOAr$(0_8sbCunkXP4p_i{f~$S1A*qg093qA?{&QySho@#|^ha0^ z(xi$yKGg@aKR~PQDlY$p?f)MN*Z)f=;Xg(Nf0awIjPx~~soW&C`x(A9+4On=pnql| zz&-v;{Qseh`rmdFm{~afW3(k$4MI&(bfuf%5EMYZwXi8XE5o9qnNXn$wW--JA|QZf zzZ-wNTUY=eqQeT1ty1J!`L7BErSni#E3oB;*^-53hj_(Sf43igNpdry`Qt`C$r_i_iH0E8{zB&LM z+kr0peZr*kGGJ$~hFK%4Z#^m&9+l5ZcyA(Z%%et+*}0qE7oxGg z2n4yL1@;p)OWb>SKVY8Tf87hiah08 z#O#&7JcIu|2GikvYVTk7Fo`C^@5!|CRXx#QMmH093?wEd13S4%sfA9AYpO19)5=@5 z#zD^|MA0M>5;z6&BLIKOpV=_my5(v-5ZL?D*Hbn&Tt z6U7>N$Fda)Qw!WCKMk=i;WoMIM64qjnxbU+2@1OL43E(JJg>5)MNA8vrtEbo>!N0P z0401Bp5a+o5P4SZrB{F=5!tpw4@GHpOle+@e^yPZoKrY7|g zyL5WRSKnjXv>vK@G~)=|`JIT8h>9UoYHI2L>Ted4yHsjPunEwLlgefFrr%eDW3^@D zZC-h``eeA%a#vf1@v_MtV;_B>i?d@v)!@C*+-H-w&|hUs2K1PiNyD?ig7qX;x~s?0Pn9lU24MEbi||C|05_U`(Z-5O_bHvfzYo|LMiB|O-Cs1( z*?h&f`XG(gsnomf_bf=z1JO^cO!1D5qo6`_6-Eno-XAo-U%!uO4zN;>vv(IMNrHy!&;Lr-4#;KlBN-;(~p~rvATD*-~WJGLQUewWADhNL8 zOE?}r{S+2oY_E@lqZQgs@-mP~Y6K2b-nDNVS4$VOK~q6fQLPuwv)c|J1ll(vLe2>J zCKMLi+p6BvS!*B3N{H-V(n{kfOYTsBl&B?Sgrqu+>#ZPE(bhWsrDaWYf5V%(ObgUE 
z!v{Ff&d1~(ei9!{;3|`Axes+Rqt))SeZ^vQVCBZWhX04ez1<_akS0X(@9N&2kzFno zW}Av)#t^QPnurGb$B2Wy(A@MFI5+5Z-GF04lz`ig?!DXFuFdGJ5D6N_T<@>v>g|Zp zC4Ko7zbo>^2AGV?0)DGWP^u3B9x$M|BXUYwEt6=Le>@XQhL*7-n)chb5K|bxifgQ- zp`a6sI81#=dwH^DLL1GhF-bN%mgdhb;wnJLPYUI72$1IOA58!&KuvaoWhN zwRP$y<~qXe@~=I0dcnZB_qs7Qms#L{*CMWVjV=1|?mQ4{X0#C2dy%I->|itbn*Ghn zHc9$@PGFO@QoOi7TU0;i-nP1uOa`5SZH0oTnA&S*5elMi+Wk(|>-(R_`$?)FzU>Zg z<6t}$UeZ*KXnbAZ&!Ai;V0iRK9M&Gqq(D`X+F8Z1ZLOJ~r2fH*PTDXlAdmODw@j-R z?@HYNjN5CcY83whBK`AS2}2zqhN#Y~Nz!%uiMVvixfkZKOtA3d2C`yZ#bJ2I)Te8s zhwX&iBxd8LYhBKpdBM5jAnW3JL2d3|uIXXeIaz(5$wzQpBC&bfpq*E}o+6d0?eIJ? zJ%kP==GF1B2|KfU;9U1?e;G7wnYPxNNInQebd6D-4!g5|K4rn2)_woJZbR7ycCoD4 z>?YhjaOV;?g=fI)mL&tRCm_JlqJZjID8kVqi^Q4a8fZlh@-0#9AMK(wP-vA+agFn2 z%>?L|vh5Qjl48bXS^pInJfy{21~R-2x~xK>SdAnU*2bZ-{{w#401pg*WTo}*E$If` zLRztV$|KO~XAL5Y*L<@ygF)-)NvNn0E`EB4)|2e_4Zb_E4ekW<^FP8ojUcVpVon*A@#n3@0Ab2Fc&v(PRcVyOn^o^E)q$Qa z*9L|W)WtGEj|4`-jl8~ZJ#yFM`YZBwd1y4!h314s1doWCNhD{-*z1IwDSJQi6(YVX zy?Ln&%R!KhXgHoy9K39eI-_pY_g zvk&(Y@5wtFUDZA1`LNx)lzpw|TX_wUlP7IL(iDlQ?H)NfEiqjqTXRMn>V6#RIXCkY zs2q1~%QEM&VUjv!xiR_GL`ijBPOi9NRlbqUFs9PN)*LD_ML%zFWXAT&;^)8=-m&d= zGmfZarn|))=64%Oc+SLfnW8o%S!&Z2#>4uG8~}|tmQyxFNnCh$Jn$%P>=uL zc*=Tb^Lk+E2nj(C@pASc9{RU11*Z4~76oN#i6UOz5;{>ktmeo^VdD zH5_{Mjt{8<6X+y%LB_)wB6&{y+EPz%ZEgWO(&th`Ypi z_u_w}fw_6tZk}mY;i-XP}WYpR$I$mQLGwsM$uqt5cm&L^kJfa z>Pj6Kt8qm8+are^>jJ@XVbWI|i5s4{#nlx7VT%pswf2H5Imzgq6$v3XeFa zS9dc~u!5fOnoBP@ZmhrMUs<%jsc)6V#%4nnbp@>w&+hA2U|3k#@$U4rb?+qW7dM!) 
ztKE8C$n`~Mte*gYrWgGI1&*rAs#yI#0p5IpL)X*z#zThZ%w7T09^@1;Yi)X^dW8|W_*(sJpWSm@aIvFv^>t|1AfX+O9{K#I%cRapj53Q!pkSRQk^Q-3Q-Fan+nTlI;5p55~*iO>5^#B zSg*x{D*@SD6ANt@1EX;2L$Otk(MdAzVv*06j`_^xD#TwREZryR*en3 zKEFUP-EOO&=^GsJ$b+CO*#xtEna`UQ#xoMh`Taqz7>{cayJ@F-Tg=VGtR`_FRa*98M9!tNL&Ti*Z-b;ly0xnojl8NryZjTH-(!fdi zTtwa5`lwr9`X?Nq97bRp&WhwGki>Z}*v!0)yj;)xjNe?*b0bDKSybetz)f4nD`T>- zP_1AUIh3Izp3y-rD+jM6ZV#l~y^jfRXh+(>bDmFDhX@7|(s8}9{4g0&+4PDsJUQ6T z-r^a>9HU7rigF1JIRd9XXgemY+S01&396LzT-#-^V{iJH z+@JIP8NC%#%jik_fS)DAT+ z@+Pv^ODZ*q@8NwKH?q`@EzwC={MRF@T2chQkpa^<)4wtN(JriqEQyjIwAr7JkxUa> zaWHs~ZsmoXbrLukajBW&GWVe!?~N?K(HE^xO)Bg7E! zwljpNYA`PG7S*)G)NHi4?nm#JN@LrHGHms+pwk771rOF9a+kwpbDb+9ix)VV(y~A7 zuj@~AurF|@bDeN{KooHAs=sFAfkpNmap9Qc@YOc$yK@m9JJ!6eDjr)`MAXB}lC15a zETl`zS5+3YGn)xnG3)Eq@swW&34NM`NP;ed^?P&UnY-9fIojb`iHpdlr|46cZN3(3 zT-y~Y?AE>whi=RW6+^;J^PtjI3Md)qycZ{~;HJDEPg|ZqgUQylTwAghE9688Q=(3i z=^@A+BK~%mQ&By3=OtL7aKBscX`gUEb{#3yWZV3$Yk%STF(^xkN*b$5x^l8g)IVix zTTk8Jo!rN3lB}HvdlUO+moyTRGm{eWBFypD48e9OCe$I4s=7$|B1_&#X@ha?wra(n z6jwp>QZp3~iv2sgoa+deUlr^F9vL0+>%8#Pm(}`;sveWb%XkZDVu1ze}@;rKYNhC_%SV-_D z&s*DzXJ{i5Zivq&kDHiUjhLmZ6Q<}ul?d@TIRmW)L@d+}JY0-oqS^O|DaYoxiMf#L z+LoGN-JFg;T+lc@YJnAXq!8Ckmugjq#n%_PvJ z(gSi#m_bK$*)X?)bMy-_&lh|Q%f)1r;c1C+&bnW&$uIV(o_h3L=*>FKyYvkMmE?wr z8F3WaFfYC{=ZnH%CY)oxWTUUfi$S6=2z5M4^;iD&kN~x{F-xU1jTg}uNy z+i^ETfR%{}mS}IU9G_29>WWI0IicitYecK9iG;O9xY9W(nU1TU&Q< zNhLKkVaFOilewS&{*#=Y4&WwbSKTUPssxetac~>%u0g0TmG2LQ8{rEgMD}qw4I1qu zuw*KzV$@5G?(h26>{-zcsDt6;aU#`v(ZT{=R`o1?ye=qy3dC zgBf#G!NG@7#k%Dq4~!QSGm6M8JC73&B`#$opZSPtY6@}!M3RoW!B6PZ>*IRnjhu4o zg-l(LmQWFuRa_VMkP*%-?L4Mj&uTG87gs;3_qJ#Mhq8C-5-r-6HPg0j+qP}2v~AnA zZQHi(%$2rnyViZ%yXw?w?f!u=#?x%GM~{e4Wk>4&D$E4 zkBdVK5Al_h8m@i;J_P^(N*M^f|LKPR@YVibt5s|q|6MQq$IBUcxi?qAltO{3d2}rL z269|Feozs_n--oR?m;PkvZ}ecBOz}N3yBu^{S_D_x1DT6&sGpgK2Qz-qd!z zQ&oZLZK{#9;-K2lSFZXR^#k!kkqU4gpg4w&+x*mCU{$J?o(Hm)ij8QCmox z#ozWwLuIonDP6qtSZ(p(T(hBtB1%;DoosdR22=9|}ns#;u=@{=wQVRYA%HtqVdnZ)gt@M5cT56=5_A7f!Ns-QO0trt( 
zIctgtzUXrPwVvl{JlU+&8^4dQ*XE+M0=@LUY%Z%d%a_r> zV(>j!&kO=-VtD5huV;?Y>3kZOc{f8fxtZWR$$0tuaqmJz7(_SDUjHkF!xLflc*APr z9jK{Q1pJ6>Sq_u1%EGDB`YOsB{zJiw*raMii>g~HqqT4Y1`Zu{;}|4p+7ylP=wNA+ zKCat$@S3b8w)LuMxT4ia7kwS)CC6!hnu;TAN=9R1xG%e+4=MP}%>H8pP)`g@qL@el zD*zJ_PRETk_9i%E%gXD68LQumHczZ~aF^45AF{ukQI%7b;;gXS7xEfvVvzEk_(tJIo6Sv&FaV@}@Y!2WqzWv$@#~H9-TDP#015ui@!i zrtTE3e_YXuW>yuN-8?7>s#s7bl8O~&lo~4j3I5Y9ak}a>)Cob-0Euqy+>ZA?``tiW zX0xrN2`K4u()7_+NF>og#+LjKq+T#UZBPX9_qvuuM#-punya#0-zr)q@T=jcISSg2 zVL{IMg`rDegbxJd#&yk8;qIH2Xs1sAJSB>@-O-49txL{Aww>1lphU- z@E^J+kni7oqAmg+prkd>0E)ljzaWif6yeT)YG#Zvx<2H5_8{J0dpo5CzXosiS*K~x z>WocyUQ(}W9qawv)43k{<~_kY+v#T(_+X{#<39P_or2G-lpA@BmGc>1#yWnTKGZd5 zVSPh2uUol3*VJxFVw(D#qQ5y0yTT~b+p?g>JQFk4H2#DO0X3b)Q1R-XE4&0-&$e~v zuUuc%cN|{xnRNlfn`4udpl$2Y>O)-BtN~z~CaaiaIRHxMl!AG6A(JpYKy0H2G!bR* z*X_IMjV-938EpyrzGZY|!hTi;^v)wzZ{hVhc$?T6ai_;AnnqSzFYpOjHE%c z1HpgAaDQ5W=|CrEA4&B~HKzhbYpHy@o>XysbLC2}sNAS+G{!=q=>gi`z}(;F`_E z8F`zQ$`CvuRyRbEf@1%1{AQOF(Z{#MWU2?KyuwjQh5|k_J^KfwBN`r(S$~F$~ zyN#ng(28XUV!6zNi-$rTml#`t52NdlQO{YPmTO6^I)B)D&ktimAv4NUdy;euxK^l<>It}}?iB%v=te2#jG<})%1I?1Fp`QAm{NSy zuJ~n-S@54_Up%Q^f{iOE$$~f0q)e1LGq1Cs;xMuzd&mlzws6$9D0B+1vZ5Iu_$@vB zXa}BBiKel9D>KO{nI7f96@B_8Fz5c_A~po%VQvO6s|AkgzG&(~q@blUq`#3m-9`eW zEo#e*gE1|h<-%7M<0ao`1q=TcZ74XN9xONTJX=+%C(;l>>*p_>T^E`X>YE5Mp3r{m zZuVNHu|)LnFclA2A|iqv%?g5QC{=E1hYe3>wa@iH>whleKp^5&E0$K$X}BD5m$(%H zaAe7O^WVij(R_`hcUU<4_(KTu`+q1MJb z@Ri{@JQ=j7ZR!bF2x7Uy&5F+%=K5CV*_4xjt}Wy#nh~409LQGs%M_=nX7%Hp7&s5) zeb*x`JyZk+`c4YxjxDCYn`4#IV%V7LduY+lN-rEF`j=Mp)J_P;Sb!_jV(xx?}zgD!fgNc^Y1S8%NU)UN_b@C)41>|_u=S0 zhl0)e>$jhmArl-zxgqK)4Pu3+6qYWn&JM}!GCrjB&hE_(^9!YLtJxVyxfFU01WhQK z>+{CPu-5k#$9|<`%&AUg)ImAzWWn>Rqkm_kpHqaATmvH0XeUcWO;@9 zkJH;D9wjj9ecRDue@Q~^#yjk}yb*YvES`3_*0r?4*(rkzuYd5=fF3b+DKbxK5$md! 
znOik^eZj@AQIu$;iL2MwzJYy3dL5VCVU569epcY)(q%a}(OuIr`}Xvxr{D|3_k}Cc_nDKGMQm?G z606qlu|+AXyY>Vr)~hir@de@q(l(_*^@3 zmyl_HhHLeP!?CdZJqpRwno5n>9uxB<*eX@sTjfNgoFW9iSdyWU5iE59!%3Odli-cc zmcJW)Z!xVAN*KH0!^pl;{Xi-8Y_iO#OCv;%Q;u(hox$h)+A!^=Ar6WxT(n@Q3?&yg z`rY~DcKOinF4kg5ouOqO`4$4372zW)P7K@X-xKkLv?B&S%K8-t+#8D;gT$MQq~Z7Y z0?+@m1=$;XcjLeN&zjBEfTa`#D`2Y9Lqq=8w9m{sEY0Avyr=mPmnlswAN`DW51!;kc>vo;rRbq|sw#h7&Fcx3K4#8IgcvzA5dB7Xa_EhsABMIpYt3Al(Z$`IPdWto+;e|G~6>pQqkuCBVa;q2Jh(QQX)q}JK{og3V? z|8&=XHB7KjT`JEgZWr`5cK3A5q!!Z1l!@JyDQca0Z9x;e0k?5z4DTXIsnYu&BLq8_pxBdff1`KX@?A6V;t8F2o09Xy6oZ;7d8Q}KeE27b8Ei<&bT4^OX$S za!Wpk6GK?sj(JTcUsq5v#@!PT*{&KOtN7rG$1v_k!pusijah=NTW%dI7MzaX*sk)n zYQ#rVFJz6vWTHE4LcnCy(9{&GA%zQ-Lj}Fsiin$E1luz^K=Ta_87nabUPl0f9nwc3dE~R?qW0i^ZoL|LLnW39Wn}rRTT}`!(St+x0Ewq z4S_0V%8W?MyH)Me+=T>*;`Qcs)2HMyn0pKQVemUt4#;sQEGaHp&4gd1oRLkgbJ>sd zouo%{=sR|l{3J%OoC)w9D_N3a#thFZam|KvVM~jD-Yd!3&2y0U?fvSedgL=hV{oC_ z0Cf^jj@=bD7^1GHv2n7x3MtV8`tK*xT}fJ`SmwRkO+M#4qU;Y&nV-fKmsZ6$*JNS0K~(Ly9X&R+fojBCw0{+WEmPz~(qE`- zH8mY0m)~Q{?&GP+b$T`fMsxndrbQ~y!Kh%9BIYbNV4&+1scdLS^1ZF)5@_?RE*m|% z+?;!ye-}hqK43NoF#oPm#5Ks+6VUgC>uD-zDYl5(wsGL#p`SrGud*Y> zz=ElbQEzB28vx050^Q=1AdBPEEQt($i6eq|u)Q#}^Hekj$hq{R^R%eJ$Yf=cNu*1W z%EZY;$>G~JkJ5Fu3>-^HaM-nQTVJheTIJIjfNgvJ+T8pCEd4a}{7*{Nf26ek7cK0+ z(%AoeTYC%)s%$d3UP0#pyH#KS#EPKJ|I^|BVG8@dmBIdzBL1s#%H@xjwz7IFAE0BP zki{{EwbKRPxl#2N)m~8RM#D;^-R0W~l^Rem2!x~zkR$>FP+&HWKtkAf7QjAXUAXA{ z%EsnKhngSN3P-FkO^p%$;lF%_4_6ld4WD2X^XrbVFXU(Th_02-<>sZ9h{)SJB2mIw z$MS7o-O`0xmZ|8pw|rRy`VH-=-ci{GFd#fcF5kcc50rP^;1C=>5Un8SRfUR&29c-3IrGOjzkb8MGuGL9_TVYS~DU7{;qzeOx`H zx55bY2=#D_VXI+3ggRsck$;%VblGmnZrN^e8sb#ss_@qcmtiqO>V~9nLKG320x~2> z*aRVSNL~Y?MQ_I1H_r@r4S}^d24r{pKBzSwVX^hX zUd@;KOX~#G9(d`DG9!-~3e+J^1Fs!TjFBi=%MXmf$m+>gZJ#%(F}#jCGj-1EAVckuahX2k=&% zvie>~UCer+Xp&FWXuNs=AiIz?*4wmam!19g_66AqOT5c;J_A= zw40TU#&1c1ClVuzmU|ihgeLS=4C}M6vDjiFG`9m zn$2LcIWf8K=c*C|>Rf&mU~eLKP;F;*c;DhZsu zf!q=t$F~Y_#EjVSbEZeXP~k_Y#79N)3Jge$M#;D6?ci2Cu3yZY;gbhbjIYY=&iMz- 
z<73s>r#MiA*sPGKQdsZ7vjR$?R6h*45P5>ok4zg0{n4HO{2o!NvCpB;v_NF%GxwmEP*ZM z0HXr@k~Hes3&Qw^nOGT{iC|wWg#5&@pyTkhBo%z-Tau-YU9q@r?5u{FhvtU^6oQ({ ze^dkg1-{FPY;fRprIh%3N=$loh-BNLL~Rl4JhgNGUVZ(wt{>{A9TT9}`&m$>2Iijl zr`7Q%SGR|i=J(HAoN+!~&H43*_&VH;nUW#NB^CM{byO|^|BAwAOXg_JLPT_I#W9Se z9T3%=$V>HkHqCR{4ZLU&n5GUF(kfJFI^eV=P#f}y{gGX1cG%{n%ub2$r#ll@Yda{( zZDJU&OXG3-5TG8X;j6yP$JVlpWfB~BN1esF=z&VpgSG8Q(en&{TiuS^Ns}ovgP1f$ z^f=_FOA5E+*jY!~ggl6iNT`#Qj}A)#b=dXzJ}IwVJBZv1bzz$LDT;+n3Kg#Wxb?35 zrngdfR!wFlROyq{^G*G8aID`4Ze)M~EzybhZ{9n+S1}NYK3O-zs1OK)DX1mADL*l~ z?dG8f|4B%E0TII9RPl9a>5*}eVA7WH*_pZJg}b9Wo@YKByFX2UIowJ>gejAL;ko-r zesSftR-iN33>hs~cbx*oX}J)4?}3lRDq#SRQP)1($rSlMg-bSQzCU>wXh=*R8U{n9 z0CJwD9;ubR4SreIFz(m-{P)yr7+If0&ViLzu{bTb3V>t>c zBR{*Fyf(sFW5$C)wuZO6g?805hN;aps?g5AG1?c5Z{3&Q4B5q+m7uj&j2+ANcpjgI ztNr>N2FwBxlt}b4LSzid)eoJotx5}+@H9yIXe0@l4)E+c_tEF!LP@vXa6SdMIo$C; zb}16XtMnSJ*5AhoS$e)6KZEQ1J&=L%qp%k2Jo_{H?ql``3^2H*xqedGM$E91{+=xf zGj!^Z4OTr3>dY!$uM}0jmG=9_Eou6C5Cw-@=t+`2JsrPft^S1|Jtny;THM)Gfm!=1 zdhU;f&GZu$YG=sEhq0Djv2D(-p4|In`>5W^@jv7H%ef)BL^N?A&owc1UDPh;R~j0c zpKO~y)!G8vMAI*aL8n(?jkwdgVJ&P;gLxkrZ;fpy)zUB9-0X@3c%l+fc;MH=-Zglt z#c_4{eQ91o*BBRY4cNAw5^CJnp=zZomh#Gz<$BiCSClOi1RbTcJz2f;-sc!WX#Myl z+^AYRKvnytP9&^CTUVRiOVUw7ZA$*Aa~TKT5sDVYV#A^_rE%(z*2pTjPvt!x3XW+FY!$_583G1 zhEdqd1)`bz(rO;+jMhKgxmIjh7$vA0>^E*g7op53*Ice1r<+)^-=25vyfi%LYn;IK z9EZHwTTjoQ4UzYvQ_Way6;k7+2MgEkhXpeZ zL(LbLJ>lq(3$NO4_S$S$bOGqrrRje>L8wjlX~HI%FV@t9!`prXkOOjP)8Ee$8$HHC zH@)!Om@VLKxiP;n-8MI2mxJU?3_K1g+vH=5CjG5=kFwq{sa}HAUrwU@dL>(pc^E$z z?y7x$N@a5fqT2YQG`0fu80U5*>SP=FX3QeEfj(verD} z!J?Zk^ylYzOh_nk(BpokATm3k)3l$?VcDJc?r|4%LQn|bS-6^K%iON`GOHsO3hbjF z+i+}&ium9WkGdjL1d!p0Fq`UzsH$Phr3^(g=+~`LmL$1I{QchwyY(-6vT4jeB>jkqsJL-*FKdl% zRYS1_|5Yrf&o7F1K{rOfg75MzhkKvq-g&=;@_k>$V@Mrn^gyfQPmSZ7nuo(bXWfU* zUr@PvaIkiAParVtD9(K@1ubs|hC4^Xv6MYk9ZGeqH<(?9clGmMfYscl|A8jO$nYO2 zpZ{eV{V)E=|49)cu*sn)WFIWN?P;R}V84Na{SU(^FWU&ELp+Fs{>Elz zvEjkRAGwOd`dv-Ua4Rz9d)c1O+b+$G4A;;0iM|?}$>4E%&{Av~-*-0sn|+s&q3A?R 
zG$zVGL?+III1uwjY6NFQYV^oo+s3Avoc&^HsTr$6p*ma=K`NvO=QmM^`rB*RL!nNH zb|ankkjB?#d_EXr+!sxc5b+LAd&#{)PPyc@;`P4eZ`{wt4u z)0&NYZ9wCK29!CS))bo8_ML_c?~L{AU81p1dhF0)$_A}506F_8YP|c8EE!&ZMfRPH zmFd2_=ax%TinRh%$q_VJ)`aQL#7laX^CwpxCaA@pT zyQU}W_uvkHb-7%iO+6jGhvG@-;&MHbt}_khAE&#J$A=<+vAv{^fnB$3C*4qLa}Q8( z-Qbu^*-B9E#@%3*wVmR(pc9s@KsSAq*-mlG`{JlvGJza>HAU`SY+x&W=Ez*|>w+Cu zu-Tq|7~WqIFViC=WetlS_O0gVI~z&! zGVwj^U=8Wo#RZlY*ZakEBmGx5br=@v%%u)bB=@9}{qKaGwtX@jZ$P#F$?Yn`HUh5H z*?bl(M~y(Cglco19(?)>v-mUbM+z=Kg9hK|QH-eP`A_GqH$RUUkY zPc{Ber>laLI5ML=EeF2=RLs(n_iD_!A22ydcd^C<&)1%r@pn!_2qvgE1E7NBUWc~bp_KOEt6)#kFkvhp zeZn9uHk(hF$_9zo=My*|sTlHH%^A7hmufNd` zmMQuBfd2H+aa;X?HTQF+3;GH>IM#G-5tCT#vXQ$B>f=@ssaEfywdEp*|F=*$qo)hKbiv7G11eEOw|*mgnbz%Nvr~fh zEr7wamvFay`Lsu}Xi&r^mkano(Odlk@g_%?XLTrQ)CQi`xVbfyF1r>B*U}(%yT9po z1wvK(M7n_=Yv1At#4-#%{)rd#whYG|aXzY^S`fjkg2g2QYvegYj*%vSH!FE@QCZT) z4k?gAVwp0YNT(-qlq))-y?E}GKoQIwL1Cy&3m)`v> z=|#=^*T_^Nri;_fHCtzB8`kb>N@6*dWCi*MJ58v3=jFA2o><%WF2q4n7ON-`p+9Jq zqG#S(0zjoY0H|l_?4(7YiWeeh-sA${sG3BwE~Ic<;bdQM1n|pSL)k=*;hTdX`p%dK zo_|J*e>~tC)47;P_U!>16UZI)@9Kakn(G?%ykd70m6ZkLii%5HTk5+cv$agvb+CJx z1rT}f@FP7AoD61f4PS_#1Jw=Er>;3CoC6#wTy;Si)XKNFUgF#CM6T4T1=42+%xIPgU4`E@k2S^Ar&IcJ!@nth(p^Pa{m zSKx@=l`4{3Bu%GtvJ`(C0!0TJR3s$vU9WLXnOBUILVnKYlGu-P`hjvd2Wv0;lovv7@{{%KrVr!_t+!@HhW)`-nLeRV3wF z`qR*NKqR3XJJbQhr0((dZMh?x`z!LHB?Mfeng?1BLXSk80-Dj}q{|(bIlLf!H){8y zcWvRdfn;M`Xq02X-fdLldE2MgYY=!PZ7~IDW;RlazV8p(8Zh*1gg8J>NUrdYfXBbJ zt>9T`Xy^sp(QO6KWBroIkMYmlXYP_8*2qWD)qX<^;V@^0RtZYfqp`L^mlV&3qpUJs zH{f8l*@NrxNJ-gkfcv-&aS0ZUmD}4qV0WY=Sa;8!(AC_ad;3WKJoXWpFb}TLIJ0pU zYCZswMRmzy`&G3H(SwB6EAg5A3+(>b)^Vj? 
zqWt#|t;Hmye30UJy@)j<*;xTz$$hj8trXhXsVy%VzH!o@pZS|ggs0+Mu%~UxX$z<_ z;5cW4qV@QJU5W{k&X#)MPXBnga|N9@%E{#|`%7V>N9+RKn6=#~sbj;!Z>T9~&GvxU zOkbhWtbFodFAO947pAer`WPejv@joQI$BB@t#|>fF)|`>3boV`sY#sv<8mQ)v2tD- z)$TTc7B4~oG(AFjs1DNz(3)oJdZl-QhAByL)qv{oCu>JN+`s9UGi=NVfnY(3A!q0C zfyr{_E5DiJXskz7RD+G6e`JFbw_0{mqd4HUND`=y1%2T8P=v-oZl$^+ zj3r@U?Fl;coxXaHrfEa4!((tTMszD_%$vZk%#&TP#Rs$NwKIMQUJxl*6R62-6axu3 zAs_Pdal|Bouw0uFIu6VUwa-Yewh1k%%jas6JMN43dgAuMJ`8XC=AAm8EYt!%egTBn zl=?mpF>Ev{!qlN*U5T=0HN|OFcv&(oAkEZ-eNxbhyM-762g!&hbo%D)4^dJGjC8^I$1-OjjEb=c2TBK-^wg&qBgOk`;Mns!u;x-2^k_F)%3Gn}v zwTQSH(CuNmdz1Sj`9%E6czjlJ*KWi&O=`|X?bN~=3L?-$GV!&CcEK?Tf827=>2&03 z39-cJ_IWb$2K;?ulQ<8Pj|i`>;)s~7540a)Q}QZUcLUj&pIDoPbw>HF8hOg}FiYKs zbbn6a|7hp;%^Mpzby)DY z97)=q{_KJ*3p?FM3n&>bPpFlVkt58ueL~z{+pDhTR9WD&huFAUNKmzy7A&iR&iNAu?+u*UxBOs2KqoYm zfVC3(NIr=ts_KVC^Os#4K8Ti%*Vuwl867ghOAkSSO$195gCsH2g@o=HtSQc~DS}f~ z`$C{NoaZeUpGyoT)PW|PDILB~Xs-OD1>X1HrBl6O7#{4ofu%c~n@Bu3iO5s(?(9|{ ztrJ@UEpY9CfQ`COIb84C>l5MPMtEp_+Q^+kSiJTiGreKVl;_a}L#8sNq*M~QlxxBVE>KtZ;+&jV0p$H7 zGEq@qxmXs1&GJ3!?ha`A^TWX!57t1Y9N8<121E@)3gy4ZYS!fzkZ+n_+id(1C-syB z)6|sHuU)Z^h4cu2rH@+ok_&E@&}C2WC-`1c7*XUR3&kZ3SdP%f(5<17 zQwsuy=WlN)t;pbP+y8m!Ccz<_R#DX`1OY;Ee8@a==p7q!O; zIg5gOdNOJf`Fz;318Qq$WBW_tr%{XNSXP%^3kC9SZR(LLQ@=q@dZI=jF?O0vvfq3;>-qif6PUq1|(aNu<-xZyN_ zv-}8#g_#EsPxNsA`%#=z`GMUly#)XK@B^t%N=bZi7i|j46{nzqK8x6``u7#&g9_s_ zcYiFt(rE__O1l7?ADGj;Gv3z$VnuW zR))ER!Nl;cSWnhq#4IvtuJGyB(O|v zo>{`D3<6P#|B4(#snF> zdmayWM%U!QBm5jRfcMwQy#80EX6eKp=?Q)p|L5uEF0VHrGwgB4G*oxgi6i$SQPTvo z@HFy zL<2Eqy6rjl*RN~2>CQozj4?FF{xWLy^`!2)v@(A{7&Ks%OsJR;;}*k~UWGc(H*0_f zovbcbl8eNnz^m1yCD-!SG%7sHjbtc6VOQ}3$cZk0+kuu%+1>U{T-6DP#F#Vhbj zNWV-HQlt@S$;(y&q~cY|&gPa;fj5Wsg)7TS9$kL#e0)}Rx4*8P2}1(Q(3h_sNjXqN zpim0VZ{)FK!u)ZWuPP!U!y9+>X!3HHRXJE6nZ(e+E1wQ<$>18!jjV~>4&K!imO>hT zMZz=x@-yx^aN(fJsjH~`7W<~n?K^Yg(5Yt-Uzj1Fe(OD?*=YPv((Zr6?f(B|W0wC8 z^o`T#8~!sg8Ddb(`^zT=4JZZ_En$tCAR3x$HwQR!gO*=Iig7S1`2ztQ^?|bee{W^_ z59ieXfo9Xo8(Jzm+tAAquyg*yXFEDM6ReIl7PTP9_ySe@ObiJ8e 
zY?v)M7W^#ZPzr)I(ky6{(GXhEU!5c)eWKkD^oR#d1ERnCM0?WWI_)nPq@hI?4ZwXC?I`2bJP(362r?1v&x-WWZ`WX+KgLDDD#HQyzUKzz zyyuk4>K ztPL{_TGnh2o+>ql>-5L%4@lLM{^Bqeh;Y2Ie+^?i5HQP~n1H_hH1%+b(+RGo2oQ{v zu&O{XtRbepy1z4^bn`GB1Wa=*BPs-D_9#dGJrLrc8X>`?0WktPUb^@fO)`Ntpjg0| zqqYCojihxsKo3}1e*GRgi{`0C(H^;BL_H@+#(ME72mOxI2|+5W&(LDyEVPET!0+3CRZo#b~<+$7JvJb{X48 zJClbK!mhcws*`HVonmrwzYCL7-JTeo&uU+fs+Y432sLUU>QBAtRI%&H%HOJwUDaoI zzt{HmU!a3(n#U2Ij&Z-N*vHFNGuQ#Wu?&urvp8llIl0RBs`H^{?bukl$a{oVy2*hB z;v>8$!OZR{yoVT4TtcrWQtaPH2StB6>ar2;z}k!=8c~ag z&*!D1?POLior>yn*gFymz}m8x33wRmq;r2Cwo-YR89Ed6!ADt$*%?};bMbL#9~nI$ zlh{N*0`61dK@D|yXu}RQTD9TG-?yFlNK=miUSSohMZyD)v5_71MWvtVmnIyGiQ#oF z8X^LUse8z1?Bp z3PFjb;Ma-r(p0n(kZU3gqcy|8t0UCTs#N7{rLHk7mvUEj&L?(%PSHf~<4R-1$wc2xYib<@f+YrNcX zRbP))ji`xQQg243y40x+QrnCtvwNIi;im45F<)LRA|BV=c_*5iG<0xI**?iz?<+l4 ze?k3+FaEL>7#TaV$fl3&mG#Zyle@v|ZScd9qyuf^`7&ZCZ+GyeSJ=VP`2v;JlrYAk zybs28{4w)}rxLxze|)##x%vn4%TBd;IRt?a3U@;;vsu;$B;*YJp2rDr7G_Tk1TLfw;(} z5|s`q$&xwI*~>C7QDz#$9we8G`C+hh04TgLF-U_@)cP<5f|E|w^N3Tv1*Bte6HABt zK`oJ^8($e%IDa-cp@FIeAxtu?C107m3K4_nMH=e?#5meF8`c2|ZQcn}r(4Fg#Stt?5zQ!ROg9_CR()mb>w%;n#0BX+kt9T~~&;nsN z2aMTqoH3HGkbuZB@{zS}k4sZ-`WYw6c5!L@jtb$4<@60yNM?;f9YU`K30sHTtUD5s z__G*XY|GOQQJA!9bXY9O$^A!Jy{2<{UKD7E-89A)zN!KNvZfrXMR<=*v|Xyn$!bQI zWgGS#WmuJ6WFRU%f)=Hx>BIFp=hT6IlGTFM*1!c3r za)g34=37-C;t%-Y;=j5`5*1{~?a1NQOY`rO?s%=k9@~FiAEK&5!tXu0o28q>t12|q z94eaHO)c1;y0;!ZyU%<}l<(SHnE#=9fA7*>@glXD;7XFzahxXdYt5~gTeSQ-lp{*iYQi$F}k3jvQ_A<-O`6H#Sk0TukrMZWmLQf=xsXy512iqOorG~obX8o? 
zCiFo3=|Iw?pk0wDXs?luorJR}gfeSy|8@J5v&-vEy3_V_;`1*Z8`Jm^FaZ{SmwC9?#iTEdYhO4IlIkR)=VLUkKSK5a+31E=TVZ>eICqpu zK+W?qfdRO?)_l$KD;dchq4O1IYS&dc21=} zx$K1ApEju2IehYr%I#nuQXLQDC*_|V8v)sjZ$7ECmt{&c$cv1eih)Sz!wwGI{pj z{>6~p|G*2NMA}m?KNvUv>_>p}`{`$UQ%`F&8?|WyI-kV4kR*IJ2uTH1hoM1&l{TG2 zngLP<;}igS84%f`grOUm`UgPm1-u?mYR@JgWF45oOY~JV0)d->@!R0x{^7Cg(alQv zZR$8Fzct>_SDo)1Byynu7DW_YKEE#q@-^%<>EAQ-3)%Gzz50u#J?)Ciaf{b}B;L<2 zuCA_tR=R0VMxua5Yye4lpef1+vGOG-UjakRHBq7LfJY$MRLXI3B1iJK#SMkYK@%?yz->Vr*p$_olkUlnF!DPqNEf-_TAz97aj4|!-ywP*&lkj$+Rx-azOhwiW8ibHi_0(Au34(z z^u+Jz>+tJO_A?f6{ISMt0UH_`D;Ox`CuMQj*yw|sz!5>IY5JZn(KePT#%4(dDJBMq z<`!nj$%ba8DTan77UqU&W=Y0LsfKohRmAEC<>!}h83L^c()Y|u%U1xk@`Ln)GpkY+ zfQIRZM;ii#jAHRMu`N*A{JIxUnjK1%IR5eb$vKM2lNRzU6_Wg=?Rlx;%<)63x(nvy zL~h$~aMNR^9n9KdYjk3oxHq2sZQFnLc}@N9=eFw&m)$IpoFCcb%Hbqla)@n-0=MLi zHeC$|xl;ixwKrOwR~+D!>HH|+ELpk#$}F+(Q>PtRQ=x1XYoLBGf3B{;vup)k|4B9fe$#~ z%AwQoIV*nNr>INYr}^BNWV`*zI^Cb#-Urv4FJCHh-OFR?nys2s)z_w4{=IVQWMn|L z>nhK+vMcXpul0KCwpwMY{94nM?pwbtsarmEO55U6gLOh3-o_s#&B^-Vo^y&QED2OsiB3D8JDW6tG^o;07pqJBme*a literal 0 HcmV?d00001 diff --git a/lecture12/notes_12.py b/lecture12/notes_12.py new file mode 100644 index 0000000..c7d732e --- /dev/null +++ b/lecture12/notes_12.py @@ -0,0 +1,130 @@ +# %% [markdown] +# # Previous Class Definitions +# The previously defined Layer_Dense, Activation_ReLU, Activation_Softmax, Loss, and Loss_CategoricalCrossEntropy classes. 
+ +# %% +# imports +import matplotlib.pyplot as plt +import numpy as np +import nnfs +from nnfs.datasets import spiral_data, vertical_data +nnfs.init() + +# %% +class Layer_Dense: + def __init__(self, n_inputs, n_neurons): + # Initialize the weights and biases + self.weights = 0.01 * np.random.randn(n_inputs, n_neurons) # Normal distribution of weights + self.biases = np.zeros((1, n_neurons)) + + def forward(self, inputs): + # Calculate the output values from inputs, weights, and biases + self.output = np.dot(inputs, self.weights) + self.biases # Weights are already transposed + +class Activation_ReLU: + def forward(self, inputs): + self.output = np.maximum(0, inputs) + +class Activation_Softmax: + def forward(self, inputs): + # Get the unnormalized probabilities + # Subtract max from the row to prevent larger numbers + exp_values = np.exp(inputs - np.max(inputs, axis=1, keepdims=True)) + + # Normalize the probabilities with element wise division + probabilities = exp_values / np.sum(exp_values, axis=1,keepdims=True) + self.output = probabilities + +# Base class for Loss functions +class Loss: + '''Calculates the data and regularization losses given + model output and ground truth values''' + def calculate(self, output, y): + sample_losses = self.forward(output, y) + data_loss = np.average(sample_losses) + return data_loss + +class Loss_CategoricalCrossEntropy(Loss): + def forward(self, y_pred, y_true): + '''y_pred is the neural network output + y_true is the ideal output of the neural network''' + samples = len(y_pred) + # Bound the predicted values + y_pred_clipped = np.clip(y_pred, 1e-7, 1-1e-7) + + if len(y_true.shape) == 1: # Categorically labeled + correct_confidences = y_pred_clipped[range(samples), y_true] + elif len(y_true.shape) == 2: # One hot encoded + correct_confidences = np.sum(y_pred_clipped*y_true, axis=1) + + # Calculate the losses + negative_log_likelihoods = -np.log(correct_confidences) + return negative_log_likelihoods + +# %% [markdown] +# # 
Backpropagation of a Single Neuron
# Backpropagation helps us find the gradient of the neural network with respect to each of the parameters (weights and biases) of each neuron.
#
# Imagine a layer that has 3 inputs and 1 neuron. There are 3 inputs (x0, x1, x2), three weights (w0, w1, w2), 1 bias (b0), and 1 output (z). There is a ReLU activation layer after the neuron output going into a square loss function (loss = z^2).
#
# Loss = (ReLU(sum(mul(x0, w0), mul(x1, w1), mul(x2, w2), b0)))^2
#
# $\frac{\delta Loss()}{\delta w0} = \frac{\delta Loss()}{\delta ReLU()} * \frac{\delta ReLU()}{\delta sum()} * \frac{\delta sum()}{\delta mul(x0, w0)} * \frac{\delta mul(x0, w0)}{\delta w0}$
#
# $\frac{\delta Loss()}{\delta ReLU()} = 2 * ReLU(sum(...))$
#
# $\frac{\delta ReLU()}{\delta sum()} = 0$ if sum(...) is less than 0 and $1$ if sum(...) is greater than 0
#
# $\frac{\delta sum()}{\delta mul(x0, w0)} = 1$
#
# $\frac{\delta mul(x0, w0)}{\delta w0} = x0$
#
# This is repeated for w0, w1, w2, b0.
#
# We then multiply these chain-rule derivatives together to compute the gradient analytically. 
Then, we update the parameters using small step sizes, such that $w0[i+1] = w0[i] - step*\frac{\delta Loss()}{\delta w0}$ +# + +# %% +import numpy as np + +# Initial parameters +weights = np.array([-3.0, -1.0, 2.0]) +bias = 1.0 +inputs = np.array([1.0, -2.0, 3.0]) +target_output = 0.0 +learning_rate = 0.001 + +def relu(x): + return np.maximum(0, x) + +def relu_derivative(x): + return np.where(x > 0, 1.0, 0.0) + +for iteration in range(200): + # Forward pass + linear_output = np.dot(weights, inputs) + bias + output = relu(linear_output) + loss = (output - target_output) ** 2 + + # Backward pass to calculate gradient + dloss_doutput = 2 * (output - target_output) + doutput_dlinear = relu_derivative(linear_output) + dlinear_dweights = inputs + dlinear_dbias = 1.0 + + dloss_dlinear = dloss_doutput * doutput_dlinear + dloss_dweights = dloss_dlinear * dlinear_dweights + dloss_dbias = dloss_dlinear * dlinear_dbias + + # Update weights and bias + weights -= learning_rate * dloss_dweights + bias -= learning_rate * dloss_dbias + + # Print the loss for this iteration + print(f"Iteration {iteration + 1}, Loss: {loss}") + +print("Final weights:", weights) +print("Final bias:", bias) + + +