Lecture 9

This commit is contained in:
judsonupchurch 2024-12-04 04:24:10 +00:00
parent 85da8f42aa
commit 3508f75516
6 changed files with 927 additions and 315 deletions

@@ -4,4 +4,4 @@ Lectures 1-2 use same handout.
Lectures 3-6 use same handout.
Lectures 7-12 use same handout.
Lectures 7-11 use same handout.

@@ -1306,317 +1306,18 @@
},
{
"cell_type": "code",
"execution_count": 43,
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"New set of weights found, iteration: 0 loss: 1.099905 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 5 loss: 1.099738 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 11 loss: 1.0992013 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 13 loss: 1.0977142 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 14 loss: 1.0957412 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 16 loss: 1.0941366 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 29 loss: 1.0926114 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 35 loss: 1.0908598 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 42 loss: 1.0890985 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 44 loss: 1.0882245 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 53 loss: 1.080014 acc: 0.3333333333333333\n",
"New set of weights found, iteration: 61 loss: 1.072066 acc: 0.47\n",
"New set of weights found, iteration: 65 loss: 1.0632849 acc: 0.3433333333333333\n",
"New set of weights found, iteration: 68 loss: 1.0544518 acc: 0.37\n",
"New set of weights found, iteration: 69 loss: 1.0531789 acc: 0.33666666666666667\n",
"New set of weights found, iteration: 70 loss: 1.0516953 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 73 loss: 1.0501534 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 75 loss: 1.0490755 acc: 0.65\n",
"New set of weights found, iteration: 78 loss: 1.0362376 acc: 0.84\n",
"New set of weights found, iteration: 79 loss: 1.0319735 acc: 0.6933333333333334\n",
"New set of weights found, iteration: 82 loss: 1.0308099 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 84 loss: 1.0245655 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 86 loss: 1.0163056 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 91 loss: 1.0100644 acc: 0.43333333333333335\n",
"New set of weights found, iteration: 92 loss: 1.0020715 acc: 0.41333333333333333\n",
"New set of weights found, iteration: 94 loss: 0.99989504 acc: 0.4666666666666667\n",
"New set of weights found, iteration: 95 loss: 0.99057025 acc: 0.6066666666666667\n",
"New set of weights found, iteration: 96 loss: 0.9842712 acc: 0.61\n",
"New set of weights found, iteration: 98 loss: 0.98155546 acc: 0.6\n",
"New set of weights found, iteration: 99 loss: 0.9771661 acc: 0.64\n",
"New set of weights found, iteration: 102 loss: 0.9674396 acc: 0.7266666666666667\n",
"New set of weights found, iteration: 103 loss: 0.94826436 acc: 0.7966666666666666\n",
"New set of weights found, iteration: 107 loss: 0.94145477 acc: 0.8666666666666667\n",
"New set of weights found, iteration: 109 loss: 0.9377437 acc: 0.73\n",
"New set of weights found, iteration: 110 loss: 0.91910625 acc: 0.6433333333333333\n",
"New set of weights found, iteration: 112 loss: 0.9161494 acc: 0.6466666666666666\n",
"New set of weights found, iteration: 114 loss: 0.91611814 acc: 0.6333333333333333\n",
"New set of weights found, iteration: 115 loss: 0.9146271 acc: 0.6\n",
"New set of weights found, iteration: 117 loss: 0.9106173 acc: 0.6333333333333333\n",
"New set of weights found, iteration: 118 loss: 0.9050189 acc: 0.65\n",
"New set of weights found, iteration: 119 loss: 0.89243126 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 123 loss: 0.8768594 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 127 loss: 0.8671168 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 128 loss: 0.86372316 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 132 loss: 0.84759533 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 134 loss: 0.8325577 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 138 loss: 0.8243833 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 140 loss: 0.8126023 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 141 loss: 0.81067485 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 144 loss: 0.8094295 acc: 0.6666666666666666\n",
"New set of weights found, iteration: 146 loss: 0.79853076 acc: 0.67\n",
"New set of weights found, iteration: 149 loss: 0.79235256 acc: 0.71\n",
"New set of weights found, iteration: 150 loss: 0.78341687 acc: 0.77\n",
"New set of weights found, iteration: 151 loss: 0.76394886 acc: 0.8033333333333333\n",
"New set of weights found, iteration: 152 loss: 0.7592695 acc: 0.74\n",
"New set of weights found, iteration: 158 loss: 0.75623393 acc: 0.7166666666666667\n",
"New set of weights found, iteration: 161 loss: 0.7518161 acc: 0.81\n",
"New set of weights found, iteration: 163 loss: 0.74768335 acc: 0.88\n",
"New set of weights found, iteration: 165 loss: 0.7471585 acc: 0.8066666666666666\n",
"New set of weights found, iteration: 166 loss: 0.7458357 acc: 0.8066666666666666\n",
"New set of weights found, iteration: 167 loss: 0.7457454 acc: 0.8566666666666667\n",
"New set of weights found, iteration: 175 loss: 0.73652244 acc: 0.8866666666666667\n",
"New set of weights found, iteration: 180 loss: 0.7326938 acc: 0.7433333333333333\n",
"New set of weights found, iteration: 186 loss: 0.7188873 acc: 0.79\n",
"New set of weights found, iteration: 196 loss: 0.7007391 acc: 0.8366666666666667\n",
"New set of weights found, iteration: 197 loss: 0.69283545 acc: 0.8033333333333333\n",
"New set of weights found, iteration: 198 loss: 0.6778049 acc: 0.8433333333333334\n",
"New set of weights found, iteration: 204 loss: 0.6732369 acc: 0.78\n",
"New set of weights found, iteration: 206 loss: 0.6590504 acc: 0.8466666666666667\n",
"New set of weights found, iteration: 208 loss: 0.64687824 acc: 0.8733333333333333\n",
"New set of weights found, iteration: 210 loss: 0.64197236 acc: 0.8266666666666667\n",
"New set of weights found, iteration: 212 loss: 0.6315755 acc: 0.8566666666666667\n",
"New set of weights found, iteration: 216 loss: 0.62233186 acc: 0.8433333333333334\n",
"New set of weights found, iteration: 217 loss: 0.6089423 acc: 0.87\n",
"New set of weights found, iteration: 220 loss: 0.6020286 acc: 0.89\n",
"New set of weights found, iteration: 229 loss: 0.59444267 acc: 0.8733333333333333\n",
"New set of weights found, iteration: 230 loss: 0.57657397 acc: 0.8633333333333333\n",
"New set of weights found, iteration: 234 loss: 0.56438416 acc: 0.8766666666666667\n",
"New set of weights found, iteration: 238 loss: 0.55371267 acc: 0.8733333333333333\n",
"New set of weights found, iteration: 239 loss: 0.55123174 acc: 0.87\n",
"New set of weights found, iteration: 241 loss: 0.5472712 acc: 0.9066666666666666\n",
"New set of weights found, iteration: 244 loss: 0.5312096 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 245 loss: 0.52256846 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 248 loss: 0.51849365 acc: 0.9033333333333333\n",
"New set of weights found, iteration: 253 loss: 0.5171079 acc: 0.9033333333333333\n",
"New set of weights found, iteration: 254 loss: 0.5170107 acc: 0.8766666666666667\n",
"New set of weights found, iteration: 258 loss: 0.51433927 acc: 0.9033333333333333\n",
"New set of weights found, iteration: 263 loss: 0.51368594 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 267 loss: 0.50735676 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 270 loss: 0.49404424 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 276 loss: 0.48898834 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 281 loss: 0.48475555 acc: 0.8733333333333333\n",
"New set of weights found, iteration: 283 loss: 0.482244 acc: 0.9066666666666666\n",
"New set of weights found, iteration: 284 loss: 0.46764347 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 288 loss: 0.45943847 acc: 0.93\n",
"New set of weights found, iteration: 293 loss: 0.45457697 acc: 0.9033333333333333\n",
"New set of weights found, iteration: 298 loss: 0.45358613 acc: 0.9033333333333333\n",
"New set of weights found, iteration: 301 loss: 0.4479193 acc: 0.92\n",
"New set of weights found, iteration: 302 loss: 0.44754446 acc: 0.9033333333333333\n",
"New set of weights found, iteration: 305 loss: 0.4435407 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 308 loss: 0.439522 acc: 0.92\n",
"New set of weights found, iteration: 310 loss: 0.4312136 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 311 loss: 0.42785105 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 312 loss: 0.42762664 acc: 0.91\n",
"New set of weights found, iteration: 314 loss: 0.4265803 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 319 loss: 0.4237134 acc: 0.91\n",
"New set of weights found, iteration: 321 loss: 0.41802156 acc: 0.9133333333333333\n",
"New set of weights found, iteration: 324 loss: 0.4131552 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 325 loss: 0.4108623 acc: 0.91\n",
"New set of weights found, iteration: 328 loss: 0.41035053 acc: 0.9066666666666666\n",
"New set of weights found, iteration: 335 loss: 0.4067101 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 336 loss: 0.3959319 acc: 0.9066666666666666\n",
"New set of weights found, iteration: 337 loss: 0.3919371 acc: 0.9066666666666666\n",
"New set of weights found, iteration: 338 loss: 0.38703263 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 339 loss: 0.38482046 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 346 loss: 0.37982863 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 347 loss: 0.37918502 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 348 loss: 0.37770292 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 362 loss: 0.3769898 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 364 loss: 0.37081122 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 366 loss: 0.36963394 acc: 0.92\n",
"New set of weights found, iteration: 373 loss: 0.36226436 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 376 loss: 0.35897696 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 381 loss: 0.35690197 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 382 loss: 0.35525045 acc: 0.93\n",
"New set of weights found, iteration: 386 loss: 0.35105768 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 392 loss: 0.34419277 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 394 loss: 0.3400308 acc: 0.9133333333333333\n",
"New set of weights found, iteration: 402 loss: 0.33625075 acc: 0.9\n",
"New set of weights found, iteration: 404 loss: 0.32828385 acc: 0.9133333333333333\n",
"New set of weights found, iteration: 407 loss: 0.32826573 acc: 0.9133333333333333\n",
"New set of weights found, iteration: 417 loss: 0.32695287 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 419 loss: 0.32475516 acc: 0.92\n",
"New set of weights found, iteration: 423 loss: 0.3235583 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 434 loss: 0.31984544 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 437 loss: 0.31507537 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 441 loss: 0.31470028 acc: 0.92\n",
"New set of weights found, iteration: 442 loss: 0.30537918 acc: 0.92\n",
"New set of weights found, iteration: 443 loss: 0.30088764 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 444 loss: 0.29980597 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 445 loss: 0.29180583 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 446 loss: 0.28543833 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 448 loss: 0.27976722 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 450 loss: 0.27820233 acc: 0.93\n",
"New set of weights found, iteration: 453 loss: 0.26993147 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 461 loss: 0.2693614 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 465 loss: 0.26654485 acc: 0.93\n",
"New set of weights found, iteration: 467 loss: 0.26303545 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 469 loss: 0.263004 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 474 loss: 0.26084086 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 480 loss: 0.2575986 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 488 loss: 0.25060445 acc: 0.93\n",
"New set of weights found, iteration: 492 loss: 0.25023142 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 493 loss: 0.24514885 acc: 0.93\n",
"New set of weights found, iteration: 496 loss: 0.24100976 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 510 loss: 0.24016263 acc: 0.93\n",
"New set of weights found, iteration: 516 loss: 0.23990427 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 517 loss: 0.23756668 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 527 loss: 0.23614492 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 529 loss: 0.23547292 acc: 0.93\n",
"New set of weights found, iteration: 530 loss: 0.23477767 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 531 loss: 0.2303279 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 533 loss: 0.22919251 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 543 loss: 0.22844787 acc: 0.94\n",
"New set of weights found, iteration: 545 loss: 0.22844426 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 547 loss: 0.22694202 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 557 loss: 0.22352041 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 563 loss: 0.22272877 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 566 loss: 0.22242208 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 567 loss: 0.22208746 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 568 loss: 0.22132492 acc: 0.93\n",
"New set of weights found, iteration: 600 loss: 0.21976604 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 608 loss: 0.21949868 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 615 loss: 0.21724503 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 618 loss: 0.21679601 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 622 loss: 0.21339032 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 626 loss: 0.21238428 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 629 loss: 0.2072347 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 630 loss: 0.20694281 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 632 loss: 0.20632517 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 638 loss: 0.20593578 acc: 0.9233333333333333\n",
"New set of weights found, iteration: 644 loss: 0.20315775 acc: 0.94\n",
"New set of weights found, iteration: 645 loss: 0.2030436 acc: 0.94\n",
"New set of weights found, iteration: 649 loss: 0.20148328 acc: 0.94\n",
"New set of weights found, iteration: 652 loss: 0.20143524 acc: 0.93\n",
"New set of weights found, iteration: 655 loss: 0.1997256 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 663 loss: 0.19749916 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 674 loss: 0.19700658 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 677 loss: 0.19635075 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 683 loss: 0.19607982 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 691 loss: 0.19607717 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 705 loss: 0.1945014 acc: 0.9166666666666666\n",
"New set of weights found, iteration: 711 loss: 0.19085401 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 718 loss: 0.190301 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 723 loss: 0.18917027 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 725 loss: 0.18868148 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 727 loss: 0.18696322 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 735 loss: 0.18605982 acc: 0.93\n",
"New set of weights found, iteration: 753 loss: 0.18571393 acc: 0.93\n",
"New set of weights found, iteration: 754 loss: 0.18480046 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 756 loss: 0.18298945 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 775 loss: 0.18280008 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 780 loss: 0.18264478 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 838 loss: 0.18199474 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 839 loss: 0.1819026 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 870 loss: 0.18085685 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 879 loss: 0.18026078 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 880 loss: 0.17951058 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 881 loss: 0.17820631 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 894 loss: 0.17801896 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 897 loss: 0.17786425 acc: 0.93\n",
"New set of weights found, iteration: 898 loss: 0.17726201 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 901 loss: 0.17575936 acc: 0.9266666666666666\n",
"New set of weights found, iteration: 911 loss: 0.17463076 acc: 0.93\n",
"New set of weights found, iteration: 917 loss: 0.1736356 acc: 0.93\n",
"New set of weights found, iteration: 934 loss: 0.17291996 acc: 0.93\n",
"New set of weights found, iteration: 935 loss: 0.17224732 acc: 0.93\n",
"New set of weights found, iteration: 947 loss: 0.17191714 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 967 loss: 0.17177528 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 984 loss: 0.17169482 acc: 0.93\n",
"New set of weights found, iteration: 992 loss: 0.17093095 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 996 loss: 0.17091219 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1011 loss: 0.17054932 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1012 loss: 0.17051615 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1014 loss: 0.17050186 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1016 loss: 0.17006627 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1032 loss: 0.17000513 acc: 0.93\n",
"New set of weights found, iteration: 1036 loss: 0.16972722 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1042 loss: 0.16963226 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1067 loss: 0.16902782 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1100 loss: 0.16885021 acc: 0.93\n",
"New set of weights found, iteration: 1125 loss: 0.16875145 acc: 0.94\n",
"New set of weights found, iteration: 1131 loss: 0.16835134 acc: 0.93\n",
"New set of weights found, iteration: 1136 loss: 0.16822483 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1155 loss: 0.16820814 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1165 loss: 0.16806214 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1177 loss: 0.16759826 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1196 loss: 0.16752647 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1221 loss: 0.1674471 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1292 loss: 0.16733679 acc: 0.93\n",
"New set of weights found, iteration: 1349 loss: 0.1672686 acc: 0.93\n",
"New set of weights found, iteration: 1374 loss: 0.16720611 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1524 loss: 0.16713503 acc: 0.93\n",
"New set of weights found, iteration: 1554 loss: 0.16697857 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1566 loss: 0.1667167 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1611 loss: 0.16653392 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1614 loss: 0.16648601 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1623 loss: 0.16623822 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1827 loss: 0.16623314 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1840 loss: 0.16610965 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1844 loss: 0.16608463 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1853 loss: 0.1660742 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1909 loss: 0.16592857 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 1951 loss: 0.16568528 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1959 loss: 0.16554318 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 1989 loss: 0.16552317 acc: 0.93\n",
"New set of weights found, iteration: 1996 loss: 0.16528629 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 2028 loss: 0.16519576 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2043 loss: 0.16503423 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 2062 loss: 0.16495857 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 2072 loss: 0.16491874 acc: 0.93\n",
"New set of weights found, iteration: 2085 loss: 0.16468659 acc: 0.94\n",
"New set of weights found, iteration: 2097 loss: 0.16459472 acc: 0.94\n",
"New set of weights found, iteration: 2099 loss: 0.16432194 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2168 loss: 0.16422528 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 2186 loss: 0.16416107 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 2191 loss: 0.16411497 acc: 0.9333333333333333\n",
"New set of weights found, iteration: 2218 loss: 0.16404505 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2276 loss: 0.16398491 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2303 loss: 0.16397893 acc: 0.94\n",
"New set of weights found, iteration: 2308 loss: 0.16390242 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 2320 loss: 0.1638276 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2322 loss: 0.16374306 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2330 loss: 0.1633055 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2389 loss: 0.16307148 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2419 loss: 0.16291772 acc: 0.94\n",
"New set of weights found, iteration: 2500 loss: 0.1628382 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2520 loss: 0.16280368 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2835 loss: 0.16272317 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2893 loss: 0.16264299 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 2929 loss: 0.16250354 acc: 0.94\n",
"New set of weights found, iteration: 3028 loss: 0.16240475 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 3100 loss: 0.16229728 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 3162 loss: 0.16214713 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 3326 loss: 0.16197507 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 3368 loss: 0.16193718 acc: 0.9466666666666667\n",
"New set of weights found, iteration: 3404 loss: 0.16185224 acc: 0.9366666666666666\n",
"New set of weights found, iteration: 3441 loss: 0.16182107 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 3585 loss: 0.16174312 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 3655 loss: 0.16167627 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 5384 loss: 0.16161478 acc: 0.94\n",
"New set of weights found, iteration: 5428 loss: 0.16156833 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 5463 loss: 0.16155337 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 5504 loss: 0.16146044 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 5861 loss: 0.16145106 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 6448 loss: 0.16144219 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 6716 loss: 0.16144097 acc: 0.9466666666666667\n",
"New set of weights found, iteration: 6848 loss: 0.16140525 acc: 0.9466666666666667\n",
"New set of weights found, iteration: 7269 loss: 0.1614049 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 7333 loss: 0.16136383 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 7362 loss: 0.16134067 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 7661 loss: 0.16133144 acc: 0.9466666666666667\n",
"New set of weights found, iteration: 7968 loss: 0.16128716 acc: 0.9433333333333334\n",
"New set of weights found, iteration: 8526 loss: 0.1612735 acc: 0.9433333333333334\n"
"ename": "NameError",
"evalue": "name 'vertical_data' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m/tmp/ipykernel_169632/3189844108.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Create dataset\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mvertical_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msamples\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m100\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mclasses\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0;31m# Create model\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mdense1\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mLayer_Dense\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m3\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# first dense layer, 2 inputs\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mactivation1\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mActivation_ReLU\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mNameError\u001b[0m: name 'vertical_data' is not defined"
]
}
],

File diff suppressed because one or more lines are too long

Binary file not shown.

@@ -4,9 +4,10 @@
# %%
# imports
import matplotlib.pyplot as plt
import numpy as np
import nnfs
from nnfs.datasets import spiral_data
from nnfs.datasets import spiral_data, vertical_data
nnfs.init()
# %%
@@ -192,4 +193,188 @@ if len(class_targets.shape) == 2:
accuracy = np.mean(predictions == class_targets)
print(f"Accuracy: {accuracy}")
# %% [markdown]
# # The Need for Optimization
# %%
# Simpler dataset: vertical_data
nnfs.init()
X, y = vertical_data(samples=100, classes=3)
plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap='brg')
plt.show()
# %% [markdown]
# # Test Strategy 1: Randomly Select Weights and Biases
# For a large number of trials, assign a brand-new random set of weights and biases on every iteration and keep whichever set produces the lowest loss (accuracy is printed alongside for reference).
# %%
# Create dataset
X, y = vertical_data(samples=100, classes=3)
# Create model
dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs
activation1 = Activation_ReLU()
dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 outputs
activation2 = Activation_Softmax()
# Create loss function
loss_function = Loss_CategoricalCrossEntropy()
# Helper variables
lowest_loss = 9999999 # some initial value
best_dense1_weights = dense1.weights.copy()
best_dense1_biases = dense1.biases.copy()
best_dense2_weights = dense2.weights.copy()
best_dense2_biases = dense2.biases.copy()
for iteration in range(10000):
    # Generate a completely new random set of weights and biases for this iteration
    dense1.weights = 0.05 * np.random.randn(2, 3)
    dense1.biases = 0.05 * np.random.randn(1, 3)
    dense2.weights = 0.05 * np.random.randn(3, 3)
    dense2.biases = 0.05 * np.random.randn(1, 3)
    # Perform a forward pass of the training data through the network
    dense1.forward(X)
    activation1.forward(dense1.output)
    dense2.forward(activation1.output)
    activation2.forward(dense2.output)
    # Perform a forward pass through the loss function;
    # it takes the output of the second dense layer and returns the loss
    loss = loss_function.calculate(activation2.output, y)
    # Calculate accuracy from the output of activation2 and the targets
    # along the first axis
    predictions = np.argmax(activation2.output, axis=1)
    accuracy = np.mean(predictions == y)
    # If loss is smaller - print and save the weights and biases aside
    if loss < lowest_loss:
        print('New set of weights found, iteration:', iteration, 'loss:', loss, 'acc:', accuracy)
        best_dense1_weights = dense1.weights.copy()
        best_dense1_biases = dense1.biases.copy()
        best_dense2_weights = dense2.weights.copy()
        best_dense2_biases = dense2.biases.copy()
        lowest_loss = loss
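# %% [markdown]
# A minimal follow-up sketch (not part of the original listing): once the random search
# finishes, the saved best parameters can be copied back into the layers and re-evaluated.
# It assumes the `Layer_Dense`, `Activation_ReLU`, `Activation_Softmax`, and
# `Loss_CategoricalCrossEntropy` classes defined earlier in this file.
# %%
# Restore the best parameters found by the random search
dense1.weights = best_dense1_weights.copy()
dense1.biases = best_dense1_biases.copy()
dense2.weights = best_dense2_weights.copy()
dense2.biases = best_dense2_biases.copy()
# Forward pass with the restored parameters and report the final metrics
dense1.forward(X)
activation1.forward(dense1.output)
dense2.forward(activation1.output)
activation2.forward(dense2.output)
loss = loss_function.calculate(activation2.output, y)
predictions = np.argmax(activation2.output, axis=1)
accuracy = np.mean(predictions == y)
print(f"Best random guess -> loss: {loss}, acc: {accuracy}")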
# %% [markdown]
# # Test Strategy 2: Randomly Adjust Weights and Biases
# Starting from an initial set of weights and biases, nudge them by small random amounts on every iteration. If the new loss is lower, keep the adjusted weights and biases; otherwise, revert to the best set found so far.
# %%
# Create dataset
X, y = vertical_data(samples=100, classes=3)
# Create model
dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs
activation1 = Activation_ReLU()
dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 outputs
activation2 = Activation_Softmax()
# Create loss function
loss_function = Loss_CategoricalCrossEntropy()
# Helper variables
lowest_loss = 9999999 # some initial value
best_dense1_weights = dense1.weights.copy()
best_dense1_biases = dense1.biases.copy()
best_dense2_weights = dense2.weights.copy()
best_dense2_biases = dense2.biases.copy()
for iteration in range(10000):
    # Adjust the current weights and biases by small random amounts
    dense1.weights += 0.05 * np.random.randn(2, 3)
    dense1.biases += 0.05 * np.random.randn(1, 3)
    dense2.weights += 0.05 * np.random.randn(3, 3)
    dense2.biases += 0.05 * np.random.randn(1, 3)
    # Perform a forward pass of our training data through the network
    dense1.forward(X)
    activation1.forward(dense1.output)
    dense2.forward(activation1.output)
    activation2.forward(dense2.output)
    # Perform a forward pass through the loss function;
    # it takes the output of the second dense layer and returns the loss
    loss = loss_function.calculate(activation2.output, y)
    # Calculate accuracy from the output of activation2 and the targets
    # along the first axis
    predictions = np.argmax(activation2.output, axis=1)
    accuracy = np.mean(predictions == y)
    # If loss is smaller - print and save the weights and biases aside
    if loss < lowest_loss:
        print('New set of weights found, iteration:', iteration, 'loss:', loss, 'acc:', accuracy)
        best_dense1_weights = dense1.weights.copy()
        best_dense1_biases = dense1.biases.copy()
        best_dense2_weights = dense2.weights.copy()
        best_dense2_biases = dense2.biases.copy()
        lowest_loss = loss
    # Otherwise revert to the best weights and biases found so far
    else:
        dense1.weights = best_dense1_weights.copy()
        dense1.biases = best_dense1_biases.copy()
        dense2.weights = best_dense2_weights.copy()
        dense2.biases = best_dense2_biases.copy()
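# %% [markdown]
# A minimal visualisation sketch (not part of the original listing), assuming the same
# classes as above: copy the best parameters back into the layers, push a grid of points
# through the network, and shade each grid point by its predicted class to see the
# decision regions the random-nudge search ended up with on the vertical data.
# %%
dense1.weights = best_dense1_weights.copy()
dense1.biases = best_dense1_biases.copy()
dense2.weights = best_dense2_weights.copy()
dense2.biases = best_dense2_biases.copy()
# Build a grid that covers the data range
xx, yy = np.meshgrid(np.linspace(X[:, 0].min() - 0.1, X[:, 0].max() + 0.1, 200),
                     np.linspace(X[:, 1].min() - 0.1, X[:, 1].max() + 0.1, 200))
grid = np.c_[xx.ravel(), yy.ravel()]
# Forward pass of the grid through the best model found so far
dense1.forward(grid)
activation1.forward(dense1.output)
dense2.forward(activation1.output)
activation2.forward(dense2.output)
grid_predictions = np.argmax(activation2.output, axis=1).reshape(xx.shape)
# Shade the predicted regions and overlay the training points
plt.contourf(xx, yy, grid_predictions, alpha=0.3, cmap='brg')
plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap='brg')
plt.show()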
# %% [markdown]
# # Test Strategy 2 on the Spiral Dataset
# Apply the same random-nudge search to the harder spiral dataset, where the classes are not separated by simple straight boundaries.
# %%
# Create dataset
X, y = spiral_data(samples=100, classes=3)
# Create model
dense1 = Layer_Dense(2, 3) # first dense layer, 2 inputs
activation1 = Activation_ReLU()
dense2 = Layer_Dense(3, 3) # second dense layer, 3 inputs, 3 outputs
activation2 = Activation_Softmax()
# Create loss function
loss_function = Loss_CategoricalCrossEntropy()
# Helper variables
lowest_loss = 9999999 # some initial value
best_dense1_weights = dense1.weights.copy()
best_dense1_biases = dense1.biases.copy()
best_dense2_weights = dense2.weights.copy()
best_dense2_biases = dense2.biases.copy()
for iteration in range(10000):
    # Adjust the current weights and biases by small random amounts
    dense1.weights += 0.05 * np.random.randn(2, 3)
    dense1.biases += 0.05 * np.random.randn(1, 3)
    dense2.weights += 0.05 * np.random.randn(3, 3)
    dense2.biases += 0.05 * np.random.randn(1, 3)
    # Perform a forward pass of our training data through the network
    dense1.forward(X)
    activation1.forward(dense1.output)
    dense2.forward(activation1.output)
    activation2.forward(dense2.output)
    # Perform a forward pass through the loss function;
    # it takes the output of the second dense layer and returns the loss
    loss = loss_function.calculate(activation2.output, y)
    # Calculate accuracy from the output of activation2 and the targets
    # along the first axis
    predictions = np.argmax(activation2.output, axis=1)
    accuracy = np.mean(predictions == y)
    # If loss is smaller - print and save the weights and biases aside
    if loss < lowest_loss:
        print('New set of weights found, iteration:', iteration, 'loss:', loss, 'acc:', accuracy)
        best_dense1_weights = dense1.weights.copy()
        best_dense1_biases = dense1.biases.copy()
        best_dense2_weights = dense2.weights.copy()
        best_dense2_biases = dense2.biases.copy()
        lowest_loss = loss
    # Otherwise revert to the best weights and biases found so far
    else:
        dense1.weights = best_dense1_weights.copy()
        dense1.biases = best_dense1_biases.copy()
        dense2.weights = best_dense2_weights.copy()
        dense2.biases = best_dense2_biases.copy()
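# %% [markdown]
# A minimal sketch (not part of the original listing) that repeats the random-nudge search
# on the spiral data while recording the lowest loss seen so far, so the progress of the
# search can be plotted. It assumes the same layer, activation, and loss classes as above;
# the `loss_history` list is a hypothetical addition for illustration.
# %%
X, y = spiral_data(samples=100, classes=3)
dense1 = Layer_Dense(2, 3)
activation1 = Activation_ReLU()
dense2 = Layer_Dense(3, 3)
activation2 = Activation_Softmax()
loss_function = Loss_CategoricalCrossEntropy()
lowest_loss = 9999999
best_dense1_weights = dense1.weights.copy()
best_dense1_biases = dense1.biases.copy()
best_dense2_weights = dense2.weights.copy()
best_dense2_biases = dense2.biases.copy()
loss_history = []
for iteration in range(10000):
    # Nudge every parameter by a small random amount
    dense1.weights += 0.05 * np.random.randn(2, 3)
    dense1.biases += 0.05 * np.random.randn(1, 3)
    dense2.weights += 0.05 * np.random.randn(3, 3)
    dense2.biases += 0.05 * np.random.randn(1, 3)
    # Forward pass and loss
    dense1.forward(X)
    activation1.forward(dense1.output)
    dense2.forward(activation1.output)
    activation2.forward(dense2.output)
    loss = loss_function.calculate(activation2.output, y)
    if loss < lowest_loss:
        # Keep the nudged parameters
        best_dense1_weights = dense1.weights.copy()
        best_dense1_biases = dense1.biases.copy()
        best_dense2_weights = dense2.weights.copy()
        best_dense2_biases = dense2.biases.copy()
        lowest_loss = loss
    else:
        # Revert to the best parameters seen so far
        dense1.weights = best_dense1_weights.copy()
        dense1.biases = best_dense1_biases.copy()
        dense2.weights = best_dense2_weights.copy()
        dense2.biases = best_dense2_biases.copy()
    # Track the best loss so far to visualise how quickly the search stalls
    loss_history.append(lowest_loss)
plt.plot(loss_history)
plt.xlabel('iteration')
plt.ylabel('lowest loss so far')
plt.show()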