diff --git a/TD2 Deep Learning.ipynb b/TD2 Deep Learning.ipynb
index 2ecfce959ae6b947b633a758433f9bea0bf6992e..6c833bc6d98fe4b49f1dabf3e164d40e72a21e54 100644
--- a/TD2 Deep Learning.ipynb	
+++ b/TD2 Deep Learning.ipynb	
@@ -52,10 +52,72 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 1,
    "id": "b1950f0a",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "tensor([[-2.0332e-01,  6.5762e-01, -1.2718e+00, -1.0667e+00,  6.3339e-01,\n",
+      "          3.0044e-01, -9.4605e-02, -4.8752e-01,  2.5361e+00, -9.0793e-01],\n",
+      "        [ 1.8772e+00,  8.5481e-01, -8.4488e-01,  1.5698e+00, -1.6336e+00,\n",
+      "         -1.9409e+00, -4.7927e-01, -9.8437e-01,  8.8057e-01,  1.0159e+00],\n",
+      "        [-2.2458e-01, -1.5407e-01,  9.7949e-01, -1.3478e+00, -8.4752e-01,\n",
+      "          2.2291e+00,  2.0097e-02, -3.6921e-02, -1.5207e+00,  2.2080e+00],\n",
+      "        [ 1.8335e+00, -1.1116e-01,  1.7158e+00,  4.3745e-01, -1.5080e-02,\n",
+      "          2.2063e-02,  9.2092e-03, -1.8087e-01, -7.6077e-01,  3.4914e-01],\n",
+      "        [-1.5280e-03, -1.7249e+00,  3.9487e-01,  4.8410e-01, -1.2762e+00,\n",
+      "          4.3278e-01,  3.5852e-01, -7.2127e-01,  9.7572e-01, -4.2663e-01],\n",
+      "        [-9.6790e-01,  3.1904e-01,  1.9527e+00,  1.5507e-01, -2.6549e-02,\n",
+      "          3.3455e-02, -1.3134e+00,  1.4105e-01,  1.2060e+00,  4.3760e-01],\n",
+      "        [ 1.3415e-01,  7.1450e-01, -5.9799e-02, -4.5007e-01, -7.7680e-02,\n",
+      "          4.0893e-01, -1.9673e+00,  8.9624e-01,  6.0989e-01,  3.0245e+00],\n",
+      "        [ 9.0126e-01,  1.1798e+00, -1.6314e-01,  9.8894e-01, -5.0119e-01,\n",
+      "         -1.9976e-01, -7.0183e-01, -8.8300e-01, -1.1321e+00, -8.8728e-01],\n",
+      "        [-3.1896e-01,  5.0318e-02,  1.0354e+00, -3.3261e-01, -8.8974e-01,\n",
+      "         -7.7209e-01, -1.6692e-01, -6.9670e-01, -1.7232e-01, -6.2842e-01],\n",
+      "        [-6.6295e-01,  2.0141e+00,  3.3106e-01,  2.9839e-01, -1.1237e+00,\n",
+      "         -7.8125e-01, -3.0903e-01,  3.5664e-01, -1.9195e-01, -3.4968e-02],\n",
+      "        [-7.5442e-01,  6.8441e-01, -1.6399e+00, -1.5894e+00,  3.3328e-01,\n",
+      "         -5.4040e-01, -2.0520e-01,  1.1902e+00, -4.0546e-01,  5.4631e-01],\n",
+      "        [ 3.5297e-01,  1.8425e-01, -2.6629e-01,  2.6103e-01, -1.6353e-01,\n",
+      "         -1.5099e+00, -2.3602e+00,  1.8305e+00, -6.0727e-01,  7.7936e-01],\n",
+      "        [ 3.3149e-01, -1.0999e+00, -4.7988e-01, -1.2186e+00,  1.6860e+00,\n",
+      "          3.1453e-01, -1.3638e-01, -3.7778e-01,  1.0254e-01, -9.3037e-02],\n",
+      "        [-8.2793e-01,  1.0470e+00, -1.3539e+00, -6.7968e-01, -1.0165e+00,\n",
+      "         -5.5619e-02,  1.8310e+00,  3.9036e-01,  9.2613e-01, -1.7741e-01]])\n",
+      "AlexNet(\n",
+      "  (features): Sequential(\n",
+      "    (0): Conv2d(3, 64, kernel_size=(11, 11), stride=(4, 4), padding=(2, 2))\n",
+      "    (1): ReLU(inplace=True)\n",
+      "    (2): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "    (3): Conv2d(64, 192, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))\n",
+      "    (4): ReLU(inplace=True)\n",
+      "    (5): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "    (6): Conv2d(192, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "    (7): ReLU(inplace=True)\n",
+      "    (8): Conv2d(384, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "    (9): ReLU(inplace=True)\n",
+      "    (10): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "    (11): ReLU(inplace=True)\n",
+      "    (12): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "  )\n",
+      "  (avgpool): AdaptiveAvgPool2d(output_size=(6, 6))\n",
+      "  (classifier): Sequential(\n",
+      "    (0): Dropout(p=0.5, inplace=False)\n",
+      "    (1): Linear(in_features=9216, out_features=4096, bias=True)\n",
+      "    (2): ReLU(inplace=True)\n",
+      "    (3): Dropout(p=0.5, inplace=False)\n",
+      "    (4): Linear(in_features=4096, out_features=4096, bias=True)\n",
+      "    (5): ReLU(inplace=True)\n",
+      "    (6): Linear(in_features=4096, out_features=1000, bias=True)\n",
+      "  )\n",
+      ")\n"
+     ]
+    }
+   ],
    "source": [
     "import torch\n",
     "\n",
@@ -95,10 +157,18 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "id": "6e18f2fd",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CUDA is not available.  Training on CPU ...\n"
+     ]
+    }
+   ],
    "source": [
     "import torch\n",
     "\n",
@@ -121,10 +191,33 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "id": "462666a2",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Downloading https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz to data\\cifar-10-python.tar.gz\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "100.0%\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Extracting data\\cifar-10-python.tar.gz to data\n",
+      "Files already downloaded and verified\n"
+     ]
+    }
+   ],
    "source": [
     "import numpy as np\n",
     "from torchvision import datasets, transforms\n",
@@ -193,10 +286,25 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 24,
    "id": "317bf070",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Net(\n",
+      "  (conv1): Conv2d(3, 6, kernel_size=(5, 5), stride=(1, 1))\n",
+      "  (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "  (conv2): Conv2d(6, 16, kernel_size=(5, 5), stride=(1, 1))\n",
+      "  (fc1): Linear(in_features=400, out_features=120, bias=True)\n",
+      "  (fc2): Linear(in_features=120, out_features=84, bias=True)\n",
+      "  (fc3): Linear(in_features=84, out_features=10, bias=True)\n",
+      ")\n"
+     ]
+    }
+   ],
    "source": [
     "import torch.nn as nn\n",
     "import torch.nn.functional as F\n",
@@ -242,10 +350,67 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 25,
    "id": "4b53f229",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Epoch: 0 \tTraining Loss: 44.456879 \tValidation Loss: 40.405201\n",
+      "Validation loss decreased (inf --> 40.405201).  Saving model ...\n",
+      "Epoch: 1 \tTraining Loss: 37.539734 \tValidation Loss: 34.893430\n",
+      "Validation loss decreased (40.405201 --> 34.893430).  Saving model ...\n",
+      "Epoch: 2 \tTraining Loss: 32.288289 \tValidation Loss: 30.938065\n",
+      "Validation loss decreased (34.893430 --> 30.938065).  Saving model ...\n",
+      "Epoch: 3 \tTraining Loss: 29.366940 \tValidation Loss: 27.821222\n",
+      "Validation loss decreased (30.938065 --> 27.821222).  Saving model ...\n",
+      "Epoch: 4 \tTraining Loss: 27.163584 \tValidation Loss: 26.858292\n",
+      "Validation loss decreased (27.821222 --> 26.858292).  Saving model ...\n",
+      "Epoch: 5 \tTraining Loss: 25.464075 \tValidation Loss: 24.828610\n",
+      "Validation loss decreased (26.858292 --> 24.828610).  Saving model ...\n",
+      "Epoch: 6 \tTraining Loss: 24.102266 \tValidation Loss: 24.066201\n",
+      "Validation loss decreased (24.828610 --> 24.066201).  Saving model ...\n",
+      "Epoch: 7 \tTraining Loss: 22.977551 \tValidation Loss: 23.100200\n",
+      "Validation loss decreased (24.066201 --> 23.100200).  Saving model ...\n",
+      "Epoch: 8 \tTraining Loss: 22.004219 \tValidation Loss: 25.045447\n",
+      "Epoch: 9 \tTraining Loss: 21.150548 \tValidation Loss: 22.796602\n",
+      "Validation loss decreased (23.100200 --> 22.796602).  Saving model ...\n",
+      "Epoch: 10 \tTraining Loss: 20.331048 \tValidation Loss: 22.531555\n",
+      "Validation loss decreased (22.796602 --> 22.531555).  Saving model ...\n",
+      "Epoch: 11 \tTraining Loss: 19.613238 \tValidation Loss: 22.437164\n",
+      "Validation loss decreased (22.531555 --> 22.437164).  Saving model ...\n",
+      "Epoch: 12 \tTraining Loss: 18.859432 \tValidation Loss: 21.191249\n",
+      "Validation loss decreased (22.437164 --> 21.191249).  Saving model ...\n",
+      "Epoch: 13 \tTraining Loss: 18.185451 \tValidation Loss: 20.865803\n",
+      "Validation loss decreased (21.191249 --> 20.865803).  Saving model ...\n",
+      "Epoch: 14 \tTraining Loss: 17.615607 \tValidation Loss: 20.782799\n",
+      "Validation loss decreased (20.865803 --> 20.782799).  Saving model ...\n",
+      "Epoch: 15 \tTraining Loss: 16.942239 \tValidation Loss: 21.159325\n",
+      "Epoch: 16 \tTraining Loss: 16.310783 \tValidation Loss: 21.481381\n",
+      "Epoch: 17 \tTraining Loss: 15.756336 \tValidation Loss: 20.873583\n",
+      "Epoch: 18 \tTraining Loss: 15.156594 \tValidation Loss: 21.744170\n",
+      "Epoch: 19 \tTraining Loss: 14.669365 \tValidation Loss: 21.543261\n",
+      "Epoch: 20 \tTraining Loss: 14.132257 \tValidation Loss: 21.448154\n",
+      "Epoch: 21 \tTraining Loss: 13.608869 \tValidation Loss: 22.079492\n",
+      "Epoch: 22 \tTraining Loss: 13.124700 \tValidation Loss: 22.396737\n"
+     ]
+    },
+    {
+     "ename": "KeyboardInterrupt",
+     "evalue": "",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[1;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
+      "\u001b[1;32md:\\ECL\\3A\\MOD\\IA\\TD1\\gitlab_repo\\mod_4_6-td2\\TD2 Deep Learning.ipynb Cell 15\u001b[0m line \u001b[0;36m2\n\u001b[0;32m     <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X20sZmlsZQ%3D%3D?line=25'>26</a>\u001b[0m loss \u001b[39m=\u001b[39m criterion(output, target)\n\u001b[0;32m     <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X20sZmlsZQ%3D%3D?line=26'>27</a>\u001b[0m \u001b[39m# Backward pass: compute gradient of the loss with respect to model parameters\u001b[39;00m\n\u001b[1;32m---> <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X20sZmlsZQ%3D%3D?line=27'>28</a>\u001b[0m loss\u001b[39m.\u001b[39;49mbackward()\n\u001b[0;32m     <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X20sZmlsZQ%3D%3D?line=28'>29</a>\u001b[0m \u001b[39m# Perform a single optimization step (parameter update)\u001b[39;00m\n\u001b[0;32m     <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X20sZmlsZQ%3D%3D?line=29'>30</a>\u001b[0m optimizer\u001b[39m.\u001b[39mstep()\n",
+      "File \u001b[1;32mc:\\Users\\basil\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\torch\\_tensor.py:488\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m    478\u001b[0m \u001b[39mif\u001b[39;00m has_torch_function_unary(\u001b[39mself\u001b[39m):\n\u001b[0;32m    479\u001b[0m     \u001b[39mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m    480\u001b[0m         Tensor\u001b[39m.\u001b[39mbackward,\n\u001b[0;32m    481\u001b[0m         (\u001b[39mself\u001b[39m,),\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    486\u001b[0m         inputs\u001b[39m=\u001b[39minputs,\n\u001b[0;32m    487\u001b[0m     )\n\u001b[1;32m--> 488\u001b[0m torch\u001b[39m.\u001b[39;49mautograd\u001b[39m.\u001b[39;49mbackward(\n\u001b[0;32m    489\u001b[0m     \u001b[39mself\u001b[39;49m, gradient, retain_graph, create_graph, inputs\u001b[39m=\u001b[39;49minputs\n\u001b[0;32m    490\u001b[0m )\n",
+      "File \u001b[1;32mc:\\Users\\basil\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\torch\\autograd\\__init__.py:197\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m    192\u001b[0m     retain_graph \u001b[39m=\u001b[39m create_graph\n\u001b[0;32m    194\u001b[0m \u001b[39m# The reason we repeat same the comment below is that\u001b[39;00m\n\u001b[0;32m    195\u001b[0m \u001b[39m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m    196\u001b[0m \u001b[39m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 197\u001b[0m Variable\u001b[39m.\u001b[39;49m_execution_engine\u001b[39m.\u001b[39;49mrun_backward(  \u001b[39m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m    198\u001b[0m     tensors, grad_tensors_, retain_graph, create_graph, inputs,\n\u001b[0;32m    199\u001b[0m     allow_unreachable\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m, accumulate_grad\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m)\n",
+      "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
+     ]
+    }
+   ],
    "source": [
     "import torch.optim as optim\n",
     "\n",
@@ -324,18 +489,44 @@
     "Does overfit occur? If so, do an early stopping."
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Overfit occurs starting from epoch 15 :\n",
+    "\n",
+    "![overfit2](results/model1_overfit2.PNG)\n",
+    "\n",
+    "The training loss decreases but not the validation loss, which means the model is trying to hard to get the right results for the training data only. That means that the model will be very efficient on the training data, but not so much with new data it has never been confronted to. We thus perform an early stopping of the training.\n"
+   ]
+  },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 26,
    "id": "d39df818",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAHHCAYAAACle7JuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/P9b71AAAACXBIWXMAAA9hAAAPYQGoP6dpAABfZ0lEQVR4nO3dd1hTZ/8G8DsBEjbKBtlDUNwbZxWL6+fEWq2tWu2y2lZrx2uX2qW276vdttqqXdaqFVeruHEUF4qiIgqiIntPWcn5/UFITUWFEDgJ3J/rytVyTvLNN0jIzXmecx6JIAgCiIiIiAyQVOwGiIiIiLTFIENEREQGi0GGiIiIDBaDDBERERksBhkiIiIyWAwyREREZLAYZIiIiMhgMcgQERGRwWKQISIiIoPFIENkID799FP4+PjAyMgIXbp0EbudFmPPnj3o0qULTE1NIZFIkJ+fL3ZL95BIJFi8eHG9H3fjxg1IJBKsX79e5z0RNRUGGSItrV+/HhKJRH0zNTVF27ZtMXfuXGRkZOj0ufbu3Ys33ngD/fr1w7p16/Dxxx/rtD7VLicnB5MmTYKZmRm+/vpr/Pzzz7CwsKj1vnf/PBw7duye/YIgwN3dHRKJBP/3f//X2K3r3EcffYQxY8bAyclJ6+BE1BiMxW6AyNC9//778Pb2RllZGY4dO4ZVq1bhr7/+wsWLF2Fubq6T5zh48CCkUil++OEHyGQyndSkhzt9+jSKiorwwQcfYOjQoXV6jKmpKTZs2ID+/ftrbI+MjMTt27chl8sbo9VG984778DZ2Rldu3ZFRESE2O0QqfGIDFEDjRgxAk8++SSeeeYZrF+/HvPmzUNSUhK2b9/e4NqlpaUAgMzMTJiZmeksxAiCgDt37uikVnOWmZkJAGjVqlWdHzNy5Ehs3rwZVVVVGts3bNiA7t27w9nZWZctNpmkpCSkpaXhl19+EbsVIg0MMkQ6NmTIEADVv/hr/PLLL+jevTvMzMxga2uLyZMnIzk5WeNxjzzyCDp06IDo6GgMHDgQ5ubmeOuttyCRSLBu3TqUlJSohy5q5jRUVVXhgw8+gK+vL+RyOby8vPDWW2+hvLxco7aXlxf+7//+DxEREejRowfMzMzw3Xff4fDhw5BIJNi0aROWLFmCNm3awMrKChMnTkRBQQHKy8sxb948ODo6wtLSEk8//fQ9tdetW4chQ4bA0dERcrkc7du3x6pVq+75vtT0cOzYMfTq1Qumpqbw8fHBTz/9dM998/PzMX/+fHh5eUEul8PNzQ3Tpk1Ddna2+j7l5eVYtGgR/Pz8IJfL4e7ujjfeeOOe/u5n8+bN6n8Te3t7PPnkk0hJSdH495g+fToAoGfPnpBIJJgxY8ZD606ZMgU5OTnYt2+feltFRQW2bNmCJ554otbHlJSUYMGCBXB3d4dcLkdAQAD++9//QhAEjfuVl5dj/vz5cHBwgJWVFcaMGYPbt2/XWjMlJQUzZ86Ek5MT5HI5goKCsHbt2of2fz9eXl5aP5aoMXFoiUjHEhMTAQB2dnYAqucWvPvuu5g0aRKeeeYZZGVl4csvv8TAgQNx7tw5jb/2c3JyMGLECEyePBlPPvkknJyc0KNHD6xevRqnTp3C999/DwDo27cvAOCZZ57Bjz/+iIkTJ2LBggU4efIkli5diri4OISHh2v0FR8fjylTpuD555/Hs88+i4CAAPW+pUuXwszMDP/5z3+QkJCAL7/8EiYmJpBKpcjLy8PixYtx4sQJrF+/Ht7e3njvvffUj121ahWCgoIwZswYGBsbY+fOnXjxxRehVCoxZ84cjR4SEhIwceJEzJo1C9OnT8fatWsxY8YMdO/eHUFBQQCA4uJiDBgwAHFxcZg5cya6deuG7Oxs7NixA7dv34a9vT2USiXGjBmDY8eO4bnnnkO7du0QGxuLlStX4urVq9i2bdsD/43Wr1+Pp59+Gj179sTSpUuRkZGBzz//HMePH1f/m7z99tsICAjA6tWr1cOHvr6+D/339/LyQnBwMH777TeMGDECALB7924UFBRg8uTJ+OKLLzTuLwgCxowZg0OHDmHWrFno0qULIiIi8PrrryMlJQUrV65U3/eZZ57BL7/8gieeeAJ9+/bFwYMHMWrUqHt6yMjIQJ8+fSCRSDB37lw4ODhg9+7dmDVrFgoLCzFv3ryHvg4igyEQkVbWrVsnABD2798vZGVlCcnJycLGjRsFOzs7wczMTLh9+7Zw48YNwcjISPjoo480HhsbGysYGxtrbB80aJAAQPj222/vea7p06cLFhYWGttiYmIEAMIzzzyjsf21114TAAgHDx5Ub/P09BQACHv27NG476FDhwQAQocOHYSKigr19ilTpggSiUQYMWKExv2Dg4MFT09PjW2lpaX39Dts2DDBx8dHY1tND0eOHFFvy8zMFORyubBgwQL1tvfee08AIGzduvWeukqlUhAEQfj5558FqVQqHD16VGP/t99+KwAQjh8/fs9ja1RUVAiOjo5Chw4dhDt37qi379q1SwAgvPfee+ptNf/Gp0+fvm+92u771VdfCVZWVurvzWOPPSYMHjxY/X0YNWqU+nHbtm0TAAgffvihRr2JEycKEolESEhIEAThn3/vF198UeN+TzzxhABAWLRokXrbrFmzBBcXFyE7O1vjvpMnTxZsbGzUfSUlJQkAhHXr1j309dXIysq65/mIxMShJaIGGjp0KBwcHODu7o7JkyfD0tIS4eHhaNOmDbZu3QqlUolJkyYhOztbfXN2doa/vz8OHTqkUUsul+Ppp5+u0/P+9ddfAIBXX31VY/uCBQsAAH/++afGdm9vbwwbNqzWWtOmTYOJiYn66969e0MQBMycOVPjfr1790ZycrLG/A8zMzP1/xcUFCA7OxuDBg3C9evXUVBQoPH49u3bY8CAAeqvHRwcEBAQgOvXr6u3/fHHH+jcuTPGjx9/T58SiQRA9bBQu3btEBgYqPF9rRnW+/f39W5nzpxBZmYmXnzxRZiamqq3jxo1CoGBgfd837QxadIk3LlzB7t27UJRURF27dp132Glv/76C0ZGRnj55Zc1ti9YsACCIGD37t3q+wG4537/ProiCAL++OMPjB49GoIgaHx/hg0bhoKCApw9e7bBr5FIX3BoiaiBvv76a7Rt2xbGxsZwcnJCQEAApNLqvxGuXbsGQRDg7+9f62PvDg8A0KZNmzpP6L158yakUin8/Pw0tjs7O6NVq1a4efOmxnZvb+/71vLw8ND42sbGBgDg7u5+z3alUomCggL10Nnx48exaNEiREVFqScn1ygoKFDXqu15AKB169bIy8tTf52YmIiwsLD79gpUf1/j4uLg4OBQ6/6aSbq1qfm+3D20ViMwMLDWU6fry8HBAUOHDsWGDRtQWloKhUKBiRMn3rcfV1dXWFlZaWxv166dRr81/97/Ht769+vIyspCfn4+Vq9ejdWrV9f6nA/6/hAZGgYZogbq1asXevToUes+pVIJiUSC3bt3w8jI6J79lpaWGl/ffXSjrmqOUjzMg2rX1t
uDtguqSaiJiYkICQlBYGAgVqxYAXd3d8hkMvz1119YuXIllEplverVlVKpRMeOHbFixYpa9/87gInhiSeewLPPPov09HSMGDGiXmc+NUTN9/zJJ59UT1b+t06dOjVJL0RNgUGGqBH5+vpCEAR4e3ujbdu2Oq3t6ekJpVKJa9euqf96B6oneubn58PT01Onz1ebnTt3ory8HDt27NA42vKgoZ2H8fX1xcWLFx96n/PnzyMkJKTOQa5GzfclPj5ePRRVIz4+Xmfft/Hjx+P555/HiRMn8Pvvvz+wn/3796OoqEjjqMyVK1c0+q35905MTNQ4ChMfH69Rr+aMJoVCUedr3xAZMs6RIWpEEyZMgJGREZYsWXLPUQdBEJCTk6N17ZEjRwIAPvvsM43tNUcpajubRddqjrDc/doKCgqwbt06rWuGhYXh/Pnz95x1dffzTJo0CSkpKVizZs0997lz5w5KSkruW79Hjx5wdHTEt99+q3Gq9u7duxEXF6ez75ulpSVWrVqFxYsXY/To0fe938iRI6FQKPDVV19pbF+5ciUkEon6zKea//77rKd///sbGRkhLCwMf/zxR62BMCsrS5uXQ6S3eESGqBH5+vriww8/xMKFC3Hjxg2MGzcOVlZWSEpKQnh4OJ577jm89tprWtXu3Lkzpk+fjtWrVyM/Px+DBg3CqVOn8OOPP2LcuHEYPHiwjl/NvUJDQyGTyTB69Gg8//zzKC4uxpo1a+Do6Ii0tDStar7++uvYsmULHnvsMcycORPdu3dHbm4uduzYgW+//RadO3fGU089hU2bNuGFF17AoUOH0K9fPygUCly5cgWbNm1SXy+nNiYmJli+fDmefvppDBo0CFOmTFGffu3l5YX58+c35Fui4X5DO3cbPXo0Bg8ejLfffhs3btxA586dsXfvXmzfvh3z5s1Tz4np0qULpkyZgm+++QYFBQXo27cvDhw4gISEhHtqLlu2DIcOHULv3r3x7LPPon379sjNzcXZs2exf/9+5Obm1vu1/Pzzz7h586Z6HtSRI0fw4YcfAgCeeuqpJjkCSFQbBhmiRvaf//wHbdu2xcqVK7FkyRIA1XM4QkNDMWbMmAbV/v777+Hj44P169cjPDwczs7OWLhwIRYtWqSL1h8qICAAW7ZswTvvvIPXXnsNzs7OmD17NhwcHO4546muLC0tcfToUSxatAjh4eH48ccf4ejoiJCQELi5uQEApFIptm3bhpUrV+Knn35CeHg4zM3N4ePjg1deeeWhw3gzZsyAubk5li1bhjfffBMWFhYYP348li9f3mRzWWpIpVLs2LED7733Hn7//XesW7cOXl5e+PTTT9VnoNVYu3YtHBwc8Ouvv2Lbtm0YMmQI/vzzz3vmBDk5OeHUqVN4//33sXXrVnzzzTews7NDUFAQli9frlWfP/zwAyIjI9VfHzp0SD2E2L9/fwYZEo1EqO8sOyIiIiI9wTkyREREZLAYZIiIiMhgMcgQERGRwWKQISIiIoPFIENEREQGi0GGiIiIDFazv46MUqlEamoqrKys6n0pcyIiIhKHIAgoKiqCq6ureiHe2jT7IJOamqoXC8gRERFR/SUnJ6svhlmbZh9kahZhS05OhrW1tcjdEBERUV0UFhbC3d1dYzHV2jT7IFMznGRtbc0gQ0REZGAeNi2Ek32JiIjIYDHIEBERkcFikCEiIiKDxSBDREREBktvgsyyZcsgkUgwb9489bZHHnkEEolE4/bCCy+I1yQRERHpFb04a+n06dP47rvv0KlTp3v2Pfvss3j//ffVX5ubmzdla0RERKTHRD8iU1xcjKlTp2LNmjVo3br1PfvNzc3h7OysvvEUaiIiIqohepCZM2cORo0ahaFDh9a6/9dff4W9vT06dOiAhQsXorS09IH1ysvLUVhYqHEjIiKi5knUoaWNGzfi7NmzOH36dK37n3jiCXh6esLV1RUXLlzAm2++ifj4eGzduvW+NZcuXYolS5Y0VstERESkRySCIAhiPHFycjJ69OiBffv2qefGPPLII+jSpQs+++yzWh9z8OBBhISEICEhAb6+vrXep7y8HOXl5eqvay5xXFBQwGEpIiIiA1FYWAgbG5uHfn6LdkQmOjoamZmZ6Natm3qbQqHAkSNH8NVXX6G8vBxGRkYaj+nduzcAPDDIyOVyyOXyxmuciIiI9IZoQSYkJASxsbEa255++mkEBgbizTffvCfEAEBMTAwAwMXFpSlaJCIiIj0nWpCxsrJChw4dNLZZWFjAzs4OHTp0QGJiIjZs2ICRI0fCzs4OFy5cwPz58zFw4MBaT9NuaoIgID6jCI5WprC1kIndDhERUYsk+llL9yOTybB//36EhoYiMDAQCxYsQFhYGHbu3Cl2awCA2b+cxfDPjuLPC6lit0JERNRi6cUF8WocPnxY/f/u7u6IjIwUr5mH6Ohmgz2X0nHkWjaeCvYSux0iIqIWSW+PyOi7gf4OAICoxBxUKpQid0NERNQyMchoKcjVGnYWMhSXV+HszTyx2yEiImqRGGS0JJVK0N/fHgBw5FqWyN0QERG1TAwyDTBANbx09Fq2yJ0QERG1TAwyDTBQdUQmNqUAuSUVIndDRETU8jDINICjtSkCna0gCMBRDi8RERE1OQaZBhrYtnp46chVDi8RERE1NQaZBhqonieTBZHW3yQiImqxGGQaqIdXa5iaSJFZVI74jCKx2yEiImpRGGQayNTECH187AAAR65yngwREVFTYpDRAZ6GTUREJA4GGR0Y1Lb6NOyTSbm4U6EQuRsiIqKWg0FGB3wdLOFqY4qKKiVOJuWI3Q4REVGLwSCjAxKJhKdhExERiYBBRkcG3HUaNhERETUNBhkd6e9nD6kEuJZZjNT8O2K3Q0RE1CIwyOiIjbkJOru3AsCjMkRERE2FQUaHaoaXOE+GiIioaTDI6FDNadjHErKhUHK5AiIiosbGIKNDnd1awcrUGAV3KnHhdr7Y7RARETV7DDI6ZGwkRX+/6qMyHF4iIiJqfAwyOqaeJ8MJv0RERI2OQUbHBqrmycQk56PgTqXI3RARETVvDDI65tbaHD4OFlAoBUQlcniJiIioMTHINIKBquGlSM6TISIialQMMo2gZnjpyNUsCAJPwyYiImosDDKNoI+PHWRGUqTk30FSdonY7RARETVbDDKNwFxmjB5erQFUH5UhIiKixsEg00j+OQ2b82SIiIgaC4NMI6mZJxOVmIPyKoXI3RARETVPDDKNpJ2zNewt5bhTqUD0zTyx2yEiImqWGGQaiVQqwUB/LldARETUmBhkGtGAu07DJiIiIt1jkGlENRN+L6cVIquoXORuiIiImh8GmUZkbylHkKs1AOBYAo/KEBER6RqDTCNTn4bNeTJEREQ6xyDTyGpOwz56LQtKJZcrICIi0iUGmUbWw9MW5jIjZBdXIC69UOx2iIiImhUGmUYmM5Yi2McOAIeXiIiIdI1BpgkM8Odp2ERERI2BQaYJDGxbPeH3zM1clFZUidwNERFR88Eg0wS87S3g1toMlQoBJ67niN0OERFRs6E3QWbZs
mWQSCSYN2+eeltZWRnmzJkDOzs7WFpaIiwsDBkZGeI1qSWJRKI+KsN5MkRERLqjF0Hm9OnT+O6779CpUyeN7fPnz8fOnTuxefNmREZGIjU1FRMmTBCpy4YZyHkyREREOid6kCkuLsbUqVOxZs0atG7dWr29oKAAP/zwA1asWIEhQ4age/fuWLduHf7++2+cOHFCxI6109fPHkZSCa5nlyA5t1TsdoiIiJoF0YPMnDlzMGrUKAwdOlRje3R0NCorKzW2BwYGwsPDA1FRUU3dZoNZm5qgq3srAMDRaxxeIiIi0gVjMZ9848aNOHv2LE6fPn3PvvT0dMhkMrRq1Upju5OTE9LT0+9bs7y8HOXl/yzQWFioPxehG+DvgDM383Dkahae6O0hdjtEREQGT7QjMsnJyXjllVfw66+/wtTUVGd1ly5dChsbG/XN3d1dZ7Ubqma5guOJ2ahSKEXuhoiIyPCJFmSio6ORmZmJbt26wdjYGMbGxoiMjMQXX3wBY2NjODk5oaKiAvn5+RqPy8jIgLOz833rLly4EAUFBepbcnJyI7+Suuvk1gqtzE1QVFaF87fzxW6HiIjI4IkWZEJCQhAbG4uYmBj1rUePHpg6dar6/01MTHDgwAH1Y+Lj43Hr1i0EBwfft65cLoe1tbXGTV8YSSXo51d9VCaSp2ETERE1mGhzZKysrNChQweNbRYWFrCzs1NvnzVrFl599VXY2trC2toaL730EoKDg9GnTx8xWtaJgf72+PNCGo5czcKrj7YVux0iIiKDJupk34dZuXIlpFIpwsLCUF5ejmHDhuGbb74Ru60Gqbkw3oXb+cgvrUArc5nIHRERERkuiSAIgthNNKbCwkLY2NigoKBAb4aZHl0RiWuZxfj6iW4Y1clF7HaIiIj0Tl0/v0W/jkxLNMC/ZrkCXuWXiIioIRhkRFBzGvaRa1lo5gfEiIiIGhWDjAh6e9tBZixFWkEZErOKxW6HiIjIYDHIiMBMZoTe3rYAeBo2ERFRQzDIiGQAV8MmIiJqMAYZkdSchn0yKQdllQqRuyEiIjJMDDIiCXCygpO1HGWVSpy5kSd2O0RERAaJQUYkEonkn9Owr3F4iYiISBsMMiLiPBkiIqKGYZAR0QB/B0gkwJX0ImQUlondDhERkcFhkBGRrYUMHdvYAACOXuNp2ERERPXFICMyDi8RERFpj0FGZANVE36PJWRDqeRyBURERPXBICOybp6tYSk3Rm5JBS6mFojdDhERkUFhkBGZiZEUwb52ADhPhoiIqL4YZPTAQNU8mUjOkyEiIqoXBhk9ULNcwdmbeSgqqxS5GyIiIsPBIKMHPO0s4GlnjiqlgKjEHLHbISIiMhgMMnqi5jRszpMhIiKqOwYZPVFzGnbk1SwIAk/DJiIiqgsGGT3R188ecmMpbuWW4mJKodjtEBERGQQGGT1hKTfG0PZOAICt526L3A0REZFhYJDRIxO6tgEA7DyfiiqFUuRuiIiI9B+DjB4Z2NYBdhYyZBdXcNIvERFRHTDI6BETIylGd3YFAPxxlsNLRERED8Mgo2cmdKseXtp3OQOFvDgeERHRAzHI6JmObWzg62CB8iol9sSmi90OERGRXmOQ0TMSiQQTurkB4NlLRERED8Mgo4fGdqmeJ3Piei5S8u+I3A0REZH+YpDRQ26tzdHb2xYAsO1cisjdEBER6S8GGT1VM+l369nbXLKAiIjoPhhk9NSIji6QG0uRmFWC2JQCsdshIiLSSwwyesra1ASP1ixZcJbDS0RERLVhkNFjNcNLO8+nopJLFhAREd2DQUaPDfCvXrIgp6QCR69lid0OERGR3mGQ0WN3L1nA4SUiIqJ7McjouTDVxfH2cskCIiKiezDI6LkObazh52iJiioldsemid0OERGRXmGQ0XMSiQTju9ZcU4bDS0RERHdjkDEA41RB5mRSLm7nlYrcDRERkf5gkDEAbVqZoY9P9ZIF22NSRe6GiIhIfzDIGIgJXasn/f7BJQuIiIjUGGQMxIiOzpAbS3E9qwQXbnPJAiIiIkDkILNq1Sp06tQJ1tbWsLa2RnBwMHbv3q3e/8gjj0AikWjcXnjhBRE7Fo+VqQlCg5wBAOFcEZuIiAiAyEHGzc0Ny5YtQ3R0NM6cOYMhQ4Zg7NixuHTpkvo+zz77LNLS0tS3Tz75RMSOxTWhK5csICIiupuxmE8+evRoja8/+ugjrFq1CidOnEBQUBAAwNzcHM7OzmK0p3cG+NvD3lKG7OIKHLmahZB2TmK3REREJCq9mSOjUCiwceNGlJSUIDg4WL39119/hb29PTp06ICFCxeitPTBpx+Xl5ejsLBQ49ZcGN+9ZAGHl4iIiMQ9IgMAsbGxCA4ORllZGSwtLREeHo727dsDAJ544gl4enrC1dUVFy5cwJtvvon4+Hhs3br1vvWWLl2KJUuWNFX7TW5CVzesO34D+y5noOBOJWzMTMRuiYiISDQSQeRzeSsqKnDr1i0UFBRgy5Yt+P777xEZGakOM3c7ePAgQkJCkJCQAF9f31rrlZeXo7y8XP11YWEh3N3dUVBQAGtr60Z7HU1FEASErjyCa5nFWDahIyb38hC7JSIiIp0rLCyEjY3NQz+/RR9akslk8PPzQ/fu3bF06VJ07twZn3/+ea337d27NwAgISHhvvXkcrn6LKiaW3MikUgwvptqyQIOLxERUQsnepD5N6VSqXFE5W4xMTEAABcXlybsSP+M69IGEglwKikXyblcsoCIiFouUefILFy4ECNGjICHhweKioqwYcMGHD58GBEREUhMTMSGDRswcuRI2NnZ4cKFC5g/fz4GDhyITp06idm26FxbmaGPtx2irudge0wK5g7xF7slIiIiUYh6RCYzMxPTpk1DQEAAQkJCcPr0aURERODRRx+FTCbD/v37ERoaisDAQCxYsABhYWHYuXOnmC3rjbuHl7hkARERtVSiT/ZtbHWdLGRoisoq0fOj/SirVGLbnH7o4t5K7JaIiIh0xmAm+5J2rExNENpetWTB2dsid0NERCQOBhkDVjO8tPNCGpcsICKiFolBxoAN8LOHvaUcuSUViIzPErsdIiKiJscgY8CMjaQYo1qygCtiExFRS8QgY+AmqIaX9sVVL1lARETUkjDIGLggV2u0dbJERZUSf8Wmid0OERFRk2KQMXASiQTju7oBAMLPcniJiIhaFgaZZmBcV9fqJQtucMkCIiJqWRhkmgEXGzME+9gBALZx0i8REbUgDDLNxPiu1ZN+w7lkARERtSAMMs3EiI4uMDWR4np2CWKS88Vuh4iIqEkwyDQTlnJjDAtSLVnA4SUiImohGGSakZrhpZ3nU1FRxSULiIio+WOQaUb6q5YsyCutRORVLllARETNH4NMM2JsJMXYLjVLFnBFbCIiav4YZJqZmuGl/XGZKCjlkgVERNS8Mcg0M0Gu1ghwskJFlRJ/cskCIiJq5hhkmhmJRILx3WquKcPhJSIiat4YZJqhsV2qlyw4fSOPSxYQEVGzxiDTDLnYmKGvb/WSBbymDBERNWcMMs2UekVsLllARETNGINM
MzW8gzNMTaRIyi7BOS5ZQEREzRSDTDNlKTfGiA4uAID/RsTzqAwRETVLDDLN2Csh/jA1keLvxBxsPJ0sdjtEREQ6xyDTjHnZW+C10AAAwEd/xiGt4I7IHREREekWg0wz93Q/b3Rxb4Xi8iq8tTWWQ0xERNSsMMg0c0ZSCT6d2AkyIykOxWdhWwxPxyYiouaj3kHmzp07KC395yJrN2/exGeffYa9e/fqtDHSHX8nK7wc4gcAWLLzMrKKykXuiIiISDfqHWTGjh2Ln376CQCQn5+P3r1743//+x/Gjh2LVatW6bxB0o3nB/mivYs18ksrsWjHRbHbISIi0ol6B5mzZ89iwIABAIAtW7bAyckJN2/exE8//YQvvvhC5w2SbpgYSfHJxE4wkkrwV2w69lzkgpJERGT46h1kSktLYWVlBQDYu3cvJkyYAKlUij59+uDmzZs6b5B0p0MbG7wwyAcA8M62S8gvrRC5IyIiooapd5Dx8/PDtm3bkJycjIiICISGhgIAMjMzYW1trfMGSbdeGuIPXwcLZBeX4/1dl8Vuh4iIqEHqHWTee+89vPbaa/Dy8kLv3r0RHBwMoProTNeuXXXeIOmWqYkRPpnYGRIJsPVsCg7FZ4rdEhERkdYkghYXFklPT0daWho6d+4MqbQ6C506dQrW1tYIDAzUeZMNUVhYCBsbGxQUFPCI0V3e33kZa48nwcXGFHvnD4SVqYnYLREREanV9fNbq+vIODs7o2vXrpBKpSgsLMS2bdtgZWWldyGG7u+1YW3hYWuOtIIyLNt9Rex2iIiItFLvIDNp0iR89dVXAKqvKdOjRw9MmjQJnTp1wh9//KHzBqlxmMuMsSysIwDg15O3EJWYI3JHRERE9VfvIHPkyBH16dfh4eEQBAH5+fn44osv8OGHH+q8QWo8fX3t8URvDwDAm39cQGlFlcgdERER1U+9g0xBQQFsbW0BAHv27EFYWBjMzc0xatQoXLt2TecNUuNaOCIQLjamuJVbiv/tvSp2O0RERPVS7yDj7u6OqKgolJSUYM+ePerTr/Py8mBqaqrzBqlxWZma4OPx1UNMa48n4eytPJE7IiIiqrt6B5l58+Zh6tSpcHNzg6urKx555BEA1UNOHTt21HV/1AQGBzpiQtc2EATgjS0XUF6lELslIiKiOql3kHnxxRcRFRWFtWvX4tixY+rTr318fDhHxoC9N7o97C3lSMgsxpcHEsRuh4iIqE60uo5MjZqHSiQSnTWka7yOTN3tjk3D7F/PwkgqwfY5/dChjY3YLRERUQvVqNeR+emnn9CxY0eYmZnBzMwMnTp1ws8//6x1s6QfRnR0wciOzlAoBbyx5QIqFUqxWyIiInqgegeZFStWYPbs2Rg5ciQ2bdqETZs2Yfjw4XjhhRewcuXKetVatWoVOnXqBGtra1hbWyM4OBi7d+9W7y8rK8OcOXNgZ2cHS0tLhIWFISMjo74tUz0sGdMBrcxNcDmtEN9FJordDhER0QPVe2jJ29sbS5YswbRp0zS2//jjj1i8eDGSkpLqXGvnzp0wMjKCv78/BEHAjz/+iE8//RTnzp1DUFAQZs+ejT///BPr16+HjY0N5s6dC6lUiuPHj9f5OTi0VH/h525j/u/nITOS4s+X+8PfyUrsloiIqIWp6+d3vYOMqakpLl68CD8/P43t165dQ8eOHVFWVqZdxyq2trb49NNPMXHiRDg4OGDDhg2YOHEiAODKlSto164doqKi0KdPnzrVY5CpP0EQMHP9aRyKz0IX91b4Y3ZfGEn1dx4UERE1P402R8bPzw+bNm26Z/vvv/8Of3//+pZTUygU2LhxI0pKShAcHIzo6GhUVlZi6NCh6vsEBgbCw8MDUVFR961TXl6OwsJCjRvVj0QiwccTOsJKboyY5HysO173o2xERERNybi+D1iyZAkef/xxHDlyBP369QMAHD9+HAcOHKg14DxMbGwsgoODUVZWBktLS4SHh6N9+/aIiYmBTCZDq1atNO7v5OSE9PT0+9ZbunQplixZUu8+SJOLjRkWjmyHt8Jj8d+98Rjazgle9hZit0VERKSh3kdkwsLCcPLkSdjb22Pbtm3Ytm0b7O3tcerUKYwfP77eDQQEBCAmJgYnT57E7NmzMX36dFy+fLnedWosXLgQBQUF6ltycrLWtVq6Kb3c0dfXDmWVSvxn6wUolVqfqU9ERNQoGnQdmbtlZmbi+++/x1tvvdWgOkOHDoWvry8ef/xxhISEIC8vT+OojKenJ+bNm4f58+fXqR7nyDTMrZxSDPvsCO5UKvDhuA54so+n2C0REVEL0KjXkalNWloa3n333QbXUSqVKC8vR/fu3WFiYoIDBw6o98XHx+PWrVsIDg5u8PNQ3XjYmeP1YQEAgGW7ryAl/47IHREREf2j3nNkdGnhwoUYMWIEPDw8UFRUhA0bNuDw4cOIiIiAjY0NZs2ahVdffRW2trawtrbGSy+9hODg4DqfsUS6Mb2vF3ZdSMXZW/l4a2ss1j/dU6+v5kxERC2Hzo7IaCMzMxPTpk1DQEAAQkJCcPr0aURERODRRx8FAKxcuRL/93//h7CwMAwcOBDOzs7YunWrmC23SEZSCT6Z2BkyYykir2Zh69kUsVsiIiICoMM5MufPn0e3bt2gUOjXysmcI6M7Xx9KwKcR8bCSG+OPF/uiLS+UR0REjaSun991Hlp69dVXH7g/Kyur7t2RQXp+oA8i47Nw6kYunl53GuFz+sLRylTstoiIqAWrc5A5d+7cQ+8zcODABjVD+s3YSIrvnuqOCav+RlJ2CWatP4ONz/WBhVzUqVZERNSC6WxoSV9xaEn3bmSXYMKqv5FbUoGQQEesntaDSxgQEZFONfnp19RyeNlbYM20HpAZS3HgSiaW7LyEZp6HiYhITzHIkFa6e7bGZ493AQD8FHUTPxzjekxERNT0GGRIayM7uuCtkYEAgI/+isOei2kid0RERC0Ngww1yLMDfPBkHw8IAvDKxhicu5UndktERNSCMMhQg0gkEiweHYTBAQ4or1LimR/P4FZOqdhtERFRC6HVWUv5+fk4deoUMjMzoVQqNfZNmzZNZ83pAs9aahol5VWY9F0ULqUWwsfBAltn90Urc5nYbRERkYGq6+d3vYPMzp07MXXqVBQXF8Pa2lpjzR2JRILc3Fztu24EDDJNJ6OwDOO/Po7UgjL08rbFz7N6QW5sJHZbRERkgBrt9OsFCxZg5syZKC4uRn5+PvLy8tQ3fQsx1LScrE2x9umesJIb41RSLt7YcoGnZRMRUaOqd5BJSUnByy+/DHNz88bohwxcoLM1Vj3ZHcZSCbbHpGLFvqtit0RERM1YvYPMsGHDcObMmcbohZqJ/v72+Hh8RwDAlwcTsOl0ssgdERFRc1XvRXJGjRqF119/HZcvX0bHjh1hYmKisX/MmDE6a44M16Se7kjOK8WXBxPwVngsXFqZYoC/g9htERFRM1Pvyb5S6f0P4kgkEigUigY3pUuc7CseQRAw7/cYbI9JhZXcGJtnByPQmf8GRET0cI022VepVN73pm8hhsQ
lkUjwycRO6OVti6LyKjy97jQyCsvEbouIiJoRXhCPGpXc2Airn+oOHwcLpBWU4el1p1FcXiV2W0RE1EzUaWjpiy++wHPPPQdTU1N88cUXD7zvyy+/rLPmdIFDS/rhVk4pxn9zHDklFRgc4IA103rA2Ig5moiIaqfTC+J5e3vjzJkzsLOzg7e39/2LSSS4fv26dh03EgYZ/XHuVh4mrz6B8iolpvb2wIfjOmhcUJGIiKhGo13Z19AwyOiXPRfTMPvXsxAE4K2RgXhuoK/YLRERkR5qtMm+RA0xvIML3h7ZDgDw8V9X8OeFNJE7IiIiQ1bv68gAwO3bt7Fjxw7cunULFRUVGvtWrFihk8ao+ZrV3xvJuaX4Meom5m+KgbONHN09bcVui4iIDFC9g8yBAwcwZswY+Pj44MqVK+jQoQNu3LgBQRDQrVu3xuiRmhmJRIL3RgchJf8O9sdl4tmforHp+WD4OVqK3RoRERmYeg8tLVy4EK+99hpiY2NhamqKP/74A8nJyRg0aBAee+yxxuiRmiEjqQRfTOmKjm1skFtSgYnf/o3om3lit0VERAam3kEmLi4O06ZNAwAYGxvjzp07sLS0xPvvv4/ly5frvEFqvsxlxlj/dE90drNBfmklnlhzAhGX0sVui4iIDEi9g4yFhYV6XoyLiwsSExPV+7Kzs3XXGbUIdpZy/PZcHwwOcEB5lRKzf4nGzyduit0WEREZiHoHmT59+uDYsWMAgJEjR2LBggX46KOPMHPmTPTp00fnDVLzZy4zxpppPfB4D3coBeDdbRfxacQVNPMrAxARkQ7Ue7LvihUrUFxcDABYsmQJiouL8fvvv8Pf359nLJHWjI2kWBbWEc42pvj8wDV8fSgR6QXlWBbWESa8AjAREd1HvS6Ip1AocPz4cXTq1AmtWrVqxLZ0hxfEMzwbT93C29suQqEUMLCtA76Z2g2Wcq2uFEBERAaqUS6IZ2RkhNDQUOTl8ewSajyTe3lgzbTuMDMxwpGrWZi8OgqZRVw1m4iI7lXvY/YdOnTQu/WUqPkZEuiE357rA1sLGS6mFGLCN38jMatY7LaIiEjP1DvIfPjhh3jttdewa9cupKWlobCwUONGpCtd3Fth6+y+8LQzx+28O5i4iteaISIiTXWeI/P+++9jwYIFsLKy+ufBd61cLAgCJBIJFAqF7rtsAM6RMXzZxeWYuf40LtwugNxYii+ndEVokLPYbRERUSPS+erXRkZGSEtLQ1xc3APvN2jQoPp12sgYZJqHkvIqzN1wFofisyCVAB+M64CpvT3FbouIiBqJzoOMVCpFeno6HB0dddZkU2CQaT6qFEq8HX4Rv59JBgC8NMQPrz7aVuPIIBERNQ+NctYSPzBITDXXmnklxB8A8OXBBLy+5QIqFUqROyMiIrHU6+Icbds+/K/f3NzcBjVE9CASiQTzH20LFxtTvL3tIrZE30ZmUTlWTe0GC15rhoioxanXb/4lS5bAxsamsXohqrPJvTzgaC3HnF/P4cjVLDy+OgprZ/SEo5Wp2K0REVET4hwZMmgxyfmYuf40cksq4G5rhh+f7gUfB0ux2yIiogbS+RwZzo8hfXT3tWaSc+8gbNXfOHuL15ohImop6hxkuBIx6Ssvewv8MbsvOrnZIK+0Ek+sOYGIS+lit0VERE2gzkFGqVQa3LAStRz2lnL89mwfDA5wQFmlEs//HI0lOy+hrFK/LtBIRES6Ve8lCnRp6dKl6NmzJ6ysrODo6Ihx48YhPj5e4z6PPPIIJBKJxu2FF14QqWPSZxZyY6yZ1gMz+noBANYdv4FxXx9HfHqRuI0REVGjETXIREZGYs6cOThx4gT27duHyspKhIaGoqSkRON+zz77LNLS0tS3Tz75RKSOSd8ZG0mxeEwQ1s3oCXtLGa6kF2H0V8ew/ngSh0eJiJqhOp+11BSysrLg6OiIyMhIDBw4EED1EZkuXbrgs88+06omz1pqubKKyvHGlvM4FJ8FAHgkwAGfTuwMByu5yJ0REdHDNMqVfRtbQUEBAMDW1lZj+6+//gp7e3t06NABCxcuRGlpqRjtkYFxsJJj7YyeWDImCDJjKQ7HZ2H4Z0dw8EqG2K0REZGO6M0RGaVSiTFjxiA/Px/Hjh1Tb1+9ejU8PT3h6uqKCxcu4M0330SvXr2wdevWWuuUl5ejvLxc/XVhYSHc3d15RKaFu5pRhJd/O4crqvky04M9sXBkO5iaGIncGRER1Ubni0Y2ttmzZ2P37t04duwY3Nzc7nu/gwcPIiQkBAkJCfD19b1n/+LFi7FkyZJ7tjPIUFmlAp/sicfa40kAAH9HS3wxpSvaufDngohI3xhUkJk7dy62b9+OI0eOwNvb+4H3LSkpgaWlJfbs2YNhw4bds59HZOhhIq9m4bXN55FVVA6ZkRRvjgjE0329IJXyoo9ERPrCIObICIKAuXPnIjw8HAcPHnxoiAGAmJgYAICLi0ut++VyOaytrTVuRHcb1NYBe14ZgKHtHFGhUOKDXZcxY/1pZBaWid0aERHVk6hHZF588UVs2LAB27dvR0BAgHq7jY0NzMzMkJiYiA0bNmDkyJGws7PDhQsXMH/+fLi5uSEyMrJOz8Gzluh+BEHALydv4cNdl1FepYSthQzLwzrh0fZOYrdGRNTiGcTQ0v3Wb1q3bh1mzJiB5ORkPPnkk7h48SJKSkrg7u6O8ePH45133qlzKGGQoYdJyCzCS7/FIC6tEAAwtbcH3hnVHmYyTgQmIhKLQQSZpsAgQ3VRXqXAfyPiseZo9URgP0dLfD65C4JcbUTujIioZTKIOTJE+kJubIS3R7XHL7N6w9FKjoTMYoz7+jjWHLkOpbJZZ30iIoPGIEN0l/7+9tgzbyAebe+ESoWAj/6Kw7S1p5DBicBERHqJQYboX2wtZFj9VHcsndARZiZGOJaQjeGfHcHmM8k8OkNEpGcYZIhqIZFIMKWXB3a93B8d2lgjr7QSr2+5gAmr/sb55Hyx2yMiIhUGGaIH8HWwxNbZ/fDWyEBYyIwQk5yPcd8cx5tbLiC7uPzhBYiIqFHxrCWiOsosLMOy3Vew9VwKAMDK1Bjzh7bFU8GeMDHi3wRERLrE069VGGRI16Jv5mLRjku4mFJ93Zm2TpZYPDoIff3sRe6MiKj5YJBRYZChxqBQCvj9dDI+jbiCvNJKAMDIjs54e1R7tGllJnJ3RESGj0FGhUGGGlNBaSVW7IvHzyduQikApiZSzB7kh+cH+cDUhFcGJiLSFoOMCoMMNYW4tEIs3nEJJ5NyAQBurc3wzqj2GBbkdN+lOIiI6P4YZFQYZKipCIKAXRfS8PFfcUgrqL6A3gB/eywa3R5+jlYid0dEZFgYZFQYZKiplVZU4ZtDiVh95DoqFEoYSyWY0dcLrwz1h5WpidjtEREZBAYZFQYZEsvNnBJ8sOsy9sdlAgDsLeV4c3gAwrq5QSrlcBMR0YMwyKgwyJDYDsVn4oOdl3E9uwQA0NWjFZaMCUInt1biNkZEpMcYZFQYZEgfVFQpsfZ4Er48cA0lFQpIJEBYNze8EuIPd1tzsd
sjItI7DDIqDDKkTzJUVwcOV10d2FgqwaSe7pg72A+uvP4MEZEag4wKgwzpo3O38rBi31UcvZYNAJAZSTG5lzvmDPaDk7WpyN0REYmPQUaFQYb02amkXKzYF48T16uvPyMzluLJ3p544REfOFox0BBRy8Ugo8IgQ4bg78RsrNh7FWdu5gGovkLw9GAvPDfQB3aWcpG7IyJqegwyKgwyZCgEQcDRa9lYse8qYpLzAQDmMiM83c8Lzw7wQStzmbgNEhE1IQYZFQYZMjSCIOBQfCZW7LuqXmHbUm6Mmf29Mau/N2zMeFE9Imr+GGRUGGTIUAmCgL2XM7By31VcSS8CAFibGuPZAT6Y0c+LVwkmomaNQUaFQYYMnVIpYM+ldKzcdxXXMosBAK3MTfD8QF9MC/aEhdxY5A6JiHSPQUaFQYaaC4VSwK4Lqfh8/zX1VYLtLGR4YZAvnuzjCTOZkcgdEhHpDoOMCoMMNTdVCiW2x6Ti8wPXcCu3FADgYCXHi4/4YnJPDwYaImoWGGRUGGSouapUKLH17G18cSABKfl3AACtzU3wZB9PPBXsyevQEJFBY5BRYZCh5q6iSolNZ5LxbWQibudVBxqZkRRju7hi1gBvBDrz556IDA+DjAqDDLUUVQol9l3OwJqj13H2Vr56+wB/e8zq741BbR0gkUjEa5CIqB4YZFQYZKglir6Zh7XHkrD7YhqUqne4v6MlnhngjbFd2sDUhPNoiEi/McioMMhQS5acW4p1x2/g99O3UFKhAADYW8rwVB8vPNnHg8sfEJHeYpBRYZAhAgrLKrHx1C2sO34DaQVlAAC5sRQTurlhVn8v+DlaidwhEZEmBhkVBhmif1QqlPgrNg3fH01CbEqBevvgAAc8O8AHwb52nEdDRHqBQUaFQYboXoIg4PSNPHx/9Dr2xWWg5rdAOxdrPNPfG6M7u0JmLBW3SSJq0RhkVBhkiB4sKbsE644nYfOZ27hTWT2PxtFKjul9vTC1twdX3SYiUTDIqDDIENVNfmkFfj15Cz/+fQOZReUAAFMTKcZ3bYPpfb14PRoialIMMioMMkT1U1GlxK4Lqfj+aBIupxWqtwf72GFGPy8MbecEIynn0RBR42KQUWGQIdJOzTya9X8nIeJSBhSqC9K0aWWGacGeeLynO4ediKjRMMioMMgQNVxK/h38cuImNp66hbzSSgA1w05umNHXCwHOPH2biHSLQUaFQYZId8oqFdgRk4p1f99AHIediKgRMcioMMgQ6Z4gCDiVlIv1f99AxKV09TIIbq1Vw049PGBjbiJuk0Rk0BhkVBhkiBpXzbDTb6duIZ/DTkSkIwwyKgwyRE2jrFKB7TEpWHf8Bq6kF6m39/W1w/S+HHYiovphkFFhkCFqWg8adnqqjyfCurvBnotVEtFD1PXzW9RrkC9duhQ9e/aElZUVHB0dMW7cOMTHx2vcp6ysDHPmzIGdnR0sLS0RFhaGjIwMkTomooeRSCTo7WOHVU92x9E3h2D2I75oZW6C23l3sHT3FfT5+ACe/ekMIi6lo1KhFLtdIjJwoh6RGT58OCZPnoyePXuiqqoKb731Fi5evIjLly/DwsICADB79mz8+eefWL9+PWxsbDB37lxIpVIcP368Ts/BIzJE4qsZdvr15C1cuP3PYpV2FjKM7dIGE7u7ob0r359E9A+DHFrKysqCo6MjIiMjMXDgQBQUFMDBwQEbNmzAxIkTAQBXrlxBu3btEBUVhT59+jy0JoMMkX65mlGEP6JvY+u5FGSplkIAgPYu1pjY3Q1ju7jCjkNPRC2eQQwt/VtBQfVfara2tgCA6OhoVFZWYujQoer7BAYGwsPDA1FRUbXWKC8vR2FhocaNiPRHWycrLBzZDlH/GYK1M3pgZEdnyIykuJxWiPd3XUbvjw/guZ/OYN/lDA49EdFDGYvdQA2lUol58+ahX79+6NChAwAgPT0dMpkMrVq10rivk5MT0tPTa62zdOlSLFmypLHbJaIGMjaSYkigE4YEOiGvpAI7L6RiS/RtXLhdgL2XM7D3cgbsLGQY17V66KmdC4+oEtG99CbIzJkzBxcvXsSxY8caVGfhwoV49dVX1V8XFhbC3d29oe0RUSNqbSHDtGAvTAv2Qnx6EbZEJyP8XCqyi8vxw7Ek/HAsCUGuNUNPbWBrwTWeiKiaXgSZuXPnYteuXThy5Ajc3NzU252dnVFRUYH8/HyNozIZGRlwdnautZZcLodczvF1IkMV4GyFt0e1xxvDA3Hkaha2RN/G/rgMXEotxKXUy/j4rzgMCXTEY93dMSjAASZGejVCTkRNTNQgIwgCXnrpJYSHh+Pw4cPw9vbW2N+9e3eYmJjgwIEDCAsLAwDEx8fj1q1bCA4OFqNlImoiJkZShLRzQki76qGnHeerh55iUwoQcSkDEZcyYG8pw/iubfB4Tw/4OVqK3TIRiUDUs5ZefPFFbNiwAdu3b0dAQIB6u42NDczMzABUn379119/Yf369bC2tsZLL70EAPj777/r9Bw8a4moeYlLK8Qf0bexLSYF2cUV6u29vGwxuZc7RnZ0gamJkYgdEpEuGMTp1xJJ7ZcrX7duHWbMmAGg+oJ4CxYswG+//Yby8nIMGzYM33zzzX2Hlv6NQYaoeapUKHE4Pgu/n76Fg1cy1VcQtjY1xviubTC5lwcnCBMZMIMIMk2BQYao+UsvKMPmM8nYeDoZKfl31Ns7u7fClJ7uGN3ZFRZyvZgSSER1xCCjwiBD1HIolQKOJWRj4+lb2HspA1WqwzQWMiOM6eKKyT090MnN5r5Hg4lIfzDIqDDIELVM2cXl+CP6Nn4/nYzr2SXq7e1crDGllzvGdmkDGzMTETskogdhkFFhkCFq2QRBwMmkXGw8dQt/XUxHRVX11YJNTaQY2dEFU3p5oIdnax6lIdIzDDIqDDJEVCO/tALh51Kw8VQy4jOK1Nv9HC0xuac7JnRz48X2iPQEg4wKgwwR/ZsgCIhJzsfGU8nYeSEVpRUKAIDMSIrQICdM6NYGA/x5sT0iMTHIqDDIENGDFJVVYuf5NGw8fQsXbheot7c2N8GoTi4Y26UNunu0hlTKoSeipsQgo8IgQ0R1dSm1AFuib2Pn+TRkF5ert7dpZYbRnV0xrqsrAp35e4SoKTDIqDDIEFF9VSmUOHE9F9tiUrDnYjqKy6vU+wKcrDCmiyvGdHaFu625iF0SNW8MMioMMkTUEGWVChy6koltMSk4dCULFQqlel8Pz9YY28UVIzu6wM6Si9US6RKDjAqDDBHpSsGdSkRcTMe2mBREXc9BzW9PI6kEA/ztMa5LGzza3olXESbSAQYZFQYZImoMGYVl2Hk+FTvOp2pMEjY1keLR9s4Y18UVA/wdIDPmmU9E2mCQUWGQIaLGlphVjB0xqdgek4IbOaXq7a3MTTCyowv+r6MLenrb8nRuonpgkFFhkCGipiIIAi7cLsD2mFTsvJCKrKJ/znyykhtjYIADQgIdMTjAEa154T2iB2KQUWGQISIxKJQCohJzsON8Cg7EZSKnpEK9TyoBunm0xpB2jhjazgn+j
pZcIoHoXxhkVBhkiEhsSqWAmNv5OBiXif1xGbiSXqSx3621GUICHRHSzgm9fWwhNzYSqVMi/cEgo8IgQ0T6JiX/Dg5eycTBuAwcT8xRL2QJABYyIwzwd8CQdtVDUA5WPK2bWiYGGRUGGSLSZ6UVVTiekIODVzJwIC4TmXfNq5FIgM5urRAS6Igh7RzR3sWaQ1DUYjDIqDDIEJGhUCoFXEotxAFVqIlNKdDY72JjiiGB1fNq+vnZ89RuatYYZFQYZIjIUGUUluHQlUzsj8vEsYQslFX+MwRlY1Z9avfYLq7o5WXLRS2p2WGQUWGQIaLmoKxSgajrOTgYl4mIS+kaQ1AuNqYY3dkVY7u4cviJmg0GGRUGGSJqbhRKASev52B7TCr+upiGorJ/FrX0c7TE2M6uGNPFFZ52FiJ2SdQwDDIqDDJE1JyVVylw6EoWdpxPwf64TI0zoLq4t8LYLq4Y1ckFjlamInZJVH8MMioMMkTUUhSVVSLiUga2x6TgeEI2lKrf7lIJ0M/PHmO7tMGwICdYmZqI2yhRHTDIqDDIEFFLlFVUjj8vpGJbTCpikvPV22XGUgxt54gxndvgkQAHmJrw4nuknxhkVBhkiKilu5lTgh0xqdgWk4LErBL1ditTY4zo4IyxXdqgj48djHjmE+kRBhkVBhkiomqCIOByWiG2x6RiR0wq0gvL1PvsLeUY3sEJIzu6oLc3Qw2Jj0FGhUGGiOheSqWAUzdyq898ik1DwZ1K9T57SxlCg5wxqqMLenvbwtiIF96jpscgo8IgQ0T0YBVVSvydmI2/YtOw93IG8kv/CTW2FjIMC6o+UtPHxw4mDDXURBhkVBhkiIjqrlKhRFRiDv6KTUPEpXTk3RVqWpubILS9M0Z2ckFfX4YaalwMMioMMkRE2qlSKHHiei7+upiGiIvpyCmpUO+zMTNBaPvqIzVc94kaA4OMCoMMEVHDVSmUOJVUHWr2XMxAdvE/SyRYmxrj0fbOGNnRGf397SE35ind1HAMMioMMkREuqVQCjh9Ixd/xaZh98V0ZN217pOV3BhD2zthRAdnDPB3gJmMoYa0wyCjwiBDRNR4FEoB0TfzVKEmDRmF/4QambEUwT52GBzggMGBjlz7ieqFQUaFQYaIqGkolQLO3srDX7HpiLiUjpT8Oxr7fewtMDjQEYMDHNHTuzWHoOiBGGRUGGSIiJqeIAhIyCzGofhMHLqShdM3clGl/OfjxkJmhH5+9hgc6IhHAhzgYmMmYrekjxhkVBhkiIjEV1hWiePXsquDTXyWxrwaAAh0tsLgQEcMCXREV/dWvAgfMcjUYJAhItIvSmX1UgmHrmTiUHwmziXn4+5PImtTYwxs64DBAY4YFOAAe0u5eM2SaBhkVBhkiIj0W25JBY5czcKh+ExEXs3SuLKwRAJ0cmuFwQEOCAl0Qoc21pBIuA5US8Ago8IgQ0RkOBRKATHJeTh0pTrYXEot1NjvamOK0CBnhLZ3Qk9vW15duBljkFFhkCEiMlwZhWU4HJ+Jg1cyceRqNu5UKtT7bMxMENLOEaHtnTGwrT3MZcYidkq6xiCjwiBDRNQ8lFUqcOxaNvZeTsf+uEzk3rVkgqmJFAP8HRDa3gkh7ZxgayETsVPSBQYZFQYZIqLmp0qhRPTNPERcysDey+m4nffPNWukEqCXty1C2zsjNMgJbq3NReyUtMUgo8IgQ0TUvAmCgLi0IkRcSsfeyxmIS9OcVxPkaq0ONYHOVpwsbCAMIsgcOXIEn376KaKjo5GWlobw8HCMGzdOvX/GjBn48ccfNR4zbNgw7Nmzp87PwSBDRNSyJOeWqkPNmRu5uOs6fPCwNUdoeyeEBjmju2drGEkZavRVXT+/RZ0ZVVJSgs6dO2PmzJmYMGFCrfcZPnw41q1bp/5aLuf1BIiI6P7cbc3xzAAfPDPABznF5TgQl4m9l9Nx5Fo2buWW4vtjSfj+WBJsLWQI9rVDsI8d+vrawdvegkdrDJCoQWbEiBEYMWLEA+8jl8vh7OzcRB0REVFzYmcpx6Se7pjU0x0l5VU4cjULey9n4EBcBnJLKvDnhTT8eSENAOBkLUewjx2Cfe3Q19ce7racW2MI9P5ctcOHD8PR0RGtW7fGkCFD8OGHH8LOzu6+9y8vL0d5+T+Xvi4sLLzvfYmIqOWwkBtjREcXjOjogkqFEmdv5iHqeg6iEnNw7lY+MgrLsS0mFdtiUgEAbVqZqUJNdbjhelD6SW8m+0okknvmyGzcuBHm5ubw9vZGYmIi3nrrLVhaWiIqKgpGRrWvmrp48WIsWbLknu2cI0NERPdTVqlA9M08RCXmIOp6Ds4n52sscgkAXnbm1UNRvvbo42MLRytTkbptGQxisu/dagsy/3b9+nX4+vpi//79CAkJqfU+tR2RcXd3Z5AhIqI6KymvwukbuYi6noMTiTmITSnAv3IN/Bwtq4/W+Niht48dr12jYwYx2be+fHx8YG9vj4SEhPsGGblczgnBRETUIBZyYzwS4IhHAhwBVK/efep6rnooKi69EAmZxUjILMZPUTcBVK/g3d/PHoMCHNDTyxamJrWPHJBuGVSQuX37NnJycuDi4iJ2K0RE1IJYm5pgaHsnDG3vBADIL63Aieu5iErMRtT1HFzNKMaV9CJcSS/C98eSYGoiRbCPHQa1dcCgAEd42ZnzjKhGIurQUnFxMRISEgAAXbt2xYoVKzB48GDY2trC1tYWS5YsQVhYGJydnZGYmIg33ngDRUVFiI2NrfNRF15HhoiIGlt2cTmiEnNw9FoWIq9mIaOwXGO/h615dahp64BgXztYyA3qOIIoDGKOzOHDhzF48OB7tk+fPh2rVq3CuHHjcO7cOeTn58PV1RWhoaH44IMP4OTkVOfnYJAhIqKmJAgC4jOKEBlfHWpO38hFpeKfj1oTIwl6etmqjtY4IMCJVxuujUEEmabAIENERGIqKa9CVGIOIq9m4fDVTCTn3tHY72QtVx2tcUR/P3vYmJuI1Kl+YZBRYZAhIiJ9IQgCbuSUIjI+E5FXsxB1PQdllUr1fqkE6OLeCoPaOmJQgAM6trFpscsoMMioMMgQEZG+KqtU4PSNXPUw1LXMYo39rc1N0M/PHgPbOmCAv32Luigfg4wKgwwRERmKlPw7OHI1C5HxWTiekI2i8iqN/f6Olhjg74ABbe3R29sW5rLmO2mYQUaFQYaIiAxRpUKJmOR8HL2ahSPXsnHhdr7GRflkRlL08GqNAf4OGNjWHu2crSFtRsNQDDIqDDJERNQc5JdW4G/VKd5HrmYjJV9z0rC9pQz9/eyrj9j428PR2rCXUGCQUWGQISKi5kYQBCRll+DI1SwcvVZ9Ub7SCoXGfQKdrTDAvzrY9PI2vCsNM8ioMMgQEVFzV1GlxNlbeTh6rTrYxKYU4O5Pd7mxFL28bTHQ3wF9/ewMYhiKQUaFQYaIiFqa3JIKHE/IVg9DpReWaexvZW6C3t62CPapXs27rZOl3l2Uj0FGhUGGiIhaMkEQkJBZjCPXqoPNqaTce4ah7Cxk6ONjhz6+dgj2sYWvg/jBhkFG
hUGGiIjoH5UKJWJTChCVmIMT13Nw+kauxkX5AMDBSo4+PnaqIzZ2oix6ySCjwiBDRER0fxVVSpy/nY+oxBxEJeYg+lYeKqo0g42ztSn6+Ngi2NcOwT72cLc1a/RgwyCjwiBDRERUd2WVCsQkq4LN9RzE3MpHhUIz2LRpZVZ9xMbXDn18bOHW2lznfTDIqDDIEBERaa+sUoHom3k4cb36iE1Mcj6qlJrR4bXQtpg7xF+nz1vXz+/me21jIiIiajBTEyP087NHPz97AEBpRRXO3MhDlCrYxKYUIKiNjWj9McgQERFRnZnLjDGwrQMGtnUAABSXV0FmJBWtHwYZIiIi0pqlXNwoIV6EIiIiImogBhkiIiIyWAwyREREZLAYZIiIiMhgMcgQERGRwWKQISIiIoPFIENEREQGi0GGiIiIDBaDDBERERksBhkiIiIyWAwyREREZLAYZIiIiMhgMcgQERGRwWr2q18LggAAKCwsFLkTIiIiqquaz+2az/H7afZBpqioCADg7u4ucidERERUX0VFRbCxsbnvfonwsKhj4JRKJVJTU2FlZQWJRKKzuoWFhXB3d0dycjKsra31siZ7ZI/6VJM9skd9qske9bfHGoIgoKioCK6urpBK7z8TptkfkZFKpXBzc2u0+tbW1jr/x9N1Tfaon/UaoyZ71M96jVGTPepnvcao2VJ7BPDAIzE1ONmXiIiIDBaDDBERERksBhktyeVyLFq0CHK5XG9rskf2qE812SN71Kea7FF/e6yvZj/Zl4iIiJovHpEhIiIig8UgQ0RERAaLQYaIiIgMFoMMERERGSwGGS19/fXX8PLygqmpKXr37o1Tp05pXevIkSMYPXo0XF1dIZFIsG3btgb1tnTpUvTs2RNWVlZwdHTEuHHjEB8f36Caq1atQqdOndQXPQoODsbu3bsbVPNuy5Ytg0Qiwbx587SusXjxYkgkEo1bYGBgg/pKSUnBk08+CTs7O5iZmaFjx444c+aM1vW8vLzu6VEikWDOnDla1VMoFHj33Xfh7e0NMzMz+Pr64oMPPnjo2iQPUlRUhHnz5sHT0xNmZmbo27cvTp8+XefHP+znWRAEvPfee3BxcYGZmRmGDh2Ka9euNajm1q1bERoaCjs7O0gkEsTExGhdr7KyEm+++SY6duwICwsLuLq6Ytq0aUhNTW1Qj4sXL0ZgYCAsLCzQunVrDB06FCdPntS63t1eeOEFSCQSfPbZZw3qccaMGff8bA4fPrxBPcbFxWHMmDGwsbGBhYUFevbsiVu3bmlds7b3j0QiwaeffqpVveLiYsydOxdubm4wMzND+/bt8e233963v7rUzMjIwIwZM+Dq6gpzc3MMHz78gT/jdfmdXVZWhjlz5sDOzg6WlpYICwtDRkaG1vVWr16NRx55BNbW1pBIJMjPz3/ga35YzdzcXLz00ksICAiAmZkZPDw88PLLL6OgoOCBdXWBQUYLv//+O1599VUsWrQIZ8+eRefOnTFs2DBkZmZqVa+kpASdO3fG119/rZP+IiMjMWfOHJw4cQL79u1DZWUlQkNDUVJSonVNNzc3LFu2DNHR0Thz5gyGDBmCsWPH4tKlSw3u9/Tp0/juu+/QqVOnBtcKCgpCWlqa+nbs2DGta+Xl5aFfv34wMTHB7t27cfnyZfzvf/9D69atta55+vRpjf727dsHAHjssce0qrd8+XKsWrUKX331FeLi4rB8+XJ88skn+PLLL7Xu8ZlnnsG+ffvw888/IzY2FqGhoRg6dChSUlLq9PiH/Tx/8skn+OKLL/Dtt9/i5MmTsLCwwLBhw1BWVqZ1zZKSEvTv3x/Lly9vcI+lpaU4e/Ys3n33XZw9exZbt25FfHw8xowZo3VNAGjbti2++uorxMbG4tixY/Dy8kJoaCiysrK0qlcjPDwcJ06cgKur6wPvV9eaw4cP1/gZ/e2337Sul5iYiP79+yMwMBCHDx/GhQsX8O6778LU1FTrmnf3lpaWhrVr10IikSAsLEyreq+++ir27NmDX375BXFxcZg3bx7mzp2LHTt2aNWjIAgYN24crl+/ju3bt+PcuXPw9PTE0KFD7/s7uC6/s+fPn4+dO3di8+bNiIyMRGpqKiZMmKB1vdLSUgwfPhxvvfXWfV9nfWqmpqYiNTUV//3vf3Hx4kWsX78ee/bswaxZs+pUv0EEqrdevXoJc+bMUX+tUCgEV1dXYenSpQ2uDUAIDw9vcJ27ZWZmCgCEyMhIndZt3bq18P333zeoRlFRkeDv7y/s27dPGDRokPDKK69oXWvRokVC586dG9TP3d58802hf//+OqtXm1deeUXw9fUVlEqlVo8fNWqUMHPmTI1tEyZMEKZOnapVvdLSUsHIyEjYtWuXxvZu3boJb7/9dr3r/fvnWalUCs7OzsKnn36q3pafny/I5XLht99+06rm3ZKSkgQAwrlz57TusTanTp0SAAg3b97UWc2CggIBgLB//36t692+fVto06aNcPHiRcHT01NYuXJlnfq7X83p06cLY8eOrXONh9V7/PHHhSeffFKrever+W9jx44VhgwZonW9oKAg4f3339fYVp+f93/XjI+PFwAIFy9eVG9TKBSCg4ODsGbNmjrV/Pfv7Pz8fMHExETYvHmz+j5xcXECACEqKqre9e526NAhAYCQl5dXp97qUrPGpk2bBJlMJlRWVtardn3xiEw9VVRUIDo6GkOHDlVvk0qlGDp0KKKiokTs7P5qDu3Z2trqpJ5CocDGjRtRUlKC4ODgBtWaM2cORo0apfH9bIhr167B1dUVPj4+mDp16gMPYT/Mjh070KNHDzz22GNwdHRE165dsWbNGp30CVT/LP3yyy+YOXOm1gua9u3bFwcOHMDVq1cBAOfPn8exY8cwYsQIrepVVVVBoVDc8xezmZlZg45u1UhKSkJ6errGv7eNjQ169+6tt+8foPo9JJFI0KpVK53Uq6iowOrVq2FjY4POnTtrVUOpVOKpp57C66+/jqCgIJ30BQCHDx+Go6MjAgICMHv2bOTk5Gjd359//om2bdti2LBhcHR0RO/evRs8dH63jIwM/Pnnnw36q79v377YsWMHUlJSIAgCDh06hKtXryI0NFSreuXl5QCg8R6SSqWQy+V1fg/9+3d2dHQ0KisrNd43gYGB8PDwqNP7RtefAXWtWVBQAGtraxgbN+6yjgwy9ZSdnQ2FQgEnJyeN7U5OTkhPTxepq/tTKpWYN28e+vXrhw4dOjSoVmxsLCwtLSGXy/HCCy8gPDwc7du317rexo0bcfbsWSxdurRBfdXo3bu3+nDmqlWrkJSUhAEDBqCoqEiretevX8eqVavg7++PiIgIzJ49Gy+//DJ+/PFHnfS7bds25OfnY8aMGVrX+M9//oPJkycjMDAQJiYm6Nq1K+bNm4epU6dqVc/KygrBwcH44IMPkJqaCoVCgV9++QVRUVFIS0vTus8aNe8RQ3n/ANVzE958801MmTKlwYvi7dq1C5aWljA1NcXKlSuxb98+2Nvba1Vr+fLlMDY2xssvv9ygnu42fPhw/PTTTzhw4AC
WL1+OyMhIjBgxAgqFot61MjMzUVxcjGXLlmH48OHYu3cvxo8fjwkTJiAyMlIn/f7444+wsrK67xBLXXz55Zdo37493NzcIJPJMHz4cHz99dcYOHCgVvVqAsbChQuRl5eHiooKLF++HLdv367Te6i239np6emQyWT3BOm6vG90+RlQn5rZ2dn44IMP8Nxzz+nkOR+k2a9+3dLNmTMHFy9e1Mlf0wEBAYiJiUFBQQG2bNmC6dOnIzIyUqswk5ycjFdeeQX79u174Hh5fdx9FKJTp07o3bs3PD09sWnTJq3+YlMqlejRowc+/vhjAEDXrl1x8eJFfPvtt5g+fXqD+/3hhx8wYsSIOs1tuJ9Nmzbh119/xYYNGxAUFISYmBjMmzcPrq6uWvf4888/Y+bMmWjTpg2MjIzQrVs3TJkyBdHR0Vr3aagqKysxadIkCIKAVatWNbje4MGDERMTg+zsbKxZswaTJk3CyZMn4ejoWK860dHR+Pzzz3H27Fmtj+bVZvLkyer/79ixIzp16gRfX18cPnwYISEh9aqlVCoBAGPHjsX8+fMBAF26dMHff/+Nb7/9FoMGDWpwv2vXrsXUqVMb9Dvkyy+/xIkTJ7Bjxw54enriyJEjmDNnDlxdXbU6UmxiYoKtW7di1qxZsLW1hZGREYYOHYoRI0bUaRK+Ln9nN0a9utQsLCzEqFGj0L59eyxevFhnz3s/PCJTT/b29jAyMrpntnhGRgacnZ1F6qp2c+fOxa5du3Do0CG4ubk1uJ5MJoOfnx+6d++OpUuXonPnzvj888+1qhUdHY3MzEx069YNxsbGMDY2RmRkJL744gsYGxtr9Rfgv7Vq1Qpt27ZFQkKCVo93cXG5J6S1a9euQcNVNW7evIn9+/fjmWeeaVCd119/XX1UpmPHjnjqqacwf/78Bh3l8vX1RWRkJIqLi5GcnIxTp06hsrISPj4+DeoVgPo9Ygjvn5oQc/PmTezbt6/BR2MAwMLCAn5+fujTpw9++OEHGBsb44cffqh3naNHjyIzMxMeHh7q98/NmzexYMECeHl5NbjPGj4+PrC3t9fqPWRvbw9jY+NGew8dPXoU8fHxDXoP3blzB2+99RZWrFiB0aNHo1OnTpg7dy4ef/xx/Pe//9W6bvfu3RETE4P8/HykpaVhz549yMnJeeh76H6/s52dnVFRUXHPmUUPe9/o+jOgLjWLioowfPhwWFlZITw8HCYmJjp53gdhkKknmUyG7t2748CBA+ptSqUSBw4caPB8EV0RBAFz585FeHg4Dh48CG9v70Z5HqVSqR4Prq+QkBDExsYiJiZGfevRowemTp2KmJgYGBkZNbi/4uJiJCYmwsXFRavH9+vX755TFq9evQpPT88G97Zu3To4Ojpi1KhRDapTWloKqVTzbWxkZKT+a7ghLCws4OLigry8PERERGDs2LENrunt7Q1nZ2eN909hYSFOnjypN+8f4J8Qc+3aNezfvx92dnaN8jzavoeeeuopXLhwQeP94+rqitdffx0RERE66+/27dvIycnR6j0kk8nQs2fPRnsP/fDDD+jevbvWc4yA6n/nysrKRnsP2djYwMHBAdeuXcOZM2fu+x562O/s7t27w8TERON9Ex8fj1u3btX6vmmMz4C61CwsLERoaChkMhl27Nihs6PtD8OhJS28+uqrmD59Onr06IFevXrhs88+Q0lJCZ5++mmt6hUXF2v8xZOUlISYmBjY2trCw8Oj3vXmzJmDDRs2YPv27bCyslKPodrY2MDMzEyrHhcuXIgRI0bAw8MDRUVF2LBhAw4fPqz1L00rK6t7xlYtLCxgZ2en9Tjua6+9htGjR8PT0xOpqalYtGgRjIyMMGXKFK3qzZ8/H3379sXHH3+MSZMm4dSpU1i9ejVWr16tVb0aSqUS69atw/Tp0xs8CW706NH46KOP4OHhgaCgIJw7dw4rVqzAzJkzta4ZEREBQRAQEBCAhIQEvP766wgMDKzzz/fDfp7nzZuHDz/8EP7+/vD29sa7774LV1dXjBs3Tuuaubm5uHXrlvpaLzUfns7OzrX+xfqgei4uLpg4cSLOnj2LXbt2QaFQqN9Dtra2kMlk9e7Rzs4OH330EcaMGQMXFxdkZ2fj66+/RkpKyn1PvX/Ya/53uDIxMYGzszMCAgK0+j7a2tpiyZIlCAsLg7OzMxITE/HGG2/Az88Pw4YN06rH119/HY8//jgGDhyIwYMHY8+ePdi5cycOHz6sVY81vw8LCwuxefNm/O9//7tvnbrWGzRoEF5//XWYmZnB09MTkZGR+Omnn7BixQqta27evBkODg7w8PBAbGwsXnnlFYwbN+6+E4gf9jvbxsYGs2bNwquvvgpbW1tYW1vjpZdeQnBwMPr06VPvekD1vJv09HT164iNjYWVlRU8PDxqncD7sJo1Iaa0tBS//PILCgsLUVhYCABwcHDQyR+n99Wo50Q1Y19++aXg4eEhyGQyoVevXsKJEye0rlVz+tu/b9OnT9eqXm21AAjr1q3TuseZM2cKnp6egkwmExwcHISQkBBh7969WterTUNPv3788ccFFxcXQSaTCW3atBEef/xxISEhoUE97dy5U+jQoYMgl8uFwMBAYfXq1Q2qJwiCEBERIQAQ4uPjG1yrsLBQeOWVVwQPDw/B1NRU8PHxEd5++22hvLxc65q///674OPjI8hkMsHZ2VmYM2eOkJ+fX+fHP+znWalUCu+++67g5OQkyOVyISQk5KHfi4fVXLduXa37Fy1aVO96Nadw13Y7dOiQVj3euXNHGD9+vODq6irIZDLBxcVFGDNmjHDq1CmtX/O/1eX06wfVLC0tFUJDQwUHBwfBxMRE8PT0FJ599lkhPT29QT3+8MMPgp+fn2Bqaip07txZ2LZtm9Y91vjuu+8EMzOzOv1cPqxeWlqaMGPGDMHV1VUwNTUVAgIChP/9738PvCTCw2p+/vnngpubm2BiYiJ4eHgI77zzzgPfk3X5nX3nzh3hxRdfFFq3bi2Ym5sL48ePF9LS0rSut2jRonp9Tjys5v2+JwCEpKSk+752XZCoGiQiIiIyOJwjQ0RERAaLQYaIiIgMFoMMERERGSwGGSIiIjJYDDJERERksBhkiIiIyGAxyBAREZHBYpAhohZHIpFg27ZtYrdBRDrAIENETWrGjBmQSCT33IYPHy52a0RkgLjWEhE1ueHDh2PdunUa2+RyuUjdEJEh4xEZImpycrlcvaBjza1169YAqod9Vq1ahREjRsDMzAw+Pj7YsmWLxuNjY2MxZMgQmJmZwc7ODs899xyKi4s17rN27VoEBQVBLpfDxcUFc+fO1difnZ2N8ePHw9zcHP7+/tixY0fjvmgiahQMMkSkd959912EhYXh/PnzmDp1KiZPnoy4uDgAQElJCYYNG4bWrVvj9OnT2Lx5M/bv368RVFatWoU5c+bgueeeQ2xsLHbs2AE/Pz+N51iyZAkmTZqECxcuYOTIkZg6dSpyc3Ob9HUSkQ406pKURET/Mn36dMHIyEiwsLDQuH300UeCIFSvsvvCCy9oPKZ3797C7N
mzBUEQhNWrVwutW7cWiouL1fv//PNPQSqVqldqdnV1Fd5+++379gBAeOedd9RfFxcXCwCE3bt36+x1ElHT4BwZImpygwcPxqpVqzS22draqv8/ODhYY19wcDBiYmIAAHFxcejcuTMsLCzU+/v16welUon4+HhIJBKkpqYiJCTkgT106tRJ/f8WFhawtrZGZmamti+JiETCIENETc7CwuKeoR5dMTMzq9P9TExMNL6WSCRQKpWN0RIRNSLOkSEivXPixIl7vm7Xrh0AoF27djh//jxKSkrU+48fPw6pVIqAgABYWVnBy8sLBw4caNKeiUgcPCJDRE2uvLwc6enpGtuMjY1hb28PANi8eTN69OiB/v3749dff8WpU6fwww8/AACmTp2KRYsWYfr06Vi8eDGysrLw0ksv4amnnoKTkxMAYPHixXjhhRfg6OiIESNGoKioCMePH8dLL73UtC+UiBodgwwRNbk9e/bAxcVFY1tAQACuXLkCoPqMoo0bN+LFF1+Ei4sLfvvtN7Rv3x4AYG5ujoiICLzyyivo2bMnzM3NERYWhhUrVqhrTZ8+HWVlZVi5ciVee+012NvbY+LEiU33AomoyUgEQRDEboKIqIZEIkF4eDjGjRsnditEZAA4R4aIiIgMFoMMERERGSzOkSEivcLRbiKqDx6RISIiIoPFIENEREQGi0GGiIiIDBaDDBERERksBhkiIiIyWAwyREREZLAYZIiIiMhgMcgQERGRwWKQISIiIoP1/zb0h76Q4at+AAAAAElFTkSuQmCC",
+      "text/plain": [
+       "<Figure size 640x480 with 1 Axes>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
    "source": [
     "import matplotlib.pyplot as plt\n",
     "\n",
-    "plt.plot(range(n_epochs), train_loss_list)\n",
+    "# we stopped the training at epoch 22, so train_loss_list has a length of 23\n",
+    "last_epoch = 22\n",
+    "\n",
+    "plt.plot(range(last_epoch+1), train_loss_list)\n",
     "plt.xlabel(\"Epoch\")\n",
-    "plt.ylabel(\"Loss\")\n",
+    "plt.xticks(range(last_epoch+1))\n",
+    "plt.ylabel(\"Train Loss\")\n",
     "plt.title(\"Performance of Model 1\")\n",
     "plt.show()"
    ]
@@ -350,10 +541,31 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 27,
    "id": "e93efdfc",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Test Loss: 21.040502\n",
+      "\n",
+      "Test Accuracy of airplane: 69% (696/1000)\n",
+      "Test Accuracy of automobile: 79% (798/1000)\n",
+      "Test Accuracy of  bird: 48% (482/1000)\n",
+      "Test Accuracy of   cat: 39% (390/1000)\n",
+      "Test Accuracy of  deer: 54% (545/1000)\n",
+      "Test Accuracy of   dog: 63% (636/1000)\n",
+      "Test Accuracy of  frog: 75% (758/1000)\n",
+      "Test Accuracy of horse: 63% (630/1000)\n",
+      "Test Accuracy of  ship: 73% (736/1000)\n",
+      "Test Accuracy of truck: 72% (724/1000)\n",
+      "\n",
+      "Test Accuracy (Overall): 63% (6395/10000)\n"
+     ]
+    }
+   ],
    "source": [
     "model.load_state_dict(torch.load(\"./model_cifar.pt\"))\n",
     "\n",
@@ -417,6 +629,14 @@
     ")"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "With the example model, we get an overall test accuracy of 63%.\n",
+    "***"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "944991a2",
@@ -434,6 +654,305 @@
     "Compare the results obtained with this new network to those obtained previously."
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "***\n",
+    "- After a convolution taking an input of $channels_{in}*size_{input}*size_{input}$, the output has $channels_{out}$ channels of size : $size_{ConvOutput} = (size_{input} + 2*padding - size_{kernel}) + 1$ \n",
+    "- The ReLU activation doesn't change the size of the output\n",
+    "- The Maxpool with kernel size of 2 returns an output of : $channels_{out}$ of size $size_{output} = size_{ConvOutput}/2$\n",
+    "- The input is an image consisting of 3 channels of size $32*32$, so the size of the input is $3*32*32$\n",
+    "- After first convolutional layer : $size_{output} = \\frac{(32 + 2*1 - 3) + 1}{2} = 16$ so the size is $16*16*16$\n",
+    "- After second convolutional layer : $size_{output} = \\frac{(16 + 2*1 - 3) + 1}{2} = 8$ so the size is $32*8*8$\n",
+    "- After third convolutional layer : $size_{output} = \\frac{(8 + 2*1 - 3) + 1}{2} = 4$ so the size is $64*4*4$\n",
+    "***\n",
+    "We apply a dropout function, which parameter represents the probability to not correct a parameter during the error backpropagation. It seems that a dropout of 0.5 gives the best results :\n",
+    "- [source1](https://medium.com/@upendravijay2/how-does-dropout-help-to-avoid-overfitting-in-neural-networks-91b90fd86b20#:~:text=A%20good%20value%20for%20dropout,new%20network%20that%20uses%20dropout.) recommends a dropout comprised between 0.5 and 0.8\n",
+    "- [source3](https://arxiv.org/pdf/1207.0580.pdf) seems to recommend a dropout of 0.5\n",
+    "***"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "1. New model definition"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "NewNet(\n",
+      "  (conv1): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "  (conv2): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (conv3): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (fc1): Linear(in_features=1024, out_features=512, bias=True)\n",
+      "  (fc2): Linear(in_features=512, out_features=64, bias=True)\n",
+      "  (fc3): Linear(in_features=64, out_features=10, bias=True)\n",
+      "  (dropout): Dropout(p=0.5, inplace=False)\n",
+      ")\n"
+     ]
+    }
+   ],
+   "source": [
+    "class NewNet(nn.Module):\n",
+    "    def __init__(self):\n",
+    "        super(NewNet, self).__init__()\n",
+    "        # input : 3 channels, dim = 32*32\n",
+    "        self.conv1 = nn.Conv2d(3, 16, 3, padding=1) # first conv : 3 input channels and 16 output channels ; kernel size : 3 ; padding of 1\n",
+    "        # output dim = (32 + 2*padding - kernelsize) + 1 = (32 + 2 - 3) + 1 = 32 -> pool = 16*16\n",
+    "        self.pool = nn.MaxPool2d(2, 2) # Maxpool with kernel size of 2\n",
+    "        self.conv2 = nn.Conv2d(16, 32, 3, padding=1) # 16 in channels and 32 out channels\n",
+    "        # output dim = (16 + 2 - 3) + 1 = 16 -> pool = 8*8 \n",
+    "        self.conv3 = nn.Conv2d(32, 64, 3, padding=1) # 64 out channels\n",
+    "        # output dim = (8 + 2 - 3) + 1 = 8 -> pool = 4*4\n",
+    "        # Output of conv3 : 64 channels of size 4*4 so 64*4*4\n",
+    "        self.fc1 = nn.Linear(64 * 4 * 4, 512) # output size of 512\n",
+    "        self.fc2 = nn.Linear(512, 64) # output size of 64\n",
+    "        self.fc3 = nn.Linear(64, 10) # we must classify images from the CIFAR10 dataset into 10 classes, so 10 final output classes\n",
+    "        self.dropout = nn.Dropout(p=0.5)\n",
+    "\n",
+    "    def forward(self, x):\n",
+    "        x = self.pool(F.relu(self.conv1(x))) \n",
+    "        x = self.pool(F.relu(self.conv2(x))) \n",
+    "        x = self.pool(F.relu(self.conv3(x))) \n",
+    "        x = x.view(-1, 64 * 4 * 4) # reshape the output of third convolutional layer into a vector\n",
+    "        x = self.dropout(F.relu(self.fc1(x)))\n",
+    "        x = self.dropout(F.relu(self.fc2(x)))\n",
+    "        x = self.fc3(x)\n",
+    "        return x\n",
+    "\n",
+    "\n",
+    "# create a complete CNN\n",
+    "model = NewNet()\n",
+    "print(model)\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "2. New model training"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Epoch: 0 \tTraining Loss: 45.826931 \tValidation Loss: 44.174282\n",
+      "Validation loss decreased (inf --> 44.174282).  Saving model ...\n",
+      "Epoch: 1 \tTraining Loss: 40.520748 \tValidation Loss: 36.159333\n",
+      "Validation loss decreased (44.174282 --> 36.159333).  Saving model ...\n",
+      "Epoch: 2 \tTraining Loss: 35.712979 \tValidation Loss: 32.427382\n",
+      "Validation loss decreased (36.159333 --> 32.427382).  Saving model ...\n",
+      "Epoch: 3 \tTraining Loss: 32.764083 \tValidation Loss: 29.844782\n",
+      "Validation loss decreased (32.427382 --> 29.844782).  Saving model ...\n",
+      "Epoch: 4 \tTraining Loss: 30.879695 \tValidation Loss: 27.948342\n",
+      "Validation loss decreased (29.844782 --> 27.948342).  Saving model ...\n",
+      "Epoch: 5 \tTraining Loss: 29.150523 \tValidation Loss: 27.071934\n",
+      "Validation loss decreased (27.948342 --> 27.071934).  Saving model ...\n",
+      "Epoch: 6 \tTraining Loss: 27.658205 \tValidation Loss: 25.440916\n",
+      "Validation loss decreased (27.071934 --> 25.440916).  Saving model ...\n",
+      "Epoch: 7 \tTraining Loss: 26.286160 \tValidation Loss: 24.876248\n",
+      "Validation loss decreased (25.440916 --> 24.876248).  Saving model ...\n",
+      "Epoch: 8 \tTraining Loss: 24.907383 \tValidation Loss: 22.678210\n",
+      "Validation loss decreased (24.876248 --> 22.678210).  Saving model ...\n",
+      "Epoch: 9 \tTraining Loss: 23.764845 \tValidation Loss: 21.806124\n",
+      "Validation loss decreased (22.678210 --> 21.806124).  Saving model ...\n",
+      "Epoch: 10 \tTraining Loss: 22.622246 \tValidation Loss: 20.644137\n",
+      "Validation loss decreased (21.806124 --> 20.644137).  Saving model ...\n",
+      "Epoch: 11 \tTraining Loss: 21.655865 \tValidation Loss: 19.787687\n",
+      "Validation loss decreased (20.644137 --> 19.787687).  Saving model ...\n",
+      "Epoch: 12 \tTraining Loss: 20.642668 \tValidation Loss: 19.097233\n",
+      "Validation loss decreased (19.787687 --> 19.097233).  Saving model ...\n",
+      "Epoch: 13 \tTraining Loss: 19.765453 \tValidation Loss: 18.346761\n",
+      "Validation loss decreased (19.097233 --> 18.346761).  Saving model ...\n",
+      "Epoch: 14 \tTraining Loss: 18.887776 \tValidation Loss: 17.968003\n",
+      "Validation loss decreased (18.346761 --> 17.968003).  Saving model ...\n",
+      "Epoch: 15 \tTraining Loss: 18.183972 \tValidation Loss: 17.411004\n",
+      "Validation loss decreased (17.968003 --> 17.411004).  Saving model ...\n",
+      "Epoch: 16 \tTraining Loss: 17.510276 \tValidation Loss: 17.365755\n",
+      "Validation loss decreased (17.411004 --> 17.365755).  Saving model ...\n",
+      "Epoch: 17 \tTraining Loss: 16.788606 \tValidation Loss: 16.609059\n",
+      "Validation loss decreased (17.365755 --> 16.609059).  Saving model ...\n",
+      "Epoch: 18 \tTraining Loss: 16.158192 \tValidation Loss: 16.423296\n",
+      "Validation loss decreased (16.609059 --> 16.423296).  Saving model ...\n",
+      "Epoch: 19 \tTraining Loss: 15.521866 \tValidation Loss: 16.143898\n",
+      "Validation loss decreased (16.423296 --> 16.143898).  Saving model ...\n",
+      "Epoch: 20 \tTraining Loss: 14.844376 \tValidation Loss: 16.076946\n",
+      "Validation loss decreased (16.143898 --> 16.076946).  Saving model ...\n",
+      "Epoch: 21 \tTraining Loss: 14.292540 \tValidation Loss: 16.009578\n",
+      "Validation loss decreased (16.076946 --> 16.009578).  Saving model ...\n",
+      "Epoch: 22 \tTraining Loss: 13.765445 \tValidation Loss: 16.065225\n",
+      "Epoch: 23 \tTraining Loss: 13.127047 \tValidation Loss: 15.611544\n",
+      "Validation loss decreased (16.009578 --> 15.611544).  Saving model ...\n"
+     ]
+    },
+    {
+     "ename": "KeyboardInterrupt",
+     "evalue": "",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[1;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
+      "\u001b[1;32md:\\ECL\\3A\\MOD\\IA\\TD1\\gitlab_repo\\mod_4_6-td2\\TD2 Deep Learning.ipynb Cell 27\u001b[0m line \u001b[0;36m2\n\u001b[0;32m     <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X61sZmlsZQ%3D%3D?line=25'>26</a>\u001b[0m loss \u001b[39m=\u001b[39m criterion(output, target)\n\u001b[0;32m     <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X61sZmlsZQ%3D%3D?line=26'>27</a>\u001b[0m \u001b[39m# Backward pass: compute gradient of the loss with respect to model parameters\u001b[39;00m\n\u001b[1;32m---> <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X61sZmlsZQ%3D%3D?line=27'>28</a>\u001b[0m loss\u001b[39m.\u001b[39;49mbackward()\n\u001b[0;32m     <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X61sZmlsZQ%3D%3D?line=28'>29</a>\u001b[0m \u001b[39m# Perform a single optimization step (parameter update)\u001b[39;00m\n\u001b[0;32m     <a href='vscode-notebook-cell:/d%3A/ECL/3A/MOD/IA/TD1/gitlab_repo/mod_4_6-td2/TD2%20Deep%20Learning.ipynb#X61sZmlsZQ%3D%3D?line=29'>30</a>\u001b[0m optimizer\u001b[39m.\u001b[39mstep()\n",
+      "File \u001b[1;32mc:\\Users\\basil\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\torch\\_tensor.py:488\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m    478\u001b[0m \u001b[39mif\u001b[39;00m has_torch_function_unary(\u001b[39mself\u001b[39m):\n\u001b[0;32m    479\u001b[0m     \u001b[39mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m    480\u001b[0m         Tensor\u001b[39m.\u001b[39mbackward,\n\u001b[0;32m    481\u001b[0m         (\u001b[39mself\u001b[39m,),\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    486\u001b[0m         inputs\u001b[39m=\u001b[39minputs,\n\u001b[0;32m    487\u001b[0m     )\n\u001b[1;32m--> 488\u001b[0m torch\u001b[39m.\u001b[39;49mautograd\u001b[39m.\u001b[39;49mbackward(\n\u001b[0;32m    489\u001b[0m     \u001b[39mself\u001b[39;49m, gradient, retain_graph, create_graph, inputs\u001b[39m=\u001b[39;49minputs\n\u001b[0;32m    490\u001b[0m )\n",
+      "File \u001b[1;32mc:\\Users\\basil\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\torch\\autograd\\__init__.py:197\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m    192\u001b[0m     retain_graph \u001b[39m=\u001b[39m create_graph\n\u001b[0;32m    194\u001b[0m \u001b[39m# The reason we repeat same the comment below is that\u001b[39;00m\n\u001b[0;32m    195\u001b[0m \u001b[39m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m    196\u001b[0m \u001b[39m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 197\u001b[0m Variable\u001b[39m.\u001b[39;49m_execution_engine\u001b[39m.\u001b[39;49mrun_backward(  \u001b[39m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m    198\u001b[0m     tensors, grad_tensors_, retain_graph, create_graph, inputs,\n\u001b[0;32m    199\u001b[0m     allow_unreachable\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m, accumulate_grad\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m)\n",
+      "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
+     ]
+    }
+   ],
+   "source": [
+    "model = NewNet()\n",
+    "\n",
+    "criterion = nn.CrossEntropyLoss()  # specify loss function\n",
+    "optimizer = optim.SGD(model.parameters(), lr=0.01)  # specify optimizer\n",
+    "\n",
+    "n_epochs = 30  # number of epochs to train the model\n",
+    "train_loss_list = []  # list to store loss to visualize\n",
+    "valid_loss_min = np.Inf  # track change in validation loss\n",
+    "\n",
+    "for epoch in range(n_epochs):\n",
+    "    # Keep track of training and validation loss\n",
+    "    train_loss = 0.0\n",
+    "    valid_loss = 0.0\n",
+    "\n",
+    "    # Train the model\n",
+    "    model.train()\n",
+    "    for data, target in train_loader:\n",
+    "        # Move tensors to GPU if CUDA is available\n",
+    "        if train_on_gpu:\n",
+    "            data, target = data.cuda(), target.cuda()\n",
+    "        # Clear the gradients of all optimized variables\n",
+    "        optimizer.zero_grad()\n",
+    "        # Forward pass: compute predicted outputs by passing inputs to the model\n",
+    "        output = model(data)\n",
+    "        # Calculate the batch loss\n",
+    "        loss = criterion(output, target)\n",
+    "        # Backward pass: compute gradient of the loss with respect to model parameters\n",
+    "        loss.backward()\n",
+    "        # Perform a single optimization step (parameter update)\n",
+    "        optimizer.step()\n",
+    "        # Update training loss\n",
+    "        train_loss += loss.item() * data.size(0)\n",
+    "\n",
+    "    # Validate the model\n",
+    "    model.eval()\n",
+    "    for data, target in valid_loader:\n",
+    "        # Move tensors to GPU if CUDA is available\n",
+    "        if train_on_gpu:\n",
+    "            data, target = data.cuda(), target.cuda()\n",
+    "        # Forward pass: compute predicted outputs by passing inputs to the model\n",
+    "        output = model(data)\n",
+    "        # Calculate the batch loss\n",
+    "        loss = criterion(output, target)\n",
+    "        # Update average validation loss\n",
+    "        valid_loss += loss.item() * data.size(0)\n",
+    "\n",
+    "    # Calculate average losses\n",
+    "    train_loss = train_loss / len(train_loader)\n",
+    "    valid_loss = valid_loss / len(valid_loader)\n",
+    "    train_loss_list.append(train_loss)\n",
+    "\n",
+    "    # Print training/validation statistics\n",
+    "    print(\n",
+    "        \"Epoch: {} \\tTraining Loss: {:.6f} \\tValidation Loss: {:.6f}\".format(\n",
+    "            epoch, train_loss, valid_loss\n",
+    "        )\n",
+    "    )\n",
+    "\n",
+    "    # Save model if validation loss has decreased\n",
+    "    if valid_loss <= valid_loss_min:\n",
+    "        print(\n",
+    "            \"Validation loss decreased ({:.6f} --> {:.6f}).  Saving model ...\".format(\n",
+    "                valid_loss_min, valid_loss\n",
+    "            )\n",
+    "        )\n",
+    "        torch.save(model.state_dict(), \"new_model_cifar.pt\")\n",
+    "        valid_loss_min = valid_loss"
+   ]
+  },
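+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick plot of the recorded `train_loss_list` (a minimal sketch, assuming matplotlib is available) makes it easier to see whether the training loss is still decreasing when the run stops; comparing it with the validation losses printed above helps spot overfitting."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import matplotlib.pyplot as plt  # assumed available; re-importing is harmless if it was imported earlier\n",
+    "\n",
+    "plt.plot(train_loss_list, label=\"training loss\")\n",
+    "plt.xlabel(\"epoch\")\n",
+    "plt.ylabel(\"loss\")\n",
+    "plt.title(\"NewNet training loss per epoch\")\n",
+    "plt.legend()\n",
+    "plt.show()"
+   ]
+  },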
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "model.load_state_dict(torch.load(\"./new_model_cifar.pt\"))\n",
+    "\n",
+    "# track test loss\n",
+    "test_loss = 0.0\n",
+    "class_correct = list(0.0 for i in range(10))\n",
+    "class_total = list(0.0 for i in range(10))\n",
+    "\n",
+    "model.eval()\n",
+    "# iterate over test data\n",
+    "for data, target in test_loader:\n",
+    "    # move tensors to GPU if CUDA is available\n",
+    "    if train_on_gpu:\n",
+    "        data, target = data.cuda(), target.cuda()\n",
+    "    # forward pass: compute predicted outputs by passing inputs to the model\n",
+    "    output = model(data)\n",
+    "    # calculate the batch loss\n",
+    "    loss = criterion(output, target)\n",
+    "    # update test loss\n",
+    "    test_loss += loss.item() * data.size(0)\n",
+    "    # convert output probabilities to predicted class\n",
+    "    _, pred = torch.max(output, 1)\n",
+    "    # compare predictions to true label\n",
+    "    correct_tensor = pred.eq(target.data.view_as(pred))\n",
+    "    correct = (\n",
+    "        np.squeeze(correct_tensor.numpy())\n",
+    "        if not train_on_gpu\n",
+    "        else np.squeeze(correct_tensor.cpu().numpy())\n",
+    "    )\n",
+    "    # calculate test accuracy for each object class\n",
+    "    for i in range(batch_size):\n",
+    "        label = target.data[i]\n",
+    "        class_correct[label] += correct[i].item()\n",
+    "        class_total[label] += 1\n",
+    "\n",
+    "# average test loss\n",
+    "test_loss = test_loss / len(test_loader)\n",
+    "print(\"Test Loss: {:.6f}\\n\".format(test_loss))\n",
+    "\n",
+    "for i in range(10):\n",
+    "    if class_total[i] > 0:\n",
+    "        print(\n",
+    "            \"Test Accuracy of %5s: %2d%% (%2d/%2d)\"\n",
+    "            % (\n",
+    "                classes[i],\n",
+    "                100 * class_correct[i] / class_total[i],\n",
+    "                np.sum(class_correct[i]),\n",
+    "                np.sum(class_total[i]),\n",
+    "            )\n",
+    "        )\n",
+    "    else:\n",
+    "        print(\"Test Accuracy of %5s: N/A (no training examples)\" % (classes[i]))\n",
+    "\n",
+    "print(\n",
+    "    \"\\nTest Accuracy (Overall): %2d%% (%2d/%2d)\"\n",
+    "    % (\n",
+    "        100.0 * np.sum(class_correct) / np.sum(class_total),\n",
+    "        np.sum(class_correct),\n",
+    "        np.sum(class_total),\n",
+    "    )\n",
+    ")"
+   ]
+  },
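+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To put the per-class results in perspective, the following small sketch counts the trainable parameters of the new model, which makes it easy to compare its capacity with the first CNN trained earlier in the notebook."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch: count the trainable parameters of the new model\n",
+    "n_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
+    "print(\"NewNet trainable parameters: {:,}\".format(n_params))"
+   ]
+  },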
   {
    "cell_type": "markdown",
    "id": "bc381cf4",
@@ -940,7 +1459,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.8.5"
+   "version": "3.9.13"
   },
   "vscode": {
    "interpreter": {
diff --git a/results/model1_accuracy.PNG b/results/model1_accuracy.PNG
new file mode 100644
index 0000000000000000000000000000000000000000..4f276f5f7f37b8f41ce3599bd024b8adbe375ac9
Binary files /dev/null and b/results/model1_accuracy.PNG differ
diff --git a/results/model1_overfit2.PNG b/results/model1_overfit2.PNG
new file mode 100644
index 0000000000000000000000000000000000000000..393665b7558753088a2475b66720b7197fc37ad5
Binary files /dev/null and b/results/model1_overfit2.PNG differ