diff --git a/TD2 Deep Learning.ipynb b/TD2 Deep Learning.ipynb
index 00e4fdc78c068248ca0742c64725d155b3681f0d..0b832d1bf9d9158033a621e289afdf64141c4f52 100644
--- a/TD2 Deep Learning.ipynb	
+++ b/TD2 Deep Learning.ipynb	
@@ -52,10 +52,72 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "id": "b1950f0a",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "tensor([[ 0.1159, -0.1632,  0.6574,  1.5902, -0.4352, -1.1418,  0.8810,  0.0847,\n",
+      "         -2.4290, -0.0911],\n",
+      "        [ 0.3400, -1.9606, -0.0214, -0.1179, -0.3917, -0.3592,  0.5251,  0.4169,\n",
+      "         -0.8085, -0.2057],\n",
+      "        [-0.7488,  0.7606,  0.1129, -2.6223, -0.5739, -0.4979,  2.0387,  0.1628,\n",
+      "          1.1597, -0.9275],\n",
+      "        [-1.5324,  1.4420,  0.9108,  0.4737,  0.3852, -1.1974,  1.7244,  1.3268,\n",
+      "          1.4552,  0.5241],\n",
+      "        [-0.3818, -0.4960, -1.5574, -0.8755,  1.2589,  0.8939,  0.0385, -2.5047,\n",
+      "          0.6804, -0.1951],\n",
+      "        [ 0.1988,  0.9232, -1.3031,  1.8143,  0.0756,  1.2082, -1.1921,  0.0647,\n",
+      "          0.1529,  0.4644],\n",
+      "        [ 1.8262,  0.6831, -0.1683, -0.8331, -0.5271, -0.2069,  0.5703,  1.7226,\n",
+      "         -0.6655, -0.4297],\n",
+      "        [-0.0630, -0.2216,  2.2132, -0.8788,  2.8345, -0.0534, -1.7918, -0.6061,\n",
+      "         -0.2461,  0.4126],\n",
+      "        [ 0.3832, -0.2473, -1.1898,  2.3250,  0.1655, -0.4416, -0.4937, -0.1714,\n",
+      "          0.6682, -0.7186],\n",
+      "        [-0.5843,  1.7539,  0.4247,  0.5102, -1.2161,  0.2732,  1.8955,  1.5722,\n",
+      "          0.9527,  0.2717],\n",
+      "        [-1.2976,  0.2779, -0.8085,  0.0037, -1.4008, -1.3840,  0.1210,  0.5056,\n",
+      "          0.6006, -1.5492],\n",
+      "        [-0.1415, -0.8489,  0.3045,  2.3843,  1.4306, -0.5467, -0.2279,  0.2920,\n",
+      "          1.5270, -1.5247],\n",
+      "        [-0.8661, -0.3661,  0.3478, -0.5955,  1.0730, -2.1341, -0.8818,  0.2842,\n",
+      "          0.8046,  0.4630],\n",
+      "        [-0.1986,  1.3981, -0.3965, -0.6231,  2.5136,  0.1703, -1.0520, -0.4539,\n",
+      "         -1.8835, -0.1314]])\n",
+      "AlexNet(\n",
+      "  (features): Sequential(\n",
+      "    (0): Conv2d(3, 64, kernel_size=(11, 11), stride=(4, 4), padding=(2, 2))\n",
+      "    (1): ReLU(inplace=True)\n",
+      "    (2): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "    (3): Conv2d(64, 192, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))\n",
+      "    (4): ReLU(inplace=True)\n",
+      "    (5): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "    (6): Conv2d(192, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "    (7): ReLU(inplace=True)\n",
+      "    (8): Conv2d(384, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "    (9): ReLU(inplace=True)\n",
+      "    (10): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "    (11): ReLU(inplace=True)\n",
+      "    (12): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "  )\n",
+      "  (avgpool): AdaptiveAvgPool2d(output_size=(6, 6))\n",
+      "  (classifier): Sequential(\n",
+      "    (0): Dropout(p=0.5, inplace=False)\n",
+      "    (1): Linear(in_features=9216, out_features=4096, bias=True)\n",
+      "    (2): ReLU(inplace=True)\n",
+      "    (3): Dropout(p=0.5, inplace=False)\n",
+      "    (4): Linear(in_features=4096, out_features=4096, bias=True)\n",
+      "    (5): ReLU(inplace=True)\n",
+      "    (6): Linear(in_features=4096, out_features=1000, bias=True)\n",
+      "  )\n",
+      ")\n"
+     ]
+    }
+   ],
    "source": [
     "import torch\n",
     "\n",
@@ -95,10 +157,18 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "id": "6e18f2fd",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CUDA is not available.  Training on CPU ...\n"
+     ]
+    }
+   ],
    "source": [
     "import torch\n",
     "\n",
@@ -121,10 +191,33 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 4,
    "id": "462666a2",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Downloading https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz to data\\cifar-10-python.tar.gz\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "100.0%\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Extracting data\\cifar-10-python.tar.gz to data\n",
+      "Files already downloaded and verified\n"
+     ]
+    }
+   ],
    "source": [
     "import numpy as np\n",
     "from torchvision import datasets, transforms\n",
@@ -196,7 +289,22 @@
    "execution_count": null,
    "id": "317bf070",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Net(\n",
+      "  (conv1): Conv2d(3, 6, kernel_size=(5, 5), stride=(1, 1))\n",
+      "  (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "  (conv2): Conv2d(6, 16, kernel_size=(5, 5), stride=(1, 1))\n",
+      "  (fc1): Linear(in_features=400, out_features=120, bias=True)\n",
+      "  (fc2): Linear(in_features=120, out_features=84, bias=True)\n",
+      "  (fc3): Linear(in_features=84, out_features=10, bias=True)\n",
+      ")\n"
+     ]
+    }
+   ],
    "source": [
     "import torch.nn as nn\n",
     "import torch.nn.functional as F\n",
@@ -232,6 +340,125 @@
     "    model.cuda()"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "id": "02ad19e0",
+   "metadata": {},
+   "source": [
+    "Creating the model to answer question 1: "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 33,
+   "id": "9f3145ca",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Net_3Conv_3lin(\n",
+      "  (conv1): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (conv2): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (conv3): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "  (fc1): Linear(in_features=1024, out_features=512, bias=True)\n",
+      "  (fc2): Linear(in_features=512, out_features=64, bias=True)\n",
+      "  (fc3): Linear(in_features=64, out_features=10, bias=True)\n",
+      "  (dropout): Dropout(p=0.3, inplace=False)\n",
+      ")\n"
+     ]
+    }
+   ],
+   "source": [
+    "\n",
+    "\n",
+    "class Net_3Conv_3lin(nn.Module):\n",
+    "    def __init__(self):\n",
+    "        super(Net_3Conv_3lin, self).__init__()\n",
+    "        self.conv1 = nn.Conv2d(kernel_size=3 ,padding=1 ,in_channels=3, out_channels =16)\n",
+    "        self.conv2 = nn.Conv2d(kernel_size=3 ,padding=1,in_channels=16, out_channels =32 )\n",
+    "        self.conv3 = nn.Conv2d(kernel_size=3 ,padding=1,in_channels=32, out_channels =64 )\n",
+    "        self.pool = nn.MaxPool2d(2, 2)\n",
+    "        self.fc1 = nn.Linear(1024,512)\n",
+    "        self.fc2 = nn.Linear(512,64)\n",
+    "        self.fc3 = nn.Linear(64,10)\n",
+    "        self.p = 0.3\n",
+    "        self.dropout = nn.Dropout(self.p) \n",
+    "\n",
+    "    def forward(self, x):\n",
+    "        x = self.pool(F.relu(self.conv1(x)))\n",
+    "        x = self.pool(F.relu(self.conv2(x)))\n",
+    "        x = self.pool(F.relu(self.conv3(x)))\n",
+    "        x = x.view(-1, 64 * 4 * 4)\n",
+    "        x = self.dropout(F.relu(self.fc1(x)))\n",
+    "        x = self.dropout(F.relu(self.fc2(x)))\n",
+    "        x = self.fc3(x)\n",
+    "        return x\n",
+    "\n",
+    "\n",
+    "# create a complete CNN\n",
+    "model = Net_3Conv_3lin()\n",
+    "print(model)\n",
+    "# move tensors to GPU if CUDA is available\n",
+    "if train_on_gpu:\n",
+    "    model.cuda()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Creating a model for training aware quatization"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "71bb37b5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "\n",
+    "\n",
+    "class Net_3Conv_3lin_Quant(nn.Module):\n",
+    "    def __init__(self):\n",
+    "        super(Net_3Conv_3lin_Quant, self).__init__()\n",
+    "        self.quant = torch.ao.quantization.QuantStub()\n",
+    "        self.conv1 = nn.Conv2d(kernel_size=3 ,padding=1 ,in_channels=3, out_channels =16)\n",
+    "        self.conv2 = nn.Conv2d(kernel_size=3 ,padding=1,in_channels=16, out_channels =32 )\n",
+    "        self.conv3 = nn.Conv2d(kernel_size=3 ,padding=1,in_channels=32, out_channels =64 )\n",
+    "        self.pool = nn.MaxPool2d(2, 2)\n",
+    "        self.fc1 = nn.Linear(1024,512)\n",
+    "        self.fc2 = nn.Linear(512,64)\n",
+    "        self.fc3 = nn.Linear(64,10)\n",
+    "        self.p = 0.3\n",
+    "        self.dropout = nn.Dropout(self.p) \n",
+    "\n",
+    "        self.dequant = torch.ao.quantization.DeQuantStub()\n",
+    "\n",
+    "    def forward(self, x):\n",
+    "        x = self.quant(x)\n",
+    "        x = self.pool(F.relu(self.conv1(x)))\n",
+    "        x = self.pool(F.relu(self.conv2(x)))\n",
+    "        x = self.pool(F.relu(self.conv3(x)))\n",
+    "        x = x.view(-1, 64 * 4 * 4)\n",
+    "        x = self.dropout(F.relu(self.fc1(x)))\n",
+    "        x = self.dropout(F.relu(self.fc2(x)))\n",
+    "        x = self.fc3(x)\n",
+    "        x = self.dequant(x)\n",
+    "        return x\n",
+    "\n",
+    "\n",
+    "# create a complete CNN\n",
+    "model = Net_3Conv_3lin_Quant()\n",
+    "print(model)\n",
+    "# move tensors to GPU if CUDA is available\n",
+    "if train_on_gpu:\n",
+    "    model.cuda()"
+   ]
+  },
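+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch (not run in this TD) of how the model above could go through PyTorch's eager-mode quantization-aware training workflow. The backend string (`fbgemm`) and the variable names are assumptions for illustration, not part of the assignment."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch only: eager-mode QAT workflow (assumes the fbgemm CPU backend).\n",
+    "qat_model = Net_3Conv_3lin_Quant()\n",
+    "# Attach a quantization-aware training configuration to the whole model\n",
+    "qat_model.qconfig = torch.ao.quantization.get_default_qat_qconfig(\"fbgemm\")\n",
+    "# Insert fake-quantization observers between the QuantStub/DeQuantStub boundaries\n",
+    "qat_model = torch.ao.quantization.prepare_qat(qat_model.train())\n",
+    "# ... run the usual training loop on qat_model so the observers calibrate ...\n",
+    "# After training, convert the calibrated model to a real int8 model for CPU inference\n",
+    "qat_model.eval()\n",
+    "quantized_model = torch.ao.quantization.convert(qat_model)"
+   ]
+  },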
   {
    "cell_type": "markdown",
    "id": "a2dc4974",
@@ -242,10 +469,78 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 34,
    "id": "4b53f229",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Epoch: 0 \tTraining Loss: 45.872737 \tValidation Loss: 44.489702\n",
+      "Validation loss decreased (inf --> 44.489702).  Saving model ...\n",
+      "Epoch: 1 \tTraining Loss: 40.061390 \tValidation Loss: 36.100894\n",
+      "Validation loss decreased (44.489702 --> 36.100894).  Saving model ...\n",
+      "Epoch: 2 \tTraining Loss: 34.138645 \tValidation Loss: 31.113222\n",
+      "Validation loss decreased (36.100894 --> 31.113222).  Saving model ...\n",
+      "Epoch: 3 \tTraining Loss: 30.507358 \tValidation Loss: 28.677294\n",
+      "Validation loss decreased (31.113222 --> 28.677294).  Saving model ...\n",
+      "Epoch: 4 \tTraining Loss: 28.486533 \tValidation Loss: 26.945912\n",
+      "Validation loss decreased (28.677294 --> 26.945912).  Saving model ...\n",
+      "Epoch: 5 \tTraining Loss: 26.688108 \tValidation Loss: 25.087968\n",
+      "Validation loss decreased (26.945912 --> 25.087968).  Saving model ...\n",
+      "Epoch: 6 \tTraining Loss: 24.921677 \tValidation Loss: 23.867951\n",
+      "Validation loss decreased (25.087968 --> 23.867951).  Saving model ...\n",
+      "Epoch: 7 \tTraining Loss: 23.352516 \tValidation Loss: 21.780911\n",
+      "Validation loss decreased (23.867951 --> 21.780911).  Saving model ...\n",
+      "Epoch: 8 \tTraining Loss: 21.794870 \tValidation Loss: 21.096160\n",
+      "Validation loss decreased (21.780911 --> 21.096160).  Saving model ...\n",
+      "Epoch: 9 \tTraining Loss: 20.547996 \tValidation Loss: 19.826372\n",
+      "Validation loss decreased (21.096160 --> 19.826372).  Saving model ...\n",
+      "Epoch: 10 \tTraining Loss: 19.401682 \tValidation Loss: 19.596204\n",
+      "Validation loss decreased (19.826372 --> 19.596204).  Saving model ...\n",
+      "Epoch: 11 \tTraining Loss: 18.340276 \tValidation Loss: 18.632437\n",
+      "Validation loss decreased (19.596204 --> 18.632437).  Saving model ...\n",
+      "Epoch: 12 \tTraining Loss: 17.266555 \tValidation Loss: 17.758480\n",
+      "Validation loss decreased (18.632437 --> 17.758480).  Saving model ...\n",
+      "Epoch: 13 \tTraining Loss: 16.353216 \tValidation Loss: 17.932480\n",
+      "Epoch: 14 \tTraining Loss: 15.507940 \tValidation Loss: 16.795444\n",
+      "Validation loss decreased (17.758480 --> 16.795444).  Saving model ...\n",
+      "Epoch: 15 \tTraining Loss: 14.657860 \tValidation Loss: 16.382975\n",
+      "Validation loss decreased (16.795444 --> 16.382975).  Saving model ...\n",
+      "Epoch: 16 \tTraining Loss: 13.861092 \tValidation Loss: 16.670121\n",
+      "Epoch: 17 \tTraining Loss: 12.984836 \tValidation Loss: 16.962824\n",
+      "Epoch: 18 \tTraining Loss: 12.257837 \tValidation Loss: 16.400703\n",
+      "Epoch: 19 \tTraining Loss: 11.436899 \tValidation Loss: 16.729391\n",
+      "Epoch: 20 \tTraining Loss: 10.915463 \tValidation Loss: 16.299635\n",
+      "Validation loss decreased (16.382975 --> 16.299635).  Saving model ...\n",
+      "Epoch: 21 \tTraining Loss: 10.233074 \tValidation Loss: 16.345074\n",
+      "Epoch: 22 \tTraining Loss: 9.549847 \tValidation Loss: 16.705205\n",
+      "Epoch: 23 \tTraining Loss: 8.865565 \tValidation Loss: 16.583533\n",
+      "Epoch: 24 \tTraining Loss: 8.210216 \tValidation Loss: 17.112398\n",
+      "Epoch: 25 \tTraining Loss: 7.781697 \tValidation Loss: 17.192360\n"
+     ]
+    },
+    {
+     "ename": "KeyboardInterrupt",
+     "evalue": "",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[1;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
+      "Cell \u001b[1;32mIn[34], line 41\u001b[0m\n\u001b[0;32m     39\u001b[0m     data, target \u001b[38;5;241m=\u001b[39m data\u001b[38;5;241m.\u001b[39mcuda(), target\u001b[38;5;241m.\u001b[39mcuda()\n\u001b[0;32m     40\u001b[0m \u001b[38;5;66;03m# Forward pass: compute predicted outputs by passing inputs to the model\u001b[39;00m\n\u001b[1;32m---> 41\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m     42\u001b[0m \u001b[38;5;66;03m# Calculate the batch loss\u001b[39;00m\n\u001b[0;32m     43\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(output, target)\n",
+      "File \u001b[1;32mc:\\Users\\xxpod\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1736\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m   1734\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)  \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m   1735\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1736\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[1;32mc:\\Users\\xxpod\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1747\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m   1742\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m   1743\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m   1744\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m   1745\u001b[0m         \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m   1746\u001b[0m         \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1747\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m   1749\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m   1750\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n",
+      "Cell \u001b[1;32mIn[33], line 16\u001b[0m, in \u001b[0;36mNet_3Conv_3lin.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m     14\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[0;32m     15\u001b[0m     x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpool(F\u001b[38;5;241m.\u001b[39mrelu(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv1(x)))\n\u001b[1;32m---> 16\u001b[0m     x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpool\u001b[49m\u001b[43m(\u001b[49m\u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrelu\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconv2\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m     17\u001b[0m     x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpool(F\u001b[38;5;241m.\u001b[39mrelu(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv3(x)))\n\u001b[0;32m     18\u001b[0m     x \u001b[38;5;241m=\u001b[39m x\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, \u001b[38;5;241m64\u001b[39m \u001b[38;5;241m*\u001b[39m \u001b[38;5;241m4\u001b[39m \u001b[38;5;241m*\u001b[39m \u001b[38;5;241m4\u001b[39m)\n",
+      "File \u001b[1;32mc:\\Users\\xxpod\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1736\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m   1734\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)  \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m   1735\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1736\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[1;32mc:\\Users\\xxpod\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1747\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m   1742\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m   1743\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m   1744\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m   1745\u001b[0m         \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m   1746\u001b[0m         \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1747\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m   1749\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m   1750\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n",
+      "File \u001b[1;32mc:\\Users\\xxpod\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\torch\\nn\\modules\\pooling.py:213\u001b[0m, in \u001b[0;36mMaxPool2d.forward\u001b[1;34m(self, input)\u001b[0m\n\u001b[0;32m    212\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor):\n\u001b[1;32m--> 213\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmax_pool2d\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    214\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m    215\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mkernel_size\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    216\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstride\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    217\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpadding\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    218\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdilation\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    219\u001b[0m \u001b[43m        \u001b[49m\u001b[43mceil_mode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mceil_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    220\u001b[0m \u001b[43m        \u001b[49m\u001b[43mreturn_indices\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mreturn_indices\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    221\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[1;32mc:\\Users\\xxpod\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\torch\\_jit_internal.py:624\u001b[0m, in \u001b[0;36mboolean_dispatch.<locals>.fn\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m    622\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m if_true(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m    623\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 624\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mif_false\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[1;32mc:\\Users\\xxpod\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\torch\\nn\\functional.py:830\u001b[0m, in \u001b[0;36m_max_pool2d\u001b[1;34m(input, kernel_size, stride, padding, dilation, ceil_mode, return_indices)\u001b[0m\n\u001b[0;32m    828\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stride \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m    829\u001b[0m     stride \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mjit\u001b[38;5;241m.\u001b[39mannotate(List[\u001b[38;5;28mint\u001b[39m], [])\n\u001b[1;32m--> 830\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmax_pool2d\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkernel_size\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstride\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpadding\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdilation\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mceil_mode\u001b[49m\u001b[43m)\u001b[49m\n",
+      "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
+     ]
+    }
+   ],
    "source": [
     "import torch.optim as optim\n",
     "\n",
@@ -254,7 +549,7 @@
     "\n",
     "n_epochs = 30  # number of epochs to train the model\n",
     "train_loss_list = []  # list to store loss to visualize\n",
-    "valid_loss_min = np.Inf  # track change in validation loss\n",
+    "valid_loss_min = np.inf  # track change in validation loss\n",
     "\n",
     "for epoch in range(n_epochs):\n",
     "    # Keep track of training and validation loss\n",
@@ -326,17 +621,44 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 36,
    "id": "d39df818",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Net_3Conv_3lin(\n",
+      "  (conv1): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (conv2): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (conv3): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "  (fc1): Linear(in_features=1024, out_features=512, bias=True)\n",
+      "  (fc2): Linear(in_features=512, out_features=64, bias=True)\n",
+      "  (fc3): Linear(in_features=64, out_features=10, bias=True)\n",
+      "  (dropout): Dropout(p=0.3, inplace=False)\n",
+      ")\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAHHCAYAAACle7JuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABUz0lEQVR4nO3dd1gUd+IG8HeWsnSQuiBFRAEVwdiQGFvAHiuesSTWmNhysaScl0vUXHKk/E4Tk6gxRWOiMerZa6wYFRuKoBEULIA0RVmaLGXn9wfLJhswIgKzu7yf59kn7szs7MuwcV/nO0UQRVEEERERkQGSSR2AiIiIqK5YZIiIiMhgscgQERGRwWKRISIiIoPFIkNEREQGi0WGiIiIDBaLDBERERksFhkiIiIyWCwyREREZLBYZIgMxCeffIKWLVvCxMQEHTp0kDpOk7Fv3z506NABFhYWEAQBeXl5UkeqRhAELFq06LFfd/PmTQiCgDVr1tR7JqLGwiJDVEdr1qyBIAjah4WFBfz9/TF79mxkZ2fX63v98ssvePPNN9G9e3esXr0a//nPf+p1/VSz3NxcjB49GpaWlvjyyy/xww8/wNrausZl//h5OH78eLX5oijCy8sLgiDgueeea+jo9SoxMRFvvvkmOnToAFtbW7i7u2Pw4ME4d+6c1NGIYCp1ACJD995778HX1xclJSU4fvw4VqxYgT179uDSpUuwsrKql/c4fPgwZDIZvv32W5ibm9fLOunRzp49i4KCAvz73/9GRERErV5jYWGB9evX45lnntGZHh0djfT0dMjl8oaI2qC++eYbfPvtt4iMjMTMmTOhVCrx1VdfoVu3bti3b1+ttw1RQ2CRIXpCAwcOROfOnQEAL730EpycnLBkyRJs374dY8eOfaJ1FxcXw8rKCjk5ObC0tKy3EiOKIkpKSmBpaVkv6zNWOTk5AAAHB4dav2bQoEHYtGkTli1bBlPT3/+KXb9+PTp16oS7d+/Wd8wGN3bsWCxatAg2NjbaaVOmTEGbNm2waNEiFhmSFIeWiOrZs88+CwC4ceOGdtqPP/6ITp06wdLSEo6OjhgzZgzS0tJ0Xte7d28EBQUhNjYWPXv2hJWVFf75z39CEASsXr0aRUVF2qGLqmMaysvL8e9//xt+fn6Qy+Vo0aIF/vnPf0KlUumsu0WLFnjuueewf/9+dO7cGZaWlvjqq69w9OhRCIKAjRs3YvHixWjevDlsbW0xatQoKJVKqFQqzJkzB66urrCxscHkyZOrrXv16tV49tln4erqCrlcjrZt22LFihXVtktVhuPHj6Nr166wsLBAy5YtsXbt2mrL5uXlYe7cuWjRogXkcjk8PT0xYcIEnRKgUqmwcOFCtGrVCnK5HF5eXnjzzTer5XuYTZs2aX8nzs7OeOGFF3D79m2d38fEiRMBAF26dIEgCJg0adIj1zt27Fjk5ubiwIED2mmlpaXYvHkzxo0bV+NrioqKMH/+fHh5eUEulyMgIAD/93//B1EUdZZTqVSYO3cuXFxcYGtri6FDhyI9Pb3Gdd6+fRtTpkyBm5sb5HI52rVrh+++++6R+WvSqVMnnRIDAE5OTujRoweuXLlSp3US1RfukSGqZykpKQAq/6IHgA8++ADvvPMORo8ejZdeegl37tzB559/jp49e+LChQs6/9rPzc3FwIEDMWbMGLzwwgtwc3ND586dsWrVKpw5cwbffPMNAODpp58GULkH6Pvvv8eoUaMwf/58nD59GlFRUbhy5Qq2bt2qkyspKQljx47FK6+8gmnTpiEgIEA7LyoqCpaWlvjHP/6B5ORkfP755zAzM4NMJsP9+/exaNEinDp1CmvWrIGvry/effdd7WtXrFiBdu3aYejQoTA1NcXOnTsxc+ZMqNVqzJo1SydDcnIyRo0ahalTp2LixIn47rvvMGnSJHTq1Ant2rUDABQWFmq/IKdMmYKOHTvi7t272LFjB9LT0+Hs7Ay1Wo2hQ4fi+PHjePnll9GmTRskJCRg6dKluHr1KrZt2/aXv6M1a9Zg8uTJ6NKlC6KiopCdnY3PPvsMJ06c0P5O3n77bQQEBGDVqlXa4UM/P79H/v5btGiBsLAw/PTTTxg4cCAAYO/evVAqlRgzZgyWLVums7woihg6dCiOHDmCqVOnokOHDti/fz/eeOMN3L59G0uXLtUu+9JLL+HHH3/EuHHj8PTTT+Pw4cMYPHhwtQzZ2dno1q0bBEHA7Nmz4eLigr1792Lq1KnIz8/HnDlzHvlz1EZWVhacnZ3rZV1EdSYSUZ2sXr1aBCAePHhQvHPnjpiWliZu2LBBdHJyEi0tLcX09HTx5s2boomJifjBBx/ovDYhIUE0NTXVmd6rVy8RgLhy5cpq7zVx4kTR2tpaZ1pcXJwIQHzppZd0pr/++usiAPHw4cPaaT4+PiIAcd++fTrLHjlyRAQgBgUFiaWlpdrpY8eOFQVBEAcOHKizfFhYmOjj46Mzrbi4uFre/v37iy1bttSZVpXh2LFj2mk5OTmiXC4X58+fr5327rvvigDELVu2VFuvWq0WRVEUf/jhB1Emk4m//vqrzvyVK1eKAMQTJ05Ue22V0tJS0dXVVQwKChIfPHignb5r1y4RgPjuu+9qp1X9js+ePfvQ9dW07BdffCHa2tpqt83f/vY3sU+fPtrtMHjwYO3rtm3bJgIQ33//fZ31jRo1ShQEQUxOThZF8fff98yZM3WWGzdunAhAXLhwoXba1KlTRXd3d/Hu3bs6y44ZM0a0t7fX5rpx44YIQFy9evUjf74/O3bsmCgIgvjOO+889muJ6hOHloieUEREBFxcXODl5YUxY8bAxsYGW7duRfPmzbFlyxao1WqMHj0ad+/e1T4UCgVat26NI0eO6KxLLpdj8uTJtXrfPXv2AADmzZunM33+/PkAgN27d+tM9/X1Rf/+/Wtc14QJE2BmZqZ9HhoaClEUMWXKFJ3lQkNDkZaWhvLycu20Px5no1QqcffuXfTq1QvXr1+HUqnUeX3btm3Ro0cP7XMXFxcEBATg+vXr2mn/+9//EBISghEjRlTLKQgCgMphoTZt2iAwMFBnu1YN6/15u/7RuXPnkJOTg5kzZ8LCwkI7ffDgwQgMDKy23epi9OjRePDgAXbt2oWCggLs2rXrocNKe/bsgYmJCf7+97/rTJ8/fz5EUcTevXu1ywGottyf966Iooj//e9/GDJkCERR1Nk+/fv3h1KpxPnz55/o58vJycG4cePg6+uLN99884nWRfSkOLRE9IS+/PJL+Pv7w9TUFG5ubggICIBMVvlvhGvXrkEURbRu3brG1/6xPABA8+bNa31A761btyCTydCqVSud6QqFAg4ODrh165bOdF9f34euy9vbW+e5vb09AMDLy6vadLVaDaVSqR06O3HiBBYuXIiYmBgUFxfrLK9UKrXrqul9AKBZs2a4f/++9nlKSgoiIyMfmhWo3K5XrlyBi4tLjfOrDtKtSdV2+ePQWpXAwMAaT51+XC4uLoiIiMD69etRXFyMiooKjBo16q
F5PDw8YGtrqzO9TZs2Onmrft9/Ht76889x584d5OXlYdWqVVi1alWN7/lX2+dRioqK8Nxzz6GgoADHjx+vduwMUWNjkSF6Ql27dtWetfRnarUagiBg7969MDExqTb/z18CdTmLqGovxaP81bpryvZX00XNQagpKSkIDw9HYGAglixZAi8vL5ibm2PPnj1YunQp1Gr1Y62vttRqNdq3b48lS5bUOP/PBUwK48aNw7Rp05CVlYWBAwc+1plPT6Jqm7/wwgvag5X/LDg4uE7rLi0txciRIxEfH4/9+/cjKCiozjmJ6guLDFED8vPzgyiK8PX1hb+/f72u28fHB2q1GteuXdP+6x2oPNAzLy8PPj4+9fp+Ndm5cydUKhV27Nihs7flr4Z2HsXPzw+XLl165DIXL15EeHh4rYtclartkpSUpB2KqpKUlFRv223EiBF45ZVXcOrUKfz8889/mefgwYMoKCjQ2SuTmJiok7fq952SkqKzFyYpKUlnfVVnNFVUVNTradFqtRoTJkzAoUOHsHHjRvTq1ave1k30JHiMDFEDGjlyJExMTLB48eJqex1EUURubm6d1z1o0CAAwKeffqozvWovRU1ns9S3qj0sf/zZlEolVq9eXed1RkZG4uLFi9XOuvrj+4wePRq3b9/G119/XW2ZBw8eoKio6KHr79y5M1xdXbFy5UqdU7X37t2LK1eu1Nt2s7GxwYoVK7Bo0SIMGTLkocsNGjQIFRUV+OKLL3SmL126FIIgaM98qvrvn896+vPv38TEBJGRkfjf//5XYyG8c+dOXX4cvPrqq/j555+xfPlyjBw5sk7rIGoI3CND1ID8/Pzw/vvvY8GCBbh58yaGDx8OW1tb3LhxA1u3bsXLL7+M119/vU7rDgkJwcSJE7Fq1Srk5eWhV69eOHPmDL7//nsMHz4cffr0qeefprp+/frB3NwcQ4YMwSuvvILCwkJ8/fXXcHV1RWZmZp3W+cYbb2Dz5s3429/+hilTpqBTp064d+8eduzYgZUrVyIkJAQvvvgiNm7ciOnTp+PIkSPo3r07KioqkJiYiI0bN2qvl1MTMzMzfPTRR5g8eTJ69eqFsWPHak+/btGiBebOnfskm0THw4Z2/mjIkCHo06cP3n77bdy8eRMhISH45ZdfsH37dsyZM0d7TEyHDh0wduxYLF++HEqlEk8//TQOHTqE5OTkauv88MMPceTIEYSGhmLatGlo27Yt7t27h/Pnz+PgwYO4d+/eY/0cn376KZYvX46wsDBYWVnhxx9/1Jk/YsSIh966gaihscgQNbB//OMf8Pf3x9KlS7F48WIAlcdw9OvXD0OHDn2idX/zzTdo2bIl1qxZg61bt0KhUGDBggVYuHBhfUR/pICAAGzevBn/+te/8Prrr0OhUGDGjBlwcXGpdsZTbdnY2ODXX3/FwoULsXXrVnz//fdwdXVFeHg4PD09AQAymQzbtm3D0qVLsXbtWmzduhVWVlZo2bIlXnvttUcO402aNAlWVlb48MMP8dZbb8Ha2hojRozARx991GjHslSRyWTYsWMH3n33Xfz8889YvXo1WrRogU8++UR7BlqV7777Di4uLli3bh22bduGZ599Frt37652TJCbmxvOnDmD9957D1u2bMHy5cvh5OSEdu3a4aOPPnrsjHFxcQCAmJgYxMTEVJt/48YNFhmSjCA+7lF2RERERHqCx8gQERGRwWKRISIiIoPFIkNEREQGi0WGiIiIDBaLDBERERksFhkiIiIyWEZ/HRm1Wo2MjAzY2to+9qXMiYiISBqiKKKgoAAeHh7aG/HWxOiLTEZGhl7cQI6IiIgeX1pamvZimDUx+iJTdRO2tLQ02NnZSZyGiIiIaiM/Px9eXl46N1OtidEXmarhJDs7OxYZIiIiA/Oow0J4sC8REREZLBYZIiIiMlgsMkRERGSwWGSIiIjIYLHIEBERkcFikSEiIiKDxSJDREREBotFhoiIiAwWiwwREREZLBYZIiIiMlgsMkRERGSwWGSIiIjIYLHI1JEoiohPz4PyQZnUUYiIiJosFpk6mvHjeQz94gR2xWdIHYWIiKjJYpGpo44+DgCA7XEsMkRERFJhkamjISEeEATgzI17yMh7IHUcIiKiJolFpo7c7S3RtYUjAGDnRe6VISIikoLeFJkPP/wQgiBgzpw52mm9e/eGIAg6j+nTp0sX8k+GdWgOgMNLREREUtGLInP27Fl89dVXCA4OrjZv2rRpyMzM1D4+/vhjCRLWbGCQAmYmAn7LzMe17AKp4xARETU5kheZwsJCjB8/Hl9//TWaNWtWbb6VlRUUCoX2YWdnJ0HKmjWzNkcvfxcA3CtDREQkBcmLzKxZszB48GBERETUOH/dunVwdnZGUFAQFixYgOLi4kZO+NeGVg0vXbwNURQlTkNERNS0mEr55hs2bMD58+dx9uzZGuePGzcOPj4+8PDwQHx8PN566y0kJSVhy5YtD12nSqWCSqXSPs/Pz6/33H8U0cYVVuYmSLv3ABfS8tDRu/peJSIiImoYkhWZtLQ0vPbaazhw4AAsLCxqXObll1/W/rl9+/Zwd3dHeHg4UlJS4OfnV+NroqKisHjx4gbJXBMrc1P0a+uGbXEZ2BGXwSJDRETUiCQbWoqNjUVOTg46duwIU1NTmJqaIjo6GsuWLYOpqSkqKiqqvSY0NBQAkJyc/ND1LliwAEqlUvtIS0trsJ+hStXZS7viM1BeoW7w9yMiIqJKku2RCQ8PR0JCgs60yZMnIzAwEG+99RZMTEyqvSYuLg4A4O7u/tD1yuVyyOXyes36KM+0doajtTnuFpbiZEouemoOACYiIqKGJVmRsbW1RVBQkM40a2trODk5ISgoCCkpKVi/fj0GDRoEJycnxMfHY+7cuejZs2eNp2lLycxEhsHt3fHDqVvYHpfBIkNERNRIJD9r6WHMzc1x8OBB9OvXD4GBgZg/fz4iIyOxc+dOqaPVaFgHDwDA/stZKCmrPixGRERE9U/Ss5b+7OjRo9o/e3l5ITo6Wrowj6mjdzM0d7DE7bwHOJyYg0HtHz78RURERPVDb/fIGBqZTMBQzV6Z7XG3JU5DRETUNLDI1KOq4aUjiXegfFAmcRoiIiLjxyJTjwIVdghws0VphRr7L2VJHYeIiMjoscjUM+3w0kUOLxERETU0Fpl6NjSkssicTMlFdn6JxGmIiIiMG4tMPfNytEInn2YQRWDnRd4Rm4iIqCGxyDSAqoN+d7DIEBERNSgWmQYwqL07TGQC4tOVuH6nUOo4RERERotFpgE428jxTCtnANwrQ0RE1JBYZBqIdngpLgOiKEqchoiIyDixyDSQfu0UkJvKcP1uES7dzpc6DhERkVFikWkgNnJTRLR1A8BbFhARETUUFpkGNExzTZmd8RmoUHN4iYiIqL6xyDSg3gGusLc0Q3a+Cqdv5Eodh4iIyOiwyDQgc1MZBrVXAKg86JeIiIjqF4tMAxsa0hwAsCchE6ryConTEBERGRcWmQbW1dcRCjsL5JeUIzrpjtRxiIiIjAqLTAMzkQkYEuIOANjO4
SUiIqJ6xSLTCIZ1qBxeOnglGwUlZRKnISIiMh4sMo2gnYcdWrpYQ1Wuxi+Xs6WOQ0REZDRYZBqBIAgYpjnodzvvvURERFRvWGQayVDNvZdOJN/FnQKVxGmIiIiMA4tMI/F1tkaIpz0q1CL2JGRKHYeIiMgosMg0oqGag3557yUiIqL6wSLTiIYEu0MQgPOpeUjNLZY6DhERkcFjkWlErnYWeNrPCUDljSSJiIjoybDINLKqs5e2XbgNUeQdsYmIiJ4Ei0wj6x+kgLmJDNdyCpGYVSB1HCIiIoPGItPI7C3N0CfQBQBvWUBERPSkWGQkMFxz9tLOixlQqzm8REREVFcsMhLoE+gKW7kpbuc9QGzqfanjEBERGSwWGQlYmJmgf5ACQOVBv0RERFQ3LDISGaa5ZcHuhEyUlqslTkNERGSY9KbIfPjhhxAEAXPmzNFOKykpwaxZs+Dk5AQbGxtERkYiO9s47h4d1tIJzjZy5BWX4XjyHanjEBERGSS9KDJnz57FV199heDgYJ3pc+fOxc6dO7Fp0yZER0cjIyMDI0eOlChl/TI1keG5YHcAPHuJiIioriQvMoWFhRg/fjy+/vprNGvWTDtdqVTi22+/xZIlS/Dss8+iU6dOWL16NU6ePIlTp05JmLj+VA0v/XI5G8Wl5RKnISIiMjySF5lZs2Zh8ODBiIiI0JkeGxuLsrIynemBgYHw9vZGTExMY8dsEB28HODtaIUHZRU48JtxDJkRERE1JkmLzIYNG3D+/HlERUVVm5eVlQVzc3M4ODjoTHdzc0NWVtZD16lSqZCfn6/z0FeCIGj3yuy8yOElIiKixyVZkUlLS8Nrr72GdevWwcLCot7WGxUVBXt7e+3Dy8ur3tbdEIaEVBaZ6Kt3oCwukzgNERGRYZGsyMTGxiInJwcdO3aEqakpTE1NER0djWXLlsHU1BRubm4oLS1FXl6ezuuys7OhUCgeut4FCxZAqVRqH2lpaQ38kzwZfzdbBCpsUVYhYt/lTKnjEBERGRTJikx4eDgSEhIQFxenfXTu3Bnjx4/X/tnMzAyHDh3SviYpKQmpqakICwt76Hrlcjns7Ox0Hvquaq/MzossMkRERI/DVKo3trW1RVBQkM40a2trODk5aadPnToV8+bNg6OjI+zs7PDqq68iLCwM3bp1kyJyg3ku2B2f7E/CyZS7yCkogatt/Q21ERERGTPJz1r6K0uXLsVzzz2HyMhI9OzZEwqFAlu2bJE6Vr3zcbJGiJcD1CKwN+HhBzITERGRLkEURaO+/XJ+fj7s7e2hVCr1epjpm1+v4/3dV9DZpxk2z3ha6jhERESSqu33t17vkWlKngv2gCAA527dx+28B1LHISIiMggsMnpCYW+Bri0cAQC7eE0ZIiKiWmGR0SPas5fiWWSIiIhqg0VGjwxq7w4TmYBLt/Nx/U6h1HGIiIj0HouMHnG0NsczrZwB8JoyREREtcEio2eqhpd2XLwNIz+hjIiI6ImxyOiZfu3cYG4qQ8qdIlzJLJA6DhERkV5jkdEzdhZm6BPgAoAH/RIRET0Ki4weGhrSHACw82IGh5eIiIj+AouMHno20BVW5iZIv/8AF9LypI5DRESkt1hk9JCluQn6tnUDAOyI4/ASERHRw7DI6KmhmrOXdidkokLN4SUiIqKasMjoqR6tXWBnYYo7BSqcvpErdRwiIiK9xCKjp8xNZRgY5A6g8qBfIiIiqo5FRo8N7VA5vLT3UhZKy9USpyEiItI/LDJ6rFtLJzjbyJFXXIYTyXeljkNERKR3WGT0mIlMwHPBlcNLOzi8REREVA2LjJ4bElJZZH65nIWSsgqJ0xAREekXFhk995RXMzR3sERRaQUOJ+ZIHYeIiEivsMjoOZlMwHMhPHuJiIioJiwyBmBIcOXZS4cTc1BQUiZxGiIiIv3BImMA2nnYoaWzNVTlahz4LVvqOERERHqDRcYACIKAIZpbFnB4iYiI6HcsMgaiqsj8eu0u7heVSpyGiIhIP7DIGIhWrjZo626HcrWIvZeypI5DRESkF1hkDAiHl4iIiHSxyBiQqqv8nrqRi+z8EonTEBERSY9FxoB4OVqho7cDRBHYHZ8pdRwiIiLJscgYGO3wUjyHl4iIiFhkDMzg9u6QCcCF1Dyk3SuWOg4REZGkWGQMjKudBbq1dALAvTJEREQsMgaoanhpRxyLDBERNW0sMgZoYJACpjIBiVkFuJZdIHUcIiIiybDIGCAHK3P09HcBAOzk2UtERNSESVpkVqxYgeDgYNjZ2cHOzg5hYWHYu3evdn7v3r0hCILOY/r06RIm1h9DQiqvKbPzYgZEUZQ4DRERkTRMpXxzT09PfPjhh2jdujVEUcT333+PYcOG4cKFC2jXrh0AYNq0aXjvvfe0r7GyspIqrl7p21YBuWkCbtwtwuWMfAQ1t5c6EhERUaOTdI/MkCFDMGjQILRu3Rr+/v744IMPYGNjg1OnTmmXsbKygkKh0D7s7OwkTKw/bOSmCG/jCoC3LCAioqZLb46RqaiowIYNG1BUVISwsDDt9HXr1sHZ2RlBQUFYsGABiov/+topKpUK+fn5Og9jNST493svqdUcXiIioqZH0qElAEhISEBYWBhKSkpgY2ODrVu3om3btgCAcePGwcfHBx4eHoiPj8dbb72FpKQkbNmy5aHri4qKwuLFixsrvqT6BLrCRm6KDGUJzqfeR+cWjlJHIiIialSCKPGRoqWlpUhNTYVSqcTmzZvxzTffIDo6Wltm/ujw4cMIDw9HcnIy/Pz8alyfSqWCSqXSPs/Pz4eXlxeUSqVRDkvN+zkOWy7cxoQwH7w3LEjqOERERPUiPz8f9vb2j/z+lnxoydzcHK1atUKnTp0QFRWFkJAQfPbZZzUuGxoaCgBITk5+6Prkcrn2LKiqhzEb0qFyeGlPQibKK9QSpyEiImpckheZP1Or1Tp7VP4oLi4OAODu7t6IifTbM62c4WBlhruFpTh1/Z7UcYiIiBqVpMfILFiwAAMHDoS3tzcKCgqwfv16HD16FPv370dKSgrWr1+PQYMGwcnJCfHx8Zg7dy569uyJ4OBgKWPrFTMTGQYGueOnM6nYcfE2nmntLHUkIiKiRiPpHpmcnBxMmDABAQEBCA8Px9mzZ7F//3707dsX5ubmOHjwIPr164fAwEDMnz8fkZGR2Llzp5SR9dJQzb2X9l3Kgqq8QuI0REREjUfSPTLffvvtQ+d5eXkhOjq6EdMYrq6+jnC1lSOnQIVjV++ib1s3qSMRERE1Cr07RoYen4lMwHOaa8p8few6b1lARERNBouMkXiphy8szGQ4c/Me9l/OkjoOERFRo2CRMRIeDpZ4uUdLAEDU3kQeK0NERE0Ci4wReaWXH1xs5biVW4wfYm5JHYeIiKjBscgYEWu5Kd7oFwAA+OzQNdwrKpU4ERERUcNikTEykZ080cbdDgUl5Vh26JrUcYiIiBoUi4yRMZEJ+NfgNgCAH07dQnJOocSJiIiIGg6LjBHq3soZEW1cUaEWEbXnitRxiIiIGgyLjJFaMKgNTGUCDiXm4Pi1
u1LHISIiahAsMkbKz8UGL3TzAQC8v/s3VKh5kTwiIjI+LDJG7LXw1rCzMEViVgE2x6ZJHYeIiKjescgYsWbW5vh7eGsAwP/9chWFqnKJExEREdUvFhkjNyGsBVo4WeFOgQpfRadIHYeIiKhescgYOXNTGf4xsPJ07FXHruN23gOJExEREdUfFpkmoH87N3T1dYSqXI1P9iVKHYeIiKjesMg0AYIg4J3BbQEA2+IyEJeWJ20gIiKiesIi00S097THyI7NAQDv7/oNosjTsYmIyPCxyDQhb/QPgIWZDOdu3cfeS1lSxyEiInpiLDJNiLu9JV7p6QcAiNp7BaryCokTERERPRkWmSbmlV4t4WorR9q9B1hz4qbUcYiIiJ4Ii0wTY2Vuijf6BwAAvjicjNxClcSJiIiI6o5FpgmK7OiJdh52KFCV49OD16SOQ0REVGcsMk2QTCbg7cGVF8lbfyYV17ILJE5ERERUNywyTdTTfs7o29YNFWoR/9lzReo4REREdcIi04QtGBgIU5mAI0l3cOzqHanjEBERPTYWmSaspYsNXgzzAQB8sPsKKtS8SB4RERkWFpkm7rXw1rC3NENSdgF+PpsmdRwiIqLHwiLTxDlYmeO18NYAgCUHklBQUiZxIiIiotpjkSG80M0Hvs7WuFtYihVHU6SOQ0REVGssMgRzUxkWDAwEAHxz/AbS7xdLnIiIiKh2WGQIANC3rRu6tXREabkaH+9LkjoOERFRrbDIEABAEAT8a3BbCAKw42IGYm/dkzoSERHRI7HIkFZQc3tEdvQEALy8NhZJWbziLxER6TdJi8yKFSsQHBwMOzs72NnZISwsDHv37tXOLykpwaxZs+Dk5AQbGxtERkYiOztbwsTG753BbdHOww65RaUY+/UpXMnMlzoSERHRQ0laZDw9PfHhhx8iNjYW586dw7PPPothw4bh8uXLAIC5c+di586d2LRpE6Kjo5GRkYGRI0dKGdno2VuZYf1L3dC+uT3uFZVi3NencDlDKXUsIiKiGgmiKOrV5VwdHR3xySefYNSoUXBxccH69esxatQoAEBiYiLatGmDmJgYdOvWrVbry8/Ph729PZRKJezs7BoyulFRPijDhG9P42K6Eg5WZvhxaiiCmttLHYuIiJqI2n5/680xMhUVFdiwYQOKiooQFhaG2NhYlJWVISIiQrtMYGAgvL29ERMT89D1qFQq5Ofn6zzo8dlbmuGHl0LRwcsBecVlGPf1KSSkc88MERHpF8mLTEJCAmxsbCCXyzF9+nRs3boVbdu2RVZWFszNzeHg4KCzvJubG7Kysh66vqioKNjb22sfXl5eDfwTGC87CzP8MLUrOno7IL+kHOO+OYWLaXlSxyIiItKSvMgEBAQgLi4Op0+fxowZMzBx4kT89ttvdV7fggULoFQqtY+0NN4/6EnYWphh7dRQdPZphoKScrzwzWlcSL0vdSwiIiIAelBkzM3N0apVK3Tq1AlRUVEICQnBZ599BoVCgdLSUuTl5eksn52dDYVC8dD1yeVy7VlQVQ96MjZyU6yZ0hVdWziiQFWOF789g9hbLDNERCQ9yYvMn6nVaqhUKnTq1AlmZmY4dOiQdl5SUhJSU1MRFhYmYcKmyUZuitWTuyDU1xGFqnJM+PY0zt3kRfOIiEhakhaZBQsW4NixY7h58yYSEhKwYMECHD16FOPHj4e9vT2mTp2KefPm4ciRI4iNjcXkyZMRFhZW6zOWqH5Za8pMWEsnFJVWYMJ3Z3DmBssMERFJR9Iik5OTgwkTJiAgIADh4eE4e/Ys9u/fj759+wIAli5diueeew6RkZHo2bMnFAoFtmzZImXkJs/K3BTfTeqCZ1o5o7i0ApNWn8Gp67lSxyIioiZK764jU994HZmGUVJWgWlrz+HXa3dhaWaCbyd1xtN+zlLHIiIiI2Fw15Ehw2JhZoKvJ3RGL38XPCirwJQ1Z3Ei+a7UsYiIqIlhkaE6szAzwVcvdkKfABeUlKkxZc1ZHLt6R+pYRETUhLDI0BOxMDPByhc7IaKNK1Tlary09hyOJuVIHYuIiJoIFhl6YnJTEywf3wl927qhtFyNl9fG4kgiywwRETU8FhmqF+amMnw5riP6t3NDaYUar/wQi0NXsqWORURERo5FhuqNuakMX4zriEHtFSitUGP6j7HYdylT6lhERGTEWGSoXpmZyPDZmKcwONgdZRUiZqw7jzUnbkgdi4iIjBSLDNU7MxMZPnu+A8aHekMUgUU7f8MHu3+DWm3UlywiIiIJsMhQgzA1keH94UF4c0AAAODrX2/g1Q0XUFJWIXEyIiIyJnUqMmlpaUhPT9c+P3PmDObMmYNVq1bVWzAyfIIgYGbvVvj0+Q4wMxGwOz4TL357GnnFpVJHIyIiI1GnIjNu3DgcOXIEAJCVlYW+ffvizJkzePvtt/Hee+/Va0AyfMOfao7vJ3eFrdwUZ2/eR+SKk0i7Vyx1LCIiMgJ1KjKXLl1C165dAQAbN25EUFAQTp48iXXr1mHNmjX1mY+MxNOtnLF5xtNwt7dAyp0ijFh+EgnpSqljERGRgatTkSkrK4NcLgcAHDx4EEOHDgUABAYGIjOTp9tSzQIUttg6szsCFba4W6jC86tieOE8IiJ6InUqMu3atcPKlSvx66+/4sCBAxgwYAAAICMjA05OTvUakIyLwt4Cm6aH4ZlWzigurcBLa8/hpzOpUsciIiIDVaci89FHH+Grr75C7969MXbsWISEhAAAduzYoR1yInoYWwszfDepC0Z2bI4KtYgFWxLw31+SIIo8PZuIiB6PINbx26OiogL5+flo1qyZdtrNmzdhZWUFV1fXegv4pPLz82Fvbw+lUgk7Ozup49AfiKKIpQeuYtnhZADAyI7N8eHIYJib8qoARERNXW2/v+v0jfHgwQOoVCptibl16xY+/fRTJCUl6VWJIf0mCALm9QvAhyPbw0QmYMv525iy5izyS8qkjkZERAaiTkVm2LBhWLt2LQAgLy8PoaGh+O9//4vhw4djxYoV9RqQjN+Yrt74ZmJnWJmb4HjyXYxeGYMsZYnUsYiIyADUqcicP38ePXr0AABs3rwZbm5uuHXrFtauXYtly5bVa0BqGvoEuGLjK2FwsZUjMasAI5afQGJWvtSxiIhIz9WpyBQXF8PW1hYA8Msvv2DkyJGQyWTo1q0bbt26Va8BqekIam6PLTOeRitXG2QqS/C3FTE4kXxX6lhERKTH6lRkWrVqhW3btiEtLQ379+9Hv379AAA5OTk8oJaeiJejFf43/Wl09XVEgaock1afwZbz6Y9+IRERNUl1KjLvvvsuXn/9dbRo0QJdu3ZFWFgYgMq9M0899VS9BqSmx97KDGundMXgYHeUVYiYt/Eivjh8jadnExFRNXU+/TorKwuZmZkICQmBTFbZh86cOQM7OzsEBgbWa8gnwdOvDZdaLeLDfYlYdew6AGB0Z098MKI9zEx4ejYRkbGr7fd3nYtMlaq7YHt6ej7JahoMi4zhWxtzE4t2XIZaBLq3csLy8Z1gb2kmdSwiImpADXodGbVajff
eew/29vbw8fGBj48PHBwc8O9//xtqtbrOoYlqMiGshfb07BPJufjbypNIv8+7ZxMRUR2LzNtvv40vvvgCH374IS5cuIALFy7gP//5Dz7//HO888479Z2RCM8GumHjK2FwtZXjanYhRiw/ifj0PKljERGRxOo0tOTh4YGVK1dq73pdZfv27Zg5cyZu375dbwGfFIeWjEtG3gNMWXMWiVkFsDQzwWdjOqBfO4XUsYiIqJ416NDSvXv3ajygNzAwEPfu3avLKolqxcPBEpumh6GnvwselFXglR9j8d3xG1LHIiIiidSpyISEhOCLL76oNv2LL75AcHDwE4ci+iu2Fmb4dmJnjO3qDVEE3tv1GxbtuIwKNU/PJiJqakzr8qKPP/4YgwcPxsGDB7XXkImJiUFaWhr27NlTrwGJamJmIsN/RgTBx8kKH+5NxJqTN5F+/wGWje0AK/M6fayJiMgA1WmPTK9evXD16lWMGDECeXl5yMvLw8iRI3H58mX88MMP9Z2RqEaCIGB6Lz98Oa4jzE1lOHglG89/dQo5+bzhJBFRU/HE15H5o4sXL6Jjx46oqKior1U+MR7s2zTE3rqHaWtjca+oFM0dLPHdpC4IUNhKHYuIiOqoQQ/2JdI3nXwcsXXm02jpbI3beQ8wasVJHL/GG04SERk7SYtMVFQUunTpAltbW7i6umL48OFISkrSWaZ3794QBEHnMX36dIkSkz7zcbLG/2Y8ja4tfr/h5MazaVLHIiKiBiRpkYmOjsasWbNw6tQpHDhwAGVlZejXrx+Kiop0lps2bRoyMzO1j48//liixKTvmlmb44eXumJYBw+Uq0W8+b94fLI/EWqe0UREZJQe6/SOkSNH/uX8vLy8x3rzffv26Txfs2YNXF1dERsbi549e2qnW1lZQaHgRc+oduSmJvj0+Q7wcbTCssPJ+PJIClLvPcAno4JhYWYidTwiIqpHj7VHxt7e/i8fPj4+mDBhQp3DKJVKAICjo6PO9HXr1sHZ2RlBQUFYsGABiosffp8dlUqF/Px8nQc1PYIgYF6/AHw8KhimMgE7L2bgxW9P415RqdTRiIioHtXrWUtPQq1WY+jQocjLy8Px48e101etWgUfHx94eHggPj4eb731Frp27YotW7bUuJ5FixZh8eLF1abzrKWm60TyXUz/MRYFJeVo7mCJFS90RLCng9SxiIjoL9T2rCW9KTIzZszA3r17cfz4cXh6ej50ucOHDyM8PBzJycnw8/OrNl+lUkGlUmmf5+fnw8vLi0WmibuWXYCX1p7DrdximJvI8M6Qtngh1BuCIEgdjYiIamBQp1/Pnj0bu3btwpEjR/6yxABAaGgoACA5ObnG+XK5HHZ2djoPotZuttgx+xn0a+uG0go13tl2CXN/jkNxabnU0YiI6AlIWmREUcTs2bOxdetWHD58GL6+vo98TVxcHADA3d29gdORsbG3NMNXL3bC24PawEQmYFtcBoZ9cQLJOYVSRyMiojqSdGhp5syZWL9+PbZv346AgADtdHt7e1haWiIlJQXr16/HoEGD4OTkhPj4eMydOxeenp6Ijo6u1Xvwyr5UkzM37mH2+vPIKVDBytwEH0UGY0iIh9SxiIhIwyCOkXnY8QmrV6/GpEmTkJaWhhdeeAGXLl1CUVERvLy8MGLECPzrX/+qdSlhkaGHySkowWs/xSHmei4AYNLTLfDPQW1gbqoXI65ERE2aQRSZxsAiQ3+lvEKNJQeuYvnRFABABy8HfDm+I5o7WEqcjIioaTOog32JpGJqIsObAwLx7cTOsLMwRVxaHp5b9iuir96ROhoREdUCiwwRgPA2btj99x5o39we94vLMGn1GSw5cBUVvLUBEZFeY5Eh0vBytMKm6WEYH+oNUQSWHbqGSavPILdQ9egXExGRJFhkiP7AwswEH4xojyWjQ2BpZoJfr93Fc58fR+yt+1JHIyKiGrDIENVgZEdPbJvVHS1drJGpLMHzX8Xgu+M3YOTHxhMRGRwWGaKHCFBUXg14cLA7ytUi3tv1G2avv4CCkjKpoxERkQaLDNFfsJGb4ouxT2HRkLYwMxGwOyETw744gaSsAqmjERERWGSIHkkQBEzq7oufXwmDh70Frt8twrAvj2NzbLrU0YiImjwWGaJa6ujdDLv+3gM9/V1QUqbG65su4q3N8Sgpq5A6GhFRk8UiQ/QYHK3NsWZSF8zr6w9BAH4+l4YRy0/ixt0iqaMRETVJLDJEj0kmE/D38Nb4cWoonG3McSUzH0M+P469CZlSRyMianJYZIjqqHsrZ+z+ew90beGIQlU5Zqw7j/d2/obScrXU0YiImgwWGaIn4GZngfXTQvFKr5YAgO9O3MDzq2JwO++BxMmIiJoGFhmiJ2RqIsOCgW3w9YTKG09eSM3D4GW/4khSjtTRiIiMHosMUT3p2/b3G0/mFZdh8uqz+L/9SbzxJBFRA2KRIapHXo5W2DwjDC928wEAfHEkGS98cxo5BSUSJyMiMk4sMkT1TG5qgn8PD8JnYzrAytwEMddzMXjZcZy6nit1NCIio8MiQ9RAhnVojh2zn4G/mw3uFKgw7utTWH40GWoONRER1RsWGaIG1MrVBttmdcfIp5pDLQIf70vCtLXnkFdcKnU0IiKjwCJD1MCszE3x39EhiBrZHuamMhxKzMHgZccRl5YndTQiIoPHIkPUCARBwNiu3tgy42n4OFnhdt4D/G3lSXwVnYKyCl5Aj4iorlhkiBpRUHN77Hz1GfRv54ayChFRexMx5PPjOHfzntTRiIgMEosMUSOzszDDyhc64aPI9nCwMkNiVgFGrYzBG5suIrdQJXU8IiKDwiJDJAFBEPB8F28cnt8bz3f2AgBsik3Hs/+NxvrTqTyziYiolgRRFI36b8z8/HzY29tDqVTCzs5O6jhENYq9dQ9vb72ExKwCAEAHLwe8PzwIQc3tJU5GRCSN2n5/s8gQ6YnyCjW+j7mFJb8koai0AjIBmBDWAvP6+cPOwkzqeEREjaq2398cWiLSE6YmMkx9xheH5vfGc8HuUIvAmpM3Ef7faGyPuw0j/zcHEVGdsMgQ6RmFvQW+GNcRP0ztCl9na9wpUOG1DXEY/81pJOcUSh2PiEivsMgQ6akerV2wb04PzO/rD7mpDCdTcjHws2P4ZH8iHpRWSB2PiEgvsMgQ6TG5qQleDW+NA3N7oU+AC8oqRHx5JAURS6Jx8LdsqeMREUmORYbIAHg7WeG7SV2w8oVO8LC3wO28B3hp7Tm89P05pN8vljoeEZFkWGSIDIQgCBgQpMCBeb3wSq+WMJUJOHglGxFLovH5oWsoKeNwExE1PTz9mshAXc0uwL+2XcKZG5W3N3C3t8CbAwIwLKQ5ZDJB4nRERE/GIE6/joqKQpcuXWBrawtXV1cMHz4cSUlJOsuUlJRg1qxZcHJygo2NDSIjI5GdzWMDiPzdbPHzy93w2ZgOaO5giUxlCeb+fBHDl5/QlhsiImMnaZGJjo7GrFmzcOrUKRw4cABlZWXo168fioqKtMvMnTsXO3fuxKZNmxAdHY2MjAyMHDlSwt
RE+kMQBAzr0ByH5vfCG/0DYG1ugvh0JUZ/FYPpP8Ti5t2iR6+EiMiA6dXQ0p07d+Dq6oro6Gj07NkTSqUSLi4uWL9+PUaNGgUASExMRJs2bRATE4Nu3bo9cp0cWqKm5E6BCksPXsWGM6lQi4CZiYAXu7XA38NbwcHKXOp4RES1ZhBDS3+mVCoBAI6OjgCA2NhYlJWVISIiQrtMYGAgvL29ERMTU+M6VCoV8vPzdR5ETYWLrRz/GdEee1/riV7+ladrf3fiBnp9chTfHb+B0nK11BGJiOqV3hQZtVqNOXPmoHv37ggKCgIAZGVlwdzcHA4ODjrLurm5ISsrq8b1REVFwd7eXvvw8vJq6OhEeidAYYvvp3TF91O6wt/NBsoHZXhv12/otzQa+y9n8XYHRGQ09KbIzJo1C5cuXcKGDRueaD0LFiyAUqnUPtLS0uopIZHh6eXvgj1/74H/jGgPZxtz3Mwtxis/xOL5VaeQkK6UOh4R0RPTiyIze/Zs7Nq1C0eOHIGnp6d2ukKhQGlpKfLy8nSWz87OhkKhqHFdcrkcdnZ2Og+ipszURIZxod44+kYfzOrjB7mpDGdu3MOQL45j3s9xyMh7IHVEIqI6k7TIiKKI2bNnY+vWrTh8+DB8fX115nfq1AlmZmY4dOiQdlpSUhJSU1MRFhbW2HGJDJqN3BRv9A/E4dd7Y3gHDwDAlgu30ef/juK/vyShSFUucUIioscn6VlLM2fOxPr167F9+3YEBARop9vb28PS0hIAMGPGDOzZswdr1qyBnZ0dXn31VQDAyZMna/UePGuJqGYX0/Lw/u7fcPbmfQCAs40cc/u2xujOXjAz0YudtUTUhNX2+1vSIiMINV99dPXq1Zg0aRKAygvizZ8/Hz/99BNUKhX69++P5cuXP3Ro6c9YZIgeThRF7L+chai9ibiVW3nPJh8nK8yJaI2hIc1hwisEE5FEDKLINAYWGaJHU5VXYN2pVHx5JBm5RaUAgNauNpjfzx/92yke+o8OIqKGwiKjwSJDVHtFqnKsOXkTX0WnIL+k8piZoOZ2mN8vAL39XVhoiKjRsMhosMgQPT7lgzJ88+t1fHf8BopKK++q3aVFM8zvF4BuLZ0kTkdETQGLjAaLDFHd5RaqsOJoCtaeuqW9KnCP1s54vV8AQrwcpA1HREaNRUaDRYboyWUpS/D54Wv4+WwaytWVf2X0beuG+f38Eajg/1dEVP9YZDRYZIjqT9q9Ynx68Bq2XkiHWgQEARgS7IG5ff3h62wtdTwiMiIsMhosMkT1LzmnAEsPXMPuhEwAgIlMwKiOnvh7RGs0d7CUOB0RGQMWGQ0WGaKGc+m2EksOXMXhxBwAgLnmdggz+/jB1dZC4nREZMhYZDRYZIgaXuyt+/jvL0k4mZILALA0M8Hk7i3wSk8/2FuZSZyOiAwRi4wGiwxR4zmZfBef/JKEC6l5AABbC1NM7+WHyd1bwMrcVNpwRGRQWGQ0WGSIGpcoijh0JQf/90sSErMKAFTex2l2Hz+MDfWG3NRE4oREZAhYZDRYZIikoVaL2BmfgSUHrmrv49TcwRJzIlpjZEdP3seJiP4Si4wGiwyRtMoq1Nh4Lg3LDl1Ddr4KANDK1Qbz+/pjQBDv40RENWOR0WCRIdIPJWUVWBtzE8uPpiCvuAwA0L65Pd7oH4AerZ1ZaIhIB4uMBosMkX7JLynDN7/ewLe/XtfexynU1xFvDghEJ59mEqcjIn3BIqPBIkOkn3ILVVh+NAU//OE+ThFtXDG/XwDauPP/VaKmjkVGg0WGSL9l5D3AskPXsCk2HRVqUXvbg3l9/dGCtz0garJYZDRYZIgMw/U7hVhy4Cp2xf9+24ORTzXHtJ4t4e9mK3E6ImpsLDIaLDJEhuXSbSX++0sSjiTd0U7r5e+Cl3r44plWPCiYqKlgkdFgkSEyTLG37uObX69j/+UsqDV/SwUqbDH1GV8M7eDBC+sRGTkWGQ0WGSLDlppbjO9O3MDGc2ko1pzl5Gwjx8QwH4zv5gNHa3OJExJRQ2CR0WCRITIOygdl+OlMKtacuIms/BIAgIWZDJEdPTHlGV/4udhInJCI6hOLjAaLDJFxKatQY09CJr7+9Tou3c7XTg8PdMVLPVqiW0tHHkdDZARYZDRYZIiMkyiKOH3jHr759ToOXsnRTm/nYYeXevhicHsPmJvKJExIRE+CRUaDRYbI+F2/U4jvTtzA5th0lJRVXlxPYWeBiU+3wLiu3rC3MpM4IRE9LhYZDRYZoqbjflEp1p2+he9jbuFOQeUNKq3MTTCqkyde6ObD69EQGRAWGQ0WGaKmR1VegZ0XM/HNr9eRmFWgnR7q64gXw3zQr62Cw05Eeo5FRoNFhqjpEkURJ5Jz8cOpmzjwW7b2ejQutnKM7eKFsaHecLe3lDYkEdWIRUaDRYaIACBT+QA/nUnDT2dStcNOMgGIaOOGF8N80N3PGTIZz3Yi0hcsMhosMkT0R2UVavxyORs/nLqJU9fvaaf7OltjfKg3/tbJiwcHE+kBFhkNFhkiephr2QVYdzoV/4tNR4GqHEDlRfaGhnjgxW4t0N7TXuKERE0Xi4wGiwwRPUqRqhzb4zKwNuamzsHBIZ72eKGbD4aEeMDCjPd2ImpMLDIaLDJEVFuiKOJ86n38EHMLexKyUFpReU0ae0szjO7sifGhPmjhbC1xSqKmgUVGg0WGiOoit1CFjefSse70LaTff6Cd3tPfBRO6+aBPoCtMeHAwUYOp7fe3pBdSOHbsGIYMGQIPDw8IgoBt27bpzJ80aRIEQdB5DBgwQJqwRNSkONnIMaO3H6Lf6IPvJnVGnwAXCAJw7OodvLT2HHp+fATLjyYjt1AldVSiJs1UyjcvKipCSEgIpkyZgpEjR9a4zIABA7B69Wrtc7lc3ljxiIhgIhPwbKAbng10Q2puMdadvoWfz6Xhdt4DfLwvCZ8euIZB7RV4MawFOno78IaVRI1M0iIzcOBADBw48C+XkcvlUCgUjZSIiOjhvJ2ssGBQG8zt64/d8ZlYe+oWLqblYVtcBrbFZaCtux0mhPlgaAcPWJlL+tcrUZOh99foPnr0KFxdXREQEIAZM2YgNzf3L5dXqVTIz8/XeRAR1ScLMxNEdvLE9lndsWN2d/ytkyfkpjL8lpmPf2xJQOh/DmHxzsu4fqdQ6qhERk9vDvYVBAFbt27F8OHDtdM2bNgAKysr+Pr6IiUlBf/85z9hY2ODmJgYmJjUfCrkokWLsHjx4mrTebAvETWkvOJSbDqXjh9P38Kt3GLt9GdaOePFMB+EB7rC1ETv/+1IpDcM7qylmorMn12/fh1+fn44ePAgwsPDa1xGpVJBpfr94Lv8/Hx4eXmxyBBRo1CrRRy7dgc/nrqFQ4k5qPob1t3eAuO6emNMV2+42PJYP6JHqW2RMahB3JYtW8LZ2RnJyckPLTJyuZwHBBORZGQyAb0DXNE7wBVp94qx/kwqfj6bhkxlCf574CqWHb6Gfu0UGNvFG0/7OfH+TkRPyKCKTHp6OnJzc+Hu7i51FCKiR/JytMJbAwIxJ6I19
iRk4oeYWzifmofd8ZnYHZ8Jz2aWGN3ZC3/r7Mm7cBPVkaRDS4WFhUhOTgYAPPXUU1iyZAn69OkDR0dHODo6YvHixYiMjIRCoUBKSgrefPNNFBQUICEhodZ7XXhBPCLSJ5duK/Hz2TRsi7uNgpLK+zvJBKCXvwue7+KN8DauMOOxNESGcYzM0aNH0adPn2rTJ06ciBUrVmD48OG4cOEC8vLy4OHhgX79+uHf//433Nzcav0eLDJEpI8elFZg76VMbDibhjM3fr8Lt7ONOSI7emJ0Fy/4udhImJBIWgZRZBoDiwwR6bvrdwqx8Vw6Nsem4+4frhTctYUjRnfxwuD27rA0500rqWlhkdFgkSEiQ1FWocbhxBxsPJuGI0k5UGv+draVm2JoBw+M6eKNoOZ2vHowNQksMhosMkRkiLKUJdgcm4afz6Uh7d7vN61s626HMV29MCykOeytzCRMSNSwWGQ0WGSIyJCp1SJOXc/FhrNp2HcpC6UVagCA3FSGQe3dMfHpFujg5SBtSKIGwCKjwSJDRMbiflEptsXdxs9n05CYVaCd3sHLAZO7t8DAIHeYm/KMJzIOLDIaLDJEZGxEUcTFdCXWxtzErouZ2r00LrZyjA/1xrhQb7jaWkickujJsMhosMgQkTG7U6DCT2dS8eOpW8gpqDzjycxEwHPBHhx2IoPGIqPBIkNETUFpuRr7LmdhzYkbOJ+ap53OYScyVCwyGiwyRNTUxKfnYc1JDjuRYWOR0WCRIaKmisNOZMhYZDRYZIioqeOwExkiFhkNFhkiot/VNOzkZG2O54LdMeyp5njKy4FXDia9wCKjwSJDRFRdTcNOAODjZIVhIR4Y9lRz3rSSJMUio8EiQ0T0cGUVahxPvovtF25j/+VsPCir0M5r39wewzp4YGiIB1zteIAwNS4WGQ0WGSKi2ikuLceB37Kx7cJtHLt2FxWau1bKBOBpP2cM6+CBAUEK2FrwHk/U8FhkNFhkiIgeX26hCrsTMrHtwm2dA4TlpjJEtHHD8Keao5e/Cw8SpgbDIqPBIkNE9GRSc4uxPe42tsXdRsqdIu10ByszDGrvjuEdmqOzTzPIZDxImOoPi4wGiwwRUf0QRRGXM/Kx9cJt7LyYoXOQcHMHSwx/ygPPd/aGt5OVhCnJWLDIaLDIEBHVvwq1iJiUXGyLu419l7JQqCrXzuvR2hnjunojoq0bzEw49ER1wyKjwSJDRNSwSsoqcOhKDjacTcWv1+5qp7vYyjG6syfGdPGGlyP30tDjYZHRYJEhImo8qbnF2HA2FRvPpeNuYeXQkyAAPVu7YGxXb4S3ceVeGqoVFhkNFhkiosZXWq7GwSvZ+OmM7l4aV1s5nu/ihee7eMGzGffS0MOxyGiwyBARSetWbhF+OpOGzbFpuFtYCqByL00vfxeM6+qNZwNdYcq9NPQnLDIaLDJERPqhtFyNA79lY/2ZWziRnKudrrCzwGjNXprmDpYSJiR9wiKjwSJDRKR/btwtwoazqdh8Lh25RZV7aWQC0DvAFaM7eyLMzxn2lryCcFPGIqPBIkNEpL9U5RX45XI21p9ORcz13/fSyASgnYc9nvZzQjc/J3Rp4QgbuamESamxschosMgQERmG63cKseFsGg5eycb1P1xBGABMZAJCPO0R5ueEsJbO6OTTDJbmJhIlpcbAIqPBIkNEZHiy80sQk5KLmJRcnLx+F2n3HujMNzeRoYO3A8JaOiHMzwlPeTtAbspiY0xYZDRYZIiIDF/6/WJtsYm5notMZYnOfLmpDJ1bNNMUG2cEe9rzejUGjkVGg0WGiMi4iKKIW7nFOKkpNTEpudqL71WxNjdBaEsnDAhSoH9bBeyteOCwoWGR0WCRISIybqIoIjmnUFtqYq7nIq+4TDvfzERA91bOGNzeHf1YagwGi4wGiwwRUdOiVotIzCrAwSvZ2JOQicSsAu08MxMBz7RyxiCWGr3HIqPBIkNE1LQl5xRiT0ImS42BYZHRYJEhIqIqjyo1g4M90LetGy/Gpwdq+/0t6SHdx44dw5AhQ+Dh4QFBELBt2zad+aIo4t1334W7uzssLS0RERGBa9euSROWiIgMXitXG/w9vDX2zemJg/N6YV5ffwS42aKsQsSRpDt4fdNFdH7/ACavPoPNselQPih79EpJUpIWmaKiIoSEhODLL7+scf7HH3+MZcuWYeXKlTh9+jSsra3Rv39/lJSU1Lg8ERFRbVWVmv1z/7rUTFlzlqVGj+nN0JIgCNi6dSuGDx8OoHJvjIeHB+bPn4/XX38dAKBUKuHm5oY1a9ZgzJgxtVovh5aIiOhxJOcUYHd8FvYkZCIpW3f4qUdrFwxu744IDj81uNp+f+vtjStu3LiBrKwsREREaKfZ29sjNDQUMTExDy0yKpUKKtXv1xPIz89v8KxERGQ8Wrna4rUIW7wW0bpaqTmcmIPDiTkwMxHQs7ULBrV3R992brCzYKmRit4WmaysLACAm5ubznQ3NzftvJpERUVh8eLFDZqNiIiahppKze6EDFzNLsShxBwcSsyB+RYZerR2xuDgyj01LDWNS2+LTF0tWLAA8+bN0z7Pz8+Hl5eXhImIiMgY/LHUXMsuwO6ETOyOz8S1nD+UGhMZevpXntLNUtM49LbIKBQKAEB2djbc3d2107Ozs9GhQ4eHvk4ul0Mulzd0PCIiasJau9lijpst5kT442p2AXbHZ2J3QiaScwpx8EoODl75vdQMDnZHRBs32LLUNAi9LTK+vr5QKBQ4dOiQtrjk5+fj9OnTmDFjhrThiIiINPzdbOHf1xZz+z6q1LjguWB3hLdxZampR5IWmcLCQiQnJ2uf37hxA3FxcXB0dIS3tzfmzJmD999/H61bt4avry/eeecdeHh4aM9sIiIi0idVpWZORGtczS7UDD9lIOVOEQ5eycbBK9kwN5Whl7bUuMFGrrf7FAyCpKdfHz16FH369Kk2feLEiVizZg1EUcTChQuxatUq5OXl4ZlnnsHy5cvh7+9f6/fg6ddERCQlURSRlF2APfGZ2JWQiet3irTzzE1l6O3vgsEsNdXwFgUaLDJERKQvqkrN7vjKA4Wv3/291MhNZegd4ILBwR4ID3SFdRMvNSwyGiwyRESkj0Sx8i7dVcfU3PhTqekT4IrBwe54tomWGhYZDRYZIiLSd6Io4kpmAXYnZGB3fCZu5hZr51mY6ZYaK/OmUWpYZDRYZIiIyJCIoojfMvO1e2pu/anUPBvoigFB7ugT4GLUZz+xyGiwyBARkaESRRGXM/K1F99Lvfd7qTE3keGZ1s4Y0E6BiLZucLQ2lzBp/WOR0WCRISIiYyCKIi7dzsfeS5nYdylL50BhmQCE+jphQJAC/dq5wd3eUsKk9YNFRoNFhoiIjI0oikjOKcS+S1nYdzkLlzN0b5DcwcsBA4IUGNBOgRbO1hKlfDIsMhosMkREZOzS7hVj/+Us7LuUhdjU+/jjN3ugwhb92ykwIEiBQIUtBEGQLuhjYJHRYJEhIqKmJCe/BL/8lo39l7NwMiUXFerfv+Z9nKwwQFNqQjwdIJPpb6lh
kdFgkSEioqYqr7gUh67kYN/lLBy7egeqcrV2noutHB29HRDi5YAOng4I8rTXq7t1s8hosMgQEREBRapyRF+9g32XsnA4MQeFqnKd+YIAtHS2riw2Xg4I9nRAG3dbyE1NJMnLIqPBIkNERKRLVV6BC6l5iE/Pw8V0JS6m5SH9/oNqy5mZCGjrbodgT82eGy97tHS2aZQhKRYZDRYZIiKiR8stVCE+XYm4tDxcTM9DfLoS94pKqy1nIzdF++b2CPFyQIhn5X/d7S3q/SBiFhkNFhkiIqLHJ4oi0u8/QFyaZs9NmhIJt5V4UFZRbdk3+gdgVp9W9fr+tf3+bho3bCAiIqLHIggCvByt4OVohSEhHgCA8go1ku8U4mJaHuLSlIhPz0NiVgEC3Gwly8kiQ0RERLViaiJDoMIOgQo7PN+lctqD0grIZBJmku6tiYiIyNBZmktzVlMVCTsUERER0ZNhkSEiIiKDxSJDREREBotFhoiIiAwWiwwREREZLBYZIiIiMlgsMkRERGSwWGSIiIjIYLHIEBERkcFikSEiIiKDxSJDREREBotFhoiIiAwWiwwREREZLKO/+7UoigCA/Px8iZMQERFRbVV9b1d9jz+M0ReZgoICAICXl5fESYiIiOhxFRQUwN7e/qHzBfFRVcfAqdVqZGRkwNbWFoIg1Nt68/Pz4eXlhbS0NNjZ2dXbeqk6buvGwe3cOLidGwe3c+NoyO0siiIKCgrg4eEBmezhR8IY/R4ZmUwGT0/PBlu/nZ0d/ydpJNzWjYPbuXFwOzcObufG0VDb+a/2xFThwb5ERERksFhkiIiIyGCxyNSRXC7HwoULIZfLpY5i9LitGwe3c+Pgdm4c3M6NQx+2s9Ef7EtERETGi3tkiIiIyGCxyBAREZHBYpEhIiIig8UiQ0RERAaLRaaOvvzyS7Ro0QIWFhYIDQ3FmTNnpI5kVBYtWgRBEHQegYGBUscyCseOHcOQIUPg4eEBQRCwbds2nfmiKOLdd9+Fu7s7LC0tERERgWvXrkkT1oA9ajtPmjSp2md8wIAB0oQ1UFFRUejSpQtsbW3h6uqK4cOHIykpSWeZkpISzJo1C05OTrCxsUFkZCSys7MlSmy4arOte/fuXe0zPX369AbPxiJTBz///DPmzZuHhQsX4vz58wgJCUH//v2Rk5MjdTSj0q5dO2RmZmofx48flzqSUSgqKkJISAi+/PLLGud//PHHWLZsGVauXInTp0/D2toa/fv3R0lJSSMnNWyP2s4AMGDAAJ3P+E8//dSICQ1fdHQ0Zs2ahVOnTuHAgQMoKytDv379UFRUpF1m7ty52LlzJzZt2oTo6GhkZGRg5MiREqY2TLXZ1gAwbdo0nc/0xx9/3PDhRHpsXbt2FWfNmqV9XlFRIXp4eIhRUVESpjIuCxcuFENCQqSOYfQAiFu3btU+V6vVokKhED/55BPttLy8PFEul4s//fSTBAmNw5+3syiK4sSJE8Vhw4ZJksdY5eTkiADE6OhoURQrP7tmZmbipk2btMtcuXJFBCDGxMRIFdMo/Hlbi6Io9urVS3zttdcaPQv3yDym0tJSxMbGIiIiQjtNJpMhIiICMTExEiYzPteuXYOHhwdatmyJ8ePHIzU1VepIRu/GjRvIysrS+Xzb29sjNDSUn+8GcPToUbi6uiIgIAAzZsxAbm6u1JEMmlKpBAA4OjoCAGJjY1FWVqbzeQ4MDIS3tzc/z0/oz9u6yrp16+Ds7IygoCAsWLAAxcXFDZ7F6G8aWd/u3r2LiooKuLm56Ux3c3NDYmKiRKmMT2hoKNasWYOAgABkZmZi8eLF6NGjBy5dugRbW1up4xmtrKwsAKjx8101j+rHgAEDMHLkSPj6+iIlJQX//Oc/MXDgQMTExMDExETqeAZHrVZjzpw56N69O4KCggBUfp7Nzc3h4OCgsyw/z0+mpm0NAOPGjYOPjw88PDwQHx+Pt956C0lJSdiyZUuD5mGRIb00cOBA7Z+Dg4MRGhoKHx8fbNy4EVOnTpUwGVH9GDNmjPbP7du3R3BwMPz8/HD06FGEh4dLmMwwzZo1C5cuXeKxdI3gYdv65Zdf1v65ffv2cHd3R3h4OFJSUuDn59dgeTi09JicnZ1hYmJS7aj37OxsKBQKiVIZPwcHB/j7+yM5OVnqKEat6jPMz3fja9myJZydnfkZr4PZs2dj165dOHLkCDw9PbXTFQoFSktLkZeXp7M8P89197BtXZPQ0FAAaPDPNIvMYzI3N0enTp1w6NAh7TS1Wo1Dhw4hLCxMwmTGrbCwECkpKXB3d5c6ilHz9fWFQqHQ+Xzn5+fj9OnT/Hw3sPT0dOTm5vIz/hhEUcTs2bOxdetWHD58GL6+vjrzO3XqBDMzM53Pc1JSElJTU/l5fkyP2tY1iYuLA4AG/0xzaKkO5s2bh4kTJ6Jz587o2rUrPv30UxQVFWHy5MlSRzMar7/+OoYMGQIfHx9kZGRg4cKFMDExwdixY6WOZvAKCwt1/oV048YNxMXFwdHREd7e3pgzZw7ef/99tG7dGr6+vnjnnXfg4eGB4cOHSxfaAP3VdnZ0dMTixYsRGRkJhUKBlJQUvPnmm2jVqhX69+8vYWrDMmvWLKxfvx7bt2+Hra2t9rgXe3t7WFpawt7eHlOnTsW8efPg6OgIOzs7vPrqqwgLC0O3bt0kTm9YHrWtU1JSsH79egwaNAhOTk6Ij4/H3Llz0bNnTwQHBzdsuEY/T8pIfP7556K3t7dobm4udu3aVTx16pTUkYzK888/L7q7u4vm5uZi8+bNxeeff15MTk6WOpZROHLkiAig2mPixImiKFaegv3OO++Ibm5uolwuF8PDw8WkpCRpQxugv9rOxcXFYr9+/UQXFxfRzMxM9PHxEadNmyZmZWVJHdug1LR9AYirV6/WLvPgwQNx5syZYrNmzUQrKytxxIgRYmZmpnShDdSjtnVqaqrYs2dP0dHRUZTL5WKrVq3EN954Q1QqlQ2eTdAEJCIiIjI4PEaGiIiIDBaLDBERERksFhkiIiIyWCwyREREZLBYZIiIiMhgscgQERGRwWKRISIiIoPFIkNETY4gCNi2bZvUMYioHrDIEFGjmjRpEgRBqPYYMGCA1NGIyADxXktE1OgGDBiA1atX60yTy+USpSEiQ8Y9MkTU6ORyORQKhc6jWbNmACqHfVasWIGBAwfC0tISLVu2xObNm3Ven5CQgGeffRaWlpZwcnLCyy+/jMLCQp1lvvvuO7Rr1w5yuRzu7u6YPXu2zvy7d+9ixIgRsLKyQuvWrbFjx46G/aGJqEGwyBCR3nnnnXcQGRmJixcvYvz48RgzZgyuXLkCACgqKkL//v3RrFkznD17Fps2bcLBgwd1isqKFSswa9YsvPzyy0hISMCOHTvQqlUrnfdYvHgxRo8ejfj4eAwaNAjjx4/HvXv3GvXnJKJ60OC3pSQi+oOJEyeKJiYmorW1tc7jgw8+EEWx8i6706dP13lNaGioOGPGDFEURXHVqlVis2bNxMLCQu383bt3izKZTHv3aA8PD/Htt99+aAYA4r/+9S/
t88LCQhGAuHfv3nr7OYmocfAYGSJqdH369MGKFSt0pjk6Omr/HBYWpjMvLCwMcXFxAIArV64gJCQE1tbW2vndu3eHWq1GUlISBEFARkYGwsPD/zJDcHCw9s/W1taws7NDTk5OXX8kIpIIiwwRNTpra+tqQz31xdLSslbLmZmZ6TwXBAFqtbohIhFRA+IxMkSkd06dOlXteZs2bQAAbdq0wcWLF1FUVKSdf+LECchkMgQEBMDW1hYtWrTAoUOHGjUzEUmDe2SIqNGpVCpkZWXpTDM1NYWzszMAYNOmTejcuTOeeeYZrFu3DmfOnMG3334LABg/fjwWLlyIiRMnYtGiRbhz5w5effVVvPjii3BzcwMALFq0CNOnT4erqysGDhyIgoICnDhxAq+++mrj/qBE1OBYZIio0e3btw/u7u460wICApCYmAig8oyiDRs2YObMmXB3d8dPP/2Etm3bAgCsrKywf/9+vPbaa+jSpQusrKwQGRmJJUuWaNc1ceJElJSUYOnSpXj99dfh7OyMUaNGNd4PSESNRhBFUZQ6BBFRFUEQsHXrVgwfPlzqKERkAHiMDBERERksFhkiIiIyWDxGhoj0Cke7iehxcI8MERERGSwWGSIiIjJYLDJERERksFhkiIiIyGCxyBAREZHBYpEhIiIig8UiQ0RERAaLRYaIiIgMFosMERERGaz/Bx/EuUc8aQGmAAAAAElFTkSuQmCC",
+      "text/plain": [
+       "<Figure size 640x480 with 1 Axes>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
    "source": [
     "import matplotlib.pyplot as plt\n",
-    "\n",
-    "plt.plot(range(n_epochs), train_loss_list)\n",
+    "print(model)\n",
+    "plt.plot(range(len(train_loss_list)), train_loss_list)\n",
     "plt.xlabel(\"Epoch\")\n",
     "plt.ylabel(\"Loss\")\n",
-    "plt.title(\"Performance of Model 1\")\n",
+    "plt.title(\"Performance of Model 2\")\n",
     "plt.show()"
    ]
   },
@@ -350,10 +672,39 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 37,
    "id": "e93efdfc",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "C:\\Users\\xxpod\\AppData\\Local\\Temp\\ipykernel_18828\\3291884398.py:1: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.\n",
+      "  model.load_state_dict(torch.load(\"./model_cifar.pt\"))\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Test Loss: 16.123924\n",
+      "\n",
+      "Test Accuracy of airplane: 81% (810/1000)\n",
+      "Test Accuracy of automobile: 85% (855/1000)\n",
+      "Test Accuracy of  bird: 63% (633/1000)\n",
+      "Test Accuracy of   cat: 52% (525/1000)\n",
+      "Test Accuracy of  deer: 69% (695/1000)\n",
+      "Test Accuracy of   dog: 71% (717/1000)\n",
+      "Test Accuracy of  frog: 77% (772/1000)\n",
+      "Test Accuracy of horse: 77% (772/1000)\n",
+      "Test Accuracy of  ship: 84% (843/1000)\n",
+      "Test Accuracy of truck: 76% (765/1000)\n",
+      "\n",
+      "Test Accuracy (Overall): 73% (7387/10000)\n"
+     ]
+    }
+   ],
    "source": [
     "model.load_state_dict(torch.load(\"./model_cifar.pt\"))\n",
     "\n",
@@ -431,7 +782,63 @@
     "- The first fully connected layer will have an output size of 512.\n",
     "- The second fully connected layer will have an output size of 64.\n",
     "\n",
-    "Compare the results obtained with this new network to those obtained previously."
+    "Compare the results obtained with this new network to those obtained previously.\n",
+    "\n",
+    "ANSWER: The model is built above and named Net_Conv3_Lin3\n",
+    "\n",
+    "\n",
+    "Results for the previous model : \n",
+    "\n",
+    "we osberve overfitting from about the 10nth Epoch - validation loss plateaued at 22 but training loss kept on decreasing to 10 , as can be seen from the training logs:\n",
+    "\n",
+    "Epoch: 7 \tTraining Loss: 23.183946 \tValidation Loss: 24.331222\n",
+    "Validation loss decreased (25.691083 --> 24.331222).  Saving model ...\n",
+    "\n",
+    "Epoch: 8 \tTraining Loss: 22.215979 \tValidation Loss: 23.632853\n",
+    "Validation loss decreased (24.331222 --> 23.632853).  Saving model ...\n",
+    "\n",
+    "Epoch: 9 \tTraining Loss: 21.408623 \tValidation Loss: 23.475442\n",
+    "Validation loss decreased (23.632853 --> 23.475442).  Saving model ...\n",
+    "\n",
+    "Epoch: 10 \tTraining Loss: 20.637072 \tValidation Loss: 23.639358\n",
+    "\n",
+    "Epoch: 11 \tTraining Loss: 19.877338 \tValidation Loss: 22.408472\n",
+    "Validation loss decreased (23.475442 --> 22.408472).  Saving model ...\n",
+    "\n",
+    "Epoch: 12 \tTraining Loss: 19.188079 \tValidation Loss: 23.296445\n",
+    "\n",
+    "Epoch: 13 \tTraining Loss: 18.647543 \tValidation Loss: 22.897815\n",
+    "\n",
+    "Epoch: 14 \tTraining Loss: 17.989626 \tValidation Loss: 22.755968\n",
+    "\n",
+    "the performance is as follow: ![alt text](perf_model_1.png)\n",
+    "\n",
+    "and the final accuries were:\n",
+    "![Accuracies](final_accuracy_first_model.png)\n",
+    "\n",
+    "\n",
+    "SECOND MODEL:\n",
+    "\n",
+    "for the second model, the validation loss goes lower, thougth in addition to the architectural changes, there are also just more weigth and it is longer to train.\n",
+    "\n",
+    "we archieve a valisation loss of 16, and the model is still improving after a larger number of epoch ( 20 vs 10)\n",
+    "\n",
+    "\n",
+    "here are the final accuracies:\n",
+    "Test Loss: 16.123924\n",
+    "\n",
+    "Test Accuracy of airplane: 81% (810/1000)\n",
+    "Test Accuracy of automobile: 85% (855/1000)\n",
+    "Test Accuracy of  bird: 63% (633/1000)\n",
+    "Test Accuracy of   cat: 52% (525/1000)\n",
+    "Test Accuracy of  deer: 69% (695/1000)\n",
+    "Test Accuracy of   dog: 71% (717/1000)\n",
+    "Test Accuracy of  frog: 77% (772/1000)\n",
+    "Test Accuracy of horse: 77% (772/1000)\n",
+    "Test Accuracy of  ship: 84% (843/1000)\n",
+    "Test Accuracy of truck: 76% (765/1000)\n",
+    "\n",
+    "Test Accuracy (Overall): 73% (7387/10000)"
    ]
   },
   {
@@ -500,6 +907,33 @@
     "For each class, compare the classification test accuracy of the initial model and the quantized model. Also give the overall test accuracy for both models."
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1458a562",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "model = Net_3Conv_3lin_Quant()\n",
+    "print(model)\n",
+    "\n",
+    "\n",
+    "#prepare for fusion\n",
+    "model.eval()\n",
+    "#model.qconfig = torch.ao.quantization.get_default_qat_config('86')\n",
+    "#model_fused = torch.ao.quantization.fuse_modules(model,[['conv', 'relu',]])\n",
+    "\n",
+    "model.train()\n",
+    "model_prepared = torch.ao.quantization.prepare_qat(model)\n",
+    "training_loop(model_prepared)\n",
+    "model_prepared.eval()\n",
+    "model_quantized = torch.ao.quantization.convert(model_prepared)\n",
+    "\n",
+    "evaluate(model_quantized)\n",
+    "print_size_of_model(model_quantized)\n",
+    "\n"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "a0a34b90",
@@ -926,7 +1360,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3.8.5 ('base')",
+   "display_name": "Python 3",
    "language": "python",
    "name": "python3"
   },
@@ -940,12 +1374,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.8.5"
-  },
-  "vscode": {
-   "interpreter": {
-    "hash": "9e3efbebb05da2d4a1968abe9a0645745f54b63feb7a85a514e4da0495be97eb"
-   }
+   "version": "3.12.2"
   }
  },
  "nbformat": 4,
diff --git a/final_accuracy_first_model.png b/final_accuracy_first_model.png
new file mode 100644
index 0000000000000000000000000000000000000000..f75b599e2f3cc7da8200291bb1fefe87dc2a4eb4
Binary files /dev/null and b/final_accuracy_first_model.png differ
diff --git a/perf_model_1.png b/perf_model_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..39000fab2d6965fc177cde2bf0b8e672e54ff26e
Binary files /dev/null and b/perf_model_1.png differ