diff --git a/TD2 Deep Learning.ipynb b/TD2 Deep Learning.ipynb
index bbea14bb25bded89328c9bfe90ef28bd1e51a264..771d370e403907d020186c837af2b61f6a3052ca 100644
--- a/TD2 Deep Learning.ipynb	
+++ b/TD2 Deep Learning.ipynb	
@@ -41,8 +41,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Requirement already satisfied: torch in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (2.1.1)Note: you may need to restart the kernel to use updated packages.\n",
-      "\n",
+      "Requirement already satisfied: torch in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (2.1.1)\n",
       "Requirement already satisfied: torchvision in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (0.16.1)\n",
       "Requirement already satisfied: filelock in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (from torch) (3.13.1)\n",
       "Requirement already satisfied: typing-extensions in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (from torch) (4.8.0)\n",
@@ -58,7 +57,8 @@
       "Requirement already satisfied: idna<4,>=2.5 in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (from requests->torchvision) (3.4)\n",
       "Requirement already satisfied: urllib3<3,>=1.21.1 in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (from requests->torchvision) (2.1.0)\n",
       "Requirement already satisfied: certifi>=2017.4.17 in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (from requests->torchvision) (2023.7.22)\n",
-      "Requirement already satisfied: mpmath>=0.19 in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (from sympy->torch) (1.3.0)\n"
+      "Requirement already satisfied: mpmath>=0.19 in c:\\users\\anton\\appdata\\local\\packages\\pythonsoftwarefoundation.python.3.11_qbz5n2kfra8p0\\localcache\\local-packages\\python311\\site-packages (from sympy->torch) (1.3.0)\n",
+      "Note: you may need to restart the kernel to use updated packages.\n"
      ]
     },
     {
@@ -94,34 +94,34 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "tensor([[ 0.4791,  0.3412,  0.1479, -0.3699, -0.3322,  0.9886,  0.9518,  0.8991,\n",
-      "          1.6966,  0.2184],\n",
-      "        [ 0.2185,  1.4511,  0.7483, -0.2283,  0.3967,  1.1607,  1.6207,  1.3050,\n",
-      "          0.5088, -0.2559],\n",
-      "        [-0.7833,  0.6155,  1.1458, -1.2771, -1.1954, -0.7270, -1.4076,  0.5607,\n",
-      "         -0.1758,  1.5913],\n",
-      "        [-0.4418, -1.4896, -0.9276,  1.0588,  0.7275, -0.0803,  0.3629, -0.4858,\n",
-      "          2.8499,  1.0363],\n",
-      "        [ 0.0786,  1.1712, -0.7282,  1.9407, -0.9947, -0.1200,  0.4376,  1.1895,\n",
-      "         -0.3456, -0.2010],\n",
-      "        [ 0.3937,  0.8221, -0.8524, -0.9565, -3.0824,  0.1181,  1.2724, -0.1222,\n",
-      "         -0.3387, -0.5109],\n",
-      "        [-0.9247, -0.0354, -0.8964,  0.4619, -1.4556, -1.4296, -0.9903,  0.7843,\n",
-      "          0.3469,  1.3690],\n",
-      "        [ 0.3930,  0.1754,  0.5139,  0.9641,  1.3523,  1.4664, -1.5320,  0.6551,\n",
-      "          1.1731,  0.6391],\n",
-      "        [-0.1749,  0.4058, -0.1276, -3.3441, -0.1479,  0.9434, -0.7914,  0.1780,\n",
-      "          0.6262, -2.1555],\n",
-      "        [ 1.2356, -0.0824,  2.1803,  1.4773,  0.1597, -0.4208, -0.2043, -1.7867,\n",
-      "         -0.4570,  1.0508],\n",
-      "        [ 1.2473,  0.1983, -0.1751, -0.2294, -0.0681,  0.5568,  0.1561, -1.4111,\n",
-      "         -0.7233, -1.6446],\n",
-      "        [-1.7404,  0.3708,  1.1294,  1.1313, -0.4418, -0.1902,  1.0440,  0.2349,\n",
-      "          0.7828,  0.1122],\n",
-      "        [-0.3920,  0.8079,  0.2542, -0.3614,  0.1303, -0.7250,  0.3781,  0.0730,\n",
-      "          1.6195,  1.8573],\n",
-      "        [ 0.6610, -0.4749,  0.5805, -0.0966, -0.5306,  2.2217, -1.6045,  2.3851,\n",
-      "         -0.0588, -1.2246]])\n",
+      "tensor([[ 0.0310, -0.0302,  0.3083,  2.1783,  1.7693, -0.2927, -1.5031,  0.6983,\n",
+      "         -0.2882, -0.3581],\n",
+      "        [-0.8389, -0.4513, -1.6738,  0.1113,  1.2757,  0.5135, -1.0013, -0.9517,\n",
+      "          0.6395, -1.0833],\n",
+      "        [ 0.9713, -1.3316, -0.8702,  1.0859,  1.5969, -0.9029,  0.1113,  1.3646,\n",
+      "         -0.0416,  0.8599],\n",
+      "        [-0.5514,  0.2498,  1.4607,  0.0860, -0.2657, -0.3279, -0.6181, -0.6956,\n",
+      "          0.8563,  0.5628],\n",
+      "        [ 1.8512, -0.1250, -1.3096,  0.3395,  1.0658, -0.4178, -0.7922,  0.7766,\n",
+      "          0.4647, -1.2391],\n",
+      "        [-1.7913,  1.2491,  0.5015,  0.0116, -0.7434, -1.1705, -0.6184,  0.9694,\n",
+      "         -0.6987,  1.0628],\n",
+      "        [-2.1873, -0.2690, -0.6500,  0.9257, -0.7723,  0.4871,  0.2781, -2.2949,\n",
+      "          0.1501,  1.5481],\n",
+      "        [ 0.2679,  0.9268,  0.5985,  1.7889, -1.0061, -0.3169, -1.2766, -0.4841,\n",
+      "         -0.8235,  0.4980],\n",
+      "        [ 0.4781, -1.3183, -0.6166,  0.6537, -0.8165,  1.0060,  0.1168,  0.0368,\n",
+      "          0.5805, -1.5362],\n",
+      "        [ 0.6032,  0.4889, -0.1843, -0.9806, -3.0331,  0.5999, -0.1839, -0.0209,\n",
+      "          0.4151, -0.5782],\n",
+      "        [ 0.4913,  2.5785, -1.3195, -0.2989, -0.7559,  0.0483, -1.0446,  0.2257,\n",
+      "          0.8377,  0.9001],\n",
+      "        [ 0.0820, -0.3789, -0.1717,  0.9201, -0.6229,  0.1561, -0.1077, -0.5384,\n",
+      "          0.7231,  0.2033],\n",
+      "        [ 0.2909,  1.2030,  0.9586,  0.4449,  0.6395, -1.5281,  0.2583,  0.2690,\n",
+      "          0.7670, -0.9770],\n",
+      "        [-1.5164,  1.1118,  1.1490,  0.7234, -0.0395, -1.2704, -0.2489,  0.2779,\n",
+      "         -1.0841,  0.2728]])\n",
       "AlexNet(\n",
       "  (features): Sequential(\n",
       "    (0): Conv2d(3, 64, kernel_size=(11, 11), stride=(4, 4), padding=(2, 2))\n",
@@ -191,10 +191,18 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "id": "6e18f2fd",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CUDA is not available.  Training on CPU ...\n"
+     ]
+    }
+   ],
    "source": [
     "import torch\n",
     "\n",
@@ -217,10 +225,19 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 4,
    "id": "462666a2",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Files already downloaded and verified\n",
+      "Files already downloaded and verified\n"
+     ]
+    }
+   ],
    "source": [
     "import numpy as np\n",
     "from torchvision import datasets, transforms\n",
@@ -289,10 +306,25 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 5,
    "id": "317bf070",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Net(\n",
+      "  (conv1): Conv2d(3, 6, kernel_size=(5, 5), stride=(1, 1))\n",
+      "  (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
+      "  (conv2): Conv2d(6, 16, kernel_size=(5, 5), stride=(1, 1))\n",
+      "  (fc1): Linear(in_features=400, out_features=120, bias=True)\n",
+      "  (fc2): Linear(in_features=120, out_features=84, bias=True)\n",
+      "  (fc3): Linear(in_features=84, out_features=10, bias=True)\n",
+      ")\n"
+     ]
+    }
+   ],
    "source": [
     "import torch.nn as nn\n",
     "import torch.nn.functional as F\n",
@@ -338,10 +370,48 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 6,
    "id": "4b53f229",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Epoch: 0 \tTraining Loss: 42.923192 \tValidation Loss: 36.895676\n",
+      "Validation loss decreased (inf --> 36.895676).  Saving model ...\n",
+      "Epoch: 1 \tTraining Loss: 34.168383 \tValidation Loss: 31.815828\n",
+      "Validation loss decreased (36.895676 --> 31.815828).  Saving model ...\n",
+      "Epoch: 2 \tTraining Loss: 30.695193 \tValidation Loss: 29.604138\n",
+      "Validation loss decreased (31.815828 --> 29.604138).  Saving model ...\n",
+      "Epoch: 3 \tTraining Loss: 28.672564 \tValidation Loss: 28.590166\n",
+      "Validation loss decreased (29.604138 --> 28.590166).  Saving model ...\n",
+      "Epoch: 4 \tTraining Loss: 27.130336 \tValidation Loss: 26.830951\n",
+      "Validation loss decreased (28.590166 --> 26.830951).  Saving model ...\n",
+      "Epoch: 5 \tTraining Loss: 25.797522 \tValidation Loss: 25.764377\n",
+      "Validation loss decreased (26.830951 --> 25.764377).  Saving model ...\n",
+      "Epoch: 6 \tTraining Loss: 24.529675 \tValidation Loss: 24.874938\n",
+      "Validation loss decreased (25.764377 --> 24.874938).  Saving model ...\n",
+      "Epoch: 7 \tTraining Loss: 23.432251 \tValidation Loss: 24.083965\n",
+      "Validation loss decreased (24.874938 --> 24.083965).  Saving model ...\n",
+      "Epoch: 8 \tTraining Loss: 22.499992 \tValidation Loss: 23.986498\n",
+      "Validation loss decreased (24.083965 --> 23.986498).  Saving model ...\n",
+      "Epoch: 9 \tTraining Loss: 21.691602 \tValidation Loss: 23.278707\n",
+      "Validation loss decreased (23.986498 --> 23.278707).  Saving model ...\n",
+      "Epoch: 10 \tTraining Loss: 20.954238 \tValidation Loss: 22.795847\n",
+      "Validation loss decreased (23.278707 --> 22.795847).  Saving model ...\n",
+      "Epoch: 11 \tTraining Loss: 20.221424 \tValidation Loss: 22.731099\n",
+      "Validation loss decreased (22.795847 --> 22.731099).  Saving model ...\n",
+      "Epoch: 12 \tTraining Loss: 19.522140 \tValidation Loss: 22.694839\n",
+      "Validation loss decreased (22.731099 --> 22.694839).  Saving model ...\n",
+      "Epoch: 13 \tTraining Loss: 18.895212 \tValidation Loss: 22.008697\n",
+      "Validation loss decreased (22.694839 --> 22.008697).  Saving model ...\n",
+      "Epoch: 14 \tTraining Loss: 18.276932 \tValidation Loss: 22.355868\n",
+      "Epoch: 15 \tTraining Loss: 17.651188 \tValidation Loss: 22.875571\n",
+      "Epoch: 16 \tTraining Loss: 17.111078 \tValidation Loss: 22.151461\n"
+     ]
+    }
+   ],
    "source": [
     "import torch.optim as optim\n",
     "\n",
@@ -352,7 +422,11 @@
     "train_loss_list = []  # list to store loss to visualize\n",
     "valid_loss_min = np.Inf  # track change in validation loss\n",
     "\n",
-    "for epoch in range(n_epochs):\n",
+    "valid_loss_list = [] \n",
+    "out=0 # We create an output variable\n",
+    "n_epochs_actual=0\n",
+    "while n_epochs_actual<n_epochs and out<3:\n",
+    "    \n",
     "    # Keep track of training and validation loss\n",
     "    train_loss = 0.0\n",
     "    valid_loss = 0.0\n",
@@ -393,11 +467,12 @@
     "    train_loss = train_loss / len(train_loader)\n",
     "    valid_loss = valid_loss / len(valid_loader)\n",
     "    train_loss_list.append(train_loss)\n",
+    "    valid_loss_list.append(valid_loss)\n",
     "\n",
     "    # Print training/validation statistics\n",
     "    print(\n",
     "        \"Epoch: {} \\tTraining Loss: {:.6f} \\tValidation Loss: {:.6f}\".format(\n",
-    "            epoch, train_loss, valid_loss\n",
+    "            n_epochs_actual, train_loss, valid_loss\n",
     "        )\n",
     "    )\n",
     "\n",
@@ -409,7 +484,11 @@
     "            )\n",
     "        )\n",
     "        torch.save(model.state_dict(), \"model_cifar.pt\")\n",
-    "        valid_loss_min = valid_loss"
+    "        valid_loss_min = valid_loss\n",
+    "    else :\n",
+    "        out+=1\n",
+    "    \n",
+    "    n_epochs_actual+=1"
    ]
   },
   {
@@ -422,14 +501,26 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 7,
    "id": "d39df818",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAHHCAYAAACle7JuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/SrBM8AAAACXBIWXMAAA9hAAAPYQGoP6dpAABezUlEQVR4nO3dd3hUVf7H8fdMeoeEVJLQSWihE4pKVZogCCKKir2hK1jWxV4XXX8rdlhcBRu6NqogClKkN+kQOoSSQkkhkDr398dMBiIgSZhkMuHzep55mLl37s33JjD5cM6555gMwzAQERERcUFmZxcgIiIiUl4KMiIiIuKyFGRERETEZSnIiIiIiMtSkBERERGXpSAjIiIiLktBRkRERFyWgoyIiIi4LAUZERERcVkKMiIu4q233qJ+/fq4ubnRqlUrZ5dzxfj5559p1aoV3t7emEwmMjIynF3SeUwmEy+99FKZj9u/fz8mk4kpU6Y4vCaRyqIgI1JOU6ZMwWQy2R/e3t40btyYRx55hNTUVId+rV9++YW///3vdOnShcmTJ/PPf/7ToeeXCzt+/DjDhg3Dx8eHDz/8kC+++AI/P78Lvvfcvw9Lly49b79hGMTExGAymbj++usrunSHe/311xk4cCDh4eHlDk4iFcHd2QWIuLpXXnmFevXqkZuby9KlS5kwYQJz5sxhy5Yt+Pr6OuRr/Pbbb5jNZj755BM8PT0dck65tDVr1pCdnc2rr75Kr169SnWMt7c3U6dO5aqrriqxffHixRw6dAgvL6+KKLXCPffcc0RERNC6dWvmzZvn7HJE7NQiI3KZ+vbty2233ca9997LlClTGD16NPv27WPGjBmXfe7Tp08DkJaWho+Pj8NCjGEYnDlzxiHnqs7S0tIAqFGjRqmP6devH9999x2FhYUltk+dOpW2bdsSERHhyBIrzb59+zh69Chffvmls0sRKUFBRsTBevToAVg/+It9+eWXtG3bFh8fH4KDgxk+fDjJyckljuvWrRvNmzdn3bp1XHPNNfj6+vLMM89gMpmYPHkyOTk59q6L4jENhYWFvPrqqzRo0AAvLy/q1q3LM888Q15eXolz161bl+uvv5558+bRrl07fHx8+M9//sOiRYswmUx8++23vPzyy9SuXZuAgACGDh1KZmYmeXl5jB49mrCwMPz9/bnrrrvOO/fkyZPp0aMHYWFheHl50bRpUyZMmHDe96W4hqVLl9KhQwe8vb2pX78+n3/++XnvzcjIYMyYMdStWxcvLy+io6O54447OHbsmP09eXl5vPjiizRs2BAvLy9iYmL4+9//fl59F/Pdd9/Zfya1atXitttu4/DhwyV+HiNHjgSgffv2mEwm7rzzzkue95ZbbuH48eP8+uuv9m35+fl8//333HrrrRc8JicnhyeeeIKYmBi8vLyIi4vj//7v/zAMo8T78vLyGDNmDKGhoQQEBDBw4EAOHTp0wXMePnyYu+++m/DwcLy8vGjWrBmffvrpJeu/mLp165b7WJGKpK4lEQfbs2cPACEhIYB1bMHzzz/PsGHDuPfee0lPT+f999/nmmuu4Y8//ijxv/3jx4/Tt29fhg8fzm233UZ4eDjt2rVj0qRJrF69mv/+978AdO7cGYB7772Xzz77jKFDh/LEE0+watUqxo0bx/bt25k2bVqJupKSkrjlllt44IEHuO+++4iLi7PvGzduHD4+PvzjH/9g9+7dvP/++3h4eGA2mzl58iQvvfQSK1euZMqUKdSrV48XXnjBfuyECRNo1qwZAwcOxN3dnVmzZvHwww9jsVgYNWpUiRp2797N0KFDueeeexg5ciSffvopd955J23btqVZs2YAnDp1iquvvprt27dz991306ZNG44dO8bMmTM5dOgQtWrVwmKxMHDgQJYuXcr9999PkyZN2Lx5M+PHj2fnzp1Mnz79L39GU6ZM4a677qJ9+/aMGzeO1NRU3n33XZYtW2b
/mTz77LPExcUxadIke/dhgwYNLvnzr1u3Lp06deLrr7+mb9++AMydO5fMzEyGDx/Oe++9V+L9hmEwcOBAFi5cyD333EOrVq2YN28eTz31FIcPH2b8+PH299577718+eWX3HrrrXTu3JnffvuN/v37n1dDamoqHTt2xGQy8cgjjxAaGsrcuXO55557yMrKYvTo0Ze8DhGXYYhIuUyePNkAjPnz5xvp6elGcnKy8c033xghISGGj4+PcejQIWP//v2Gm5ub8frrr5c4dvPmzYa7u3uJ7V27djUAY+LEied9rZEjRxp+fn4ltm3YsMEAjHvvvbfE9ieffNIAjN9++82+rU6dOgZg/PzzzyXeu3DhQgMwmjdvbuTn59u333LLLYbJZDL69u1b4v2dOnUy6tSpU2Lb6dOnz6u3d+/eRv369UtsK65hyZIl9m1paWmGl5eX8cQTT9i3vfDCCwZg/Pjjj+ed12KxGIZhGF988YVhNpuN33//vcT+iRMnGoCxbNmy844tlp+fb4SFhRnNmzc3zpw5Y98+e/ZsAzBeeOEF+7bin/GaNWsuer4LvfeDDz4wAgIC7N+bm266yejevbv9+9C/f3/7cdOnTzcA47XXXitxvqFDhxomk8nYvXu3YRhnf94PP/xwiffdeuutBmC8+OKL9m333HOPERkZaRw7dqzEe4cPH24EBQXZ69q3b58BGJMnT77k9RVLT08/7+uJOJO6lkQuU69evQgNDSUmJobhw4fj7+/PtGnTqF27Nj/++CMWi4Vhw4Zx7Ngx+yMiIoJGjRqxcOHCEufy8vLirrvuKtXXnTNnDgCPP/54ie1PPPEEAD/99FOJ7fXq1aN3794XPNcdd9yBh4eH/XViYiKGYXD33XeXeF9iYiLJycklxn/4+PjYn2dmZnLs2DG6du3K3r17yczMLHF806ZNufrqq+2vQ0NDiYuLY+/evfZtP/zwAy1btmTw4MHn1WkymQBrt1CTJk2Ij48v8X0t7tb78/f1XGvXriUtLY2HH34Yb29v+/b+/fsTHx9/3vetPIYNG8aZM2eYPXs22dnZzJ49+6LdSnPmzMHNzY2//e1vJbY/8cQTGIbB3Llz7e8Dznvfn1tXDMPghx9+YMCAARiGUeL707t3bzIzM1m/fv1lX6NIVaGuJZHL9OGHH9K4cWPc3d0JDw8nLi4Os9n6f4Rdu3ZhGAaNGjW64LHnhgeA2rVrl3pA74EDBzCbzTRs2LDE9oiICGrUqMGBAwdKbK9Xr95FzxUbG1vidVBQEAAxMTHnbbdYLGRmZtq7zpYtW8aLL77IihUr7IOTi2VmZtrPdaGvA1CzZk1Onjxpf71nzx6GDBly0VrB+n3dvn07oaGhF9xfPEj3Qoq/L+d2rRWLj4+/4K3TZRUaGkqvXr2YOnUqp0+fpqioiKFDh160nqioKAICAkpsb9KkSYl6i3/ef+7e+vN1pKenk5GRwaRJk5g0adIFv+ZffX9EXI2CjMhl6tChA+3atbvgPovFgslkYu7cubi5uZ2339/fv8Trc1s3Squ4leJS/urcF6rtr7YbtkGoe/bsoWfPnsTHx/P2228TExODp6cnc+bMYfz48VgsljKdr7QsFgstWrTg7bffvuD+PwcwZ7j11lu57777SElJoW/fvmW68+lyFH/Pb7vtNvtg5T9LSEiolFpEKoOCjEgFatCgAYZhUK9ePRo3buzQc9epUweLxcKuXbvs/3sH60DPjIwM6tSp49CvdyGzZs0iLy+PmTNnlmht+auunUtp0KABW7ZsueR7Nm7cSM+ePUsd5IoVf1+SkpLsXVHFkpKSHPZ9Gzx4MA888AArV67kf//731/WM3/+fLKzs0u0yuzYsaNEvcU/7z179pRohUlKSipxvuI7moqKiko9942IK9MYGZEKdOONN+Lm5sbLL798XquDYRgcP3683Ofu168fAO+8806J7cWtFBe6m8XRiltYzr22zMxMJk+eXO5zDhkyhI0bN55
319W5X2fYsGEcPnyYjz/++Lz3nDlzhpycnIuev127doSFhTFx4sQSt2rPnTuX7du3O+z75u/vz4QJE3jppZcYMGDARd/Xr18/ioqK+OCDD0psHz9+PCaTyX7nU/Gff77r6c8/fzc3N4YMGcIPP/xwwUCYnp5enssRqbLUIiNSgRo0aMBrr73G2LFj2b9/P4MGDSIgIIB9+/Yxbdo07r//fp588slynbtly5aMHDmSSZMmkZGRQdeuXVm9ejWfffYZgwYNonv37g6+mvNdd911eHp6MmDAAB544AFOnTrFxx9/TFhYGEePHi3XOZ966im+//57brrpJu6++27atm3LiRMnmDlzJhMnTqRly5bcfvvtfPvttzz44IMsXLiQLl26UFRUxI4dO/j222/t8+VciIeHB2+++SZ33XUXXbt25ZZbbrHffl23bl3GjBlzOd+SEi7WtXOuAQMG0L17d5599ln2799Py5Yt+eWXX5gxYwajR4+2j4lp1aoVt9xyCx999BGZmZl07tyZBQsWsHv37vPO+cYbb7Bw4UISExO57777aNq0KSdOnGD9+vXMnz+fEydOlPlavvjiCw4cOGAfB7VkyRJee+01AG6//fZKaQEUuRAFGZEK9o9//IPGjRszfvx4Xn75ZcA6huO6665j4MCBl3Xu//73v9SvX58pU6Ywbdo0IiIiGDt2LC+++KIjSr+kuLg4vv/+e5577jmefPJJIiIieOihhwgNDT3vjqfS8vf35/fff+fFF19k2rRpfPbZZ4SFhdGzZ0+io6MBMJvNTJ8+nfHjx/P5558zbdo0fH19qV+/Po899tglu/HuvPNOfH19eeONN3j66afx8/Nj8ODBvPnmm5U2lqWY2Wxm5syZvPDCC/zvf/9j8uTJ1K1bl7feest+B1qxTz/9lNDQUL766iumT59Ojx49+Omnn84bExQeHs7q1at55ZVX+PHHH/noo48ICQmhWbNmvPnmm+Wq85NPPmHx4sX21wsXLrR3IV511VUKMuI0JqOso+xEREREqgiNkRERERGXpSAjIiIiLktBRkRERFyWgoyIiIi4LAUZERERcVkKMiIiIuKyqv08MhaLhSNHjhAQEFDmqcxFRETEOQzDIDs7m6ioKPtCvBdS7YPMkSNHqsQCciIiIlJ2ycnJ9skwL6TaB5niRdiSk5MJDAx0cjUiIiJSGllZWcTExJRYTPVCqn2QKe5OCgwMVJARERFxMZcaFqLBviIiIuKyFGRERETEZSnIiIiIiMtSkBERERGXpSAjIiIiLktBRkRERFyWgoyIiIi4LAUZERERcVkKMiIiIuKyFGRERETEZSnIiIiIiMtSkBERERGXpSBTTkUWg82HMjmdX+jsUkRERK5YCjLldMOHSxnwwVJW7j3u7FJERESuWAoy5dQsMgiAVXtPOLkSERGRK5eCTDkl1g8GUIuMiIiIEynIlFNi/RAAthzJ4lSexsmIiIg4g4JMOdWu4UNMsA9FFoO1+9W9JCIi4gwKMpchsZ61VWalxsmIiIg4hYLMZUisZx0ns2qfxsmIiIg4g4LMZehoGyej+WREREScQ0HmMsQE+1K7hg+FFoN1B046uxwREZErjoLMZSruXtJt2CIiIpVPQeYyFXcvaWI8ERGRyqcgc5mKJ8bbeCiDM/lFTq5GRETkyqIgc5lig32JCPSmoMhg/UGNkxEREalMCjKXyWQy0dHWKrNK42REREQqlYKMAxQvV7Byn8bJiIiIVCYFGQcovnNpw8EMcgs0TkZERKSyKMg4QL1afoQGeJFfZOGPgxnOLkdEROSKoSDjANZxMrbbsLVcgYiISKVRkHEQ+7pLmk9GRESk0ijIOEjxnUvrD54kr1DjZERERCqDgoyDNAj1p5a/J3mFFjYmZzq7HBERkSuCgoyDmEwmEusVL1egcTIiIiKVQUHGgYqXK1ipAb8iIiKVQkHGgYrvXFp34CT5hRYnVyMiIlL9Kcg4UKMwf4L9PMktsLD
5cIazyxEREan2FGQcyGQy0aGurXtJt2GLiIhUOAUZB7OPk9GAXxERkQqnIONg546TKSjSOBkREZGKpCDjYHHhAdTw9eB0fhGbD2s+GRERkYqkIONgZrOJ9nW1XIGIiEhlUJCpAFpAUkREpHIoyFSA4gUk1+4/SaHGyYiIiFQYBZkK0CQykABvd07lFbL1SJazyxEREam2FGQqgJvZZG+VUfeSiIhIxVGQqSBnF5DUgF8REZGKUmWCzBtvvIHJZGL06NH2bbm5uYwaNYqQkBD8/f0ZMmQIqampziuyDIonxlu97wRFFsPJ1YiIiFRPVSLIrFmzhv/85z8kJCSU2D5mzBhmzZrFd999x+LFizly5Ag33nijk6osm6aRgfh7uZOdV8j2oxonIyIiUhGcHmROnTrFiBEj+Pjjj6lZs6Z9e2ZmJp988glvv/02PXr0oG3btkyePJnly5ezcuVKJ1ZcOu5uZtrXtV6PlisQERGpGE4PMqNGjaJ///706tWrxPZ169ZRUFBQYnt8fDyxsbGsWLHioufLy8sjKyurxMNZEm3zyWgBSRERkYrh7swv/s0337B+/XrWrFlz3r6UlBQ8PT2pUaNGie3h4eGkpKRc9Jzjxo3j5ZdfdnSp5VJ859Ka/SewWAzMZpOTKxIREalenNYik5yczGOPPcZXX32Ft7e3w847duxYMjMz7Y/k5GSHnbusmtcOws/TjcwzBexIyXZaHSIiItWV04LMunXrSEtLo02bNri7u+Pu7s7ixYt57733cHd3Jzw8nPz8fDIyMkocl5qaSkRExEXP6+XlRWBgYImHs3i4mWlbV/PJiIiIVBSnBZmePXuyefNmNmzYYH+0a9eOESNG2J97eHiwYMEC+zFJSUkcPHiQTp06OavsMivuXtKAXxEREcdz2hiZgIAAmjdvXmKbn58fISEh9u333HMPjz/+OMHBwQQGBvLoo4/SqVMnOnbs6IySy6V4AcnV+zRORkRExNGcOtj3UsaPH4/ZbGbIkCHk5eXRu3dvPvroI2eXVSYJ0UH4eLhx8nQBu9JOERcR4OySREREqg2TYRjVetrZrKwsgoKCyMzMdNp4mdv+u4qlu4/x8sBmjOxc1yk1iIiIuJLS/v52+jwyVwItICkiIlIxFGQqQccGZxeQrOYNYCIiIpVKQaYSJEQH4eVu5nhOPrvTTjm7HBERkWpDQaYSeLm70SbWtu7SPi1XICIi4igKMpWk+DbsVZpPRkRExGEUZCpJYv3iAb8aJyMiIuIoCjKVpFVMDTzdzaRn57H3WI6zyxEREakWFGQqibeHG61jagDWu5dERETk8inIVKLE4nEymk9GRETEIRRkKlHHcxaQ1DgZERGRy6cgU4lax9bEw81EalYeB46fdnY5IiIiLk9BphL5eLrRqnicjLqXRERELpuCTCVLrGcdJ7NSA35FREQum4JMJbPPJ6NxMiIiIpdNQaaSta1TE3eziSOZuRw6ecbZ5YiIiLg0BZlK5uvpTkJ0EAArtFyBiIjIZVGQcQL7fDIaJyMiInJZFGScoKMmxhMREXEIBRknaFunJm5mE4dOnuHQSc0nIyIiUl4KMk7g7+VO89rWcTLqXhIRESk/BRknKV6uQN1LIiIi5acg4yRnx8moRUZERKS8FGScpF3dmphNcOD4aY5maj4ZERGR8lCQcZIAbw+aRWmcjIiIyOVQkHGijvU1TkZERORyKMg4kRaQFBERuTwKMk7Uvl4wJhPsO5ZDWlaus8sRERFxOQoyThTk40HTyEAAVuruJRERkTJTkHGy4u6lVVpAUkREpMwUZJws0Tbgd6WCjIiISJkpyDhZh7rWILMnPYf07DwnVyMiIuJaFGScrKafJ/ERAQCs1jgZERGRMlGQKQ/DgLWfwsSrIevoZZ+ueLkCdS+JiIiUjYJMeZhMsOlbSNlkDTSXKVELSIqIiJSLgkx5dbjf+ue6yVB4eWNbOtiCzM7UUxw/pXEyIiIipaUgU15NBkB
AFOSkw9bpl3WqEH8vGof7AxonIyIiUhYKMuXl5gHt7rY+X/2fyz6dfT4ZBRkREZFSU5C5HG3vBDdPOLwODq27rFNpwK+IiEjZKchcDv9QaHaj9flltsoUj5NJSs0m43T+5VYmIiJyRVCQuVyJtkG/W36E7NRynyY0wIsGoX4YhrqXRERESktB5nLVbgvR7cFSAOumXNapEusXr7ukICMiIlIaCjKO0OEB659rP4XC8ncLFY+T0XwyIiIipaMg4whNbwD/cDiVAttnlvs0HW3jZLYdzSLzdIGjqhMREam2FGQcwd0T2t5lfb56UrlPExboTb1a1nEya/are0lERORSFGQcpd1dYHaH5FVwZEO5T9OxvpYrEBERKS0FGUcJiICmg6zPL6NVpnhivJUa8CsiInJJCjKOlGgb9Lv5e8g5Vr5T2Fpkth7JJCtX42RERET+ioKMI0W3h8hWUJQH6z8r1ykig3yoE+KLxYB1+086tj4REZFqRkHGkUyms60yaz6FosJynSbRdveSlisQERH5awoyjtbsRvCtBVmHIOmncp3CPk5GM/yKiIj8JQUZR/Pwti4mCbCqfOsvFY+T2XI4k1N55WvVERERuRIoyFSEdneDyQ0OLIOULWU+PLqmL9E1fSiyGKw7oHEyIiIiF6MgUxGCakOTAdbn5VwV++xt2BonIyIicjEKMhWleNDvpu/gdNnHuhR3L61SkBEREbkopwaZCRMmkJCQQGBgIIGBgXTq1Im5c+fa93fr1g2TyVTi8eCDDzqx4jKI7QThLaDwDPzxRZkP72RbQHLToUxO52ucjIiIyIU4NchER0fzxhtvsG7dOtauXUuPHj244YYb2Lp1q/099913H0ePHrU//vWvfzmx4jIwmSDxfuvzNf8FS1GZDo+u6UNUkDeFGicjIiJyUU4NMgMGDKBfv340atSIxo0b8/rrr+Pv78/KlSvt7/H19SUiIsL+CAwMdGLFZdTiJvCpCRkHYefPZTrUZDKRaGuVWaXlCkRERC6oyoyRKSoq4ptvviEnJ4dOnTrZt3/11VfUqlWL5s2bM3bsWE6fPv2X58nLyyMrK6vEw2k8fKDNHdbn5bgVWwtIioiI/DV3ZxewefNmOnXqRG5uLv7+/kybNo2mTZsCcOutt1KnTh2ioqLYtGkTTz/9NElJSfz4448XPd+4ceN4+eWXK6v8S2t/Lyx/H/YthrQdEBZf6kOL71zakJzBmfwifDzdKqpKERERl2QyDMNwZgH5+fkcPHiQzMxMvv/+e/773/+yePFie5g512+//UbPnj3ZvXs3DRo0uOD58vLyyMvLs7/OysoiJiaGzMxM53VLfTMCdsyGdvfA9W+X+jDDMOg4bgGpWXlMvTeRzg1rVWCRIiIiVUdWVhZBQUGX/P3t9K4lT09PGjZsSNu2bRk3bhwtW7bk3XffveB7ExMTAdi9e/dFz+fl5WW/C6r44XTFt2Jv/BrOZJT6MJPJpOUKRERE/oLTg8yfWSyWEi0q59qwYQMAkZGRlViRA9S9GsKaQsFp2PBVmQ7taB/wq3EyIiIif+bUIDN27FiWLFnC/v372bx5M2PHjmXRokWMGDGCPXv28Oqrr7Ju3Tr279/PzJkzueOOO7jmmmtISEhwZtllZzJBh/usz1d/DBZLqQ8tnhjvj+QMcgvKdgu3iIhIdefUIJOWlsYdd9xBXFwcPXv2ZM2aNcybN49rr70WT09P5s+fz3XXXUd8fDxPPPEEQ4YMYdasWc4sufwSbgbvIDi5D3b/WurD6tfyo5a/F/mFFjYkZ1RcfSIiIi7IqXctffLJJxfdFxMTw+LFiyuxmgrm6Qetb4cVH1hvxW7cu1SHmUwmOtYPZvamo6zae8Le1SQiIiJVcIxMtdb+XsAEexbAsV2lPqx4YjwtICkiIlKSgkxlCq53tiVm9celPqxjPes4mfUHT5JXqHEyIiIixRRkKlsH2/pLG6ZCXnapDmkY5k+Inyd5hRY2HcqswOJ
ERERci4JMZavfHUIaQX42bPi6VIdY112ytsrM35ZakdWJiIi4FAWZymY2n22VWT2p1LdiD0iIAmDysv3sTT9VUdWJiIi4FAUZZ2h1C3gGwPFdsPe3Uh3Sp3kE1zQOJb/IwnPTt+DklSVERESqBAUZZ/AKgNYjrM9XTSrVISaTidduaI6Xu5nle44z7Y/DFVigiIiIa1CQcZb2tpl+d/0CJ/aW6pDYEF/+1rMRAK/9tJ2TOfkVVZ2IiIhLUJBxlloNoWEvwIDV/y31YfddXZ/G4f6cyMnnjbk7Kq4+ERERF6Ag40wdbKti//El5JVuAK+nu5l/Dm4BwP/WJrNaq2KLiMgVTEHGmRr2guD6kJcJm/5X6sPa1Q3mlg4xADwzbTP5haVfhFJERKQ6UZBxJrP57FiZ1R9DGe5EerpPPCF+nuxOO8WkJXsqqEAREZGqTUHG2VqPAA8/SN8O+5aU+rAavp48f31TAN7/bTf7j+VUVIUiIiJVloKMs3kHQcvh1uerS3crdrEbWkVxVcNa5BVaeH6G5pYREZErj4JMVVA802/SHDh5oNSHmUwmXhvUHE93M7/vOsbMjUcqqEAREZGqSUGmKgiLh/rdwLDAmtLfig1Qt5Yfj3ZvCMCrs7eRebqgAgoUERGpmhRkqoriW7HXfw75p8t06P1d69Mg1I9jp/J542fNLSMiIlcOBZmqonFvqBELuRmw+bsyHerl7mafW+br1QdZd0Bzy4iIyJVBQaaqMLudcyv2pDLdig2QWD+Em9pGA/DMj1soKNLcMiIiUv0pyFQlrW8Ddx9I3QIHlpf58Gf6NSHYz5Ok1Gz++/u+CihQRESkalGQqUp8gyFhmPX56v+U+fCafp48268JAO8u2EnyibKNtREREXE1CjJVTaJt0O/22ZB5qMyH39imNp3qh5BbYOG56ZpbRkREqjcFmaomvBnUuQqMIlj7aZkPN5lMvDa4OZ5uZhbvTOenzUcroEgREZGqQUGmKkq0TZC3bgoU5Jb58Aah/jzUrQEAL8/aRuYZzS0jIiLVk4JMVRTXHwKj4fRx2PpjuU7xULcG1K/lR3p2Hv83L8nBBYqIiFQNCjJVkZs7tL/H+nzVxDLfig3g7eHGa4ObA/DlqgP8cfCkIysUERGpEhRkqqo2I8HNC45uhOTV5TpF5wa1uLFNbQwDxv64WXPLiIhItaMgU1X5hUCLm6zPy3ErdrFn+zWhhq8HO1KymbxMc8uIiEj1oiBTlRUP+t02A7LKd/dRiL8Xz/S1zi0z/tddHDqpuWVERKT6UJCpyiJbQkxHsBTCusnlPs1N7aLpUC+YMwVFvDBjq+aWERGRakNBpqorbpVZOxkK88t1CpPJxD8HN8fDzcRvO9L4eUuKAwsUERFxHgWZqq7JQAiIhJw02Da93KdpGBbAg12tc8u8NGsr2bmaW0ZERFyfgkxV5+YB7e62Pl9V/kG/AKO6N6RuiC+pWXn8+5edDihORETEuRRkXEHbO8HNEw6vhUPryn0abw83XhvUAoDPVuxnY3KGY+oTERFxEgUZV+AfBs1utD7/8V7IOlLuU13VqBaDWkVhGPDMtM0Uam4ZERFxYQoyrqLXi1AjFk7shc8GQnZquU/1bP+mBHq7s/VIFp+tOODAIkVERCqXgoyrCIyCkbOtazAd3wWfD4RT6eU6VWiAF2P7WeeW+fcvSRzJOOPISkVERCqNgowrqVkH7pwFAVGQvgM+vwFOnyjXqW5uF0O7OjU5nV/EizO3OrhQERGRyqEg42qC68PIWeAfDmlbrWHmTNkXhDSbTfzzxha4m038ui2VeVs1t4yIiLgeBRlXVKuhNcz4hULKJvhiMJzJKPNpGocHcP819QF4aeZWTuUVOrhQERGRiqUg46pC4+COmeATDEf+gK+GQm5WmU/zaI9GxAT7cDQzl/G/am4ZERFxLQoyriy8KdwxA7xrwKE18NVNkHeqTKfw8XTj1RuaAzB52T62HM6sgEJFREQqhoK
Mq4tMgDumg1cQJK+EqTdDftlWuO4WF8aAllFYbHPLFFm0qKSIiLgGBZnqIKo13P4jeAbAgaXw9XAoKNst1c9f34QAb3c2HcrkixX7K6ZOERERB1OQqS6i28FtP4CnP+xbDN+MgILcUh8eFuDN033iAfi/X3aSkln6Y0VERJxFQaY6iU2EW78FD1/YswC+vQMK80t9+K0dYmkdW4NTeYW8PEtzy4iISNVXriCTnJzMoUOH7K9Xr17N6NGjmTRpksMKk3Kq2wVu+QbcvWHXPPj+LigqKNWhZrOJfw5ugZvZxNwtKSzYXv5lEERERCpDuYLMrbfeysKFCwFISUnh2muvZfXq1Tz77LO88sorDi1QyqF+Vxg+Fdy8YMds+OEeKCrdHDFNIgO596p6ALwwYyun8zW3jIiIVF3lCjJbtmyhQ4cOAHz77bc0b96c5cuX89VXXzFlyhRH1ifl1bAn3PwluHnCthkw7QGwFJXq0Md6NaJ2DR8OZ5zhpZlbseguJhERqaLKFWQKCgrw8vICYP78+QwcOBCA+Ph4jh496rjq5PI0vg6GfQ5md9jyPUx/uFRhxtfTndcHN8dkgm/XHuLJ7zZSWGSphIJFRETKplxBplmzZkycOJHff/+dX3/9lT59+gBw5MgRQkJCHFqgXKa4vjB0MpjcYNM3MOtvYLl0KOkWF8b4Ya1wM5v48Y/DPPTVenILSteiIyIiUlnKFWTefPNN/vOf/9CtWzduueUWWrZsCcDMmTPtXU5ShTQdCEM+BpMZ/vgSfnocjEt3Fw1qXZv/3NYWT3czv25L5e4pa7Qek4iIVCkmwyjFb7QLKCoqIisri5o1a9q37d+/H19fX8LCwhxW4OXKysoiKCiIzMxMAgMDnV2Oc236Fn68HzCgw/3Q919gMl3ysBV7jnPvZ2vIyS+iZUwNptzZnpp+nhVfr4iIXLFK+/u7XC0yZ86cIS8vzx5iDhw4wDvvvENSUlKVCjHyJwnD4IYPAROsngTzni1Vy0ynBiF8fX9Havp6sDE5g5snrSA1SxPmiYiI85UryNxwww18/vnnAGRkZJCYmMi///1vBg0axIQJE0p9ngkTJpCQkEBgYCCBgYF06tSJuXPn2vfn5uYyatQoQkJC8Pf3Z8iQIaSmam6Ty9J6BAx4x/p85Ycw/8VShZmE6Bp8+0AnwgO92Jl6iqETl3PweNnWdBIREXG0cgWZ9evXc/XVVwPw/fffEx4ezoEDB/j888957733Sn2e6Oho3njjDdatW8fatWvp0aMHN9xwA1u3WmeVHTNmDLNmzeK7775j8eLFHDlyhBtvvLE8Jcu52t4J/f7P+nzZu7Dw9VId1ig8gO8f7EydEF+ST5xh6MTlJKVkV1ydIiIil1CuMTK+vr7s2LGD2NhYhg0bRrNmzXjxxRdJTk4mLi6O06fL/z/14OBg3nrrLYYOHUpoaChTp05l6NChAOzYsYMmTZqwYsUKOnbsWKrzaYzMX1g5EX5+2vq82zPQ7elSHZaWlcsdn65mR0o2QT4eTLmrPa1ja176QBERkVKq0DEyDRs2ZPr06SQnJzNv3jyuu+46ANLS0sodFoqKivjmm2/IycmhU6dOrFu3joKCAnr16mV/T3x8PLGxsaxYseKi58nLyyMrK6vEQy6i44Nw3WvW54v+Cb//u1SHhQV68839HWkdW4PMMwWM+O8qlu0+VoGFioiIXFi5gswLL7zAk08+Sd26denQoQOdOnUC4JdffqF169ZlOtfmzZvx9/fHy8uLBx98kGnTptG0aVNSUlLw9PSkRo0aJd4fHh5OSkrKRc83btw4goKC7I+YmJgyX98VpfOj0PMF6/MFr8Dy90t1WA1fT768J5GrGtbidH4Rd01ew7ytF/+5iIiIVIRyBZmhQ4dy8OBB1q5dy7x58+zbe/bsyfjx48t0rri4ODZs2MCqVat46KGHGDlyJNu2bStPWQCMHTuWzMxM+yM5Obnc57piXP2EtWsJ4JfnYGX
pBmz7ebnzyZ3t6NMsgvwiCw9/tZ4f1h269IEiIiIO4l7eAyMiIoiIiLCvgh0dHV2uyfA8PT1p2LAhAG3btmXNmjW8++673HzzzeTn55ORkVGiVSY1NZWIiIiLns/Ly8u+fIKUQde/Q1E+/P5/8PM/rMsadLjvkod5ubvxwa2t+cePm/l+3SGe+G4j2bkF3NmlXiUULSIiV7pytchYLBZeeeUVgoKCqFOnDnXq1KFGjRq8+uqrWEox/f2lzp2Xl0fbtm3x8PBgwYIF9n1JSUkcPHjQ3pUlDmQyQY/noMtj1tdznoR1U0p1qLubmX8NSeBuW3h5adY23p2/i3LOtSgiIlJq5WqRefbZZ/nkk09444036NKlCwBLly7lpZdeIjc3l9dfL93tvGPHjqVv377ExsaSnZ3N1KlTWbRoEfPmzSMoKIh77rmHxx9/nODgYAIDA3n00Ufp1KlTqe9YkjIymaDXy1BUACs/glmjIesodHwIfGr85aFms4nnr29CkI8H4+fvZPz8nWSeKeC5/k0wmy89e7CIiEh5lOv266ioKCZOnGhf9brYjBkzePjhhzl8+HCpznPPPfewYMECjh49SlBQEAkJCTz99NNce+21gHVCvCeeeIKvv/6avLw8evfuzUcfffSXXUt/ptuvy8EwYO7frbP/AngGQPu7oePDEHDp7/3kZft4eZZ1nNPQttG8cWML3N3K1fgnIiJXqNL+/i5XkPH29mbTpk00bty4xPakpCRatWrFmTNnyl5xBVGQKSfDgM3fwe9vQ/p26zY3T2h5i7X7KaTBXx7+w7pD/P2HTRRZDPo0i+DdW1rh5e5WCYWLiEh1UKHzyLRs2ZIPPvjgvO0ffPABCQkJ5TmlVDUmk3VtpoeWwy3/g5hE62Dg9Z/B+23h25Fw5I+LHj6kbTQfjWiDp5uZn7emcO9na8nRytkiIuJg5WqRWbx4Mf379yc2NtY+8HbFihUkJyczZ84c+/IFVYFaZBzowApYOh52nb3lnvrd4aoxUO+aC66kvWz3Me77fC2n84toHVuDKXd2IMjXoxKLFhERV1ShLTJdu3Zl586dDB48mIyMDDIyMrjxxhvZunUrX3zxRbmLliquTicY8a21lSbhZjC5wd6F8PlA+LgHbJsJf7prrUvDWnx5byJBPh78cdC6cnZatlbOFhERxyhXi8zFbNy4kTZt2lBUVOSoU142tchUoJMHYMUHsP5zKLSFk5BG1jE0CcPA/ex8PjtSsrj9k9WkZ+dRJ8SXL+9JJCbY10mFi4hIVVehLTIiANSsA/3egtFb4JqnwDsIju+CmY/Au61g+QeQZ10dOz4ikO8f7ERMsA8Hjp9m6MTl7ErVytkiInJ5FGTk8vmHWifTG7PVughlQCRkH4FfnoXxzeC31yDnGHVC/Pjugc40CvMnNSuPYf9ZwaZDGc6uXkREXJiCjDiOV4B1EcrHNsLA9yGkIeRmwpK3YHxzmPMUEZZUvn2gEy2jgzh5uoBbP17Fij3HnV25iIi4qDKNkbnxxhv/cn9GRgaLFy/WGBmxshTBjp9g6dtnb9U2uUGLoZzu8Cj3zMlhxd7jeLqb+ejWNvRqGu7cekVEpMqokAnx7rrrrlK9b/LkyaU9ZYVTkKkCDAP2LbHeur13oX1zUcPevJXTj4n7QnEzm/j3TS0Z1Lq2EwsVEZGqokJn9nUlCjJVzJE/YOk7sG0GYP2rt9enBa9m9mGR0Yrn+jfj7i51MV1gThoREblyKMjYKMhUUcf3wLJ3YePX1hmDge2WGP5dOAzi+vHmkBaE+Htd4iQiIlJd6fZrqdpCGsDA9+CxTdD5bxieATQxJ/Nfz3/Tete79HtnEUt2pju7ShERqeLUIiNVw5kMWPwmrPwIgAVFrRldMIqbujTj733i8PbQgpMiIlcStciIa/GpAX3GwY0fY7h709PtD6Z7Ps/C5csZ9OEydmryPBERuQAFGalaEoZhumsuBNamgfkoM72eJyLtdwa8v5T
PV+ynmjcgiohIGSnISNVTuw3ctxBiEgngNJ96vsWdxgxemLGFu6es4dipPGdXKCIiVYSCjFRNAeEwcha0uQMzBmM9vuY9z49YnnSYPu8sYWFSmrMrFBGRKkBBRqoudy8Y8B70+z8wuzPQvIzZvq/iceood01ew0szt5JbUHVmkRYRkcqnICNVm8kEHe6D26eDbwiNLHv41f8F2pl2MGX5fm74YBk7UrKcXaWIiDiJgoy4hnpXW8fNhLfAv/Ak3/qM417fJSSlZjPwg2V8unSfBgKLiFyBFGTEddSsA/fMg6aDMFsKeM4ykU9Dv8FSmM8rs7dx5+Q1pGXnOrtKERGpRAoy4lo8/eCmKdDjecBEj+yZLIt8lwj3UyzemU6fd35nwfZUZ1cpIiKVREFGXI/JBNc8Cbd8DZ4BhJ9cx+81XqZ/6DFO5ORzz2dreX76Fs7kayCwiEh1pyAjriuuL9y3AILr43HqMB+ceZq3m+0B4IuVBxjwwVK2Hsl0cpEiIlKRFGTEtYXGwX2/QYOemArPcOOe51nS9nfC/T3YnXaKwR8u57+/78Vi0UBgEZHqSEFGXJ9PTRjxHXT+GwCxWyfwe52PGRjvR36Rhdd+2s7IyatJzdJAYBGR6kZBRqoHsxtc9yoMngRuXnju+YV3Tz3F+9cF4O1h5vddx+jzzhJ+2Zri7EpFRMSBFGSkeml5M9z9MwREYTq2kwGrbuO3GwppXjuQk6cLuP+LdTwzbTOn8wudXamIiDiAgoxUP7XbwP2LICYR8jKJ+ukOprdaxwPX1MNkgqmrDnL9+0vZclgDgUVEXJ2CjFRPxYtOtr4dDAvuC15k7Jm3+XpkSyICvdmbnsPgj5YxYdEeCosszq5WRETKSUFGqi93Lxj4vnXRSZMbbP6OjotH8PNd9enbPIKCIoM3f97BgA+Wse7ASWdXKyIi5aAgI9Vb8aKTd8wAn2A4uoEaX17LR1fn8383taSmrwfbj2YxZMJyxv64mYzT+c6uWEREykBBRq4M9a62jpsJbw456Zg+G8BQ5rPgiW4MaxcNwNerD9Lz34v5Yd0hLUApIuIiTEY1/8TOysoiKCiIzMxMAgMDnV2OOFt+Dkx/CLbNsL5ucwf0eJ41x9x5dtpmdqaeAiCxXjCvD25Ow7AAJxYrInLlKu3vbwUZufIYBvz+f/Dba9bX7j7Q7i4KOj7CJxtzeXf+Ls4UFOHhZuK+q+vzaI9G+Hi6ObdmEZErjIKMjYKMXNTexbDgFTi81vrazQvajuRo8wd4fmEG822raEfX9OGVG5rRIz7cicWKiFxZFGRsFGTkLxkG7PkNFr8Jyaus29w8ofXtLAm/jX/MP8mRTOvSBn2aRfDiwKZEBvk4sWARkSuDgoyNgoyUimHAviWw+F9wYKl1m9mDgoRb+NgYxL/X5FFkMfDzdGPMtY25s3Nd3N00Vl5EpKIoyNgoyEiZ7V9qDTT7Fltfm9zIaDyE5471ZvZha2tMk8hAXh/cnDaxNZ1YqIhI9aUgY6MgI+V2cKW1y2nPbwAYJjf2R/Vn9JEebDwThskEw9vH8nSfOGr4ejq5WBGR6kVBxkZBRi5b8hpY8i/Y9QsAhsnMH4E9+Hvadew2ognx8+SZfk24sU1tTCaTk4sVEakeFGRsFGTEYQ6vhyVvQdIcAAxMLHbvwric60kyYulYP5jXBmnuGRERR1CQsVGQEYc7utEaaLbPsm/6xejAO/mD2GWux/3X1OeR7pp7RkTkcijI2CjISIVJ3WoNNFunA9Z/Rr8WteXdwsFk1mzGKwOb0z0+zKklioi4KgUZGwUZqXBpO+D3/8PY8gMmwwLAgqLWvF84mMhmV/HCAM09IyJSVgoyNgoyUmmO7YLf/42x6X/2QLO4KIFJpqF0v3aA5p4RESkDBRkbBRmpdMf3wO9vY2z8GpNRBMDSombMCLqdW4YN19wzIiKloCBjoyAjTnNiH8bS8Rh/fIXZKARglSWepNhb6Tv0HkJr+Du5QBG
RqktBxkZBRpwu4yC5C/+N+6avcDcKAEgzanKo/k00H/A3PINjnFygiEjVoyBjoyAjVUbmYY4u+AjvzV9S08gAoAgzJ6N7UqvbQ1C/O5g1hkZEBBRk7BRkpKqxFOSxcs5neP4xmXZss28vCKqHR+I90GoE+AY7sUIREedTkLFRkJGqKvNMAVNn/4Lvps8YbF5CoOkMAIabF6bmN0K7eyC6HWjZAxG5AinI2CjISFW3KzWbN2asI/TALG5zm09z8/6zOyNaWANNi5vAS4ODReTKoSBjoyAjrsAwDH7Zlsqrs7ZSK3MLt7nPZ6DbSjzJt77BKxBaDreGmrB45xYrIlIJSvv726kjC8eNG0f79u0JCAggLCyMQYMGkZSUVOI93bp1w2QylXg8+OCDTqpYpGKYTCZ6N4tg/hPd6NmrH8/xMB1y3+f1whEc84yGvCxYPQk+SoTJ/WDz91CY7+yyRUSczqktMn369GH48OG0b9+ewsJCnnnmGbZs2cK2bdvw8/MDrEGmcePGvPLKK/bjfH19S926ohYZcUVHMs7wzznbmb3pKCYsXOe9g2fClhObvtg+yR5+odDmDmh7J9SIdWq9IiKO5pJdS+np6YSFhbF48WKuueYawBpkWrVqxTvvvFOucyrIiCtbufc4L83cyo6UbACuCstjXN31xOz9Dk6lWN9kMkOj66zdTg17glmrbouI63OJrqU/y8zMBCA4uOStp1999RW1atWiefPmjB07ltOnT1/0HHl5eWRlZZV4iLiqjvVDmP3oVbx6QzOCfDxYmubF1as78WjkFxzv/1+o1xUMC+z8GabeBO+1gt/fhlPpzi5dRKRSVJkWGYvFwsCBA8nIyGDp0qX27ZMmTaJOnTpERUWxadMmnn76aTp06MCPP/54wfO89NJLvPzyy+dtV4uMuLqTOfm8/etOvlp1AIsB3h5mHu7WkAeaFeG14XPY8CXkWv8zgJsnNL3B2koT21G3cIuIy3G5rqWHHnqIuXPnsnTpUqKjoy/6vt9++42ePXuye/duGjRocN7+vLw88vLy7K+zsrKIiYlRkJFqY9uRLF6atZXV+04AEF3Th+f6N6F340BMW6fBmk/gyPqzBwTXh4ThkDAMgus5qWoRkbJxqSDzyCOPMGPGDJYsWUK9en/9QZuTk4O/vz8///wzvXv3vuS5NUZGqiPDMJi96Sj/nLOdo5m5AHRpGMKLA5rRODwAjvxhDTRbfoCCc7piYztZb+NuOgh8ajildhGR0nCJIGMYBo8++ijTpk1j0aJFNGrU6JLHLFu2jKuuuoqNGzeSkJBwyfcryEh1djq/kAmL9vCfJXvJL7TgZjZxR6c6jO7VmCAfD8g7BdtnwaZvYO9iwPbP3c0L4vpCy1usA4TdPJx6HSIif+YSQebhhx9m6tSpzJgxg7i4OPv2oKAgfHx82LNnD1OnTqVfv36EhISwadMmxowZQ3R0NIsXLy7V11CQkSvBweOnee2nbfyyLRWAYD9P/t47jpvaxeBmto2PyTwMm7+Djd9A+vazB/vWghZDIeFmiGqt8TQiUiW4RJAxXeQDc/Lkydx5550kJydz2223sWXLFnJycoiJiWHw4ME899xzmkdG5AKW7Ezn5Vlb2ZOeA0CL2kH8vU8cVzWsdfbfm2FAyiZroNn8HeScc4dTrThoeTO0GAY1YpxwBSIiVi4RZCqDgoxcaQqKLHy2fD/vzt9Fdl4hAO3r1mTMtY3p3KBWyTcXFcKe36xdTzt+gsJc2w4T1L3K2vXUdCB4BVTuRYjIFU9BxkZBRq5U6dl5fLhwN1NXHyS/0AJAx/rBjOnVmMT6IecfkJsJ22ZaW2oOnJ0CAXcfaHK99c6n+t3Azb1yLkBErmgKMjYKMnKlS8nM5aNFu/lmdTL5RdZA06VhCI9f25i2dYIvfNDJA7D5W9j4Pzi+6+x2/3DrStwth1tX5hYRqSAKMjYKMiJWhzPO8OHC3Xy3NpmCIus/+2sahzKmVyNax9a
88EGGAYfXW7ueNn8PZ06c3RfWzBpoWtwEgZGVcAUiciVRkLFRkBEpKfnEaWugWXeIIov1n3+P+DDG9GpMi+igix9YmA+758PGr61LIhTZVt82ma1dTgnDrV1Qnn4VfxEiUu0pyNgoyIhc2IHjObz/225+XH8IW56hV5NwxlzbiGZRfxFoAM6chK3TrF1PySvPbvfwhYBIa5gpfnj4gqc/ePratvnbtv3FezyKt/vodnCRK5SCjI2CjMhf23csh/cX7GL6hsP2QNOnWQSjr21EfEQp/s2c2AubvrW21Jzc7+DqTOcEnT8HH1soimwJTQbodnGRakZBxkZBRqR0dqed4r0Fu5i16QjFnwr9EyIZ3bMRjcJLcfu1YUD6DmtrTf5pyD9lXR4hP+fso8C2Pd+2vaB4X/F223sKLr7C/UXVbmtdKLPJQK0pJVINKMjYKMiIlM3O1GzeXbCLnzYdBaw9OwMSovhbz0Y0DPOvnCIslrMh6M9h59xwdOakdR6cA8uxL78AEJFgDTVNB0GthpVTs4g4lIKMjYKMSPlsP5rFu/N38fPWFADMJhjUqjaP9mxEvVpVbEBvdirsmA3bZsD+38GwnN0X1tQWam6A0HiNuRFxEQoyNgoyIpdny+FM3pm/i/nbres4uZlNDG5dm7/1aERsiK+Tq7uAnGPWWYq3z4S9i8BSeHZfrcbWrqemN1jnwVGoEamyFGRsFGREHGPToQzemb+L33akAeBuNjG0bTSjujckJrgKBhqwdj0lzbXOWLxnwdlbxgFq1rO11AyEqDYKNSJVjIKMjYKMiGP9cfAk4+fvYslO62KTHm4mbmoXwyPdGxJVw8fJ1f2F3CzYOQ+2TbfOh2NfVwoIirUGmiYDIbo9mM1OK1NErBRkbBRkRCrGugMnGP/rLpbuPgaAp5uZm9vH8EDX+kTXrKItNMXyTsHuX61janb+Yh1QXCwg8mz3U2xHMLs5r06RK5iCjI2CjEjFWr3vBON/3cmKvccB66Dg65pGcGeXuiTWC8ZU1btsCs7A7gW2UPMz5GWd3ecXap2jpukNUOcqLZgpUokUZGwUZEQqx/I9x/ho4R57Cw1AfEQAd3Wpyw2tauPt4QItG4V51gHC22ZY74LKzTy7zycY4vtD4z4Q0wH8w5xWpsiVQEHGRkFGpHLtTM1myvL9/Lj+ELkF1tuga/p6cEuHWG7rWKdqj6M5V2E+7F9iHSi8YzacPl5yf4061kATk2gdVxPeXC02Ig6kIGOjICPiHBmn8/l2bTKfLT/A4YwzgPXW7T7NrN1O7erUrPrdTsWKCuHAMtg+y/pn2nZKTMAH1mUTotpATHuI7mANOX61nFKuSHWgIGOjICPiXEUWg1+3pTJl+T5W7j1h3968diB3dq7H9QmRrtHtdK7cTDi8DpLXwKHVcGhNyW6oYjXrWQNNdHvrn2HN1GojUkoKMjYKMiJVx/ajWXy2fD/T/jhMXqG12ynEz5MRibGM6FiH8EBvJ1dYThYLHN8Fyasg2RZs0nec/z4PP6jd5mywie4AfiGVX6+IC1CQsVGQEal6TuTk882ag3yx4gBHM63zubibTfRrEcmdXerSJramkyt0gDMZcHjtOa02a0veEVUsuMGfWm2a6pZvKb8zGdbJHb2DnF3JZVOQsVGQEam6Coss/LItlSnL9rN6/9lup5YxNbirc136tYjE072aTE5nscCxJFuLzWprwDmWdP77PP1trTa2cTZhTazjb9w8wd0b3Dw0C7GclZ1iXTT1wHI4uAJSt4LJDPWuhmaDIX6Ay7b6KcjYKMiIuIYthzOZsnw/MzccIb/I2u0UGuBl7XZKrENogJeTK6wAp0/YxtrYws2hdZCffYmDTODuZXt4g9s5z91tYcfd6xLbvc85R/F5PM++1+wOZg/bn+7WFiK3c1+f83DzsO63b/PQzMgVxTDg5D5bcFlhHXh+ct9fH2Nyg3rXWENNkwHgG1w5tTqAgoyNgoyIazl2Ko9vVh/
ki5UHSM3KA6zLIAxIiOLOLnVJiK7h3AIrkqXIOrameJxN8mo4uR8sBc6urIxM54Qc93OCjsf5wcjdG+p2sbYcaHmIkiwWSN9+tsXlwHI4lfKnN5kgojnEdoY6nSG2k3Wm6q3TYes0SNl0zlvdoH5XaDrIJUKNgoyNgoyIayoosjB3SwpTlu1j/cEM+/Y2sTW4q0s9+jSPwMPtCvmlZ7FAUZ51fajCfNufebZteWdfFz8vyv/TtuLt577/YufJt64YfrFH0bmvHRyw/CMgvh/EXw91r7a2Dl1Jigrg6EZrS8uBFdauotyMku8xe1i7Hut0toaXmA7gU+Pi5zy+x7q+2NbpFwg13aDZIOv3uwqGGgUZGwUZEde3MTmDKcv3M3vTEQqKrB9Z4YFe3N6xDje3j62e3U6uwDDAsJwTcgqsrUrFIcdSePZ10bmvC84ek3PMujTEznklB0N7BUHj3tDkemjYCzz9nHedFSX/tLXl7aCtm+jQWig4XfI9Hn7WsFLc2hLdDjzKOank8T3WVppt0yFl89ntZneo19U2pqZ/lQk1CjI2CjIi1Udadi5TVx3ky5UHOXbK2u3kZjbRPS6UIW2i6dEkDC933fHjkgrzYd8S2DELdsyBnLSz+9y9oUFPa6hp3KfK/KItszMn4eAqOGjrJjqy4fxWLZ+atm6iTtbwEpFg7YZztOJQs3U6pP4p1NTvdjbU+DjvDkIFGRsFGZHqJ6+wiDmbj/LZ8gNsSM6wb6/h68ENLaMY0jaaFrWDXGfmYCnJUmRtqdg+y/rIOHB2n8nt7Jia+P4QVNt5df6VgjNwbBekJ1kHch9Ybr2j6M8zQgdEWQNLnU5QpwvUiqv8cULHdsO24lCz5ex2s8c5oaZfpYcaBRkbBRmR6m13WjbfrzvMtD8O2QcHAzQO92dIm2gGt65NmKtOtCfW7qvULbB9tnXNq3N/0YJ1WYgm11uDTWjjyq/vzElI32m9lT49CY7ttP6ZcZDzQgtASENrF1GdLtbwUqNO1bqd/tiuswOF07ae3W72gAbdrQOFKynUKMjYKMiIXBmKLAZLdx/jh3WHmLc1xT5zsNkEXRuHMqRtNL2ahLvecghS0om9sOMna7BJXkWJsFArzhZqroeo1o4LCIYB2UdLBpXiP8/tAvszn5rWmiJbnh3jEhDumJoqQ/rOswOFzws1PawDheP6/fVg48ugIGOjICNy5cnKLeCnTUf5ft0h1h04ad8e6O3OgJZRDG0bTauYGup6cnXZqZBkCzX7lpQcbxIYbe16anK9dcxJada4shRZb3dPT7K1sNhaWo7tuvCszPavVRtqNYbQuHP+jLMuGlpd/o6lJ1kDzbbpkLbt7PbiUHP1ExCb6NAvqSBjoyAjcmXbm36KH9cf5sf1hzhiWw4BoH6oH0PbRnNj62gigtT15PLOZMCuX62DhXfNt86lUswnGOL6WudOqd/duu347pJhJX2ndb2sovwLn9/kBsH1rAEltPE5fzYGr4AKv7wqpTjUbJ1mnecG4Pbp1q4nB1KQsVGQEREAi8Vgxd7jfL/uEHO3HCW34GzXU5eGtRjaNprezSLU9VQdFJyBPQutY2qS5ljHsRRz97aGFcNy4WPdvSGkUcmwEhoPwfWtsyBLSWk7rOGxyxiHr+yuIGOjICMif5adW8DczSl8v/4Qq/edXeMpwMud61tGMrRtNG1ia6rrqTooKrTe7lw8WDjrsHW7d9CfWlds3UI1YrVoZxWhIGOjICMif+XA8Rx+sHU9HTp5xr69Xi0/hrSpzeA20dSuUc4JyKRqMQzreBfvIPAPqz7jV6opBRkbBRkRKQ2LxWDVvhP8sP4QczYf5XR+EWD9Xde5QQhD2kTTp3kEvp6ObT4XkQtTkLFRkBGRssrJK2TulhR+WHeIFXuP27f7ebrRt0Uk1ydE0qVhrStnrScRJ1CQsVGQEZHLkXziNNP+OMz36w5x8MTZdXBq+nrQp3kkAxIiSawfgptZ3RQijqQ
gY6MgIyKOYBgGaw+cZOaGI8zdcpRjp87eplvL34t+LSK4PiGKdnVqYlaoEblsCjI2CjIi4miFRRZW7TvB7E1HmLslhYzTZydiiwj0pn+CtftJk+6JlJ+CjI2CjIhUpIIiC0t3H2P2xqP8sjWF7LxC+77omj70T4hkQEIUzaICFWpEykBBxkZBRkQqS25BEUt2pjN701Hmb0+13/kE1tu5r0+I5PqEKOIirrCZYEXKQUHGRkFGRJzhTH4RC5PSmL3pCAu2p9kXsQRoFObP9QlRXN8ykgah/k6sUqTqUpCxUZAREWc7lVfIgu2pzNp4lCU708kvOhtqmkYGcn1La/dTTLCvE6sUqVoUZGwUZESkKsk8U8Cv21KZtfEIy3Yfo9By9iO4ZXQQA1pG0a9FJFGaTViucAoyNgoyIlJVnczJ5+etKczedIQVe45zTqahXZ2aXJ8QSb+ESMICtDq3XHkUZGwUZETEFaRn5zF3y1FmbzzKmgMnKP5kNpkgsV4w1ydE0ad5BLX8tQKzXBkUZGwUZETE1aRk5vLT5qPM2niEDckZ9u1mE3RuUIvrEyLp3SyCmn6ezitSpIIpyNgoyIiIK0s+cZo5m4/y0+ajbDqUad/ubjbRpaE11FzXNIIgXw8nVinieAoyNgoyIlJdHDiew+xNR/lp01G2Hc2yb/dwM3FNo1D6J0RybdNwArwVasT1KcjYKMiISHW0J/0UP9lCTVJqtn27p7uZbo2toaZXk3D8vNydWKVI+SnI2CjIiEh1tys1m1mbjjJ70xH2pufYt3t7mOkRH0b/FlH0iA/Dx9PNiVWKlI2CjI2CjIhcKQzDYEdKNj/ZQs3+46ft+3w93ejZJJz+LSLpFheKt4dCjVRtCjI2CjIiciUyDIOtR7KYbQs1h06ese/z93Ln2qbWUHN141p4uSvUSNWjIGOjICMiVzrDMNh0KJPZm47w06ajHMnMte8L8Hand7MI+idEclXDWni4mZ1YqchZpf397dS/sePGjaN9+/YEBAQQFhbGoEGDSEpKKvGe3NxcRo0aRUhICP7+/gwZMoTU1FQnVSwi4npMJhMtY2rwbP+mLH26Bz881Jm7utQlPNCL7NxCvl93iLsmr6H96/N5+vtNLEpKI/+cRS5FqjKntsj06dOH4cOH0759ewoLC3nmmWfYsmUL27Ztw8/PD4CHHnqIn376iSlTphAUFMQjjzyC2Wxm2bJlpfoaapEREbkwi8Vg7YGTzN50hDmbUzh2Ks++L8DbnV5NwunTPIKujTWmRiqfS3YtpaenExYWxuLFi7nmmmvIzMwkNDSUqVOnMnToUAB27NhBkyZNWLFiBR07drzkORVkREQurchisGrfceZsPsq8ramkZ58NNT4ebnSPD6VP80i6x4VqnhqpFKX9/V2lJhjIzLTOWhkcHAzAunXrKCgooFevXvb3xMfHExsbe9Egk5eXR17e2X+AWVlZ571HRERKcjOb6NygFp0b1OKVgc1Zf/Akc7ek8POWFA5nnGHO5hTmbE7B083M1Y1q0ad5BNc2DaeGr5ZJEOeqMkHGYrEwevRounTpQvPmzQFISUnB09OTGjVqlHhveHg4KSkpFzzPuHHjePnllyu6XBGRastsNtGubjDt6gbzXP8mbDmcxdwtR/l5Swp7j+WwYEcaC3ak4WY20al+CH2aR3Bds3Ct0i1OUWWCzKhRo9iyZQtLly69rPOMHTuWxx9/3P46KyuLmJiYyy1PROSKZDKZaBEdRIvoIJ7qHcfO1FP8vCWFuVuOsiMlm6W7j7F09zGen7GFdnVq0qd5JL2bhRNd09fZpcsVokoEmUceeYTZs2ezZMkSoqOj7dsjIiLIz88nIyOjRKtMamoqERERFzyXl5cXXl5a5l5ExNFMJhNxEQHERQTwWK9G7D+Ww89bU5i7JYWNyRms2X+SNftP8ursbSREB9GneQR9mkVQP9Tf2aVLNebUwb6GYfDoo48ybdo0Fi1aRKNGjUrsLx7
s+/XXXzNkyBAAkpKSiI+P12BfEZEq5EjGGebZQs2a/Sc49zdLXHgAfZpH0LdFBHHhAZhMJucVKi7DJe5aevjhh5k6dSozZswgLi7Ovj0oKAgfHx/Aevv1nDlzmDJlCoGBgTz66KMALF++vFRfQ0FGRKRypWfn8cs260DhFXuOU2g5+2umbogvfZpH0rd5BAnRQQo1clEuEWQu9hd48uTJ3HnnnYB1QrwnnniCr7/+mry8PHr37s1HH3100a6lP1OQERFxnozT+czfnsbPW46yZNexEhPtRQV507t5BL2ahNO+bjCe7ppVWM5yiSBTGRRkRESqhlN5hSzckcbPW1JYmJTG6fwi+z4/Tze6NKxF9/gwusWFEhnk48RKpSpQkLFRkBERqXpyC4pYsjOdX7alsigpvcSswgBNIgPpHhdK9/gwWsfUwF1rQF1xFGRsFGRERKo2i8W6UvfCpDQWJqWxITmjxGDhQG93rmkcSve4MLrGhVLLX3emXgkUZGwUZEREXMvxU3ks2ZXOwh3pLN6ZTuaZAvs+kwkSagfRPT6M7nFhtKgdhNmsAcPVkYKMjYKMiIjrKiyysPFQBgt3pLMwKY2tR0ouOxPi50nXOGtrzTWNQgny1TpQ1YWCjI2CjIhI9ZGalcuipDQW7khn6e5jnMortO8zm6BtnZp0i7O21jSJ1Jw1rkxBxkZBRkSkesovtLD2wAkWJaWzcEcau9JOldgfEehN9/hQusWF0aVhLfy9qsRk9lJKCjI2CjIiIleG5BOnWbQznUU70li25xi5BWfnrPFwM9GhXjDd46y3dzcI9VdrTRWnIGOjICMicuXJLShi5d7jLEpK57cdaRw8cbrE/uiaPnSLC6Vb4zA6NwzB11OtNVWNgoyNgoyIyJXNMAz2Hcthoa0LavW+E+QXnW2t8XQz06FesDXYqLWmylCQsVGQERGRc+XkFbJiz3EW7UxjUVI6h06eKbFfrTVVg4KMjYKMiIhcjGEY7EnPYVFSGot3prNqr1prqgoFGRsFGRERKa3T+dbWmoVJaq1xNgUZGwUZEREpD7XWOJeCjI2CjIiIOEJxa82ipHQW7Uwj+YRaayqSgoyNgoyIiDiaYRjsPZZjDTVJaZdorQmjQaifWmvKSEHGRkFGREQq2qVaa2KCfehuWzqhY/0QfDzdnFSp61CQsVGQERGRynSp1hovdzOdGoTYg01siK8Tq626FGRsFGRERMSZTucXsnz32TuhDmeUbK2pH+pnDzXt69XEy12tNaAgY6cgIyIiVYVhGOxKO8XCHWksTEpj7f6TFFrO/hr29XSjS8Na9jWhomr4OLFa51KQsVGQERGRqiort4Blu46xMCmNhUnppGfnldgfHxFAt7gwuseF0qZOTTzczE6qtPIpyNgoyIiIiCswDIOtR7JYZAs1fxw8yTmNNQR4u3NNI+ucNV3jQgkL8HZesZVAQcZGQUZERFzRyZx8luxKZ1FSOot3pnMiJ7/E/ha1g+geF0q3+DBaRtfAzVy9bu9WkLFRkBEREVdXZDHYdCiDhbY7oTYdyiyxv6avB10bh9I9PoyrG4US7OfppEodR0HGRkFGRESqm/TsPBbvTGdhUhpLdqaTnVtYYn98RAAd64fQsX4wHeqFuGSwUZCxUZAREZHqrLDIwvqDGdYBwzvS2JGSfd574sID6Fg/mI71Q+hQL5gQfy8nVFo2CjI2CjIiInIlOX4qj9X7TrBy73FW7j1BUurFg01i/RASq2iwUZCxUZAREZEr2bnBZtW+ExdssWkc7m/rirK22NSqAsFGQcZGQUZEROSsEzn5rN5nba1Zuff4RYNNYj1rsEms75xgoyBjoyAjIiJycaUJNo3CzrbYVFawUZCxUZAREREpvZM5+ayyj7G5cLBpGOZvHzycWC+E0ADHBxsFGRsFGRERkfI7mZPP6v1nBw9vP5p13nvG39ySwa2jHfp1S/v7292hX1VERESqlZp+nvRuFkHvZhFAyWCzau8
Jtqdk0aJ2kNPqU5ARERGRUvtzsMk4nU+Qj4fT6lGQERERkXKr4evcWYOvnPXARUREpNpRkBERERGXpSAjIiIiLktBRkRERFyWgoyIiIi4LAUZERERcVkKMiIiIuKyFGRERETEZSnIiIiIiMtSkBERERGXpSAjIiIiLktBRkRERFyWgoyIiIi4rGq/+rVhGABkZWU5uRIREREpreLf28W/xy+m2geZ7OxsAGJiYpxciYiIiJRVdnY2QUFBF91vMi4VdVycxWLhyJEjBAQEYDKZHHberKwsYmJiSE5OJjAw0GHnrUqq+zXq+lxfdb/G6n59UP2vUddXfoZhkJ2dTVRUFGbzxUfCVPsWGbPZTHR0dIWdPzAwsFr+5TxXdb9GXZ/rq+7XWN2vD6r/Ner6yuevWmKKabCviIiIuCwFGREREXFZCjLl5OXlxYsvvoiXl5ezS6kw1f0adX2ur7pfY3W/Pqj+16jrq3jVfrCviIiIVF9qkRERERGXpSAjIiIiLktBRkRERFyWgoyIiIi4LAWZcvrwww+pW7cu3t7eJCYmsnr1ameX5BDjxo2jffv2BAQEEBYWxqBBg0hKSnJ2WRXmjTfewGQyMXr0aGeX4lCHDx/mtttuIyQkBB8fH1q0aMHatWudXZZDFBUV8fzzz1OvXj18fHxo0KABr7766iXXY6nKlixZwoABA4iKisJkMjF9+vQS+w3D4IUXXiAyMhIfHx969erFrl27nFNsOfzV9RUUFPD000/TokUL/Pz8iIqK4o477uDIkSPOK7gcLvUzPNeDDz6IyWTinXfeqbT6Lldprm/79u0MHDiQoKAg/Pz8aN++PQcPHqzw2hRkyuF///sfjz/+OC+++CLr16+nZcuW9O7dm7S0NGeXdtkWL17MqFGjWLlyJb/++isFBQVcd9115OTkOLs0h1uzZg3/+c9/SEhIcHYpDnXy5Em6dOmCh4cHc+fOZdu2bfz73/+mZs2azi7NId58800mTJjABx98wPbt23nzzTf517/+xfvvv+/s0sotJyeHli1b8uGHH15w/7/+9S/ee+89Jk6cyKpVq/Dz86N3797k5uZWcqXl81fXd/r0adavX8/zzz/P+vXr+fHHH0lKSmLgwIFOqLT8LvUzLDZt2jRWrlxJVFRUJVXmGJe6vj179nDVVVcRHx/PokWL2LRpE88//zze3t4VX5whZdahQwdj1KhR9tdFRUVGVFSUMW7cOCdWVTHS0tIMwFi8eLGzS3Go7Oxso1GjRsavv/5qdO3a1XjsscecXZLDPP3008ZVV13l7DIqTP/+/Y277767xLYbb7zRGDFihJMqcizAmDZtmv21xWIxIiIijLfeesu+LSMjw/Dy8jK+/vprJ1R4ef58fReyevVqAzAOHDhQOUU52MWu8dChQ0bt2rWNLVu2GHXq1DHGjx9f6bU5woWu7+abbzZuu+02p9SjFpkyys/PZ926dfTq1cu+zWw206tXL1asWOHEyipGZmYmAMHBwU6uxLFGjRpF//79S/wcq4uZM2fSrl07brrpJsLCwmjdujUff/yxs8tymM6dO7NgwQJ27twJwMaNG1m6dCl9+/Z1cmUVY9++faSkpJT4uxoUFERiYmK1/MwB6+eOyWSiRo0azi7FYSwWC7fffjtPPfUUzZo1c3Y5DmWxWPjpp59o3LgxvXv3JiwsjMTExL/sXnMkBZkyOnbsGEVFRYSHh5fYHh4eTkpKipOqqhgWi4XRo0fTpUsXmjdv7uxyHOabb75h/fr1jBs3ztmlVIi9e/cyYcIEGjVqxLx583jooYf429/+xmeffebs0hziH//4B8OHDyc+Ph4PDw9at27N6NGjGTFihLNLqxDFnytXwmcOQG5uLk8//TS33HJLtVpk8c0338Td3Z2//e1vzi7F4dLS0jh16hRvvPEGffr04ZdffmHw4MHceOONLF68uMK/frVf/VrKb9SoUWzZsoWlS5c6uxSHSU5O5rHHHuPXX3+tnL5
bJ7BYLLRr145//vOfALRu3ZotW7YwceJERo4c6eTqLt+3337LV199xdSpU2nWrBkbNmxg9OjRREVFVYvru5IVFBQwbNgwDMNgwoQJzi7HYdatW8e7777L+vXrMZlMzi7H4SwWCwA33HADY8aMAaBVq1YsX76ciRMn0rVr1wr9+mqRKaNatWrh5uZGampqie2pqalEREQ4qSrHe+SRR5g9ezYLFy4kOjra2eU4zLp160hLS6NNmza4u7vj7u7O4sWLee+993B3d6eoqMjZJV62yMhImjZtWmJbkyZNKuXugcrw1FNP2VtlWrRowe23386YMWOqbQtb8edKdf/MKQ4xBw4c4Ndff61WrTG///47aWlpxMbG2j93Dhw4wBNPPEHdunWdXd5lq1WrFu7u7k773FGQKSNPT0/atm3LggUL7NssFgsLFiygU6dOTqzMMQzD4JFHHmHatGn89ttv1KtXz9klOVTPnj3ZvHkzGzZssD/atWvHiBEj2LBhA25ubs4u8bJ16dLlvFvmd+7cSZ06dZxUkWOdPn0as7nkR5ebm5v9f4XVTb169YiIiCjxmZOVlcWqVauqxWcOnA0xu3btYv78+YSEhDi7JIe6/fbb2bRpU4nPnaioKJ566inmzZvn7PIum6enJ+3bt3fa5466lsrh8ccfZ+TIkbRr144OHTrwzjvvkJOTw1133eXs0i7bqFGjmDp1KjNmzCAgIMDeBx8UFISPj4+Tq7t8AQEB54338fPzIyQkpNqMAxozZgydO3fmn//8J8OGDWP16tVMmjSJSZMmObs0hxgwYACvv/46sbGxNGvWjD/++IO3336bu+++29mlldupU6fYvXu3/fW+ffvYsGEDwcHBxMbGMnr0aF577TUaNWpEvXr1eP7554mKimLQoEHOK7oM/ur6IiMjGTp0KOvXr2f27NkUFRXZP3eCg4Px9PR0Vtllcqmf4Z/DmYeHBxEREcTFxVV2qeVyqet76qmnuPnmm7nmmmvo3r07P//8M7NmzWLRokUVX5xT7pWqBt5//30jNjbW8PT0NDp06GCsXLnS2SU5BHDBx+TJk51dWoWpbrdfG4ZhzJo1y2jevLnh5eVlxMfHG5MmTXJ2SQ6TlZVlPPbYY0ZsbKzh7e1t1K9f33j22WeNvLw8Z5dWbgsXLrzgv7uRI0cahmG9Bfv55583wsPDDS8vL6Nnz55GUlKSc4sug7+6vn379l30c2fhwoXOLr3ULvUz/DNXu/26NNf3ySefGA0bNjS8vb2Nli1bGtOnT6+U2kyG4cLTYYqIiMgVTWNkRERExGUpyIiIiIjLUpARERERl6UgIyIiIi5LQUZERERcloKMiIiIuCwFGREREXFZCjIicsUxmUxMnz7d2WWIiAMoyIhIpbrzzjsxmUznPfr06ePs0kTEBWmtJRGpdH369GHy5Mkltnl5eTmpGhFxZWqREZFK5+XlRURERIlHzZo1AWu3z4QJE+jbty8+Pj7Ur1+f77//vsTxmzdvpkePHvj4+BASEsL999/PqVOnSrzn008/pVmzZnh5eREZGckjjzxSYv+xY8cYPHgwvr6+NGrUiJkzZ1bsRYtIhVCQEZEq5/nnn2fIkCFs3LiRESNGMHz4cLZv3w5ATk4OvXv3pmbNmqxZs4bvvvuO+fPnlwgqEyZMYNSoUdx///1s3ryZmTNn0rBhwxJf4+WXX2bYsGFs2rSJfv36MWLECE6cOFGp1ykiDlApS1OKiNiMHDnScHNzM/z8/Eo8Xn/9dcMwrCuwP/jggyWOSUxMNB566CHDMAxj0qRJRs2aNY1Tp07Z9//000+G2Ww2UlJSDMMwjKioKOPZZ5+9aA2A8dxzz9lfnzp1ygCMuXPnOuw6RaRyaIyMiFS67t27M2HChBLbgoOD7c87depUYl+nTp3YsGEDANu3b6dly5b4+fnZ93fp0gWLxUJSUhImk4kjR47Qs2fPv6whISHB/tzPz4/AwEDS0tLKe0ki4iQKMiJ
S6fz8/M7r6nEUHx+fUr3Pw8OjxGuTyYTFYqmIkkSkAmmMjIhUOStXrjzvdZMmTQBo0qQJGzduJCcnx75/2bJlmM1m4uLiCAgIoG7duixYsKBSaxYR51CLjIhUury8PFJSUkpsc3d3p1atWgB89913tGvXjquuuoqvvvqK1atX88knnwAwYsQIXnzxRUaOHMlLL71Eeno6jz76KLfffjvh4eEAvPTSSzz44IOEhYXRt29fsrOzWbZsGY8++mjlXqiIVDgFGRGpdD///DORkZEltsXFxbFjxw7AekfRN998w8MPP0xkZCRff/01TZs2BcDX15d58+bx2GOP0b59e3x9fRkyZAhvv/22/VwjR44kNzeX8ePH8+STT1KrVi2GDh1aeRcoIpXGZBiG4ewiRESKmUwmpk2bxqBBg5xdioi4AI2REREREZelICMiIiIuS2NkRKRKUW+3iJSFWmRERETEZSnIiIiIiMtSkBERERGXpSAjIiIiLktBRkRERFyWgoyIiIi4LAUZERERcVkKMiIiIuKyFGRERETEZf0/KmQhg6wM3RMAAAAASUVORK5CYII=",
+      "text/plain": [
+       "<Figure size 640x480 with 1 Axes>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
    "source": [
     "import matplotlib.pyplot as plt\n",
     "\n",
-    "plt.plot(range(n_epochs), train_loss_list)\n",
+    "plt.plot(range(n_epochs_actual), train_loss_list)\n",
+    "plt.plot(range(n_epochs_actual), valid_loss_list)\n",
     "plt.xlabel(\"Epoch\")\n",
     "plt.ylabel(\"Loss\")\n",
     "plt.title(\"Performance of Model 1\")\n",
@@ -446,10 +537,31 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 8,
    "id": "e93efdfc",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Test Loss: 22.019259\n",
+      "\n",
+      "Test Accuracy of airplane: 68% (689/1000)\n",
+      "Test Accuracy of automobile: 83% (833/1000)\n",
+      "Test Accuracy of  bird: 40% (406/1000)\n",
+      "Test Accuracy of   cat: 45% (453/1000)\n",
+      "Test Accuracy of  deer: 52% (521/1000)\n",
+      "Test Accuracy of   dog: 48% (488/1000)\n",
+      "Test Accuracy of  frog: 74% (747/1000)\n",
+      "Test Accuracy of horse: 62% (622/1000)\n",
+      "Test Accuracy of  ship: 78% (781/1000)\n",
+      "Test Accuracy of truck: 63% (639/1000)\n",
+      "\n",
+      "Test Accuracy (Overall): 61% (6179/10000)\n"
+     ]
+    }
+   ],
    "source": [
     "model.load_state_dict(torch.load(\"./model_cifar.pt\"))\n",
     "\n",
@@ -513,6 +625,15 @@
     ")"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Snapshot model 1's per-class correct counts before the quantization section\n",
+    "# recomputes them. Use a copy so the snapshot cannot alias a list that is\n",
+    "# later modified in place.\n",
+    "Recup_res = class_correct.copy()"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "944991a2",
@@ -530,6 +651,195 @@
     "Compare the results obtained with this new network to those obtained previously."
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import torch.nn as nn\n",
+    "import torch.nn.functional as F\n",
+    "\n",
+    "\n",
+    "class Net(nn.Module):\n",
+    "    \"\"\"LeNet-style CNN: two conv/pool stages followed by three FC layers.\"\"\"\n",
+    "\n",
+    "    def __init__(self):\n",
+    "        super().__init__()\n",
+    "        # Convolutional feature extractor (shared 2x2 max-pool)\n",
+    "        self.conv1 = nn.Conv2d(3, 6, 5)\n",
+    "        self.pool = nn.MaxPool2d(2, 2)\n",
+    "        self.conv2 = nn.Conv2d(6, 16, 5)\n",
+    "        # Classifier head down to 10 class scores\n",
+    "        self.fc1 = nn.Linear(16 * 5 * 5, 120)\n",
+    "        self.fc2 = nn.Linear(120, 84)\n",
+    "        self.fc3 = nn.Linear(84, 10)\n",
+    "\n",
+    "    def forward(self, x):\n",
+    "        x = self.pool(F.relu(self.conv1(x)))\n",
+    "        x = self.pool(F.relu(self.conv2(x)))\n",
+    "        x = x.view(-1, 16 * 5 * 5)  # flatten feature maps to (batch, 400)\n",
+    "        x = F.relu(self.fc1(x))\n",
+    "        x = F.relu(self.fc2(x))\n",
+    "        return self.fc3(x)  # raw logits (loss applies softmax)\n",
+    "\n",
+    "\n",
+    "# instantiate the second CNN and show its layout\n",
+    "model2 = Net()\n",
+    "print(model2)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import torch.optim as optim\n",
+    "\n",
+    "criterion = nn.CrossEntropyLoss()  # specify loss function\n",
+    "# BUG FIX: optimize the parameters of model2 (the network trained in this\n",
+    "# loop), not the previously trained `model` — otherwise model2 never learns.\n",
+    "optimizer = optim.SGD(model2.parameters(), lr=0.01)  # specify optimizer\n",
+    "\n",
+    "n_epochs = 30  # maximum number of epochs to train the model\n",
+    "train_loss_list = []  # list to store loss to visualize\n",
+    "valid_loss_min = np.inf  # track change in validation loss (np.Inf is removed in NumPy 2.0)\n",
+    "\n",
+    "valid_loss_list = []\n",
+    "out = 0  # early-stopping counter: epochs without validation improvement\n",
+    "n_epochs_actual = 0\n",
+    "# Train until n_epochs is reached or validation loss fails to improve 3 times\n",
+    "while n_epochs_actual < n_epochs and out < 3:\n",
+    "\n",
+    "    # Keep track of training and validation loss\n",
+    "    train_loss = 0.0\n",
+    "    valid_loss = 0.0\n",
+    "\n",
+    "    # Train the model\n",
+    "    model2.train()\n",
+    "    for data, target in train_loader:\n",
+    "        # Move tensors to GPU if CUDA is available\n",
+    "        if train_on_gpu:\n",
+    "            data, target = data.cuda(), target.cuda()\n",
+    "        # Clear the gradients of all optimized variables\n",
+    "        optimizer.zero_grad()\n",
+    "        # Forward pass: compute predicted outputs by passing inputs to the model\n",
+    "        output = model2(data)\n",
+    "        # Calculate the batch loss\n",
+    "        loss = criterion(output, target)\n",
+    "        # Backward pass: compute gradient of the loss with respect to model parameters\n",
+    "        loss.backward()\n",
+    "        # Perform a single optimization step (parameter update)\n",
+    "        optimizer.step()\n",
+    "        # Update training loss\n",
+    "        train_loss += loss.item() * data.size(0)\n",
+    "\n",
+    "    # Validate the model (no gradients needed during evaluation)\n",
+    "    model2.eval()\n",
+    "    with torch.no_grad():\n",
+    "        for data, target in valid_loader:\n",
+    "            # Move tensors to GPU if CUDA is available\n",
+    "            if train_on_gpu:\n",
+    "                data, target = data.cuda(), target.cuda()\n",
+    "            # Forward pass: compute predicted outputs by passing inputs to the model\n",
+    "            output = model2(data)\n",
+    "            # Calculate the batch loss\n",
+    "            loss = criterion(output, target)\n",
+    "            # Update average validation loss\n",
+    "            valid_loss += loss.item() * data.size(0)\n",
+    "\n",
+    "    # Calculate average losses\n",
+    "    train_loss = train_loss / len(train_loader)\n",
+    "    valid_loss = valid_loss / len(valid_loader)\n",
+    "    train_loss_list.append(train_loss)\n",
+    "    valid_loss_list.append(valid_loss)\n",
+    "\n",
+    "    # Print training/validation statistics\n",
+    "    print(\n",
+    "        \"Epoch: {} \\tTraining Loss: {:.6f} \\tValidation Loss: {:.6f}\".format(\n",
+    "            n_epochs_actual, train_loss, valid_loss\n",
+    "        )\n",
+    "    )\n",
+    "\n",
+    "    # Save model if validation loss has decreased\n",
+    "    if valid_loss <= valid_loss_min:\n",
+    "        print(\n",
+    "            \"Validation loss decreased ({:.6f} --> {:.6f}).  Saving model 2 ...\".format(\n",
+    "                valid_loss_min, valid_loss\n",
+    "            )\n",
+    "        )\n",
+    "        torch.save(model2.state_dict(), \"model2_cifar.pt\")\n",
+    "        valid_loss_min = valid_loss\n",
+    "    else:\n",
+    "        out += 1\n",
+    "\n",
+    "    n_epochs_actual += 1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "model2.load_state_dict(torch.load(\"./model2_cifar.pt\"))\n",
+    "\n",
+    "# track test loss\n",
+    "test_loss = 0.0\n",
+    "# NOTE: this overwrites model 1's per-class counters (snapshot them first)\n",
+    "class_correct = list(0.0 for i in range(10))\n",
+    "class_total = list(0.0 for i in range(10))\n",
+    "\n",
+    "model2.eval()\n",
+    "# iterate over test data\n",
+    "for data, target in test_loader:\n",
+    "    # move tensors to GPU if CUDA is available\n",
+    "    if train_on_gpu:\n",
+    "        data, target = data.cuda(), target.cuda()\n",
+    "    # forward pass: compute predicted outputs by passing inputs to the model\n",
+    "    output = model2(data)\n",
+    "    # calculate the batch loss\n",
+    "    loss = criterion(output, target)\n",
+    "    # update test loss\n",
+    "    test_loss += loss.item() * data.size(0)\n",
+    "    # convert output probabilities to predicted class\n",
+    "    _, pred = torch.max(output, 1)\n",
+    "    # compare predictions to true label\n",
+    "    correct_tensor = pred.eq(target.data.view_as(pred))\n",
+    "    correct = (\n",
+    "        np.squeeze(correct_tensor.numpy())\n",
+    "        if not train_on_gpu\n",
+    "        else np.squeeze(correct_tensor.cpu().numpy())\n",
+    "    )\n",
+    "    # calculate test accuracy for each object class\n",
+    "    # BUG FIX: iterate over the real batch length rather than batch_size so a\n",
+    "    # smaller final batch does not raise an IndexError\n",
+    "    for i in range(target.size(0)):\n",
+    "        label = target.data[i]\n",
+    "        class_correct[label] += correct[i].item()\n",
+    "        class_total[label] += 1\n",
+    "\n",
+    "# average test loss\n",
+    "test_loss = test_loss / len(test_loader)\n",
+    "print(\"Test Loss: {:.6f}\\n\".format(test_loss))\n",
+    "\n",
+    "for i in range(10):\n",
+    "    if class_total[i] > 0:\n",
+    "        print(\n",
+    "            \"Test Accuracy of %5s: %2d%% (%2d/%2d)\"\n",
+    "            % (\n",
+    "                classes[i],\n",
+    "                100 * class_correct[i] / class_total[i],\n",
+    "                np.sum(class_correct[i]),\n",
+    "                np.sum(class_total[i]),\n",
+    "            )\n",
+    "        )\n",
+    "    else:\n",
+    "        print(\"Test Accuracy of %5s: N/A (no training examples)\" % (classes[i]))\n",
+    "\n",
+    "print(\n",
+    "    \"\\nTest Accuracy (Overall): %2d%% (%2d/%2d)\"\n",
+    "    % (\n",
+    "        100.0 * np.sum(class_correct) / np.sum(class_total),\n",
+    "        np.sum(class_correct),\n",
+    "        np.sum(class_total),\n",
+    "    )\n",
+    ")"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "bc381cf4",
@@ -547,10 +857,28 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 10,
    "id": "ef623c26",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "model:  fp32  \t Size (KB): 251.278\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "251278"
+      ]
+     },
+     "execution_count": 10,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
     "import os\n",
     "\n",
@@ -576,18 +904,45 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 11,
    "id": "c4c65d4b",
    "metadata": {},
-   "outputs": [],
-   "source": [
-    "import torch.quantization\n",
-    "\n",
-    "\n",
-    "quantized_model = torch.quantization.quantize_dynamic(model, dtype=torch.qint8)\n",
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "model:  int8  \t Size (KB): 76.522\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "76522"
+      ]
+     },
+     "execution_count": 11,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "import torch.quantization\n",
+    "\n",
+    "\n",
+    "quantized_model = torch.quantization.quantize_dynamic(model, dtype=torch.qint8)\n",
     "print_size_of_model(quantized_model, \"int8\")"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Inspect both architectures side by side; in the quantized model the\n",
+    "# nn.Linear layers should appear as dynamically quantized modules.\n",
+    "print(model)\n",
+    "print(quantized_model)"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "7b108e17",
@@ -596,6 +951,157 @@
     "For each class, compare the classification test accuracy of the initial model and the quantized model. Also give the overall test accuracy for both models."
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Test Loss: 22.036278\n",
+      "\n",
+      "Test Accuracy of airplane: 68% (688/1000)\n",
+      "Test Accuracy of automobile: 83% (835/1000)\n",
+      "Test Accuracy of  bird: 40% (407/1000)\n",
+      "Test Accuracy of   cat: 45% (450/1000)\n",
+      "Test Accuracy of  deer: 52% (521/1000)\n",
+      "Test Accuracy of   dog: 49% (490/1000)\n",
+      "Test Accuracy of  frog: 74% (747/1000)\n",
+      "Test Accuracy of horse: 62% (623/1000)\n",
+      "Test Accuracy of  ship: 78% (780/1000)\n",
+      "Test Accuracy of truck: 64% (640/1000)\n",
+      "\n",
+      "Test Accuracy (Overall): 61% (6181/10000)\n"
+     ]
+    }
+   ],
+   "source": [
+    "\n",
+    "#quantized_model.load_state_dict(torch.load(\"./model_cifar.pt\"))\n",
+    "\n",
+    "# track test loss\n",
+    "test_loss = 0.0\n",
+    "class_correct2 = list(0.0 for i in range(10))\n",
+    "class_total = list(0.0 for i in range(10))\n",
+    "\n",
+    "quantized_model.eval()\n",
+    "# iterate over test data\n",
+    "for data, target in test_loader:\n",
+    "    # move tensors to GPU if CUDA is available\n",
+    "    if train_on_gpu:\n",
+    "        data, target = data.cuda(), target.cuda()\n",
+    "    # forward pass: compute predicted outputs by passing inputs to the model\n",
+    "    output = quantized_model(data)\n",
+    "    # calculate the batch loss\n",
+    "    loss = criterion(output, target)\n",
+    "    # update test loss\n",
+    "    test_loss += loss.item() * data.size(0)\n",
+    "    # convert output probabilities to predicted class\n",
+    "    _, pred = torch.max(output, 1)\n",
+    "    # compare predictions to true label\n",
+    "    correct_tensor = pred.eq(target.data.view_as(pred))\n",
+    "    correct = (\n",
+    "        np.squeeze(correct_tensor.numpy())\n",
+    "        if not train_on_gpu\n",
+    "        else np.squeeze(correct_tensor.cpu().numpy())\n",
+    "    )\n",
+    "    # calculate test accuracy for each object class\n",
+    "    # BUG FIX: iterate over the real batch length rather than batch_size so a\n",
+    "    # smaller final batch does not raise an IndexError\n",
+    "    for i in range(target.size(0)):\n",
+    "        label = target.data[i]\n",
+    "        class_correct2[label] += correct[i].item()\n",
+    "        class_total[label] += 1\n",
+    "\n",
+    "# average test loss\n",
+    "test_loss = test_loss / len(test_loader)\n",
+    "print(\"Test Loss: {:.6f}\\n\".format(test_loss))\n",
+    "\n",
+    "for i in range(10):\n",
+    "    if class_total[i] > 0:\n",
+    "        print(\n",
+    "            \"Test Accuracy of %5s: %2d%% (%2d/%2d)\"\n",
+    "            % (\n",
+    "                classes[i],\n",
+    "                100 * class_correct2[i] / class_total[i],\n",
+    "                np.sum(class_correct2[i]),\n",
+    "                np.sum(class_total[i]),\n",
+    "            )\n",
+    "        )\n",
+    "    else:\n",
+    "        print(\"Test Accuracy of %5s: N/A (no training examples)\" % (classes[i]))\n",
+    "\n",
+    "print(\n",
+    "    \"\\nTest Accuracy (Overall): %2d%% (%2d/%2d)\"\n",
+    "    % (\n",
+    "        100.0 * np.sum(class_correct2) / np.sum(class_total),\n",
+    "        np.sum(class_correct2),\n",
+    "        np.sum(class_total),\n",
+    "    )\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(Recup_res)\n",
+    "print(class_correct2)\n",
+    "\n",
+    "fig = plt.figure()\n",
+    "# BUG FIX: 'step' is not a valid matplotlib drawstyle (the cell previously\n",
+    "# raised ValueError); valid values include 'steps', 'steps-pre', 'steps-mid'.\n",
+    "plt.plot(classes, class_correct2, label='Quantized', drawstyle='steps')\n",
+    "plt.plot(classes, Recup_res, label='Original', drawstyle='steps')\n",
+    "plt.xlabel('Classes')\n",
+    "fig.autofmt_xdate(rotation=45)\n",
+    "# The y-axis shows correct-prediction counts per class, not a loss\n",
+    "plt.ylabel('Correct predictions per class')\n",
+    "plt.title('Per-class results: original vs quantized model')\n",
+    "plt.legend()  # labels were set but never shown without a legend\n",
+    "plt.show()"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "a0a34b90",
@@ -668,7 +1174,6 @@
   },
   {
    "cell_type": "markdown",
-   "id": "184cfceb",
    "metadata": {},
    "source": [
     "Experiments:\n",
@@ -679,7 +1184,333 @@
     "\n",
     "Experiment with other pre-trained CNN models.\n",
     "\n",
-    "    \n"
+    "    "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import json\n",
+    "from PIL import Image\n",
+    "\n",
+    "# Image to classify with the pre-trained ImageNet model\n",
+    "test_image = \"cheval.jpg\"\n",
+    "\n",
+    "# Load the human-readable ImageNet class names\n",
+    "with open(\"imagenet-simple-labels.json\") as f:\n",
+    "    labels = json.load(f)\n",
+    "\n",
+    "# Preprocess exactly as the model was trained: 224x224, ImageNet mean/std\n",
+    "data_transform = transforms.Compose(\n",
+    "    [\n",
+    "        transforms.Resize((224, 224)),\n",
+    "        transforms.ToTensor(),\n",
+    "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),\n",
+    "    ]\n",
+    ")\n",
+    "\n",
+    "# Load and display the raw image\n",
+    "image = Image.open(test_image)\n",
+    "plt.imshow(image), plt.xticks([]), plt.yticks([])\n",
+    "\n",
+    "# Apply the transformation and add the batch dimension\n",
+    "image = data_transform(image).unsqueeze(0)\n",
+    "\n",
+    "# 'pretrained=True' is deprecated since torchvision 0.13; IMAGENET1K_V1 is\n",
+    "# the weight set that pretrained=True used to load\n",
+    "model = models.resnet50(weights=models.ResNet50_Weights.IMAGENET1K_V1)\n",
+    "# Put dropout/batchnorm layers in evaluation mode\n",
+    "model.eval()\n",
+    "\n",
+    "# 1000-dimensional output; no_grad skips building the autograd graph\n",
+    "with torch.no_grad():\n",
+    "    out = model(image)\n",
+    "# Report the most likely class\n",
+    "print(\"Predicted class is: {}\".format(labels[out.argmax()]))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import json\n",
+    "from PIL import Image\n",
+    "\n",
+    "# Image to classify with the pre-trained ImageNet model\n",
+    "test_image = \"cheval2.jpg\"\n",
+    "\n",
+    "# Load the human-readable ImageNet class names\n",
+    "with open(\"imagenet-simple-labels.json\") as f:\n",
+    "    labels = json.load(f)\n",
+    "\n",
+    "# Preprocess exactly as the model was trained: 224x224, ImageNet mean/std\n",
+    "data_transform = transforms.Compose(\n",
+    "    [\n",
+    "        transforms.Resize((224, 224)),\n",
+    "        transforms.ToTensor(),\n",
+    "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),\n",
+    "    ]\n",
+    ")\n",
+    "\n",
+    "# Load and display the raw image\n",
+    "image = Image.open(test_image)\n",
+    "plt.imshow(image), plt.xticks([]), plt.yticks([])\n",
+    "\n",
+    "# Apply the transformation and add the batch dimension\n",
+    "image = data_transform(image).unsqueeze(0)\n",
+    "\n",
+    "# 'pretrained=True' is deprecated since torchvision 0.13; IMAGENET1K_V1 is\n",
+    "# the weight set that pretrained=True used to load\n",
+    "model = models.resnet50(weights=models.ResNet50_Weights.IMAGENET1K_V1)\n",
+    "# Put dropout/batchnorm layers in evaluation mode\n",
+    "model.eval()\n",
+    "\n",
+    "# 1000-dimensional output; no_grad skips building the autograd graph\n",
+    "with torch.no_grad():\n",
+    "    out = model(image)\n",
+    "# Report the most likely class\n",
+    "print(\"Predicted class is: {}\".format(labels[out.argmax()]))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import json\n",
+    "from PIL import Image\n",
+    "\n",
+    "# Image to classify with the pre-trained ImageNet model\n",
+    "test_image = \"bateau.jpg\"\n",
+    "\n",
+    "# Load the human-readable ImageNet class names\n",
+    "with open(\"imagenet-simple-labels.json\") as f:\n",
+    "    labels = json.load(f)\n",
+    "\n",
+    "# Preprocess exactly as the model was trained: 224x224, ImageNet mean/std\n",
+    "data_transform = transforms.Compose(\n",
+    "    [\n",
+    "        transforms.Resize((224, 224)),\n",
+    "        transforms.ToTensor(),\n",
+    "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),\n",
+    "    ]\n",
+    ")\n",
+    "\n",
+    "# Load and display the raw image\n",
+    "image = Image.open(test_image)\n",
+    "plt.imshow(image), plt.xticks([]), plt.yticks([])\n",
+    "\n",
+    "# Apply the transformation and add the batch dimension\n",
+    "image = data_transform(image).unsqueeze(0)\n",
+    "\n",
+    "# 'pretrained=True' is deprecated since torchvision 0.13; IMAGENET1K_V1 is\n",
+    "# the weight set that pretrained=True used to load\n",
+    "model = models.resnet50(weights=models.ResNet50_Weights.IMAGENET1K_V1)\n",
+    "# Put dropout/batchnorm layers in evaluation mode\n",
+    "model.eval()\n",
+    "\n",
+    "# 1000-dimensional output; no_grad skips building the autograd graph\n",
+    "with torch.no_grad():\n",
+    "    out = model(image)\n",
+    "# Report the most likely class\n",
+    "print(\"Predicted class is: {}\".format(labels[out.argmax()]))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Report the serialized size of the fp32 model, to compare against the\n",
+    "# int8 quantized version produced in the next cell\n",
+    "print_size_of_model(model, \"fp32\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Post-training dynamic quantization: weights of supported layers are stored\n",
+    "# as int8, activations are quantized on the fly at inference time\n",
+    "quantized_model = torch.quantization.quantize_dynamic(model, dtype=torch.qint8)\n",
+    "# Size should shrink versus the fp32 figure printed above\n",
+    "print_size_of_model(quantized_model, \"int8\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import json\n",
+    "from PIL import Image\n",
+    "\n",
+    "# Image to classify with the QUANTIZED model\n",
+    "test_image = \"bateau.jpg\"\n",
+    "\n",
+    "# Load the human-readable ImageNet class names\n",
+    "with open(\"imagenet-simple-labels.json\") as f:\n",
+    "    labels = json.load(f)\n",
+    "\n",
+    "# Same preprocessing as for the fp32 model\n",
+    "data_transform = transforms.Compose(\n",
+    "    [\n",
+    "        transforms.Resize((224, 224)),\n",
+    "        transforms.ToTensor(),\n",
+    "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),\n",
+    "    ]\n",
+    ")\n",
+    "\n",
+    "# Load and display the raw image\n",
+    "image = Image.open(test_image)\n",
+    "plt.imshow(image), plt.xticks([]), plt.yticks([])\n",
+    "\n",
+    "# Apply the transformation and add the batch dimension\n",
+    "image = data_transform(image).unsqueeze(0)\n",
+    "\n",
+    "# BUG FIX: the original cell reloaded a fresh fp32 resnet50 and evaluated it,\n",
+    "# so quantized_model was never actually tested. Evaluate the quantized model.\n",
+    "quantized_model.eval()\n",
+    "\n",
+    "# 1000-dimensional output of the int8 model\n",
+    "with torch.no_grad():\n",
+    "    out = quantized_model(image)\n",
+    "# Report the most likely class\n",
+    "print(\"Predicted class is: {}\".format(labels[out.argmax()]))\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import json\n",
+    "from PIL import Image\n",
+    "\n",
+    "# Image to classify with the QUANTIZED model\n",
+    "test_image = \"cheval.jpg\"\n",
+    "\n",
+    "# Load the human-readable ImageNet class names\n",
+    "with open(\"imagenet-simple-labels.json\") as f:\n",
+    "    labels = json.load(f)\n",
+    "\n",
+    "# Same preprocessing as for the fp32 model\n",
+    "data_transform = transforms.Compose(\n",
+    "    [\n",
+    "        transforms.Resize((224, 224)),\n",
+    "        transforms.ToTensor(),\n",
+    "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),\n",
+    "    ]\n",
+    ")\n",
+    "\n",
+    "# Load and display the raw image\n",
+    "image = Image.open(test_image)\n",
+    "plt.imshow(image), plt.xticks([]), plt.yticks([])\n",
+    "\n",
+    "# Apply the transformation and add the batch dimension\n",
+    "image = data_transform(image).unsqueeze(0)\n",
+    "\n",
+    "# BUG FIX: the original cell reloaded a fresh fp32 resnet50 and evaluated it,\n",
+    "# so quantized_model was never actually tested. Evaluate the quantized model.\n",
+    "quantized_model.eval()\n",
+    "\n",
+    "# 1000-dimensional output of the int8 model\n",
+    "with torch.no_grad():\n",
+    "    out = quantized_model(image)\n",
+    "# Report the most likely class\n",
+    "print(\"Predicted class is: {}\".format(labels[out.argmax()]))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import json\n",
+    "from PIL import Image\n",
+    "\n",
+    "# Image to classify with the QUANTIZED model\n",
+    "test_image = \"cheval2.jpg\"\n",
+    "\n",
+    "# Load the human-readable ImageNet class names\n",
+    "with open(\"imagenet-simple-labels.json\") as f:\n",
+    "    labels = json.load(f)\n",
+    "\n",
+    "# Same preprocessing as for the fp32 model\n",
+    "data_transform = transforms.Compose(\n",
+    "    [\n",
+    "        transforms.Resize((224, 224)),\n",
+    "        transforms.ToTensor(),\n",
+    "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),\n",
+    "    ]\n",
+    ")\n",
+    "\n",
+    "# Load and display the raw image\n",
+    "image = Image.open(test_image)\n",
+    "plt.imshow(image), plt.xticks([]), plt.yticks([])\n",
+    "\n",
+    "# Apply the transformation and add the batch dimension\n",
+    "image = data_transform(image).unsqueeze(0)\n",
+    "\n",
+    "# BUG FIX: the original cell reloaded a fresh fp32 resnet50 and evaluated it,\n",
+    "# so quantized_model was never actually tested. Evaluate the quantized model.\n",
+    "quantized_model.eval()\n",
+    "\n",
+    "# 1000-dimensional output of the int8 model\n",
+    "with torch.no_grad():\n",
+    "    out = quantized_model(image)\n",
+    "# Report the most likely class\n",
+    "print(\"Predicted class is: {}\".format(labels[out.argmax()]))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "With this model, the quantized version seems to work at the same level as the original one."
    ]
   },
   {
@@ -738,7 +1569,7 @@
     "    ),\n",
     "}\n",
     "\n",
-    "data_dir = \"hymenoptera_data\"\n",
+    "data_dir = \"C:\\\\Users\\\\anton\\\\OneDrive\\\\Documents\\\\GitHub\\\\mod-4-6-td-2-antonin-delorme\\\\data\\\\hymenoptera_data\"\n",
     "# Create train and validation datasets and loaders\n",
     "image_datasets = {\n",
     "    x: datasets.ImageFolder(os.path.join(data_dir, x), data_transforms[x])\n",
@@ -835,7 +1666,7 @@
     "    ),\n",
     "}\n",
     "\n",
-    "data_dir = \"hymenoptera_data\"\n",
+    "data_dir = \"C:\\\\Users\\\\anton\\\\OneDrive\\\\Documents\\\\GitHub\\\\mod-4-6-td-2-antonin-delorme\\\\data\\\\hymenoptera_data\"\n",
     "# Create train and validation datasets and loaders\n",
     "image_datasets = {\n",
     "    x: datasets.ImageFolder(os.path.join(data_dir, x), data_transforms[x])\n",