diff --git a/.gitignore b/.gitignore
index f81f24f5d9f9099bfc46838a925ee1c452fb225a..73a73adf37a6395cc7ef38999aa94eeae97b55a7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,4 @@
 /data
 /.vscode
 /__pycache__
-MOD_4_6_TD_1.ipynb
\ No newline at end of file
+Solution_TD1_mpl.pdf
diff --git a/MOD_4_6_TD_1.ipynb b/MOD_4_6_TD_1.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..6edd524ec4d27c5953717868c1700cf6a7724468
--- /dev/null
+++ b/MOD_4_6_TD_1.ipynb
@@ -0,0 +1,157 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Imports"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import read_cifar as rc\n",
+    "import knn as knn\n",
+    "import mlp as mlp\n",
+    "import matplotlib.pyplot as plt\n",
+    "from tqdm import tqdm"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# k-nearest neighbours"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Question 4"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkgAAAGwCAYAAABSN5pGAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABpkElEQVR4nO3de1yUVf4H8M8MMFyHGS4CgiCCCmoKCkp0MUwUy13LrNA1NdY1y2vir9R10y5b2Faupaa1m2lqabV209IM74o3ELUUvIMoV5HhJgPMPL8/cEaHiwIy88wMn/frNa+XzJznec7jMM7Xc77nfCWCIAggIiIiIj2p2B0gIiIiMjcMkIiIiIjqYYBEREREVA8DJCIiIqJ6GCARERER1cMAiYiIiKgeBkhERERE9diK3QFLpdVqcfXqVcjlckgkErG7Q0RERM0gCALKysrg6+sLqbTpcSIGSK109epV+Pv7i90NIiIiaoXLly+jU6dOTb7OAKmV5HI5gLq/YFdXV5F7Q0RERM1RWloKf39//fd4UxggtZJuWs3V1ZUBEhERkYW5W3oMk7SJiIiI6mGARERERFQPAyQiIiKiehggEREREdXDAImIiIioHgZIRERERPUwQCIiIiKqhwESERERUT0MkIiIiIjqYYBEREREVA8DJCIiIqJ6GCARERER1cMAycyUVtUg61oFyqpqxO4KERFRu8UAycz8bc1RPPLeLuw+Uyh2V4iIiNotBkhmRuloBwAoqeQIEhERkVgYIJkZpVNdgKS6wQCJiIhILAyQzIybkwwAUFJZLXJPiIiI2i8GSGZG4cQpNiIiIrExQDIzSsebI0icYiMiIhINAyQzo9SPIHGKjYiISCwMkMwMV7ERERGJjwGSmdHnIHGKjYiISDQMkMyM8uYqNlVlDQRBELk3RERE7RMDJDOjm2Kr1mhxo0Yjcm+IiIjaJwZIZsZJZgM7GwkA5iERERGJhQGSmZFIJFDolvozQCIiIhIFAyQzpF/qf4NL/YmIiMTAAMkM6fKQVBxBIiIiEoXoAdLy5csRGBgIBwcHREVF4fDhw0223bRpEyIjI6FUKuHs7Izw8HCsXbu2QbvTp09jxIgRUCgUcHZ2Rv/+/ZGdna1/vaqqClOnToWHhwdcXFwwatQo5OfnG+X+WkPJpf5ERESiEjVA2rhxIxITE7Fw4UKkpaUhLCwMcXFxKCgoaLS9u7s75s+fj5SUFJw4cQIJCQlISEjAtm3b9G3Onz+Phx56CKGhodi1axdOnDiB1157DQ4ODvo2s2bNwk8//YRvvvkGu3fvxtWrV/HUU08Z/X6bS+nEHCQiIiIxSQQRN9uJiopC//79sWzZMgCAVquFv78/pk+fjrlz5zbrHP369cPw4cPx1ltvAQBGjx4NOzu7RkeWAEClUqFDhw748ssv8fTTTwMAMjIy0KNHD6SkpOD+++9v1nVLS0uhUCigUqng6urarGOa65+bT+G/+y5i8iNBmPdYjzY9NxERUXvW3O9v0UaQqqurkZqaitjY2FudkUoRGxuLlJSUux4vCAKSk5ORmZmJgQMHAqgLsLZs2YLu3bsjLi4OXl5eiIqKwvfff68/LjU1FTU1NQbXDQ0NRUBAwB2vq1arUVpaavAwFt0UG3OQiIiIxCFagFRUVASNRgNvb2+D5729vZGXl9fkcSqVCi4uLpDJZBg+fDiWLl2KIUOGAAAKCgpQXl6ORYsWYdiwYfj1118xcuRIPPXUU9i9ezcAIC8vDzKZDEqlskXXTUpKgkKh0D/8/f1beed3p7g5xXadBWuJiIhEYSt2B1pKLpcjPT0d5eXlSE5ORmJiIoKCghATEwOtVgsAeOKJJzBr1iwAQHh4OA4cOICVK1fikUceafV1582bh8TERP3PpaWlRguSWLCWiIhIXKIFSJ6enrCxsWmweiw/Px8+Pj5NHieVStG1a1cAdcHP6dOnkZSUhJiYGHh6esLW1hY9e/Y0OKZHjx7Yt28fAMDHxwfV1dUoKSkxGEW623Xt7e1hb2/f0ttsFf0UG1exERERiUK0KTaZTIaIiAgkJyfrn9NqtUhOTkZ0dHSzz6PVaqFWq/Xn7N+/PzIzMw3anDlzBp07dwYAREREwM7OzuC6mZmZyM7ObtF1jUnJnbSJiIhEJeoUW2JiIiZMmIDIyEgMGDAAS5YsQUVFBRISEgAA48ePh5+fH5KSkgDU5QFFRkYiODgYarUaP//8M9auXYsVK1boz/nKK68gPj4eAwcOxKBBg7B161b89NNP2LVrFwBAoVBg4sSJSExMhLu7O1xdXTF9+nRER0c3ewWbsXEnbSIiInGJGiDFx8ejsLAQCxYsQF5eHsLDw7F161Z94nZ2djak0luDXBUVFZgyZQpycnLg6OiI0NBQrFu3DvHx8fo2I0eOxMqVK5GUlIQZM2YgJCQE//vf//DQQw/p2/z73/+GVCrFqFGjoFarERcXh48//th0N34XipsBUlWNFlU1GjjY2YjcIyIiovZF1H2QLJkx90ESBAFd5/8CjVbAob8Phrerw90PIiIiorsy+32QqGkSiQQKrmQjIiISDQMkM3VrqT/zkIiIiEyNAZKZYsFaIiIi8TBAMlO6grUsN0JERGR6DJDMlH6KjUv9iYiITI4BkpnSLfW/zhEkIiIik2OAZKa4mzYREZF4GCCZqVv12DjFRkREZGoMkMyUfhUbR5CIiIhMjgGSmeJGkUREROJhgGSm9Mv8uQ8SERGRyTFAMlPcSZuIiEg8DJDMlC4HqaJag+parci9ISIial8YIJkpuYMdJJK6P3OajYiIyLQYIJkpG6lEn6jNpf5ERESmxQDJjCm5ko2IiEgUDJDMmMKJu2kTERGJgQGSGdONIF3nSjYiIiKTYoBkxm6VG+EIEhERkSkxQDJjzEEiIiISBwMkM6bPQeIqNiIiIpNigGTGOIJEREQkDgZIZow5SEREROJggGTGdAESR5CIiIhMiwGSGVM4MgeJiIhIDAyQzBhHkIiIiMTBAMmM6ZK0y6pqUavRitwbIiKi9oMBkhnTFasFgNKqWhF7QkRE1L4wQDJjtjZSyB1sAQAlLDdCRERkMgyQzJw+D4lL/YmIiEyGAZKZU+pWsnEEiYiIyGTMIkBavnw5AgMD4eDggKioKBw+fLjJtps2bUJkZCSUSiWcnZ0RHh6OtWvXGrR5/vnnIZFIDB7Dhg0zaBMYGNigzaJFi4xyf/eCK9mIiIhMz1bsDmzcuBGJiYlYuXIloqKisGTJEsTFxSEzMxNeXl4N2ru7u2P+/PkIDQ2FTCbD5s2bkZCQAC8vL8TFxenbDRs2DJ9//rn+Z3t7+wbnevPNNzFp0iT9z3K5vI3v7t4pWG6EiIjI5EQPkBYvXoxJkyYhISEBALBy5Ups2bIFq1atwty5cxu0j4mJMfh55syZWLNmDfbt22cQINnb28PHx+eO15bL5Xdto6NWq6FWq/U/l5aWNuu4e8UcJCIiItMTdYqturoaqampiI2N1T8nlUoRGxuLlJSUux4vCAKSk5ORmZ
mJgQMHGry2a9cueHl5ISQkBC+99BKuXbvW4PhFixbBw8MDffv2xXvvvYfa2qaX0iclJUGhUOgf/v7+LbjT1tPlIKmYg0RERGQyoo4gFRUVQaPRwNvb2+B5b29vZGRkNHmcSqWCn58f1Go1bGxs8PHHH2PIkCH614cNG4annnoKXbp0wfnz5/H3v/8djz32GFJSUmBjYwMAmDFjBvr16wd3d3ccOHAA8+bNQ25uLhYvXtzoNefNm4fExET9z6WlpSYJkjiCREREZHqiT7G1hlwuR3p6OsrLy5GcnIzExEQEBQXpp99Gjx6tb9u7d2/06dMHwcHB2LVrFwYPHgwABsFOnz59IJPJMHnyZCQlJTWar2Rvb9/o88bGHCQiIiLTE3WKzdPTEzY2NsjPzzd4Pj8//465QVKpFF27dkV4eDhmz56Np59+GklJSU22DwoKgqenJ86dO9dkm6ioKNTW1uLSpUstvg9jUjrpCtYyQCIiIjIVUQMkmUyGiIgIJCcn65/TarVITk5GdHR0s8+j1WoNEqjry8nJwbVr19CxY8cm26Snp0MqlTa6ck5Muik25iARERGZjuhTbImJiZgwYQIiIyMxYMAALFmyBBUVFfpVbePHj4efn59+hCgpKQmRkZEIDg6GWq3Gzz//jLVr12LFihUAgPLycrzxxhsYNWoUfHx8cP78ebz66qvo2rWrfpVbSkoKDh06hEGDBkEulyMlJQWzZs3Cc889Bzc3N3H+IprgxhwkIiIikxM9QIqPj0dhYSEWLFiAvLw8hIeHY+vWrfrE7ezsbEiltwa6KioqMGXKFOTk5MDR0RGhoaFYt24d4uPjAQA2NjY4ceIE1qxZg5KSEvj6+mLo0KF466239DlE9vb22LBhA15//XWo1Wp06dIFs2bNMshLMhcK3Sq2GzXQagVIpRKRe0RERGT9JIIgCGJ3whKVlpZCoVBApVLB1dXVaNeprtWi+z9+AQAcXzAUipsjSkRERNRyzf3+NotSI9Q0ma0UzrK6rQmuMw+JiIjIJBggWQCuZCMiIjItBkgW4NZeSBxBIiIiMgUGSBZAv9SfI0hEREQmwQDJAujLjXA3bSIiIpNggGQBdEv9GSARERGZBgMkC3CrYC1zkIiIiEyBAZIFUDrqyo1wBImIiMgUGCBZACXLjRAREZkUAyQLoN8Hicv8iYiITIIBkgXQTbFxBImIiMg0GCBZAN0IEnOQiIiITIMBkgW4PQeJtYWJiIiMjwGSBdCVGtFoBZSpa0XuDRERkfVjgGQBHOxs4GBX91Zxmo2IiMj4GCBZCCV30yYiIjIZBkgWgrtpExERmQ4DJAuhy0PiCBIREZHxMUCyENxNm4iIyHQYIFkIXQ6SirtpExERGR0DJAuhH0HiFBsREZHRMUCyEApOsREREZkMAyQL4ebEZf5ERESmwgDJQugK1qq4zJ+IiMjoGCBZCAVzkIiIiEyGAZKF0K1iu84AiYiIyOgYIFkI3So21Y1qCIIgcm+IiIisGwMkC6ELkGo0AiqrNSL3hoiIyLoxQLIQjnY2kNnUvV1c6k9ERGRcDJAshEQiuS1RmyvZiIiIjMksAqTly5cjMDAQDg4OiIqKwuHDh5tsu2nTJkRGRkKpVMLZ2Rnh4eFYu3atQZvnn38eEonE4DFs2DCDNsXFxRg7dixcXV2hVCoxceJElJeXG+X+2op+qT8TtYmIiIxK9ABp48aNSExMxMKFC5GWloawsDDExcWhoKCg0fbu7u6YP38+UlJScOLECSQkJCAhIQHbtm0zaDds2DDk5ubqH1999ZXB62PHjsUff/yB7du3Y/PmzdizZw9eeOEFo91nW2DBWiIiItOQCCIviYqKikL//v2xbNkyAIBWq4W/vz+mT5+OuXPnNusc/fr1w/Dhw/HWW28BqBtBKikpwffff99o+9OnT6Nnz544cuQIIiMjAQBbt27F448/jpycHPj6+jY4Rq1WQ61W638uLS2Fv78/VCoVXF1dW3LLrfa3NUfx2+l8vDOyN/4SFWCSaxIREVmT0tJSKBSKu35/izqCVF1djdTUVMTGxuqfk0qliI2NRUpKyl2PFwQBycnJyMzMxMCBAw1e27VrF7y8vBASEoKXXnoJ165d07+WkpICpVKpD44AIDY2FlKpFIcOHWr0WklJSVAoFPqHv79/S2/3nt0aQWIOEhERkTGJGiAVFRVBo9HA29vb4Hlvb2/k5eU1eZxKpYKLiwtkMhmGDx+OpUuXYsiQIfrXhw0bhi+++ALJycl49913sXv3bjz22GPQaOqWx+fl5cHLy8vgnLa2tnB3d2/yuvPmzYNKpdI/Ll++3NrbbjU3J+YgERERmYKt2B1oDblcjvT0dJSXlyM5ORmJiYkICgpCTEwMAGD06NH6tr1790afPn0QHByMXbt2YfDgwa26pr29Pezt7dui+62mZMFaIiIikxA1QPL09ISNjQ3y8/MNns/Pz4ePj0+Tx0mlUnTt2hUAEB4ejtOnTyMpKUkfINUXFBQET09PnDt3DoMHD4aPj0+DJPDa2loUFxff8bpiUzhyio2IiMgURJ1ik8lkiIiIQHJysv45rVaL5ORkREdHN/s8Wq3WIIG6vpycHFy7dg0dO3YEAERHR6OkpASpqan6Njt27IBWq0VUVFQr7sQ0lCxYS0REZBKiT7ElJiZiwoQJiIyMxIABA7BkyRJUVFQgISEBADB+/Hj4+fkhKSkJQF2ydGRkJIKDg6FWq/Hzzz9j7dq1WLFiBQCgvLwcb7zxBkaNGgUfHx+cP38er776Krp27Yq4uDgAQI8ePTBs2DBMmjQJK1euRE1NDaZNm4bRo0c3uoLNXOgK1jJAIiIiMi7RA6T4+HgUFhZiwYIFyMvLQ3h4OLZu3apP3M7OzoZUemugq6KiAlOmTEFOTg4cHR0RGhqKdevWIT4+HgBgY2ODEydOYM2aNSgpKYGvry+GDh2Kt956yyCHaP369Zg2bRoGDx4MqVSKUaNG4aOPPjLtzbcQV7ERERGZhuj7IFmq5u6j0JYuF1fi4X/thL2tFJn/fMwk1yQiIrImFrEPErWMbgRJXatFVY1G5N4QERFZLwZIFsTF3hY2UgkA5iEREREZEwMkCyKRSPQFa5mHREREZDwMkCyMgkv9iYiIjI4BkoXRjyAxQCIiIjIaBkgWxu1muREVp9iIiIiMhgGSheEUGxERkfExQLIw+t20bzBAIiKqrtWC2/mRMYi+kza1DOuxEREBtRotPth+Bp/tvQhIAF+FA3yVjnWP2/+sdEBHhSOc7fl1Ry3D3xgLcytAYg4SkbWqUNfiTH4ZzuSXISOvDLUaAZMeDkKAh5PYXTMLeaoqzPjqGA5fKtY/d+laJS5dq2zyGKWTHToqHOGnvBU8dVQ4wO/mn73k9rC14aQK3cIAycIouIqNyGrUaLS4WFSBjLwynMmrC4Yy80txufhGg7bb/sjD2olRCPGRi9BT87HnTCFmbUzHtYpquNjbIump3gj3V+JqyQ1cVd3A1ZKquj+X3Pyz6gbKqmpRUlmDksoanM4tbfS8NlIJvOX2d
YHTzZEnP6Ujunq5IDrIAxKJxMR3SmJjgGRhlE7MQSKyNIIgIOf6Df2I0Jn8MmTmleF8YTlqNI3nz3i62CPExwUh3q7Yf64ImfllePaTFHye0B/9AtxMfAfi02gFLPntDJbtPAdBAHp2dMXysf3QxdMZAODv3vToWmlVDXJvBku64Cm3pApXbgZVeaoq1GgEXFVV4aqqCsi6bnD8sr/0xZ/6+Br1/sj8MECyMLp9kFScYiMyS8UV1cjIK8WZvDJk3gyEzuSXo1xd22h7Z5kNuvvIEeojR3dvOUJ85AjxlsPDxV7fRlVZg4TVh5GWXYKx/zmET8dH4OFuHUx1S6IrKK3CjA3HcPBC3ZTaX6ICsOBPPeFgZ9Os410d7ODqY9fk6JtWK6CoXF0XMJVUIVd1A1dKbuD45RKkZZdg+c7zGN67I0eR2hkGSBZGn4PEESQi0VXVaHD4YjH2nSvCqaulyMgrQ1G5utG2djYSBHdwMQiCQnzk8FM6Qiq98xevwskO6/4WhclrU7H3bBH+uvoIPhzdF4/37miM2zIrB84VYcaGdBSVq+Ess8E7T/XGE+F+bXoNqVQCL1cHeLk6oG/AredLKqvxwKIdOJ1bit1nChET4tWm1yXzxgDJwuiW+VdWa6Cu1cDetnn/gyKitnGl5AZ2ZhRgV2YB9p+7hhs1mgZtAtyd0N375qjQzdGhQA9nyGxbnwTsJLPFZxP6Y9bGdGw5mYtpX6bhnZG9MXpAwN0PtkAarYBlO85hSfIZCAIQ6iPH8rH9ENzBxWR9UDrJMGZAAD7bdxErd59ngNTOMECyMHIHW0gkgCAAqhs18JIzQCIyphqNFqlZ17EzswC7MgqRmV9m8Lq3qz1iunuhX2clunvXTZMZa0m5zFaKj8b0haujLb46fBlzN51EyY0avPhIsFGuJ5aicjVe3pCOfeeKAADxkf54fUQvOMpM/+/dxIe6YM2BSzh4oRjHsq+jbzvM/2qvGCBZGKlUAoWjHUoqa6CqrIGX3EHsLhFZnYKyKuzOLMSuzELsOVuIsqpb+UNSCdAvwA2DQr0wKMQLPTrKTZqbYiOV4J2RvaF0kmHFrvNY9EsGSiprMGdYiFXkyBy8cA0zvjqGgjI1HO1s8M8n78OoiE6i9cdX6Ygnwv3wv7QcrNx9Hp+MixStL2RaDJAskPJmgMQ8JKK2odEKOJFTgp2ZhdiVWYATOSqD192dZYjp3gExoV4Y2M1Tv5pULBKJBHOGhULpaIekXzKwcvd5qG5U459P9obNXfKZzJVWK2DF7vP44NdMaAWgm5cLPh7bD928xd/W4MVHgvC/tBz8eiof5wvLTTrNR+JhgGSBlE4y4Fol90IiugclldXYc7YIuzIKsOtMIYorDFeG9umkQEyIFwaFdECfTkqzDDwmPxIMhaMd/v7dSXx1+DJKb9Ti3/Hh95TrJIbiimrM2piO3WcKAQBP9fPDP5+8D04y8/iK6uYtR2wPb/x2Oh+f7r6Ad5/uI3aXyATM47ePWoS7aRO1nCAIOJ1bhp2ZBdiZUYC07OvQ3rYFkdzeFgO7d0BMSAc8EtLBYqavRw8IgKujHWZuOIYtJ3NRWlWDT8ZFmE1wcTdHLxVj2pfHkFdaBXtbKd568j48G+kvdrcaeCkmCL+dzsemYzmYNaQ7fBSW8ftBrWcZnyAyoN8LiVNsRHclCAKW7jiH9YeykF9quAQ/xFuOmNAOGBTihYjObrCz0FITj/fuCLmDrX4bgOf+ewirnu8v+lTgnWi1Av6z9wL+tS0TGq2AoA7O+HhsP4T6uIrdtUZFdHZH/0A3HLl0Hav2X8TfH+8hdpfIyBggWSDdP3rXOYJEdFe7zxRi8fYzAABHOxs82NWjbuos1At+SkeRe9d2Hu7WAev+FoWEz48gLbsE8Z8cxNqJA+Dlan4jHSWV1Zj99XEkZxQAAJ4I98XbI3vDxcwLyr74SDCOXDqKLw9lY+qgrvrST2SdLPO/S+0c67ERNd/qA5cAAKP7++PYgiH474T+eO7+zlYVHOn0C3DD15Oj4SW3R2Z+GZ5emYLsOxRwFUNa9nUM/2gfkjMKILOV4p2RvbEkPtzsgyMAGBTihRBvOcrVtVh3MEvs7pCRMUCyQNxNm6h5zheWY1dmISQS4KWY4GaXprBkIT5yfPviAwhwd0J2cSWeXnkAGXmNF2g1JUEQ8N+9F/DsyhRcKbmBQA8nfDflAfwlKsBitieQSiWY/EgQAODz/ZdQ1cgmoWQ9GCBZIF2ApOIIEtEdfXFz9GhwqBc6eziL2xkTCvBwwrcvRiPUR46CMjXiPzmItOzrdz/QSFSVNZi8NhX/3HIatVoBw/t0xE/TH0IvX4VofWqtP4f5wk/piKJyNf6XliN2d8iIGCBZIF25kZIbzEEiakppVQ2+Ta37Anv+gS4i98b0vFwdsPGFaPQLUEJ1owZj/3MIe88Wmrwfx7KvY/jSvfj1VD5kNlK89UQvLBvTF3IHy8zfsbORYuJDdb9Pn+65AM3tSyHJqjBAskAKJ+YgEd3NN0dzUFGtQTcvFzzY1UPs7ohCV+T24W6euFGjwV9XH8HPJ3ONdj2NVsDvV1RYc+ASpn2ZhuikZIz8+AByrt+Av7sj/vfSAxgXHWgxU2pNGT3AH0onO2Rdq8Qvvxvv75PEZf5ZcdSAfpk/A6Q2IwgCVh+4hNSs61jwp55mufKHmk+jFbDm5vTa8w9a/hfyvdAXuf06HVtO1BW5fXtkb4xpgyK3FepapF8uwZFLxUjNuo5j2SUoV9catLGVSvB4745468n7rGbVl5PMFhOiA/Fh8lms3H0ew3t3bNe/Y9aKAZIF0i3zL1PXokajtdi9W8xFjUaLf3z3OzYevQwAyLl+AxteuL9dJPRaq12ZBcguroSrgy1G9vUTuzuik9lK8dHovnB1sMNXh7Mxb9NJqFpR5DZPVYWjWcU4euk6UrOu41RuaYMpJrm9Lfp1dkNkZzdEBroj3F8pSpFZY5vwQCA+2XMev18pxf5z1/BQN0+xu0RtjAGSBXJ1uPW2ld6ogYeLvYi9sWxlVTWYsj4Ne88WQSqp2ycn/XIJ5n/3O95/pg//V2ihdEv7xwwIsJgdpY2trsjtfVA62TWryK1WK+BMQZk+GDpyqRg51280aOendERkYF0wFNnZDd295WZZlqWtuTvLMLp/AFYfuIQVu88xQLJCZjH0sHz5cgQGBsLBwQFRUVE4fPhwk203bdqEyMhIKJVKODs7Izw8HGvXrm2y/YsvvgiJRIIlS5YYPB8YWDfsfvtj0aJFbXVLRmVrI9UHSVzq33q5qht4ZmUK9p4tgqOdDf4zPhIrx0XARirB/9Jy8Nm+i2J3kVrhbH6ZPuB97v7OYnfHrOiK3M57LBQAsHL3efz9u5PQaAXcqNbg4IVrWL7zHJ7//DDC3/wVw5bsxT++/x3fHbuCnOs3IJUAvXxd8fwDgVg6pi9S5j2K/XMfxYej+2Lc/Z3Ro6NruwiOdP72cBfYSCXYf+4aTtYrcEyWr1X/tdq5cycGDRrUJh3YuHEjEhMTsXLlSkRFRWHJkiWI
i4tDZmYmvLy8GrR3d3fH/PnzERoaCplMhs2bNyMhIQFeXl6Ii4szaPvdd9/h4MGD8PX1bfTab775JiZNmqT/WS4Xv2p0cymdZCitqmWidiudulqKv64+grzSKnSQ22PVhP7o3aluyfE/hvfAGz+dwjs/n0ZXLxfEhDT8PSTzpRs9GtLTG/7uTuJ2xkzVL3K771wRckuqUFtvusxJZoO+AUpEdnZHZKAb+ga4WcSGjqbSyc0JI8J88d2xK1i5+zyWj+0ndpeoDbXqN33YsGHo1KkTEhISMGHCBPj7t76w4OLFizFp0iQkJCQAAFauXIktW7Zg1apVmDt3boP2MTExBj/PnDkTa9aswb59+wwCpCtXrmD69OnYtm0bhg8f3ui15XI5fHx8Wt13MSmd7JBdDKi41L/Fdp8pxNT1aShX16Kblws+T+iPTm63vkiffyAQGbll2Hj0MqZ/dQzfT30QwR1cROwxNZeqsgab0q4AABIebH9L+1ti9IAAKBztMHNDOi4X102debva66fK+ge6I9RHDlvmON7R5EeC8N2xK/jl91xcKqpAoGf72W/L2rXqN//KlSuYNm0avv32WwQFBSEuLg5ff/01qqtb9mVdXV2N1NRUxMbG3uqQVIrY2FikpKTc9XhBEJCcnIzMzEwMHDhQ/7xWq8W4cePwyiuvoFevXk0ev2jRInh4eKBv37547733UFtb22RbtVqN0tJSg4eYWG6kdTYeycZfVx9BuboW0UEe+PalBwyCI6BuGuLNJ3shsrMbyqpqMWnNURYGthAbj2bjRo0GoT5yRHVxF7s7Zu+x3h2xacoD+GhMX+x9dRAOzhuM5X/ph4QHu+A+PwWDo2YI9XHFoJAO0ArAp3sviN0dakOt+u339PTErFmzkJ6ejkOHDqF79+6YMmUKfH19MWPGDBw/frxZ5ykqKoJGo4G3t7fB897e3sjLy2vyOJVKBRcXF8hkMgwfPhxLly7FkCFD9K+/++67sLW1xYwZM5o8x4wZM7Bhwwbs3LkTkydPxjvvvINXX321yfZJSUlQKBT6x72MmrWFWwVr+cXdHIIg4P1tmZjzv7p8i6f6+mHNXwc0uezY3tYGK56LgK/CAReKKjD9q2PcEM7M1S3tr6uPldDOl/a3xH1+CowI84W/uxP/zlrppZiuAIBvU3NQUFYlcm+ordzzfw/69euHefPmYdq0aSgvL8eqVasQERGBhx9+GH/88Udb9LEBuVyO9PR0HDlyBG+//TYSExOxa9cuAEBqaio+/PBDrF69+o4f9sTERMTExKBPnz548cUX8cEHH2Dp0qVQq9WNtp83bx5UKpX+cfnyZWPcWrPd2guJU2x3o67V4OWN6Vi28xwAYMbgbvjg2TDIbO/8699Bbo9Px0fCwU6KPWcKseiX06boLrXSb6fzcaXkBtyc7PBEOJf2k+n0D3RDvwAlqmu1+Hz/JbG7Q22k1QFSTU0Nvv32Wzz++OPo3Lkztm3bhmXLliE/Px/nzp1D586d8cwzz9zxHJ6enrCxsUF+fr7B8/n5+XfMDZJKpejatSvCw8Mxe/ZsPP3000hKSgIA7N27FwUFBQgICICtrS1sbW2RlZWF2bNnIzAwsMlzRkVFoba2FpcuXWr0dXt7e7i6uho8xMSCtc1TUlmNcZ8dxg/pV2ErleBfT/dB4pDuzf6f8n1+CnzwTDgA4D97L+pLV5D5+Xx/3arDMQMCuIcVmZREItHvKbXuYBbKqvjvsjVoVYA0ffp0dOzYEZMnT0b37t1x7NgxpKSk4G9/+xucnZ0RGBiI999/HxkZGXc8j0wmQ0REBJKTk/XPabVaJCcnIzo6utn90Wq1+pGfcePG4cSJE0hPT9c/fH198corr2Dbtm1NniM9PR1SqbTRlXPmiDlId3e5uBKjVhzA4YvFkNvbYnXCADwb2fKp0eF9OmLGo3VD6H/fdFLUop/UuNO5pTh4oRg2UgmX9pMoYnt4o6uXC8qqavHloWyxu0NtoFWr2E6dOoWlS5fiqaeegr1945sUenp6YufOnXc9V2JiIiZMmIDIyEgMGDAAS5YsQUVFhX5V2/jx4+Hn56cfIUpKSkJkZCSCg4OhVqvx888/Y+3atVixYgUAwMPDAx4ehnWX7Ozs4OPjg5CQEABASkoKDh06hEGDBkEulyMlJQWzZs3Cc889Bzc3t9b8lZicLgeJI0iNO365BBPXHEFReTU6KhzweUJ/hPq0ftTv5djuyMgrw6+n8vHCF6n4afqD6KhwbMMe073QlRUZ1ssHvkq+L2R6UqkEkwcG4ZVvT+CzfRfx/IOBsLflSKYla1WAdPuIT5MntrXFI488ctd28fHxKCwsxIIFC5CXl4fw8HBs3bpVn7idnZ0NqfTWQFdFRQWmTJmCnJwcODo6IjQ0FOvWrUN8fHyz+29vb48NGzbg9ddfh1qtRpcuXTBr1iwkJiY2+xxiYw5S0379Iw8zNhxDVY0WPTu64vOE/vC+x9pqUqkE/44Px6gVB5CRV4YXvkjF15OjrbKEgqUprqjGd8d0S/sDxe0MtWtPhPth8fYzyFVV4bu0KxjdBvXuSDwSQRBavDQnKSkJ3t7e+Otf/2rw/KpVq1BYWIg5c+a0WQfNVWlpKRQKBVQqlSj5SEcvFePplSno7OGE3a+0zaad1mD1/ot4Y/MpCALwSPcOWD62X5tubHe5uBJPLN+P4opq/DnMFx+NDufKH5F9vOsc/rU1E/f5ueKnaQ/x/SBR/XfvBfxzy2kEeTpje+Ij7WpncUvR3O/vVuUgffLJJwgNDW3wfK9evbBy5crWnJJaSJ+kzRwkAHV1o97afAqv/1QXHI0ZEIDPJkS2+a6//u5O+HhsP9hKJfjp+FV8vOt8m56fWqZWo8XalLql/c8/0IXBEYlu9IAAuDrY4kJRBbafanq7GjJ/rQqQ8vLy0LFjxwbPd+jQAbm5uffcKbo7XQ5SaVVNu9+fp6pGgynr0/S1014dFoJ3Rt5ntE3u7g/ywBtP1G1A+v6vmdh+Kv8uR5Cx/HoqH7mqKni6yPDnsIb/JhGZmou9LSY8EAgAWLH7AloxSUNmolXfIP7+/ti/f3+D5/fv399k3TNqW7pVbIKAdr2k9Fq5GmP+cxBb/8iDzEaKj8b0xZSYrkYfSRgb1Rnj7u8MQQBe3nAMZ/LLjHo9apxuaf9fBgQwIZbMxoQHAmFvK8XxyyVIuXBN7O5QK7UqQJo0aRJefvllfP7558jKykJWVhZWrVqFWbNmGRR/JeOxs5Hqp4/a6zTbhcJyjPz4AI5ll0DhaId1f4vCiDDTBegL/twT9we5o6Jag7+tOYrrFUyYN6Xfr6hw5NJ12EolGMul/WRGPF3s9VuKrNzN8iOWqlUJGq+88gquXbuGKVOm6OuvOTg4YM6cOZg3b16bdpCapnC0Q7m6tl0u9T9yqRiTvjiKksoa+Ls7YnXCAJMXlLWzkeLjsRF4Yvk+ZBdXYsr6NHwxcQDsrKR+VVWNBidyVDiaVYyjl64j53olXh/RCw8Ee4rdNQDA6ptL+x/v3fGeVykStbVJDwdh/aE
s7DlTiD+uqtDLVyF2l6iFWrWKTae8vBynT5+Go6MjunXr1uSeSNZI7FVsADD8o73442opPk/oj0EhlrHBZVv46fhVzP7mOKprtQjzV+KzCZHwdBHvdy8zrwxPfbwfFdUaTIjujDeeuE+0vtyLa+VqpGZdx9Gs6zh6qRi/XylFtUZr0EbpZIefpj0Ef3enJs5iGkXlajyQtAPVGi2+m/IA+gZYxv5l1L7M+OoYfjx+FSPCfPHRmL5id4duau739z0t8XFxcUH//v3v5RR0D3Qr2VTtZIpNEASs3H0B726t26F9aE9vfDi6r+h7EYX4yPHv+HC8sDYVa1KyEOLjir9Emff+J4Ig4GJRBY5eul43QpR1HRcKKxq06yC3R2RnN0R0dsMP6Vdx8ooKL61PxbcvPiBqOY+vDmWjWlMXIDM4InM1+ZEg/Hj8KjafuIr/GxqCAA9x/2NBLdPqAOno0aP4+uuvkZ2drZ9m09m0adM9d4zuTul4czftdrJZ5JoDl/TBUcKDgfjH8J5ms8fI0F4++L+h3fH+r2ew4IffEdzBGVFBHnc/0ESqa7X4/aoKRy/VTZelZl3HtUZyprp5uSAy0A2Rnd0RGeiGgNsqvA+7zwd/XroPv18pxYIffse7o/qIsqy+RqPF2oN1S/sTbq4WIjJHvXwVGNi9A/acKcR/9l7AW09a5uhye9WqAGnDhg0YP3484uLi8Ouvv2Lo0KE4c+YM8vPzMXLkyLbuIzVB0Y4K1qpu1ODfv50FAMwe0h3TB3cTuUcNTR3UFRl5Zdh8IhcvrU/Dj9MeRCc3cf7HqKqsQWp2XTB09NJ1HM8pgbrWcLpMZitFWCcFIgPd9aNEuu0jGtPJzQkfjemLCasO4+ujOegb4IYxIuwU/PPJXBSUqdFBbo/He3NpP5m3Fx8Jwp4zhfj66GXMjO1msnSAI5eK8dWhbHjK7TFmQAC6eDqb5LrWpFUB0jvvvIN///vfmDp1KuRyOT788EN06dIFkydPbnR/JDIOZTsqWLty93mobtSgm5cLXooJFrs7jZJIJHjv6TBculaB36+U4m9rjuJ/Lz0A5zberLI+QRBwufiGfqrs6KVinMkvb9DOzclOHwxFBrrhPj9Fi5fGP9ytA2YPDcF72zKx8Ic/0KOjK8L9lW10J82jS85+LqozZLbWkRBP1is6yANh/kocv1yCNQcuYfbQEKNe7/crKrz/ayZ2ZRbqn/t0zwU82NUDY6M6Y0hPb6tZSGJsrfqX+/z58xg+fDgAQCaToaKiAhKJBLNmzcKjjz6KN954o007SY3T5yBZ+QhSnqoKq/SbQIYabQPItuAos8Gn4yIxYtl+ZOSVYfbXx/Hx2H6QtuFUYK1Gi1O5pbfyhy5dR0GZukG7Lp7O+mAoMtAdQZ7ObTIl9tIjwUi/XILtp/IxZV0qfpr+EDxM9L/i9MslOJZdApmN1OzzvIiAuv84vfRIEF5cl4YvUrIw+ZHgNt/hHwDOFZRh8fYz+Plk3e7dNlIJRvXzQ1F5NXZmFmD/uWvYf+4aOsjtER/pj9ED/EUb4bYUrXqX3NzcUFZWtzGen58ffv/9d/Tu3RslJSWorKxs0w5S09pLDtKHyWegrtUisrMbYnuY/2o9X6UjPhkXgTGf1m1g+WHyWcwa0r3V5yurqsGx7JK6/KGs60i/XILKao1BGzsbCXr5KtA/0A0RN/OHjDWUL5VK8MGzYXhi2X5cLKrAjA3HsCZhgEkC19U3N4b8U1hHdJC3n1WzZNmG9PRBkKczLhRVYMPhbPzt4aA2O/fl4kp8mHwWm9JyoBUAiQR4IswXL8d2R+DNabWc65XYcPgyNhy5jMIyNZbtPIflu85hUIgXxkYFICbEy2zyOc1JqwKkgQMHYvv27ejduzeeeeYZzJw5Ezt27MD27dsxePDgtu4jNaE95CCdKyjHxiOXAQBzHwu1mFpbEZ3d8M+R9+HVb0/gw+SzCPGRNztf5mrJDRy5VFy35P7SdWTklaJ+NRlXB1tEdHbTT5mF+StNuqrM1cEOK5+LwJPL92P/uWv4YPsZzBnWsD5jWyoorcKWk3WljBIe6GLUaxG1JRupBC8MDMLcTSfx370XMT468J6nhwtKq7Bs5zl8dTgbNZq6fyCG9PTG7KHdEepjuHS9k5sT/i8uBDNju2H7qXysP5SF/eeuYUdGAXZkFMBX4YAxAwIQ398fXtxTTK9VAdKyZctQVVUFAJg/fz7s7Oxw4MABjBo1Cv/4xz/atIPUNF0OkjUv839vWwa0Qt0HPzLQXezutMizkf7IzCvDZ/suYvbXx9HZw6nBZnEarYDMvDL9VNnRS8W4qqpqcC5/d0f9yrLIzu7o5uXSptN2rRHiI8e7T/fBjK+OYcWu8wjrpMSw+3yMdr31h+q+CCI6u6F3J266R5ZlZD8/LN5+BnmlVfgh/QqeubnTdkuVVFZj5e4LWH3gIqpq6hZePNTVE7OHdr/rlhd2NlI83rsjHu/dERcKy/HV4Wx8k5qDq6oqfLD9DD5MPoshPb0xNqozHgj2EP3fGLG1OECqra3F5s2bERcXBwCQSqWYO3dum3eM7s7N+eYUm5WOIKVmXce2P/IhlQCvxhk3sdFY5j0WijP5Zdh7tggvfJGKDS/cj8vFlXXJ1FnXcSzrOsrUtQbH2Egl6NnR1WC5vbnuFD0izBfp2SVYtf8i/u+b4+jm7WKUHc3VtRqsP5QNAHieS/vJAtnb2mDiQ12Q9EsGPtlzAaP6dWpRAFKursWqfRfxnz0X9P9m9AtQ4v/iQlq1u31QBxfMH94Ts4eG4Jffc7H+YDaOZl3HL7/n4Zff8xDo4YS/RAXg6Qh/uDs3vbrVmrVqJ20nJyecPn0anTu33/pH5rCTdkFpFQa8kwypBDj39uNWFe0LgoD4Tw7i8KViPBvZCf96OkzsLrWaqrIGIz/ejwtFDTdiBOqqf/cNUOqDoXB/pdFXvrWlGo0WY/97CIcvFqOblwu+n/pgm/d/U1oOEr8+Dh9XB+ydM4ircMgilVXV4IFFO1BWVYtPx0VgaK+7j7hW1Wiw7mAWPt51HsU39y4L9ZHjlbgQPBrq1aZpBxl5pfjyUDa+S7uiD8JkNlI83tsHY+/vjMjObhaT5nAnRt1Je8CAAUhPT2/XAZI5cL05xaYVgPLqWrg62Inco7azI6MAhy8Vw95WipdjW5/gbA4UTnb4z4RIPPXxAahu1KCjwsFguX2oj6tFJ0ja2Uix7C998aeP9uFsQTle/d8JLBvTt83+IRUEAZ/vvwQAGBfdmcERWSy5gx2eu78zVuw6j5W7z2NIT+8mPyc1Gi2+OZqDj5LPIq+0btq9i6czEod0x/DeHY3yH+JQH1e8+cR9mPtYKH46fhXrDmbj5BUVvk+/iu/Tr6K7twvGRnXGyH5+VvV905RWBUhTpkxBYmIiLl++jIiICDg7G25A1adPnzbpHN2Zg50NHO1scKNGA1VljdX8wmq0gn7H7OcfDISv0lHkHt274A4u2Pl/Maiq0VjF/dTnJXfAiuf6If6Tg9hyIhd9/ZVttlInLbsEJ6+oIL
OVYnT/1uVtEJmLhAcD8dm+i0jLLsGRS9cxoIthbqVGK+Cn41fx79/OIOta3apwX4UDZsZ2w6h+nUyyWtRJZov4/gGI7x+AEzklWH8wGz8ev4oz+eVY+OMfWPRLBkaE+WLs/QHo00lp9P6IpVVTbFJpwzdIIpFAEARIJBJoNJpGjrIu5jDFBgDRScnIVVXhx2kPWs0v6repOfi/b47D1cEWe199VL9aj8zfmgOXsPDHP2AjlWDdxChEB997uZVpX6Zh84lci59qJdL5+3cn8eWhbDwa6oVVz9fVMxUEAb+eysfiX88gM79uGx1PFxmmDuqKMQMCRK19CAClVTX4/tgVrDuYZbARbXykP958sleLN50Vk1Gn2C5evNjqjlHbUjjaIVdVZTW7aVfVaLD410wAdaU7GBxZlvHRnXEs+zq+T7+K6V+lYfP0h+GjaH2Cea7qBn75vW7juwlMziYr8cLDQdhwOBs7MgqQkVeKorJqvPdrJo5fLgEAyB1s8eIjwXj+gUCzyUd0dbDD+OhAjLu/M1KzrmPdwSz8ePwqNh69jLMFZVg5LgJecvNcTNJarfqbZ+6R+VBa2V5Ia1OycFVVhY4KB34hWiCJRIKkp/ogI68MGXlleGl9Kja+EN3qPV/WH8yGRitgQBf3BlskEFmqQE9nPHZfR2w5mYvRnx7U/wfX0c4Gf30oEC88HGy2/zmUSCR1OZSB7hjZrxOmf5mGtOwSjFi6H5+Mi0CYiUsPGVOrAqQvvvjijq+PHz++VZ2hltPtpq2ygt20VTdqsGznOQDArCHdRR9SptZxlNlg5XMR+POyfTiWXYJ/bjmFN59oeRXzqhoNvjxct7T/rw8GtnEvicT14iPB2HIyFyWVNfrSOVMHdbWoHeIf6d4BP0x7CJO+OIpzBeV45pMUvDuqN0b27SR219pEqwKkmTNnGvxcU1ODyspKyGQyODk5MUAyIf0IkhVMsekK0nb3dsGoftbxAWuvAj2dsSQ+HBPXHMUXKVkI91fiqRa+pz8ev4riimr4KR0R28PbSD0lEkfvTgq89qeeyC25gYSHusDPQhdvdPF0xndTHsCsjen47XQBZm08jtO5ZZgzLNSiV+cCQKvGva9fv27wKC8vR2ZmJh566CF89dVXbd1HugNrKTdye0HaV+Is/4NFwOAe3pjxaFcAwLxNJ/HHVVWzjxUEAatvW9pvzgWKiVpr4kNd8I8/9bTY4EhH7mCHT8dFYvrNz/uney7g+c8PW3yVhzb7V6dbt25YtGhRg9ElMq5bBWst+xdxyW+WVZCWmmdmbHc80r0D1LVavLQurdn/YB6+WIxTuaVwsOPSfiJLIJVKMHtoCJb/pR8c7Wyw92wRnli+D2dvrsizRG363zJbW1tcvXq1LU9Jd6GbYlPdsNwcpHMF5fj6qOUVpKW7s5FK8OHocHRyc0R2cSVe3ngM2vqVdxux+sAlAMDIvp2gdGqfZQ6ILNHwPh3xv5cegJ/SEZeuVeLJ5fux/VS+2N1qlVblIP34448GPwuCgNzcXCxbtgwPPvhgm3SMmsfNCnKQLLkgLd2d0kmGlc9FYNSKA9iZWYiPdpy94+7oV0puYNsfdUv7WXeNyPL09HXFj9MexNQv03DwQjEmfXEUs4d0x7RHu1rUf4BbFSA9+eSTBj9LJBJ06NABjz76KD744IO26Bc1k8LRsgvWWkNBWrq7+/wUeHtkb/zfN8fxYfJZhHVSYlBo41Opa1OyoBWAB4I9EOIjN3FPiagteLjYY+3EKPxz8ymsScnCB9vP4HReKd57Osxs9na6m1ZNsWm1WoOHRqNBXl4evvzyS3Ts2LGt+0h3YMmr2ARBwLu/1JUUeSbCH928+WVozZ6O6ISxUQEQBGDmhmPIvllG4XY3qjX46ubS/oQHu5i6i0TUhuxspHjjifuw6KnesLOR4OeTeRi14gAuFzf87JsjLg2xcLfnILWiaoyoDArSDukmdnfIBBb8uSfC/ZUorarF5HWpuFFtWJbo+/QrUN2ogb+7Ix5tYoSJiCzL6AEB+GrS/fB0sUdGXhlGLNuHA+eLxO7WXbUqQBo1ahTefffdBs//61//wjPPPNPi8y1fvhyBgYFwcHBAVFQUDh8+3GTbTZs2ITIyEkqlEs7OzggPD8fatWubbP/iiy9CIpFgyZIlBs8XFxdj7NixcHV1hVKpxMSJE1FeXt74ScyYbhVbjUZARbXl1MC7vSBtwoNd0FFh2ctcqXnsbW2w4rl+8HCW4XRuKeZ/f1If2N++tH9CdCC3eiCyIpGB7vhx2oPo7afA9coajPvsMFbvv2jW/7FvVYC0Z88ePP744w2ef+yxx7Bnz54WnWvjxo1ITEzEwoULkZaWhrCwMMTFxaGgoKDR9u7u7pg/fz5SUlJw4sQJJCQkICEhAdu2bWvQ9rvvvsPBgwfh6+vb4LWxY8fijz/+wPbt27F582bs2bMHL7zwQov6bg4c7KT6Mg4lFrSb9qa0HJzJL4ergy1eeiRY7O6QCXVUOGLpX/pCKgE2pV3BukN1U2op568hM78MTjIbPBPJpf1E1sZX6YhvXozGyL5+0GgFvP7TKcz53wmoa83zP/etCpDKy8shkzVcemtnZ4fS0tIWnWvx4sWYNGkSEhIS0LNnT6xcuRJOTk5YtWpVo+1jYmIwcuRI9OjRA8HBwZg5cyb69OmDffv2GbS7cuUKpk+fjvXr18POzrCmzenTp7F161b897//RVRUFB566CEsXboUGzZssLhtCiQSCZSOlpWHVFWjwb+3nwHAgrTt1QPBnpgzLBQA8OZPfyA16zo+v7m0f1S/TlA48neCyBo52Nlg8bNhmP94D0glwNdHczDm04MoKK0Su2sNtCpA6t27NzZu3Njg+Q0bNqBnz57NPk91dTVSU1MRGxt7q0NSKWJjY5GSknLX4wVBQHJyMjIzMzFw4ED981qtFuPGjcMrr7yCXr16NTguJSUFSqUSkZGR+udiY2MhlUpx6NChRq+lVqtRWlpq8DAXt/KQLCNAYkFaAoAXBgbhsft8UKMRMHltKn47XbdXyoQHWAybyJpJJBJMGhiEzxMGwNXBtq7Y7bL9OH65ROyuGWjVWrvXXnsNTz31FM6fP49HH30UAJCcnIyvvvoK33zzTbPPU1RUBI1GA29vwzpL3t7eyMjIaPI4lUoFPz8/qNVq2NjY4OOPP8aQIUP0r7/77ruwtbXFjBkzGj0+Ly8PXl6GCaC2trZwd3dHXl5eo8ckJSXhjTfeaO6tmZQl7abNgrSkI5FI8N4zYTiTX4bzhRUAgIe7eaKrF1czErUHjRW7XfRU7xbXbTSWVo0g/fnPf8b333+Pc+fOYcqUKZg9ezZycnLw22+/NdgjyRjkcjnS09Nx5MgRvP3220hMTMSuXbsAAKmpqfjwww+xevXqNt2Qat68eVCpVPrH5cuX2+zc9+pWPTbzz0FiQVq6nYu9LT4ZFwFnWV2g/NeHuLSfqD3RFbuN7eGF6lotEr8+jre3nEKtRit211o3ggQAw4cPx/Dhw+/p4p6enrCxsUF+vuE25Pn5+fDx8
WnyOKlUiq5d64rihYeH4/Tp00hKSkJMTAz27t2LgoICBAQE6NtrNBrMnj0bS5YswaVLl+Dj49MgCby2thbFxcVNXtfe3h729vatvVWjspQcpNsL0r7KgrR0U1cvOTZOjsbFogrEdO8gdneIyMR0xW7//dsZLN1xDv/ZexEZeWVYNqafqDmqrRpBOnLkSKO5OocOHcLRo0ebfR6ZTIaIiAgkJyfrn9NqtUhOTkZ0dHSzz6PVaqFWqwEA48aNw4kTJ5Cenq5/+Pr64pVXXtGvdIuOjkZJSQlSU1P159ixYwe0Wi2ioqKafV1zYSk5SLqCtP0D3TCYBWnpNvf5KfDnMF+LKkNARG3HHIvdtmoEaerUqXj11VcbBBNXrlzBu+++22Sic2MSExMxYcIEREZGYsCAAViyZAkqKiqQkJAAABg/fjz8/PyQlJQEoC4XKDIyEsHBwVCr1fj555+xdu1arFixAgDg4eEBDw8Pg2vY2dnBx8cHISF1pSx69OiBYcOGYdKkSVi5ciVqamowbdo0jB49utEtAcydrpinOS/zP1dQxoK0RER0R8P7dEQXT2dM+uIoLl2rRGZ+mWhVFloVIJ06dQr9+vVr8Hzfvn1x6tSpFp0rPj4ehYWFWLBgAfLy8hAeHo6tW7fqE7ezs7Mhld4a6KqoqMCUKVOQk5MDR0dHhIaGYt26dYiPj2/RddevX49p06Zh8ODBkEqlGDVqFD766KMWncNcWEK5kfe2ZeoL0kZ0ZkFaIiJqnK7YbXJGAf7UR7xBC4nQim0sPTw8sHnz5gbTYAcOHMDw4cNx/fr1NuuguSotLYVCoYBKpYKrq6uofdlyIhdTv0zDgC7u+Hpy86cmTSU16zpGrTgAqQTY9vJA1lwjIiLRNPf7u1U5SEOHDtWv6tIpKSnB3//+d4Pl9mQa+hwkMxxBYkFaIiKyRK2aYnv//fcxcOBAdO7cGX379gUApKenw9vb+4510cg4dLsOm+MyfxakJSIiS9SqAMnPzw8nTpzA+vXrcfz4cTg6OiIhIQFjxoxpUNaDjE83gnS9sgaCIJhNAjQL0hIRkaVq9T5Izs7OeOihhxAQEIDq6rqRi19++QUAMGLEiLbpHTWLbhVbda0WVTVaOMrMY3dqXUFahaMdC9ISEZFFaVWAdOHCBYwcORInT56ERCJpMGqh0ZhnZV5r5Syzga1UglqtgJIb1XCUiT9SU1WjwWJ9QdpgFqQlIiKL0qok7ZkzZ6JLly4oKCiAk5MTfv/9d+zevRuRkZH6kh9kOhKJxOyW+n+Rcgm5qir4KhwwPjpQ7O4QERG1SKsCpJSUFLz55pvw9PSEVCqFjY0NHnroISQlJTVZIJaMS2FG5UZUN2qwfOd5AMDLLEhLREQWqFUBkkajgVxet1zb09MTV69eBQB07twZmZmZbdc7ajZdHpLKDFaysSAtERFZulblIN133304fvw4unTpgqioKPzrX/+CTCbDp59+iqCgoLbuIzWDuRSsZUFaIiKyBq0KkP7xj3+goqICAPDmm2/iT3/6Ex5++GF4eHhg48aNbdpBah5dEnSJyAVrWZCWiIisQasCpLi4OP2fu3btioyMDBQXF8PNzc1s9uBpb9z0BWvFC5AuF1eyIC0REVmFVu+DVJ+7OwuQikk3xSZmDtLhi8XQCkC/ACUL0hIRkUVrVZI2mR9zWOafmV8GALjPTyFaH4iIiNoCAyQroTCDKbaMvLoAKcSHBWmJiMiyMUCyEroptuuV4k2xZeaVAgBCGSAREZGFY4BkJXRTbCqRVrGVVFYjv1QNAOjuzQCJiIgsGwMkK6F0FHeKTTe95qd0hNyBddeIiMiyMUCyErp9kG7UaFBVY/piwZk3AyROrxERkTVggGQl5Pa20G1aXSrCNBsTtImIyJowQLISUqnkVsFaEQIkXYI2AyQiIrIGDJCsiFKkpf6CIOBMfjkAINTH1aTXJiIiMgYGSFZEP4Jk4qX+OddvoFxdCzsbCYI6OJv02kRERMbAAMmKuIlUsFaXoB3cwQV2NvyVIiIiy8dvMyuim2JTmXiKTVdihCvYiIjIWjBAsiK3krRNO8V2awUb84+IiMg6MECyImIVrGWJESIisjYMkKyI0tH0AVJ1rRYXCisAcIk/ERFZDwZIVkS/zN+EU2znC8tRqxUgd7BFR4WDya5LRERkTAyQrIhChCm220uMSCQSk12XiIjImBggWRExpthYYoSIiKwRAyQrol/mb8J9kG6VGOEKNiIish5mESAtX74cgYGBcHBwQFRUFA4fPtxk202bNiEyMhJKpRLOzs4IDw/H2rVrDdq8/vrrCA0NhbOzM9zc3BAbG4tDhw4ZtAkMDIREIjF4LFq0yCj3Zyq6EaRydS1qNFqTXPP2KTYiIiJrIXqAtHHjRiQmJmLhwoVIS0tDWFgY4uLiUFBQ0Gh7d3d3zJ8/HykpKThx4gQSEhKQkJCAbdu26dt0794dy5Ytw8mTJ7Fv3z4EBgZi6NChKCwsNDjXm2++idzcXP1j+vTpRr1XY3O9GSABphlFUt2owVVVFQCguzcDJCIish6iB0iLFy/GpEmTkJCQgJ49e2LlypVwcnLCqlWrGm0fExODkSNHokePHggODsbMmTPRp08f7Nu3T9/mL3/5C2JjYxEUFIRevXph8eLFKC0txYkTJwzOJZfL4ePjo384OzddR0ytVqO0tNTgYW5spBK4OtgCME0e0pmbO2j7Khz0m1QSERFZA1EDpOrqaqSmpiI2Nlb/nFQqRWxsLFJSUu56vCAISE5ORmZmJgYOHNjkNT799FMoFAqEhYUZvLZo0SJ4eHigb9++eO+991BbW9vktZKSkqBQKPQPf3//Zt6lad3KQzL+Un8maBMRkbWyFfPiRUVF0Gg08Pb2Nnje29sbGRkZTR6nUqng5+cHtVoNGxsbfPzxxxgyZIhBm82bN2P06NGorKxEx44dsX37dnh6eupfnzFjBvr16wd3d3ccOHAA8+bNQ25uLhYvXtzoNefNm4fExET9z6WlpWYZJLk52SG72DQjSBm5TNAmIiLrJGqA1FpyuRzp6ekoLy9HcnIyEhMTERQUhJiYGH2bQYMGIT09HUVFRfjPf/6DZ599FocOHYKXlxcAGAQ7ffr0gUwmw+TJk5GUlAR7e/sG17S3t2/0eXOj0G0WaYIAiQnaRERkrUSdYvP09ISNjQ3y8/MNns/Pz4ePj0+Tx0mlUnTt2hXh4eGYPXs2nn76aSQlJRm0cXZ2RteuXXH//ffjs88+g62tLT777LMmzxkVFYXa2lpcunTpnu5JbPq9kIycpC0IAjLzOcVGRETWSdQASSaTISIiAsnJyfrntFotkpOTER0d3ezzaLVaqNXqe2qTnp4OqVSqH2GyVLqCtapK4+YgXVVVoayqFrZSCYI7uBj1WkRERKYm+hRbYmIiJkyYgMjISAwYMABLlixBRUUFEhISAADjx4+Hn5+ffoQoKSkJkZGRCA4Ohlqtxs8//4y1a9dixYoVAICKigq8/fbbGDFiBDp27IiioiIsX74cV65cwTPPPAMASElJwaFD
hzBo0CDI5XKkpKRg1qxZeO655+Dm5ibOX0Qb0Y0gXTfyFJtug8igDs6Q2Yq+GJKIiKhNiR4gxcfHo7CwEAsWLEBeXh7Cw8OxdetWfeJ2dnY2pNJbX8AVFRWYMmUKcnJy4OjoiNDQUKxbtw7x8fEAABsbG2RkZGDNmjUoKiqCh4cH+vfvj71796JXr14A6vKJNmzYgNdffx1qtRpdunTBrFmzDPKSLJU+B8nIU2y3VrAxQZuIiKyPRBAEQexOWKLS0lIoFAqoVCq4uppPkPC/1BzM/uY4Hu7mibUTo4x2nZkbjuGH9Kt4JS4EUwd1Ndp1iIiI2lJzv785N2Jl9DlIRh5B0q1gC+EO2kREZIUYIFkZXYBkzGX+NRotzheWA+AKNiIisk4MkKyMwlG3D5LxVrFdKKxAjUaAi70tOrk5Gu06REREYmGAZGV0I0ilVbXQaI2TXpZxcwVbd28XSCQSo1yDiIhITAyQrMztRWNLjZSHlMkVbEREZOUYIFkZOxsp5PZ1uzcYa6k/S4wQEZG1Y4BkhRT6RG3j5CHd2gOJARIREVknBkhWSL+SzQgjSGVVNbhScgMAR5CIiMh6MUCyQsqbK9lURljqf+ZmgVpvV3sob+7aTUREZG0YIFkh3RTbdSNMsbHECBERtQcMkKyQrmCtMTaLZII2ERG1BwyQrJAxy41ksMQIERG1AwyQrJDSSLtpC4Jw2x5IDJCIiMh6MUCyQgojrWLLL1VDdaMGNlIJunq5tOm5iYiIzAkDJCtkrBwkXYmRQA8nONjZtOm5iYiIzAkDJCukW37f1jlItxK0uYKNiIisGwMkK6Q00k7azD8iIqL2ggGSFbp9FZtWK7TZeVlihIiI2gsGSFZIcTMHSSsAZeraNjlnrUaLc4XlALgHEhERWT8GSFbI3tYGTrK6JOq2Kjdy6VoFqmu1cJLZwN/NqU3OSUREZK4YIFkp/Uq2G22Th6SbXuvmLYdUKmmTcxIREZkrBkhWSuGk2yyybUaQ9CvYuIM2ERG1AwyQrJRuBKmtCtYyQZuIiNoTBkhWqq3rsbFILRERtScMkKzUrb2Q7j1AqlDXIru4EgBHkIiIqH1ggGSlFI5tl4N0Jr9u9MjTxR4eLvb3fD4iIiJzxwDJSulHkNpgFRun14iIqL1hgGSldEnabbEPEhO0iYiovWGAZKVujSDde4DEGmxERNTeMECyUrdykO5tik0QBGTmc4qNiIjaF7MIkJYvX47AwEA4ODggKioKhw8fbrLtpk2bEBkZCaVSCWdnZ4SHh2Pt2rUGbV5//XWEhobC2dkZbm5uiI2NxaFDhwzaFBcXY+zYsXB1dYVSqcTEiRNRXl5ulPsTg5tz2yzzLyxXo7iiGhIJ0M2LARIREbUPogdIGzduRGJiIhYuXIi0tDSEhYUhLi4OBQUFjbZ3d3fH/PnzkZKSghMnTiAhIQEJCQnYtm2bvk337t2xbNkynDx5Evv27UNgYCCGDh2KwsJCfZuxY8fijz/+wPbt27F582bs2bMHL7zwgtHv11SUt61iEwSh1efRTa918XCG4836bkRERNZOItzLt2cbiIqKQv/+/bFs2TIAgFarhb+/P6ZPn465c+c26xz9+vXD8OHD8dZbbzX6emlpKRQKBX777TcMHjwYp0+fRs+ePXHkyBFERkYCALZu3YrHH38cOTk58PX1ves1dedUqVRwdXVt5t2aTlWNBqGvbQUA/P5GHFzsbVt1nv/uvYB/bjmNx+7zwYrnItqyi0RERCbX3O9vUUeQqqurkZqaitjYWP1zUqkUsbGxSElJuevxgiAgOTkZmZmZGDhwYJPX+PTTT6FQKBAWFgYASElJgVKp1AdHABAbGwupVNpgKk5HrVajtLTU4GHOHOxsYG9b9/beSx4SV7AREVF7JGqAVFRUBI1GA29vb4Pnvb29kZeX1+RxKpUKLi4ukMlkGD58OJYuXYohQ4YYtNm8eTNcXFzg4OCAf//739i+fTs8PT0BAHl5efDy8jJob2trC3d39yavm5SUBIVCoX/4+/u35pZNqi120+YeSERE1B6JnoPUGnK5HOnp6Thy5AjefvttJCYmYteuXQZtBg0ahPT0dBw4cADDhg3Ds88+22ReU3PMmzcPKpVK/7h8+fI93oXxKe9xN22NVtDvoh3iY37TiERERMbSusSUNuLp6QkbGxvk5+cbPJ+fnw8fH58mj5NKpejatSsAIDw8HKdPn0ZSUhJiYmL0bZydndG1a1d07doV999/P7p164bPPvsM8+bNg4+PT4Ngqba2FsXFxU1e197eHvb2llVmQ3GPu2lnXauAulYLBzspAtyd2rJrREREZk3UESSZTIaIiAgkJyfrn9NqtUhOTkZ0dHSzz6PVaqFWq5vdJjo6GiUlJUhNTdW/vmPHDmi1WkRFRbXwLsyXbjft1o4g6abXunvLYSOVtFm/iIiIzJ2oI0gAkJiYiAkTJiAyMhIDBgzAkiVLUFFRgYSEBADA+PHj4efnh6SkJAB1uUCRkZEIDg6GWq3Gzz//jLVr12LFihUAgIqKCrz99tsYMWIEOnbsiKKiIixfvhxXrlzBM888AwDo0aMHhg0bhkmTJmHlypWoqanBtGnTMHr06GatYLMUuhyk1u6FpE/Q9mb+ERERtS+iB0jx8fEoLCzEggULkJeXh/DwcGzdulWfuJ2dnQ2p9NZAV0VFBaZMmYKcnBw4OjoiNDQU69atQ3x8PADAxsYGGRkZWLNmDYqKiuDh4YH+/ftj79696NWrl/4869evx7Rp0zB48GBIpVKMGjUKH330kWlv3siUTve2m3ZGXt1KPa5gIyKi9kb0fZAslbnvgwQAy3eew3vbMvFMRCe890xYi4+PeW8nLl2rxLqJUXiom6cRekhERGRaFrEPEhnXvRSsrayuRVZxJQCOIBERUfvDAMmKud2cYlO1Ikn7bH45BAHwcJahg9yyVu8RERHdKwZIVky/iq0Vy/wzuYM2ERG1YwyQrJjiHnbSZokRIiJqzxggWTH9KrYbNWhpLn5mft0KNpYYISKi9ogBkhXTTbFV12pxo0bTomNvTbGZ5wo9IiIiY2KAZMWcZDaws6nbAbsl02xF5WoUlVdDIgG6e7sYq3tERERmiwGSFZNIJFC0omCtbvQowN0JTjLR9xIlIiIyOQZIVk7ZioK1LDFCRETtHQMkK6fLQ2rJXkiZeUzQJiKi9o0BkpVrzW7aTNAmIqL2jgGSlWtpDpJWK+BMfjkA7oFERETtFwMkK9fSHKTs4krcqNFAZitFoIeTMbtGRERkthggWTk3p5blIOkStLt5ucDWhr8eRETUPvEb0MopnFo2xcYabERERAyQrF5LC9ayxAgREREDJKunbGHB2gyuYCMiImKAZO2UN1exqZqxzL+qRoNLRRUAOIJERETtGwMkK6cbQbpeefcptnMF5dAKdcd4ye2N3TUiIiKzxQDJyiluBkhVNVpU1Wju2Pb2EiMSicTofSMiIjJXDJCsnNzeFjbSumDnbtNsLDFCRERUhwGSlZNIJFA4Ni9RmwnaREREdRggtQP6pf5
3yUPiHkhERER1GCC1A4pmFKy9XlGNgjI1AAZIREREDJDaAd0I0p3Kjeim1zq5OcLF3tYk/SIiIjJXDJDaAaWu3MgddtNmgjYREdEtDJDagebspp2Zz/wjIiIiHQZI7YBuN+075SBxBRsREdEtDJDaAd0IUlM5SFqtgDM3AyROsRERETFAahf0U2xN5CBdKbmBimoN7Gwk6OLpbMquERERmSWzCJCWL1+OwMBAODg4ICoqCocPH26y7aZNmxAZGQmlUglnZ2eEh4dj7dq1+tdramowZ84c9O7dG87OzvD19cX48eNx9epVg/MEBgZCIpEYPBYtWmS0exSTbqPI6xWNjyDppteCO7jAzsYsfiWIiIhEJfq34caNG5GYmIiFCxciLS0NYWFhiIuLQ0FBQaPt3d3dMX/+fKSkpODEiRNISEhAQkICtm3bBgCorKxEWloaXnvtNaSlpWHTpk3IzMzEiBEjGpzrzTffRG5urv4xffp0o96rWHSr2JoqNcIVbERERIZE3/Bm8eLFmDRpEhISEgAAK1euxJYtW7Bq1SrMnTu3QfuYmBiDn2fOnIk1a9Zg3759iIuLg0KhwPbt2w3aLFu2DAMGDEB2djYCAgL0z8vlcvj4+LT9TZmZu+2kzQRtIiIiQ6KOIFVXVyM1NRWxsbH656RSKWJjY5GSknLX4wVBQHJyMjIzMzFw4MAm26lUKkgkEiiVSoPnFy1aBA8PD/Tt2xfvvfceamtrmzyHWq1GaWmpwcNS6HKQKqo1qK7VNng9kwnaREREBkQdQSoqKoJGo4G3t7fB897e3sjIyGjyOJVKBT8/P6jVatjY2ODjjz/GkCFDGm1bVVWFOXPmYMyYMXB1vTVCMmPGDPTr1w/u7u44cOAA5s2bh9zcXCxevLjR8yQlJeGNN95oxV2KT+5gB4kEEIS6abYOcnv9a+paDS4UVQDgHkhEREQ6ok+xtYZcLkd6ejrKy8uRnJyMxMREBAUFNZh+q6mpwbPPPgtBELBixQqD1xITE/V/7tOnD2QyGSZPnoykpCTY29ujvnnz5hkcU1paCn9//7a9MSOxkUrg6mAH1Y0aqG5UGwRI5wsqoNEKkDvYoqPCQcReEhERmQ9RAyRPT0/Y2NggPz/f4Pn8/Pw75gZJpVJ07doVABAeHo7Tp08jKSnJIEDSBUdZWVnYsWOHwehRY6KiolBbW4tLly4hJCSkwev29vaNBk6WQulUFyDV3007M/9WgrZEIhGja0RERGZH1BwkmUyGiIgIJCcn65/TarVITk5GdHR0s8+j1WqhVqv1P+uCo7Nnz+K3336Dh4fHXc+Rnp4OqVQKLy+vlt2EhbiVqG0YIN1K0Ob0GhERkY7oU2yJiYmYMGECIiMjMWDAACxZsgQVFRX6VW3jx4+Hn58fkpKSANTlAkVGRiI4OBhqtRo///wz1q5dq59Cq6mpwdNPP420tDRs3rwZGo0GeXl5AOq2CJDJZEhJScGhQ4cwaNAgyOVypKSkYNasWXjuuefg5uYmzl+Ekd0qWFtvBIkr2IiIiBoQPUCKj49HYWEhFixYgLy8PISHh2Pr1q36xO3s7GxIpbcGuioqKjBlyhTk5OTA0dERoaGhWLduHeLj4wEAV65cwY8//gigbvrtdjt37kRMTAzs7e2xYcMGvP7661Cr1ejSpQtmzZplkGNkbW4VrDVc6s8VbERERA1JBEEQxO6EJSotLYVCoYBKpbprfpM5WPjD71iTkoXpj3bF7KF1OVaqyhqEvfkrAOD4wqH6HbeJiIisVXO/v0XfSZtMQ6GbYrstBykzv270yE/pyOCIiIjoNgyQ2gl9kvZtOUi6EiNM0CYiIjLEAKmdaCwHiSvYiIiIGscAqZ24FSDdGkHKYII2ERFRoxggtRMKR90y/7oRJEEQcIYjSERERI1igNRO1B9BulJyA2XqWthKJQjydBGza0RERGaHAVI7oUvSLquqRa1Gq9//KLiDC2S2/DUgIiK6Hb8Z24nbl/GXVtUyQZuIiOgOGCC1E7Y2Usjt6zZOL6msvq3ECAMkIiKi+hggtSNK51t7IbHECBERUdMYILUjypsr2QrL1DhfWA6AI0hERESNYYDUjuhWsqVlX0etVoDc3hZ+SkeRe0VERGR+GCC1I7pE7UMXigEA3X3kkEgkYnaJiIjILDFAakd0I0gnr6gAcHqNiIioKQyQ2hFdDpJGKwBggjYREVFTGCC1I7oRJJ0QbwZIREREjWGA1I7cvlkkAIT6uIrUEyIiIvPGAKkdUTrJ9H/2cXWAot6IEhEREdVhgNSO3D7FxgRtIiKipjFAakeUt02xMUGbiIioaQyQ2hEFR5CIiIiahQFSO6Jb5g8wQCIiIroTW7E7QKYjs5Xiz2G+KCyr4hJ/IiKiO2CA1M4sHdNX7C4QERGZPU6xEREREdXDAImIiIioHgZIRERERPUwQCIiIiKqhwESERERUT0MkIiIiIjqYYBEREREVI9ZBEjLly9HYGAgHBwcEBUVhcOHDzfZdtOmTYiMjIRSqYSzszPCw8Oxdu1a/es1NTWYM2cOevfuDWdnZ/j6+mL8+PG4evWqwXmKi4sxduxYuLq6QqlUYuLEiSgvLzfaPRIREZHlED1A2rhxIxITE7Fw4UKkpaUhLCwMcXFxKCgoaLS9u7s75s+fj5SUFJw4cQIJCQlISEjAtm3bAACVlZVIS0vDa6+9hrS0NGzatAmZmZkYMWKEwXnGjh2LP/74A9u3b8fmzZuxZ88evPDCC0a/XyIiIjJ/EkEQBDE7EBUVhf79+2PZsmUAAK1WC39/f0yfPh1z585t1jn69euH4cOH46233mr09SNHjmDAgAHIyspCQEAATp8+jZ49e+LIkSOIjIwEAGzduhWPP/44cnJy4Ovr2+AcarUaarVa/3NpaSn8/f2hUqng6ura0tsmIiIiEZSWlkKhUNz1+1vUEaTq6mqkpqYiNjZW/5xUKkVsbCxSUlLuerwgCEhOTkZmZiYGDhzYZDuVSgWJRAKlUgkASElJgVKp1AdHABAbGwupVIpDhw41eo6kpCQoFAr9w9/fv5l3SURERJZG1ACpqKgIGo0G3t7eBs97e3sjLy+vyeNUKhVcXFwgk8kwfPhwLF26FEOGDGm0bVVVFebMmYMxY8boI8W8vDx4eXkZtLO1tYW7u3uT1503bx5UKpX+cfny5ZbcKhEREVkQiyxWK5fLkZ6ejvLyciQnJyMxMRFBQUGIiYkxaFdTU4Nnn30WgiBgxYoV93RNe3t72Nvb39M5iIiIyDKIGiB5enrCxsYG+fn5Bs/n5+fDx8enyeOkUim6du0KAAgPD8fp06eRlJRkECDpgqOsrCzs2LHDYJ7Rx8enQRJ4bW0tiouL73hdIiIiah9EDZBkMhkiIiKQnJyMJ598EkBdknZycjKmTZvW7PNotVqDBGpdcHT27Fns3LkTHh4eBu2jo6NRUlKC1NRUREREAAB27NgBrVaLqKioZl1Tl9teWlra7H4SERGRuHTf23ddoyaIbMOGDYK9vb2wevVq4dSpU8ILL7wgKJVKIS8vTxAEQR
g3bpwwd+5cfft33nlH+PXXX4Xz588Lp06dEt5//33B1tZW+M9//iMIgiBUV1cLI0aMEDp16iSkp6cLubm5+odardafZ9iwYULfvn2FQ4cOCfv27RO6desmjBkzptn9vnz5sgCADz744IMPPviwwMfly5fv+D0veg5SfHw8CgsLsWDBAuTl5SE8PBxbt27VJ25nZ2dDKr2VS15RUYEpU6YgJycHjo6OCA0Nxbp16xAfHw8AuHLlCn788UcAddNvt9u5c6d+Gm79+vWYNm0aBg8eDKlUilGjRuGjjz5qdr99fX1x+fJlyOVySCSSe/gbMG+67QwuX75s9dsZtKd7BdrX/fJerVd7ul/ea9sQBAFlZWWNbulzO9H3QSLz1tz9IqxBe7pXoH3dL+/VerWn++W9mpboO2kTERERmRsGSERERET1MECiO7K3t8fChQvbxR5Q7elegfZ1v7xX69We7pf3alrMQSIiIiKqhyNIRERERPUwQCIiIiKqhwESERERUT0MkIiIiIjqYYDUjiUlJaF///6Qy+Xw8vLCk08+iczMzDses3r1akgkEoOHg4ODiXrceq+//nqDfoeGht7xmG+++QahoaFwcHBA79698fPPP5uot/cuMDCwwf1KJBJMnTq10faW9L7u2bMHf/7zn+Hr6wuJRILvv//e4HVBELBgwQJ07NgRjo6OiI2NxdmzZ+963uXLlyMwMBAODg6IiorC4cOHjXQHzXene62pqcGcOXPQu3dvODs7w9fXF+PHj8fVq1fveM7WfBZM5W7v7fPPP9+g78OGDbvreS3tvQXQ6OdXIpHgvffea/Kc5vreNue7pqqqClOnToWHhwdcXFwwatSoBoXs62vtZ725GCC1Y7t378bUqVNx8OBBbN++HTU1NRg6dCgqKirueJyrqytyc3P1j6ysLBP1+N706tXLoN/79u1rsu2BAwcwZswYTJw4EceOHcOTTz6JJ598Er///rsJe9x6R44cMbjX7du3AwCeeeaZJo+xlPe1oqICYWFhWL58eaOv/+tf/8JHH32ElStX4tChQ3B2dkZcXByqqqqaPOfGjRuRmJiIhQsXIi0tDWFhYYiLi0NBQYGxbqNZ7nSvlZWVSEtLw2uvvYa0tDRs2rQJmZmZGDFixF3P25LPgind7b0FgGHDhhn0/auvvrrjOS3xvQVgcI+5ublYtWoVJBIJRo0adcfzmuN725zvmlmzZuGnn37CN998g927d+Pq1at46qmn7nje1nzWW6TZ1VnJ6hUUFAgAhN27dzfZ5vPPPxcUCoXpOtVGFi5cKISFhTW7/bPPPisMHz7c4LmoqChh8uTJbdwz05g5c6YQHBwsaLXaRl+31PcVgPDdd9/pf9ZqtYKPj4/w3nvv6Z8rKSkR7O3tha+++qrJ8wwYMECYOnWq/meNRiP4+voKSUlJRul3a9S/18YcPnxYACBkZWU12aalnwWxNHa/EyZMEJ544okWncda3tsnnnhCePTRR+/YxlLe2/rfNSUlJYKdnZ3wzTff6NucPn1aACCkpKQ0eo7WftZbgiNIpKdSqQAA7u7ud2xXXl6Ozp07w9/fH0888QT++OMPU3Tvnp09exa+vr4ICgrC2LFjkZ2d3WTblJQUxMbGGjwXFxeHlJQUY3ezzVVXV2PdunX461//esfCypb6vt7u4sWLyMvLM3jvFAoFoqKimnzvqqurkZqaanCMVCpFbGysxb3fKpUKEokESqXyju1a8lkwN7t27YKXlxdCQkLw0ksv4dq1a022tZb3Nj8/H1u2bMHEiRPv2tYS3tv63zWpqamoqakxeJ9CQ0MREBDQ5PvUms96SzFAIgCAVqvFyy+/jAcffBD33Xdfk+1CQkKwatUq/PDDD1i3bh20Wi0eeOAB5OTkmLC3LRcVFYXVq1dj69atWLFiBS5evIiHH34YZWVljbbPy8uDt7e3wXPe3t7Iy8szRXfb1Pfff4+SkhI8//zzTbax1Pe1Pt3705L3rqioCBqNxuLf76qqKsyZMwdjxoy5Y3HPln4WzMmwYcPwxRdfIDk5Ge+++y52796Nxx57DBqNptH21vLerlmzBnK5/K5TTpbw3jb2XZOXlweZTNYgsL/T+9Saz3pL2bbJWcjiTZ06Fb///vtd56ujo6MRHR2t//mBBx5Ajx498Mknn+Ctt94ydjdb7bHHHtP/uU+fPoiKikLnzp3x9ddfN+t/ZZbss88+w2OPPQZfX98m21jq+0p1ampq8Oyzz0IQBKxYseKObS35szB69Gj9n3v37o0+ffogODgYu3btwuDBg0XsmXGtWrUKY8eOvevCCUt4b5v7XWMOOIJEmDZtGjZv3oydO3eiU6dOLTrWzs4Offv2xblz54zUO+NQKpXo3r17k/328fFpsIIiPz8fPj4+puhem8nKysJvv/2Gv/3tby06zlLfV93705L3ztPTEzY2Nhb7fuuCo6ysLGzfvv2Oo0eNudtnwZwFBQXB09Ozyb5b+nsLAHv37kVmZmaLP8OA+b23TX3X+Pj4oLq6GiUlJQbt7/Q+teaz3lIMkNoxQRAwbdo0fPfdd9ixYwe6dOnS4nNoNBqcPHkSHTt2NEIPjae8vBznz59vst/R0dFITk42eG779u0GoyyW4PPPP4eXlxeGDx/eouMs9X3t0qULfHx8DN670tJSHDp0qMn3TiaTISIiwuAYrVaL5ORks3+/dcHR2bNn8dtvv8HDw6PF57jbZ8Gc5eTk4Nq1a0323ZLfW53PPvsMERERCAsLa/Gx5vLe3u27JiIiAnZ2dgbvU2ZmJrKzs5t8n1rzWW9Nx6mdeumllwSFQiHs2rVLyM3N1T8qKyv1bcaNGyfMnTtX//Mbb7whbNu2TTh//ryQmpoqjB49WnBwcBD++OMPMW6h2WbPni3s2rVLuHjxorB//34hNjZW8PT0FAoKCgRBaHif+/fvF2xtbYX3339fOH36tLBw4ULBzs5OOHnypFi30GIajUYICAgQ5syZ0+A1S35fy8rKhGPHjgnHjh0TAAiLFy8Wjh07pl+5tWjRIkGpVAo//PCDcOLECeGJJ54QunTpIty4cUN/jkcffVRYunSp/ucNGzYI9vb2wurVq4VTp04JL7zwgqBUKoW8vDyT39/t7nSv1dXVwogRI4ROnToJ6enpBp9htVqtP0f9e73bZ0FMd7rfsrIy4f/+7/+ElJQU4eLFi8Jvv/0m9OvXT+jWrZtQVVWlP4c1vLc6KpVKcHJyElasWNHoOSzlvW3Od82LL74oBAQECDt27BCOHj0qREdHC9HR0QbnCQkJETZt2qT/uTmf9XvBAKkdA9Do4/PPP9e3eeSRR4QJEybof3755ZeFgIAAQSaTCd7e3sLjjz8upKWlmb7zLRQfHy907NhRkMlkgp+fnxAfHy+cO3dO/3r9+xQEQfj666+F7t27CzKZTOjVq5ewZcsWE/f63mzbtk0AIGRmZjZ4zZLf1507dzb6e6u7H61WK7z22muCt7e3YG9vLwwePLjB30Hnzp2FhQsXGjy3dOlS/d/BgAEDhIMHD5rojpp2p3u9e
PFik5/hnTt36s9R/17v9lkQ053ut7KyUhg6dKjQoUMHwc7OTujcubMwadKkBoGONby3Op988ong6OgolJSUNHoOS3lvm/Ndc+PGDWHKlCmCm5ub4OTkJIwcOVLIzc1tcJ7bj2nOZ/1eSG5elIiIiIhuYg4SERERUT0MkIiIiIjqYYBEREREVA8DJCIiIqJ6GCARERER1cMAiYiIiKgeBkhERERE9TBAIiIiIqqHARIR0U0xMTF4+eWXxe4GEZkBBkhERERE9TBAIiIiIqqHARIRURO2bNkChUKB9evXi90VIjIxW7E7QERkjr788ku8+OKL+PLLL/GnP/1J7O4QkYlxBImIqJ7ly5djypQp+OmnnxgcEbVTHEEiIrrNt99+i4KCAuzfvx/9+/cXuztEJBKOIBER3aZv377o0KEDVq1aBUEQxO4OEYmEARIR0W2Cg4Oxc+dO/PDDD5g+fbrY3SEikXCKjYionu7du2Pnzp2IiYmBra0tlixZInaXiMjEGCARETUiJCQEO3bsQExMDGxsbPDBBx+I3SUiMiGJwEl2IiIiIgPMQSIiIiKqhwESERERUT0MkIiIiIjqYYBEREREVA8DJCIiIqJ6GCARERER1cMAiYiIiKgeBkhERERE9TBAIiIiIqqHARIRERFRPQyQiIiIiOr5fzRtJqJtmyYtAAAAAElFTkSuQmCC",
+      "text/plain": [
+       "<Figure size 640x480 with 1 Axes>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "def plot_accuracy(data, labels, split_factor=0.9, n=[1, 20]):\n",
+    "    \"\"\"Plot the variation of the accuracy as a function of k from n[0] to n[1].\n",
+    "    Save the plot as an image named knn.png in the directory results.\n",
+    "\n",
+    "    Args:\n",
+    "        data: A numpy array of shape batch_size x data_size containing the data.\n",
+    "        labels: A numpy array of shape batch_size containing the labels.\n",
+    "        split_factor: The ratio of the size of the validation set over the size of the whole dataset. Must be a float between 0 and 1.\n",
+    "        n: A list of two integers, the first and the last value of k.\n",
+    "    \"\"\"\n",
+    "    data_train, labels_train, data_test, labels_test = rc.split_dataset(\n",
+    "        data, labels, split_factor\n",
+    "    )\n",
+    "    accuracies = []\n",
+    "    for k in range(n[0], n[1] + 1):\n",
+    "        accuracy = knn.evaluate_knn(data_train, labels_train, data_test, labels_test, k)\n",
+    "        accuracies.append(accuracy)\n",
+    "    plt.plot(range(n[0], n[1] + 1), accuracies)\n",
+    "    plt.xlabel(\"k\")\n",
+    "    plt.ylabel(\"accuracy\")\n",
+    "    plt.savefig(r\"results\\knn.png\")\n",
+    "    plt.show()\n",
+    "\n",
+    "data, labels = rc.read_cifar(r\"data\\cifar-10-batches-py\")\n",
+    "plot_accuracy(data, labels, split_factor=0.9, n=[1,20])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Artificial Neural Network"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Question 16"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [
+    {
+     "ename": "ValueError",
+     "evalue": "operands could not be broadcast together with shapes (48000,64) (12000,64) ",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[1;31mValueError\u001b[0m                                Traceback (most recent call last)",
+      "\u001b[1;32mc:\\Users\\sophi\\OneDrive\\Documents\\ETUDES\\6-ECL\\3A\\MOD\\4_6\\TD_1\\MOD_4_6_TD_1.ipynb Cell 8\u001b[0m line \u001b[0;36m6\n\u001b[0;32m      <a href='vscode-notebook-cell:/c%3A/Users/sophi/OneDrive/Documents/ETUDES/6-ECL/3A/MOD/4_6/TD_1/MOD_4_6_TD_1.ipynb#X10sZmlsZQ%3D%3D?line=3'>4</a>\u001b[0m learning_rate \u001b[39m=\u001b[39m \u001b[39m0.2\u001b[39m\n\u001b[0;32m      <a href='vscode-notebook-cell:/c%3A/Users/sophi/OneDrive/Documents/ETUDES/6-ECL/3A/MOD/4_6/TD_1/MOD_4_6_TD_1.ipynb#X10sZmlsZQ%3D%3D?line=4'>5</a>\u001b[0m num_epoch \u001b[39m=\u001b[39m \u001b[39m100\u001b[39m\n\u001b[1;32m----> <a href='vscode-notebook-cell:/c%3A/Users/sophi/OneDrive/Documents/ETUDES/6-ECL/3A/MOD/4_6/TD_1/MOD_4_6_TD_1.ipynb#X10sZmlsZQ%3D%3D?line=5'>6</a>\u001b[0m train_accuracies, test_accuracy, losses \u001b[39m=\u001b[39m mlp\u001b[39m.\u001b[39;49mrun_mlp_training(data_train, labels_train, data_test, labels_test, d_h, learning_rate, num_epoch)\n\u001b[0;32m      <a href='vscode-notebook-cell:/c%3A/Users/sophi/OneDrive/Documents/ETUDES/6-ECL/3A/MOD/4_6/TD_1/MOD_4_6_TD_1.ipynb#X10sZmlsZQ%3D%3D?line=6'>7</a>\u001b[0m \u001b[39mprint\u001b[39m(\u001b[39m\"\u001b[39m\u001b[39mok 3\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[0;32m      <a href='vscode-notebook-cell:/c%3A/Users/sophi/OneDrive/Documents/ETUDES/6-ECL/3A/MOD/4_6/TD_1/MOD_4_6_TD_1.ipynb#X10sZmlsZQ%3D%3D?line=7'>8</a>\u001b[0m plt\u001b[39m.\u001b[39mplot(\u001b[39mrange\u001b[39m(num_epoch), train_accuracies)\n",
+      "File \u001b[1;32mc:\\Users\\sophi\\OneDrive\\Documents\\ETUDES\\6-ECL\\3A\\MOD\\4_6\\TD_1\\mlp.py:287\u001b[0m, in \u001b[0;36mrun_mlp_training\u001b[1;34m(data_train, labels_train, data_test, labels_test, d_h, learning_rate, num_epoch)\u001b[0m\n\u001b[0;32m    285\u001b[0m b2 \u001b[39m=\u001b[39m np\u001b[39m.\u001b[39mzeros((\u001b[39m1\u001b[39m, d_out))  \u001b[39m# second layer biaises\u001b[39;00m\n\u001b[0;32m    286\u001b[0m \u001b[39m# Train the network\u001b[39;00m\n\u001b[1;32m--> 287\u001b[0m w1, b1, w2, b2, train_accuracies, losses \u001b[39m=\u001b[39m train_mlp(w1, b1, w2, b2, data_train, labels_train, learning_rate, num_epoch)\n\u001b[0;32m    288\u001b[0m \u001b[39m# Test the network\u001b[39;00m\n\u001b[0;32m    289\u001b[0m test_accuracy \u001b[39m=\u001b[39m test_mlp(w1, b1, w2, b2, data_test, labels_test)\n",
+      "File \u001b[1;32mc:\\Users\\sophi\\OneDrive\\Documents\\ETUDES\\6-ECL\\3A\\MOD\\4_6\\TD_1\\mlp.py:251\u001b[0m, in \u001b[0;36mtest_mlp\u001b[1;34m(w1, b1, w2, b2, data_test, labels_test)\u001b[0m\n\u001b[0;32m    239\u001b[0m def test_mlp(w1, b1, w2, b2, data_test, labels_test):\n\u001b[0;32m    240\u001b[0m     \"\"\"Test the neural network on the given test set.\n\u001b[0;32m    241\u001b[0m     Args:\n\u001b[0;32m    242\u001b[0m         w1: A np.float32 array of shape d_in x d_h, the first layer weights.\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    249\u001b[0m         test_accuracy: The accuracy of the network on the given test set.\n\u001b[0;32m    250\u001b[0m     \"\"\"\n\u001b[1;32m--> 251\u001b[0m     # Forward pass\n\u001b[0;32m    252\u001b[0m     a0 = data_test # the data are the input of the first layer\n\u001b[0;32m    253\u001b[0m     z1 = np.matmul(a0, w1) + b1 # input of the hidden layer\n",
+      "\u001b[1;31mValueError\u001b[0m: operands could not be broadcast together with shapes (48000,64) (12000,64) "
+     ]
+    }
+   ],
+   "source": [
+    "data, labels = rc.read_cifar(r\"data\\cifar-10-batches-py\")\n",
+    "split_factor = 0.9\n",
+    "data_train, labels_train, data_test, labels_test = rc.split_dataset(data, labels, split_factor)\n",
+    "d_h = 64\n",
+    "learning_rate = 0.1\n",
+    "num_epoch = 100\n",
+    "train_accuracies, test_accuracy, losses = mlp.run_mlp_training(data_train, labels_train, data_test, labels_test, d_h, learning_rate, num_epoch)\n",
+    "print(\"ok 3\")\n",
+    "plt.plot(range(num_epoch), train_accuracies)\n",
+    "plt.xlabel(\"epoch\")\n",
+    "plt.ylabel(\"accuracy\")\n",
+    "plt.savefig(r\"results\\mlp.png\")\n",
+    "plt.show()\n",
+    "\n",
+    "plt.plot(range(num_epoch), losses)\n",
+    "plt.xlabel(\"epoch\")\n",
+    "plt.ylabel(\"loss\")\n",
+    "plt.show()\n"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.5"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/knn.py b/knn.py
index 20fe8326c95f83871bf6c0be9f5b09c9ec528adb..fa5a68650b8e32d07c52adf7211af0ca9915d619 100644
--- a/knn.py
+++ b/knn.py
@@ -1,5 +1,7 @@
-# imports
+## Imports
 import numpy as np
+import matplotlib.pyplot as plt
+
 
 ## QUESTION 1
 def distance_matrix(m1, m2):
@@ -14,9 +16,14 @@ def distance_matrix(m1, m2):
     m1 = m1.reshape(m1.shape[0], -1)
     m2 = m2.reshape(m2.shape[0], -1)
     # Compute distance matrix
-    dists = np.sqrt(np.sum(m1**2, axis=1) + np.sum(m2**2, axis=1, keepdims=True) - 2 * np.dot(m2, m1.T))
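+    # dists[i, j] = ||m2[i] - m1[j]||, using the expansion ||a - b||^2 = ||a||^2 + ||b||^2 - 2 a.b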
+    dists = np.sqrt(
+        np.sum(m1**2, axis=1)
+        + np.sum(m2**2, axis=1, keepdims=True)
+        - 2 * np.dot(m2, m1.T)
+    )
     return dists
 
+
 ## QUESTION 2
 def knn_predict(dists, labels_train, k):
     """Predict the label of data_test using k-nearest neighbors.
@@ -35,6 +42,7 @@ def knn_predict(dists, labels_train, k):
         Ypred[i] = np.argmax(np.bincount(nearest_y))
     return Ypred
 
+
 ## QUESTION 3
 def evaluate_knn(data_train, labels_train, data_test, labels_test, k):
     """Evaluate the performance of k-nearest neighbors on the given dataset.
@@ -52,10 +60,12 @@ def evaluate_knn(data_train, labels_train, data_test, labels_test, k):
     accuracy = np.mean(y_test_pred == labels_test)
     return accuracy
 
+
 if __name__ == "__main__":
     # test distance_matrix
     m1 = np.array([[1, 1], [1, 0], [0, 1]])
     m2 = np.array([[0, 2], [1, 1]])
+    print(m2**2)
     print(distance_matrix(m1, m2))
     print(distance_matrix(m1, m2).shape)
 
@@ -65,28 +75,3 @@ if __name__ == "__main__":
     k = 2
     print(knn_predict(dists, labels_train, k))
 
-    ## QUESTION 4
-    import read_cifar as rc
-    import matplotlib.pyplot as plt
-    data, labels = rc.read_cifar(r"data\cifar-10-batches-py")
-
-    def plot_accuracy(data, labels, split_factor=0.9, n=[1,20]):
-        """Plot the variation of the accuracy as a function of k from n[0] to n[1].
-        Save the plot as an image named knn.png in the directory results.
-        
-        Args:
-            split_factor: The ratio of the size of the validation set over the size of the whole dataset. Must be a float between 0 and 1.
-            n: A list of two integers, the first and the last value of k.
-        """
-        data_train, labels_train, data_test, labels_test = rc.split_dataset(data, labels, split_factor)
-        accuracies = []
-        for k in range(n[0], n[1] + 1):
-            accuracy = evaluate_knn(data_train, labels_train, data_test, labels_test, k)
-            accuracies.append(accuracy)
-        plt.plot(range(n[0], n[1] + 1), accuracies)
-        plt.xlabel("k")
-        plt.ylabel("accuracy")
-        plt.savefig(r"results\knn.png")
-        plt.show()
-    
-    plot_accuracy(data, labels)
diff --git a/mlp.py b/mlp.py
new file mode 100644
index 0000000000000000000000000000000000000000..19c8108664ddd47a98e8707e2aef4d5b711735fa
--- /dev/null
+++ b/mlp.py
@@ -0,0 +1,377 @@
+## Imports
+import numpy as np
+
+
+## QUESTION 10
+def learn_once_mse(w1, b1, w2, b2, data, targets, learning_rate):
+    """Perform one gradient descent step of the neural network using the MSE cost.
+    Args:
+        w1: A np.float32 array of shape d_in x d_h, the first layer weights.
+        b1: A np.float32 array of shape 1 x d_h, the first layer biaises.
+        w2: A np.float32 array of shape d_h x d_out, the second layer weights.
+        b2: A np.float32 array of shape 1 x d_out, the second layer biaises.
+        data: A np.float32 array of shape batch_size x d_in, the input data.
+        targets: A np.float32 array of shape batch_size x d_out, the targets.
+        learning_rate: The learning rate.
+    Returns:
+        w1: A np.float32 array of shape d_in x d_h, the updated first layer weights.
+        b1: A np.float32 array of shape 1 x d_h, the updated first layer biases.
+        w2: A np.float32 array of shape d_h x d_out, the updated second layer weights.
+        b2: A np.float32 array of shape 1 x d_out, the updated second layer biases.
+        loss: The cost of the network on the given data.
+    """
+    # Check shapes
+    assert w1.shape[0] == data.shape[1] # d_in
+    assert w1.shape[1] == b1.shape[1] # d_h
+    assert b1.shape[0] == 1
+    assert w2.shape[0] == b1.shape[1] # d_h
+    assert w2.shape[1] == b2.shape[1] # d_out
+    assert b2.shape[0] == 1
+    assert data.shape[0] == targets.shape[0] # batch_size
+
+    N = data.shape[0] # batch_size
+    d_in = data.shape[1] # Number of input neurons
+    d_h = w1.shape[1] # Number of hidden neurons
+    d_out = w2.shape[1] # Number of output neurons
+
+    # Forward pass
+    a0 = data # the data are the input of the first layer
+    z1 = np.matmul(a0, w1) + b1 # input of the hidden layer
+    a1 = 1 / (1 + np.exp(-z1)) # output of the hidden layer (sigmoid activation function)
+    z2 = np.matmul(a1, w2) + b2 # input of the output layer
+    a2 = 1 / (1 + np.exp(-z2)) # output of the output layer (sigmoid activation function)
+    predictions = a2 # shape batch_size x d_out
+
+    # Compute loss (MSE)
+    loss = np.mean(np.square(predictions - targets)) # scalar
+
+    # Backward pass
+    # Compute gradients
+
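+    # Chain rule used below: dC/dz = dC/da * a * (1 - a) (sigmoid derivative),
+    # dC/dw = a_prev.T @ dC/dz, and dC/da_prev = dC/dz @ w.T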
+    ## QUESTION 2
+    dcost_da2 = 2 * (predictions - targets) / d_out # shape batch_size x d_out
+    # print("dcost_da2.shape = ", dcost_da2.shape)
+
+    ## QUESTION 3
+    da2_dz2 = a2 * (1 - a2) # shape batch_size x d_out
+    # print("da2_dz2.shape = ", da2_dz2.shape)
+    dcost_dz2 = dcost_da2 * da2_dz2 # shape batch_size x d_out
+    # print("dcost_dz2.shape = ", dcost_dz2.shape)
+
+    ## QUESTION 4
+    dz2_dw2 = np.transpose(a1) # shape d_h x batch_size
+    # print("dz2_dw2.shape = ", dz2_dw2.shape)
+    dcost_dw2 = np.matmul(dz2_dw2, dcost_dz2) # shape d_h x d_out
+    # print("dcost_dw2.shape = ", dcost_dw2.shape)
+
+    ## QUESTION 5
+    dz2_db2 = np.ones((d_out)) # shape d_out
+    # print("dz2_db2.shape = ", dz2_db2.shape)
+    dcost_db2 = np.sum(dcost_dz2 * dz2_db2, axis=0, keepdims=True) # shape 1 x d_out (summed over the batch so b2 keeps its 1 x d_out shape)
+    # print("dcost_db2.shape = ", dcost_db2.shape)
+
+    ## QUESTION 6
+    dz2_da1 = np.transpose(w2) # shape d_out x d_h
+    # print("dz2_da1.shape = ", dz2_da1.shape)
+    dcost_da1 = np.matmul(dcost_dz2, dz2_da1) # shape batch_size x d_h
+    # print("dcost_da1.shape = ", dcost_da1.shape)
+
+    ## QUESTION 7
+    da1_dz1 = a1 * (1 - a1) # shape batch_size x d_h
+    # print("da1_dz1.shape = ", da1_dz1.shape)
+    dcost_dz1 = dcost_da1 * da1_dz1 # shape batch_size x d_h
+    # print("dcost_dz1.shape = ", dcost_dz1.shape)
+
+    ## QUESTION 8
+    dz1_dw1 = np.transpose(a0) # shape d_in x batch_size
+    # print("dz1_dw1.shape = ", dz1_dw1.shape)
+    dcost_dw1 = np.matmul(dz1_dw1, dcost_dz1) # shape d_in x d_h
+    # print("dcost_dw1.shape = ", dcost_dw1.shape)
+
+    ## QUESTION 9
+    dz1_db1 = np.ones((d_h)) # shape d_h
+    # print("dz1_db1.shape = ", dz1_db1.shape)
+    dcost_db1 = np.sum(dcost_dz1 * dz1_db1, axis=0, keepdims=True) # shape 1 x d_h (summed over the batch so b1 keeps its 1 x d_h shape)
+    # print("dcost_db1.shape = ", dcost_db1.shape)
+
+    # Update weights and biases
+    w1 = w1 - learning_rate * dcost_dw1
+    b1 = b1 - learning_rate * dcost_db1
+    w2 = w2 - learning_rate * dcost_dw2
+    b2 = b2 - learning_rate * dcost_db2
+
+    return w1, b1, w2, b2, loss
+
+
+## QUESTION 11
+def one_hot(labels):
+    """Convert a vector of labels to a one-hot matrix, taking a (n)-D array as parameters and returning the corresponding (n+1)-D one-hot matrix.
+    Args:
+        labels: A np.int64 array of shape batch_size, the labels.
+    Returns:
+        b: A np.int64 array of shape batch_size x (labels.max() + 1), the one-hot matrix.
+    """
+    b = np.zeros((labels.size, labels.max() + 1))
+    b[np.arange(labels.size), labels] = 1
+    return b
+
+
+## QUESTION 12
+def learn_once_cross_entropy(w1, b1, w2, b2, data, labels_train, learning_rate):
+    """Perform one gradient descent step of the neural network using a binary cross-entropy loss.
+    The last activation layer of the network is a softmax layer.
+    Args:
+        w1: A np.float32 array of shape d_in x d_h, the first layer weights.
+        b1: A np.float32 array of shape 1 x d_h, the first layer biases.
+        w2: A np.float32 array of shape d_h x d_out, the second layer weights.
+        b2: A np.float32 array of shape 1 x d_out, the second layer biases.
+        data: A np.float32 array of shape batch_size x d_in, the input data.
+        labels_train: A np.int64 array of shape batch_size, the labels of the training set.
+        learning_rate: The learning rate.
+    Returns:
+        w1: A np.float32 array of shape d_in x d_h, the updated first layer weights.
+        b1: A np.float32 array of shape 1 x d_h, the updated first layer biases.
+        w2: A np.float32 array of shape d_h x d_out, the updated second layer weights.
+        b2: A np.float32 array of shape 1 x d_out, the updated second layer biases.
+        loss: The loss of the network on the given data.
+    """
+    N = data.shape[0] # batch_size
+    d_h = w1.shape[1] # Number of hidden neurons
+    d_out = labels_train.max() + 1 # Number of output neurons
+
+    # Forward pass
+    a0 = data # the data are the input of the first layer
+    z1 = np.matmul(a0, w1) + b1 # input of the hidden layer
+    a1 = 1 / (1 + np.exp(-z1)) # output of the hidden layer (sigmoid activation function)
+    z2 = np.matmul(a1, w2) + b2 # input of the output layer
+    a2 = np.exp(z2) / np.sum(np.exp(z2), axis=1, keepdims=True) # output of the output layer (softmax activation function)
+    predictions = a2 # shape batch_size x d_out
+    # print("predictions.shape = ", predictions.shape)
+
+    # Compute loss (cross-entropy)
+    labels_train_one_hot = one_hot(labels_train) # shape batch_size x d_out
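+    # element-wise binary cross-entropy, averaged over all batch samples and classes: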
+    loss = - np.mean(labels_train_one_hot * np.log(predictions) + (1 - labels_train_one_hot) * np.log(1 - predictions)) # scalar
+
+    # Backward pass
+    # Compute gradients
+    # print("labels_train.shape =", labels_train.shape)
+    # dloss_dz2 = predictions - labels_train # shape batch_size x d_out
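+    # standard softmax + cross-entropy simplification: dL/dz2 = a2 - y_one_hot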
+    dloss_dz2 = predictions - labels_train_one_hot # shape batch_size x d_out
+    # print("dloss_dz2.shape = ", dloss_dz2.shape)
+
+    dz2_dw2 = np.transpose(a1) # shape d_h x batch_size
+    # print("dz2_dw2.shape = ", dz2_dw2.shape)
+    dloss_dw2 = np.matmul(dz2_dw2, dloss_dz2) # shape d_h x d_out
+    # print("dloss_dw2.shape = ", dloss_dw2.shape)
+
+    dz2_db2 = np.ones((d_out)) # shape (d_out,)
+    # print("dz2_db2.shape = ", dz2_db2.shape)
+    dloss_db2 = np.sum(dloss_dz2 * dz2_db2, axis=0, keepdims=True) # 1 x d_out
+    # print("dloss_db2.shape = ", dloss_db2.shape)
+
+    dz2_da1 = np.transpose(w2) # shape d_out x d_h
+    # print("dz2_da1.shape = ", dz2_da1.shape)
+    dloss_da1 = np.matmul(dloss_dz2, dz2_da1) # shape batch_size x d_h
+    # print("dloss_da1.shape = ", dloss_da1.shape)
+
+    da1_dz1 = a1 * (1 - a1) # shape batch_size x d_h
+    # print("da1_dz1.shape = ", da1_dz1.shape)
+    dloss_dz1 = dloss_da1 * da1_dz1 # shape batch_size x d_h
+    # print("dloss_dz1.shape = ", dloss_dz1.shape)
+
+    dz1_dw1 = np.transpose(a0) # shape d_in x batch_size
+    # print("dz1_dw1.shape = ", dz1_dw1.shape)
+    dloss_dw1 = np.matmul(dz1_dw1, dloss_dz1) # shape d_in x d_h
+    # print("dloss_dw1.shape = ", dloss_dw1.shape)
+
+    dz1_db1 = np.ones((d_h)) # shape (d_h,)
+    # print("dz1_db1.shape = ", dz1_db1.shape)
+    dloss_db1 = np.sum(dloss_dz1 * dz1_db1, axis=0, keepdims=True) # 1 x d_h
+    # print("dloss_db1.shape = ", dloss_db1.shape)
+
+    # Update weights and biases
+    w1 = w1 - learning_rate * dloss_dw1
+    b1 = b1 - learning_rate * dloss_db1
+    w2 = w2 - learning_rate * dloss_dw2
+    b2 = b2 - learning_rate * dloss_db2
+    return w1, b1, w2, b2, loss
+
+
+## QUESTION 13
+def train_mlp(w1, b1, w2, b2, data_train, labels_train, learning_rate, num_epoch):
+    """Perform num_epoch of training steps of the neural network using the binary cross_entropy loss.
+    Args:
+        w1: A np.float32 array of shape d_in x d_h, the first layer weights.
+        b1: A np.float32 array of shape 1 x d_h, the first layer biases.
+        w2: A np.float32 array of shape d_h x d_out, the second layer weights.
+        b2: A np.float32 array of shape 1 x d_out, the second layer biases.
+        data_train: A np.float32 array of shape batch_size x d_in, the training set.
+        labels_train: A np.int64 array of shape batch_size, the labels of the training set.
+        learning_rate: The learning rate.
+        num_epoch: The number of training epochs.
+    Returns:
+        w1: A np.float32 array of shape d_in x d_h, the updated first layer weights.
+        b1: A np.float32 array of shape 1 x d_h, the updated first layer biases.
+        w2: A np.float32 array of shape d_h x d_out, the updated second layer weights.
+        b2: A np.float32 array of shape 1 x d_out, the updated second layer biases.
+        train_accuracies: A list of floats, the training accuracies across epochs.
+        losses: A list of floats, the training losses across epochs.
+    """
+    #print("data_train.shape = ", data_train.shape)
+    #print("labels_train.shape = ", labels_train.shape)
+    #print("w1.shape = ", w1.shape)
+    #print("b1.shape = ", b1.shape)
+    #print("w2.shape = ", w2.shape)
+    #print("b2.shape = ", b2.shape)
+    train_accuracies = []
+    losses = []
+    for epoch in range(num_epoch):
+        w1, b1, w2, b2, loss = learn_once_cross_entropy(w1, b1, w2, b2, data_train, labels_train, learning_rate) 
+        # Compute accuracy
+        # Forward pass
+        a0 = data_train # the data are the input of the first layer
+        z1 = np.matmul(a0, w1) + b1 # input of the hidden layer
+        a1 = 1 / (1 + np.exp(-z1)) # output of the hidden layer (sigmoid activation function)
+        z2 = np.matmul(a1, w2) + b2 # input of the output layer
+        a2 = np.exp(z2) / np.sum(np.exp(z2), axis=1, keepdims=True) # output of the output layer (softmax activation function)
+        predictions = a2 # shape batch_size x d_out
+        y_pred = np.argmax(predictions, axis=1)
+        train_accuracy = np.mean(y_pred == labels_train)
+        train_accuracies.append(train_accuracy)
+        losses.append(loss)
+        if epoch % 10 == 0:
+            print(f"Epoch {epoch}: train accuracy = {train_accuracy:.4f}, loss = {loss:.4f}")
+
+    return w1, b1, w2, b2, train_accuracies, losses
+
+
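+# Minimal usage sketch for train_mlp on tiny synthetic data (a sketch only; the
+# function name, sizes and values below are illustrative choices, not part of the
+# assignment). It checks that one accuracy and one loss are recorded per epoch.
+def _smoke_test_train_mlp(num_epoch=5):
+    rng = np.random.default_rng(0)
+    data = rng.random((8, 3)).astype(np.float32)   # batch_size = 8, d_in = 3
+    labels = np.array([0, 1, 2, 3, 4, 4, 1, 0])    # includes the largest class index, so d_out = 5
+    w1 = 2 * rng.random((3, 4)) - 1                # d_h = 4
+    b1 = np.zeros((1, 4))
+    w2 = 2 * rng.random((4, 5)) - 1
+    b2 = np.zeros((1, 5))
+    w1, b1, w2, b2, accuracies, losses = train_mlp(w1, b1, w2, b2, data, labels, 0.1, num_epoch)
+    assert len(accuracies) == num_epoch and len(losses) == num_epoch
+
+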
+## QUESTION 14
+def test_mlp(w1, b1, w2, b2, data_test, labels_test):
+    """Test the neural network on the given test set.
+    Args:
+        w1: A np.float32 array of shape d_in x d_h, the first layer weights.
+        b1: A np.float32 array of shape 1 x d_h, the first layer biases.
+        w2: A np.float32 array of shape d_h x d_out, the second layer weights.
+        b2: A np.float32 array of shape 1 x d_out, the second layer biases.
+        data_test: A np.float32 array of shape batch_size x d_in, the test set.
+        labels_test: A np.int64 array of shape batch_size, the labels of the test set.
+    Returns:
+        test_accuracy: The accuracy of the network on the given test set.
+    """
+    #print("data_test.shape = ", data_test.shape)
+    #print("labels_test.shape = ", labels_test.shape)
+    #print("w1.shape = ", w1.shape)
+    #print("b1.shape = ", b1.shape)
+    #print("w2.shape = ", w2.shape)
+    #print("b2.shape = ", b2.shape)
+    # Forward pass
+    a0 = data_test # the data are the input of the first layer
+    z1 = np.matmul(a0, w1) + b1 # input of the hidden layer
+    a1 = 1 / (1 + np.exp(-z1)) # output of the hidden layer (sigmoid activation function)
+    z2 = np.matmul(a1, w2) + b2 # input of the output layer
+    a2 = np.exp(z2) / np.sum(np.exp(z2), axis=1, keepdims=True) # output of the output layer (softmax, matching the activation used during training)
+    predictions = a2
+    # Compute accuracy
+    y_pred = np.argmax(predictions, axis=1)
+    test_accuracy = np.mean(y_pred == labels_test)
+    return test_accuracy
+
+
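+# The same forward pass appears in learn_once_cross_entropy, train_mlp and
+# test_mlp; a small helper like the sketch below could factor it out (the name
+# _forward is an illustrative choice, not part of the assignment API).
+def _forward(w1, b1, w2, b2, data):
+    z1 = np.matmul(data, w1) + b1
+    a1 = 1 / (1 + np.exp(-z1))  # sigmoid hidden layer
+    z2 = np.matmul(a1, w2) + b2
+    return np.exp(z2) / np.sum(np.exp(z2), axis=1, keepdims=True)  # softmax output
+
+
+# With it, test_mlp reduces to:
+#     predictions = _forward(w1, b1, w2, b2, data_test)
+#     return np.mean(np.argmax(predictions, axis=1) == labels_test)
+
+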
+## QUESTION 15
+def run_mlp_training(data_train, labels_train, data_test, labels_test, d_h, learning_rate, num_epoch):
+    """Train an MLP classifier and return the trainig accuracies across epochs as a list of floats and the final testing accuracy as a float.
+    Args:
+        data_train: A np.float32 array of shape batch_size x d_in, the training set.
+        labels_train: A np.int64 array of shape batch_size, the labels of the training set.
+        data_test: A np.float32 array of shape batch_size x d_in, the test set.
+        labels_test: A np.int64 array of shape batch_size, the labels of the test set.
+        d_h: The number of neurons in the hidden layer.
+        learning_rate: The learning rate.
+        num_epoch: The number of training epochs.
+    Returns:
+        train_accuracies: A list of floats, the training accuracies across epochs.
+        test_accuracy: The accuracy of the network on the given test set.
+        losses: A list of floats, the training losses across epochs.
+    """
+    # Random initialization of the network weights and biases (uniform in [-1, 1] for the weights; see the note after this function)
+    d_in = data_train.shape[1]  # input dimension
+    d_out = labels_train.max() + 1  # output dimension (number of neurons of the output layer)
+    w1 = 2 * np.random.rand(d_in, d_h) - 1  # first layer weights
+    b1 = np.zeros((1, d_h))  # first layer biases
+    w2 = 2 * np.random.rand(d_h, d_out) - 1  # second layer weights
+    b2 = np.zeros((1, d_out))  # second layer biases
+    # Train the network
+    w1, b1, w2, b2, train_accuracies, losses = train_mlp(w1, b1, w2, b2, data_train, labels_train, learning_rate, num_epoch)
+    # Test the network
+    test_accuracy = test_mlp(w1, b1, w2, b2, data_test, labels_test)
+    return train_accuracies, test_accuracy, losses
+
+
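+# Optional note (not required by the assignment): with d_in = 3072 for CIFAR-10
+# images, the uniform [-1, 1] weight initialization above can produce large
+# pre-activations and saturate the sigmoid. A common alternative is to scale the
+# initial weights by 1/sqrt(fan_in), e.g.:
+#     w1 = (2 * np.random.rand(d_in, d_h) - 1) / np.sqrt(d_in)
+#     w2 = (2 * np.random.rand(d_h, d_out) - 1) / np.sqrt(d_h)
+
+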
+if __name__ == "__main__":
+    # Define input data
+    w1 = np.array([[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]]) # d_in = 3, d_h = 2
+    b1 = np.array([[0.1, 0.2]]) # d_h = 2
+    w2 = np.array([[0.1, 0.2, 0.3, 0.4, 0.5], [0.4, 0.5, 0.6, 0.7, 0.8]]) # d_h = 2, d_out = 5
+    b2 = np.array([[0.1, 0.2, 0.3, 0.4, 0.5]]) # d_out = 5
+    data = np.array([[0.1, 0.2, 0.3]]) # batch_size = 1, d_in = 3
+    targets = np.array([[0.1, 0.2, 0.3, 0.4, 0.5]]) # batch_size = 1, d_out = 5
+    learning_rate = 0.1
+
+    # Call function
+    w1, b1, w2, b2, cost = learn_once_mse(w1, b1, w2, b2, data, targets, learning_rate)
+
+    # Check output shapes
+    assert w1.shape == (3, 2)
+    assert b1.shape == (1, 2)
+    assert w2.shape == (2, 5)
+    assert b2.shape == (1, 5)
+    assert cost.shape == ()
+
+    # Test one_hot
+    labels = np.array([0, 4, 2, 3])
+    print(one_hot(labels))
+
+    # Define input data
+    w1 = np.array([[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]]) # d_in = 3, d_h = 2
+    b1 = np.array([[0.1, 0.2]]) # d_h = 2
+    w2 = np.array([[0.1, 0.2, 0.3, 0.4, 0.5], [0.4, 0.5, 0.6, 0.7, 0.8]]) # d_h = 2, d_out = 5
+    b2 = np.array([[0.1, 0.2, 0.3, 0.4, 0.5]]) # d_out = 5
+    data = np.array([[0.1, 0.2, 0.3]]) # batch_size = 1, d_in = 3
+    labels_train = np.array([4]) # batch_size = 1
+    learning_rate = 0.1
+
+    # Test learn_once_cross_entropy
+    w1, b1, w2, b2, loss = learn_once_cross_entropy(w1, b1, w2, b2, data, labels_train, learning_rate)
+    print(w1, b1, w2, b2, loss)
+
+    # Test train_mlp
+    w1, b1, w2, b2, train_accuracies, losses = train_mlp(w1, b1, w2, b2, data, labels_train, learning_rate, 10)
+    print(train_accuracies)
+
+    # Test test_mlp
+    test_accuracy = test_mlp(w1, b1, w2, b2, data, labels_train)
+    print(test_accuracy)
+
+    # Test run_mlp_training
+    train_accuracies, test_accuracy, losses = run_mlp_training(data, labels_train, data, labels_train, 2, 0.1, 10)
+    print(train_accuracies, test_accuracy)
+
+    import read_cifar as rc
+    import matplotlib.pyplot as plt
+    data, labels = rc.read_cifar(r"data\cifar-10-batches-py")
+    split_factor = 0.9
+    data_train, labels_train, data_test, labels_test = rc.split_dataset(data, labels, split_factor)
+    d_h = 64
+    learning_rate = 0.1
+    num_epoch = 100
+    train_accuracies, test_accuracy, losses = run_mlp_training(data_train, labels_train, data_test, labels_test, d_h, learning_rate, num_epoch)
+    print("ok 3")
+    plt.plot(range(num_epoch), train_accuracies)
+    plt.xlabel("epoch")
+    plt.ylabel("accuracy")
+    plt.savefig(r"results\mlp.png")
+    plt.show()
+
+    plt.plot(range(num_epoch), losses)
+    plt.xlabel("epoch")
+    plt.ylabel("loss")
+    plt.show()
diff --git a/read_cifar.py b/read_cifar.py
index 0dee3799c71b57131dfee83742f7759226db47b5..574c63d4efeb10d7381e1a952accff7fb1d7b21f 100644
--- a/read_cifar.py
+++ b/read_cifar.py
@@ -1,14 +1,16 @@
-# imports
+## Imports
 import numpy as np
 import pickle
 
+
 ## QUESTION 2
 def unpickle(file):
     # Source: https://www.cs.toronto.edu/~kriz/cifar.html
-    with open(file, 'rb') as fo:
-        dict = pickle.load(fo, encoding='bytes')
+    with open(file, "rb") as fo:
+        dict = pickle.load(fo, encoding="bytes")
     return dict
 
+
 def read_cifar_batch(file):
     """Read a batch of the CIFAR dataset.
     Args:
@@ -22,6 +24,7 @@ def read_cifar_batch(file):
     labels = np.array(dict[b"labels"], dtype=np.int64)
     return data, labels
 
+
 ## QUESTION 3
 def read_cifar(path):
     """Read the whole CIFAR dataset.
@@ -44,6 +47,7 @@ def read_cifar(path):
     labels = np.concatenate(label_batches, axis=0)
     return data, labels
 
+
 ## QUESTION 4
 def split_dataset(data, labels, split):
     """Split the dataset into a training set and a validation set. Data are shuffled before splitting.
@@ -57,7 +61,7 @@ def split_dataset(data, labels, split):
         data_test: A np.float32 array of shape (1 - split) x batch_size x data_size, the validation set.
         labels_test: A np.int64 array of shape (1 - split) x batch_size, the labels of the validation set.
     """
-    assert 0 <= split <= 1 # split must be between 0 and 1
+    assert 0 <= split <= 1  # split must be between 0 and 1
     data_size = data.shape[0]
     # shuffle data and labels
     indices = np.arange(data_size)
@@ -70,8 +74,13 @@ def split_dataset(data, labels, split):
     labels_train = labels[:split_index]
     data_test = data[split_index:]
     labels_test = labels[split_index:]
+    print("data_train.shape: ", data_train.shape)
+    print("labels_train.shape: ", labels_train.shape)
+    print("data_test.shape: ", data_test.shape)
+    print("labels_test.shape: ", labels_test.shape)
     return data_train, labels_train, data_test, labels_test
 
+
 if __name__ == "__main__":
     dict = unpickle(r"data\cifar-10-batches-py\data_batch_1")
     print(dict.keys())
diff --git a/results/knn.png b/results/knn.png
index f3f3baa44e08700a8fed7fd892428b92b0377a72..8522fe74e7d61084b41020a38e4f42140494f818 100644
Binary files a/results/knn.png and b/results/knn.png differ
diff --git a/results/mlp.png b/results/mlp.png
new file mode 100644
index 0000000000000000000000000000000000000000..b437dd9a7b45a0bafdeefb972aa4adcdb3814e03
Binary files /dev/null and b/results/mlp.png differ