diff --git a/100.png b/100.png new file mode 100644 index 0000000000000000000000000000000000000000..81e8386df1d1335f330caabeb50f214fef656f71 Binary files /dev/null and b/100.png differ diff --git a/200.png b/200.png new file mode 100644 index 0000000000000000000000000000000000000000..c654134c6654d15a82333f254bb98ae4f0cf6de9 Binary files /dev/null and b/200.png differ diff --git a/BE2_GAN_and_cGAN.ipynb b/BE2_GAN_and_cGAN.ipynb index 34930e8932a77d9763039e472794439694fa0706..6f92acfa5305b617110b9b63c41497f575bad3c6 100644 --- a/BE2_GAN_and_cGAN.ipynb +++ b/BE2_GAN_and_cGAN.ipynb @@ -3,7 +3,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "UGwKsKS4GMTN" }, "source": [ @@ -22,7 +21,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "16aVF81lJuiP" }, "source": [ @@ -36,7 +34,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "M-WNKvhOP1ED" }, "source": [ @@ -46,7 +43,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "y_r8nMTGQI9a" }, "source": [ @@ -56,7 +52,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "jiHCy4_UUBFb" }, "source": [ @@ -69,21 +64,386 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "sIL7UvYAZx6L" + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000, + "referenced_widgets": [ + "5fd5ca3c07134c3eb715aa303219704a", + "1c18f2d791b8438fb6e8f7fbe049511b", + "e88326c0629a4dabaebd5bf0c5b955b2", + "9c46b4622fab458f9396af535b5d3fa8", + "823ae10426bf4af7b528ba70a4c58db1", + "40472578b17c406fba0d98b437319240", + "37d8380055914bc0b8c317dbaff31480", + "c7337fd6904846a388436064c9981d12", + "8ee5ea60fccb460ab8e2399131ff6650", + "a70f418062994afaa1d951028793ae39", + "09669e92fbe94221a8eca46cb25d417c", + "006ed596437e401484071852e4dcf478", + "97b0e145241b4c22b04d688d6ac61a7b", + "344eb89047d742cd8ae04352b63ed3eb", + 
"7e5da0e70e93447ea42becf167e8db38", + "7c9e2a97e6814f6b890c634dfb39f703", + "3d7c01e7ee9c4892a3dbe585bdd982c0", + "88c21629415f4163b52fa6ded0680c35", + "5bd30ef4aeee4753815a01dfe338ddfe", + "184daa5104814b749029e96912e79b2e", + "ac94dcd84d0c42de890ac7431a7ffb1c", + "4410b85dfd1441828241e2b9f574d96e", + "27be0e531de348a88546a28648f3735b", + "84254cce85184864b00b8bf9aee91ad9", + "56bd9277e70948eeb51ea45c8e82f449", + "df28f49a1af44a47953e77bf76f1777d", + "d7c2e4a9ac2848adb838443c4a35c51f", + "5a8f192e615f4e348289d842e67dcb28", + "8187d104f2de4563b59afc04c16035aa", + "5405bd5d3915494f85ba0ca628cc56ac", + "1e74bdbe2f654c3ba9e78aee3586a6d5", + "b10ca89bfa4e4990a9a3f14964755d03", + "ba63bfd561f44c26a24829a7e75e2bd8", + "ff6606057f814c99b556ca2ed99daaea", + "77f8f29eca634c519132112d2a588ed8", + "3947bf97fc6b4122917d04eb93d19d90", + "3da59f3d2fde43b2a65a640431a06403", + "9dc13513249d45adb67946a5666b739d", + "cf5d851292a24c60abbde8b165e9d414", + "107b3fc0d47441c88382b42b8ab1cf93", + "e5205e9e71d0442fb0933e5037b1a004", + "12e84e4422dc47c98445fe35124d3c16", + "fd098180435a4eac8e0461e866cf9430", + "b3275b615ed64036b58c4b772b05611a" + ] + }, + "id": "sIL7UvYAZx6L", + "outputId": "ec98edbd-c05f-48fb-9605-720369ba147e" }, - "outputs": [], + "outputs": [ + { + "ename": "AttributeError", + "evalue": "module 'torch' has no attribute '_six'", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32mc:\\Users\\FREDJ\\Desktop\\BE\\mso3_4-be2_cgan\\BE2_GAN_and_cGAN.ipynb Cell 6\u001b[0m in \u001b[0;36m4\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#W5sZmlsZQ%3D%3D?line=46'>47</a>\u001b[0m train_dataset \u001b[39m=\u001b[39m dset\u001b[39m.\u001b[39mMNIST(root\u001b[39m=\u001b[39m\u001b[39m'\u001b[39m\u001b[39m./data\u001b[39m\u001b[39m'\u001b[39m, 
train\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m, download\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m, transform\u001b[39m=\u001b[39mtransforms\u001b[39m.\u001b[39mToTensor())\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#W5sZmlsZQ%3D%3D?line=47'>48</a>\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m---> <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#W5sZmlsZQ%3D%3D?line=48'>49</a>\u001b[0m train_dataset \u001b[39m=\u001b[39m dset\u001b[39m.\u001b[39;49mMNIST(root\u001b[39m=\u001b[39;49m\u001b[39m'\u001b[39;49m\u001b[39m./data\u001b[39;49m\u001b[39m'\u001b[39;49m, train\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m, download\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m, transform\u001b[39m=\u001b[39;49mtransforms\u001b[39m.\u001b[39;49mToTensor())\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#W5sZmlsZQ%3D%3D?line=50'>51</a>\u001b[0m \u001b[39m# check if test dataset exists, download if it doesn't\u001b[39;00m\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#W5sZmlsZQ%3D%3D?line=51'>52</a>\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m os\u001b[39m.\u001b[39mpath\u001b[39m.\u001b[39mexists(test_dataset_path):\n", + "File \u001b[1;32mc:\\Users\\FREDJ\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\torchvision\\datasets\\mnist.py:91\u001b[0m, in \u001b[0;36mMNIST.__init__\u001b[1;34m(self, root, train, transform, target_transform, download)\u001b[0m\n\u001b[0;32m 83\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m__init__\u001b[39m(\n\u001b[0;32m 84\u001b[0m \u001b[39mself\u001b[39m,\n\u001b[0;32m 85\u001b[0m root: \u001b[39mstr\u001b[39m,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 89\u001b[0m download: \u001b[39mbool\u001b[39m \u001b[39m=\u001b[39m 
\u001b[39mFalse\u001b[39;00m,\n\u001b[0;32m 90\u001b[0m ) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m \u001b[39mNone\u001b[39;00m:\n\u001b[1;32m---> 91\u001b[0m \u001b[39msuper\u001b[39;49m()\u001b[39m.\u001b[39;49m\u001b[39m__init__\u001b[39;49m(root, transform\u001b[39m=\u001b[39;49mtransform, target_transform\u001b[39m=\u001b[39;49mtarget_transform)\n\u001b[0;32m 92\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mtrain \u001b[39m=\u001b[39m train \u001b[39m# training set or test set\u001b[39;00m\n\u001b[0;32m 94\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_check_legacy_exist():\n", + "File \u001b[1;32mc:\\Users\\FREDJ\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\torchvision\\datasets\\vision.py:39\u001b[0m, in \u001b[0;36mVisionDataset.__init__\u001b[1;34m(self, root, transforms, transform, target_transform)\u001b[0m\n\u001b[0;32m 31\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m__init__\u001b[39m(\n\u001b[0;32m 32\u001b[0m \u001b[39mself\u001b[39m,\n\u001b[0;32m 33\u001b[0m root: \u001b[39mstr\u001b[39m,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 36\u001b[0m target_transform: Optional[Callable] \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m,\n\u001b[0;32m 37\u001b[0m ) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m \u001b[39mNone\u001b[39;00m:\n\u001b[0;32m 38\u001b[0m _log_api_usage_once(\u001b[39mself\u001b[39m)\n\u001b[1;32m---> 39\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39misinstance\u001b[39m(root, torch\u001b[39m.\u001b[39;49m_six\u001b[39m.\u001b[39mstring_classes):\n\u001b[0;32m 40\u001b[0m root \u001b[39m=\u001b[39m os\u001b[39m.\u001b[39mpath\u001b[39m.\u001b[39mexpanduser(root)\n\u001b[0;32m 41\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mroot \u001b[39m=\u001b[39m root\n", + "\u001b[1;31mAttributeError\u001b[0m: module 'torch' has no attribute '_six'" + ] + } + ], "source": [ - "#TO DO: your code here to adapt the code from the tutorial to experiment on MNIST dataset" + "import 
torch\n", + "import torch.nn as nn\n", + "import torch.nn.parallel\n", + "import torch.backends.cudnn as cudnn\n", + "import torch.optim as optim\n", + "import torch.utils.data as dt\n", + "import torchvision.datasets as dset\n", + "import torchvision.transforms as transforms\n", + "import torchvision.utils as vutils\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import os\n", + "import matplotlib.animation as animation\n", + "import torch.nn.functional as F\n", + "from IPython.display import HTML\n", + "# We start by defining some of the variables we are going to use \n", + "# Number of workers for dataloader\n", + "workers = 2\n", + "# Batch size during training\n", + "batch_size = 128\n", + "# Spatial size of training images. All images will be resized to this\n", + "# size using a transformer.\n", + "image_size = 64\n", + "# Number of channels in the training images. For color images this is 3\n", + "nc = 3\n", + "# Size of z latent vector (i.e. size of generator input)\n", + "nz = 100\n", + "# Size of feature maps in generator\n", + "ngf = 64\n", + "# Size of feature maps in discriminator\n", + "ndf = 64\n", + "# Number of training epochs\n", + "num_epochs = 5\n", + "# Learning rate for optimizers\n", + "lr = 0.0002\n", + "# Beta1 hyperparam for Adam optimizers\n", + "beta1 = 0.5\n", + "# Number of GPUs available. 
Use 0 for CPU mode.\n", + "ngpu = 1\n", + "# Then we want to sucessfully load the dataset and visualise some pictures\n", + "# loading the data \n", + "# define paths for the dataset \n", + "train_dataset_path = './data/MNIST/raw/train-images-idx3-ubyte.gz'\n", + "test_dataset_path = './data/MNIST/raw/t10k-images-idx3-ubyte.gz'\n", + "# check if train dataset exists, download if it doesn't\n", + "if not os.path.exists(train_dataset_path):\n", + " train_dataset = dset.MNIST(root='./data', train=True, download=True, transform=transforms.ToTensor())\n", + "else:\n", + " train_dataset = dset.MNIST(root='./data', train=True, download=False, transform=transforms.ToTensor())\n", + "\n", + "# check if test dataset exists, download if it doesn't\n", + "if not os.path.exists(test_dataset_path):\n", + " test_dataset = dset.MNIST(root='./data', train=False, download=True, transform=transforms.ToTensor())\n", + "else:\n", + " test_dataset = dset.MNIST(root='./data', train=False, download=False, transform=transforms.ToTensor())\n", + "\n", + "# create DataLoader for train and test datasets\n", + "train_loader = dt.DataLoader(train_dataset, batch_size=batch_size, shuffle=True)\n", + "test_loader = dt.DataLoader(test_dataset, batch_size=batch_size, shuffle=False)\n", + "\n", + "# Decide which device we want to run on\n", + "device = torch.device(\"cuda:0\" if (torch.cuda.is_available() and ngpu > 0) else \"cpu\")\n", + "\n", + "# Plot some training images\n", + "real_batch = next(iter(train_loader))\n", + "# get an image shape from the dataset\n", + "print(real_batch[0][0].shape)\n", + "plt.figure(figsize=(8,8))\n", + "plt.axis(\"off\")\n", + "plt.title(\"Training Images\")\n", + "plt.imshow(np.transpose(vutils.make_grid(real_batch[0].to(device)[:64], padding=2, normalize=True).cpu(),(1,2,0)))\n", + "# uncomment the next line to see \n", + "#plt.show()\n", + "\n", + "# custom weights initialization called on generator and descriminator\n", + "def weights_init(m):\n", + " classname 
= m.__class__.__name__\n", + " if classname.find('Conv') != -1:\n", + " nn.init.normal_(m.weight.data, 0.0, 0.02)\n", + " elif classname.find('BatchNorm') != -1:\n", + " nn.init.normal_(m.weight.data, 1.0, 0.02)\n", + " nn.init.constant_(m.bias.data, 0)\n", + "\n", + "# now , we implement our generator \n", + "# Generator Code\n", + "\n", + "class Generator(nn.Module):\n", + " def __init__(self, ngpu):\n", + " super(Generator, self).__init__()\n", + " self.ngpu = ngpu\n", + " self.main = nn.Sequential(\n", + " # input is Z, going into a convolution\n", + " nn.ConvTranspose2d( nz, ngf * 8, 4, 1, 0, bias=False),\n", + " nn.BatchNorm2d(ngf * 8),\n", + " nn.ReLU(True),\n", + " # state size. (ngf*8) x 4 x 4\n", + " nn.ConvTranspose2d(ngf * 8, ngf * 4, 4, 2, 1, bias=False),\n", + " nn.BatchNorm2d(ngf * 4),\n", + " nn.ReLU(True),\n", + " # state size. (ngf*4) x 8 x 8\n", + " nn.ConvTranspose2d( ngf * 4, ngf * 2, 4, 2, 1, bias=False),\n", + " nn.BatchNorm2d(ngf * 2),\n", + " nn.ReLU(True),\n", + " # state size. (ngf*2) x 16 x 16\n", + " nn.ConvTranspose2d( ngf * 2, ngf, 4, 2, 1, bias=False),\n", + " nn.BatchNorm2d(ngf),\n", + " nn.ReLU(True),\n", + " # state size. (ngf) x 32 x 32\n", + " nn.ConvTranspose2d( ngf, nc, 4, 2, 1, bias=False),\n", + " nn.Tanh()\n", + " # state size. (nc) x 64 x 64\n", + " )\n", + "\n", + " def forward(self, input):\n", + " return self.main(input)\n", + " \n", + "# implement the discriminator\n", + "class Discriminator(nn.Module):\n", + " def __init__(self, ngpu):\n", + " super(Discriminator, self).__init__()\n", + " self.ngpu = ngpu\n", + " self.main = nn.Sequential(\n", + " # input is (nc) x 64 x 64\n", + " nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),\n", + " nn.LeakyReLU(0.2, inplace=True),\n", + " # state size. (ndf) x 32 x 32\n", + " nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),\n", + " nn.BatchNorm2d(ndf * 2),\n", + " nn.LeakyReLU(0.2, inplace=True),\n", + " # state size. 
(ndf*2) x 16 x 16\n", + " nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),\n", + " nn.BatchNorm2d(ndf * 4),\n", + " nn.LeakyReLU(0.2, inplace=True),\n", + " # state size. (ndf*4) x 8 x 8\n", + " nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),\n", + " nn.BatchNorm2d(ndf * 8),\n", + " nn.LeakyReLU(0.2, inplace=True),\n", + " # state size. (ndf*8) x 4 x 4\n", + " nn.Conv2d(ndf * 8, 1, 4, 1, 0, bias=False),\n", + " nn.Sigmoid()\n", + " )\n", + "\n", + " def forward(self, input):\n", + " return self.main(input)\n", + " \n", + "# instantiate the generator and the descriminator and apply the weight initialization\n", + "\n", + "# Create the generator\n", + "netG = Generator(ngpu).to(device)\n", + "# Apply the weights_init function to randomly initialize all weights\n", + "# to mean=0, stdev=0.02.\n", + "netG.apply(weights_init)\n", + "\n", + "# Create the Discriminator\n", + "netD = Discriminator(ngpu).to(device)\n", + "# Apply the weights_init function to randomly initialize all weights\n", + "# to mean=0, stdev=0.2.\n", + "netD.apply(weights_init)\n", + "\n", + "# handeling loss dunction and optimizers\n", + "# Initialize BCELoss function\n", + "criterion = nn.BCELoss()\n", + "\n", + "# Create batch of latent vectors that we will use to visualize\n", + "# the progression of the generator\n", + "fixed_noise = torch.randn(28, nz, 1, 1, device=device)\n", + "\n", + "# Establish convention for real and fake labels during training\n", + "real_label = 1.\n", + "fake_label = 0.\n", + "\n", + "# Setup Adam optimizers for both G and D\n", + "optimizerD = optim.Adam(netD.parameters(), lr=lr, betas=(beta1, 0.999))\n", + "optimizerG = optim.Adam(netG.parameters(), lr=lr, betas=(beta1, 0.999))\n", + "\n", + "#training loop \n", + "# Training Loop\n", + "\n", + "# Lists to keep track of progress\n", + "img_list = []\n", + "G_losses = []\n", + "D_losses = []\n", + "iters = 0\n", + "\n", + "print(\"Starting Training Loop...\")\n", + "# For each epoch\n", + "for epoch in 
range(num_epochs):\n", + " # For each batch in the dataloader\n", + " for i, data in enumerate(train_loader, 0):\n", + "\n", + " ############################\n", + " # (1) Update D network: maximize log(D(x)) + log(1 - D(G(z)))\n", + " ###########################\n", + " ## Train with all-real batch\n", + " netD.zero_grad()\n", + " # Format batch\n", + " #resize the image\n", + " # print(data[0].shape)\n", + " data[0] = F.interpolate(data[0], size=64, mode='bilinear', align_corners=False)\n", + "\n", + " # Convert the upsampled image to a tensor with 3 channels (i.e., RGB)\n", + " data[0] = data[0].repeat(1, 3, 1, 1)\n", + " # print(data[0].shape)\n", + " real_cpu = data[0].to(device)\n", + " b_size = real_cpu.size(0)\n", + " label = torch.full((b_size,), real_label, dtype=torch.float, device=device)\n", + " # Forward pass real batch through D\n", + " output = netD(real_cpu).view(-1)\n", + " # Calculate loss on all-real batch\n", + " errD_real = criterion(output, label)\n", + " # Calculate gradients for D in backward pass\n", + " errD_real.backward()\n", + " D_x = output.mean().item()\n", + "\n", + " ## Train with all-fake batch\n", + " # Generate batch of latent vectors\n", + " noise = torch.randn(b_size, nz, 1, 1, device=device)\n", + " # Generate fake image batch with G\n", + " fake = netG(noise)\n", + " label.fill_(fake_label)\n", + " # Classify all fake batch with D\n", + " output = netD(fake.detach()).view(-1)\n", + " # Calculate D's loss on the all-fake batch\n", + " errD_fake = criterion(output, label)\n", + " # Calculate the gradients for this batch, accumulated (summed) with previous gradients\n", + " errD_fake.backward()\n", + " D_G_z1 = output.mean().item()\n", + " # Compute error of D as sum over the fake and the real batches\n", + " errD = errD_real + errD_fake\n", + " # Update D\n", + " optimizerD.step()\n", + "\n", + " ############################\n", + " # (2) Update G network: maximize log(D(G(z)))\n", + " ###########################\n", + " 
netG.zero_grad()\n", + " label.fill_(real_label) # fake labels are real for generator cost\n", + " # Since we just updated D, perform another forward pass of all-fake batch through D\n", + " output = netD(fake).view(-1)\n", + " # Calculate G's loss based on this output\n", + " errG = criterion(output, label)\n", + " # Calculate gradients for G\n", + " errG.backward()\n", + " D_G_z2 = output.mean().item()\n", + " # Update G\n", + " optimizerG.step()\n", + "\n", + " # Output training stats\n", + " if i % 50 == 0:\n", + " print('[%d/%d][%d/%d]\\tLoss_D: %.4f\\tLoss_G: %.4f\\tD(x): %.4f\\tD(G(z)): %.4f / %.4f'\n", + " % (epoch, num_epochs, i, len(train_loader),\n", + " errD.item(), errG.item(), D_x, D_G_z1, D_G_z2))\n", + "\n", + " # Save Losses for plotting later\n", + " G_losses.append(errG.item())\n", + " D_losses.append(errD.item())\n", + "\n", + " # Check how the generator is doing by saving G's output on fixed_noise\n", + " if (iters % 500 == 0) or ((epoch == num_epochs-1) and (i == len(train_loader)-1)):\n", + " with torch.no_grad():\n", + " fake = netG(fixed_noise).detach().cpu()\n", + " img_list.append(vutils.make_grid(fake, padding=2, normalize=True))\n", + "\n", + " iters += 1\n", + "\n", + "# Saving the trained generator and descriminator\n", + "torch.save(netG.state_dict(), 'netG.pth')\n", + "torch.save(netD.state_dict(), 'netD.pth')\n", + "\n", + "# Visualize loss result \n", + "plt.figure(figsize=(10,5))\n", + "plt.title(\"Generator and Discriminator Loss During Training\")\n", + "plt.plot(G_losses,label=\"G\")\n", + "plt.plot(D_losses,label=\"D\")\n", + "plt.xlabel(\"iterations\")\n", + "plt.ylabel(\"Loss\")\n", + "plt.legend()\n", + "plt.show()\n", + "plt.savefig('Loss-DC-GAN.png')\n", + "# Visualize Genrator progression\n", + "fig = plt.figure(figsize=(8,8))\n", + "plt.axis(\"off\")\n", + "ims = [[plt.imshow(np.transpose(i,(1,2,0)), animated=True)] for i in img_list]\n", + "ani = animation.ArtistAnimation(fig, ims, interval=1000, repeat_delay=1000, 
blit=True)\n", + "\n", + "HTML(ani.to_jshtml())\n", + "#comparisation \n", + "# Grab a batch of real images from the dataloader\n", + "real_batch = next(iter(test_loader))\n", + "\n", + "# Plot the real images\n", + "plt.figure(figsize=(15,15))\n", + "plt.subplot(1,2,1)\n", + "plt.axis(\"off\")\n", + "plt.title(\"Real Images\")\n", + "plt.imshow(np.transpose(vutils.make_grid(real_batch[0].to(device)[:64], padding=5, normalize=True).cpu(),(1,2,0)))\n", + "\n", + "# Plot the fake images from the last epoch\n", + "plt.subplot(1,2,2)\n", + "plt.axis(\"off\")\n", + "plt.title(\"Fake Images\")\n", + "plt.imshow(np.transpose(img_list[-1],(1,2,0)))\n", + "plt.show()\n", + "plt.savefig(\"comparison-DC-GAN.png\")" ] }, { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "5fbSgsrE1GqC" }, "source": [ @@ -93,7 +453,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "7SjXNoT7BUey" }, "source": [ @@ -111,7 +470,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "0JRaeHfzl6cO" }, "source": [ @@ -134,7 +492,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "xFqMOsoYwzFe" }, "source": [ @@ -147,7 +504,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "yzy7y4hmbbX3" }, "source": [ @@ -157,7 +513,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "Q_jf9H_NDESm" }, "source": [ @@ -168,8 +523,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": "uOKvYDyu0w8N" }, "outputs": [], @@ -204,8 +557,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": "Zk5a6B5hILN2" }, "outputs": [], @@ -279,7 +630,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "1rZ5Qz1mBUe8" }, "source": [ @@ -290,13 +640,11 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": 
"4Tbp_535EVPW" }, "outputs": [], "source": [ - " class U_Net(nn.Module):\n", + "class U_Net(nn.Module):\n", " ''' \n", " Ck denotes a Convolution-BatchNorm-ReLU layer with k filters.\n", " CDk denotes a Convolution-BatchNorm-Dropout-ReLU layer with a dropout rate of 50%\n", @@ -321,7 +669,20 @@ " # CD512 - CD1024 - CD1024 - C1024 - C1024 - C512 - C256 - C128\n", " # The last layer has already been defined\n", " \n", - " \n", + " self.down1 = down(64,128)\n", + " self.down2 = down(128,256)\n", + " self.down3 = down(256,512)\n", + " self.down4 = down(512,512)\n", + " self.down5 = down(512,512)\n", + " self.down6 = down(512,512)\n", + " self.down7 = down(512,512)\n", + " self.up7=up(512,512)\n", + " self.up6=up(1024,512)\n", + " self.up5=up(1024,512)\n", + " self.up4=up(1024,512)\n", + " self.up3=up(1024,256)\n", + " self.up2=up(512,128)\n", + " self.up1=up(256,64)\n", " self.outc = outconv(128, n_classes) # 128 filters\n", "\n", " def forward(self, x):\n", @@ -349,11 +710,135 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "1hmcejTWJSYY" + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "1hmcejTWJSYY", + "outputId": "b4ae6c2e-e2c1-4fa5-a9dc-1f5476ee86a3" }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "U_Net(\n", + " (inc): inconv(\n", + " (conv): Sequential(\n", + " (0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (down1): down(\n", + " (conv): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (down2): down(\n", + " (conv): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(256, 
eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (down3): down(\n", + " (conv): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (down4): down(\n", + " (conv): Sequential(\n", + " (0): Conv2d(512, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (down5): down(\n", + " (conv): Sequential(\n", + " (0): Conv2d(512, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (down6): down(\n", + " (conv): Sequential(\n", + " (0): Conv2d(512, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (down7): down(\n", + " (conv): Sequential(\n", + " (0): Conv2d(512, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (up7): up(\n", + " (conv): Sequential(\n", + " (0): ConvTranspose2d(512, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (up6): up(\n", + " (conv): Sequential(\n", + " (0): 
ConvTranspose2d(1024, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (up5): up(\n", + " (conv): Sequential(\n", + " (0): ConvTranspose2d(1024, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (up4): up(\n", + " (conv): Sequential(\n", + " (0): ConvTranspose2d(1024, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (up3): up(\n", + " (conv): Sequential(\n", + " (0): ConvTranspose2d(1024, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (up2): up(\n", + " (conv): Sequential(\n", + " (0): ConvTranspose2d(512, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (up1): up(\n", + " (conv): Sequential(\n", + " (0): ConvTranspose2d(256, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (outc): outconv(\n", + " (conv): Sequential(\n", + " (0): ConvTranspose2d(128, 3, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): Tanh()\n", + " )\n", + " )\n", + ")" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# We take images that have 3 channels (RGB) as input and 
output an image that also have 3 channels (RGB)\n", "generator=U_Net(3,3)\n", @@ -364,7 +849,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "xIXFtHzcBUfO" }, "source": [ @@ -374,21 +858,42 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "RqD1katYBUfP" }, "source": [ "<font color='red'>**Question 1**</font> \n", "Knowing the input and output images will be 256x256, what will be the dimension of the encoded vector x8 ?\n", "\n", + "<font color='red'>**Réponse 1**</font> \n", + "En supposant une taille de noyau de 4, un padding de 1 et un stride de 2 pour chaque couche de convolution, les dimensions de sortie pour chaque couche peuvent être calculées à l'aide de la formule :\n", + "\n", + "n_out = (n_in - k + 2p + s) / s\n", + "\n", + "où n_in est la taille d'entrée, k est la taille de noyau, p est la taille de padding, s est la taille de stride, et n_out est la taille de sortie.\n", + "\n", + "En utilisant cette formule pour chaque couche, nous pouvons calculer les dimensions de sortie pour chaque canal comme suit :\n", + "\n", + "Pour la première couche de convolution : n_out = (256 - 4 + 2*1 + 2) / 2 = 128\n", + "Pour la deuxième couche de convolution : n_out = (128 - 4 + 2*1 + 2) / 2 = 64\n", + "Pour la troisième couche de convolution : n_out = (64 - 4 + 2*1 + 2) / 2 = 32\n", + "Pour la quatrième couche de convolution : n_out = (32 - 4 + 2*1 + 2) / 2 = 16\n", + "Pour la cinquième couche de convolution : n_out = (16 - 4 + 2*1 + 2) / 2 = 8\n", + "Pour la sixième couche de convolution : n_out = (8 - 4 + 2*1 + 2) / 2 = 4\n", + "Pour la septième couche de convolution : n_out = (4 - 4 + 2*1 + 2) / 2 = 2\n", + "Pour la huitième couche de convolution : n_out = (2 - 4 + 2*1 + 2) / 2 = 1\n", + "Par conséquent, la sortie de la dernière couche de convolution, qui est le vecteur encodé, aura des dimensions de 1x1x512, où 512 est le nombre de filtres dans la dernière couche.\n", + "\n", "<font color='red'>**Question 
2**</font> \n", - "As you can see, U-net has an encoder-decoder architecture with skip connections. Explain why it works better than a traditional encoder-decoder." + "As you can see, U-net has an encoder-decoder architecture with skip connections. Explain why it works better than a traditional encoder-decoder.\n", + "\n", + "<font color='red'>**Answer 2**</font> \n", + "\n", + "Les connexions de saut permettent d'obtenir de meilleurs résultats que les architectures d'encodeur-décodeur traditionnelles car elles aident à préserver les informations de haute résolution et à éviter la perte d'informations." ] }, { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "cchTp3thBUfR" }, "source": [ @@ -416,7 +921,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "ge6I7M0aBUfT" }, "source": [ @@ -427,8 +931,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": "RYqomFO8BUfV" }, "outputs": [], @@ -469,7 +971,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "5m4Dnup4BUfc" }, "source": [ @@ -487,23 +988,23 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "AH6u5a-PBUfg" }, "source": [ "<font color='red'>**Question 3**</font> \n", - "Knowing input images will be 256x256 with 3 channels each, how many parameters are there to learn ?" + "Knowing input images will be 256x256 with 3 channels each, how many parameters are there to learn ?\n", + "\n", + "Nous avons des images d'entrée de taille 256x256 avec 3 canaux de couleur chacune. Le nombre de paramètres dans le générateur peut être calculé en multipliant le nombre de canaux d'entrée par le nombre de canaux de sortie par la taille du noyau et en ajoutant les biais. Nous obtenons un total de 54 420 483. 
Pour le discriminateur, nous pouvons calculer le nombre de paramètres de manière similaire, en considérant que le discriminateur est composé d'une série de couches de convolution suivies de couches entièrement connectées. Nous obtenons le nombre de paramètres suivant : 2 769 729.\n", + "\n", + "Par conséquent, le nombre total de paramètres à apprendre dans ce modèle cGAN est de 57 190 212." ] }, { "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": "g_9LxNhGBUfi" }, "outputs": [], @@ -513,10 +1014,11 @@ " super(PatchGAN, self).__init__()\n", " # TODO :\n", " # create the 4 first layers named conv1 to conv4\n", - " self.conv1 =\n", - " self.conv2 =\n", - " self.conv3 =\n", - " self.conv4 =\n", + " # create the 4 first layers named conv1 to conv4\n", + " self.conv1 =conv_block(n_channels,64)\n", + " self.conv2 =conv_block(64,128)\n", + " self.conv3 =conv_block(128,256)\n", + " self.conv4 =conv_block(256,512,stride=1)\n", " # output layer\n", " self.out = out_block(512, n_classes)\n", " \n", @@ -534,11 +1036,59 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "W_sevZRnBUfn" + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "W_sevZRnBUfn", + "outputId": "b80e9dd6-d9d0-4848-b5f6-9af15d2abaa2" }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "PatchGAN(\n", + " (conv1): conv_block(\n", + " (conv): Sequential(\n", + " (0): Conv2d(6, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (conv2): conv_block(\n", + " (conv): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, 
inplace=True)\n", + " )\n", + " )\n", + " (conv3): conv_block(\n", + " (conv): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n", + " (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (conv4): conv_block(\n", + " (conv): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1))\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): LeakyReLU(negative_slope=0.2, inplace=True)\n", + " )\n", + " )\n", + " (out): out_block(\n", + " (conv): Sequential(\n", + " (0): Conv2d(512, 1, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1))\n", + " (1): Sigmoid()\n", + " )\n", + " )\n", + ")" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# We have 6 input channels as we concatenate 2 images (with 3 channels each)\n", "discriminator = PatchGAN(6,1)\n", @@ -548,7 +1098,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "v_QubOycBUfv" }, "source": [ @@ -558,7 +1107,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "DiI2CByRBUfz" }, "source": [ @@ -576,8 +1124,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": "k4G_xewPBUf4" }, "outputs": [], @@ -593,7 +1139,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "c12q2NwkBUf7" }, "source": [ @@ -604,8 +1149,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": "vGKjO0UMBUf9" }, "outputs": [], @@ -629,7 +1172,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "PhPkU7BDYooV" }, "source": [ @@ -640,11 +1182,22 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": 
"8wyPjAxPYsNF" + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "8wyPjAxPYsNF", + "outputId": "42f99861-6dd8-4faf-eec0-65d36b8c28fb" }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "CMP_facade_DB_base.zip: 34.8MB [00:00, 75.9MB/s] \n", + "CMP_facade_DB_extended.zip: 19.4MB [00:00, 61.4MB/s] \n" + ] + } + ], "source": [ "import urllib.request\n", "from tqdm import tqdm\n", @@ -705,7 +1258,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "6DHT9c0_BUgA" }, "source": [ @@ -716,11 +1268,22 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "rxi_QIpgBUgB" + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "rxi_QIpgBUgB", + "outputId": "6b5d39be-dd7c-44b4-f610-8f76ceb784ce" }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.9/dist-packages/torchvision/transforms/transforms.py:329: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. 
Please use InterpolationMode enum.\n", + " warnings.warn(\n" + ] + } + ], "source": [ "class ImageDataset(Dataset):\n", " def __init__(self, root, transforms_=None, mode='train'):\n", @@ -769,7 +1332,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "Okb3LU76BUgG" }, "source": [ @@ -780,8 +1342,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": "xuxq4TZRBUgJ" }, "outputs": [], @@ -816,11 +1376,27 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "m2NxLrQEBUgM" + "colab": { + "base_uri": "https://localhost:8080/", + "height": 216 + }, + "id": "m2NxLrQEBUgM", + "outputId": "9e30fb65-2794-48a0-dcf3-042ab1ba9258" }, - "outputs": [], + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAADHCAYAAADifRM/AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAAsTAAALEwEAmpwYAACmJUlEQVR4nOz9ebwtyXbXB35XRA57OuOda3z16tXTG/UEGtDkRgy2AYMlY4GN3S1Bg+UBu92f5mMb7E83GH9wq23abtx2g9UYJNnNjAWyG2MwjSzLRiAhw9Mbq+q9muvO955pj5kRq/+IiMzc+wz3VtWtqlOv9rp16uyTmREZEZn7Fyt+a8VaoqqsZS1rWctavrHEvN8NWMta1rKWtTx6WYP7WtaylrV8A8oa3NeylrWs5RtQ1uC+lrWsZS3fgLIG97WsZS1r+QaUNbivZS1rWcs3oKzBfS1rWcsHRkTkZ0Tk97zf7fggyBrc32MRkZdF5Ne/3+1Yy1reLYnv+EJELq4c/19FREXkI+9T0z5Usgb3taxlLe+GvAT8jvSHiHwWGLx/zfnwyRrc3ycRkd8pIv+ziPzHIrInIl8Xke+Ox18TkVsi8sOd6/+JqPkcxPN/aKW+HxKRV0Tkroj8n7srBBExIvL7ReRr8fxfEJHd97jLa/lwyX8J/FDn7x8GfjL9cdb7LCI9Efmv4ru6JyK/ICJXVm8gItdE5PMi8m+8mx35oMoa3N9f+VXA54ELwJ8B/hzw7cDHgP8t8J+KyCheOyZ8WbaBfwL4l0XkBwBE5FPA/wv454FrwBbweOc+/xrwA8CvBh4D7gP/2bvWq7WsBX4e2BSRT4qIBf5Z4L/qnD/1fSZMBFvAk4Tvxr8ETLuVi8gzwP8I/Keq+h++e9344Moa3N9feUlV/7SqOuDPE17mP6yqc1X9G8CCAPSo6s+o6i+rqlfVzwN/lgDWAD8I/Deq+nOqugD+L0A3aNC/BPw7qvq6qs6BPwT8oIhk70Un1/KhlaS9/6PAl4E30okHvM8VAdQ/pqpOVf++qh506v0U8LeBP6iqP/ZedOSDKOsv9/srNzufpwCqunpsBCAivwr4UeAzQAGUwF+M1z0GvJYKqepERO526nka+CkR8Z1jDrhC5wu3lrU8YvkvgZ8FnqFDycAD3+f/kqDo/DkR2SZ
o/P+Oqlbx/D8PvAj8pXe5/R9oWWvuHxz5M8BPA0+q6hbwJwCJ564DT6QLRaRP0HySvAb8RlXd7vz0VHUN7Gt510RVXyEYVn8T8F+vnD71fVbVSlX/XVX9FPDdwG9mmb//Q8Ad4M9EymctJ8ga3D84sgHcU9WZiHwH8M91zv0l4LdEg2xBePmlc/5PAH9ERJ4GEJFLIvL971G71/Lhlt8N/FpVHa8cP/V9FpFfIyKfjcB9QKBpuqvOCvhtwBD4SRFZ49gJsh6UD478K8AfFpFDAqf+F9IJVf0iwWj65wha/BFwC5jHS/4YQUv6G7H8zxOMuWtZy7sqqvo1Vf3FE06d+j4DVwkKywGBq/8fCVRNt94F8FsJ1OKfWgP8cZF1so5vPIkeNnvAc6r60vvcnLWsZS3vg6xnu28QEZHfIiIDERkCfxT4ZeDl97dVa1nLWt4vedfAXUR+g4h8VUReFJHf/27dZy2NfD/wZvx5Dvhndb0se+Syfq/X8kGRd4WWiYaQ5wn+ra8DvwD8DlX90iO/2VrW8h7J+r1eywdJ3i3N/TuAF1X169Hw8ecImuVa1vJBlvV7vZYPjLxb4P44nU01BC3n8VOuXctaPiiyfq/X8oGR922Hqoj8CPAj8c9vNWZt2323pEu9icjS5/QPkfC3EUQMxphwrQjJZb5btvlbtY1zIAIai8SDEo8vlyTec+XYsfqbGjp1SvxPmhtIe3HsU6d/1nB4uM/h4cFqE941WX63y2+FpxhYy3MbL2KW3LXXspa3Lh7DC4fPMXEOuI7qye/2uwXubxC2Dyd5gpVt7jEmxI8BWGu13++/S005p7Jq6pATjqVLV0FwxU6idEFUMNaQ2YyiLBj0+wyHQzY3N9nc2GRntImI8OLLr/Ht3/wt7Aw2EckwxoRKInAmt2HxPkwO1iLGgBGMsWSEiQBjEMDEicGItBODFUyWkVuLsTbcA8HaWLcxoe0iYNr7hjklHhPBe4+xNk42oW6b0F4ktEtAI6wrioigKvjM8O/9wUdm93zgew3L77bIswr/CZ/e2uRnvu/XM7DT1cvXspa3JGPX41f/7Z/k798/IuwRO1neLXD/BeC5GLntDUJEuH/u7CIfVlFUhIBkimir2SWN28frBIMYwVpLYXOKoqDs9xgOh2xsbvLkE09w8eIltrY22djYYDgY0Ct7ZHkewdeAKnfv7DH3BR//5k+yMRxGhTvcq6stgyCqeO/xAl4DaBpjUK+IJq3eo/EfRlCvoV/a9sPhqLRGY/9UQZ2P1yi+WQEo6mNdCKqKVw+axkNx3iMo3js0tsF739zTN/dQLl++jDGPTGlfv9dr+cDIuwLuqlqLyL8K/PeABf5U3EX5oZMuJRJwrOUrgqYatF5rJWjbmSEvCsqiJM9zRIThYMBwNGQ4GDIYDBgMBvTKkjwvsHlGnudkxmCNBRFUPEcHe4z37zdt0AiOzi24cesAj+PFF75MvygQkyiWqI23MBtAViUp9aH+pk/S9kwDQKMazsf+KaAqdImZxL6o94jGlYcJtFA7RhHYvccYQRPdI+CdYm2Bcy6uMAIdFK41DbWUFRnf/o9/K3/SPprwI+/kvV6aM9eylncgcpzRPFHeNc5dVf8a8NferfofhZzlBNrA1sO6isZvr9DyvmIMmbXkRUG/KBkOhtjMklnLaDhi1B8w6PfplyX9sqAsS8oiJ89yjA28txGDNSYCXABpIWnR4TcIVgWtffPkvdeoHrdN9CjqcwotyXolG7YgU4t4CfXjEQyGAJiJUw90i2l0dB/bgLGRO9fw40HQcCyCtRGLqkHEoKoNZRPuR8uVxz6mwUyArijWRvqmoV3AmkDTSFr1pOcpoNZgjGW4PeKxp5868zm/VXm77/XeYpO/fuM3UMjiEbZmLR9GmWvJfrUJHJ553TdsyN+HAuWkKa5QCNLRTAMYgRFDURb0en2MMfTLktFwxMbGBnmW0ev12NzcYmtrm62tTawx9HslG8MR/X6fXlG
S2yxgb0IhIy3XHu+pUftN7WuNhYGuQARDBF0RvLRlocVykTQZaEO1qCrOg3/+Rcbzim/6xHNkNseItupA5LOF0G9NmnpsSyBGkuE0gWtLhSCBd0+NUZGGzkmaudCW6/azXTPEFUR6JuIjj5609MSpt30FAnUkipAhGIphiT0XIeuVF44+yu/4O3/2/W7IWr5BpPKG4Kx1upyHN/9tySp4nwTmiR9Ov7Msw0bDXlmWZFlGnvUYjYbx75yd7W22NoLRcWtri53tHbI8oygKRsMhZa+HMYYizymynCzWZyQYMk30MNEEQh0ASrRFA06p7V0Qbdjmdu2VYDUq6UFRDpVGzR1sKIoKeNMZj0jHSOTC63lNVg7IZcbG1hZZVoRJJl4bQDjctR1H8K3VdglUE0Z7rxH4QQwYjZNLw+ekCmLvomdNmlxRE0E+UTosjU8aic5oQDSceq9pXsIbQdQimEBrnf4KvaeiCAtfvt/NWMs3jLgHXnFuwP04jxv/6mh4xrQuelmW0euVGGPp9XpsjDYC/dHrsbOzQ1EUDPoDtre3yYucXq/PxmhElufkWcZgMCDPc/KspCyKxpvDWovpuNwFQ12njUkbpqOBNhq4dppvmt6kPthoPDQJKSXU6eOFmigNVg2byyKN5hxoEiC0OYK7i21JpZPm7L3He8FkOeJrbFlisyIadGk17s7EEuoRTPxNWi3EtmocF5sAP2r5StDmG2/K7vNs6pZoEAUletjgCCbklqVPo91ODLQrHRQTc5AYTDQax6cjZs1zr+VDK+cC3EWE0XDIaGOLrc1tyl5JURRsbW2xvb1Nv9+nLEq2d7bZ3NzEWstgMGA03MBmGXmWN8ZHQUk+85L2aEVcSBOIRm8LYwweDYDQkcRnB+qkPSeAaHQNjLzwqm82S1e3wJ4AVBTENBxQvMq33HPU4EWUMEEYGv02gnRXrCQqqaNpN+QGy7BoBOMUawwqgsksYrvc9ckrIJVk0JTGnqBxAuz6tDf9jpOY6PL4NNRMQxcZwo7+OK2JNF47KM1n0c7zER8nGRvnUteZPSyiicsn0jiPknFfy1o+OHIuwP3ypUv8a//K72X34hW2tnfJsqzVopPPNMdQpAMcXba2BY7muo6W3L08eXUsAZq0GmzimhOoSqpNIv0RtVJzDD9aoO6Cm5KUyu6xSEl0gHv59zI4pnZJB1UTxeMbbjxNJCvjFdverCZS++T4Pbr97/YlgXNq+/HtSe34nrjp6URJE0a78tHGDtIuSaRzrnnyCcATond+dRcga1nLh03OBbgPBgO+6Zs+jicjL/odjXhF+41Y02CW+rD0145mnLhc9QHE4uSgdMHFNMR14sHTbk1VDUY5TyCPpblZpBs6hs90puOn13DIkbQQ2smi9fbolk2UdPBCwUgDcqKKBP8UTMwmFjCs5euJPL33qW0+uB7GVUxaZXTH0LcVtQZl3xkPpKk3iYmbh84yVHvvQRSnLhkSCBp8KOOtxdqchP6azkcrs68rFHDq8LjGfVNU4uYpQx69ZNpmmJbOX3lf2v6tZS2PQJZ1rWV5mI3HaSF+kjge+SLzXIC7GCHvFVTVMj+rDcAkcFr2aqldzb3DfWbjQ5wGH2cisOU25/LFS+Qmb8esUXaD//aiqri7fzfy6sFDJABkzaAYsL11AUtnx2SsRFUbPKnqisPJIYv5HM8y7WDEcOXCRbIsDxREaHlqRHNt7R37BwfMZhMcaZ4IV1tjuLi9S57ZVnPteosI1LVjf/+Ao+kRKg3SRbwTLu9eolf0SD1PO0nTRSJK5Sru3LrJdDHDR21YFbyvyfKcJ68+SVkcNwi2bVFcXXPr9k32xnsBoKPvu2qYHD7x0U/Qs0Vn4unW5FHvePONN3jt+uvUVKg61Hu894gxfPZzv4Kd4W66c5yMQcTS+OQDx7b0flDlCrB1wvEpYQvVWYBSEPbSnuTifwDceIj7C7BLSF29KgrcBuqHqOMikJ9wzgF3H6KOx4hp4le
kBl59yDY8CfROODcnOJ08yD5pCYG0T2qHAi8QxvUseRr4yCnnfgnYf0D5tyjnAtxVCbsOj+W6PXsqE4SXXnqBV7721dblzwdtbTjY4tf/un+cPD/prQpLfFfXfOnzv8R0NkF9WOKnCeXKxat8+7d/D9h2qk1a6zJ1AS997evcuPkKGjZJNoBUFCW/9tf+Bmx0gTze70QlCS99/UXevP5KBGSDRm6/V/b5nu/5Por8pDezlTdfe5WXX/06XlwD3N57irxg93u/Dyl7zcpnlR4JXVZeeekF7h3cRbHh/l5xXtna2uKJK48vtbnL/6fNRqrKzTev8/qN18A7jAQvGuc9m5ubyLOfaMY/3rn9Mx4aHxxw78YN1IQNSs57aucoB31s19jcUPNJ5en45i/d4wMqhhDc4JkTzt0lgPNZ4F4CnyaA/Kq8AtzkwZqiBT5BmGRWxQE/w4MByQDfzOmT1M9xtru2IYzBtVPKX+fB4G6AjwM7HO/zPmEsHgTuJrbh4gnnPGGCeBC4D08pr5w8+b1DORfgDiwZPE88vUqDhCLUVc1kPMHVNZkYgqlUUWcC4ESKoaVmlut1ixpfOfAafbI9qEfrmu6bkLRxWAZHAaR2uKpGvGJVqHBB26zy9lrVqFUv96mheZwH7wGLQZnXFahiigHW2MjNx3sKHbooAq13qHMY8VhrqJziXI3NSzKxtMuWzo8K2tmq71WoHOTG42qH82ECHJYDTJz4kndOZL9IG53SWHgPlXdkWc689jidgzqGGxtLdpBEL3WfffDDN8y9YzgYcnA0ZVEvKLKSna0LWCkQyejOBmlHbQB8QxfUk1fTB1reSQdOoxHeSp3Cydr/W6EQzCl1vJVYgaf1462Oz+r1b6X8aX1+2LFoiYj3RM5FKEaR9CVtpashrv4016BkeY9FJSxqKPubqCmZ14a61i7+LU0IaXBtljPF8vrdfe4cTBltX+LGvUNev3uAs8WxmWDVQJqqWjjP3uGco6ljc+cK98cVt8cLbH/UrAZ81MSP9UODYfb+dMrN/SnjmaO/cYFbhzNuHkzIRxvRm0dPmODavw/nC25PpiwkR/Iht/cm7B0tGGzuop0VSZC4xKB1ZUSEw0nN/tGc0dYFDqY1d/YneCnY2Npp+m6SYTVu8yeuBEwMAnY0nbN/tODizmUmhzW3bx9i8x4bG9vHJu/WwN1aPifjOYdjx87uVQ6PFty7PyaPG8REIu1Gu3fBiA20jB7/pp8UeXIta/mwyLnQ3AO361E1x9z9TvOwSD7nZdmn19+idhWjrQswneHHE/KyPAbOq3UbMfR72xi5j8lKRjtXMeVtitozKjbjNvyTaIwOJQFUYphUln7Zo79xkUV9Hecdvd5GU37VE2aJ3vBQSImxQ6Q3oLe5S5bfxy1m9Hujxki8OhTdOo30EO1TlNuUwwFkk0DrDDaWeYzG8JiOtcZek/XJyg2GW5fh1iH1YkJWDsl7vWZaONEDJlmFRbBFn+HGRfrbF+ltVyxMj/5wSNEfLHvqdOsSSHsGti9d4mnbY3Rxm8eegrv3btMf9CiLfqP5L0/wwqk6SsfIupa1fNjknIC7RqNm58t7DNQ7Ozfj+kaBPCvpj7bw6uiNtnHZDGdyyjwLGq/XRMeu1BZqGOzsMtyfUdqMcjikv3mR2jnMoIdHsSyDyTGfcBHK3oh8uEPZKylHfYrRNt578mEfpN3sk8p0XTuTb7cp+7hyg3I0hDyDoofJM7J+x5rVuDm2RsxwHKQsMYNNNi9eQoByuIMxStHrRw4+etPEmDPO+yWXQgTKwZDdosdgtMlgY4e6GNIf9bB5Hjx0OuDcNRynR6LAcHOLq0WPYjBgc2cXm+UMBmF3cHqKqY6lXbSE9UQ57LNjMkxesLG7g8Ez6DkkC/7wiAdp61pe5sbVSHeB8I1iXF3LWt6inAtwNzGMbR0NI11u/DgPrw3HaxQGec61S8FK0e/3Gfb6bPQG5DZd3QJIQoHkYgfKTr+kurC
DNULfGJ7Y2aauK4oscvcdb5BVV8Ck824OBjy1s01R5BQGHt/dxHuln2XRfzt1SJv7Q3CR1MjFX9gc8cxuxeYwpwc8vTliYRSzSkUlN8nuiKhyaWODj125yu5wyLyquLa7RaZKYWzjJppa7ZyLKyVPeAVC/Vd2tpg6TyHCk7u7LOYVZabBj7+lx2HlmaSxMShPXNllXDuMwjOP76LzEd4tQiAzlu0M3eebvF22Rj2woOrpXx7itiyL+RTvW18xSXaV5n1YeT+WXFLXspYPp5wLcE+0DNpu2V/l19NxOgDrxTPYHHA1u4YAWZbhFaq6RmqHtfaYEWMZKGF7u89oOEStUmQFz/aeoHIVpQ1REruWoGMTjQQ9emd7g8wKYoDM8PRTV1hUDueqAGoR4VsHytj+VK/3XLpygcHWCPE1xho2tgbMa4/1rgHEEzcMAeqV7YtbyLBPJmED19bFLdxsgeCO4Z/QCe3b6dPWzgZ2MaPMCy5dvUDtKqr5NOwjOMGg3D0WJilh4T3j+ZxeZlhUc6q6Aq0pKJtJ1Xdi1qfhSDHbJ5Mp+0djBv0B4/GU8fQQS8325mZjH1DRZjIIN4elJQjt4TW8r+XDKucE3D1VVSGSR21SoPGsaGO5rHIrQSM2vHprD9Gaqxd22Z/OGE8mjIqSp1KYkabkcXA+nNbcPZiSGbi0s82bd+7iKsdju1uNQbYLYC0kJ4BzHByNuX7vPhZl2Cu4u3fAbF4xKmxLx3Q+tR0IdRo8t/f2uX7/kDIz7IyGvPbmG8zrimevtT5gqn5pM1ISYwz3Dg556eZtdkd9yrzk+p37OFfz5JVtuiDuUSSG0FU6wcoUbty+x/50wpXLl7hz/4jJbMIgE4ZXeivjJ0sxd9LYKDCe1xxMKwYXthjvzdk7HDMoLDtbzVIKT92xocYQYHFDms9y6G2SDXq4ORxO9ikK2DFZUAI0ejY1jzBNUoI2r3NA+5QARNYIv5YPoZwLcBcJnKxLtIy2YNT1cpHO3/FKjDHM5hWKR8Uyr5SjWUUmWeO2l8CrK+ketQpH8wW5GC5JxnhW4RYOrwavgnh/nI6Rbp1Bg3Rh+YEozBc1Ve0wZZ54jKi5H+t4bJ2wmHsOjqYMi4yNfp97ewdUrsJf7YJ72hwlSysYVWWxcEwnC6peibXC0bRC3QJpbq8k33mRsBPVex/D/YY+zFSZepA8p6oc09mCspeHdHi6TFGd5PmiXqmrCuc93sOs8kxnNf3cNLRU1xTaTe6RtHrnw1g675kvKuraMywtViMZ4307YUsnbAEK4sJbksbFm/PhDtbI25llTlqtPUQ9zSXHbVdvvRlvsw3veh1vtS8ntOEY7fs26niYRpywslwq/y4oIOcC3AFq73AqGF8HAHH+2Dh4AkZoPOVQau+ZzKZ475lVjnldM57M6BmDV4/DY5b0zsTJKl6Eee0ZT2cU1jJ3num8wlWOmhD9sMkt2tHio66LqOCB2oeVB0bAWhKiqoTzIepjUlVb/h7iykIts8WCw8Mjyu1R3NAVqBOHhpAEHc8PjT77Gj1Vgn8+HB4eceXSDuPZgv3DI3p50NRd/NH4Dvnoz+8JESRNLD+ZzPAuXDeZjpnPZ9DPqb3HoXS91FX90vuoqtTqmC3mgV6pKmaLRQwl4JnOZyxcjWcRnqHEkAd40lxe1zXT+ZS6gvF4wWw2BjyLynF/fEg26zEgptxj1RYRqR1Jgwq2yrm+d4+Fe9Aul3dfLpV3+P7H/zrZA3fcBHFi+bnN7+XLfOrYuWu96/yGZ/46ZT0/tfxeb5u/Yn6AGcdzE39s40V+7Uf/fxj1J5RspbI5f3Pwj/IqTx87l5uK73/yr3Lxwp0z66hNxl8u/mnus3vs3GZ2wG968r9je3b/1PLeGP764DfwKk8dOzeyR/zA03+F0eLogW34K+UPcIdLx85dLO/wm57+awyqyZl1LLKCn+9/J7dPqCOXil917e9yZXjz1PIqwhd3Ps3zfJzV3RcGzz/y+P/
Exa2zxzLJP9z/HD9/99sfOB+cC3D36vkHX/yH3Lu9R2bzxnc6AVwCZR9pCfEBHCtXU1Vw7/Y+YpQXJneZzuaMx2OmvZyfq+5RFEVj1Ezl6WiKd/fHHByOya2lPrjNvft7uNrz1dlt7tx6mcwmo2iki2g19xAjHe7d3+fw6IjMWo72BxweHFDXjvlRzs//Yk3ZK4N/OBKzDUmzUzRpwm++foPZnQMOFke48R5H92+jCC+/9DWq2QFFUYDYELGSYKNIhlFXO+7d22dy7zZv2gXVoma+v4fPMl54fsLtWzfJ8xJU8VXN7dt3mMxn/OL/+ksR8MMIX79xCxHhhfFdDvYPcK7m9vQek4PbvP76y2FXr1cCM+KpU3bXSNEIwtHBhNo5bpuM+XwRePt7htuvGl7++ovtPKnaCQwXno/zjsWsYj6vAKWuHIpyJHDPWl772gvYFF45xZSh43kTn4sxGcZabJbx4he+wr27p4PHeyPCR4av8ce+5V9/6ATZFRk/wQ/zP/Drj5371OhL/J8+9x8xYnxq+Te5xogjDtlYaYny3Rf+F/7lC3+c7AHbMucU/BV+gBd47ti50sz5oU/+JFe49cA6drh/IjhfLW7wb3z6P+Rx3jy1fI3lL/GDvMjHjp3bKvb5oc/+JFsP2BpakfGd/DzXT9jmeq1/nX/mm//8mWOZ2vECz3HA5rFz1jiee+6FM9uhwBs8zps8dgzcRZRPftOX2eDsSSrJH33+9/EL976V+gHoLmftCn2v5KPPPqP/wu/+XcwOpwgruxijsdUawdcx+IyR4MpXV/G8khcZ6pXFoqbh7AXKsgATkynXIcm0NQYhgqPSpnxDohYblvgGQ2YzFMF5pU7uPCKIuKiMS4hBJiGKpRCSdtR1TV3XzKsFYgzOOVDFGIu18VrTRoM0KnhNawyH8w5jIk9tgzeRSTSED5SK8yHcrTEGYzPEKYW1iIkBwIzFiAmTgle8cwi+oTbECMaETf3Oh525IBRZKKeAyUOKvBRewLm68f5Jurw6H/ojgrUximf0excE79NEHePKuzAZ2Mw2YxYAPpiNXR0nDROeSRhywXuHxKxR6qVJQJKyNikm3C/aWsI9Lf/x/+M/5NVXX3lffCJFnlX4z/j23U1+5vt+/UODe2MTOWEZL6eeWS7vTyGlHqZ8qoPYjtPreXAdZ90t7Cl/99twWh3d8NgfFPmjz/8+/sDn/wi1/s/Av4Dq105s/rnQ3AVh68IOkoUMOqraaLgGglpnpNnlGUDRE7BNAI/NTNDOXdDgJO5YzLOs2SXqfNAETcz9qaqIthSIV1B88KXGICpxIkicso/A3gJOEzJXYuTE8CHQIbWDKkw2yUPEWIuIaXd6Jv5c0uogTDze+RB2QEHEhvEQ33DtXsE7F1z4JcbKUUVsDDWgIOIRslim3SELKcdqDLYW63QaDJ3GmMZunPht4+N4uRqvrlnNmLh6kahNG5PFyJIuGIAJ59MGUlWPxsweNTUidQBmn3zmDU5C/5vMfZFOCvXF9msc98aDJ1JlEGaF2HCJq4MPmoThiraEt1nePlSowrProGnH26/jnfbjUbThndbxQZRzAe5ePffu3OLN114JS2zvQ3KjBKKQvr3hC9wcbymOJrEDbbTEVDZtFGoydUpLozd+5GKihhE29xhrA/0TtdTupippuHMAbbTJlIpU20YHXlkT/stSXWnRJJL6F3phOvcKiBh37hoXGx7AK/H/SbqfQ77qBHaB0vLRUOm9o6rBGiGzhFACYVptNjwhtumLiHQSZkRdrN2IEAzXzUapsBoxUkcwNphY1kknO5UG3t1Hzt2KDwMiprEv+VjOxLHtipqUFDuO/dKImfAjhtGFHUajIWtZy4dN3hG4i8jLhJhuDqhV9dtEZBf484Tgli8Dv11VzyQ9vQtf7F7Rj3HFPYbEoUKCcY2cajgSPjfgk4AbEwN0RZohnlMkhmdv/c0TpibNORlbk9bebrXXzqyftMXWaz2qleF
vFRCN9yEAM2B8vLf4JmGHet/W0XY1bm5KtxaI2jfeN3w/pGasBMvqjE8L/p5Mwr2chKBke3f3eezqNTKrzUQqGlY57cokGm2FANAx/noXadPEqRJGKS2yLTaujGjqsmmCaFUpfCzRgf0A7h2jc3e15BPjkyihOJbdlZAS0zFayyc++xnyn/op3qo8qnd7LWt5v+RRaO6/RlW7Zt7fD/wtVf1REfn98e9/66wKjDWMRhtMDg8bPjatxZPm3fozgzYJnUMArAa8m4QSIBKgBiIXG+mbdhmftP9AY2gzmbRUTaOtawC3ZtuMantOQKK2n1YRaMjnmUCsC1Sh3rC5KtBMS4uCdn5BOpp9upeLt+xE/V8yEne86dM94wEfjznvmbtDag4ZbAzIrGli6Kik4GA0k1XjdqhxXDtL7LT68doB+7Q6UY1Js1tNv6FXSBOTkkvyuTedtRdx9SZN2xOoh5UCOFeHFYeGSRNjmjGVLEMdlPmQ5z7+3DvhU9/xu72Wtbxf8m7QMt8PfF/8/BOEqM9ngzuGfm+AFfDVnMQRQ/Qu0RZ8BaLfNTFTUatNJkBrMx5xbMkOAeRIxkyTADOgqkbwSGUS99/4YrNMhfjktiixPYm/FrPkidIGGmtTADabs2KZLs0EqTwN2AdbgCaupAG7BjjjSqCJARMHwSfqRENykb179xGf4SdTfBa17ehSGezVwSaQ9sWmHbImgWnsT9okRDuHgATvF7wei/LqCN6i6lNibMVYj/eKaLY0TplIAG0FrDRJvok2EMQ0XHqi3FK0SpPnGGsYbm9zaWf3UTKtb/ndXsta3i95p+CuwN+QkM35P1fVHwOuqOr1eP4GJ4f6X5Lkorizs4NsbTWgpaqNlh4Jl1giaa1L6aM7vDoN4DQgF2eGhlzp8vlp00uHcgnt0mhQleYaTwB5m6aVeDwk2+5Y/sW0pPoSzdASMcfCK0TeOmF5Ou1j7xo+PFVKpJPixNBa/kMdjnb1o9HbZVFXTI4qoOLSxUtkmWAjUJsOJ56oEEPQ6FVC301cN7SjlPz4afokdH2e2tYGo7FGKimlMHTxWIjT3qxipE1p2IyLdMZb2smg+wyMMZCHHLxZv481JwUSfyh5JO/2Wtbyfsk7BffvVdU3ROQy8DdF5Cvdk6qq8ctxTETkR4AfAbh48SL9Xo9ZVpCZ8KX30uG5k7YYVeou4Ce8bKCgo9U2GryEz6YBh05UQ1o6I8X/1uhiCDEdXUIyidNHBFQ0UkbxviopyqM0fEZioVeIC6ClM7q0k6ZysVLVDjWyMil0/y9xMkn9VmLQMSKGKnjvcJUh75WYSsn6JWWRNROdTVNhmgQlAL8SVihpRWMiD48P3jCelAc1GpDRDrwn4BVqn2gxi0kJSBpwt829m1IN0rtmADUmypZmXdDxXkJRY6OHjMR7vG15JO/2yal31rKWd1/eEbir6hvx9y0R+SngO4CbInJNVa+LyDU4eZdD1IR+DOBjH3tWs5Q8OeKL+pq0STVRLQm8E1gpnvT9somj1sR1txpww/VGZbrlsuOyPmq1pOsjJy4agVpSIu2gThsx0YBI5HxTp2gV6w59kgykvtGwIwCnfKfxfDcwV0tzLFcXx649EaJ6odp6CYUzpjN5hTs6Ba9ZA4yuVuoU8IzgF20QvLSGao1JONBgAPYSdriGyTHdw3dWFSa2YRncvSZ3UI1ta/O0pmMQXDTFmDBWmmwoceJbmuyTNi8xebiCOrwKKQV31/j8VuVRvdvBz30ta3nv5W2Du4gMAaOqh/HzPwb8YeCngR8GfjT+/qsPqivhqokZ7tV7rCmgN4xgKNGnGiBoZClkbfCA6bDsGuOYNNqtIiZQGj75UgNiQhljYr5SIuBqy4knH/LE9zvn8LULS/9UTkPoYZtFw6wml77UhuCSp+pCWAHAxk1BXjQm9W6jIvoYZz1QDEGXbnzkY4wXNPqTew/iGt49UT9pRaC+NQwLYSNVntX0exP0aEF/tElRFnF
MTasgx8nMRAN1Wp2YNOmYdMzivGJE2wknJQKXFAcn9V+bSVd99I9fmtCI9g5tNHYjWZwoulRbaKQRG+4l7SoJgfn0COdmoc+m9fx5K/Io3+3lit9yU9aylrct70RzvwL8VPzyZMCfUdW/LiK/APwFEfndhFS8v/1BFal6FlXwfjAmbL0oiyH9rYtEJhyg80VNfLdvwCecp0l0nVTdYMBcVZ6iRusDiPqYDJqmdtPJzSkNWIewxPE+nboMstQOSB47yzxxUu1NAnd1DYXyMNLew9BMJPglAEuacIra2JYJ7a8XjsHeEeXBARtbO/TKfqA54iSTDMeN/t/ZALQElAKNA2MybHZXFMQNZyTtu+1l2AXsG+07fA5Fl43Qhu6KqpVQb5qI085WEcFVC5yfI8bG9+BtIeoje7eTzF3JV48+QWlmb6c9a1lLI7cWlx8KMd42uKvq14HPnXD8LvDr3kpdRgxlr8d8No5gED03FMQufzm7QJFc/VS1dcEGErB0SkWQS6Aej3XCDnQx4BjYJhoCmjJt/ZFWWQHxxv+7WZVEoNEATcHvvgWozvg10RKXJwzaPjdGgACmDT/d0Y5T3TQUT+h7CvkrktwjQ7MS1SFGottjZ7KT488AoNnXJJEWaloZfgdXT9+EUQg7j01it2hDsLU2j7Ciau0hjdIusjTmYUXWTsCJgRGRZgUoceXzVuVRvtuxJF89/Bi/9ef+Mh3CbS1reVuyX23h9MGOAudihyrQYEL4cpqgDSbjWpeLTpRDOoa0gEigPFaBqdkA47W50UmAdVKcndX7NdeHQksgnOiT5m/vj91H6XL8ulJjymhEQ00sa+Wtu+eJ4Nv00x8r25Wkjfu4KSrRLcbkDYce+HA93v6OZ0rjGhpXC2m1kvq2Ojl1n0OaeJJNZXWc0/lmrB8gEkn2wMGHifS4E+z7J3Nf8PL4GUDJpF6D/FrekiSF0CXHgwcEfYNzAu5ePfPFPICDDUGh1PsY/yTG6I6SONaksyo+xpEJoLmQKgKGWfpyN77o4Q9c5MaloW9osbbDX3dpiSVAhWYXah3BtAUm33iUmGh0TIG3ltwd8ZFKShOZa0Luigkg1XLVbRu67Wj+7hhhE8WUwDtptt6HSVB9CEXgFVxYJGDEh6BgidtKdbgUxCsNXlj5mM76JkSFbA3FTnzwbErnO7x9K4lWSp/bfrWbxWhpK+nAdXf11n07VGLmK8PxyfN8yIXiLv/2p/+vPNV/5f1uylo+YPK3b/4a/vOv/4s4fTjYPhfgjkg0SNJoX4vFAnd0BKINQDWXkwXXOldFI6tHtc0OZIxpIyhGT5IUrdBH9z3RYLh0hFgrLa4H6sQa03C2CeQbg2V06UvccZoo/AqgiAhG41b4BHASAml576OXSmhfqLdqDJrdDU1GbENpOOeasehq4CoaIzsKYKOxNUSxtMYGissYqoVjPpvhnGMyGSMurHYEj8kz6mTjCNxTA6CpDa3m3sLqIkbrtCnCZJtAsNHsw36BaKiOAdFQwIcAabU6qrqmyHK8d43HDkpDeVljQxRICdFjEsCnd0c9uMUcY5PWf/7AfWCn/Lor/wOf2/z8A6/1JE+h42LkwdEUfScscldEtLNn5N1rQ2rHlH4TJ2i1HT1mDwxwNqeg0vzE8gMm77gNfaYPHA8FZvROpEMeph0eYUr/1LEcyviBz3N/sfWWVnznAtyFAL6QOGdPnpeMNjYQ29IcyWCXlt2LxZwbN1/j/v4dglseSDQwZibno898jF6vh0ixZLATUdCa2tW89uZrTKZTvLrG00UUhoMBTzzxETKbHzMqauTEjQhVteDm7escHB7SxHyPvLoR4aMfeZayLDtGX4OqJaGbEqIf3rx1ncPD+zhJIRMAVTKT8fSTz8Q6kvtgl/JQ6tpx884Nbt+6Gcslvj+MxUefeZbN4RYeqKqa0cYGRXGHza1NNnpbgODcglu33+TO/XtBu+9QMkaE5579Jnq9fvss0oBoCAXgNYQ
UvvHmm9w7PEAlJLROE0WeFzz37DeRNVE62/ULhFXQfD7n9Tdf59b926iA1dYGIcbw6U99mn45ipNl94saNX4PR/s1qsGDqGvEPS9y5Ib81Ou/lb/X/44zr1OE2xcusbexfeyc8Z6nrr9KUS1OLV9lOa9eewpnj4PRYDrhsVtvYvzZoKoiXL90jaPB6Ng5UeWxW28ynJwdB92L4eeufi9H5fE6clfxrTf/Prvze2fW8cXdT/PK5tPHjverKd91/e/Qc2cbqZ1Y/s7V7+KgPB6LvaznfOf1n2dYPyCeu8n4+5e/lbu9C8fbsZjyv3nzZ7F6OlUysz3+zrXvYpodT55ineNXv/k/PrAf/8vt7z5xgjpNzgW4qyp1DI275Kpo2tC4gTogamRB61aj3Lp1g1defgHFYpKRToQy6/GRJ55BelkHWIlGuPDl8a7may8+z+HBPhBjvQDWK7sXL/LkE8+QZXlrdCRSA61qG74Ab7zBm9dfD7y1D9mN1ENmhI88/hTS64Ex0R9bmnoUjb70ntu3bvL69VeXZm/vHEXR48nHnsGYrA2/Sww3QHI3hMnBIW+89hopaHCYyGpQy0eefBZNHkQiSGYIPj6W5KQvIty/e5c3rr8e2xSA0atjUJY8+9HnQt3RW2cJNo3BqOLqmnv373L99k2afbUeXO3Y3t6FZ0PkzbAcSFFqJNlzUYSjvQPu3boVNpwZg/fg3YKNzW2I43d8H0B6ptr60iutu+w5Qvj7ix3+/S/+AU7Z/9SKAfc5i984Qdv0iv2iQw5Pr0NHgrtsUXtcHzSHHvsPHFQPaGwG7ldZ/OCENqDYrznkzQf3Y/FrCihP0Eud8lNf/acwd8+eZOrPZfitTkLkJJXyl375B5HpA9pgofrVOVqeAIyV8l9/4bci4wfUkUP1XTnaO6GOmfKX/8E/zVlJtrQnVBdyyE4eh5/+5X/ygf3wak6N0X+SnAtwB238u5PHA6TfnQiJjVasrfacZRxO5oCyORyxWFQsFgt82VIbDWI21E6iPAx1rRweTTEijEYbHIwP0dqxtU0LximWL11tMxkQDZVTDo9miAgbwxEHR0d459jeGGJtu5MyMcrp/w3HLErtHLO5I7eWfn/AeDzGOWVzcxRWNS1f0y61G05acE6YLTw2yyl7ffb291Ffc+nSLjbPGoNzGOeONwpx5y4wn3vGk5qtzS2m8znT6RRr4erl3ZhUg2b8G4+ZBM8SaPrprGY6rdnZ2eHe3XvU9YLBoM/W9m7E9EhvrYxmes7j2YLxXLl4YZubN25S+5pLFy+wvXOhuToZcNu357jhuR2m82NUDSJUWrwjxihMupYz2Ywz6ncYnM/OLg9pfn77bYAHDL9Q+/zBdZxRvtLiHbeh8g9Rx5nnhYWWZ1/zgPIP1Y+3KOcii4GICWnkkvGMFgSSIbI1kLUTgACD0RaTShhPhdHGFUy2wXgKtUtc9/KzbVwJNXD5Nu9xNKmZzpSt3StM5srhrCazA5IvNdDSFLGyrvOMyQqmc2FeG7Z3rzJdKIczhxQDjMlIO2xPktS36aLi4GhB5SwbWxfZP5yxfzilP9xEToiPsko4VDVM5p68HDHcvMh4UjOewmhzpxnLls5Z7kOiLxY1OM3Z3L2KLbeY+wxsj42ti4lg6YxhOzl1/ftrtWB7XLj0BFmxgdOc3nCT3mB0zBi8bEsJzyMvR4x2r3L56kfpDa6RFZsMty/RG47a6XHJ+NoMZDP5iEiIh9OJSrmWtXzY5Nxo7oqDSCqEL29YUjcqmMTsO0k8GLX0+iN6W1eoa6W/eZGZFBSV0NsYhXRrzbbLGKeluV/YuNTf2mWwfURmhP7WDv3tS7jK0x9utrOCxo/+ZA636G9R7lyml1k2di4y2tpjXtcMN3bxzfK7TdSdHMTj4gOnSpZvUAwusLmzzXDjAqbYRrSi6A+6a4U0twUJFmMciuQFtr/D9qXH6fV62MEuGKXsD0NQ42SIVGLEAh+4aXz0LIL+cJM
L13IuXX6cqc+YmpxhpvTKQeP7frJ3ZaBqUNi+cIXBhcfYvLDJztSRHxzQH9o4ecerO37p2qgrYeW2dfkyJVfZ7m9y4TFhfHSTwXCDMivxLE+0PkbBNDaGDRaCV48PCQA9iotJStaylg+bnAtwV+9ZLIJxaMmHO3HdJwGKgDeKzU1jsMxtSVnmlGVBnudh6dhxB+wUbaQoexS9Prk1FEWPXtnHGUWKYok6adoavVbSZ9WgufeKAWVuyPKCsuxhrKPIilZjZlnL7NbnAMkMRRE2c9m8IC8MTjOyomgMnEsUUyof68isUBYZg15JlmWURVi1lHkJ2KX+JzfN1LbAVSuSCXmZY3sFxmbkJqfXs9gyb+jOrovhMZdMFJMLuS2wtiCzGUVeMOwV5Pb4IlE79aRQDnmWUZssjEcG2utTFj2sDa+qV9+4iLYbutpx7Wr1op7FojrV338ta/lGlnMB7mIMeV4w1Ql0+NwTr+1QAqKeXD3PXr6MqmFjlNHvbbORh+iSyTW7a1BtlgIBFdnsWZ68ukshwtB4nrq0g6s9gzLofu1W/2UgaxhdUbb7Gc9c3kEs9Kzn6SsXmC8qBnmkY3QJgpqyDb3k4dqFiwzzkmK0Sdnv800f+yjO1YzKPiQbQ9d4uNKe3Z0dPvnxko3RBqrwmU99HHVKnuWwsgM2bR5Kqxkf63/88iUuOEO/X/DsE1dZXLqA+IosbilN/U5eOEs9iobdq7s71FKSG8NHrl5kUW1RV1PUdXz92xFoJ9AYhP7Szi6VZFhj+OgzT1DPZ8zrCbh5vI7lCa67ASo0JBhTVVlMZ+R5xinLjbWs5RtazgW4A8EHWrXNwPMwK2kxeDVsbPdRm2HyjF6dkRdZs4lGonfN8SWAQVUosozt3Q0yNeRlyXaW492CXtkLnK1J8WtiSN1kW41ZK4wKRVmysTMI+36KgtHWJmVdY3wdkkNrDAdA263G8SdNMrub5FtbGMnI85y8V1JXc4ybN/QJamLu0bYXSVMtej02bI/chHC6lRXcokK0au7aXUWEHKypTYnayXGqLJzDlCViwM0rnNZLrpHHjJeBswqGbhEWriLr9TGuF5/lIgSL7Gw+gtVHHAKeTeoJsxp6ZY/D6ZTF9AClZmDkmMEpgXoTrkEFYo7Y2WzGYGe70fjXspYPm5yLN19Vqes6eH54jxch2RBXda7VXaLYnDfvHeC9cvnCRWbTGUfjIwZ5wVOkJNEpYHDrbZHqmdVw/c4+OcLVa49x8959XF3x+IUMofWtlwaNlxoTvEwqx/X7R6DKtQs9btzbp3KO3dGgE1RruTONw0dsx97+Hrf3J+S24MKFC1y/dYO6mvPYxZ14fcekGevqjsV4POH63UOGvZLRxia3bt1iMZ3z2KXtxrtoyRCpNJp0kqOjI+4cTtm5eInZbMHh5AA3H/Pkld1Gw04AvQrwqc7xvGLqhF5/wMFkyp17tyms58r2Znt/00lkQktvqULlXDDKSsZsVnHn1h12L24iWR4m204/kotsGptUz2w6Iy9CNqZ2F+xa1vLhknPhLQOJLmj+ajQ8Xbnm2PZ7hHsHY+7sHTGf1hzNFtw7GHM0mYbcm82Vx+kMEWFeOe7sHXLvYMKihnuHM/YOFyyqlkIwku4U28YyMM0WFfcOZxxOKpSMg/GCg/GcytGq5ykUcAJ0Xe7d/v4Bd27dYnx0xGw249bNW9y6eTvuNO26g8ZyjQYefh+Nj7h14waz2YzpZMKdm3fY29+PbfRLY+lVQ6gBbfuiqszmMyaTKbVTxtOKg6M5ldPgraPLY7c8li24T2Zz5lWNq2H/aMzhODyHmJi1aXtyZV2uQ5nPa+YLR+UcR0cTqspjxIaNa0nz774HS+8CzGZTrDVkWdY1crCWtXzY5FyAu4hQ5FnwRjnhi3tiLJVQMtDJLhjOqnqO+Aq/8DgVxAviNVIQSzjQiMHja2W+qKCqcJWjqlyMF+MbMkcSQDdTRqxMlFo99bzCuRonjto5vAO
8j4RHu/O1YXQanIugtqiYHU1Q5/B1zfxoymI6x6sJfSHSSzE5R8LJVJer6sgx56CKWzjqOuzsTCnyGiD3HjFpnF2j8brah0iKKtR1jauVzGbYztTQUCGd3Y3NSkjDpqWi6AGCdx6LJc+KENANj+Lx0YNFV8sDmc3p5T1EHd5DVpRktiAkFfc0XlSdtoT8scp0Og3p9VI4Ao0bX9bYvpYPoZwbWkZ9DcT43Mv6enPN0vWdCWC0sYntK0V/yLxasLl7kY1BGXdTHtc40zGvis1LtnZ2MerJi4KtzS28V7IsazXdE6I7tu2AIs/Z2d3BCmR5zvb2NnXtYxwZXS20VD70UOn3+2xtbVGUBWVZsrW1xdHRIcZ2MhZ5jdNxGzcn1dPr99nY2KAoCsqix2g0YjyfYaxdBl9didbY0EVhIiiKAmsNRVlSLhZYWy+BLyv3bcchAK41ht5gEJOQCNYYsrgBatU3fXkFFv4uyxJve+AWCJBnKfyDXyrX9bEXEebzefCYynOcr+O5Y49sLWv50Mi5Aff5YtFww+lY93dXGsOcBpgryxKXCUVvQKmKGy/IsrwLByxv7UzHFJvnZGXQFI0VssLiPYjNmgkg+WV3AaXrO53ZgiIrEJM2RuV4HMa6xnLa9bBBWzc+jTRFYW3giTOLyQxZkWELG7epL9Mn3Z3rCeisNRRFFiYDI2SZxfosOvukOuJIdBJzdOswouR5RlEUFHkYG2NoEmd3/dyPUzQpemMM2RyXFGJj0urokx8mmWTgXu6HjxNMih1kRSDFkWnyrzazUTNBzWYzQCnLHovJUWJ+UE15ateq+1o+fHIuwN0YQ1n2mE0mzTGN/9o0oyfH/DY2w+YlBZ4cIRdDZg3GZnEDUQvwXQ00ALaSmZwsRV0UixULNmicGj047Aluf0tsszH4LCfDk2GwZKgEjVURnE8RIVqQabNFhbqMEcRarDEYI+TGhL5EIPOkaJYQralLbbJYcmPJTIYxlswYCixWTcjEFw3UDUsRsoXHvK4BdHNrEGvRvIC8BpthjIIJWUlbDT1OTNqujBJmG2PBGrwRfPxtrIU4Fs0EoXpMtZbEgWUSGmzjxjVrkSZsQhrHYOSez+d47+j1+iFOfBpllfDD287GtJbzJKfsdXlL5U+r42HrOe1Vetjyj6INb0HOBbirJh/opF92NOUVrZnOZ+8dt268zuG928y85/XFXRaLOc6Blj3q6RDyYoXmaScK5xx37t/iYP8u4mpe1yMODoPbXo8Rzu9gTN642nXLQwAR7z3T8R6zw3sYHDcYMzma4LxFnDCbXyTLhjFOekfz73iqeA+T8ZjZ4X0Wboo3FUfjPRb1lDv3bjEa5ORZ2sQTwrW2njxhPPaPDrh3dIf6ZoWxGXuH96gWFa9fV7LHrrKdbzfXphVSuzkqjMXd27fZmy84uH+X2WzOYl4xFY8/7PPUE4+zvb29tALpTpbJ4+num68wfePrGGM42jvCuZo3D3P2+j2efeZpNjY2AsDHCaoZVe9R57n+5te4f3gE3jE+mgGO+cGbGOd5+pmnubR7IbwTEdjruqbX6wUu3tMkQvHqMaoxBMH5kr6dMLRnRyHEAHNg/4RzLp4vTjjXLX/Iyd/wCTRZxM+SjGBeOin4pD5EG1I7xqfcq37IOg6A1zi+AKveQhtuxXZ065BYhzxkHS8BN084twDamIAniwWuA+UJbagfrg21ZhxUm3genIUJzgm4e++ZzmYkjxlFcd7hXB0TcbSgCK3SN59PMX7GlT6EMLoLTC6YMsNmwmI6xQ2GIQqitpNGAvf5fE4+3ueSzrG5wmyPy1nQ4gd1zmI6JR9leOfwEZxX094t5jMyN+Nq32MRqGdsDUwAc4FqPsP3ehGQTn761WKKVkcUzMimC+rFmJ3MoHlBfbjHZLxNv9cL5TXEoG9zr4bQx7lfcKln0Ok+BsPF3ONyi5nt4+ZbeLcBKN7V4cdrNJouEMmYz6b0Ctisa5jdo0TRQnEK1Mp
iMaGu+3HFsbxDt3mG0zF9u0AmE8TCcBD2Iaj1ZFJTLebUVRn2NMTdtk3aElVm4wl5NWNbKiSHi7sDjDUsfAUKQoj5bq2lqiqqqqLf6+FcaxTWBOjSiStzArX3fso/8+Sf53d+9McxD4oUlbZonHT8Vz7EjdI+vFXZBL7zIcpDAJzToh1+AnjuHbTDAJ99iPKnPb4ecHbk5AfXUQLf/hbqOKkeAZ58h214iH68OP4Y/9Y/+L9xe375IW52TsDdGENe5MynkZZRxbkqJJOIS5bl1HExSYWr2Nm6xOZoB69h+stNMMBVVY3NMibTCT5iqmhbj/ce7z0XL13A7W41FEHynS7LgrquOTo8bOKJd5NkNBqrd2xvbbIxGi6FJWi8SRSmk0lTpusjnupwrubipYuMNkaYZIAVk2IcIyiT8bgxHnrXJhcxJiTb3tzZoOznjfLgfABCYyxFWTCbjvHe45xjPp9RO8dkPKYwSpYVWCNcunSZqqowVpp6nQdrwwav2Wwa1wy2WX2kfibe/+KlS2FVkuK+RL7d+5BsYzKeNO1u/NRVEbFkWcbFCxdD4nARTJZhraV2NVVdk+cF8/kc5z31YkFZlkyn05bP0eS739o4zhesB3lq8Crfe+nnMB90H/zh+92AD5dsFIeU5vQY/qtyLsA9JOuw4Usav+xZnjEcDhtPjmWJHpzBYhavieBvsnjKNxuPGo3ZL9MiIqCbGyT+NsL7Ei+fRDvgv7qRR3R4jEIOoNUaDlc3/XQnCIDRaBRBUdMwBL1cYoajTvnkvdP1fFEGuKFrlobBcBnGLssyIPS5rmsmkwnWWobDIaPREGtDlhsdjjqcdkydpxLG0LReTN4fz+Oa+qh+I7RfFe91yWPJ2LzxiknRKdtNRga8ItpPkeBRCbYPr2FcjDHUzrGYz9nc3aUb1z1RPVN1zOd161FzzrR2gJ+5/X3oF9LGurWs5eHk5vwKB9XxhCOnyQPBXUT+FPCbgVuq+pl4bBf488BHgJeB366q9yUg0B8DfhOB2fudqvpLD7qH9575YhE42JQ8AtOAcwDOFiSWXPmanawBjIzYxAg3GmBzrWhCzCbxh7Fhs0sX3BFiXtB4XzkZ8OPVTQq6Veny0d2t+wmcjwN+6y2U/MBN9D5JPiLhOhv0+U4KQi/SHE9eKYnGIXkhde4nDXceE4x1QwuoNNSJJmN20wJBZDkR+FIfjSVyKNjGbVPSsAdqK8QiCOMd/6d0bKBxsk2cvIgBE9IIzufzSFGlLRqt94w27QsutYgEWukUeS/e7ZPkZ2//an729q9+O0XXspaHlofZxPTjwG9YOfb7gb+lqs8Bfyv+DfAbCSzcc8CPAH/8YVthM9todUEkbnbxrVubD9rgcSBN7GrMxKPL1zR+8d1NUs1dYtyYBDDpTHTDW9Y0Zbm+lZ9VSWVO49pXyxrTybdqkgth8LZpDKCx1aHlbRJw9dp8ptMnHxc3y2ObNGu3dC55w6TaE/h23TBXt/MfHwNNJzpPh0jNhFSGStiI1D7b9Ex0+Rl02qyqzGazkDYxriJ0+TZxMlDahrfZnk6RH+fdfrfXspb3SR4I7qr6s8BqksPvB34ifv4J4Ac6x39Sg/w8sC0i1x50D0EaX+rOfRtuYilc7wkbk5YAYgUwu3+fuBGpc026vvFk6ZRdThrSAR048V7eh12YKeZ49ziRTvAr7W14emkNvydNDg3YxsnMex/itTtFmgnMH2uXb/5myf1qdWJqVg90xuKE88fGsnO8m4TjZKNmOxF0qZXVOkQE5xzT6TTmw5U4wS+XWb5dqNsrqD85SXQs+66/22tZy/slb5dzv6Kq1+PnG8CV+PlxgtNSktfjseusiIj8CEEDYvfCLlWM576svy1nMFrledvPS+pbXNpr91DDz8bIsoHi8IqIb2ifB4HYKr2SpMMaN9cF6rqzkzKpr7Ta70nBt1I+eYnl48n22ojMQfttaSaacetc1gBol5snMU+ghmWl1jRUiGd
5nLt2gpMmydN28Hb+amtstP9lz6NUpvssghdOAPbWThHHoAH15HMfx8FHG4pq8HJ6a/JI3224+Fbvv5a1PBJ5x7FldGkt/pbK/ZiqfpuqflvyfW4zLy17OTyMptjICrAnDZxuM4WOhpeufziq5ayVQdsGWf5JN22QVRrwb4ukFUl7OAF8gq3EuS+Dtmk47WWgbvvUHSvVNoKi74xNM4/E9qeVxWrfT5Ozx05X/h2vb/VzWslMp1OKoljyNjr5fpHqSbRdZ2X1duVRvNvB73Ata3nv5e2C+820JI2/b8Xjb7Ds8flEPHZ2I0TIYw7VhvtWf8IXOFy/SpN0caTRlBtNtz3enIvXrmqI3etWQeRhpLn2TFBsNf+l+WWpZctUU7futlxLzaxq7KdOQP44VdMO0dl9bjXkh5kAl3/STZYBPPxedafsXjudTsnzPMbHaZ/18R/t/G6pKk3tPdbCM+WRvttrWcv7JW8X3H8a+OH4+YeBv9o5/kMS5DuB/c4S91TxqtR11QBj0iyDATREYUxgHzQz12hqzWcviLYxU1KSDFIgsuieh1+dGI5rt0mOTyJna+2BzogxI9NEooo0fQFUY5gUAR+Mfl5DHlUXDYyQ8n9qA+RJ03bH2pvGJ9Z5Qj+6/VFVvBOc821mpAaFu2PqaHuTtGLXjDe049KOUWyDSmdcOqDsJfw0UTrbCVh9XEFEzXs2nZJnWQjhoOA0hCLoTvBhTAKVFcZc8D6k4nPq8OoivfSWqJlH+m6vZS3vlzyMK+SfBb4PuCgirwN/EPhR4C+IyO8GXgF+e7z8rxFcxV4kuIv9rodqhSp17RpAT7SJavL7TtpuCyLSoTtUPQaPgyZLUXLXC9UnjT0QGKv8+knc9/EmtiuCpEEfu76JZZNcCQGftveZaAyMCUREAvCTEk60YL3avq7TX6JzuqsPiLy/Hm9ndyLwzuFdjfN1M9bOuZbuMcfn+tV+dvt+sm6Q7Aqu+VvEtJ4+8fk2z7Wpt73HYjHHGIsxBudck0draf2TJkHV1jDsJW5mezg25T15t4+J8ljvTbaLvbdXfC0fWjmqR7w+eeLRhR9Q1d9xyqlfd8K1Cvzeh7pzR0SEIs+ZJyDxMK/nzOs9jE3ADtKEQwydM8ZErxKC1oeiBG424FRraAtgutRaQOKu0mUqp6sdkrROfAxjmzztpPF2keSzbVysx4CGjUfqFyF+uphQr7SwYzquixo39fiYn9SIwfs0UYR2xD9jP8KMIHEMVLRNYu1D+5Jbv3eOuq5RPL6umEwO8d4xmUwoc4NNyTiWmKxEd7Wa7xLlpSDGrkwmKTSy4rWOz8aiGr1eYux4E8HeNJAtcVUAdV0hYsjznPlsHiezOOHF8UFBXIdSiu12XsFVGBNy37YT5KkT9rv+bq9KJjX/5if+A37bU3/xnVa1lg+Z/Mzt7+Nf+oU/wWH9cHacc7FDFQhfWI1fXgLw5XmByW0D3mm3o9PgueGdYzGfsqjmCBaldfezVhj0B1ibIWIwaSOOghgBPM7VzOeThvIJG5kMSAC8MsswMcmE94q14OsWcF0KZaBK7RYhcmOzrT4AdK/I8WlTDaEfIQyCA2MRE1z96sUMr64xaAph8jBiKPNgUDTxnBXQCIY2ard1BPBA5wRffxP94LM8pyxC2sBKLEU2D1Ejs4wyKzASEmtU9YKFq/GicUdo8KTPbNbYRKwk33pFTadPQF07pvM5lS4Ql3YMhzAKIfJniZEcSxZmUAMqPro7KpPJhLr2GBPGs5lbVFEso80tenG37rKjpYQ+eGVyuI9q2qHqVyb08yFWHIU8eBv52A6Z2MHx8urYrvfODl8gcD/boZbjX/G+nzKqjx6qrWe1Yae+HzfDnSECe9k2leTHTuW+YsvtP7COd9wGYhvMCW3Qiq36wW2YS8mXs08ykePt6OuUz9X/8MznMZEBP539kxzKxrFzRj2/pf5vuKy3TijZSianBfk55fq3dPW7JN57ZrNZDI0bvrg
2C9vjxUp0MDmuPVZVxfMvfpXXXns5ZnFTNGrIuc34nu/5XoaDXghDC8c8LhaLOb/4C7/AdDaNLWkpm62tbb7tV34bvbLXgolISxuF21HVFV/+yld58/prDRWRKOwsy/je7/peRqNRs6kocdCoNppvXVf88i8/z81bNyJ33HLK/V6f7/i272AwGKz4+xPrM1RVxcsvPM+rr7/aUDuhvJBZy7d967czGm2hquRZzXy+wNqQiLsoS4w1uLrma196gTdv3Qw7RTvjdPHCBT73Lb+SIi8CtdVyX7Ev8XpmfO2LX+L23m1E08QXJuzd3V2+7Vu/gyILWZpoHImCvWA2m5NlGa++9gpvvvEanrp18ayVsj/iO7/reyiKfqPFJ6fRZqUVQxS4yNsLp28ge7+k1ow/8uV/h//ni/9as6AIasdKOw3sf2qLoydHx+rIZjWX/s5tzHwZTLr1aC7c+q7L1MPs2DX961N2vnD/5GBeyxez/6ktxk8Nj7XPzh2X/+dbmIU/vQ+AWuHOd15ksXk85GFxuODCL97FLPyp5SG24enjbcjmNRd//g7ZtCYSnG+9DUcLLvzC3VPHMv2uBjk3vu0q1fD4BGEnjif/zmuY6vQBrXsZr33nk7j+cUpFUP6zv/976d+fHjve7c/EDRjXDx/Q51yAuzGGslfGwGFd90GlpeC1oS4SuCiAEQ4OjzAilHmBx7OoKoqsIK7gG2KhXaYnQDBMplP29vcxIvTKgul8jldPURSNgVSSI6ISlheJK46HxpMx9/buY4xS5BnzeYVzymgwaAFQOh6YsZxJII8wmc65v3eAGMFmlvlsjvPK5Ys2xN2J7U799pqAy6CxH/fu72GtYI2wWFTUXtnd2SXLCtDWh73xIIr/vCoemMxmHI3HFGXOYl5R1TUqcPVa2KvjVduxAMSHlY4SynuFyWzCeDJh0OtxNJ5QuZqylzMYjQATnmecPCQOxmw+Q9VT5AX13DFfeHq9kulsjvM1VuDixrApl1w1u3tyNdFySzaQtAnsHbycj1yEG7Nr3Jg9YP+TIYSjPUk87I+3YXZG+ZxTzQ5H9Qa3Dy8/2CwhnBzul1D3wXgrhCU+SyynTiJjD/ePdk+/R5IFx40usQ37R9swPV7kWBvcyafGDu6Pd88eS1jKrLkqtc958ei5058XhMiaZ4z3a9On4OEWUw8t5yKHKiRNsQPAzecErEI394bGRAzDwZDJXNmbeIreBpoN2Z96JosQmrY78y1zsOGNy3ojjhaWsTPsXLjC1FsmdUG/t4XB0m6OSSKxbcE4igp1DQeTmvFcGG1e4mCqHE6Vfn8j8NmY6PHRepSomiWz32ym7I1ras0YjnYYL+BooQw3NuNqonP/TntEHYLH1cp0AXk5oD/cZlbnzJ1la2srUFgacrmGSZJgM0iwHGmVihyXD9m5cAlbjKgpkSxnNIoBxTSNm4cYRsCLQyWGFPCeRa30ty5x6eqTbFx4DFNuMxxusTHcjOPvgNBmQZktpty6eztQcCrMZwvmTtnY2mU2dxweTskHI0abO4h2DM2ankMX2Dv5beNYG3mQerqWM+U8TIxnteGdtu889O9dknOhuXvvWcwXkf5IHhRpaQ0d9xkgamyRhukNR4x2rjCvleH2VTLvGNcZw14Z+PLEkZzg3SICg80LFKM5WWYYbl+huD9Fa6U32G548mXvmOW2K0I52mG48xj9POfCpce5uVfjVRluXYZOHV0wB5r8qh7PYLTJ1s5VdrY22d7eZH8i1OoZDnebO3W6QWpM6k852mZ06XEuXd5FFCrZwrsZo42tjr6f+t+6WHaqYrh1mcvlFju7A6Q/YziZUE3ukBe9zk1Tbe0kHMrHnb6mx6UrVxj2Sw5dxlbWZyOvKYv+yuoJZvMZb7xxg4PxERujTXp5ji1KNrZ36I82WdQKNmO0sUlZlEvjJ43BtxmeJeUuvSkx/xNrWcuHTc4FuLe0zLR14Yv/guuHOba0Dst6j7U5WVlirNI
rSnAV/bKkX5bBmEf8oq+gcgKZosjplznWGIq8oFcU1JlicmkBjGVQ6oqgFJmhX/TIi4y8V1KWBeqU3GYtAK8YATstQVWwRUHeG5D3ypCcetCDxZy8SCnuWKIlEsWSANPajDLPg9HSC71iQe2EouiFxcYJ/Vcff0w4l2c5vUIpix5l4VgsDCbvxdVHm2ZP5OQFnwdMJlTVDJ8LN994nTzPuXB1N0Tf7Ny7qmv29w8RsRhjuXn7Fo9dvYpYS9Hrh9SJ/QHOh7jtxppmLJsYPCvSTORpvFCyc7M2Xcta3ls5F+AOHY0y/ZbwEzfC03VpTNd7VcTDY7vbIJbRsEepOZm5SBEjKsaLW41vpfygV3Lt0gWMCDYTrl26QFU7imw5jO+q/3hTBzAYFFze3cQaIUO5dmkHVyt50WrtplMufW5izivsbI6o1FFYS5lbrl7c4Wh8GPO3hp/WX78F9aSBbwz7XN7doMgyMhF2tkqmUxd3pbLU5jQxdnlEVWXYz3HGoa6mzBWXQ+0iYalK617YrhiWkpCgbG0MMbkhAx67sEXta6wF6Wx6WlQV0+kcJMOpcv/+XRTHhe1NNkZ9zNyQiXJ5ZxOlpjDB70c77T9JmrSBcVCFkGhkPn8QMbyWtXzjybkAd++Ct0zS2BN4hHRpHY21I4m99d6zu7URNO8sB1MwKjO8S+v0aIg94b7qPb3McXlngGAwxnNho09V15QxNZ/3vsmwBCdpwJ5BWXB1dyO4Dhq4dmGLulZMx8KyWm4pfDCOfj/jAkNElMwa8mzE1rCgfEBArsarpsi4uDVq/PnzYY9RoeQ2TI4i2XI5WmolabyFcZTWIc7RMx4pMxbSa55LN0nI6v0hMFC7OxvM6jmFhccfu0RV1zjn4y5dz8I5prMpRdHnzt2b/PIX/iF3797m05/5NGXZY3d7i3IyQYzhqSceo3YLqrqO9yL6/i/Hk+9OOK0nUnSb9Y7791cDP65lLd/4ci7AXYzQK0oW0ymJcl8G+nZXaKtBQ0bwn75+5x7OK5d2dqjUsbd3QJHlPPHYZY6RICsgO1nU3LyzjxXh6uVLXL97n8p5nrl8mRSS67SMShCcZ8bjOa/duUcmwqULu9y4fQfnlCs7WyQfemOWt853akBUuLM35s37ewzynN3tTW7evkNdVXzk2qWlMidGrwQOJjNu3DtiOOjRy3Pu7Y2pXMUTl/qYE/odtOA2tLFXZVI5JgvPqF8yWSyYLZT50ZTLly9GSsQjHRt8qjGFH6id542bt5m5isu7l7h95y6T2ZhLF3bZNcJsPsd7z2i0yd79fb70xS+wmE353Dd/hk996rMYhJu3bnH3YI+LFy7x2us3qXXBtcuX2G5onUC7HN8h3DLuXj02evG8/trr7O3ts5a1fNjkfIC7hE0oEt0gQ0KHIG28FIBk2AxgqyhY4WiyYF7XjDY2qXzN/aMJw14/ei56OHW7rjBfKHf2xxRGuHzxEvcOptROuXahRnEtmHUMo6nNCWhn1YJ7+zPK3HDhgnD3YIzzcHFnh+R33nVMWnbJDNrm0XTBvYMpZjPH1cLewZSqrvnIY8eNust1hMGZzCruHszoD4YsHBxM5ygesUUwKXZomXTvEBLAk2LBVM6A6VEUPQ6mMK0XqNhOpqmoFad1UMcfPuzG9ezv7UOZ4z3cvbdH5SquXLpMVdfMmLO1uYmglD3LJ77pWYoi59q1a9isZDGbMxlPmS5qvAr37t3HlIIty6YNx6myti1Ap0/B5/1/+rmfZVE9fN7JtazlG0XOBbiHNHvzSMXE4FnHwCgBXPtZI/NijCXPMqzJcSpkWYm1eVs28rBd7T/5nlubUxZ9cgNGMvKsh7FgJG9pGXvy5JA4b8kKsl6fzIRyZT4Iu0SpY7YhXQailTq8Kr1ej+FgSJHnZHnGaDTiaDwOWYdO4P6X2qBKWRSUZUlRFGH3adlDtYrlVzX+1u20O6ZlETYp5VmBMTWCxWbL+VtbM+7JY2GMoSjLxui
aRaOyq2s2tnea8ASDfp/nnnuOxk3Uh00JRVEwFMUaQ5bnZIUhy2wz4WvnfisNWAJ/r8pkOuW//W//v6uLt7Ws5UMh5wLcRQRjbYjglxJEqwb3thgj5mQJ4JvnBZnJKYse6ir6gw1KKvAer3GzUCqxovFmWcFgtBnAPS/pDQd4b7BZ1jjRdb1SuvU0Hjdlj9HGJpk6sswy3NigcjVV5ZpNP42db6UdqZ6iLOkNhmR5CBcw3NhkUdeBGOqsFk7zFMnLkl6/T5bllHlBbzDA17Njk2SaFJtIknEFpEDZH+CdQ4whyzKMtVjJo7fRCauH7pOINpIsyyjLPsaEzVee4BnT7w2aUqoGWd1DEGm4vCjpZRnEkAdFUQA2hpXQBsSXMz3FbiqELWdhN/BLL36Nr3zpS+dul+pa1vJeyLkAdwBszPKzQl8kOuAkP3ONERSNtaixgdoRQB3Tg3ugz6AnfLETSAat2YNY1ChqgBQbBjoJm9u2aFzup2MQtFVrbBvQK7PgEz8d87OugNJydMkwixmTpW2fiM0wNuvEqunQH6taa2hEuF4EERvKx+BlJ2FbWjEErTl8NtY2sVjaFH8BhBvjK60htutpk56TNaYJ95DGZjgYtW6piV5LYWk6njcK2CzHOAFjKXo9iqJAdDkjV3fsO0+19d334fPXXniB2XhyvPNrWcuHQM4FuGtctifvlnQsnl36e8mQpoq1lsxYNAKbEaUa3yerJkv+2IkW6XreiITYKwkAE3iKmGbLf7s7NFBGYo47TovE5NZRPTdiMNaQ5VnT/tX+dj8nrV5igC0jpulnFyhXNeel8tHImCaeQFeFfnQpmATqzrslLThFsQggGmwaTVuMNHWEibbp+dKEkzJCiZhm8izyMuw+XaKXup5CLFMq1mAkI0NCog5jsJmN/TjuBtk1sKdllqpSLea88PwLJ0+Ea1nLh0DOBbgHWQbvllyPtEYD8mkDj4D3VIsKh0N9xayaMZ+PmR/tIwYWVUVPG24nlg/3Ug1RHesELN5TVQ6N8dfreo5zirWKNQRXPkC9i47mEuvxOFdRuwrjHVUdEluIV+ra45xfAkWlE3bAO7z3VHUd4tl7h0eoveIUVD2Vq/HOQZbF9q+CVTDYOq1BK5w6jFbgPF49dYzfbkzW9BtVcmuRFEZAQ0C0yi2ovVLHcMNePcb79rO2wA/gfVqFKKoOjf7wVVUx1xRBUgNFtajwhcGKje1w7XMNTYptqMKYSKCGxCh1tWBuDUXtyE2GpsxNqSXNyibUYwRef/N17ty9885eyW9UeSvz3aOYGx0nx3Y5Jd7LMfHx2tW2nHTsrcpbKX9aP86IO/NQ5bvGpEco5wLcRQSbtXHVgZAxST1EQ1vaueN96/c+r2veeO3r3Lv+Gs7BUWGwxpBTIZIzmY7Z2BihK4k7Ash6JpMjrr/yZQ6PJgjC4c2M+aLCGuHmeMDVnQF5Ht0ZfUdrFMAELbWuK+7feoPD6zdRdUxuZcydA69UWca13T5FcTFqrmGe0Wb1EH5cNef617/E67fu4GrHqFciWQa+4qXxTQb201y+fBk5MX5tiCp57/rL3Ltxk6MbOUaExaIG8cxuZxTPPsvOheBS2XL2Go2UHkGYzSe88uLXGM9rjITEHrV3WHX42V0+9tFvClE6peuGSPPbe09dL/CzQ+7u3yErCnw1p3LC6y8fcX8w4JPf9HEym3dCQbTp9byH2lXMDvc5ONjHO8/CezILd+b77OU5Tz35NL2dS8uG0+YdCoHLFE/tK7761a9Qu/rU3bTvlwzsmG/e/jy5nBVlivCOTVlOyZ2kBrbj79PEEBIE3j/h3ISQt/tBgCKEgFpnteFBAC2EZIQ3TzhXxToeFP7nAPjKCccdIUXtgwIlGuAuJwfmWvDgsYTgcPcqJzveOWCHs8fTAi+fUl6BggfmUn95/JEQYOwh5VyAu2rIxBQSNvvmi3p0dISNyTT8CZREXdfsbm/jqjnTyYwyE8p+gUhGrzc
EaibTQ6zNIvWRDHghZdxiPuXS9ia7A4uxBWItvgrh24qiZDabkBeWLuNrYnzsFNlxNp9xeWvERh7T+VmLUYeiGJOjOGazCcZYJFIcPr3N8YvhnefJq7sc7d/l1dt32b52kcd2RxTFJllZotTMZuPgK8+yG6aqMJvO2SgNT1wYoUjYwCM+eLuYsAt0Pp8Hft2FqJne+6Bhz2eowGIxY7OfMz3YZ1JVXLm0RWkCnZKVfeq6YrEILoVN9id1cQETaJvxeMLuzjbbvkKNoDoK/uYimDyjni9Y2BkqJk7U3YlGmc9nXN7dQVzFm7dvsb27w3YM2ZyXJdYI09ksxm5v48WHidPHCWbO+OiAr7/00iN/Tx+FPD14hb/03T/I1fLGgy8+Cyye5iSnpYcv/8kH3/6BdXzkEdTxsLbu0+p4p+UBnnnIet5pP95BeYflD37h3+VHv/IHHuJGQc4NuLuqDtF0NfbTmGBMO2WHpqqS5wX93gDFMj46ZNTvs7mzhSa+NvLf6qXxyRYDeDAYRoMhTz35NOprpvMFk2lFP7cMR32Mych7MWBW4t1FgsaePHmAMi8oLlzC72qr1apvjJWJ83bOYwwhsmGoFO9CUomsyHnsiae4fzDmhZde44knn+AjTz7ReOyUvT6qUNdAaAKKx7uQ3CPPMy5ducbO7oVmfAINFAy6IdFG0pQV9Q7nPLWGSI6I0Ct6XLp8lRdefJUb9+7wTZ/4GFu9YQBOMeR5gavrTvYoUAlLKkVZzOeICE8+/Ux7n2ggT0btoijD5CPS7CBtbCEGeoMBvcGAOzfv8Mqrr7N77QqXrz4ejNWSkWVFY+A2nRVEeETBvpBnPV5+7Uvc39vn4b/5750sfMHz449zt7rwfjdlLR8gcVjuLXYffGFHzgW4mwjk80kIqqyEWCx5niMSNTMA7XrNtJ4qGxsbqHpGgyGj0WbwNokGweSKKDGjEhImkLRYL12Bc47Z4j7W5GRZRr8/xFqLzQvoaMldSiKEH24NgeH8skdM+GyXygeDZ2iTc3VjWvDes7O9zUeevMalixfZ2tltVhq28YJpE36shmSo6wrneqDLFBQED5RkEK3rmqLIg5tjllEWZeNl4wrHxYu7mBy2NzcZ9gaRq0+G3TR5RXfK2IaqqnDWcvHChaYvTRtorClNP7rtazxlpPViGm1t8uTjj3Nha5vhcBjvazEm73hOLqtBKbb9fHzE8y+8gPetV9N5ESs1N2ZX+T1/909i5WEJ57WsBRRhv9qiMCFOUuXNA1m1cwHuXYAkGtYQ34CnS8t/usAeTawetra22NwYNXk1l66LtAExenkiir229Ia10SfbV1hbNsd99I5pKAAIG5oSuCXA5/iKq+tXDqzEZUm+7+05gKefepInr12ErKCrda56iYSxWAW3lndukPfUcW6BNPSxnbg++82fjP7q7SaucG2EaWnB2ijM53Pm83nINkXcIerb56OEhCspLWJ3slzqm2n93R974nEuX72MMdEXNU4izrulvoR+t/494Ll3/z7Xr18/d77tBscPPvFX+ceu/I33uylr+YDLQgt+9Mv/Fq9Mzn7Hzwm4h8BhtatRH6FPPdPxXsgzKm00QjTRAfE6Yxotuptbs0HcGNogXBLhJrrtGWMwJgBXbmFzmJFZxdULXDSwHIslk7Ty0PAmaXZKbK1xZUC6XwTS4N8T2+FjNMUGoEO7EtiaugpJMDQ55phoBE6uii2wNauGOAZefUi65D0SvVm8cyHFhg/eM1U1Q+sKt5hTLWbUrmo0cu9rjLHU1aKZIyS6RDZ9ii1xdU1VVZS9PvP5tJk4m8mZ5FEUomcith27+Mmn5yEx+XhTh8er4GrTmcw6LpSJn+mMoYjywte/ynhy/nzbjSjfsfv3+F3P/Ol3xTNiLR8emfgBf/Lrv+edg7uI/CngNwO3VPUz8dgfAv4F4Ha87N9W1b8Wz/0B4HcTzIX/B1X97x98j0DLTOOGkwB6jtn8qAHH8OVNwBq
Mrt0cmcGvOySmpgOwib4IKeKaOzYrgeTjbsWQl4LqgnqxCNo5SatuJ40EsksSwck2hsYQzVJ92vsZ29+aZdPYNt47oV5lsVgEF8DkVtOZpLrtP665SwPuYiIIR3BXFwMnq6OuHXU1xtUzqsWMxXyMyRIPHqkbJ41/vRCepEv3i3aRqqqo1ZMXOdWibvomNlJH2kmkrS60K1bh4zNRBNFgDEVSknLBpjDCPiPtThWWqZh2jgkePwBV5fjqV7+yRI2dJe/Fu33sns3/1rKWd1ceRnP/ceA/BX5y5fh/rKp/tHtARD4F/LPAp4HHgP9BRD6uKXP0GdLw0fHFFxOKiI8behqQhzYJaRtwLBljsVEjdtoY/1SWE+61hs84kahgCD8BdNJOWcEY3wDXSWRH1DMRE+kGUm7UsMIQibSC72rbbcJtolYqEiYSG/8JtgPssd1NO5RuApN2I1I0SPugyWvk6CPEImpRNHiwqAdxGAGrHf+s2FYhjHsK3RB+KYgwdxWV9/T7/aWx12gjiGRT5I8MQo6JPvUmcusOH0a4oc1sLB8mQY+Prp8x9nyyM6RJpPs/DZPl9Tff4ObNszPIr8iP8x6822tZy/shD7Q4qerPAg8bEPv7gT+nqnNVfQl4EfiOBxXy6pkt5qTdCoJrSOTgo+7BasjVGbfna9R0VcCL4o3iJSzrJWbVdqI0mUMDv9HR/pNW7ZF4j3BdQKpwnwBmGInl0/lUlwmTSwL2roFVdCl8Aa0dFYyG7Efi409sSwQ+RGKrPXSvM7Fdsd3BQByvwSNpN4VIAEHRtn6fcosGDxrBgppoaA35TMWDccRcpaaxK6CKw+NEmVVzFtWcXq8kuECGXKxOfBhBBVK4AG1HWQnPqY6AbIi5ZTGoWLzE/K4R2MM4mshYxVWPRiBXE0ISJDOAGJxT/uEXv9TEfn8YeS/e7bWs5f2Sd8K5/6si8kPALwK/T1XvA48DP9+55vV47JiIyI8APwKws7sTvWXGqHORO0/b/5OnjLQER7M6j0mqaXFTO+teTVROLKjafnbJk4SkwbcSfNTb+OteNCT8EMH4QLu06mrQjQMGdumSZCDOVgmUpg1+hd9J2/G7BtTWNx+SY3y7goEUVdGLNoc0tk99yyGFhN5thEpFcc4FrxLxeC+EhB4hQqMRF20fsa0ItaupK0dZ9nGOOJm04yxonCjbZxAmLhfv66L2bZvnqUujE/uqEgzezncIqGjEbna8LHsF7e3v8/LLryyPz9uXR/ZuP3BnylrW8i7J2wX3Pw78e4Tv3b8H/N+B//1bqUBVfwz4MYAnn3xCBaFaVIwPj1CFsjegPxjGoFEtCCTu1kdjZjJWqgs+7E1smMgZa+Jr0UZ5blnaUI9EeiYlr2h2gkYDoNewKrDGQB3dKU2gXfCKaKQ/GkxRxPhIk0SqImV0kuQ5A7VzWBPC2SbNv6oWWNty8mJSBMVOu+hOAOHaYLCMUChhR63FRi+XMCF67/HqmM8rnFecD5vHVE3jNhiuiTuDEWzk/aeLBc45er0e84VbshUkCis8o8S/myXwDh43kX5LnlBxfRI5rKadGv3vfdzQJibr0E7atNGrD8Zpgeeff5H9/f238gqeJo/03RZ5tnloNRkzXz6KNq7lQyxzLTsk8+nytsBdVZvNxCLy/wb+2/jnG8CTnUufiMfOlNo55osKIxnbWxcCAGSW0daljmFPm39RYY6MiWkMpnS0XZu8bCJoJ457SatTBfVNuRRW1jd1ZIHiUMVomy901dceWrfNlMfTmADqDbeetFJJ2rDgvSP4xodz3ju8T9o50UsmAGVI92eXDHInuVt2jzWRHePfTj3eebLyiOKN2wyGm+zsXgFrw8QlgnPuWPnFYkFWOwaDIYkSAsGniBHarnLSuKY/26iU0Rai2mjtDc2loN42E4xzKahZ2C2cfOzjixBu6TXeR3De8crrf7O53zuRR/1uJ3Fq+eMv/Mv8ldd/4B21by1r8Wr46uFzwN8
/87q3Be4ick1Vr8c//yngC/HzTwN/RkT+I4LR6Tng7z2oPvWeLIa37UZEVO/xzQzVRiP0CUkUvLTxSYAWyHy9BKyB2jnuRWFWgbExcka6gRR8YJlDaYG946O/dJ5GI5dOmaSBJncP3+FmulUkA2ZoUgJsF+ghTT1dbk8L7O3fy5MZS+10zlM7h+lMVunHRP/+qqqoa0e/P4ieQ9r8JO0hxN1pVy4njQUNNdOuNoBm96r6Gp+wX1006irJH1/ThJ52tjYB2Qw3bt7gzTev8yjkUb/bTT8RXp48w8uTZ9jO77NT3G9Mz2tZy8PI3JfcmF3FacbDRE17GFfIPwt8H3BRRF4H/iDwfSLyLbH2l4F/EUBVvygifwH4EiEUz+99GG+CJgm1tFECXe04PDrCGNuARdByaUAkgZJXh3PaaPIB0FruVzo+2i3WCdYEnth7xbk6GucEtTFsr9hYNmj4xhgCY5G8NyIPT0jEnELpGtrYJ8amHappPE3U2NsJw0hwG3TeLW3USbRT0OoVa1MJabxZUr9UPa52tPleg4YsImDSNUErnkwmVFXFdDJm/2AfY23sWwzWRnBHrOuaRbVgOBxydHTYeWIajd1xMkl2XNNdRXUnHI3G4mggJW4SU0+IJBmoMhvPp0iaaaJGgt+TmBDKwLsQeTPc0/KlL36RyWT8oNfsmLwX7/ZJ8js/8uP865/4Y9gHFHfYU5ffBh88kM65PIg+WE9wDykCXzr8NL/r5/8012fXHqrIA8FdVX/HCYf/izOu/yPAH3mou0dxzrGoFtHLJACT954sZgRyzjVA1vC8kUKYziZU1Rw0gGSzhBdHkRdYWzQgGSiPNv547Rzz+SwYFl3QFq0NAGKtZdAbklkJG51C5wg0QfgYNkB5JtMZdV3jYsCw5N0hIhRlj7IssRHkTWfHa1otOOeYzaahHYn3NxHQROj1+mRZS1skiifVVdc108kYV1eNxh82foVryl6JGGm5fBNC+uLB2GiR0DbVIYRk166uKcuS2XxGr+yHnbyZxUX/fVUXNoJhcHXgwCezSbQpxPTikRsXI/QGA8qsTxZtD2lS8nGl5SvHdDLBOdcYfSOCgxh6vT5FXiC5NJPeoprz4teeP75KeQh5L97tk6QwCzayQ+wZIRXnlPxF+9t4UT524vlf5f8uv9H/d6eW35ctXrDPRYfTZcm05hPuKww4e7PXgoLn7ceZyODE80+5V7mqZwdAe1We4u+aX0Utx6Gm1Dn/iPufuNRsKTguU+lz3+6ceE5U2fZ79HV6ZhvuyS5fs8+eONEIyifrLzM6MWRkKzflCq/Yp088Z/F8zL3Alp5u89mTbZ63Hz/1/EfdS1zU08cBQkRReQsT+rnYoZryZ4YveuS7rQ1b2mP8b+jyqRGM5nNefOF59g/2mt3rbeYjz7MffZZnnvlYA6ypXFrq7+/v8/nPfz5wvImNSX7zYviWb/4WNi9uNok7AoXiltwe54sFv/yFX+bw4DD4tSdXmHj9R5/5KM888wx5nq/0odVs79y9wy9/8Yth9bDiESNi+ObPfgvDYYh3YxIwJ+3Xe+7evcsXvvRFale31FP0EsqyjM9+5rOMBiOMMdR1mAyKsqQsy+CrLoa9vT2+8sLzrbHSeWyMIW+N4XOf/Ryj/giT2eiRE/8nAh58FtrxpS9/KU5avulHWHVYPvcrfwVFXpDZdndukxGLgsPJPl/88hepfR0nGWlsE149n/3M5xiNNuLkHgLDvXn9Te7evfvI3sX3Qn7ilR/mv7/5j5+ptXpreOlbnuFwd/OEs8rF1+7wxPOvn1p+cmHArW+5jJrjgGZqz7VfvE5xdHbicFdm3PiVV1iMihPP7754j61XzzZi713Z5vXPPHFiRjTrHE//0isM909fdS2uFIw/MzzRaVtUGXxhQnHr7H6MLw+5/c2XTs7K5pWrv3SD3v7szDoOntjk7idPDvYmXrnyD2/Rv3v6ZDm5OODmr7hy6vmLX7nDxpuHp54
HmLo+t+eXzrymK+cC3BMtE5bgkLhm5xzGnMTltlvWF7Xn9u27GKOMhiNqD4vpHASe+UiYMFRN46GSJGjelsl0xmw2RcRy4cIFbt+5Rb2oGA5HjWacNh6l+9IBcBSmi5q7e/sYEba3t7hz9y7OO/IsoyjLhgMP2vRyP7xXFrVj7+iQarGgV+ZYYxmPj1Bgd3u3ifMSPF5ag2kKV1BVC+7v7bGoFvQHA4wI8/kcr8rFixfJioJkb07tsMaASfYEoXaO+/sHqCobGyMWVUU1meBVuXbtGlmeh4k3ct9hMpbGvqoKVV1zNJ4ASq9XMJ5M8c5jjHDl6lXyvIgTePdLFidKH1YL+4dj1DiGgyGHB0c4X2Nzw4ULF+j3+827IiJUzvGlL3+JqqofhfvjeyY3Z1e4OTv9iw6Eb+ZpLvsi3Flc5M7eGW6WJaCnjInC/cMdeJBzUQ9wp4/r/nQL9h5QxwhW7UNJnGa8cPTc2XWcUR6AiT64DcMz6lDYO9x+cB0XHlDH0QPq6J1RHtifPMRYvkU5F2HzvPPUdcWx7+cJik2zuzSCUm8wYm9/zN27h1jTozfYZFF5vFqKotd4oCRwpPFJDyFojclYVJ66hq2ti8znNePxFFXI8xQqNyBYy2e3mX8Qg80Kaqc4FbZ3L3E0nnJ4cISqMByM4vXtfdv2h1WGd8rh0ZiDozFZ3sfmPQ6O5hwcTukPN8iyorn2JHHeM53NmM7nbG5tUZY9fDRybm3tYE0MZ8AyteWj50qgtMKuWGsKrly+Rp6XgKXIS3a2d0EsXqOH5Ip3jG/+gWLJioKtnV28Wqoail6fnd0dDCZs7mqfJskmIpLCPUCe99ncukBVQ1V7er0+l3avkJl29aPAeHzIiy++cPKLcq5F3sLPO6njUbThPNRxHtpwXvrx8ErMudDcVQNVEGypXe+RoCUfz6YTtGiAXr/HaLRBZizb27uQW6rpLIQRjlRIimjYGihjLcaQ9wZwNAGTU/SHFL0BGv25T3d9XJZ+r8dwGGKfj0Yj+r0+CyMURUGWnTTE7SpANSSVHpQ9cptFmkTI86zxK09NXtZO27ZkNmO0sRGMn4MhiyxjNpsxn1eUZa8xdHbHVdNvbWPopExL/f6Qra0LiBzgXE2vN1p6BqsumEmssZTlgLK0jIZbDAdHTHTGoL9BrzdYoaSW+xBWUobBYIOiVzDobzAabbKopgwHm5Rlv0mUkvr/yiuvcP/+/ROfyVrW8mGXcwLu2kRJTB4wXSBoQu6ugqxAr8jZ3Nwgzwr6gyE1HhXDbL7A+9awuXq/BlitIc9yjMmxNgQw84syJIYgbmBKKndHEu8OdAy2gcKxmcW4oC0nvn71/st9Fwb9Pt4r/V4f55V+f4AI5Fm+xPGfNnaD/oC8KBgMgvGr3x9gTEWeF0tzfQx8HKYXDS6G09mUqq7p9XuoV/KiIM9yirygNgZrzVIdp7Ujs5bhoE9R5ojJ4spJKYuCzOZLY9bdzOs10FMihsFgSNkvyfOcXq/EGKUsejE9X3joXsG7mi9Hft+cwCuvZS0fdjkX4O69nqi5QwskXWAIxwGUQb/PZz7zabIsp+z1MDbj8uUrVPMF/f4w7PFvdq4uTxzqPZ/4+McwtsBITp5nbG58G752Df/vYpLok+Aj8d5XLl9me3sbEWE0GvHd3/09VDGVXVdDPklUlV6/z6c+/engeWLDauPq1Ss4V5Nb24xI4ye/Ulev3+e5j30ck1nKoofzcO2ao64XjYtpGK32J8Vdn04neGBnZwdblogxWJNx9dpVLl66SF0v4Nhqavlz6l9ZFjz5+GNIFuLGPP30U9T1AsG3kR7TZNmpIz3zPM956qknERva+8wzT1PXMUG4hAnBecUK3L17h9def3UN7GtZyylyTsDdM5lMyIxB0malzmYiaIEtSfqkwGQScpQ656g9zGYz8LA9GtLYaFmlNcLfd+/eZTpdkOd9dnd32Nu/hzpHURRc2N1ubtatZ0kUjsZj9vf
3Gxpnf38/aLKZRU/YNbraJ439996T5SVZljEeH+K9Z9DvQ2el0S2bftdVxd7+Hl6VjY1NnFMmkwmqnp2dnYZ6ETpgqsrReIzf3mawMWQynXHrdnDFurBzMcTXrytUPVtbW9H1cbkdq8HSalczm83wRhj0h0yn0xAfXvTYRNB9Hulv5zwHB4eo8WyMNhiPx2FMrO34yAf52te/ztHR+NiYrmUtawlyLsA9+HXbjhYnLG+CWdbcW1CE+aLi+RdeILM5jz/2JAtR3njjBuorLu5s4WUUQ8se13jFGO7e3+fWrTsMBhv0hwNeeuUVqtmMCzs7fOTppxrjp8YNVMfYfxFu377NV77yFcqyx7d+67fx1eefZz6fceniRZ55KmYrV45Fj+y6dL72+uvM5wsuXLxMnue88ebr1HXNNz33XKt5nwDuHmW2mPPGm2/ifM3TTz3DZDLh9p1beLVsbm4tcezB5UapvaMwBb3hAEWZLypu3LyL9zUbG8HjZzIeY3IbcrOuPI/V9gDM5wtu37mLWsOli5aXXnmZ2WLOlcu7XLi4216vMXFJR2tXoK4XvH7jOkWRkeUFL738GnW94MrVq+zsXCBNr66u+epXv0QIRLYG9rWs5SQ5N+C+mC/oFXk6ApHySC6IaQNPo6lFkBJj6PcHFFlO2evHHJw71IvxkiHxmEFRFfUea3N6/X7kzU3YPGTaXKHee7xpNw4EbrgFW0UpioKtrS3EhLykRVGG3aaqDbfcXT0st0kx1lCWJRI36gBkNm937rLiLtr0ow1CNugP8erp9wbUtWcwGDKvwXQMuqqKrx2T8ZhqUVFc2Inu8IrNCy5evMp4fEBZ9hiNRtH/X0JqwU4dJz0/7z1ZltHrD3AS3ExtVmC8Jy/6YfJmeYII9FAMHxE2ONDrDxgMe+Rlj8Foi+lsTNHrhZ2+hHG8cfMGN248mnADa1nLN6qcG3A/Ojqi3NluKZC4CSc4acgxzTsYOn00ilqszTBGMOQURR9fzZaB5BRQqqoFVV1RVQ7nHK6qIyVRN5ru0tL/GDiHPKKHh4dA2LJ/f2+P6XTceMqkWOopXeCy5h76FzxrcsqypK7raJRN3j1hNXPcsBz7QYiiqarkRY+iWMTAacFAnTR35xx7e3uICGWvbOkeAUHIs5I8D0BaliW9Xg+nvpkkwziE9nb73/r+Q5HnZP1gkB4MRpisoCj6iNhIC7Vjr7EvPu54FYHBcES/18OYnLw3wBsJRmEjTR++8tWvUlWLtda+lrWcIefCzx2grusWwLWNAJk27RwDts4uyBTYUWMCVsGjLoJRp2xXGsDzjsViTl3PEYLnSm6z4OMeNW5DSscXt/ys8OaTyYSDg4Nm+776kL3JplR5MZxCiJ/eianeOOGEcL42GhJnsxnjo6PAw2swyjZp7pZ7AQIxqgJiYoIRQgRJ7+uwWTnGw1ksFozHR2RZTpNMI/rD15Vj/3CfeTWPYRgyrC2wNkdQQlz24wbidtIUEEGsxdgMMUJelJgsCyEdOqsMiMPSNdTGqGE2yzA2wyAU1pBnWZwkwy7myXTahBtYy1rWcrqcC80dYDqdBs8HH7jpVmPWJW2vmQAS0JICgyUtu2JydMBsOlkB0hO8cFQpsox+GbRYI4Yiy6FXkieKSFfKhA9LdRljyPI85j4VhsMhRZ4FH3XaYMWstEM1tSNMIjaWTxNPkuX8r8sSsjFF3V4AiQHUjAkBtkhgHFwGR6NhKOd9BNS2rjzPEeejFm3iuKbcp6GOgMrL/v9pnEP8eYk5ZLXpTxu6OAF82gyWVjVpoEPZOoUdFlAxSAz7jAi379zm3r0PVriBtazl/ZBzAe6Jlgmr+zZ8bFezhS5fHbVeCO6K3uEXjsViQeUde/fuIOqPAXpTU0frLPOcMgtAbo2hLAvUuxhkKwCaRg03OlI2ZdNPllmKPA/gmNwtjUQf9wBaXeriJBGRjpYaJNkBzpL9vX0W8yrEz7EpKqaJwcFc61WkwW5RliU
xXHroGwn4Df1+n+nMtb75kRpLGjtp0uk+gzgG7URK455orGkeYtuORLu1ZdM5Hw2tWWbj87VgUi7WYBt4/qtfparqtQvkWtbyADkX4A4wHk9JSSBazbUFEYnL+ACSkerAoGrYHx+Cc2xvbeOj+yFRM5WkHdMB9g69Yowhy2zUMGsyayl7wR2xoU68DwGYREh5/VR1KXphr1dS9HqIEYqyCIAoISOoNokqumGI0y7VpClbjLFNVEsxNmTyM8FnHE3TxHJ/Dg/HYeOUlZCSSAyYAq8SRygBOFhryfMS7+eAC//EI2rCCsAIkmUhrC6gEryEPIFqCvYGn3rQeuBEcd4zrypUhMxkMZuSa+4fJu3OLmFNuVFTrtuM3FqMgFOHWEOmJl5nOTo64JVXXzoepmIta1nLMTk34F6ncLVJw9VkQDQxFkrrQth8udVRz2cYFK+CqxWbCaNBH/Ee1RpVh2ADHdDd0KNKVVXM54vg2WEzFosKEzfxqAa/68xqB5zCZJEANsQkVzAZveGI3OTUC0eZl6jz4GA2n9Hv9Zrdq8vGyADu3jlUQ/jhPF+wmM/DdcYwXyyYzyf0yh6oOQaoKVLkoq7wWnN0NGE+nVLPF/jaMx1PmZc9ijw86uRx0/L/YUJ1rsbVNaIwm06pFwtcVVPXNbPxjMKE3bvNvYVEnDfPpa4qfFVRqzJxjtlkgtaeajZnfDRGByX9fhnnR4njmZ634qo51WKGxzGdz6nrGd6F8vPpjFdfeZn9vb136xV8D+Uh7QVnXfZOTA7vi7nixG2ADy4WTEgnX3r2Yvht3e6BZd/p2J1U/i0rKw9X4NyA+3g8CREEO4k1qqrC2sQhB023C5CzyZh7d26yvTVE1TOb75OpYXu7wGCZTA9ZVJuUNkONQZxruGnvPHfu3Obe/Zsx1C4cHt1uNEvvJxweXcBu7YTnYRLHHyRlZ5pPJ2R+xkYpIJ4bb36dIvMUoxybwc2bN3j8scc6sdjTCiL8L8SDHzOZHOG9YzzeR71nZ5TjvXLv1hv0shCZ0aw81ESpLOYV04P7qC5482APxYdt/7nnjddeRKsnuHL1KtZmzWqjSWlXh1R/k/ER0/s38Frz2sF1jM1w0Sj9ysv3ePzxJ7l06fJS2a4NwXvPdHpEVR2wmMdolS5EE9+7N2Z8cIfLly9x5erVuAqJ1E2a7JxnNjlk7+4bIYtWs7oy3BsbDu5mfOGXvxDCGn/A5WrvBj/45F86M547Fjg5jHp4d3YIuaBOkcnGgDfk8RPjuVvrePLp1yin87MbmgMnR/sN8hDRZ29uX+HL8kkcx8Nw5Kbim5/8PNu7e6dXUAJneb1uc/o4JRkSImCeNr88Roz6eIZscHoUTQWuACdFZ07SP6M8hOeZn3EeQOBv3PjH+PLBJx9wYZBzAe4iMJ3Mgw+5CUt151wwslohJbwxMUuGaqApar/g4qVddnY3sSYkxTbRM0MInPd0vmBWuSb/aNL8RaDXK/nYRz+K8466dlgTfLqtseR5jsdzNDkIsWd82DHrJHDLRix4WFRznnjqKRbzOa6uI62ijRG01yuZV/O4CpFIw/ioyYeNW9YYHrt6Da+OxXwKSMe/XcmtYXJ0xNwsIn0U2u+cYzGbMhgMeerJx1GUarGA2Ebvw+rHWGE2CwkNnPPMFxXVosJOZxwdjSnzkiK3XL60HfzOTYj6WNcuGKxNaE/YCWxWnp2QokRmWcbO1lZnNdCuDoL3TcZkPMHaeeTaUyjl8FysFa5cvoxzwYZQV4uYJNsynUy5v3+/vecHWD4yfJn/4HP/Jn1zdgzxM7XES8DlM4pqSMh9YoKKTMk+UT+c/ndWGx4nZJI9Q6baY5+tE9thrGfn4/cpqN5+Gx6mEw/StnceQR3bD2jLg8pvPaB8lB/+ez/+wQJ3CL7i3scYJARXvP5oFPybI6VxfANQn52dnXBOW6OhNkbZZIgLElLJBeAREQb9ERsbm62bY2PkIwTLivczxkCK6y4xWTW
J+R+SUjOpdzFhN6i6hudOV6bgYo2Pd/Q6GfTDyqN2Dmul9TwhGBmN2MbY2HgGEeLxFHmfne1d0m7Nuq6D26JqBN02Gqb3GhNnLMjznCLP6ff79Ioe/X4f3dwMdRsTg3n5hhIjpvVLY9KN05Piupe9ks3NjWBUleAl5GPKPGNtmBBDoeiC2bqoighlWbKxudlQWF7D5CAYPv/5zzOff7Ditp8m9xa7/JU3foBCzk4ysZa1LInAK+OnH/rycwLuwmw+p3Y1efQWESFq4hY13fgh2mitKWNTAsLG8NrwwclzpfW6URPSv6W01UVRRO2w1SADAIbySdNX8dEQGwDWxDaKmDbDi+v602d4HyaJNrE0MflIpJ5i34MvvsPasFs1cdANzy8t155AU2LHsjwnL8rW0Gls60vuW+8Vrx6jGmPnmxikLKPI88arRzVmsZLuBoiYCzbGhG83MWnHRtJSadbYJs2eqkeNdiYY26kj7j+QdpNY8EpKu4CDN49zNVXteOnlV2hs6R9wefHwY/yev/cnWecPXctblbkvH/racwLuUC0WVFVFv+wB0WWu8ZRpgTeAACRNUhOIR9BrEidHcAi0gW3KA6TdkOFzkO6qoHXD9M39NDqTN8BEcDvsulaGq1Jbk3ON79S7jExeFSQkhm64a8KE0RiWATWdXaa0dYXwBhrTZBB3XJnOSiV4+njCiqMZH4gGaxrtPlUQUvR1xyRSSXH/Qbi07UeaNFejbhojqLY7W9uJsv0chjQ9uzjwEq5JqwVjMu7dv831G2+2k9oHXDyWiRu+381Yyze4PHCHqog8KSJ/W0S+JCJfFJF/PR7fFZG/KSIvxN878biIyH8iIi+KyOdF5Fc+TENc3MGZaJEG2LT1J1+KCtkxrPpO2ZCBaHkH5ck7Kln6u7uLdTXaYfe604D6pDrPOp4Mu12aQzrxc5qfzt+rdaXVS3LHbOrpAC0dwG3LdiZMluteLd/62nf7u0zLrIaGWH1OifI6a7y6E+vyJGH42otfYz6f86jdPN6rd3sta3k/5GHCD9TA71PVTwHfCfxeEfkU8PuBv6WqzwF/K/4N8BsJdvzngB8B/vjDNMQ7T7VwdP1RkkfJEtidAtTLAJhS2rXUxqqcNAGknKRwMsAnWQ0/cKwu6PzoMUgKfHj3+s41nXOdAmGXaPOjoMT8pMcfYaA3OoDZ3DMOqay0GVrNue1lNHjGGmSl/lOex0nPptntKt0xOfkZtgMRtPnJZMLXX/r68oT16OQ9ebfXspb3Qx4I7qp6XVV/KX4+BL5MsJN/P/AT8bKfAH4gfv5+4Cc1yM8D2yJy7UH3qWvHYlHHb7+gxJ8Ozh0DApZpj7T8b3/amCYBeNpwBOnYaoiCLsifBGDpPqvXrV7v1FPHyJDBP+bkSUo1eNZ4QpmQZihx7hHofdhEJKEzIRqmKr6ul10sIzgaBTmp3bEdMVxPo7UHe2j47SXcInDf4TeagP70lcxxSZROsl0sT3YKS0k70gQWupzKGa7fvM79+/ce9Pq8LXmv3u21rOX9kLfEuYvIR4BfAfxd4IqqJg/UGwRPTwhfjtc6xV6Px86M0epc2MTjVYOxMiFAJ7bMSZpbl49flS4ABSOfNh45q+dTXV0AS14bJ1EPq/cOIE3j4aIdygWS2fS4HKNbwsFTr+22tXbB9XK5H1FDX9LMNewDSccSmHcmpODBkmwUy209iXY5aaI9TdJq5qR+LHV8ZXUA8Morr1BVD3CVewTybr7ba1nL+yEPDe4iMgL+MvB/VNWDFWpCJVjKHlpE5EcIS9sAFt6zWFSpPlK4LVaAfZUCWOV3u8ePg4h2KIro+og0watUlutc/b16v2OUhPctSIk51pamXKrChza1/ydic+h309YVCVovVJWjPyhSYyJYn1Iune+sdJYAf2X6WZ3wuuPAynVLk1y3kfFD6u+Sop6WD5Ket8RJKVFEkZL52tdPvfejknfz3YaLj66ha1nLW5CHAncRyQkv//9
HVf/rePimiFxT1etxaXorHn8DeLJT/Il4bElU9ceAHwOw1qpCo6EpMdSvEo2My1pjLN/8vRrSdxWEm2PNZxo+ORgys3TXE8udCNAngK5JHimdY219cQWSPEMIrLLosqKs0VPnNO09lfQKlXMMbYzQmOgb1cYw21YaVkImWCvjodZDxTReMK3htLXHLk9mJ02azfhL90nFibQZq9YOEoia5dg0qu24pGOvv/4a+wf77zawv6vvtsizb2liWMtaHpU8jLeMAP8F8GVV/Y86p34a+OH4+YeBv9o5/kPRs+A7gf3OEvdUUVWqmCAjgUDLzrbXnKyxtxtskofFKXc5Vk+7qeh0DvkYd30C3xySYSdvlZPKN3906JBEGT3o+38y99/NzpQOr/Z9qT/a9ZKJtgJNu2XhJOZo9b4nUirp2o4Rt9PhY+1QXdHi9Xj/nHM8//wLIRvUuyTv1bu9lrW8H/Iwmvv3AP874JdF5B/EY/828KPAXxCR3w28Avz2eO6vAb8JeBGYAL/roVqiymw6bXljUbxPYQeWtUc0AJhEX2jocMeatM5AtyTCIcQZp0EVFddsSGo1/3RuNePRCRp7pD+iOt7w3A2ya9y4FJzdlzXTzv9DS0l7hUBPynbUarfhQ9j9ildMjBgZJoikE3fbKg2YqhICmkUDLT7Uo16i3/7xtp1EybTjYZr6W9FOjPZUU9xB3BmvRD21QN+Ojioc7O/z6msv0/Jc74q8N+/2WtbyPsgDwV1Vf47Tv12/7oTrFfi9b7UhSkjY0fztPXVdYYxG2qBLjbTNMW45nG6jgKZrI3BIpEOakhITUrBSp6TWHOeblzj9RHO0hXC04YC7W+tRRelOSCdLqD8mqtA2xV461tImQdv2zuFdmEScdiayuCKJw9CsEMIk4GI5h69DNE5UMNaEoG2JS2meSwPP8S/prACUtKqAFL4hcfot+bQ8UaW+psnKNGPVnla+/tLXGI8PTx2rRyHv1bu9Kk8OXuVHPvpjZPLBD4K2lvdQBP7ia7+NX7r/cNsrzs0OVVBmszZKnXOeg4PDmLxCl7RDJeTVa+mZAICBS1fS9v5AMbefW3BJbnqKNdLQKaG+AKCN1p5CBXfE+5CdKDN2aYWhKXa57wAqyS7QTiBdn+2UGCMZlZU6zUdLdE13cnGuRlUYjyfs7cUAY8QMRpF7TxucRKLdoqGAYFFVTKdTbG3Y27tPlmUNnXPc5uAbj5xmEtQ0HmH5kmLXCJLC3eP8gv9/e2cWY1mWneVv7X2GO8aQlWNl5VBzV7W75W5ZlluyeEZ+MYgXLIQtYdEIhADBS2PzgGReQNAIJEBqBJKNDH4xEi2EhWyEBFK3DW7L2G2qa8isHCuzMiJjuBF3OMPem4e9z7nnRkRmRlI5REaevxQVN2/cc+/e55y79tpr/etf1aI6d+SrOdj5ghDoqs3r40MyPzpwPMcB57u3+dvvfZuOPEI47AviYfW8L4r0waNqkg8zj8PUNb8Q50Pgg9F7L6Jx90bHOBu8dU2/v0wcxyDCvCSd2tMrioL1+2sUeRY6EHnPt0rWDYdL9Ho9kiQJf6vCGv5XlmVsba5hyrL235wF38xZGC4PSbq90HxjD2smhGXKsuD++jpFUdRxa/9ZfhFYWlqi2+2idVQvNCx0KBKm2ZTtjQ1sSHM2jyfMI0kSotiPo1KxtAiDwZA8z9nZ2cVZz2MXReC5gxNFv9dDRRoVQlBZltHtdVGxsDwcoHWMtTAZ7+Lq+AmA+MYdztLt9v0ioDXzDPBiHL4sSya7EwxeCdLTKn3TFAf0hn2SKA6J57CoOp8wt2FB2tkdsbW1+dS47UcB92an+XdX/yKxPITiqfCqj90H/H0EPKTbYBan3Dh3EaP3S+1qY7h49wZpfgjJ33NwgFqvv/RrwPjhb0EU3uNBlmaDh0rhTjo9bp85j1H7B5FmGZfuXkcd0CN54SOWT3Bv9fSBezRxjot3btDNpvv/2EQCnOXB5+I+8HQ3miB
el+iwOELGXWplSG8UgkeoVPAq/Va/MtKVl3392nW2tu+jlKCDJ+2s91DfevMdlpaWvdyvND3OuZf40ccfMplMfAelOK4La5x1/Nj7X2VwZuB3D3s8SBVi+lme8dEnVxjtjNBa0UlTSlPWceSvfuWrLC0NiaKEWgCsigC5sDMQxSfXrjGZjAFDmiRkuZf3TeKE97/0ZXq9HlEc44CIGOdmxElMFMVMJlM+/PgjirLwapbK4QwYW5KmCe+//xV6UQelFMYYImu9QJsmCIcl7O5O+eTqtbl4W0gyW3xv1fe+9D5RlNR9YvfmIay1ZLOMT69/SmEtSizWFBif6CBOEr703leIogSt/fmsImDV4jCdTrl58xbXb1xjOnu6Xu3zxNXxG/zVHzyiuDXCZwQOMu4CfA784YMPd0Pxi4M+wKIZkD+xD9cXBxjgmZwHGTTwmYd9XKE96IX3eJCluQV8+ODD3UmBk3Iw9WPikB84HiaLD+DeETjxAO/dOuRH7qELJeAlfU/x4HNxDfj0Ee/xBPA46kpHyLhDlmcYG3p4BibHfqbKPK4NEMUxG5ubiNKcOnWK6XTGvXtrKLxxrwxPzVlHGsZJKKxw+84axhreeusttje3MLagk3a8MXV+sUDtTSrO+eKzIufuvXsMhwNWT57g1q1bFEVBp9Oh2+nUISWfcG3ElhuhjmlpuL85oixyLlx4jStXr9Af9Hnj8mV6PS8yZUOrO2stW1tbXh7ZeY93NMmwCCvLXcCyNZmxs7HOu++8TRLE2JqowuJV+MgBO7Oc3fEuly++xnhnxHQ6wQIXL15Ea73vWjSvg3MO4xyT2QwjsNzvcf32Z0xnM1ZOnOCNN84Rh/O5EGZz88XBOcfOzg5ra2uNkNtxhDzWl/RpwB1KeeSo40mcx8aO/pjhSBn3PC8pC+9V1lzo8OWff8/nMVsHdHt9er0hShTD/ipluUVZGLRodJRgLKHhh3g2iPi0oBMQrej3h8RJB5tldHsDdnbGKKvpdHroOKZ0Fk0zVBFQhU1EkXQ66DghSVKGg2XSdB0QoigljlOc8wqOe5OVNS/dQZr0EB2RiKLfX0JHMf3BEp3eEBWnXhDNWqbTKXme+3xBFGFC6KXT61OW0O8NEKWZZFvs6oT+YHkhZl4tVr54q+5ai7GWWegGNRgs4Ywlz3NEFMPG7qeZR9hr6LVSJGmX0ln6gyVEIrSO6fT6DIfLKH8m6y9kU36gej7LC2az2TE27C1aPBscqeXblOWc17yQAJ1T5mpPM/xoHdWGRytNFEehEXTU8DbDe7B4rCMkNAVESSP84hb48vuOa/wAaB3V8egoiupqS0EaPPT9Hu/ij18s4jgKSWTf2SgK83POYY0hz3O63W6dxCTQOaMoBuf8vJVGcP481PmGuVGunJ1KZ8YXjVnyLCPSijTt1Cwi5xxxnDSYQSzMp+l1W+fIsgnGlOg4IU79ohelHdJOdyGXsMCbD5wc54Sd0c6+c9WiRYvHx5Hy3L10r6eHNWlxe1kj8y+/C80vQvxWQGlNHCfEkT5QPmDhPZyv5kw7HbTW9Y/Ddw6qjmmGCOr3aIw7iiKGgwHdbhdRiiiOiG1MHCeNeSx6ohXLxwYuvtaabrdHr5OQpinLyyv0+32iOMZz9l3YafQXFpUq6RrHvjlImnYojSOOYzqdTr1QVAvIwkLTYA+JEnq9HjqJUUooynJxgdsbHlu8SFT0T+csUQRpt8eps6+yMxrR6Q6I4pSFXdc+A+4oy4LRzuiYh2RatHg2ODLG3QthGfK8QOx8y16x73yc1s5LdEJ4oZt2+NI776LjiDTtcU7gjcuXcdb61nF7jLM/1L+3tZazZ87y6rlXESXEScL5V1+tvVEdkocLaPT89AuE5Y1LF3nz8mVEaXQUsbp6whtt63xbPhT6gDCGE3BKEAXvvfU67737LkoLURxz7tXzOOd57NaUWAS9YNS93IF1JXEa8c6bl4njLlEUY5zh3Lkz2LLwNQLWx6O
aXrMLOQ0f8xa6nT5f+9rXiSJNHHW4dOl1nPMVw7Hen0TFzZUuq7h5Eqe8/+UfR8URnThlub+ENYZSHHHoEPXAalynuL+xxu7uo7J8LVq0OAyOjHEH/8XPZjMI5TjVlp3Ag5aqUrWRoCyKgo3NDUQploarOGuYTSeA0O/2FrzEg7zB7Y1Ntre3ES2cPH2ayXiMKwxRFDHo9bB6bgx9iMWPq+p35Izj9vWbbG1tIjri/PnXuH//Ps5Zut0ew3ffDXFuu+C8L4Q1jOHDDz5gmhuGwyGrJ1b57NZNtBJOnjpF7+KlOvFa8d2tqVrZOcq84PrVT5lNZ5x45RUcit3dHaJYc/r0aTonOnUCtYn55shRlpZ7654ysLq8ynQ6IcumaK04c+ZM4O/Pk9PzUMrcuJuyZOPO51hxDJaXGE8m2LJEdxLOnjxNpPYndufX3nL1yhXKPTLGLVq0+P/DkTLuzlkm02nwLithqUZFf7BGTa3yPM+5evUqSikuXnqDnfEOt2/fJokiTp0+9ZAP87Z2NNnlynXPYUo6Xe7evUOe5wz6Ay5evhg+t9neb7/3PZrscnd9jaTT5fT589zbWCfPC1ZWVngHb7isUyikTqpK4HlbaymdZWs2ZZIVuEjRM0usbW36nclg4MMjUsXv5wIDPl9gKU3J/c1NdsbbpEtdlEpY395COcvJkyfDSazGPq8YJcTbnQNjStbvb5CXBcPhMpubG2xsrhNFEWfPnZurOFKxfvwoKkMvIuRFzqc3rmCV8Fb6NjdvXmO8u8vqqVO8evrMQkim5vyH8zke73Lz1s02HNOixRPCkTLuIGSzDF9tqrHGhBi4zO1Sbai8YUIJ3V4XLSqEDxSRUqH4iZqRUnne4cCGeJfBORP429qHQEyJEr2P296IEc29VkBHEXGS0ElTYp2gtRBHmjgKcrxiAI1zqmlafWxfKUQier0hTuUkaZc4Tun2l7CmJIoTfIfvAmxU1wCIcyipeq4KaSelMH3StOMZQirCFBlOK5CqOMqfY6k1YSzWOpRYSjNjOtnAOgViyYqMWTalq4cg0cIiW52L/bkIIe7G6CQhSTqkaUqez4iSFIcOC4KXHFiQwhTh9mc3GY1GX+z2adGiRY0jZtx9xacLHiX4UE2onq/DM1VMfN4Wb24ydWCseKYMC63z5slQqbXXtdb0ej3AEemItJOCc6gQ+jDK1EnJOmvr320h5DMv36f2aJu2sOLXh4HgmouUNRSzCdl4RlcLtszIZmPEGRTz6rsqCezq95f6XGTZzEsmW6HMZuxsr+9r5DEX62rkM5iPQ4mgIs/WMcagxC+SlQZPNedm/H0x7KWIdYrSMSIaUzqKwtTefZU8VnuTqc5y/fo1rDWt596ixRPCkTLuzsEsy+pwTGV8vNco9WuahnWRweHpf0qFEAhzSVsVVgiR+fE26KakaQp49Uml/PG1YFbwlBfHGQxbNU7bNKCN14X/qq5SrmJ417K7VfLAMt4dMRrt0k01+WzKztYmaVLp6njGkKoECkJTjuqzrLWMJxNmswznLKYssDYnjvtopXGhtL+q9J3bz3luA7z3b/AUyzRJSDspcRzX58NZu8CeWZhnuBZZXqBFYYMomK9I1VR9VJ01jZn7z93d2eGz258d6h5p0aLF4XCkjLsIZFlBJRPgY7TQtK6VYfJx7AbfWuaUycqPd9YtvKYSMmwW81jrcE5qo8/CglFJFsylbSvNFN/q1P8ujaUsLToOuwnndxzWBhEwK7Vxp6Es6SmOfrSR1sSxIgrMlEjHxEmM6KZIl9eiKcoSVKPphwix1hRa0IH1k6YddKzreVTetnWGSnhNXLXw+IKmWVaCVhhryIuCsiwX5Hv9NTmAs18tojjPWuqkiFakaUpmc1QUYwkVZBXrqaETdP3GNcaTSeu1t2jxBHGkjDsw15dR+xsyN5OaVYii+lslPFWWBVme18yOJlxwtZvGqSiKWvSrKH2RkLXWC3M1PHLYz5evHhtjKIo
CFak66WutpSjKWjTLNY5fHNOcf14dV5YlRZETRdWuIYzDOkQ5SlPWBVr1X0O4qjSWIrdkWUmq9L73n3/mPLRV5TY7aQ8rviiqCm/VRVjsp3I2z6t1fkcQB/2bsizJsozZdFYXpu29noTXXb16tQ3JtGjxhHE0jbsxeClYbxbnNmXRMMzDEv51xnpjuhganxsyJWphQXDOUeQFWajMLAuvbWOsQYUkpLU+ueu9+0XjszfeHum4ZoLMZX29RIHQWIzq4c3nV1owFlC65stHcexzj2EH41sPWsqirKVzrfN9UY11OBFEa1QU0+kOidOIuhutmy92PkxTBK/czcdhgShIIYdiMNFR+IyDr1eVO6j08q11mDynyDOmgflUXYNqHB4+ir+9vc2dO49Sn2rRosXj4kgZd+cceZZhrEUrwbqm6Nd+z7N6PK9E9cYP8Z2FytDMYl60Y0LcPWibWIsL8rTGQJHniNMoAVEa64zvrypz+mFzDM3fWvuQSpFnCAqtla+etTYY6DC2CsHwi7MIvgAoivKwUARZAeVVxowpwsLg8wZlaUiSZP46EaIoRlcccbEgXi/GlobSlIuLWp1E9fPHGpw1bI82iZKEsvSxex+OcpQmx6jE/2tvSKbxbxXOkykLrMnx8suE0I/zcsIQQlH+HNy6dYssm3LElDBaVHjQZqrdZB15HCnjDkHT3Rh0pOtwi9dyXzSozjlmsxmj0ajWWikKL10wHA5xzrG7s8NSf0CUxDW7pHl8lmWYMmdpeYAWRZ5P6XQTbwjFsLMzIkleqXcQsCh/YK0lzzOUdpw8uYqoiNFokzSNiBNBKcfm5iarq6sYM+93On8Pv3iZsmR52CftRPS7Kc7knD97BhUpZtMpm1tbrA6XwPkcQlEUJEnSWJwcJ1ZWWHEruLJEOcPJ1WVKa9neWicSy3A4WDjPtc6LsUHD3fHa2VdxWsiznCRJ0EohAlvra5S9QX1eF1DlEvCGfak/oHQWAZaXl3HOkeqY3dEOnUTT6ST1e+R5xqefXg3jedJ30tGFlpJE5Q9/kQIyfDO/vRCg4MHys9VrpsBBzZ6K8PeHHV9hA6/rfhAeNYZqHFVd4kEwh3iPafisvcg4nE9gOPg8wry95aPG8KjPOez5/ILITIo9pCN0BI17SV4adGIpCn9XNJtOV+EIcGSzCZPxiOl0h7IwtfE0xhDHMePxiLI8gSljPMd8kcaYFzOSRGOt59YUxaSOHScqYTadUOZLaKm86frwOsZdFBlaQ1EYsJbpNAdncc5gnWb9/hr9fq9mi4jMwxJ+8TLk2Yw01jinKbIxs8nI0xxLGG3laFH00w6Rg8lkwo0b17l06ZL3lAVmkwllPqMocu5NRnUS2eEoZ1MiJaRpAs632XPGYoxfVPKiQFCMx2NMmZNnJZMgAeCsxRYZo801Tpw8Q7f7FqI0vu2fRZzXzyfICozHO+TZmLwsmOxu1jTV7c17bG+usbK8woULF5BYgQhra/dYX1/jZfPav/HK9/lrb/9L9KOEyGMe3AxjFfjJhxwreON30BrigPd5sMFtwvBgvfQ3w8+jUHDwIgO+AcZDag3rxeGgxV+AnzjE52tg9wHv4YAv8ehzEfNwa/kGcOYQY/kCKG3EP/jg7/HB6L1Dvf7oGfe84PO1+6ysDtA6o9ft1/TGmvFhy9rgX7x4gfF4zGi0w9bmJkoUyydfYThcYmlpCcQxnYwXLmzltSolXLpwiTwvgjfvJQKioPKY6JQ8yyhCkrU6VoUGItZaFML5c+cpg6Jl1X1I8Fz5OO6Q51lYdLyX3Syvd84hSjh9+lTosFSE1/kcgXNelTHLphRFznh3zPr6PV45eQKMIYq9CuWZU6fI8xkOwYTWgJWHHMcxk8mEmZqBsTUvPstzxru7lInvRLW0MkQk8N1FIc6R72yztTtiOpny8ZVPUCrmtddeY2Vlid2dHfJiAk6hRFMWBYNhD2MMVTWqr1PwCdo0TZmMJ2TaNxW5cuUTZnV
TjpfHdb/Uu8Gfu/CbJAe6oy1eOAhwIvw8RWQ24TtXv8kHvKDGvSxLvve97xHHmjju0et1GQ59SGAwGDAYDOj3u6RpWlP+VpZP0O1scvPGbVZXVrl86Q0Gg6EPLWivIa6q6qIGc8R3RjJB7mAuQytKoUX5sETD6OxlzVTomX5tyPwH2Dr+LKJD/F3PqZqNZiEiYJ2l00kbn+PfX6n5TsMvZj7Jefny65w4cYJO7EMnPpG5ggm9XG3Ya1bcdqU0VWxerA/rpGmKdYbBcEgapwwYYCUsYGGrOp3O2CmFbHvK1tomWXYH6wzjLOO18xd575132R1vM9kdoUTodrusrKzOKZeuqlFQYVEEnGfglGXJ3Tt3Dzyfxx1Xx6/z69f+wqM99xYtGihdxOezw28PHmncReQC8Gv4TYcDvuOc+2ci8veBv4zvpAjwS865/xKO+bvAL+I3dH/DOfdfDzOY6kue5wV5XgAZW1ubuEYiUkRqzfNOp+NlcntdrHWsra2jRLGxuU1pHL1e1/cvjaK6yrJKUnqD6T1oFRKHVbK1burn+YuNatPm3m2+EEShPJ+K146uXypU0sPKqwg4F/KU4hONiC+eikJeoOL2N0NBdb5B6HZ7vP3221jnm3RXOuuiFcp6uWKsD8lEIRmsK+MOWLGh+jR8rvJa8N7Tr3YewnSaceezz1iSgm45Zj3zDTQ6aYfV1RV2xmM+vnKNH3v/fcQJeT4FbL2jsU4Hlsx8tzNvPq5ZW19ja7R1mNviqeFZ3ttNfP/+N/j+/W88gRm0eDlxOGfoMJ57Cfwd59wfiMgQ+IGI/Hb42z91zv3jhY8VeR/488CXgVeB3xGRd5wP1B5u6Hs8ubnIVBXnLimKkul0xtbW9kKS79r161y/caPmaadpSrfbpdfrNTz/Pr1ej16vR7fbJUkSkjgh0t7LlsDbbqRRfdGPeE2UZim/oBAXWnI3WT2V+9tUk6m4+bZKIM5fH+korCZRXdkarL83y82wUvV/mc9bIaF+VXyBk3PzYwJX3jGvCbAhTGRDmKY6w8YYrBV+9NHHmDLj/LlXWN/YoNvrkJeWpeEKEBElKdu7I27eusmpV04wy6Y1Iwe8HPF8UZpPX8Q3+L569VPKsnjeXvszv7fDO33hgbdo8Sg80rg75+4Ad8LjHRH5ADj/kEN+FvgN51wGfCoin+BTP99/AuM90BgcxD+vipOm0ylbW1sLlMkqBlwtAEmS0Ek79LpdBsMBvX6f4XBIr9ul2+nS63ZJ4pgo0ijNXDtGBGvAGc83b1I+KmNpQ/ZVVXbc+b+KSAiDOCRorTvrsEIwtkEmoKJhBh65C6+vWxCG11eUQwcYV+ntzA3rvDmIb4phjcFZS1HmRIUvXDLG5x3u3rvPxvYWb755maSbgo6JE4dxBUmnS24ckXYorbi3vk4njb1ue5njrKnDU1Vuwzlfe6C07xI1mc64fuP6wjV5Hjhq93aLFk8SjxVzF5HLwNeA38P3Zv/rIvLzwO/jPaBN/JfjdxuH3eLhX5ingv3e/5wGWTWsKMuykdBjD63Ph1ziKCJJU6+1kqb0+z7U0++lLA369Ps9up0uWiviJEFpbyjFVZRLQxUS8YVZeOMsAsoGiieU1oRwhq2d8qqNHlTxdx3kDKo5+ehNpftShUBsbVBDhSoWkVAM5hRFUVDmGUWZU2YzfGttQZxjlpfcuH2TV86eJY0TiqJEYdAqQZRF4sQvNKHRR+FKtkdjbDFGzAxLaFISFjVrG06tCFprbt66y2h760lf8i+EF+nebtHiMDi0cReRAfCbwN9yzo1E5F8Bv4J3D38F+CfAX3qM9/sm8M3w+HHG/IXwIKPv/1GZYeqqyrwoyIvCM6kaHPcq1aqU8jmAOKHf69Hv9xkO+wyGXR/+6aY+NxBHxIH14xp0SEKiV1AQqINVbkBkvizUSVXAqUpVcp4PcDrEuAFlq5BN5TmHxUQk1A1US0aVAPYhHK01W9tbKB3T6y8hCEnofep
pND4b4YwJGxCNU47JbIaYnFQ7n+CVeS2B1tG8khgfBrr92W0qDfijgKd5b8PJ6tknOOIWLeBR99ShjLuIxPib/9edc/8RwDn3eePv/xr4z+Gft4ELjcNfC88twDn3HeA7AFrrwzBuny32SPbOn24UQuF540VmIMsY7e7WHHhRglaKSInvZ5p2fNy/32M47NPr9UmSiDiJSeKEOLT0E/GGXclcfXFegRsUFasYuqNuDl7lAKzzgmjzwiupfwuemVMUJUVZBv2bop5XoSLWNkasrp5GJEbEgoopQ7hHa02sIzTC9sYmZ86eRUQorcGZEmsKYhUFlUwXWiWGRSqElSaTCWv3qjzl88fTvrdF3nSebH6FI0hOa/HCwuGrux6Mw7BlBPg3wAfOuW83nj8XYpYAfxb4YXj8XeDfi8i38Umnt4H/9dhjP+rYl+AE6qSqj5+X1lACs7xgZzyFjY3gJLvaiEdRRCft0O/36PcHLC0vcfrkKc6eOUW3kwbjrn1YxnmqqNYqFCT5Rt5VN6U65GRNnTwVCf65tYgTDBZ0gUoStDUkaZck6aCVZjyZUVpDbzCgkjqweYEtLVY54iQl6fQxpePevVtcvHQBkQQlljz3CV2rHWi/2xABaxwu5BiUVqxtbrI7flBlzrPFs7u3s8ZbtGjxpPBwz10epPRXv0Dkp4H/Cfwx82LdXwJ+DvhxvO24BvyV6gshIr+M38aW+K3ubz3iM3aADx86kOOFk8D68x7EM8JRmOsl59y+Osj23n4qOArX+1nhKMz1wHsbDmHcnwVE5Pedc4cpJD4WeJnm+zLN9SC8bPN/meZ71Of6col6tGjRosVLgta4t2jRosUxxFEx7t953gN4xniZ5vsyzfUgvGzzf5nme6TneiRi7i1atGjR4sniqHjuLVq0aNHiCeK5G3cR+dMi8qGIfCIi33re43kSEJF/KyL3ROSHjedOiMhvi8jH4fdqeF5E5J+H+f+RiHz9+Y388SEiF0Tkv4vI/xWRPxGRvxmeP5bzfRwct3u7va9fsPlWZeLP4wffI+UKvo9JAvwf4P3nOaYnNK8/BXwd+GHjuX8EfCs8/hbwD8PjnwF+C1+R8FPA7z3v8T/mXM8BXw+Ph8BH+D4/x3K+j3Fejt293d7XL9Z9/bw9958EPnHOXXXO5cBv4JX3Xmg45/4HvvtkEz8L/Gp4/KvAn2k8/2vO43eBFRE590wG+gTgnLvjnPuD8HgHqJQVj+V8HwPH7t5u7+sX675+3sb9PHCz8e/jrLJ3xs1L2u8y77h4bM6BLCorHvv5PgIvyzyP/XV+Ue/r523cX0o4v487VjSlvcqKzb8dx/m22I/jeJ1f5Pv6eRv3Q6nsHRN8Xm3Twu974fkX/hwcpKzIMZ7vIfGyzPPYXucX/b5+3sb9fwNvi8jrIpLgW5h99zmP6Wnhu8AvhMe/APynxvM/H7LtPwVsN7Z9Rx4PUlbkmM73MfCy3NvH8jofi/v6eWd08Vnmj/DMgl9+3uN5QnP6D/j2bQU+9vaLwCvAfwM+Bn4HOBFeK8C/CPP/Y+Annvf4H3OuP43fmv4R8Ifh52eO63wf89wcq3u7va9frPu6rVBt0aJFi2OI5x2WadGiRYsWTwGtcW/RokWLY4jWuLdo0aLFMURr3Fu0aNHiGKI17i1atGhxDNEa9xYtWrQ4hmiNe4sWLVocQ7TGvUWLFi2OIf4frzaKXmaNzwgAAAAASUVORK5CYII=", + "text/plain": [ + "<Figure size 432x288 with 2 Axes>" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], "source": [ "image, mask = next(iter(dataloader))\n", "image = reverse_transform(image[0])\n", @@ 
-831,7 +1407,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "zAvaxAbxBUgQ" }, "source": [ @@ -842,8 +1417,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": "dVgF3qfDBUgR" }, "outputs": [], @@ -865,7 +1438,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "rN3cbiWaBUgf" }, "source": [ @@ -876,8 +1448,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", "id": "msmQQUX-BUgh" }, "outputs": [], @@ -909,7 +1479,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "6UXrZLLNBUgq" }, "source": [ @@ -920,11 +1489,222 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "7NUuGcQ0SiJw" + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "7NUuGcQ0SiJw", + "outputId": "949cd2b6-a471-4806-c45d-b407996b355f" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch [ 1/ 200] | d_loss: 0.3133 | g_loss: 42.4292\n", + "Saving model...\n", + "Epoch [ 2/ 200] | d_loss: 0.1131 | g_loss: 18.0722\n", + "Epoch [ 3/ 200] | d_loss: 0.0191 | g_loss: 17.2005\n", + "Epoch [ 4/ 200] | d_loss: 0.0052 | g_loss: 16.1699\n", + "Epoch [ 5/ 200] | d_loss: 0.0053 | g_loss: 15.6566\n", + "Epoch [ 6/ 200] | d_loss: 0.1414 | g_loss: 16.3139\n", + "Epoch [ 7/ 200] | d_loss: 0.0038 | g_loss: 15.1481\n", + "Epoch [ 8/ 200] | d_loss: 0.0036 | g_loss: 12.7986\n", + "Epoch [ 9/ 200] | d_loss: 0.0027 | g_loss: 13.3789\n", + "Epoch [ 10/ 200] | d_loss: 0.0013 | g_loss: 12.2196\n", + "Epoch [ 11/ 200] | d_loss: 0.0005 | g_loss: 13.9303\n", + "Epoch [ 12/ 200] | d_loss: 0.0008 | g_loss: 11.6532\n", + "Epoch [ 13/ 200] | d_loss: 0.0006 | g_loss: 11.4009\n", + "Epoch [ 14/ 200] | d_loss: 0.0002 | g_loss: 11.6138\n", + "Epoch [ 15/ 200] | d_loss: 0.0005 | g_loss: 10.2916\n", + "Epoch [ 16/ 200] | d_loss: 0.0003 | 
g_loss: 10.6686\n", + "Epoch [ 17/ 200] | d_loss: 0.0004 | g_loss: 9.6153\n", + "Epoch [ 18/ 200] | d_loss: 0.0005 | g_loss: 9.5919\n", + "Epoch [ 19/ 200] | d_loss: 0.0003 | g_loss: 10.4633\n", + "Epoch [ 20/ 200] | d_loss: 0.0002 | g_loss: 10.5165\n", + "Epoch [ 21/ 200] | d_loss: 0.0001 | g_loss: 9.3780\n", + "Epoch [ 22/ 200] | d_loss: 0.0011 | g_loss: 9.8256\n", + "Epoch [ 23/ 200] | d_loss: 0.0002 | g_loss: 8.4660\n", + "Epoch [ 24/ 200] | d_loss: 0.0007 | g_loss: 9.1047\n", + "Epoch [ 25/ 200] | d_loss: 0.0005 | g_loss: 8.0756\n", + "Epoch [ 26/ 200] | d_loss: 0.3912 | g_loss: 9.0183\n", + "Epoch [ 27/ 200] | d_loss: 0.0416 | g_loss: 8.3293\n", + "Epoch [ 28/ 200] | d_loss: 0.0015 | g_loss: 9.0377\n", + "Epoch [ 29/ 200] | d_loss: 0.0011 | g_loss: 8.5958\n", + "Epoch [ 30/ 200] | d_loss: 0.0005 | g_loss: 8.7900\n", + "Epoch [ 31/ 200] | d_loss: 0.0003 | g_loss: 7.9501\n", + "Epoch [ 32/ 200] | d_loss: 0.0002 | g_loss: 8.1970\n", + "Epoch [ 33/ 200] | d_loss: 0.0003 | g_loss: 7.2992\n", + "Epoch [ 34/ 200] | d_loss: 0.0007 | g_loss: 7.3520\n", + "Epoch [ 35/ 200] | d_loss: 0.0002 | g_loss: 7.6644\n", + "Epoch [ 36/ 200] | d_loss: 0.0003 | g_loss: 7.7798\n", + "Epoch [ 37/ 200] | d_loss: 0.0002 | g_loss: 7.5124\n", + "Epoch [ 38/ 200] | d_loss: 0.0002 | g_loss: 6.8729\n", + "Epoch [ 39/ 200] | d_loss: 0.0002 | g_loss: 7.0267\n", + "Epoch [ 40/ 200] | d_loss: 0.0001 | g_loss: 7.2345\n", + "Epoch [ 41/ 200] | d_loss: 0.0002 | g_loss: 6.8107\n", + "Epoch [ 42/ 200] | d_loss: 0.0002 | g_loss: 7.1081\n", + "Epoch [ 43/ 200] | d_loss: 0.0003 | g_loss: 7.6882\n", + "Epoch [ 44/ 200] | d_loss: 0.0002 | g_loss: 6.8501\n", + "Epoch [ 45/ 200] | d_loss: 0.0003 | g_loss: 6.5136\n", + "Epoch [ 46/ 200] | d_loss: 0.0003 | g_loss: 6.9682\n", + "Epoch [ 47/ 200] | d_loss: 0.0001 | g_loss: 6.4437\n", + "Epoch [ 48/ 200] | d_loss: 0.0002 | g_loss: 6.1539\n", + "Epoch [ 49/ 200] | d_loss: 0.0001 | g_loss: 7.1213\n", + "Epoch [ 50/ 200] | d_loss: 0.0001 | g_loss: 5.9238\n", + 
"Epoch [ 51/ 200] | d_loss: 0.0004 | g_loss: 7.0581\n", + "Epoch [ 52/ 200] | d_loss: 0.0001 | g_loss: 6.4927\n", + "Epoch [ 53/ 200] | d_loss: 0.0002 | g_loss: 6.0246\n", + "Epoch [ 54/ 200] | d_loss: 0.0002 | g_loss: 6.3500\n", + "Epoch [ 55/ 200] | d_loss: 0.0001 | g_loss: 7.9461\n", + "Epoch [ 56/ 200] | d_loss: 0.0001 | g_loss: 6.5596\n", + "Epoch [ 57/ 200] | d_loss: 0.0002 | g_loss: 6.8856\n", + "Epoch [ 58/ 200] | d_loss: 0.0002 | g_loss: 6.6079\n", + "Epoch [ 59/ 200] | d_loss: 0.0004 | g_loss: 6.5909\n", + "Epoch [ 60/ 200] | d_loss: 0.0001 | g_loss: 6.2006\n", + "Epoch [ 61/ 200] | d_loss: 0.0001 | g_loss: 6.3177\n", + "Epoch [ 62/ 200] | d_loss: 0.0001 | g_loss: 6.0408\n", + "Epoch [ 63/ 200] | d_loss: 0.0001 | g_loss: 5.6913\n", + "Epoch [ 64/ 200] | d_loss: 0.0002 | g_loss: 5.6761\n", + "Epoch [ 65/ 200] | d_loss: 0.0001 | g_loss: 5.9295\n", + "Epoch [ 66/ 200] | d_loss: 0.0001 | g_loss: 6.4304\n", + "Epoch [ 67/ 200] | d_loss: 0.0001 | g_loss: 5.7391\n", + "Epoch [ 68/ 200] | d_loss: 0.0002 | g_loss: 6.3698\n", + "Epoch [ 69/ 200] | d_loss: 0.0001 | g_loss: 5.7311\n", + "Epoch [ 70/ 200] | d_loss: 0.0001 | g_loss: 5.2358\n", + "Epoch [ 71/ 200] | d_loss: 0.0002 | g_loss: 5.6618\n", + "Epoch [ 72/ 200] | d_loss: 0.0001 | g_loss: 6.0555\n", + "Epoch [ 73/ 200] | d_loss: 0.0002 | g_loss: 5.5737\n", + "Epoch [ 74/ 200] | d_loss: 0.0001 | g_loss: 5.6385\n", + "Epoch [ 75/ 200] | d_loss: 0.0001 | g_loss: 5.2095\n", + "Epoch [ 76/ 200] | d_loss: 0.0000 | g_loss: 5.1337\n", + "Epoch [ 77/ 200] | d_loss: 0.0001 | g_loss: 5.7715\n", + "Epoch [ 78/ 200] | d_loss: 0.0001 | g_loss: 5.7186\n", + "Epoch [ 79/ 200] | d_loss: 0.0000 | g_loss: 5.0293\n", + "Epoch [ 80/ 200] | d_loss: 0.0001 | g_loss: 5.2702\n", + "Epoch [ 81/ 200] | d_loss: 0.3727 | g_loss: 5.5885\n", + "Epoch [ 82/ 200] | d_loss: 0.2660 | g_loss: 5.3950\n", + "Epoch [ 83/ 200] | d_loss: 0.1546 | g_loss: 5.0554\n", + "Epoch [ 84/ 200] | d_loss: 0.0098 | g_loss: 5.0735\n", + "Epoch [ 85/ 200] | d_loss: 
0.0126 | g_loss: 5.2378\n", + "Epoch [ 86/ 200] | d_loss: 0.0035 | g_loss: 5.2618\n", + "Epoch [ 87/ 200] | d_loss: 0.0034 | g_loss: 5.2145\n", + "Epoch [ 88/ 200] | d_loss: 0.0024 | g_loss: 5.4119\n", + "Epoch [ 89/ 200] | d_loss: 0.0020 | g_loss: 5.1056\n", + "Epoch [ 90/ 200] | d_loss: 0.0049 | g_loss: 5.2315\n", + "Epoch [ 91/ 200] | d_loss: 0.0012 | g_loss: 5.5100\n", + "Epoch [ 92/ 200] | d_loss: 0.0017 | g_loss: 5.5635\n", + "Epoch [ 93/ 200] | d_loss: 0.0012 | g_loss: 5.8867\n", + "Epoch [ 94/ 200] | d_loss: 0.0017 | g_loss: 4.8199\n", + "Epoch [ 95/ 200] | d_loss: 0.0011 | g_loss: 5.4372\n", + "Epoch [ 96/ 200] | d_loss: 0.0007 | g_loss: 4.8909\n", + "Epoch [ 97/ 200] | d_loss: 0.0011 | g_loss: 5.1560\n", + "Epoch [ 98/ 200] | d_loss: 0.0012 | g_loss: 5.0970\n", + "Epoch [ 99/ 200] | d_loss: 0.0007 | g_loss: 5.0886\n", + "Epoch [ 100/ 200] | d_loss: 0.0011 | g_loss: 4.4425\n", + "Epoch [ 101/ 200] | d_loss: 0.0008 | g_loss: 4.8758\n", + "Saving model...\n", + "Epoch [ 102/ 200] | d_loss: 0.0004 | g_loss: 5.3083\n", + "Epoch [ 103/ 200] | d_loss: 0.0009 | g_loss: 5.3150\n", + "Epoch [ 104/ 200] | d_loss: 0.0007 | g_loss: 4.8089\n", + "Epoch [ 105/ 200] | d_loss: 0.0007 | g_loss: 5.0185\n", + "Epoch [ 106/ 200] | d_loss: 0.0007 | g_loss: 4.9800\n", + "Epoch [ 107/ 200] | d_loss: 0.0003 | g_loss: 5.2003\n", + "Epoch [ 108/ 200] | d_loss: 0.0007 | g_loss: 4.8150\n", + "Epoch [ 109/ 200] | d_loss: 0.0005 | g_loss: 4.5679\n", + "Epoch [ 110/ 200] | d_loss: 0.0092 | g_loss: 4.7240\n", + "Epoch [ 111/ 200] | d_loss: 0.0095 | g_loss: 4.4006\n", + "Epoch [ 112/ 200] | d_loss: 0.0024 | g_loss: 4.5022\n", + "Epoch [ 113/ 200] | d_loss: 0.0022 | g_loss: 4.6022\n", + "Epoch [ 114/ 200] | d_loss: 0.0017 | g_loss: 5.7689\n", + "Epoch [ 115/ 200] | d_loss: 0.0016 | g_loss: 4.5154\n", + "Epoch [ 116/ 200] | d_loss: 0.0008 | g_loss: 5.1195\n", + "Epoch [ 117/ 200] | d_loss: 0.0007 | g_loss: 4.1679\n", + "Epoch [ 118/ 200] | d_loss: 0.0004 | g_loss: 4.6265\n", + "Epoch [ 119/ 
200] | d_loss: 0.0006 | g_loss: 4.6242\n", + "Epoch [ 120/ 200] | d_loss: 0.0018 | g_loss: 4.4529\n", + "Epoch [ 121/ 200] | d_loss: 0.0008 | g_loss: 4.6963\n", + "Epoch [ 122/ 200] | d_loss: 0.0005 | g_loss: 4.3253\n", + "Epoch [ 123/ 200] | d_loss: 0.0009 | g_loss: 4.8627\n", + "Epoch [ 124/ 200] | d_loss: 0.0006 | g_loss: 4.4839\n", + "Epoch [ 125/ 200] | d_loss: 0.0034 | g_loss: 4.6999\n", + "Epoch [ 126/ 200] | d_loss: 0.0010 | g_loss: 4.3671\n", + "Epoch [ 127/ 200] | d_loss: 0.0002 | g_loss: 4.8077\n", + "Epoch [ 128/ 200] | d_loss: 0.0015 | g_loss: 4.1619\n", + "Epoch [ 129/ 200] | d_loss: 0.0003 | g_loss: 4.3635\n", + "Epoch [ 130/ 200] | d_loss: 0.0003 | g_loss: 4.6414\n", + "Epoch [ 131/ 200] | d_loss: 0.0003 | g_loss: 4.5485\n", + "Epoch [ 132/ 200] | d_loss: 0.0351 | g_loss: 4.4199\n", + "Epoch [ 133/ 200] | d_loss: 0.0020 | g_loss: 4.4355\n", + "Epoch [ 134/ 200] | d_loss: 0.0005 | g_loss: 4.3288\n", + "Epoch [ 135/ 200] | d_loss: 0.0006 | g_loss: 4.1773\n", + "Epoch [ 136/ 200] | d_loss: 0.0004 | g_loss: 4.1113\n", + "Epoch [ 137/ 200] | d_loss: 0.0019 | g_loss: 4.4359\n", + "Epoch [ 138/ 200] | d_loss: 0.0019 | g_loss: 4.6646\n", + "Epoch [ 139/ 200] | d_loss: 0.0001 | g_loss: 4.4671\n", + "Epoch [ 140/ 200] | d_loss: 0.0003 | g_loss: 4.7338\n", + "Epoch [ 141/ 200] | d_loss: 0.0001 | g_loss: 4.3889\n", + "Epoch [ 142/ 200] | d_loss: 0.0002 | g_loss: 3.9919\n", + "Epoch [ 143/ 200] | d_loss: 0.0002 | g_loss: 4.1381\n", + "Epoch [ 144/ 200] | d_loss: 0.0001 | g_loss: 3.8697\n", + "Epoch [ 145/ 200] | d_loss: 0.0001 | g_loss: 4.0535\n", + "Epoch [ 146/ 200] | d_loss: 0.0002 | g_loss: 4.1972\n", + "Epoch [ 147/ 200] | d_loss: 0.0001 | g_loss: 4.2595\n", + "Epoch [ 148/ 200] | d_loss: 0.0001 | g_loss: 4.1067\n", + "Epoch [ 149/ 200] | d_loss: 0.0007 | g_loss: 4.0283\n", + "Epoch [ 150/ 200] | d_loss: 0.0002 | g_loss: 4.5137\n", + "Epoch [ 151/ 200] | d_loss: 0.0004 | g_loss: 4.5669\n", + "Epoch [ 152/ 200] | d_loss: 0.0003 | g_loss: 3.9768\n", + "Epoch 
[ 153/ 200] | d_loss: 0.0003 | g_loss: 4.3775\n", + "Epoch [ 154/ 200] | d_loss: 0.0065 | g_loss: 3.8469\n", + "Epoch [ 155/ 200] | d_loss: 0.0002 | g_loss: 3.7896\n", + "Epoch [ 156/ 200] | d_loss: 0.0002 | g_loss: 4.4394\n", + "Epoch [ 157/ 200] | d_loss: 0.0001 | g_loss: 3.7670\n", + "Epoch [ 158/ 200] | d_loss: 0.0002 | g_loss: 3.9571\n", + "Epoch [ 159/ 200] | d_loss: 0.0001 | g_loss: 3.8488\n", + "Epoch [ 160/ 200] | d_loss: 0.0001 | g_loss: 3.8009\n", + "Epoch [ 161/ 200] | d_loss: 0.1925 | g_loss: 3.9065\n", + "Epoch [ 162/ 200] | d_loss: 0.0013 | g_loss: 3.6093\n", + "Epoch [ 163/ 200] | d_loss: 0.0050 | g_loss: 4.0170\n", + "Epoch [ 164/ 200] | d_loss: 0.0005 | g_loss: 4.0730\n", + "Epoch [ 165/ 200] | d_loss: 0.0003 | g_loss: 3.9590\n", + "Epoch [ 166/ 200] | d_loss: 0.0009 | g_loss: 4.6059\n", + "Epoch [ 167/ 200] | d_loss: 0.0004 | g_loss: 3.9176\n", + "Epoch [ 168/ 200] | d_loss: 0.0001 | g_loss: 3.8140\n", + "Epoch [ 169/ 200] | d_loss: 0.0005 | g_loss: 4.1499\n", + "Epoch [ 170/ 200] | d_loss: 0.0008 | g_loss: 4.1592\n", + "Epoch [ 171/ 200] | d_loss: 0.0005 | g_loss: 4.1078\n", + "Epoch [ 172/ 200] | d_loss: 0.0003 | g_loss: 3.6807\n", + "Epoch [ 173/ 200] | d_loss: 0.0002 | g_loss: 3.6856\n", + "Epoch [ 174/ 200] | d_loss: 0.0006 | g_loss: 3.6159\n", + "Epoch [ 175/ 200] | d_loss: 0.0002 | g_loss: 3.5474\n", + "Epoch [ 176/ 200] | d_loss: 0.0002 | g_loss: 3.8628\n", + "Epoch [ 177/ 200] | d_loss: 0.0075 | g_loss: 3.5338\n", + "Epoch [ 178/ 200] | d_loss: 0.0004 | g_loss: 3.7856\n", + "Epoch [ 179/ 200] | d_loss: 0.0004 | g_loss: 3.8777\n", + "Epoch [ 180/ 200] | d_loss: 0.0004 | g_loss: 4.0136\n", + "Epoch [ 181/ 200] | d_loss: 0.0002 | g_loss: 3.5460\n", + "Epoch [ 182/ 200] | d_loss: 0.0011 | g_loss: 3.9362\n", + "Epoch [ 183/ 200] | d_loss: 0.0003 | g_loss: 3.9842\n", + "Epoch [ 184/ 200] | d_loss: 0.0019 | g_loss: 3.5008\n", + "Epoch [ 185/ 200] | d_loss: 0.0017 | g_loss: 3.6214\n", + "Epoch [ 186/ 200] | d_loss: 0.0029 | g_loss: 3.7715\n", + 
"Epoch [ 187/ 200] | d_loss: 0.0013 | g_loss: 3.3345\n", + "Epoch [ 188/ 200] | d_loss: 0.0011 | g_loss: 3.9673\n", + "Epoch [ 189/ 200] | d_loss: 0.0002 | g_loss: 3.9342\n", + "Epoch [ 190/ 200] | d_loss: 0.0002 | g_loss: 4.0399\n", + "Epoch [ 191/ 200] | d_loss: 0.0001 | g_loss: 3.8562\n", + "Epoch [ 192/ 200] | d_loss: 0.0000 | g_loss: 3.9400\n", + "Epoch [ 193/ 200] | d_loss: 0.0001 | g_loss: 4.0293\n", + "Epoch [ 194/ 200] | d_loss: 0.0001 | g_loss: 3.6815\n", + "Epoch [ 195/ 200] | d_loss: 0.0001 | g_loss: 3.7573\n", + "Epoch [ 196/ 200] | d_loss: 0.0001 | g_loss: 3.4911\n", + "Epoch [ 197/ 200] | d_loss: 0.0223 | g_loss: 3.5906\n", + "Epoch [ 198/ 200] | d_loss: 0.4819 | g_loss: 3.3126\n", + "Epoch [ 199/ 200] | d_loss: 0.0015 | g_loss: 3.5958\n", + "Epoch [ 200/ 200] | d_loss: 0.0153 | g_loss: 3.5562\n" + ] + } + ], "source": [ "# ----------\n", "# Training\n", @@ -963,16 +1743,21 @@ "\n", " # GAN loss\n", " # TO DO: Put here your GAN loss\n", + " fake_A=generator(real_B)\n", + " loss_GAN=criterion_GAN(fake_A,real_A)\n", "\n", " # Pixel-wise loss\n", " # TO DO: Put here your pixel loss\n", + " loss_pixel=criterion_pixelwise(real_A,fake_A)\n", "\n", " # Total loss\n", " # TO DO: Put here your total loss\n", + " loss_G=loss_GAN+lambda_pixel*loss_pixel\n", "\n", " loss_G.backward()\n", "\n", " optimizer_G.step()\n", + " \n", "\n", " # ---------------------\n", " # Train Discriminator\n", @@ -1010,7 +1795,6 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", "id": "Ed-ZbuVWBUgu" }, "source": [ @@ -1021,25 +1805,56 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "nOLW054DTLpg" + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } }, - "outputs": [], + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'plt' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + 
"\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32mc:\\Users\\FREDJ\\Desktop\\BE\\mso3_4-be2_cgan\\BE2_GAN_and_cGAN.ipynb Cell 46\u001b[0m in \u001b[0;36m1\n\u001b[1;32m----> <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#X63sZmlsZQ%3D%3D?line=0'>1</a>\u001b[0m fig, ax \u001b[39m=\u001b[39m plt\u001b[39m.\u001b[39msubplots()\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#X63sZmlsZQ%3D%3D?line=1'>2</a>\u001b[0m losses \u001b[39m=\u001b[39m np\u001b[39m.\u001b[39marray(losses)\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#X63sZmlsZQ%3D%3D?line=2'>3</a>\u001b[0m plt\u001b[39m.\u001b[39mplot(losses\u001b[39m.\u001b[39mT[\u001b[39m0\u001b[39m], label\u001b[39m=\u001b[39m\u001b[39m'\u001b[39m\u001b[39mDiscriminator\u001b[39m\u001b[39m'\u001b[39m)\n", + "\u001b[1;31mNameError\u001b[0m: name 'plt' is not defined" + ] + } + ], "source": [ "fig, ax = plt.subplots()\n", "losses = np.array(losses)\n", "plt.plot(losses.T[0], label='Discriminator')\n", "plt.plot(losses.T[1], label='Generator')\n", "plt.title(\"Training Losses\")\n", - "plt.legend()\n" + "plt.legend()\n", + "plt.savefig('my_plot.png')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "from google.colab import drive\n", + "drive.mount('/content/drive')" ] }, { "cell_type": "markdown", "metadata": { - "colab_type": "text", - "id": "S58kJj9HBUgV" + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } }, "source": [ "If the training takes too much time, you can use a pretrained model in the meantime, to evaluate its performance.\n", @@ -1050,8 +1865,10 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", - "id": "i0TC5qK3BUg4" + "collapsed": false, + "pycharm": 
{ + "name": "#%% md\n" + } }, "source": [ "### Evaluate your cGAN" @@ -1061,9 +1878,10 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "fYBRR6NYBUg6" + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } }, "outputs": [], "source": [ @@ -1094,9 +1912,10 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "4V0DwQomBUg9" + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } }, "outputs": [], "source": [ @@ -1110,9 +1929,10 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "gyvmvkIvBUhB" + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } }, "outputs": [], "source": [ @@ -1132,9 +1952,10 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "nqvrxBoGBUhD" + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } }, "outputs": [], "source": [ @@ -1153,8 +1974,10 @@ { "cell_type": "markdown", "metadata": { - "colab_type": "text", - "id": "qkFVjRsOBUhG" + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } }, "source": [ "<font color='red'>**Question 4**</font> \n", @@ -1165,24 +1988,50 @@ "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "k85Cl5_UDWyv" + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } }, - "outputs": [], + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'load_model' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32mc:\\Users\\FREDJ\\Desktop\\BE\\mso3_4-be2_cgan\\BE2_GAN_and_cGAN.ipynb Cell 55\u001b[0m in \u001b[0;36m3\n\u001b[0;32m <a 
href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#Y105sZmlsZQ%3D%3D?line=0'>1</a>\u001b[0m \u001b[39m# TO DO : Your code here to load and evaluate with a few samples\u001b[39;00m\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#Y105sZmlsZQ%3D%3D?line=1'>2</a>\u001b[0m \u001b[39m# a model after 100 epochs\u001b[39;00m\n\u001b[1;32m----> <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#Y105sZmlsZQ%3D%3D?line=2'>3</a>\u001b[0m load_model(epoch\u001b[39m=\u001b[39m\u001b[39m100\u001b[39m)\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#Y105sZmlsZQ%3D%3D?line=4'>5</a>\u001b[0m \u001b[39m# switching mode\u001b[39;00m\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/FREDJ/Desktop/BE/mso3_4-be2_cgan/BE2_GAN_and_cGAN.ipynb#Y105sZmlsZQ%3D%3D?line=5'>6</a>\u001b[0m generator\u001b[39m.\u001b[39meval()\n", + "\u001b[1;31mNameError\u001b[0m: name 'load_model' is not defined" + ] + } + ], "source": [ "# TO DO : Your code here to load and evaluate with a few samples\n", "# a model after 100 epochs\n", - "\n" + "load_model(epoch=100)\n", + "\n", + "# switching mode\n", + "generator.eval()\n", + "\n", + "output = generator(mask.type(Tensor))\n", + "output = output.view(8, 3, 256, 256)\n", + "output = output.cpu().detach()\n", + "for i in range(8):\n", + " image_plot = reverse_transform(image[i])\n", + " output_plot = reverse_transform(output[i])\n", + " mask_plot = reverse_transform(mask[i])\n", + " plot2x3Array(mask_plot,image_plot,output_plot)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { - "colab": {}, - "colab_type": "code", - "id": "_GbMIfRXBUhH" + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } }, "outputs": [], "source": [ @@ -1192,11 +2041,12 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "metadata": { - 
"colab_type": "text", - "id": "rVxSSPJgK60P" + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } }, "source": [ "# How to submit your Work ?\n", @@ -1206,11 +2056,12 @@ } ], "metadata": { + "accelerator": "GPU", "colab": { - "collapsed_sections": [], - "name": "BE2 - GAN and cGAN.ipynb", - "provenance": [] + "provenance": [], + "toc_visible": true }, + "gpuClass": "standard", "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", @@ -1226,9 +2077,1381 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.8" + "version": "3.10.7" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "006ed596437e401484071852e4dcf478": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_97b0e145241b4c22b04d688d6ac61a7b", + "IPY_MODEL_344eb89047d742cd8ae04352b63ed3eb", + "IPY_MODEL_7e5da0e70e93447ea42becf167e8db38" + ], + "layout": "IPY_MODEL_7c9e2a97e6814f6b890c634dfb39f703" + } + }, + "09669e92fbe94221a8eca46cb25d417c": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "107b3fc0d47441c88382b42b8ab1cf93": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + 
"state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "12e84e4422dc47c98445fe35124d3c16": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "184daa5104814b749029e96912e79b2e": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "1c18f2d791b8438fb6e8f7fbe049511b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_40472578b17c406fba0d98b437319240", + "placeholder": "", + "style": "IPY_MODEL_37d8380055914bc0b8c317dbaff31480", + "value": "100%" + } + }, + "1e74bdbe2f654c3ba9e78aee3586a6d5": { + "model_module": 
"@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "27be0e531de348a88546a28648f3735b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_84254cce85184864b00b8bf9aee91ad9", + "IPY_MODEL_56bd9277e70948eeb51ea45c8e82f449", + "IPY_MODEL_df28f49a1af44a47953e77bf76f1777d" + ], + "layout": "IPY_MODEL_d7c2e4a9ac2848adb838443c4a35c51f" + } + }, + "344eb89047d742cd8ae04352b63ed3eb": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_5bd30ef4aeee4753815a01dfe338ddfe", + "max": 28881, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_184daa5104814b749029e96912e79b2e", + "value": 28881 + } + }, + "37d8380055914bc0b8c317dbaff31480": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": 
"DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "3947bf97fc6b4122917d04eb93d19d90": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_e5205e9e71d0442fb0933e5037b1a004", + "max": 4542, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_12e84e4422dc47c98445fe35124d3c16", + "value": 4542 + } + }, + "3d7c01e7ee9c4892a3dbe585bdd982c0": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + 
"max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "3da59f3d2fde43b2a65a640431a06403": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_fd098180435a4eac8e0461e866cf9430", + "placeholder": "", + "style": "IPY_MODEL_b3275b615ed64036b58c4b772b05611a", + "value": " 4542/4542 [00:00<00:00, 272064.90it/s]" + } + }, + "40472578b17c406fba0d98b437319240": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + 
"min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "4410b85dfd1441828241e2b9f574d96e": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "5405bd5d3915494f85ba0ca628cc56ac": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "56bd9277e70948eeb51ea45c8e82f449": { + 
"model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_5405bd5d3915494f85ba0ca628cc56ac", + "max": 1648877, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_1e74bdbe2f654c3ba9e78aee3586a6d5", + "value": 1648877 + } + }, + "5a8f192e615f4e348289d842e67dcb28": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "5bd30ef4aeee4753815a01dfe338ddfe": { + "model_module": 
"@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "5fd5ca3c07134c3eb715aa303219704a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_1c18f2d791b8438fb6e8f7fbe049511b", + "IPY_MODEL_e88326c0629a4dabaebd5bf0c5b955b2", + "IPY_MODEL_9c46b4622fab458f9396af535b5d3fa8" + ], + "layout": "IPY_MODEL_823ae10426bf4af7b528ba70a4c58db1" + } + }, + "77f8f29eca634c519132112d2a588ed8": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + 
"model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_cf5d851292a24c60abbde8b165e9d414", + "placeholder": "", + "style": "IPY_MODEL_107b3fc0d47441c88382b42b8ab1cf93", + "value": "100%" + } + }, + "7c9e2a97e6814f6b890c634dfb39f703": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "7e5da0e70e93447ea42becf167e8db38": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": 
"@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_ac94dcd84d0c42de890ac7431a7ffb1c", + "placeholder": "", + "style": "IPY_MODEL_4410b85dfd1441828241e2b9f574d96e", + "value": " 28881/28881 [00:00<00:00, 1686821.24it/s]" + } + }, + "8187d104f2de4563b59afc04c16035aa": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "823ae10426bf4af7b528ba70a4c58db1": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + 
"object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "84254cce85184864b00b8bf9aee91ad9": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_5a8f192e615f4e348289d842e67dcb28", + "placeholder": "", + "style": "IPY_MODEL_8187d104f2de4563b59afc04c16035aa", + "value": "100%" + } + }, + "88c21629415f4163b52fa6ded0680c35": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "8ee5ea60fccb460ab8e2399131ff6650": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "97b0e145241b4c22b04d688d6ac61a7b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + 
"_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_3d7c01e7ee9c4892a3dbe585bdd982c0", + "placeholder": "", + "style": "IPY_MODEL_88c21629415f4163b52fa6ded0680c35", + "value": "100%" + } + }, + "9c46b4622fab458f9396af535b5d3fa8": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_a70f418062994afaa1d951028793ae39", + "placeholder": "", + "style": "IPY_MODEL_09669e92fbe94221a8eca46cb25d417c", + "value": " 9912422/9912422 [00:00<00:00, 167416499.53it/s]" + } + }, + "9dc13513249d45adb67946a5666b739d": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + 
"justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "a70f418062994afaa1d951028793ae39": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "ac94dcd84d0c42de890ac7431a7ffb1c": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + 
"_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "b10ca89bfa4e4990a9a3f14964755d03": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": 
null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "b3275b615ed64036b58c4b772b05611a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "ba63bfd561f44c26a24829a7e75e2bd8": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "c7337fd6904846a388436064c9981d12": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + 
"margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "cf5d851292a24c60abbde8b165e9d414": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "d7c2e4a9ac2848adb838443c4a35c51f": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": 
null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "df28f49a1af44a47953e77bf76f1777d": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_b10ca89bfa4e4990a9a3f14964755d03", + "placeholder": "", + "style": "IPY_MODEL_ba63bfd561f44c26a24829a7e75e2bd8", + "value": " 1648877/1648877 [00:00<00:00, 55557360.88it/s]" + } + }, + "e5205e9e71d0442fb0933e5037b1a004": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": 
null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "e88326c0629a4dabaebd5bf0c5b955b2": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_c7337fd6904846a388436064c9981d12", + "max": 9912422, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_8ee5ea60fccb460ab8e2399131ff6650", + "value": 9912422 + } + }, + "fd098180435a4eac8e0461e866cf9430": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": 
null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "ff6606057f814c99b556ca2ed99daaea": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_77f8f29eca634c519132112d2a588ed8", + "IPY_MODEL_3947bf97fc6b4122917d04eb93d19d90", + "IPY_MODEL_3da59f3d2fde43b2a65a640431a06403" + ], + "layout": "IPY_MODEL_9dc13513249d45adb67946a5666b739d" + } + } + } } }, "nbformat": 4, - "nbformat_minor": 1 + "nbformat_minor": 0 } diff --git a/BE2_GAN_and_cGAN.py b/BE2_GAN_and_cGAN.py deleted file mode 100644 index 8acf1626ea812c16daff356090f249a65deb74ab..0000000000000000000000000000000000000000 --- a/BE2_GAN_and_cGAN.py +++ /dev/null @@ -1,689 +0,0 @@ -# -*- coding: utf-8 -*- -"""BE2 - GAN and cGAN.ipynb - -<h1 ><big><center>MSO 3.4 - Deep Structured Learning</center></big></h1> - -<h2><big><center> BE 2 - GANs and cGAN </center></big></h2> - -<h5><big><center>Adapted from <i>Projet d'Option</i> of : Mhamed Jabri, Martin 
Chauvin, Ahmed Sahraoui, Zakariae Moustaïne and Taoufik Bouchikhi - - -<p align="center"> -<img height=300px src="https://cdn-images-1.medium.com/max/1080/0*tJRy5Chmk4XymxwN.png"/></p> -<p align="center"></p> - -The aim of this assignment is to discover GANs, understand how they are implemented and then explore one specific architecture of GANs that allows us to perform image to image translation (which corresponds to the picture that you can see above this text ! ) - -Before starting the exploration of the world of GANs, here's what students should do and send back for this assignement : -* In the "tutorial" parts of this assignement that focus on explaining new concepts, you'll find <font color='red'>**questions**</font> that aim to test your understanding of those concepts. -* In some of the code cells, you'll have to complete the code and you'll find a "TO DO" explaining what you should implement. - -# Part1: DC-GAN - -In this part, we aim to learn and understand the basic concepts of **Generative Adversarial Networks** through a DCGAN and generate new celebrities from the learned network after showing it real celebrities. For this purpose, please study the tutorial here: https://pytorch.org/tutorials/beginner/dcgan_faces_tutorial.html - -##Work to do -Now we want to generate handwritten digits using the MNIST dataset. It is available within torvision package (https://pytorch.org/vision/stable/generated/torchvision.datasets.MNIST.html#torchvision.datasets.MNIST) - -Please re-train the DCGAN and display some automatically generated handwritten digits. -""" - -#TO DO: your code here to adapt the code from the tutorial to experiment on MNIST dataset - -"""# Part2: Conditional GAN (cGAN) - -Let's take the example of the set described in the next picture. - - -We have a picture of a map (from Google Maps) and we want to create an image of what the satellite view may look like. 
- -As we are not only trying to generate a random picture but a mapping between a picture to another one, we can't use the standard GAN architecture. We will then use a cGAN. - -A cGAN is a supervised GAN aiming at mapping a label picture to a real one or a real picture to a label one. As you can see in the diagram below, the discriminator will take as input a pair of images and try to predict if the pair was generated or not. The generator will not only generate an image from noise but will also use an image (label or real) to generate another one (real or label). - - -### Generator - -In the cGAN architecture, the generator chosen is a U-Net. - - -A U-Net takes as input an image, and outputs another image. - -It can be divided into 2 subparts : an encoder and a decoder. -* The encoder takes the input image and reduces its dimension to encode the main features into a vector. -* The decoder takes this vector and map the features stored into an image. - -A U-Net architecture is different from a classic encoder-decoder in that every layer of the decoder takes as input the previous decoded output as well as the output vector from the encoder layers of the same level. It allows the decoder to map low frequencies information encoded during the descent as well as high frequencies from the original picture. - - - -The architecture we will implement is the following (the number in the square is the number of filters used). - - -The encoder will take as input a colored picture (3 channels: RGB), it will pass through a series of convolution layers to encode the features of the picture. It will then be decoded by the decoder using transposed convolutional layers. These layers will take as input the previous decoded vector AND the encoded features of the same level. - -Now, let's create or cGAN to generate facades from a template image. For this purpose, we will use the "Facade" dataset available at http://cmp.felk.cvut.cz/~tylecr1/facade/. 
- -Let's first create a few classes describing the layers we will use in the U-Net. -""" - -# Importing all the libraries needed -import matplotlib.pyplot as plt -import imageio -import glob -import random -import os -import numpy as np -import math -import itertools -import time -import datetime -import cv2 -from pathlib import Path -from PIL import Image - -from torch.utils.data import Dataset, DataLoader -import torchvision.transforms as transforms -from torchvision.utils import save_image, make_grid -from torchvision import datasets -from torch.autograd import Variable - -import torch.nn as nn -import torch.nn.functional as F -import torch - -# code adapted from https://github.com/milesial/Pytorch-UNet/blob/master/unet/unet_parts.py - -# Input layer -class inconv(nn.Module): - def __init__(self, in_ch, out_ch): - super(inconv, self).__init__() - self.conv = nn.Sequential( - nn.Conv2d(in_ch, out_ch, kernel_size=4, padding=1, stride=2), - nn.LeakyReLU(negative_slope=0.2, inplace=True) - ) - - def forward(self, x): - x = self.conv(x) - return x - -# Encoder layer -class down(nn.Module): - def __init__(self, in_ch, out_ch): - super(down, self).__init__() - self.conv = nn.Sequential( - nn.Conv2d(in_ch, out_ch, kernel_size=4, padding=1, stride=2), - nn.BatchNorm2d(out_ch), - nn.LeakyReLU(negative_slope=0.2, inplace=True) - ) - - def forward(self, x): - x = self.conv(x) - return x - -# Decoder layer -class up(nn.Module): - def __init__(self, in_ch, out_ch, dropout=False): - super(up, self).__init__() - if dropout : - self.conv = nn.Sequential( - nn.ConvTranspose2d(in_ch, out_ch, kernel_size=4, padding=1, stride=2), - nn.BatchNorm2d(out_ch), - nn.Dropout(0.5, inplace=True), - nn.ReLU(inplace=True) - ) - else: - self.conv = nn.Sequential( - nn.ConvTranspose2d(in_ch, out_ch, kernel_size=4, padding=1, stride=2), - nn.BatchNorm2d(out_ch), - nn.ReLU(inplace=True) - ) - - def forward(self, x1, x2): - x1 = self.conv(x1) - x = torch.cat([x1, x2], dim=1) - return x - -# Output 
layer -class outconv(nn.Module): - def __init__(self, in_ch, out_ch): - super(outconv, self).__init__() - self.conv = nn.Sequential( - nn.ConvTranspose2d(in_ch, out_ch, kernel_size=4, padding=1, stride=2), - nn.Tanh() - ) - - def forward(self, x): - x = self.conv(x) - return x - -"""Now let's create the U-Net using the helper classes defined previously.""" - -class U_Net(nn.Module): - ''' - Ck denotes a Convolution-BatchNorm-ReLU layer with k filters. - CDk denotes a Convolution-BatchNorm-Dropout-ReLU layer with a dropout rate of 50% - Encoder: - C64 - C128 - C256 - C512 - C512 - C512 - C512 - C512 - Decoder: - CD512 - CD1024 - CD1024 - C1024 - C1024 - C512 - C256 - C128 - ''' - def __init__(self, n_channels, n_classes): - super(U_Net, self).__init__() - # Encoder - self.inc = inconv(n_channels, 64) # 64 filters - # TO DO : - # Create the 7 encoder layers called "down1" to "down7" following this sequence - # C64 - C128 - C256 - C512 - C512 - C512 - C512 - C512 - # The first one has already been implemented - - - # Decoder - # TO DO : - # Create the 7 decoder layers called up1 to up7 following this sequence : - # CD512 - CD1024 - CD1024 - C1024 - C1024 - C512 - C256 - C128 - # The last layer has already been defined - - - self.outc = outconv(128, n_classes) # 128 filters - - def forward(self, x): - x1 = self.inc(x) - x2 = self.down1(x1) - x3 = self.down2(x2) - x4 = self.down3(x3) - x5 = self.down4(x4) - x6 = self.down5(x5) - x7 = self.down6(x6) - x8 = self.down7(x7) - # At this stage x8 is our encoded vector, we will now decode it - x = self.up7(x8, x7) - x = self.up6(x, x6) - x = self.up5(x, x5) - x = self.up4(x, x4) - x = self.up3(x, x3) - x = self.up2(x, x2) - x = self.up1(x, x1) - x = self.outc(x) - return x - -# We take images that have 3 channels (RGB) as input and output an image that also have 3 channels (RGB) -generator=U_Net(3,3) -# Check that the architecture is as expected -generator - -"""You should now have a working U-Net. 
- -<font color='red'>**Question 1**</font> -Knowing the input and output images will be 256x256, what will be the dimension of the encoded vector x8 ? - -<font color='red'>**Question 2**</font> -As you can see, U-net has an encoder-decoder architecture with skip connections. Explain why it works better than a traditional encoder-decoder. - -### Discriminator - -In the cGAN architecture, the chosen discriminator is a Patch GAN. It is a convolutional discriminator which enables to produce a map of the input pictures where each pixel represents a patch of size NxN of the input. - - - -The size N is given by the depth of the net. According to this table : - -| Number of layers | N | -| ---- | ---- | -| 1 | 16 | -| 2 | 34 | -| 3 | 70 | -| 4 | 142 | -| 5 | 286 | -| 6 | 574 | - -The number of layers actually means the number of layers with `kernel=(4,4)`, `padding=(1,1)` and `stride=(2,2)`. These layers are followed by 2 layers with `kernel=(4,4)`, `padding=(1,1)` and `stride=(1,1)`. -In our case we are going to create a 70x70 PatchGAN. - -Let's first create a few helping classes. -""" - -class conv_block(nn.Module): - def __init__(self, in_ch, out_ch, use_batchnorm=True, stride=2): - super(conv_block, self).__init__() - if use_batchnorm: - self.conv = nn.Sequential( - nn.Conv2d(in_ch, out_ch, kernel_size=4, padding=1, stride=stride), - nn.BatchNorm2d(out_ch), - nn.LeakyReLU(negative_slope=0.2, inplace=True) - ) - else: - self.conv = nn.Sequential( - nn.Conv2d(in_ch, out_ch, kernel_size=4, padding=1, stride=stride), - nn.LeakyReLU(negative_slope=0.2, inplace=True) - ) - - def forward(self, x): - x = self.conv(x) - return x - - -class out_block(nn.Module): - def __init__(self, in_ch, out_ch): - super(out_block, self).__init__() - self.conv = nn.Sequential( - nn.Conv2d(in_ch, 1, kernel_size=4, padding=1, stride=1), - nn.Sigmoid() - ) - - def forward(self, x): - x = self.conv(x) - return x - -"""Now let's create the Patch GAN discriminator. 
-As we want a 70x70 Patch GAN, the architecture will be as follows : -``` -1. C64 - K4, P1, S2 -2. C128 - K4, P1, S2 -3. C256 - K4, P1, S2 -4. C512 - K4, P1, S1 -5. C1 - K4, P1, S1 (output) -``` -Where Ck denotes a convolution block with k filters, Kk a kernel of size k, Pk is the padding size and Sk the stride applied. -*Note :* For the first layer, we do not use batchnorm. - -<font color='red'>**Question 3**</font> -Knowing the input and output images will be 256x256, what will be the dimension of the encoded vector x8 ?Knowing input images will be 256x256 with 3 channels each, how many parameters are there to learn ? -""" - -class PatchGAN(nn.Module): - def __init__(self, n_channels, n_classes): - super(PatchGAN, self).__init__() - # TODO : - # create the 4 first layers named conv1 to conv4 - self.conv1 = - self.conv2 = - self.conv3 = - self.conv4 = - # output layer - self.out = out_block(512, n_classes) - - def forward(self, x1, x2): - x = torch.cat([x2, x1], dim=1) - x = self.conv1(x) - x = self.conv2(x) - x = self.conv3(x) - x = self.conv4(x) - x = self.out(x) - return x - -# We have 6 input channels as we concatenate 2 images (with 3 channels each) -discriminator = PatchGAN(6,1) -discriminator - -"""You should now have a working discriminator. - -### Loss functions - -As we have seen in the choice of the various architectures for this GAN, the issue is to map both low and high frequencies. -To tackle this problem, this GAN rely on the architecture to map the high frequencies (U-Net + PatchGAN) and the loss function to learn low frequencies features. The global loss function will indeed be made of 2 parts : -* the first part to map hight frequencies, will try to optimize the mean squared error of the GAN. -* the second part to map low frequencies, will minimize the $\mathcal{L}_1$ norm of the generated picture. 
r"""So the loss can be defined as
$$ G^* = arg\ \underset{G}{min}\ \underset{D}{max}\ \mathcal{L}_{cGAN}(G,D) + \lambda \mathcal{L}_1(G)$$
"""

# Loss functions: MSE for the adversarial (LSGAN-style) term, L1 for pixels.
criterion_GAN = torch.nn.MSELoss()
criterion_pixelwise = torch.nn.L1Loss()

# Loss weight of L1 pixel-wise loss between translated image and real image
lambda_pixel = 100

"""### Training and evaluating models"""

# parameters
epoch = 0                 # epoch to start training from
n_epoch = 200             # number of epochs of training
batch_size = 10           # size of the batches
lr = 0.0002               # adam: learning rate
b1 = 0.5                  # adam: decay of first order momentum of gradient
b2 = 0.999                # adam: decay of second order momentum of gradient
decay_epoch = 100         # epoch from which to start lr decay
img_height = 256          # size of image height
img_width = 256           # size of image width
channels = 3              # number of image channels
sample_interval = 500     # interval between sampling of images from generators
checkpoint_interval = -1  # interval between model checkpoints
cuda = True if torch.cuda.is_available() else False  # do you have cuda ?

"""Download the dataset."""

import urllib.request
from tqdm import tqdm
import os
import zipfile


def download_hook(t):
    """Wrap a tqdm instance into an ``urlretrieve`` reporthook.

    Don't forget to close() or __exit__() the tqdm instance once you're done
    with it (easiest using `with` syntax).

    Example
    -------
    >>> with tqdm(...) as t:
    ...     reporthook = download_hook(t)
    ...     urllib.request.urlretrieve(..., reporthook=reporthook)
    """
    last_b = [0]  # number of blocks already reported (mutable closure cell)

    def update_to(b=1, bsize=1, tsize=None):
        """
        b : int, optional
            Number of blocks transferred so far [default: 1].
        bsize : int, optional
            Size of each block (in tqdm units) [default: 1].
        tsize : int, optional
            Total size (in tqdm units). If [default: None] remains unchanged.
        """
        if tsize is not None:
            t.total = tsize
        t.update((b - last_b[0]) * bsize)
        last_b[0] = b

    return update_to


def download(url, save_dir):
    """Download `url` into `save_dir`, showing a tqdm progress bar."""
    filename = url.split('/')[-1]
    with tqdm(unit='B', unit_scale=True, unit_divisor=1024, miniters=1, desc=filename) as t:
        urllib.request.urlretrieve(url, filename=os.path.join(save_dir, filename),
                                   reporthook=download_hook(t), data=None)


if __name__ == '__main__':
    # Base set -> ./facades/train.
    # Fix: extraction and os.rename used to run unconditionally, so re-running
    # the cell crashed once the folders already existed.
    if not os.path.exists("./facades/train"):
        if not os.path.exists("CMP_facade_DB_base.zip"):
            download("http://cmp.felk.cvut.cz/~tylecr1/facade/CMP_facade_DB_base.zip", "./")
        with zipfile.ZipFile("CMP_facade_DB_base.zip", 'r') as zip_ref:
            zip_ref.extractall("./facades")
        os.rename("./facades/base", "./facades/train")

    # Extended set -> ./facades/val
    if not os.path.exists("./facades/val"):
        if not os.path.exists("CMP_facade_DB_extended.zip"):
            download("http://cmp.felk.cvut.cz/~tylecr1/facade/CMP_facade_DB_extended.zip", "./")
        with zipfile.ZipFile("CMP_facade_DB_extended.zip", 'r') as zip_ref:
            zip_ref.extractall("./facades")
        os.rename("./facades/extended", "./facades/val")

"""Configure the dataloader"""


class ImageDataset(Dataset):
    """Paired dataset of facade photos (*.jpg) and annotation masks (*.png)."""

    def __init__(self, root, transforms_=None, mode='train'):
        self.transform = transforms.Compose(transforms_)
        # Fix: the original additionally extend()-ed the 'val' globs again when
        # mode == 'val', which duplicated every validation sample.
        self.files_img = sorted(glob.glob(os.path.join(root, mode) + '/*.jpg'))
        self.files_mask = sorted(glob.glob(os.path.join(root, mode) + '/*.png'))
        # Photos and masks must pair up one-to-one after sorting.
        assert len(self.files_img) == len(self.files_mask)

    def __getitem__(self, index):
        img = Image.open(self.files_img[index % len(self.files_img)])
        mask = Image.open(self.files_mask[index % len(self.files_mask)])
        mask = mask.convert('RGB')  # masks are palettised PNGs; train on RGB
        return self.transform(img), self.transform(mask)

    def __len__(self):
        return len(self.files_img)


# Configure dataloaders
transforms_ = [transforms.Resize((img_height, img_width), Image.BICUBIC),
               transforms.ToTensor()]  # transforms.Normalize((0.5,0.5,0.5), (0.5,0.5,0.5))

dataloader = DataLoader(ImageDataset("facades", transforms_=transforms_),
                        batch_size=16, shuffle=True)

val_dataloader = DataLoader(ImageDataset("facades", transforms_=transforms_, mode='val'),
                            batch_size=8, shuffle=False)

# Tensor type
Tensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor

"""Check the loading works and a few helper functions"""


def plot2x2Array(image, mask):
    """Show an (image, mask) pair side by side."""
    f, axarr = plt.subplots(1, 2)
    axarr[0].imshow(image)
    axarr[1].imshow(mask)
    axarr[0].set_title('Image')
    axarr[1].set_title('Mask')


def reverse_transform(image):
    """CHW float tensor in [0, 1] -> HWC uint8 array suitable for imshow."""
    image = image.numpy().transpose((1, 2, 0))
    image = np.clip(image, 0, 1)
    image = (image * 255).astype(np.uint8)
    return image


def plot2x3Array(image, mask, predict):
    """Show input mask, real photo and generated photo side by side."""
    f, axarr = plt.subplots(1, 3, figsize=(15, 15))
    axarr[0].imshow(image)
    axarr[1].imshow(mask)
    axarr[2].imshow(predict)
    axarr[0].set_title('input')
    axarr[1].set_title('real')
    axarr[2].set_title('fake')


image, mask = next(iter(dataloader))
image = reverse_transform(image[0])
mask = reverse_transform(mask[0])
plot2x2Array(image, mask)

"""Initialize our GAN"""

# Output shape of the image discriminator (PatchGAN) for 256x256 inputs:
# three stride-2 convs (//8) then two stride-1 K4/P1 convs (-1 each) -> 30x30.
patch = (1, img_height // 2 ** 3 - 2, img_width // 2 ** 3 - 2)

if cuda:
    generator = generator.cuda()
    discriminator = discriminator.cuda()
    criterion_GAN.cuda()
    criterion_pixelwise.cuda()

# Optimizers
optimizer_G = torch.optim.Adam(generator.parameters(), lr=lr, betas=(b1, b2))
optimizer_D = torch.optim.Adam(discriminator.parameters(), lr=lr, betas=(b1, b2))

"""Start training"""


def save_model(epoch):
    """Checkpoint both networks (weights + optimizer state + last losses)."""
    torch.save({
        'epoch': epoch,
        'model_state_dict': generator.state_dict(),
        'optimizer_state_dict': optimizer_G.state_dict(),
        'loss': loss_G,
    }, 'generator_' + str(epoch) + '.pth')
    torch.save({
        'epoch': epoch,
        'model_state_dict': discriminator.state_dict(),
        'optimizer_state_dict': optimizer_D.state_dict(),
        'loss': loss_D,
    }, 'discriminator_' + str(epoch) + '.pth')


def weights_init_normal(m):
    """DCGAN-style init: N(0, 0.02) for convs, N(1, 0.02) for batchnorm gains."""
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        torch.nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm2d') != -1:
        torch.nn.init.normal_(m.weight.data, 1.0, 0.02)
        torch.nn.init.constant_(m.bias.data, 0.0)


# ----------
#  Training
# ----------

losses = []
num_epochs = 200

# Initialize weights
generator.apply(weights_init_normal)
discriminator.apply(weights_init_normal)
epoch_D = 0
epoch_G = 0

# train the network
discriminator.train()
generator.train()
print_every = 400

for epoch in range(epoch_G, num_epochs):
    for i, batch in enumerate(dataloader):

        # Model inputs: photo (A) and its annotation mask (B, the condition)
        real_A = Variable(batch[0].type(Tensor))
        real_B = Variable(batch[1].type(Tensor))

        # Adversarial ground truths: one score per PatchGAN output cell
        valid = Variable(Tensor(np.ones((real_B.size(0), *patch))), requires_grad=False)
        fake = Variable(Tensor(np.zeros((real_B.size(0), *patch))), requires_grad=False)

        # ------------------
        #  Train Generators
        # ------------------
        optimizer_G.zero_grad()

        # GAN loss (fix: was an unfilled TODO, so loss_G was never defined):
        # the translated photo must be scored 'valid' by the discriminator.
        fake_A = generator(real_B)
        pred_fake = discriminator(fake_A, real_B)
        loss_GAN = criterion_GAN(pred_fake, valid)

        # Pixel-wise loss: keep the translation close to the real photo (L1)
        loss_pixel = criterion_pixelwise(fake_A, real_A)

        # Total loss: adversarial term + weighted L1 term
        loss_G = loss_GAN + lambda_pixel * loss_pixel

        loss_G.backward()
        optimizer_G.step()

        # ---------------------
        #  Train Discriminator
        # ---------------------
        optimizer_D.zero_grad()

        # Real loss
        pred_real = discriminator(real_A, real_B)
        loss_real = criterion_GAN(pred_real, valid)

        # Fake loss (detach so no gradient flows back into the generator here)
        pred_fake = discriminator(fake_A.detach(), real_B)
        loss_fake = criterion_GAN(pred_fake, fake)

        # Total loss
        loss_D = 0.5 * (loss_real + loss_fake)

        loss_D.backward()
        optimizer_D.step()

        # Print some loss stats
        if i % print_every == 0:
            print('Epoch [{:5d}/{:5d}] | d_loss: {:6.4f} | g_loss: {:6.4f}'.format(
                epoch + 1, num_epochs, loss_D.item(), loss_G.item()))

    ## AFTER EACH EPOCH ##
    # append discriminator loss and generator loss (of the last batch)
    losses.append((loss_D.item(), loss_G.item()))
    # Fix: checkpoints are now written at epochs 100 and 200; the original
    # saved at 0 and 100, so the 'generator_200.pth' that load_model(epoch=200)
    # expects was never produced.
    if (epoch + 1) % 100 == 0:
        print('Saving model...')
        save_model(epoch + 1)

"""Observation of the loss along the training"""

fig, ax = plt.subplots()
losses = np.array(losses)
plt.plot(losses.T[0], label='Discriminator')
plt.plot(losses.T[1], label='Generator')
plt.title("Training Losses")
plt.legend()

"""If the training takes too much time, you can use a pretrained model in the meantime, to evaluate its performance.

It is available at : https://partage.liris.cnrs.fr/index.php/s/xwEFmxn9ANeq4zY

### Evaluate your cGAN
"""


def load_model(epoch=200):
    """Restore the checkpoints written by save_model() for a given epoch.

    Loads weights and optimizer state for both networks; maps tensors to CPU
    when CUDA is not available. Prints a message if no checkpoint exists.
    """
    gen_file = 'generator_' + str(epoch) + '.pth'
    disc_file = 'discriminator_' + str(epoch) + '.pth'
    if gen_file in os.listdir() and disc_file in os.listdir():
        map_location = None if cuda else 'cpu'

        checkpoint_generator = torch.load(gen_file, map_location=map_location)
        generator.load_state_dict(checkpoint_generator['model_state_dict'])
        optimizer_G.load_state_dict(checkpoint_generator['optimizer_state_dict'])
        # NOTE(review): these locals shadow the module-level counters and are
        # discarded on return, as in the original code.
        epoch_G = checkpoint_generator['epoch']
        loss_G = checkpoint_generator['loss']

        checkpoint_discriminator = torch.load(disc_file, map_location=map_location)
        discriminator.load_state_dict(checkpoint_discriminator['model_state_dict'])
        optimizer_D.load_state_dict(checkpoint_discriminator['optimizer_state_dict'])
        epoch_D = checkpoint_discriminator['epoch']
        loss_D = checkpoint_discriminator['loss']
    else:
        # Fix: the message string was broken ("isn\' a training")
        print("There isn't a training available with this number of epochs")


load_model(epoch=200)

# switching mode
generator.eval()

# show a sample evaluation image on the training base
image, mask = next(iter(dataloader))
output = generator(mask.type(Tensor))
# (the original .view(16, 3, 256, 256) was a no-op and broke on a smaller
# final batch; dropped)
output = output.cpu().detach()
for i in range(8):
    image_plot = reverse_transform(image[i])
    output_plot = reverse_transform(output[i])
    mask_plot = reverse_transform(mask[i])
    plot2x3Array(mask_plot, image_plot, output_plot)

# show a sample evaluation image on the validation dataset
image, mask = next(iter(val_dataloader))
output = generator(mask.type(Tensor))
output = output.cpu().detach()
for i in range(8):
    image_plot = reverse_transform(image[i])
    output_plot = reverse_transform(output[i])
    mask_plot = reverse_transform(mask[i])
    plot2x3Array(mask_plot, image_plot, output_plot)

"""<font color='red'>**Question 4**</font>
Compare results for 100 and 200 epochs
"""

# TO DO: Your code here to load and evaluate with a few samples
# a model after 100 epochs, e.g.:
#   load_model(epoch=100)
#   generator.eval()
#   ... same plotting loop as above ...

# And finally :
if cuda:
    torch.cuda.empty_cache()

"""# How to submit your Work ?
Your work should be uploaded within 3 weeks into the Moodle section "Devoir 2 - GAN et Conditional GAN". It can be either a notebook containing your code and a description of your work, experiments and results or a ".zip" file containing your report in a "pdf" format describing your work, experiments and results as well as your code (".py" Python files).
"""
-""" \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e65b60948f7c535530c000c11a5a7b91a5e2cd8c --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 FREDJ Siwar + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/README.md b/README.md index 57884980d12716147ef767baaee44c3afe0e851a..d1d4fdab51e2ee9ff15f23bce50dc0bc2ebdc72d 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,37 @@ # GAN & cGAN tutorial. -We recommand to use the notebook (.ipynb) but the Python script (.py) is also provided if more convenient for you. +## Getting started : better understanding of GAN/cGAN +Generative Adversarial Networks (GANs) have gained a lot of attention in recent years as a powerful tool in the field of machine learning. GANs are a type of artificial neural network that are capable of generating synthetic data that closely resembles real data. 
The second part explores a specific type of GAN architecture (cGAN) that is used for image-to-image translation.
Training loss of the generator and discriminator
--git a/data/MNIST/raw/train-images-idx3-ubyte b/data/MNIST/raw/train-images-idx3-ubyte new file mode 100644 index 0000000000000000000000000000000000000000..bbce27659e0fc2b7ed2a64c127849380a477099b Binary files /dev/null and b/data/MNIST/raw/train-images-idx3-ubyte differ diff --git a/data/MNIST/raw/train-images-idx3-ubyte.gz b/data/MNIST/raw/train-images-idx3-ubyte.gz new file mode 100644 index 0000000000000000000000000000000000000000..b50e4b6bccdebde3d57f575c7fbeb24bec277f10 Binary files /dev/null and b/data/MNIST/raw/train-images-idx3-ubyte.gz differ diff --git a/data/MNIST/raw/train-labels-idx1-ubyte b/data/MNIST/raw/train-labels-idx1-ubyte new file mode 100644 index 0000000000000000000000000000000000000000..d6b4c5db3b52063d543fb397aede09aba0dc5234 Binary files /dev/null and b/data/MNIST/raw/train-labels-idx1-ubyte differ diff --git a/data/MNIST/raw/train-labels-idx1-ubyte.gz b/data/MNIST/raw/train-labels-idx1-ubyte.gz new file mode 100644 index 0000000000000000000000000000000000000000..707a576bb523304d5b674de436c0779d77b7d480 Binary files /dev/null and b/data/MNIST/raw/train-labels-idx1-ubyte.gz differ diff --git a/images/1.png b/images/1.png new file mode 100644 index 0000000000000000000000000000000000000000..0da41273b4da286b7f0c23d3a2fcb13425a2aa87 Binary files /dev/null and b/images/1.png differ diff --git a/images/output.png b/images/output.png new file mode 100644 index 0000000000000000000000000000000000000000..e3580a285614d23cacb5040878ace0923d42b78a Binary files /dev/null and b/images/output.png differ diff --git a/loss-function.png b/loss-function.png new file mode 100644 index 0000000000000000000000000000000000000000..8c1e24fa14cbaa082eb55104a717c2741808d0f0 Binary files /dev/null and b/loss-function.png differ