├── LICENSE
├── LICENSE.md
├── train_gans.py
├── models
│   ├── Vanilla_GAN.py
│   ├── DCGAN.py
│   └── GAN3D.py
├── README.md
├── notebooks
│   ├── DCGAN.ipynb
│   └── 3D_GAN_pytorch.ipynb
└── Colaboratory'ye_Hoş_Geldiniz.ipynb
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Adaloglou Nikolaos 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 3D-GAN-pytorch Nikolas Adaloglou (black0017) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE.
-------------------------------------------------------------------------------- /train_gans.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.autograd.variable import Variable 3 | 4 | 5 | def ones_target(size): 6 | ''' 7 | Tensor containing ones, with shape = size 8 | ''' 9 | data = Variable(torch.ones(size, 1)) 10 | return data 11 | 12 | 13 | def zeros_target(size): 14 | ''' 15 | FAKE data 16 | Tensor containing zeros, with shape = size 17 | ''' 18 | data = Variable(torch.zeros(size, 1)) 19 | return data 20 | 21 | 22 | def train_discriminator(discriminator, optimizer, real_data, fake_data, loss): 23 | cuda = next(discriminator.parameters()).is_cuda 24 | N = real_data.size(0) 25 | # Reset gradients 26 | optimizer.zero_grad() 27 | # 1.1 Train on Real Data 28 | prediction_real = discriminator(real_data) 29 | # Calculate error and backpropagate 30 | target_real = ones_target(N) 31 | if cuda: 32 | target_real = target_real.cuda()  # .cuda() is not in-place, so reassign the moved tensor 33 | 34 | error_real = loss(prediction_real, target_real) 35 | error_real.backward() 36 | 37 | # 1.2 Train on Fake Data 38 | prediction_fake = discriminator(fake_data) 39 | # Calculate error and backpropagate 40 | target_fake = zeros_target(N) 41 | if cuda: 42 | target_fake = target_fake.cuda() 43 | error_fake = loss(prediction_fake, target_fake) 44 | error_fake.backward() 45 | 46 | # 1.3 Update weights with gradients 47 | optimizer.step() 48 | 49 | # Return error and predictions for real and fake inputs 50 | return error_real + error_fake, prediction_real, prediction_fake 51 | 52 | 53 | def train_generator(discriminator, optimizer, fake_data, loss): 54 | cuda = next(discriminator.parameters()).is_cuda 55 | N = fake_data.size(0) 56 | # Reset gradients 57 | optimizer.zero_grad() 58 | # The discriminator scores the generated (fake) data 59 | prediction = discriminator(fake_data) 60 | # The generator wants its samples to be classified as real (ones) 61 | target = ones_target(N) 62 | if cuda: 63 | target = target.cuda() 64 | 65 | # Calculate error and backpropagate 66 | error = loss(prediction, target) 67 | error.backward() 68 | # Update weights with gradients 69 | optimizer.step() 70 | # Return error 71 | return error 72 | -------------------------------------------------------------------------------- /models/Vanilla_GAN.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | 4 | 5 | class DiscriminatorNet(torch.nn.Module): 6 | """ 7 | A three hidden-layer discriminative neural network 8 | """ 9 | 10 | def __init__(self): 11 | super(DiscriminatorNet, self).__init__() 12 | n_features = 784 13 | n_out = 1 14 | 15 | self.hidden0 = nn.Sequential( 16 | nn.Linear(n_features, 1024), 17 | nn.LeakyReLU(0.2), 18 | nn.Dropout(0.3) 19 | ) 20 | self.hidden1 = nn.Sequential( 21 | nn.Linear(1024, 512), 22 | nn.LeakyReLU(0.2), 23 | nn.Dropout(0.3) 24 | ) 25 | self.hidden2 = nn.Sequential( 26 | nn.Linear(512, 256), 27 | nn.LeakyReLU(0.2), 28 | nn.Dropout(0.3) 29 | ) 30 | self.out = nn.Sequential( 31 | torch.nn.Linear(256, n_out), 32 | torch.nn.Sigmoid() 33 | ) 34 | 35 | def forward(self, x): 36 | x = self.hidden0(x) 37 | x = self.hidden1(x) 38 | x = self.hidden2(x) 39 | x = self.out(x) 40 | return x 41 | 42 | 43 | class GeneratorNet(torch.nn.Module): 44 | """ 45 | A three hidden-layer generative neural network 46 | """ 47 | 48 | def __init__(self): 49 | super(GeneratorNet, self).__init__() 50 | n_features = 100 51 | n_out = 784 52 | 53 | self.hidden0 = nn.Sequential( 54 | nn.Linear(n_features, 256), 55 | nn.LeakyReLU(0.2) 56 | ) 57 | self.hidden1 = nn.Sequential( 58 | nn.Linear(256, 512), 59
| nn.LeakyReLU(0.2) 60 | ) 61 | self.hidden2 = nn.Sequential( 62 | nn.Linear(512, 1024), 63 | nn.LeakyReLU(0.2) 64 | ) 65 | 66 | self.out = nn.Sequential( 67 | nn.Linear(1024, n_out), 68 | nn.Tanh() 69 | ) 70 | 71 | def forward(self, x): 72 | x = self.hidden0(x) 73 | x = self.hidden1(x) 74 | x = self.hidden2(x) 75 | x = self.out(x) 76 | return x 77 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 3D GAN Pytorch (Learning a Probabilistic Latent Space of Object Shapes via 3D Generative-Adversarial Modeling) 2 | **Responsible** implementation of the 3D-GAN NIPS 2016 paper, which can be found [here](https://papers.nips.cc/paper/6096-learning-a-probabilistic-latent-space-of-object-shapes-via-3d-generative-adversarial-modeling.pdf "Link to paper"). 3 | 4 | We did our best to follow the guidelines of the original papers. However, it is always a good idea to try to reproduce the published results yourself. We also include our **DCGAN** implementation, since **3D-GAN** is the natural extension of DCGAN to 3D space. For completeness, a Vanilla GAN is also included. All models are available in Google Colab, and you can train them with the training script in train_gans.py (a minimal training sketch is given at the end of this README). 5 | 6 | Data loaders will be added soon. 7 | 8 | ## Google Colab instructions and usage 9 | 1. Go to https://colab.research.google.com 10 | 2. **```File```** > **```Upload notebook...```** > **```GitHub```** > **```Paste this link:``` https://github.com/black0017/3D-GAN-pytorch/blob/master/notebooks/3D_GAN_pytorch.ipynb** 11 | 3. Ensure that **```Runtime```** > **```Change runtime type```** is set to ```Python 3``` with a ```GPU``` accelerator 12 | 4. Run the code blocks and enjoy :) 13 | 14 | 15 | 16 | ## Detailed Info 17 | 18 | #### Generator/Discriminator summary for a batch size of 1 19 | Trainable params: 17,601,408/11,048,833 20 | 21 | Forward/backward pass size (MB): 67.25/63.75 22 | 23 | Params size (MB): 67.14/42.15 24 | 25 | Estimated Total Size (MB): 134.39/106.90 26 | 27 | ## References 28 | 29 | [1] Wu, J., Zhang, C., Xue, T., Freeman, B., & Tenenbaum, J. (2016). Learning a probabilistic latent space of object shapes via 3D generative-adversarial modeling. In Advances in Neural Information Processing Systems (pp. 82-90). 30 | 31 | [2] Radford, A., Metz, L., & Chintala, S. (2015). Unsupervised representation learning with deep convolutional generative adversarial networks. arXiv preprint arXiv:1511.06434. 32 | 33 | [3] Goodfellow, I., Pouget-Abadie, J., Mirza, M., Xu, B., Warde-Farley, D., Ozair, S., ... & Bengio, Y. (2014). Generative adversarial nets. In Advances in Neural Information Processing Systems (pp. 2672-2680). 34 | 35 | 36 | ## Support 37 | If you **really** like this repository and find it useful, please consider (★) **starring** it, so that it can reach a broader audience of like-minded people.
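## Minimal training sketch

The snippet below is a minimal sketch of how the 3D-GAN `Generator`/`Discriminator` can be wired to the helpers in `train_gans.py`; it is not a tested script from this repository. It assumes that the module-level `test_gan3d()`/`test_dcgan()` calls at the bottom of the model files are removed or guarded by `if __name__ == "__main__":`, it uses random tensors in place of real voxel batches (data loaders are not included yet), and the optimizer settings are illustrative rather than the exact values from the paper.

```python
import torch
import torch.nn as nn

from models.GAN3D import Generator, Discriminator
from train_gans import train_discriminator, train_generator

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

noise_dim = 200
generator = Generator(in_channels=512, out_dim=64, out_channels=1, noise_dim=noise_dim).to(device)
discriminator = Discriminator(in_channels=1, dim=64, out_conv_channels=512).to(device)

# Illustrative optimizer settings; tune them for your data.
g_optimizer = torch.optim.Adam(generator.parameters(), lr=2e-4, betas=(0.5, 0.999))
d_optimizer = torch.optim.Adam(discriminator.parameters(), lr=2e-4, betas=(0.5, 0.999))
loss = nn.BCELoss()

batch_size = 4
for step in range(100):
    # Placeholder "real" voxel grids of shape (batch, 1, 64, 64, 64);
    # replace this with batches from an actual data loader.
    real_data = torch.rand(batch_size, 1, 64, 64, 64, device=device)

    noise = torch.rand(batch_size, noise_dim, device=device)
    fake_data = generator(noise)

    # Update the discriminator on real data and on detached fake data,
    # so this step does not backpropagate into the generator.
    d_error, _, _ = train_discriminator(discriminator, d_optimizer, real_data, fake_data.detach(), loss)

    # Update the generator so the discriminator classifies its samples as real.
    g_error = train_generator(discriminator, g_optimizer, fake_data, loss)

    if step % 10 == 0:
        print(f"step {step}: d_error={d_error.item():.3f} g_error={g_error.item():.3f}")
```

The same loop works for the 2D `DCGAN` models by swapping the imports and feeding image batches of shape `(batch, 3, 64, 64)`.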
38 | -------------------------------------------------------------------------------- /models/DCGAN.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from torchsummary import summary 4 | 5 | """ 6 | DCGAN pytorch implementation based on https://arxiv.org/abs/1511.06434 7 | """ 8 | 9 | 10 | class Discriminator(torch.nn.Module): 11 | 12 | def __init__(self, in_channels=3, out_conv_channels=1024, dim=64): 13 | super(Discriminator, self).__init__() 14 | conv1_channels = int(out_conv_channels / 8) 15 | conv2_channels = int(out_conv_channels / 4) 16 | conv3_channels = int(out_conv_channels / 2) 17 | self.out_conv_channels = out_conv_channels 18 | self.out_dim = int(dim / 16) 19 | 20 | self.conv1 = nn.Sequential( 21 | nn.Conv2d( 22 | in_channels=in_channels, out_channels=conv1_channels, kernel_size=4, 23 | stride=2, padding=1, bias=False 24 | ), 25 | nn.BatchNorm2d(conv1_channels), 26 | nn.LeakyReLU(0.2, inplace=True) 27 | ) 28 | self.conv2 = nn.Sequential( 29 | nn.Conv2d( 30 | in_channels=conv1_channels, out_channels=conv2_channels, kernel_size=4, 31 | stride=2, padding=1, bias=False 32 | ), 33 | nn.BatchNorm2d(conv2_channels), 34 | nn.LeakyReLU(0.2, inplace=True) 35 | ) 36 | self.conv3 = nn.Sequential( 37 | nn.Conv2d( 38 | in_channels=conv2_channels, out_channels=conv3_channels, kernel_size=4, 39 | stride=2, padding=1, bias=False 40 | ), 41 | nn.BatchNorm2d(conv3_channels), 42 | nn.LeakyReLU(0.2, inplace=True) 43 | ) 44 | self.conv4 = nn.Sequential( 45 | nn.Conv2d( 46 | in_channels=conv3_channels, out_channels=out_conv_channels, kernel_size=4, 47 | stride=2, padding=1, bias=False 48 | ), 49 | nn.BatchNorm2d(out_conv_channels), 50 | nn.LeakyReLU(0.2, inplace=True) 51 | ) 52 | self.out = nn.Sequential( 53 | nn.Linear(out_conv_channels * self.out_dim * self.out_dim, 1), 54 | nn.Sigmoid(), 55 | ) 56 | 57 | def forward(self, x): 58 | x = self.conv1(x) 59 | x = self.conv2(x) 60 | x = self.conv3(x) 61 | x = self.conv4(x) 62 | x = x.view(-1, self.out_conv_channels * self.out_dim * self.out_dim) 63 | x = self.out(x) 64 | return x 65 | 66 | 67 | class Generator(torch.nn.Module): 68 | 69 | def __init__(self, in_channels=1024, out_dim=64, out_channels=3, noise_dim=200): 70 | super(Generator, self).__init__() 71 | self.in_channels = in_channels 72 | self.out_dim = out_dim 73 | self.in_dim = int(out_dim / 16) 74 | conv1_out_channels = int(self.in_channels / 2.0) 75 | conv2_out_channels = int(conv1_out_channels / 2) 76 | conv3_out_channels = int(conv2_out_channels / 2) 77 | 78 | self.linear = torch.nn.Linear(noise_dim, in_channels * self.in_dim * self.in_dim) 79 | 80 | self.conv1 = nn.Sequential( 81 | nn.ConvTranspose2d( 82 | in_channels=self.in_channels, out_channels=conv1_out_channels, kernel_size=4, 83 | stride=2, padding=1, bias=False 84 | ), 85 | nn.BatchNorm2d(conv1_out_channels), 86 | nn.ReLU(inplace=True) 87 | ) 88 | self.conv2 = nn.Sequential( 89 | nn.ConvTranspose2d( 90 | in_channels=conv1_out_channels, out_channels=conv2_out_channels, kernel_size=4, 91 | stride=2, padding=1, bias=False 92 | ), 93 | nn.BatchNorm2d(conv2_out_channels), 94 | nn.ReLU(inplace=True) 95 | ) 96 | self.conv3 = nn.Sequential( 97 | nn.ConvTranspose2d( 98 | in_channels=conv2_out_channels, out_channels=conv3_out_channels, kernel_size=4, 99 | stride=2, padding=1, bias=False 100 | ), 101 | nn.BatchNorm2d(conv3_out_channels), 102 | nn.ReLU(inplace=True) 103 | ) 104 | self.conv4 = nn.Sequential( 105 | nn.ConvTranspose2d( 106 | 
in_channels=conv3_out_channels, out_channels=out_channels, kernel_size=4, 107 | stride=2, padding=1, bias=False 108 | ) 109 | ) 110 | self.out = torch.nn.Tanh() 111 | 112 | def forward(self, x): 113 | x = self.linear(x) 114 | x = x.view(-1, self.in_channels, self.in_dim, self.in_dim) 115 | x = self.conv1(x) 116 | x = self.conv2(x) 117 | x = self.conv3(x) 118 | x = self.conv4(x) 119 | return self.out(x) 120 | 121 | 122 | def test_dcgan(): 123 | noise_dim = 100 124 | in_conv_channels = 512 125 | dim = 64 # cube volume 126 | model_generator = Generator(in_channels=in_conv_channels, out_dim=dim, out_channels=3, noise_dim=noise_dim) 127 | noise = torch.rand(1, noise_dim) 128 | generated_volume = model_generator(noise) 129 | print("Generator output shape", generated_volume.shape) 130 | model_discriminator = Discriminator(in_channels=3, dim=dim, out_conv_channels=in_conv_channels) 131 | out = model_discriminator(generated_volume) 132 | print("Discriminator output", out) 133 | print("Generator summary") 134 | summary(model_generator, (1, noise_dim)) 135 | print("Discriminator summary") 136 | summary(model_discriminator, (3,64,64)) 137 | 138 | test_dcgan() -------------------------------------------------------------------------------- /models/GAN3D.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from torchsummary import summary 4 | 5 | """ 6 | Implementation based on original paper NeurIPS 2016 7 | https://papers.nips.cc/paper/6096-learning-a-probabilistic-latent-space-of-object-shapes-via-3d-generative-adversarial-modeling.pdf 8 | """ 9 | 10 | 11 | class Discriminator(torch.nn.Module): 12 | def __init__(self, in_channels=3, dim=64, out_conv_channels=512): 13 | super(Discriminator, self).__init__() 14 | conv1_channels = int(out_conv_channels / 8) 15 | conv2_channels = int(out_conv_channels / 4) 16 | conv3_channels = int(out_conv_channels / 2) 17 | self.out_conv_channels = out_conv_channels 18 | self.out_dim = int(dim / 16) 19 | 20 | self.conv1 = nn.Sequential( 21 | nn.Conv3d( 22 | in_channels=in_channels, out_channels=conv1_channels, kernel_size=4, 23 | stride=2, padding=1, bias=False 24 | ), 25 | nn.BatchNorm3d(conv1_channels), 26 | nn.LeakyReLU(0.2, inplace=True) 27 | ) 28 | self.conv2 = nn.Sequential( 29 | nn.Conv3d( 30 | in_channels=conv1_channels, out_channels=conv2_channels, kernel_size=4, 31 | stride=2, padding=1, bias=False 32 | ), 33 | nn.BatchNorm3d(conv2_channels), 34 | nn.LeakyReLU(0.2, inplace=True) 35 | ) 36 | self.conv3 = nn.Sequential( 37 | nn.Conv3d( 38 | in_channels=conv2_channels, out_channels=conv3_channels, kernel_size=4, 39 | stride=2, padding=1, bias=False 40 | ), 41 | nn.BatchNorm3d(conv3_channels), 42 | nn.LeakyReLU(0.2, inplace=True) 43 | ) 44 | self.conv4 = nn.Sequential( 45 | nn.Conv3d( 46 | in_channels=conv3_channels, out_channels=out_conv_channels, kernel_size=4, 47 | stride=2, padding=1, bias=False 48 | ), 49 | nn.BatchNorm3d(out_conv_channels), 50 | nn.LeakyReLU(0.2, inplace=True) 51 | ) 52 | self.out = nn.Sequential( 53 | nn.Linear(out_conv_channels * self.out_dim * self.out_dim * self.out_dim, 1), 54 | nn.Sigmoid(), 55 | ) 56 | 57 | def forward(self, x): 58 | x = self.conv1(x) 59 | x = self.conv2(x) 60 | x = self.conv3(x) 61 | x = self.conv4(x) 62 | # Flatten and apply linear + sigmoid 63 | x = x.view(-1, self.out_conv_channels * self.out_dim * self.out_dim * self.out_dim) 64 | x = self.out(x) 65 | return x 66 | 67 | 68 | class Generator(torch.nn.Module): 69 | def __init__(self, 
in_channels=512, out_dim=64, out_channels=1, noise_dim=200, activation="sigmoid"): 70 | super(Generator, self).__init__() 71 | self.in_channels = in_channels 72 | self.out_dim = out_dim 73 | self.in_dim = int(out_dim / 16) 74 | conv1_out_channels = int(self.in_channels / 2.0) 75 | conv2_out_channels = int(conv1_out_channels / 2) 76 | conv3_out_channels = int(conv2_out_channels / 2) 77 | 78 | self.linear = torch.nn.Linear(noise_dim, in_channels * self.in_dim * self.in_dim * self.in_dim) 79 | 80 | self.conv1 = nn.Sequential( 81 | nn.ConvTranspose3d( 82 | in_channels=in_channels, out_channels=conv1_out_channels, kernel_size=(4, 4, 4), 83 | stride=2, padding=1, bias=False 84 | ), 85 | nn.BatchNorm3d(conv1_out_channels), 86 | nn.ReLU(inplace=True) 87 | ) 88 | self.conv2 = nn.Sequential( 89 | nn.ConvTranspose3d( 90 | in_channels=conv1_out_channels, out_channels=conv2_out_channels, kernel_size=(4, 4, 4), 91 | stride=2, padding=1, bias=False 92 | ), 93 | nn.BatchNorm3d(conv2_out_channels), 94 | nn.ReLU(inplace=True) 95 | ) 96 | self.conv3 = nn.Sequential( 97 | nn.ConvTranspose3d( 98 | in_channels=conv2_out_channels, out_channels=conv3_out_channels, kernel_size=(4, 4, 4), 99 | stride=2, padding=1, bias=False 100 | ), 101 | nn.BatchNorm3d(conv3_out_channels), 102 | nn.ReLU(inplace=True) 103 | ) 104 | self.conv4 = nn.Sequential( 105 | nn.ConvTranspose3d( 106 | in_channels=conv3_out_channels, out_channels=out_channels, kernel_size=(4, 4, 4), 107 | stride=2, padding=1, bias=False 108 | ) 109 | ) 110 | if activation == "sigmoid": 111 | self.out = torch.nn.Sigmoid() 112 | else: 113 | self.out = torch.nn.Tanh() 114 | 115 | def project(self, x): 116 | """ 117 | projects and reshapes latent vector to starting volume 118 | :param x: latent vector 119 | :return: starting volume 120 | """ 121 | return x.view(-1, self.in_channels, self.in_dim, self.in_dim, self.in_dim) 122 | 123 | def forward(self, x): 124 | x = self.linear(x) 125 | x = self.project(x) 126 | x = self.conv1(x) 127 | x = self.conv2(x) 128 | x = self.conv3(x) 129 | x = self.conv4(x) 130 | return self.out(x) 131 | 132 | 133 | def test_gan3d(): 134 | noise_dim = 200 135 | in_channels = 512 136 | dim = 64 # cube volume 137 | model_generator = Generator(in_channels=512, out_dim=dim, out_channels=1, noise_dim=noise_dim) 138 | noise = torch.rand(1, noise_dim) 139 | generated_volume = model_generator(noise) 140 | print("Generator output shape", generated_volume.shape) 141 | model_discriminator = Discriminator(in_channels=1, dim=dim, out_conv_channels=in_channels) 142 | out = model_discriminator(generated_volume) 143 | print("Discriminator output", out) 144 | summary(model_generator, (1, noise_dim)) 145 | summary(model_discriminator, (1, 64, 64, 64)) 146 | 147 | 148 | test_gan3d() 149 | -------------------------------------------------------------------------------- /notebooks/DCGAN.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "DCGAN.ipynb", 7 | "provenance": [] 8 | }, 9 | "kernelspec": { 10 | "name": "python3", 11 | "display_name": "Python 3" 12 | } 13 | }, 14 | "cells": [ 15 | { 16 | "cell_type": "markdown", 17 | "metadata": { 18 | "id": "iF22iJa0FgmD", 19 | "colab_type": "text" 20 | }, 21 | "source": [ 22 | "" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": { 28 | "id": "JX3QqxIxFh25", 29 | "colab_type": "text" 30 | }, 31 | "source": [ 32 | "# DCGAN implementation in pytorch" 33 | ] 34 | }, 35 | 
{ 36 | "cell_type": "code", 37 | "metadata": { 38 | "id": "am3Jnb-GF46B", 39 | "colab_type": "code", 40 | "colab": { 41 | "base_uri": "https://localhost:8080/", 42 | "height": 52 43 | }, 44 | "outputId": "8563f2e1-f119-45b9-e479-053d43dff46e" 45 | }, 46 | "source": [ 47 | "!pip install torch\n", 48 | "!pip install torchsummary" 49 | ], 50 | "execution_count": 2, 51 | "outputs": [ 52 | { 53 | "output_type": "stream", 54 | "text": [ 55 | "Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (1.4.0)\n", 56 | "Requirement already satisfied: torchsummary in /usr/local/lib/python3.6/dist-packages (1.5.1)\n" 57 | ], 58 | "name": "stdout" 59 | } 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "metadata": { 65 | "id": "qAVbbhcdFYdP", 66 | "colab_type": "code", 67 | "colab": { 68 | "base_uri": "https://localhost:8080/", 69 | "height": 34 70 | }, 71 | "outputId": "ed992831-d8ca-493d-8162-fbe33f8a6172" 72 | }, 73 | "source": [ 74 | "import torch\n", 75 | "import torch.nn as nn\n", 76 | "from torchsummary import summary\n", 77 | "\n", 78 | "\"\"\"\n", 79 | "DCGAN pytorch implementation based on https://arxiv.org/abs/1511.06434\n", 80 | "\"\"\"\n" 81 | ], 82 | "execution_count": 3, 83 | "outputs": [ 84 | { 85 | "output_type": "execute_result", 86 | "data": { 87 | "text/plain": [ 88 | "'\\nDCGAN pytorch implementation based on https://arxiv.org/abs/1511.06434\\n'" 89 | ] 90 | }, 91 | "metadata": { 92 | "tags": [] 93 | }, 94 | "execution_count": 3 95 | } 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "metadata": { 101 | "id": "a24T9e6oFrUE", 102 | "colab_type": "text" 103 | }, 104 | "source": [ 105 | "## Discriminator" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "metadata": { 111 | "id": "DMsIKh59Fpgi", 112 | "colab_type": "code", 113 | "colab": {} 114 | }, 115 | "source": [ 116 | "class Discriminator(torch.nn.Module):\n", 117 | "\n", 118 | " def __init__(self, in_channels=3, out_conv_channels=1024, dim=64):\n", 119 | " super(Discriminator, self).__init__()\n", 120 | " conv1_channels = int(out_conv_channels / 8)\n", 121 | " conv2_channels = int(out_conv_channels / 4)\n", 122 | " conv3_channels = int(out_conv_channels / 2)\n", 123 | " self.out_conv_channels = out_conv_channels\n", 124 | " self.out_dim = int(dim / 16)\n", 125 | "\n", 126 | " self.conv1 = nn.Sequential(\n", 127 | " nn.Conv2d(\n", 128 | " in_channels=in_channels, out_channels=conv1_channels, kernel_size=4,\n", 129 | " stride=2, padding=1, bias=False\n", 130 | " ),\n", 131 | " nn.BatchNorm2d(conv1_channels),\n", 132 | " nn.LeakyReLU(0.2, inplace=True)\n", 133 | " )\n", 134 | " self.conv2 = nn.Sequential(\n", 135 | " nn.Conv2d(\n", 136 | " in_channels=conv1_channels, out_channels=conv2_channels, kernel_size=4,\n", 137 | " stride=2, padding=1, bias=False\n", 138 | " ),\n", 139 | " nn.BatchNorm2d(conv2_channels),\n", 140 | " nn.LeakyReLU(0.2, inplace=True)\n", 141 | " )\n", 142 | " self.conv3 = nn.Sequential(\n", 143 | " nn.Conv2d(\n", 144 | " in_channels=conv2_channels, out_channels=conv3_channels, kernel_size=4,\n", 145 | " stride=2, padding=1, bias=False\n", 146 | " ),\n", 147 | " nn.BatchNorm2d(conv3_channels),\n", 148 | " nn.LeakyReLU(0.2, inplace=True)\n", 149 | " )\n", 150 | " self.conv4 = nn.Sequential(\n", 151 | " nn.Conv2d(\n", 152 | " in_channels=conv3_channels, out_channels=out_conv_channels, kernel_size=4,\n", 153 | " stride=2, padding=1, bias=False\n", 154 | " ),\n", 155 | " nn.BatchNorm2d(out_conv_channels),\n", 156 | " nn.LeakyReLU(0.2, inplace=True)\n", 157 | " )\n", 158 | " 
self.out = nn.Sequential(\n", 159 | " nn.Linear(out_conv_channels * self.out_dim * self.out_dim, 1),\n", 160 | " nn.Sigmoid(),\n", 161 | " )\n", 162 | "\n", 163 | " def forward(self, x):\n", 164 | " x = self.conv1(x)\n", 165 | " x = self.conv2(x)\n", 166 | " x = self.conv3(x)\n", 167 | " x = self.conv4(x)\n", 168 | " x = x.view(-1, self.out_conv_channels * self.out_dim * self.out_dim)\n", 169 | " x = self.out(x)\n", 170 | " return x\n" 171 | ], 172 | "execution_count": 0, 173 | "outputs": [] 174 | }, 175 | { 176 | "cell_type": "markdown", 177 | "metadata": { 178 | "id": "oby_xHhqFmfw", 179 | "colab_type": "text" 180 | }, 181 | "source": [ 182 | "## Generator" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "metadata": { 188 | "id": "tIw8VMAUFvJ1", 189 | "colab_type": "code", 190 | "colab": {} 191 | }, 192 | "source": [ 193 | "\n", 194 | "class Generator(torch.nn.Module):\n", 195 | "\n", 196 | " def __init__(self, in_channels=1024, out_dim=64, out_channels=3, noise_dim=200):\n", 197 | " super(Generator, self).__init__()\n", 198 | " self.in_channels = in_channels\n", 199 | " self.out_dim = out_dim\n", 200 | " self.in_dim = int(out_dim / 16)\n", 201 | " conv1_out_channels = int(self.in_channels / 2.0)\n", 202 | " conv2_out_channels = int(conv1_out_channels / 2)\n", 203 | " conv3_out_channels = int(conv2_out_channels / 2)\n", 204 | "\n", 205 | " self.linear = torch.nn.Linear(noise_dim, in_channels * self.in_dim * self.in_dim)\n", 206 | "\n", 207 | " self.conv1 = nn.Sequential(\n", 208 | " nn.ConvTranspose2d(\n", 209 | " in_channels=self.in_channels, out_channels=conv1_out_channels, kernel_size=4,\n", 210 | " stride=2, padding=1, bias=False\n", 211 | " ),\n", 212 | " nn.BatchNorm2d(conv1_out_channels),\n", 213 | " nn.ReLU(inplace=True)\n", 214 | " )\n", 215 | " self.conv2 = nn.Sequential(\n", 216 | " nn.ConvTranspose2d(\n", 217 | " in_channels=conv1_out_channels, out_channels=conv2_out_channels, kernel_size=4,\n", 218 | " stride=2, padding=1, bias=False\n", 219 | " ),\n", 220 | " nn.BatchNorm2d(conv2_out_channels),\n", 221 | " nn.ReLU(inplace=True)\n", 222 | " )\n", 223 | " self.conv3 = nn.Sequential(\n", 224 | " nn.ConvTranspose2d(\n", 225 | " in_channels=conv2_out_channels, out_channels=conv3_out_channels, kernel_size=4,\n", 226 | " stride=2, padding=1, bias=False\n", 227 | " ),\n", 228 | " nn.BatchNorm2d(conv3_out_channels),\n", 229 | " nn.ReLU(inplace=True)\n", 230 | " )\n", 231 | " self.conv4 = nn.Sequential(\n", 232 | " nn.ConvTranspose2d(\n", 233 | " in_channels=conv3_out_channels, out_channels=out_channels, kernel_size=4,\n", 234 | " stride=2, padding=1, bias=False\n", 235 | " )\n", 236 | " )\n", 237 | " self.out = torch.nn.Tanh()\n", 238 | "\n", 239 | " def forward(self, x):\n", 240 | " x = self.linear(x)\n", 241 | " x = x.view(-1, self.in_channels, self.in_dim, self.in_dim)\n", 242 | " x = self.conv1(x)\n", 243 | " x = self.conv2(x)\n", 244 | " x = self.conv3(x)\n", 245 | " x = self.conv4(x)\n", 246 | " return self.out(x)" 247 | ], 248 | "execution_count": 0, 249 | "outputs": [] 250 | }, 251 | { 252 | "cell_type": "markdown", 253 | "metadata": { 254 | "id": "UGmkMNYOF1EN", 255 | "colab_type": "text" 256 | }, 257 | "source": [ 258 | "## Test" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "metadata": { 264 | "id": "s5Xs5zyCF2Ms", 265 | "colab_type": "code", 266 | "colab": { 267 | "base_uri": "https://localhost:8080/", 268 | "height": 990 269 | }, 270 | "outputId": "160788d3-5aa9-4c44-9850-7455dd2cdad9" 271 | }, 272 | "source": [ 273 | "def test_dcgan():\n", 274 | " 
noise_dim = 100\n", 275 | " in_conv_channels = 512\n", 276 | " dim = 64 # cube volume\n", 277 | " model_generator = Generator(in_channels=in_conv_channels, out_dim=dim, out_channels=3, noise_dim=noise_dim)\n", 278 | " noise = torch.rand(1, noise_dim)\n", 279 | " generated_volume = model_generator(noise)\n", 280 | " print(\"Generator output shape\", generated_volume.shape)\n", 281 | " model_discriminator = Discriminator(in_channels=3, dim=dim, out_conv_channels=in_conv_channels)\n", 282 | " out = model_discriminator(generated_volume)\n", 283 | " print(\"Discriminator output\", out.item())\n", 284 | " print(\"Generator summary\")\n", 285 | " summary(model_generator, (1, noise_dim))\n", 286 | " print(\"Discriminator summary\")\n", 287 | " summary(model_discriminator, (3,64,64))\n", 288 | "\n", 289 | "test_dcgan()" 290 | ], 291 | "execution_count": 7, 292 | "outputs": [ 293 | { 294 | "output_type": "stream", 295 | "text": [ 296 | "Generator output shape torch.Size([1, 3, 64, 64])\n", 297 | "Discriminator output 0.5283796787261963\n", 298 | "Generator summary\n", 299 | "----------------------------------------------------------------\n", 300 | " Layer (type) Output Shape Param #\n", 301 | "================================================================\n", 302 | " Linear-1 [-1, 1, 8192] 827,392\n", 303 | " ConvTranspose2d-2 [-1, 256, 8, 8] 2,097,152\n", 304 | " BatchNorm2d-3 [-1, 256, 8, 8] 512\n", 305 | " ReLU-4 [-1, 256, 8, 8] 0\n", 306 | " ConvTranspose2d-5 [-1, 128, 16, 16] 524,288\n", 307 | " BatchNorm2d-6 [-1, 128, 16, 16] 256\n", 308 | " ReLU-7 [-1, 128, 16, 16] 0\n", 309 | " ConvTranspose2d-8 [-1, 64, 32, 32] 131,072\n", 310 | " BatchNorm2d-9 [-1, 64, 32, 32] 128\n", 311 | " ReLU-10 [-1, 64, 32, 32] 0\n", 312 | " ConvTranspose2d-11 [-1, 3, 64, 64] 3,072\n", 313 | " Tanh-12 [-1, 3, 64, 64] 0\n", 314 | "================================================================\n", 315 | "Total params: 3,583,872\n", 316 | "Trainable params: 3,583,872\n", 317 | "Non-trainable params: 0\n", 318 | "----------------------------------------------------------------\n", 319 | "Input size (MB): 0.00\n", 320 | "Forward/backward pass size (MB): 2.88\n", 321 | "Params size (MB): 13.67\n", 322 | "Estimated Total Size (MB): 16.55\n", 323 | "----------------------------------------------------------------\n", 324 | "Discriminator summary\n", 325 | "----------------------------------------------------------------\n", 326 | " Layer (type) Output Shape Param #\n", 327 | "================================================================\n", 328 | " Conv2d-1 [-1, 64, 32, 32] 3,072\n", 329 | " BatchNorm2d-2 [-1, 64, 32, 32] 128\n", 330 | " LeakyReLU-3 [-1, 64, 32, 32] 0\n", 331 | " Conv2d-4 [-1, 128, 16, 16] 131,072\n", 332 | " BatchNorm2d-5 [-1, 128, 16, 16] 256\n", 333 | " LeakyReLU-6 [-1, 128, 16, 16] 0\n", 334 | " Conv2d-7 [-1, 256, 8, 8] 524,288\n", 335 | " BatchNorm2d-8 [-1, 256, 8, 8] 512\n", 336 | " LeakyReLU-9 [-1, 256, 8, 8] 0\n", 337 | " Conv2d-10 [-1, 512, 4, 4] 2,097,152\n", 338 | " BatchNorm2d-11 [-1, 512, 4, 4] 1,024\n", 339 | " LeakyReLU-12 [-1, 512, 4, 4] 0\n", 340 | " Linear-13 [-1, 1] 8,193\n", 341 | " Sigmoid-14 [-1, 1] 0\n", 342 | "================================================================\n", 343 | "Total params: 2,765,697\n", 344 | "Trainable params: 2,765,697\n", 345 | "Non-trainable params: 0\n", 346 | "----------------------------------------------------------------\n", 347 | "Input size (MB): 0.05\n", 348 | "Forward/backward pass size (MB): 2.81\n", 349 | "Params size (MB): 10.55\n", 
350 | "Estimated Total Size (MB): 13.41\n", 351 | "----------------------------------------------------------------\n" 352 | ], 353 | "name": "stdout" 354 | } 355 | ] 356 | } 357 | ] 358 | } -------------------------------------------------------------------------------- /notebooks/3D_GAN_pytorch.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "3D-GAN-pytorch.ipynb", 7 | "provenance": [], 8 | "collapsed_sections": [] 9 | }, 10 | "kernelspec": { 11 | "name": "python3", 12 | "display_name": "Python 3" 13 | } 14 | }, 15 | "cells": [ 16 | { 17 | "cell_type": "code", 18 | "metadata": { 19 | "id": "RmxEe0AEtRiO", 20 | "colab_type": "code", 21 | "colab": { 22 | "base_uri": "https://localhost:8080/", 23 | "height": 52 24 | }, 25 | "outputId": "f8e8c2ad-11c6-4fc4-a5dc-33b8e5431808" 26 | }, 27 | "source": [ 28 | "!pip install torch\n", 29 | "!pip install torchsummary\n" 30 | ], 31 | "execution_count": 2, 32 | "outputs": [ 33 | { 34 | "output_type": "stream", 35 | "text": [ 36 | "Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (1.4.0)\n", 37 | "Requirement already satisfied: torchsummary in /usr/local/lib/python3.6/dist-packages (1.5.1)\n" 38 | ], 39 | "name": "stdout" 40 | } 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "metadata": { 46 | "id": "_ue1BlbIxDsH", 47 | "colab_type": "code", 48 | "colab": { 49 | "base_uri": "https://localhost:8080/", 50 | "height": 54 51 | }, 52 | "outputId": "ac224250-82c9-4e4a-f702-a180dbc8d053" 53 | }, 54 | "source": [ 55 | "import torch\n", 56 | "import torch.nn as nn\n", 57 | "from torchsummary import summary\n", 58 | "\n", 59 | "\"\"\"\n", 60 | "Implementation based on original paper NeurIPS 2016 https://papers.nips.cc/paper/6096-learning-a-probabilistic-latent-space-of-object-shapes-via-3d-generative-adversarial-modeling.pdf\n", 61 | "\"\"\"\n" 62 | ], 63 | "execution_count": 3, 64 | "outputs": [ 65 | { 66 | "output_type": "execute_result", 67 | "data": { 68 | "text/plain": [ 69 | "'\\nImplementation based on original paper NeurIPS 2016 https://papers.nips.cc/paper/6096-learning-a-probabilistic-latent-space-of-object-shapes-via-3d-generative-adversarial-modeling.pdf\\n'" 70 | ] 71 | }, 72 | "metadata": { 73 | "tags": [] 74 | }, 75 | "execution_count": 3 76 | } 77 | ] 78 | }, 79 | { 80 | "cell_type": "markdown", 81 | "metadata": { 82 | "id": "FdWpJZhRxNVC", 83 | "colab_type": "text" 84 | }, 85 | "source": [ 86 | "## Discriminator" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "metadata": { 92 | "id": "0p3oPTztxLqj", 93 | "colab_type": "code", 94 | "colab": {} 95 | }, 96 | "source": [ 97 | "class Discriminator(torch.nn.Module):\n", 98 | " def __init__(self, in_channels=3, dim=64, out_conv_channels=512):\n", 99 | " super(Discriminator, self).__init__()\n", 100 | " conv1_channels = int(out_conv_channels / 8)\n", 101 | " conv2_channels = int(out_conv_channels / 4)\n", 102 | " conv3_channels = int(out_conv_channels / 2)\n", 103 | " self.out_conv_channels = out_conv_channels\n", 104 | " self.out_dim = int(dim / 16)\n", 105 | "\n", 106 | " self.conv1 = nn.Sequential(\n", 107 | " nn.Conv3d(\n", 108 | " in_channels=in_channels, out_channels=conv1_channels, kernel_size=4,\n", 109 | " stride=2, padding=1, bias=False\n", 110 | " ),\n", 111 | " nn.BatchNorm3d(conv1_channels),\n", 112 | " nn.LeakyReLU(0.2, inplace=True)\n", 113 | " )\n", 114 | " self.conv2 = nn.Sequential(\n", 115 | " nn.Conv3d(\n", 
116 | " in_channels=conv1_channels, out_channels=conv2_channels, kernel_size=4,\n", 117 | " stride=2, padding=1, bias=False\n", 118 | " ),\n", 119 | " nn.BatchNorm3d(conv2_channels),\n", 120 | " nn.LeakyReLU(0.2, inplace=True)\n", 121 | " )\n", 122 | " self.conv3 = nn.Sequential(\n", 123 | " nn.Conv3d(\n", 124 | " in_channels=conv2_channels, out_channels=conv3_channels, kernel_size=4,\n", 125 | " stride=2, padding=1, bias=False\n", 126 | " ),\n", 127 | " nn.BatchNorm3d(conv3_channels),\n", 128 | " nn.LeakyReLU(0.2, inplace=True)\n", 129 | " )\n", 130 | " self.conv4 = nn.Sequential(\n", 131 | " nn.Conv3d(\n", 132 | " in_channels=conv3_channels, out_channels=out_conv_channels, kernel_size=4,\n", 133 | " stride=2, padding=1, bias=False\n", 134 | " ),\n", 135 | " nn.BatchNorm3d(out_conv_channels),\n", 136 | " nn.LeakyReLU(0.2, inplace=True)\n", 137 | " )\n", 138 | " self.out = nn.Sequential(\n", 139 | " nn.Linear(out_conv_channels * self.out_dim * self.out_dim * self.out_dim, 1),\n", 140 | " nn.Sigmoid(),\n", 141 | " )\n", 142 | "\n", 143 | " def forward(self, x):\n", 144 | " x = self.conv1(x)\n", 145 | " x = self.conv2(x)\n", 146 | " x = self.conv3(x)\n", 147 | " x = self.conv4(x)\n", 148 | " # Flatten and apply linear + sigmoid\n", 149 | " x = x.view(-1, self.out_conv_channels * self.out_dim * self.out_dim * self.out_dim)\n", 150 | " x = self.out(x)\n", 151 | " return x\n" 152 | ], 153 | "execution_count": 0, 154 | "outputs": [] 155 | }, 156 | { 157 | "cell_type": "markdown", 158 | "metadata": { 159 | "id": "xDCCLjoaxW5B", 160 | "colab_type": "text" 161 | }, 162 | "source": [ 163 | "## Generator" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "metadata": { 169 | "id": "mwYhui5IxY6u", 170 | "colab_type": "code", 171 | "colab": {} 172 | }, 173 | "source": [ 174 | "class Generator(torch.nn.Module):\n", 175 | " def __init__(self, in_channels=512, out_dim=64, out_channels=1, noise_dim=200, activation=\"sigmoid\"):\n", 176 | " super(Generator, self).__init__()\n", 177 | " self.in_channels = in_channels\n", 178 | " self.out_dim = out_dim\n", 179 | " self.in_dim = int(out_dim / 16)\n", 180 | " conv1_out_channels = int(self.in_channels / 2.0)\n", 181 | " conv2_out_channels = int(conv1_out_channels / 2)\n", 182 | " conv3_out_channels = int(conv2_out_channels / 2)\n", 183 | "\n", 184 | " self.linear = torch.nn.Linear(noise_dim, in_channels * self.in_dim * self.in_dim * self.in_dim)\n", 185 | "\n", 186 | " self.conv1 = nn.Sequential(\n", 187 | " nn.ConvTranspose3d(\n", 188 | " in_channels=in_channels, out_channels=conv1_out_channels, kernel_size=(4, 4, 4),\n", 189 | " stride=2, padding=1, bias=False\n", 190 | " ),\n", 191 | " nn.BatchNorm3d(conv1_out_channels),\n", 192 | " nn.ReLU(inplace=True)\n", 193 | " )\n", 194 | " self.conv2 = nn.Sequential(\n", 195 | " nn.ConvTranspose3d(\n", 196 | " in_channels=conv1_out_channels, out_channels=conv2_out_channels, kernel_size=(4, 4, 4),\n", 197 | " stride=2, padding=1, bias=False\n", 198 | " ),\n", 199 | " nn.BatchNorm3d(conv2_out_channels),\n", 200 | " nn.ReLU(inplace=True)\n", 201 | " )\n", 202 | " self.conv3 = nn.Sequential(\n", 203 | " nn.ConvTranspose3d(\n", 204 | " in_channels=conv2_out_channels, out_channels=conv3_out_channels, kernel_size=(4, 4, 4),\n", 205 | " stride=2, padding=1, bias=False\n", 206 | " ),\n", 207 | " nn.BatchNorm3d(conv3_out_channels),\n", 208 | " nn.ReLU(inplace=True)\n", 209 | " )\n", 210 | " self.conv4 = nn.Sequential(\n", 211 | " nn.ConvTranspose3d(\n", 212 | " in_channels=conv3_out_channels, out_channels=out_channels, 
kernel_size=(4, 4, 4),\n", 213 | " stride=2, padding=1, bias=False\n", 214 | " )\n", 215 | " )\n", 216 | " if activation == \"sigmoid\":\n", 217 | " self.out = torch.nn.Sigmoid()\n", 218 | " else:\n", 219 | " self.out = torch.nn.Tanh()\n", 220 | "\n", 221 | " def project(self, x):\n", 222 | " \"\"\"\n", 223 | " projects and reshapes latent vector to starting volume\n", 224 | " :param x: latent vector\n", 225 | " :return: starting volume\n", 226 | " \"\"\"\n", 227 | " return x.view(-1, self.in_channels, self.in_dim, self.in_dim, self.in_dim)\n", 228 | "\n", 229 | " def forward(self, x):\n", 230 | " x = self.linear(x)\n", 231 | " x = self.project(x)\n", 232 | " x = self.conv1(x)\n", 233 | " x = self.conv2(x)\n", 234 | " x = self.conv3(x)\n", 235 | " x = self.conv4(x)\n", 236 | " return self.out(x)\n" 237 | ], 238 | "execution_count": 0, 239 | "outputs": [] 240 | }, 241 | { 242 | "cell_type": "markdown", 243 | "metadata": { 244 | "id": "i2GyllAQxc-R", 245 | "colab_type": "text" 246 | }, 247 | "source": [ 248 | "## Test" 249 | ] 250 | }, 251 | { 252 | "cell_type": "code", 253 | "metadata": { 254 | "id": "OmJJv8VwxfCC", 255 | "colab_type": "code", 256 | "colab": { 257 | "base_uri": "https://localhost:8080/", 258 | "height": 1000 259 | }, 260 | "outputId": "22d77bc2-ea30-4298-d2f2-b2ef203494ba" 261 | }, 262 | "source": [ 263 | "def test_gan3d(print_summary=True):\n", 264 | " noise_dim = 200 # latent space vector dim\n", 265 | " in_channels = 512 # convolutional channels\n", 266 | " dim = 64 # cube volume\n", 267 | " model_generator = Generator(in_channels=512, out_dim=dim, out_channels=1, noise_dim=noise_dim)\n", 268 | " noise = torch.rand(1, noise_dim)\n", 269 | " generated_volume = model_generator(noise)\n", 270 | " print(\"Generator output shape\", generated_volume.shape)\n", 271 | " model_discriminator = Discriminator(in_channels=1, dim=dim, out_conv_channels=in_channels)\n", 272 | " out = model_discriminator(generated_volume)\n", 273 | " print(\"Discriminator output\", out.item())\n", 274 | " if print_summary:\n", 275 | " print(\"\\n\\nGenerator summary\\n\\n\")\n", 276 | " summary(model_generator, (1, noise_dim))\n", 277 | " print(\"\\n\\nDiscriminator summary\\n\\n\")\n", 278 | " summary(model_discriminator, (1,dim,dim,dim))\n", 279 | "\n", 280 | "test_gan3d()" 281 | ], 282 | "execution_count": 12, 283 | "outputs": [ 284 | { 285 | "output_type": "stream", 286 | "text": [ 287 | "Generator output shape torch.Size([1, 1, 64, 64, 64])\n", 288 | "Discriminator output 0.47117894887924194\n", 289 | "\n", 290 | "\n", 291 | "Generator summary\n", 292 | "\n", 293 | "\n", 294 | "----------------------------------------------------------------\n", 295 | " Layer (type) Output Shape Param #\n", 296 | "================================================================\n", 297 | " Linear-1 [-1, 1, 32768] 6,586,368\n", 298 | " ConvTranspose3d-2 [-1, 256, 8, 8, 8] 8,388,608\n", 299 | " BatchNorm3d-3 [-1, 256, 8, 8, 8] 512\n", 300 | " ReLU-4 [-1, 256, 8, 8, 8] 0\n", 301 | " ConvTranspose3d-5 [-1, 128, 16, 16, 16] 2,097,152\n", 302 | " BatchNorm3d-6 [-1, 128, 16, 16, 16] 256\n", 303 | " ReLU-7 [-1, 128, 16, 16, 16] 0\n", 304 | " ConvTranspose3d-8 [-1, 64, 32, 32, 32] 524,288\n", 305 | " BatchNorm3d-9 [-1, 64, 32, 32, 32] 128\n", 306 | " ReLU-10 [-1, 64, 32, 32, 32] 0\n", 307 | " ConvTranspose3d-11 [-1, 1, 64, 64, 64] 4,096\n", 308 | " Sigmoid-12 [-1, 1, 64, 64, 64] 0\n", 309 | "================================================================\n", 310 | "Total params: 17,601,408\n", 311 | "Trainable params: 
17,601,408\n", 312 | "Non-trainable params: 0\n", 313 | "----------------------------------------------------------------\n", 314 | "Input size (MB): 0.00\n", 315 | "Forward/backward pass size (MB): 67.25\n", 316 | "Params size (MB): 67.14\n", 317 | "Estimated Total Size (MB): 134.39\n", 318 | "----------------------------------------------------------------\n", 319 | "\n", 320 | "\n", 321 | "Discriminator summary\n", 322 | "\n", 323 | "\n", 324 | "----------------------------------------------------------------\n", 325 | " Layer (type) Output Shape Param #\n", 326 | "================================================================\n", 327 | " Conv3d-1 [-1, 64, 32, 32, 32] 4,096\n", 328 | " BatchNorm3d-2 [-1, 64, 32, 32, 32] 128\n", 329 | " LeakyReLU-3 [-1, 64, 32, 32, 32] 0\n", 330 | " Conv3d-4 [-1, 128, 16, 16, 16] 524,288\n", 331 | " BatchNorm3d-5 [-1, 128, 16, 16, 16] 256\n", 332 | " LeakyReLU-6 [-1, 128, 16, 16, 16] 0\n", 333 | " Conv3d-7 [-1, 256, 8, 8, 8] 2,097,152\n", 334 | " BatchNorm3d-8 [-1, 256, 8, 8, 8] 512\n", 335 | " LeakyReLU-9 [-1, 256, 8, 8, 8] 0\n", 336 | " Conv3d-10 [-1, 512, 4, 4, 4] 8,388,608\n", 337 | " BatchNorm3d-11 [-1, 512, 4, 4, 4] 1,024\n", 338 | " LeakyReLU-12 [-1, 512, 4, 4, 4] 0\n", 339 | " Linear-13 [-1, 1] 32,769\n", 340 | " Sigmoid-14 [-1, 1] 0\n", 341 | "================================================================\n", 342 | "Total params: 11,048,833\n", 343 | "Trainable params: 11,048,833\n", 344 | "Non-trainable params: 0\n", 345 | "----------------------------------------------------------------\n", 346 | "Input size (MB): 1.00\n", 347 | "Forward/backward pass size (MB): 63.75\n", 348 | "Params size (MB): 42.15\n", 349 | "Estimated Total Size (MB): 106.90\n", 350 | "----------------------------------------------------------------\n" 351 | ], 352 | "name": "stdout" 353 | } 354 | ] 355 | } 356 | ] 357 | } -------------------------------------------------------------------------------- /Colaboratory'ye_Hoş_Geldiniz.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "Colaboratory'ye Hoş Geldiniz", 7 | "provenance": [], 8 | "collapsed_sections": [], 9 | "toc_visible": true, 10 | "include_colab_link": true 11 | }, 12 | "kernelspec": { 13 | "display_name": "Python 3", 14 | "name": "python3" 15 | } 16 | }, 17 | "cells": [ 18 | { 19 | "cell_type": "markdown", 20 | "metadata": { 21 | "id": "view-in-github", 22 | "colab_type": "text" 23 | }, 24 | "source": [ 25 | "\"Open" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": { 31 | "id": "5fCEDCU_qrC0" 32 | }, 33 | "source": [ 34 | "
"Colaboratory logosu\n", 35 | "\n", 36 | "# Colaboratory nedir?
\n", 37 | "\n", 38 | "Colaboratory (ya da kısaca \"Colab\"), tarayıcınızda Python'u yazmanızı ve çalıştırmanızı sağlar. Üstelik: \n", 39 | "- Hiç yapılandırma gerektirmez\n", 40 | "- GPU'lara ücretsiz erişim imkanı sunar\n", 41 | "- Kolay paylaşım imkanı sunar\n", 42 | "\n", 43 | "İster öğrenci ister veri bilimci ister yapay zeka araştırmacısı olun, Colab işinizi kolaylaştırabilir. Daha fazla bilgi edinmek için Colab'e Giriş videosunu izleyebilir ya da aşağıdan hemen kullanmaya başlayabilirsiniz." 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": { 49 | "id": "GJBs_flRovLc" 50 | }, 51 | "source": [ 52 | "## Başlarken\n", 53 | "\n", 54 | "Okuduğunuz doküman statik bir web sayfası değil, kod yazmanıza ve yürütmenize imkan veren Colab not defteri adında etkileşimli bir ortamdır.\n", 55 | "\n", 56 | "Örneğin, buradaki kod hücresinde, bir değeri hesaplayan, bir değişken içinde saklayan ve sonucu yazdıran kısa bir Python dizesi görebilirsiniz:" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "metadata": { 62 | "colab": { 63 | "base_uri": "https://localhost:8080/", 64 | "height": 34 65 | }, 66 | "id": "gJr_9dXGpJ05", 67 | "outputId": "9f556d03-ec67-4950-a485-cfdba9ddd14d" 68 | }, 69 | "source": [ 70 | "seconds_in_a_day = 24 * 60 * 60\n", 71 | "seconds_in_a_day" 72 | ], 73 | "execution_count": null, 74 | "outputs": [ 75 | { 76 | "output_type": "execute_result", 77 | "data": { 78 | "text/plain": [ 79 | "86400" 80 | ] 81 | }, 82 | "metadata": { 83 | "tags": [] 84 | }, 85 | "execution_count": 0 86 | } 87 | ] 88 | }, 89 | { 90 | "cell_type": "markdown", 91 | "metadata": { 92 | "id": "2fhs6GZ4qFMx" 93 | }, 94 | "source": [ 95 | "Yukarıdaki hücrede kodu yürütmek için tıklayarak seçin, ardından ya kodun sol tarafındaki oynat düğmesine basın ya da \"Command/Ctrl+Enter\" klavye kısayolunu kullanın. Kodu düzenlemek için hücreyi tıklamanız yeterlidir. Sonrasında düzenlemeye başlayabilirsiniz.\n", 96 | "\n", 97 | "Bir hücrede tanımladığınız değişkenler daha sonra başka hücrelerde kullanılabilir:" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "metadata": { 103 | "colab": { 104 | "base_uri": "https://localhost:8080/", 105 | "height": 34 106 | }, 107 | "id": "-gE-Ez1qtyIA", 108 | "outputId": "94cb2224-0edf-457b-90b5-0ac3488d8a97" 109 | }, 110 | "source": [ 111 | "seconds_in_a_week = 7 * seconds_in_a_day\n", 112 | "seconds_in_a_week" 113 | ], 114 | "execution_count": null, 115 | "outputs": [ 116 | { 117 | "output_type": "execute_result", 118 | "data": { 119 | "text/plain": [ 120 | "604800" 121 | ] 122 | }, 123 | "metadata": { 124 | "tags": [] 125 | }, 126 | "execution_count": 0 127 | } 128 | ] 129 | }, 130 | { 131 | "cell_type": "markdown", 132 | "metadata": { 133 | "id": "lSrWNr3MuFUS" 134 | }, 135 | "source": [ 136 | "Colab not defterleri; yürütülebilir kod, zengin metin, resimler, HTML, LaTeX ve diğer öğeleri tek bir dokümanda birleştirmenizi sağlar. Oluşturduğunuz Colab not defterleri Google Drive hesabınızda saklanır. Colab not defterlerinizi arkadaşlarınızla veya iş arkadaşlarınızla kolayca paylaşabilir, not defterlerinize yorum yapmalarını, hatta düzenlemelerini sağlayabilirsiniz. Daha fazla bilgiyi Colab'e Genel Bakış bölümünde bulabilirsiniz. Yeni bir Colab not defteri oluşturmak için yukarıdaki Dosya menüsünü ya da yeni bir Colab not defteri oluşturma bağlantısını kullanabilirsiniz.\n", 137 | "\n", 138 | "Colab not defterleri, Colab tarafından barındırılan Jupyter not defterleridir. Jupyter projesi hakkında daha fazla bilgiyi jupyter.org adresinde bulabilirsiniz." 
139 | ] 140 | }, 141 | { 142 | "cell_type": "markdown", 143 | "metadata": { 144 | "id": "UdRyKR44dcNI" 145 | }, 146 | "source": [ 147 | "## Veri bilimi\n", 148 | "\n", 149 | "Colab ile popüler Python kitaplıklarının tüm avantajlarından yararlanarak veri analiz edip görselleştirebilirsiniz. Aşağıdaki kod hücresi rastgele veri oluşturmak için numpy'yi, bu veriyi görselleştirmek için de matplotlib'i kullanır. Kodu düzenlemek için hücreyi tıklamanız yeterlidir. Sonrasında düzenlemeye başlayabilirsiniz." 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "metadata": { 155 | "colab": { 156 | "base_uri": "https://localhost:8080/", 157 | "height": 281 158 | }, 159 | "id": "C4HZx7Gndbrh", 160 | "outputId": "46abc637-6abd-41b2-9bba-80a7ae992e06" 161 | }, 162 | "source": [ 163 | "import numpy as np\n", 164 | "from matplotlib import pyplot as plt\n", 165 | "\n", 166 | "ys = 200 + np.random.randn(100)\n", 167 | "x = [x for x in range(len(ys))]\n", 168 | "\n", 169 | "plt.plot(x, ys, '-')\n", 170 | "plt.fill_between(x, ys, 195, where=(ys > 195), facecolor='g', alpha=0.6)\n", 171 | "\n", 172 | "plt.title(\"Sample Visualization\")\n", 173 | "plt.show()" 174 | ], 175 | "execution_count": null, 176 | "outputs": [ 177 | { 178 | "output_type": "display_data", 179 | "data": { 180 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAEICAYAAABRSj9aAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXe4JOdd5/v9VejuEydogkbBki1L\nloUlW7IALWYXgw2XLFgvcAnGpDULvg/2xXjx8rCENXgNlzXBrGG9zlg4YBks27JXsiyhHGYUZjQa\nTdDkmZP7dK5c7/3jrbdSV3VX9+kzJ8z7eZ55pk+f6urqPlW/+r3fXyLGGCQSiUSyeVHW+gAkEolE\nsrpIQy+RSCSbHGnoJRKJZJMjDb1EIpFscqShl0gkkk2ONPQSiUSyyZGGXrLhIKI/IqLPrNK+/56I\n/utq7Dv2Hg8Q0a8Fj3+eiO5Zhff4PSL66Kj3K9mYSEMvKQwRfTcRPUpEdSKqEtEjRPTta31cRSGi\nbxDRf8t4/nYimiUijTH2nxhj77tQx8QYu4Mx9gMr2QcRvZGIzqb2+37G2K+t7OgkmwVp6CWFIKJp\nAF8F8CEA2wFcDuCPAVhreVwD8ikAv0BElHr+rQDuYIy5a3BMEsmqIw29pCjXAQBj7LOMMY8xZjDG\n7mGM7QcAIrqGiL5FREtEtEhEdxDRVvFiIjpJRO8hov1E1CaijxHRbiL6OhE1ieibRLQt2PZqImJE\n9HYiOk9EM0T0O3kHRkS3BSuNGhE9R0RvzNn0XwBcAuDfxl67DcCPAvh08PMniehPgsc7iOirwX6r\nRPQQESnB7xgRvTK2n/jrtgWvWyCi5eDxFTnH/ktE9HDw+D8TUSv2zyGiTwa/+2UiOhR8V8eJ6NeD\n5ycAfB3AZbHXXZaWt4jox4noYPBZHiCiV6f+Nr8T/G3qRPR5Iqrkfd+SjYc09JKiHAHgEdGniOiH\nhFGOQQD+O4DLALwawJUA/ii1zVsAfD/4TePHwA3U7wHYCX4u/lZq++8FcC2AHwDwu0T05vRBEdHl\nAL4G4E/AVxq/A+BOItqZ3pYxZgD4AoBfjD390wBeZIw9l/GZ3w3gbHB8u4NjLdIzRAHwCQBXAXgZ\nAAPA3/Z7EWPszxljk4yxSfDvcAHA54Nfz4PfkKYB/DKAvySiWxhjbQA/BOC8eC1j7Hx8v0R0HYDP\nAnhX8FnuBvAVIirFNvtpAD8I4OUAbgLwSwU+p2SDIA29pBCMsQaA7wY3dP8bwAIR3UVEu4PfH2OM\n3csYsxhjCwA+COB7Urv5EGNsjjF2DsBDAJ5gjD3DGDMB/DOAm1Pb/zFjrM0YOwBuOH8249B+AcDd\njLG7GWM+Y+xeAHsB/HDOR/kUgP8Q81h/MXguCwfAHgBXMcYcxthDrEBzKMbYEmPsTsZYhzHWBPCn\n6P4uciGiMfDVx18zxr4e7PNrjLGXGOdfAdyD2MqkDz8D4GvB38cB8BcAxgB8V2ybv2GMnWeMVQF8\nBcDrih6vZP0jDb2kMIyxQ4yxX2KMXQHgNeDe+18BQCDDfI6IzhFRA8BnAOxI7WIu9tjI+Hkytf2Z\n2ONTwfuluQrATwWSRI2IauA3pD05n+FhAIsAfoKIrgHwHQD+Mecj/38AjgG4J5BL3puzXQIiGiei\n/0VEp4Lv4kEAW4lILfJ6AB8DcJgx9mexff4QET0eSEg18BtZ+vvN4zLw7w8AwBjzwb/by2PbzMYe\nd9D9t5BsYKShlwwFY+xFAJ8EN/gA8H5wb/9Gxtg0uKedDnoOypWxxy8DcD5jmzMA/oExtjX2b4Ix\n9oEe+/00uCf/CwD+D2NsLmsjxliTMfZuxtgrAPw4gN8mojcFv+4AGI9tfmns8bsBvArAdwbfxb8L\nnu/7fQQ3k+sA/GrsuTKAO8E98d2Msa3g8ovYX79VxnnwG6LYH4F/t+f6HY9kcyANvaQQRHQ9Eb1b\nBBWJ6EpwKeXxYJMpAC0A9UA3f88I3va/Bt7xt4Hr0p/P2OYzAH6MiP4vIlKJqEI83TAz+BnwaQBv\nBvAfkS/bgIh+lIheGRjGOgAPgB/8+lkAPxe85w8iKc1Mga9QakS0HcAfFvmwRPRD4HGKnwziCYIS\ngDK4Zu8G28VTMucAXEJEW3J2/QUAP0JEbyIiHfxGZAF4tMhxSTY+0tBLitIE8J0AniCiN
riBfx7c\naAA81fIWcIP4NQBfGsF7/iu4dHIfgL9gjHUVFjHGzgC4HTxQugDu4b8HPc5txthJcCM3AeCuHu9/\nLYBvgt/AHgPwYcbY/cHv3gkeUK4B+HlwTV3wV+Aa+CL49/SN3h8z5GfAg6WHYhk0fx/o/L8FbrCX\nAfxc/LiD1dVnARwP5KuExMUYOwy+evlQcEw/BuDHGGN2weOSbHBIDh6RrDeI6GoAJwDoMrddIlk5\n0qOXSCSSTU5fQ09EVxLR/UT0QlBw8c7g+e1EdC8RHQ3+F8UutweFF88S0V4i+u7V/hASiUQiyaev\ndENEewDsYYw9TURTAPYB+AnwgooqY+wDQabANsbY7xLRJIA2Y4wR0U0AvsAYu351P4ZEIpFI8ujr\n0TPGZhhjTwePmwAOgeff3o4oY+FT4MYfjLFWrKhkAsUqCSUSiUSySmiDbBwEyW4G8AR4Pu9M8KtZ\n8BJxsd1PgpfD7wLwIzn7ejuAtwPAxMTE66+/Xjr9EolEMgj79u1bZIx1tftIUzjrJpBk/hXAnzLG\nvkREtaBwQ/x+mTG2LfWafwfgDxhjXT1K4tx6661s7969hY5DIpFIJBwi2scYu7XfdoWyboIiizvB\nW7mK/Oi5QL8XOv58+nWMsQcBvIKIipZqSyQSiWTEFMm6IfDeG4cYYx+M/eouAG8LHr8NwJeD7UUl\nIYjoFvCKvqVRHrREIpFIilNEo38D+GCGA0T0bPDc7wH4AIAvENGvgjdM+ungd28B8ItE5ICXgf9M\nkY5/EolEIlkd+hr6oNtfXjOmN6WfCDru/VnGthKJRCJZA2RlrEQikWxypKGXSCSSTY409BKJRLLJ\nkYZ+SBaaFu4+MNN/Q4lEIlljpKEfki/uO4vfvONpzNbNtT4UiUQi6Yk09EPSNB0AwIFz9TU+EolE\nIumNNPRD0rE9AMDz0tBLJJJ1jjT0Q9Kx+eAj6dFLJJL1jjT0Q9IOPPr9Z5fX+EgkEomkN9LQD4kR\nGPrFloP5hgzISiSS9Ys09EPStlwoCjf2Ur6RSCTrGWnoh6RlOdgyuQyASUMvkUjWNdLQD0nHdlHW\nLUyOd6Shl0gk6xpp6IekbbtQVRdTE8syICuRSNY10tAPScf2oaoepidqWGg6mG/KgKxEIlmfSEM/\nJKbtQVVdTE/WAMjCKcn6Zq5h4j/9w160LHetD0WyBkhDPwS268P1AU3xMD1RB8Bw4GxjrQ9LIsnl\n8eNL+MbBORyelefpxUiRmbFXEtH9RPQCER0koncGz28nonuJ6Gjw/7bg+Z8nov1EdICIHiWi1672\nh7jQiKpYVXWhaR4mxzo4cK62xkclkeSz1LIBAG3LW+Mj2Ry0N9jKqIhH7wJ4N2PsBgC3AXgHEd0A\n4L0A7mOMXQvgvuBnADgB4HsYYzcCeB+Aj4z+sNcWURWrqvx/GZCVXAgOnq/j0ZcWh3pttc0NvejR\nJBmeA2fruOmP78Hppc5aH0ph+hp6xtgMY+zp4HETwCEAlwO4HcCngs0+BeAngm0eZYwJq/c4gCtG\nfdBrjSE8eoX/Pz1Zx3zTCS8mScRy28acrBweCX9z31H8wZefH+q1S8G5aTgbyxNdj5yuduD5DOfr\nxlofSmEG0uiJ6GoANwN4AsBuxpiYvDELYHfGS34VwNdz9vV2ItpLRHsXFhYGOYw1Ryx/NZVfNKUS\nN2TLHWno07zvay/gNz6zb60PY1PQMNywPfagVNsWAOnRjwLD4d+h6Wyc77KwoSeiSQB3AngXYywR\n0WGMMQAstf33ghv6383aH2PsI4yxWxljt+7cuXPgA19L2qFGnzT4G023uxAstWzMydTTkdAw7aEN\ndejRS0O/YsSKftMZeiLSwY38HYyxLwVPzxHRnuD3ewDMx7a/CcBHAdzOGFsa7SGvPeJiEdKNMPQy\nda0b0/HC4LVkZTRNB6bjD/XaxRa/2UqPvhvH8wdy0sR3OOzfYi0oknVDAD4G4BBj7IOxX90F4G3B\n47cB+HKw/csAfAnAWxljR0Z7uOuDdDBWDT16eRGlMRwXpr1xLoj1TMty4XgMrjf49ymDsfl86L6j\n+MkPP1J4eyHdGBvIo9cKbPMGAG8FcICIng2e+z0AHwDwBSL6VQCnAPx08Ls/AHAJgA/zewRcxtit\nIz3qNaZjRemVQNyjH04/3QxYrgeFCLqa9B1Mx4Ph+PB9BkWhNTq6zUErcCQ6jodptXh4zfV8NIzA\nOMnVVRcnlzoDZdCIFf1GksH6GnrG2MMA8q7QN2Vs/2sAfm2Fx7WuEV6RltLoWxexR/8rn3gKr9w1\niT++/TWJ54WOaboexktF/ApJFrbrw3Z5GMywPUxX9MKvXe5EDoj06Ltpmg5M14fnM6gFnBEjdk5v\nFGRl7BB0UumVqgzG4sRSC2eWu9PNTJfLDNLArIz4uTXodxlP++1sILnhQlE3+I2waCwp1Og30Dkt\nDf0QtG0PCvlQFO5hqYoHgF3Uhr5puplLWTsw9BtpmbseaSUM/WDn2VKQWglEsqMkomEOFr8Q57Jw\nYjYC0tAPgWF70LToj0wE6Kp/0WbdMMbQtrzMYhyRmdCW2vCKaJor9+h1bfj0zM1MI/Doi16/YTB2\nA32X0tAPQdtyQ31eoGnumnj0z5+r41svzl3w941juT581p2FwBgLPfqVGpizyx28/n334vhCa0X7\n2ai0RiDdVModecPNoGkGQe6CMbbOZs2jlyTp2F6ozws01V2T9MoPP3AMf3jXwZ7bPPbSEn78bx9e\ntRNTeJvp/bs+gx+U0a3U+zmx2MZS28ax+YvV0EcB1UEzZ0RDs7GyIWsaUrieD2PAVaeQvzZSeqU0\n9EPQsV0oSjKVUlGcnks/y/Xwu1/cP/K+L8ttB60+ZfFPn17G/rP1Ves5I1YyaUMf/3mlHr24UVys\n8thKpZuS7kLTHGnoU8S/16Ir8o4jPfqLgrbtQkl59Kra29AfnWvh83vPDN19MI/ljtX3wl8Olu7x\nNLtR0so19FEcY6UGRnhP69XQO56Pzz15Gp7P+m88BCuVbkq6DVXxNpSufCFIGPqC382mrIyVdNOy\n3DB3XqCqLlpmflMzcUKNOtd+uWPDcllPAyMM/Go1XRNGyEplIVju6Dx68fr4hbmeeOTYIt77pQN4\n/PjqdPxoxT73oMZ6qW1B10yoqhfKFBJOI7YaLpqRJJyOjbQ6koZ+CDqWG7Y/EGiq29PbFF0HWyM2\nVEVygGuBga+tlqEPPpPlMPD+dpykR7+5pZuFJk9hPJdRSzAK+Ofm3+2g3+Viy4SuWVAVF3Yfp+Bi\nI27oi55boqWHlG42OR3bzQ7G9rgAxUk0yjYJpuPBdKJqyTyqgYGvtldHuhFBLAbAjvVhiV8IKy29\nD6WbderRi8yW1epR3jRdlDQPquKHGnFRlto2SroVOicbKYi42jSMwSQxxljowAz6d1hLpKEfgo7t\ndXn0ap+sG2HoR5mZI/J/gd76ouhFvloefVxO
[... base64-encoded PNG image data (output of the preceding code cell) omitted ...]\n", 181 | "text/plain": [ 182 | "
" 183 | ] 184 | } 185 | }, 186 | "metadata": { 187 | "tags": [] 188 | } 189 | } 190 | ] 191 | }, 192 | { 193 | "cell_type": "markdown", 194 | "metadata": { 195 | "id": "4_kCnsPUqS6o" 196 | }, 197 | "source": [ 198 | "You can import your own data into Colab notebooks from your Google Drive account (including from spreadsheets), from GitHub, and from many other sources. To learn more about importing data, and how Colab can be used for data science, see the links below under Working with Data." 199 | ] 200 | }, 201 | { 202 | "cell_type": "markdown", 203 | "metadata": { 204 | "id": "OwuxHmxllTwN" 205 | }, 206 | "source": [ 207 | "## Machine learning\n", 208 | "\n", 209 | "With Colab you can import an image dataset, train an image classifier on it, and evaluate the model, all in just a few lines of code. Colab notebooks execute code on Google's cloud servers, meaning you can leverage the power of Google hardware, including GPUs and TPUs, regardless of the power of your machine. All you need is a browser." 210 | ] 211 | }, 212 | { 213 | "cell_type": "markdown", 214 | "metadata": { 215 | "id": "ufxBm1yRnruN" 216 | }, 217 | "source": [ 218 | "Colab is used extensively in the machine learning community, with applications including:\n", 219 | "- Getting started with TensorFlow\n", 220 | "- Developing and training neural networks\n", 221 | "- Experimenting with TPUs\n", 222 | "- Disseminating AI research\n", 223 | "- Creating tutorials\n", 224 | "\n", 225 | "To see sample Colab notebooks that demonstrate machine learning applications, see the machine learning examples below." 226 | ] 227 | }, 228 | { 229 | "cell_type": "markdown", 230 | "metadata": { 231 | "id": "-Rh3-Vt9Nev9" 232 | }, 233 | "source": [ 234 | "## More Resources\n", 235 | "\n", 236 | "### Working with Notebooks in Colab\n", 237 | "- [Overview of Colaboratory](/notebooks/basic_features_overview.ipynb)\n", 238 | "- [Guide to Markdown](/notebooks/markdown_guide.ipynb)\n", 239 | "- [Importing libraries and installing dependencies](/notebooks/snippets/importing_libraries.ipynb)\n", 240 | "- [Saving and loading notebooks in GitHub](https://colab.research.google.com/github/googlecolab/colabtools/blob/master/notebooks/colab-github-demo.ipynb)\n", 241 | "- [Interactive forms](/notebooks/forms.ipynb)\n", 242 | "- [Interactive widgets](/notebooks/widgets.ipynb)\n", 243 | "- \"New\"\n", 244 | " [TensorFlow 2 in Colab](/notebooks/tensorflow_version.ipynb)\n", 245 | "\n", 246 | "### Working with Data\n", 247 | "- [Loading data: Drive, Sheets, and Google Cloud Storage](/notebooks/io.ipynb) \n", 248 | "- [Charts: visualizing data](/notebooks/charts.ipynb)\n", 249 | "- [Getting started with BigQuery](/notebooks/bigquery.ipynb)\n", 250 | "\n", 251 | "### Machine Learning Crash Course\n", 252 | "These are a few of the notebooks from Google's online Machine Learning course. 
See the full course website for more.\n", 253 | "- [Intro to Pandas](/notebooks/mlcc/intro_to_pandas.ipynb)\n", 254 | "- [TensorFlow concepts](/notebooks/mlcc/tensorflow_programming_concepts.ipynb)\n", 255 | "- [First steps with TensorFlow](/notebooks/mlcc/first_steps_with_tensor_flow.ipynb)\n", 256 | "- [Intro to neural networks](/notebooks/mlcc/intro_to_neural_nets.ipynb)\n", 257 | "- [Intro to sparse data and embeddings](/notebooks/mlcc/intro_to_sparse_data_and_embeddings.ipynb)\n", 258 | "\n", 259 | "\n", 260 | "### Using Accelerated Hardware\n", 261 | "- [TensorFlow with GPUs](/notebooks/gpu.ipynb)\n", 262 | "- [TensorFlow with TPUs](/notebooks/tpu.ipynb)" 263 | ] 264 | }, 265 | { 266 | "cell_type": "markdown", 267 | "metadata": { 268 | "id": "P-H6Lw1vyNNd" 269 | }, 270 | "source": [ 271 | "\n", 272 | "\n", 273 | "## Machine Learning Examples\n", 274 | "\n", 275 | "To see end-to-end examples of the interactive machine learning analyses that Colaboratory makes possible, take a look at these tutorials using models from TensorFlow Hub.\n", 276 | "\n", 277 | "A few featured examples:\n", 278 | "\n", 279 | "- Retraining an Image Classifier: Build a Keras model on top of a pre-trained image classifier to distinguish flowers.\n", 280 | "- Text Classification: Classify IMDB movie reviews as either positive or negative.\n", 281 | "- Style Transfer: Use deep learning to transfer style between images.\n", 282 | "- Multilingual Universal Sentence Encoder Q&A: Use a machine learning model to answer questions from the SQuAD dataset.\n", 283 | "- Video Interpolation: Predict what happened in a video between the first and the last frame.\n" 284 | ] 285 | } 286 | ] 287 | }