├── simple_deep_learning ├── __init__.py ├── mnist_extended │ ├── __init__.py │ ├── mnist.py │ ├── array_overlay.py │ ├── object_detection.py │ └── semantic_segmentation.py └── bounding_box.py ├── requirements.txt ├── resources └── featured_image.png ├── setup.py ├── unet_segmentation ├── utils.py ├── iou_metric.py ├── config.py ├── dataset_generation.py ├── unet_model.ipynb └── dataset_generation.ipynb ├── .gitignore ├── README.md └── mnist_extended.ipynb /simple_deep_learning/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /simple_deep_learning/mnist_extended/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy==1.25.0 2 | tensorflow==2.16.1 3 | matplotlib==3.8.4 4 | Pillow==10.3.0 5 | -------------------------------------------------------------------------------- /resources/featured_image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kvnptl/simple-deep-learning/main/resources/featured_image.png -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | setuptools.setup( 4 | name='simple_deep_learning', 5 | packages=setuptools.find_packages(exclude=['*.tests', '*.tests.*', 'tests.*', 'tests']), 6 | ) -------------------------------------------------------------------------------- /unet_segmentation/utils.py: -------------------------------------------------------------------------------- 1 | import random 2 | import numpy as np 3 | import torch 4 | 5 | 6 | def set_seed(seed=8): 7 | random.seed(seed) 8 | np.random.seed(seed) 9 | torch.manual_seed(seed) 10 | torch.cuda.manual_seed(seed) -------------------------------------------------------------------------------- /unet_segmentation/iou_metric.py: -------------------------------------------------------------------------------- 1 | 2 | from torch import randint, tensor 3 | import torchmetrics 4 | 5 | target = randint(0, 2, (10, 25, 25)) 6 | pred = tensor(target) 7 | 8 | pred[2:5, 7:13, 9:15] = 1 - pred[2:5, 7:13, 9:15] 9 | 10 | jaccard = torchmetrics.JaccardIndex(task="multiclass", num_classes=2) 11 | ans = jaccard(pred, target) 12 | print(ans) -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Pycache and compiled python files 2 | __pycache__/ 3 | *py[cod] 4 | 5 | # Jupyter notebook checkpoints. 
6 | .ipynb_checkpoints/ 7 | 8 | # Egg info files produced by pip installation 9 | *egg-info/ 10 | 11 | # Data directory 12 | /data/ 13 | /notebooks/data/ 14 | 15 | models/ 16 | 17 | # vscode configuration 18 | .vscode 19 | 20 | .mypy_cache 21 | 22 | # Directory with old files 23 | .archive 24 | 25 | 26 | -------------------------------------------------------------------------------- /unet_segmentation/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import datetime 3 | 4 | DATASET_NAME = "mnist_extended" 5 | PARENT_DIR = os.path.dirname(__file__) 6 | NUM_WORKERS = os.cpu_count() 7 | BATCH_SIZE = 8 8 | PIN_MEMORY = True 9 | SEED = 8 10 | NUM_CLASSES = 5 11 | 12 | TOTAL_SAMPLES = 1000 13 | TRAIN_VAL_SPLIT = 0.8 14 | 15 | LR_RATE = 0.0001 16 | EPOCHS = 50 17 | 18 | MODEL_LOG = "unet_test" 19 | 20 | LOAD_MODEL = False 21 | MODEL_PATH = "model.pth" 22 | 23 | TIMESTAMP = datetime.now().strftime("%Y-%m-%d_%H-%M") 24 | 25 | # ImageNet mean and standard deviation 26 | MEAN = [0.485, 0.456, 0.406] 27 | STD = [0.229, 0.224, 0.225] 28 | 29 | CLASS_NAMES = ['one', 'two', 'three', 'four', 'five'] -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Simple deep learning 2 | 3 | ![png](./resources/featured_image.png) 4 | 5 | The field of deep learning is vast. The sheer number of publications on the subject is enough to overwhelm anyone. In the series "Simple deep learning" we'll be taking a step back. We'll forget about the latest tips and tricks that are pushing the state of the art. Instead, through the use of simple datasets and toy problems, we'll explore the basics of deep learning to give you a better understanding of the big picture. 6 | 7 | This series currently contains the following posts: 8 | - [MNIST extended: a simple dataset for semantic segmentation and object detection](./mnist_extended.ipynb) 9 | - [A simple example of semantic segmentation with tensorflow keras](./semantic_segmentation.ipynb) 10 | 11 | ## Installation 12 | These posts use the simple_deep_learning package which contains helper functions. If you're running the notebooks from within this repository, there's no need to install the package. In case you want to have access to the helper functions from another directory, you can install the package by cloning the repository and running: 13 | 14 | ``` 15 | pip install -e /path/to/simple-deep-learning 16 | ``` 17 | This will give you access to all the helper functions from anywhere on your computer. 18 | 19 | If you only need the dependencies, from within the repository run: 20 | ``` 21 | pip install -r requirements.txt 22 | ``` 23 | 24 | 25 | -------------------------------------------------------------------------------- /simple_deep_learning/mnist_extended/mnist.py: -------------------------------------------------------------------------------- 1 | """This module contains functions to download and process the mnist dataset. 2 | """ 3 | from typing import Tuple 4 | 5 | import numpy as np 6 | from matplotlib import pyplot as plt 7 | import tensorflow as tf 8 | 9 | plt.rcParams['figure.facecolor'] = 'white' 10 | 11 | 12 | def download_mnist(): 13 | """Wrapper around keras mnist download function. 14 | This function uses the keras function.
The original data can be found at: 15 | http://yann.lecun.com/exdb/mnist/ 16 | """ 17 | 18 | (train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data() 19 | 20 | return (train_images, train_labels), (test_images, test_labels) 21 | 22 | 23 | def preprocess_mnist(images: np.ndarray, labels: np.ndarray, proportion: float, 24 | num_classes: int, normalise: bool = True) -> Tuple[np.ndarray, np.ndarray]: 25 | """Perform simple preprocessing steps on the mnist data. 26 | 27 | Parameters: 28 | images: MNIST images (num_images, image_height, image_width). 29 | labels: MNIST labels (num_images,) (must be the same length as images). 30 | proportion: The proportion of the total dataset (60,000 images) to use. 31 | num_classes: Integer between 1 and 10. Only select images/labels between 0 and num_classes-1. 32 | normalise: If True, normalise the data between 0-1, else leave between 0-255. 33 | 34 | Returns: 35 | images: The preprocessed MNIST images. 36 | labels: The preprocessed MNIST labels. 37 | """ 38 | 39 | indices = np.random.randint(len(images), size=int(len(images) * proportion)) 40 | images = images[indices] 41 | labels = labels[indices] 42 | 43 | valid_examples = np.zeros_like(labels) 44 | for i in range(0, num_classes): 45 | valid_examples = np.logical_or(labels == i, valid_examples) 46 | 47 | images = images[valid_examples] 48 | labels = labels[valid_examples] 49 | 50 | if normalise: 51 | images = images / 255.0 52 | 53 | images = np.expand_dims(images, -1) 54 | 55 | return images, labels 56 | 57 | 58 | def display_digits(images: np.ndarray, labels: np.ndarray, num_to_display: int = 25, random: bool = True) -> None: 59 | """Display a random subset of digits from the MNIST dataset. 60 | 61 | Parameters: 62 | images: MNIST images (num_images, image_height, image_width, 1) or (num_images, image_height, image_width). 63 | labels: MNIST labels (num_images,) (must be the same length as images). 64 | num_to_display: Number of images to display. 65 | random: If True, display the images at random. 66 | """ 67 | num_columns = 5 68 | num_rows = int(np.ceil(num_to_display / num_columns)) 69 | 70 | plt.figure(figsize=(num_columns * 2, num_rows * 2)) 71 | 72 | indices = np.random.randint( 73 | len(images), size=num_to_display) if random else range(num_to_display) 74 | 75 | for i, index in enumerate(indices): 76 | ax = plt.subplot(num_rows, num_columns, i+1) 77 | ax.set_xticks([]) 78 | ax.set_yticks([]) 79 | 80 | # imshow takes the input as (x, y) image instead of (x, y, 1) if the image is grayscale or binary. 
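        # So if the images array still carries a trailing channel axis of size 1
        # (shape (num_images, height, width, 1)), index that axis away before
        # plotting; otherwise the 2-D image can be passed to imshow directly.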
81 | if len(images.shape) == 4: 82 | ax.imshow(images[index, ..., 0], cmap=plt.cm.binary) 83 | else: 84 | ax.imshow(images[index], cmap=plt.cm.binary) 85 | 86 | ax.set_xlabel(labels[index]) 87 | 88 | plt.show() 89 | -------------------------------------------------------------------------------- /unet_segmentation/dataset_generation.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import numpy as np 3 | np.random.seed(seed=9) 4 | from typing import Tuple 5 | from matplotlib import patches as mpatches 6 | from matplotlib import pyplot as plt 7 | import matplotlib 8 | 9 | from simple_deep_learning.mnist_extended.mnist import display_digits 10 | from simple_deep_learning.mnist_extended.semantic_segmentation import (create_semantic_segmentation_dataset, display_segmented_image, display_grayscale_array, plot_class_masks) 11 | 12 | def mnist_extended_dataset(total_train_samples: int = 100, total_test_samples: int = 10, num_classes: int = 10) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: 13 | 14 | (train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data() 15 | 16 | train_x, train_y, test_x, test_y = create_semantic_segmentation_dataset(num_train_samples=total_train_samples, num_test_samples=total_test_samples, image_shape=(60, 60), num_classes=num_classes) 17 | 18 | return train_x, train_y, test_x, test_y 19 | 20 | def display_segmented_image(y: np.ndarray, threshold: float = 0.5, 21 | input_image: np.ndarray = None, 22 | alpha_input_image: float = 0.2, 23 | title: str = '', 24 | ax: matplotlib.axes.Axes = None) -> None: 25 | """Display segemented image. 26 | 27 | This function displays the image where each class is shown in particular color. 28 | This is useful for getting a rapid view of the performance of the model 29 | on a few examples. 30 | 31 | Parameters: 32 | y: The array containing the prediction. 33 | Must be of shape (image_shape, num_classes) 34 | threshold: The threshold used on the predictions. 35 | input_image: If provided, display the input image in black. 36 | alpha_input_image: If an input_image is provided, the transparency of 37 | the input_image. 38 | """ 39 | ax = ax or plt.gca() 40 | 41 | base_array = np.ones( 42 | (y.shape[0], y.shape[1], 3)) * 1 43 | legend_handles = [] 44 | 45 | for i in range(y.shape[-1]): 46 | # Retrieve a color (without the transparency value). 47 | colour = plt.cm.jet(i / y.shape[-1])[:-1] 48 | base_array[y[..., i] > threshold] = colour 49 | legend_handles.append(mpatches.Patch(color=colour, label=str(i))) 50 | 51 | # plt.figure(figsize=figsize) 52 | # ax.imshow(base_array) 53 | # ax.legend(handles=legend_handles, bbox_to_anchor=(1, 1), loc='upper left') 54 | # ax.set_yticks([]) 55 | # ax.set_xticks([]) 56 | # ax.set_title(title) 57 | 58 | # if input_image is not None: 59 | # ax.imshow(input_image[..., 0], 60 | # cmap=plt.cm.binary, alpha=alpha_input_image) 61 | 62 | # if not ax: 63 | # plt.show() 64 | 65 | return base_array 66 | 67 | def plot_class_masks(y_true: np.ndarray, y_predicted: np.ndarray = None, title='') -> None: 68 | """Plot a particular view of the true vs predicted segmentation. 69 | 70 | This function separates each class into its own image and 71 | does not perform any thresholding. 72 | 73 | Parameters: 74 | y_true: True segmentation (image_shape, num_classes). 75 | y_predicted: Predicted segmentation (image_shape, num_classes). 76 | If y_predicted is not provided, only the true values are displayed. 
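    Illustrative example (the names train_y and predictions below are
    placeholders, not variables defined in this module):
        plot_class_masks(y_true=train_y[0], y_predicted=predictions[0],
                         title='Target vs predicted class masks')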
77 | """ 78 | num_rows = 2 if y_predicted is not None else 1 79 | 80 | num_classes = y_true.shape[-1] 81 | fig, axes = plt.subplots(num_rows, num_classes, figsize=(num_classes * 4, num_rows * 4)) 82 | axes = axes.reshape(-1, num_classes) 83 | fig.suptitle(title) 84 | plt.tight_layout() 85 | 86 | for label in range(num_classes): 87 | axes[0, label].imshow(y_true[..., label], cmap=plt.cm.binary) 88 | axes[0, label].axes.set_yticks([]) 89 | axes[0, label].axes.set_xticks([]) 90 | 91 | if label == 0: 92 | axes[0, label].set_ylabel(f'Target') 93 | 94 | if y_predicted is not None: 95 | if label == 0: 96 | axes[1, label].set_ylabel(f'Predicted') 97 | 98 | axes[1, label].imshow(y_predicted[..., label], cmap=plt.cm.binary) 99 | axes[1, label].set_xlabel(f'Label: {label}') 100 | axes[1, label].axes.set_yticks([]) 101 | axes[1, label].axes.set_xticks([]) 102 | else: 103 | axes[0, label].set_xlabel(f'Label: {label}') 104 | 105 | plt.show() -------------------------------------------------------------------------------- /simple_deep_learning/mnist_extended/array_overlay.py: -------------------------------------------------------------------------------- 1 | """This module contains utility functions for performning manipulations of images stored as arrays. 2 | """ 3 | 4 | from typing import List 5 | 6 | import numpy as np 7 | 8 | from ..bounding_box import format_bounding_box, calculate_iou 9 | 10 | 11 | def overlay_arrays(array_shape: tuple, 12 | input_arrays: np.ndarray, 13 | input_labels: np.ndarray, 14 | num_input_arrays_to_overlay: int, 15 | max_array_value: int, 16 | max_iou: float = 0.2, 17 | ): 18 | """Generate an array by overlaying multiple smaller arrays onto a blank one. 19 | 20 | The smaller arrays are randomly selected from input_arrays which 21 | is an array of all the smaller arrays stacked along the axis 0. 22 | 23 | Parameters: 24 | array_shape: The shape of the output array. 25 | input_arrays: Each row corresponds to an input array that 26 | can be overlaid on the output array. 27 | input_labels: Same thing as input_arrays but contains the labels. 28 | num_input_arrays_to_overlay: The number of arrays to attempt to 29 | add to the output array. 30 | max_array_value: The maximum allowed value for this array. 31 | Any number larger than this will be clipped. 32 | Clipping is necessary because the overlaying is done by summing arrays. 33 | max_iou: The maximum allowed IOU between two overlaid arrays. 34 | 35 | Returns: 36 | output_array: The output array of size array_shape. 
37 | arrays_used: an array of shape (num_arrays_overlaid, input_array_shape) 38 | labels_overlaid: an array of shape (num_images_overlaid, input_label_shape) 39 | bounding_boxes_overlaid: an array of shape (num_images_overlaid, 4) 40 | The bounding boxes are absolute pixel values in the format xmin, ymin, xmax, ymax 41 | """ 42 | 43 | output_array = np.zeros(array_shape) 44 | 45 | indices = np.random.randint( 46 | len(input_arrays), size=num_input_arrays_to_overlay) 47 | bounding_boxes = [] 48 | bounding_boxes_as_tuple = [] 49 | indices_overlaid = [] 50 | for i in indices: 51 | bounding_box = overlay_at_random( 52 | array1=output_array, array2=input_arrays[i], 53 | max_array_value=max_array_value, 54 | bounding_boxes=bounding_boxes, max_iou=max_iou) 55 | 56 | if bounding_box is None: 57 | break 58 | 59 | indices_overlaid.append(i) 60 | 61 | bounding_boxes_as_tuple.append( 62 | format_bounding_box(bounding_box, output_type='tuple')) 63 | bounding_boxes.append(bounding_box) 64 | 65 | arrays_overlaid = input_arrays[indices_overlaid] 66 | labels_overlaid = input_labels[indices_overlaid] 67 | bounding_boxes_overlaid = np.stack(bounding_boxes_as_tuple) 68 | 69 | return output_array, arrays_overlaid, labels_overlaid, bounding_boxes_overlaid 70 | 71 | 72 | def overlay_at_random(array1: np.ndarray, array2: np.ndarray, 73 | max_array_value: int, 74 | bounding_boxes: List[dict] = None, 75 | max_iou: float = 0.2) -> np.ndarray: 76 | """Overlay an array over another. 77 | 78 | Overlays array2 over array1 while attempting to avoid locations specified by 79 | a list of bounding_boxes. This function overlays inplace so array1 is not 80 | copied or returned. 81 | 82 | THe location of the array2 in array1 is determined at random. 83 | 84 | Parameters: 85 | array1: The base array (or canvas) on which to overlay array2. 86 | array2: The second array to overlay over array1. 87 | max_array_value: The maximum allowed value for this array. 88 | Any number larger than this will be clipped. 89 | Clipping is necessary because the overlaying is done by summing arrays. 90 | bounding_boxes: A list of bounding boxes in the format xyxy. 91 | The algorithm will not overlay with existing bounding boxes by more 92 | than an IOU of max_iou. 93 | max_iou: The maximum allowed IOU between the candidate location and the 94 | bounding_boxes. 95 | 96 | Returns: 97 | The bounding box of the added image if successfully overlaid. Otherwise None. 98 | """ 99 | if not bounding_boxes: 100 | bounding_boxes = [] 101 | 102 | height1, width1, *_ = array1.shape 103 | height2, width2, *_ = array2.shape 104 | 105 | max_x = width1 - width2 106 | max_y = height1 - height2 107 | 108 | is_valid = False 109 | # This number is arbitrary. There are better ways of doing this but this is fast enough. 
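    # The loop below is plain rejection sampling: draw a random top-left corner,
    # build the candidate bounding box and accept it only if its IOU with every
    # previously placed bounding box is at most max_iou. If no valid position is
    # found within max_attempts draws, the function gives up and returns None.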
110 | max_attempts = 1000 111 | attempt = 0 112 | while not is_valid: 113 | if attempt > max_attempts: 114 | return 115 | else: 116 | attempt += 1 117 | x = np.random.randint(max_x + 1) 118 | y = np.random.randint(max_y + 1) 119 | 120 | candidate_bounding_box = { 121 | 'xmin': x, 122 | 'ymin': y, 123 | 'xmax': x + width2, 124 | 'ymax': y + height2, 125 | } 126 | 127 | is_valid = True 128 | for bounding_box in bounding_boxes: 129 | if calculate_iou(bounding_box, candidate_bounding_box) > max_iou: 130 | is_valid = False 131 | break 132 | 133 | overlay_array(array1=array1, array2=array2, x=x, y=y, max_array_value=max_array_value) 134 | 135 | return candidate_bounding_box 136 | 137 | 138 | def overlay_array(array1: np.ndarray, array2: np.ndarray, x: int, y: int, max_array_value: int = None) -> np.ndarray: 139 | """Overlay an array on another at a given position. 140 | 141 | Parameters: 142 | array1: The base array (or canvas) on which to overlay array2. 143 | array2: The second array to overlay over array1. 144 | max_array_value: The maximum allowed value for this array. 145 | Any number larger than this will be clipped. 146 | Clipping is necessary because the overlaying is done by summing arrays. 147 | 148 | Returns: 149 | array1: array1 with array2 overlaid at the position x, y. 150 | 151 | """ 152 | 153 | height1, width1, *other1 = array1.shape 154 | height2, width2, *other2 = array2.shape 155 | 156 | if height2 > height1 or width2 > width1: 157 | raise ValueError('array2 must have a smaller shape than array1') 158 | 159 | if other1 != other2: 160 | raise ValueError('array1 and array2 must have same dimensions beyond dimension 2.') 161 | 162 | array1[y:y+height2, x:x+width2, ...] = array1[y:y + height2, x:x+width2, ...] + array2 163 | 164 | array1 = np.clip(array1, a_min=0, a_max=max_array_value, out=array1) 165 | 166 | return array1 167 | -------------------------------------------------------------------------------- /simple_deep_learning/bounding_box.py: -------------------------------------------------------------------------------- 1 | """This module contains utility functions related to bounding boxes. 2 | """ 3 | from typing import Union 4 | 5 | import numpy as np 6 | 7 | 8 | def format_bounding_box(bounding_box: Union[tuple, dict, np.ndarray, list], 9 | input_format: str = None, 10 | output_format: str = 'xyxy', 11 | output_type: str = 'dict') -> Union[dict, tuple]: 12 | """Format a bounding box object. 13 | 14 | This is a utility function for converting bounding boxes between different formats. 15 | There are two caracteristics for a bounding box: 16 | - format: Whether the bounding box is defined by its min and max values: xmin, ymin, xmax, ymax 17 | or by its minimum x and y and a width and height. 18 | - type: Whether or not the bounding box is an indexable (e.g tuple, array, list) or a dictionary. 19 | 20 | This function converts between all types. 21 | 22 | In the case of output_type == 'dict', the keys of the dictionary will be 23 | reordered and renamed to be either: 24 | - xmin. ymin, xmax, ymax for the format xyxy. 25 | - x, y, width, height for the format xywh. 26 | 27 | Parameters: 28 | bounding_box: The input bounding box, as a tuple, array, list or dictionary. 29 | input_format: The format of the input. Required if the input type is tuple. 30 | Otherwise the input format is inferred from the keys of the dictionary. 31 | output_format: Determines the output format of the bounding box. 32 | Must be 'xyxy' or 'xywh'. 
Defaults to 'xyxy' 33 | output_type: The output type of the bounding box. 34 | Must be 'dict' or 'tuple'. Defaults to 'dict'. 35 | 36 | Returns: 37 | return_value: A bounding boxes represented in the specified format and type. 38 | """ 39 | if output_format == 'xyxy': 40 | if isinstance(bounding_box, dict): 41 | if all(key in bounding_box for key in ['xmin', 'ymin', 'xmax', 'ymax']): 42 | return_value = { 43 | 'xmin': bounding_box['xmin'], 44 | 'ymin': bounding_box['ymin'], 45 | 'xmax': bounding_box['xmax'], 46 | 'ymax': bounding_box['ymax'] 47 | } 48 | elif all(key in bounding_box for key in ['xmin', 'ymin', 'width', 'height']): 49 | return_value = { 50 | 'xmin': bounding_box['xmin'], 51 | 'ymin': bounding_box['ymin'], 52 | 'xmax': bounding_box['xmin'] + bounding_box['width'], 53 | 'ymax': bounding_box['ymin'] + bounding_box['height'] 54 | } 55 | elif all(key in bounding_box for key in ['x', 'y', 'width', 'height']): 56 | return_value = { 57 | 'xmin': bounding_box['x'], 58 | 'ymin': bounding_box['y'], 59 | 'xmax': bounding_box['x'] + bounding_box['width'], 60 | 'ymax': bounding_box['y'] + bounding_box['height'] 61 | } 62 | else: 63 | raise ValueError( 64 | f'Incorrect format for bounding_box dictionary. Received: {bounding_box}') 65 | else: 66 | if input_format == 'xyxy': 67 | return_value = { 68 | 'xmin': bounding_box[0], 69 | 'ymin': bounding_box[1], 70 | 'xmax': bounding_box[2], 71 | 'ymax': bounding_box[3] 72 | } 73 | elif input_format == 'xywh': 74 | return_value = { 75 | 'xmin': bounding_box[0], 76 | 'ymin': bounding_box[1], 77 | 'xmax': bounding_box[0] + bounding_box[2], 78 | 'ymax': bounding_box[1] + bounding_box[3] 79 | } 80 | else: 81 | raise ValueError( 82 | 'If bounding_box is not a dictionary, input_format must be specified: "xyxy" or "xywh"') 83 | 84 | elif output_format == 'xywh': 85 | if isinstance(bounding_box, dict): 86 | if all(key in bounding_box for key in ['xmin', 'ymin', 'width', 'height']): 87 | return_value = { 88 | 'x': bounding_box['xmin'], 89 | 'y': bounding_box['ymin'], 90 | 'width': bounding_box['width'], 91 | 'height': bounding_box['height'] 92 | } 93 | elif all(key in bounding_box for key in ['xmin', 'ymin', 'xmax', 'ymax']): 94 | return_value = { 95 | 'x': bounding_box['xmin'], 96 | 'y': bounding_box['ymin'], 97 | 'width': bounding_box['xmax'] - bounding_box['xmin'], 98 | 'height': bounding_box['ymax'] - bounding_box['ymin'] 99 | } 100 | elif all(key in bounding_box for key in ['x', 'y', 'width', 'height']): 101 | return_value = { 102 | 'x': bounding_box['x'], 103 | 'y': bounding_box['y'], 104 | 'width': bounding_box['width'], 105 | 'height': bounding_box['height'] 106 | } 107 | else: 108 | raise ValueError( 109 | f'Incorrect format for bounding_box dictionary. Received: {bounding_box}') 110 | else: 111 | if input_format == 'xyxy': 112 | return_value = { 113 | 'x': bounding_box[0], 114 | 'y': bounding_box[1], 115 | 'width': bounding_box[2] - bounding_box[0], 116 | 'height': bounding_box[3] - bounding_box[1] 117 | } 118 | elif input_format == 'xywh': 119 | return_value = { 120 | 'x': bounding_box[0], 121 | 'y': bounding_box[1], 122 | 'width': bounding_box[2], 123 | 'height': bounding_box[3] 124 | } 125 | else: 126 | raise ValueError( 127 | 'If bounding_box is not a dictionary, input_format must be specified: "xyxy" or "xywh"') 128 | else: 129 | raise ValueError( 130 | f'output_format must be either "xyxy" or "xywh". 
Received {output_format}') 131 | 132 | if output_type == 'tuple': 133 | return tuple(return_value.values()) 134 | elif output_type == 'dict': 135 | return return_value 136 | else: 137 | raise ValueError( 138 | f'output_type must be either "dict" or "tuple". Received {output_type}') 139 | 140 | 141 | def calculate_iou(bounding_box1: dict, bounding_box2: dict) -> float: 142 | """Calculate the intersection over union of two bounding boxes. 143 | 144 | Both bounding boxes must be in xyxy format and of type dict. 145 | See format_bounding_box function for more details. 146 | 147 | Returns: 148 | IOU: number between 0 and 1. 149 | """ 150 | 151 | A1 = ((bounding_box1['xmax'] - bounding_box1['xmin']) 152 | * (bounding_box1['ymax'] - bounding_box1['ymin'])) 153 | A2 = ((bounding_box2['xmax'] - bounding_box2['xmin']) 154 | * (bounding_box2['ymax'] - bounding_box2['ymin'])) 155 | 156 | xmin = max(bounding_box1['xmin'], bounding_box2['xmin']) 157 | ymin = max(bounding_box1['ymin'], bounding_box2['ymin']) 158 | xmax = min(bounding_box1['xmax'], bounding_box2['xmax']) 159 | ymax = min(bounding_box1['ymax'], bounding_box2['ymax']) 160 | 161 | if ymin >= ymax or xmin >= xmax: 162 | return 0 163 | 164 | return ((xmax-xmin) * (ymax - ymin)) / (A1 + A2) 165 | -------------------------------------------------------------------------------- /simple_deep_learning/mnist_extended/object_detection.py: -------------------------------------------------------------------------------- 1 | """This module contains functions to create the extended MNIST dataset 2 | for object detection. 3 | """ 4 | 5 | from typing import Tuple, List, Union 6 | 7 | import numpy as np 8 | import PIL 9 | from PIL import ImageDraw, ImageFont 10 | 11 | from .array_overlay import overlay_arrays 12 | from .mnist import preprocess_mnist, download_mnist 13 | 14 | 15 | def create_object_detection_dataset(num_train_samples: int, num_test_samples: int, 16 | image_shape: Tuple[int, int] = (60, 60), 17 | min_num_digits_per_image: int = 2, 18 | max_num_digits_per_image: int = 4, 19 | num_classes: int = 10, 20 | max_iou: float = 0.2, 21 | proportion_of_mnist: float = 1.0, 22 | ) -> Tuple[np.ndarray, List[np.ndarray], List[np.ndarray], 23 | np.ndarray, List[np.ndarray], List[np.ndarray]]: 24 | """Create the extended mnist dataset for object detection. 25 | 26 | The bounding boxes are returned in a format that is not readily usable by 27 | a machine learning algorithm. This is because the actual target array used 28 | in backpropagation will vary depending on the characteristics of the model used (e.g 29 | number of anchors used, number of feature maps etc...). It is left up to the 30 | user of this function to process the provided bounding boxes into the array format required 31 | by the model. 32 | 33 | Parameters: 34 | num_train_samples: Number of training samples to generate. 35 | num_test_samples: Number of test samples to generate. 36 | image_shape: The (height, width) of the image. 37 | min_num_digits_per_image: The minimum number of digits that can be added 38 | to each output image. The number is randomly selected between min_num_digits_per_image and 39 | max_num_digits_per_image (included). 40 | max_num_digits_per_image: The maximum number of digits that can be added 41 | to each output image. The number is randomly selected between min_num_digits_per_image and 42 | max_num_digits_per_image (included). 43 | num_classes: Integer between 1 and 10. Only select images/labels between 0 and num_classes-1. 
44 | max_iou: The maximum allowed IOU (intersection over union) between two overlaid images. 45 | A lower number means digits will overlap less. 46 | proportion_of_mnist: The proportion of total mnist images to use when generating this 47 | dataset. Smaller values will slightly speed up preprocessing (but not much). 48 | 49 | Returns: 50 | train_x, train_bounding_boxes, train_labels, test_x, test_bounding_boxes, test_labels. 51 | The input and the bounding boxes and labels for train and test. 52 | The bounding boxes are in absolute pixel values in the format xmin, ymin, xmax, ymax 53 | """ 54 | 55 | (train_images, train_labels), (test_images, test_labels) = download_mnist() 56 | 57 | train_images, train_labels = preprocess_mnist(images=train_images, labels=train_labels, proportion=proportion_of_mnist, 58 | num_classes=num_classes, normalise=True) 59 | 60 | test_images, test_labels = preprocess_mnist(images=test_images, labels=test_labels, proportion=proportion_of_mnist, 61 | num_classes=num_classes, normalise=True) 62 | 63 | train_x, train_bounding_boxes, train_labels = create_object_detection_data_from_digits( 64 | digits=train_images, digit_labels=train_labels, 65 | num_samples=num_train_samples, image_shape=image_shape, 66 | min_num_digits_per_image=min_num_digits_per_image, 67 | max_num_digits_per_image=max_num_digits_per_image, 68 | max_iou=max_iou) 69 | 70 | test_x, test_bounding_boxes, test_labels = create_object_detection_data_from_digits( 71 | digits=test_images, digit_labels=test_labels, 72 | num_samples=num_test_samples, image_shape=image_shape, 73 | min_num_digits_per_image=min_num_digits_per_image, 74 | max_num_digits_per_image=max_num_digits_per_image, 75 | max_iou=max_iou) 76 | 77 | return train_x, train_bounding_boxes, train_labels, test_x, test_bounding_boxes, test_labels 78 | 79 | 80 | def create_object_detection_data_from_digits(digits: np.ndarray, 81 | digit_labels: np.ndarray, 82 | num_samples: int, 83 | image_shape: tuple, 84 | min_num_digits_per_image: int, 85 | max_num_digits_per_image: int, 86 | max_iou: float, 87 | ) -> Tuple[np.ndarray, List[np.ndarray], List[np.ndarray]]: 88 | """Create the extended MNIST data for object detection from the provided MNIST digits and labels. 89 | 90 | This is used by create_mnist_extended_object_detection_dataset. 91 | This function is useful directly if one wants to perform additional preprocessing on 92 | the original mnist digits (e.g resize or warp digits etc.) 93 | 94 | Parameters: 95 | digits: The MNIST digits (num_images, height, width, 1) 96 | labels: The MNIST labels (num_images,) 97 | image_shape: The (height, width) of the image. 98 | min_num_digits_per_image: The minimum number of digits that can be added 99 | to each output image. The number is randomly selected between min_num_digits_per_image and 100 | max_num_digits_per_image (included). 101 | max_num_digits_per_image: The maximum number of digits that can be added 102 | to each output image. The number is randomly selected between min_num_digits_per_image and 103 | max_num_digits_per_image (included). 104 | num_classes: Integer between 1 and 10. Indicating the number of classes used in the dataset. 105 | max_iou: The maximum allowed IOU (intersection over union) between two overlaid images. 106 | A lower number means digits will overlap less. 107 | 108 | Returns: 109 | x, bounding_boxes, labels. 110 | The input, the bounding boxes and the labels. 
111 | The bounding boxes are absolute pixel values in the format xmin, ymin, xmax, ymax 112 | """ 113 | 114 | x = [] 115 | labels = [] 116 | bounding_boxes = [] 117 | 118 | for _ in range(num_samples): 119 | num_digits = np.random.randint( 120 | min_num_digits_per_image, max_num_digits_per_image + 1) 121 | 122 | input_array, arrays_overlaid, labels_overlaid, bounding_boxes_overlaid = overlay_arrays( 123 | array_shape=image_shape + (1, ), 124 | input_arrays=digits, 125 | input_labels=digit_labels, 126 | max_array_value=1, 127 | num_input_arrays_to_overlay=num_digits, 128 | max_iou=max_iou) 129 | 130 | x.append(input_array) 131 | labels.append(labels_overlaid) 132 | bounding_boxes.append(bounding_boxes_overlaid) 133 | 134 | x = np.stack(x) 135 | 136 | return x, bounding_boxes, labels 137 | 138 | 139 | def draw_bounding_boxes(image: Union[PIL.Image.Image, np.ndarray], 140 | bounding_boxes: Union[list, np.ndarray], 141 | labels: Union[list, np.ndarray] = None, 142 | label_size: int=7, 143 | colour: str = 'white', width: int = 1, 144 | copy: bool = False) -> PIL.Image.Image: 145 | """Draw multiple bounding boxes with labels on an image. 146 | 147 | Essentially a loop over the draw_bounding_box function. 148 | 149 | Parameters: 150 | See draw_bounding_box for most parameters: 151 | bounding_boxes: Must indexable with each index returning a bounding box in 152 | format used by draw_bounding_box. 153 | labels: Same as bounding_boxes 154 | 155 | Returns: 156 | A PIL image with the bounding box drawn. 157 | """ 158 | 159 | num_bounding_boxes = len(bounding_boxes) 160 | 161 | if isinstance(image, np.ndarray): 162 | image = array_to_image(image) 163 | elif isinstance(image, PIL.Image.Image): 164 | if copy: 165 | image = image.copy() 166 | else: 167 | raise ValueError( 168 | f'Invalid type {type(image)} for image argument. Expecting np.ndarray or PIL.Image.Image') 169 | 170 | if labels is not None: 171 | num_labels = len(labels) 172 | if num_bounding_boxes != num_labels: 173 | raise ValueError('len(bounding_boxes) and len(labels) must be the same. ' 174 | f'len(bounding_boxes) = {num_bounding_boxes} != len(labels) = {num_labels}') 175 | 176 | for i in range(num_bounding_boxes): 177 | draw_bounding_box(image, bounding_box=bounding_boxes[i], label=labels[i], 178 | label_size=label_size, 179 | colour=colour, width=width, copy=False) 180 | 181 | return image 182 | 183 | 184 | def draw_bounding_box(image: Union[PIL.Image.Image, np.ndarray], bounding_box: Union[tuple, list, np.ndarray], 185 | label: str = None, label_size: int = 7, 186 | colour: str = 'white', width: int = 1, copy: bool = False) -> PIL.Image.Image: 187 | """Draw a bounding box with a label on an Image. 188 | 189 | Parameters: 190 | image: A PIL image or numpy array on which to draw the bounding box. 191 | bounding_box: A tuple/array/list of 4 values containing the 192 | xmin, ymin, xmax, ymax coordinates of the bounding box. 193 | label: A string (or integer) to display next to the bounding box. 194 | label_size: The size of the label when displayed. 195 | This is only used if the function manages to load the specified font. 196 | Otherwise PIL will use the default font for which the size cannot be set. 197 | colour: The colour of the label and bounding box. 198 | width: The width of the bounding box. 199 | copy: If True, copy the image and do not modify the original. 200 | Only used if image is a PIL Image. In the case of an array, 201 | the array is copied automatically. 202 | 203 | Returns: 204 | A PIL image with the bounding box drawn. 
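    Illustrative example (the canvas and box coordinates are arbitrary):
        import numpy as np
        canvas = np.zeros((60, 60))
        annotated = draw_bounding_box(canvas, bounding_box=(5, 10, 30, 40),
                                      colour='white', width=1)
        annotated.show()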
205 | """ 206 | 207 | if isinstance(image, np.ndarray): 208 | image = array_to_image(image) 209 | elif isinstance(image, PIL.Image.Image): 210 | if copy: 211 | image = image.copy() 212 | else: 213 | raise ValueError( 214 | f'Invalid type {type(image)} for image argument. Expecting np.ndarray or PIL.Image.Image') 215 | 216 | xmin, ymin, xmax, ymax = bounding_box 217 | 218 | image_draw = ImageDraw.Draw(image) 219 | image_draw.rectangle(xy=(xmin, ymin, xmax, ymax), 220 | outline=colour, width=width) 221 | 222 | if label is not None: 223 | label = str(label) 224 | 225 | try: 226 | font = ImageFont.truetype("arial.ttf", size=label_size) 227 | except OSError: 228 | font = ImageFont.load_default() 229 | 230 | label_width, label_height = font.getsize(label) 231 | image_height, image_width = image.size 232 | 233 | if ymax + label_height > image_height: 234 | label_x = xmin 235 | label_y = ymin - label_height 236 | else: 237 | label_x = xmin 238 | label_y = ymax 239 | 240 | image_draw.text((label_x, label_y), label, fill=colour, font=font) 241 | 242 | return image 243 | 244 | 245 | def array_to_image(array: np.ndarray) -> PIL.Image.Image: 246 | """Converts an array to a PIL image. 247 | 248 | Performs checks and applies type modifications to 249 | put into the correct format for PIL. 250 | 251 | Parameters: 252 | array: An array to convert. Can be have any of the following shape: 253 | (height, width), (height, width, 1), (height, width, 3) 254 | 255 | Returns: 256 | A PIL image of mode L or RGB. 257 | """ 258 | array_max = array.max() 259 | array_min = array.min() 260 | 261 | if array_max > 255 or array_min < 0: 262 | raise ValueError('This function cannot deal with values above 255 or ' 263 | f'below 0. array.max() = {array_max}, array_min = {array_min}') 264 | 265 | if array.dtype == float: 266 | if array.max() <= 1: 267 | array = (array * 255).astype(np.uint8) 268 | else: 269 | array = array.astype(np.uint8) 270 | 271 | if len(array.shape) == 3: 272 | if array.shape[2] == 1: 273 | array = array[..., 0] 274 | else: 275 | if array.shape[2] != 3: 276 | raise ValueError( 277 | 'array can have either 3 or 1 channel (i.e 3rd dimension)') 278 | else: 279 | if len(array.shape) != 2: 280 | raise ValueError( 281 | 'Array must be 3 (with channels) or 2 dimensional') 282 | 283 | return PIL.Image.fromarray(array) 284 | -------------------------------------------------------------------------------- /simple_deep_learning/mnist_extended/semantic_segmentation.py: -------------------------------------------------------------------------------- 1 | """This module contains functions to create the extended MNIST dataset 2 | for semantic segmentation. 
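Typical usage (a minimal sketch; the sample counts are arbitrary):

    train_x, train_y, test_x, test_y = create_semantic_segmentation_dataset(
        num_train_samples=100, num_test_samples=10)
    display_grayscale_array(train_x[0])
    display_segmented_image(train_y[0])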
3 | """ 4 | import matplotlib.pyplot as plt 5 | from typing import Tuple 6 | 7 | import numpy as np 8 | import matplotlib 9 | from matplotlib import pyplot as plt 10 | from matplotlib import patches as mpatches 11 | 12 | from .array_overlay import overlay_arrays 13 | from .mnist import preprocess_mnist, download_mnist 14 | 15 | plt.rcParams['figure.facecolor'] = 'white' 16 | 17 | 18 | def create_semantic_segmentation_dataset(num_train_samples: int, num_test_samples: int, 19 | image_shape: Tuple[int, int] = (60, 60), 20 | min_num_digits_per_image: int = 2, 21 | max_num_digits_per_image: int = 4, 22 | num_classes: int = 10, 23 | max_iou: float = 0.2, 24 | labels_are_exclusive: bool = False, 25 | target_is_whole_bounding_box: bool = False, 26 | proportion_of_mnist: float = 1.0, 27 | ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: 28 | """Create the extended mnist dataset for semantic segmentation. 29 | 30 | Parameters: 31 | num_train_samples: Number of training samples to generate. 32 | num_test_samples: Number of test samples to generate. 33 | image_shape: The (height, width) of the image. 34 | min_num_digits_per_image: The minimum number of digits that can be added 35 | to each output image. The number is randomly selected between min_num_digits_per_image and 36 | max_num_digits_per_image (included). 37 | max_num_digits_per_image: The maximum number of digits that can be added 38 | to each output image. The number is randomly selected between min_num_digits_per_image and 39 | max_num_digits_per_image (included). 40 | num_classes: Integer between 1 and 10. Only select images/labels between 0 and num_classes-1. 41 | max_iou: The maximum allowed IOU (intersection over union) between two overlaid images. 42 | A lower number means digits will overlap less. 43 | labels_are_exclusive: If True, each pixel can only belong to one class. If False, 44 | a pixel can be multiple digits at the same time. 45 | target_is_whole_bounding_box: If True, the target for each digit is the whole digit's image. 46 | If False, only the non null pixels of the digit are the target values. 47 | proportion_of_mnist: The proportion of total mnist images to use when generating this 48 | dataset. Smaller values will slightly speed up preprocessing (but not much). 49 | 50 | Returns: 51 | train_x, train_y, test_x, test_y. The input and target values of train and test. 
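        With the default image_shape of (60, 60), train_x has shape
        (num_train_samples, 60, 60, 1) and train_y has shape
        (num_train_samples, 60, 60, num_classes); test_x and test_y follow the
        same pattern with num_test_samples.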
52 | """ 53 | 54 | (train_images, train_labels), (test_images, test_labels) = download_mnist() 55 | 56 | train_images, train_labels = preprocess_mnist( 57 | images=train_images, labels=train_labels, proportion=proportion_of_mnist, num_classes=num_classes, 58 | normalise=True) 59 | 60 | test_images, test_labels = preprocess_mnist(images=test_images, labels=test_labels, proportion=proportion_of_mnist, 61 | num_classes=num_classes, normalise=True) 62 | 63 | train_x, train_y = create_semantic_segmentation_data_from_digits( 64 | digits=train_images, digit_labels=train_labels, 65 | num_samples=num_train_samples, 66 | image_shape=image_shape, 67 | min_num_digits_per_image=min_num_digits_per_image, 68 | max_num_digits_per_image=max_num_digits_per_image, 69 | num_classes=num_classes, max_iou=max_iou, 70 | labels_are_exclusive=labels_are_exclusive, 71 | target_is_whole_bounding_box=target_is_whole_bounding_box) 72 | 73 | test_x, test_y = create_semantic_segmentation_data_from_digits( 74 | digits=test_images, digit_labels=test_labels, 75 | num_samples=num_test_samples, image_shape=image_shape, 76 | min_num_digits_per_image=min_num_digits_per_image, 77 | max_num_digits_per_image=max_num_digits_per_image, 78 | num_classes=num_classes, max_iou=max_iou, 79 | labels_are_exclusive=labels_are_exclusive, 80 | target_is_whole_bounding_box=target_is_whole_bounding_box) 81 | 82 | return train_x, train_y, test_x, test_y 83 | 84 | 85 | def create_semantic_segmentation_data_from_digits(digits: np.ndarray, 86 | digit_labels: np.ndarray, 87 | num_samples: int, 88 | image_shape: tuple, 89 | min_num_digits_per_image: int, 90 | max_num_digits_per_image: int, 91 | num_classes: int, 92 | max_iou: float, 93 | labels_are_exclusive: bool = False, 94 | target_is_whole_bounding_box: bool = False 95 | ) -> Tuple[np.ndarray, np.ndarray]: 96 | """Create the extended MNIST data (either train or test) for semantic segmentation 97 | from the provided MNIST digits and labels. 98 | 99 | This is used by create_mnist_extended_semantic_segementation_dataset. 100 | This function is useful directly if one wants to perform additional preprocessing on 101 | the original mnist digits (e.g resize or warp digits etc.) 102 | 103 | Parameters: 104 | digits: The MNIST digits (num_images, height, width, 1) 105 | digit_labels: The MNIST labels (num_images,) 106 | image_shape: The (height, width) of the image. 107 | min_num_digits_per_image: The minimum number of digits that can be added 108 | to each output image. The number is randomly selected between min_num_digits_per_image and 109 | max_num_digits_per_image (included). 110 | max_num_digits_per_image: The maximum number of digits that can be added 111 | to each output image. The number is randomly selected between min_num_digits_per_image and 112 | max_num_digits_per_image (included). 113 | num_classes: Integer between 1 and 10. Indicating the number of classes used in the dataset. 114 | max_iou: The maximum allowed IOU (intersection over union) between two overlaid images. 115 | A lower number means digits will overlap less. 116 | labels_are_exclusive: If True, each pixel can only belong to one class. If False, 117 | a pixel can be multiple digits at the same time. 118 | target_is_whole_bounding_box: If True, the target for each digit is the whole digit's image. 119 | If False, only the non null pixels of the digit are the target values. 120 | 121 | Returns: 122 | train_x, train_y, test_x, test_y. The input and target values of train and test. 
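        Note: this function builds a single split, so concretely it returns one
        (x, y) pair with x of shape (num_samples, image_height, image_width, 1)
        and y of shape (num_samples, image_height, image_width, num_classes).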
123 | """ 124 | 125 | input_data = [] 126 | target_data = [] 127 | 128 | for _ in range(num_samples): 129 | num_digits = np.random.randint( 130 | min_num_digits_per_image, max_num_digits_per_image + 1) 131 | 132 | input_array, arrays_overlaid, labels_overlaid, bounding_boxes_overlaid = overlay_arrays( 133 | array_shape=image_shape + (1, ), 134 | input_arrays=digits, 135 | input_labels=digit_labels, 136 | max_array_value=1, 137 | num_input_arrays_to_overlay=num_digits, 138 | max_iou=max_iou) 139 | 140 | target_array = create_segmentation_target(images=arrays_overlaid, 141 | labels=labels_overlaid, 142 | bounding_boxes=bounding_boxes_overlaid, 143 | image_shape=image_shape, 144 | num_classes=num_classes, 145 | labels_are_exclusive=labels_are_exclusive, 146 | target_is_whole_bounding_box=target_is_whole_bounding_box) 147 | 148 | input_data.append(input_array) 149 | target_data.append(target_array) 150 | 151 | input_data = np.stack(input_data) 152 | target_data = np.stack(target_data) 153 | 154 | return input_data, target_data 155 | 156 | 157 | def create_segmentation_target(images: np.ndarray, 158 | labels: np.ndarray, 159 | bounding_boxes: np.ndarray, 160 | image_shape: tuple, 161 | num_classes: int, 162 | labels_are_exclusive: bool = False, 163 | target_is_whole_bounding_box: bool = False 164 | ) -> np.ndarray: 165 | """Creates the target (aka y value) based on the base images that were overlaid. 166 | 167 | Parameters: 168 | images: MNIST digits that were overlaid. 169 | labels: Labels of the digits that were overlaid. 170 | bounding_boxes: Bounding boxes (wrt output image) of the digits. 171 | num_classes: Integer between 1 and 10. Indicating the number of classes used in the dataset. 172 | max_iou: The maximum allowed IOU (intersection over union) between two overlaid images. 173 | A lower number means digits will overlap less. 174 | labels_are_exclusive: If True, each pixel can only belong to one class. If False, 175 | a pixel can be multiple digits at the same time. 176 | target_is_whole_bounding_box: If True, the target for each digit is the whole digit's image. 177 | If False, only the non null pixels of the digit are the target values. 178 | 179 | Returns: 180 | target for a particular input. An ndarray of shape (image_shape, num_classes) 181 | 182 | """ 183 | if len(bounding_boxes) != len(labels) != len(images): 184 | raise ValueError( 185 | f'The length of bounding_boxes must be the same as the length of labels. 
Received shapes: {bounding_boxes.shape}!={labels.shape}') 186 | 187 | target = np.zeros(image_shape + (num_classes,)) 188 | 189 | if labels_are_exclusive: 190 | exclusivity_mask = np.zeros(image_shape) 191 | 192 | for i in range(len(bounding_boxes)): 193 | label = labels[i] 194 | xmin, ymin, xmax, ymax = bounding_boxes[i] 195 | 196 | if target_is_whole_bounding_box: 197 | target[ymin:ymax, xmin:xmax, [label]] = 1 198 | else: 199 | max_array_value = max(target[ymin:ymax, xmin:xmax, [label]].max(), images.max()) 200 | target[ymin:ymax, xmin:xmax, [label]] = images[i] + target[ymin:ymax, xmin:xmax, [label]] 201 | 202 | array1 = np.clip(target, a_min=0, a_max=max_array_value, out=target) 203 | 204 | if labels_are_exclusive: 205 | target[..., label] = np.where( 206 | exclusivity_mask, 0, target[..., label]) 207 | exclusivity_mask = np.logical_or( 208 | exclusivity_mask, target[..., label]) 209 | 210 | return target 211 | 212 | 213 | def display_grayscale_array(array: np.ndarray, title: str = '', ax: matplotlib.axes.Axes = None) -> None: 214 | """Display the grayscale input image. 215 | 216 | Parameters: 217 | image: This can be either an input digit from MNIST of a input image 218 | from the extended dataset. 219 | title: If provided, this will be added as title of the plot. 220 | """ 221 | ax = ax or plt.gca() 222 | 223 | if len(array.shape) == 3: 224 | array = array[..., 0] 225 | 226 | ax.imshow(array, cmap=plt.cm.binary) 227 | ax.axes.set_yticks([]) 228 | ax.axes.set_xticks([]) 229 | 230 | if title: 231 | ax.set_title(title) 232 | 233 | if not ax: 234 | plt.show() 235 | 236 | 237 | def display_segmented_image(y: np.ndarray, threshold: float = 0.5, 238 | input_image: np.ndarray = None, 239 | alpha_input_image: float = 0.2, 240 | title: str = '', 241 | ax: matplotlib.axes.Axes = None) -> None: 242 | """Display segemented image. 243 | 244 | This function displays the image where each class is shown in particular color. 245 | This is useful for getting a rapid view of the performance of the model 246 | on a few examples. 247 | 248 | Parameters: 249 | y: The array containing the prediction. 250 | Must be of shape (image_shape, num_classes) 251 | threshold: The threshold used on the predictions. 252 | input_image: If provided, display the input image in black. 253 | alpha_input_image: If an input_image is provided, the transparency of 254 | the input_image. 255 | """ 256 | ax = ax or plt.gca() 257 | 258 | base_array = np.ones( 259 | (y.shape[0], y.shape[1], 3)) * 1 260 | legend_handles = [] 261 | 262 | for i in range(y.shape[-1]): 263 | # Retrieve a color (without the transparency value). 264 | colour = plt.cm.jet(i / y.shape[-1])[:-1] 265 | base_array[y[..., i] > threshold] = colour 266 | legend_handles.append(mpatches.Patch(color=colour, label=str(i))) 267 | 268 | # plt.figure(figsize=figsize) 269 | ax.imshow(base_array) 270 | ax.legend(handles=legend_handles, bbox_to_anchor=(1, 1), loc='upper left') 271 | ax.set_yticks([]) 272 | ax.set_xticks([]) 273 | ax.set_title(title) 274 | 275 | if input_image is not None: 276 | ax.imshow(input_image[..., 0], 277 | cmap=plt.cm.binary, alpha=alpha_input_image) 278 | 279 | if not ax: 280 | plt.show() 281 | 282 | 283 | def plot_class_masks(y_true: np.ndarray, y_predicted: np.ndarray = None, title='') -> None: 284 | """Plot a particular view of the true vs predicted segmentation. 285 | 286 | This function separates each class into its own image and 287 | does not perform any thresholding. 
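    For example, with num_classes = 5 the figure has one column per digit class:
    the first row shows the target mask for each class and, when y_predicted is
    provided, a second row shows the corresponding predicted mask.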
288 | 289 | Parameters: 290 | y_true: True segmentation (image_shape, num_classes). 291 | y_predicted: Predicted segmentation (image_shape, num_classes). 292 | If y_predicted is not provided, only the true values are displayed. 293 | """ 294 | num_rows = 2 if y_predicted is not None else 1 295 | 296 | num_classes = y_true.shape[-1] 297 | fig, axes = plt.subplots(num_rows, num_classes, figsize=(num_classes * 4, num_rows * 4)) 298 | axes = axes.reshape(-1, num_classes) 299 | fig.suptitle(title) 300 | plt.tight_layout() 301 | 302 | for label in range(num_classes): 303 | axes[0, label].imshow(y_true[..., label], cmap=plt.cm.binary) 304 | axes[0, label].axes.set_yticks([]) 305 | axes[0, label].axes.set_xticks([]) 306 | 307 | if label == 0: 308 | axes[0, label].set_ylabel(f'Target') 309 | 310 | if y_predicted is not None: 311 | if label == 0: 312 | axes[1, label].set_ylabel(f'Predicted') 313 | 314 | axes[1, label].imshow(y_predicted[..., label], cmap=plt.cm.binary) 315 | axes[1, label].set_xlabel(f'Label: {label}') 316 | axes[1, label].axes.set_yticks([]) 317 | axes[1, label].axes.set_xticks([]) 318 | else: 319 | axes[0, label].set_xlabel(f'Label: {label}') 320 | 321 | plt.show() 322 | -------------------------------------------------------------------------------- /unet_segmentation/unet_model.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Semantic Segmentation with U-Net\n", 8 | "- Dataset: MNIST Extended\n", 9 | "- Reference: https://github.com/LukeTonin/simple-deep-learning " 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "### Imports" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "import os\n", 26 | "import numpy as np\n", 27 | "import random\n", 28 | "import matplotlib.pyplot as plt\n", 29 | "from torchinfo import summary\n", 30 | "import cv2\n", 31 | "from typing import List, Tuple\n", 32 | "from PIL import Image\n", 33 | "import imutils\n", 34 | "from pathlib import Path\n", 35 | "import pprint\n", 36 | "\n", 37 | "import torch\n", 38 | "import torch.nn as nn\n", 39 | "import torchvision\n", 40 | "import torchmetrics\n", 41 | "from torch.utils.data import Dataset, DataLoader\n", 42 | "from torchvision import transforms, datasets, models\n", 43 | "\n", 44 | "import config\n", 45 | "import dataset_generation\n", 46 | "import utils\n", 47 | "\n", 48 | "import importlib\n", 49 | "importlib.reload(dataset_generation)" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": null, 55 | "metadata": {}, 56 | "outputs": [], 57 | "source": [ 58 | "# device\n", 59 | "DEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", 60 | "print(f\"device: {DEVICE}\")" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "### Load hyperparameters" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "NUM_WORKERS = config.NUM_WORKERS\n", 77 | "BATCH_SIZE = config.BATCH_SIZE\n", 78 | "PIN_MEMORY = config.PIN_MEMORY\n", 79 | "\n", 80 | "TOTAL_SAMPLES = config.TOTAL_SAMPLES\n", 81 | "num_classes = config.NUM_CLASSES\n", 82 | "\n", 83 | "EPOCHS = config.EPOCHS\n", 84 | "LR_RATE = config.LR_RATE\n", 85 | "\n", 86 | "TRAIN_VAL_SPLIT = config.TRAIN_VAL_SPLIT" 87 | ] 88 | }, 89 
| { 90 | "cell_type": "markdown", 91 | "metadata": {}, 92 | "source": [ 93 | "### UNet architecture\n", 94 | "\n", 95 | "
(U-Net architecture diagram image omitted)
\n", 98 | "\n", 99 | "Reference: https://lmb.informatik.uni-freiburg.de/people/ronneber/u-net" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "class Block(nn.Module):\n", 109 | " def __init__(self, in_channels, out_channels):\n", 110 | " super().__init__()\n", 111 | " self.conv1 = nn.Conv2d(in_channels, out_channels, 3, padding=1)\n", 112 | " self.bn = nn.BatchNorm2d(out_channels) \n", 113 | " self.relu = nn.ReLU()\n", 114 | " self.conv2 = nn.Conv2d(out_channels, out_channels, 3, padding=1)\n", 115 | "\n", 116 | " def forward(self, x):\n", 117 | " x = self.conv1(x)\n", 118 | " x = self.bn(x)\n", 119 | " x = self.relu(x)\n", 120 | " x = self.conv2(x)\n", 121 | " x = self.bn(x)\n", 122 | " x = self.relu(x)\n", 123 | " return x" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": null, 129 | "metadata": {}, 130 | "outputs": [], 131 | "source": [ 132 | "# Check the block\n", 133 | "encoder_block = Block(1, 64)\n", 134 | "x = torch.rand(1, 1, 60, 60)\n", 135 | "print(f'Shape of the encoder block: {encoder_block(x).shape}')" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": null, 141 | "metadata": {}, 142 | "outputs": [], 143 | "source": [ 144 | "class Encoder(nn.Module):\n", 145 | " def __init__(self, channels=(1, 64, 128, 256)): # 512\n", 146 | " super().__init__()\n", 147 | " self.encoder_blocks = nn.ModuleList(\n", 148 | " [Block(channels[i], channels[i + 1]) for i in range(len(channels) - 1)]\n", 149 | " )\n", 150 | " self.pool = nn.MaxPool2d(kernel_size=2)\n", 151 | "\n", 152 | " def forward(self, x):\n", 153 | " block_outputs = []\n", 154 | " for block in self.encoder_blocks:\n", 155 | " x = block(x)\n", 156 | " block_outputs.append(x)\n", 157 | " x = self.pool(x)\n", 158 | " return block_outputs" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": null, 164 | "metadata": {}, 165 | "outputs": [], 166 | "source": [ 167 | "# Check the encoder\n", 168 | "encoder = Encoder()\n", 169 | "x = torch.rand(1, 1, 60, 60)\n", 170 | "encoder_outputs = encoder(x)\n", 171 | "\n", 172 | "for op in encoder_outputs:\n", 173 | " print(f'Shape of the encoder output: {op.shape}')" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": null, 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "class Decoder(nn.Module):\n", 183 | " def __init__(self, channels=(256, 128, 64)): #512\n", 184 | " super().__init__()\n", 185 | " self.channels = channels\n", 186 | " self.decoder_blocks = nn.ModuleList(\n", 187 | " [Block(channels[i], channels[i + 1]) for i in range(len(channels) - 1)]\n", 188 | " )\n", 189 | " self.upconvolution = nn.ModuleList(\n", 190 | " [nn.ConvTranspose2d(channels[i], channels[i + 1], kernel_size=2, stride=2) for i in range(len(channels) - 1)]\n", 191 | " )\n", 192 | "\n", 193 | " def forward(self, x, encoder_outputs):\n", 194 | " for i in range(len(self.channels) - 1):\n", 195 | " x = self.upconvolution[i](x)\n", 196 | " encoder_output = self.crop(encoder_outputs[i], x)\n", 197 | " x = torch.cat([x, encoder_output], dim=1)\n", 198 | " x = self.decoder_blocks[i](x)\n", 199 | " return x\n", 200 | "\n", 201 | " # Following the paper, we crop the encoder output to match the shape of decoder output \n", 202 | " def crop(self, encoder_output, tensor):\n", 203 | " _, _, H, W = tensor.shape\n", 204 | " encoder_output = torchvision.transforms.CenterCrop([H, W])(encoder_output)\n", 205 | " 
return encoder_output" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": null, 211 | "metadata": {}, 212 | "outputs": [], 213 | "source": [ 214 | "# Check the decoder\n", 215 | "decoder = Decoder()\n", 216 | "x = torch.rand(1, 256, 7, 7)\n", 217 | "decoder(x, encoder_outputs[::-1][1:]) # Pass the encoder outputs in reverse order\n", 218 | "print(f'Shape of the decoder output: {decoder(x, encoder_outputs[::-1][1:]).shape}')" 219 | ] 220 | }, 221 | { 222 | "cell_type": "code", 223 | "execution_count": null, 224 | "metadata": {}, 225 | "outputs": [], 226 | "source": [ 227 | "class UNet(nn.Module):\n", 228 | " def __init__(self, encoder_channels=(1, 64, 128, 256), decoder_channels=(256, 128, 64), num_classes=5, retain_dim=False, output_size=(60, 60)):\n", 229 | " super().__init__()\n", 230 | " self.encoder = Encoder(encoder_channels)\n", 231 | " self.decoder = Decoder(decoder_channels)\n", 232 | " self.head = nn.Conv2d(decoder_channels[-1], num_classes, kernel_size=1)\n", 233 | " self.retain_dim = retain_dim\n", 234 | " self.output_size = output_size\n", 235 | " self.sigmoid = nn.Sigmoid()\n", 236 | "\n", 237 | " def forward(self, x):\n", 238 | " encoder_outputs = self.encoder(x)\n", 239 | " out = self.decoder(encoder_outputs[-1], encoder_outputs[::-1][1:])\n", 240 | " out = self.head(out)\n", 241 | " if self.retain_dim:\n", 242 | " out = nn.functional.interpolate(out, self.output_size)\n", 243 | " # Apply sigmoid activation\n", 244 | " out = self.sigmoid(out)\n", 245 | " return out" 246 | ] 247 | }, 248 | { 249 | "cell_type": "code", 250 | "execution_count": null, 251 | "metadata": {}, 252 | "outputs": [], 253 | "source": [ 254 | "# Check the model\n", 255 | "model = UNet(retain_dim=True)\n", 256 | "x = torch.rand(1, 1, 60, 60)\n", 257 | "out = model(x)\n", 258 | "print(f'Shape of the model output: {out.shape}')" 259 | ] 260 | }, 261 | { 262 | "cell_type": "markdown", 263 | "metadata": {}, 264 | "source": [ 265 | "### Create data loaders" 266 | ] 267 | }, 268 | { 269 | "cell_type": "code", 270 | "execution_count": null, 271 | "metadata": {}, 272 | "outputs": [], 273 | "source": [ 274 | "class MNISTExtendedDataset(Dataset):\n", 275 | " def __init__(self, count, transform=None):\n", 276 | " \n", 277 | " self.input_images, self.target_masks, _, _ = dataset_generation.mnist_extended_dataset(total_train_samples=count, total_test_samples=1, num_classes=num_classes)\n", 278 | "\n", 279 | " # permute target mask \n", 280 | " self.target_masks = np.transpose(self.target_masks, (0, 3, 1, 2))\n", 281 | "\n", 282 | " # Convert to datatype float32\n", 283 | " self.input_images = self.input_images.astype(np.float32)\n", 284 | " self.target_masks = self.target_masks.astype(np.float32)\n", 285 | "\n", 286 | " self.transform = transform\n", 287 | "\n", 288 | " def __len__(self):\n", 289 | " return len(self.input_images)\n", 290 | "\n", 291 | " def __getitem__(self, idx):\n", 292 | " image = self.input_images[idx]\n", 293 | " mask = self.target_masks[idx]\n", 294 | " if self.transform:\n", 295 | " image = self.transform(image)\n", 296 | "\n", 297 | " return [image, mask]" 298 | ] 299 | }, 300 | { 301 | "cell_type": "code", 302 | "execution_count": null, 303 | "metadata": {}, 304 | "outputs": [], 305 | "source": [ 306 | "from torch.utils.data import random_split\n", 307 | "\n", 308 | "trans = transforms.Compose([\n", 309 | " transforms.ToTensor()\n", 310 | "])\n", 311 | "\n", 312 | "train_size = int(TRAIN_VAL_SPLIT * TOTAL_SAMPLES)\n", 313 | "val_size = int((1 - TRAIN_VAL_SPLIT) * 
TOTAL_SAMPLES)\n", 314 | "\n", 315 | "train_data = MNISTExtendedDataset(count=train_size, transform=trans)\n", 316 | "val_data = MNISTExtendedDataset(count=val_size, transform=trans)\n", 317 | "test_data = MNISTExtendedDataset(count=val_size, transform=trans)\n", 318 | "\n", 319 | "print(f'Shape of train_data: {len(train_data)}')\n", 320 | "print(f'Shape of val_data: {len(val_data)}')\n", 321 | "print(f'Shape of test_data: {len(test_data)}')" 322 | ] 323 | }, 324 | { 325 | "cell_type": "code", 326 | "execution_count": null, 327 | "metadata": {}, 328 | "outputs": [], 329 | "source": [ 330 | "from torch.utils.data import DataLoader\n", 331 | "\n", 332 | "train_loader = DataLoader(dataset=train_data, \n", 333 | " batch_size=BATCH_SIZE, \n", 334 | " shuffle=True, \n", 335 | " num_workers=NUM_WORKERS, \n", 336 | " pin_memory=PIN_MEMORY)\n", 337 | "\n", 338 | "val_loader = DataLoader(dataset=val_data,\n", 339 | " batch_size=BATCH_SIZE,\n", 340 | " shuffle=False,\n", 341 | " num_workers=NUM_WORKERS,\n", 342 | " pin_memory=PIN_MEMORY)\n", 343 | "\n", 344 | "test_loader = DataLoader(dataset=test_data, \n", 345 | " batch_size=1, \n", 346 | " shuffle=False, \n", 347 | " num_workers=NUM_WORKERS, \n", 348 | " pin_memory=PIN_MEMORY)\n", 349 | "\n", 350 | "print(f'Number of train batches: {len(train_loader)}')\n", 351 | "print(f'Number of val batches: {len(val_loader)}')\n", 352 | "print(f'Number of test batches: {len(test_loader)}')" 353 | ] 354 | }, 355 | { 356 | "cell_type": "code", 357 | "execution_count": null, 358 | "metadata": {}, 359 | "outputs": [], 360 | "source": [ 361 | "# Get a train input and output pair\n", 362 | "i = np.random.randint(0, len(train_loader.dataset))\n", 363 | "train_x_sample, train_y_sample = next(iter(train_loader))\n", 364 | "print(f'Shape of train_x_sample: {train_x_sample.shape}')\n", 365 | "print(f'Shape of train_y_sample: {train_y_sample.shape}')\n", 366 | "\n", 367 | "# get min and max of train y\n", 368 | "train_y_min, train_y_max = train_y_sample.min(), train_y_sample.max()\n", 369 | "print(f'Min and max of train_y_sample: {train_y_min}, {train_y_max}')" 370 | ] 371 | }, 372 | { 373 | "cell_type": "code", 374 | "execution_count": null, 375 | "metadata": {}, 376 | "outputs": [], 377 | "source": [ 378 | "train_x_sample.dtype, train_y_sample.dtype" 379 | ] 380 | }, 381 | { 382 | "cell_type": "markdown", 383 | "metadata": {}, 384 | "source": [ 385 | "### Visualize the dataset" 386 | ] 387 | }, 388 | { 389 | "cell_type": "code", 390 | "execution_count": null, 391 | "metadata": {}, 392 | "outputs": [], 393 | "source": [ 394 | "from matplotlib.colors import ListedColormap\n", 395 | "\n", 396 | "random_indices = np.random.choice(len(train_loader.dataset), size=6, replace=False)\n", 397 | "\n", 398 | "fig, axs = plt.subplots(3, 2, figsize=(5, 5)) # Adjust the figure size as needed\n", 399 | "\n", 400 | "for i, ax in enumerate(axs.flat):\n", 401 | " idx = random_indices[i // 2]\n", 402 | " data = train_loader.dataset[idx]\n", 403 | "\n", 404 | " # Just to put in order in the plot\n", 405 | " if i % 2 == 0:\n", 406 | " image = data[0]\n", 407 | " image = image.permute(1, 2, 0)\n", 408 | " ax.imshow(image, cmap=plt.cm.binary) \n", 409 | " ax.set_title(\"Original Image\")\n", 410 | " else:\n", 411 | " segmentation = data[1] \n", 412 | " segmentation = np.transpose(segmentation, (1, 2, 0))\n", 413 | " seg_img = dataset_generation.display_segmented_image(segmentation)\n", 414 | " ax.imshow(seg_img, cmap=plt.cm.binary)\n", 415 | " ax.set_title(\"Segmented Image\")\n", 416 | "\n", 
417 | " ax.set_xticks([])\n", 418 | " ax.set_yticks([])\n", 419 | " ax.set_aspect('equal')\n", 420 | "\n", 421 | "fig.suptitle('Input and Segmented Images', fontsize=16)\n", 422 | "plt.tight_layout()\n", 423 | "plt.show()" 424 | ] 425 | }, 426 | { 427 | "cell_type": "markdown", 428 | "metadata": {}, 429 | "source": [ 430 | "### Load the model" 431 | ] 432 | }, 433 | { 434 | "cell_type": "code", 435 | "execution_count": null, 436 | "metadata": {}, 437 | "outputs": [], 438 | "source": [ 439 | "unet_model = UNet(retain_dim=True, num_classes=num_classes, output_size=(60, 60)).to(DEVICE)" 440 | ] 441 | }, 442 | { 443 | "cell_type": "code", 444 | "execution_count": null, 445 | "metadata": {}, 446 | "outputs": [], 447 | "source": [ 448 | "# Model summary\n", 449 | "summary(model=unet_model,\n", 450 | " input_size=(1, 1, 60, 60),\n", 451 | " col_names=[\"input_size\", \"output_size\", \"num_params\", \"trainable\"],\n", 452 | " col_width=20,\n", 453 | " row_settings=[\"var_names\"],\n", 454 | " depth=5\n", 455 | " )" 456 | ] 457 | }, 458 | { 459 | "cell_type": "markdown", 460 | "metadata": {}, 461 | "source": [ 462 | "### Loss function and optimizer" 463 | ] 464 | }, 465 | { 466 | "cell_type": "code", 467 | "execution_count": null, 468 | "metadata": {}, 469 | "outputs": [], 470 | "source": [ 471 | "# loss_fn = nn.CrossEntropyLoss()\n", 472 | "loss_fn = nn.BCEWithLogitsLoss()\n", 473 | "optimizer = torch.optim.Adam(unet_model.parameters(), lr=LR_RATE)" 474 | ] 475 | }, 476 | { 477 | "cell_type": "markdown", 478 | "metadata": {}, 479 | "source": [ 480 | "### Training loop" 481 | ] 482 | }, 483 | { 484 | "cell_type": "code", 485 | "execution_count": null, 486 | "metadata": {}, 487 | "outputs": [], 488 | "source": [ 489 | "utils.set_seed()\n", 490 | "\n", 491 | "from timeit import default_timer as timer\n", 492 | "start_time = timer()\n", 493 | "\n", 494 | "# Store train and validation losses\n", 495 | "train_losses = []\n", 496 | "val_losses = []\n", 497 | "\n", 498 | "for epoch in range(EPOCHS):\n", 499 | "\n", 500 | " unet_model.train()\n", 501 | "\n", 502 | " train_loss = 0.0\n", 503 | " val_loss = 0.0\n", 504 | "\n", 505 | " for x, y in train_loader:\n", 506 | " x = x.to(DEVICE)\n", 507 | " y = y.to(DEVICE)\n", 508 | "\n", 509 | " pred_logits = unet_model(x)\n", 510 | " loss = loss_fn(pred_logits, y)\n", 511 | "\n", 512 | " optimizer.zero_grad()\n", 513 | " loss.backward()\n", 514 | " optimizer.step()\n", 515 | "\n", 516 | " train_loss += loss.item()\n", 517 | "\n", 518 | " train_loss /= len(train_loader)\n", 519 | " train_losses.append(train_loss)\n", 520 | "\n", 521 | " unet_model.eval()\n", 522 | " with torch.no_grad():\n", 523 | " for x, y in val_loader:\n", 524 | " x = x.to(DEVICE)\n", 525 | " y = y.to(DEVICE)\n", 526 | "\n", 527 | " pred_logits = unet_model(x)\n", 528 | " loss = loss_fn(pred_logits, y)\n", 529 | "\n", 530 | " val_loss += loss.item()\n", 531 | "\n", 532 | " val_loss /= len(val_loader)\n", 533 | " val_losses.append(val_loss)\n", 534 | "\n", 535 | " print(f'Epoch: {epoch+1}/{EPOCHS} | Train Loss: {train_loss:.4f} | Val Loss: {val_loss:.4f}')\n", 536 | "\n", 537 | "end_time = timer()\n", 538 | "total_time = end_time - start_time\n", 539 | "print(f\"Total training time: {total_time:.2f} seconds\")" 540 | ] 541 | }, 542 | { 543 | "cell_type": "markdown", 544 | "metadata": {}, 545 | "source": [ 546 | "### Plot losses" 547 | ] 548 | }, 549 | { 550 | "cell_type": "code", 551 | "execution_count": null, 552 | "metadata": {}, 553 | "outputs": [], 554 | "source": [ 555 | "# Plot train 
and validation loss over epochs\n", 556 | "plt.plot(train_losses, label='train')\n", 557 | "plt.plot(val_losses, label='val')\n", 558 | "plt.legend()\n", 559 | "plt.title('Loss over epochs')\n", 560 | "plt.xlabel('Epoch')\n", 561 | "plt.ylabel('Loss')\n", 562 | "plt.grid()\n", 563 | "plt.tight_layout()\n", 564 | "plt.show()" 565 | ] 566 | }, 567 | { 568 | "cell_type": "markdown", 569 | "metadata": {}, 570 | "source": [ 571 | "### Calculate mean IoU" 572 | ] 573 | }, 574 | { 575 | "cell_type": "code", 576 | "execution_count": null, 577 | "metadata": {}, 578 | "outputs": [], 579 | "source": [ 580 | "def calculate_iou(preds, labels):\n", 581 | " # Ensure the input tensors are binary (0 or 1)\n", 582 | " # preds and labels should be of shape [batch_size, n_classes, height, width]\n", 583 | " smooth = 1e-6 # Small epsilon to avoid division by zero\n", 584 | "\n", 585 | " # True Positives (TP)\n", 586 | " intersection = torch.logical_and(preds, labels).float().sum((2, 3))\n", 587 | "\n", 588 | " # False Positives (FP)\n", 589 | " false_positive = torch.logical_and(preds, torch.logical_not(labels)).float().sum((2, 3))\n", 590 | "\n", 591 | " # False Negatives (FN)\n", 592 | " false_negative = torch.logical_and(torch.logical_not(preds), labels).float().sum((2, 3))\n", 593 | "\n", 594 | " # Union is calculated as TP + FP + FN\n", 595 | " union = intersection + false_positive + false_negative\n", 596 | "\n", 597 | " # IoU for each class\n", 598 | " IoU = (intersection + smooth) / (union + smooth)\n", 599 | "\n", 600 | " return IoU\n", 601 | "\n", 602 | "def mean_iou(preds, labels):\n", 603 | " # Calculate IoU for each class\n", 604 | " ious = calculate_iou(preds, labels)\n", 605 | " # Mean IoU across all classes\n", 606 | " return ious.mean()\n" 607 | ] 608 | }, 609 | { 610 | "cell_type": "code", 611 | "execution_count": null, 612 | "metadata": {}, 613 | "outputs": [], 614 | "source": [ 615 | "\n", 616 | "def visualize_predictions(orig_img, gt_img, pred_img):\n", 617 | " plt.clf()\n", 618 | " plt.figure(figsize=(18, 6)) \n", 619 | "\n", 620 | " # Plot the original image\n", 621 | " ax1 = plt.subplot(1, 3, 1) # 1 row, 3 columns, 1st subplot\n", 622 | " image = orig_img.permute(1, 2, 0).numpy() \n", 623 | " ax1.imshow(image, cmap=plt.cm.binary)\n", 624 | " ax1.set_title(\"Original Image\")\n", 625 | " ax1.axis('off') \n", 626 | "\n", 627 | " # Plot the ground truth image\n", 628 | " ax2 = plt.subplot(1, 3, 2) # 1 row, 3 columns, 2nd subplot\n", 629 | " gt_segmentation = gt_img.permute(1, 2, 0).numpy() \n", 630 | " gt_seg_img = dataset_generation.display_segmented_image(gt_segmentation) \n", 631 | " ax2.imshow(gt_seg_img)\n", 632 | " ax2.set_title(\"Ground Truth Image\")\n", 633 | " ax2.axis('off') # Hide the axis\n", 634 | "\n", 635 | " # Plot the predicted image\n", 636 | " ax3 = plt.subplot(1, 3, 3) # 1 row, 3 columns, 3rd subplot\n", 637 | " pred_segmentation = pred_img.permute(1, 2, 0).numpy()\n", 638 | " pred_seg_img = dataset_generation.display_segmented_image(pred_segmentation, threshold=0.5)\n", 639 | " ax3.imshow(pred_seg_img)\n", 640 | " ax3.set_title(\"Predicted Image\")\n", 641 | " ax3.axis('off') # Hide the axis\n", 642 | "\n", 643 | " plt.show()" 644 | ] 645 | }, 646 | { 647 | "cell_type": "markdown", 648 | "metadata": {}, 649 | "source": [ 650 | "### Visualize predictions" 651 | ] 652 | }, 653 | { 654 | "cell_type": "code", 655 | "execution_count": null, 656 | "metadata": {}, 657 | "outputs": [], 658 | "source": [ 659 | "def min_max_normalize(x, xmin, xmax):\n", 660 | " return (x - 
xmin) / (xmax - xmin)" 661 | ] 662 | }, 663 | { 664 | "cell_type": "code", 665 | "execution_count": null, 666 | "metadata": {}, 667 | "outputs": [], 668 | "source": [ 669 | "\n", 670 | "\n", 671 | "unet_model.eval()\n", 672 | "with torch.no_grad():\n", 673 | "\n", 674 | " for idx, (x, y) in enumerate(train_loader):\n", 675 | " x = x.to(DEVICE)\n", 676 | " y = y.to(DEVICE)\n", 677 | "\n", 678 | " pred_logits = unet_model(x)\n", 679 | "\n", 680 | " pred_norm = min_max_normalize(pred_logits, pred_logits.min(), pred_logits.max())\n", 681 | "\n", 682 | " # Min and max of train y\n", 683 | " print(f'Min and max of y: {y.min()}, {y.max()}')\n", 684 | " print(f'Min and max of pred: {pred_logits.min()}, {pred_logits.max()}')\n", 685 | " print(f'Min and max of pred: {pred_norm.min()}, {pred_norm.max()}')\n", 686 | "\n", 687 | " pred_softmax = torch.softmax(pred_logits, dim=1)\n", 688 | " pred_argmax = torch.argmax(pred_softmax, dim=1).unsqueeze(1)\n", 689 | "\n", 690 | " print(f'Pred argmax shape: {pred_argmax.shape}')\n", 691 | "\n", 692 | " iou_scores = calculate_iou(pred_logits, y)\n", 693 | " # mean_iou_score = mean_iou(pred_logits, y)\n", 694 | "\n", 695 | " print(f\"Mean IoU: {iou_scores.cpu().numpy().mean()}\")\n", 696 | "\n", 697 | " visualize_predictions(x[0].cpu(), y[0].cpu(), pred_norm[0].cpu())\n", 698 | " break\n", 699 | " \n", 700 | " " 701 | ] 702 | } 703 | ], 704 | "metadata": { 705 | "kernelspec": { 706 | "display_name": "unet_training", 707 | "language": "python", 708 | "name": "python3" 709 | }, 710 | "language_info": { 711 | "codemirror_mode": { 712 | "name": "ipython", 713 | "version": 3 714 | }, 715 | "file_extension": ".py", 716 | "mimetype": "text/x-python", 717 | "name": "python", 718 | "nbconvert_exporter": "python", 719 | "pygments_lexer": "ipython3", 720 | "version": "3.10.14" 721 | } 722 | }, 723 | "nbformat": 4, 724 | "nbformat_minor": 2 725 | } 726 | -------------------------------------------------------------------------------- /mnist_extended.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# MNIST extended: a simple dataset for semantic segmentation and object detection\n", 8 | "\n", 9 | "Most open source datasets for computer vision are huge and complex. Building a model from scratch using ImageNet or Coco is impossible without days of training on specialised hardware such as GPUs or TPUs. I've often found myself in need of a simple and small dataset to test model architectures. I don't always have a GPU available and I don't want to wait hours for the results of my experiments.\n", 10 | "For image classification I often use MNIST. It's an incredibly useful dataset of small digts (if you're not familiar with it don't worry, we'll see what it looks like soon.). However, in its raw form it's really only useful for image classification tasks. For more complex tasks such as semantic segmentation and object detection I created MNIST extended, a dataset as simple as MNIST but that can be used for more than just image classification. 
In this post, I will describe how to use MNIST extended and share a few details on the simple code that is used to generate it.\n", 11 | "\n", 12 | "This dataset is used in my [\"Simple deep learning\" series](https://awaywithideas.com/simple-deep-learning) in the following posts:\n", 13 | "- [A simple example of semantic segmentation with tensorflow keras](https://awaywithideas.com/a-simple-example-of-semantic-segmentation-with-tensorflow-keras)\n", 14 | "\n", 15 | "This post won't go into the details of how the dataset is created, rather we'll focus on what the dataset is composed of. However, the code is very well documented and easy to understand. You can find all the functions used here in my [MNIST extended package](https://github.com/LukeTonin/simple-deep-learning/tree/main/simple_deep_learning/mnist_extended) on github." 16 | ] 17 | }, 18 | { 19 | "cell_type": "markdown", 20 | "metadata": {}, 21 | "source": [ 22 | "## MNIST\n", 23 | "\n", 24 | "MNIST is a dataset of handwritten digits.\n", 25 | "The original dataset can be downloaded from [Yann Lecun's website](http://yann.lecun.com/exdb/mnist/).\n", 26 | "However, we do not need to download the data from there since we will be using a Keras function to do that for us. This dataset forms the base of both the semantic\n", 27 | "segmentation and object detection components of MNIST extended." 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 1, 33 | "metadata": {}, 34 | "outputs": [ 35 | { 36 | "name": "stdout", 37 | "output_type": "stream", 38 | "text": [ 39 | "(60000, 28, 28) (60000,)\n", 40 | "(10000, 28, 28) (10000,)\n" 41 | ] 42 | } 43 | ], 44 | "source": [ 45 | "import tensorflow as tf\n", 46 | "\n", 47 | "(train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()\n", 48 | "\n", 49 | "print(train_images.shape, train_labels.shape)\n", 50 | "print(test_images.shape, test_labels.shape)" 51 | ] 52 | }, 53 | { 54 | "cell_type": "markdown", 55 | "metadata": {}, 56 | "source": [ 57 | "As you can see, there are 70000 images in total. Let's display a few just to get an idea of what MNIST looks like." 
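Before the next cell, which uses the package's display_digits helper, here is a minimal, hedged alternative for previewing a few digits with plain matplotlib. It is only a sketch: it assumes nothing beyond the tensorflow and matplotlib pins in requirements.txt, and the grid size and number of digits shown are arbitrary choices for illustration.

```
import matplotlib.pyplot as plt
import tensorflow as tf

# Load MNIST exactly as in the cell above (the test split is not needed here).
(train_images, train_labels), _ = tf.keras.datasets.mnist.load_data()

# Show the first ten digits in a 2x5 grid; each image is a 28x28 grayscale array.
fig, axes = plt.subplots(2, 5, figsize=(8, 4))
for ax, image, label in zip(axes.flat, train_images[:10], train_labels[:10]):
    ax.imshow(image, cmap=plt.cm.binary)
    ax.set_title(str(label))
    ax.axis('off')
plt.tight_layout()
plt.show()
```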
58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 2, 63 | "metadata": {}, 64 | "outputs": [ 65 | { 66 | "data": { 67 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj4AAAHPCAYAAABTFNCNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAABGkUlEQVR4nO3dd3xUVf7/8U+kKqGHHiAUIbSlFxUQlV4EpStKXRXFxVUEFQRdlmYDFliKFBEkCCrK0qS4gqwUEVBZIMpCpMgKQQKYgAkhvz++P8+ec83EyWRmbmbO6/l4fB+P9/HcuTlfLhPO3tMiMjIyMgQAAMACN7ndAAAAgGCh4wMAAKxBxwcAAFiDjg8AALAGHR8AAGANOj4AAMAaebNzcVRUlMTExASoKfAkISFBEhMT/XpPnqV7eJ7hg2cZXvz9PHmW7snqWWar4xMTEyP79u3zS6PgvSZNmvj9njxL9/A8wwfPMrz4+3nyLN2T1bNkqAsAAFiDjg8AALAGHR8AAGANOj4AAMAadHwAAIA16PgAAABr0PEBAADWoOMDAACsQccHAABYg44PAACwBh0fAABgjWyd1QUEUkpKilH+61//qvKPP/6o8gcffGBc17RpU5UrVqzo8f4tW7ZU+a677jLqOEgQyB3i4+ONctu2bVU+ffq0UffUU0+pPH369IC2C+GDNz4AAMAadHwAAIA1rBnqunTpklG+cOGCyn/84x+Nuk8++UTlqKgolb/66ivjuvLly/uziVY6dOiQyn369DHqjh49qnJGRobKERERxnVbt2716mctXrxYZf25iog0btxY5TVr1hh1N998s1f3B+C9+fPnqzxx4kSVf/rpJ+O6a9euqVypUiWjbujQoQFqnT1Wr16t8oIFC1T+8ssvjevGjRun8tNPPx34hgUQb3wAAIA16PgAAABrWDPU9dZbbxnlrF7V3XTT//qD+mvXXr16Gdd9/vnn/mmcZX755ReV9WFGfWgr0PShThGRzZs3q7x27Vqjrm/fvkFpU6jQhydFRJo0aaJyoUKFVHYOXerfn3vuuSdArUNu9c033xhlfUWW/jshK6dOnTLKf//73zPNMC1btkzlyZMnG3UJCQkq68OKzikFY8aM8Vj35z//OdOf+8Ybbxjlrl27qlyjRo3faXXg8MYHAABYg44PAACwBh0fAABgDWvm+PhKX8p87733utiS8KFvF7Bnzx6vPlOuXDmVW7dubdTdd999Kus7PIuYcwf0sW3n9ga6kydPetUmW507d84o63/Gt9xyi8pLly41rps3b57KefP+71dP7969jev0OVXVqlUz6urWretDixEse/fuNcoDBw5U2Tmvztt5PaVLl1ZZ35JCRKRVq1bZbaI1Fi5cqPITTzyhclpamlefz58/v1HWtxRxbu2ii4uLU1lfAi8iMmfOHJX/85//eNWOQOCNDwAAsAYdHwAAYA2Gun5H4cKFVX7uuedcbEn4qFy5cqb/fcCAAUa5QIECKusHlpYpU8ann1u9enWVe/bs6dM9kLWxY8eqPGLECKNOX26sD3GuW7fOuE5/Va4PiYmIdOzYUeVRo0apfOedd/rYYuSUvr3BlClTjDr9wFF9qETkt0uiPdGHsjt37uxLE600fvx4lbMa3tKHk/XvVP369Y3r9u3bl2kWEfnhhx9Ufvzxx1XWl8eLmFvFuCl3tAIAACAI6PgAAABr0PEBAADWCOs5Pvo45Ntvv+3TPfRlzxMmTDDqXn75Zd8aZrnatWurfOPGDb/eOyUlxSjrc4Oc8w88admypV/bFG6cWwbo9KXp+hwtEc/b2icnJxvlf//73yovX77cqJs1a5bK27dvV9m5jDo2NtZjG5F9P//8s1HW53Lpp3v/97//9en+DRo0UFk/GkFEpFOnTj7d03Znz57N9L87j17Sn19WWrRo4bGuXbt2KiclJXm8LrfMreSNDwAAsAYdHwAAYI2wHurSl9gdPHjQp3vop01zSrd7rly5ovKKFSuMOn057YYNG4y6EydOZHo/51Latm3bqtysWTOf22mDHTt2GOXu3burHB0dne376d8xEfPPXz/5XcRclqvvBO1cEs9Ql39dvnzZKOu7r/s6vFW8eHGVN23apLK+UzP8Q/9eOk9M98WCBQuM8tatWzP9WfXq1TOuGz58eI5/tj/wxgcAAFiDjg8AALBGyA91OXcDPXDggMr6AXm+0neO1VcjIfA+++wzlR966CGV/XGIqHPF0SOPPKJynjx5cnz/cPbKK68YZf074u+dWZ33q1GjRqbXHT9+3K8/FyK7du1SedKkSUadvvLOWw8++KBR/tOf/qQyw1v+px+wXaxYMZXLli3r0/1Onz6t8ujRoz1ep09LmD17tlHnadf+YOONDwAAsAYdHwAAYA06PgAAwBohP8fHuetr06ZN/Xp/lsUGzy+//GKUn3rqKZW///57lb091Tkr/fv3N8rO3UzhWeHChYP2s5x/J5xbGfzKeZI0ck7//n3xxRc+3UPf6sC5CzcCS99FfdmyZSo7t/zQn5Hu4sWLRlmf1+Pc3kA3efJklatWrepdY4OMNz4AAMAadHwAAIA1Qn6oK9CcrwUROM5dlvWtCfzNOWQyZMgQlTmkNPdw7gqsHzwcFRWlsj+2roC5q++pU6ey/XnnsElWy54RWPp2Ae+++67K+sHNIubvu5IlS6q8Z88e47q4uDiPP0v//g0ePDj7jQ0y3vgAAABr0PEBAADWYKgrE/ouwfqutAis9evXG2V9V+4iRYqo7NwB9v7771f5vffeM+rmz5+f6c9yrhb66aefstdYBMXXX3/tsa5EiRIqFyxYMBjNCXvfffedymfPnvXqMxUqVFB5zZo1Hq/TV/uIiIwdO1blhg0bqrxlyxbjOn34Bd5r3LixyvohzM7pGx07dlR54cKFKuu72YuYq2mdhwe/9dZbOWprsPHGBwAAWIOODwAAsAYdHwAAYA0msGRi1KhRKufLl8/FltjlySefNMpdu3ZVWT9puFKlSh7v0bp1a6Ocmpqq8pIlS1T2x+7PCDznnC1dnz59gtiS8HT06FGjfOPGDZWz+o4UL15cZX0riO3btxvXvf766ypv3rzZqNPvf/DgQZXfeecd4zp9WTZ807dvX5V37txp1H355ZcqN2jQQGXn84+MjFT56aef9nMLg4s3PgAAwBp0fAAAgDVCfqjLueQOoSt//vxGuWbNmjm+xwMPPKCyPtSF3EvfMXjVqlVGnX5A6rBhw4LWpnCVmJholJ1/3p6UKVNGZf0gym7duhnX/fzzzyrr21OIeB5K+/DDD42y/h3Wd+uG9/QtWpzbA+j/hp45c8bjPXr06KFyv379/Nc4F/DGBwAAWIOODwAAsAYdHwAAYI2Qn+PzySefuN0Ea125ckXl
tLQ0o04/TsBN+lbtCA36KdDXrl0z6vSTpCtXrhy0NoUr59YQ+pLlS5cuefycPncnPT090//uK+cWIklJSSozxyfnateubZT1+Vr6HB/nnKx169apfOHCBaMu1I4V4Y0PAACwBh0fAABgjZAf6vKVvix2+fLlRp3zVSAyp+9w7TxR+dVXX1W5Z8+eQWuT07fffpvpf3ee5p3VbtAILOcwqX7Sc5EiRYw6/e8Vcs65c7M+rJSV06dPq/zHP/7Rn02Sv/3tb0a5evXqfr2/7fQT2EVEDhw4oLK+hYhzmPnkyZMq6ye6i5i7cuu7eudWvPEBAADWoOMDAACsEZJDXYcOHVJZP4QyOyZOnKiyfhgmsqav5Pr4449V/v77743rNm3apLK/h7r01+wiIgkJCSrPmzfPqHMeePgr56ta/XA+BJ4+vDVy5Eij7siRIyrHxsYadTwn/2rUqJFR1ldNJScnB60dQ4cOVTk6OjpoP9cWn376qcr6ULKISLVq1VSeM2eOyj/88INxnX6ItH6wqYj5/FavXq1ynjx5fGpvoPHGBwAAWIOODwAAsAYdHwAAYI2QnOOjL5/ManfRrOzfv19l54m0FSpU8K1hFlixYoXK+vJG50nL7733nso7d+406v7whz+o3LhxY48/69ixYyp/9tlnKl+8eNG47ty5cx7vobdL3xF23LhxHj+DwIuPj1d57ty5Rt1NN/3vf4+NHj06aG2ykfO7qe/CHGhdunRRedasWSo7t5pAzm3YsEFl59yddu3aqXzPPfd4vEfdunVVdv7eXrNmjcqJiYkq67tC5ya88QEAANag4wMAAKwRkkNdAwcOVFlf2i4ikpKS4tU93n77bZWdh7Hpu8OWKlXKlyaGLW+HFvXrnLvB6sMc+tLHrOjPyDms5i19SMW5jBeBd/36dZUXLFjg8bp69eqpPHjw4IC2yXY9evQwyvqu9foQyKlTp3L8s/Td8kVExowZozLDW4G1bNkylZ1/1g888IDK+g7M+nQQZ9n5b2ao4Y0PAACwBh0fAABgDTo+AADAGiE5x+exxx5T+aWXXjLqvJ3jo3OeJps3b0j+sQRFixYtVNbH7PWjLIJNP8G7Vq1aRt3ixYs91iG49FOg9eXLMTExxnX61gUILv13n/79dv5O1OdrZaV79+4qO7cmuO2223xpInLIeeq6Po/O27mUzrqs/q7kRrzxAQAA1qDjAwAArJH730n9juXLlxvlDh06ePW5ESNGqDx27Fijzjn0hf9p3bq1yrt371Z58uTJxnWeTkXPjm7duqlcunRplfUdREXMZ+48zRvu0XdwFREZNmxYpteNHz/eKDuXPSN4qlatqrK+VUhcXJxxnf59P336tMr6EnURkTvvvFNlhrbc07VrV5UXLVqU4/s5/41ctWqVyiVLlszx/QONNz4AAMAadHwAAIA1Qn6o6+677zbKGzduVLlTp04qDxkyxLhu4sSJKuurguA9fZWUvjNoZmXYZ9KkSUb566+/VllfydWrV69gNQk+6t+/f5Zl5G7PPvusynny5DHq9F3U9QO627Zta1ynHyrr/Hc3FIa3dLzxAQAA1qDjAwAArEHHBwAAWCPk5/jcdJPZd2vfvr3K6enpwW4OgP/viy++MMrFihVTWV/+yvJ1ILBq1Kih8rx584w6Z9kGvPEBAADWoOMDAACsEfJDXQByp507d7rdBAD4Dd74AAAAa9DxAQAA1qDjAwAArEHHBwAAWIOODwAAsAYdHwAAYI2IjIyMDG8vjoqKMk5VRnAkJCRIYmKiX+/Js3QPzzN88CzDi7+fJ8/SPVk9y2x1fAAAAEIZQ10AAMAadHwAAIA16PgAAABrhH3HZ8iQIVK6dGmpW7eu201BDl27dk2aNWsm9evXlzp16siECRPcbhJ8FB8fLw0aNFD/V6RIEZkxY4bbzUIOJCUlSa9evSQ2NlZq1aolu3btcrtJ8NH06dOlTp06UrduXenfv79cu3bN7Sb5VdhPbt6xY4dERkbKww8/LIcOHXK7OciBjIwMSU5OlsjISElLS5OWLVvKzJkzpUWLFm43DTmQnp4uFSpUkD179kjlypXdbg58NHDgQGnVqpUMGzZMUlNTJSUlRYoVK+Z2s5BNZ86ckZYtW8rhw4fl5ptvlj59+kjnzp1l0KBBbjfNb8L+jU/r1q2lRIkSbjcDfhARESGRkZEiIpKWliZpaWkSERHhcquQU9u2bZNq1arR6Qlhly9flh07dsjQoUNFRCR//vx0ekLY9evX5erVq3L9+nVJSUmR8uXLu90kvwr7jg/CS3p6ujRo0EBKly4t7dq1k+bNm7vdJOTQypUrpX///m43Azlw/PhxKVWqlAwePFgaNmwow4YNk+TkZLebBR9UqFBBRo0aJZUqVZJy5cpJ0aJFpX379m43y6/o+CCk5MmTRw4ePCinT5+WvXv3MnwZ4lJTU2Xt2rXSu3dvt5uCHLh+/brs379fhg8fLgcOHJBChQrJ1KlT3W4WfHDx4kX56KOP5MSJE/LDDz9IcnKyLF++3O1m+RUdH4SkYsWKSZs2bWTTpk1uNwU5sHHjRmnUqJGUKVPG7aYgB6KjoyU6Olq9ge3Vq5fs37/f5VbBF1u3bpUqVapIqVKlJF++fHL//ffL559/7naz/IqOD0LG+fPnJSkpSURErl69Klu3bpXY2Fh3G4UciYuLY5grDJQtW1YqVqwo8fHxIvJ/87Zq167tcqvgi0qVKsnu3bslJSVFMjIyZNu2bVKrVi23m+VXYb+qq3///vLpp59KYmKilClTRl5++WU1AQ+h5euvv5aBAwdKenq63LhxQ/r06SPjx493u1nwUUpKilSsWFGOHz8uRYsWdbs5yKGDBw+qFV1Vq1aVJUuWSPHixd1uFnwwYcIEeffddyVv3rzSsGFDWbhwoRQoUMDtZvlN2Hd8AAAAfsVQFwAAsAYdHwAAYA06PgAAwBp0fAAAgDXo+AAAAGvkzc7FUVFREhMTE6CmwJOEhARJTEz06z15lu7heYYPnmV48ffz5Fm6J6tnma2OT0xMjOzbt88vjYL3mjRp4vd78izdw/MMHzzL8OLv58mzdE9Wz5KhLgAAYA06PgAAwBp0fAAAgDXo+AAAAGvQ8QEAANbI1qouAAAAX2zfvl3lu+++26hLT08PWjt44wMAAKxBxwcAAFiDoS4AAOB3x44dM8rjxo1TuXDhwsFujsIbHwAAYA06PgAAwBp0fAAAgDWY4wMAAPzi8OHDKj/wwANG3VdffaXy1KlTg9YmJ974AAAAa9DxAQAA1mCoCwAscuXKFZXr1atn1JUsWVLlRo0aqbx06VLjurS0tEzv3aNHD6NcokQJlQsUKGDUPfzwwyo3bNjQ43XI/Q4cOKByx44dVT537pxx3X333afy6NGjA98wD3jjAwAArEHHBwAAWIOhLgC/4Tww8PLlyyoXKlTIqEtOTs7Rz4qMjDTK+fLly9H9kLWIiAiVCxYsaNTt378/0+ytDz/80Otr586dq/KQIUNUXrBggXFdnjx5st0OBNeYMWNU1oe3unfvbly3atU
qlfW/h8HGGx8AAGANOj4AAMAadHwAAIA1wnqOjz5PwTlnwR/y5v3fH99NN9ndh1yxYoVRfvDBBz1eW7p0aZUfffTRbP+satWqGeV+/fp59Tl9roD+7GySkZFhlD/77DOVX3rpJZWvXbtmXLdr1y6V69SpY9T9+9//zlGbGjRoYJSbNm2qcq9evYy62267TWU3T3cOZfqcqrfeesuoe+GFF1Tu1KmTytHR0V7dW9+1V0TkwoULKm/ZssWo00/uXrx4scrO39Vvvvmmysz/co++hcGIESOMuk8//VRlfV5PduZ8BZPd/1oDAACr0PEBAADWCJn3/fpy2jVr1qisv4J3+vLLL1Xet2+f39vUvn17lceOHWvUtWrVSmU3l+0FyxdffGGUs/r/+fz58yr/9a9/zfHPHjx4sFfXDRw4UOVFixYZdbYMVY4cOdIor169WuUOHTqoXKpUKeM6fbmxc6jLFydPnlTZ+d3Uh9+WLVtm1FWpUkXl119/XeXbb7/duK5o0aI5bqMNWrRoYZQ/+eSTgP2sixcvGmV9GFP/uc5dol977TWVo6KiAtQ6OOm7MYuIdOnSReWzZ88adfoO4Po2BbmVHb/tAQAAhI4PAACwCB0fAABgjVw7x+fEiRNG+d5771X50KFDwW5OpjZv3pxpFjGXWMfFxQWtTW5xztWJiYlR+aOPPjLqvvnmmxz9rKtXr3qsS01NNcrXr19XWZ87cNdddxnX6SdFhzPnn118fLzKRYoUCVo79GXpffv29Xjdt99+a5TXrl2r8hNPPKGys+07d+5U2XkkBtzxr3/9yyh7mk9UtmxZo5w/f/6AtQkiN27cUFmf16PP6RER+fHHH1V2biWhz8MqV66cv5vod7zxAQAA1qDjAwAArJGrhrpmz56t8sSJE406/cRXf9BPJc7q9F99aEB/Jfh73nvvPZUTEhJUXr9+vXFdiRIlvL5nbuY8sVtfNu1cQp1TWW1NoJ/+K2K+gtXt3bvXKNsy1KXvghsKatSoYZQHDBigsj5U8sMPPwStTTD99NNPKuvbjoiITJ48WeUlS5Z4vIe+ncSoUaOMumAOwdpAf14iIm+88YbKkyZN8vi5jh07qqzvtC0SGsNbOt74AAAAa9DxAQAA1nB1qOvSpUtGWX8t6uvQVrFixVTWX9vdfPPNxnVdu3ZV2blLrU5fkbRu3TqjTn9l+MEHHxh1+mqi3bt3q3z06FHjOueOs/h9TZo08VinH3yI0KTvCutcLfnyyy+rfMcdd6js3ImblVz+5RzC0g+p1A+oPHXqlE/315/rM88849M94Jm+a/bMmTONOk/DW9OmTTPK+g75Wf2bGQp44wMAAKxBxwcAAFiDjg8AALCGq3N8nMucnSe++kJfrquf/uur7t27Z5pFzOW0zjk+njiX6W/cuDEHrYOvSpYs6XYTrKPP2/vqq69U1rd+EBF59913VXbuxK3/ztC/S3nz5qqdOcKOc5flZcuWefU5fW7lrbfeatS98MILKvfu3TsHrUNm9O1XXn31VZWnTJliXKfvwvzOO++o3LlzZ+O6rLZ9uXbtmsr699k5165Hjx6/0+rg4I0PAACwBh0fAABgjaC/H75w4YLKzkPr/OH06dN+v6cn+pI+5w7Mzt0xfxUdHR3QNtnu/PnzXl03fPjwALcEzqXoPXv2VPnnn39W2XkI5SuvvKKyvoRWhF183dKsWTOjXKZMGZX1wyudHnzwQZVDbdfwULdixQqV9eEtfcsXEfPg31atWnl1b+fUjjFjxqisbymi/z0REWnTpo3HdgQTb3wAAIA16PgAAABrBH2oKy4uTuWsdtl17rSsH064dOlSlZ2rPvRdKY8cOaLy/Pnzs9/Y3zFhwgSVPQ1tOVWuXNnv7bCdvjP2iy++6PG6du3aqezma1ZbOFfO6Ycc6jub67uji5hDIvqOsyLmSs26dev6pZ34feXLlzfK8+bNU1lfxeMcAlm4cKHK+oohEZEPP/xQ5fbt2/ujmVZLSUkxyvqqR32IeMaMGcZ1+vCWvkP3wYMHjeuGDh2q8vfff2/UpaWlZdom5+/ZfPnyZXpdsPHGBwAAWIOODwAAsAYdHwAAYI2gz/HRl85lpVOnTkZ5wYIFKj/++OMqO3c+1uf1RERE+NJEj5zLMf/5z3969bly5cqpPGzYML+2CeaO385TpHW33367ygULFgxomyDSuHFjo7x69epMr0tISDDKO3fuVHnTpk1GnT5PKyoqSmV9qbyISN++fVWuVauWdw2G1/QdePV8+PBh47px48ap7JzLpe/WrM8Nuueee/zUSrs4TwX49ttvVe7fv7/K+nxZEZHPPvtM5UGDBql8/Phxn9qhz717/fXXjbpChQr5dE9/440PAACwBh0fAABgjaAPdW3ZssWr6zp06OCxrkGDBplmX+mHuYmIfP311yq///77Kk+bNs24ztMSPhFzGd9rr72mctmyZX1tJjxYvny5xzp959DHHnssGM1BNsXExHgsO1/L63bs2KGycxhaHxIbMmSIUff888+r7Nw2AzlTu3Zto6wPYX3xxRdGnb4bdLdu3VTetWuXcV39+vX92cSw5Tzst06dOio/++yzKjsPB58zZ06m99MPLxUxp2lMnz7dqNMPI9WHt3LrNgW88QEAANag4wMAAKxBxwcAAFgj6HN8gknfFl8/1kBEZNWqVSqfPHnSqHMuu/SGc+7OE088ofIDDzyQ7fvBM+cJ7N98843Ha/Vt8p0nBSO0tW7dOtMsYi7lHThwoFG3Z88elT/++OMAtQ5OjRo1Msr6ESb6tgXOIxWWLFkS0HaFsgMHDqjsPALqoYceUvmll15S2dstZUaPHm2U9eNHnCZNmqRybp3Xo+ONDwAAsAYdHwAAYI1cO9Q1d+5co5w3b+ZN3bp1q1HWd25OT09XOavhEF/dfffdKr/xxhtGHUswA8e5LN35ildXsWLFQDcHuVCNGjVUfvrpp426Pn36BLs5kN9uG5KcnJzpdZcuXQpGc8LCv//9b491y5Yty9G9X3zxRaOs73b/l7/8xagbPnx4jn5WsPHGBwAAWIOODwAAsEbQh7o2bNig8lNPPWXU6SsxDh48aNQNHTo0kM3ySD9UTZ+5LmKu3PI0FAf/OHfunMpZvd6Njo42yvqOoghfKSkpRvnDDz9U+ZlnnjHqmjdvHowmwcG5gk4/HFPXsGHDYDQHv0MfLhYx/+2uVq1asJvjV7zxAQAA1qDjAwAArEHHBwAAWCPoE1M6deqk8urVq406fY5PMFWpUsUo33777SrrS2GdO48ieL766iuVs/p78sgjjxjlcuXKBaxNCLwff/zRKB89elTlmTNnqqzvYCsikpCQoPLgwYONOv30aPiXc8n6ggULVNZPCHfSl0p37tzZ/w0LU/fdd5/KxYoVM+qSkpK8ukdERITK+gnsU6dONa4rUaJE9huYS/HGBwAAWIOODwAAsIara7D1V9UiIlFRUSqvW7fOqNN3ZO7fv7/KcXFxxnXVq1dXWT84tG7dusZ1vXv3VrlevXpGXalSpX637Qg8fZhjwoQJHq/TD8
XL6nU6fPfBBx+oPHHiRKOubdu22b6fPiTiPCR43759Kl++fNmo07eX0A+5dP79uPPOO1WuXLmyUXfTTfzvvexKTU01yqdOnVJ5586dKut/T0SyPhDz5ptvVnnFihUqN27c2Od22kb/PvzjH/8w6p5//nmVT5w4obJz5/JevXqprE/zCGf8BgAAANag4wMAAKxBxwcAAFjD1Tk+hQsXNsqvvPKKylOmTDHq9DkB+vEQS5cuNa7Tx+/1ZXqM64eelStXqrx7926P1+nj3AUKFAhom2x12223qew8PkY/QsSXLSliYmKM8rhx41Tu0qWLUac/a+fvD3hHn0+zcOFCo07/89XnUs6dO9e4Tp/X46tBgwap3KNHjxzfz3YtW7Y0yp6OBAFvfAAAgEXo+AAAAGvk2iPF8+TJk2X5V/ny5QtGc+CC9evXZ/rfncuTJ02aFIzmWE3fAXvEiBEutgQ5deXKFZX/+c9/erzOuaWIL/ShM+e2IbNmzcrx/QFf8MYHAABYg44PAACwRq4d6oJ9Zs+ebZS3b9+e6XVDhgwxyrGxsQFrExBu9O/P+fPnjbrDhw+rrO+KP3LkSOO6Tz/9VGV9dZ1zdZa+k3737t19ai/gb7zxAQAA1qDjAwAArEHHBwAAWIM5Psg1Nm3aZJTT0tIyvU4/jR1A9uhbgOi7ZDvpOzwD4YQ3PgAAwBp0fAAAgDUY6kJIqFKlisoVKlRwsSUAgFDGGx8AAGANOj4AAMAadHwAAIA1mOODXMMfp0EDAJAV3vgAAABr0PEBAADWiMjIyMjw9uKoqCiJiYkJYHOQmYSEBElMTPTrPXmW7uF5hg+eZXjx9/PkWbonq2eZrY4PAABAKGOoCwAAWIOODwAAsEbYd3w2bdokNWvWlOrVq8vUqVPdbg5yID4+Xho0aKD+r0iRIjJjxgy3mwUfXLt2TZo1ayb169eXOnXqyIQJE9xuEnKA5xlekpKSpFevXhIbGyu1atWSXbt2ud0kvwrrOT7p6elSo0YN2bJli0RHR0vTpk0lLi5Oateu7XbTkEPp6elSoUIF2bNnj1SuXNnt5iCbMjIyJDk5WSIjIyUtLU1atmwpM2fOlBYtWrjdNPiA5xleBg4cKK1atZJhw4ZJamqqpKSkSLFixdxult+E9RufvXv3SvXq1aVq1aqSP39+6devn3z00UduNwt+sG3bNqlWrRqdnhAVEREhkZGRIiKSlpYmaWlpEhER4XKr4CueZ/i4fPmy7NixQ4YOHSoiIvnz5w+rTo9ImHd8zpw5IxUrVlTl6OhoOXPmjIstgr+sXLlS+vfv73YzkAPp6enSoEEDKV26tLRr106aN2/udpOQAzzP8HD8+HEpVaqUDB48WBo2bCjDhg2T5ORkt5vlV2Hd8clsFI//FRL6UlNTZe3atdK7d2+3m4IcyJMnjxw8eFBOnz4te/fulUOHDrndJOQAzzM8XL9+Xfbv3y/Dhw+XAwcOSKFChcJufmxYd3yio6Pl1KlTqnz69GkpX768iy2CP2zcuFEaNWokZcqUcbsp8INixYpJmzZtZNOmTW43BX7A8wxt0dHREh0drd7Y9erVS/bv3+9yq/wrrDs+TZs2le+++05OnDghqampsnLlSrn33nvdbhZyKC4ujmGuEHf+/HlJSkoSEZGrV6/K1q1bJTY21t1GwWc8z/BRtmxZqVixosTHx4vI/82nDLcFQWF9OnvevHll9uzZ0qFDB0lPT5chQ4ZInTp13G4WciAlJUW2bNki8+fPd7spyIGzZ8/KwIEDJT09XW7cuCF9+vSRrl27ut0s+IjnGV5mzZolDz74oKSmpkrVqlVlyZIlbjfJr8J6OTsAAIAurIe6AAAAdHR8AACANej4AAAAa9DxAQAA1qDjAwAArEHHBwAAWCNb+/hERUVJTExMgJoCTxISEiQxMdGv9+RZuofnGT54luHF38+TZ+merJ5ltjo+MTExsm/fPr80Ct5r0qSJ3+/Js3QPzzN88CzDi7+fJ8/SPVk9S4a6AACANej4AAAAa9DxAQAA1qDjAwAArEHHBwAAWIOODwAAsAYdHwAAYI1s7eMTanbs2KHyhAkTjLovv/xS5d27dxt1tWvXDmzDAACAK3jjAwAArEHHBwAAWIOODwAAsEbIz/E5efKkUX7xxRdVXrduncpJSUnGdaVLl1aZOT12OHz4sMrffvutx+smTZpklOfPn69yo0aN/N8wAAhDaWlpRnnFihUqjxw50qgbPHiwytOnTw9ou3jjAwAArEHHBwAAWCMkh7p++eUXlSdOnGjULV++PNPPVK5c2Sg/8cQT/m8Ycp2nnnpK5U8++UTlb775xut7zJ49W+XFixf7pV1AsCQmJhrldu3aqdypUyej7rnnnlO5SJEigW0Ywt6ePXuM8qBBg1QeMGCAUff6668Ho0kiwhsfAABgETo+AADAGiEz1HXo0CGVX3vtNZWXLVvm8TPdunVTuV+/fkads4zQ9be//U3l8ePHG3WXLl3K9v3y5jW/FjVq1PCtYWHqzJkzRjk6Olrlhg0bqtyjRw/juu7du2f6mZIlS/q5hdBdv37dKOtTBaZMmWLUnT59WuVZs2apXLRo0QC1DuFGn26S1dSAMWPGGOWbbgreexje+AAAAGvQ8QEAANag4wMAAKwRMnN89PkBJ06cUDkiIsK4Tl+2PnfuXJXLlSsXwNYhEPRdP53jwbqZM2eqfOPGjRz/3Dx58hjlmJiYHN8znOnfwYMHD2aaRUReeukllatVq6ZyqVKlvP5ZLVq0UPmZZ55RuUKFCl7fwzZly5Y1ytOmTVP5ySefNOr0OZP6c9Xn0Ykw5wemY8eOqbxo0SKVP/30U+O63DK3jzc+AADAGnR8AACANXLVUNeBAwdU7tixo1F3/vx5lfVXsG3atDGu27ZtW2Aah4DQh7OOHj1q1OlL0z/88MNgNclY7isi8u6776rcuXNnlW3d2VY/4FfE/A46X2178p///CfT/Ht2796t8saNG1XevHmzcV3FihW9vqdt9G0+ihcvbtRNnjxZ5bfffltl56G+Y8eOVblr167+biJyuZSUFKP89NNPq5zV74BHHnlEZTd/f/LGBwAAWIOODwAAsAYdHwAAYA1X5/hcvXrVKOsnpjtPFNbn9YwePVpl55brCC36yef6OHFuom+foB/XYOscH32ejYjnMX3nMvU777zTq/tfvnxZZefcHV18fLzKL7zwglGX1VE2+J+WLVsa5fnz56usbyERFxdnXKdvL1K1alWVnd9h/fR3toUIbfrRJ2+88YZRt2XLlkw/c9999xnlUaNGqXzzzTf7sXXZwxsfAABgDTo+AADAGq4OdTmHqfbs2ePx2meffdbj55C76UvWRczhLX1ZbG7VqFEjlWvVquViS3IHT6+1nZo2bWqUV61a5dXn9FfqSUlJRt29996rsj7kltWQGLynbwOwdOlSlfv06WNct3fvXpWXLFmi8uOPP25cV7hwYZX1XfVFzJ17mzVr5rFNt99+u8o9evTweB0CS9+9+8UXX/R4XWxsrMorVqww6goWLOj/hvmAN
z4AAMAadHwAAIA1gj7UdeHCBZXnzJnj8Tr9EEOR367aQOhwrgB47rnncnS/9u3bG2Xn3xVvOFepOIdUdEeOHFFZX+FVpUqVbP/ccHDq1CmPdfny5VNZH57Ojrx5//drKSoqyqjTnwUCS3+WziEmvayv1Jk0aZJx3TvvvKPyoUOHPP6s7du3e6y79dZbPbYDgePcLV8/ZNhJX8E5depUlXPL0JYTb3wAAIA16PgAAABr0PEBAADWCPocn1mzZqmc1bwK/WRgEf/vkvvFF1+o7Fx2i+xLT083ynPnzlU5q7HhrERGRqr8/vvvq/yHP/zBuK5s2bLZvrdzt+Gs/i7qy6a/+eYblW2d47N+/XqPdbfccovK3u7UnJXDhw8b5WvXrmV6XcOGDXP8s+CbEiVKqPz6668bdePGjVPZOcenZ8+eKp8/f97j/WfOnJnTJsJL+mkKa9asMequXLmisnPX5ZdfflllfVfv3Io3PgAAwBp0fAAAgDWCPtSVnJysckZGhsfr9N06nfTP6YeXOtWpU0flhIQEo+7nn3/O9j30A/ec7ejcubNRp7e/QIECHu8fLv71r38Z5SeffNKrz+nLHZ1LVZ9//nmVncNbCF/674h3333XqPvll18y/Uzz5s0D2ib4pnjx4irrh5mK/HZH9185h0iz+rcA3tGnImzcuNGo079jqampKme103qTJk2MsvPfv9yONz4AAMAadHwAAIA1gj7UpR8yuHjxYqMuq5U1nmQ1TOVcEeLpc97ew3k/fajLuTvxyJEjPdaFo1dffdWnz+mvwp27KSN3qlu3rlHWd9198MEHc3z/devWqTxx4kSP1+nf2759++b458L/zp49q/I999xj1Om/74sVK6bysmXLjOuKFi0akLaFA30I6+9//7tRp686LV26tMrz5s0zrstqlaYnzgNn165dq7J+kHPbtm2zfe9g4I0PAACwBh0fAABgDTo+AADAGkGf49OqVSuVnSfy6uPBzvk0mzZtUlmfW6OPXYqY44vOJXc5pe9GLCJy4MABlU+ePGnULV++PNN26LuVioTPUvfPPvvM7Sb8ru+++05lfcn073n44YdV7tKli1/bFIqc87kuXLigcvv27bN9P+euvc5ToT0ZOnSoyrVr1872z0XgzZ8/X+X4+HijTp/Xo/9+r1ixYsDbFS7y5MmjsrdbiDj/HdO3gtD/DXYaPHiwyosWLTLqsponmxvxxgcAAFiDjg8AALBG0Ie6dM5lsXq5Xbt2Rp2+PNwtzjYdO3ZMZeeuzsePH1f5oYceUtm5vM85VBdKli5dqrJ+uF1uou/YrS95dg5N6pwH8A0aNEhl/dWyrRo3buzX++lLYUV+u1uzrnDhwir/6U9/8ms74H/OQ0t1H330kcrsvB1Yp06dUnnLli1GnafhrZo1axrlZ599VuVQG9py4o0PAACwBh0fAABgDTo+AADAGq7O8Ql11atXV9l5grA+xyecXLx4UeXVq1errJ/qmx36PCFfvfnmmyqPHz/eqNPb9dNPP3l1v5UrVxrlu+66Kwetw+9ZsGCB19fq862ccwThvu7duxtlfdsI5/EjLVu2DEqbYM6b0+fqOOnbq/z5z3826vStYkIdb3wAAIA16PgAAABrMNSVA/rJuM7l3Pru0t7891Ch77bty6m+Tvrp7E5LlixRWd912WnatGkq37hxI6Btgn/oJ7AfPXrU68/95S9/CURzkE369+zpp59W2bk1gb47s3P7gZtu4n93B5J+eoC33xv9WT766KN+b1Nuwd88AABgDTo+AADAGgx1ZcO5c+eM8ogRI1R+//33jTp9Z8sSJUqonC9fvgC1LjT17t1bZeeOyfruytk5VNQXRYsW9dgO+Ic+NDxu3DiVr1y54vEzr7zyilEuUqSI/xuGbPvxxx9Vnjlzpsfr9ANneXaB5dyNfvbs2Spn9R3TV9cNGDDA/w3LhXjjAwAArEHHBwAAWIOODwAAsEZIzvHR53vo82xERPLnz69yz549c/yzDh8+rPL8+fONuvj4eJWzOq128eLFKuvzfWCenh5od9xxh8rO06DbtGmjcpMmTYLVJKsMHz5c5a+//trjdZUqVVJ5yJAhRl2onwodqvT5WSIikyZNyvS6Ro0aGWXnjvbwL/25OP8t3LNnT6afiY2NNcpxcXEqR0dH+7F1uRdvfAAAgDXo+AAAAGuE5FBXoUKFVK5Zs6ZRN3bsWJUXLlzo8R76Dsr+eH3uHMJ64oknVG7btm2O759btGrVSuVevXqp/N5777nRnN9wLkXXDxxt2LChyhUrVgxam2z1/fffG+WPPvrIq8+VKlVKZYaGc4f//ve/RnnOnDkq67+Pt2zZYlzH9h2B9Y9//ENlfTd0J/07tWrVKqPOluEtHW98AACANej4AAAAa9DxAQAA1gjJOT66Z555xihHRkaqfOrUKY+fe/XVV7P9s5o2bWqUO3bsqLLz5OFwnZtQsmRJlVevXq2yc+uADz74IGhtevjhh1UeNGiQUXfXXXcFrR0QSUtLU9l5IvT58+cz/UyFChWM8ooVK/zfMGSbvm2IfrSM05NPPumxzt9zKW3nPDbpscceU1n/sxYRKVOmjMqTJ09WuV69egFqXejgjQ8AALAGHR8AAGCNkB/qci6XdO5e6cm0adMC0RxrLViwwChfv35d5bVr1+b4/voy+lGjRhl1Xbp0UTlPnjw5/lnwnT48smTJEq8+061bN6N86623+rVN8M2BAwdU3rVrl8frli9frrLz9/GECRNU5ruZc7fccotR1nfK3rhxo1Gn75Tu3AHddrzxAQAA1qDjAwAArBHyQ13IHfTVXiLe79ILO+mrL50rIpE7FC1aVGXnitZLly6p/M4776jMAb+BpX9vREQ2bNjgUktCG298AACANej4AAAAa9DxAQAA1mCODwC/KVy4sMrOXdVnzJihsj4vJDY2NuDtQvbpO/zu3bvXxZYA/sUbHwAAYA06PgAAwBoMdQHwG313XudBwL4cDAwA/sYbHwAAYA06PgAAwBp0fAAAgDXo+AAAAGvQ8QEAANag4wMAAKwRkZGRkeHtxVFRURITExPA5iAzCQkJkpiY6Nd78izdw/MMHzzL8OLv58mzdE9WzzJbHR8AAIBQxlAXAACwBh0fAABgDTo+AADAGlZ0fGJiYqRevXrSoEEDadKkidvNQQ5s2rRJatasKdWrV5epU6e63Rz4KD4+Xho0aKD+r0iRIjJjxgy3mwUf8TzDy5AhQ6R06dJSt25dt5sSEFZMbo6JiZF9+/ZJVFSU201BDqSnp0uNGjVky5YtEh0dLU2bNpW4uDipXbu2201DDqSnp0uFChVkz549UrlyZbebgxzieYa+HTt2SGRkpDz88MNy6NAht5vjd1a88UF42Lt3r1SvXl2qVq0q+fPnl379+slHH33kdrOQQ9u2bZNq1arxj2SY4HmGvtatW0uJEiXcbkbAWNHxiYiIkPbt20vjxo1lwYIFbjcHPjpz5oxUrFhRlaOjo+XMmTMutgj+sHLlSunfv7/bzYCf8DyR2+V1uwHB8K9//UvK
ly8v586dk3bt2klsbKy0bt3a7WYhmzIblY2IiHChJfCX1NRUWbt2rUyZMsXtpsAPeJ4IBVa88SlfvryIiJQuXVruu+8+2bt3r8stgi+io6Pl1KlTqnz69Gn1bBGaNm7cKI0aNZIyZcq43RT4Ac8ToSDsOz7Jycly5coVlTdv3hy2M9XDXdOmTeW7776TEydOSGpqqqxcuVLuvfdet5uFHIiLi2NYJIzwPBEKwr7j8+OPP0rLli2lfv360qxZM+nSpYt07NjR7WbBB3nz5pXZs2dLhw4dpFatWtKnTx+pU6eO282Cj1JSUmTLli1y//33u90U+AHPM3z0799fbrvtNomPj5fo6GhZtGiR203yKyuWswMAAIhY8MYHAADgV3R8AACANej4AAAAa9DxAQAA1qDjAwAArJGtnZujoqIkJiYmQE2BJwkJCZKYmOjXe/Is3cPzDB88y/Di7+fJs3RPVs8yWx2fX085R3A1adLE7/fkWbqH5xk+eJbhxd/Pk2fpnqyeJUNdAADAGnR8AACANej4AAAAa9DxAQAA1qDjAwAArJGtVV22uHDhgsrt27dXuUCBAsZ1n3/+edDaBAAAco43PgAAwBp0fAAAgDUY6hKRL7/80ig/8sgjKh84cEDlMmXKBK1NAJBb/Pzzz0a5SpUqKk+dOlXloUOHBq1N8L/du3erPH78eKNuz549Kh89etSoK1euXGAb5me88QEAANag4wMAAKxBxwcAAFjD2jk+6enpKq9YscKo279/v8r6EvaxY8cGvmGABXbu3Kny4MGDjbpjx455/Nx9992n8rx581QuXbq0H1sHEZFLly6pPGDAAKPup59+CnZzECD6c+7Xr5/K33//vcfPOOd8hRre+AAAAGvQ8QEAANawdqhr8uTJKk+fPt3jdZ07d1Z5xIgRAW0TEOp++eUXlUeOHGnUbd68WWX9NfqNGze8vv+aNWtUTkhIUHnjxo3GdWw9kXNHjhxRecOGDS62BIE0Z84clbMa3oqNjVW5fPnyAW1ToPHGBwAAWIOODwAAsIY1Q136rpMi5vBWRkaGUVeiRAmVFy1aFNiGWW7dunUqO3cKrVChgsq33nqryvqqOxGRRo0aqdy1a1ejTv+cvrto3rzW/NUPOH3IacKECSq//fbbXn1+9OjRRlk/GDgpKcmoe+yxx1TWd1VfsmSJcd1zzz3n1c+GZ3PnzvVYlz9/fpWbNGkSjObAT5wrst58881MrytWrJhR/uSTT1QuVKiQ39sVTLzxAQAA1qDjAwAArEHHBwAAWCOsJzpcuXJF5d69ext1+twBfU6PiMiWLVtULl68eGAaF+b0Zc3r16836t59991M65KTk43rDh486NXP2r59u8ozZszweJ0+h+ill17y6t74LX2nVxGRNm3aqKwvh23YsKFx3b333qvymDFjVC5YsKBxXUREhMefPW3aNJUTExO9azC8lpaWpvKPP/7o8bpHHnlE5fr16we0Tcg5/aQC/TskYs7Ru+mm/70Lcc65DLUT2LPCGx8AAGANOj4AAMAaYT3UNWrUKJVPnz7t8TrnMJi+PBq+efLJJ1VeuHChx+tq1qyp8hNPPGHU+bJM9uzZs0ZZfyX/l7/8RWWGunynvw4XEcmTJ4/K+rDHrl27jOv0A3+9FR8fb5T/+9//ZnrdzTffnO1747d2796tsj7k7zRu3LhgNAd+og9b/vWvf/V4nf47989//nNA2+Qm3vgAAABr0PEBAADWoOMDAACsEXZzfF544QWVFyxYoLJziaw+ljlp0qTANyzMNWvWzCh/8cUXKjvn6rz44osqt27dWmXnFun+8Morr6h84cIFv9/fRoULFzbK+vYE+onevszpERE5c+aMys6/V5cvX1a5T58+KutHWcB3H3/8cab/PV++fEFuCXJK31Lk8ccf9+ozWR1TEk544wMAAKxBxwcAAFgj5Ie69BNjRUTeeuutTK9zvp6fMmWKyiVLlvR7u2wwYsQIlfWhLRGR6Ohold955x2jrkaNGgFr0w8//GCU9eXtGRkZAfu5NtOHMn3ZguDUqVNGuW3btirrQ1si5lL6P/7xjyr7OqwG7ziHQEqVKuVSS+At/bvz1VdfebyuaNGiKpcpU0blixcvGtfpOzw/88wzRl2rVq1U1k9ub9CggXHdPffco7K+DUaw8cYHAABYg44PAACwRkgOdV29elVl52x1Tzu76kNbIuYrN/jmyy+/VNm5ak7f9TOQQ1tOziHNIkWKqOzLMAwC4+TJkyqPHDnSqPv2229Vrl69ulG3ePFilfXX6/A//fDmO+64w+/3P3/+vMr67u7O6Qu6W2+91SjrO8TXqlXLj60LffoqVv2QUif930L9d/ro0aON65y7qOv++c9/etUmfaW1PlQdbLzxAQAA1qDjAwAArEHHBwAAWCNk5vjou1DefffdKuvzAZyGDRumsn5Kt7/oc42cOC06eA4ePKiyfgK7iMihQ4cy/UxcXJxR7t+/v9/bZTv9Oysi8ve//13lWbNmqXzixAnjunLlyqm8f/9+o845hwuBo8+n8XWe3pIlS1SeOHGiUXft2jWV9dPDs+Kc/6NvReK8v22c/x499NBDKv/0008eP/fBBx9kmgNBn/vZt29fo06fjxlovPEBAADWoOMDAACsETJDXfpruz179ni8rlq1airrwx5583r//6q+++9rr72m8uHDh43rstoVePz48Sr36tXL65+N7HvqqadU3rFjh1ef2bBhg1FmqMs/5s+fr7J+eKmI5yWv7du3N8pr1qxR+ZZbbvFj6/B7Ll26pLK3O52vXLlSZecQ8j/+8Q+P93NugfEr5+/qggULqvzzzz971SYb6UOHIiL79u1zqSWeJScnq3zjxg3X2sEbHwAAYA06PgAAwBq5dqhr9erVWZZ/FRsba5T1Wf/66hCnI0eOqOxc8bVz506v2+lJ7969VdYPiPvDH/6Q43vnFkuXLlW5efPmRt327dtVfvrppwPaDn23UW9fz3Ngqe/mzZtnlPXhra+//lplb19l66s0RUTefPNNlZs2bWrUNWzYUGVWTvqf/p2uW7eux+v037PPP/+8yvqO3CLmcJY+ZCUiMnXqVJWrVKmisvPAWX0H4gEDBnhsk+2ch4r6mz5lw/md1Q8Wbt26tcqeTlJwG298AACANej4AAAAa9DxAQAA1shVc3z0XZgffvhhj9fp48YNGjQw6kaMGKHy559/7vEeKSkpKl+5csXjdfXq1VP5+PHjHu/h7VLNcKLv5qrvyClizhXo06ePUdevXz+VGzVqpHL+/PmN6/Slq7t27VJ5/fr1xnVjxoxR2blMfffu3Zm2Xf+5yJ4ZM2YY5axObfbGc8895/W1VatWVVk/1f3RRx81rnPOE0H26aen698xEfPvwPXr11WuVKmScZ2+VcEzzzxj1HnaDdq5A3HPnj29a7DlnL8/vaV/V26//XaV33jjDeO62rVre/WzunXrprI+Xy834Y0PAACwBh0fAABgDVeHus6dO2eU77//fpWdBxx64twdVqcPPzmHnqKiolTu3r27x3Z06NB
BZX3ZpojI3/72N48/u3LlyiqXKlXK43XhQt+pWsT8M3QOg+nLIsuWLatys2bNjOv04Sj9kD3n9gN16tRR+ZtvvjHqPA11+XroIkQ6depklD299nYuh9a/B/ouzvordBFzGwrnsOZ3332nsj7U5TyE8aWXXsq0Tcja448/rvKrr76qsr6DvYjITTf9738z/+lPf1JZX6Iu4tuQ4+XLl43yxx9/7PHa2267Ldv3D1f6v2ki5iGz+vfGSf8+67um++rTTz/N8T0CjTc+AADAGnR8AACANej4AAAAa7g6x0c/3Vzkt6ef51TLli1VHj16tFHXpk0blQsXLmzU6UdWdOnSReXsjF3qY6VZHZ0RrvT5HevWrTPq9GWyuqJFixpl53PxRN+OQN8SISs2PhN/mT59etB+1uTJk42yfhL4kCFDVJ42bZpxnT7HLJyOiQk058nonuh/vv74+6DP99y/f7/H6+644w6j7Dw6wWb6tgIiIpcuXfLqc4mJiSrrv0u9/f0rYj6zY8eOef05t/DGBwAAWIOODwAAsIarQ13Lly83yr6crD1p0iSjbvDgwSrrwxnOIZBFixap/P777xt13p7O3qRJE5WnTJli1Dl3lLaZc0lrdHS0X+9/6tQplQ8ePOjVZxo3buzXNiAwnCewO1/n/8o5RJMvX76AtQki48aN8+v9Xn/9dZWdS+f1ZdrOoU/nie82i4yMNMr6Nh/OrWN0+r93+hSNrE5PcDpw4IDKWf07XrNmTZWd3+1g4o0PAACwBh0fAABgDVeHupwHRWZ1sKenXZidBybOmjUr08/rB16KiCQnJ3vVRv2QUufKkXvuuUdlXw+IQ84tXLhQZRsOhw13Z86cUVnfPVhEZO7cuZl+xnkAZq1atfzfMAt4WtXl3Om8SpUq2b73Dz/8YJT1w0f1VUGFChUyrlu7dq3KzZs3z/bPtZU+FUPfKT0rgwYNUtn5XdN3WE9KSjLqvN3xWf9d7eZBwrzxAQAA1qDjAwAArEHHBwAAWMPVOT533nmnUa5UqZLKJ0+e9Ooe+q6TTt6ezt6xY0ejrmHDhioPGDBAZRtOWQ9n+lJK+M+NGzdU1k/tzor+3dy3b59R17lzZ5Wd32/9e9y+fXuV9e8pfDdmzBiV9VPR9+7da1ynn8j+/PPPq+z8vX3kyJFMPyNiPssSJUqo/NBDDxnXMa/HN/qz1OfWXLx40eNn9O/l7t27jTpn2Rv66Qkiv9152y288QEAANag4wMAAKzh6lBX+fLljbJ+COj8+fONulWrVql84sQJj/esVq2ayvpySX3puYhIixYtVM7OYWwIXfoQCrJHX776wgsveKzTDw51+v7771XWl7+uX7/e42f0w4RFzO/0iBEjPH4OvtF309W3COjbt69x3dKlS1XWd753Lod3LnvW6VMH9J2gea7+UbJkSZW3bt2qctu2bY3rshr68oV+QPXMmTONutyy3QhvfAAAgDXo+AAAAGvQ8QEAANZwdY6PU0xMjMrO086dZSAzWZ0MrM8hu3LlilHHPK+sHT58WGVPx0aIiMTFxWX73l27djXK+v3LlStn1OXJkyfb94dvunXrpvKxY8eMukcffVTlbdu2eXU/5/Yl+knrt912my9NhJf046H0uXYi5lwufTsCfTsDpw4dOhhlfSua6dOnq+w8fiS34I0PAACwBh0fAABgjVw11AX4Qh8OyWq55IEDB1R2LulctmyZys6TqGEuUdWHOUR+u/XErwoWLGiU+/fvr3KzZs1Udu66HBkZ6XM74T/66dnO09g3b94c7ObAT5zD+gsWLHCpJe7hjQ8AALAGHR8AAGANhroQ8vTDD527es+bNy/TzzhXmDC8lbUiRYqo7Pwz9fRnDAC5EW98AACANej4AAAAa9DxAQAA1mCOD0KevuxW3zVURKRq1aoq66eAd+nSJfANAwDkOrzxAQAA1qDjAwAArMFQF8KKPuwlIjJq1KhMMwDATrzxAQAA1qDjAwAArEHHBwAAWIOODwAAsAYdHwAAYA06PgAAwBoRGRkZGd5eHBUVJTExMQFsDjKTkJAgiYmJfr0nz9I9PM/wwbMML/5+njxL92T1LLPV8QEAAAhlDHUBAABr0PEBAADWsKLjk5SUJL169ZLY2FipVauW7Nq1y+0mwUczZ86UunXrSp06dWTGjBluNwc+unbtmjRr1kzq168vderUkQkTJrjdJOQQv2fDR7g/SyvO6ho5cqR07NhR3nvvPUlNTZWUlBS3mwQfHDp0SN58803Zu3ev5M+fXzp27ChdunSRW2+91e2mIZsKFCggn3zyiURGRkpaWpq0bNlSOnXqJC1atHC7afARv2fDR7g/y7B/43P58mXZsWOHDB06VERE8ufPL8WKFXO3UfDJkSNHpEWLFnLLLbdI3rx55c4775Q1a9a43Sz4ICIiQiIjI0VEJC0tTdLS0iQiIsLlVsFX/J4NHzY8y7Dv+Bw/flxKlSolgwcPloYNG8qwYcMkOTnZ7WbBB3Xr1pUdO3bIhQsXJCUlRTZs2CCnTp1yu1nwUXp6ujRo0EBKly4t7dq1k+bNm7vdJPiI37Phw4ZnGfYdn+vXr8v+/ftl+PDhcuDAASlUqJBMnTrV7WbBB7Vq1ZIxY8ZIu3btpGPHjlK/fn3Jm9eK0dqwlCdPHjl48KCcPn1a9u7dK4cOHXK7SfARv2fDhw3PMuw7PtHR0RIdHa3+12SvXr1k//79LrcKvho6dKjs379fduzYISVKlGB+TxgoVqyYtGnTRjZt2uR2U+Ajfs+GDxueZdh3fMqWLSsVK1aU+Ph4ERHZtm2b1K5d2+VWwVfnzp0TEZGTJ0/KBx98IP3793e5RfDF+fPnJSkpSURErl69Klu3bpXY2Fh3GwWf8Xs2fNjwLK0YJ5g1a5Y8+OCDkpqaKlWrVpUlS5a43ST4qGfPnnLhwgXJly+fzJkzR4oXL+52k+CDs2fPysCBAyU9PV1u3Lghffr0ka5du7rdLOQAv2fDR7g/S46sAAAA1gj7oS4AAIBf0fEBAADWoOMDAACsQccHAABYg44PAACwBh0fAABgDTo+AADAGnR8AACANf4fU5C8VQlpnmUAAAAASUVORK5CYII=\n", 68 | "text/plain": [ 69 | "
" 70 | ] 71 | }, 72 | "metadata": {}, 73 | "output_type": "display_data" 74 | } 75 | ], 76 | "source": [ 77 | "from simple_deep_learning.mnist_extended.mnist import display_digits\n", 78 | "\n", 79 | "display_digits(images=train_images, labels=train_labels, num_to_display=20)" 80 | ] 81 | }, 82 | { 83 | "cell_type": "markdown", 84 | "metadata": {}, 85 | "source": [ 86 | "These digits form the base of MNIST extended. Let's see how we can turn those single digit images into a semantic segmentation dataset." 87 | ] 88 | }, 89 | { 90 | "cell_type": "markdown", 91 | "metadata": {}, 92 | "source": [ 93 | "## Semantic segmentation" 94 | ] 95 | }, 96 | { 97 | "cell_type": "markdown", 98 | "metadata": {}, 99 | "source": [ 100 | "Semantic segmentation is the task of assigning a label to each pixel of an image.\n", 101 | "It can be seen as a an image classification task, except that instead of classifying the whole image, you're classifying each pixel individually.\n", 102 | "\n", 103 | "The input image is created by randomly overlaying digits from the original MNIST dataset on an empty array. The target array is of shape (height, width, num_classes), this corresponds to an output for which each pixel has a class.\n", 104 | "\n", 105 | "Let's take a look at what this might look like. We'll generate images of height and width 60 pixels and choose digits 0-4 (i.e num_classes = 5).\n", 106 | "We're just going to use the basic parameters of the create_semantic_segmentation_dataset function. For more customisation, take a look at the documented code or check the \"Customisation\" section at the end of the article." 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": 3, 112 | "metadata": {}, 113 | "outputs": [], 114 | "source": [ 115 | "import numpy as np\n", 116 | "np.random.seed(seed=9)\n", 117 | "\n", 118 | "from simple_deep_learning.mnist_extended.semantic_segmentation import (create_semantic_segmentation_dataset, display_segmented_image,\n", 119 | " display_grayscale_array, plot_class_masks)\n", 120 | "\n", 121 | "train_x, train_y, test_x, test_y = create_semantic_segmentation_dataset(num_train_samples=100,\n", 122 | " num_test_samples=10,\n", 123 | " image_shape=(60, 60),\n", 124 | " num_classes=5)" 125 | ] 126 | }, 127 | { 128 | "cell_type": "markdown", 129 | "metadata": {}, 130 | "source": [ 131 | "Below is a randomly selected example from the dataset and its shape. As you can see, the input is of shape (height, width, 1) which is expected, the input is a simple grayscale image. The output is of shape (height, width, num_classes), there is one channel per class. We'll see what each channel contains in a bit." 132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": 4, 137 | "metadata": {}, 138 | "outputs": [ 139 | { 140 | "name": "stdout", 141 | "output_type": "stream", 142 | "text": [ 143 | "(60, 60, 1)\n", 144 | "(60, 60, 5)\n" 145 | ] 146 | } 147 | ], 148 | "source": [ 149 | "import numpy as np\n", 150 | "\n", 151 | "i = np.random.randint(len(train_x))\n", 152 | "print(train_x[i].shape)\n", 153 | "print(train_y[i].shape)" 154 | ] 155 | }, 156 | { 157 | "cell_type": "markdown", 158 | "metadata": {}, 159 | "source": [ 160 | "The following code displays the input image, as already mentioned, this is simply the original digits of MNIST randomly overlaid on a blank canvas." 
161 | ] 162 | }, 163 | { 164 | "cell_type": "code", 165 | "execution_count": 5, 166 | "metadata": {}, 167 | "outputs": [ 168 | { 169 | "data": { 170 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAOsAAADrCAYAAACICmHVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAAOF0lEQVR4nO3deWxVxRfA8WllKSRYkQouqT6XuidqLEExiktKWhWNiRg3EkUSNRKDYlxwA6u48AeRAGJABTUmJIrRmIoSlPCHYqgSXHCFlCpWsQKyBUHt749fMp5z2nt97/W1faf9fv46J/N637TJYe4w986UtLW1tQUARa+0pzsAIDsUK+AExQo4QbECTlCsgBMUK+BEv1w+XFFRETKZTBd1BUBTU1NobW3tsC2nYs1kMqGxsbEgnQLQXnV1dWIbt8GAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAE/16ugOF9Omnn6p8zpw5Wf1cW1ubyi+99FKVX3bZZTEeMmRInr0DOoeRFXCCYgWccH0bvH37dpVfeeWVKm9pacnqOvY2eMWKFSofNmxYjGtqanLpIlAwjKyAExQr4ATFCjjhes5aWqr/rRk0aJDKJ02aFOPJkyertueeey7GCxYsUG1bt25V+fjx42O8du1a1VZVVZVDj4H8MbICTlCsgBOub4PLy8tV3tjYqHJ5WzxgwADVNn/+/Bjb22l5ixxCCLt27Yrx7NmzE68DdCVGVsAJihVwgmIFnChps8/apaiurm43L+wNGhoaVH711VerfN++fYk/u3HjRpUfe+yxhesY+py0GmNkBZygWAEnKFbACdfrrIVid4a4++67VT537twYyzXXEEKor69X+Ysvvljg3gH/x8gKOEGxAk6wdJOF2traGNtdJKy///67q7uDXoylG6AXoFgBJyhWwAmWbgrsyy+/jPHpp5/egz1Bb8PICjhBsQJOUKyAE8xZO2B3N9y8eXPWP/v7778XujtACIGRFXCDYgWc6DO3wfb29MCBA4mf3bBhg8q//fbbxM9eeOGFKh8zZkzunQOywMgKOEGxAk5QrIATvWrO2tzcrPJFixbFeN68eaptx44dMbZvCZaUlKTmUiaTUblc9hk+fHhqf4FcMLICTlCsgBMUK+CE6znrn3/+qXJ5eHIIIaxcuTLGRx55pGqT88m0ddT/smTJEpWvW7cuxvaEuXPPPTfv7yk2TU1NMb7gggtUm90dcsqUKd3Qo96PkRVwgmIFnHB9G3z22Wer/Ouvv1b56NGjY/zaa6+ptrKyshhffvnlqq0zOziuX78+xjU1Nart/vvvV/kdd9wR46FDh+b9nd3BPp4pD5z+6aefVNuaNWu6pU99DSMr4ATFCjhBsQJOuJ6z2lfZ7GOBF110UYwrKytV28SJE2O8du3a1OtITz/9tMpPPvlklcuDquzc99FHH1X5nDlzYjx58mTVVldXF+ORI0eqtj179qh8586dMT7iiCMS+94ZTz31lMpnzZqV+Fl70BcKg5EVcIJiBZxwfRs8ePBgle/du1fl8smkESNGqLbffvstxvatm/79+6t85syZMZ46dWpqn+QyUENDg2pbvny5yl955ZUYT58+XbU988wzMbZPBNnb66+++irGdnno9ttvT+1vtuTm5VZ5ebnKR40aVZDvhMbICjhBsQJOUKyAE64PU7755ptV/vLLL2f9s/Ktm+OPP1612XmffRyxUDZt2hRjuVQTQgjfffddXtd87733VD527Ni8rhNCCFu2bIlxdXW1avv1119jfOutt6o2+SgicsNhykAvQLECTlCsgBOu11lbWlpUfuKJJ6pc7jx4yimnqDa5/lhVVVX4zmXhuOOOi7Gda8rHGO2OGGk6M2e13/PII4/EWM5RrRtuuCHr70D+GFkBJyhWwAnXt8H28T35CGEIIRx22GHd2Z1OKS3V/24OGjQoxv91GywPw+rM+bDbtm1T+UsvvZT4Wfm3raioyPs7i8Hjjz8e4xkzZqi2tAPMuhsjK+AExQo4QbECTries1qe5qiWnZf+8ccfMbY7V9idGN55552C9OHzzz/P+rNyacnullHsVq1apXK5g8dBBx3Uzb3JHiMr4ATFCjhBsQJO9Ko5K3Kzf/9+lT/55JNZ/6x9Za6Y/fXXXyqXu0qGEMI///wTY+asADqNYgWc4Da4SBx66KEql8sh33zzjWrbuHGjyuUbMXYXxzR2uWj16tWJn7XXtbtDFLMPP/xQ5W+++WbiZ6+66qqu7k7eGFkBJyhWwAmKFXCCOWuRGDZsmMrPPPPMGNtDou0cdtq0aTGWr3uFkH5QlV2qSdvo8rrrrlO53ZWjUOTBzPLEghD0yQQDBw5MvY58tc3+TSy5o8jixYuz6GXPYGQFnKBYAScoVsAJ5qxF6tlnn42xXVe1hz/L7Vc++eQT1XbJJZckfsdHH32k8rRDpEePHp3c2U6wW/OMHz8+xrt371Zt8tXAM844I/W6c+fOjXHa+nEIIYwbNy7GcjudYsPICjhBsQJOcBtcpOSuF2+//bZqmzdvnsqXLl0a4x9++EG1bdiwIfE77FKNvQ2WjxgW6i0bu/uiPQRM3vrW1NSoNvm72dvg7du3q7y+vj6xD6eddprKn3jiiZQeFw9GVsAJihVwgmIFnGDOGkJ46KGHUtvlo3X2tbK0nQXkYckdGTx4cIwnTpyo2g4//PAY29fTHnvsscT83XffVW179uxRuTzo+IMPPkjtnzywSz6Sl6u9e/fGeNasWapt/fr1Kpe/9xtvvKHahgwZkvgdU6dOVbmdw0r33HNP1tctJoysgBMUK+BESVvaqxZGdXV1aGxs7Mr+9IjKykqVyw22Qwhh165dXd6H4cOHq/y8886L8X333afa7DJKLpt8HXzwwTG2v1dZWZnKly1bFuO6urqsv8OSZ+EuWLBAtQ0dOlTlcsPytKem7JKUfVLrl19+ifEVV1yh2t56663/6HHPSasxRlbACYoVcIJiBZxg6SboNz1CCGHlypUqz+XApnxt3bpV5XIHPrsb38cff6zyc845J/G6TU1NKrcbXkt2/pjvPHX69Okqf/7552N8yCGHqDa74Xa2b/fcdNNNKpdzVMv+DaZMmZL42YaGBpXbJTW5W8WAAQPSO1lgjKyAExQr4ATFCjjBnDWEcO2116p8/vz5iZ+1O+db/fv3j7HddaC1tVXlckdD+3jczp07E7/j/fffV3nanN
W+Xrdv377Ez06aNCmxLY19hHDGjBlZ/+ySJUtUfv7558fYrnfLxw+/+OKLrL/D/p9DLv8HYR/tnDBhQoyPOuqorK9TCIysgBMUK+AEjxt24NVXX1W53ChbvonSkX79/p1ZyLdqQmh/Gyxvqe1tcG1tbYzt7g92g2t5W2c33z711FNVLjcIt/3bsmWLysvLy0MS+bvYnRfsMlRPkH2yf5Mbb7wx8edKS/X4ddZZZ6n8mGOOKUDvkvG4IdALUKyAExQr4ARLNx1Im9N0hn3UTrKPFNp5qmR3q5DLM3YXBLnUEEIIDz74YIzlDg4hhPD666+r/JZbbknsg3w0cfbs2anXkZuJy4OfQ2h/cJZczmpublZtactZDz/8sMofeOCBGBfzxt25YGQFnKBYAScoVsAJ5qw9aN26dTG2253kIm3XfTuHnTlzZozt9jB2TTaN3Erm+uuvV202b2lpibE8LDmE9uuWcqfBUaNGqTb5iKF9xPKuu+5SeW+Zp0qMrIATFCvgBLfB3ci+LSN33bPLMWns7aE8y9WSbwGF0D07NVpyecYu1Vhy2SftzRq5WXkI7Xe56I0YWQEnKFbACYoVcII5axd64YUXVH7bbbepPG2nQcm+qiZ3uA/Bz8FKHfnxxx9VLncPtO68884Y29fy+gJGVsAJihVwgtvgTvr+++9VLjdbs2+fZHvbG4J+Q8eeuZq2QZo3ixYtUrm8LS4pKVFt9fX1MbZLUn0BIyvgBMUKOEGxAk4wZ83R7t27VW43xl69enVe1z366KNVvnTp0hj3pjnqmjVrVC7fAgpBH/Zkl77kQdB9ESMr4ATFCjhBsQJOMGfNgjzMyR68nO8cNQT9qps9oOmkk05K/Dm7Xrt///4Y21fH7FqvdMIJJ6jc7qg4bty4GE+bNi3xOrnYvHmzyu3vUlFREeOu2mXSK0ZWwAmKFXCC2+AsLF68OMbLly/P+zr21nbFihUx3rFjh2qTm1Rv2rRJtdnb1c8++yyv/vz8888qt5t+53tea5pMJqNyeZBXCO1vzfEvRlbACYoVcIJiBZxgzpoF+ehfZ9i5p5y/HThwQLUVahdCeWBzCHoz8bFjx6o2e6522kFa+bI7M9rfG8kYWQEnKFbACYoVcII5azey87Nt27YV5LoDBw6M8cUXX6zarrnmGpXbxyXhByMr4ATFCjjBbXAW7r333hivWrWqW75zxIgRMR4zZoxqq6urU7nc8HrkyJFd2zH0GEZWwAmKFXCCYgWcYM6ahdra2hgvXLhQtdnd8pctW5Z4Hbs7n5xr2sOm5JKLXJpB38XICjhBsQJOUKyAE8xZsyBPM7NbnUyYMEHlzc3NidcpKytTeWVlZQF6h76CkRVwgmIFnOA2uJPsskpVVVUP9QS9HSMr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ATFCjhBsQJOUKyAExQr4ERJW1tbW7YfrqioCJlMpgu7A/RtTU1NobW1tcO2nIoVQM/hNhhwgmIFnKBYAScoVsAJihVwgmIFnKBYAScoVsAJihVw4n8ZaZ5Gn5bURAAAAABJRU5ErkJggg==\n", 171 | "text/plain": [ 172 | "
" 173 | ] 174 | }, 175 | "metadata": {}, 176 | "output_type": "display_data" 177 | } 178 | ], 179 | "source": [ 180 | "from simple_deep_learning.mnist_extended.semantic_segmentation import display_grayscale_array\n", 181 | "\n", 182 | "i = np.random.randint(len(train_x))\n", 183 | "display_grayscale_array(array=train_x[i])" 184 | ] 185 | }, 186 | { 187 | "cell_type": "markdown", 188 | "metadata": {}, 189 | "source": [ 190 | "The target class is a lot more interesting. The target array has a 3rd dimension of length equal to the number of classes to predict. That is, if our input images are composed of MNIST digits 0-4, then our target array will have a shape (width, height, 5).\n", 191 | "\n", 192 | "In the following cell, we have a function that indexes the target array along the third axis (the classes axis) and displays each slice individually. " 193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | "execution_count": 6, 198 | "metadata": {}, 199 | "outputs": [ 200 | { 201 | "data": { 202 | "image/png": "iVBORw0KGgoAAAANSUhEUgAABX4AAAEHCAYAAAAZLl53AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAAdhUlEQVR4nO3de7SVc/4H8O+RpNI9Ui4dM7qYLGS6uQ+jGYVaYwmDFpJLo2YRhgzpNu7D0qSyRJoYGjUGCykaqzWDIdPNFKuQpJtLV+lq//6YZf/6btqnU+fsc8631+uv532eZz/P53H5ru3T1+cUZTKZTAAAAAAAIBl7VXQBAAAAAACULY1fAAAAAIDEaPwCAAAAACRG4xcAAAAAIDEavwAAAAAAidH4BQAAAABIzN7lcdPGjRuH4uLi8rg1UIYWLVoUvvjii4ouo0xYd6BqsO4AhZTSmhOCdQeqAusOUGj51p1yafwWFxeHGTNmlMetgTLUrl27ii6hzFh3oGqw7gCFlNKaE4J1B6oC6w5QaPnWHaMeAAAAAAASo/ELAAAAAJAYjV8AAAAAgMRo/AIAAAAAJEbjFwAAAAAgMRq/AAAAAACJ0fgFAAAAAEiMxi8AAAAAQGI0fgEAAAAAEqPxCwAAAACQGI1fAAAAAIDEaPwCAAAAACRG4xcAAAAAIDEavwAAAAAAidH4BQAAAABIjMYvAAAAAEBiNH4BAAAAABKj8QsAAAAAkBiNXwAAAACAxGj8AgAAAAAkRuMXAAAAACAxGr8AAAAAAInR+AUAAAAASIzGLwAAAABAYjR+AQAAAAASo/ELAAAAAJAYjV8AAAAAgMRo/AIAAAAAJEbjFwAAAAAgMRq/AAAAAACJ0fgFAAAAAEiMxi8AAAAAQGI0fgEAAAAAEqPxCwAAAACQGI1fAAAAAIDEaPwCAAAAACRG4xcAAAAAIDEavwAAAAAAidH4BQAAAABIjMYvAAAAAEBiNH4BAAAAABKj8QsAAAAAkBiNXwAAAACAxGj8AgAAAAAkRuMXAAAAACAxe1d0AQAAlK933303ysOHDy/V5zOZTJS7du0a5TPPPDN7XKdOnVJWBwAAlAc7fgEAAAAAEqPxCwAAAACQGI1fAAAAAIDEmPELAJCYVatWRbl79+5RXrZsWanulzvjd+rUqVFu1KhR9rhz586lujcAAFA+7PgFAAAAAEiMxi8AAAAAQGI0fgEAAAAAEmPGLwBAYvbaK/6z/Zo1a0a5d+/eUe7bt2+UR40aFeXRo0dHeeXKlVHu0aNH9vidd96JzrVo0WInKgYAAMqaHb8AAAAAAInR+AUAAAAASIzGLwAAAABAYsz4BQBITL169aI8Y8aMKOfO/N1nn32iPHLkyCjnzgzOnQG8bt267PEDDzyQ914AAEBh2PELAAAAAJAYjV8AAAAAgMRo/AIAAAAAJMaMXwCAxOXO/C2trl27Rvmxxx6L8saNG7PHo0ePjs7deOONUT7ssMN2qxYAAGDn2PELAAAAAJAYjV8AAAAAgMRo/AIAAAAAJMaMXwAA8sqd8du/f/8ojxgxInu8bt266NzQoUOjnDsfGAAAKB92/AIAAAAAJEbjFwAAAAAgMUY9AABQKsOGDYvyjBkzssdTp06Nzo0bNy7KRj0AAEBh2PELAAAAAJAYjV8AAAAAgMRo/AIAAAAAJMaMXwAACua9996L8pFHHllBlQAAQNrs+AUAAAAASIzGLwAAAABAYjR+AQAAAAASY8YvAAClsnLlyih/8sknO/3ZL7/8sqzLAQAAfoAdvwAAAAAAidH4BQAAAABIjMYvAAAAAEBizPgFANjD5c7d3bJlS97r582bF+UPPvhgh9f+7Gc/i/Ipp5xSuuIAAIBdYscvAAAAAEBiNH4BAAAAABKj8QsAAAAAkBgzfgEAErd48eIojxkzJsoPPfRQlFevXh3lTCYT5aKiorx5e8XFxVFeuXJllA844IAdfhYAANh1dvwCAAAAACRG4xcAAAAAIDEavwAAAAAAiTHjFwAgMZs2bYpy7969o/zaa69FuVmzZlHOnbv7wQcf7HIt48aNi/LMmTOjPHLkyCgfd9xxu/wsAAAq1qJFi6J88sknR7l///5Rvvbaa8u5oj2bHb8AAAAAAInR+AUAAAAASIzGLwAAAABAYsz4BQBIzE9/+tMoz58/P8rHH398lP/yl79Eed99943yWWedFeUZM2bscm2zZ8+OcufOnaN88803R/maa66JcoMGDXb52QAAlK0tW7ZEedSoUVFesmRJlN96661yr4n/Z8cvAAAAAEBiNH4BAAAAABKj8QsAAAAAkBgzfgEAEjNv3rwoFxUVRfnUU0+N8iGHHBLlXr16Rfmdd97Je7/t3X333VFu3bp1lIcOHRrl3HnBt99+e5SHDx8e5b59+0a5S5cuUW7fvv0OawMASNHXX38d5bVr10a5adOm5fbsu+66K8r33ntv3uu7du1abrXwfXb8AgAAAAAkRuMXAAAAACAxGr8AAAAAAIkx4xcAIDG1atWK8oYNG
[... base64 PNG data omitted: output of the plot_class_masks cell below ...]\n", 203 | "text/plain": [ 204 | "
" 205 | ] 206 | }, 207 | "metadata": {}, 208 | "output_type": "display_data" 209 | } 210 | ], 211 | "source": [ 212 | "from simple_deep_learning.mnist_extended.semantic_segmentation import plot_class_masks\n", 213 | "plot_class_masks(train_y[i])" 214 | ] 215 | }, 216 | { 217 | "cell_type": "markdown", 218 | "metadata": {}, 219 | "source": [ 220 | "Each slice contains only one type of digit. In our case, the input image is composed of 2 twos and 2 fours, therefore the target array has 2 twos at slice 2 and 2 fours at slice 4.\n", 221 | "\n", 222 | "It's by separating the digits of a certain class into different slices that we tell our model which pixels correspond to which class. When training a model, we want it to be able to separate pixels of the original image into their respective slice.\n", 223 | "\n", 224 | "By default, in our dataset, classes are not exclusive. That means a pixel can part of more than one digit at a time. This will affect our loss function when building models but is not particularly important. If you want exclusive classes, you can set labels_are_exclusive=True in the create_semantic_segmentation_dataset function, in which case for pixels from multiple digits will only have one class, selected at random.\n", 225 | "\n", 226 | "Below is another way of displaying the digits. This time instead of separating the slices, we give each slice a particular colour. Here's what that looks like:" 227 | ] 228 | }, 229 | { 230 | "cell_type": "code", 231 | "execution_count": 7, 232 | "metadata": {}, 233 | "outputs": [ 234 | { 235 | "data": { 236 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAARoAAADrCAYAAAC2ClmyAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKHklEQVR4nO3dTYhcZRYG4NPa0QaTEOyQjlJCRQLa3caftPEHokRBxEayMCARJUp0MRAXIgOzdKeOjGDAhRs3o2C2SoSAko1kExwjjAyIgg2pxkjSEKKDjW1Zs3DGCW2sure6Tqpu1fPsQtV379eLvJxzvltVY61WqxUAia7o9waA4SdogHSCBkgnaIB0ggZIJ2iAdONl3rx58+ao1+tJWwEWFhbi3Llz/d5Gz5UKmnq9Hp9++mnWXmDk3Xnnnf3eQgqtE5BO0ADpBA2QrtSMBrj8VlZWotFoxPLycr+30tbExETUarVYt27d714TNDDgGo1GbNiwIer1eoyNjfV7O5fUarViaWkpGo1GbNu27Xeva51gwC0vL8fk5OTAhkxExNjYWExOTv5h1SVooAIGOWT+p90eBQ1QyLFjx+Kmm26K7du3x6uvvlpqrRkNVMzWrX+L7777d8+uNzV1TZw58+e272k2m3Ho0KH46KOPolarxa5du2Lv3r0xMzNT6B4qGqiYXoZM0eudPHkytm/fHjfeeGNcddVVsX///nj//fcL30PQAB0tLi7GDTfc8Nu/a7VaLC4uFl4vaICOLvXV4mUG1IIG6KhWq8Xp06d/+3ej0Yjrr7++8HpBA3S0a9eu+Oqrr+Kbb76Jn376KY4cORJ79+4tvN6pE9DR+Ph4vPnmm/Hwww9Hs9mMgwcPxuzsbPH1iXsDEkxNXdPz4+0i5ufnY35+vqt7CBqomE7PvAwiMxognaAB0gkaIJ2gAdIJGiCdoAE6OnjwYGzZsiVuueWWrtY73oaK2Xog4rvzvbve1KaIM39v/55nnnkmnn/++Thw4EBX91DRQMX0MmSKXu/++++Pa6+9tut7CBognaAB0gkaIJ2gAdIJGqCjJ554Iu6999748ssvo1arxdtvv11qveNtqJipTb0/3u7kvffeW9M9BA1UTKdnXgaR1glIJ2iAdIIGSCdogHSCBkgnaICOTp8+HQ888EBMT0/H7OxsHD58uNR6x9tQMffFP2Mpfu7Z9SZjPD6JHW3fMz4+Hq+//nrs3Lkzvv/++5ibm4uHHnooZmZmCt1DRQMV08uQKXq96667Lnbu3BkRERs2bIjp6elYXFwsfA9BA5SysLAQp06dirvvvrvwmqFunWbis67W/St29ngnMBx++OGH2LdvX7zxxhuxcePGwutUNEAhKysrsW/fvnjyySfjscceK7VW0AAdtVqtePbZZ2N6ejpefPHF0uuHqnXqtlXqdB2tFKPuxIkT8c4778SOHTvi9ttvj4iIl19+Oebn5wutH6qggVEwGeM9P97uZPfu3dFqtbq+h6CBiun0zMsgMqMB0o1MRbN6zlJmnnPxe81roDwVDZBO0ADphqp1KtPWXPzebtuosveEUaWiATpaXl6Ou+66K2677baYnZ2Nl156qdT6oapoYBT8M+6Ln2OpZ9cbj8nYEZ+0fc/VV18dx48fj/Xr18fKykrs3r07HnnkkbjnnnsK3UNFAxXTy5Aper2xsbFYv359RPz6maeVlZUYGxsrfA8VTazt6BtGRbPZjLm5ufj666/j0KFDpb4mQkUDFHLllVfG559/Ho1GI06ePBlffPFF4bWCBihl06ZNsWfPnjh27FjhNYIG6Ojs2bNx/vz5iIj48ccf4+OPP46bb7658HozmjXyXA2j4Ntvv42nn346ms1m/PLLL/H444/Ho48+Wni9oIGKGY/Jnh9vd3LrrbfGqVOn1nAPoFI6PfMyiATNJTjuht4yDAbSCRognaAB0gkaIJ2gAQprN
[... base64 PNG data omitted: output of the display_segmented_image cell below ...]\n", 237 | "text/plain": [ 238 | "
" 239 | ] 240 | }, 241 | "metadata": {}, 242 | "output_type": "display_data" 243 | } 244 | ], 245 | "source": [ 246 | "from simple_deep_learning.mnist_extended.semantic_segmentation import display_grayscale_array\n", 247 | "display_segmented_image(y=train_y[i])" 248 | ] 249 | }, 250 | { 251 | "cell_type": "markdown", 252 | "metadata": {}, 253 | "source": [ 254 | "That's it for the basic information on the semantic segmentation dataset. If you want an example of how this dataset is used to train a neural network for image segmentation, checkout my tutorial: [A simple example of semantic segmentation with tensorflow keras](https://awaywithideas.com/a-simple-example-of-semantic-segmentation-with-tensorflow-keras/)" 255 | ] 256 | }, 257 | { 258 | "cell_type": "markdown", 259 | "metadata": {}, 260 | "source": [ 261 | "## Object detection\n", 262 | "\n", 263 | "Object detection is the task of drawing a bounding box around objects of interest. The input data for the object detection problem is exactly the same as for the semantic segmentation. The target however is different. Instead of classifying each pixel, we want to output the coordinates of a bounding box and a class label for each predicted bounding box.\n", 264 | "\n", 265 | "Generating the target for an object detection task is more complicated than for semantic segmentation. Different models use different target arrays. To remain generic, I have decided to output the bounding boxes and labels as lists. This cannot be used directly as a target for machine learning models but can be processed to produce a suitable target array for a given model." 266 | ] 267 | }, 268 | { 269 | "cell_type": "code", 270 | "execution_count": 8, 271 | "metadata": {}, 272 | "outputs": [], 273 | "source": [ 274 | "from simple_deep_learning.mnist_extended.object_detection import create_object_detection_dataset \n", 275 | "\n", 276 | "train_x, train_bounding_boxes, train_labels, test_x, test_bounding_boxes, test_labels = create_object_detection_dataset(\n", 277 | " num_train_samples=100, num_test_samples=10, image_shape=(60, 60))\n" 278 | ] 279 | }, 280 | { 281 | "cell_type": "markdown", 282 | "metadata": {}, 283 | "source": [ 284 | "The input array (i.e x) is in the same format as for semantic segmentation." 
285 | ] 286 | }, 287 | { 288 | "cell_type": "code", 289 | "execution_count": 9, 290 | "metadata": {}, 291 | "outputs": [ 292 | { 293 | "data": { 294 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAOsAAADrCAYAAACICmHVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAAG20lEQVR4nO3dTYjNUQPH8Ts8hUnDYmJhMztJspmUl0RSVtYWlJ3UZGMhrxtkZCdFNCxFSJosbGVBo0jJSlOURpOF1xD32d3nnvsYzZg7997fzOezOqf/zL1n4ev8TzPzv13VarVaATrevHYvAJgcsUIIsUIIsUIIsUIIsUKI/0zli3t7eyt9fX0ztBRgdHS0Mj4+/sdrU4q1r6+vMjIy0pRFAf+vv79/wmtugyGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCGEWCHElB5FSut8/fq1Nj516lRxbXBwsJgvXbq0Nn706FFxbdWqVc1fHG1hZ4UQYoUQYoUQzqwd6uzZs7XxuXPnimvz588v5p8+faqNjx49Wly7c+fODKyOdrCzQgixQgixQog5e2at/9nl06dPi2vbtm0r5gMDAy1ZU71Lly61/D3pbHZWCCFWCDGrb4MfP35cG+/cubO4Vv/jjp8/fxbXFixYUMz37NlTGy9ZsqSZS5zQ8PBwbbxhw4aWvCedzc4KIcQKIcQKIWb1mbX+LPrhw4dJf9/t27eL+Y4dO2rjvXv3Tntdk9HT0/NP3/fmzZti/uLFi2K+Zs2af14T7WVnhRBihRBihRCz+sxa/2dmaRof3TJZz549K+YPHjwo5s6sueysEEKsEGJW3wY3Pukvyf3799u9BDqMnRVCiBVCiBVCzOoz671792rjrVu3/vPrPH/+/I+vOVXVarWYd3V1Tfi1P378+Of3YXays0IIsUKIWX0b3CwXLlz443iqfv36VcwbH9bdDKtXry7mmzdvbvp7TMeRI0eK+bVr14r5u3fvWrmcKHZWCCFWCCFWCOHMOgvUP1Xixo0bxbWVK1e2ejnTUv8h0t3d3W1cSeexs0IIsUIIsUKIWX1m3bRpU23c+PO8y5cv18ZPnjwprs3Ur/o1/rph489dm/G6v3//bsprtsrY2Fgxv3nzZm3cqidJprCzQgixQohZfRtcb/fu3RPOT548WVz79u3bhK8zNDRUzKfy8PCZ+nXD+g/Z2rVrV3HtypUrxXzdunVNeU9az84KIcQKIcQKIebMmfVvjh8/Pumv3b59ezH/8uXLpL93Kk+KqP8A58+fP0/6PV6+fFnMHz58WMw7/cxa/+uSfnRTsrNCCLFCCLFCCGfWKZrOUxL/5u3bt8V83ry5+f/o69ev272EjjU3/0VAILFCCLfBHeLu3bvF/Pv370153ZGRkWL+/v372njZsmVNeQ9aw84KIcQKIcQKIZxZO8TAwEAxr/+zvemcX2/dulXMDxw4UBs7s2axs0IIsUIIsUIIZ9YOcfjw4WL+8ePHNq2ETmVnhRBihRBugzvEmTNnivnVq1dr46k8QZHZy84KIcQKIcQKIZxZO9T+/ftr49OnT7dxJTOr8YmPjXP+x84KIcQKIdwGd6hDhw7Vxo0faDU4OFjMe3p6auPh4eHi2sWLF2dgdc3T+KDzjRs3tmklnc/OCiHECiHECiGcWTvUokWLauPGD3tunP/N+vXrm7amVnj16lW7l9Cx7KwQQqwQQqwQwpmVlhobG/vr9X379rVoJXnsrBBCrBDCbTAtVf8EjEqlUlm4cGExX758eSuXE8XOCiHECiHECiGcWWkrT4qYPDsrhBArhBArhHBmZcYNDQ1NeG3evHK/6O7ununlxLKzQgixQgi3wTTd9evXi/mxY8cm/NrGz6XdsmXLTCxpVrCzQgixQgixQghnVqbt4MGDxfz8+fPFvP4TBRYvXvzX72VidlYIIVYIIVYI4czKtK1YsaKY13+aQKVSqaxdu7Y2PnHiRHHNrxdOnp0VQogVQnRVp/Cn+f39/ZWRkZGZXA/MaX9rzM4KIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIcQKIbqq1Wp1sl/c29tb6evrm8HlwNw2OjpaGR8f/+O1KcUKtI/bYAghVgghVgghVgghVgghVgghVgghVgghVgjxXxYMLFk+k4ZuAAAAAElFTkSuQmCC\n", 295 | "text/plain": [ 296 | "
" 297 | ] 298 | }, 299 | "metadata": {}, 300 | "output_type": "display_data" 301 | } 302 | ], 303 | "source": [ 304 | "from simple_deep_learning.mnist_extended.semantic_segmentation import display_grayscale_array\n", 305 | "\n", 306 | "i = np.random.randint(len(train_x))\n", 307 | "display_grayscale_array(array=train_x[i])" 308 | ] 309 | }, 310 | { 311 | "cell_type": "markdown", 312 | "metadata": {}, 313 | "source": [ 314 | "Let's take a look at the bounding boxes and labels." 315 | ] 316 | }, 317 | { 318 | "cell_type": "code", 319 | "execution_count": 10, 320 | "metadata": {}, 321 | "outputs": [ 322 | { 323 | "name": "stdout", 324 | "output_type": "stream", 325 | "text": [ 326 | "[[ 9 2 37 30]\n", 327 | " [27 11 55 39]]\n", 328 | "[4 1]\n" 329 | ] 330 | } 331 | ], 332 | "source": [ 333 | "print(train_bounding_boxes[i])\n", 334 | "print(train_labels[i])" 335 | ] 336 | }, 337 | { 338 | "cell_type": "markdown", 339 | "metadata": {}, 340 | "source": [ 341 | "We see the (xmin, ymin, xmax, ymax) coordinates of each bounding box, as well as the associated label. As mentioned before, this cannot directly be used as a target because different images have a different number of bounding boxes and the output of most neural networks (e.g excluding RNNs) is of fixed size for an input of a given size. For anyone interested in how to construct the target for an object detection model, I recommend checking the architecture of single shot detection (SSD) models. They are very commonly used models for object detection and relatively simple.\n", 342 | "\n", 343 | "I have created a function to draw the bounding boxes on the array and return a PIL image." 344 | ] 345 | }, 346 | { 347 | "cell_type": "code", 348 | "execution_count": 11, 349 | "metadata": {}, 350 | "outputs": [ 351 | { 352 | "data": { 353 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAOsAAADrCAYAAACICmHVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAAH3klEQVR4nO3dPWiV1x8H8BsVrCLRQbTgksFSpIOLFHxBKqXgJI4OCoUORQhVcJC26qKitRREAoolOoriGxIcu4jQShzsIB1EHIRqSTu0vuBLm//0f3rPrYmJ3rfvzecznR8nufeQ+PX3nNx7z9M3Pj4+XgO63qxOLwCYGmGFEMIKIYQVQggrhBBWCDFnOl+8ePHi2sDAQIuWAty7d682Njb2yrlphXVgYKA2OjralEUB/7Vq1aoJ51wGQwhhhRDCCiGEFUJM6w9M3aCvr6/TS+A1fDakNXRWCCGsEEJYIUTcnrWevVF38HeE9tBZIYSwQghhhRDCCiGEFUIIK4QQVgghrBBCWCGEsEIIYYUQwgohhBVCRH/qppc9efKkGh84cKCYO3z4cFEvWrSoGl+/fr2YW7FiRfMXR0forBBCWCGEsEIIe9Yu9c0331Tjb7/9tpibPXt2Uf/111/V+Ouvvy7mLl682ILV0Qk6K4QQVgghrBBixu5Z61+7vHnzZjH38ccfF/Xg4GBb1lTvxIkTbX9OupvOCiGEFUL09GXwTz/9VI03bdpUzNW/3PHixYtibu7cuUW9bdu2arxw4cJmLnFCIyMj1XjNmjVteU66m84KIYQVQggrhOjpPWv9XvSPP/6Y8vdduHChqDdu3FiNP/3007de11T09/dPOPfy5csJ5y5dulTUnbhplBtVteamaTorhBBWCCGsEKKn96z1HzNL03h0y2TmzJn419j4OLt27XrjNU2kcY86U29y3eq9us4KIYQVQvT0ZXDjSX9Jrl692ukl0GV0VgghrBBCWCFET+9Zr1y5Uo03bNjwxo9z69atVz7mdDW+pDHZn/qfP3/+xs9Db9JZIYSwQoievgxulqGhoVeOp+vvv/8u6sbDupvhgw8+KOr169c3/TnexldffVXUp0+fLupff/21ncuJorNCCGGFEMIKIexZe0D9qRJnz54t5t5///12L+et1N9Eev78+R1cSffRWSGEsEIIYYUQPb1nXbduXTVufD3v5MmT1fjGjRvFXKve6tf4dsPG112b8bj//PNPUx6zXR4+fFjU586dq8btOkkyhc4KIYQVQvT0ZXC9rVu3Tljv37+/mHv69OmEjzM8PFzU0zk8vFlvN2w85Lv+Jltbtmwp5r7//vui/vDDD9/oOek8nRVCCCuEEFYIMWP2rJPZu3fvlL/2k08+KerHjx9P+Xunc1JE/Q2cHz16NOXnuH37dlFfu3atqLt9z1r/dkkv3ZR0VgghrBBCWCGEPes0vc0piZO5f/9+Uc+aNTP/H717926nl9C1Zua/CAgkrBDCZXCXuHz5clE/e/asKY87Ojpa1L/99ls1XrJkSVOeg/bQWSGEsEIIYYUQ9qxdYnBwsKjrP7b3NvvX8+fPF/UXX3xRje1Zs+isEEJYIYSwQgh71i7x5ZdfFvWff/7ZoZXQrXRWCCGsEMJlcJc4dOhQUZ86daoaT+cERXqXzgohhBVCCCuEsGftUtu3b6/GBw8e7OBKWqvxxMfGmn/prBBCWCGEy+AutXv37mrceEOrAwcOFHV/f381HhkZKeaOHz/egtU1T+NB52vXru3QSrqfzgohhBVCCCuEsGftUvPmzavGjTd7btyz/v777xM+zurVq5u7sBb75ZdfOr2ErqWzQghhhRDCCiHsWWmrhw8fTjr/+eeft2kleXRWCCGsEMJlMG1VfwJGrVarvfPOO0W9dOnSdi4nis4KIYQVQggrhIjeszZ+vGqmSv45OCli6nRWCCGsEEJYIUTcntWe5r971G7/mQwPD084N2tW2S/mz5/f6uXE0lkhhLBCiLjL4HoPHjwo6s2bNxf1jz/+2MbV8H9nzpwp6j179kz4tY33pf3oo49asaSeoLNCCGGFEMIKIeL2rPWn0x85cqSYe++999q9HGq12q5du4r62LFjRV3/O1uwYMGk38vEdFYIIawQQlghRNye9bvvvqvGg4ODxVwv33S4my1btqyo6+8mUKvVaitXrqzG+/btK+a8vXDqdFYIIawQom98Gh/ZWLVqVW10dLSV63mtd999txq/7sDoHTt2VOOjR4+2akltl/apm5mi/vfypr+TyTKms0IIYYUQwgoh4l66afxYXL3PPvusqHtpnwo6K4QQVggRdxk8mckO5oJ0OiuEEFYIIawQQlghhLBCCGGFEMIKIXrqddaZKvlmykydzgohhBVCuAwO5GSImUlnhRDCCiGEFULYs/aAH374oRpfvHixmBsaGmr3cmgRnRVCCCuEEFYIYc8a6M6dO0X9888/V+OnT5+2ezm0ic4KIYQVQrgMDrR8+fKi3rlzZzVuPOic3qGzQghhhRDCCiGEFUIIK4QQVgghrBBCWCGEsEIIYYUQ3m7YY9xQunfprBBCWCGEsEIIYYUQwgohhBVCCCuEEFYIIawQQlghhLBCCGGFEMIKIYQVQggrhBBWCCGsEEJYIYSwQghhhRDCCiGEFUIIK4QQVgghrBBCWCGEsEIIYYUQwgohhBVCCCuEEFYIIawQQlghhLBCCGGFEMIKIYQVQggrhBBWCCGsEEJYIYSwQghhhRDCCiGEFUIIK4QQVgghrBBCWCFE3/j4+PhUv3jx4sW1gYGBFi4HZrZ79+7VxsbGXjk3rbACneMyGEIIK4QQVgghrBBCWCGEsEIIYYUQwgohhBVC/A+hynnpNZotAQAAAABJRU5ErkJggg==\n", 354 | "text/plain": [ 355 | "
" 356 | ] 357 | }, 358 | "metadata": {}, 359 | "output_type": "display_data" 360 | } 361 | ], 362 | "source": [ 363 | "from simple_deep_learning.mnist_extended.object_detection import draw_bounding_boxes\n", 364 | "\n", 365 | "a = np.array(draw_bounding_boxes(train_x[i], bounding_boxes=train_bounding_boxes[i], labels=train_labels[i]))\n", 366 | "display_grayscale_array(a)" 367 | ] 368 | }, 369 | { 370 | "cell_type": "markdown", 371 | "metadata": {}, 372 | "source": [ 373 | "## MNIST extended customisation" 374 | ] 375 | }, 376 | { 377 | "cell_type": "markdown", 378 | "metadata": {}, 379 | "source": [ 380 | "So far we've only used the main parameters of the create dataset functions. I recommend checking the code to find how to change things such as the maximum number of digits per image, the maximum IOU (intersection of union) of two digits in the same image and more.\n", 381 | "\n", 382 | "In this post, we've been using the end to end functions create_semantic_segmentation_dataset and create_object_detection_dataset.\n", 383 | "\n", 384 | "These perform the following tasks:\n", 385 | "- Download the original MNIST dataset.\n", 386 | "- Preprocess the original MNIST images.\n", 387 | "- Overlay the MNIST digits to create the new input image.\n", 388 | "- Create the target/output arrays.\n", 389 | "\n", 390 | "These components are all part of the MNIST extended package and are very modular. This provides you with a lot of freedom to customise the dataset as you would like. For instance, if you want to perform additional preprocessing on the original MNIST digits, that's totally possible. You might want modify the digits by randomly changing their size, in which case you can use the [individual functions](https://github.com/LukeTonin/simple-deep-learning/blob/main/simple_deep_learning/mnist_extended/mnist.py) for downloading and preprocessing the MNIST digits. Then you can add a function to modify their size and finally feed the modified images and labels into the [create_object_detection_data_from_digits](https://github.com/LukeTonin/simple-deep-learning/blob/main/simple_deep_learning/mnist_extended/object_detection.py) or [create_semantic_segmentation_data_from_digits](https://github.com/LukeTonin/simple-deep-learning/blob/main/simple_deep_learning/mnist_extended/semantic_segmentation.py)." 391 | ] 392 | }, 393 | { 394 | "cell_type": "markdown", 395 | "metadata": {}, 396 | "source": [ 397 | "I hope you have a lot of fun playing around with this dataset. I've certainly found it very useful for experimenting with model architectures and learning about deep learning more generally. 
I would love to hear what you've done with the dataset so please post a comment below or send me a message via LinkedIn!\n", 398 | "\n", 399 | "Have a great day.\n", 400 | "\n", 401 | "Luke" 402 | ] 403 | } 404 | ], 405 | "metadata": { 406 | "kernelspec": { 407 | "display_name": "Python 3", 408 | "language": "python", 409 | "name": "python3" 410 | }, 411 | "language_info": { 412 | "codemirror_mode": { 413 | "name": "ipython", 414 | "version": 3 415 | }, 416 | "file_extension": ".py", 417 | "mimetype": "text/x-python", 418 | "name": "python", 419 | "nbconvert_exporter": "python", 420 | "pygments_lexer": "ipython3", 421 | "version": "3.7.9" 422 | } 423 | }, 424 | "nbformat": 4, 425 | "nbformat_minor": 4 426 | } 427 | -------------------------------------------------------------------------------- /unet_segmentation/dataset_generation.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Get the MNIST dataset from Keras" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [ 15 | { 16 | "name": "stderr", 17 | "output_type": "stream", 18 | "text": [ 19 | "2024-04-13 00:24:59.306671: I external/local_tsl/tsl/cuda/cudart_stub.cc:32] Could not find cuda drivers on your machine, GPU will not be used.\n", 20 | "2024-04-13 00:24:59.308456: I external/local_tsl/tsl/cuda/cudart_stub.cc:32] Could not find cuda drivers on your machine, GPU will not be used.\n", 21 | "2024-04-13 00:24:59.331839: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", 22 | "To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", 23 | "2024-04-13 00:24:59.796858: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n" 24 | ] 25 | }, 26 | { 27 | "name": "stdout", 28 | "output_type": "stream", 29 | "text": [ 30 | "(60000, 28, 28) (60000,)\n", 31 | "(10000, 28, 28) (10000,)\n" 32 | ] 33 | } 34 | ], 35 | "source": [ 36 | "import tensorflow as tf\n", 37 | "\n", 38 | "(train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()\n", 39 | "\n", 40 | "print(train_images.shape, train_labels.shape)\n", 41 | "print(test_images.shape, test_labels.shape)" 42 | ] 43 | }, 44 | { 45 | "cell_type": "markdown", 46 | "metadata": {}, 47 | "source": [ 48 | "#### Show sample images" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": 2, 54 | "metadata": {}, 55 | "outputs": [ 56 | { 57 | "data": { 58 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAxsAAAKPCAYAAAAFXtcoAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABVUUlEQVR4nO3de5zN5fr/8WsZOc+MMw3jkJJTSKTD3g6lkCI5JZVT7Q4jSRR2IodCbUmOSRI7lHLcJJSRSkSUbyWpDI1T5TDkOLN+f3x/+e7Puu6az6xZ9/qsNev1fDz6434/PuuzLnVba67WXOv2+f1+vwAAAABAiOXzugAAAAAAeRPNBgAAAAAraDYAAAAAWEGzAQAAAMAKmg0AAAAAVtBsAAAAALCCZgMAAACAFTQbAAAAAKzI7+airKwsSU9Pl/j4ePH5fLZrQpTw+/2SkZEhSUlJki+fvb6V/QeTcO0/EfYgNPYfvMZ7MLyUk/3nqtlIT0+X5OTkkBSHvGfv3r1SsWJFa/dn/+Gv2N5/IuxB/Dn2H7zGezC85Gb/uWo24uPjL9wwISEh95UhTzh+/LgkJydf2B+2sP9gEq79J8IehMb+g9d4D4aXcrL/XDUbf3xslpCQwEaDYvtjVfYf/ko4PtZnD+LPsP/gNd6D4SU3+48BcQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGAFzQYAAAAAK2g2AAAAAFhBswEAAADACpoNAAAAAFbQbAAAAACwgmYDAAAAgBU0GwAAAACsoNkAAAAAYAXNBgAAAAAraDYAAAAAWJHf6wIAAACAnFq3bp2r7Jlnngnq/sOGDVPZ8OHDg7pXLOOTDQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArMiTA+JLlixRWVpamsreeecdx7pWrVrqmmnTprl6Tr/frzKfz5ft4zp27KiylJQUlTVt2tRVHQAAAHmNaTA72MFvt0z3T01NVZnpZzQGyf8Pn2wAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGBF1A+ImwZ17rnnHpWdPHlSZYFD3R999JG6xs2Q959x89jAIXURkdWrV6ts3rx5jnWrVq2Crgvhd+bMGZW99tprjvV7772nrilSpIjKLr30UpX9+OOPKhs4cKBjXa9evWzrBAAgEgQOWId6GDzwdHC393d7ajkD4v+HTzYAAAAAWEGzAQAAAMAKmg0AAAAAVkT9zIZJbuYsAlWoUEFlRYsWVVlWVpbKTp065Vj//PPPrp7z2LFjKuvZs6djvWzZMnVNw4YNXd0foXP69GmVPffccyqbMmWKyn755RcrNf3h66+/dqw/+OADdU3x4sWt1gAAQDDczFA0a9ZMZYGzGH92XSDTjEVuDhJs3ry5Y/3hhx+6elxexCcbAAAAAKyg2QAAAABgBc0GAAAAACtoNgAAAABYEfUD4k2bNlXZnDlzVJaWlhbU/W+77TaVVa5c2dVj9+zZ41jfeuut6prAId4/c+jQIcf6119/dfU4hI7pv1WXLl1UtmPHjpA9p9sB7hYtWqjsuuuuc6zz54+Mv+6mAzb37t3rWNeoUSNc5eC/ZGZmqsz0hRXRxPR3KF8+/j9bINMXSKSnp2f7uCuuuEJlefEA0RtvvNGxDvzSFhGRzp07q6xAgQLWasrrAgeqvTg4z+39TUPjgfW6HUDPi3jFBQAAAGAFzQYAAAAAK2g2AAAAAFhBswEAAADAisiYGA2xtm3bel2CiIhMnz7dsXY7DG4SeCp1y5Ytg74XgtOtWzeV5WYY/Morr3Sse/Xqpa7p06dP0PcPt927d6vs2WefVdmKFStUdv78ecf61VdfVde0a9cuF9VFvrNnz6os2C+2MJ1Yf+LEiWwfd+TIEZUtXLgwqBoixeTJk1X28MMPe1BJZHv++edV9t5772X7uAEDBqisUqVKKitcuLDKChUq5LK67J05c0Zlv//+e7aPGzJkiMp++uknlX3yySeOtWmg3vSFMgyIBy/w1G83p4AjMvHJBgAAAAAraDYAAAAAWEGzAQAAAMAKmg0AAAAAVuTJAXHbTIPeplPLJ0yYENT9u3fvrrLHHnssqHshdMqWLevqOtOQYPPmzVXWo0cPx7pEiRJB1RUOgadIDx48WF2zaNEilR04cCCo57vzzjtVdurUqaDuFYnGjRunMtPA6apVq8JRTo7VrFlTZfHx8Vafc+fOnY6125PNU1JSVMaAuHbzzTerzM2A+AsvvOAqa9++vcpuuOEGl9VlL3CAW0Rk3rx5Ibs/kFuxPODOJxsAAAAArKDZAAAAAGAFzQYAAAAAK2g2AAAAAFgR0wPiR48edawfffRRdc3hw4dV9u2336rMdOKoz+cLqq4nnnhCZRdddFFQ90LoBA50i4i8//77KvP7/SozDf1HwkC46dRq058p8HTh9evXB/2cplODW7du7Vi3bds26PtHg6lTp6rM9BpSv359x7p8+fKu7v/QQw+pLJQD3A0aNFBZYmJiyO5vMmzYMMd6xIgRrh5XtWpVG+XkOY888ojKAofwZ86cqa7Zt2+fq/ubvkDClEWqggULOtamk8eLFi0arnLgodTU1KAex4A4AAAAAIQYzQYAAAAAK2g2AAAAAFgRMzMbCxcuVNnkyZMd69z8HnooTZs2TWVDhw51rEuXLh2ucvD/mQ69Ms35vPTSSyoLnEkQERk0aJBjbTr0KpR+++03lZkO33ruueeCur9pFmPGjBkqa9y4scqqVavmWOfLl7f/P4jpALJDhw6prEqVKo617bmISGH6Xf4XX3wxqHutXLkyt+XEhPz59Y8Dw4cPd6xNr3fjx493df/Vq1er7LPPPnNXXAQYOXKkYz1w4ECPKkE4BTt7axL49+nPsrwob7+jAwAAAPAMzQYAAAAAK2g2AAAAAFhBswEAAADACp/fdAJZgOPHj0tiYqIcO3ZMEhISwlGX0fbt21U2ffp0lZkGrE0C/+i5GQSqW7euygIPcAk8RFBE5I033si2LhGRNm3aONbLly/PWYEWhGtfRMr+M8nKylKZ6csITAdmBQ5smwbEn3nmGZXVrFlTZabD+VasWOFYmw6L3LVrl8rcMA2DL1iwQGU2D+cL576I5D0Y7ZYtW6ayLl26qOzUqVOO9aWXXqquMe3x3r17qywUX0DA/ssZ0xcgmL60IpQCD1PdtGmTq8eZDs/csmWLY52UlBR8YSHCe7B9oRwQdyvwAFORyBwkz8m+4JMNAAAAAFbQbAAAAACwgmYDAAAAgBU0GwAAAACsiOgTxL/++mvHumXLluqaw4cPqyzYgR7T4xo1aqSyBx54QGW33XabygJP+U5PT1fXmE4t/+mnn7K9zvS4Jk2aqAx2mQZNO3furDLTf5sBAwY41u+++666ZuPGjSobN26cyt5//32VzZo1S2XB6tq1q2MdeJquiD4FHMjMzHSsA7/oQkTkgw8+UNm5c+dUdskllzjWpj1455135rREhEnZsmVdZaFUpEiRbK8x1bB48WKVRcJAOMLP9IU9bk8CD8xSU1PVNevWrVOZ6Yth3D
xfJOOTDQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArIjoAfF33nnHsTYNg+fGZZdd5lgPHTpUXdOqVSuVBQ5+u2UaMDOdsDxx4kSVnTx50rEOPE0Xkc10Iu3cuXMd60mTJqlrTCePBw5ru2U69fvqq69W2WOPPaayv//97451qVKlgqoBeZfpNalDhw6O9apVq9Q1pi9ZqFevnsqWLFniWFeuXDmnJSIPC3w9FRH5+OOPs32c6cs7GjduHJKakDe5Hc52c13z5s1VZhoaNw2XRxM+2QAAAABgBc0GAAAAACtoNgAAAABYQbMBAAAAwIqIHhB/6qmnHOsdO3aoa2rWrKkytwPcffr0Ca6wELrjjjtUZhoQR96zb98+x9p0knJuBA6lP/HEE+oa0zA4kJ0vvvhCZaYvMwgc0PX5fOqawYMHq2zUqFG5qA553dmzZ1U2bdo0lZlOog80ZMiQkNQEBOPDDz9Umel10jQ0Hk34ZAMAAACAFTQbAAAAAKyg2QAAAABgRUTPbAT+3tqCBQs8qsSewIMLRUT8fn+2j3NzDSLHypUrVda/f3/H+ttvvw3pc95///2ONfMZCMbXX3+tspEjR6rMdIBahQoVHOtevXqpa0aMGJGL6hCL7r77bpW5OcCvTZs2KqtSpUooSgLCyjTD0axZs7DX4RafbAAAAACwgmYDAAAAgBU0GwAAAACsoNkAAAAAYEVED4jnNT/99JPK5syZozLTgS6BmekahF9WVpbKTIdFvvbaayo7c+ZMtvdv3769yrp27aoy0yFogc+ZkpKirilXrly2NSB2rF69WmVdunRR2ZEjR1SWnJysshUrVjjWderUyUV1wP86f/58UI/Ln1//yMN7KWAfn2wAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGAFA+JhlJ6errJjx465euwtt9ziWF911VUhqQm5s3z5cpVNnTo1qHslJCSobPLkySq7+OKLVXb55ZerrF69eo51586d1TWpqak5KRF5zIcffuhYd+rUSV1jeo0yDYP/5z//URkD4fBSoUKFHOtu3bqpa4oXLx6mahAuptO1n3nmGZUFvv5Fk0g+LdyETzYAAAAAWEGzAQAAAMAKmg0AAAAAVtBsAAAAALCCAXGLRo4c6VjPmjXL1eMmTpyosnvuucexTkxMDL4wBMV0aq1poNatYsWKOdbvvvuuusY0DL5nzx6VuRlKP3r0qPvikOds2LBBZW3atHGsT506pa6pUKGCygJPBhdhGBx2rF+/XmUfffSRq8dWqVLFsc7N6zWih2kY3DQ03rx5c8e6adOm6prhw4eHqizXg+t5EZ9sAAAAALCCZgMAAACAFTQbAAAAAKxgZiNETHMWw4YNC+peffr0yW05sCAuLk5l3bt3V9mMGTNc3e/EiROOdYcOHVw97syZMyo7ffq0yooWLepY8/vKedO5c+dU9uKLL6rM9HoUuJcCf8ddRGTlypUqq1GjRg4qBIJ3+PBhlf3yyy+uHjtlypRQl4M8JHCGIpJnKoL9eTJS8MkGAAAAACtoNgAAAABYQbMBAAAAwAqaDQAAAABWeDIgbhr4uv/++1VWq1Ytx/rZZ5+1VtOfOXnypMr69eunsqVLl6rM5/M51oGHuImITJgwIejaEF6B/z1FRO666y6VXXXVVSozHdj3/vvvO9bHjh0LurbAYXARkUGDBjnWTz31VND3R2QwHbpneu3897//7ep+KSkpjvWkSZOCKwywZPDgwV6XgChjGqY2DX9HqmbNmqkslIcLeoFPNgAAAABYQbMBAAAAwAqaDQAAAABW0GwAAAAAsMKTAfHPP/9cZcuWLcs227Ztm7pm6NChKitdurSrOtavX+9Yv/POO+oa00Bm4ONERPx+v8qqVq3qWD/55JPqmp49e2ZbJyKXaZDLlP3jH/9Q2erVqx3rrVu3qmuqV6+usmrVqqmsfPnyKitXrpzKEN3efvttlQU7DC7CQDgi3549e1xd16BBA5UFvgcjNpjeg00/owXL7bC2m9PHTcPspvqjHZ9sAAAAALCCZgMAAACAFTQbAAAAAKyg2QAAAABghScD4sFatWqVq8ytwIEh0wnRbjVt2lRlixcvdqwTExODvj+im2lv3XzzzX+5BkaMGOFYT5gwwdXj5syZo7KuXbuGoiQgInXs2FFlVapUCX8hyPPcDohH+6nfocQnGwAAAACsoNkAAAAAYAXNBgAAAAAraDYAAAAAWOHJgHjr1q1VlpmZ6UElABAZ9u7dq7IXX3zRsT569Ki6pm/fvirr3LmzyuLi4oIvDogghQoVUtngwYM9qASAG3yyAQAAAMAKmg0AAAAAVtBsAAAAALAiqg71A4C8qmDBgiozzV4EGjBggMoKFCgQkpoAAMgtPtkAAAAAYAXNBgAAAAAraDYAAAAAWEGzAQAAAMAKBsQBIAKULVtWZdOnT/egEiBypKSkqMzn83lQCYBg8ckGAAAAACtoNgAAAABYQbMBAAAAwApXMxt+v19ERI4fP261GESXP/bDH/vDFvYfTMK1//77OdiD+AP7LzzOnDmjMtPMRiz+u+E9GF7Kyf5z1WxkZGSIiEhycnIuykJelZGRIYmJiVbvL8L+g5nt/ffHc4iwB6Gx/yLD5MmTvS7BM7wHw0tu9p/P76IlycrKkvT0dImPj+dbIHCB3++XjIwMSUpKknz57P1GHvsPJuHafyLsQWjsP3iN92B4KSf7z1WzAQAAAAA5xYA4AAAAACtoNgAAAABYQbMBAAAAwAqaDQAAAABW0Gy4MHXqVKlbt64kJCRIQkKCXHvttbJy5Uqvy0IMyczMlKFDh0rVqlWlcOHCUq1aNRk5cmRYvuMfqFKlivh8PvVPSkqK16UhRrAH4SV+DswdV+dsxLqKFSvKmDFj5LLLLhO/3y+zZ8+Wdu3ayRdffCG1a9f2ujzEgLFjx8rUqVNl9uzZUrt2bfn888+lZ8+ekpiYKH379vW6PORxmzdvlszMzAvrHTt2yE033SSdOnXysCrEEvYgvMTPgbnDV98GqWTJkvL8889L7969vS4FMeDWW2+VcuXKycyZMy9kHTp0kMKFC8vcuXM9rAyxqF+/frJ8+XLZtWsX37sPT7AH4TV+DnSPX6PKoczMTJk/f76cPHlSrr32Wq/LQYy47rrrZO3atfLdd9+JiMj27dtlw4YN0rp1a48rQ6w5e/aszJ07V3r16sUPefAEexBe4ufAnOPXqFz66quv5Nprr5XTp09LsWLFZNGiRVKrVi2vy0KMGDRokBw/flxq1KghcXFxkpmZKaNHj5Zu3bp5XRpizOLFi+Xo0aPSo0cPr0tBjGIPwgv8HBg8fo3KpbNnz0paWpocO3ZMFi5cKK+++qqkpqay0RAW8+fPl4EDB8rzzz8vtWvXlm3btkm/fv1k/Pjx0r17d6/LQwxp2bKlFChQQJYtW+Z1KYhR7EF4gZ8Dg0ezEaQWLVpItWrVZPr06V6XghiQnJwsgwYNcnzzyqhRo2Tu3Lny7bffelgZYsmePXvkkksukXfffVfatWvndTmIQexBRAp+D
nSPmY0gZWVlyZkzZ7wuAzHi999/l3z5nH9d4+LiJCsry6OKEItmzZolZcuWlTZt2nhdCmIUexCRgp8D3WNmw4XBgwdL69atpVKlSpKRkSFvvvmmrFu3TlatWuV1aYgRt912m4wePVoqVaoktWvXli+++ELGjx8vvXr18ro0xIisrCyZNWuWdO/eXfLn560D4ccehFf4OTB3+NvqwqFDh+Tee++V/fv3S2JiotStW1dWrVolN910k9elIUa8/PLLMnToUHn44Yfl0KFDkpSUJA888IA8/fTTXpeGGLFmzRpJS0ujwYVn2IPwCj8H5g4zGwAAAACsYGYDAAAAgBU0GwAAAACsoNkAAAAAYAXNBgAAAAAraDYAAAAAWEGzAQAAAMAKmg0AAAAAVtBsAAAAALDC1QniWVlZkp6eLvHx8eLz+WzXhCjh9/slIyNDkpKSJF8+e30r+w8m4dp/IuxBaOw/eI33YHgpJ/vPVbORnp4uycnJISkOec/evXulYsWK1u7P/sNfsb3/RNiD+HPsP3iN92B4yc3+c9VsxMfHX7hhQkJC7itDnnD8+HFJTk6+sD9sYf/BJFz7T4Q9CI39B6/xHgwv5WT/uWo2/vjYLCEhgY0GxfbHquw//JVwfKzPHsSfYf/Ba7wHw0tu9h8D4gAAAACsoNkAAAAAYAXNBgAAAAAraDYAAAAAWEGzAQAAAMAKmg0AAAAAVtBsAAAAALCCZgMAAACAFTQbAAAAAKyg2QAAAABgBc0GAAAAACtoNgAAAABYQbMBAAAAwAqaDQAAAABW0GwAAAAAsIJmAwAAAIAV+b0uINb99NNPKps1a5bKRo4c6VhPmDBBXdO9e3eVJSYmBl0bItMTTzyhsvvuu09l1atXD0c5AAAAf4pPNgAAAABYQbMBAAAAwAqaDQAAAABW0GwAAAAAsIIB8TAaPHiwyubOnauy9PR0lfl8Pse6X79+6pr3339fZaZh8zJlyvxVmYhwK1asUFlSUpLKGBBHtDp+/LjKJk2a5Fj/61//Utc0adJEZYsWLQpdYQAQpIULF6pswIABKjN9cVAo7d27V2W7du1yrG+44YaQPiefbAAAAACwgmYDAAAAgBU0GwAAAACsoNkAAAAAYAUD4iGyfft2lQUOhL/33nvqmsDB79xYuXKlyrZu3aqyli1bhuw5ERm+//57r0sAQiYtLU1lTz31lGNdqFAhdc0ll1xirSYAyI3A1zARkV9++SXsdSQmJqrM9msnn2wAAAAAsIJmAwAAAIAVNBsAAAAArGBmIxt+v19l33zzjcrat2+vsj179lipCbHju+++U1ng4TsiIs2aNQtDNUDoZWRkqOyll17K9nGlS5dWmemgP9j18ssvq2zevHkqa9GihcoaNWqU7f1Nv9NepEgRlZneq1etWuVYlyxZUl2zf/9+lR04cEBlH374oWP9t7/9TV2zfPlylZl+Px6hY/rvbnqPfOCBB1Q2depUx7pGjRqhK0xEPv/8c8fadFhf/vzh/zE8ISHBVRZKfLIBAAAAwAqaDQAAAABW0GwAAAAAsIJmAwAAAIAVDIhno3Pnzip79913rT5nu3btVFa5cmXH2s0ApYhIamqqyjjUL3oUK1bMVbZz585wlIM8bsOGDSobMGCAysaNG6eyJk2aBPWcpmHimTNnquymm25yrPv37x/U8yG0pkyZorJvv/1WZZ9++mk4ygkb09+VH374QWVXXnllOMqJWadOnVLZ5Zdf7uqxgV/AEuoB8WXLljnWZ86cUddUqVIlpM8ZqfhkAwAAAIAVNBsAAAAArKDZAAAAAGAFzQYAAAAAK2J6QHzlypWOdb9+/dQ1ppMofT5fUM8XOOQtItKpUyeVDR8+XGWBJ0+6HRA3Deohepw+fVplpiGz7du3h6Mc5HFvvvmmyjZt2qSyadOmqczNgLjpBF3T6125cuWyfc6qVatm+3wIrd27d6vs559/VlmBAgVUVqtWLZXt2bPHsbb939R02vQXX3wR1L0efPBBldWpUyeoeyF48+fPd3Vd4cKFVRbKgfDffvtNZW+//Xa2j3viiSdCVkMk45MNAAAAAFbQbAAAAACwgmYDAAAAgBU0GwAAAACsiJkB8c2bN6usZ8+ejvXhw4eDvr9pIO7xxx93rLt166auqVmzpspMA3e33nprtjU0atRIZa+88kq2j0PkWrFihcpOnjypsh49eoShGuQ1s2fPdqznzp3r6nGBX64hIrJjxw6VBQ7MpqWlqWsOHDigsmuuuUZlDIR7r1q1aioz7RnTF1uY3p8C3zcrVKiQi+qyt379epU1bdrU1WMLFSrkWA8ePFhdc9FFFwVXGIJ29uxZV9fdeeedKqtevXrI6gg8LVxE5JtvvnGsTaeFd+nSJWQ1uJWRkaGyI0eOONaVKlUK6XPyyQYAAAAAK2g2AAAAAFhBswEAAADACpoNAAAAAFbEzIC4aVA6NwPhgQKHwUVERo0aFdS95syZo7LAk1ZNypQpo7LSpUsHVQMig+nLAoBg7N+/X2UTJkxwrE+cOKGuMZ38fPz4cZWdO3cu2xp27typsoIFC6rsySefzPZeiAxt27b1uoQ/dfDgQce6Xbt2Qd9r6NChjnWoB2iRPdNg81tvveXqsS+88EKoy8kx02td0aJFw17H0aNHVfb999871gyIAwAAAIgKNBsAAAAArKDZAAAAAGBF1M9spKamqqx58+Yhu7/f71fZrl27VGY67ChYpoOHAusw1WXKAMSe8+fPq8w0B7F9+/Zs72WaRzP97nvJkiVVFniI37PPPquuqVevnqv7A3/FNN/WokULx9r0u+omXbt2VdmAAQOCqguhY5o7+/DDD8Nex5kzZ1Q2derUbB/Xr1+/oJ8z8DDf+fPnq2s+//xzV/fq3LmzyhITE4MrzCU+2QAAAABgBc0GAAAAACtoNgAAAABYQbMBAAAAwIqoGhAPHJAR0YdSiYj4fL6g7h94aI+IyD333KOyUB52Yhpw37Bhg8oC/0ymIfUiRYqErC5EhsWLF7u6rnjx4lbrQHSZNm2ayubOnRvUvf75z3+qrHfv3ipbvXq1ygJfp0yHk5qGcYG/cuzYMZV16dJFZd9++22296pYsaLKXn/9dZUVKFDAXXGwZtGiRa6uM31JUCgHoKdPn66yzz77LNvH/fTTTyrr3r27yjZu3KiywKF0Nwc9/xnTz7CDBw8O+n5u8MkGAAAAACtoNgAAAABYQbMBAAAAwAqaDQAAAABWRNWA+NNPP62ypUuXBn2/ChUqONY9e/ZU11SuXDno+wcynV46duxYlZkG4QOF8sRyRIZ9+/ap7MCBAyq77LLLVDZkyBArNSHyrV27VmWTJ08O2f1Ne9DE9GUaBQsWzPZxffr0yXFNiB1HjhxRWUpKiso+/vjjbO+VnJysMtOXtDAMHpm2bdvm6rpixYqpLC4uLqjnfPHFF1U2adKkoO5l+nkvlFq0aKGyDh06qOzee++1WocJn2wAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGBFVA2If/PNNyG9X+CQTCiHwU3mzJmjslWr
Vll9TkQP0+mipi8VeOihh1TG6fGxYc2aNSpr27atyk6dOhXU/atWraoy0+n0X3zxhcr279+f7f2bNm2qsqSkJHfFIc8zDYO3adNGZZ9++qmr+1WpUsWxNp1yb9rziAyBX34xf/58dU3+/PrH2Ntvv11lCxcuVNny5cv/8vlEzF/c4lbga6fpZPN27doFde/bbrtNZSVLlgzqXuHAJxsAAAAArKDZAAAAAGAFzQYAAAAAK2g2AAAAAFgR0QPiK1eudKzfe++9kN7/b3/7W8juZRrkDRwIX79+vbrG7/eHrAZEt3Xr1nldAiLc4MGDVXb69GmV+Xw+lZkGyadMmeJYlylTxtW9TCc4v/LKK9k+1nTKs2mw/OKLL1YZ8p5jx4451uPGjVPXuB0GL1GihMoCv4Dl0ksvzUF1iAbnz59XWe/evYO6V8WKFVWWL5/+f/JZWVkqa9CggcoCf2Y1vb7GCj7ZAAAAAGAFzQYAAAAAK2g2AAAAAFgR0TMbI0eOdKxNvzvsVtmyZVUW7O/PHT58WGXdu3dXWeDvi5rmM0x/pmLFiqmsSZMmOSkRUSg9Pd3rEhDhTpw44eq6evXqqWzatGkqK1++fFB1mA6nMs1sBP4e/cCBA9U15cqVC6oGRJeMjAyVPfvss461aWbDxLRnJk6cqLLq1au7rA6RqEKFCo616YDRwLkfEfMht126dFFZx44dHetGjRqpaypVqqQy06Gp9evXV1mkzmiYZk4Cfz6Ni4sL6XPyyQYAAAAAK2g2AAAAAFhBswEAAADACpoNAAAAAFZE9ID4r7/+GrJ71axZU2XVqlVzrHft2uXqXo8++qjKAofB3apSpYrKnnzySZX94x//COr+iB5paWmurjMNuiE2bNy4UWVvvvmmym6//XaVBTsMbvLbb7+5uq5169aO9aBBg0JWAyKXaQB1+PDhKhs/fny297roootUFvjlMSIinTt3dlccokaNGjUc648++khdY/q5rX379kE9n2k/mobBCxcurDLTz4WR6ueff1ZZ4L/HG264IaTPyScbAAAAAKyg2QAAAABgBc0GAAAAACtoNgAAAABYEdED4h06dHCsx44dG/S9UlNTVXbHHXdke43bU7/daNq0qcoWL16sssTExKDuj+hmOjne5OjRo3YLQcQyvTY89NBDVp/zl19+UdnLL7/s6rGBr+GIDQsXLlSZm2Fwk6lTp6qsd+/eQd0L0a1OnTquMrdOnjzpWE+aNMnV41q1aqWyunXrBl1HLOCTDQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArIjoAfG7777bsZ43b566Zs+ePUHff926ddleYxoQD5ZpgIhhcPzB7Sn0+/fvt1wJ8H+WLVumsp07d6rMdOJsu3btrNSEyBbsa1Tbtm1Vds899+S2HMDomWeecax//PFHdU2BAgVU1qVLF2s15VV8sgEAAADACpoNAAAAAFbQbAAAAACwIqJnNmrVquVY16xZU12TlpYWrnIuMB3qd8stt6hswoQJjjXzGQiF3BxiBPyVI0eOqGzGjBmuHms6wC9fPv5/Vl538OBBlT377LNB3ev5559Xmel35oFQWL16dbbXPPnkkypjZiPneCcAAAAAYAXNBgAAAAAraDYAAAAAWEGzAQAAAMCKiB4Qj1QzZ85U2a233qqy0qVLh6Mc5BGXXXaZyjZv3qyyXbt2qYyhcYTC7NmzVbZx40aVFSxYUGWdOnWyUhMim+lw3EOHDrl6bOChj6bXQCAUvvzyS1dZoAoVKtgoJ+bwyQYAAAAAK2g2AAAAAFhBswEAAADACpoNAAAAAFZE1YD4gw8+qLLTp0+rLDU1NWTPOXHiRJW1b99eZZwOjtwaO3asykwn6rodvgSy88477zjWI0eOdPW4xx9/XGVlypQJSU2ILjt37gz6sR07dnSsfT5fbssBjLKyslxlgYoUKWKjHE8lJCSorEqVKlafk082AAAAAFhBswEAAADACpoNAAAAAFbQbAAAAACwIqoGxNu2besqA6JR8+bNXWVAqPj9fsf6yJEj6prq1aurLCUlxVpNyJtKlCihsltvvdWDShCLqlWrprJLL73UsTYNSXfp0sVWSZ4xfaGR7S854pMNAAAAAFbQbAAAAACwgmYDAAAAgBU0GwAAAACsiKoBcQBA6ASe4OzmRF3gvzVo0EBl5cqVU9kVV1yhsuLFi9soCVDi4+NVtmvXLg8qiU18sgEAAADACpoNAAAAAFbQbAAAAACwgpkNAAAQFNPBfPfee6/KTAdGAogNfLIBAAAAwAqaDQAAAABW0GwAAAAAsIJmAwAAAIAVDIgDAICQGTdunNclAIggfLIBAAAAwAqaDQAAAABW0GwAAAAAsMLVzIbf7xcRkePHj1stBtHlj/3wx/6whf0Hk3Dtv/9+DvYg/sD+g9d4D4aXcrL/XDUbGRkZIiKSnJyci7KQV2VkZEhiYqLV+4uw/2Bme//98Rwi7EFo7D94jfdgeMnN/vP5XbQkWVlZkp6eLvHx8eLz+UJWIKKb3++XjIwMSUpKknz57P1GHvsPJuHafyLsQWjsP3iN92B4KSf7z1WzAQAAAAA5xYA4AAAAACtoNgAAAABYQbMBAAAAwAqaDQAAAABW0GzkwOTJk6VKlSpSqFAhady4sWzatMnrkhAj1q9fL7fddpskJSWJz+eTxYsXe10SYtiYMWPE5/NJv379vC4FMaJKlSri8/nUPykpKV6XhhjEa2DO0Gy4tGDBAunfv78MGzZMtm7dKvXq1ZOWLVvKoUOHvC4NMeDkyZNSr149mTx5stelIMZt3rxZpk+fLnXr1vW6FMSQzZs3y/79+y/8s3r1ahER6dSpk8eVIdbwGphzNBsujR8/Xu6//37p2bOn1KpVS6ZNmyZFihSR1157zevSEANat24to0aNkvbt23tdCmLYiRMnpFu3bjJjxgwpUaKE1+UghpQpU0bKly9/4Z/ly5dLtWrVpGnTpl6XhhjCa2BwaDZcOHv2rGzZskVatGhxIcuXL5+0aNFCPv30Uw8rA4DwSUlJkTZt2jheC4FwO3v2rMydO1d69erFIXMIK14Dg5Pf6wKiwS+//CKZmZlSrlw5R16uXDn59ttvPaoKAMJn/vz5snXrVtm8ebPXpSDGLV68WI4ePSo9evTwuhTEEF4Dg0ezAQD4S3v37pVHH31UVq9eLYUKFfK6HMS4mTNnSuvWrSUpKcnrUhAjeA3MHZoNF0qXLi1xcXFy8OBBR37w4EEpX768R1UBQHhs2bJFDh06JA0aNLiQZWZmyvr162XSpEly5swZiYuL87BCxIo9e/bImjVr5N133/W6FMQQXgNzh2bDhQIFCshVV10la9euldtvv11ERLKysmTt2rXSp08fb4sDAMtuvPFG+eqrrxxZz549pUaNGvLkk0/yJouwmTVrlpQtW1batGnjdSmIIbwG5g7Nhkv9+/eX7t27S8OGDeXqq6+WCRMmyMmTJ6Vnz55el4YYcOLECfn+++8vrH/88UfZtm2blCxZUipVquRhZYgF8fHxUqdOHUdWtGhRKVW
qlMoBW7KysmTWrFnSvXt3yZ+fH18QPrwG5g5/W13q0qWLHD58WJ5++mk5cOCA1K9fX9577z01NA7Y8Pnnn0vz5s0vrPv37y8iIt27d5fXX3/do6oAIHzWrFkjaWlp0qtXL69LAZADPr/f7/e6CAAAAAB5D+dsAAAAALCCZgMAAACAFTQbAAAAAKyg2QAAAABgBc0GAAAAACtoNgAAAABYQbMBAAAAwAqaDQAAAABWuDpBPCsrS9LT0yU+Pl58Pp/tmhAl/H6/ZGRkSFJSkuTLZ69vZf/BJFz7T4Q9CI39B6/xHgwv5WT/uWo20tPTJTk5OSTFIe/Zu3evVKxY0dr92X/4K7b3nwh7EH+O/Qev8R4ML7nZf66ajfj4+As3TEhIyH1lyBOOHz8uycnJF/aHLew/mIRr/4mwB6Gx/+A13oPhpZzsP1fNxh8fmyUkJLDRoNj+WJX9h78Sjo/12YP4M+w/eI33YHjJzf5jQBwAAACAFTQbAAAAAKyg2QAAAABgBc0GAAAAACtoNgAAAABYQbMBAAAAwAqaDQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGBFfq8LAADEhsOHD6vspptuUtnJkydVtmjRIse6Tp06oSsMAGANn2wAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGAFA+IAACsOHjzoWJuGun/55ReVPfDAAyqrXbt26AoDAIQNn2wAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGAFA+JABEtLS1NZYmKiqyyUzpw541jfd9996pq5c+eq7KWXXlJZ3759Q1cYIsaJEydUNmXKFMfaNAx+5ZVXZvs4ERGfz5eL6gAAXuGTDQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArGBAHIhgzZo1U1nVqlVVtnbt2jBU83+2b9+uMtMA7/jx41XGgHjetGXLFpWNGDHCsS5RooS6Zvny5SrLl4//D4bIsm/fPsd6zZo16prPP/9cZYsXL1aZ6UsRli1bFnxxCIkdO3ao7P7771fZxo0bXd1v4sSJjvUjjzwSXGF5AK/oAAAAAKyg2QAAAABgBc0GAAAAACsiembjlltucaxXrlyprhk+fLjKqlWrprJSpUqprHXr1tnWYPrdvEaNGqksLi4u23sB/+23335zrO+++251TXp6usrq1KljraY/c+7cOcfa9LutJu3bt7dRDjxm2pcDBgzI9nGma5KSkkJSE5Adv9+vstmzZ6vsrbfeUlngzwJHjhwJuo4mTZoE/ViEzoYNGxzrNm3aqGuOHz8e9P379evnWH/66afqmjfffDPo+0cTPtkAAAAAYAXNBgAAAAAraDYAAAAAWEGzAQAAAMCKiBkQ//DDD1W2detWx9p0aNgzzzzj6v6JiYkqu/7667N93FdffaWy2rVrqyyUh1A999xzKrviiitCdn+E39GjR1XWqlUrx9p0IJTJ0KFDQ1HSnzp9+rTKTHvejcKFC+e2HESgcePGqcy0fxs2bOhYDxw40FpNiG1paWkqe+eddxzrdevWqWuWLl0ashr+9re/qezaa69V2QMPPBCy54Q7poH+xx9/3LE2DYMnJCSobNSoUa6uCzz89pVXXlHXpKamqqxp06Yqi3Z8sgEAAADACpoNAAAAAFbQbAAAAACwgmYDAAAAgBURMyBerFgxlYVyuPTYsWMqW7FiRVD32rt3b27L+Us//fSTytye2IzI9PTTT6vMzUB4jx49VGY6wT6UFi1apDI3e75EiRIq69OnT0hqgnd+/vlnlZlOXS5UqJDKhg0b5lhfdNFFoSsMMeGTTz5R2dixY1X2wQcfqOzEiRNBPafpC1/uuusux7pjx47qmhtuuEFl8fHxQdWA4JmGwW+55RaVbdq0ybG++uqr1TWTJk1Smdv34MD9t2TJEnVNt27dVLZv3z5X948mfLIBAAAAwAqaDQAAAABW0GwAAAAAsIJmAwAAAIAVETMgbhq4CTy1eM+ePSF9zlKlSjnWpiHy8+fPh/Q5kfe9/PLLKps+fXq2j6tYsaLKTIOQtu3cuTOox5n+3ElJSbktBx574YUXVHb06FGVXX755Sq79dZbbZSEPKp///4qW7x4scp+/PHHoO7v8/lUNmHCBJU1aNBAZabTwRGZTF8WsHHjRpVVqVLFsf7Pf/6jrildunTQdQQOqpu+aKVkyZJB3z+a8MkGAAAAACtoNgAAAABYQbMBAAAAwAqaDQAAAABWRMyAuEm7du0c62BP/BYRefDBB1UWeCLoxx9/rK75/fffXd3/rbfeUlmwg7aITH6/X2W7d+9WmWmg9ty5cyorW7asY/3ss8+qa8qUKZOTEv+SaS+bBnhNfw8CVahQQWXXXHNNcIUhYqxfv15lphN0TaeFDx8+3EZJyMNeeuklx9o0rG163TWpW7euygK/mKNhw4bqmvz5I/rHIARh4cKFrq67++67HevcDIObZGVlOdamnwNiBZ9sAAAAALCCZgMAAACAFTQbAAAAAKyI6F9W7NKli2M9evRodU1aWpqre/Xt21dlNWrUcKxzc2jPjh07VBbszEbgQTOIDN98843K6tSp4+qxcXFxKktJSXGsA39/NNRMB1SaDhky/V5pwYIFHes1a9aoay655JJcVAcvBM7xmOYuTPvGdAjrnXfeGbK6EN3Onj2rsvnz56sscGbDNJ+RkJCgsueff15l99xzj8oKFy78l3Ui+m3YsEFlphla07yO7Tmzbdu2Wb1/NOGTDQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArIjoAfHAwbDFixera9q3b6+yPXv22CpJREQGDRqkMtOAeLBq1aoVsnsheMuXL3esH3nkkaDv1a1bN5UNHTo06PsFo2PHjiozHUpoctFFFznWl19+eUhqgrcChys//PBDdU3glwOIiEydOtVaTYh+/fv3V9nkyZODupdp2Ldly5ZB3Qt5j+kLTQIP0xMR6d27t8pMX9wSSt99912217Rp08ZqDZGCTzYAAAAAWEGzAQAAAMAKmg0AAAAAVtBsAAAAALAiogfEA9WvX19lt912m8qqV6+usksvvTTb+0+fPl1lphMmDx06pDLTyafBCjxVVUSfyNq2bVtX9ypVqpTK6tWrF1xhedjSpUtV9uSTTzrWufnigfLly6ss8EsF3J5G7lbg/devXx/0vWrXru1Yz507V11j+wR05M6pU6dUNnbs2Gwfd8cdd6iM15DYdObMGZW9+OKLKnvttdeCuv/FF1+ssssuuyyoeyF2ValSRWW2359Mfzc2bdrkWCcmJqprBg4caK2mSMInGwAAAACsoNkAAAAAYAXNBgAAAAAraDYAAAAAWBFVA+ImEydODPqxCxYscKwff/xxdc3vv/8e9P2DZToRM/DP6fbPbTqdctmyZcEVlkfMmjVLZSNHjlTZjz/+GLLnNA3izpw507F+55131DXXXnutygJP8xYxD/926NDBsTYNsLm1ceNGx7phw4bqGgbEI1tqaqrKPvjgg2wfd+TIEZWZvkyjVatWKgsciCxevHi2z4fItX
PnTpWNGDFCZabXIzf279+vsiuvvFJl48aNU5nphOj8+aP+RxwEoVKlSiorVqyY1ecM/HlSRGThwoWOdYUKFdQ1NWrUsFZTJOGTDQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArIiZ6SnTkOO8efMcay+GwW1r1KiR1yVEnF69eqnM5/Nlm11xxRXqmuuvv15lgftKROTYsWMq+/XXXx3rZs2aqWsGDBigsj59+qjMNOi7a9cux9r0Z3SrYMGCjnWsnHqal7z11ltBPe7jjz9W2ZYtW1T24IMPqqx69eqO9auvvqqu+fvf/x5UXQi/unXrquyTTz5RWfv27VXWvHlzlXXp0iXb5+zUqZPKTHvN9Fpcp06dbO+PvCctLU1lpi83uPzyy7O9l+mLVebPn68y054MVLt27Wyvyav4ZAMAAACAFTQbAAAAAKyg2QAAAABgRczMbJh+X7179+6O9cmTJ9U1Dz/8sMruvfdelZke64bpgLa4uDiVBf5ua9u2bV3d/9Zbbw2qrlhjOmwncF7irrvuUteUKVMm28eJiLzwwgsqmzZtWrZ1mR43Z84clQV7iJZb9evXd6xNhyYhcpw9e1ZlS5YsyfZxHTt2VNn48eNVVrJkSZUNHjxYZZMnT3asTQevrV69WmWVK1f+yzoROQJfG0RCeyBq1apVVfbll1+q7KOPPlIZMxt5n+m1wvR+2LJlS5UNGjTIsTbNuI4ePVplixYtykmJFzRu3Diox+UFfLIBAAAAwAqaDQAAAABW0GwAAAAAsIJmAwAAAIAVMTMgXrx4cZUFHjxkOojI5PXXX1eZ6eAhNx599FGVjRs3Lqh7wZ0TJ06ozDSUX6hQoaDuf8kll6gscFBWROT+++93rG+44QZ1jekwwAMHDqgsNwf2BerRo4fK3AyzI3Js2LBBZb/99pvKatas6VibvnzA7d+DiRMnqixfPuf/z3rppZfUNY888ojKli5d6uo5ETo7duxQWeBrTXx8vLomlEOvgQedipgPaDPZu3dvyOpA9DC93z711FMqM73OPPTQQ0E9p+kLUlq1aqWyV155xbG+5ZZbgnq+vIBPNgAAAABYQbMBAAAAwAqaDQAAAABW0GwAAAAAsCJmBsSBPxQtWjTsz2ka4L7yyisda9OA5lVXXaWygwcPBlXD1VdfrTLTwFpKSorKChQoENRzwhumU50TExNVlp6e7lgfP35cXRPsFyWI6BPJTQPippOfDx8+rLIyZcoEXQeyZzo9fufOnY71RRddpK4ZM2aMyjp06KAy06nOga95gV+aISJy9OhRlSUnJ6vsrrvuUhli07333qsy0z4K/DIe088Gd999t8r69u2rMtOXubz22muOdSy/j/LJBgAAAAAraDYAAAAAWEGzAQAAAMAKmg0AAAAAVjAgDkSIlStXqsw0dGZSrlw5lc2cOdOxbtmypbrGdHI6ol/JkiVVZhru/eWXXxzr06dPh7SO999/P9trrrjiCpUxDB5+06dPV1mnTp0ca9Pg/uOPP66ykSNHquzMmTMqMw2NBypcuLDKXn31VZXVqVMn23shNiQkJKjMdKp4t27dHOuLL75YXeP2CzLmzJmjsooVKzrWDRo0cHWvvIhPNgAAAABYQbMBAAAAwAqaDQAAAABWMLMRRqbfmS5btqwHlSASfPnll451nz591DVnz55Vmen32UePHq0y04F9iF333HOPyl588UXHesCAAeqatm3burp/RkaGygIP8atdu7a6JvDgK3ijadOmKluxYoVj/dZbb6lrTIeRBr62ibg7TLVr164q6969u8qYz0AoVK1a1esSYgafbAAAAACwgmYDAAAAgBU0GwAAAACsoNkAAAAAYAUD4kEwDTmasv/5n/9xrE2DvaaBTOQ9P//8s8q6dOniWJuGwU3Gjx+vsrvvvju4whAzTPumRYsWjvUrr7yirhkyZIjK9u7dq7KaNWuqLHAAvX379uqaEiVK6GIRERo2bPiXawBwg082AAAAAFhBswEAAADACpoNAAAAAFbQbAAAAACwggHxINSoUUNl1atXV1nggDhi1+zZs1W2c+fObB8XOMArwjA4QifwlHlOnQcAhBqfbAAAAACwgmYDAAAAgBU0GwAAAACsoNkAAAAAYAUD4kCEqFq1qsrmzZvnQSUAAAChwScbAAAAAKyg2QAAAABgBc0GAAAAACtoNgAAAABYwYB4iNSvX19lixYtCn8hiEhDhgxxlQEAAOQlfLIBAAAAwAqaDQAAAABW0GwAAAAAsIKZjRAZMGCAyoYNG+ZBJQAAAAgX5jL/Gp9sAAAAALCCZgMAAACAFTQbAAAAAKyg2QAAAABgBQPiIVK4cGGVZWVleVAJAAAAEBn4ZAMAAACAFTQbAAAAAKyg2QAAAABghauZDb/fLyIix48ft1oMossf++GP/WEL+w8m4dp///0c7EH8gf0Hr/EeDC/lZP+5ajYyMjJERCQ5OTkXZSGvysjIkMTERKv3F2H/wcz2/vvjOUTYg9DYf/Aa78Hwkpv95/O7aEmysrIkPT1d4uPjxefzhaxARDe/3y8ZGRmSlJQk+fLZ+4089h9MwrX/RNiD0Nh/8BrvwfBSTvafq2YDAAAAAHKKAXEAAAAAVtBsAAAAALCCZgMAAACAFTQbAAAAAKyg2XCpSpUq4vP51D8pKSlel4YYM2bMGPH5fNKvXz+vS0EM+fnnn+Xuu++WUqVKSeHCheWKK66Qzz//3OuyECOGDx+u3n9r1KjhdVmIIRkZGdKvXz+pXLmyFC5cWK677jrZvHmz12VFBVfnbEBk8+bNkpmZeWG9Y8cOuemmm6RTp04eVoVYs3nzZpk+fbrUrVvX61IQQ44cOSLXX3+9NG/eXFauXCllypSRXbt2SYkSJbwuDTGkdu3asmbNmgvr/Pn5EQbhc99998mOHTtkzpw5kpSUJHPnzpUWLVrI119/LRUqVPC6vIjG31SXypQp41iPGTNGqlWrJk2bNvWoIsSaEydOSLdu3WTGjBkyatQor8tBDBk7dqwkJyfLrFmzLmRVq1b1sCLEovz580v58uW9LgMx6NSpU/LOO+/IkiVLpEmTJiLyv5+2LVu2TKZOncp7cjb4NaognD17VubOnSu9evXigBuETUpKirRp00ZatGjhdSmIMUuXLpWGDRtKp06dpGzZsnLllVfKjBkzvC4LMWbXrl2SlJQkl1xyiXTr1k3S0tK8Lgkx4vz585KZmSmFChVy5IULF5YNGzZ4VFX0oNkIwuLFi+Xo0aPSo0cPr0tBjJg/f75s3bpVnnvuOa9LQQz64YcfZOrUqXLZZZfJqlWr5KGHHpK+ffvK7NmzvS4NMaJx48by+uuvy3vvvSdTp06VH3/8Uf7+979LRkaG16UhBsTHx8u1114rI0eOlPT0dMnMzJS5c+fKp59+Kvv37/e6vIjHCeJBaNmypRQoUECWLVvmdSmIAXv37pWGDRvK6tWrL8xqNGvWTOrXry8TJkzwtjjEhAIFCkjDhg3lk08+uZD17dtXNm/eLJ9++qmHlSFWHT16VCpXrizjx4+X3r17e
10OYsDu3bulV69esn79eomLi5MGDRpI9erVZcuWLfLNN994XV5E45ONHNqzZ4+sWbNG7rvvPq9LQYzYsmWLHDp0SBo0aCD58+eX/PnzS2pqqkycOFHy58/v+OICwIaLL75YatWq5chq1qzJr7HAM8WLF5fq1avL999/73UpiBHVqlWT1NRUOXHihOzdu1c2bdok586dk0suucTr0iIezUYOzZo1S8qWLStt2rTxuhTEiBtvvFG++uor2bZt24V/GjZsKN26dZNt27ZJXFyc1yUij7v++utl586djuy7776TypUre1QRYt2JEydk9+7dcvHFF3tdCmJM0aJF5eKLL5YjR47IqlWrpF27dl6XFPH4NqocyMrKklmzZkn37t35yj2ETXx8vNSpU8eRFS1aVEqVKqVywIbHHntMrrvuOnn22Welc+fOsmnTJnnllVfklVde8bo0xIgBAwbIbbfdJpUrV5b09HQZNmyYxMXFSdeuXb0uDTFi1apV4vf75fLLL5fvv/9eBg4cKDVq1JCePXt6XVrE4yfmHFizZo2kpaVJr169vC4FAMKmUaNGsmjRIhk8eLCMGDFCqlatKhMmTJBu3bp5XRpixL59+6Rr167y66+/SpkyZeRvf/ubbNy4UX0tPWDLsWPHZPDgwbJv3z4pWbKkdOjQQUaPHi0XXXSR16VFPAbEAQAAAFjBzAYAAAAAK2g2AAAAAFhBswEAAADACpoNAAAAAFbQbAAAAACwgmYDAAAAgBU0GwAAAACsoNkAAAAAYIWrE8SzsrIkPT1d4uPjxefz2a4JUcLv90tGRoYkJSVJvnz2+lb2H0zCtf9E2IPQ2H/wGu/B8FJO9p+rZiM9PV2Sk5NDUhzynr1790rFihWt3Z/9h79ie/+JsAfx59h/8BrvwfCSm/3nqtmIj4+/cMOEhITcV4Y84fjx45KcnHxhf9jC/oNJuPafCHsQGvsPXuM9GF7Kyf5z1Wz88bFZQkICGw2K7Y9V2X/4K+H4WJ89iD/D/oPXeA+Gl9zsPwbEAQAAAFhBswEAAADACpoNAAAAAFbQbAAAAACwgmYDAAAAgBU0GwAAAACsoNkAAAAAYAXNBgAAAAAraDYAAAAAWEGzAQAAAMAKmg0AAAAAVtBsAAAAALCCZgMAAACAFTQbAAAAAKyg2QAAAABgRX6vC4h0CxcuVNnEiRNV1qtXL5UVK1bMse7YsWPoCgOAXNq5c6dj/Z///Edds3TpUpWlpqaqzOfzZft8LVu2VNm//vUvldWqVSvbeyFyffbZZypbsmSJysaMGWO1Dr/f71jfc8896poJEyaorGTJkrZKAmISn2wAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGBFTA+I79ixw7Hu27evumbTpk0qO3PmjMo2bNigsiJFijjWU6ZMUdeYhs3r1KmjiwUAg++//15lX3zxhavrRowY4VifPXvW1XOahsGbNWumsrJlyzrWGzduVNe0atVKZabX4gEDBriqDd5r27atyn777TeVxcXFWa0jMzPTsZ43b566Jn9+/WPQ9ddfr7LevXuHrjDEpMWLF6usffv2Ktu6davKrrzyShslhQ2fbAAAAACwgmYDAAAAgBU0GwAAAACsoNkAAAAAYEVMD4gHDiF+9NFHIb1/4CC56f7t2rVT2TvvvKOy+vXrh6wuRIZff/3VsT59+rS6pkKFCiF7vu3bt6vMNJzbunVrlb322mshqwPB++abb1T2xBNPqMx0EribE74TEhJU1qNHD5UNGTJEZcWLF1fZRRdd5Fib6r/llltU9sILL6isTZs2jnXNmjXVNQi/UaNGqSwjI8ODSoIzZ84clS1atEhlp06dcqz79OljrSbkTf/+979dXWd6nWRAHAAAAAAMaDYAAAAAWEGzAQAAAMCKmJnZMP1eqenAvkCNGzdWWcGCBVXm9/tVFnhA1meffaauSUtLU9mXX36pMmY2oltWVpbKAn/n96efflLXfPrppyGrYdasWSo7ePCgynbt2hWy50TuBP7urmnGZt++fa7uVa9ePZXdfPPNjrXp99ArVqzo6v5umOYsBg4cqDJTHSkpKY71e++9p64pUKBALqpDMLZs2aKyc+fOeVBJ6Jw4cUJlgQf33nTTTeqaqlWrqow9GbsCfy48evSoq8e1aNHCQjXe4pMNAAAAAFbQbAAAAACwgmYDAAAAgBU0GwAAAACsiJkB8aefflplcXFxjnXbtm3VNbNnz1ZZsWLFXD1n4MDvZZdd5upxyHu2bdumsgULFjjWNWrUsFrDm2++afX+CL18+Zz/P8jtMPill16qMtOhokWLFg2usBC68847VWYaEE9NTXWsjx07pq4pU6ZM6AqDYvryEtOXnORFgYftmg7f/frrr1XG+37sCvxiojVr1qhr7r//fpWVLFnSWk1e4ZMNAAAAAFbQbAAAAACwgmYDAAAAgBU0GwAAAACsiPoBcdPJ4KZhcNMJ33fddZdjbRoGd+v7779X2ZIlSxzrzMxMV/cy1YroNmbMmLA/54gRIxzrI0eOuHpcYmKijXIQhMsvv9yxNg3jBr7OiIg8/PDD1moKNdMwZPHixVXm9vRd2LNq1SqVmYbGY9Vzzz2nstdee82DShBuWVlZKvvggw8c64IFC6prevToobL8+aP+R3OFTzYAAAAAWEGzAQAAAMAKmg0AAAAAVtBsAAAAALAi6qdQTMNXgSeDi5iHs3v37h2yOjZs2KCyQYMGZVtXoUKFVBYfHx+yuhB+ppNlV6xYYfU5T58+rbLFixc71m6/oGDmzJmhKAkWVKhQQWXRNAzululk88ATw3fv3q2u4QTx0Dp37pxjffLkSavPly+f/v+fbdu2Vdmzzz7r6n6Bw+um9/xQ/plWrlwZsnshuowbN05l69atc6z79++vrrnuuutslRRR+GQDAAAAgBU0GwAAAACsoNkAAAAAYAXNBgAAAAAron5A3K3hw4errHLlyiG7/9KlS4N6nGnQ7Y477shtOfDQkCFDVOZmCPHJJ58M+jlnz56tsi+++CLbx3Xs2FFl5cqVC7oOIBRMp4qnp6c71qZheYTWZ5995liPHj3a6vOZ/ru//fbbQd/vsssuc6xNA+h9+/ZV2YEDB4J+TuR9pvfzWbNmZfu4a665xkY5UYFPNgAAAABYQbMBAAAAwAqaDQAAAABWRP3MxkcffaSy8+fPqyzY+YyjR4+qzPQ7noEHqInoQ/xMj3vooYeCqguRwTQL9N1336nM5/OprE2bNo713XffHXQdEyZMyPY58+fXf90HDhwY9HMCtnz11VcqC9zPpgNREd0efPBBq/dv3769yt544w2VLV++PKj7m36X3zRP171796Duj8gwffp0lZne95944gnHukOHDtZqinR8sgEAAADACpoNAAAAAFbQbAAAAACwgmYDAAAAgBVRPyBu+2Cnbdu2qWzevHmuHlu6dGnH+t5771XXmIZ2ET3Wrl3r6rrExESVzZgxw7EO/EKBP7Nq1SqVBR54ZmIavrz66qtdPSfypsDXsq5du3peA2LXXXfd5XUJuXLq
1CmVLVmyRGUMiEeP7du3q2zUqFEqq1SpkspSUlIca9OhkrEidv/kAAAAAKyi2QAAAABgBc0GAAAAACtoNgAAAABYwXTyfzENg3fu3FllmZmZKqtWrZrKAk8hvfzyy4MvDhHJNPhtYhoMCzydfuHCheqaESNGqCwjI0NlZ86cybaG5OTkbK9B9Pntt99UNmXKFJU9/fTT2d6rW7duKrviiitU1rZtW5X94x//UJmbPffrr7+qzO/3qyzw7xpfrhF+pve+UDL9d7fN9Jy2/5yIHoMGDVLZkSNHVLZgwQKVmYbGYxWfbAAAAACwgmYDAAAAgBU0GwAAAACsoNkAAAAAYEVMT9gdOHDAsb7jjjvUNaZBoHLlyqkscBhchIHwWDBz5kyVPfXUUyozDX/XqlUrqOc0DTT6fL5sH3frrbcG9XyIbKbTbCdMmKAyN3ukbNmyKktLS1PZ6NGjVfbqq6+q7P7773esTV948MMPP6jMVGvLli0d6xIlSqhrYFdcXJzV+7vZo7nx2Wefqcy0/0L559y9e7fKNm3apLKrr746ZM+J4KxatUpla9euVVmLFi1Udv3111upKa/gkw0AAAAAVtBsAAAAALCCZgMAAACAFTQbAAAAAKyI6QHx+fPnO9Z79+5V11SpUkVlpmFfhsFjk+nLAmbMmKEy0+nK/fv3d6x///13dc3WrVtd1WEaGm/VqpVjXbVqVVf3QuQyDX5PnjxZZabTtU0DqPPmzXOs4+Pj1TWmvfXBBx+ozDQ0PnbsWMf6lVdeUdeYvoTDVL/p7xDwV7755hvHOvALC0zXhNrXX3+tso8++khlDIh7784771SZ6TT5oUOHqqxIkSJWasor+GQDAAAAgBU0GwAAAACsoNkAAAAAYEXMzGysW7dOZW+88Ua2jws8SEpEpF69eqEoCTGkUaNGKgv8vd1Tp06pa6699lqVbd++XWWmw7Aeeughx7pQoULZ1onIcuzYMcf6xRdfVNecP39eZePGjVPZ448/HrK6OnTo4Cq78sorHesvv/zS1f1NB17ecMMNLqtDtDK93w4ZMkRl//znP13d7+zZs471iRMngissF1q3bq2yhx9+OOx1QFuwYIFjbdofjzzyiMqaNGliraa8ik82AAAAAFhBswEAAADACpoNAAAAAFbQbAAAAACwImYGxE3DhXFxcdk+7vDhwyrr1auXykwHX5mGdoMVeP9BgwapazhYMLrt2bNHZW4HahMTE1XGIVHR74knnnCs9+3bp67p2rWrykI5DJ4bt99+u2Ptdj8jNpkO1g38ooucCDyQzc17fqiZDqgsXLhw2OuAdtdddznWpvdRt19GgL/GJxsAAAAArKDZAAAAAGAFzQYAAAAAK2g2AAAAAFiRJwfEN2zYoDLTAHfg8JjJ22+/7eo5wz0gnpqaqq754YcfQvZ8iC7XX3+9ysqXL+9BJQilxYsXO9ZFihRR13Tq1ClM1fyf06dPq+z9999X2dixYx1r0+ukyY4dO1Q2cuRIx3ro0KGu7gWES7FixVTWp08fDyqJbaaf7UaMGKGyrKwsx3ratGnqmjJlyoSusBjGJxsAAAAArKDZAAAAAGAFzQYAAAAAK2g2AAAAAFgR9QPipkHFiRMnqsw0rB3K00RNA0k27z948OCQ3RvR78knn/S6BFgQ+LoVeOKtiEi7du3CVc4FU6ZMUdnAgQOzfZxpmL1ChQoqmzBhQrbP+cgjj6hrihcvnm0NcO+qq65yrE0n0//rX/8KVzkRr1y5ciq74YYbPKgktp09e1ZlpgHxwBPDW7VqZa2mWMcnGwAAAACsoNkAAAAAYAXNBgAAAAAron5mY+PGjSpbtGhRyO7ft29flQX+np8Xbr75Zq9LQIjNmjVLZW4PQStbtmyoy0EEKFy4sGP9888/W32+Y8eOqeyJJ55Q2YwZM1Rmmou74447HOs33nhDXTNz5kxXtR06dMixPnfunKvHIXiB+69x48bqGtPhoQcOHLBWk1cqVarkWNerV09dM2nSpHCVg//v/PnzKhsyZIirxy5fvtyxTkhICElN0PhkAwAAAIAVNBsAAAAArKDZAAAAAGAFzQYAAAAAK6J+QDw3brzxRsd6+vTp6hrTgVP588f0vzZYcsUVV6jMNHTrdmgc0W/hwoWOdc+ePdU1psOqmjRporLdu3er7P3333esTV+4sW/fPpUVLVpUZaNHj1bZAw884FgXLFhQXdOyZUuVmV5309PTVYbwat++vavrevToobLff/89xNWEV4sWLRxr088LCL9ff/1VZaZDQQsVKqSya665xkZJMOCTDQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArIj6SecaNWqo7LrrrnP12NmzZzvW5cqVC0lNQDAaNmzodQmIMFdddZVjvWLFCnVN3759VTZ8+HCVmb5swI2uXbuq7JlnnlFZtWrVgrq/6XGbNm1S2ZEjRxzrEiVKBPV8CC3T0HjFihVVtnTpUpWNGTPGSk1/CDzle8qUKUHfq2zZsrktBxbs2rXL1XUDBw5UGV/2Ez58sgEAAADACpoNAAAAAFbQbAAAAACwgmYDAAAAgBVRPx1Tvnx5laWmpnpQCQDYZRq8fffddz2oxK6LL77YVYbI1KhRI1fZyJEjw1EO8rDNmzerzHRa+IgRI8JRDv4En2wAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGBF1A+IA3lF5cqVVfbaa6+prGfPnuEoBwCAiPbYY4+5yuAtPtkAAAAAYAXNBgAAAAAraDYAAAAAWMHMBhAhChcurLIePXq4ygAAACIRn2wAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGAFzQYAAAAAK2g2AAAAAFhBswEAAADACpoNAAAAAFa4OtTP7/eLiMjx48etFoPo8sd++GN/2ML+g0m49t9/Pwd7EH9g/8FrvAfDSznZf66ajYyMDBERSU5OzkVZyKsyMjIkMTHR6v1F2H8ws73//ngOEfYgNPYfvMZ7MLzkZv/5/C5akqysLElPT5f4+Hjx+XwhKxDRze/3S0ZGhiQlJUm+fPZ+I4/9B5Nw7T8R9iA09h+8xnswvJST/eeq2QAAAACAnGJAHAAAAIAVNBsAAAAArKDZAAAAAGAFzQYAAAAAK2g2XHjuueekUaNGEh8fL2XLlpXbb79ddu7c6XVZiFFjxowRn88n/fr187oUxAheAxEJJk+eLFWqVJFChQpJ48aNZdOmTV6XhBixfv16ue222yQpKUl8Pp8sXrzY65KiCs2GC6mpqZKSkiIbN26U1atXy7lz5+Tmm2+WkydPel0aYszmzZtl+vTpUrduXa9LQQzhNRBeW7BggfTv31+GDRsmW7dulXr16knLli3l0KFDXpeGGHDy5EmpV6+eTJ482etSohJffRuEw4cPS9myZSU1NVWaNGnidTmIESdOnJAGDRrIlClTZNSoUVK/fn2ZMGGC12UhBvEaiHBr3LixNGrUSCZNmiQi/3v2Q3JysjzyyCMyaNAgj6tDLPH5fLJo0SK5/fbbvS4lavDJRhCOHTsmIiIlS5b
0uBLEkpSUFGnTpo20aNHC61IQ43gNRDidPXtWtmzZ4njty5cvn7Ro0UI+/fRTDysD4EZ+rwuINllZWdKvXz+5/vrrpU6dOl6Xgxgxf/582bp1q2zevNnrUhDjeA1EuP3yyy+SmZkp5cqVc+TlypWTb7/91qOqALhFs5FDKSkpsmPHDtmwYYPXpSBG7N27Vx599FFZvXq1FCpUyOtyEON4DQQA5ATNRg706dNHli9fLuvXr5eKFSt6XQ5ixJYtW+TQoUPSoEGDC1lmZqasX79eJk2aJGfOnJG4uDgPK0Ss4DUQXihdurTExcXJwYMHHfnBgwelfPnyHlUFwC1mNlzw+/3Sp08fWbRokXzwwQdStWpVr0tCDLnxxhvlq6++km3btl34p2HDhtKtWzfZtm0bjQas4zUQXipQoIBcddVVsnbt2gtZVlaWrF27Vq699loPKwPgBp9suJCSkiJvvvmmLFmyROLj4+XAgQMiIpKYmCiFCxf2uDrkdfHx8ep344sWLSqlSpXid+YRFrwGwmv9+/eX7t27S8OGDeXqq6+WCRMmyMmTJ6Vnz55el4YYcOLECfn+++8vrH/88UfZtm2blCxZUipVquRhZdGBr751wefzGfNZs2ZJjx49wlsMICLNmjXjq28RNrwGIhJMmjRJnn/+eTlw4IDUr19fJk6cKI0bN/a6LMSAdevWSfPmzVXevXt3ef3118NfUJSh2QAAAABgBTMbAAAAAKyg2QAAAABgBc0GAAAAACtoNgAAAABYQbMBAAAAwAqaDQAAAABW0GwAAAAAsIJmAwAAAIAVNBsAAAAArKDZAAAAAGAFzQYAAAAAK2g2AAAAAFjx/wC1ALuLb/YreQAAAABJRU5ErkJggg==", 59 | "text/plain": [ 60 | "
" 61 | ] 62 | }, 63 | "metadata": {}, 64 | "output_type": "display_data" 65 | } 66 | ], 67 | "source": [ 68 | "from simple_deep_learning.mnist_extended.mnist import display_digits\n", 69 | "\n", 70 | "display_digits(images=train_images, labels=train_labels, num_to_display=20)" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "### Create the semantic segmentation dataset" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 3, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "import numpy as np\n", 87 | "np.random.seed(seed=9)\n", 88 | "\n", 89 | "from simple_deep_learning.mnist_extended.semantic_segmentation import (create_semantic_segmentation_dataset, display_segmented_image,\n", 90 | " display_grayscale_array, plot_class_masks)\n", 91 | "\n", 92 | "train_x, train_y, test_x, test_y = create_semantic_segmentation_dataset(num_train_samples=100,\n", 93 | " num_test_samples=10,\n", 94 | " image_shape=(60, 60),\n", 95 | " num_classes=5)" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 6, 101 | "metadata": {}, 102 | "outputs": [ 103 | { 104 | "name": "stdout", 105 | "output_type": "stream", 106 | "text": [ 107 | "Train image shape: (60, 60, 1)\n", 108 | "Train label shape: (60, 60, 5)\n", 109 | "Total train samples: 100\n", 110 | "Total test samples: 10\n" 111 | ] 112 | } 113 | ], 114 | "source": [ 115 | "import numpy as np\n", 116 | "\n", 117 | "i = np.random.randint(len(train_x))\n", 118 | "print(f'Train image shape: {train_x[i].shape}')\n", 119 | "print(f'Train label shape: {train_y[i].shape}')\n", 120 | "\n", 121 | "print(f'Total train samples: {len(train_x)}')\n", 122 | "print(f'Total test samples: {len(test_x)}')" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": 7, 128 | "metadata": {}, 129 | "outputs": [ 130 | { 131 | "name": "stdout", 132 | "output_type": "stream", 133 | "text": [ 134 | "(60, 60)\n" 135 | ] 136 | }, 137 | { 138 | "data": { 139 | "text/plain": [ 140 | "" 141 | ] 142 | }, 143 | "execution_count": 7, 144 | "metadata": {}, 145 | "output_type": "execute_result" 146 | }, 147 | { 148 | "data": { 149 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAaAAAAGfCAYAAAAZGgYhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAcMklEQVR4nO3df3DU9b3v8deGZBdKyIYgbIgkCFclqDd4jRL2qq2FaIZ6GSxxDnWYW7ScOtrAFdKONXeq6ExnwtUZUTSgYy2cnilNpR304FQsN0q81oAQZAR/pOBwmjhhF+2cbEJqNiH53D9s93Rlo91kwzvZPB8z3xnz+X73+/18khmefpPvJh7nnBMAABdYhvUEAADjEwECAJggQAAAEwQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYyBypE9fV1emxxx5TKBTSggUL9NRTT2nhwoVf+bqBgQG1t7drypQp8ng8IzU9AMAIcc6pq6tLBQUFysj4kvscNwLq6+ud1+t1P//5z917773nvv/977vc3FwXDoe/8rVtbW1OEhsbGxvbGN/a2tq+9N97j3Op/2WkZWVluu666/T0009L+vyuprCwUOvWrdMDDzzwpa+NRCLKzc3VDfqWMpWV6qkBAEbYOfXpTf1OHR0d8vv9gx6X8m/B9fb2qrm5WTU1NbGxjIwMlZeXq6mp6bzjo9GootFo7OOurq6/TixLmR4CBABjzl9va77qxygpfwjh008/VX9/vwKBQNx4IBBQKBQ67/ja2lr5/f7YVlhYmOopAQBGIfOn4GpqahSJRGJbW1ub9ZQAABdAyr8Fd9FFF2nChAkKh8Nx4+FwWPn5+ecd7/P55PP5Uj0NAMAol/I7IK/Xq9LSUjU0NMTGBgYG1NDQoGAwmOrLAQDGqBF5H1B1dbVWr16ta6+9VgsXLtQTTzyh7u5u3XXXXSNxOQDAGDQiAVq5cqU++eQTPfTQQwqFQrr66qu1d+/e8x5MAACMXyPyPqDh6OzslN/v101azmPYADAGnXN92q+XFIlElJOTM+hx5k/BAQDGJwIEADBBgAAAJggQAMAEAQIAmCBAAAATBAgAYIIAAQBMECAAgAkCBAAwQYAAACYIEADABAECAJggQAAAEwQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABMECABgggABAEwQIACACQIEADBBgAAAJggQAMAEAQIAmCBAAAATBAgAYIIAAQBMECAAgAkCBAAwQYAAACYIEADABAECAJggQAAAEwQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABMECABgggABAEwQIACACQIEADBBgAAAJpIO0BtvvKFly5apoKBAHo9HL774Ytx+55weeughzZw5U5MmTVJ5eblOnDiRqvkCANJE0gHq7u7WggULVFdXl3D/o48+qi1btuiZZ57RwYMHNXnyZFVUVKinp2fYkwUApI/MZF+wdOlSLV26NOE+55yeeOIJ/eQnP9Hy5cslSb/4xS8UCAT04osv6jvf+c55r4lGo4pGo7GPOzs7k50SAGAMSunPgE6dOqVQKKTy8vLYmN/vV1lZmZqamhK+pra2Vn6/P7YVFhamckoAgFEqpQEKhUKSpEAgEDceCARi+76opqZGkUgktrW1taVySgCAUSrpb8Glms/nk8/ns54GAOACS+kdUH5+viQpHA7HjYfD4dg+AACkFAdozpw5ys/PV0NDQ2yss7NTBw8eVDAYTOWlAABjXNLfgjt79qxOnjwZ+/jUqVM6evSo8vLyVFRUpPXr1+unP/2pLrvsMs2ZM0cPPvigCgoKdNttt6Vy3gCAMS7pAB0+fFjf/OY3Yx9XV1dLklavXq0dO3bo/vvvV3d3t+6++251dHTohhtu0N69ezVx4sTUzRoAMOZ5nHPOehJ/r7OzU36/XzdpuTI9WdbTAQAk6Zzr0369pEgkopycnEGP43fBAQBMECAAgAkCBAAwQYAAACYIEADABAECAJggQAAAEwQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABMECABgggABAEwQIACACQIEADBBgAAAJggQAMAEAQIAmCBAAAATBAgAYIIAAQBMECAAgAkCBAAwQYAAACYIEADABAECAJggQAAAEwQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABMECABgggABAEwQIACACQIEADBBgAAAJggQAMAEAQIAmCBAAAATBAgAYIIAAQBMECAAgImkAlRbW6vrrrtOU6ZM0YwZM3TbbbeppaUl7pienh5VVVVp2rRpys7OVmVlpcLhcEonDQAY+5IKUGNjo6qqqnTgwAHt27dPfX19uuWWW9Td3R07ZsOGDdqzZ4927dqlxsZGtbe3a8WKFSmfOABgbPM459xQX/zJJ59oxowZamxs1Ne//nVFIhFNnz5dO3fu1O233y5J+vDDDzV//nw1NTVp0aJFX3nOzs5O+f1+3aTlyvRkDXVqAAAj51yf9uslRSIR5eTkDHrcsH4GFIlEJEl5eXmSpObmZvX19am8vDx2THFxsYqKitTU1JTwHNFoVJ2dnXEbACD9DTlAAwMDWr9+va6//npdddVVkqRQKCSv16vc3Ny4YwOBgEKhUMLz1NbWyu/3x7bCwsKhTgkAMIYMOUBVVVU6fvy46uvrhzWBmpoaRSKR2NbW1jas8wEAxobMobxo7dq1evnll/XGG29o1qxZsfH8/Hz19vaqo6Mj7i4oHA4rPz8/4bl8Pp98Pt9QpgEAGMOSugNyzmnt2rXavXu3XnvtNc2ZMyduf2lpqbKystTQ0BAba2lpUWtrq4LBYGpmDABIC0ndAVVVVWnnzp166aWXNGXKlNjPdfx+vyZNmiS/3681a9aourpaeXl5ysnJ0bp16xQMBv+hJ+AAAONHUgHatm2bJOmmm26KG9++fbvuvPNOSdLmzZuVkZGhyspKRaNRVVRUaOvWrSmZLAAgfQzrfUAjgfcBAcDYdkHeBwQAwFARIACACQIEADBBgAAAJggQAMAEAQIAmCBAAAATBAgAYIIAAQBMECAAgAkCBAAwQYAAACYIEADABAECAJggQAAAEwQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABMECABgggABAEwQIACACQIEADBBgAAAJggQAMAEAQIAmCBAAAATBAgAYIIAAQBMECAAgAkCBAAwQYAAACYIEADABAECAJggQAAAEwQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABMECABgggABAEwQIACACQIEADCRVIC2bdumkpIS5eTkKCcnR8FgUK+88kpsf09Pj6qqqjRt2jRlZ2ersrJS4XA45ZMGAIx9SQVo1qxZ2rRpk5qbm3X48GEtXrxYy5cv13vvvSdJ2rBhg/bs2aNdu3apsbFR7e3tWrFixYhMHAAwtnmcc244J8jLy9Njjz2m22+/XdOnT9fOnTt1++23S5
I+/PBDzZ8/X01NTVq0aNE/dL7Ozk75/X7dpOXK9GQNZ2oAAAPnXJ/26yVFIhHl5OQMetyQfwbU39+v+vp6dXd3KxgMqrm5WX19fSovL48dU1xcrKKiIjU1NQ16nmg0qs7OzrgNAJD+kg7QsWPHlJ2dLZ/Pp3vuuUe7d+/WFVdcoVAoJK/Xq9zc3LjjA4GAQqHQoOerra2V3++PbYWFhUkvAgAw9iQdoHnz5uno0aM6ePCg7r33Xq1evVrvv//+kCdQU1OjSCQS29ra2oZ8LgDA2JGZ7Au8Xq8uvfRSSVJpaakOHTqkJ598UitXrlRvb686Ojri7oLC4bDy8/MHPZ/P55PP50t+5gCAMW3Y7wMaGBhQNBpVaWmpsrKy1NDQENvX0tKi1tZWBYPB4V4GAJBmkroDqqmp0dKlS1VUVKSuri7t3LlT+/fv16uvviq/3681a9aourpaeXl5ysnJ0bp16xQMBv/hJ+AAAONHUgE6c+aMvvvd7+r06dPy+/0qKSnRq6++qptvvlmStHnzZmVkZKiyslLRaFQVFRXaunXriEwcADC2Dft9QKnG+4AAYGwb8fcBAQAwHAQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABMECABgggABAEwQIACACQIEADBBgAAAJpL+k9yAJP3xmYUJx08ueybh+POdsxKO/3b+jJTNCcDYwh0QAMAEAQIAmCBAAAATBAgAYIIAAQBM8BQcvlTm3EsSjv/fpY8nHB/QpITjyyb/MeH4b8VTcMB4xR0QAMAEAQIAmCBAAAATBAgAYIIAAQBM8BQcvtSJf56ZcLwoM/HTboP5xls/SDg+R+8mPScA6YE7IACACQIEADBBgAAAJggQAMAEAQIAmOApuDHgP+4MJhw/e7En4XjR/3k74bg7dy7pa3/3f7ye9GsS8R7NTsl5AKQP7oAAACYIEADABAECAJggQAAAEwQIAGCCp+DGgNX3v5xw/G7/vyccv7Z7XcLx/CfeSvraUzO7kzr+g76+hOOFezsSjg8kOyEAaYM7IACACQIEADBBgAAAJggQAMAEAQIAmOApuFEkeut1Ccfv8T87yCsS/y64u+/ek3D8356YlvScJgzynFrGINf+f3+5LOH4wNH3k742gPTGHRAAwAQBAgCYIEAAABMECABgggABAEwMK0CbNm2Sx+PR+vXrY2M9PT2qqqrStGnTlJ2drcrKSoXD4eHOEwCQZoYcoEOHDunZZ59VSUlJ3PiGDRu0Z88e7dq1S42NjWpvb9eKFSuGPdHxwHPOJdw+c70JtwG5xJvLSLgNRb8yEm4X4toA0tuQ/mU4e/asVq1apeeee05Tp06NjUciET3//PN6/PHHtXjxYpWWlmr79u166623dODAgZRNGgAw9g0pQFVVVbr11ltVXl4eN97c3Ky+vr648eLiYhUVFampqSnhuaLRqDo7O+M2AED6S/o3IdTX1+vIkSM6dOjQeftCoZC8Xq9yc3PjxgOBgEKhUMLz1dbW6pFHHkl2GgCAMS6pO6C2tjbdd999+uUvf6mJEyemZAI1NTWKRCKxra2tLSXnBQCMbkkFqLm5WWfOnNE111yjzMxMZWZmqrGxUVu2bFFmZqYCgYB6e3vV0dER97pwOKz8/PyE5/T5fMrJyYnbAADpL6lvwS1ZskTHjh2LG7vrrrtUXFysH//4xyosLFRWVpYaGhpUWVkpSWppaVFra6uCwWDqZp2mvP8RTTge7j+XcLwoM2skpzMkV0z8OOH43ksWJBw/9++tIzkdAKNYUgGaMmWKrrrqqrixyZMna9q0abHxNWvWqLq6Wnl5ecrJydG6desUDAa1aNGi1M0aADDmpfzPMWzevFkZGRmqrKxUNBpVRUWFtm7dmurLAADGuGEHaP/+/XEfT5w4UXV1daqrqxvuqQEAaYy3qAMATBAgAIAJ/iT3KBK5bHLC8aLMSSN63Qlf8uh77oTk3pf1fs+shOM87Qbgi7gDAgCYIEAAABMECABgggABAEwQIACACZ6CG0X+6/869tUH/QMG+31sz/3w1oTjZ+f2D3quf8p+Palrf3vKewnHj799c8Lxo09dnXA8918T//0oAOmDOyAAgAkCBAAwQYAAACYIEADABAECAJjgKbgLrP1H/33Qfa8WJv67Sf3Ok9Q1bprYl3D8neqnE45P8Az+/yHJXnvmhK8lHN968R8Sjl8d+G8Jx3OTuiqAsYg7IACACQIEADBBgAAAJggQAMAEAQIAmOApuAsss2fwff1uIOH4gNwIzeavBrnuUK4ddYmfwDvem5Vw/Ms+HwDSG3dAAAATBAgAYIIAAQBMECAAgAkCBAAwQYAAACZ4DPsCm/H0W4PuC3ZVXcCZ/Keifz4x6L5fzX01qXNtj8xLOP7ylVMTjs/Q4J8PAOmNOyAAgAkCBAAwQYAAACYIEADABAECAJjgKbhRZOq/NJlc99i3SgbfOTe5cz35u28lHP8vslkbgNGLOyAAgAkCBAAwQYAAACYIEADABAECAJjgKTik1IRe6xkAGCu4AwIAmCBAAAATBAgAYIIAAQBMECAAgAmegoPuL/n9oPsy5LmAMwEwnnAHBAAwQYAAACYIEADABAECAJggQAAAE0kF6OGHH5bH44nbiouLY/t7enpUVVWladOmKTs7W5WVlQqHwymfNFLrzpz2QbcBuaQ2APhHJX0HdOWVV+r06dOx7c0334zt27Bhg/bs2aNdu3apsbFR7e3tWrFiRUonDABID0m/DygzM1P5+fnnjUciET3//PPauXOnFi9eLEnavn275s+frwMHDmjRokXDny0AIG0kfQd04sQJFRQUaO7cuVq1apVaW1slSc3Nzerr61N5eXns2OLiYhUVFampqWnQ80WjUXV2dsZtAID0l1SAysrKtGPHDu3du1fbtm3TqVOndOONN6qrq0uhUEher1e5ublxrwkEAgqFQoOes7a2Vn6/P7YVFhYOaSEAgLElqW/BLV26NPbfJSUlKisr0+zZs/XCCy9o0qRJQ5pATU2NqqurYx93dnYSIQAYB4b1u+Byc3N1+eWX6+TJk7r55pvV29urjo6OuLugcDic8GdGf+Pz+eTz+YYzDYwihfui1lMAMEYM631AZ8+e1UcffaSZM2eqtLRUWVlZamhoiO1vaWlRa2urgsHgsCcKAEgvSd0B/ehHP9KyZcs0e/Zstbe3a+PGjZowYYLuuOMO+f1+rVmzRtXV1crLy1NOTo7WrVunYDDIE3AAgPMkFaCPP/5Yd9xxh/785z9r+vTpuuGGG3TgwAFNnz5dkrR582ZlZGSosrJS0WhUFRUV2rp164hMHAAwtiUVoPr6+i/dP3HiRNXV1amurm5YkwIApD9+FxwAwAQBAgCY4E9yjyMd/3OwpxGPJH2uU+d6Eo57P/g44Xh/0lcAkO64AwIAmCBAAAATBAgAYIIAAQBMECAAgAmeghtHcv818d9lev5/zxr0Ncsm/zHh+G3P3Z9wvDD8VvITAzAucQcEADBBgAAAJggQAMAEAQIAmCBAAAATPAUH/Xb+j
MH3KfG+QvG0G4Dh4Q4IAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABMECABgggABAEwQIACACQIEADBBgAAAJggQAMAEAQIAmCBAAAATBAgAYIIAAQBMECAAgAkCBAAwQYAAACYIEADABAECAJggQAAAEwQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABMECABgggABAEwQIACAiUzrCXyRc06SdE59kjOeDAAgaefUJ+k//z0fzKgLUFdXlyTpTf3OeCYAgOHo6uqS3+8fdL/HfVWiLrCBgQG1t7drypQp6urqUmFhodra2pSTk2M9tQums7OTdY+TdY/HNUvjc93jac3OOXV1damgoEAZGYP/pGfU3QFlZGRo1qxZkiSPxyNJysnJSfsvWCKse/wYj2uWxue6x8uav+zO5294CAEAYIIAAQBMjOoA+Xw+bdy4UT6fz3oqFxTrHj/rHo9rlsbnusfjmr/KqHsIAQAwPozqOyAAQPoiQAAAEwQIAGCCAAEATBAgAICJUR2guro6XXLJJZo4caLKysr09ttvW08ppd544w0tW7ZMBQUF8ng8evHFF+P2O+f00EMPaebMmZo0aZLKy8t14sQJm8mmSG1tra677jpNmTJFM2bM0G233aaWlpa4Y3p6elRVVaVp06YpOztblZWVCofDRjNOjW3btqmkpCT2LvhgMKhXXnkltj8d1/xFmzZtksfj0fr162Nj6bjuhx9+WB6PJ24rLi6O7U/HNQ/VqA3Qr3/9a1VXV2vjxo06cuSIFixYoIqKCp05c8Z6ainT3d2tBQsWqK6uLuH+Rx99VFu2bNEzzzyjgwcPavLkyaqoqFBPT88FnmnqNDY2qqqqSgcOHNC+ffvU19enW265Rd3d3bFjNmzYoD179mjXrl1qbGxUe3u7VqxYYTjr4Zs1a5Y2bdqk5uZmHT58WIsXL9by5cv13nvvSUrPNf+9Q4cO6dlnn1VJSUnceLqu+8orr9Tp06dj25tvvhnbl65rHhI3Si1cuNBVVVXFPu7v73cFBQWutrbWcFYjR5LbvXt37OOBgQGXn5/vHnvssdhYR0eH8/l87le/+pXBDEfGmTNnnCTX2NjonPt8jVlZWW7Xrl2xYz744AMnyTU1NVlNc0RMnTrV/exnP0v7NXd1dbnLLrvM7du3z33jG99w9913n3Mufb/WGzdudAsWLEi4L13XPFSj8g6ot7dXzc3NKi8vj41lZGSovLxcTU1NhjO7cE6dOqVQKBT3OfD7/SorK0urz0EkEpEk5eXlSZKam5vV19cXt+7i4mIVFRWlzbr7+/tVX1+v7u5uBYPBtF9zVVWVbr311rj1Sen9tT5x4oQKCgo0d+5crVq1Sq2trZLSe81DMep+G7Ykffrpp+rv71cgEIgbDwQC+vDDD41mdWGFQiFJSvg5+Nu+sW5gYEDr16/X9ddfr6uuukrS5+v2er3Kzc2NOzYd1n3s2DEFg0H19PQoOztbu3fv1hVXXKGjR4+m7Zrr6+t15MgRHTp06Lx96fq1Lisr044dOzRv3jydPn1ajzzyiG688UYdP348bdc8VKMyQBgfqqqqdPz48bjvj6ezefPm6ejRo4pEIvrNb36j1atXq7Gx0XpaI6atrU333Xef9u3bp4kTJ1pP54JZunRp7L9LSkpUVlam2bNn64UXXtCkSZMMZzb6jMpvwV100UWaMGHCeU+GhMNh5efnG83qwvrbOtP1c7B27Vq9/PLLev3112N//0n6fN29vb3q6OiIOz4d1u31enXppZeqtLRUtbW1WrBggZ588sm0XXNzc7POnDmja665RpmZmcrMzFRjY6O2bNmizMxMBQKBtFz3F+Xm5uryyy/XyZMn0/ZrPVSjMkBer1elpaVqaGiIjQ0MDKihoUHBYNBwZhfOnDlzlJ+fH/c56Ozs1MGDB8f058A5p7Vr12r37t167bXXNGfOnLj9paWlysrKilt3S0uLWltbx/S6ExkYGFA0Gk3bNS9ZskTHjh3T0aNHY9u1116rVatWxf47Hdf9RWfPntVHH32kmTNnpu3Xesisn4IYTH19vfP5fG7Hjh3u/fffd3fffbfLzc11oVDIemop09XV5d555x33zjvvOEnu8ccfd++8847705/+5JxzbtOmTS43N9e99NJL7t1333XLly93c+bMcZ999pnxzIfu3nvvdX6/3+3fv9+dPn06tv3lL3+JHXPPPfe4oqIi99prr7nDhw+7YDDogsGg4ayH74EHHnCNjY3u1KlT7t1333UPPPCA83g87ve//71zLj3XnMjfPwXnXHqu+4c//KHbv3+/O3XqlPvDH/7gysvL3UUXXeTOnDnjnEvPNQ/VqA2Qc8499dRTrqioyHm9Xrdw4UJ34MAB6yml1Ouvv+4knbetXr3aOff5o9gPPvigCwQCzufzuSVLlriWlhbbSQ9TovVKctu3b48d89lnn7kf/OAHburUqe5rX/ua+/a3v+1Onz5tN+kU+N73vudmz57tvF6vmz59uluyZEksPs6l55oT+WKA0nHdK1eudDNnznRer9ddfPHFbuXKle7kyZOx/em45qHi7wEBAEyMyp8BAQDSHwECAJggQAAAEwQIAGCCAAEATBAgAIAJAgQAMEGAAAAmCBAAwAQBAgCYIEAAABP/H5ob4QwLIo0GAAAAAElFTkSuQmCC", 150 | "text/plain": [ 151 | "
" 152 | ] 153 | }, 154 | "metadata": {}, 155 | "output_type": "display_data" 156 | } 157 | ], 158 | "source": [ 159 | "import matplotlib.pyplot as plt\n", 160 | "\n", 161 | "# plt.imshow(train_x[0])\n", 162 | "\n", 163 | "print(train_y[0][:, :, 0].shape)\n", 164 | "plt.imshow(train_y[0][:, :, 4])" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": 8, 170 | "metadata": {}, 171 | "outputs": [ 172 | { 173 | "data": { 174 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAGFCAYAAAASI+9IAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAARuElEQVR4nO3df6yWZf0H8PsYKOnOORoojnHSMi2WFCpDceSaTWTotJ9OrfWDcLlwmanN5mpZc/6RKyBD7FRLpxZpmLjKcq7FpC1jhj82k2SFxkllhOc5yyDtPP3V51t9P9fh3Haf36/Xn+/7ea77wrHnvUs+u++OdrvdrgCgqqqDxnoDAIwfSgGAoBQACEoBgKAUAAhKAYCgFAAI04bzocHBwaqvr6/q7OysOjo6RnpPADSs3W5XAwMD1Zw5c6qDDiqfB4ZVCn19fVVPT09jmwNgbDz77LPV3Llzi9eHVQqdnZ2xWFdXVzM7A2DUtFqtqqenJ37PS4ZVCv/6X0ZdXV1KAWACO9A/AfiHZgCCUgAgKAUAglIAICgFAIJSACAoBQCCUgAgKAUAglIAICgFAIJSACAoBQCCUgAgKAUAglIAICgFAIJSACAoBQCCUgAgKAUAglIAICgFAIJSACAoBQCCUgAgKAUAglIAICgFAIJSACAoBQCCUgAgKAUAglIAICgFAIJSACAoBQCCUgAgKAUAglIAICgFAIJSACAoBQCCUgAgKAUAglIAIEwb6w0AE8vOnTvT/O67707zq666qrF7n3322Wm+YcOGNO/u7m7s3lOFkwIAQSkAEJQCAEEpABCUAgDB9BFMIn19fWm+e/fu4ndWr16d5k8++WSav/jii2m+ffv2NO/o6Cjeu66f//znaf7888+nuemj+pwUAAhKAYCgFAAISgGAoBQACEoBgGAkFSag0hjpbbfdlubbtm0buc0wqTgpABCUAgBBKQAQlAIAQSkAEEwfwQR0zz33pPloTBldcMEFaT5jxow0f+yxx9LcRNT45KQAQFAKAASlAEBQCgAEpQBAMH0EE9C1116b5pdffnnttV555ZU037FjR5r39PSk+Q033JDmV199dZqbPhqfnBQACEoBgKAUAAhKAYCgFAAIpo9gAlq6dGmaP/nkk7XXarVaad7b25vmt99+e5r/9a9/TfP169fX3lPJO9/5zjQ/8sgjG7vHVOekAEBQCgAEpQBAUAoABKUAQDB9BFNcV1dXml955ZVpXnr20YUXXtjYnm699dY0/8AHPpDmpbe+UZ+TAgBBKQAQlAIAQSkAEJQCAMH00STyxBNPpHnpzVqj4YQTTkjzQw89dJR3Ql179+5N83Xr1jWy/sKFC4vXSs92MmU08pwUAAhKAYCgFAAISgGAoBQACEoBgGAkdZwqvSKxqqrq4x//eJrfe++9af7yyy83sqdX421ve1uan3322Wl+7rnnpvkZZ5zR2J74T6XR0/PPPz/NH3rooVrrl16heckllxS/M3v27Fr3oDlOCgAEpQBAUAoABKUAQFAKAISOdrvdPtCHWq1W1d3dXfX39xdf3cerU5oyWr58efE7W7ZsGantjLlp0/KBuNJ/jzvvvLO41mGHHdbIniaDvr6+4rWLL744zTdv3lzrHoccckia9/b2pvmHPvShWuvzvxnu77iTAgBBKQAQlAIAQSkAEJQCAMGzj8ZY6TlGQ00YzZ07N81PO+20NF+2bFmaH3vssUNvroYXXnghzTdu3Jjm27ZtS/Onn346zTdt2pTmV199dXFPTb02ciIZGBhI8wsvvLD4nbrPMjr11FPT/Etf+lKan3XWWbXWZ2w5KQAQlAIAQSkAEJQCAEEpABBMH42SX//612leelva/Pnzi2uVpkXG43OpLrroojR/4okn0nzp0qVp/uc//znNh3qmz2R26623pvm3v/3tNK87YTSUFStWpPlEmjL6zW9+k+Y/+9nPit8555xz0vykk05qZE/jhZMCAEEpABCUAgBBKQAQlAIAwfTRKLnuuuvS/Kijjkrzu+66q7jWeJwyKnn22WfT/Nprr03z0pTRjBkz0nyyv72rNF311a9+Nc0ff/zx2vc49NBD0/zGG29M89L00VgqTVfddNNNaf7UU0+l+eLFi4v3OP744+tvbAJyUgAgKAUAglIAICgFAIJSACCYPholl112WZovWrQozWfNmjWS23lV9u/fX7z205/+NM0/97nPpfnvfve7Wvd+97vfnebvf//7a60zXpWeWdTb25vmdaeMDj744OK10mTcpZdeWuseJfv27UvzRx55pPidNWvW1LrHAw88kOalNxtec801ab5gwYJa952MnBQACEoBgKAUAAhKAYCgFAAISgGAYCR1lCxfvnyst/D/PPPMM2m+ZcuWNC89IK2qhh4vzBx22GFpXnp95ze+8Y1a6080jz76aJo//PDDjay/bt264rW6D7h7+umn0/xHP/pRmj/44INpPtSrL+u6/vrr07w0Ek2ZkwIAQSkAEJQCAEEpABCUAgDB9NEUcN9996X55ZdfnuZ/+MMfRnI7VVVVVWdnZ5rPnz8/zdevX5/m55xzTvEer3/969N8+vTpB9jdyNm0aVOa33777Y2sf9xxx6V5T09P8TubN29O89I0UelVsbt27Rp6c//liCOOKF4rTZudfPLJaX7sscfWujdlTgoABKUAQFAKAASlAEBQCgCEjna73T7Qh1qtVtXd3V319/dXXV1do7EvGlR6JeF3vvOdUd7J6JoxY0aaf/e7303z888/v9Y6Ja+88krx2umnn57mW7durXWPklNOOSXNh3p959///vdG7l165ed5552X5p/4xCeKa73rXe9qZE/8n+H+jjspABCUAgBBKQAQlAIAQSkAEEwfTQH9/f1p/vvf/37E771x48Y03717d5rv2LEjzX/xi180tqeSG264Ic2vueaaWuv09vYWrw01cTNRrFy5Ms0/9alPpfmJJ544ktthmEwfAVCbUgAgKAUAglIAICgFAILpI8aVwcHBNP/HP/6R5n/84x+Lay1durTWdzo6OtK8NDF08803p/lf/vKX4p4+9rGPpXnp7XhNmTlzZvHaqaeemua
laax58+al+UsvvZTmpWdB3XTTTcU9labNSpN0v/3tb4trZUrPYypNy1VVVb3mNa+pdY/xxvQRALUpBQCCUgAgKAUAglIAIJg+YtLauXNnmn/kIx9J81/+8pdpXvo7v2HDhjRftmxZcU979uxJ8wsuuKBWPm3atOI9Mm94wxuK184888w0f/7559O89Oa61atX11pnPFq7dm3x2mWXXTaKO2me6SMAalMKAASlAEBQCgAEpQBAUAoABCOpTDnbt29P88WLF6d56QF3CxYsSPO6D2cba3v37k3zk08+Oc1Lo76TwSWXXFK8dsstt4ziTppnJBWA2pQCAEEpABCUAgBBKQAQ6j1Viylh69ataT7Uw8Juu+22kdpO40444YQ0f8c73pHm9957b5pv27atqS2NqdIAYuk1mk055phjitdKU0DHHXdcml900UWN7AknBQD+jVIAICgFAIJSACAoBQCC6aMp7Ctf+Uqaf+ELX0jzt7zlLSO5ncaVXn3Z29ub5j/+8Y9rrb9w4cLaexqPXve616X5pk2b0vy8885L8127dtW6b2n9qqqq+fPnp/n9999f6x51dXR0jOj6E4GTAgBBKQAQlAIAQSkAEJQCAMH00RRWeqbPvn370vzRRx8trvW+972vkT016Sc/+Umal/58JW9+85vT/Jvf/GbtPU0kJ510UpqX3iz3t7/9rdb6c+bMKV678cYb0/zLX/5yrXuUvOlNb0rzz372s42sP5E5KQAQlAIAQSkAEJQCAEEpABBMH01hd999d5ovW7YszYeaPtq4cWMjexpLXV1dab5q1ao0L03nTHazZs1K88HBwTQvvcFt9erVxXt8/vOfT/P9+/cPvbn/UnqW0Sc/+ck0f+Mb31hr/cnISQGAoBQACEoBgKAUAAhKAYBg+mgKO/roo9N827ZtaT7U9NGCBQsa2FFVLV68uNbnn3vuueK10p+vp6cnzb/1rW+leWdnZ609TVVXXnllmq9Zs2bE7z19+vQ0v/7669P805/+9AjuZmJzUgAgKAUAglIAICgFAIJSACAoBQCCkVSG7e1vf3vx2hVXXJHmpYeeLVmyJM03b95ca0979uwpXps5c2attfjfbN26dczuXXqQ3aWXXjrKO5n4nBQACEoBgKAUAAhKAYCgFAAIpo8YE3v37q2VH3HEEWluwmj8WLlyZZrv3r07zbdv397YvZ966qk0/8EPfpDmK1asaOzek42TAgBBKQAQlAIAQSkAEJQCAKGj3W63D/ShVqtVdXd3V/39/VVXV9do7IsJ5rHHHkvzuq/pLD37qPSsJMa/F154Ic2HeiXmhg0b0nwYP1f/obu7O81/9atfpfm8efNqrT+RDPd33EkBgKAUAAhKAYCgFAAISgGA4NlHNOKoo44a6y0wTpX+btx5553F7xx++OFpvn79+lr37u/vT/O1a9em+c0331xr/cnISQGAoBQACEoBgKAUAAhKAYBg+ohGzJ49O80vvvjiNC9NnqxatSrNH3zwwTSfNWvWMHbHRLNs2bI0rzt9VLJnz55G1pmMnBQACEoBgKAUAAhKAYCgFAAIpo9oREdHR5ovXbo0zUvTR48//nia7969O81NH01OjzzyyFhvYcpyUgAgKAUAglIAICgFAIJSACAoBQCCkVRG1MKFC8d6C+PKyy+/nObr1q1L81NOOSXNFy1alOYHH3zwq9vYGPjTn/5UvHb//fc3co9DDjkkzZcvX97I+pORkwIAQSkAEJQCAEEpABCUAgDB9BEjqqenJ81L0zMPP/xwmpcmVebNm/fqNjZGvv71r6f5VVddVWudK664olY+d+7cWus36a677krzD3/4w8Xv7N+/v5F7v+c970nzj370o42sPxk5KQAQlAIAQSkAEJQCAEEpABBMHzGiOjs703z27Nm11vne976X5meddVbtPY2ll156qZF1vva1r6X5D3/4wzRfsWJFca0lS5bUuvfGjRvTfMeOHWm+ZcuWNG9qwqiqyn/PPvOZzzR2j6nCSQGAoBQACEoBgKAUAAhKAYBg+ogx8da3vjXN77vvvlHeyehauXJlmt9yyy1pvmvXrlrrP/PMM2n+xS9+sdY641Vpau2BBx5I8xNPPHEktzMpOSkAEJQCAEEpABCUAgBBKQAQOtrtdvtAH2q1WlV3d3fV399fdXV1jca+mOQeeuihND/jjDPSfMaMGWm+c+fO4j2OPPLI+hsbI88991yaX3fddWlemlaaLE4//fQ0v+OOO9L8mGOOGcntTArD/R13UgAgKAUAglIAICgFAIJSACB49hETwr59+9J8cHBwlHcyMo4++ug0X7t2bZp/8IMfTPNzzz03zVut1qvbWA3HH398mr/3ve9N81WrVhXXmjlzZpq/9rWvrb8xanFSACAoBQCCUgAgKAUAglIAICgFAIKRVBjHpk+fnuZLlixJ8xdffHEEd8NU4KQAQFAKAASlAEBQCgAEpQBAUAoABKUAQFAKAASlAEBQCgAEpQBA8OwjJrSdO3cWr82ePXsUdwKTg5MCAEEpABCUAgBBKQAQlAIAwfQRE9r3v//94rVFixaN4k5gcnBSACAoBQCCUgAgKAUAglIAICgFAIJSACAoBQCCUgAgKAUAglIAIHj2EWPitNNOS/M1a9ak+T333JPmZ555ZmN7ApwUAPg3SgGAoBQACEoBgKAUAAhKAYDQ0W632wf6UKvVqrq7u6v+/v6qq6trNPYFQIOG+zvupABAUAoABKUAQFAKAASlAEBQCgAEpQBAUAoABKUAQFAKAASlAEBQCgAEpQBAUAoABKUAQFAKAASlAEBQCgAEpQBAUAoABKUAQFAKAASlAEBQCgAEpQBAUAoABKUAQFAKAASlAEBQCgAEpQBAUAoABKUAQFAKAASlAEBQCgAEpQBAUAoABKUAQFAKAASlAEBQCgAEpQBAUAoABKUAQFAKAASlAECYNpwPtdvtqqqqqtVqjehmABgZ//r9/tfvecmwSmFgYKCqqqrq6en5H7cFwFgaGBiouru7i9c72geqjaqqBgcHq76+vqqzs7Pq6OhodIMAjLx2u10NDAxUc+bMqQ46qPwvB8MqBQCmBv/QDEBQCgAEpQBAUAoABKUAQFAKAASlAED4J2LkypycBQmhAAAAAElFTkSuQmCC", 175 | "text/plain": [ 176 | "
" 177 | ] 178 | }, 179 | "metadata": {}, 180 | "output_type": "display_data" 181 | } 182 | ], 183 | "source": [ 184 | "from simple_deep_learning.mnist_extended.semantic_segmentation import display_grayscale_array\n", 185 | "\n", 186 | "i = np.random.randint(len(train_x))\n", 187 | "display_grayscale_array(array=train_x[i])" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": 9, 193 | "metadata": {}, 194 | "outputs": [ 195 | { 196 | "data": { 197 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAB6EAAAFtCAYAAABGNx9OAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAo5klEQVR4nO3de5RV5Xk/8GfoIKPIRSCiRpRwMSHVNCpFYJF6R0IUbaJZGBtAkIrRRKmXSrK0RWXZtAQRjAJquTQx3kAuNvESY5NAE0UUb0WyJAgKUSl3Ndyc8/sjP6G4t+QM5x3OnOHzWStryfe8+93PmT/mDXxnn6kqFAqFAAAAAAAAAIAEmpR7AAAAAAAAAAAaDyU0AAAAAAAAAMkooQEAAAAAAABIRgkNAAAAAAAAQDJKaAAAAAAAAACSUUIDAAAAAAAAkIwSGgAAAAAAAIBklNAAAAAAAAAAJFNdjpvW1tbG6tWro0WLFlFVVVWOEQAajEKhEJs3b44jjjgimjTxs0Ef58wA2MWZsWfODIBdnBl75swA2MWZsWfODIBd6nJmlKWEXr16dXTo0KEctwZosN5888048sgjyz1Gg+PMAMhyZuRzZgBkOTPyOTMAspwZ+ZwZAFnFnBllKaFbtGgREX8asGXLluUYAaDB2LRpU3To0GHn90Z258wA2MWZsWfODIBdnBl75swA2MWZsWfODIBd6nJmlKWE/ugjK1q2bOmbNsD/5+N88jkzALKcGfmcGQBZzox8zgyALGdGPmcGQFYxZ4Zf8AAAAAAAAABAMkpoAAAAAAAAAJJRQgMAAAAAAACQjBIaAAAAAAAAgGSU0AAAAAAAAAAko4QGAAAAAAAAIBklNAAAAAAAAADJKKEBAAAAAAAASEYJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgGSU0AAAAAAAAAMkooQEAAAAAAABIRgkNAAAAAAAAQDJKaAAAAAAAAACSUUIDAAAAAAAAkIwSGgAAAAAAAIBklNAAAAAAAAAAJKOEBgAAAAAAACAZJTQAAAAAAAAAySihAQAAAAAAAEhGCQ0AAAAAAABAMkpoAAAAAAAAAJJRQgMAAAAAAACQjBIaAAAAAAAAgGSU0AAAAAAAAAAko4QGAAAAAAAAIBklNAAAAAAAAADJKKEBAAAAAAAASEYJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgGSU0AAAAAAAAAMlUl3sAAAAAoPFZsWJFbv7www9nsmuuuaake5111lm5+QMPPJDJWrVqVdK9AAAA+PM8CQ0AAAAAAABAMkpoAAAAAAAAAJJRQgMAAAAAAACQjBIaAAAAAAAAgGSU0AAAAAAAAAAkU13uAQAAAIDyWb16dW6+Zs2aTDZ+/PjctUuWLMlkGzZsyF37u9/9LpNVVVV98oBFeOKJJ3Lzd955J5O1atWqpHsBAADw53kSGgAAAAAAAIBklNAAAAAAAAAAJKOEBgAAAAAAACAZJTQAAAAAAAAAyVSXewAAAABg3xg/fnwmmzFjRu7axYsX1+8wAAAANFqehAYAAAAAAAAgGSU0AAAAAAAAAMkooQEAAAAAAABIRgkNAAAAAAAAQDJKaAAAAAAAAACSqS73AAAAAMC+8cgjj2SyxYsX18u9vv71r+fmNTU1meyll17KZPU1FwAAAPXPk9AAAAAAAAAAJKOEBgAAAAAAACAZJTQAAAAAAAAAySihAQAAAAAAAEimutwDAAAAAPvG9773vUx25ZVXFn39jh07cvNly5Zlsg4dOuSuvfXWWzPZtddem8kWL15c9FwAAAA0LJ6EBgAAAAAAACAZJTQAAAAAAAAAySihAQAAAAAAAEhGCQ0AAAAAAABAMkpoAAAAAAAAAJKpLvcAAAAAwL7Rt2/fTLZkyZKir9+0aVNufvfdd2eyH/3oR7lr33///Uw2adKkomfIc8opp+Tmn/rUp0raFwAAgL3jSWgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACRTXe4BAAAAgMrQsmXL3Pzqq6/OZB06dMhdO3DgwJJmmD59eia74IILctfW1NSUdC8AAAD2jiehAQAAAAAAAEhGCQ0AAAAAAABAMkpoAAAAAAAAAJJRQgMAAAAAAACQTHW5BwAAAAAq2/r16zPZnXfeWdKe3bt3z8379u2byWpqakq6F1D/Xnnlldx8x44d+2yGY445JpMddNBB++z+AAD7E09CAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgGSU0AAAAAAAAAMlUl3sAAAAAoDKsX78+Nz/33HMz2fz584ve95RTTslkw4cPz13bvn37ovcF6temTZty82HDhmWyOXPm5K7dvn170pn25Atf+EImO+uss3LXnn322Znsb/7mb5LPBADQWHkSGgAAAAAAAIBklNAAAAAAAAAAJKOEBgAAAAAAACAZJTQAAAAAAAAAyVSXewAAAACg4Vm9enUm+8Y3vpG7dv78+UXv26xZs0w2dOjQTHbhhRcWvSdQ/zZt2pTJ+vfvn7t2wYIF9T3OXnnppZeKyiIibrvttkz2Se/3vvvuy2TNmzev43QAAI2LJ6EBAAAAAAAASEYJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZKrLPQAAAABQPps3b87NBw4cmMnmz59f9L4nnXRSbn7TTTdlsjPPPLPofYHyGDZsWCZbsGBB7tojjzwyk/Xs2TN3bb9+/TJZx44d6zZcjnfffTeTzZo1K5MtXrw49/rXX389k82dOzd37bXXXpvJ7rzzzj8zIQBA4+ZJaAAAAAAAAACSUUIDAAAAAAAAkIwSGgAAAAAAAIBklNAAAAAAAAAAJFNd7gEAAACAfWP69OmZ7N57781dO3/+/JLuNXTo0Nz8zDPPLGnf+rJw4cJM9vjjj+eu/cpX
vpLJjj/++OQzQbk888wzmWzOnDmZ7Ljjjsu9Pu/7R8uWLUsfrEQXXnhhJnvllVdy1/bt2zeT/eEPf8hdu3r16tIGAwBohDwJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACRTXe4BAAAAgPRWr16dycaNG5fJXn755aL3POigg3LzsWPHZrKhQ4cWvW99mT9/fia74447ctcuXbo0k/Xq1St3bdeuXUsbDBq40aNHZ7JDDz00kz300EO517ds2TL5TCm8+eabmex73/te7to//OEPmaympiZ37d/93d+VNhgAQCPkSWgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACRTXe4BAAAAgL1377335uZ33313Jnv55ZeL3veAAw7IZKNHj85dO2LEiKL3zbNly5ZM9vzzz+euvf3224ve98knn8xkw4YNy117/fXXZ7IvfvGLRd8LGpMrrrgik/Xo0SOTtWvXbl+Ms0dbt27NzX/2s59lslGjRmWy1157reh7nXfeebn5+eefX/QeAAD7C09CAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgGSU0AAAAAAAAAMlUl3sAAAAAYO+9+OKLufmzzz5b0r533nlnJhs6dGjR17/++uu5+ezZszPZU089lckef/zxou/1ScaMGZPJRo0aVfK+0Nj179+/rPdfuXJlbr5gwYJMNnbs2Ny1zz//fFH3at68eW5+4YUXZrIf/vCHRe0JAIAnoQEAAAAAAABISAkNAAAAAAAAQDJKaAAAAAAAAACSUUIDAAAAAAAAkEx1uQcAAAAAijN37txM9qMf/aikPTt37pybd+jQIZP96le/yl07e/bsTPbQQw/lrl21alVRcx1yyCG5+Q9/+MNMdsIJJ+Su7dixY1H3Aspn3rx5mezKK6/MXbt8+fLk92/RokVuftxxx2WySZMm5a79yle+ksmOOuqo3LVNmzatw3QAAJXLk9AAAAAAAAAAJKOEBgAAAAAAACAZJTQAAAAAAAAAySihAQAAAAAAAEimutwDAAAAALvbsWNHbn7LLbdksg0bNpR0r9atW+fm55xzTibbtm1bSfeKiDjggAMy2YABAzLZpZdemnv96aefXvIMQMMxe/bsTLZ8+fJ9dv+33347N7/yyiuL3iNvbU1NTe7aadOmZbJzzz236OsBACqFJ6EBAAAAAAAASEYJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZKrLPQAAAACwu6lTp+bmzz33XPJ7LVq0KPmeERGXXHJJbv6d73wnkx177LH1MgPQ8I0bNy6TXXbZZfVyr1mzZmWyNWvW5K5dtmxZJnv66aeLvteWLVty84EDB2ayW2+9NZNdf/31Rd8LAKAh8iQ0AAAAAAAAAMkooQEAAAAAAABIRgkNAAAAAAAAQDJKaAAAAAAAAACSqS73AAAAAMDuvva1r+Xmjz76aCabN29evczQtm3bTHbSSSflrr311lszWbdu3XLXVlf7pwhgl1atWmWy7t2718u96rJvbW1tJvvwww9z177xxhuZrG/fvkWv/e53v5vJVqxYkXv9XXfdlZs3Vps2bcpkO3bsyGR33HFH7vVPP/10Jtu4cWPu2hdeeKHouQYMGJDJZs2albv2L/7iL4reFwAaE09CAwAAAAAAAJCMEhoAAAAAAACAZOpcQnfq1CnWrl2byTds2BCdOnVKMhQAAAAAAAAAlanOJfQbb7yR+/tPtm7dGqtWrUoyFAAAAAAAAACVqbrYhXPnzt35348//ni0atVq558//PDDeOqpp6Jjx45JhwMAAAAAAACgshRdQp933nkREVFVVRWDBw/e7bWmTZtGx44d4wc/+EHS4QAAAGB/1KZNm9z83//93zPZ17/+9dy1eXl1ddH/DBCf+cxnMtlpp51W9PUAlaxJk+wHSOZlERFdu3bNZP/1X/+Vu/bj/64aEfHLX/4yk913332515977rmZrF+/frlrG6p33nknk02bNi137fjx44u6PoWqqqqi186bNy+T3XXXXblrr7jiir2eCQAqWdF/+6ytrY2IP/0ldOHChdGuXbt6GwoAAAAAAACAylT8j0D/f8uXL9/531u2bImampqkAwEAAAAAAABQufI/Q2YPamtr4+abb45Pf/rTcfDBB8fvf//7iIi44YYb4t57700+IAAAAAAAAACVo84l9C233BLTpk2Lf/3Xf40DDjhgZ37sscfGPffck3Q4AAAAAAAAACpLnT+Oe8aMGTFlypQ4/fTTY8SIETvzv/qrv4rXXnst6XAAAADALm3bts1kTz31VBkmAWBPjj766Nx8ypQpmaxXr16ZbN26dbnXjxo1KpP169evjtPtG+vXr8/Ne/bsmclWrFhR3+PUu5dffrncIwBAg1LnJ6FXrVoVXbp0yeS1tbWxffv2JEMBAAAAAAAAUJnqXEJ//vOfj1//+teZ/OGHH47jjz8+yVAAAAAAAAAAVKY6fxz3jTfeGIMHD45Vq1ZFbW1tzJo1K5YuXRozZsyIRx99tD5mBAAAAAAAAKBC1PlJ6HPPPTfmzZsXP//5z6N58+Zx4403xpIlS2LevHlx5pln1seMAAAAAAAAAFSIOj8JHRHxpS99KZ588snUswAAAAAAAABQ4faqhAYAAAAA2Jeee+65TDZhwoTctTNmzKjvcfbKMccck8m+9KUvZbI5c+bkXr948eLUI9WbQqGQm+/YsSP5vY4++ujcfPjw4Zmsc+fOuWsvvPDCpDMBwP6uziX0IYccElVVVZm8qqoqampqokuXLjFkyJC4+OKLkwwIAAAAAAAAQOWocwl94403xpgxY+LLX/5y9OjRIyIinn322Xjsscfi8ssvj+XLl8dll10WO3bsyP1JMwAAAAAAAAAarzqX0PPnz49bbrklRowYsVs+efLkeOKJJ2LmzJnxhS98ISZMmKCEBgAAAAAAANjPNKnrBY8//nicccYZmfz000+Pxx9/PCIi+vfvH7///e9Lnw4AAAAAAACAilLnJ6HbtGkT8+bNi5EjR+6Wz5s3L9q0aRMREe+//360aNEizYQAAAAAwH7j3/7t33LzG2+8MZN97nOfq+9x9sratWtz87vvvjuT/ed//mfR+3bv3n2vZ9rXPvq34o+bO3duJhswYEDu2lWrVhV1r7w9IyKOO+64TPbYY48VtWddVVVV1cu+AFCp6lxC33DDDXHZZZfF008/vfN3Qi9cuDB++tOfxqRJkyIi4sknn4yTTz457aQAAAAAAAAANHh1LqGHDx8en//85+OOO+6IWbNmRUTEZz/72fjlL38ZvXv3joiIq6++Ou2UAAAAAAAAAFSEOpXQ27dvj0svvTRuuOGG+MlPflJfMwEAAAAAAABQoZrUZXHTpk1j5syZ9TULAAAAAAAAABWuzh/Hfd5558Xs2bNj5MiR9TEPAAAAALAfmzNnTm6+ZcuWTPbiiy/mrv3a176WdKa6+ul
Pf5qb572HPJ/97Gdz8ylTpuz1TA3F8ccfn8leeOGF3LV//OMfi9rziCOOyM3Hjh2byW6++eai9tyTLl26ZLLrrruu5H0BoDGpcwndtWvXuOmmm2LBggVx4oknRvPmzXd7/Tvf+U6y4QAAAAAAAACoLHUuoe+9995o3bp1LFq0KBYtWrTba1VVVUpoAAAAAAAAgP1YnUvo5cuX18ccAAAAAAAAADQCTco9AAAAAAAAAACNR52fhI6IeOutt2Lu3LmxcuXK2LZt226vjRs3LslgAAAAAAAAAFSeOpfQTz31VAwYMCA6deoUr732Whx77LHxxhtvRKFQiBNOOKE+ZgQAGoGP/+DaR5o1a1bU9XPnzs3NzznnnL2eCQAAaHgefvjh3Lxfv36ZbMmSJblrZ82alXSm+tSyZctMdvnll+euPf744+t7nLJo165dbl5bW5vJduzYkcnGjx+fe/0NN9yQybZu3Vr0XFVVVbn5t771rUzWqVOnovcFgP1BnT+Oe9SoUXHNNdfEyy+/HDU1NTFz5sx488034+STT44LLrigPmYEAAAAAAAAoELUuYResmRJDBo0KCIiqqur449//GMcfPDBcdNNN8X3v//95AMCAAAAAAAAUDnqXEI3b95858dpHn744bFs2bKdr/3v//5vuskAAAAAAAAAqDhFl9A33XRTvP/++9GzZ8+YP39+RET0798/rr766hgzZkwMHTo0evbsWW+DAgAAAAAAANDwVRe7cPTo0TFixIgYN25cvPfeezuz9957Lx544IHo2rVrjBs3rt4GBQAap9atW2eyjRs3ZrINGzbU/zAAAEDZHXbYYbn54sWLM9mSJUty1y5cuDCTzZo1K5O9++67dRsux9tvv53JPuk9dOjQIZPdc889maxFixYlz9UYXH311Zns9ttvr5d7NW3aNJONGTMmd+1VV11VLzMAQGNSdAldKBQiIqJTp047s+bNm8ekSZPSTwUAAAAAAABARarT74SuqqqqrzkAAAAAAAAAaASKfhI6IuKYY475s0X0unXrShoIAAAAAAAAgMpVpxJ69OjR0apVq/qaBQAAAAAAAIAKV6cSeuDAgXHooYfW1ywAAAAAAAAAVLiiS2i/DxoAKMUBBxyQm1988cWZbPz48Zns7rvvzr3+m9/8ZklzAQAAlatbt25F54MGDaqXGdauXZvJ2rZtWy/32t8899xz++xenTp1ymQjRozYZ/cHgMamSbELC4VCfc4BAAAAAAAAQCNQ9JPQtbW19TkHAAAAAAAAAI1A0U9CAwAAAAAAAMCfo4QGAAAAAAAAIJmiP44bAKCc1q9fX3R+yCGH1Pc4AAAAERHRtm3bco/QaF1yySWZbM2aNZnsd7/7Xcn3Wrp0aSZ78MEHc9cOHTq05PsBQGPnSWgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgmepyDwAA7N+GDBmSycaPH5/JXn311dzr8/I+ffqUOhYAAABlNnjw4Ez25S9/OZNdddVVudc/8MADmaxQKBR9/6uvvjo379WrVybr1q1b0fsCwP7Ak9AAAAAAAAAAJKOEBgAAAAAAACAZJTQAAAAAAAAAySihAQAAAAAAAEimutwDAAD7t0MPPbTcIwAAAFAh8v4Oed999+Wubd26dSabNGlS0ffauHFjbj5hwoRMdtdddxW9LwDsDzwJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZKrLPQAAsH9r3759JvvGN76Rye67777c6y+//PJM9tRTT+WubdeuXR2nAwAAoFL169cvk02aNKnkfdeuXVvyHgDQ2HkSGgAAAAAAAIBklNAAAAAAAAAAJKOEBgAAAAAAACAZJTQAAAAAAAAAySihAQAAAAAAAEimutwDAAD7t6qqqkzWt2/fTHbfffflXv/yyy9nsjVr1uSubdeuXR2nAwAAoFI9//zz5R4BAPZbnoQGAAAAAAAAIBklNAAAAAAAAADJKKEBAAAAAAAASEYJDQAAAAAAAEAy1eUeAADg47p3717uEQAAACra9u3bM9mdd96Zu/bEE0/MZD169MhkBxxwQOmD1YO33norN3/sscdK2rdZs2a5ef/+/UvaFwD2B56EBgAAAAAAACAZJTQAAAAAAAAAySihAQAAAAAAAEhGCQ0AAAAAAABAMkpoAAAAAAAAAJKpLvcAAAAf16FDh0zWo0eP3LXPPvtsJnvrrbdy13br1q20wQAAACrExIkTM9k111xT9PUjR44sKouIOPLII4sfrEQPPfRQJhs0aFDu2q1bt5Z0r7/927/NzYcMGVLSvgCwP/AkNAAAAAAAAADJKKEBAAAAAAAASEYJDQAAAAAAAEAySmgAAAAAAAAAkqku9wAAAB/XokWLTNa+ffuir//JT36Sm5955pl7PRMAAEAl+eCDD0q6/rbbbstkM2fOzF07dOjQTNanT5+i7zVr1qzcfNmyZZlswYIFmWzr1q1F3+uT5P099B/+4R9K3hcA9leehAYAAAAAAAAgGSU0AAAAAAAAAMkooQEAAAAAAABIRgkNAAAAAAAAQDJKaAAAAAAAAACSqS73AAAAxfjLv/zL3HzevHn7eBIAAICG75JLLslkkydPzl27atWqovZcuXJlbv7P//zPRc9Vbu3bt8/Nn3zyyUx27LHH1vc4ANBoeRIaAAAAAAAAgGSU0AAAAAAAAAAko4QGAAAAAAAAIBklNAAAAAAAAADJVJd7AACAYvTv3z83/5d/+ZdMdv/99+eu/f73v5/JPvWpT5U2GAAAQAN02GGHZbKFCxfmrh09enQmmzx5cvKZ9rXevXtnsh//+Me5a48++uj6HgcA9iuehAYAAAAAAAAgGSU0AAAAAAAAAMkooQEAAAAAAABIRgkNAAAAAAAAQDLV5R4AACC1LVu25Oa1tbX7eBIAAICG47DDDsvNJ0yYkMkuuuiiTHb22WfnXr9p06bSBvsEXbt2zWRf/epXM9nll1+ee33btm0z2YEHHlj6YADAn+VJaAAAAAAAAACSUUIDAAAAAAAAkIwSGgAAAAAAAIBklNAAAAAAAAAAJKOEBgAAAAAAACCZ6nIPAAAAAABA+TRt2jST9enTJ5Nt2LBhH0wDADQGnoQGAAAAAAAAIBklNAAAAAAAAADJKKEBAAAAAAAASEYJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgGSU0AAAAAAAAAMlUl3sAAIB9ZcWKFZmsffv2ZZgEAAAAAKDx8iQ0AAAAAAAAAMkooQEAAAAAAABIRgkNAAAAAAAAQDJKaAAAAAAAAACSqS73AAAA+8r999+fyXr06FGGSQAAAAAAGi9PQgMAAAAAAACQjBIaAAAAAAAAgGSU0AAAAAAAAAAko4QGAAAAAAAAIBklNAAAAAAAAADJKKEBAAAAAAAASEYJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEh
oAAAAAAACAZKrLPQAAQDF69uyZm99+++2Z7JFHHslde9pppyWdCQAAAACALE9CAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgGSU0AAAAAAAAAMlUl3sAAIBiVFfn/9+Wb3/720VlAAAAAADsG56EBgAAAAAAACAZJTQAAAAAAAAAySihAQAAAAAAAEhGCQ0AAAAAAABAMkpoAAAAAAAAAJJRQgMAAAAAAACQjBIaAAAAAAAAgGSU0AAAAAAAAAAko4QGAAAAAAAAIBklNAAAAAAAAADJKKEBAAAAAAAASEYJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgGSU0AAAAAAAAAMkooQEAAAAAAABIRgkNAAAAAAAAQDJKaAAAAAAAAACSUUIDAAAAAAAAkIwSGgAAAAAAAIBklNAAAAAAAAAAJKOEBgAAAAAAACAZJTQAAAAAAAAAySihAQAAAAAAAEhGCQ0AAAAAAABAMkpoAAAAAAAAAJJRQgMAAAAAAACQjBIaAAAAAAAAgGSU0AAAAAAAAAAko4QGAAAAAAAAIBklNAAAAAAAAADJKKEBAAAAAAAASEYJDQAAAAAAAEAySmgAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgmepy3LRQKERExKZNm8pxe4AG5aPvhR99b2R3zgyAXZwZe+bMANjFmbFnzgyAXZwZe+bMANilLmdGWUrozZs3R0REhw4dynF7gAZp8+bN0apVq3KP0eA4MwCynBn5nBkAWc6MfM4MgCxnRj5nBkBWMWdGVaEMP95UW1sbq1evjhYtWkRVVdW+vj1Ag1IoFGLz5s1xxBFHRJMmfkvCxzkzAHZxZuyZMwNgF2fGnjkzAHZxZuyZMwNgl7qcGWUpoQEAAAAAAABonPxYEwAAAAAAAADJKKEBAAAAAAAASEYJDQAAAAAAAEAySmgapWnTpkXr1q1L3qeqqipmz55d8j4ANFzODACK5cwAoFjODACK5cygsVJC0yANGTIkzjvvvHKPsVfWrVsXF110UbRs2TJat24dw4YNi/fee6/cYwE0WpV8ZowZMyZ69+4dBx10UJK/bACwZ5V6ZrzxxhsxbNiw+MxnPhMHHnhgdO7cOf7pn/4ptm3bVu7RABqtSj0zIiIGDBgQRx11VNTU1MThhx8e3/zmN2P16tXlHgug0arkM+MjW7dujS9+8YtRVVUVixcvLvc4NBJKaEjsoosuildffTWefPLJePTRR+NXv/pV/P3f/325xwKgAdq2bVtccMEFcdlll5V7FAAasNdeey1qa2tj8uTJ8eqrr8Ztt90WkyZNiu9+97vlHg2ABujUU0+NBx98MJYuXRozZ86MZcuWxfnnn1/usQBowK677ro44ogjyj0GjYwSmoo0bty4OO6446J58+bRoUOH+Na3vpX7tPHs2bOja9euUVNTE2eddVa8+eabu70+Z86cOOGEE6KmpiY6deoUo0ePjh07duz1XEuWLInHHnss7rnnnjjppJOiT58+MXHixLj//vv9xClAmTTUMyMiYvTo0TFy5Mg47rjjStoHgDQa6pnRr1+/mDp1avTt2zc6deoUAwYMiGuuuSZmzZq113sCUJqGemZERIwcOTJ69uwZRx99dPTu3Tuuv/76+O1vfxvbt28vaV8A9k5DPjMiIn72s5/FE088EWPHji15L/i/lNBUpCZNmsSECRPi1VdfjenTp8cvfvGLuO6663Zb88EHH8SYMWNixowZsWDBgtiwYUMMHDhw5+u//vWvY9CgQXHllVfG//zP/8TkyZNj2rRpMWbMmE+87ymnnBJDhgz5xNd/85vfROvWraN79+47szPOOCOaNGkSzzzzzN6/YQD2WkM9MwBoeCrpzNi4cWO0adOmTtcAkE6lnBnr1q2LH//4x9G7d+9o2rRpnd8nAKVryGfGO++8E8OHD4//+I//iIMOOqik9wkfp4SmIl111VVx6qmnRseOHeO0006LW265JR588MHd1mzfvj3uuOOO6NWrV5x44okxffr0+O///u949tlnI+JPT59df/31MXjw4OjUqVOceeaZcfPNN8fkyZM/8b5HHXVUHH744Z/4+ttvvx2HHnrobll1dXW0adMm3n777RLeMQB7q6GeGQA0PJVyZrz++usxceLEuPTSS/fujQJQsoZ+ZvzjP/5jNG/ePNq2bRsrV66MOXPmlPaGAdhrDfXMKBQKMWTIkBgxYsRuD9ZBKtXlHgD2xs9//vO49dZb47XXXotNmzbFjh07YsuWLfHBBx/s/Gmd6urq+Ou//uud13zuc5+L1q1bx5IlS6JHjx7x4osvxoIFC3b7SaEPP/wws8//NWPGjPp/cwAk5cwAoFiVcGasWrUq+vXrFxdccEEMHz68hHcLQCka+plx7bXXxrBhw2LFihUxevToGDRoUDz66KNRVVVV4jsHoK4a6pkxceLE2Lx5c4waNSrRO4XdKaGpOG+88UacffbZcdlll8WYMWOiTZs2MX/+/Bg2bFhs27at6I+MeO+992L06NHx1a9+NfNaTU3NXs122GGHxbvvvrtbtmPHjli3bl0cdthhe7UnAHuvIZ8ZADQslXBmrF69Ok499dTo3bt3TJkypaS9ANh7lXBmtGvXLtq1axfHHHNMdOvWLTp06BC//e1vo1evXiXtC0DdNOQz4xe/+EX85je/iWbNmu2Wd+/ePS666KKYPn36Xu0LH1FCU3EWLVoUtbW18YMf/CCaNPnTJ8p//KMrIv5U/j733HPRo0ePiIhYunRpbNiwIbp16xYRESeccEIsXbo0unTpkmy2Xr16xYYNG2LRokVx4oknRsSfvpHX1tbGSSedlOw+ABSnIZ8ZADQsDf3MWLVqVZx66qlx4oknxtSpU3fOCMC+19DPjI+rra2NiIitW7fW630AyGrIZ8aECRPilltu2fnn1atXx1lnnRUPPPCAPoMklNA0WBs3bozFixfvlrVt2za6dOkS27dvj4kTJ8Y555wTCxYsiEmTJmWub9q0aXz729+OCRMmRHV1dVxxxRXRs2fPnd/Eb7zxxjj77LPjqKOOivPPPz+aNGkSL774Yrzyyiu7feP9vwYNGhSf/vSn49Zbb819vVu3btGvX78YPnx4TJo0KbZv3x5XXHFFDBw4MI444ojSviAAfKJKPDMiIlauXBnr1q2LlStXxocffrjzPXTp0iUOPvjgvftiALBHlXhmrFq1Kk455ZQ4+uijY+zYsbFmzZqdr/nEJYD6U4lnxjPPPBMLFy6MPn36xCGHHBLLli2LG264ITp37uwpaIB6VIlnxlFHHbXbnz/6t6jOnTvHkUceWdcvAWQVoAEaPHhwISIy/xs2bFihUCgUxo0bVzj88MMLBx54YOGss84qzJgxoxARhfXr1xcKhUJh6tSphVatWhVmzpxZ6NSpU6FZs2aFM844o7BixYrd7vPYY48VevfuXTjww
AMLLVu2LPTo0aMwZcqUna9HROGRRx7Z+eeTTz65MHjw4D3Ovnbt2sKFF15YOPjggwstW7YsXHzxxYXNmzcn+boAkFXJZ8Ynzf7000+n+NIA8DGVemZMnTo1d25/pQeoP5V6Zrz00kuFU089tdCmTZtCs2bNCh07diyMGDGi8NZbbyX72gCwu0o9Mz5u+fLlhYgovPDCC3v7pYDdVBUKhUI9dNsAAAAAAAAA7If8EikAAAAAAAAAklFCAwAAAAAAAJCMEhoAAAAAAACAZJTQAAAAAAAAACSjhAYAAAAAAAAgGSU0AAAAAAAAAMkooQEAAAAAAABIRgkNAAAAAAAAQDJKaAAAAAAAAACSUUIDAAAAAAAAkIwSGgAAAAAAAIBklNAAAAAAAAAAJPP/AFRIpL9VhkACAAAAAElFTkSuQmCC", 198 | "text/plain": [ 199 | "
" 200 | ] 201 | }, 202 | "metadata": {}, 203 | "output_type": "display_data" 204 | } 205 | ], 206 | "source": [ 207 | "from simple_deep_learning.mnist_extended.semantic_segmentation import plot_class_masks\n", 208 | "plot_class_masks(train_y[i])" 209 | ] 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": 10, 214 | "metadata": {}, 215 | "outputs": [ 216 | { 217 | "data": { 218 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAccAAAGFCAYAAACIbDjVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAQY0lEQVR4nO3dXYjUZfvA8WvXddCtdcvWXhZdCgrDoiKjsCB6scIn5OmsgyDxIHhCo/AkOin+B5ERREFiEkVHUQRJFFFEYBL0YitBJQWBZbC+8pCuhm3rzv9A9MlrnXV35/U38/lAB07j73ePil/v2Wvu7SqXy+UAAE7rbvYCAKDViCMAJOIIAIk4AkAijgCQiCMAJOIIAEnPdJ40MTERIyMj0dfXF11dXfVeEwA1Vi6XY3R0NAYHB6O7277oXKYVx5GRkViyZEm91wJAnf3++++xePHiZi+j5U0rjn19fRFx8hd1wYIFdV0QALV35MiRWLJkyem/z5natOJ46q3UBQsWiCNAgfnS2PR44xkAEnEEgEQcASCZ1tccAWh/ExMTMTY21uxl1E2pVJr2x1jEEYAYGxuL3bt3x8TERLOXUjfd3d1xxRVXRKlUOudzxRGgw5XL5di7d2/MmTMnlixZ0paHBJw6zGbv3r0xNDR0zqldcQTocOPj4/Hnn3/G4OBg9Pb2Nns5dbNo0aIYGRmJ8fHxmDt37pTPbb9/HgAwIydOnIiImNbbjUV26vWder1TEUcAIqL9DwiYyesTRwBIfM0RgLPas+dwHDr0Z8PuNzDQG0ND/Q2731TEEYBJ9uw5HEuXvhLHj4837J7z5vXEzz+vb4lAelsVgEkOHfqzoWGMiDh+fHxWO9VNmzbF5ZdfHvPmzYtbbrklvvnmm6rXIo4AFNY777wTGzZsiGeeeSZ27twZ119/fdx3331x4MCBqq4rjgAU1osvvhiPPPJIrF27NpYtWxavvvpq9Pb2xhtvvFHVdcURgEIaGxuL4eHhWLly5enHuru7Y+XKlfHll19WdW1xBKCQDh06FCdOnIhLLrnkjMcvueSS2LdvX1XXFkcASMQRgEIaGBiIOXPmxP79+894fP/+/XHppZdWdW1xBKCQSqVSLF++PD777LPTj01MTMRnn30WK1asqOraDgEAoLA2bNgQa9asiZtuuiluvvnmeOmll+LYsWOxdu3aqq4rjgBMMjDQG/Pm9TT8hJyBgZl9y6wHH3wwDh48GE8//XTs27cvbrjhhvj4448nDenMlDgCMMnQUH/8/PP6Qpytun79+li/fn1N1yKOAJzV0FB/S5xz2gwGcgAgEUcASMQRABJxBIBEHAEgEUcASMQRABKfcwTgrPYcjDh0pHH3G1gQMbSocfebijgCMMmegxFL/xNx/O/G3XPe3IifX51ZILdv3x4vvPBCDA8Px969e2Pr1q3xwAMPVL0Wb6sCMMmhI40NY8TJ+810p3rs2LG4/vrrY9OmTTVdi50jAIW1atWqWLVqVc2va+cIAIk4AkAijgCQiCMAJOIIAIlpVQAK6+jRo/HLL7+c/vHu3bvju+++i4ULF8bQ0NCsryuOABTWt99+G3feeefpH2/YsCEiItasWRNvvvnmrK8rjgBMMrDg5Ik1jT4hZ2DBzH7OHXfcEeVyueZrEUcAJhladPIoN2erAsA/DC1qnVg1mmlVAEjsHIG6WhY7636PXXFj3e9BZ7FzBIBEHAEgEUcASMQRABJxBIDEtCp0uEZMk1JMIzEWf8R4w+53QfTEYJQadr+piCMAk4zEWPwrdsVY1P5otkpK0RUfxbJpB/K5556L9957L3766aeYP39+3HrrrfH888/H0qVLq16Lt1UBmOSPGG9oGCMixqI8o53q559/HuvWrYuvvvoqPv300/j777/j3nvvjWPHjlW9FjtHAArp448/PuPHb775Zlx88cUxPDwct99+e1XXtnMEoC0cPnw4IiIWLlxY9bXEEYDCm5iYiCeeeCJuu+22uPbaa6u+nrdVASi8devWxQ8//BBffPFFTa4njtBGfCyDTrR+/fr48MMPY/v27bF48eKaXFMcASikcrkcjz32WGzdujW2bdsWV1xxRc2uLY4AFNK6devirbfeivfffz/6+vpi3759ERHR398f8+fPr+raBnIAKKTNmzfH4cOH44477ojLLrvs9H/vvPNO1de2cwRgkguiJ0rR1fATci6YQZbK5fqtTRwBmGQwSvFRLHO2KlAcplJphMEotUysGs3XHAEgEUcASMQRABJxBIBEHAEgMa0KBbQrbjzr482cYm3FNcFs2TkCQGLnCMBZjcVIjMcfDbtfT1wQpRhs2P2mIo4ATDIWI7Er/hXlGGvYPbuiFMvio2kHcvPmzbF58+b49ddfIyLimmuuiaeffjpWrVpV9Vq8rQrAJOPxR0PDGBFRjrEZ7VQXL14cGzdujOHh4fj222/jrrvuin//+9/x448/Vr0WO0cACmn16tVn/PjZZ5+NzZs3x1dffRXXXHNNVdcWR2gjlSZGa6nS9KmpVJrpxIkT8e6778axY8dixYoVVV9PHAEorO+//z5WrFgRx48fj/PPPz+2bt0ay5Ytq/q6vuYIQGEtXbo0vvvuu/j666/j0UcfjTVr1sSuXbuqvq6dIwCFVSqV4sorr4yIiOXLl8eOHTvi5Zdfji1btlR1XTtHANrGxMRE/PXXX1Vfx84RgEJ66qmnYtWqVTE0NBSjo6Px1ltvxbZt2+KTTz6p+triCMxII85QbcTULcV34MCBePjhh2Pv3r3R398f1113XXzyySdxzz33VH1tcQRgkp64ILqi1PATcnrigmk///XXX6/bWsQRgElKMRjL4iNnqwLAP5VisGVi1WimVQEgEUcASLytCsyIM1TpBHaOAJCIIwAk4ggAiTgCQGIgB4Cz+++eiKOHGne/8wciFg417n5TEEcAJvvvnohnlkaMH2/cPXvmRfzfz7MO5MaNG+Opp56Kxx9/PF566aXqllLVz6Ypdkb13+W6Vd0Y1X+TUmqj3h/ZcLh4izt6qLFhjDh5v6OHZhXHHTt2xJYtW+K6666ryVJ8zRGAQjt69Gg89NBD8dprr8WFF15Yk2uKIwCFtm7durj//vtj5cqVNbumt1UBKKy33347du
7cGTt27KjpdcURgEL6/fff4/HHH49PP/005s2bV9NriyMAhTQ8PBwHDhyIG2/833DXiRMnYvv27fHKK6/EX3/9FXPmzJnVtcWxRbXzROpUZvq6TbdWz1QqRXX33XfH999/f8Zja9eujauvvjqefPLJWYcxQhwBKKi+vr649tprz3jsvPPOi4suumjS4zNlWhUAEjtHACY7f+DkiTWNPiHn/IGqLrFt27baLKUmVwGgvSwcOnmUm7NVAeAfFg61TKwaTRybrBFTqc2c6Kz365vq+iZZ/6feE6kRplJpLwZyACARRwAiIqJcLjd7CXU1k9cnjgAd7tSH5cfGxpq8kvo69fqmcziArzkCdLienp7o7e2NgwcPxty5c6O7u/32TRMTE3Hw4MHo7e2Nnp5zp08cATpcV1dXXHbZZbF79+747bffmr2cuunu7o6hoaHo6uo653PFsYCKNIU507V26pmytWIqldkqlUpx1VVXtfVbq6VSadq7YnEEICJO7qxq/a2fiqr93lgGgCqJIwAk4ggAiTgCQGIgp0F8h/sz1Woqtd1/nSoxlTo9tfx1aodfD6bPzhEAEnEEgEQcASARRwBIxBEAEtOqDdKpU5XOSq1Op06l1vt1t+JrprXYOQJAIo4AkIgjACTiCACJOAJAIo4AkPgoB9PWzI9ltPtHYdr5owud+nEUis3OEQAScQSARBwBIBFHAEjEEQAS06odrBUPBW/3qdQiacSUaSWmT2k2O0cASMQRABJxBIBEHAEgEUcASEyrdoBWnEqtpBFrbeZEbLMmQJs5eVqJiVRamZ0jACTiCACJOAJAIo4AkIgjACSmVTtAM6czW3FSttKanOtaHdOntBM7RwBIxBEAEnEEgEQcASARRwBITKtSV0WalJ3p89t9urWZ06fNmnJu999Tps/OEQAScQSARBwBIBFHAEjEEQAS06q0rUqTh80877XSBOiy2Dmj57eiVjxHF2bLzhEAEnEEgEQcASARRwBIxBEAEnEEgKSrXC6Xz/WkI0eORH9/fxw+fDgWLFjQiHVBw9Xqowjtcni1j2acqei/r/4enxk7RwBIxBEAEnEEgEQcASARRwBIHDzOtE01vVj0ST5ay0z/PJmspdbsHAEgEUcASMQRABJxBIBEHAEgMa3KJCb/iKg8MeoMWjqBnSMAJOIIAIk4AkAijgCQiCMAJKZVO1gtp1I7ccK1U6ctm/m66/3nrFN/T5nMzhEAEnEEgEQcASARRwBIxBEAEtOqHazeZ2e2CxOMjWcqlWazcwSARBwBIBFHAEjEEQAScQSAxLQqk8xmkq9r9dkfH/6gOJOvJhir04pTzn5PmS07RwBIxBEAEnEEgEQcASARRwBIxBEAEh/loK6Wr57ZKH35gzotBGAG7BwBIBFHAEjEEQAScQSARBwBIDGtCtREpUO+W/FAcjgXO0cASMQRABJxBIBEHAEgEUcASEyrUhOVzkTtWt3YddB6mjnFWukeldYEp9g5AkAijgCQiCMAJOIIAIk4AkBiWhVoiqkmRus9yWqKlXOxcwSARBwBIBFHAEjEEQAScQSAxLQqLaXSWayVzm6lPTXzPFaIsHMEgEnEEQAScQSARBwBIBFHAEhMq1JXlaZMK02lQoSpVJrPzhEAEnEEgEQcASARRwBIxBEAEnEEgMRHOYCOU+lgczjFzhEAEnEEgEQcASARRwBIxBEAEtOq0AJqddB2kaYwHS5OK7NzBIBEHAEgEUcASMQRABJxBIDEtCqF0LX67I+XP2jsOlpdpQnQWk6xFmnKtEjTu7QWO0cASMQRABJxBIBEHAEgEUcASEyrQgco0oTpbJhKpdbsHAEgEUcASMQRABJxBIBEHAEgMa1KU1Q6E7XSGartrtK0ZbtPmc6UqVQaxc4RABJxBIBEHAEgEUcASMQRABLTqtDCZjqdWaTpVpOntDI7RwBIxBEAEnEEgEQcASARRwBIxBEAEh/lgDbi4xFQG3aOAJCIIwAk4ggAiTgCQCKOAJCIIwAk4ggAiTgCQCKOAJCIIwAk4ggAiTgCQCKOAJCIIwAk4ggAiTgCQCKOAJD0NHsBUI2u1ZX/X/mDxq0DaC92jgCQiCMAJOIIAIk4AkAijgCQiCMAJOIIAIk4AkAijgCQiCMAJOIIAImzVWkplc5DrXSGqvNTgXqwcwSARBwBIBFHAEjEEQAScQSARBwBIPFRDgrBRzaARrJzBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASMQRABJxBIBEHAEgEUcASHqm86RyuRwREUeOHKnrYgCoj1N/f5/6+5ypTSuOo6OjERGxZMmSui4GgPoaHR2N/v7+Zi+j5XWVp/HPiImJiRgZGYm+vr7o6upqxLoAqKFyuRyjo6MxODgY3d2+onYu04ojAHQS/3wAgEQcASARRwBIxBEAEnEEgEQcASARRwBI/h+w2iwkhhA1RAAAAABJRU5ErkJggg==", 219 | "text/plain": [ 220 | "
" 221 | ] 222 | }, 223 | "metadata": {}, 224 | "output_type": "display_data" 225 | } 226 | ], 227 | "source": [ 228 | "from simple_deep_learning.mnist_extended.semantic_segmentation import display_grayscale_array\n", 229 | "display_segmented_image(y=train_y[i])" 230 | ] 231 | } 232 | ], 233 | "metadata": { 234 | "kernelspec": { 235 | "display_name": "mnist_extended_dataset", 236 | "language": "python", 237 | "name": "python3" 238 | }, 239 | "language_info": { 240 | "codemirror_mode": { 241 | "name": "ipython", 242 | "version": 3 243 | }, 244 | "file_extension": ".py", 245 | "mimetype": "text/x-python", 246 | "name": "python", 247 | "nbconvert_exporter": "python", 248 | "pygments_lexer": "ipython3", 249 | "version": "3.10.14" 250 | } 251 | }, 252 | "nbformat": 4, 253 | "nbformat_minor": 2 254 | } 255 | --------------------------------------------------------------------------------