├── cnn ├── __init__.py ├── nn.py ├── dense.py ├── mnist_like.py ├── no_padding_1conv.py └── joint_rnn.py ├── util ├── __init__.py ├── load_data.py ├── helpers.py ├── ssm.py └── similarity.py ├── README.md ├── learning_tools.py ├── extract_features.py ├── baseline_experiment.py ├── ssmfeatures.py ├── deep_mnist.py ├── train.py ├── train_joint.py └── Lyrics Structure Analysis from stores.ipynb /cnn/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /util/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cnn/nn.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | 4 | class NN(object): 5 | def __init__(self): 6 | self.g_in = None 7 | self.g_labels = None 8 | self.g_out = None 9 | self.g_loss = None 10 | 11 | @staticmethod 12 | def weight_variable(shape): 13 | """weight_variable generates a weight variable of a given shape.""" 14 | initial = tf.truncated_normal(shape, stddev=0.1) 15 | return tf.Variable(initial) 16 | 17 | @staticmethod 18 | def bias_variable(shape): 19 | """bias_variable generates a bias variable of a given shape.""" 20 | initial = tf.constant(0.1, shape=shape) 21 | return tf.Variable(initial) 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Lyrics Segmentation using Convolutions 2 | 3 | This repository contains all the code needed to run and reproduce results reported in our COLING2018 paper. 4 | 5 | ## Abstract 6 | 7 | Lyrics contain repeated patterns that are correlated with the repetitions 8 | found in the music they accompany. Repetitions in song texts 9 | have been shown to enable lyrics segmentation 10 | – a fundamental prerequisite of automatically detecting the building blocks (e.g. chorus, verse) of a song text. 11 | In this article we improve on the state-of-the-art in lyrics segmentation by applying a convolutional neural 12 | network to the task, and experiment with novel features as a step towards deeper macrostructure detection of lyrics. 13 | 14 | ### Citation 15 | ``` 16 | @inproceedings{fell:segmentation, 17 | TITLE = {Lyrics Segmentation: Textual Macrostructure Detection using Convolutions}, 18 | AUTHOR = {Fell, Michael and Nechaev, Yaroslav and Cabrio, Elena and Gandon, Fabien}, 19 | BOOKTITLE = {Conference on Computational Linguistics (COLING)}, 20 | ADDRESS = {Santa Fe, New Mexico, United States}, 21 | YEAR = {2018}, 22 | } 23 | ``` 24 | -------------------------------------------------------------------------------- /learning_tools.py: -------------------------------------------------------------------------------- 1 | from sklearn.model_selection import train_test_split 2 | from sklearn.preprocessing import MinMaxScaler 3 | from sklearn.preprocessing import RobustScaler 4 | import numpy as np 5 | 6 | #fixed split with train, validation, test proportion of 60%-20%-20% 7 | #return no test fold currently 8 | def train_val_test_split(songs, train_part = 0.6, val_part = 0.2, test_part = 0.2, subset_factor=1): 9 | if subset_factor > 1 or subset_factor <= 0: 10 | raise ValueError('subset factor must be in (0,1]') 11 | if subset_factor == 1: 12 | songs_subset = songs 13 | else: 14 | songs_subset = train_test_split(songs, train_size=subset_factor, test_size=1-subset_factor, random_state=0)[0] 15 | 16 | train_size = 1 - test_part 17 | songs_train_val, songs_test = train_test_split(songs_subset, test_size = test_part, train_size = train_size, random_state=0) 18 | songs_train, songs_val = train_test_split(songs_train_val, train_size = train_part/train_size, random_state=0) 19 | return songs_train, songs_val 20 | 21 | 22 | #Scale each feature for itself 23 | #label_offset is 2 if right to the features is a label and a song id 24 | def scale_features(line_feature_matrix, label_offset=2): 25 | line_feature_matrix = np.array(line_feature_matrix, dtype=np.float64) 26 | #the last column is the label, don't scale that :o) 27 | for column_index in range(line_feature_matrix.shape[1] - label_offset): 28 | #scaler needs column vector 29 | column_normalized = RobustScaler().fit_transform(line_feature_matrix[:, column_index].reshape(-1,1)) 30 | column_normalized = MinMaxScaler().fit_transform(column_normalized) 31 | #transform back to row vector 32 | line_feature_matrix[:, column_index] = column_normalized.reshape(1,-1) 33 | return line_feature_matrix 34 | -------------------------------------------------------------------------------- /cnn/dense.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | from cnn.nn import NN 4 | 5 | 6 | class Dense(NN): 7 | def __init__(self, window_size, ssm_size): 8 | super().__init__() 9 | 10 | self.g_dprob = None 11 | self.g_results = None 12 | 13 | self.window_size = window_size 14 | self.ssm_size = ssm_size 15 | 16 | self.define(window_size, ssm_size) 17 | 18 | def define(self, window_size, ssm_size): 19 | # Input of size: 20 | # batch_size x window_size x max_ssm_size 21 | # Labels of size: 22 | # batch_size 23 | # Note that we do not fix the first dimension to allow flexible batch_size for evaluation / leftover samples 24 | with tf.name_scope('input'): 25 | self.g_in = tf.placeholder(tf.float32, shape=[None, 2*window_size, ssm_size], name="input") 26 | self.g_labels = tf.placeholder(tf.int32, shape=[None], name="labels") 27 | 28 | self.g_dprob = tf.placeholder(tf.float32) 29 | layers = 4 30 | fc_input_size = 2*window_size * ssm_size 31 | fc_input = tf.reshape(self.g_in, [-1, fc_input_size]) 32 | fc_size = 512 33 | for idx in range(layers): 34 | with tf.name_scope('fc%d' % idx): 35 | W_fc = self.weight_variable([fc_input_size, fc_size]) 36 | b_fc = self.bias_variable([fc_size]) 37 | 38 | fc_input = tf.nn.tanh(tf.matmul(fc_input, W_fc) + b_fc) 39 | 40 | # Dropout - controls the complexity of the model, prevents co-adaptation of features 41 | fc_input = tf.nn.dropout(fc_input, self.g_dprob) 42 | 43 | fc_input_size = fc_size 44 | fc_size = int(fc_size / 2) 45 | 46 | # Map the features to 2 classes 47 | with tf.name_scope('final'): 48 | W_fc2 = self.weight_variable([fc_input_size, 2]) 49 | b_fc2 = self.bias_variable([2]) 50 | 51 | self.g_out = tf.matmul(fc_input, W_fc2) + b_fc2 52 | 53 | # Loss 54 | with tf.name_scope('loss'): 55 | losses = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.g_labels, logits=self.g_out) 56 | self.g_loss = tf.reduce_mean(losses) 57 | 58 | # Evaluation 59 | with tf.name_scope('evaluation'): 60 | self.g_results = tf.argmax(self.g_out, axis=1, output_type=tf.int32) 61 | -------------------------------------------------------------------------------- /util/load_data.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from os import path 3 | 4 | # Open dataframes stored in hdf container 5 | # 6 | # Important: 7 | # segment_borders ids are the data points we want to model 8 | # they are a proper subset of ids in ssm stores, but we only model 9 | # for the 107k ids in segment_borders, as they are all english with 5+ segments 10 | 11 | 12 | def load_segment_borders(data_path: str) -> pd.DataFrame: 13 | with pd.HDFStore(path.join(data_path, 'borders_pub1.hdf')) as store: 14 | borders = store['mdb_127_en_seg5p'] 15 | return borders 16 | 17 | 18 | def load_linewise_feature(data_path:str, feat_name:str) -> pd.DataFrame: 19 | with pd.HDFStore(path.join(data_path, 'linewise_feats_watanabe.hdf')) as store: 20 | linewise = store[feat_name] 21 | return linewise 22 | 23 | 24 | def load_ssm_string(data_path: str) -> pd.DataFrame: 25 | with pd.HDFStore(path.join(data_path, 'ssm_store_pub1.hdf')) as store: 26 | sssm = store['mdb_127_en_seg5p_string_1'].append(store['mdb_127_en_seg5p_string_2']) 27 | return sssm 28 | 29 | 30 | def load_ssm_phonetics(data_path: str) -> pd.DataFrame: 31 | with pd.HDFStore(path.join(data_path, 'ssm_store_pub1.hdf')) as store: 32 | sppm = store['mdb_127_en_phonetics_1'].append(store['mdb_127_en_phonetics_2']) 33 | return sppm 34 | 35 | 36 | def load_ssm_lex_struct_watanabe(data_path: str) -> pd.DataFrame: 37 | base_name = 'ssm_store_lex_struct_watanabe_' 38 | table_name = 'ssm_lex_struct' 39 | with pd.HDFStore(path.join(data_path, base_name + str(1) + '.hdf')) as store: 40 | sssm = store[table_name] 41 | for i in [x+2 for x in range(9)]: 42 | with pd.HDFStore(path.join(data_path, base_name + str(i) + '.hdf')) as store: 43 | print('appending', data_path, base_name + str(i) + '.hdf') 44 | sssm = sssm.append(store[table_name]) 45 | return sssm 46 | 47 | 48 | # load some ssms by their names. Requires them to be in one piece 49 | def load_ssms_from(data_path: str, df_names: list) -> pd.DataFrame: 50 | with pd.HDFStore(path.join(data_path, 'ssm_store_pub1.hdf')) as store: 51 | ssms = [] 52 | for name in df_names: 53 | ssms.append(store['mdb_127_en_' + name]) 54 | return ssms 55 | 56 | 57 | # train and test on all genres 58 | def load_segment_borders_watanabe(data_path: str) -> pd.DataFrame: 59 | with pd.HDFStore(path.join(data_path, 'borders_pub2.hdf')) as store: 60 | train_borders = store['watanabe_train'].append(store['watanabe_dev']) 61 | test_borders = store['watanabe_test'] 62 | return train_borders, test_borders 63 | 64 | 65 | # train on all genres, test on single genre 66 | def load_segment_borders_for_genre(data_path: str, genre_name: str) -> pd.DataFrame: 67 | with pd.HDFStore(path.join(data_path, 'borders_pub2.hdf')) as store: 68 | train_borders = store['watanabe_train'].append(store['watanabe_dev']) 69 | test_borders = store[genre_name + '_watanabe_test'] 70 | return train_borders, test_borders 71 | 72 | ################ current genres and song counts ########### 73 | # /AlternativeRock_watanabe_dev -> 855 74 | # /AlternativeRock_watanabe_test -> 875 75 | # /HardRock_watanabe_dev -> 433 76 | # /HardRock_watanabe_test -> 462 77 | # /HeavyMetal_watanabe_dev -> 254 78 | # /HeavyMetal_watanabe_test -> 269 79 | # /HipHop_watanabe_dev -> 1066 80 | # /HipHop_watanabe_test -> 1097 81 | # /RnB_watanabe_dev -> 930 82 | # /RnB_watanabe_test -> 905 83 | # /Soul_watanabe_dev -> 129 84 | # /Soul_watanabe_test -> 118 85 | # /SouthernHipHop_watanabe_dev -> 188 86 | # /SouthernHipHop_watanabe_test -> 183 87 | # /Synthpop_watanabe_dev -> 108 88 | # /Synthpop_watanabe_test -> 126 89 | # /watanabe_dev -> 20531 90 | # /watanabe_test -> 20583 91 | # /watanabe_train -> 61687 92 | -------------------------------------------------------------------------------- /cnn/mnist_like.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | from cnn.nn import NN 4 | 5 | 6 | class MnistLike(NN): 7 | def __init__(self, window_size, ssm_size, channels): 8 | super().__init__() 9 | 10 | self.g_dprob = None 11 | self.g_results = None 12 | 13 | self.window_size = window_size 14 | self.ssm_size = ssm_size 15 | self.channels = channels 16 | 17 | self.define(window_size, ssm_size, channels) 18 | 19 | def define(self, window_size, ssm_size, channels): 20 | # Input of size: 21 | # batch_size x window_size x max_ssm_size 22 | # Labels of size: 23 | # batch_size 24 | # Note that we do not fix the first dimension to allow flexible batch_size for evaluation / leftover samples 25 | with tf.name_scope('input'): 26 | self.g_in = tf.placeholder(tf.float32, shape=[None, 2*window_size, ssm_size, channels], name="input") 27 | self.g_labels = tf.placeholder(tf.int32, shape=[None], name="labels") 28 | 29 | # Reshape to use within a convolutional neural net. 30 | # contrary to mnist example, it just adds the last dimension whichs is the amount of channels in the image, 31 | # in our case its only one, if we will add more features for each line – they will go there 32 | with tf.name_scope('reshape'): 33 | # x_image = tf.expand_dims(self.g_in, -1) 34 | # no reshaping necessary as incoming tensor has number of channels as lowest rank 35 | x_image = self.g_in 36 | 37 | # First convolutional layer - maps one grayscale image to 32 feature maps. 38 | with tf.name_scope('conv1'): 39 | W_conv1 = self.weight_variable([window_size+1, window_size+1, channels, 32]) 40 | b_conv1 = self.bias_variable([32]) 41 | h_conv1 = tf.nn.relu(self.conv2d(x_image, W_conv1) + b_conv1) 42 | 43 | # Pooling layer - downsamples by 2X. 44 | with tf.name_scope('pool1'): 45 | h_pool1 = self.max_pool_2x2(h_conv1) 46 | 47 | # Second convolutional layer -- maps 32 feature maps to 64. 48 | with tf.name_scope('conv2'): 49 | W_conv2 = self.weight_variable([int(window_size/2)+1, int(window_size/2)+1, 32, 64]) 50 | b_conv2 = self.bias_variable([64]) 51 | h_conv2 = tf.nn.relu(self.conv2d(h_pool1, W_conv2) + b_conv2) 52 | 53 | # Second pooling layer. 54 | with tf.name_scope('pool2'): 55 | h_pool2 = self.max_pool_2x2(h_conv2) 56 | 57 | # We have to either fix the ssm_size or do an average here 58 | fc1_size = 512 59 | fc1_input_size = int(window_size/2) * int(ssm_size/4) * 64 60 | with tf.name_scope('fc1'): 61 | W_fc1 = self.weight_variable([fc1_input_size, fc1_size]) 62 | b_fc1 = self.bias_variable([fc1_size]) 63 | 64 | h_pool2_flat = tf.reshape(h_pool2, [-1, fc1_input_size]) 65 | h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1) 66 | 67 | # Dropout - controls the complexity of the model, prevents co-adaptation of features 68 | with tf.name_scope('dropout'): 69 | self.g_dprob = tf.placeholder(tf.float32) 70 | h_fc1_drop = tf.nn.dropout(h_fc1, self.g_dprob) 71 | 72 | # Map the features to 2 classes 73 | with tf.name_scope('fc2'): 74 | W_fc2 = self.weight_variable([fc1_size, 2]) 75 | b_fc2 = self.bias_variable([2]) 76 | 77 | self.g_out = tf.matmul(h_fc1_drop, W_fc2) + b_fc2 78 | 79 | # Loss 80 | with tf.name_scope('loss'): 81 | losses = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.g_labels, logits=self.g_out) 82 | self.g_loss = tf.reduce_mean(losses) 83 | 84 | # Evaluation 85 | with tf.name_scope('evaluation'): 86 | self.g_results = tf.argmax(self.g_out, axis=1, output_type=tf.int32) 87 | 88 | @staticmethod 89 | def conv2d(x, W): 90 | """conv2d returns a 2d convolution layer with full stride.""" 91 | return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME') 92 | 93 | @staticmethod 94 | def max_pool_2x2(x): 95 | """max_pool_2x2 downsamples a feature map by 2X.""" 96 | return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], 97 | strides=[1, 2, 2, 1], padding='SAME') 98 | -------------------------------------------------------------------------------- /util/helpers.py: -------------------------------------------------------------------------------- 1 | from time import time 2 | from sklearn.utils import shuffle 3 | import numpy as np 4 | import math 5 | 6 | 7 | def compact_buckets(buckets: dict()) -> dict(): 8 | """ 9 | Compacts buckets (puts data inside in a big numpy array) and prints bucket statistics 10 | 11 | :param buckets: buckets of data of different sizes 12 | :return: compacted buckets 13 | """ 14 | largest_bucket_id_len = (0, 0) 15 | for bucket_id in buckets: 16 | X, X_added, Y = buckets[bucket_id] 17 | #print('\n\n', X, X_added, Y) 18 | buckets[bucket_id] = (np.vstack(X), np.vstack(X_added), np.concatenate(Y)) 19 | bucket_len = buckets[bucket_id][2].shape[0] 20 | print(" max: %3d len: %d" % (2 ** bucket_id, bucket_len)) 21 | largest_bucket_len = largest_bucket_id_len[1] 22 | if bucket_len > largest_bucket_len: 23 | largest_bucket_id_len = (bucket_id, bucket_len) 24 | 25 | # Quick fix until I figure out how to process different sized buckets 26 | largest_bucket_id = largest_bucket_id_len[0] 27 | largest_bucket_content = buckets[largest_bucket_id] 28 | buckets = dict() 29 | buckets[largest_bucket_id] = largest_bucket_content 30 | return buckets 31 | 32 | 33 | def feed_joint(data: (np.ndarray, np.ndarray), ssm_size: int, batch_size: int) -> (np.ndarray, np.ndarray, np.ndarray): 34 | """ 35 | Produce random batches of data from the dataset 36 | 37 | :param data: tuple (X, X_added, Y) with feature for convolution X, feature for after convolution X_added, and labels Y 38 | :param batch_size: size of the batches 39 | :return: batch 40 | """ 41 | X, _, Y = data 42 | X, Y = shuffle(X, Y) 43 | size = len(Y) 44 | 45 | def put(X_batch, X_lengths, Y_batch, i, X, Y): 46 | item = X[i] 47 | y = Y[i] 48 | pad_size = ssm_size - item.shape[0] 49 | 50 | X_lengths.append(item.shape[0]) 51 | Y_batch.append(np.concatenate((y, np.zeros([pad_size])))) 52 | X_batch.append(np.concatenate(( 53 | item, 54 | np.zeros([pad_size, item.shape[1], item.shape[2], item.shape[3]]) # Padding from right 55 | ), axis=0)) # Column-wise 56 | 57 | pointer = 0 58 | while pointer < size: 59 | X_batch = [] 60 | X_lengths = [] 61 | Y_batch = [] 62 | for i in range(pointer, min(pointer+batch_size, size)): 63 | put(X_batch, X_lengths, Y_batch, i, X, Y) 64 | 65 | yield np.stack(X_batch), np.stack(X_lengths), np.stack(Y_batch) 66 | pointer += batch_size 67 | 68 | 69 | def feed(data: (np.ndarray, np.ndarray, np.ndarray), batch_size: int) -> (np.ndarray, np.ndarray, np.ndarray): 70 | """ 71 | Produce random batches of data from the dataset 72 | 73 | :param data: tuple (X, X_added, Y) with feature for convolution X, feature for after convolution X_added, and labels Y 74 | :param batch_size: size of the batches 75 | :return: batch 76 | """ 77 | X, X_added, Y = data 78 | X, X_added, Y = shuffle(X, X_added, Y) 79 | size = Y.shape[0] 80 | 81 | pointer = 0 82 | while pointer+batch_size < size: 83 | yield X[pointer:pointer+batch_size], X_added[pointer:pointer+batch_size], Y[pointer:pointer+batch_size] 84 | pointer += batch_size 85 | yield X[pointer:], X_added[pointer:], Y[pointer:] 86 | 87 | 88 | def tdiff(timestamp: float) -> float: 89 | """ 90 | Compute time offset (for time reporting purposes) 91 | """ 92 | return time() - timestamp 93 | 94 | 95 | def k(value: int) -> float: 96 | """ 97 | Shorthand for thousands 98 | """ 99 | return float(value) / 1000 100 | 101 | 102 | def precision(tp: int, fp: int) -> float: 103 | return tp / (tp + fp) 104 | 105 | 106 | def recall(tp: int, fn: int) -> float: 107 | return tp / (tp + fn) 108 | 109 | 110 | def f1(tp: int, fp: int, fn: int) -> float: 111 | prec = precision(tp, fp) 112 | rec = recall(tp, fn) 113 | return 2 * prec * rec / (prec + rec) 114 | 115 | 116 | def windowdiff(seg1, seg2, k=None, boundary=1): 117 | """ 118 | Compute the windowdiff score for a pair of segmentations. A segmentation is any sequence 119 | over a vocabulary of two items (e.g. 0, 1, where the specified boundary value is used 120 | to mark the edge of a segmentation) 121 | If k is None it is half of an average segment length considering seg1 as true segments 122 | """ 123 | 124 | assert len(seg1) == len(seg2), "Segments have unequal length: %d and %d" % (len(seg1), len(seg2)) 125 | if k is None: 126 | k = max(1, round(len(seg1) / (2 * np.count_nonzero(seg1 == boundary)))) 127 | assert k < len(seg1), "k (%d) can't be larger than a segment length (%d)" % (k, len(seg1)) 128 | 129 | wd = 0 130 | for i in range(len(seg1) - k): 131 | wd += abs(np.count_nonzero(seg1[i:i+k+1] == boundary) - np.count_nonzero(seg2[i:i+k+1] == boundary)) 132 | return wd / (len(seg1) - k) -------------------------------------------------------------------------------- /extract_features.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import math 3 | import json 4 | 5 | 6 | def labels_from_label_array(label_array, ssm_size): 7 | if isinstance(label_array, str): 8 | label_array = json.loads(label_array) 9 | 10 | labels = np.zeros(ssm_size, dtype=np.int32) 11 | for label in label_array: 12 | # Workaround for buggy labels, probably not needed anymore 13 | # if label >= labels.shape[0]: 14 | # continue 15 | labels[label] = 1 16 | return labels 17 | 18 | 19 | def tensor_from_ssm(ssm: np.ndarray, pad_to_size: int, half_window=2) -> np.ndarray: 20 | """ 21 | Produce a tensor containing all line-wise features for the given similarity matrix 22 | 23 | the feature of a line is the partial picture that is centered around the line index 24 | e.g. if half window size is 2, the window is 5 including its center 25 | feature of line 4 would be the picture with lines [line 2, line 3, line 4, line 5, line 6] 26 | 27 | :param ssm: square similarity matrix 28 | :param pad_to_size: pad feature matrices to a certain size (maximum size of the image in the dataset) 29 | :param half_window: the size of the window 30 | :return: n feature matrices of size pad_to_size x 2*half_window, where n equals to ssm size 31 | """ 32 | assert ssm.shape[0] == ssm.shape[1] # SSM has to be square 33 | ssm_size = ssm.shape[0] 34 | 35 | # dimensions of the final tensor 36 | dim_x = ssm_size 37 | dim_y = 2 * half_window 38 | dim_z = pad_to_size 39 | tensor = np.empty([dim_x, dim_y, dim_z], dtype=np.float32) 40 | for line in range(ssm_size): 41 | # lower and upper bounds of the window 42 | lower = line - half_window + 1 43 | upper = line + half_window + 1 44 | 45 | unpadded_patch = np.concatenate(( 46 | np.zeros([max(-lower, 0), ssm_size], dtype=np.float32), # Padding from top 47 | ssm[max(lower, 0):min(upper, ssm_size), :], 48 | np.zeros([max(upper - ssm_size, 0), ssm_size]) # Padding from bottom 49 | ), axis=0) # Row-wise 50 | 51 | pad_size = float(pad_to_size - ssm_size) / 2 52 | tensor[line] = np.concatenate(( 53 | np.zeros([dim_y, int(math.floor(pad_size))]), # Padding from left 54 | unpadded_patch, 55 | np.zeros([dim_y, int(math.ceil(pad_size))]) # Padding from right 56 | ), axis=1) # Column-wise 57 | return tensor 58 | 59 | 60 | def tensor_from_multiple_ssms(multiple_ssms: list, pad_to_size: int, half_window=2) -> np.ndarray: 61 | ssm_first = multiple_ssms[0] 62 | 63 | for elem in multiple_ssms: 64 | # SSMs have to be square 65 | assert elem.shape[0] == elem.shape[1] 66 | # SSMs have to be of same size 67 | assert ssm_first.shape[0] == elem.shape[0] 68 | 69 | ssm_size = ssm_first.shape[0] 70 | 71 | # dimensions of the final tensor 72 | dim_x = ssm_size 73 | dim_layers = len(multiple_ssms) # number of SSM layers used 74 | dim_y = 2 * half_window 75 | dim_z = pad_to_size 76 | tensor = np.empty([dim_x, dim_y, dim_z, dim_layers], dtype=np.float32) 77 | for line in range(ssm_size): 78 | # lower and upper bounds of the window 79 | lower = line - half_window + 1 80 | upper = line + half_window + 1 81 | 82 | # padding size for left-right padding 83 | pad_lr_size = float(pad_to_size - ssm_size) / 2 84 | 85 | 86 | # pad SSMs 87 | # pad top and down 88 | td_padded_patches = [] 89 | for single_ssm in multiple_ssms: 90 | td_padded_patch_single = np.concatenate(( 91 | np.zeros([max(-lower, 0), ssm_size], dtype=np.float32), # Padding from top 92 | single_ssm[max(lower, 0):min(upper, ssm_size), :], 93 | np.zeros([max(upper - ssm_size, 0), ssm_size]) # Padding from bottom 94 | ), axis=0) # Row-wise 95 | td_padded_patches.append(td_padded_patch_single) 96 | 97 | 98 | # pad left and right 99 | td_lr_padded_patches = [] 100 | for td_padded_patch_single in td_padded_patches: 101 | td_lr_padded_patch_single = np.concatenate(( 102 | np.zeros([dim_y, int(math.floor(pad_lr_size))]), # Padding from left 103 | td_padded_patch_single, 104 | np.zeros([dim_y, int(math.ceil(pad_lr_size))]) # Padding from right 105 | ), axis=1) # Column-wise 106 | td_lr_padded_patches.append(td_lr_padded_patch_single) 107 | 108 | 109 | # stack SSMs on top of each other 110 | td_lr_padded_patch_layered = np.stack(td_lr_padded_patches, axis=-1) 111 | 112 | tensor[line] = td_lr_padded_patch_layered 113 | return tensor 114 | 115 | 116 | 117 | def remove_main_diagonal(ssm: np.ndarray): 118 | assert ssm.shape[0] == ssm.shape[1] 119 | ssm_size = ssm.shape[0] 120 | 121 | ssm_nodiag = np.empty([ssm_size, ssm_size - 1]) 122 | for line in range(ssm_size): 123 | line_nodiag = np.concatenate((ssm[line, 0:line], ssm[line, line+1:ssm_size]), axis=0) 124 | ssm_nodiag[line] = line_nodiag 125 | return ssm_nodiag 126 | -------------------------------------------------------------------------------- /cnn/no_padding_1conv.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | from cnn.nn import NN 4 | 5 | 6 | class NoPadding1Conv(NN): 7 | def __init__(self, window_size, ssm_size, added_features_size, channels): 8 | super().__init__() 9 | 10 | self.g_dprob = None 11 | self.g_results = None 12 | 13 | self.window_size = window_size 14 | self.ssm_size = ssm_size 15 | self.added_features_size = added_features_size 16 | self.channels = channels 17 | 18 | self.define(window_size, ssm_size, added_features_size, channels) 19 | 20 | def define(self, window_size, ssm_size, added_features_size, channels): 21 | # Input of size: 22 | # batch_size x window_size x max_ssm_size 23 | # Labels of size: 24 | # batch_size 25 | # Note that we do not fix the first dimension to allow flexible batch_size for evaluation / leftover samples 26 | with tf.name_scope('input'): 27 | self.g_in = tf.placeholder(tf.float32, shape=[None, 2*window_size, ssm_size, channels], name="input") 28 | self.g_labels = tf.placeholder(tf.int32, shape=[None], name="labels") 29 | self.g_dprob = tf.placeholder(tf.float32, name="dropout_prob") 30 | self.g_added_features = tf.placeholder(tf.float32, shape=[None, added_features_size], name="additional_features") 31 | 32 | # Reshape to use within a convolutional neural net. 33 | # contrary to mnist example, it just adds the last dimension whichs is the amount of channels in the image, 34 | # in our case its only one, if we will add more features for each line – they will go there 35 | with tf.name_scope('reshape'): 36 | # x_image = tf.expand_dims(self.g_in, -1) 37 | # no reshaping necessary as incoming tensor has number of channels as lowest rank 38 | x_image = self.g_in 39 | 40 | # First convolutional layer - 2d convolutions with windows always capturing the borders 41 | with tf.name_scope('conv1'): 42 | features_conv1 = 128 43 | W_conv1 = self.weight_variable([window_size+1, window_size+1, channels, features_conv1]) 44 | b_conv1 = self.bias_variable([features_conv1]) 45 | h_conv1 = tf.nn.conv2d(x_image, W_conv1, strides=[1, 1, 1, 1], padding='VALID') 46 | h_conv1 = tf.nn.relu(h_conv1 + b_conv1) 47 | 48 | # Pooling layer - downsamples by window_size. 49 | with tf.name_scope('pool1'): 50 | h_pool1 = tf.nn.max_pool(h_conv1, ksize=[1, window_size, window_size, 1], 51 | strides=[1, window_size, window_size, 1], padding='VALID') 52 | 53 | # Dropout - controls the complexity of the model, prevents co-adaptation of features 54 | with tf.name_scope('conv1-dropout'): 55 | h_pool1_drop = tf.nn.dropout(h_pool1, 1.0-(1.0-self.g_dprob)/2) 56 | 57 | # Second convolutional layer - performs horizontal convolutions 58 | with tf.name_scope('conv2'): 59 | features_conv2 = 128 60 | W_conv2 = self.weight_variable([1, window_size, features_conv1, features_conv2]) 61 | b_conv2 = self.bias_variable([features_conv2]) 62 | h_conv2 = tf.nn.conv2d(h_pool1_drop, W_conv2, strides=[1, 1, 1, 1], padding='VALID') 63 | h_conv2 = tf.nn.relu(h_conv2 + b_conv2) 64 | 65 | # Pooling layer - downsamples to a pixel. 66 | with tf.name_scope('pool2'): 67 | pool_size = int(ssm_size / window_size) - window_size 68 | h_pool2 = tf.nn.max_pool(h_conv2, ksize=[1, 1, pool_size, 1], 69 | strides=[1, 1, pool_size, 1], padding='VALID') 70 | 71 | # We have to either fix the ssm_size or do an average here 72 | fc_input_size = features_conv2 + added_features_size 73 | fc_size = 512 74 | print('image_features.shape:', tf.reshape(h_pool2, [-1, features_conv2]).shape) 75 | print('added_features.shape:', self.g_added_features.shape) 76 | fc_input = tf.concat( 77 | ( 78 | tf.reshape(h_pool2, [-1, features_conv2]), 79 | self.g_added_features 80 | ), 81 | axis = 1 82 | ) 83 | print('combined.shape:', fc_input.shape) 84 | for fc_id in range(3): 85 | with tf.name_scope('fc-%d' % fc_id): 86 | W_fc = self.weight_variable([fc_input_size, fc_size]) 87 | b_fc = self.bias_variable([fc_size]) 88 | 89 | h_fc = tf.nn.tanh(tf.matmul(fc_input, W_fc) + b_fc) 90 | fc_input = tf.nn.dropout(h_fc, self.g_dprob) 91 | fc_input_size = fc_size 92 | 93 | # Map the features to 2 classes 94 | with tf.name_scope('fc-softmax'): 95 | W_fcs = self.weight_variable([fc_size, 2]) 96 | b_fcs = self.bias_variable([2]) 97 | 98 | self.g_out = tf.matmul(fc_input, W_fcs) + b_fcs 99 | 100 | 101 | # Regularization 102 | weights = tf.trainable_variables() 103 | l2_regularizer = tf.contrib.layers.l2_regularizer( 104 | scale=0.001, scope=None 105 | ) 106 | l2_reg = tf.contrib.layers.apply_regularization(l2_regularizer, weights) 107 | 108 | # Loss 109 | with tf.name_scope('loss'): 110 | losses = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.g_labels, logits=self.g_out) 111 | self.g_loss = tf.reduce_mean(losses) + l2_reg 112 | 113 | # Evaluation 114 | with tf.name_scope('evaluation'): 115 | self.g_results = tf.argmax(self.g_out, axis=1, output_type=tf.int32) 116 | -------------------------------------------------------------------------------- /baseline_experiment.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import ssmfeatures 3 | import learning_tools 4 | from sklearn.linear_model import LogisticRegression 5 | from sklearn.metrics import precision_score 6 | from sklearn.metrics import recall_score 7 | from sklearn.metrics import f1_score 8 | 9 | 10 | def line_feat_matrix_labelled(some_songs, ssm_lookup): 11 | #read first entry to find out the dimensionality of feature vector 12 | for song in some_songs.itertuples(): 13 | line_feature_matrix = np.empty([0, song_line_features_labels_id(song, ssm_lookup).shape[1]], dtype=np.float64) 14 | break 15 | for song in some_songs.itertuples(): 16 | line_feature_matrix = np.append(line_feature_matrix, song_line_features_labels_id(song, ssm_lookup), axis=0) 17 | return line_feature_matrix 18 | 19 | 20 | 21 | 22 | def song_line_features_labels_id(song, ssm_lookup): 23 | #declared count of feats here to that appending arrays can work below 24 | #hope its possible without this ugly hack? 25 | #1 = number of SSMs per song (only string encoding = 1) 26 | #4 = number of Watanabe features per threshold 27 | #9 number of thresholds 28 | #3 features without thresholds, cf. line 53 29 | feat_count = 1 * (4 * 9 + 3) 30 | song_line_feature_matrix = np.empty([0, feat_count], dtype=np.float64) 31 | 32 | #song is a tuple from df.itertuples() 33 | song_id = song[0] 34 | ssm_lines_string = ssm_lookup.loc[song_id].ssm 35 | #ssm_lines_postag = sppm.loc[song_id].ssm 36 | 37 | #compute features for whole song 38 | feat0_for_threshold, feat1_for_threshold,\ 39 | feat2_for_threshold, feat3_for_threshold,\ 40 | frpf3, frpf4b, frpf4e = ssmfeatures.ssm_feats_thresholds_watanabe(ssm_lines_string) 41 | 42 | #feat4_for_threshold, feat5_for_threshold,\ 43 | #feat6_for_threshold, feat7_for_threshold = ssm_feats_thresholds_watanabe(ssm_lines_postag) 44 | 45 | line_count = len(ssm_lines_string) 46 | for line_index in range(line_count): 47 | feats_of_line = [] 48 | #compute features for several thresholds 0.1, ..., 0.9 (cf. Watanabe paper) 49 | for lam in [0.1 * factor for factor in range(1, 10)]: 50 | feats_of_line.append(feat0_for_threshold[lam].get(line_index, 0)) 51 | feats_of_line.append(feat1_for_threshold[lam].get(line_index, 0)) 52 | feats_of_line.append(feat2_for_threshold[lam].get(line_index, 0)) 53 | feats_of_line.append(feat3_for_threshold[lam].get(line_index, 0)) 54 | #feats_of_line.append(feat4_for_threshold[lam].get(line_index, 0)) 55 | #feats_of_line.append(feat5_for_threshold[lam].get(line_index, 0)) 56 | #feats_of_line.append(feat6_for_threshold[lam].get(line_index, 0)) 57 | #feats_of_line.append(feat7_for_threshold[lam].get(line_index, 0)) 58 | ##add non-thresholded features here 59 | feats_of_line.append(frpf3.get(line_index, 0)) 60 | feats_of_line.append(frpf4b.get(line_index, 0)) 61 | feats_of_line.append(frpf4e.get(line_index, 0)) 62 | 63 | song_line_feature_matrix = np.append(song_line_feature_matrix, np.array([feats_of_line]), axis=0) 64 | 65 | #concatenate features, labels and id 66 | #segment_ending_indices = set(rd.segment_borders(song, 'mldb')) 67 | #instead of computing these indices, load it from mldb_seg5p_segment_borders.hdf 68 | 69 | song_line_labels = np.array([1 if line_index in segment_ending_indices else 0 for line_index in range(line_count)]).reshape(-1,1) 70 | 71 | song_line_id = np.array([song_id for line_index in range(line_count)]).reshape(-1,1) 72 | 73 | song_line_feature_matrix = np.concatenate((song_line_feature_matrix, song_line_labels, song_line_id), axis=1) 74 | return song_line_feature_matrix 75 | 76 | 77 | 78 | 79 | 80 | def baseline_experiment(): 81 | import time 82 | start_time = time.time() 83 | 84 | feat_count = 1 * (4 * 9 + 3) 85 | songs = load_corpus() 86 | sssm = load_ssm() 87 | 88 | songs_train, songs_val = learning_tools.train_val_test_split(songs, subset_factor=0.1) 89 | print('Training set size :', len(songs_train)) 90 | print('Validation set size:', len(songs_val)) 91 | 92 | print() 93 | print('Computing feature vectors...') 94 | XY_train = line_feat_matrix_labelled(songs_train, sssm) 95 | XY_val = line_feat_matrix_labelled(songs_val, sssm) 96 | 97 | print('Got', XY_train.shape[0], 'training instances and', XY_train.shape[1] - 2, 'features') 98 | print() 99 | print('Scaling each feature for itself...') 100 | XY_train = learning_tools.scale_features(XY_train) 101 | XY_val = learning_tools.scale_features(XY_val) 102 | 103 | 104 | print('Separating features X, labels Y, and groups G (the group of a line is the song it belongs to)...') 105 | X_train = XY_train[:, :feat_count] 106 | Y_train = XY_train[:, feat_count].ravel() 107 | G_train = XY_train[:, feat_count + 1].ravel() 108 | 109 | X_val = XY_val[:, :feat_count] 110 | Y_val = XY_val[:, feat_count].ravel() 111 | G_val = XY_val[:, feat_count + 1].ravel() 112 | 113 | print('Fitting model on training set...') 114 | #Logit as they use in the paper... 115 | model = LogisticRegression(class_weight='balanced') 116 | 117 | model.fit(X_train, Y_train) 118 | 119 | print('Predicting labels on validation set...') 120 | model_prediction = model.predict(X_val) 121 | 122 | print() 123 | print(time.time() - start_time) 124 | print('Precision:', precision_score(y_true=Y_val, y_pred=model_prediction)) 125 | print('Recall :', recall_score(y_true=Y_val, y_pred=model_prediction)) 126 | print('F-Score :', f1_score(y_true=Y_val, y_pred=model_prediction)) 127 | -------------------------------------------------------------------------------- /ssmfeatures.py: -------------------------------------------------------------------------------- 1 | ########################################################### 2 | ######RPF Feature Vectors from Watanabe et al. (2016)###### 3 | ########################################################### 4 | #############These functions extract features############## 5 | ##############from a self-similarity matrix################ 6 | ########################################################### 7 | 8 | from functools import reduce 9 | import numpy as np 10 | 11 | 12 | def ssm_feats_thresholds_watanabe(ssm_lines): 13 | feat0_for_threshold = {} 14 | feat1_for_threshold = {} 15 | feat2_for_threshold = {} 16 | feat3_for_threshold = {} 17 | for lam in [0.1 * factor for factor in range(1, 10)]: 18 | feat0_for_threshold[lam] = feat_rpf1_counts(ssm_lines, lam=lam) 19 | feat1_for_threshold[lam] = feat_rpf2_counts(ssm_lines, lam=lam) 20 | feat2_for_threshold[lam] = feat_rpf1_value_differences(ssm_lines, lam=lam) 21 | feat3_for_threshold[lam] = feat_rpf2_value_differences(ssm_lines, lam=lam) 22 | 23 | frpf3 = feat_rpf3(ssm_lines) 24 | frpf4b = feat_rpf4b(ssm_lines) 25 | frpf4e = feat_rpf4e(ssm_lines) 26 | return feat0_for_threshold, feat1_for_threshold,\ 27 | feat2_for_threshold, feat3_for_threshold,\ 28 | frpf3, frpf4b, frpf4e 29 | 30 | 31 | #return a dictionary after updating it 32 | def update_and_return(d, new_key, new_value): 33 | d.update({new_key: new_value}) 34 | return d 35 | 36 | def capped_row_count(matrix): 37 | return matrix.shape[0] - 1 38 | 39 | def capped_col_count(matrix): 40 | return matrix.shape[1] - 1 41 | 42 | 43 | def state_property(ssm, row): 44 | return ssm[row, row + 1] 45 | 46 | def border_start_property(ssm, row): 47 | return ssm[row, 0] 48 | 49 | def border_end_property(ssm, row): 50 | return ssm[row, capped_col_count(ssm)] 51 | 52 | def sequence_indicators(ssm, row, lam, diagonal_property): 53 | seq_indicators = set() 54 | for col in range(capped_col_count(ssm)): 55 | if row == col: 56 | continue 57 | if diagonal_property(ssm, row, col, lam): 58 | seq_indicators.add(col) 59 | return seq_indicators 60 | 61 | def sequence_indicator_value_differences(ssm, row, lam, diagonal_property): 62 | seq_indicators_diff = set() 63 | for col in range(capped_col_count(ssm)): 64 | if row == col: 65 | continue 66 | if diagonal_property(ssm, row, col, lam): 67 | value_difference = abs(ssm[row, col] - ssm[row + 1, col + 1]) 68 | if value_difference != 0: 69 | seq_indicators_diff.add(value_difference) 70 | return seq_indicators_diff 71 | 72 | 73 | def rpf(ssm, lam, indicator): 74 | #return {row : indicator(ssm, row, lam) for row in range(ssm.shape[0] - 1)} 75 | indicators = dict() 76 | for row in range(capped_row_count(ssm)): 77 | indicators_in_row = indicator(ssm, row, lam) 78 | if not indicators_in_row: 79 | continue 80 | indicators[row] = indicators_in_row 81 | return indicators 82 | 83 | #f_lambda^RPF# 84 | def feat_rpf_counts(ssm, lam, rpf_function): 85 | rpf_entries = rpf_function(ssm, lam) 86 | return reduce(lambda x,key: update_and_return(x, key, len(rpf_entries.get(key))), rpf_entries, {}) 87 | 88 | #f_lambda^RPFv 89 | def feat_rpf_value_differences(ssm, lam, rpf_function): 90 | rpf_entries = rpf_function(ssm, lam) 91 | return reduce(lambda x,key: update_and_return(x, key, np.average(list(rpf_entries.get(key)))), rpf_entries, {}) 92 | 93 | ################################# 94 | ################################# 95 | 96 | 97 | #######RPF1####### 98 | #sequence edge indicators, called g_lambda in the paper 99 | rpf1_property = lambda ssm,row,col,lam: (ssm[row, col] - lam) * (ssm[row + 1, col + 1] - lam) < 0 100 | 101 | def sequence_edge_indicators(ssm, row, lam): 102 | return sequence_indicators(ssm, row, lam, rpf1_property) 103 | 104 | def sequence_edge_value_differences(ssm, row, lam): 105 | return sequence_indicator_value_differences(ssm, row, lam, rpf1_property) 106 | 107 | def rpf1_counts(ssm, lam): 108 | return rpf(ssm, lam, sequence_edge_indicators) 109 | 110 | def rpf1_value_differences(ssm, lam): 111 | return rpf(ssm, lam, sequence_edge_value_differences) 112 | 113 | #f_lambda^RPF1# 114 | def feat_rpf1_counts(ssm, lam): 115 | return feat_rpf_counts(ssm, lam, rpf1_counts) 116 | 117 | #f_lambda^RPF1v 118 | def feat_rpf1_value_differences(ssm, lam): 119 | return feat_rpf_value_differences(ssm, lam, rpf1_value_differences) 120 | 121 | 122 | 123 | #######RPF2####### 124 | #sequence body indicators, called c_lambda in the paper 125 | rpf2_property = lambda ssm,row,col,lam: ssm[row, col] - lam >= 0 and ssm[row + 1, col + 1] - lam >= 0 126 | 127 | def sequence_body_indicators(ssm, row, lam): 128 | return sequence_indicators(ssm, row, lam, rpf2_property) 129 | 130 | def sequence_body_value_differences(ssm, row, lam): 131 | return sequence_indicator_value_differences(ssm, row, lam, rpf2_property) 132 | 133 | def rpf2_counts(ssm, lam): 134 | return rpf(ssm, lam, sequence_body_indicators) 135 | 136 | def rpf2_value_differences(ssm, lam): 137 | return rpf(ssm, lam, sequence_body_value_differences) 138 | 139 | #f_lambda^RPF2# 140 | def feat_rpf2_counts(ssm, lam): 141 | return feat_rpf_counts(ssm, lam, rpf2_counts) 142 | 143 | #f_lambda^RPF2v 144 | def feat_rpf2_value_differences(ssm, lam): 145 | return feat_rpf_value_differences(ssm, lam, rpf2_value_differences) 146 | 147 | 148 | #######RPF3####### 149 | ####Similarity with subsequent line#### 150 | def feat_rpf3(ssm): 151 | block_sim = dict() 152 | for row in range(capped_row_count(ssm)): 153 | block_sim[row] = state_property(ssm, row) 154 | return block_sim 155 | 156 | ####RPF4##### 157 | ###Similarities with beginning line (4b) or ending line (4e)### 158 | def feat_rpf4b(ssm): 159 | f4b = dict() 160 | for row in range(capped_row_count(ssm)): 161 | f4b[row] = border_start_property(ssm, row) 162 | return f4b 163 | 164 | def feat_rpf4e(ssm): 165 | f4e = dict() 166 | for row in range(capped_row_count(ssm)): 167 | f4e[row] = border_end_property(ssm, row) 168 | return f4e 169 | -------------------------------------------------------------------------------- /cnn/joint_rnn.py: -------------------------------------------------------------------------------- 1 | from cnn.no_padding_1conv import NoPadding1Conv 2 | 3 | import tensorflow as tf 4 | 5 | 6 | class JointRNN(NoPadding1Conv): 7 | def __init__(self, window_size, ssm_size, added_features_size, channels): 8 | super().__init__(window_size, ssm_size, added_features_size, channels) 9 | 10 | def define(self, window_size, ssm_size, added_features_size, channels): 11 | # Input of size: 12 | # batch_size x max_ssm_size x window_size x max_ssm_size 13 | # Labels of size: 14 | # batch_size x max_ssm_size 15 | # Note that we do not fix the first dimension to allow flexible batch_size for evaluation / leftover samples 16 | with tf.name_scope('input'): 17 | self.g_in = tf.placeholder(tf.float32, shape=[None, ssm_size, 2 * window_size, ssm_size, channels], 18 | name="input") 19 | self.g_lengths = tf.placeholder(tf.int32, shape=[None], name="lengths") 20 | self.g_labels = tf.placeholder(tf.int32, shape=[None, ssm_size], name="labels") 21 | self.g_dprob = tf.placeholder(tf.float32, name="dropout_prob") 22 | 23 | # Reshape to use within a convolutional neural net. 24 | # contrary to mnist example, it just adds the last dimension whichs is the amount of channels in the image, 25 | # in our case its only one, if we will add more features for each line – they will go there 26 | with tf.name_scope('reshape'): 27 | # x_image = tf.expand_dims(self.g_in, -1) 28 | # no reshaping necessary as incoming tensor has number of channels as lowest rank 29 | x_image = tf.reshape(self.g_in, [-1, 2 * window_size, ssm_size, channels], name="conv-reshaping") 30 | # x_image = self.g_in 31 | 32 | # First convolutional layer - 2d convolutions with windows always capturing the borders 33 | with tf.name_scope('conv1'): 34 | features_conv1 = 128 35 | W_conv1 = self.weight_variable([window_size + 1, window_size + 1, channels, features_conv1]) 36 | b_conv1 = self.bias_variable([features_conv1]) 37 | h_conv1 = tf.nn.conv2d(x_image, W_conv1, strides=[1, 1, 1, 1], padding='VALID') 38 | h_conv1 = tf.nn.tanh(h_conv1 + b_conv1) 39 | 40 | # Pooling layer - downsamples by window_size. 41 | with tf.name_scope('pool1'): 42 | h_pool1 = tf.nn.max_pool(h_conv1, ksize=[1, window_size, window_size, 1], 43 | strides=[1, window_size, window_size, 1], padding='VALID') 44 | 45 | # Dropout - controls the complexity of the model, prevents co-adaptation of features 46 | with tf.name_scope('conv1-dropout'): 47 | h_pool1_drop = tf.nn.dropout(h_pool1, self.g_dprob) 48 | 49 | # Second convolutional layer - performs horizontal convolutions 50 | with tf.name_scope('conv2'): 51 | features_conv2 = 128 52 | W_conv2 = self.weight_variable([1, window_size, features_conv1, features_conv2]) 53 | b_conv2 = self.bias_variable([features_conv2]) 54 | h_conv2 = tf.nn.conv2d(h_pool1_drop, W_conv2, strides=[1, 1, 1, 1], padding='VALID') 55 | h_conv2 = tf.nn.tanh(h_conv2 + b_conv2) 56 | 57 | # Pooling layer - downsamples to a pixel. 58 | with tf.name_scope('pool2'): 59 | pool_size = int(ssm_size / window_size) - window_size 60 | h_pool2 = tf.nn.max_pool(h_conv2, ksize=[1, 1, pool_size, 1], 61 | strides=[1, 1, pool_size, 1], padding='VALID') 62 | 63 | with tf.name_scope('conv2-dropout'): 64 | h_pool2_drop = tf.nn.dropout(h_pool2, self.g_dprob) 65 | 66 | # We have to either fix the ssm_size or do an average here 67 | fc_input_size = features_conv2 68 | fc_size = 512 69 | fc_input = tf.reshape(h_pool2_drop, [-1, ssm_size, features_conv2]) 70 | # for fc_id in range(3): 71 | # with tf.name_scope('fc-%d' % fc_id): 72 | # W_fc = self.weight_variable([fc_input_size, fc_size]) 73 | # b_fc = self.bias_variable([fc_size]) 74 | # 75 | # h_fc = tf.nn.tanh(tf.matmul(fc_input, W_fc) + b_fc) 76 | # fc_input = tf.nn.dropout(h_fc, self.g_dprob) 77 | # fc_input_size = fc_size 78 | 79 | # Defining cell and initialising RSDAE 80 | with tf.variable_scope("forward-cell", initializer=tf.orthogonal_initializer()): 81 | lstm_size = 100 82 | cell = tf.nn.rnn_cell.BasicLSTMCell(lstm_size) 83 | cell = tf.contrib.rnn.DropoutWrapper( 84 | cell, output_keep_prob=self.g_dprob) 85 | self.g_forward_cell = cell 86 | 87 | outputs, _ = tf.nn.dynamic_rnn( 88 | self.g_forward_cell, inputs=fc_input, 89 | sequence_length=self.g_lengths, dtype=tf.float32) 90 | 91 | softmax_w = self.weight_variable([lstm_size, 2]) 92 | softmax_b = self.bias_variable([2]) 93 | logits = tf.nn.xw_plus_b(tf.reshape(outputs, [-1, lstm_size]), softmax_w, softmax_b) 94 | # Reshape logits to be a 3-D tensor for sequence loss 95 | self.g_out = tf.reshape(logits, [-1, ssm_size, 2], name="out") 96 | 97 | # Use the contrib sequence loss and average over the batches 98 | losses_mask = tf.sequence_mask( 99 | lengths=self.g_lengths, maxlen=ssm_size, 100 | dtype=tf.float32) 101 | 102 | def tile_column_wise(tensor, target): 103 | with tf.name_scope("tile_column_wise"): 104 | return tf.tile(tf.reshape(tensor, [-1, 1]), [1, target]) 105 | 106 | float_labels = tf.to_float(self.g_labels) 107 | losses_mask += (0.5 * float_labels * tile_column_wise(tf.to_float(self.g_lengths), ssm_size)) \ 108 | / tile_column_wise(tf.reduce_sum(float_labels, axis=1), ssm_size) 109 | loss = tf.contrib.seq2seq.sequence_loss( 110 | self.g_out, 111 | self.g_labels, 112 | losses_mask, 113 | average_across_timesteps=True, 114 | average_across_batch=True) 115 | 116 | # Regularization 117 | weights = tf.trainable_variables() 118 | l2_regularizer = tf.contrib.layers.l2_regularizer( 119 | scale=0.001, scope=None 120 | ) 121 | l2_reg = tf.contrib.layers.apply_regularization(l2_regularizer, weights) 122 | 123 | # Loss 124 | with tf.name_scope('loss'): 125 | self.g_loss = loss + l2_reg 126 | -------------------------------------------------------------------------------- /deep_mnist.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """A deep MNIST classifier using convolutional layers. 17 | 18 | See extensive documentation at 19 | https://www.tensorflow.org/get_started/mnist/pros 20 | """ 21 | # Disable linter warnings to maintain consistency with tutorial. 22 | # pylint: disable=invalid-name 23 | # pylint: disable=g-bad-import-order 24 | 25 | from __future__ import absolute_import 26 | from __future__ import division 27 | from __future__ import print_function 28 | 29 | import argparse 30 | import sys 31 | import tempfile 32 | 33 | from tensorflow.examples.tutorials.mnist import input_data 34 | 35 | import tensorflow as tf 36 | 37 | FLAGS = None 38 | 39 | 40 | def deepnn(x): 41 | """deepnn builds the graph for a deep net for classifying digits. 42 | 43 | Args: 44 | x: an input tensor with the dimensions (N_examples, 784), where 784 is the 45 | number of pixels in a standard MNIST image. 46 | 47 | Returns: 48 | A tuple (y, keep_prob). y is a tensor of shape (N_examples, 10), with values 49 | equal to the logits of classifying the digit into one of 10 classes (the 50 | digits 0-9). keep_prob is a scalar placeholder for the probability of 51 | dropout. 52 | """ 53 | # Reshape to use within a convolutional neural net. 54 | # Last dimension is for "features" - there is only one here, since images are 55 | # grayscale -- it would be 3 for an RGB image, 4 for RGBA, etc. 56 | with tf.name_scope('reshape'): 57 | x_image = tf.reshape(x, [-1, 28, 28, 1]) 58 | 59 | # First convolutional layer - maps one grayscale image to 32 feature maps. 60 | with tf.name_scope('conv1'): 61 | W_conv1 = weight_variable([5, 5, 1, 32]) 62 | b_conv1 = bias_variable([32]) 63 | h_conv1 = tf.nn.relu(conv2d(x_image, W_conv1) + b_conv1) 64 | 65 | # Pooling layer - downsamples by 2X. 66 | with tf.name_scope('pool1'): 67 | h_pool1 = max_pool_2x2(h_conv1) 68 | 69 | # Second convolutional layer -- maps 32 feature maps to 64. 70 | with tf.name_scope('conv2'): 71 | W_conv2 = weight_variable([5, 5, 32, 64]) 72 | b_conv2 = bias_variable([64]) 73 | h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2) 74 | 75 | # Second pooling layer. 76 | with tf.name_scope('pool2'): 77 | h_pool2 = max_pool_2x2(h_conv2) 78 | 79 | # Fully connected layer 1 -- after 2 round of downsampling, our 28x28 image 80 | # is down to 7x7x64 feature maps -- maps this to 1024 features. 81 | with tf.name_scope('fc1'): 82 | W_fc1 = weight_variable([7 * 7 * 64, 1024]) 83 | b_fc1 = bias_variable([1024]) 84 | 85 | h_pool2_flat = tf.reshape(h_pool2, [-1, 7*7*64]) 86 | h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1) 87 | 88 | # Dropout - controls the complexity of the model, prevents co-adaptation of 89 | # features. 90 | with tf.name_scope('dropout'): 91 | keep_prob = tf.placeholder(tf.float32) 92 | h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob) 93 | 94 | # Map the 1024 features to 10 classes, one for each digit 95 | with tf.name_scope('fc2'): 96 | W_fc2 = weight_variable([1024, 10]) 97 | b_fc2 = bias_variable([10]) 98 | 99 | y_conv = tf.matmul(h_fc1_drop, W_fc2) + b_fc2 100 | return y_conv, keep_prob 101 | 102 | 103 | def conv2d(x, W): 104 | """conv2d returns a 2d convolution layer with full stride.""" 105 | return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME') 106 | 107 | 108 | def max_pool_2x2(x): 109 | """max_pool_2x2 downsamples a feature map by 2X.""" 110 | return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], 111 | strides=[1, 2, 2, 1], padding='SAME') 112 | 113 | 114 | def weight_variable(shape): 115 | """weight_variable generates a weight variable of a given shape.""" 116 | initial = tf.truncated_normal(shape, stddev=0.1) 117 | return tf.Variable(initial) 118 | 119 | 120 | def bias_variable(shape): 121 | """bias_variable generates a bias variable of a given shape.""" 122 | initial = tf.constant(0.1, shape=shape) 123 | return tf.Variable(initial) 124 | 125 | 126 | def main(_): 127 | # Import data 128 | mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True) 129 | 130 | # Create the model 131 | x = tf.placeholder(tf.float32, [None, 784]) 132 | 133 | # Define loss and optimizer 134 | y_ = tf.placeholder(tf.float32, [None, 10]) 135 | 136 | # Build the graph for the deep net 137 | y_conv, keep_prob = deepnn(x) 138 | 139 | with tf.name_scope('loss'): 140 | cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=y_, 141 | logits=y_conv) 142 | cross_entropy = tf.reduce_mean(cross_entropy) 143 | 144 | with tf.name_scope('adam_optimizer'): 145 | train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy) 146 | 147 | with tf.name_scope('accuracy'): 148 | correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1)) 149 | correct_prediction = tf.cast(correct_prediction, tf.float32) 150 | accuracy = tf.reduce_mean(correct_prediction) 151 | tf.summary.scalar('accuracy', accuracy) 152 | 153 | graph_location = tempfile.mkdtemp() 154 | print('Saving graph to: %s' % graph_location) 155 | merged = tf.summary.merge_all() 156 | train_writer = tf.summary.FileWriter(graph_location) 157 | train_writer.add_graph(tf.get_default_graph()) 158 | 159 | with tf.Session() as sess: 160 | sess.run(tf.global_variables_initializer()) 161 | for i in range(20000): 162 | batch = mnist.train.next_batch(50) 163 | if i % 100 == 0: 164 | train_accuracy = accuracy.eval(feed_dict={ 165 | x: batch[0], y_: batch[1], keep_prob: 1.0}) 166 | print('step %d, training accuracy %g' % (i, train_accuracy)) 167 | summary, _ = sess.run([merged, train_step], feed_dict={x: batch[0], y_: batch[1], keep_prob: 0.5}) 168 | train_step.run(feed_dict={x: batch[0], y_: batch[1], keep_prob: 0.5}) 169 | train_writer.add_summary(summary, i) 170 | 171 | print('test accuracy %g' % accuracy.eval(feed_dict={ 172 | x: mnist.test.images, y_: mnist.test.labels, keep_prob: 1.0})) 173 | 174 | if __name__ == '__main__': 175 | parser = argparse.ArgumentParser() 176 | parser.add_argument('--data_dir', type=str, 177 | default='/tmp/tensorflow/mnist/input_data', 178 | help='Directory for storing input data') 179 | FLAGS, unparsed = parser.parse_known_args() 180 | tf.app.run(main=main, argv=[sys.argv[0]] + unparsed) 181 | -------------------------------------------------------------------------------- /util/ssm.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import seaborn as sns 3 | import numpy as np 4 | import os 5 | 6 | 7 | def self_similarity_matrix(items, metric): 8 | return np.array([[metric(x, y) for x in items] for y in items]) 9 | 10 | 11 | def draw_ssm_side_by_side(ssm_blocks, ssm_lines, ssm_tokens, representation, 12 | song_name='', artist_name='', genre_of_song='undef', save_to_file=False): 13 | 14 | # Create a figure space matrix consisting of 3 columns and 1 row 15 | fig, ax = plt.subplots(figsize=(39, 13), ncols=3, nrows=1) 16 | 17 | left = 0.125 # the left side of the subplots of the figure 18 | right = 0.9 # the right side of the subplots of the figure 19 | bottom = 0.1 # the bottom of the subplots of the figure 20 | top = 0.9 # the top of the subplots of the figure 21 | wspace = 0.05 # the amount of width reserved for blank space between subplots 22 | hspace = 0.1 # the amount of height reserved for white space between subplots 23 | 24 | # This function actually adjusts the sub plots using the above paramters 25 | plt.subplots_adjust(left=left, bottom=bottom, right=right, top=top, wspace=wspace, hspace=hspace) 26 | 27 | # Title of the whole plot 28 | plt.suptitle("%s - %s (%s)" % (artist_name, song_name, genre_of_song), fontsize=40) 29 | 30 | # Titles of the subplots 31 | y_title_margin = 1.01 32 | sub_title_size = 22 33 | ax[0].set_title("Blocks [" + representation + ']', y=y_title_margin, fontsize=sub_title_size) 34 | ax[1].set_title("Lines [" + representation + ']', y=y_title_margin, fontsize=sub_title_size) 35 | ax[2].set_title("Words [" + representation + ']', y=y_title_margin, fontsize=sub_title_size) 36 | 37 | # The color bar [left, bottom, width, height] 38 | cbar_ax = fig.add_axes([.905, 0.125, .01, 0.751]) 39 | 40 | # The actual subplots 41 | sns.heatmap(data=ssm_blocks, square=True, ax=ax[0], cbar=False) 42 | sns.heatmap(data=ssm_lines, square=True, ax=ax[1], cbar=False) 43 | sns.heatmap(data=ssm_tokens, square=True, ax=ax[2], cbar_ax=cbar_ax) 44 | 45 | # Whether to display the plot or save it to a file 46 | if not save_to_file: 47 | plt.show() 48 | else: 49 | directory = 'SSM/' # + ('undef' if not genre_of_song else genre_of_song) 50 | if not os.path.exists(directory): 51 | os.makedirs(directory) 52 | plt.savefig(directory + song['_id'] + '.png') 53 | plt.close('all') 54 | 55 | 56 | def draw_ssm_encodings_side_by_side(ssm_some_encoding, ssm_other_encoding, ssm_third_encoding, 57 | representation_some, representation_other, representation_third, 58 | song_name='', artist_name='', genre_of_song='undef', save_to_file=False): 59 | 60 | # Create a figure space matrix consisting of 2 columns and 1 row 61 | fig, ax = plt.subplots(figsize=(39, 13), ncols=3, nrows=1) 62 | 63 | left = 0.125 # the left side of the subplots of the figure 64 | right = 0.9 # the right side of the subplots of the figure 65 | bottom = 0.1 # the bottom of the subplots of the figure 66 | top = 0.9 # the top of the subplots of the figure 67 | wspace = 0.05 # the amount of width reserved for blank space between subplots 68 | hspace = 0.1 # the amount of height reserved for white space between subplots 69 | 70 | # This function actually adjusts the sub plots using the above paramters 71 | plt.subplots_adjust(left=left, bottom=bottom, right=right, top=top, wspace=wspace, hspace=hspace) 72 | 73 | # Title of the whole plot 74 | plt.suptitle("%s - %s (%s)" % (artist_name, song_name, genre_of_song), fontsize=40) 75 | 76 | # Titles of the subplots 77 | y_title_margin = 1.01 78 | sub_title_size = 22 79 | ax[0].set_title("Lines [" + representation_some + ']', y=y_title_margin, fontsize=sub_title_size) 80 | ax[1].set_title("Lines [" + representation_other + ']', y=y_title_margin, fontsize=sub_title_size) 81 | ax[2].set_title("Lines [" + representation_third + ']', y=y_title_margin, fontsize=sub_title_size) 82 | 83 | # The color bar [left, bottom, width, height] 84 | cbar_ax = fig.add_axes([.905, 0.125, .01, 0.751]) 85 | 86 | # The actual subplots 87 | sns.heatmap(data=ssm_some_encoding, square=True, ax=ax[0], cbar=False) 88 | sns.heatmap(data=ssm_other_encoding, square=True, ax=ax[1], cbar=False) 89 | sns.heatmap(data=ssm_third_encoding, square=True, ax=ax[2], cbar_ax=cbar_ax) 90 | 91 | # Whether to display the plot or save it to a file 92 | if not save_to_file: 93 | plt.show() 94 | else: 95 | directory = 'SSM/' # + ('undef' if not genre_of_song else genre_of_song) 96 | if not os.path.exists(directory): 97 | os.makedirs(directory) 98 | plt.savefig(directory + song['_id'] + '.png') 99 | plt.close('all') 100 | 101 | 102 | def draw_ssm_encodings_and_hierarchy(ssm_some_words, ssm_other_words, ssm_some_lines, ssm_other_lines, 103 | representation_some, representation_other, song_name='', artist_name='', 104 | genre_of_song='undef', save_to_file=False): 105 | 106 | # Create a figure space matrix consisting of 2 columns and 2 rows 107 | fig, ax = plt.subplots(figsize=(26, 26), ncols=2, nrows=2) 108 | 109 | left = 0.125 # the left side of the subplots of the figure 110 | right = 0.9 # the right side of the subplots of the figure 111 | bottom = 0.1 # the bottom of the subplots of the figure 112 | top = 0.9 # the top of the subplots of the figure 113 | wspace = 0.05 # the amount of width reserved for blank space between subplots 114 | hspace = 0.1 # the amount of height reserved for white space between subplots 115 | 116 | # This function actually adjusts the sub plots using the above paramters 117 | plt.subplots_adjust(left=left, bottom=bottom, right=right, top=top, wspace=wspace, hspace=hspace) 118 | 119 | # Title of the whole plot 120 | plt.suptitle("%s - %s (%s)" % (artist_name, song_name, genre_of_song), fontsize=40) 121 | 122 | # Titles of the subplots 123 | y_title_margin = 1.01 124 | sub_title_size = 22 125 | ax[0][0].set_title("Words [" + representation_some + ']', y=y_title_margin, fontsize=sub_title_size) 126 | ax[0][1].set_title("Words [" + representation_other + ']', y=y_title_margin, fontsize=sub_title_size) 127 | ax[1][0].set_title("Lines [" + representation_some + ']', y=y_title_margin, fontsize=sub_title_size) 128 | ax[1][1].set_title("Lines [" + representation_other + ']', y=y_title_margin, fontsize=sub_title_size) 129 | 130 | # The color bar [left, bottom, width, height] 131 | cbar_ax = fig.add_axes([.905, 0.125, .01, 0.751]) 132 | 133 | # The actual subplots 134 | sns.heatmap(data=ssm_some_words, square=True, ax=ax[0][0], cbar=False) 135 | sns.heatmap(data=ssm_other_words, square=True, ax=ax[0][1], cbar=False) 136 | sns.heatmap(data=ssm_some_lines, square=True, ax=ax[1][0], cbar=False) 137 | sns.heatmap(data=ssm_other_lines, square=True, ax=ax[1][1], cbar_ax=cbar_ax) 138 | 139 | # Whether to display the plot or save it to a file 140 | if not save_to_file: 141 | plt.show() 142 | else: 143 | directory = 'SSM/' # + ('undef' if not genre_of_song else genre_of_song) 144 | if not os.path.exists(directory): 145 | os.makedirs(directory) 146 | plt.savefig(directory + artist_name + ' - ' + song_name + '.png') 147 | plt.close('all') 148 | -------------------------------------------------------------------------------- /util/similarity.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from py_stringmatching.similarity_measure.levenshtein import Levenshtein 3 | from numpy.linalg import norm 4 | from functools import reduce 5 | import re 6 | from fastdtw import fastdtw 7 | import phonetics 8 | 9 | 10 | def revert_pitches_dementia(pitches): 11 | # revert ugly hack from before 12 | pitches = re.sub('(#){2,}', '#', pitches) 13 | if pitches[0] == '#': 14 | pitches = pitches[1:] 15 | if pitches[-1] == '#': 16 | pitches = pitches[:-1] 17 | return pitches 18 | 19 | 20 | def average_pitch_per_token(token_pitch, octave_invariant=False): 21 | pitches_without_braindamage = list(map(revert_pitches_dementia, token_pitch)) 22 | if octave_invariant: 23 | return list(map(lambda x: reduce(lambda x, elem: x + int(elem) % 8, x.split('#'), 0) / len(x.split('#')), 24 | pitches_without_braindamage)) 25 | else: 26 | return list(map(lambda x: reduce(lambda x, elem: x + int(elem), x.split('#'), 0) / len(x.split('#')), 27 | pitches_without_braindamage)) 28 | 29 | 30 | def string_similarity(some, other): 31 | return Levenshtein().get_sim_score(some, other) 32 | 33 | 34 | # hacky and slow 35 | def dtw_string_similarity(some, other): 36 | if not some or not other: 37 | return 0.0 38 | return 1 - dtw_normalized(list(map(hash, list(some))), list(map(hash, list(other))), 39 | metric=lambda x, y: 0 if x == y else 1) 40 | 41 | 42 | def dtw_normalized(x, y, metric=lambda x, y: abs(x - y)): 43 | return fastdtw(x, y, dist=metric)[0] / np.maximum(len(x), len(y)) 44 | 45 | 46 | def phonetic_similarity(some, other, use_equivalences=False): 47 | if some == other: 48 | return 1.0 49 | if not some or not other: 50 | return 0.0 51 | 52 | some_phonetics = phonetics.dmetaphone(some) 53 | other_phonetics = phonetics.dmetaphone(other) 54 | if some_phonetics == other_phonetics: 55 | return 1.0 56 | 57 | pair_wise_similarities = [] 58 | for some_phonetic in some_phonetics: 59 | if not some_phonetic: 60 | continue 61 | for other_phonetic in other_phonetics: 62 | if not other_phonetic: 63 | continue 64 | some_equiv = metaphone_representative(some_phonetic) if use_equivalences else some_phonetic 65 | other_equiv = metaphone_representative(other_phonetic) if use_equivalences else other_phonetic 66 | pair_wise_similarities.append(string_similarity(some_equiv, other_equiv)) 67 | return 0.0 if not pair_wise_similarities else max(pair_wise_similarities) 68 | 69 | 70 | # paths: list of t list -> list of t list 71 | # paths [[a,b]] = [[a], [b]] 72 | # paths [[a], [b,c], [d]] = [[a,b,d], [a,c,d]] 73 | def paths(xs): 74 | if not xs: 75 | return [] 76 | if len(xs) == 1: 77 | return [[xs_elem] for xs_elem in xs[0]] 78 | else: 79 | p = [] 80 | for prefix in paths([xs[0]]): 81 | for suffix in paths(xs[1:]): 82 | p.append(prefix + suffix) 83 | return p 84 | 85 | 86 | def string_collate(xs): 87 | return reduce(lambda x, elem: x + elem, xs, '') 88 | 89 | 90 | # given a lists of tokens, compute the lists' phonetics' similarity 91 | # compute the phonetics token-wise 92 | def phonetic_similarity_lists(some, other, use_equivalences=False): 93 | if some == other: 94 | return 1.0 95 | if not some or not other: 96 | return 0.0 97 | some_phonetics = list(map(phonetics.dmetaphone, some)) 98 | some_phonetics_non_empty = reduce(lambda x, elem: x + [[alt for alt in elem if alt]], some_phonetics, []) 99 | some_phonetics_paths = paths(some_phonetics_non_empty) 100 | some_phonetics_paths_collated = list(map(string_collate, some_phonetics_paths)) 101 | # print('ps:', some_phonetics_paths_collated) 102 | 103 | other_phonetics = list(map(phonetics.dmetaphone, other)) 104 | other_phonetics_non_empty = reduce(lambda x, elem: x + [[alt for alt in elem if alt]], other_phonetics, []) 105 | other_phonetics_paths = paths(other_phonetics_non_empty) 106 | other_phonetics_paths_collated = list(map(string_collate, other_phonetics_paths)) 107 | # print('po:', other_phonetics_paths_collated) 108 | 109 | if some_phonetics_paths_collated == other_phonetics_paths_collated: 110 | return 1.0 111 | 112 | pair_wise_similarities = [] 113 | for some_phonetic in some_phonetics_paths_collated: 114 | if not some_phonetic: 115 | continue 116 | for other_phonetic in other_phonetics_paths_collated: 117 | if not other_phonetic: 118 | continue 119 | some_equiv = metaphone_representative(some_phonetic) if use_equivalences else some_phonetic 120 | other_equiv = metaphone_representative(other_phonetic) if use_equivalences else other_phonetic 121 | pair_wise_similarities.append(string_similarity(some_equiv, other_equiv)) 122 | return 0.0 if not pair_wise_similarities else max(pair_wise_similarities) 123 | 124 | 125 | # put all plosives, nasals, fricatives, approximants into each one equivalence class 126 | def metaphone_representative(phonetic): 127 | phonetic = re.sub('[PTK]', 'P', phonetic) 128 | phonetic = re.sub('[MN]', 'N', phonetic) 129 | phonetic = re.sub('[XRSFH]', 'F', phonetic) 130 | phonetic = re.sub('[LJ]', 'L', phonetic) 131 | return phonetic 132 | 133 | 134 | # faster way to compute cosine similarity (skips the input validation) 135 | def cosine_similarity(u, v): 136 | return np.dot(u, v) / (norm(u) * norm(v)) 137 | 138 | 139 | # vec_store: vector store for words word_i 140 | def cosine_similarity_with_store(word_u, word_v, vec_store): 141 | if word_u == word_v: 142 | return 1.0 143 | u = vec_store[word_u] 144 | v = vec_store[word_v] 145 | return np.dot(u, v) / (norm(u) * norm(v)) 146 | 147 | 148 | # intrinsically normalizes the matrix by its maximum value 149 | def normalize_distance_matrix(matrix): 150 | max_elem = max(list(map(max, matrix))) 151 | return list(map(lambda row: list(map(lambda x: 1 - x / max_elem, row)), matrix)) 152 | 153 | 154 | # abstract over pos tags by clustering them 155 | # assumes a Penn Treebank Tags input tag 156 | def tag_to_super_tag(tag, tts_mapping): 157 | return tts_mapping.get(tag, tag) 158 | 159 | 160 | def load_tag_to_super_tag_mapping(): 161 | return {'VB': 'V', 'VBG': 'V', 'VBP': 'V', 'VBZ': 'V', 'VBD': 'V', 'VBN': 'V', 'VBD': 'V', 'VBN': 'V', 162 | 'NN': 'N', 'NNS': 'N', 'NNP': 'N', 'NNPS': 'N', 163 | 'JJ': 'ADJ', 'JJR': 'ADJ', 'JJS': 'ADJ', 164 | 'RB': 'ADV', 'RBR': 'ADV', 'RBS': 'ADV', 165 | 'WDT': 'WH', 'WP': 'WH', 'WP$': 'WH', 'WRB': 'WH', 166 | '#': '?', '$': '?', '.': '?', ',': '?', ':': '?', '-RRB-': '?', '-LRB-': '?', '“': '?', '”': '?'} 167 | 168 | # Tests for paths(.) 169 | # ex1 = [['a','b']] 170 | # ex2 = [['a'], ['b']] 171 | # ex3 = [['a','b'], ['c']] 172 | # ex4 = [['a'], ['b','c']] 173 | # ex5 = [['a','b'], ['c','d']] 174 | # ex6 = [['a','b'], ['c'], ['d','e']] 175 | # ex7 = [['a'], ['b','c'], ['d']] 176 | # print(paths(ex1) == [['a'], ['b']]) 177 | # print(paths(ex2) == [['a','b']]) 178 | # print(paths(ex3) == [['a','c'], ['b','c']]) 179 | # print(paths(ex4) == [['a','b'], ['a','c']]) 180 | # print(paths(ex5) == [['a','c'], ['a','d'], ['b','c'], ['b','d']]) 181 | # print(paths(ex6) == [['a','c','d'], ['a','c','e'], ['b','c','d'], ['b','c','e']]) 182 | # print(paths(ex7) == [['a','b','d'], ['a','c','d']]) 183 | # paths(ex6) 184 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | from cnn.dense import Dense 2 | from cnn.mnist_like import MnistLike 3 | from cnn.no_padding_1conv import NoPadding1Conv 4 | from extract_features import tensor_from_multiple_ssms, labels_from_label_array 5 | from util.helpers import precision, recall, f1, k, tdiff, feed, compact_buckets 6 | 7 | from util.load_data import load_ssm_string, load_ssm_phonetics, load_linewise_feature, load_ssm_lex_struct_watanabe 8 | from util.load_data import load_segment_borders, load_segment_borders_watanabe, load_segment_borders_for_genre 9 | 10 | import argparse 11 | import math 12 | import numpy as np 13 | import tensorflow as tf 14 | from random import random 15 | from time import time 16 | from sklearn.metrics import confusion_matrix 17 | from os import path 18 | from tensorflow.contrib import slim 19 | 20 | 21 | def add_to_buckets(buckets: dict(), bucket_id: int, tensor: np.ndarray, added_features: np.ndarray, labels: np.ndarray) -> None: 22 | if bucket_id not in buckets: 23 | buckets[bucket_id] = ([], [], []) 24 | X, X_added, Y = buckets[bucket_id] 25 | X.append(tensor) 26 | X_added.append(added_features) 27 | Y.append(labels) 28 | 29 | # Given column vector of feature for line i, produce matrix indicating features in lines i-1, i, i+1 30 | # note this is slightly bugged at first and last line 31 | def window_features(feat_vector: np.ndarray) -> np.ndarray: 32 | return np.concatenate( 33 | ( 34 | np.roll(feat_vector, -1, axis = 0), 35 | feat_vector, 36 | np.roll(feat_vector, +1, axis = 0) 37 | ), 38 | axis = 1 39 | ) 40 | 41 | def main(args): 42 | print("Starting training with parameters:", vars(args)) 43 | 44 | # Load the data 45 | print("Loading data...") 46 | timestamp = time() 47 | 48 | # load different aligned SSMs 49 | multiple_ssms_data = [load_ssm_string(args.data), 50 | #load_ssm_phonetics(args.data), 51 | ] 52 | channels = len(multiple_ssms_data) 53 | print("Found", channels, "SSM channels") 54 | 55 | segment_borders = load_segment_borders(args.data) 56 | 57 | # ADDITIONAL FEATURES FOR AFTER CONVOLUTION 58 | # token_count_feat = load_linewise_feature(args.data, 'token_count') 59 | # token_count_feat.set_index(['id'], inplace=True) 60 | # syllable_count_feat = load_linewise_feature(args.data, 'syllable_count') 61 | # syllable_count_feat.set_index(['id'], inplace=True) 62 | char_count_feat = load_linewise_feature(args.data, 'char_count') 63 | char_count_feat.set_index(['id'], inplace=True) 64 | 65 | if not args.genre: 66 | train_borders, test_borders = load_segment_borders_watanabe(args.data) 67 | else: 68 | # load train/test for some genre only (training is always on whole Watanabe train set) 69 | train_borders, test_borders = load_segment_borders_for_genre(args.data, args.genre) 70 | 71 | train_borders_set = set(train_borders.id) 72 | test_borders_set = set(test_borders.id) 73 | train_test_borders_set = train_borders_set.union(test_borders_set) 74 | print("Done in %.1fs" % tdiff(timestamp)) 75 | 76 | # Figure out the maximum ssm size 77 | print("Gathering dataset statistics...") 78 | timestamp = time() 79 | max_ssm_size = 0 80 | counter = 0 81 | for ssm_obj in multiple_ssms_data[0].itertuples(): 82 | current_id = ssm_obj.id 83 | #skip ids not in training or dev 84 | if not current_id in train_test_borders_set: 85 | continue 86 | 87 | counter += 1 88 | max_ssm_size = max(max_ssm_size, ssm_obj.ssm.shape[0]) 89 | print("Done in %.1fs (%.2fk items, max ssm size: %d)" % (tdiff(timestamp), k(counter), max_ssm_size)) 90 | 91 | # Producing training set 92 | train_buckets = dict() 93 | test_buckets = dict() 94 | print("Producing training set...") 95 | counter = 0 96 | filtered = 0 97 | timestamp = time() 98 | max_ssm_size = min(max_ssm_size, args.max_ssm_size) 99 | 100 | #allow indexed access to dataframes 101 | for elem in multiple_ssms_data: 102 | elem.set_index(['id'], inplace=True) 103 | 104 | 105 | for borders_obj in segment_borders.itertuples(): 106 | counter += 1 107 | 108 | # temp. speedup for debugging 109 | #if counter % 100 != 0: 110 | # continue 111 | 112 | current_id = borders_obj.id 113 | 114 | #skip ids not in training or test 115 | if not current_id in train_test_borders_set: 116 | continue 117 | 118 | ssm_elems = [] 119 | for single_ssm in multiple_ssms_data: 120 | ssm_elems.append(single_ssm.loc[current_id].ssm) 121 | 122 | ssm_size = ssm_elems[0].shape[0] 123 | 124 | # Reporting 125 | if counter % 10000 == 0: 126 | print(" processed %3.0fk items (%4.1fs, filt.: %4.1fk)" % (k(counter), tdiff(timestamp), k(filtered))) 127 | timestamp = time() 128 | 129 | # Filter out too small or too large ssm 130 | if ssm_size < args.min_ssm_size or ssm_size > args.max_ssm_size: 131 | filtered += 1 132 | continue 133 | 134 | # Sentences are grouped into buckets to improve performance 135 | bucket_size = ssm_size 136 | if not args.buckets: 137 | bucket_size = max_ssm_size 138 | bucket_id = int(math.ceil(math.log2(bucket_size))) 139 | 140 | # one tensor for one song 141 | ssm_tensor = tensor_from_multiple_ssms(ssm_elems, 2**bucket_id, args.window_size) 142 | # concatenate all added features at axis=1 here 143 | added_features = np.concatenate( 144 | ( 145 | #window_features(token_count_feat.loc[current_id].feat_val), 146 | #window_features(syllable_count_feat.loc[current_id].feat_val), 147 | window_features(char_count_feat.loc[current_id].feat_val), 148 | ), 149 | axis = 1 150 | ) 151 | added_feats_count = added_features.shape[1] 152 | 153 | ssm_labels = labels_from_label_array(borders_obj.borders, ssm_size) 154 | 155 | # fill train/test buckets according to definition files 156 | if current_id in train_borders_set: 157 | add_to_buckets(train_buckets, bucket_id, ssm_tensor, added_features, ssm_labels) 158 | else: 159 | assert current_id in test_borders_set, 'id ' + current_id + ' is neither in train nor in test!' 160 | add_to_buckets(test_buckets, bucket_id, ssm_tensor, added_features, ssm_labels) 161 | 162 | del multiple_ssms_data 163 | del added_features 164 | del segment_borders 165 | del train_borders 166 | del test_borders 167 | del train_borders_set 168 | del test_borders_set 169 | del train_test_borders_set 170 | 171 | # Compacting buckets and printing statistics 172 | print("Training set buckets:") 173 | train_buckets = compact_buckets(train_buckets) 174 | print("Test set buckets:") 175 | test_buckets = compact_buckets(test_buckets) 176 | 177 | # Define the neural network 178 | # nn = Dense(window_size=args.window_size, ssm_size=2 ** next(train_buckets.keys().__iter__())) 179 | nn = NoPadding1Conv(window_size=args.window_size, ssm_size=2 ** next(train_buckets.keys().__iter__()), added_features_size = added_feats_count, channels=channels) 180 | # nn = MnistLike(window_size=args.window_size, ssm_size=2 ** next(train_buckets.keys().__iter__()), channels=channels) 181 | 182 | # Defining optimisation problem 183 | g_global_step = tf.train.get_or_create_global_step() 184 | g_train_op = slim.optimize_loss( 185 | loss=nn.g_loss, global_step=g_global_step, learning_rate=None, 186 | optimizer=tf.train.AdamOptimizer(), clip_gradients=5.0) 187 | 188 | # Logging 189 | summary_writer = tf.summary.FileWriter( 190 | logdir=path.join(args.output, 'log'), graph=tf.get_default_graph()) 191 | g_summary = tf.summary.merge_all() 192 | 193 | saver = tf.train.Saver(max_to_keep=10) 194 | 195 | with tf.Session() as sess: 196 | # Checkpoint restore / variable initialising 197 | save_path = path.join(args.output, 'model.ckpt') 198 | latest_checkpoint = tf.train.latest_checkpoint(args.output) 199 | if latest_checkpoint is None: 200 | print("Initializing variables") 201 | timestamp = time() 202 | tf.get_variable_scope().set_initializer(tf.random_normal_initializer(mean=0.0, stddev=0.01)) 203 | tf.global_variables_initializer().run() 204 | print("Done in %.2fs" % tdiff(timestamp)) 205 | else: 206 | print("Restoring from checkpoint variables") 207 | timestamp = time() 208 | saver.restore(sess=sess, save_path=latest_checkpoint) 209 | print("Done in %.2fs" % tdiff(timestamp)) 210 | 211 | print() 212 | timestamp = time() 213 | global_step_v = 0 214 | avg_loss = 0.0 215 | 216 | eval_precisions = [] 217 | eval_recalls = [] 218 | eval_fscores = [] 219 | 220 | # Training loop 221 | for epoch in range(args.max_epoch): 222 | for bucket_id in train_buckets: 223 | for batch_X, batch_X_added_feats, batch_Y in feed(train_buckets[bucket_id], args.batch_size): 224 | # Single training step 225 | summary_v, global_step_v, loss_v, _ = sess.run( 226 | fetches=[g_summary, g_global_step, nn.g_loss, g_train_op], 227 | feed_dict={nn.g_in: batch_X, 228 | nn.g_labels: batch_Y, 229 | nn.g_dprob: 0.6, 230 | nn.g_added_features: batch_X_added_feats}) 231 | summary_writer.add_summary(summary=summary_v, global_step=global_step_v) 232 | avg_loss += loss_v 233 | 234 | # Reporting 235 | if global_step_v % args.report_period == 0: 236 | print("Iter %d" % global_step_v) 237 | print(" epoch: %.0f, avg.loss: %.4f, iter/s: %.4f" % ( 238 | epoch, avg_loss / args.report_period, args.report_period / tdiff(timestamp) 239 | )) 240 | timestamp = time() 241 | avg_loss = 0.0 242 | 243 | # Evaluation 244 | if global_step_v % (args.report_period*10) == 0: 245 | tp = 0 246 | fp = 0 247 | fn = 0 248 | for bucket_id in test_buckets: 249 | for test_X, text_X_added_feats, true_Y in feed(test_buckets[bucket_id], args.batch_size): 250 | pred_Y = nn.g_results.eval(feed_dict={ 251 | nn.g_in: test_X, 252 | nn.g_dprob: 1.0, 253 | nn.g_added_features: text_X_added_feats 254 | }) 255 | try: 256 | _, cur_fp, cur_fn, cur_tp = confusion_matrix(true_Y, pred_Y).ravel() 257 | tp += cur_tp 258 | fp += cur_fp 259 | fn += cur_fn 260 | except Exception as e: 261 | print(e) 262 | print(confusion_matrix(true_Y, pred_Y).ravel()) 263 | print(confusion_matrix(true_Y, pred_Y)) 264 | 265 | current_precision = precision(tp, fp) * 100 266 | current_recall = recall(tp, fn) * 100 267 | current_fscore = f1(tp, fp, fn) * 100 268 | eval_precisions.append(current_precision) 269 | eval_recalls.append(current_recall) 270 | eval_fscores.append(current_fscore) 271 | print(" P: %.2f%%, R: %.2f%%, F1: %.2f%%" % ( 272 | current_precision, current_recall, current_fscore 273 | )) 274 | 275 | # Checkpointing 276 | if global_step_v % 10000 == 0: 277 | real_save_path = saver.save(sess=sess, save_path=save_path, global_step=global_step_v) 278 | print("Saved the checkpoint to: %s" % real_save_path) 279 | print('precisions:', eval_precisions) 280 | print('recalls:', eval_recalls) 281 | print('fscores:', eval_fscores) 282 | 283 | real_save_path = saver.save(sess=sess, save_path=save_path, global_step=global_step_v) 284 | print("Saved the checkpoint to: %s" % real_save_path) 285 | print('total precisions:', eval_precisions) 286 | print('total recalls:', eval_recalls) 287 | print('total fscores:', eval_fscores) 288 | print('--------------') 289 | n = 10 290 | print('n =',n) 291 | print('avg. of last n precisions:', np.round(np.median(eval_precisions[-n:]), 1), '+-', np.round(np.std(eval_precisions[-n:]), 1), '%') 292 | print('avg. of last n recalls :', np.round(np.median(eval_recalls[-n:]), 1), '+-', np.round(np.std(eval_recalls[-n:]), 1), '%') 293 | print('avg. of last n fscores :', np.round(np.median(eval_fscores[-n:]), 1), '+-', np.round(np.std(eval_fscores[-n:]), 1), '%') 294 | 295 | 296 | if __name__ == '__main__': 297 | parser = argparse.ArgumentParser(description='Train the lyrics segmentation cnn') 298 | parser.add_argument('--data', required=True, 299 | help='The directory with the data') 300 | parser.add_argument('--output', required=True, 301 | help='Output path') 302 | parser.add_argument('--genre') 303 | parser.add_argument('--batch-size', type=int, default=256, 304 | help='The size of a mini-batch') 305 | parser.add_argument('--max-epoch', type=int, default=5, 306 | help='The maximum epoch number') 307 | parser.add_argument('--window-size', type=int, default=2, 308 | help='The size of the window') 309 | parser.add_argument('--min-ssm-size', type=int, default=5, 310 | help='Minimum size of the ssm matrix') 311 | parser.add_argument('--max-ssm-size', type=int, default=128, 312 | help='Maximum size of the ssm matrix') 313 | parser.add_argument('--report-period', type=int, default=100, 314 | help='When to report stats') 315 | parser.add_argument('--buckets', default=False, action='store_true', 316 | help='Enable buckets') 317 | 318 | args = parser.parse_args() 319 | main(args) 320 | -------------------------------------------------------------------------------- /train_joint.py: -------------------------------------------------------------------------------- 1 | from tensorflow.core.protobuf import config_pb2 2 | 3 | from cnn.dense import Dense 4 | from cnn.joint_rnn import JointRNN 5 | from cnn.mnist_like import MnistLike 6 | from cnn.no_padding_1conv import NoPadding1Conv 7 | from extract_features import tensor_from_multiple_ssms, labels_from_label_array 8 | from util.helpers import precision, recall, f1, k, tdiff, feed, compact_buckets, feed_joint, windowdiff 9 | 10 | from util.load_data import load_ssm_string, load_ssm_phonetics, load_linewise_feature, load_ssm_lex_struct_watanabe 11 | from util.load_data import load_segment_borders, load_segment_borders_watanabe, load_segment_borders_for_genre 12 | 13 | import argparse 14 | import math 15 | import numpy as np 16 | import tensorflow as tf 17 | from random import random 18 | from time import time 19 | from sklearn.metrics import confusion_matrix 20 | from os import path 21 | from tensorflow.contrib import slim 22 | 23 | 24 | def add_to_buckets(buckets: dict(), bucket_id: int, tensor: np.ndarray, added_features: np.ndarray, labels: np.ndarray) -> None: 25 | if bucket_id not in buckets: 26 | buckets[bucket_id] = ([], [], []) 27 | X, X_added, Y = buckets[bucket_id] 28 | X.append(tensor) 29 | X_added.append(added_features) 30 | Y.append(labels) 31 | 32 | 33 | def main(args): 34 | print("Starting training with parameters:", vars(args)) 35 | 36 | # Load the data 37 | print("Loading data...") 38 | timestamp = time() 39 | 40 | # load different aligned SSMs 41 | multiple_ssms_data = [load_ssm_string(args.data), 42 | load_ssm_phonetics(args.data), 43 | load_ssm_lex_struct_watanabe(args.data) 44 | ] 45 | channels = len(multiple_ssms_data) 46 | print("Found", channels, "SSM channels") 47 | 48 | segment_borders = load_segment_borders(args.data) 49 | token_count_feat = load_linewise_feature(args.data, 'token_count') 50 | 51 | if not args.genre: 52 | train_borders, test_borders = load_segment_borders_watanabe(args.data) 53 | else: 54 | # load dev/test for some genre only (training is always on whole Watanabe train set) 55 | train_borders, test_borders = load_segment_borders_for_genre(args.data, args.genre) 56 | 57 | train_borders_set = set(train_borders.id) 58 | test_borders_set = set(test_borders.id) 59 | train_dev_borders_set = train_borders_set.union(test_borders_set) 60 | print("Done in %.1fs" % tdiff(timestamp)) 61 | 62 | # Figure out the maximum ssm size 63 | print("Gathering dataset statistics...") 64 | timestamp = time() 65 | max_ssm_size = 0 66 | counter = 0 67 | for ssm_obj in multiple_ssms_data[0].itertuples(): 68 | current_id = ssm_obj.id 69 | # skip ids not in training or dev 70 | if not current_id in train_dev_borders_set: 71 | continue 72 | 73 | counter += 1 74 | max_ssm_size = max(max_ssm_size, ssm_obj.ssm.shape[0]) 75 | print("Done in %.1fs (%.2fk items, max ssm size: %d)" % (tdiff(timestamp), k(counter), max_ssm_size)) 76 | 77 | # Producing training set 78 | train_buckets = dict() 79 | test_buckets = dict() 80 | print("Producing training set...") 81 | counter = 0 82 | filtered = 0 83 | timestamp = time() 84 | max_ssm_size = min(max_ssm_size, args.max_ssm_size) 85 | 86 | # allow indexed access to dataframes 87 | for elem in multiple_ssms_data: 88 | elem.set_index(['id'], inplace=True) 89 | token_count_feat.set_index(['id'], inplace=True) 90 | 91 | for borders_obj in segment_borders.itertuples(): 92 | counter += 1 93 | 94 | # temp. speedup for debugging 95 | #if counter % 100 != 0: 96 | # continue 97 | 98 | current_id = borders_obj.id 99 | 100 | #skip ids not in training or dev 101 | if not current_id in train_dev_borders_set: 102 | continue 103 | 104 | ssm_elems = [] 105 | for single_ssm in multiple_ssms_data: 106 | ssm_elems.append(single_ssm.loc[current_id].ssm) 107 | 108 | ssm_size = ssm_elems[0].shape[0] 109 | 110 | # Reporting 111 | if counter % 10000 == 0: 112 | print(" processed %3.0fk items (%4.1fs, filt.: %4.1fk)" % (k(counter), tdiff(timestamp), k(filtered))) 113 | timestamp = time() 114 | 115 | # Filter out too small or too large ssm 116 | if ssm_size < args.min_ssm_size or ssm_size > args.max_ssm_size: 117 | filtered += 1 118 | continue 119 | 120 | # Sentences are grouped into buckets to improve performance 121 | bucket_size = ssm_size 122 | if not args.buckets: 123 | bucket_size = max_ssm_size 124 | bucket_id = int(math.ceil(math.log2(bucket_size))) 125 | 126 | # one tensor for one song 127 | ssm_tensor = tensor_from_multiple_ssms(ssm_elems, 2**bucket_id, args.window_size) 128 | # concatenate all added features at axis=1 here 129 | added_features = token_count_feat.loc[current_id].feat_val 130 | added_feats_count = added_features.shape[1] 131 | 132 | ssm_labels = labels_from_label_array(borders_obj.borders, ssm_size) 133 | 134 | # fill train/test buckets according to definition files 135 | if current_id in train_borders_set: 136 | add_to_buckets(train_buckets, bucket_id, ssm_tensor, added_features, ssm_labels) 137 | else: 138 | assert current_id in test_borders_set, 'id ' + current_id + ' is neither in train nor in dev!' 139 | add_to_buckets(test_buckets, bucket_id, ssm_tensor, added_features, ssm_labels) 140 | 141 | del multiple_ssms_data 142 | del added_features 143 | del segment_borders 144 | del train_borders 145 | del test_borders 146 | del train_borders_set 147 | del test_borders_set 148 | del train_dev_borders_set 149 | 150 | # Compacting buckets and printing statistics 151 | #print("Training set buckets:") 152 | #train_buckets = compact_buckets(train_buckets) 153 | #print("Test set buckets:") 154 | #test_buckets = compact_buckets(test_buckets) 155 | 156 | # Define the neural network 157 | nn = JointRNN(window_size=args.window_size, ssm_size=2 ** next(train_buckets.keys().__iter__()), added_features_size=added_feats_count, channels=channels) 158 | 159 | # Defining optimisation problem 160 | g_global_step = tf.train.get_or_create_global_step() 161 | g_train_op = slim.optimize_loss( 162 | loss=nn.g_loss, global_step=g_global_step, learning_rate=None, 163 | optimizer=tf.train.AdamOptimizer(), clip_gradients=5.0) 164 | 165 | # Logging 166 | summary_writer = tf.summary.FileWriter( 167 | logdir=path.join(args.output, 'log'), graph=tf.get_default_graph()) 168 | g_summary = tf.summary.merge_all() 169 | 170 | saver = tf.train.Saver(max_to_keep=10) 171 | 172 | gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.95) 173 | with tf.device('/device:GPU:0'): 174 | with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess: 175 | # Checkpoint restore / variable initialising 176 | save_path = path.join(args.output, 'model.ckpt') 177 | latest_checkpoint = tf.train.latest_checkpoint(args.output) 178 | if latest_checkpoint is None: 179 | print("Initializing variables") 180 | timestamp = time() 181 | tf.get_variable_scope().set_initializer(tf.random_normal_initializer(mean=0.0, stddev=0.01)) 182 | tf.global_variables_initializer().run() 183 | print("Done in %.2fs" % tdiff(timestamp)) 184 | else: 185 | print("Restoring from checkpoint variables") 186 | timestamp = time() 187 | saver.restore(sess=sess, save_path=latest_checkpoint) 188 | print("Done in %.2fs" % tdiff(timestamp)) 189 | 190 | print() 191 | timestamp = time() 192 | global_step_v = 0 193 | avg_loss = 0.0 194 | 195 | eval_precisions = [] 196 | eval_recalls = [] 197 | eval_fscores = [] 198 | 199 | # Training loop 200 | try: 201 | for epoch in range(args.max_epoch): 202 | for bucket_id in train_buckets: 203 | for batch_X, batch_X_lengths, batch_Y in feed_joint(train_buckets[bucket_id], 2 ** next(train_buckets.keys().__iter__()), args.batch_size): 204 | # Single training step 205 | summary_v, global_step_v, loss_v, _ = sess.run( 206 | fetches=[g_summary, g_global_step, nn.g_loss, g_train_op], 207 | feed_dict={nn.g_in: batch_X, 208 | nn.g_labels: batch_Y, 209 | nn.g_dprob: 0.6, 210 | nn.g_lengths: batch_X_lengths}) 211 | summary_writer.add_summary(summary=summary_v, global_step=global_step_v) 212 | avg_loss += loss_v 213 | 214 | # Reporting 215 | if global_step_v % args.report_period == 0: 216 | avg_loss /= args.report_period 217 | print("Iter %d, epoch %.0f, avg.loss %.4f, iter/s %.4fs" % ( 218 | global_step_v, epoch, avg_loss, tdiff(timestamp) / args.report_period 219 | )) 220 | timestamp = time() 221 | avg_loss = 0.0 222 | 223 | # Evaluation 224 | if global_step_v % (args.report_period*10) == 0: 225 | for bucket_id in test_buckets: 226 | cur_p, cur_r, cur_f1, wd = do_eval(nn, feed_joint(test_buckets[bucket_id], 2 ** next(train_buckets.keys().__iter__()), args.batch_size), args.output) 227 | eval_precisions.append(cur_p) 228 | eval_recalls.append(cur_r) 229 | eval_fscores.append(cur_f1) 230 | print(" P: %.2f%%, R: %.2f%%, F1: %.2f%%, WD: %.2f" % ( 231 | cur_p, cur_r, cur_f1, wd 232 | )) 233 | timestamp = time() 234 | 235 | # Checkpointing 236 | if global_step_v % (args.report_period*10) == 0: 237 | real_save_path = saver.save(sess=sess, save_path=save_path, global_step=global_step_v) 238 | print("Saved the checkpoint to: %s" % real_save_path) 239 | print('precisions:', eval_precisions) 240 | print('recalls:', eval_recalls) 241 | print('fscores:', eval_fscores) 242 | except Exception as e: 243 | print(e) 244 | 245 | real_save_path = saver.save(sess=sess, save_path=save_path, global_step=global_step_v) 246 | for bucket_id in test_buckets: 247 | cur_p, cur_r, cur_f1, wd = do_eval(nn, feed_joint(test_buckets[bucket_id], 2 ** next(train_buckets.keys().__iter__()), args.batch_size), args.output) 248 | print(" P: %.2f%%, R: %.2f%%, F1: %.2f%%, WD: %.2f" % ( 249 | cur_p, cur_r, cur_f1, wd 250 | )) 251 | print("Saved the checkpoint to: %s" % real_save_path) 252 | print('total precisions:', eval_precisions) 253 | print('total recalls:', eval_recalls) 254 | print('total fscores:', eval_fscores) 255 | print('--------------') 256 | n = 10 257 | print('n =', n) 258 | print('avg. of last n precisions:', np.round(np.median(eval_precisions[-n:]), 1), '+-', np.round(np.std(eval_precisions[-n:]), 1), '%') 259 | print('avg. of last n recalls :', np.round(np.median(eval_recalls[-n:]), 1), '+-', np.round(np.std(eval_recalls[-n:]), 1), '%') 260 | print('avg. of last n fscores :', np.round(np.median(eval_fscores[-n:]), 1), '+-', np.round(np.std(eval_fscores[-n:]), 1), '%') 261 | 262 | 263 | def do_eval(model, generator, output): 264 | tp = 0 265 | fp = 0 266 | fn = 0 267 | wd = 0 268 | wd_count = 0 269 | with open(path.join(output, 'eval.txt'), 'w') as writer: 270 | for test_X, test_X_lengths, true_Y in generator: 271 | # batch_size x max_len x 2 272 | pred_Y = model.g_out.eval(feed_dict={ 273 | model.g_in: test_X, 274 | model.g_dprob: 1.0, 275 | model.g_lengths: test_X_lengths 276 | }) 277 | for i in range(pred_Y.shape[0]): 278 | writer.write("========\n") 279 | pred_sample_Y = np.argmax(pred_Y[i, :test_X_lengths[i], :], axis=1) 280 | true_sample_Y = true_Y[i, :test_X_lengths[i]] 281 | for i in range(true_sample_Y.shape[0]): 282 | writer.write("%d\t%d\n" % (true_sample_Y[i], pred_sample_Y[i])) 283 | try: 284 | _, cur_fp, cur_fn, cur_tp = confusion_matrix(true_sample_Y, pred_sample_Y).ravel() 285 | tp += cur_tp 286 | fp += cur_fp 287 | fn += cur_fn 288 | wd += windowdiff(true_sample_Y, pred_sample_Y) 289 | wd_count += 1 290 | except Exception as e: 291 | print(e) 292 | print(confusion_matrix(true_Y, pred_Y).ravel()) 293 | print(confusion_matrix(true_Y, pred_Y)) 294 | writer.write("========\n") 295 | 296 | current_precision = precision(tp, fp) * 100 297 | current_recall = recall(tp, fn) * 100 298 | current_f1 = f1(tp, fp, fn) * 100 299 | wd /= wd_count 300 | 301 | return current_precision, current_recall, current_f1, wd 302 | 303 | 304 | if __name__ == '__main__': 305 | parser = argparse.ArgumentParser(description='Train the lyrics segmentation cnn') 306 | parser.add_argument('--data', required=True, 307 | help='The directory with the data') 308 | parser.add_argument('--output', required=True, 309 | help='Output path') 310 | parser.add_argument('--genre') 311 | parser.add_argument('--batch-size', type=int, default=256, 312 | help='The size of a mini-batch') 313 | parser.add_argument('--max-epoch', type=int, default=5, 314 | help='The maximum epoch number') 315 | parser.add_argument('--window-size', type=int, default=2, 316 | help='The size of the window') 317 | parser.add_argument('--min-ssm-size', type=int, default=5, 318 | help='Minimum size of the ssm matrix') 319 | parser.add_argument('--max-ssm-size', type=int, default=128, 320 | help='Maximum size of the ssm matrix') 321 | parser.add_argument('--report-period', type=int, default=100, 322 | help='When to report stats') 323 | parser.add_argument('--buckets', default=False, action='store_true', 324 | help='Enable buckets') 325 | 326 | args = parser.parse_args() 327 | main(args) 328 | -------------------------------------------------------------------------------- /Lyrics Structure Analysis from stores.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "from modules import ssm\n", 10 | "from modules import similarity as dist\n", 11 | "import pandas as pd\n", 12 | "import re\n", 13 | "from functools import reduce\n", 14 | "\n", 15 | "#open dataframes stored in hdf container\n", 16 | "with pd.HDFStore('resources/mldb_watanabe_5plus_seg_english.hdf') as store:\n", 17 | " songs = store['mldb_watanabe_5plus_seg_english']\n", 18 | " \n", 19 | "with pd.HDFStore('resources/ssm_store_pub1.hdf') as store:\n", 20 | " ssms_string = store['mdb_127_en_seg5p_string_1'].append(store['mdb_127_en_seg5p_string_2'])\n", 21 | " ssms_string.set_index(['id'], inplace=True)\n", 22 | " \n", 23 | "with pd.HDFStore('resources/borders_pub1.hdf') as store:\n", 24 | " borders = store['mdb_127_en_seg5p']\n", 25 | " borders.set_index(['id'], inplace=True)" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 2, 31 | "metadata": { 32 | "collapsed": true 33 | }, 34 | "outputs": [], 35 | "source": [ 36 | "#check for normal (\\n\\n) and anomalous segment border indicators (e.g. \\n\\n\\n\\n, \\n\\n \\n\\n) and replace them with \n", 37 | "#afterwards: replace line border indicators (\\n) with \n", 38 | "#tree structure: encodes segment borders and line borders\n", 39 | "def tree_structure(text):\n", 40 | " #normalize segment border encoding\n", 41 | " segment_border_encoder = ''\n", 42 | " line_border_encoder = ''\n", 43 | " tree_string = re.sub('(( )*\\n( )*){2,}', segment_border_encoder, text)\n", 44 | " tree_string = re.sub('( )*\\n( )*', line_border_encoder, tree_string)\n", 45 | " #parse tree_string\n", 46 | " segment_structure = tree_string.split(segment_border_encoder)\n", 47 | " tree_structure = list(map(lambda segment: segment.split(line_border_encoder), segment_structure))\n", 48 | " return tree_structure\n", 49 | "\n", 50 | "#removed fancy stuff like bracket removal here, until we have stable results\n", 51 | "def normalize_lyric(lyric):\n", 52 | " return lyric.lower()\n", 53 | "\n", 54 | "#given a text tree structure, print it nicely\n", 55 | "def pretty_print_tree(text_tree):\n", 56 | " space_between = ' '\n", 57 | " res = ''\n", 58 | " output_separator = '\\n'\n", 59 | " block_index = 0\n", 60 | " line_index = 0\n", 61 | " for block in text_tree:\n", 62 | " if not block:\n", 63 | " continue\n", 64 | " line_in_block_index = 0\n", 65 | " for line in block:\n", 66 | " line = line.strip()\n", 67 | " if not line:\n", 68 | " continue\n", 69 | " line_pretty = space_between + str(block_index) + '.' + str(line_in_block_index)\\\n", 70 | " + space_between + str(line_index) + space_between + line + output_separator\n", 71 | " res += line_pretty\n", 72 | " line_in_block_index += 1\n", 73 | " line_index += 1\n", 74 | " block_index += 1\n", 75 | " res += output_separator\n", 76 | " return space_between + res" 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": 3, 82 | "metadata": { 83 | "scrolled": false 84 | }, 85 | "outputs": [ 86 | { 87 | "name": "stdout", 88 | "output_type": "stream", 89 | "text": [ 90 | "segment borders: [10, 17, 28, 35, 42] \n", 91 | "\n", 92 | " 0.0 0 i'm taking it in\n", 93 | " 0.1 1 i'm holding it back\n", 94 | " 0.2 2 i'm filling my lungs with the knife in my back\n", 95 | " 0.3 3 but you kept going on just to make me feel like this\n", 96 | " 0.4 4 now that it's over\n", 97 | " 0.5 5 i won't feel the same\n", 98 | " 0.6 6 a broken frame with our picture is wasted\n", 99 | " 0.7 7 i've thrown it away\n", 100 | " 0.8 8 losing sight\n", 101 | " 0.9 9 you were all i had (all i can take...)\n", 102 | " 0.10 10 i guess the jokes on me\n", 103 | "\n", 104 | " 1.0 11 we missed our chance\n", 105 | " 1.1 12 i won't forget\n", 106 | " 1.2 13 as time will only tell where to go\n", 107 | " 1.3 14 (time will only tell where you go)\n", 108 | " 1.4 15 though i wish that you weren't breathing\n", 109 | " 1.5 16 i still just can't believe\n", 110 | " 1.6 17 you're gone...\n", 111 | "\n", 112 | " 2.0 18 and i was looking for something\n", 113 | " 2.1 19 but i came up with nothing\n", 114 | " 2.2 20 not a reason to leave or let this out\n", 115 | " 2.3 21 but you kept going on\n", 116 | " 2.4 22 just to make me feel like this\n", 117 | " 2.5 23 now that it's over\n", 118 | " 2.6 24 i won't feel the same\n", 119 | " 2.7 25 a broken friendship was worth it\n", 120 | " 2.8 26 i'll scrap any thoughts with your name\n", 121 | " 2.9 27 still i long for you to stay\n", 122 | " 2.10 28 i take back anything\n", 123 | "\n", 124 | " 3.0 29 we missed our chance\n", 125 | " 3.1 30 i won't forget\n", 126 | " 3.2 31 as time will only tell where to go\n", 127 | " 3.3 32 (time will only tell where you go)\n", 128 | " 3.4 33 though i wish that you weren't breathing\n", 129 | " 3.5 34 i still just can't believe\n", 130 | " 3.6 35 you're gone...\n", 131 | "\n", 132 | " 4.0 36 we missed our chance\n", 133 | " 4.1 37 i won't forget\n", 134 | " 4.2 38 as time will only tell where to go\n", 135 | " 4.3 39 (time will only tell where you go)\n", 136 | " 4.4 40 though i wish that you weren't breathing\n", 137 | " 4.5 41 i still just can't believe\n", 138 | " 4.6 42 you're gone...\n", 139 | "\n", 140 | " 5.0 43 we missed our chance\n", 141 | " 5.1 44 i won't forget\n", 142 | " 5.2 45 as time will only tell where to go\n", 143 | " 5.3 46 (time will only tell where you go)\n", 144 | " 5.4 47 though i wish that you weren't breathing\n", 145 | " 5.5 48 i still just can't believe\n", 146 | " 5.6 49 you're gone...\n", 147 | "\n", 148 | "\n" 149 | ] 150 | }, 151 | { 152 | "data": { 153 | "image/png": "iVBORw0KGgoAAAANSUhEUgAACOAAAAM/CAYAAABybj/DAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XeYLGWV+PHvuZcoWUAkqQQjYkAEBESMoGLOElSMa3b1\nt7qua9w1rKKuypoVRQTMgjmTBJQoKChBUBAl58w9vz/eaqe6bvdMx+meud/P8/QzXdX1Vr1TXV39\nztSpcyIzkSRJkiRJkiRJkiRJkjSYJZPugCRJkiRJkiRJkiRJkrSQGYAjSZIkSZIkSZIkSZIkDcEA\nHEmSJEmSJEmSJEmSJGkIBuBIkiRJkiRJkiRJkiRJQzAAR5IkSZIkSZIkSZIkSRqCATiSJEmSJEmS\nJEmSJEnSEAzAkSRJkiRJkiRJkiRJkoaw0qQ7IEmSJEkLQUSsAjwIuCewEXAn4EbgKuAi4KTMvGpy\nPZweEbEVsA1wN2AtICn76QrgtMw8d4Ldk7qKiG8Az6gmb8jMNSfZH0mjERFnUr6XAE7OzO0n2Z/Z\nRMSrgE9Ukz/LzMdOsj/Tbprf24jYBdgf2AnYBFgHiOrl8zJz62q55wKHVvNPBnbIzGXz3F1JkiRJ\nI2AGHEmSpBVMRPwsIrL2uCMiNh9ifb9qrK/1eNyA69skIm7vsL7L52i3e5d+dHrcEBF/i4hjI+Kj\nEbHbAP28oI/tzfb46CzbOKiH9rdFxOUR8aeI+GZE/FtEbNbv7zOIHvf5soi4JiIujIifRMR7IuL+\nI9j2kmqdbcdIFSQzMtV2nhERRwLXAicCXwEOAN5T/fwC8BPgiog4KyLeGRFbzLHe1Rp9v3mO5fcc\nwbH2lh7X/aMBdhURcd+I+FhEXAycC3wX+DjwXuB9wKeArwPnRMSVEfGViHhMRCzov0sj4rARvDd3\nnfTvMW4R8bzG7/zlPtvv3WG/Pb3PdXy70f6Z/f0W6iYi1mzs2+vHvL0P1bZ15ji3NYyIeFhjvxzd\nZ/tdOhz3/9rnOj7SaP+m/n4LzaeI2IAyvoASvPrmCXZHA4qIlSPiIOBYSgDO/YB1mQm+aTocOK16\n/hDgRePuoyRJkqTxWND/6JQkSVJ/ogTaPLIxewmwzxg2t9+A7fYBlo6yIx3cCdgY2AV4HXBURJwa\nEVNzx2wfVgLWp2RleTrwAeCCKME76060Z0UAa1MyoTwWeBtwRkT8OCLuPsR6H1mts2594IlDrLNN\nRDwCOAP4BrAXsOpcTYD7AO8AzqsCTJp9XHQiYsMqmOL3wGsod3jPZT1gb+CnlONhZO+bplYz8KDf\nwMdOy/e8jogIYNc5+rRoRMQ35isYZhAR8epGUMhC/P7txUmUTGktO0TEXN8ldUMd912WP6rP9lMh\nIt7UOGaGDuadUu+ifEcCfDszT5lkZzSw9wAv6HXhzEzg7bVZ742INUbeK0mSJEljZwCOJEnSimVf\nOo8Be/4HcR+eFhGDlO7Yd+Q96c2DgF9HxJMmtP1RWkp5T0+Z4swajwNOj4iHDti+2zE7kmM5It4O\n/JJyx3LTecCvgK8B3wZOAC5troISYHLGKPozrSJiR8od2/uy/F3dV1P2zRHAYZT9+UfKHf119wO+\nFxFPG29vNUmZeTHls9Ny9+gv+9qwgQj3AzaoTZ+dmc3PrTRSmXkbcHxt1qrAjn2sotMxvmsVUDan\niFgbeGBt1vWAAR1TqgpMfklt1vsn1RcNrspi9PrarGuBf6VkttkS2KJ6PKLR9HvAH6rnd6EENUuS\nJElaYFaadAckSZI0r7oFJ9w7InbMzBNHsI3bgJUpWWaeCRzUa8OI2A6o39HcWtcgTgSe22kzwFrA\n1sAelMCB1avXVga+FhH3y8w/97m9hwMXDdDPa/tY9v9RsrHUtTLgbEdJcV/PIrAF8O2I2Lm6s3bc\nOu3zANYBtgWeD+xZe20d4IiIeGA/F8KrwK5upWeeEBEbZOasJcvmWP+HgTc0Zl9OyS70zU7HRnUx\ndHvgacArKb8bzJ01Z1BH03+w0VWj7EBE7Az8GKgH2iWlRNfngF9n5u0d2m0MPBl4GeW4bRnXvppv\nrwWO7LPNZePoyBQ6GtiqNv0IyvEyq4jYkJJdqumBEbF2ZvZyHn14Y3pBZgHRgnQ08Oja9G70kH2p\nKs+3c4eX1ge2AXopvbUz7VkFj8vMO3pop8l4G9AqpXl0Zv52kp3RwJ5A+5jmlZl5yFyNMjMj4gDg\n89Ws/xcRB2bmdePopCRJkqTxMABHkiRpBREROwH3qs36IfD42vQLKAEUw/oh5eI6lDJUB/XRtl62\n6nRgXWDQMkU3Z+YFs7z+O+BbVbDFL5gpm7Ma8G76z8Rz0RzbG4XLu2zjXODEiPgUJX39O2uv7UQJ\nCvnWmPsGs+/z04CDI2Jv4EvMXBC8K/DvLB/wMptnAvW0/PVjeWVKoM/H+ljfP0XE8zv05SvAqzPz\nmm7tqgCn3wK/rS6evI3x3rl80zwcb11VARFfpz345gLgmZl58mxtM/MS4NMR8RnKe/V+YLMxdXUS\nLpvkezPljgJeVJvejR4CcGgPnvkrcAsliHIJpazUD3pYRzOTyKItP6Wp0wz26jVz04MoJRwBbqV8\nx+xSW0cvATge9wtERGxE+9jz05Pqi4b2kNrzBL7bR9vDgI9QPvt3Bl4IfHxkPZMkSZI0dpagkiRJ\nWnE0s2X8G/D72vRzI2IVhvfl2vPdey0xEhErUS7Gd1rP2GTmHymZOOqeHBGDZt6ZmCzeBXy/8dLe\nk+hPJ9UdwO9tzH5Jn/u7fizfRLmgf12X13sWEVsDn2rMPhDYb7bgm6bMvCIz3wDsxYizzkyRg5gJ\nWgP4E/DwuYJv6qrj9RBKFpxjRts9Tanmxf9eAxHqyx0DHFubbma26cYMOJqUEylBYy0Pq8Y8c6kf\n9ycBP61N93rcNz9jHvfT6xXMZE25ivkJnNZ43KX2/LrMvL7Xhpl5I3BobdZrey05J0mSJGk6GIAj\nSZK0AoiIVYHn1GadnplnAgfX5q0HPGkEm/sZcElr08A+PbZ7PLBh9fwO4Ksj6EuvfghcWZtem8Ez\n70yD/2tMP2IivejuANovRq4J7NBLw4i4O+2/zxGZ+Q/gm7V520XENgP0678o5claTgReN2j5rsz8\nETPZChaNiNiTUl6h5XbgOZk5SAk2MvMySnmW40bQPU2xqnxb/Ti5d0TcpdvyNfVgg6NpD8CZM4gn\nIragPcvS+Zl5cQ/blYaWmTdTste0rEl7+b1uZjvu5wzAiYjVgIfWZt3U6IemRBVg8eLarG9Xx40W\npnp2wNsGaH9Y7fnWwCOH644kSZKk+WQJKkmSpBXDkykBNi2tkh9fBd5HCZSBkjmkHsgwiFbwzBur\n6X2rbcylXn7qp5n59/m64TMzl0XEeZRU7y0bUko7LUTNC2zrR8TqmXnTRHrTkJnXRMTJwM612fem\ntwCM/Zg5XmHmWP4KJU1/ywsoWZ56UgX2PLM2axnw4sy8o9d1dJKZ5w/Tfkq9sTF9QGaeNswKM/M2\nSmkhzSEi1qBcfN8c2AC4AbgUOCkzR3rOioi1KSWeNqHc0X89cGQVSDOoo2jPyrUb8I05+vDA2qxj\nKEFfLdv3cH4bWRaQiNiMEjC4ObAKZd+fmJlnD7HO1YFtgPtQvnvuRMnqdTlwyjDrVrsqIPmBlP19\nZ2B14GbgakoZvT9k5t/GsOmjKJ+llt2A38zRph5kcwxwAuXYXwnYNCK2yszzZmm/I+UYbTkhM2/t\ntcNVUMhDgPtSPv9LKcf7OdW6hvp+bGxrSdXfLYGNKTcMnpKZPxvVNnrow70pZb82p4wB/g4ck5nz\n8d30iGq7Ld+eh22OzLj2XUSsB+xerXdV4GLgrMw8dagOd97W1pTAuLtQgrGvoIxLjs7MG/pd3ZDd\nOQa4jJkbE/ahlMuVJEmStAAYgCNJkrRiqJfkWUaVXSYz/xoRR1H+uQ3w+Ii4S2ZeOuT2vszMRfr7\nRsRDM7PrXdcRsS7t2XfmpfzUHG6Ze5Gp1ans0XqUu9+nRTNbygY9tqsHal0O/Kh6/kvKhZlNq+l9\nIuLf+7hA+C+Ui4stP8vM33dbeEUVEfcFHlObdQfwiQl1Z4VSZXX6L0q2sFW7LPMnSoapz/dy7EfE\nq4GP12Y9NDNPioh7AB+gBG+u1mh2I/C5fvtfczR9BOBQski1PptXUC6+ZkRcSrlQugqwE+Uc0E0z\nAKdZCmtOEbEtZZ/sQYdswhHxO+CNvQYMRMTGlMx0TwEeRpf3tFr2Esrn7OOZed0sy+0FHNnhpTUi\nYrZMXs/KzNneg5GIiDVpLxdY99tZgm4PzMxXD7ntjYF3AM8F1plj2QuB7wEfHmEQ5dHAf9SmdwM+\nNEsf7svMxfdlwHGZeUNEnMpMVpvdgNkCcAY67iNiLeAtlIwsG3VZ7OqI+Crwrl7GjBGxASWgoOWA\nzHxTVX7yrcBLmfn+bvl5RJzWaFd3xizHzAGZ+aa5+lX17dHAu2kPCq6//kvgDZl5ei/rG9Czas9v\nBX7eS6OIeBPwwdqsbasMl720/R7wxGryiszsOg6LiDMpQWsAJ2fm9tX8sey76vN6APAM2oPIWq+f\nAXwoM4f6e6EKyHs1pfzX1l0WuyUijgTempnndFlP8/iuW7/L+bfrPs/MOyLix8xkEX16RLwsM2/v\ntLwkSZKk6WIJKkmSpEUuIjaiXDBs+WXj7u56GaqVgOcPu83M/B1Q/2f7ft2WrTyHmYuP1wLfGbYP\n/aju8t6yMfuC+ezDiK3dYd6CL2UQEbvQfoHk8NbFiMz8Z2BZZWPgsX2s/nGN6S8M1MnFr7lPfzpo\n6Sn1LiLeSjmnPpVZAjWAewGfBn4TEXcdcFt7AacBz2b54JtRaGafmauEVD0LyLG1knD1jFn9rKNT\nH2YVEftSStI9nu7/R3kA8OOIeHmPq/098BFKAOxs7ymU89l/AydXgRnqQ0Q8AjgbeDlzBN9U7g68\nCnjUCLvxa9ozN+0as6f5qx+zZ2TmNdXzfsqv9X3cR8QOlAw3b6V78A3AusArgXMi4omzLDfbtjah\n7Jd3snzwzbyIiLcDP6FLAEnlkcCvI6I5ThilPWvPfzstGQtnM659FxG7An8AnkeH4JvKtsCXIuKL\nVfakvkXEQ4A/UgLhugXfQDk/PxP4fUTsP8i2BlT/vK5DCdSUJEmStAAYgCNJkrT47U175sOvNF7/\nBu3BGS9gNOp3pT63usu5m/o2vzGBCw97AuvXpk/PzCvnuQ+j9KDG9DV0zoozSc2LbZf30KZ5bDaP\n5eZ0T8dydbf/AxqzBy5Rs8jt2ph2P41ZRLyfEnxRz9B0OyUA5XDgB5RyH3XbUS56btLn5rat1tkK\nUrgU+CFwKKX8xdV9rm85mflH4B/1bVZZ0LqpBxkc0+V510CEKhDpnrVZF/VTQisingAcRClVBOWC\n7XeBw4DjKVmgWpYAB1YXdufS/H/MhcBPga9T9vdPWP59vSclK8iGqCcRsTklm00zMPWPlGxBh1D2\n+c8oWdTGIjOvB06pzVqP8nnrZtjjfiXaL9jfSilh1VUV+PALlg+8+RPlmP8m8LvGa2sD34mIZ9Kf\nlSn7ffta/46hnH++xzyUAI2I1wPvonwWEzgV+BbwNdqDyKGUhTt80MDGOfqxJe1B4L2U45yoce27\niNiO8p3W/E44m1KW65tAPcPPCymZ4frt/2Mo45e7N146h3Je+CrlfHxt7bWVgc9HxCv63d6Ajm1M\n9xNULkmSJGmCLEElSZK0+NWDEG6i/PP6nzLz2og4gpLtAOBBEbFtZp4x5HYPAf6HctF4A0rmgCOa\nC0XE1rRfJJrX8lMRcU/gM43ZH53PPozBixvT9awRExcRazNz0a3lT3O0WY2ZYxTgvMxsu5iYmb+r\nysC0gmmeEhFrZ2b9AkonD6U9uOHizGxe+FaxU2P6pIn0YgVRZZZ4c2P2F4E3Z+ZlteWWUEqYfIKZ\ncm5bAAdFxB59fP4/RrlY+hfgdcARVXap1nZWo7cMInM5hpJRAMoF3F0pF93bVNt7aKNdS/3i5E4R\nsXJm3tZhW80ghX6CxlalXIhdQgnQeEOzvEtEbEEJ/mtlgVhK+e579BzrvoNyofdrwA+6BX1GxG6U\nEjM7VLM2przPz+mw+M8p7zvA/1G+d6GUDdumw/Itw5ad7NUNzPRvP8oF/JYnA93GHcudw6vyQr2U\nGHorsGZt+ruUUmEdSzdVQWtPZPnv0VE4mpn3Ecqx2Qxoaalnr+l23G8ZEZs0shq2bEf77z1rVpUq\nCO6rwBq12WcAL8/M4xvL3o+SaasVkLkS8IWIOCkzL+i2jYb9q/7dDrwP+GCzvFpVCu9KZo6Zl1Le\nz5Y96D52uKbL/JatmCkB9hVKeaG/Nrb/EEpA0FbVrHWB/6RkRxqlHRrT3Y6JaTGWfRcRq1Cycq5V\nm30m8JLMPLGx7AOAz1L23b9Rzi09iYi7Uc679WO9VU7tT41lV6ZkzvofZoIwPxoRv66yfbbUj1OA\nzzOTQesqyuexaa4ykedQbpBoZaLbcY7lJUmSJE0JM+BIkiQtYhHxINozexzZvMBRGShzyGwy8x+U\nO/hbupWhqs+/gHKBahRWi4h7dHhsERHbRsRTI+KTlFIrm9XaHQ58aYDtbdZle7M97jaS37QmIl7I\n8mXEPjvq7QzpDbSXXLmBUt5lNk+l/cL/IV2Wqx/Lq9MetNNNM0vIH3toM2l7RET28WgGzgxq48b0\nQthX8+3QPt6Xs7utpMpg8anG7Pdm5v714BsoJdgy83DgEZQLgS2PBZ7bR9/XpGRi2Tkzv1MPvqm2\nc3N1bh9Wr2WodmSmBMkNtGcQOZWZi653ArplnWmW4ennO2Ylynnnq8CezeAbgCqbzuNpz57yyIho\nZlZo2i4zn5yZX5kt41pmHk35HX5cm/2MTuvPzJsy84IqCOLG9pfK/C6PG5vrGocsWv1r/s6XzNK/\nYTLS7VV7fgrwjG7BN1Uf/5aZn83MnVh+bDSsno776r2tjw/+GYBTffbr591un51+j/t3ApvXpk8F\ndm0G31R9+AMlwOyntdlrAf87xzbqWsFBz8vMt3cam1bv/bLaMdPM5Pe3WY6ZubL+rUsJlnt3Zu7b\nDCCptn8yJcinniXy+VWgyCg1gzN+P+L1j9q49t1rgPvVpn9HOQaXGx9WwS+7U7IFLaVz6dVuPk/J\nQNXy+szcuxl8U23ntsz8BOW79NZq9qrABxrLLasff5QbHlqWdTlGl9tvjXXeAZxVm9VLZjVJkiRJ\nU8AAHEmSpMVtrpI9LT+ivQTQ3hGxtMuy/ahns9krIur/8CYiAtin3r8RZmrZEfhzh8f5lH/qfxt4\nBeXCLZSSKG8Enj9gH47psr3ZHkPf5RwRSyPiLhHx+Ij4OiU7Rt33M/O7w25nVCLiuZS7oOu+2CVz\nRV2vx/JXgXrQQC/BZHduTM915/wKqcpc1Myi6r4an2fQHhz4G+BtszWoLoz/a2P26/rc7ssyc2yl\neCrNYIBuQQT1+Sdk5u2tier5CV2WnW3d/ZZN+wtln3TNVlBl2fpwbVbM0p9Wmwt77UBm3krJ/tHq\nw1JKxiPNohrH1AMsj5jtfWzKzJvnXqovx9L+/dQMkmmpHzvnZeYlHdbTadlu64BZjvuIWBN4UW3W\nbcDes2WPq47JfWj/DtirKqfUqy9m5jf6WH7UjqUEHnVVBWsdXJu1LvDAEffjno3pns8NEzTSfVf9\nPfDK2qw7gP0ys+sYo8rotA/tQT6ziojtgcfUZh2cmXMGjmXmccB7a7P2iIh797rdIdSPhfWbf0dJ\nkiRJmk4G4EiSJC1SVfaEeiaUyymBNsupgh8Or826K+XO1WF9h5nSEauyfMmMh9Oesn1ey0/VnE8J\nCvlkM+PDlPliM4MGpXzDP4AfMFPSpeWnLJ8NZ5y6ZR16YETsHRHfBw6lvdzT5bRf1FhORGxMufu4\n5TeZeU6nZavAgV/WZu0aEVt1WrameUHDoJLOmvtpGdApo5ZGY+/G9Lt6DA78MqV0RcuOVam/XpyZ\nmT+Ze7GhnUF7BpTtIuJOHZarBxF0yuAxayBCVVbn/rVZ/8jMfrM2HZiZvZQ3+UFjeqQX6atsCfUM\nQJYj6U3Unm84sV4AmXk17YG3d42Ie3VYdNjjPpgpDwUlmOG4Wbr2ZNoziByWmWd1W7glMy8FPl6b\ntYT+xhwH9LHsOHywx3PqWD/bQD2b1U09ZO+ZBqPedw8D6sFbR2bm6XOtvMo2c/Bcy9XUg3wSeHsf\nbQ9kJoAuKKXqxq0ZDDtXZjVJkiRJU8AAHEmSpMXrCcBdatNfmyPLyDjKUN0MfL02q1mGqr6NE7oF\nVcyDLYHPABdExDMm1IdROpWyb/eY7Q72MeiWdeg0yvH1hMby1wFP6XB3f9O+tAftzFUWpPl6t/Jn\n3YwqC9M4HU0JXuv1cepkurlCei29vy+PnmU9D6s9v5L2kn5dVRdFD23M3rmXtsARPS43lKqP9SCC\nlWn0sQoire+DY1hefd4uEdH8H8eutP/fY5AShx0DVzs4h/bsJgMFe0TEqhGxYUTcvRnMCFxdW/Q+\ng6x/RVJluzm3NmvfiHhAt+XnSS/Zn+qZceY67u8XEes3Xt+G9sxup2Tm9bP0qXl+aJ4/ZvPVOdbV\nzfmZOclSS7cDP+tx2WapwFEHcm1Qe35116Wmxzj23cMa01/roz+Hz73IPz2y9vzkKoCnJ5l5Oe0l\noXbpY7uDah4PEw0ilCRJktSbZvpwSZIkLR69luwBIDNPiIhzgVamhCdHxDqzpX/v0cHAi6vnD4uI\nrTPz3IhYnfaMLf3cwdqLozJz904vRMSqlItTDwCeTQnQWIkSsPSNiHh1Zh7Y5/a26Ocf+WO2LnDx\nCMt5jcNRwIurEgVzqR/LtwOHzbH8N4H/A1avpveNiHfOsj+ad5uv00OfJu2mCRxvzf20BFiTKcqC\nU2VLWrWHRW/KzH+MqRuXDfveRMTdab8oe3K9/FIPTmhMP4TeMoyd1sc2hnU0JfNGy260X9TdDlij\nen4bcGKHdZxAOSesRDnvPYD232HY8lOw/AXkjjLzjoi4nplMImvPtnxLRNyNkjXkMZT+93qB1VIk\nvTmMmbKHawMnRsRXqvnHVKWU5tPRlCC9lt2Az7UmIuIuQL20zXIBOJl5XkRcAmxMycTxcErGwfo6\n6+Y67h/SmO70WevmbErWuNb3ZnNd3cznuaaTv2TmjT0u2xwH9/TZ7sMateejLns2DuPYd9s1pn/b\nR39O6mWhiNgEuEdt1p+rwMZ+1ANi+m07iJsa050yxUmSJEmaMmbAkSRJWoQi4s7AXrVZ52Xm8T00\nrQfprMbyJaMGcTRwQW163+rnU5n5R/ytzB1UMTKZeUtmXpKZP87MF1OyJNQzxfxvRGw/X/3pw/+j\nPXvGVpS+70f7BbYtgB9FxJPmvYedXQ9cRCkN9T5gu8zcvZfgm+p9uF9t1k8y87LZ2mTmdcB3a7O2\noD2jQFMzsGTdufq1grqOUsqkbtr21bfpnIWp+fjSpDrYo2YQRr/ZwZpllnoN6pj1szVizaCAZtBA\nffrkThd8q9JQ9Qv5zc95c539ZsC5qc8AjXqWuZVnWzAi1oiIj1JKIL6Pkg2pn+wGow4CWKw+CNQz\nrawGvIQS7HVVRPwiIt4TEY+pAoPHba4MOPXpv2fmuXRWLyk17HFfP+6uzMwruy7ZUAW2/qk2a4Oq\nBNZc5vNc00k/weXN7JGzfrYHUL85sp9Ay0kZx77bqPY8KefFnlQ3CvRyPG3WmH4WvY0X6o961pt6\nlqlxaR4Poz72JEmSJI2BATiSJEmL0/OAVWrTh/TYbhxlqLKx3n2qizP1skDf7+eCz6hl5omU4JaW\npcB7JtSd2VyemRfUHudn5nGZeXCV7efZzPyzfiXgkIi45zz276jMjA6PtTJz88x8VGa+NTP7KYfU\nVyanmmZGpdmO5b81pu/dcakVXPVZbpYLc1+NRzOwqd9Scs0LpL1mS5mtTM2onUp79qQdI6L+vTVX\nGZ5Or/0z8CAi7kR7VoUrgDP77OOyuRfpX9W3HwCvo728Xj/8f04PqoDM3SiZ0ZruRClJ8zbgp8Cl\nEXHwOMtUVQGk9TI2d4+IzWvTQx33HdaxbI71QPv5ZpCylfXzzRJ6Cw6bz3NNJ2P5bA+oHly42sR6\n0btx7Lv6MXhDZva7jV6CgkYdMLPmiNfXSTMosNfMQ5IkSZImyH/YSJIkLU7NYIO3R0TO9QCad1rv\nPKIAjnrpky0pd50+tsvrk3IwJRNPyx5VJqEFIzO/DrylNmstaqUtFprqYvzzGrO/2uOx/P1Gu2dV\nF707+S3tF5Q2jYiNuiy7outU2kjqW2beQXsWj9WAHQCqIM1da6/NFkBwbO15PfDgYbRnCzh2isry\nvYv2oInbgMOB/SmfqbtSLu4urQcz0jmIRHPIzCsz85nAg4GP0T2j1JrAPsBpEfHhiBjX/8ya2Z8e\nUXtePy56Pe4fHBFrAkTEVsAmtdfOyMyr0TSrByKu0XUpDWvU2WN6yfQ0rOa4dWpKfkqSJEnqzgAc\nSZKkRSYi7gs8dISr3G/uRWaXmefQfuH+08zc9X8FywdLzLvMvIn2si1Be/aEheIjwCm16d0iYhSl\nxCbhicD6I1rXWsDTOr2QmdcCpzdmP6LTslrugvBU7afM3KlLFqbmY89J93UOzQvm/ZYbWqcx3Syz\nNi26leO5PzPZCpL2QJ2meiDCRhHRysrUzArSDHqYiIhYG3htbdbVwE6Z+dzM/GJmnpKZ/8jMTlkg\n1pq/ni4+mXlaZr4uM+9FCVJ5FvBx4OzGogG8AfifMXWl43EfEesA9ew7swXgnM7MxfilzJTGGeS4\nr59vBiltVj/fLGOwLDorsr/Wnq/XyAQ2TtP0P+H6MbjGAMFvze+8TpqZNv+vx/FCt8cGffZxEBs3\npv/acSlJkiRJU2Wa/tiSJEnSaAxdNqqhVTJqWPUsN/VU84dl5m0jWP8oNO8snY9/ro9UdcH2rY3Z\n746IQcucTNKoj+XZgsl+2pjef8TbXix+1ph+XERsOpGeLG6XNaa37rP9veZY37ToFoBTz2Tz+9lK\nFGbmpcAjsyQpAAAgAElEQVSf5lhHp21NymNpLxH5zsw8pdvCDXcdQ39WSJl5SWZ+IzNfm5n3BbYF\nvtpY7PURcY8xbL7bcb8LM/+nuwb4XbcVVBmkju+wjkGO+/r54c4R0WvJupZ6psTLpyjT1EJxQe15\n0J7BaC63N6ZX6qNts9ThJP2j9jwo2TJ7UgWubdjDopc2phfCGL9+LNwBXDSpjkiSJEnqnQE4kiRJ\ni0h1x+g+tVk3UC7EbtHn40u1ddyD0WS5OIz2Ek8t01B+qqVZcuqmifRiSJn5Y+DE2qx7Ac+dUHcG\nEhEbAE+ozTqb/o/jLWjPjvGYWYJFPkm5uNHy2CqblGoy8w/AL2qzlgKvnlB3Fq3MvBC4vDbrIX0G\n0e3UmD55+F6NxW9pP8/uXP2e9SwevQQQ1D/nu0XEyrTvg2uB0wbu5Wg1g6l+3EujqsTQNn1sZ9qD\nIKaqf5l5ZmbuTRmrtCyl/XtoVNu6GDivNuveEXEX2o/74zpkQGpqO+4bP1t6+fw0zw879tAGgIi4\nD+2BHOM810zVMTNCZzSmmwGUs2lmG+opqKYKrL9PH9sZt2YQYj+ZPLfvcblzaf9e3aGPbUzKvWvP\nz8rMZsCVJEmSpClkAI4kSdLi8higHmDw/cw8JzMv6OfB8kExQ2ciycyrgO81Zv8xM38z7LpHISLW\npf0uboC/TaIvI/K+xvR/DJDSf5KeD6xcm/5av8dxdSwfWltHM0Dtn6plv9VY9vPD7rOI6Pku7gXk\ngMb0GyPiAR2X7FFErBwRmw2zjkWont1ifcr5vVfPm2VdUyMzb6W9POFawINpz+IxWxmelnogwsMp\nF29Xr807rsoYMg2apVJ6LdfzXGZKN/biltrz+Spp049bGtPT0scvNabvMabtdMqCUw+e6fe4f2j1\nfbNVbd5ZmdlL9qtfN6b7KVv5/Mb0OM8103rMDOukxvT9+2jbfH97DarZAeg309E4NY+bZ/fRtqfj\ntcrMVA8gvkdETG0QTpWJqv43XfM4kSRJkjSlFtI/4CVJkjS3ZqDM4QOu5yja08E/MyLWGHBdda+l\nXBxtPZ40gnWOyqtov7h5FXDqhPoyCkcAZ9am7ws8Y0J9GcSojuVv0J7ZZrZgsv8Arq9NPwz4yKAl\n2CJiD+C4QdpOuR/SnrVjZeDwiOinbMY/RcSGlNJWu46gb4tJsxzOf/ZyLEbEPrRnUPhNZp4z0p6N\n1lGN6RfRXnajl0CE+jJ3B/aeYxuTdHVjes5sF1X2m3/vczvX1J6vHBHNDG+Tdk1jelrKazUDojpl\n7huF5jG5B/CQ2nQvx/2JQKuE56rAG+fYRjdH0l6C83kRMWfZu+rcXc+Atozlz1ujNK3HzLBOoj0T\nWK8ZXQBOb0zv2WO7t/SxjflwPHB+bfpJEfHAuRpVJeL27WM7n25Mv3+KS7Q2j4NpKaMoSZIkaQ4G\n4EiSJC0SEbE28LTarOuBHwyyripTwDdrs9YEnj547/653osz89jaYyouCkfEs4F3NmYfspBTvVd3\n+r6/Mfs/Bg0mmU8RcX9gu9qs31elj/qWmZfSfhHyvhHRsbRBdTy+qjH7tcAXq89XTyJi/Yj4CPB9\npusO85Gojq39gL/XZt8HOCYiHtzreqJ4PqX0RLNsiso5uJ6FaxfgP2drUJVN+2hj9sdG3K9Ra15U\n3L/2/M9VuZ5ZZea5tB+P+zcWmaYLl81yM2+YbeGqnNaXgX6zaf2xMf3IPtuP29j7FxGbRsR+1T7s\nVTN4q9nPUWkek/swk9HlZkp5tlll5o20l+4Z6LjPzOuAg2qzVgUOqQK/Oqr26cG0f8f9IDPP69Jk\nFKb9mB5IZt5M+zjl4d2W7dD2L7QHruwVEdt1Wx4gIl4DPLWvTo5ZNa74ZG3WUuDLEdHMGPZPEbE6\n5RhcrY/t/IL24LZHAp/s5xwREUsj4lkjujFhNs3yvz2VK5QkSZI0eQbgSJIkLR7Por3kxpHVP/UH\n9bXG9H5DrGsSVouIe3R53CsidoqIl0fEzynZVVaqtf078I4+t7fZLNub7XG3kf3GyzsM+HNt+oFM\nV9ahbppZaprHYr96PpYz88vAxzv059yI+NfqbuvlVMEk20fEe4HzgNfTX7mYBaUKbHo2cGNt9pbA\nSRHxxYjYtdtd5RGxcUS8jHLX/yGApac6yMzbgH9pzH5XRHwqIjaoz4yIJRHxLOBXlHJVLT9nvBkp\nRuEE2rOM1C+m9pIFpKWebaq+jhuZrtIdvwKuqE0/OSI+3ylDTURsXy3fCq69vI/tNIMvDoyIV0bE\ndhGxZeN76E59rHdU/kD77/PyiPjviNglIrZu9G/Q7D3rUUpKXRARH6q+91fqtGBE3DkiPgS8sjb7\nOko2uZHLzD8DF9Vm1Y/Z31Tl2XpRL0PVDEToJ/PTO4B6sNsOwNGdAlarQL+fUbL2tFwPvK6P7Q3i\ndNqz4Lw2It4VETtHxFaNY2ahBb8eWXu+WUTMmRmr5gu150uBIyNiuSCeiLhLRBzITFBmMxvXpH2M\ncl5oeQAlsHfH5oJRyl7+kpI5bxm9l/KDMqarn4NfChwfEU+a5fywtBrj/TdljPc12v/mGodH1Z6f\nkpkLuSyuJEmStELp+IeFJEmSFqRRlexpOQa4BNi4mn5URGyWmRfN0maa7Eh78EmvLgUek5lX9tmu\nnwvFddcA6w7YdlaZeUdE/A/tdxW/jTFdUByFKmijmYFg2GP5m8AnmPn753kR8cZuFzgz87URcTVl\nX7UyBm0IHAAcEBHnAn8BLqOUX9qYEnyyUYfV3dhh3qKQmcdExGMp+7dVCmQJ8MLqcVVEnEXZTzdS\n9s+mlJI7nTIxLdp9NajMPCIiDqC9tMzLgf0j4kTKBfy1KBmjNm40vxB4QZVZYGpl5k0R8VtKhp+m\nfs6rx9C5zN7xVTDTVKh+33cD/1ubvT/w/Ig4gZL1aE3gfkC9DNBBlPe6p1KCmXlaRBzLTGm3jYAD\nuyz+LEq5vnmTmbdHxKco51ko59K3Vo+mA2kvddSvTSifoTcCN0fEGZRA22so2V62AB7E8v8je0Nm\nNssejdJRLP99B/0f983SUwDn9XPBPjOvioi9KZkTWwFZDwZ+ExFnA2dRyjluTdlXdXcAL8nM8xmj\nzLwlIj4LvKmatQrw9urRdEBtuYXga5RzQusYfArwwR7b/i/wMqAV0L0JJXjq95SAlqSU5tuemcDg\nT1OCX584dM9HJDNvjYh9KUGHa1WztwVOqMYSZ1F+l3tV81s+COwFbNPjdv4cEU8HvsvM+PshlLHx\nDRFxMuVvgVuBtSnfrdvQR6adYUXEXSl/x7QcMl/bliRJkjQ8M+BIkiQtAhGxJTMX2aDcCfqjYdaZ\nmctovyC3BNh3mHVOuTso/+DeNjN/P+nOjNAXaS/N8tCI2HNSnenB42gPJPhdZg5VAiQzL6fcKd2y\nPnNcdMrMtwOPoXP5ka0pdyY/h1Ka7WEsH3yzjLLv7z9YrxeGzPw15WLsVykXxurWA3amXEh8HmWf\n3Zvlg29+Bzw2M6c2MGySMvNNlAvMy2qzV6ac859LOZabwTenATv3Ur5pSnQrldNPIMKxXeb3kwVk\nXmTmx4DPNmavBuwOPB94Mu3BN1+jXGDv136Mr4TSKLwH+N48b3M14KGUbHD7UIKPtqc9+OZW4NWZ\n+fkx92UUx/1xXeb3fdxn5lGU771LGy/dh5KF6ZksH3xzPfD0zBw2ULZX/wn8ZJ62NW+qcUq9bGxP\ngXZV2+ur5ZuB49tQju9nU4I5WsE3X2b5cptTITNPoXynNbPz3Jcy3noG7cE3X6Fz0N5c2zmakuXp\n9MZLa1BKYj6Tci7eixKc0wy+uQEYZ2DnU5n5n/3twKFj3JYkSZKkETMAR5IkaXHYj/aL2kdk5i0j\nWG/zgkozy85CdTvlQsWfKEFGbwLunpn7VKV1Fo3qOPhwY/Z/TqIvPRp1+amWvo/lzPwF5QLWsykX\nxuYqCZKUu83fDmyRmfuvCCUDMvMfmbk35aLYgbQHfHVzBeUi4KOAB2Xmz8bYxQUvM99DufB9BLMf\nh+dQylZtv8COvU7BApf2GXx3GqVkUFO3IIeJysyXUTLfXDjLYqcA+2XmcwbJ4lOVOXow8GLgO5TS\nKdfRHsw1MZl5a2Y+CXgCpVTUGZQL77ePaBNnV+v+JOX7fi7XVf24f2Z2yxY0Sp2O+zuAX/e6gipw\n46wOLw103Gfm8cA9gfezfCBO3TXAp4Ct5zN4siqtuiclgOpg4ExGe8xM0idqz3eMiPv02jAzT6IE\nlHyX5YNhW84A9s3MF2TmHYN3c7wy8xhKBrBD6f59dxbw4szct7phYJDtnEPJHvcc4HjKZ28211K+\ng/cH7jrm7Fgvqj3/RmZeMsZtSZIkSRqxmPJs1JIkSZI0FSJiVcrF7HtSMt6sTrkL+irgr8BJmdm8\na3uFFBH3pAQv3Y1SSmIZZT9dBpyemedOsHsLWkSsCTwc2JySzelGyoXykzOzlyADTZGq7N5DKOeW\nO1POKX+jZP/y/RyhiNiAcl7akvLZWZ3y+bkC+D1ln48ieHlRiIigZAe6H6UM4xLKOfxcSmm3xRD0\nMlUi4kxmSikdUGVA63cdd6Fk09qUUqbrYuD3mXnqqPo5XyJiPeCRlO+7VSjnxrOqTDmj3tY6lKx9\nm1DOD0spQXmXUAJ+/jQfx3xE3J8SLNXysMw8YdzblSRJkjQ6BuBIkiRJkiRJ0gRFxPMoJR2hBK3e\nrSoxpRVERHwGeGk1+fPMfMwk+yNJkiSpf5agkiRJkiRJkqTJOpySjQlgPWYCMbQCiIiNKGWFW6a5\nZKwkSZKkLgzAkSRJkiRJkqQJysxlwFtqs94YEatNqj+ad28CVq2efyczj59kZyRJkiQNxgAcSZIk\nSZIkSZqwzPwe8KNqclPgtRPsjuZJRGwGvLqavAV44wS7I0mSJGkIK026A5IkSZIkSZIkoARi7Fs9\nv3mSHdG82QL4QPX8zMw8f5KdkSRJkjS4yMxJ90GSJEmSJEmSJEmSJElasCxBJUmSJEmSJEmSJEmS\nJA3BABxJkiRJkiRJkiRJkiRpCAbgSJIkSZIkSZIkSZIkSUMwAEeSJEmSJEmSJEmSJEkaggE4kiRJ\nkiRJkiRJkiRJ0hAMwJEkSZIkSZIkSZIkSZKGYACOJEmSJEmSJEmSJEmSNAQDcCRJkiRJkiRJkiRJ\nkqQhGIAjSZIkSZIkSZIkSZIkDcEAHEmSJEmSJEmSJEmSJGkIBuBIkiRJkiRJkiRJkiRJQzAAR5Ik\nSZIkSZIkSZIkSRqCATiSJEmSJEmSJEmSJEnSEAzAkSRJkiRJkiRJkiRJkoZgAI4kSZIkSZIkSZIk\nSZI0BANwJEmSJEmSJEmSJEmSpCEYgCNJkiRJkiRJkiRJkiQNwQAcSZIkSZIkSZIkSZIkaQgG4EiS\nJEmSJEmSJEmSJElDMABHkiRJkiRJkiRJkiRJGoIBOJIkSZIkSZIkSZIkSdIQDMCRJEmSJEmSJEmS\nJEmShmAAjiRJkiRJkiRJkiRJkjQEA3AkSZIkSZIkSZIkSZKkIRiAI0mSJEmSJEmSJEmSJA3BABxJ\nkiRJkiRJkiRJkiRpCAbgSJIkSZIkSZIkSZIkSUMwAEeSJEmSJEmSJEmSJEkaggE4kiRJkiRJkiRJ\nkiRJ0hAMwJEkSZIkSZIkSZIkSZKGYACOpAUtIi6IiIyI3Xtc/qBq+ReOt2fzo/b7tB7XT7pPABHx\nzqo/75xwPx7U2D+L5r2XJEmaFMfgjsHn6IdjcEmSpBFzDO4YfI5+OAaXpCmx0qQ7IEkaieOAc4Gb\nh1lJ9QfcL4GjMnP34bs1cVcCX6qe7wpsNcG+SJIkaXFxDN6ZY3BJkiSNi2PwzhyDS9KUMABH0orm\n34H3A5dMuiMj9rnMPGjSnaj5BHAYcPkkO5GZfwFeCOUuCfzDQ5IkaRIcg88Px+CSJElqcQw+PxyD\nS5LaGIAjaYWSmZew+P7omDqZeTkT/qNDkiRJ08Ex+PxwDC5JkqQWx+DzwzG4JKlpyaQ7IEnzqVvt\n23qt1ojYKCI+HREXRcQtEfHniHh/RKw2y3p3jIjDqja3RsRlEXFEROzaZfl7R8SXIuLCavnrqjq+\n346IZ4z4d94kIj4REedGxM0RcWNE/CUifhQRL6st9ytK2k2ARzTqxf6qvlyr3nBE7BYR34+IyyNi\nWUQ8tVqmY+3biHhhNf+giFgrIj5Y7d9bIuLiiPhkRNy5y+8REfGyiDg1Im6q9vG3ImLb+npHue8k\nSZI0PMfgjsElSZI0vxyDOwaXJE2GGXAkqd3mwMlAAL8G1qbUTH0zcD/gyc0GEfFG4IPV5CnA8cBm\nwBOBJ0bEKzLzs7Xlt6XUql0LOBs4EkhgU2APYHXgm6P4ZSJi4+r3uStwIfAj4JZqWzsB9wA+Uy3+\nI0rt3D2Af1TTLWd3WP2zgFcAfwB+CmwA3NZj19ah7INNgaOBMyn7+RXADhGxU2Y21/Vp4KXA7cBR\nwGXA9sCJwBd63K4kSZKmj2Nwx+CSJEmaX47BHYNLksbAABxJarc/8DngVZl5K0BE3Bf4DfCkiNgl\nM49rLRwRjwc+BPwNeHpmnlh7bRfgB8CBEXFUZv6peukNlD863pqZ76tvPCLWBLYd4e/zUsofHZ8G\n/iUzs7atVYEdW9OZ+f6IOIHyh8fZmfnCOdb9SuDlmfmZOZbr5KmUfbNzZl5f9WcT4ARgO+DZwCG1\nvj61+l2uBh6dmadU85cAHwDeNEAfJEmSNB0cgzsGlyRJ0vxyDO4YXJI0BpagkqR2fwVe2/qjAyAz\nzwIOriYf3Vj+ndXPl9T/6KjaHQe8B1gZeHntpY2qnz9sbjwzr8/M4wfu/fJa2/pR/Y+Oalu3ZObR\nQ6z7pwP+0QFwPfDi1h8dVX/+Bnyimmzu59dWPw9o/dFRtVkGvBW4aMB+SJIkafIcg/fOMbgkSZJG\nwTF47xyDS5J6ZgCOJLX7RWbe1GF+K/XkJq0ZEbEBsANwLfCTLus7qvr5sNq831Q/PxURj60i8Mel\nta0PRMRTI2KNEa77W0O0PTkz/95hfqf9vBKwczX51WaDKkXnN4boiyRJkibLMXjvHINLkiRpFByD\n984xuCSpZwbgSFK7v3SZf231c7XavC2qn2sDt0dENh/MDPw3rLX7IPBzStrLnwDXRMQJEfGBqi7u\nKB1MGazfC/h2ta3TIuJjEbHz7E3ndOEQbfvZzxsAqwLLKHdmjLovkiRJmizH4L1zDC5JkqRRcAze\nO8fgkqSerTTpDkjSlFnWx7JLq5/XAN+ZY9nLW08y80bgMRGxI7AnsAvlzoAdgX+LiHdk5rv76EdX\nVWrKvSPifcBe1bZ2AV4DvCYivpCZLx5w9Z3ukOhVP/u5LrvMH3R9kiRJmjzH4L1zDC5JkqRRcAze\nO8fgkqSeGYAjSYNrRaHflpkv7LdxVSv3RICIWAV4PvBZ4J0RcXhm/nFUHc3MM4Ezq20tAZ5AuSNg\n/2pb3VKHToMrgFuBVYDNgT93WOYe89khSZIkTYxj8PnhGFySJEktjsHnh2NwSVoELEElSQPKzIuB\nM4ANImL3Idd1a2YeBJwABPCAoTvYfVvLMvN7wHerWQ+svXxr9XNqAjSr2rbHV5PPa74eESsDz5jX\nTkmSJGkiHIPPD8fgkiRJanEMPj8cg0vS4mAAjiQN5z+rn1+JiMc1X4yIpRHxqIjYqTbvlRFx7w7L\nbglsU02OpJZrROwXEdt1mL8+Jd1nc1sXVz+3joip+eMD+Hj1800R8aDWzOouhv8C7jaRXkmSJGkS\nHIPPD8fgkiRJanEMPj8cg0vSAjdNXyqSNIz/i4hrZ3n9aZl5yag3mpnfjYg3Av8D/Dgi/gT8Ebge\nuCvwYGBd4F8oUf0ALwMOjIjzKekwW8vuSkkveVhm/mZEXXw68KWIuBg4DbgaWB94OLAGcAzw7drv\nc2FEnFr1+3cRcTJwC/DHzPzgiPrUt8z8ZkR8Adgf+G1E/Aq4DNieko7zk5R9fGvXlUiSJGnUHIN3\n5hhckiRJ4+IYvDPH4JKkqWAAjqTF4r5zvL7quDacmR+OiJ8DrwF2Bx4L3A5cAhwNHAl8q9bkbcBe\nwI7AzsDawD+Aoyi1b785wu4dAFxQbWd7YD3gcuAU4CDgkCq1Zd3TgQ8Aj6Ckulxa9W1if3hUXgr8\nFngF5Q+nG4BjgWcBT6qWuXwyXZMkSVohOQbvzDG4JEmSxsUxeGeOwSVJUyEyc9J9kCQNKCIOAl4A\nvKiqnbtCioifAY8GnpmZHf9wc19JkiRpFBxXFo7BJUmSNF8cVxaOwSVp+pkBR5IWh5dExO7AzZn5\nikl3ZhwiYhvgz5l5Y23eysCbKX90XAb8oNHmbsC7q8ld56mrkiRJWjE4BncMLkmSpPnlGNwxuCRN\nNQNwJGlx2KV63EBJTbkY/TvwtIg4BbiYUlN4W2ATSn3eF2bmTY02d6ZE+0uSJEmj5hjcMbgkSZLm\nl2Nwx+CSNNUsQSVJWhAi4omU+rfbAetTgkgvodTl/VBmnjHB7kmSJEmLjmNwSZIkaX45Bpekhc0A\nHEmSJEmSJEmSJEmSJGkISybdAUmSJEmSJEmSJEmSJGkhMwBHkiRJkiRJkiRJkiRJGoIBOJIkSZIk\nSZIkSZIkSdIQDMCRJEmSJEmSJEmSJEmShmAAjiRJkiRJkiRJkiRJklYYEfGFiLg0Is7s8npExMci\n4tyI+F1EbDfXOg3AkSRJkiRJkiRJkiRJ0orkIGDPWV5/PHDP6vEy4JNzrdAAHEmSJEmSJEmSJEmS\nJK0wMvNo4MpZFnkK8OUsTgDWjYiNZ1vnSqPsoCRJkiRJkiRJkiRJktRy2+Xn53xvc5UNt3o5JXNN\ny2cy8zN9rGJT4K+16YuqeZd0azD2AJxBd+RfH/mKgbb3nuvWGqjd6bf8Y6B2G6y05kDtrlt2y0Dt\nlhADtXsJmwzU7siVrh2o3UW3XTNQu7WXrjZQu4tvuWqgdjfecfNA7TZcZZ2B2l0/4PbuvuoGA7Vb\nGoMdLz+/9IyB2r3yrrsM1O4Wlg3UDuDU2y4bqN19Vr7zQO1uzjsGanfFssHe+0HPFZmDfYfdf5XB\njrUr89bBtheDnbO/fuM5A7XzM9/ZoJ/5LdeeNci3K9+HzhbK+wC+F93ccvNfB9vgGDgG78wxeGeO\nwTtbKOcemP9xuGPwzhyDd+ZnvrOFMvbzfejO96Izx+D9cwzemWPwzhb7GBwWzvnHMXhnjsG7bM8x\neEcL5fO+2Md94HvRjZ+JzqZpDD6XKtimn4CboVmCSpIkSZIkSZIkSZIkSZpxMbB5bXqzal5XlqCS\nJEmSJEmSJEmSJEnSeCwbLLvahB0BvDoiDgN2BK7JzK7lp8AAHEmSJEmSJEmSJEmSJK1AIuJQYHdg\ng4i4CHgHsDJAZn4K+AHwBOBc4EbgRXOt0wAcSZIkSZIkSZIkSZIkrTAy83lzvJ7Aq/pZ55wBOBFx\nH+ApwKbVrIuBIzLzrH42JEmSJKk3jsElSZKk+eUYXJIkSRqjXDbpHsyLJbO9GBFvBg4DAvhN9Qjg\n0Ih4y/i7J0mSJK1YHINLkiRJ88sxuCRJkqRRmDUAB3gx8NDMfH9mfqV6vB/YoXqto4h4WUScFBEn\nfe7Lh46yv5IkSdJi5xhckiRJml+OwSVJkiQNba4SVMuATYALG/M3rl7rKDM/A3wG4LbLz89hOihJ\nkiStYByDS5IkSfPLMbgkSZI0TstWjBJUcwXgvB74eUScA/y1mnc3YGvg1ePsmCRJkrSCcgwuSZIk\nzS/H4JIkSZKGNmsATmb+KCLuRUm1uWk1+2Lgt5l5x7g7J0mSJK1oHINLkiRJ88sxuCRJkjRemWbA\nASDLnjhhHvoiSZIkCcfgkiRJ0nxzDC5JkiRpWEsm3QFJkiRJkiRJkiRJkiRpIZszA86w/vrIVwzU\nbvNffmqgdptu/7aB2p29ZOWB2v3t1qsHarfWSqsP1G6NJasM1O7vA4ZarTzPMVrn33TpQO1WHfD9\n23L1jQZqd90dNw/Ubs2lqw3U7sZltw7Ubq2lqw7Ubr3V1hyo3UV502Dtbr92oHYANwz4Xhx364UD\ntVt96WCfwTsGTKs26Lni+gH3y3V5+0DtBvXpq04eqN2dV117oHZ+5jsb9DM/KN+HzhbK+wC+FwuB\nY/DOHIN35hi8s/k+9zz6LttyyrV/HqjtfI/DHYN35hi8Mz/znS2Usd9ifx+GOfcOyveiM8fg/XMM\n3plj8M4WyrkHFs75xzF4Z47BO3MM3tlC+bwPaqG8D+B70Y2fiRFbtmKUoDIDjiRJkiRJEzbfF4Al\nSZ57JUmSJEmjZQCOJEmSJEmSJEmSJEmSNISxl6CSJEmSJEmSJEmSJEnSCmrAMoULjRlwJEmSJEmS\nJEmSJEmSpCEYgCNJkiRJkiRJkiRJkiQNwRJUkiRJkiRJkiRJkiRJGo9ld0y6B/Ni4Aw4EfGiUXZE\nkiRJ0uwcg0uSJEnzz3G4JEmSpF4MU4LqXd1eiIiXRcRJEXHSoVdeNMQmJEmSJNU4BpckSZLmX8dx\nuGNwSZIkqUe5bP4fEzBrCaqI+F23l4CNurXLzM8AnwE4f9vH5cC9kyRJklYwjsElSZKk+TfIONwx\nuCRJkqS6WQNwKH9Y7AFc1ZgfwK/H0iNJkiRpxeYYXJIkSZp/jsMlSZIkDWWuAJzvAWtm5mnNFyLi\nV2PpkSRJkrRicwwuSZIkzT/H4ZIkSdK4LJtMSaj5NmsATma+eJbXnj/67kiSJEkrNsfgkiRJ0vxz\nHC5JkiRpWEsm3QFJkiRJkiRJkiRJkiRpIZurBNXQ3nPdWgO123T7tw3U7h0n/ddA7U7Z7tUDtbtl\n2W0DtVtjySoDtVtvyWoDtfv5sssGanfl7TcM1G7Q/bLnmvccqN0Jt/xtoHYX3nz5QO22udOmA7W7\nUwz2kTvn1sH6ue7S1QdqF8RA7f52+3UDtXvwyhsO1A7ghGWDvfcbrjLYvhnUTctuHajdoOeKNVYZ\nrJK8hp8AACAASURBVN2dYulA7WCwdpveaYMBtzcYP/OdDfqZX33pYMeZ70NnC+V9AN+LhcAxeGeO\nwTtzDN7ZfJ97Hrr2Vpx07fkDtZ3vcbhj8C7tHIN35Ge+s4Uy9lvs78Mw517fi84cg/fPMXhnjsE7\nW+xjcFg45x/H4J05Bu/MMXhnC+XzvtjHfeB70Y2fidHKXDFKUJkBR5IkSZKkCRv0ArAkaXCeeyVJ\nkiRJo2QAjiRJkiRJkiRJkiRJkjSEsZegkiRJkiRJkiRJkiRJ0gpqmSWoJEmSJEmSJEmSJEmSJM3B\nDDiSJEmSJEmSJEmSJEkajzQDDgARcZ+IeHRErNmYv+f4uiVJkv4/e/cebtlZ1wn+9zt16l6Ve1K5\nkwABRUEuR5DxBmp3AHvottUWow0y0Rq11elWR51HxxaddqRHcBzxVsqAl24d1GnFiHQrQ5AxICkh\nBAjhTq5UEpJKUvfb/s0fVXHO1Ky9d9W7z16Hffbn8zz74WSv/T37PfWeteq7k5d3AfNLBwcAgH7p\n4AAAwKRGLsDJzB+KiD+LiB+MiA9n5j9ddvjnpzkwAACYRzo4AAD0SwcHAABWwrhbUH1PRDyvqvZn\n5jUR8ceZeU1V/XJE5LBQZu6MiJ0RES+84Dnx9O3XrtBwAQBgzdPBAQCgXzo4AABM0+DEao+gF+Nu\nQbVQVfsjIqrqsxHxooh4aWa+PkZ88KiqXVW1VFVLPnQAAMBZ0cEBAKBfOjgAADCxcQtwHsjMZz/x\nD6c+hPyTiLgoIp45zYEBAMCc0sEBAKBfOjgAADCxcbegemVEHF/+RFUdj4hXZuZvTm1UAAAwv3Rw\nAADolw4OAADTVIPVHkEvRi7Aqap7Rxz725UfDgAAzDcdHAAA+qWDAwAAK2HcLagAAAAAAAAAAIAR\nxt2CamIfPPJAU+7OhfVNufc/9weacn/2/jc05V7wzFc25Tbkuqbc3ccfa8o9fvxgU67VCzZf1ZT7\n7GB/U+7I4FhTbvO6DU257dn2+3k4TjTlnr3x0qZcq8s2X9CU27G4rSm3pw435SIivmLj5U25Lz3R\nNvcvOe/Bpty3fP5oU671WrEp2y7vdx57pCl32eL2ptzGxmt96zXNOd+t9Zx37V1ZszIPEeZiFujg\n3XTwbjp4t76vPS897xlx25E9Tdm+e7gO3k0H7+ac7zYr3W+tz8Mk115zsbJ08LOng3fTwbv1fe2J\nWPvXHx28mw7eTQfvNivnu2vvylvrc7HW56HZYD5uQWUHHAAAAFhlrf8BGIB2rr0A09H6Hx4BaOfa\nC18Ypr4DDgAAAAAAAAAAc6rsgAMAAAAAAAAAAIxhAQ4AAAAAAAAAAEzALagAAAAAAAAAAJiOwXzc\ngmrsApzMfH5EVFXdmpnPiIiXRMSdVfW2qY8OAADmkA4OAAD90sEBAIBJjVyAk5n/NiJeGhGLmflX\nEfGCiHhnRPxEZj6nqv5dD2MEAIC5oYMDAEC/dHAAAGAljNsB51si4tkRsTEi9kTElVX1eGb+YkT8\nXUR0fvDIzJ0RsTMi4qpznhIXbbl05UYMAABrmw4OAAD90sEBAGCKqk6s9hB6sTDm+PGqOlFVByPi\nU1X1eEREVR2KiKE36aqqXVW1VFVLPnQAAMBZ0cEBAKBfOjgAADCxcQtwjmbmllNfP++JJzPz3Bjx\nwQMAAGimgwMAQL90cAAAYGLjbkH1NVV1JCKiqpZ/0FgfEa+a2qgAAGB+6eAAANAvHRwAAKap5mNd\n+8gFOE986Oh4/vMR8fmpjAgAAOaYDg4AAP3SwQEAgJUwbgccAAAAAAAAAABoM7ADzoq4aHFbU+7+\no4825Y4MjjXlXvDMVzbl/u5Dv9uU+5ovu7Epd8m6rU25qxfPbcrdeeTBptyDg0NNue25oSn3sb33\nNuW+bsczm3L3n9jXlNt/ovP/TDPW1g0XN+UersNNuQcO723Ofem2q5uy6zKbcvfVwabcBwYPNeX+\n4JGFplzf14qqasptXljflPvwwfubco8d3d+Ue/hQ2znonO/WfM4faPu72jx0a52HCHMxzGrMxRcK\nHbybDt5NB+/Weu2JaLv+XLr+nPjgvrua3u/ixe1NOR28mw7eTQfvpoN3m4drr7nopoOfPR28mw7e\nre8O/vnBoThax5uya/36o4N308G76eDdZuV877v3ufaOyOngnea5g38havsbFKBD34tvAOj/QwcA\no7X+S4/W/wAMgGsvwLT0/R+AAXDthVnnFlQAAAAAAAAAAExHzcctqOyAAwAAAAAAAAAAE7AABwAA\nAAAAAAAAJuAWVAAAAAAAAAAATMfgxGqPoBdnvQNOZv7uNAYCAAB008EBAKBfOjgAAHC2Ru6Ak5lv\nPf2piHhxZp4XEVFVL5/WwAAAYB7p4AAA0C8dHAAAWAnjbkF1ZUTcERG/HREVJz94LEXE60aFMnNn\nROyMiPji854RV267avKRAgDAfNDBAQCgXzo4AABMUw1WewS9GHcLqqWI+PuI+MmIeKyqbo6IQ1X1\nrqp617BQVe2qqqWqWvKhAwAAzooODgAA/dLBAQCAiY3cAaeqBhHxS5n5R6f+94FxGQAAoJ0ODgAA\n/dLBAQBgygbzsQPOGX2IqKp7I+JbM/MbI+Lx6Q4JAADQwQEAoF86OAAAMImzWsVfVX8REX8xpbEA\nAACn0cEBAKBfOjgAANDCNpoAAAAAAAAAAExHzcctqBZWewAAAAAAAAAAADDLsqqm+gYvvOLF032D\n02xd2NCU25DrmnJ7Txxqyv3NB9/YlPu+pR9ryn3k2MNNuYeP7mvKXbHx/KbcVy5e3JR7x7HPNeX+\n7fEdTbn3bmr7fWn1nsEjTbkLFzY15T5z/NGm3JWL5zTl7ji8pykXEfFVm69uyr3zwGeacv9s63VN\nuXcfa/sZW68V3/a8f92UuyDbrqEnou1Sf+extt/tVs75bq3nfCvz0G1W5iHCXAzznvvemU3BKdDB\nu+ng3XTwbrNy7Ynov4fr4N108G7O+W6z0v3Mw8ozF9108LOng3fTwbvNyrUnYnauPzp4Nx28mw7e\nbVbO91azMg8R5mIY50S31g5++D1/0GtfjojY9MJv7/3zgltQAQAAAAAAAAAwHQO3oAIAAAAAAAAA\nAMawAw4AAAAAAAAAANNhBxwAAAAAAAAAAGCcs9oBJzO/KiKeHxEfrqr/Mp0hAQAAT9DBAQCgXzo4\nAADQYuQOOJn5vmVff09EvCEitkfEv83Mn5jy2AAAYO7o4AAA0C8dHAAApqvqRO+P1TDuFlTrl329\nMyL+UVW9JiL+cUR8x7BQZu7MzN2ZufuBA/evwDABAGBu6OAAANAvHRwAAJjYuAU4C5l5fmZeGBFZ\nVQ9FRFTVgYg4PixUVbuqaqmqlnZsvXwFhwsAAGueDg4AAP3SwQEAgIktjjl+bkT8fURkRFRmXlZV\nn8vMbaeeAwAAVpYODgAA/dLBAQBgmgaD1R5BL0YuwKmqa4YcGkTEN634aAAAYM7p4AAA0C8dHAAA\nWAnjdsDpVFUHI+IzKzwWAABgCB0cAAD6pYMDAABno2kBDgAAAAAAAAAAjFXzcQuqhdUeAAAAAAAA\nAAAAzLKsqqm+wVde8XVNb7B1YUPT+52/sKkpd/fxx5pyl6zb2pS7uHGcv7773zflXvW8H2nKvf/Q\nvU25Vk/ftKMptzXXN+Xuapz3i9Ztacrdc+zRplyrzY3n0cPH9jXltqzb2JT7og0XN+UiIg7UsaZc\n61xctLitKbc52zYca71WvOB4W+6P8vNNudaf7/MnDjblnrR4blPOOd+t9Zx/7uYrm3LmoduszEOE\nuRjmzgdvzabgFOjg3XTwbjr4yur72hPRfw/Xwbvp4N2c891mpfuZh+HMRTcd/Ozp4N108G46+HBr\n/d+F6+DddPBua/2c18G7zcO111x0m5UOfugdu6a7MKXD5q/f2fvnBTvgAAAAAAAAAADABCzAAQAA\nAAAAAACACbTtlQYAAAAAAAAAAOPUYLVH0As74AAAAAAAAAAAwARGLsDJzBdk5jmnvt6cma/JzD/P\nzNdm5rn9DBEAAOaHDg4AAP3SwQEAgJUwbgec/z0iDp76+pcj4tyIeO2p5940xXEBAMC80sEBAKBf\nOjgAAEzTYND/YxUsjjm+UFXHT329VFXPPfX1/52Ztw0LZebOiNgZEfHkc58el269fPKRAgDAfNDB\nAQCgXzo4AAAwsXE74Hw4M1996usPZuZSRERmPi0ijg0LVdWuqlqqqiUfOgAA4Kzo4AAA0C8dHAAA\nmNi4BTjfHRFfm5mfiohnRMR7MvPTEfFbp44BAAArSwcHAIB+6eAAADBNNej/sQpG3oKqqh6LiO/K\nzHMi4tpTr7+3qh7oY3AAADBvdHAAAOiXDg4AAKyEkQtwnlBVj0fEB6c8FgAA4BQdHAAA+qWDAwDA\nlAxWZ0eavo27BRUAAAAAAAAAADCCBTgAAAAAAAAAADCBrKqpvsGbrvjOpjfYc0Y3x/r/e8fgoabc\n544+2pT70k2XNeXuPv5YU+7axfOacr/z969rym25/Kubcs+56KlNuWvXn9+Uu/3Q/U25Szec25Rr\n/X3Ztm5TU+7CxW1NuXuPPtKUy8im3EK25bY2/rlERBw4cbgpd8cjdzflvuSCJzXlvmjjJU251mvF\n1oUNTbkLFzY35Y5F27ZxHz20pynXyjnfrfWcP1Ft824eus3KPESYi2E+/MB724JToIN308G76eDd\nZuXaE9F/D9fBu+ng3Zzz3Wal+5mH4cxFNx387Ong3XTwbmu9g0fMzvVHB++mg3fTwbvNyvm+1ntf\nhLkYxjnRrbWDH/qL/3W6C1M6bP7Gf9375wU74AAAAAAAAAAAwAQswAEAAAAAAAAAgAk0bnAJAAAA\nAAAAAABjNN5CbNbYAQcAAAAAAAAAACYwcgFOZv5QZl7V12AAAGDe6eAAANAvHRwAAFgJ43bA+bmI\n+LvMfHdmfn9mXtzHoAAAYI7p4AAA0C8dHAAApmkw6P+xCsYtwPl0RFwZJz+APC8i7sjMt2fmqzJz\n+7BQZu7MzN2ZufvmA59YweECAMCap4MDAEC/dHAAAGBi4xbgVFUNquq/VNWNEXF5RPxaRLwkTn4o\nGRbaVVVLVbX0oq3XreBwAQBgzdPBAQCgXzo4AABMUw36f6yCxTHHc/k/VNWxiHhrRLw1M7dMbVQA\nADC/dHAAAOiXDg4AAExs3A443zbsQFUdXOGxAAAAOjgAAPRNBwcAACY2cgecqvp4XwMBAAB0cAAA\n6JsODgAAUzZYnVtC9W3cDjgAAAAAAAAAAMAIFuAAAAAAAAAAAMAERt6CaiX8+eLjTbn1jWuDHjl+\noCnX6s4jDzblDp042pTbe2x/U27L5V/dlDt4/7ubck//om9uyn3+aNvvy8aF9U25axbPacp97uij\nTbk9R/Y25R46+lhT7vJNFzblHj66ryn3ZVuuaMrde7xt3ifx5Rc/rSn34JG2ubgz+r1WbG2c+88c\na/sd3b6wsSn3yJG2ub9gY9u565zv1nrOr8u2v6vNQ7dZmYcIczELdPBuOng3HbzbLF17ZqWH6+Dd\ndPBuzvluOni31bj2motuOvjZ08G76eDd1noHj5id648O3k0H76aDd5uV832t974IczGMc2KFlVtQ\nAQAAAAAAAAAAY1iAAwAAAAAAAAAAE5j6LagAAAAAAAAAAJhTA7egAgAAAAAAAAAAxhi5A05mboiI\nV0TE/VX115l5Q0T8VxHx0YjYVVXHehgjAADMDR0cAAD6pYMDAMCUzckOOONuQfWmU6/Zkpmvioht\nEfF/RsTXR8TzI+JV0x0eAADMHR0cAAD6pYMDAAATG7cA55lV9azMXIyI+yLi8qo6kZm/HxEfHBbK\nzJ0RsTMi4tkXPCuu3fakFRswAACscTo4AAD0SwcHAAAmtjDu+KntN7dHxJaIOPfU8xsjYv2wUFXt\nqqqlqlryoQMAAM6KDg4AAP3SwQEAYJqq+n+sgnE74LwxIu6MiHUR8ZMR8UeZ+emI+IqI+MMpjw0A\nAOaRDg4AAP3SwQEAgImNXIBTVb+Umf/Hqa/vz8zfjYhviIjfqqr39TFAAACYJzo4AAD0SwcHAABW\nwrgdcKKq7l/29aMR8cdTHREAAMw5HRwAAPqlgwMAwBQNBqs9gl4srPYAAAAAAAAAAABgllmAAwAA\nAAAAAAAAE8iqmuobPP/yr53uG5zmwInDTbkXbL6qKffg4FBT7sDgaFPuc0cfbcqds7ilKff5o483\n5T5255805b7puT/YlNua65ty73rsY025f3LelzTl9tXxptz9x/c15c5dt6kpd+nC5qbc7Ucfaspt\nXdjQlIuIuGTd1qbcRw7vacpdvH57U671Z2y9Vjx0rO13plXrn0ur1nl3zndrPedbfz7z0G1W5iHC\nXAzz25/942wKToEO3k0H76aDd5uVa09E/z1cB++mg3dzzneble5nHoYzF9108P7o4N108G6zcu2J\nmJ3rjw7eTQdfWTp4Nx282zxce9f6XMzKPLR28EP/4X/stS9HRGz+jp/r/fOCHXAAAAAAAAAAAGAC\ni6s9AAAAAAAAAAAA1qgarPYIemEHHAAAAAAAAAAAmIAFOAAAAAAAAAAAMIGxt6DKzCdHxD+PiKsi\n4kREfDwi/mNVPT7lsQEAwFzSwQEAoF86OAAATNHALagiM38oIn4jIjZFxJdHxMY4+QHkvZn5oqmP\nDgAA5owODgAA/dLBAQCAlTBuB5zviYhnV9WJzHx9RLytql6Umb8ZEX8WEc/pCmXmzojYGRHxpHOv\ni0u2XLaSYwYAgLVMBwcAgH7p4AAAwMRG7oBzyhOLdDZGxLaIiKq6OyLWDwtU1a6qWqqqJR86AADg\nrOngAADQLx0cAACmpar/xyoYtwPOb0fErZn5dxHx1RHx2oiIzLw4Ih6Z8tgAAGAe6eAAANAvHRwA\nAJjYyAU4VfXLmfnXEfHFEfG6qrrz1PMPRcTX9DA+AACYKzo4AAD0SwcHAABWwrgdcKKqPhIRH+lh\nLAAAQOjgAADQNx0cAACmaDBY7RH0YmG1BwAAAAAAAAAAALNs7A44AAAAAAAAAADQZE52wMmqmuob\nfMNV1ze9wacPPdj0fi/Zdl1T7rOD/U257bmhKXddbmnKfWjweFNuU7attbr1wF1NuWdsubwp95/e\n/ytNuZc+5/uacq2O14mm3LaFjU25DbmuKXe4jjfljjb+fOsaN9W69+gjTbmIiC/b1Pa79uDgYFPu\nyKDtz/Tyxe1NudZrxXtPPNyUu2Zd2zg/eLTtmn2i2v6yvWBxa1OulXP+C4N5+MJhLrr99T3/OZuC\nU6CDd9PBu+ng3Wbl2hPRfw/Xwbvp4CvLOf+FwTx84TAX3XTws6eDd9PBu/V97YmYneuPDt5NB++m\ng3eblfO9b669XzicE91aO/ihN/7odBemdNh84y/2/nnBLagAAAAAAAAAAGACbkEFAAAAAAAAAMB0\nNO4GNmvsgAMAAAAAAAAAwNzIzJdk5scy85OZ+RMdx6/OzHdm5gcy8/bMfNm472kBDgAAAAAAAAAA\ncyEz10XEr0bESyPiGRHx7Zn5jNNe9lMR8Zaqek5EvCIifm3c93ULKgAAAAAAAAAApqIGtdpDON3z\nI+KTVfXpiIjM/MOI+KcRccey11REnHPq63Mj4v5x39QOOAAAAAAAAAAArBmZuTMzdy977Fx2+IqI\nuGfZP9976rnlfiYivjMz742It0XED457z6kswFn+g9y3/95pvAUAALCMDg4AAP3SwQEA4AtXVe2q\nqqVlj11n+S2+PSLeXFVXRsTLIuL3MnPkGpuRBzPz3Mz8hcy8MzMfycyHM/Ojp54770x+kCu2XXmW\nPwMAAMwvHRwAAPrX0sN1cAAAOEODQf+P0e6LiKuW/fOVp55b7saIeEtERFW9JyI2RcRFo77puB1w\n3hIReyPiRVV1QVVdGBEvPvXcW8aNGAAAOGs6OAAA9E8PBwCA+XFrRFyXmddm5oaIeEVEvPW019wd\nEV8fEZGZXxwnF+A8NOqbjluAc01Vvbaq9jzxRFXtqarXRsSTzvIHAAAAxtPBAQCgf3o4AABMSw36\nf4waTtXxiPiBiPjPEfHRiHhLVX0kM382M19+6mU/EhHfk5kfjIg/iIjvqqoa9X0Xx/wx3JWZPxYR\nv1NVD0REZOaOiPiuiLhnTBYAADh7OjgAAPRPDwcAgDlSVW+LiLed9txPL/v6joj4yrP5nuN2wPm2\niLgwIt516r63j0TEzRFxQUR869m8EQAAcEZ0cAAA6J8eDgAATGTkDjhVtTcifvzU4/8jM18dEW+a\n0rgAAGAu6eAAANA/PRwAAKZoMPLOTWtGjrlF1fBg5t1VdfW4133xJc/v9U9y87oNTbkjg2NNuY/t\nvbcp9+UXP60pd+3ieU25Dxy6rynX6lmbL2/KPTw41JT7yw/8elPuWc94RVNu67qNTbltjbmNub4p\n9/jgcFPu0/s/15R76ra2eR9E/xfcT+xrOyeu2HJRU+4jj9zVlGu9VrRe2zcvtP2u3Xdkb1PusaP7\nm3L3fupt41/UwTnfrfWcNw/d1vo8RJiLYR547M5sCp4FHfwkHbybDt5trV97Imanh+vg3XTwbs75\nbuah22pce81Ft7XYwSPOrIfr4N108G46+HCzcv3Rwbvp4N108G6zcr6v9XmIMBfDOCe6tXbwg7/6\nA73/B+Et/+oNvXxeWG7kDjiZefuwQxGxY+WHAwAA800HBwCA/unhAADApEYuwImTHyyuj4jTl3Rm\nRNwylREBAMB808EBAKB/ejgAAEzLYLDaI+jFuAU4N0XEtqq67fQDmXnzVEYEAADzTQcHAID+6eEA\nAMBERi7AqaobRxy7YeWHAwAA800HBwCA/unhAADApMbtgAMAAAAAAAAAAG3m5BZUC6s9AAAAAAAA\nAAAAmGVTWYCTmTszc3dm7n700IPTeAsAAGAZHRwAAPqlgwMAwBmq6v+xCpoX4GTmXw47VlW7qmqp\nqpbO23xJ61sAAADL6OAAANC/YT1cBwcAAJZbHHUwM5877FBEPHvlhwMAAPNNBwcAgP7p4QAAwKRG\nLsCJiFsj4l1x8kPG6c5b+eEAAMDc08EBAKB/ejgAAEzLYLDaI+jFuAU4H42I/7aqPnH6gcy8ZzpD\nAgCAuaaDAwBA//RwAABgIgtjjv/MiNf84MoOBQAACB0cAABWw8+EHg4AAExg5A44VfXHIw6fv8Jj\nAQCAuaeDAwBA//RwAACYokGt9gh6kVVtP2hm3l1VV4973ZMufFbTGzx5846WWNx1+PNNuc3rNjTl\nrtjQ9tnrh4+23Tb45xYfbMptyHVNuWsWz2nK/eWjdzTlvnT72F+pTvcd2duUu/2OP2zKvfJ5P9yU\na/XI4EhT7sKFTU253QfbdtX9xq1Pbcq999gDTbmIiEvWbW3KXbCwsSn3yWOPNuU2L6xvyrVeK35x\nQ9s58fiJw025L9lwUVPuvY2/a62c891az/l1OW4jvW7moduszEOEuRjm4w/tzqbgWdDBT9LBu+ng\nK2tWrj0R/fdwHbybDt7NOd9tVrqfeRjOXHRbix084sx6uA7eTQfvpoMPNyvXHx28mw7eTQfvNivn\n+1rvfRHmYhjnRLfWDn7wF7+79xU4W370t3v5vLDcyB1wMvP2YYciou2TAQAAMJQODgAA/dPDAQCA\nSY1cgBMnP1hcHxGnLyHNiLhlKiMCAID5poMDAED/9HAAAJiWGqz2CHoxbgHOTRGxrapuO/1AZt48\nlREBAMB808EBAKB/ejgAADCRkQtwqurGEcduWPnhAADAfNPBAQCgf3o4AABM0aBWewS9WFjtAQAA\nAAAAAAAAwCyzAAcAAAAAAAAAACYwlQU4mbkzM3dn5u79hx+ZxlsAAADL6OAAANAvHRwAAM5MDQa9\nP1bDyAU4mXlOZv7Pmfl7mXnDacd+bViuqnZV1VJVLW3bdMFKjRUAANY8HRwAAPrX0sN1cAAAYLlx\nO+C8KSIyIv4kIl6RmX+SmRtPHfuKqY4MAADmkw4OAAD908MBAICJLI45/pSq+uZTX/9pZv5kRPxf\nmfnyKY8LAADmlQ4OAAD908MBAGBaBrXaI+jFuAU4GzNzoaoGERFV9e8y876I+JuI2Db10QEAwPzR\nwQEAoH96OAAAMJFxt6D684j4uuVPVNWbI+JHIuLolMYEAADzTAcHAID+6eEAAMBERu6AU1U/NuT5\nt2fmz09nSAAAML90cAAA6J8eDgAAU3Ryo8k1L6va7rWVmXdX1dXjXrd02Vf3ejOvy9af25Tbnuub\ncvef2NeUe9G6HU25D9RjTbmPHX6gKdfqq7Y8qSn3yeOPNuX2nzjSlLtuw4VNud/9+9c35Y6//Y1N\nue/4yQ815TaO3eSq2wcO3deUe+qmS5py+wdt8xcRsZjrmnKbctwd+LpdtrClKfeJ43ubcq3Xit3V\ndi599sjDTblv2fSUptx92fZ/IPvAkT1NOed8t9Zzftu6TU0589BtVuYhwlwMc+eDt2ZT8Czo4Cfp\n4N108G5r/doT0X8P18G76eDdnPPdZqX7mYfhzEW3tdjBI86sh+vg3XTwbjr4cLNy/dHBu+ng3XTw\nbrNyvq/13hdhLoZxTnRr7eAH/qfv7LUvR0Rs/anf7+XzwnIj/+bNzNuHHYqItr8NAQCAoXRwAADo\nnx4OAABTNOh9/c2qGLf0dUdEXB8Rpy9ZzYi4ZSojAgCA+aaDAwBA//RwAABgIuMW4NwUEduq6rbT\nD2TmzVMZEQAAzDcdHAAA+qeHAwAAExm5AKeqbhxx7IaVHw4AAMw3HRwAAPqnhwMAwBQNBqs9gl4s\nrPYAAAAAAAAAAABgllmAAwAAAAAAAAAAE5jKApzM3JmZuzNz90MH90zjLQAAgGV0cAAA6JcODgAA\nZ2hQ/T9WwcgFOJl5aWb+emb+amZemJk/k5kfysy3ZOZlw3JVtauqlqpq6eItl678qAEAYI3SYqZs\noQAAIABJREFUwQEAoH8tPVwHBwAAlhu3A86bI+KOiLgnIt4ZEYci4mUR8e6I+I2pjgwAAObTm0MH\nBwCAvr059HAAAGACi2OO76iqX4mIyMzvr6rXnnr+VzLzxukODQAA5pIODgAA/dPDAQBgWmqw2iPo\nxbgdcJYf/93Tjq1b4bEAAAA6OAAArAY9HAAAmMi4HXD+LDO3VdX+qvqpJ57MzKdGxMemOzQAAJhL\nOjgAAPRPDwcAgGkZ1GqPoBcjF+BU1U8Pef6TmfkX0xkSAADMLx0cAAD6p4cDAACTGrcDziiviYg3\njXvR/hOHm775tnWbmnJbsu1HOhwnmnL7TxxpyrVuWnrPsUebcq1/nnuO7G3K7avjTbltCxubcn07\n/vY3NuUWX9J2u+gD/8O/asodzXF3mVtZe4493pS7dP05ze95uPF3bf+g7dz9RGOu72vFbfvvasp9\n5fanNuUez7Zr6OcGB5ty29b1e61wznczD93W+jxEmItVpoOHDj6MDt5tHq49ffdwHbybDr6ynPPd\nzEO31bj2motua7SDR5xBD9fBh9DBO+ngw83K9UcHH/J+OngnHbzbrJzva30eIszFMM4JWoxs6Zl5\n+7BDEbFj5YcDAADzTQcHAID+6eEAADA9NRis9hB6MW6Z/I6IuD4iTl/+nRFxy1RGBAAA800HBwCA\n/unhAADARMYtwLkpIrZV1W2nH8jMm6cyIgAAmG86OAAA9E8PBwAAJjJyAU5VDb1BWVXdsPLDAQCA\n+aaDAwBA//RwAACYokGt9gh6sbDaAwAAAAAAAAAAgFlmAQ4AAAAAAAAAAEzgrBfgZOYlZ/CanZm5\nOzN3P3roobaRAQAAEaGDAwDAahjXw3VwAAA4Q4Pq/7EKRi7AycwLTntcGBHvy8zzM/OCYbmq2lVV\nS1W1dN7mi1d80AAAsFbp4AAA0L+WHq6DAwAAyy2OOf75iLjrtOeuiIj3R0RFxJOnMSgAAJhjOjgA\nAPRPDwcAgGmpwWqPoBfjbkH130fExyLi5VV1bVVdGxH3nvraBw4AAFh5OjgAAPRPDwcAACYycgFO\nVb0uIr47In46M1+fmdvj5Gp/AABgCnRwAADonx4OAABMatwtqKKq7o2Ib83Ml0fEX0XElqmPCgAA\n5pgODgAA/dPDAQBgSgbzsbZ93C2o/kFVvTUiXhwR3xARkZmvntagAAAAHRwAAFaDHg4AALTIqraV\nRpl5d1VdPe5111/10qY3ODg42hKLA4MjTblnb7y0KXesBk25BweHmnIPHd/XlLt4cXtT7s6D9zXl\nrt50cVPu4sWtTbkDg2NNuVZbF9Y35VrHedMHfrUp9wNLP96U+5uDdzXlvnXLdU25/XGiKRcRceux\nh5py1yye2/yefWq9VrReQ+8+3Pbn+TXbntqUa9X659LKOd/t6o0XNuVamYdufc9DhLkY5qMPvi+b\ngmdBBz9JB++mg3db69eeiP57uA7eTQdfWc75bjp4t9W49pqLbmuxg0ecWQ/Xwbvp4N108OFm5fqj\ng68sHbybDt5NB+82D9fetT4XszIPrR18/w+/vPctcLa9/q29fF5YbuQtqDLz9mGHImLHyg8HAADm\nmw4OAAD908MBAGB6ak5uQTVyAU6c/GBxfUTsPe35jIhbpjIiAACYbzo4AAD0Tw8HAAAmMm4Bzk0R\nsa2qbjv9QGbePJURAQDAfNPBAQCgf3o4AAAwkZELcKrqxhHHblj54QAAwHzTwQEAoH96OAAATNGc\n3IJqYbUHAAAAAAAAAAAAs2zcLagAAAAAAAAAAKDNYLDaI+jFVHbAycydmbk7M3ffu/+eabwFAACw\njA4OAAD90sEBAIDlRi7AycyXLPv63Mx8Y2benpn/MTN3DMtV1a6qWqqqpSu3XbWS4wUAgDVNBwcA\ngP619HAdHAAAWG7cDjg/v+zr10XE5yLiv46IWyPiN6c1KAAAmGM6OAAA9E8PBwCAaRlU/49VsHgW\nr12qqmef+vqXMvNV0xgQAADwD3RwAADonx4OAACctXELcC7JzB+OiIyIczIzq+qJpULjds8BAADO\nng4OAAD908MBAICJjFuA81sRsf3U178TERdFxEOZeWlE3DbNgQEAwJzSwQEAoH96OAAATMsq3RKq\nbyMX4FTVa4Y8vycz3zmdIQEAwPzSwQEAoH96OAAAMKn8f3fRPMtg5t1VdfW4173s6pf1upRpXeNu\noBcvbGrK7RkcbMqdlxubcncdf6wp9+jxA025cxa3NOUuXNeWG0Tbr8veE4eactcunteUq8Zx7qtj\nTbkrFtr+PN+w+7VNuWuf9vKm3AUbto9/UYdHju5rykVEXLX54qbc48fbzt0nbbywKdeq9Vrxt/s/\n1ZR78uYdTbmFyKbcCxcvasq949jnmnLO+W6t53zr+Wceus3KPESYi2HufeTDbRfDs6CDn6SDd9PB\nu631a09E/z1cB++mg3dzzneble5nHoYzF93WYgePOLMeroN308G76eDDzcr1RwdfWTp4Nx28mw7e\nbR6uvWt9LmZlHlo7+L7vfUnvW+Bs/4239/J5YbmRO+Bk5u3DDkVE299OAADAUDo4AAD0Tw8HAIDp\nad0YZtaMXIATJz9YXB8Re097PiPilqmMCAAA5psODgAA/dPDAQCAiYxbgHNTRGyrqttOP5CZN09l\nRAAAMN90cAAA6J8eDgAA0zKwA05U1Y0jjt2w8sMBAID5poMDAED/9HAAAGBSC6s9AAAAAAAAAAAA\nmGXjbkEFAAAAAAAAAABt5uQWVGe9A05mXngGr9mZmbszc/fd++9uGxkAABAROjgAAKyGcT1cBwcA\nAJYbuQAnM38hMy869fVSZn46Iv4uM+/KzK8dlquqXVW1VFVLV2+7eoWHDAAAa5cODgAA/Wvp4To4\nAACw3LgdcL6xqj5/6uv/JSK+raqeGhH/KCJeN9WRAQDAfNLBAQCgf3o4AABMSQ2q98dqGLcAZzEz\nF099vbmqbo2IqKqPR8TGqY4MAADmkw4OAAD908MBAICJjFuA82sR8bbM/LqIeHtm/nJmfm1mviYi\nbpv+8AAAYO7o4AAA0D89HAAAmMjiqINV9SuZ+aGI+L6IeNqp118XEX8aET83/eEBAMB80cEBAKB/\nejgAAEzRKt0Sqm8jF+BERFTVzRFx8+nPZ+arI+JNKz8kAACYbzo4AAD0Tw8HAAAmkVVtK40y8+6q\nunrc6zZuuqrpDc7ftK0lFhnZlLts8wVNuQcO723KXb3lkqbc3mP7m3KLua4pd3RwvCn3NVuvacp9\n9sS+ptyHHr+rKXfuhrbfs4Vs+z1r1fp7ve/4wabcZz7+1qbc057+TU25Y3WiKRcRccGG7U25V254\nSlPu1w/e0ZQ7fOJIU671WrHv+KGm3MHGcbZeY9bluDshdnvkyONNOed8t9Zz/sqnvKwpZx66zco8\nRJiLYdZf9OSp/8Ho4Cfp4N108JU1K9eeiP57uA7eTQfv5pzvNivdzzwMZy66rcUOHnFmPVwH76aD\nd9PBh5uV648O3k0H76aDd5uV832t974IczGMc6Jbawd/7F9+fe9b4Jz7e+/o/YQauQNOZt4+7FBE\n7Fj54QAAwHzTwQEAoH96OAAAMKlxt6DaERHXR8Tpy9szIm6ZyogAAGC+6eAAANA/PRwAAJjIuAU4\nN0XEtqq67fQDmXnzVEYEAADzTQcHAID+6eEAADAlNej9DlSrYuQCnKq6ccSxG1Z+OAAAMN90cAAA\n6J8eDgAATGphtQcAAAAAAAAAAACzbNwtqAAAAAAAAAAAoM2c3ILKDjgAAAAAAAAAADCBkQtwMvP9\nmflTmfmUs/mmmbkzM3dn5u4TJ/ZPNkIAAJgjOjgAAPSvpYfr4AAAcIYGq/BYBeN2wDk/Is6LiHdm\n5vsy899k5uXjvmlV7aqqpapaWrdu24oMFAAA5oQODgAA/TvrHq6DAwAAy41bgLO3qn60qq6OiB+J\niOsi4v2Z+c7M3Dn94QEAwNzRwQEAoH96OAAAMJFxC3D+QVW9u6q+PyKuiIjXRsQLpzYqAABABwcA\ngFWghwMAwMqqQfX+WA2LY45//PQnqupERLz91AMAAFhZOjgAAPRPDwcAACYycgecqnrFsGOZ+eqV\nHw4AAMw3HRwAAPqnhwMAAJPKqratdzLz7lP3wx3p31zziqY3uLcOtcTi/uP7mnI7Frc15Q4MjjXl\nzlnY0JT71NGHm3LH60RT7ukbL2nK3XXs0abc+eu2NOX2D4405Z6/vu3n+8Sg7fdsz7HHm3Iv2zj2\nVOv0p4c/3ZTbd+xgU+7jH/tPTbk3PPenm3IREfcsHG/KfeTEY83v2afWa8U9x9p+vkG0/Z3wsvVX\nNuUey7Zr0/uOPdiUc853az3nL990YVPOPHSblXmIMBfDfObhD2ZT8Czo4Cfp4N108G5r/doT0X8P\n18G76eDdnPPdZqX7mYfhzEW3tdjBI86sh+vg3XTwbjr4cLNy/dHBV5YO3k0H76aDd5uHa+9an4tZ\nmYfWDr73m1/U+z2hzv+Tm3v5vLDcyFtQZebtww5FxI6VHw4AAMw3HRwAAPqnhwMAAJMauQAnTn6w\nuD4i9p72fEbELVMZEQAAzDcdHAAA+qeHAwAAExm3AOemiNhWVbedfiAzb57KiAAAYL7p4AAA0D89\nHAAApqQGvd+BalWMXIBTVTeOOHbDyg8HAADmmw4OAAD908MBAIBJjdsBBwAAAAAAAAAA2gxWewD9\nWFjtAQAAAAAAAAAAwCyzAAcAAAAAAAAAACYwcgFOZi5l5jsz8/cz86rM/KvMfCwzb83M54zI7czM\n3Zm5+0P7PrXyowYAgDVKBwcAgP619HAdHAAAzkwN+n+shnE74PxaRPz7iPiLiLglIn6zqs6NiJ84\ndaxTVe2qqqWqWnrm9qes2GABAGAO6OAAANC/s+7hOjgAALDcuAU466vqL6vqDyKiquqP4+QX74iI\nTVMfHQAAzB8dHAAA+qeHAwAAE1kcc/xwZv7jiDg3Iioz/1lV/Wlmfm1EnJj+8AAAYO7o4AAA0D89\nHAAApmWVbgnVt3ELcL43Tm67OYiI6yPi+zLzzRFxX0R8z3SHBgAAc0kHBwCA/unhAADAREbegqqq\nPlhV11fVS6vqzqr676rqvKr6koh4ek9jBACAuaGDAwBA//RwAABgUuN2wBnlNRHxpnEvOtK4l9C9\nxx9vyj1n/cVNuT11uCm3LrMpd8fhPU25L9t0eVPuM8cfbcq1zsPWhQ1t73f0kabcOYtbmnLvPfZA\nU25jrmvKXbr+nKbc/sZdbh85uq8p1+oNz/3pptwPvP9nm9/zVc/7kabcl687vym3udrO+b+tvU25\n1mvF0uYrm3Lnxfqm3LuPP9SU+866pCn33qi2nHN+RQ3MQ6e1Pg8R5mKV6eChgw+jg3ebh2tP3z1c\nB++mgw/JOedXlA7ebTWuveai2xrt4BFn0MN18G46+JD308GHmpXrjw7eTQfvpoN3m5Xzfa33vghz\nMYxzYmWVW1BFZObtww5FxI6VHw4AAMw3HRwAAPqnhwMAAJMatwPOjjh5v9vTl6xmRNwylREBAMB8\n08EBAKB/ejgAAEyLHXAiIuKmiNhWVbedfiAzb57KiAAAYL7p4AAA0D89HAAAmMjIBThVdeOIYzes\n/HAAAGC+6eAAANA/PRwAAJjUuB1wAAAAAAAAAACgSc3JLagWVnsAAAAAAAAAAAAwyyzAAQAAAAAA\nAACACYxcgJOZ2zLzZzPzI5n5WGY+lJnvzczvGpPbmZm7M3P3Hfs+vaIDBgCAtUwHBwCA/rX0cB0c\nAADOTA36f6yGcTvg/IeI+HREXB8Rr4mI/y0i/mVEvDgzf35YqKp2VdVSVS09Y/uTV2ywAAAwB3Rw\nAADo31n3cB0cAABYbtwCnGuq6s1VdW9VvT4iXl5Vn4iIV0fEP5/+8AAAYO7o4AAA0D89HAAAmMi4\nBTgHMvOrIiIy8+UR8UhERFUNIiKnPDYAAJhHOjgAAPRPDwcAgCmZl1tQLY45/r0R8duZeV1EfCQi\n/puIiMy8OCJ+dcpjAwCAeaSDAwBA//RwAABgIiMX4FTV7RHx/I7nH8rMfVMbFQAAzCkdHAAA+qeH\nAwDAFNV8bCqZVdUWzLy7qq4e97oXXvHipjc4cOJwSywWc11T7is2Xt6Uu29wsCl36cLmptyewaGm\n3GeOPtyUa/XFG3c05dY17ub6meOPNuUuWbe1KXegjjXlWh0ZHG/KHY+2vbVaz79v2PSkptyeOtKU\ni4j4nb9/XVPuW577Q025pyxsa8p9arC/Kdd6rfjUicebcnuOPtaUe9amy5pyrefSgycONOWc891a\nz/mt6zY15cxDt1mZhwhzMczte94z9U8QOvhJOng3HXxlzcq1J6L/Hq6DD3k/HbyTc77brHQ/8zCc\nuei2Fjt4xJn1cB28mw7eTQcfblauPzp4Nx28mw7ebVbO97Xe+yLMxTDOiW6tHfyBF72obWHKBHbc\nfHPvq35G7oCTmbcPOxQRbc0SAAAYSgcHAID+6eEAAMCkRi7AiZMfLK6PiL2nPZ8RcctURgQAAPNN\nBwcAgP7p4QAAMCXVtlHPzBm3AOemiNhWVbedfiAzb57KiAAAYL7p4AAA0D89HAAAmMjIBThVdeOI\nYzes/HAAAGC+6eAAANA/PRwAAJjUuB1wAAAAAAAAAACgSQ1ytYfQi4XVHgAAAAAAAAAAAMyykQtw\nMvPczPyFzLwzMx/JzIcz86Onnjuvr0ECAMC80MEBAKBfOjgAALASxu2A85aI2BsRL6qqC6rqwoh4\n8ann3jIslJk7M3N3Zu5+4MD9KzdaAABY+3RwAADolw4OAABTVIP+H6th3AKca6rqtVW154knqmpP\nVb02Ip40LFRVu6pqqaqWdmy9fKXGCgAA80AHBwCAfungAADAxMYtwLkrM38sM3c88URm7sjMH4+I\ne6Y7NAAAmEs6OAAA9EsHBwCAKarK3h+rYdwCnG+LiAsj4l2ZuTczH4mImyPigoj4F1MeGwAAzCMd\nHAAA+qWDAwDAnMnMl2TmxzLzk5n5E0Ne8y/+H/buPlrTs64P/fc3r5lk8kLeQAhkAoFSEAxmxLJs\nI9WqQKmo1UqrJ9RjjcdzgPbQF+mqy9cDVYu2aK01PR45atW26VE4p0gPq0ptOQ0y8lIgIm8mkEAS\nQhKSIclkZvbv/JEZ1xif/TyZa+/n3u55Pp+1nrX2fu793fuaufZ153snV+67qm6qqg9V1a8s+p67\n5h3s7nuq6heSvD3Jjd19+NTBJHnbaf4ZAACAOXRwAACYlg4OAACrpap2JvmZJF+T5NYk766qt3T3\nTad8zdOT/MMkX3HimuHSRd937h1wqurVSd6c5JVJPlhVLzvl8OtP/48BAADMo4MDAMC0dHAAAFiu\nXpv+tcDzk3ysuz/R3Q8n+bUkL3vU13xXkp/p7nuSpLvvXPRN594B58Q3vLq7D1fVgSQ3VNWB7n5j\nksf00Kxn7r7wsXzZn/DOh28Zyl2yZ99Q7ouP7xnKvXfts0O533/w9qHc/l1nDeVuuvuTQ7kvu+QZ\nQ7kPPTT253vCnvOHch+9/7ah3DMvfM5Q7vhaD+UOrx0Zyh3YNfb38p6Hxv5evmPv04dybz/+uaHc\nl+183FAuSb75S189lLvhPT81lHvlwe8dyt15/AtDudFzxYW79w/lLtl97lDuo0fH5v7zRx8Yyt19\n5L6hnDU/2+ia/9F7f3coZx5m2y7zkJiLCejgC+jgs+ngs63CuWfqHq6Dz6aDz2bNz7Zdup95WJ+5\nmE0HP306+Gw6+GxnegdPts/5RwefTQefTQefbbus9zO99yXmYj3WxPZXVdclue6Ut67v7utPfPyk\nJJ865ditSb78Ud/iGSe+zzuT7Ezyg9099+6Yizbg7Dh5u83uvrmqXphHLj4uz2O88AAAAE6LDg4A\nANPSwQEA4AxzYrPN9Qu/cH27kjw9yQuTXJbkd6rqOd1973qBuY+gSnJHVV11ygAPJ3lpkouTjP9v\n0wAAwHp0cAAAmJYODgAAS9RrNflrgduSPPmUzy878d6pbk3ylu4+2t1/mOQjeWRDzroWbcC5Nskf\nu+9bdx/r7muTXLNoxAAAwGnTwQEAYFo6OAAArJZ3J3l6VV1RVXuSvDzJWx71Nb+RR+5+k6q6OI88\nkuoT877p3EdQdfetc469c/GYAQCA06GDAwDAtHRwAABYLd19rKpemeQ/JtmZ5P/o7g9V1Q8nOdTd\nbzlx7Gur6qYkx5P8/e7+3LzvO3cDDgAAAAAAAAAAjOre6hH8Sd391iRvfdR733/Kx53kNSdej8mi\nR1ABAAAAAAAAAABzDG/Aqarf3MyBAAAA8+ngAAAwLR0cAAA2rtdq8tdWmPsIqqr60vUOJblqTu66\nJNclyQsufF7+zLlXDA8QAABWiQ4OAADT0sEBAIDNMHcDTpJ3J/nPeeRC49EuWC/U3dcnuT5JvuPA\nX/1T+DQvAAD4U0sHBwCAaengAADAhi3agPP7Sb67uz/66ANV9anlDAkAAFaaDg4AANPSwQEAYIm2\n6pFQU9ux4PgPzvmaV23uUAAAgOjgAAAwtR+MDg4AAGzQ3A043X1Dkqqqr66q/Y86/NDyhgUAAKtJ\nBwcAgGnp4AAAwGaYuwGnql6d5M15ZJf/B6vqZaccfv0yBwYAAKtIBwcAgGnp4AAAsFzd07+2QvWc\nn1xVH0jygu4+XFUHktyQ5Je6+41V9d7uft6iH/DXL/+GoT/aTUfuHIll/86zhnK/dOGuodwr7j4+\nlHvB7kuHcu87fs9Q7o6j9w3lDh8b+x88nnLWRUO5Y702lLvv+INDufN27hvKPXP3hUO5jx4bm78n\n7zxvKPe5Hpu/P3jg9qHclfseP5S7ZsfY32eSfK7G1uAXMpb754d+bCj3VV/yXUO50XPFWx/8w6Hc\nqMftOmco96zBtXTjQ7cN5az52UbX/P5dY//MNQ+zbZd5SMzFej5+13s25SG2OvhiOvhsOvhsZ/q5\nJ5m+h+vgs+ngs1nzs22X7mce1mcuZtPBT58OPpsOPtuZ3sGT7XP+0cFn08Fn08Fn2y7r/UzvfYm5\nWI81MdtoB//DL/maybfEXPH+t2/K9cLpWNS2d3T34STp7pur6oVJbqiqy5NMPlgAAFgBOjgAAExL\nBwcAADZs7iOoktxRVVed/OTERchLk1yc5DnLHBgAAKwoHRwAAKalgwMAwBL1Wk3+2gqLNuBcm+SP\n3Xuou49197VJrlnaqAAAYHXp4AAAMC0dHAAA2LC5j6Dq7lvnHHvn5g8HAABWmw4OAADT0sEBAGC5\nulfjya6L7oADAAAAAAAAAADMYQMOAAAAAAAAAABswNwNOFV1XlX946r6par6G4869i+WOzQAAFg9\nOjgAAExLBwcAgOXqtelfW2HRHXB+IUkl+fdJXl5V/76q9p449ufWC1XVdVV1qKoOfezwzZszUgAA\nWA06OAAATEsHBwAANmzRBpyndfdru/s3uvvrk7wnyW9V1UXzQt19fXcf7O6DV+4/sFljBQCAVaCD\nAwDAtHRwAABgw3YtOL63qnZ0P3KDnu5+XVXdluR3kuxf+ugAAGD16OAAADAtHRwAAJZorWurhzCJ\nRXfA+b+TfNWpb3T3m5L83SQPL2lMAACwynRwAACYlg4OAABs2NwNON39D5LcWlVfXVX7T3n/bUle\nvezBAQDAqtHBAQBgWjo4AACwGeZuwKmqVyV5c5JXJflgVb3slMOvW+bAAABgFengAAAwLR0cAACW\nq7smf22FXQuOX5fk6u4+XFUHktxQVQe6+41JHtOIP7f20NDAjj/yuN3T9uDa2B1Bv/musdy7PvCL\nQ7lrvuQ7h3KX7jxnKPfMvZcO5T6cO4dy5+zYM5ar3UO5//bZDw/lvurxzxnKffTYPUO5w8ePDOWy\ncyw26qHRcQ56Z4/9fSbJ7l70JL3Z7jz+haHcV33Jdw3lfuv9/2ooN3qu2L/zrKHcvh1ja/C2I2Nz\n+JHDtw3lPvfg/UM5a3620TV/y313DOXMw2zbZR4SczEBHXwBHXw2HXw25571jfZwHXw2HXw2a362\n7dL9zMP6zMXm0sFPnw4+mw4+27Y59yRn/PlHB59NB59NB99cOvgc5mIma4IRizbg7Ojuw0nS3TdX\n1QvzyMXH5XmMFx4AAMBp0cEBAGBaOjgAACxRr61GrV60ZfaOqrrq5CcnLkJemuTiJOP/2zQAALAe\nHRwAAKalgwMAABu2aAPOtUluP/WN7j7W3dcmuWZpowIAgNWlgwMAwLR0cAAAYMPmPoKqu2+dc+yd\nmz8cAABYbTo4AABMSwcHAIDl6t7qEUxj0R1wAAAAAAAAAACAOWzAAQAAAAAAAACADZi7AaeqnlBV\nP1tVP1NVF1XVD1bVB6rq31bVF001SAAAWBU6OAAATEsHBwCA5eq1mvy1FRbdAedNSW5K8qkkv53k\nwSQvSfJfkvzL9UJVdV1VHaqqQ7ce/tQmDRUAAFbCm6KDAwDAlN4UHRwAANigRRtwHt/dP93dP5rk\ngu7+se7+VHf/dJLL1wt19/XdfbC7D162/8mbOmAAADjD6eAAADAtHRwAANiwXQuOn7pB5xcfdWzn\nJo8FAADQwQEAYGo6OAAALNFab80joaa26A44b66q/UnS3d938s2qujLJHyxzYAAAsKJ0cAAAmJYO\nDgAAbNjcDTjd/f1JLquqrz55AXLi/Y8l+d+XPTgAAFg1OjgAAExLBwcAgOXqrslfW2HuBpyqelWS\nNyd5VZIPVtXLTjn8+mUODAAAVpEODgAA09LBAQCAzbBrwfHrklzd3Yer6kCSG6rqQHe/Mclj2jJ0\n/9qRoYGdu2vfUO6cHXuGcntq7FG+13zJdw7lfuf9Pz+U+56D/2Ao96GjnxvKPXj84aHcF9bGclft\nOn8o92WXPGMo95qHLxjK3XjW4KOfB2P/be3uodxFO84ayj3l7EuHcucNrr+bHrp9KJcrfKGbAAAg\nAElEQVQkf37fU4Zyv//g2M/8hnOePpSb+lzxrVf/naHchTU2hwd2nTeU+/Cesd/tp+7/oqGcNT/b\n6JofzZmH2bbLPCTmYgI6+AI6+Gw6+DpW4NwzdQ/XwWfTwWez5mfbLt3PPGx+1lzMpoOfPh18Nh18\ntu1y7km2z/lHB59NB59NB59tu6z3M733JeZiPdYEIxZtwNnR3YeTpLtvrqoX5pGLj8vzGC88AACA\n06KDAwDAtHRwAABYou6tHsE05j6CKskdVXXVyU9OXIS8NMnFSZ6zzIEBAMCK0sEBAGBaOjgAALBh\nizbgXJvkj92frruPdfe1Sa5Z2qgAAGB16eAAADAtHRwAANiwuY+g6u5b5xx75+YPBwAAVpsODgAA\n09LBAQBgudZ6NZ7suugOOAAAAAAAAAAAwBynvQGnqi5dxkAAAIDZdHAAAJiWDg4AAJyuuY+gqqoL\nH/1Wkt+tquclqe6+e2kjAwCAFaSDAwDAtHRwAABYrl6RR1DN3YCT5K4ktzzqvScleU+STvLUWaGq\nui7JdUlyxfnPyOPPeeIGhwkAACtDBwcAgGnp4AAAwIYtegTV30/yB0m+vruv6O4rktx64uOZFx1J\n0t3Xd/fB7j7oogMAAE6LDg4AANPSwQEAYIm6p39thbkbcLr7J5L8rSTfX1U/WVXn5pEd/wAAwBLo\n4AAAMC0dHAAA2AyL7oCT7r61u78lyTuSvD3J2cseFAAArDIdHAAApqWDAwAAG7Vr0RdU1TPzyPNu\nfyuPXHg87cT7L+ruty13eAAAsHp0cAAAmJYODgAAy7PWtdVDmMTcO+BU1auTvDnJq5J8MMnXdvcH\nTxx+/ZLHBgAAK0cHBwCAaengAADAZqju9R9lW1UfSPKC7j5cVQeS3JDkl7r7jVX13u5+3qIf8Oee\n+MKhZ+UePv7QSCxP2HP+UO68HXuHcp8+et9Q7om7zxvKvfj4WO7f1GeHcqNuf/jzQ7nR+Tt8/MhQ\nbv/OsXk/qxbePGqm9x2+ZSh3xb7HD+U++dDYvJ+7a+wOu/t3njWU+zN7LhrKJcltxw8P5UZ/Z0bP\nTaN/N6Pnin/ze/9sKPe1V333UO6zR+8fyj3jrEuHcqPnXmt+tqnXvHmYbbvMQ2Iu1vPBO27clC38\nOvhiOvhsOvhs2+Xck2yfHq6Dz6aDz2bNz7Zdut+ZPg+JuVjPdlkTOvjp08E3lw4+2+i5J9k+5x8d\nfDYdfDYdfLYzfb1vl3lIzMV6rInZRjv4ocu+Yagvb8TBW39j8tvuLPqt2dHdh5Oku2+uqhcmuaGq\nLk+yGvcIAgCAaengsIJG/6UHAOOcezmFDg4AAEvUHkGVJLmjqq46+cmJi5CXJrk4yXOWOTAAAFhR\nOjgAAExLBwcAADZs0Qaca5Pcfuob3X2su69Ncs3SRgUAAKtLBwcAgGnp4AAAwIbNfQRVd98659g7\nN384AACw2nRwAACYlg4OAADLteYRVAAAAAAAAAAAwCJz74ADAAAAAAAAAACjeqsHMJG5d8Cpqhed\n8vH5VfXzVfXfq+pXqurxyx8eAACsFh0cAACmpYMDAACbYdEjqF5/ysc/keQzSf5Kkncn+bn1QlV1\nXVUdqqpDdz7w6Y2PEgAAVocODgAA09LBAQCADTudR1Ad7O6rTnz8T6vqFet9YXdfn+T6JPlzT3zh\nqtxNCAAANpsODgAA09LBAQBgk611bfUQJrFoA86lVfWaJJXkvKqq7j55IbHo7jkAAMDp08EBAGBa\nOjgAALBhiy4e/lWSc5PsT/J/Jrk4SarqCUnet9yhAQDAStLBAQBgWjo4AACwYXPvgNPdP1RVz0zy\npCTv6u7DJ96/vap+ZYoBAgDAKtHBAQBgWjo4AAAsV6/II6jm3gGnql6V5M1JXpXkg1X1slMOv36Z\nAwMAgFWkgwMAwLR0cAAAYDPMvQNOkuuSXN3dh6vqQJIbqupAd78xjzwPd6Ev3nPx0MDu72NDubNr\n51Duw0fvHsrt27F7KHdh7RnK/bu6ayh30Y59Q7k/PHrPUG7UgZ3nDuU+3mtDufuOPzSUu/34kaHc\nV5x75VDuxsOfGMo9dd/jh3K3PDT2e3b2zr1DuQsyto6S5Pce/vxQ7pLdY79rhwd/Z6Y+V3ztVd89\nlPt/3/dzQ7lv/NJXDeUe7uNDudG/T2t+ttE1P7qOzMNs22UeEnMxAR18AR18c+ngs0197km2Tw/X\nwWfTwWez5mfbLt3vTJ+HxFysZ7usiU2kgy+gg28uHXx92+X8o4PPpoPPpoPPtl3W+5ne+xJzsR5r\nghGLNuDsOOV2mzdX1QvzyMXH5XmMFx4AAMBp0cFhBfmXHgDTc+7lFDo4AAAs0dg24u1n7iOoktxR\nVVed/OTERchLk1yc5DnLHBgAAKwoHRwAAKalgwMAABu2aAPOtUluP/WN7j7W3dcmuWZpowIAgNWl\ngwMAwLR0cAAAWKJOTf7aCnMfQdXdt8459s7NHw4AAKw2HRwAAKalgwMAAJth0R1wAAAAAAAAAACA\nOebeAWeWqrqouz+3jMEAAAB/kg4OAADT0sEBAGDzrPVWj2Aac++AU1U/WlUXn/j4YFV9Ism7quqW\nqvrKSUYIAAArRAcHAIBp6eAAAMBmWPQIqr/c3Xed+PifJPnW7r4yydck+Yn1QlV1XVUdqqpDH77/\nE5s0VAAAWAk6OAAATEsHBwAANmzRBpxdVXXyMVX7uvvdSdLdH0myd71Qd1/f3Qe7++Azz33qJg0V\nAABWgg4OAADT0sEBAGCJ1lKTv7bCog04/yLJW6vqq5K8rareWFVfWVU/lOR9yx8eAACsHB0cAACm\npYMDAAAbtmvewe7+6ar6QJLvSfKME1//9CS/keR/W/7wAABgtejgAAAwLR0cAADYDHM34Jxwe5Lr\nk7yruw+ffLOqXpTkbcsaGAAArDAdHAAApqWDAwDAkvQWPRJqanMfQVVVr07y5iSvSvLBqnrZKYdf\nv8yBAQDAKtLBAQBgWjo4AACwGRbdAee7klzd3Yer6kCSG6rqQHe/MXlsW5Tu7oc3NsLTtnMo9UW7\nzh3KffCBTw/lDuw6byi3rx7LTYv+pKNZG8qdu2PvUO6cHXuGcu9/+M6h3OePPjCU+4pzLh/KXd5n\nDeXuq+NDuWv2XzmU+/Txw4u/aIZdNbaOXrL7sqHcfzn22aFckjz3rC8ayn306OeGco/bdc5Q7rYj\n9wzlRs8Vnz16/1DuG7/0VUO5X3/PTw/lnv1n/9pQbpQ1P9vomh/9vTYPs22XeUjMxQR08AV08Nl0\n8Nm2y7nnirMuyaeO3D2UnbqH6+Cz6eCzWfOzbZfud6bPw0bOveZiNh18Sjr4LDr4bNvl3JNsn/OP\nDj6bDj6bDj7bdlnvZ3rvS8zFeqyJzTXW0rafRS12x8nbbXb3zVX1wjxy8XF5HuOFBwAAcFp0cFhB\no/8BGIBxzr2cQgcHAAA2bO4jqJLcUVVXnfzkxEXIS5NcnOQ5yxwYAACsKB0cAACmpYMDAAAbtmgD\nzrVJbj/1je4+1t3XJrlmaaMCAIDVpYMDAMC0dHAAAFiiTk3+2gpzH0HV3bfOOfbOzR8OAACsNh0c\nAACmpYMDAACbYdEdcAAAAAAAAAAAgDnm3gEHAAAAAAAAAABGrW31ACYy9w44VfWeqvq+qnraVAMC\nAIBVpoMDAMC0dHAAAGAzLHoE1eOSXJDkt6vqd6vqf62qJy76plV1XVUdqqpDf3j4lk0ZKAAArAgd\nHAAApqWDAwAAG7ZoA8493f33uvspSf5ukqcneU9V/XZVXbdeqLuv7+6D3X3wiv2Xb+Z4AQDgTKeD\nAwDAtHRwAABYorUteG2FRRtw/kh3/5fu/p+TPCnJjyV5wdJGBQAA6OAAADAxHRwAABi1a8Hxjzz6\nje4+nuRtJ14AAMDm0sEBAGBaOjgAACxRp7Z6CJOYewec7n55VT2zqr66qvafeqyqXrTcoQEAwOrR\nwQEAYFo6OAAAsBnmbsCpqlcleXOSVyX5YFW97JTDr1/mwAAAYBXp4AAAMC0dHAAA2AyLHkF1XZKr\nu/twVR1IckNVHejuNyaP7R5BX1znDg3s5+75vaHck86+eCi3d8fuodznHz48lPvwnruHcjsGb810\n99Gxcd595L6h3LPOffJQ7nivDeVG5+HG+tRQbtfZlw/lPrP2wFDucbV3KPeCXWPr4dMP3zuU+3wd\nH8o9d9eFee7RRaej2X6jB8d6dGwuvvScK4ZyHzl821Bu9FzxjLMuHco93GNz+Ow/+9eGch/6/X87\nlLvsaS8Zylnzs42u+dF/RpiH2aaeh7c+/KFcuPe8oay5WDodfAEdfDYdfLbtcu55wa6L8+8f+NhQ\ndrSHf3uPdUYdfDYdfDZrfjYdfLbtdO41F7Pp4KdPB59NB59tu3Tw5Mw//+jgs+ngs+ngs22X9b5d\nel9iLtZjTcw2dQdfW40nUC3cgLOjuw8nSXffXFUvzCMXH5fnMV54ACwyuvkGgHGjm2+YhA4OK2j0\nPwADMM65l1Po4LCJRv/DIwDjnHvhT4e5j6BKckdVXXXykxMXIS9NcnGS5yxzYAAAsKJ0cAAAmJYO\nDgAAbNii205cm+TYqW9097Ek11bVzy1tVAAAsLp0cAAAmJYODgAAS7S2IjeWnLsBp7tvnXPsnZs/\nHAAAWG06OAAATEsHBwAANsOiR1ABAAAAAAAAAABzLHoEFQAAAAAAAAAADOmtHsBE5t4Bp6oOVtVv\nV9UvV9WTq+rtVfX5qnp3VT1vqkECAMCq0MEBAGBaOjgAALAZFj2C6l8k+fEk/yHJ/5fk57r7/CSv\nPXFspqq6rqoOVdWh99z/sU0bLAAArAAdHAAApqWDAwDAEq1twWsrLNqAs7u7f7O7fzVJd/cNeeSD\n/5TkrPVC3X19dx/s7oNfeu6VmzhcAAA44+ngAAAwLR0cAADYsEUbcB6qqq+tqm9J0lX1DUlSVV+Z\n5PjSRwcAAKtHBwcAgGnp4AAAwIbtWnD8f8ojt95cS/J1Sb6nqt6U5LYk37XcoQEAwErSwQEAYFo6\nOAAALNFa1VYPYRJz74DT3e9P8neSvCHJrd39t7v7gu5+dpLzphggAACsEh0cAACmpYMDAACbYe4G\nnKp6dZJfT/KqJB+sqpedcvj1yxwYAACsIh0cAACmpYMDAACbYdEjqL4rycHuPlxVB5LcUFUHuvuN\nSR7TPYL+3QMfHRrYhXun/R8L7jv2wFDucw/eP5R76v4vGspdvuv8odzdRw8P5Ubn4dKd5wzldtbc\nPWHretfHf3Eo99xnvXwo994jtw/l9u/cO5Q7WmtDuY+t3TOUu/vIfUO5Xznyvly5/4mnnfvdJGvp\noZ85+hTu0T/jjTtvG8pNfa749NGxP9++HbuHcqMue9pLhnK3fvytQzlrfrbR9WAeZpt6HhJzsZ6t\nmItNooMvoIPPpoPPNvW55848mPvWHjrt3AW7z8knDn9m6Gf+7u47h3I36uAz6eCz6Ruz6eCzrcK5\n11zMpoOfPh18Nh18tqk7+H994JacM3geOdPPPzr4bDr4bDr4bNtlvU89D86967MmZtsuHXzwn4Db\nzqINODu6+3CSdPfNVfXCPHLxcXke44UHsDpGNt8kG9h8A8DkFx1MQgeHbWzkPwAnGf4PwAA497Ip\ndHCYYer/AAyAcy9sd4u2Wd9RVVed/OTERchLk1yc5DnLHBgAAKwoHRwAAKalgwMAwBKtbcFrKyza\ngHNtkj92b6XuPtbd1ya5ZmmjAgCA1aWDAwDAtHRwAABgw+Y+gqq7b51z7J2bPxwAAFhtOjgAAExL\nBwcAADbD3A04AAAAAAAAAAAwaq22egTTWPQIKgAAAAAAAAAAYI65G3Cqan9V/XBVfaiqPl9Vn62q\nG6vqb040PgAAWCk6OAAATEsHBwAANsOiO+D86ySfSPJ1SX4oyU8l+R+S/MWqev16oaq6rqoOVdWh\nex+8c9MGCwAAK0AHBwCAaengAACwRGupyV9bYdEGnAPd/abuvrW7fzLJ13f3R5N8R5JvWi/U3dd3\n98HuPnjBvks3c7wAAHCm08EBAGBaOjgAALBhizbgfKGq/nySVNXXJ7k7Sbp7LdmiLUMAAHBm08EB\nAGBaOjgAALBhuxYc/54k/6qqnp7kQ0m+M0mq6pIkP7PksQEAwCrSwQEAYFo6OAAALFFv9QAmMncD\nTne/v6pekeRJSW7s7sMn3v9sVX1kigECAMAq0cEBAGBaOjgAALAZ5j6CqqpeneTXk7wyyQer6mWn\nHH79MgcGAACrSAcHAIBp6eAAALBcazX9ayssegTVdyU52N2Hq+pAkhuq6kB3vzGefQsAAMuggwMA\nwLR0cAAAYMOqe/2nbVXVh7r72ad8vj/JDUluSvJV3X3Voh9w+UXPHXqc11P3PX4kllseumsot2/n\nnqHck/Y8bij3mocvGMr9yK47h3J7audQ7sCu84Zyv3nvTUO5Lz73KUO5247cM5T77zf92lDu2qtf\nM5QbdffakaHcRTvOGsodeuBTQ7m/fM6VQ7kbj94xlEuSS3eeM5S7cMfeodzHjt47lNu3Y/dQbvRc\n8YY9Y2vivuMPDeWevefiodyNg79ro6z52UbX/M6aeyO9dZmH2bbLPCTmYj0f+eyhTfkX8zr4Yjr4\nbDr45tou555k+h6ug8+mg89mzc+2XbqfeVifuZhNBz99OvhsOvhsZ3oHT7bP+UcHn00Hn00Hn227\nrPczvfcl5mI91sRsox38F5/07UN9eSOuve2XJ99Mv+i39I6q+qOLixPPvn1pkouTPGeZAwMAgBWl\ngwMAwLR0cAAAWKK1LXhthUUbcK5Ncvupb3T3se6+Nsk1SxsVAACsLh0cAACmpYMDAAAbtmvewe6+\ndc6xd27+cAAAYLXp4AAAMC0dHAAA2AxzN+AAAAAAAAAAAMCo3uoBTGTRI6gAAAAAAAAAAIA55t4B\np6p2JfnOJN+Y5Ikn3r4tyZuT/Hx3H13u8AAAYLXo4AAAMC0dHAAA2AyLHkH1S0nuTfKDSU4+B/ey\nJK9I8stJvnVWqKquS3Jdklx49pOy/6wLN2OsAACwCnRwAACYlg4OAABLtFZbPYJpLNqAc3V3P+NR\n792a5Maq+sh6oe6+Psn1SXL5Rc9dlcd5AQDAZtDBAQBgWjo4AACwYTsWHL+7qr6lqv7o66pqR1V9\na5J7ljs0AABYSTo4AABMSwcHAIAlWtuC11ZYtAHn5Um+OcntVfWRE7v9b0/yTSeOAQAAm0sHBwCA\naengAADAhs19BFV331xVP5nkJ5J8PMkzk7wgyU3d/YcTjA8AAFaKDg4AANPSwQEAgM0wdwNOVf1A\nkhef+Lq3J3l+knckeW1VPa+7X7f0EQIAwArRwQEAYFo6OAAALNdWPRJqanM34OSR225elWRvHrnl\n5mXdfV9VvSHJu5K48AAAgM2lgwMAwLR0cAAAYMMWbcA51t3HkzxQVR/v7vuSpLsfrKrHtEnpkj3n\nDw3s/uMPDeWeffaThnLn1u6h3KeP3z+Uu/GsnUO5i/vsodwfPHTHUO4zD987lHvpBc8eyn3s2NjP\nO2fn3qHctVe/Zij3i7/3k0O5Y2/7+aHct/2jDwzlOj2U21E1lPvo2th62Ftj6yFJvtBHh3LH18b+\nbp65+8Kh3EeP3TOUGz1XnNWLTu+z3X78yFDu8j5rKLfr7MuHcu89cvtQzpqfbXTNO/fOdqbPQ2Iu\nJqCDL6CDz6aDz7YK556pe7gOPpsOPps1P9t26X7mYX3mYjYd/PTp4LPp4LOd6eeeZPucf3Tw2XTw\n2XTw2bbLenfuXZ+5mO1MXxPMt+ifTA9X1dnd/UCSq0++WVXnZ3XuEgQAAFPSwQEAYFo6OAAALFGv\nyH6fRRtwrunuI0nS3adeaOxO8oqljQoAAFaXDg4AANPSwQEAgA3bMe/gyYuOGe/f1d1j90ACAADW\npYMDAMC0dHAAAFg9VfWiqvqDqvpYVb12ztf91arqqjq46HuOPRwRAAAAAAAAAAAW+NP2XNeq2pnk\nZ5J8TZJbk7y7qt7S3Tc96uvOTfK3k7zrsXzfuXfAAQAAAAAAAACAM8jzk3ysuz/R3Q8n+bUkL5vx\ndT+S5MeSPPRYvunwBpyqun40CwAAnD4dHAAApqWDAwDAxq1twWuBJyX51Cmf33rivT9SVV+a5Mnd\n/R8e659z7iOoqurC9Q4lecmc3HVJrkuSp5x3ZS45+wmPdTwAALDSdHAAAJiWDg4AAGeeU/v6Cdd3\n92PaYF9VO5L8ZJK/eTo/c+4GnCSfTXJLHrnQOKlPfH7peqETg74+SQ5+0V/o0xkQAACsOB0cAACm\npYMDAMAZ5tS+PsNtSZ58yueXnXjvpHOTfHGSd1RVkjwhyVuq6uu7+9B6P3PRBpxPJPnq7v7kow9U\n1admfD0AALAxOjgAAExLBwcAgCX6U7hb/d1Jnl5VV+SRjTcvT/I3Th7s7s8nufjk51X1jiR/b97m\nmyTZseCH/rMkj1vn2I8vHjMAAHCadHAAAJiWDg4AACuku48leWWS/5jk95P82+7+UFX9cFV9/ej3\nnXsHnO7+map6flV9WXe/u6qeleRFST7c3T89+kMBAIDZdHAAAJiWDg4AAKunu9+a5K2Peu/71/na\nFz6W7zl3A05V/UCSFyfZVVVvT/LlSX47yWur6nnd/brH8kMAAIDHRgcHAIBp6eAAALBca7XVI5jG\n3A04Sb45yVVJ9ia5Pcll3X1fVb0hybuSuPAAAIDNpYMDAMC0dHAAAGDDFm3AOdbdx5M8UFUf7+77\nkqS7H6yqtcfyAw4ff2hoYPt3njWUO7sW/ZFmeyjHh3KHjx8ZymXnWOxTR+8dyo3+fd5+5J6h3P19\nbCi3f8feodzUjr3t54dyu170nUO5L/zD/2Uo93DtGMqNuv3ofUO5J+w+b/hnPjT4u3Z4bWztfnQw\nN/W54n2HbxnKfcW5Vw7l7quxc+hn1h4Yyu3fOe25wpqfzTzMdqbPQ2IuJqCDL6CDz6aDz7YK556p\ne7gOPpsOvrms+dnMw2xbce41F7Pp4KdPB59NB//TYepzT7J9zj86+Do/TwefSQefbbus9zN9HhJz\nsR5rghGLWvrDVXV2dz+Q5OqTb1bV+Uke04UHAABwWnRwAACYlg4OAABLtCqletEGnGu6+0iSdPep\nfye7k7xiaaMCAIDVpYMDAMC0dHAAAGDD5m7AOXnRMeP9u5LctZQRAQDACtPBAQBgWjo4AAAs16rc\nAccDwQAAAAAAAAAAYANswAEAAAAAAAAAgA2YuwGnqnZW1XdX1Y9U1Vc86tj3LXdoAACwenRwAACY\nlg4OAADL1Vvw2gqL7oDzc0m+MsnnkvxUVf3kKce+ab1QVV1XVYeq6tC9D352E4YJAAArQwcHAIBp\n6eAAAMCGLdqA8/zu/hvd/c+SfHmS/VX1f1XV3iS1Xqi7r+/ug9198IJ9l2zmeAEA4EyngwMAwLR0\ncAAAYMMWbcDZc/KD7j7W3dcleX+S30qyf5kDAwCAFaWDAwDAtHRwAABYorWa/rUVFm3AOVRVLzr1\nje7+oSS/kOTAsgYFAAArTAcHAIBp6eAAAMCGzd2A093fnuTuqvqyJKmqZ1XVa5J8urt3TzFAAABY\nJTo4AABMSwcHAAA2w655B6vqB5K8OMmuqnp7Hnn+7W8neW1VPa+7XzfBGAEAYGXo4AAAMC0dHAAA\nlmttqwcwkbkbcJJ8c5KrkuxNcnuSy7r7vqp6Q5J3JXHhAQAAm0sHBwCAaengAADAhlV3r3+w6r3d\n/bxHf3zi8/d191WLfsDXPfnF6/+AOR5Ye3gkli+sHRnKXbX3CUO5oz22V+vOtQeHcp89dv9Q7pJd\n5w7lPvzAbUO5p5x1yVDukl3nDOW+sHZ0KDfqnB1jd54dHef/896fGcq98uD3DuV+54FbhnLfcvbT\nh3KHc3wolyTvPvrZodyBXecP/8wpjZ4rRs+hn3xo7O/zmv1XDuVGjf69jLLmZ3vK3ouGcqPMw2xT\nz0NiLtbz+3f+bg0FH0UHX0wHn00Hn+1MP/ck0/dwHXw2HXxzWfOz6eCzbcW511zMpoOfPh18Nh18\ntjO9gyfb5/yjg28uHXw2HXw2HXy2VTj3nulzsV3mYbSD/+PLv32oL2/EP7zllzfleuF07Fhw/OGq\nOvvEx1effLOqzs/q3CUIAACmpIMDAMC0dHAAAGDDFj2C6pruPpIk3X9si/vuJK9Y2qgAAGB16eAA\nADAtHRwAANiwuRtwTl50zHj/riR3LWVEAACwwnRwAACYlg4OAADLtZbJn0C1JRY9ggoAAAAAAAAA\nAJjDBhwAAAAAAAAAANiAuY+gqqqzk7wySSf56SQvT/JNST6c5Ie7+/DSRwgAACtEBwcAgGnp4AAA\nsFxrWz2AiSy6A86bkjw+yRVJ/kOSg0n+SZJK8rPrharquqo6VFWHbj38qU0aKgAArIQ3RQcHAIAp\nvSk6OAAAsEFz74CT5Bnd/deqqpJ8Jslf6u6uqv+a5P3rhbr7+iTXJ8nXPfnFvWmjBQCAM58ODgAA\n09LBAQCADVu0ASdJcuJi463d3ad87oICAACWRAcHAIBp6eAAALAcq1KqFz2C6lBV7U+S7v4fT75Z\nVU9Lcv8yBwYAACtKBwcAgGnp4AAAwIbNvQNOd/+tqnp+VXV3v7uqnpXkRUn+IMlfmGSEAACwQnRw\nAACYlg4OAADLtbbVA5jI3A04VfUDSV6cZFdVvT3Jlyf57STfm+SqJK9b+ggBAGCF6OAAADAtHRwA\nANgMczfgJPnmPHKBsTfJ7Uku6+77quoNSd4VFx4AALDZdHAAAJiWDg4AAGxYdff6B6ve293Pe/TH\nJz5/X3dftegHvOQpL1n/ByzBzuwYyl2y46yh3O1rDwzlLqi9Q7lbjn1+KHfvsS8M5c7bdfZQ7qKd\nY7m1jP263HP8waHcFbsuGMr14Djv76NDuSftGPv7/OeHfmwod8Uzvn4od+Gecy2/xbwAACAASURB\nVIdydz88/ijtJ++7ZCh337GxtXv53ouGcqNGzxXvPPzxodxT9z1+KLcjNZR7wa6Lh3L/6ehnhnLW\n/Gyja350/ZmH2bbLPCTmYj233v3BsZPho+jgi+ngs+ngs53p555k+h6ug8+mg89mzc+2XbqfeVif\nuZhNB5+ODj6bDj7bdjn3JNvn/KODby4dfDYdfDYdfLZVOPee6XOxXeZhtIN//4Fvm7QvJ8kP3/yv\nN+V64XQsaukPV9XJmb765JtVdX5W5zFdAAAwJR0cAACmpYMDAAAbtugRVNd095Ek6e5TLzR2J3nF\n0kYFAACrSwcHAIBp6eAAAMCGzd2Ac/KiY8b7dyW5aykjAgCAFaaDAwDAtHRwAABYrtFHcG43Yw+K\nBQAAAAAAAAAAktiAAwAAAAAAAAAAG3LaG3Cq6iPLGAgAADCbDg4AANPSwQEAYPP0Fry2wtwNOFV1\nf1Xdd+J1f1Xdn+RpJ9+fk7uuqg5V1aFPHv7kpg8aAADOVDo4AABMSwcHAAA2w6I74PxCkt9I8vTu\nPre7z03yyRMfn7deqLuv7+6D3X3wKfufspnjBQCAM50ODgAA09LBAQBgida24LUV5m7A6e5XJ3lj\nkl+tqldX1Y5s3d16AADgjKeDAwDAtHRwAABgMyy6A066+/eS/KUTn/7nJGctdUQAALDidHAAAJiW\nDg4AAGzUrkVfUFXPT9Ld/VNV9d4kf7GqXtLdb13+8AAAYPXo4AAAMC0dHAAAlmdtRW4wOXcDTlX9\nQJIXJ9lVVW9P8vwk70jy2qp6Xne/bvlDBACA1aGDAwDAtHRwAABgMyy6A843J7kqyd4ktye5rLvv\nq6o3JHlXEhceAACwuXRwAACYlg4OAABsWHWvf6ufqnpvdz/v0R+f+Px93X3Voh+w96wnD91L6HFn\n7R+JpVJDuS/ad+FQ7o6H7hnKPeXsS4dy9xw9PJTbVTuHcg+vHRvKXXPOgaHczcfvH8p94L5bhnLn\n7xn7PdtRY79no0Z/r+8/9sBQ7g8/8pah3DP+zDcO5Y728aFckly459yh3LV7njaU+9kHbhrKPXT8\nyFBu9Fxx/7EHh3IPDI5z9Byzs3YM5e4+ct9QzpqfbXTNX/a0lwzlzMNs22UeEnOxnt0XP3VT/mJ0\n8MV08Nl08M21Xc49yfQ9XAefTQefzZqfbbt0P/OwPnMxmw5++nTw2XTw2c70Dp5sn/OPDj6bDj6b\nDj7bdlnvZ3rvS8zFeqyJ2UY7+D848NcnfwbVj9/8q5MvqEVn/Ier6uwTH1998s2qOj/J2tJGBQAA\nq0sHBwCAaengAADAhi16BNU13X0kSbr71AuN3UlesbRRAQDA6tLBAQBgWjo4AACwYXM34Jy86Jjx\n/l1J7lrKiAAAYIXp4AAAMC0dHAAAlmtVbis59tBBAAAAAAAAAAAgyeJHUAEAAAAAAAAAwJC19FYP\nYRLugAMAAAAAAAAAABswdwNOVT33lI93V9X3VdVbqur1VXX2nNx1VXWoqg4dP354M8cLAABnNB0c\nAACmpYMDAACbYdEdcN50ysc/muTKJD+RZF+Sf7leqLuv7+6D3X1w5879Gx4kAACskDed8rEODgAA\ny/emUz7WwQEAYJP1Fry2wq4Fx+uUj786yZd199Gq+p0k71/esAAAYGXp4AAAMC0dHAAA2LBFG3DO\nr6pvyiMXIHu7+2iSdHdX1VZtGgIAgDOZDg4AANPSwQEAgA1btAHnPyf5Kyc+vrGqHt/dd1TVE5Lc\ntdyhAQDAStLBAQBgWjo4AAAs0dpWD2AiczfgdPd3VNWXJ1nr7ndX1bOq6tuSfLi7v3qaIQIAwOrQ\nwQEAYFo6OAAAsBnmbsCpqh9I8uIku6rq7Umen+QdSV5bVc/r7tctf4gAALA6dHAAAJiWDg4AAGyG\n6l7/EbZV9YEkVyXZm+T2JJd1931VtS/Ju7r7uYt+wJ+99PmTPiN33849Q7kja0eHcn9wz61DuS+7\n5BlDuSt2XTCUe++Dtw3lRj133xOHcp9be3Ao95vv/dmh3HOf9fKh3Dk79w7l9g/m9tbuodx9aw8N\n5T5x+DNDuSv3j837WqZ/lPZH7x9bE086++Kh3IfuvmUoN3qumHdun2ffjrHftduO3DOU+/zDh4dy\nt378rUM5a3620TVvHmY70+chMRfruePzH66h4KPo4Ivp4LPp4LOd6eeeZPv0cB18Nh18Nmt+NvMw\n21ace83FbDr4dHTw2XTw2bbLuSfZPucfHXw2HXw2HXy27bLez/R5SMzFeqyJ2UY7+KsPfOvk/0H4\np27+N5tyvXA6diw4fqy7j3f3A0k+3t33JUl3P5jVeUwXAABMSQcHAIBp6eAAAMCGzX0EVZKHq+rs\nExceV598s6rOjwsPAABYBh0cAACmpYMDAMASrUqpXrQB55ruPpIk3X3q38nuJK9Y2qgAAGB16eAA\nADAtHRwAANiwuRtwTl50zHj/riR3LWVEAACwwnRwAACYlg4OAABshkV3wAEAAAAAAAAAgCFr6a0e\nwiR2bPUAAAAAAAAAAABgO5u7AaeqXllVF5/4+Mqq+p2qureq3lVVz5mTu66qDlXVoXsfvHOzxwwA\nAGcsHRwAAKalgwMAAJth0R1wvufEc26T5I1J/ml3X5Dke5P8y/VC3X19dx/s7oMX7Lt0k4YKAAAr\nQQcHAIBp6eAAALBEvQWvrbBoA86uUz6+tLt/PUm6+x1Jzl3WoAAAYIXp4AAAMC0dHAAA2LBFG3Bu\nqKo3VdVTk/x6Vf2dqrq8qr4jyScnGB8AAKwaHRwAAKalgwMAABu2a97B7v5HVfU3k/xqkqcl2Zvk\nuiS/keTblj46AABYMTo4AABMSwcHAIDlWtuyh0JNa+4GnBNuSvLK7n53VT07yYuS/H53f365QwMA\ngJWlgwMAwLR0cAAAYEPmbsCpqh9I8uIku6rq7Umen+QdSV5bVc/r7tctf4gAALA6dHAAAJiWDg4A\nAMu1ttUDmEh1r3+rn6r6QJKr8sgtN29Pcll331dV+5K8q7ufu+gHXH7Rc4fuJfTUfY8fieWWh+4a\nyu3buWco96Q9jxvKvebhC4ZyP7LrzqHcnto5lDuw67yh3G/ee9NQ7ovPfcpQ7rYj9wzl/vtNvzaU\nu/bq1wzlRt29dmQod9GOs4Zyhx741FDuL59z5VDuxqN3DOWS5NKd5wzlLtyxdyj3saP3DuX27dg9\nlBs9V7xhz9iauO/4Q0O5Z++5eCh34+Dv2ihrfrbRNb+zdgzlzMNs22UeEnOxno989lANBR9FB19M\nB59NB99c2+Xck0zfw3Xw2XTw2f7/9u493LK7rg//+zMzySSTCQmXcJEkhKu3lqIZsf6qEcHHQrB4\nKam0WryntcXrryo0PkhrbfGC2KqFRi4RbbHGWp9AkGJRAvZXLhPD/Y5NIEgCSJLJkGSSmfP9/bH3\n2OFk7bX3WefsdXJmv1559jP77LPfs75nr+9a573mWVnLNt9tp3Q/62E266KbDr5xOng3Hbzbyd7B\nk52z/9HBu+ng3XTwbjtlez/Ze19iXcxim+g2tIP/4AWXjH4Pqt+8/sotOV7YiHmz9Ghr7Vhr7Y4k\nH2utHUqS1tqdWZ2TlAAAYEw6OAAAjEsHBwAANq33FlRJ7q6qfdMDjwuPv1hVZ8WBBwAALIMODgAA\n49LBAQBgiVpGvwDOtph3As5FrbUjSdJaO/FA45Qk3720UQEAwOrSwQEAYFw6OAAAsGm9J+AcP+jo\neP2zSYbdZBYAAJhJBwcAgHHp4AAAwFaYdwUcAAAAAAAAAAAYZFXu67pruwcAAAAAAAAAAAA7We8J\nOFX1B1X1XVW1fyN/aVVdWlUHq+rg4bs+t7kRAgDACtHBAQBgXDo4AACwFeZdAeerk3xrko9X1e9V\n1bdV1anz/tLW2uWttQOttQP7T3vAlgwUAABWhA4OAADj0sEBAGCJ2jb8tx3mnYDz6dbaM5NckOQ1\nSX4wySer6pVV9U3LHhwAAKwgHRwAAMalgwMAAJs27wScliSttUOttd9urV2c5EuSvC3Jc5c9OAAA\nWEE6OAAAjEsHBwCAJVrbhsd2mHcCzuH1L7TW/qq19tLW2pOXNCYAAFhlOjgAAIxLBwcAADZtT983\nW2sXVdUTJ0/bO6rqy5I8NckHW2uvG2WEAACwQnRwAAAYlw4OAABshd4TcKrqZ5M8LcmeqvrjJF+d\n5E+TPLeqvqK19vMjjBEAAFaGDg4AAOPSwQEAYLnWWtvuIYyiWs8PWlXvSfKEJHuT3JTk3Nbaoao6\nPcnbWmuPn7eAAw/7ulE/yYedctag3Jl1yqDcXx67fVDuSbsfMih3XbttUO5Dd908KDfU1+57xKDc\nR4/eOih3+NiRQbnHnvrAQblXXfsrg3JHX//yQbnvvOw9g3J7595lrtt1d35yUO4xpz14UO7w2rD1\nlyR7aveg3GnVe/7hTA/btW9Q7iNHbxmUG7qvONiGbUvXH/mrQblnnvboQblP1t2DctcduWlQzjbf\nbeg2v3/3aYNy1kO3nbIeEutilg9++h01KLiODj6fDt5NB+92su97kvF7uA7eTQfvZpvvtlO6n/Uw\nm3XRTQcfjw7eTQfvtlP2PcnO2f/o4N108G46eLedsr2f7L0vsS5msU10G9rB//Ejvn30M3B++4Y/\n2JLjhY2YtzaOttaOtdbuSPKx1tqhJGmt3ZlkbemjAwCA1aODAwDAuHRwAABg0+adgHN3VR0/zfXC\n4y9W1Vlx4AEAAMuggwMAwLh0cAAAWKK2DY/tMO/acxe11o4kSWvtxAONU5J899JGBQAAq0sHBwCA\ncengAADApvWegHP8oKPj9c8m+exSRgQAACtMBwcAgHHp4AAAwFaYdwUcAAAAAAAAAAAYZG3bbgo1\nrl3bPQAAAAAAAAAAANjJek/AqapHVdUrqurfVNX+qvrNqnpvVV1ZVRf05C6tqoNVdfAzd9y01WMG\nAICTlg4OAADj0sEBAGC52jb8tx3mXQHniiTvSHI4yVuTfDDJ05K8PskrZoVaa5e31g601g6cs++h\nWzRUAABYCVdEBwcAgDFdER0cAADYpHkn4JzZWntJa+2FSe7XWntRa+0TrbWXJ7n/COMDAIBVo4MD\nAMC4dHAAAGDT9sz5/lpVPS7J2Un2VdWB1trBqnpMkt3LHx4AAKwcHRwAAMalgwMAwBKtbfcARjLv\nBJyfSvKaTD6Pb03yvKp6fJKzkvzgkscGAACrSAcHAIBx6eAAAMCm9Z6A01p7Y1U9O8laa+0dVXVL\nJve+fX9r7XWjjBAAAFaIDg4AAOPSwQEAgK3QewJOVf1sJgcae6rqj5M8Mcmbkjy3qr6itfbzyx8i\nAACsDh0cAADGpYMDAMByraVt9xBGMe8WVM9M8oQke5PclOTc1tqhqvrlJG9LMvfA4/CxuwYNbP/u\n0wbl9tW8H6nbXTk2KHf42JFBuaF3Dv7EPbcOyg39PG86csug3O3t6KDc/l17B+XGdvT1Lx+U2/PU\n7x+U+/zz/vmg3N21a1BuqJvuOTQo99BT7jd4mXcNnGuH14Ztux8ZmBt7X/HOwzcMyv2dMx8zKHeo\nhu1DP7V2x6Dc/t3j7its892sh24n+3pIrIsR6OBz6ODddPBuq7DvGbuH6+DddPCtZZvvZj102459\nr3XRTQffOB28mw5+3zD2vifZOfsfHXzG8nTwTjp4t52yvZ/s6yGxLmaxTTDEvLVxtLV2rLV2R5KP\ntdYOJUlr7c5M7ocLAABsLR0cAADGpYMDAACbNu80+burat/0wOPC4y9W1Vlx4AEAAMuggwMAwLh0\ncAAAWKLmFlRJkotaa0eSpLV24oHGKUm+e2mjAgCA1aWDAwDAuHRwAABg03pPwDl+0NHx+meTfHYp\nIwIAgBWmgwMAwLh0cAAAWK5Vuazkru0eAAAAAAAAAAAA7GROwAEAAAAAAAAAgE3oPQGnqnZV1fdV\n1dVV9a6q+vOq+t2qetKc3KVVdbCqDt5652e2dMAAAHAy08EBAGBcOjgAACxXa230x3aYdwWclyc5\nP8m/S/KnSV47fe1nquqHZ4Vaa5e31g601g6cffo5WzZYAABYATo4AACMSwcHAAA2bc+c71/YWvve\n6fM/q6q3ttaeX1VvTvLOJL+23OEBAMDK0cEBAGBcOjgAALBp866Ac09VPTpJquork9ydJK21I0m2\n55o9AABwctPBAQBgXDo4AAAs0Vra6I/tMO8KOD+Z5E+r6sj0vc9Kkqo6J5PLcAIAAFtLBwcAgHHp\n4AAAwKb1noDTWvuTqvqOJEdba++oqi+rqp9I8sHW2k+NM0QAAFgdOjgAAIxLBwcAALZC7wk4VfWz\nSZ6WZE9V/XGSJyZ5U5LnVtVXtNZ+fvlDBACA1aGDAwDAuHRwAABYrrXtHsBIqrXZ976qqvckeUKS\nvUluSnJua+1QVZ2e5G2ttcfPW8DfPe9pg26udcfa3UNi+fzakUG5J+x96KDcPW3YVPn02p2Dcp85\nevug3Dl7zhyU++AdnxyUO/+0cwblztlzxqDc59fuGZQb6oxdpwzKDR3na6/7jUG55xz46UG5N99x\nw6DcJfseOyh3OMcG5ZLkHfd8ZlDugj1nDV7mmIbuK4buQz9+17DP86L9jxmUG2ro5zKUbb7b+Xsf\nOCg3lPXQbez1kFgXs3zg02+vQcF1dPD5dPBuOni3k33fk4zfw3Xwbjr41rLNd9PBu23Hvte66KaD\nb5wO3k0H73ayd/Bk5+x/dPCtpYN308G76eDdVmHfe7Kvi52yHoZ28L93/jcP6sub8ZqPv3ZLjhc2\novcKOJlccvNYkjuq6mOttUNJ0lq7s6pW5SQlAAAYkw4OAADj0sEBAGCJWkY//2Zb7Jrz/burat/0\n+YXHX6yqs7I6VwkCAIAx6eAAADAuHRwAANi0eVfAuai1diRJWvuCa0yekuS7lzYqAABYXTo4AACM\nSwcHAAA2rfcEnOMHHR2vfzbJZ5cyIgAAWGE6OAAAjEsHBwCA5VpzCyoAAAAAAAAAAGAeJ+AAAAAA\nAAAAAMAm9J6AU1V7quqfVNXrq+rd08cfVdU/rapTenKXVtXBqjp44+FPbP2oAQDgJKWDAwDAuHRw\nAABYrtba6I/tsGfO9387ya1JXpDkxulr5yb57iS/k+Q7ukKttcuTXJ4kf/e8p63GzbwAAGBr6OAA\nADAuHRwAANi0eSfgXNhae9y6125M8taq+vCSxgQAAKtMBwcAgHHp4AAAsERr2z2AkfTegirJ56rq\nkqr66/dV1a6q+o4ktyx3aAAAsJJ0cAAAGJcODgAAbNq8E3CeleSZSW6uqg9X1UeS3JTk26ffAwAA\ntpYODgAA49LBAQCATeu9BVVr7fpM729bVQ+cvvzvW2vfteRxAQDAStLBAQBgXDo4AAAsV0vb7iGM\novcEnKq6quPlJx9/vbX2jKWMCgAAVpQODgAA49LBAQCArVCtzT7TqKr+PMn7k7wsSUtSSV6d6WU3\nW2vXzFvAxedfPOqpTLvn3lWr2zm7ThuUu2ntjkG5s2vvoNwNR28blLv16OcH5e63Z9+g3AN3D8ut\nDTzz7ZZjdw7KPXLP2YNyQ8/Qu73dMyj38F3DPs9fP/gLg3KPfNywf1N4wKlnDsp97u7bB+WS5LzT\nzxmUO3R02Lb7iL0PnP+mLTR0X/G/Dn9sUO5Rpz9kUG5XalDua/Y8aFDujfd8alDONt9t6DY/dPuz\nHrrtlPWQWBez3Pi59w7bGa6jg8+ng3fTwbud7PueZPweroN308G72ea77ZTuZz3MZl1008HHo4N3\n08G77ZR9T7Jz9j86+NbSwbvp4N108G6rsO892dfFTlkPQzv4N5331NEvgfOGT7x+S44XNmJeSz+Q\n5NoklyW5rbX2piR3ttauWeSgAwAA2DAdHAAAxqWDAwDAEq2ljf7YDr23oGqtrSV5cVVdOf3z5nkZ\nAABgOB0cAADGpYMDAABbYaGDiNbajUkuqaqnJzm03CEBAAA6OAAAjEsHBwAANmNDZ/G31q5OcvWS\nxgIAAKyjgwMAwLh0cAAA2Fqtbc8toca2a7sHAAAAAAAAAAAAO5n72AIAAAAAAAAAsBRrcQWcXlV1\nec/3Lq2qg1V18OOHPz50EQAAwAl0cAAAGJcODgAALKr3CjhV9YBZ30py8axca+3yJJcnycXnX7wa\npzIBAMAW0MEBAGBcOjgAALAV5t2C6jNJbsjkQOO4Nv36wcsaFAAArDAdHAAAxqWDAwDAErUVuQXV\nvBNw/iLJU1pr97p+ZlV9YjlDAgCAlaaDAwDAuHRwAABg03bN+f6vJrn/jO/94haPBQAA0MEBAGBs\nOjgAAKyYqnpqVX2oqj5aVc/t+P5PVNX7q+rdVfXGqnrEvL+z9wSc1tpvtNbetW4hr5p+79c2+gMA\nAAD9dHAAABiXDg4AAMu11trojz5VtTvJbyR5WpIvS/IPq+rL1r3tuiQHWmuPT/L7WeDk/N5bUFXV\nVetfSvINVXV2krTWnjFvAQAAwOJ0cAAAGJcODgAAK+eJST7aWvuLJKmq303yLUnef/wNrbU/PeH9\nb03yXfP+0mo9Z/5U1XVJ3pfkZUlaJgcer07yrOkCr5m3gL2nndd/atEM9z9t/5BYKjUo97DTHzAo\nd/NdtwzKnb/vwYNyt9xzeFBuT+0elLt77eig3EVnXDAod/2x2wfl3nPohkG5s04dNs921bB5NtTQ\neX370TsG5f7Ph9f/m8NiHvfF3zYod087NiiXJA849cxBuWef+uhBuZfc8f75b+pw17Ejg3JD9xW3\nH71zUO6OgeMcuo/ZXfPuhNjtc0cODcrZ5rsN3ebPffTFg3LWQ7edsh4S62KWUx70qC35YHTw+XTw\nbjr41top+55k/B6ug3fTwbvZ5rvtlO5nPcxmXXTTwTdOB++mg3c72Tt4snP2Pzp4Nx28mw7ebads\n7yd770usi1lsE92GdvCLHv6UQX15M97yl3/yT5JcesJLl7fWLk+Sqnpmkqe21n5g+vU/TvLVrbXn\ndP1dVfXrSW5qrf2bvmX2XgEnyYVJfjTJZUl+srX2zqq6c5EDDgAAYBAdHAAAxqWDAwDAEo1+9k2S\n6ck2l2/276mq70pyIMnXz3tv7wk4rbW1JC+uqiunf948LwMAAAyngwMAwLh0cAAAWDmfTHLeCV+f\nO33tC1TVN2Zyov7Xt9bmXkJtoYOI1tqNSS6pqqcnGXa9MwAAYGE6OAAAjEsHBwCA5Vjblmvg9HpH\nksdW1SMzOfHmWUn+0YlvqKqvSPKfMrlV1acX+Us3dBZ/a+3qJFdvJAMAAAyngwMAwLh0cAAAOLm1\n1o5W1XOS/I8ku5O8orX2vqr610kOttauSvJLSfYnubKqkuTjrbVn9P29LqMJAAAAAAAAAMDKaK29\nLsnr1r32/BOef+NG/04n4AAAAAAAAAAAsBT3wVtQLcWuvm9W1e6q+idV9XNV9XfWfe9nljs0AABY\nPTo4AACMSwcHAAC2Qu8JOEn+U5KvT/JXSf5DVf3KCd/79lmhqrq0qg5W1cFjxw5vwTABAGBl6OAA\nADAuHRwAANi0eSfgPLG19o9aa7+a5KuT7K+qP6iqvUlqVqi1dnlr7UBr7cDu3fu3crwAAHCy08EB\nAGBcOjgAACxRa230x3aYdwLOqceftNaOttYuTfKuJH+SxBEFAABsPR0cAADGpYMDAACbNu8EnINV\n9dQTX2it/askr0xywbIGBQAAK0wHBwCAcengAADApu3p+2Zr7bvWv1ZVr2qtPTvJy5Y2KgAAWFE6\nOAAAjEsHBwCA5VrL9twSamy9J+BU1VXrX0ryDVV1dpK01p6xrIEBAMAq0sEBAGBcOjgAALAVek/A\nSXJekvdlcpZ/y+TA40CSFy26gEfd72GDBzfE6btPnf+mDkfW7hmUu/nztw7Knb/vwYNyX3n6uYNy\n1935yUG53TXvLmXdbm9HB+WGuvFjrxuUe/yXPWtQ7ozdewfl9g/M7a1TBuUOnXLGoNy5j754UO4x\n+79oUG47znh84a1vH5R7+L4HDcrdcOjmQbmh+4r9u08blDvnlDMH5T555JZBuc8dOTQoZ5vvNvY2\nbz10O9nXQ2JdzHLzbR8clOugg8+hg3fTwbud7PueZOf0cB28mw7ezTbfzXroth37Xuuimw4+Hh28\nmw7ebafse5Kds//Rwbvp4N108G47ZXs/2ddDYl3MYpvoNrSDtxW5As68VnlhkmuTXJbkttbam5Lc\n2Vq7prV2zbIHBwAAK0gHBwCAcengAADApvVeAae1tpbkxVV15fTPm+dlAACA4XRwAAAYlw4OAABs\nhYUOIlprNya5pKqenmTYddIAAICF6eAAADAuHRwAAJajtdW4BdWGzuJvrV2d5OoljQUAAFhHBwcA\ngHHp4AAAwBC7tnsAAAAAAAAAAACwk7mPLQAAAAAAAAAAS7GW1bgFVe8VcKpqX1X9VFX9ZFWdVlXf\nU1VXVdUvVtX+sQYJAACrQgcHAIBx6eAAAMBWmHcLqiuSPCTJIzO55+2BJL+UpJK8ZFaoqi6tqoNV\ndfDWOz+9RUMFAICVcEV0cAAAGNMV0cEBAIBNmncLqse11v5BVVWSTyX5xtZaq6o/S/KuWaHW2uVJ\nLk+SL33wE1fjWkIAALA1dHAAABiXDg4AAEvU2mrU5XlXwEmStMmn8brpn8e/Xo1PCAAAtoEODgAA\n49LBAQCAzZh3BZyDVbW/tXa4tfZ9x1+sqkcnuX25QwMAgJWkgwMAwLh0cAAAWKK1FTmvvfcKOK21\nH2itHT7xtap6VWvtY0m+bqkjAwCAFaSDAwDAuHRwAABgK/ReAaeqrlr/UpJvqKqzp18/YymjAgCA\nFaWDAwDAuHRwAABgK9T0drbd36y6Lsn7krwsk3vdVpJXJ3lWkrTWrpm3gEc88PGDriX0qNMfMiSW\nG+767KDc6btPHZR7+Kn3H5T7ibvPnv+mDj+359ODcqfW7kG5C/bcb1DucOo3jwAAIABJREFUj259\n/6Dc3zjz/EG5Tx65ZVDu3e//3UG5Z1/4E4NyQ31u7cig3AN3nTYod/COTwzKPf2MxwzKvfWemwfl\nkuTBu88YlHvArr2Dch+959ZBudN3nTIoN3Rf8cunDtsmDh27a1Duy0990KDcWwfOtaFs892GbvO7\nq/dCejNZD912ynpIrItZPvyZgzUouI4OPp8O3k0H31o7Zd+TjN/DdfBuOng323y3ndL9rIfZrItu\nOvjG6eDddPBuJ3sHT3bO/kcH76aDd9PBu+2U7f1k732JdTGLbaLb0A7++Id+zej3oHr3Tf97S44X\nNmLeLL0wybVJLktyW2vtTUnubK1ds8hBBwAAsGE6OAAAjEsHBwAANq33FlSttbUkL66qK6d/3jwv\nAwAADKeDAwDAuHRwAABgKyx0ENFauzHJJVX19CSHljskAABABwcAgHHp4AAAsBxrbfQ7UG2LDZ3F\n31q7OsnVSxoLAACwjg4OAADj0sEBAIAhdm33AAAAAAAAAAAAYCdzH1sAAAAAAAAAAJaiZTVuQbXh\nK+BU1YeXMRAAAKCbDg4AAOPSwQEAgI3qvQJOVd2e/PWpSDX9c9/x11tr95uRuzTJpUnygH0Pz/7T\nHrBFwwUAgJObDg4AAOPSwQEAYLnWmivgJMkrk/xhkse21s5srZ2Z5OPT550HHUnSWru8tXagtXbA\nQQcAAGyIDg4AAOPSwQEAgE3rPQGntfYjSf59kldX1Y9U1a5kRW7OBQAA20AHBwCAcengAADAVph3\nBZy01q5N8o3TL69JctpSRwQAACtOBwcAgHHp4AAAsDxtG/7bDnNPwEmS1tpaa+0/JPkHSfYud0gA\nAIAODgAA49LBAQCAzdjT982quqrj5b3HX2+tPWMpowIAgBWlgwMAwLh0cAAAYCv0noCT5Nwk70/y\nskzueVtJvirJixZdwDmnnjVoYLcfu2tQ7sv3PXxQ7sw6ZVDuL4/dPij31tN2D8o9qO0blPvQXTcP\nyn3q7lsH5b757C8flPvo0WHLO2P3sP8h5dkX/sSg3Kuu/ZVBuaOvf/mg3Hde9p5BuaGX1tpVNSj3\nkbVh28PeGrY9JMnn2z2DcsfWhn02X3LKAwblPnL0lkG5ofuK09q83Xu3m44dGZR7RBt2VeY9+x4x\nKHfdkZsG5Wzz3YZu8/a93U729ZBYFyPQwefQwbvp4N1WYd8zdg/Xwbvp4N1s8912SvezHmazLrrp\n4Bung3fTwbud7PueZOfsf3Twbjp4Nx28207Z3u17Z7Muup3s28RQa217bgk1tnm3oDqQ5NoklyW5\nrbX2piR3ttauaa1ds+zBAQDACtLBAQBgXDo4AACwab2nhrbW1pK8uKqunP5587wMAAAwnA4OAADj\n0sEBAICtsNBBRGvtxiSXVNXTkxxa7pAAAAAdHAAAxqWDAwDAcgy9VdZOs6Gz+FtrVye5ekljAQAA\n1tHBAQBgXDo4AAAwhMtoAgAAAAAAAACwFGttNa6As2u7BwAAAAAAAAAAADtZ7wk4VfX4E56fUlU/\nU1VXVdW/rap9yx8eAACsFh0cAADGpYMDAABbYd4VcK444fkLkzwmyYuSnJ7kpbNCVXVpVR2sqoOf\nueOmTQ8SAABWyBUnPNfBAQBg+a444bkODgAAW6xtw3/bYc+c79cJz5+S5Ktaa/dU1ZuTvGtWqLV2\neZLLk+TAw75uNW7mBQAAW0MHBwCAcengAADAps07Aeesqvq2TK6Us7e1dk+StNZaVTmgAACAraeD\nAwDAuHRwAABg0+adgPPmJM+YPn9rVT2ktXZzVT00yWeXOzQAAFhJOjgAAIxLBwcAgCVqbW27hzCK\n3hNwWmvfs/61qnpVa+3ZmVyKEwAA2EI6OAAAjEsHBwAAtkLvCThVdVXHy0+uqrOTpLX2jI7vAwAA\nA+ngAAAwLh0cAADYCvNuQXVekvcleVmSlqSSfFWSFy26gMPH7ho0sP27TxuU21fzfqRud+XYoNzh\nY0cG5bJ7WOwT99w6KDf087zpyC2Dcre3o4Ny+3ftHZQb29HXv3xQbs9Tv39Q7vPP++eDcnfXrkG5\noW6659Cg3ENPud/gZd41cK4dXhu27X5kYG7sfcU7D98wKPd3znzMoNyhGrYP/dTaHYNy+3ePu6+w\nzXezHrqd7OshsS5GoIPPoYN308G7rcK+Z+weroN308G3lm2+m/XQbTv2vdZFNx1843Twbjr4fcPY\n+55k5+x/dPAZy9PBO+ng3XbK9n6yr4fEupjFNrG11tK2ewijmLc2LkxybZLLktzWWntTkjtba9e0\n1q5Z9uAAAGAF6eAAADAuHRwAANi03tPkW2trSV5cVVdO/7x5XgYAABhOBwcAgHHp4AAAsFytrcYV\ncBY6iGit3Zjkkqp6epJh19UDAAAWpoMDAMC4dHAAAGAzNnQWf2vt6iRXL2ksAADAOjo4AACMSwcH\nAACGcBlNAAAAAAAAAACWYi2rcQuqXds9AAAAAAAAAAAA2Ml6T8CpqudU1YOmzx9TVW+uqlur6m1V\n9TfHGSIAAKwOHRwAAMalgwMAAFth3hVwfqi19tnp83+f5MWttbOT/HSSl84KVdWlVXWwqg7eeudn\ntmioAACwEnRwAAAYlw4OAABL1Fob/bEd5p2As+eE5w9urf33JGmtvSnJmbNCrbXLW2sHWmsHzj79\nnM2PEgAAVocODgAA49LBAQCATZt3As7vV9UVVfWoJP+9qn6sqh5RVd+b5OMjjA8AAFaNDg4AAOPS\nwQEAgE3b0/fN1tpl04OMVyd5dJK9SS5N8odJvnP5wwMAgNWigwMAwLh0cAAAWK61bbol1NjmXQEn\nrbVXtta+urX2oNbamUmuba39y9babSOMDwAAVo4ODgAA49LBAQCAzeq9Ak5VXdXx8pOPv95ae8ZS\nRgUAACtKBwcAgHHp4AAAsFwtq3EFnGo9l/qpqj9P8v4kL0vSklQml+F8VpK01q6Zt4C/e97TBn2S\nd6zdPSSWz68dGZR7wt6HDsrd09YG5T69dueg3GeO3j4od86eMwflPnjHJwflzj/tnEG5c/acMSj3\n+bV7BuWGOmPXKYNyQ8f52ut+Y1DuOQd+elDuzXfcMCh3yb7HDsodzrFBuSR5xz2fGZS7YM9Zg5c5\npqH7iqH70I/fNezzvGj/Ywblhhr6uQxlm+92/t4HDsoNZT10G3s9JNbFLB/49NtrUHAdHXw+Hbyb\nDt7tZN/3JOP3cB28mw6+tWzz3XTwbtux77UuuungG6eDd9PBu53sHTzZOfsfHXxr6eDddPBuOni3\nVdj3nuzrYqesh6Ed/KFnf+noZ+DcdOsHtuR4YSPm3YLqQJJrk1yW5LbW2puS3Nlau2aRgw4AAGDD\ndHAAABiXDg4AAGxa7y2oWmtrSV5cVVdO/7x5XgYAABhOBwcAgHHp4AAAsFx9d2Y6mSx0ENFauzHJ\nJVX19CSHljskAABABwcAgHHp4AAAwGZs6Cz+1trVSa5e0lgAAIB1dHAAABiXDg4AAAzhMpoAAAAA\nAAAAACzFWlbjFlS7tnsAAAAAAAAAAACwk/WegFNVf1BV31VV+8caEAAArDIdHAAAxqWDAwAAW2He\nFXC+Osm3Jvl4Vf1eVX1bVZ067y+tqkur6mBVHbzx8Ce2ZKAAALAidHAAABiXDg4AAEvUWhv9sR3m\nnYDz6dbaM5NckOQ1SX4wySer6pVV9U2zQq21y1trB1prB87df97WjRYAAE5+OjgAAIxLBwcAADZt\n3gk4LUlaa4daa7/dWrs4yZckeVuS5y57cAAAsIJ0cAAAGJcODgAAS7TW2uiP7TDvBJzD619orf1V\na+2lrbUnL2lMAACwynRwAAAYlw4OAABsWu8JOK21i9a/VlWvWt5wAABgtengAAAwLh0cAADYCnv6\nvllVV61/Kck3VNXZSdJae8ayBgYAAKtIBwcAgHHp4AAAsFxtm24JNbbq+0Gr6rok70vyskzug1tJ\nXp3kWUnSWrtm3gIuPv/iUT/J3XPvqtXtnF2nDcrdtHbHoNzZtXdQ7oajtw3K3Xr084Ny99uzb1Du\ngbuH5dYybLrccuzOQblH7jl7UK4NHOft7Z5BuYfvGvZ5/vrBXxiUe+Tjhv2bwgNOPXNQ7nN33z4o\nlyTnnX7OoNyho8O23UfsfeCg3FBD9xX/6/DHBuUedfpDBuV2pQblvmbPgwbl3njPpwblbPPdhm7z\nQ7c/66HbTlkPiXUxy42fe++wneE6Ovh8Ong3Hbzbyb7vScbv4Tp4Nx28m22+207pftbDbNZFNx18\nPDp4Nx28207Z9yQ7Z/+jg28tHbybDt5NB++2Cvvek31d7JT1MLSD33//Y0Y/A+eWwx/dkuOFjZjX\n0i9Mcm2Sy5Lc1lp7U5I7W2vXLHLQAQAAbJgODgAA49LBAQCATeu9BVVrbS3Ji6vqyumfN8/LAAAA\nw+ngAAAwLh0cAACWa+gVAHeahQ4iWms3Jrmkqp6e5NByhwQAAOjgAAAwLh0cAADYjA2dxd9auzrJ\n1UsaCwAAsI4ODgAA49LBAQCAIVxGEwAAAAAAAACApWhtNW5BtWu7BwAAAAAAAAAAADtZ7wk4VfWo\nqnpFVf2bqtpfVb9ZVe+tqiur6oJxhggAAKtDBwcAgHHp4AAAsFxrrY3+2A7zroBzRZJ3JDmc5K1J\nPpjkaUlen+QVs0JVdWlVHayqgx8//PEtGioAAKyEK6KDAwDAmK6IDg4AAGzSvBNwzmytvaS19sIk\n92utvai19onW2suT3H9WqLV2eWvtQGvtwPn7z9/SAQMAwElOBwcAgHHp4AAAwKbtmfP9tap6XJKz\nkuyrqgOttYNV9Zgku5c/PAAAWDk6OAAAjEsHBwCAJWrZnltCjW3eCTg/leQ1SdaSfGuS51XV4zM5\nELl0yWMDAIBVpIMDAMC4dHAAAGDTek/Aaa29MckXn/DSn1XVa5M8o7W2ttSRAQDACtLBAQBgXDo4\nAACwFXpPwKmqqzpeflKSP6yqtNaesZRRAQDAitLBAQBgXDo4AAAs11pbjVtQVev5QavquiTvS/Ky\nJC1JJXl1kmclSWvtmnkL2HvaeYM+yfuftn9ILJUalHvY6Q8YlLv5rlsG5c7f9+BBuVvuOTwot6eG\n3ar47rWjg3IXnXHBoNz1x24flHvPoRsG5c46ddg821XD5tlQQ+f17UfvGJT7Px/u+jeH+R73xd82\nKHdPOzYolyQPOPXMQblnn/roQbmX3PH+Qbm7jh0ZlBu6r7j96J2DcncMHOfQfczu2jUo97kjhwbl\nbPPdhm7z5z764kE566HbTlkPiXUxyykPetSWfDA6+Hw6eDcdfGvtlH1PMn4P18G76eDdbPPddkr3\nsx5msy666eAbp4N308G7newdPNk5+x8dvJsO3k0H77ZTtveTvfcl1sUstoluQzv46ac/YvQzcO68\n84bRN6h5e/wLk1yb5LIkt7XW3pTkztbaNYscdAAAABumgwMAwLh0cAAAYNN6b0E1vb/ti6vqyumf\nN8/LAAAAw+ngAAAwLh0cAACWq+/OTCeThQ4iWms3Jrmkqp6eZNj1zgAAgIXp4AAAMC4dHAAA2IwN\nncXfWrs6ydVLGgsAALCODg4AAOPSwQEAYGu1rMYVcHZt9wAAAAAAAAAAAGAncwIOAAAAAAAAAABs\nQu8tqKpqV5LvSfL3k5yb5FiSDyd5aWvtTcseHAAArBodHAAAxqWDAwDAcrW2Greg6j0BJ8nLk9yQ\n5N8leWaSQ0nekuRnqupvttZ+rStUVZcmuTRJdu85O7t379+6EQMAwMlNBwcAgHHp4AAAwKbNOwHn\nwtba906f/1lVvbW19vyqenOSdybpPPBorV2e5PIk2XvaeatxKhMAAGwNHRwAAMalgwMAAJu2a873\n76mqRydJVX1lkruTpLV2JIkDCgAA2Ho6OAAAjEsHBwCAJWqtjf7YDvOugPOTSf60qu5OsjvJP0yS\nqjonyWuXPDYAAFhFOjgAAIxLBwcAADat9wSc1tqfVNUjkjywtfbZJKmqV7XWnp3kp8YYIAAArBId\nHAAAxqWDAwAAW6H3BJyquuqE58efPrmqzk6S1tozljc0AABYPTo4AACMSwcHAIDlWpX7us67BdV5\nSd6X5GWZfCaV5KuSvGjJ4wIAgFWlgwMAwLh0cAAAYPNaazMfSXYl+fEkf5zkCdPX/qIvs5FHkkvl\ntj+3k8YqJycnd7LkdtJY5eTkNp/b4DJ08G1aRztlrHJycnInS24njVVOTm7n5Ta4DB18h41VTk5O\n7mTJ7aSxysnJyXks8Nkt+AGfm+TKJL+e5ONbtvDkoNz253bSWOXk5OROltxOGqucnNzmcwOXpYOP\nvI52yljl5OTkTpbcThqrnJzczssNXNZKd/CdNFY5OTm5kyW3k8YqJycn5zH/Me8WVEmS1tqNSS6p\nqqcnObRIBgAAGE4HBwCAcengAADAZix0As5xrbWrk1y9pLEAAADr6OAAADAuHRwAABhi1zYv/3K5\n+0RuO5YpJycnt+q57VimnJzc9uXuS3bKZ2a/LCcnJ3fy57ZjmXJycquTuy/ZSZ/ZThmrnJyc3MmS\n245lysnJyS0rt/Jqeg8vAAAAAAAAAABggO2+Ag4AAAAAAAAAAOxsrbVteSR5apIPJflokucumHlF\nkk8nee8Gl3Vekj9N8v4k70vyowvmTkvy9iTvmub+1QaXuzvJdUleu4HM9Unek+SdSQ5uIHd2kt9P\n8sEkH0jyNQtkvni6nOOPQ0l+bMHl/fj0M3lvklcnOW3B3I9OM+/rW1bXuk7ygCR/nOQj0z/vv2Du\nkuny1pIc2MDyfmn6eb47yX9PcvYGsj83zb0zyRuSfNFG5nOS/zdJS/KgBZf3giSfPGFdXrzo8pL8\n8PTnfF+SX1xwef/1hGVdn+SdC+aekOStx+d3kicumPtbSf73dNt4TZL7rct0buPz5kxPrnfO9OTm\nzpmebO+cmZWbN2d6ltc7Z/qW1zdnepbXO2d6cr1zpic3b8507t+TPDLJ2zL53fRfk5y6YO4508ys\n7XZW7j9n8rvwvZnM/VMWzL18+tq7M9n3718kd8L3/0OSwxsY5xVJ/s8J6/AJC+Yqyc8n+XAmv5t+\nZMHcW05Y1l8m+cMFc09J8ufT3J8lecyCuSdPc+9N8ltJ9qz/bKbv+4Lf6/PmS0+ud7705HrnS0+u\nd77Mys2bLz3L650vPbne+dKT650vPbne+dKTW2i+3Fcf0cFnZa6PDj5zfec+2MNn5HTwETv49D07\noof35HRwHVwH18Hn5XTw7pwOvoFHdPBZmeujg89c39HBZy3vBdHBdXAdXAefkzvh+zq4Dj43N2++\n9Cyvd7705JbawWdkV7KHb9djexY6WXkfS/KoJKdON4YvWyB3UZKvzMYPPB6W5Cunz8+cTuhFllfH\nN84kp0x3Ln97A8v9iST/Zf0GPCdzfd9OqCf3W0l+YPr81Mw4WWTOOrkpySMWeO/DpzuU06df/16S\n71kg9zemG+i+JHuS/M+eDfxe6zrJL2Z6kJrkuUl+YcHcl2ZykPWmzD7o6Mp90/EdSZJf6FpeT/Z+\nJzz/kSQvXXQ+Z1Kk/keSG7rmwozlvSDJv5jz+XflvmG6HvZOv37wouM84fsvSvL8BZf3hiRPmz6/\nOMmbFsy9I8nXT59/X5KfW5fp3MbnzZmeXO+c6cnNnTM92d45Mys3b870LK93zvTkeudM3zj75kzP\n8nrnTE9u3pzp3L9nsj971vT1lyb5oQVzX5HkgszYh/fkLp5+rzL5R5xFl3fifPmVrPsHvFm56dcH\nkvx2ug88Zi3viiTP7Jkvs3Lfm+RVSXbNmC9zf88m+W9Jnr3g8j6c5Eunr/+zJFcskPt/knwiyeOm\nr//rJN8/4+f8gt/r8+ZLT653vvTkeudLT653vszKzZsvPcvrnS89ud750jfOvvnSs7ze+dKVy+Tq\nkQvNl/viIzp4X6Z3e+zJnVQdfNb6zn2wh8/I6eAjdvDp6zuih/fkdHAdXAfXwefldPAZ4+ybLz3L\n08F18BMzvdtjT04H78/p4Dp455zpyengOrgOroOv/zl18MWW1ztfenJL7eAzlrlyPXw7H9t1C6on\nJvloa+0vWmt3J/ndJN8yL9Rae3OSz210Ya21T7XW/nz6/PZMziZ7+AK51lo7PP3ylOmjLbLMqjo3\nydOTvGyj492oqjork7L28iRprd3dWrt1g3/NU5J8rLV2w4Lv35Pk9Krak8mBxF8ukPnSJG9rrd3R\nWjua5Jok3971xhnr+lsyOcDK9M9vXSTXWvtAa+1DfQObkXvDdJzJ5MzjczeQPXTCl2ekY970zOcX\nJ/mprsycXK8ZuR9K8sLW2pHpez69keVVVSX5B5n8Alwk15Lcb/r8rHTMmxm5xyV58/T5Hyf5++sy\ns7bx3jkzKzdvzvTk5s6ZnmzvnJmzH5s5Zzax/5uV650z85Y3a8705HrnTE9u3pyZtX9/ciZnRifd\nc6Yz11q7rrV2fWboyb1u+r2WyRnp5y6YO5T89ed5eu49XzpzVbU7k/875ac2Ms5ZP9cCuR9K8q9b\na2vT962fL73Lq6r7ZbJO/nDB3Lz50pU7luTu1tqHp6/fa75Mx/IFv9enn33vfOnKTcfRO196cr3z\npSfXO19m5ebNl1m5RczI9c6XecubNV96cnN/J3XkHpgF5st9mA6+hU7GDp7snB4+I6eDj9jBp7kd\n0cN7cjq4Dq6D6+Dzcjq4Dr5ZOvgW0sHn5+b1qZ6cDq6Dd+rJ6eA6+L305HRwHbyTDr61Hbwnu4o9\nfNts1wk4D8/kjKnjbswCvwi3QlVdkMkZd29b8P27q+qdmVwK8I9bawvlkvxqJhvq2gaH2JK8oaqu\nrapLF8w8Mslnkryyqq6rqpdV1RkbXO6z0lEeOwfY2ieT/HKSjyf5VJLbWmtvWCD63iRfV1UPrKp9\nmZzBeN4GxviQ1tqnps9vSvKQDWQ36/uS/NFGAlX181X1iSTfmeT5C2a+JcknW2vv2vgQ85yqendV\nvaKq7r9g5nGZrJO3VdU1VfVVG1zm1yW5ubX2kQXf/2NJfmn6ufxykuctmHtf/u8/TlySnnmzbhtf\neM5sdN+wQG7unFmfXXTOnJjbyJzpGOtCc2ZdbuE5M+OzmTtn1uUWnjPrcnPnzPr9eyb/R9qtJxw4\ndv5uGvp7oS9XVack+cdJXr9orqpemcm8/pIkv7Zg7jlJrjphu9jIOH9+Ol9eXFV7F8w9Osl3VNXB\nqvqjqnrsRj6XTIr8G9cdmPflfiDJ66rqxkw+zxfOy2VS4PdU1YHpW56Z7n3M+t/rD8wC86Ujt6iZ\nub75Mis3b77MyM2dLz3j7J0vM3Jz50vP8pKe+TIjN3e+dOQ+m8Xmy32VDj6bDt5vx/RwHbzT0jt4\nsnN6uA6+UE4H18F18Hv/DDq4Dj6UDj6bDt5PB59NB5/SwRcepw6ug29knDq4Dr6Rcd7XOvis7Cr2\n8G2zXSfgbIuq2p/JJZl+rGdSfoHW2rHW2hMyObvuiVX1NxZYzjcn+XRr7doBw/za1tpXJnlakn9e\nVRctkNmTyaUKX9Ja+4okn8/kEoMLqapTkzwjyZULvv/+mfxCf2SSL0pyRlV917xca+0DmVyK8A2Z\n7CzfmckZlxs2Petxof8LY7Oq6rIkRzO55+DCWmuXtdbOm+aes8By9iX5l1nwIGWdl2Syw35CJgeD\nL1owtyeTe8P+7SQ/meT3qqo2sNx/mAUPWKd+KMmPTz+XH8/0/1ZZwPcl+WdVdW0ml1e8u+tNfdt4\n35wZsm/oyy0yZ7qyi8yZE3PTZSw0ZzqWt9Cc6cgtNGd6PtPeOdORW2jOdOTmzpn1+/dMCtlcQ34v\nLJD7j0ne3Fp7y6K51tr3ZrIP/kCS71ggd1EmB2FdpXPe8p6XyefzVZms/59eMLc3yV2ttQNJfjOT\n+7Vu5HOZOV9m5H48k/s4n5vklZlcZrI3l+TLM/nHtxdX1duT3J51v5uG/l5fYq5zvvTl+uZLV66q\nvihz5kvP8nrnS0+ud74s8Ll0zpeeXO986cpNf5f0zhfuTQefOd4d18Gnf999uofr4J2W2sGTndPD\ndfCFx6mD6+A6+L3p4N108PsoHXzmeHXwOXTwmXRwHVwH18H7cjq4Dr7tHXxOVg8fU9uG+14l+Zok\n/+OEr5+X5HkLZi/IBu99O82dksm9IX9iE+N+fubcX3T6vn+XyRmA12dylt0dSX5nwPJesODyHprk\n+hO+/rokV29gOd+S5A0beP8lSV5+wtfPTvIfB/x8/zbJP1t0XSf5UJKHTZ8/LMmHNjJH0nPf21m5\nJN+T5H8n2Td0XiY5v+d7f51L8jczORP1+unjaCb/d8VDN7i8hb+XyQHgN5zw9ceSnLPgZ7Mnyc1J\nzt3AOrwtSU2fV5JDA36GxyV5e8fr99rGF5kzXblF5sys3CJzpm+ZfXNmfW7RObPA8jo/7xmf6dw5\n0/PZ9M6ZGcubO2cW+Pk658y69zw/kwOpz+b/3rv4C35X9eT+xQlfX58F7l9+Yi7Jz2ZyqcBdG8md\n8NpFmXOP9WnuZzP5nXR8vqxlchnsjS7vSQsu718k+WCSR56w/m7bwOfyoCR/leS0BT+Xn8zkEtIn\nbkfvH/DzfVOS31v3Wtfv9f88b77MyP3OCd/vnC99ub75Mm95s+bLjNwt8+bLgsu713yZlZs3X+Z8\nLjPny4zc1fPmy4I/373my335sX6eRgef9fe8YMHlnZQdvGt95z7aw/vmZXTwUTr49Hs7oof3La9v\nzqzPLTpnFlhe5+c94/PUwe+d08Fn5KKD6+A6+H3qsX6eRgef9fe8YMHl6eALzpPo4Os/Tx18Rm6R\nOTMrN2/O9C2vb86szy06ZxZYXufnPePz1MHvndPBZ+Sig+vg94EO3pNdyR6+nY/tWejkl89fZHLm\n+KlJ3pXkyxfMXpANHnhMJ++rkvzqBnPnJDl7+vz0JG9J8s0b/DshaEX1AAAEiklEQVTutcH1vPeM\nJGee8Pz/S/LUBbNvSfLF0+cvSPJLGxjj7yb53g28/6szuazdvuln+1tJfnjB7IOnf54/3cGcvei6\nzuT+e8+dPn9ukl/cyBzJBg86kjw1yfvTUcQXyD72hOc/nOT3Nzqf01NgOpb3sBOe/3iS310w908z\nuc9gMilnn8i05M0b5/TzuWaDn8sHkjxp+vwpSa5dMHd83uzKZFv+vnXv79zG582ZWbl5c6ZneXPn\nTE+2d87MG+usOdOzvN4505PrnTN94+ybMz3L650zPbl5c6Zz/57J/wH1rOnrL826fxyZletbB3OW\n9wOZ7OtPn/G5dOX+XpLHnPDz/3KSX97IOKevH97AOB92wvJ+NZP7Hy+Se+Hxzz6T34fvWHSc07n2\nWxv4XL45kwOBx01f//4k/23B3PH5sjfJG5M8uWc7e1Kmv9fnzZdZuXnzpWd5vfOlKzddZ73zZd44\nZ82XnnH2zpeeXO986Rtn33yZ8bnsmTdfesa58Hy5rz2ig896rw4+Z33nPtrDO3I6ePfyltLBp9/b\nET28J6eD6+Bdn4sOvvjnooN35KKDz/pcdHAd/MT36uBz1nd08FnL08F18M4507M8HVwH18F18IXG\nOWu+9IzzPtvB1302K9nDt/OxfQue3Pf0w5mcNXrZgplXZ3KJuHsyOQvr+xfMfW0ml9t7dyaXe3xn\nJpdZmpd7fJLrprn3Jnn+gJ+zcwOe8d5HZXIQ9q5Miv1Cn8s0+4QkB6dj/cMk918wd0YmZ8qdtcGf\n619lcuDw3iS/nWTvgrm3ZFLK3pXkKRtZ15ncZ/CNST6S5H8mecCCuW+bPj+SyVnH9zqTd0buo5mU\nquNz5qUbGOt/m342707ymiQP3+h8zuwC07W8307ynunyrsoJpXJO7tRMzrZ8b5I/T8eOc9Y4k1yR\n5J9ucB1+bZJrp+v/bUkuXDD3o5nsLz6cyS+mWpfp3MbnzZmeXO+c6cnNnTM92d45Mys3b870LK93\nzvTkeudM3zj75kzP8nrnTE9u3pzp3L9nsh9++3RdXpl1+7ae3I9M58zRJH+Z5GUL5o5m8nvw+Nif\nPy+XycHU/5quv/dmcgb6/RZZ3rr3dB14zBrnn5ywvN9Jsn/B3NmZnF39nkz+b5i/teg4Mzno7/zH\nt57lfdt0We+a5h+1YO6XMjnI/VAml2/t+z32pPzf8tk7X3pyvfOlJ9c7X7pyi8yXWcubN196xtk7\nX3pyvfOlb5x986Vneb3zpSe38Hy5Lz6ig3e9Vwefs75zH+zhM3I6+IgdvG87nzdnenJL6eE9OR1c\nB9fBdfB5OR18xjj75kvP8nRwHfz4e3XwOes7Ovis5engOnjnnOlZng6ug+vgOnjv8ubNl55x3mc7\neMcyV7KHb9fj+OXMAAAAAAAAAACAAXZt9wAAAAAAAAAAAGAncwIOAAAAAAAAAABsghNwAAAAAAAA\nAABgE5yAAwAAAAAAAAAAm+AEHAAAAAAAAAAA2AQn4AAAAAAAAAAAwCY4AQcAAAAAAAAAADbBCTgA\nAAAAAAAAALAJ/z9r3tLZX/X4ZgAAAABJRU5ErkJggg==\n", 154 | "text/plain": [ 155 | "" 156 | ] 157 | }, 158 | "metadata": {}, 159 | "output_type": "display_data" 160 | } 161 | ], 162 | "source": [ 163 | "################################################\n", 164 | "############---MULTIPLE ENCODINGS---############\n", 165 | "################################################\n", 166 | "\n", 167 | "#select some song here\n", 168 | "song = songs.iloc[3829]\n", 169 | "\n", 170 | "song_id = song.id\n", 171 | "lyric = song.a_lyrics\n", 172 | "\n", 173 | "#get borders and SSM from stores\n", 174 | "segm_borders = borders.loc[song_id].borders\n", 175 | "ssm_lines_string = ssms_string.loc[song_id].ssm\n", 176 | "\n", 177 | "print('segment borders:', segm_borders, '\\n')\n", 178 | "print(pretty_print_tree(tree_structure(normalize_lyric(lyric))))\n", 179 | "\n", 180 | "#can show different encoding here, for now it's the same everywhere\n", 181 | "ssm.draw_ssm_encodings_side_by_side(ssm_some_encoding=ssm_lines_string, ssm_other_encoding=ssm_lines_string, ssm_third_encoding=ssm_lines_string,\\\n", 182 | " representation_some = 'string', representation_other = 'string', representation_third = 'string',\\\n", 183 | " artist_name=song.a_name, song_name=song.a_song, genre_of_song='undef')" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 4, 189 | "metadata": { 190 | "collapsed": true, 191 | "scrolled": false 192 | }, 193 | "outputs": [], 194 | "source": [ 195 | "#DEBUGGING: how the segment borders are computed\n", 196 | "\n", 197 | "#The indices of lines that end a segment\n", 198 | "def segment_borders(lyric):\n", 199 | " normalized_lyric = normalize_lyric(lyric)\n", 200 | " segment_lengths = reduce(lambda x, block: x + [len(block)], tree_structure(lyric), [])\n", 201 | " segment_indices = []\n", 202 | " running_sum = -1\n", 203 | " for i in range(len(segment_lengths)):\n", 204 | " running_sum += segment_lengths[i]\n", 205 | " segment_indices.append(running_sum)\n", 206 | " return segment_indices[:-1]\n", 207 | "\n", 208 | "def segment_count(lyric):\n", 209 | " return 1 + len(segment_borders(lyric))" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": 5, 215 | "metadata": {}, 216 | "outputs": [ 217 | { 218 | "data": { 219 | "text/plain": [ 220 | "array([[ 1. , 0.57894737, 0.2826087 , ..., 0.225 ,\n", 221 | " 0.23076923, 0.0625 ],\n", 222 | " [ 0.57894737, 1. , 0.34782609, ..., 0.2 ,\n", 223 | " 0.23076923, 0.10526316],\n", 224 | " [ 0.2826087 , 0.34782609, 1. , ..., 0.17391304,\n", 225 | " 0.23913043, 0.10869565],\n", 226 | " ..., \n", 227 | " [ 0.225 , 0.2 , 0.17391304, ..., 1. ,\n", 228 | " 0.3 , 0.15 ],\n", 229 | " [ 0.23076923, 0.23076923, 0.23913043, ..., 0.3 ,\n", 230 | " 1. , 0.11538462],\n", 231 | " [ 0.0625 , 0.10526316, 0.10869565, ..., 0.15 ,\n", 232 | " 0.11538462, 1. ]])" 233 | ] 234 | }, 235 | "execution_count": 5, 236 | "metadata": {}, 237 | "output_type": "execute_result" 238 | } 239 | ], 240 | "source": [ 241 | "#DEBUGGING: how an SSM is computed\n", 242 | "\n", 243 | "#flattened tree structure, does not differentiate between segment and line border\n", 244 | "def line_structure(text):\n", 245 | " return reduce(lambda x, segment: x + segment, tree_structure(text), [])\n", 246 | "\n", 247 | "lyric = song.a_lyrics\n", 248 | "normalized_lyric = normalize_lyric(lyric)\n", 249 | "line_encoding_string = line_structure(normalized_lyric)\n", 250 | "ssm.self_similarity_matrix(line_encoding_string, metric=lambda x,y: pow(dist.string_similarity(x,y), 1))" 251 | ] 252 | } 253 | ], 254 | "metadata": { 255 | "kernelspec": { 256 | "display_name": "Python 3", 257 | "language": "python", 258 | "name": "python3" 259 | }, 260 | "language_info": { 261 | "codemirror_mode": { 262 | "name": "ipython", 263 | "version": 3 264 | }, 265 | "file_extension": ".py", 266 | "mimetype": "text/x-python", 267 | "name": "python", 268 | "nbconvert_exporter": "python", 269 | "pygments_lexer": "ipython3", 270 | "version": "3.6.2" 271 | } 272 | }, 273 | "nbformat": 4, 274 | "nbformat_minor": 2 275 | } 276 | --------------------------------------------------------------------------------