├── .gitattributes
├── assets
│   ├── block.png
│   ├── model.png
│   ├── siss.jpg
│   ├── spes.jpg
│   ├── result.png
│   ├── result.fig.gif
│   ├── siss-plot.png
│   └── spes-plot.png
├── SISS
│   ├── dataAugmentation.py
│   └── dataExtractTraining.py
├── SPES
│   ├── dataExtractTraining.py
│   ├── dataAugmentation.py
│   └── spes.ipynb
└── README.md
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/assets/block.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/noobcoder17/ISCHEMIC-STROKE-LESION-SEGMENTATION-BY-DEEP-LEARNING-ISLES-2015/HEAD/assets/block.png
--------------------------------------------------------------------------------
/assets/model.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/noobcoder17/ISCHEMIC-STROKE-LESION-SEGMENTATION-BY-DEEP-LEARNING-ISLES-2015/HEAD/assets/model.png
--------------------------------------------------------------------------------
/assets/siss.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/noobcoder17/ISCHEMIC-STROKE-LESION-SEGMENTATION-BY-DEEP-LEARNING-ISLES-2015/HEAD/assets/siss.jpg
--------------------------------------------------------------------------------
/assets/spes.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/noobcoder17/ISCHEMIC-STROKE-LESION-SEGMENTATION-BY-DEEP-LEARNING-ISLES-2015/HEAD/assets/spes.jpg
--------------------------------------------------------------------------------
/assets/result.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/noobcoder17/ISCHEMIC-STROKE-LESION-SEGMENTATION-BY-DEEP-LEARNING-ISLES-2015/HEAD/assets/result.png
--------------------------------------------------------------------------------
/assets/result.fig.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/noobcoder17/ISCHEMIC-STROKE-LESION-SEGMENTATION-BY-DEEP-LEARNING-ISLES-2015/HEAD/assets/result.fig.gif
--------------------------------------------------------------------------------
/assets/siss-plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/noobcoder17/ISCHEMIC-STROKE-LESION-SEGMENTATION-BY-DEEP-LEARNING-ISLES-2015/HEAD/assets/siss-plot.png
--------------------------------------------------------------------------------
/assets/spes-plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/noobcoder17/ISCHEMIC-STROKE-LESION-SEGMENTATION-BY-DEEP-LEARNING-ISLES-2015/HEAD/assets/spes-plot.png
--------------------------------------------------------------------------------
/SISS/dataAugmentation.py:
--------------------------------------------------------------------------------
1 | from keras.preprocessing.image import ImageDataGenerator
2 | import matplotlib.pyplot as plt
3 | import numpy as np
4 | import random
5 |
6 | ign = np.load('./data/normal_data/data/dataset_2/training/imageNames.npy')
7 | imagenames = []
8 | for i in range(ign.shape[0]):
9 | imagenames.append(ign[i][0])
10 |
11 | random.shuffle(imagenames)
12 | print('total images={}'.format(len(imagenames)))
13 | split = int(len(imagenames)*0.8)
14 |
15 | trainImageNames = imagenames[:split]
16 | validImageNames = imagenames[split:]
17 | print('training images={}'.format(len(trainImageNames)))
18 | print('validation images={}'.format(len(validImageNames)))
19 |
20 | print(trainImageNames[:5])
21 | print(validImageNames[:5])
22 |
23 | # number of augmented images to generate per input image
24 | ifi = 7
25 |
26 | data_gen_args = dict(
27 | rescale=1.0/255,
28 | rotation_range=30,
29 | horizontal_flip=True,
30 | vertical_flip=True,
31 | shear_range=0.2,
32 | zoom_range=0.1)
33 |
34 | def trainset(b_size):
35 | print('creating augmented training images...')
36 | seed = 1337
37 | image_datagen = ImageDataGenerator(**data_gen_args)
38 | mask_datagen = ImageDataGenerator(**data_gen_args)
39 |
40 | save_here_img = 'data/augmented_data/training/image'
41 | save_here_mask = 'data/augmented_data/training/mask'
42 |
43 | k=0
44 | for i in range(len(trainImageNames)):
45 | normalimgPath = 'data/normal_data/data/dataset_2/training/image/{}'.format(trainImageNames[i])
46 | normalmaskPath = 'data/normal_data/data/dataset_2/training/mask/{}'.format(trainImageNames[i])
47 | img = np.expand_dims(plt.imread(normalimgPath),0)
48 | mask = np.expand_dims(plt.imread(normalmaskPath),0)
49 | for x, y, val in zip(image_datagen.flow(img,batch_size=b_size,seed=seed,save_to_dir=save_here_img,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
50 | mask_datagen.flow(mask,batch_size=b_size,seed=seed,save_to_dir=save_here_mask,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
51 |                              range(ifi)):  # zip with range(ifi) caps this at ifi augmented batches
52 |             # the flows already wrote the augmented image/mask pair to disk via save_to_dir
53 |             k+=1
54 |
55 | def validset(b_size):
56 | print('creating augmented validation images...')
57 | seed = 1243
58 | image_datagen = ImageDataGenerator(**data_gen_args)
59 | mask_datagen = ImageDataGenerator(**data_gen_args)
60 |
61 | save_here_img = 'data/augmented_data/validation/image'
62 | save_here_mask = 'data/augmented_data/validation/mask'
63 |
64 | k=0
65 | for i in range(len(validImageNames)):
66 | normalimgPath = 'data/normal_data/data/dataset_2/training/image/{}'.format(validImageNames[i])
67 | normalmaskPath = 'data/normal_data/data/dataset_2/training/mask/{}'.format(validImageNames[i])
68 | img = np.expand_dims(plt.imread(normalimgPath),0)
69 | mask = np.expand_dims(plt.imread(normalmaskPath),0)
70 | for x, y, val in zip(image_datagen.flow(img,batch_size=b_size,seed=seed,save_to_dir=save_here_img,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
71 | mask_datagen.flow(mask,batch_size=b_size,seed=seed,save_to_dir=save_here_mask,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
72 |                              range(ifi)):  # zip with range(ifi) caps this at ifi augmented batches
73 |             # the flows already wrote the augmented image/mask pair to disk via save_to_dir
74 |             k+=1
75 |
76 |
77 | trainset(3)
78 | validset(3)
--------------------------------------------------------------------------------
/SPES/dataExtractTraining.py:
--------------------------------------------------------------------------------
1 | # normal training-data extraction from the .nii files:
2 | # all-black slices are skipped and each image is cropped;
3 | # only non-black OT slices are considered
4 | # each image is 96 x 96 and saved in .jpg format in grayscale
5 |
6 |
7 | import os
8 | import nibabel as nib
9 | import numpy as np
10 | from tqdm import tqdm
11 | import matplotlib.pyplot as plt
12 |
13 |
14 | data_dict = {
15 | "CBF" : "cbf",
16 | "CBV" : "cbv",
17 | "DWI" : "dwi",
18 | "T1c" : "t1c",
19 | "T2" : "t2",
20 | "Tmax" : "tmax",
21 | "TTP" : "ttp",
22 | #"OT" : 'ot'
23 | }
24 | data_types = ["OT"]
25 |
26 | dir_path = "D:\\Machine Learning\\data\\SPES_2015\\training"
27 |
28 | folders = []
29 | for i in range(1,31):
30 | folders.append(str(i))
31 |
32 | prefix = ["%.2d" % i for i in range(1,100)]
33 | imgNum = ["%.3d" % i for i in range(0,999999)]
34 |
35 | upper_black_slide = 0
36 | lower_black_slide = 0
37 | new_subject = True
38 | blank_slide_list = []
39 |
40 | print("Saving OT Images....")
41 | img_name = []
42 |
43 | c=-1
44 | for folder in tqdm(folders):
45 | c+=1
46 | flairImg=0
47 | validIndex = []
48 | new_subject_list = []
49 | upper_black_slide = 0
50 | lower_black_slide = 0
51 | new_subject = True
52 | sub_folders = os.listdir(os.path.join(dir_path,folder))
53 | for sub_folder in sub_folders:
54 | file_names = os.listdir(os.path.join(dir_path,folder,sub_folder))
55 | for each_file in file_names:
56 | if ".nii" in each_file:
57 | data = nib.load(os.path.join(dir_path,folder,sub_folder,each_file))
58 | data = data.get_fdata().T
59 | if data_types[0] in each_file:
60 | for i in range(data.shape[0]):
61 | temp = np.sum(data[i])
62 | if temp!=0:
63 | new_subject = False
64 | name = str("./data/normal_data/training/mask/"+prefix[c]+"_"+imgNum[flairImg]+".jpg")
65 | ss = [str(prefix[c]+"_"+imgNum[flairImg]+".jpg")]
66 | img_name.append(ss)
67 | flairImg+=1
68 | img = data[i]
69 | img = img[7:103,:]
70 | plt.imsave(name,img,cmap='gray')
71 | else :
72 |                             if new_subject :  # still above the first non-black slice
73 |                                 upper_black_slide+=1
74 |                             else:  # past the first non-black slice: count as trailing
75 |                                 lower_black_slide+=1
76 | new_subject_list.append(upper_black_slide)
77 | new_subject_list.append(lower_black_slide)
78 | blank_slide_list.append(new_subject_list)
79 |
80 | np.save("./data/normal_data/training/imageNames",img_name)
81 | #other data extract
82 |
83 | for key in data_dict:
84 | print("Saving {} Images....".format(key))
85 | c=-1
86 | count = 0
87 | raw_data = []
88 | p = 0
89 | for folder in tqdm(folders):
90 | c+=1
91 | p+=1
92 | count=0
93 | sub_folders = os.listdir(os.path.join(dir_path,folder))
94 | for sub_folder in sub_folders:
95 | if 'penumbralabel' in sub_folder:
96 | sub_sub_folders = os.listdir(os.path.join(dir_path,folder,sub_folder))
97 | for sub_sub_folder in sub_sub_folders:
98 | file_names = os.listdir(os.path.join(dir_path,folder,sub_folder,sub_sub_folder))
99 | for each_file in file_names:
100 | if ".nii" in each_file:
101 | data = nib.load(os.path.join(dir_path,folder,sub_folder,sub_sub_folder,each_file))
102 | data = data.get_fdata().T
103 | if key in each_file:
104 | for i in range(blank_slide_list[p-1][0],data.shape[0]-blank_slide_list[p-1][1]):
105 | name = str("./data/normal_data/training/"+str(key)+"/"+prefix[c]+"_"+imgNum[count]+".jpg")
106 | img = data[i]
107 | img = img[7:103,:]
108 | plt.imsave(name,img,cmap='gray')
109 | count+=1
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
--------------------------------------------------------------------------------
/SISS/dataExtractTraining.py:
--------------------------------------------------------------------------------
1 | # normal training-data extraction from the .nii files:
2 | # all-black slices are skipped and each image is cropped;
3 | # only non-black OT slices are considered
4 | # each image is 192 x 192 and saved in .jpg format in grayscale
5 |
6 |
7 | import os
8 | import nibabel as nib
9 | import numpy as np
10 | from tqdm import tqdm
11 | import matplotlib.pyplot as plt
12 |
13 |
14 | data_dict = {
15 | "Flair" : "flair",
16 | "T1" : "t1",
17 | "T2" : "t2",
18 | "DWI" : "dwi",
19 | #"OT" : 'ot'
20 | }
21 | #data_types = ["Flair"]
22 | data_types = ["OT"]  # volume type used to detect non-blank slices (see the commented alternative above)
23 |
24 | dir_path = "D:\\Machine Learning\\data\\SISS_2015\\data\\training"
25 |
26 | folders = []
27 | for i in range(1,29):
28 | folders.append(str(i))
29 |
30 | prefix = ["%.2d" % i for i in range(1,100)]
31 | imgNum = ["%.3d" % i for i in range(0,999999)]
32 |
33 | upper_black_slide = 0
34 | lower_black_slide = 0
35 | new_subject = True
36 | blank_slide_list = []
37 |
38 | print("Saving Flair Images....")
39 | img_name = []
40 |
41 | c=-1
42 | for folder in tqdm(folders):
43 | c+=1
44 | flairImg=0
45 | validIndex = []
46 | new_subject_list = []
47 | upper_black_slide = 0
48 | lower_black_slide = 0
49 | new_subject = True
50 | sub_folders = os.listdir(os.path.join(dir_path,folder))
51 | for sub_folder in sub_folders:
52 | file_names = os.listdir(os.path.join(dir_path,folder,sub_folder))
53 | for each_file in file_names:
54 | if ".nii" in each_file:
55 | data = nib.load(os.path.join(dir_path,folder,sub_folder,each_file))
56 | data = data.get_fdata().T
57 | if data_types[0] in each_file:
58 | for i in range(data.shape[0]):
59 | temp = np.sum(data[i])
60 | if temp!=0:
61 | new_subject = False
62 | #name = str("./data/normal_data/training/mask/"+prefix[c]+"_"+imgNum[flairImg]+".jpg")
63 | #name = str("./full_data/normal_data/training/mask/"+prefix[c]+"_"+imgNum[flairImg]+".jpg")
64 | name = str("./full_data/normal_data/training/flair/"+prefix[c]+"_"+imgNum[flairImg]+".jpg")
65 | ss = [str(prefix[c]+"_"+imgNum[flairImg]+".jpg")]
66 | img_name.append(ss)
67 | flairImg+=1
68 | img = data[i]
69 | img = img[19:211,19:211]
70 | plt.imsave(name,img,cmap='gray')
71 | else :
72 |                             if new_subject :  # still above the first non-black slice
73 |                                 upper_black_slide+=1
74 |                             else:  # past the first non-black slice: count as trailing
75 |                                 lower_black_slide+=1
76 | new_subject_list.append(upper_black_slide)
77 | new_subject_list.append(lower_black_slide)
78 | blank_slide_list.append(new_subject_list)
79 |
80 | np.save("./full_data/normal_data/training/imageNames",img_name)
81 | #other data extract
82 |
83 | for key in data_dict:
84 | print("Saving {} Images....".format(key))
85 | c=-1
86 | count = 0
87 | raw_data = []
88 | p = 0
89 | for folder in tqdm(folders):
90 | c+=1
91 | p+=1
92 | count=0
93 | sub_folders = os.listdir(os.path.join(dir_path,folder))
94 | for sub_folder in sub_folders:
95 | file_names = os.listdir(os.path.join(dir_path,folder,sub_folder))
96 | for each_file in file_names:
97 | if ".nii" in each_file:
98 | data = nib.load(os.path.join(dir_path,folder,sub_folder,each_file))
99 | data = data.get_fdata().T
100 | if key in each_file:
101 | for i in range(blank_slide_list[p-1][0],data.shape[0]-blank_slide_list[p-1][1]):
102 | #name = str("./data/normal_data/training/"+str(key)+"/"+prefix[c]+"_"+imgNum[count]+".jpg")
103 | name = str("./full_data/normal_data/training/"+str(key)+"/"+prefix[c]+"_"+imgNum[count]+".jpg")
104 | img = data[i]
105 | img = img[19:211,19:211]
106 | plt.imsave(name,img,cmap='gray')
107 | count+=1
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ISLES (ISCHEMIC STROKE LESION SEGMENTATION)
2 |
3 | ## Update:
4 |
5 | 24th Dec, 2025 - Hi everyone, I no longer have access to the training data. It was stored in the Google Drive of my institutional email, and since I graduated in 2020 I no longer have access to that email or its Drive.
6 |
7 | ### Visual Result
8 |
9 | ![Visual result](assets/result.fig.gif)
10 |
11 | Fig 1: Segmentation on the SISS dataset.
12 |
13 |
14 | ### 1) About
15 | **The purpose of this project is to build a CNN model for stroke lesion segmentation using the ISLES 2015 dataset.**
16 | Recent studies have shown the potential of magnetic resonance imaging (MRI) for diagnosing ischemic stroke. Reviewing the hundreds of slices produced by an MRI scan, however, takes considerable time and is prone to human error. It is widely accepted among medical practitioners that automated segmentation of ischemic stroke lesions could significantly speed up the start of a patient’s treatment. Automated segmentation can locate the lesioned tissue and estimate its volume, which helps clinical practice by providing a better assessment and evaluation of the risks of each treatment. These reasons highlight the need for a fully automatic ischemic stroke lesion segmentation approach built on a flexible, fast and effective deep neural network.
17 |
18 | ### 2) Dataset
19 | ### 2.1) SISS Dataset
20 |
21 | *Scans:
22 | There are 4 types of MRI scans for each subject*
23 |
24 | *File Format:
25 | .nii*
26 |
27 | *Image Shape:
28 | 240 (slice width) × 240 (slice height) × 155 (number of slices) × 4 (modalities)*
29 |
30 | *Modalities:
31 | 4 (Flair, T1, T2, DWI)*
32 |
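The extraction scripts read these volumes with `nibabel`, transpose them so that axis 0 indexes the slices, skip the all-black slices, and crop each remaining slice before saving it as a grayscale JPEG. A minimal sketch of that flow, using a placeholder file path and the SISS crop from `SISS/dataExtractTraining.py`:

```python
# Minimal sketch: load one SISS volume and crop a slice, as done in
# SISS/dataExtractTraining.py. The .nii path below is a placeholder.
import nibabel as nib
import matplotlib.pyplot as plt

volume = nib.load("path/to/some_flair_scan.nii")  # placeholder path
data = volume.get_fdata().T          # (240, 240, 155) -> (155, 240, 240)

slice_2d = data[data.shape[0] // 2]  # take a middle slice
cropped = slice_2d[19:211, 19:211]   # SISS crop: 240 x 240 -> 192 x 192

plt.imsave("slice.jpg", cropped, cmap="gray")
```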
33 |
34 |
35 | ![SISS scans](assets/siss.jpg)
36 |
37 | Fig 2: SISS dataset.
38 |
39 |
40 |
41 | ### 2.2) SPES Dataset
42 |
43 | *Scans:
44 | There are 7 types of MRI scans for each subject*
45 |
46 | *File Format:
47 | .nii*
48 |
49 | *Image Shape:
50 | 96 (slice width) × 110 (slice height) × 71 (number of slices) × 7 (modalities)*
51 |
52 | *Modalities:
53 | 7 (CBF, CBV, DWI, T1c, T2, Tmax, TTP)*
54 |
55 |
56 |
57 | ![SPES scans](assets/spes.jpg)
58 |
59 | Fig 3: SPES dataset.
60 |
61 |
62 | ### 2.3) Data Augmentation
63 | | Method          | Value   |
64 | | --------------- | ------- |
65 | | rescale         | 1.0/255 |
66 | | rotation_range  | 30      |
67 | | horizontal_flip | True    |
68 | | vertical_flip   | True    |
69 | | shear_range     | 0.2     |
70 | | zoom_range      | 0.1     |
71 |
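These are the arguments passed to Keras's `ImageDataGenerator` in both `dataAugmentation.py` scripts. A condensed sketch of how they are applied to an image/mask pair; the arrays here are random stand-ins for loaded slices, and sharing the same `seed` between the two `flow()` calls is what keeps the image and mask transforms geometrically aligned:

```python
# Sketch of the paired augmentation set-up from SISS/dataAugmentation.py.
# The input arrays are stand-ins; in the repo, save_to_dir writes the
# augmented pairs to disk instead of the `pass` below.
import numpy as np
from keras.preprocessing.image import ImageDataGenerator

data_gen_args = dict(
    rescale=1.0 / 255,
    rotation_range=30,
    horizontal_flip=True,
    vertical_flip=True,
    shear_range=0.2,
    zoom_range=0.1)

image_datagen = ImageDataGenerator(**data_gen_args)
mask_datagen = ImageDataGenerator(**data_gen_args)

img = np.random.rand(1, 192, 192, 3)   # one image, batch axis first
mask = np.random.rand(1, 192, 192, 3)  # its mask

seed = 1337  # identical seed => identical random transforms in both flows
for x, y, _ in zip(image_datagen.flow(img, batch_size=1, seed=seed),
                   mask_datagen.flow(mask, batch_size=1, seed=seed),
                   range(7)):  # 7 augmented pairs per input image
    pass
```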
72 | ### 3) My CNN Architecture
73 |
74 | ![Model architecture](assets/model.png)
75 |
76 | Fig 4: The architecture of the proposed framework for ischemic stroke lesion segmentation.
77 |
78 |
79 | ![Inception block](assets/block.png)
80 |
81 | Fig 5: A schematic diagram of the proposed Inception block.
82 |
83 |
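The encoder and decoder stages in Fig 4 are built from the Inception-style block in Fig 5. A condensed sketch, equivalent to the `conv_block` defined in `SPES/spes.ipynb` (written here against `tf.keras`; the notebook imports from `tensorflow.python.keras`):

```python
# Condensed sketch of the Inception-style block from SPES/spes.ipynb.
from tensorflow.keras import layers

def conv_block(inputs, filters):
    """Three parallel stacks of 1, 2 and 3 conv-BN-ReLU units,
    concatenated with the block's input (a residual-style shortcut)."""
    def conv_bn_relu(x):
        x = layers.Conv2D(filters, 3, padding='same',
                          kernel_initializer='he_normal')(x)
        x = layers.BatchNormalization(axis=3)(x)
        return layers.Activation('relu')(x)

    paths = []
    for depth in (1, 2, 3):          # path i applies i stacked conv units
        x = inputs
        for _ in range(depth):
            x = conv_bn_relu(x)
        paths.append(x)

    out = layers.concatenate(paths + [inputs])  # channel-wise concat
    return layers.Activation('relu')(out)

# Example: a 96x96x3 input (the SPES slice size used in the notebook)
x_in = layers.Input(shape=(96, 96, 3))
x_out = conv_block(x_in, 16)  # 16 is the notebook's initial filter number
```

Each parallel path applies one, two, or three stacked 3×3 conv-BN-ReLU units, so the concatenated output mixes receptive fields of different sizes while the shortcut preserves the block input.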
84 |
85 | ### 4) Evaluation Metric
86 | True Positive (TP): the number of true positives, i.e., positives correctly identified as positive.
87 |
88 | True Negative (TN): the number of true negatives, i.e., negatives correctly identified as negative.
89 |
90 | False Positive (FP): the number of false positives, i.e., negatives incorrectly identified as positive.
91 |
92 | False Negative (FN): the number of false negatives, i.e., positives incorrectly identified as negative.
93 | #### 1. Dice Similarity Coefficient:
94 | 
95 |
96 | #### 2. Sensitivity
97 | 
98 |
99 | #### 3. Specificity
100 | 
101 |
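In symbols: DSC = 2TP / (2TP + FP + FN), sensitivity = TP / (TP + FN), and specificity = TN / (TN + FP). `SPES/spes.ipynb` computes these directly on Keras tensors, treating elementwise products of the (soft) prediction and ground truth as TP/FP/FN/TN counts; lightly condensed:

```python
# Soft TP/FP/FN/TN counts on Keras tensors, condensed from SPES/spes.ipynb.
from tensorflow.keras import backend as K

def dsc(y_true, y_pred):
    tp = K.sum(y_true * y_pred)
    fn = K.sum(y_true * (1 - y_pred))
    fp = K.sum((1 - y_true) * y_pred)
    return (2 * tp) / (2 * tp + fn + fp + K.epsilon())  # Dice coefficient

def sensitivity(y_true, y_pred):
    tp = K.sum(y_true * y_pred)
    fn = K.sum(y_true * (1 - y_pred))
    return tp / (tp + fn + K.epsilon())

def specificity(y_true, y_pred):
    tn = K.sum((1 - y_true) * (1 - y_pred))
    fp = K.sum((1 - y_true) * y_pred)
    return tn / (tn + fp + K.epsilon())  # K.epsilon() avoids division by zero
```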
102 | ### 5) Optimizer and Hyperparameters
103 | #### 5.1) Optimizer
104 | [Adam Optimizer](https://arxiv.org/pdf/1412.6980.pdf)
105 | #### 5.2) Hyperparameters
106 | 
107 |
108 | Lri = initial learning rate = 0.0001
109 |
110 | decay factor (f) = 0.2
111 |
112 | step size = 2
113 |
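Together these define a step-decay schedule, lr(epoch) = Lri × f^⌊epoch / step⌋, which `spes.ipynb` wires up as a Keras `LearningRateScheduler` callback:

```python
# Step-decay learning-rate schedule as set up in SPES/spes.ipynb.
import numpy as np
import tensorflow as tf

def step_decay_schedule(initial_lr=1e-4, decay_factor=0.2, step_size=2):
    # lr(epoch) = initial_lr * decay_factor ** floor(epoch / step_size)
    def schedule(epoch):
        return initial_lr * (decay_factor ** np.floor(epoch / step_size))
    return tf.keras.callbacks.LearningRateScheduler(schedule, verbose=1)

lr = step_decay_schedule()  # epochs 0-1: 1e-4, epochs 2-3: 2e-5, ...
```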
114 | ### 6) Results
115 |
116 | ![Results](assets/result.png)
117 |
118 | Fig 6: Performance of the proposed network, in terms of Dice coefficient, on each modality of the SISS and SPES datasets
119 | for various loss functions.
120 |
121 |
122 |
123 | ![SISS training curves](assets/siss-plot.png)
124 |
125 | Fig 7: From left to right: plots of loss, DSC, and accuracy for the training and validation sets on the SISS dataset.
126 |
127 |
128 | ![SPES training curves](assets/spes-plot.png)
129 |
130 | Fig 8: From left to right: plots of loss, DSC, and accuracy for the training and validation sets on the SPES dataset.
131 |
132 |
133 | ### 7) Conclusion
134 | This study presents a modification of U-Net and Inception-Net, trained on the well-known
135 | public ISLES 2015 dataset, for the automatic segmentation of stroke lesions from MR images.
136 | Upon careful evaluation on this dataset, our model demonstrated improved efficiency and
137 | performance, outperforming several other modern CNN-based architectures. This can be
138 | attributed to the flexibility in choosing the training method and to the structure of the
139 | proposed model, which in turn enhanced training stability. The model has been tested with
140 | various loss functions across different modalities. A patient-wise study led to the
141 | conclusion that there is further scope for improvement in detecting very small lesions. In
142 | subsequent work, we aim to extend this approach to lesions from other disorders, as we
143 | believe our model holds great potential for the diagnosis and treatment of several
144 | conditions in the medical field.
145 |
--------------------------------------------------------------------------------
/SPES/dataAugmentation.py:
--------------------------------------------------------------------------------
1 | from keras.preprocessing.image import ImageDataGenerator
2 | import matplotlib.pyplot as plt
3 | import numpy as np
4 | import random
5 |
6 | ign = np.load('./data/normal_data/training/imageNames.npy')
7 | imagenames = []
8 | for i in range(ign.shape[0]):
9 | imagenames.append(ign[i][0])
10 |
11 | random.shuffle(imagenames)
12 | print('total images={}'.format(len(imagenames)))
13 | split = int(len(imagenames)*0.8)
14 |
15 | trainImageNames = imagenames[:split]
16 | validImageNames = imagenames[split:]
17 | print('training images={}'.format(len(trainImageNames)))
18 | print('validation images={}'.format(len(validImageNames)))
19 |
20 | print(trainImageNames[:5])
21 | print(validImageNames[:5])
22 |
23 | # number of augmented images to generate per input image
24 | ifi = 7
25 |
26 | data_gen_args = dict(
27 | rescale=1.0/255,
28 | rotation_range=30,
29 | horizontal_flip=True,
30 | vertical_flip=True,
31 | shear_range=0.2,
32 | zoom_range=0.1)
33 |
34 | def trainset(b_size):
35 | print('creating augmented training images...')
36 | seed = 1337
37 | image_datagen = ImageDataGenerator(**data_gen_args)
38 | mask_datagen = ImageDataGenerator(**data_gen_args)
39 |
40 | save_here_cbf = 'data/augmented_data/training/cbf'
41 | save_here_cbv = 'data/augmented_data/training/cbv'
42 | save_here_dwi = 'data/augmented_data/training/dwi'
43 | save_here_t1c = 'data/augmented_data/training/t1c'
44 | save_here_t2 = 'data/augmented_data/training/t2'
45 | save_here_tmax = 'data/augmented_data/training/tmax'
46 | save_here_ttp = 'data/augmented_data/training/ttp'
47 | save_here_mask = 'data/augmented_data/training/mask'
48 |
49 | k=0
50 | for i in range(len(trainImageNames)):
51 | normalcbfPath = 'data/normal_data/training/cbf/{}'.format(trainImageNames[i])
52 | normalcbvPath = 'data/normal_data/training/cbv/{}'.format(trainImageNames[i])
53 | normaldwiPath = 'data/normal_data/training/dwi/{}'.format(trainImageNames[i])
54 | normalt1cPath = 'data/normal_data/training/t1c/{}'.format(trainImageNames[i])
55 | normalt2Path = 'data/normal_data/training/t2/{}'.format(trainImageNames[i])
56 | normaltmaxPath = 'data/normal_data/training/tmax/{}'.format(trainImageNames[i])
57 | normalttpPath = 'data/normal_data/training/ttp/{}'.format(trainImageNames[i])
58 | normalmaskPath = 'data/normal_data/training/mask/{}'.format(trainImageNames[i])
59 | cbf = np.expand_dims(plt.imread(normalcbfPath),0)
60 | cbv = np.expand_dims(plt.imread(normalcbvPath),0)
61 | dwi = np.expand_dims(plt.imread(normaldwiPath),0)
62 | t1c = np.expand_dims(plt.imread(normalt1cPath),0)
63 | t2 = np.expand_dims(plt.imread(normalt2Path),0)
64 | tmax = np.expand_dims(plt.imread(normaltmaxPath),0)
65 | ttp = np.expand_dims(plt.imread(normalttpPath),0)
66 | mask = np.expand_dims(plt.imread(normalmaskPath),0)
67 | for a, b, c, d, e, f, g, h, val in zip(
68 | image_datagen.flow(cbf,batch_size=b_size,seed=seed,save_to_dir=save_here_cbf,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
69 | image_datagen.flow(cbv,batch_size=b_size,seed=seed,save_to_dir=save_here_cbv,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
70 | image_datagen.flow(dwi,batch_size=b_size,seed=seed,save_to_dir=save_here_dwi,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
71 | image_datagen.flow(t1c,batch_size=b_size,seed=seed,save_to_dir=save_here_t1c,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
72 | image_datagen.flow(t2,batch_size=b_size,seed=seed,save_to_dir=save_here_t2,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
73 | image_datagen.flow(tmax,batch_size=b_size,seed=seed,save_to_dir=save_here_tmax,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
74 | image_datagen.flow(ttp,batch_size=b_size,seed=seed,save_to_dir=save_here_ttp,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
75 | mask_datagen.flow(mask,batch_size=b_size,seed=seed,save_to_dir=save_here_mask,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
76 |              range(ifi)):  # zip with range(ifi) caps this at ifi augmented sets
77 |             # the flows already wrote each augmented modality/mask set to disk via save_to_dir
78 |             k+=1
79 |
80 | def validset(b_size):
81 | print('creating augmented validation images...')
82 | seed = 1337
83 | image_datagen = ImageDataGenerator(**data_gen_args)
84 | mask_datagen = ImageDataGenerator(**data_gen_args)
85 |
86 | save_here_cbf = 'data/augmented_data/validation/cbf'
87 | save_here_cbv = 'data/augmented_data/validation/cbv'
88 | save_here_dwi = 'data/augmented_data/validation/dwi'
89 | save_here_t1c = 'data/augmented_data/validation/t1c'
90 | save_here_t2 = 'data/augmented_data/validation/t2'
91 | save_here_tmax = 'data/augmented_data/validation/tmax'
92 | save_here_ttp = 'data/augmented_data/validation/ttp'
93 | save_here_mask = 'data/augmented_data/validation/mask'
94 |
95 | k=0
96 | for i in range(len(validImageNames)):
97 | normalcbfPath = 'data/normal_data/training/cbf/{}'.format(validImageNames[i])
98 | normalcbvPath = 'data/normal_data/training/cbv/{}'.format(validImageNames[i])
99 | normaldwiPath = 'data/normal_data/training/dwi/{}'.format(validImageNames[i])
100 | normalt1cPath = 'data/normal_data/training/t1c/{}'.format(validImageNames[i])
101 | normalt2Path = 'data/normal_data/training/t2/{}'.format(validImageNames[i])
102 | normaltmaxPath = 'data/normal_data/training/tmax/{}'.format(validImageNames[i])
103 | normalttpPath = 'data/normal_data/training/ttp/{}'.format(validImageNames[i])
104 | normalmaskPath = 'data/normal_data/training/mask/{}'.format(validImageNames[i])
105 | cbf = np.expand_dims(plt.imread(normalcbfPath),0)
106 | cbv = np.expand_dims(plt.imread(normalcbvPath),0)
107 | dwi = np.expand_dims(plt.imread(normaldwiPath),0)
108 | t1c = np.expand_dims(plt.imread(normalt1cPath),0)
109 | t2 = np.expand_dims(plt.imread(normalt2Path),0)
110 | tmax = np.expand_dims(plt.imread(normaltmaxPath),0)
111 | ttp = np.expand_dims(plt.imread(normalttpPath),0)
112 | mask = np.expand_dims(plt.imread(normalmaskPath),0)
113 | for a, b, c, d, e, f, g, h, val in zip(
114 | image_datagen.flow(cbf,batch_size=b_size,seed=seed,save_to_dir=save_here_cbf,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
115 | image_datagen.flow(cbv,batch_size=b_size,seed=seed,save_to_dir=save_here_cbv,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
116 | image_datagen.flow(dwi,batch_size=b_size,seed=seed,save_to_dir=save_here_dwi,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
117 | image_datagen.flow(t1c,batch_size=b_size,seed=seed,save_to_dir=save_here_t1c,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
118 | image_datagen.flow(t2,batch_size=b_size,seed=seed,save_to_dir=save_here_t2,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
119 | image_datagen.flow(tmax,batch_size=b_size,seed=seed,save_to_dir=save_here_tmax,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
120 | image_datagen.flow(ttp,batch_size=b_size,seed=seed,save_to_dir=save_here_ttp,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
121 | mask_datagen.flow(mask,batch_size=b_size,seed=seed,save_to_dir=save_here_mask,save_prefix='aug_{}'.format(str(k)),save_format='jpg'),
122 |          range(ifi)):  # zip with range(ifi) caps this at ifi augmented sets
123 |             # the flows already wrote each augmented modality/mask set to disk via save_to_dir
124 |             k+=1
125 |
126 |
127 | trainset(3)
128 | validset(3)
--------------------------------------------------------------------------------
/SPES/spes.ipynb:
--------------------------------------------------------------------------------
1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"spes.ipynb","version":"0.3.2","provenance":[],"collapsed_sections":[]},"kernelspec":{"name":"python3","display_name":"Python 3"},"accelerator":"GPU"},"cells":[{"cell_type":"markdown","metadata":{"id":"FL2-_k_Bu1V9","colab_type":"text"},"source":["Mount Google Drive "]},{"cell_type":"code","metadata":{"id":"UQHEEljbuj1N","colab_type":"code","outputId":"d78d777f-3efe-4662-df1e-590555783ae0","executionInfo":{"status":"ok","timestamp":1564501082150,"user_tz":-330,"elapsed":68194,"user":{"displayName":"AKASH DEBNATH","photoUrl":"","userId":"10443073240143707204"}},"colab":{"base_uri":"https://localhost:8080/","height":122}},"source":["from google.colab import drive\n","drive.mount('/content/gdrive')"],"execution_count":1,"outputs":[{"output_type":"stream","text":["Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3Aietf%3Awg%3Aoauth%3A2.0%3Aoob&scope=email%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdocs.test%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive.photos.readonly%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpeopleapi.readonly&response_type=code\n","\n","Enter your authorization code:\n","··········\n","Mounted at /content/gdrive\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"Ch0kVV2xuzQj","colab_type":"text"},"source":["Import Modules "]},{"cell_type":"code","metadata":{"id":"N3J1FwrnvIux","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":34},"outputId":"72023db1-0d29-460b-fd45-8b20ae9e8a84","executionInfo":{"status":"ok","timestamp":1564501145937,"user_tz":-330,"elapsed":5301,"user":{"displayName":"AKASH DEBNATH","photoUrl":"","userId":"10443073240143707204"}}},"source":["import os\n","import math\n","import nibabel\n","import cv2\n","from tqdm import tqdm\n","import numpy as np\n","import matplotlib.pyplot as plt\n","import time\n","\n","from tensorflow.python.keras.utils.data_utils import Sequence\n","import tensorflow as tf\n","import tensorflow.contrib as tfcontrib\n","from tensorflow.python.keras import layers\n","from tensorflow.python.keras import losses\n","from tensorflow.python.keras import models\n","from tensorflow.python.keras import backend as K \n","\n","from keras.preprocessing.image import ImageDataGenerator"],"execution_count":2,"outputs":[{"output_type":"stream","text":["Using TensorFlow backend.\n"],"name":"stderr"}]},{"cell_type":"markdown","metadata":{"id":"XZYx3AevvZ61","colab_type":"text"},"source":["Select Modality "]},{"cell_type":"code","metadata":{"id":"tdt0H9pove1m","colab_type":"code","outputId":"984e2c86-a7b3-4ef2-95db-a88698625b74","executionInfo":{"status":"ok","timestamp":1564501159462,"user_tz":-330,"elapsed":5878,"user":{"displayName":"AKASH DEBNATH","photoUrl":"","userId":"10443073240143707204"}},"colab":{"base_uri":"https://localhost:8080/","height":68}},"source":["modality_type = \"dwi\"\n","\n","print(\"1.{} 2.{} 3.{} 4.{} 5.{} 6.{} 7.{}\" .format(\"DWI\",\"CBV\",\"CBF\",\"T1c\",\"T2\",\"Tmax\",\"TTP\"))\n","x = int(input(\"Enter modality type:\"))\n","if x==1:\n"," modality_type = \"dwi\"\n","elif x==2:\n"," modality_type = \"cbv\"\n","elif x==3:\n"," modality_type = \"cbf\"\n","elif x==4:\n"," modality_type = \"t1c\"\n","elif x==5:\n"," modality_type = \"t2\"\n","elif x==6:\n"," modality_type = \"tmax\"\n","elif x==7:\n"," modality_type = \"ttp\"\n","else:\n"," 
modality_type = \"dwi\"\n","print(\"selected modality -> {}\".format(modality_type))"],"execution_count":3,"outputs":[{"output_type":"stream","text":["1.DWI 2.CBV 3.CBF 4.T1c 5.T2 6.Tmax 7.TTP\n","Enter modality type:1\n","selected modality -> dwi\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"tc5s_IUXvwV2","colab_type":"text"},"source":["Extract Dataset "]},{"cell_type":"code","metadata":{"id":"x-DZcAH5yxqC","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":1000},"outputId":"7f4bb375-f625-4658-a5ec-bd540fdaa50c","executionInfo":{"status":"ok","timestamp":1564501170409,"user_tz":-330,"elapsed":4486,"user":{"displayName":"AKASH DEBNATH","photoUrl":"","userId":"10443073240143707204"}}},"source":["if modality_type == \"dwi\":\n"," #!unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/training/dataset_dwi.zip\"\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/testing/test_dwi.zip\"\n"," \n","if modality_type == \"cbv\":\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/training/dataset_cbv.zip\"\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/testing/test_cbv.zip\"\n"," \n","if modality_type == \"cbf\":\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/training/dataset_cbf.zip\"\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/testing/test_cbf.zip\"\n"," \n","if modality_type == \"t1c\":\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/training/dataset_t1c.zip\"\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/testing/test_t1c.zip\"\n"," \n","if modality_type == \"t2\":\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/training/dataset_t2.zip\"\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/testing/test_t2.zip\"\n"," \n","if modality_type == \"tmax\":\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/training/dataset_tmax.zip\"\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/testing/test_tmax.zip\"\n"," \n","if modality_type == \"ttp\":\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/training/dataset_ttp.zip\"\n"," !unzip \"/content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/testing/test_ttp.zip\""],"execution_count":4,"outputs":[{"output_type":"stream","text":["Archive: /content/gdrive/My Drive/Colab Notebooks/final/spes/dataset/testing/test_dwi.zip\n"," creating: test_dwi/\n"," creating: test_dwi/dwi/\n"," inflating: test_dwi/dwi/01_000.jpg \n"," inflating: test_dwi/dwi/01_001.jpg \n"," inflating: test_dwi/dwi/01_002.jpg \n"," inflating: test_dwi/dwi/01_003.jpg \n"," inflating: test_dwi/dwi/01_004.jpg \n"," inflating: test_dwi/dwi/01_005.jpg \n"," inflating: test_dwi/dwi/01_006.jpg \n"," inflating: test_dwi/dwi/01_007.jpg \n"," inflating: test_dwi/dwi/01_008.jpg \n"," inflating: test_dwi/dwi/01_009.jpg \n"," inflating: test_dwi/dwi/01_010.jpg \n"," inflating: test_dwi/dwi/01_011.jpg \n"," inflating: test_dwi/dwi/01_012.jpg \n"," inflating: test_dwi/dwi/01_013.jpg \n"," inflating: test_dwi/dwi/01_014.jpg \n"," inflating: test_dwi/dwi/01_015.jpg \n"," inflating: test_dwi/dwi/01_016.jpg \n"," inflating: test_dwi/dwi/01_017.jpg \n"," inflating: test_dwi/dwi/01_018.jpg \n"," inflating: test_dwi/dwi/01_019.jpg \n"," inflating: test_dwi/dwi/01_020.jpg \n"," inflating: test_dwi/dwi/01_021.jpg \n"," inflating: test_dwi/dwi/01_022.jpg \n"," 
inflating: test_dwi/dwi/01_023.jpg \n"," inflating: test_dwi/dwi/01_024.jpg \n"," inflating: test_dwi/dwi/01_025.jpg \n"," inflating: test_dwi/dwi/01_026.jpg \n"," inflating: test_dwi/dwi/01_027.jpg \n"," inflating: test_dwi/dwi/01_028.jpg \n"," inflating: test_dwi/dwi/01_029.jpg \n"," inflating: test_dwi/dwi/01_030.jpg \n"," inflating: test_dwi/dwi/01_031.jpg \n"," inflating: test_dwi/dwi/01_032.jpg \n"," inflating: test_dwi/dwi/01_033.jpg \n"," inflating: test_dwi/dwi/01_034.jpg \n"," inflating: test_dwi/dwi/01_035.jpg \n"," inflating: test_dwi/dwi/01_036.jpg \n"," inflating: test_dwi/dwi/01_037.jpg \n"," inflating: test_dwi/dwi/01_038.jpg \n"," inflating: test_dwi/dwi/01_039.jpg \n"," inflating: test_dwi/dwi/01_040.jpg \n"," inflating: test_dwi/dwi/01_041.jpg \n"," creating: test_dwi/mask/\n"," inflating: test_dwi/mask/01_000.jpg \n"," inflating: test_dwi/mask/01_001.jpg \n"," inflating: test_dwi/mask/01_002.jpg \n"," inflating: test_dwi/mask/01_003.jpg \n"," inflating: test_dwi/mask/01_004.jpg \n"," inflating: test_dwi/mask/01_005.jpg \n"," inflating: test_dwi/mask/01_006.jpg \n"," inflating: test_dwi/mask/01_007.jpg \n"," inflating: test_dwi/mask/01_008.jpg \n"," inflating: test_dwi/mask/01_009.jpg \n"," inflating: test_dwi/mask/01_010.jpg \n"," inflating: test_dwi/mask/01_011.jpg \n"," inflating: test_dwi/mask/01_012.jpg \n"," inflating: test_dwi/mask/01_013.jpg \n"," inflating: test_dwi/mask/01_014.jpg \n"," inflating: test_dwi/mask/01_015.jpg \n"," inflating: test_dwi/mask/01_016.jpg \n"," inflating: test_dwi/mask/01_017.jpg \n"," inflating: test_dwi/mask/01_018.jpg \n"," inflating: test_dwi/mask/01_019.jpg \n"," inflating: test_dwi/mask/01_020.jpg \n"," inflating: test_dwi/mask/01_021.jpg \n"," inflating: test_dwi/mask/01_022.jpg \n"," inflating: test_dwi/mask/01_023.jpg \n"," inflating: test_dwi/mask/01_024.jpg \n"," inflating: test_dwi/mask/01_025.jpg \n"," inflating: test_dwi/mask/01_026.jpg \n"," inflating: test_dwi/mask/01_027.jpg \n"," inflating: test_dwi/mask/01_028.jpg \n"," inflating: test_dwi/mask/01_029.jpg \n"," inflating: test_dwi/mask/01_030.jpg \n"," inflating: test_dwi/mask/01_031.jpg \n"," inflating: test_dwi/mask/01_032.jpg \n"," inflating: test_dwi/mask/01_033.jpg \n"," inflating: test_dwi/mask/01_034.jpg \n"," inflating: test_dwi/mask/01_035.jpg \n"," inflating: test_dwi/mask/01_036.jpg \n"," inflating: test_dwi/mask/01_037.jpg \n"," inflating: test_dwi/mask/01_038.jpg \n"," inflating: test_dwi/mask/01_039.jpg \n"," inflating: test_dwi/mask/01_040.jpg \n"," inflating: test_dwi/mask/01_041.jpg \n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"cX4z-dPT3XB3","colab_type":"text"},"source":["Size of Dataset "]},{"cell_type":"code","metadata":{"id":"p9SMJ-NK3c5v","colab_type":"code","outputId":"72f060d1-ab25-4975-d606-506f5f0140df","executionInfo":{"status":"ok","timestamp":1564501184976,"user_tz":-330,"elapsed":1581,"user":{"displayName":"AKASH DEBNATH","photoUrl":"","userId":"10443073240143707204"}},"colab":{"base_uri":"https://localhost:8080/","height":34}},"source":["#num_train_examples = len(os.listdir('dataset_{}/training/mask'.format(modality_type)))\n","#num_val_examples = len(os.listdir('dataset_{}/validation/mask'.format(modality_type)))\n","num_test_examples = len(os.listdir('test_{}/mask'.format(modality_type)))\n","\n","#print(\"Number of training examples: {}\".format(num_train_examples))\n","#print(\"Number of validation examples: {}\".format(num_val_examples))\n","print(\"Number of validation examples: 
{}\".format(num_test_examples))\n","\n"],"execution_count":5,"outputs":[{"output_type":"stream","text":["Number of validation examples: 42\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"y6M6jXX43qsU","colab_type":"text"},"source":["Image Shape , Batch Size , Epochs "]},{"cell_type":"code","metadata":{"id":"evyrwgId3pwO","colab_type":"code","colab":{}},"source":["img_shape = (96, 96, 3)\n","batch_size = 15\n","epochs = 100"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ZmD_vkeA3-oK","colab_type":"text"},"source":["Network Architectire "]},{"cell_type":"markdown","metadata":{"id":"_P-P_6f54Kin","colab_type":"text"},"source":["1. Encoder Decoder Blocks "]},{"cell_type":"code","metadata":{"id":"qCls18fH4QKx","colab_type":"code","colab":{}},"source":["def conv_block(inputs, filters):\n"," conv11 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(inputs)\n"," bn11 = layers.BatchNormalization(axis=3)(conv11)\n"," a11 = layers.Activation(\"relu\")(bn11)\n","\n"," #filters-line-conv=2 \n"," conv21 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(inputs)\n"," bn21 = layers.BatchNormalization(axis=3)(conv21)\n"," a21 = layers.Activation(\"relu\")(bn21)\n"," conv22 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(a21)\n"," bn22 = layers.BatchNormalization(axis=3)(conv22)\n"," a22 = layers.Activation(\"relu\")(bn22)\n","\n"," #filters-line-conv=3\n"," conv31 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(inputs)\n"," bn31 = layers.BatchNormalization(axis=3)(conv31)\n"," a31 = layers.Activation(\"relu\")(bn31)\n"," conv32 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(a31)\n"," bn32 = layers.BatchNormalization(axis=3)(conv32)\n"," a32 = layers.Activation(\"relu\")(bn32)\n"," conv33 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(a32)\n"," bn33 = layers.BatchNormalization(axis=3)(conv33)\n"," a33 = layers.Activation(\"relu\")(bn33)\n","\n"," resout = layers.concatenate([a11,a22,a33,inputs])\n"," resout = layers.Activation(\"relu\")(resout)\n","\n"," return resout\n","\n","def encoder_block(input_tensor, num_filters):\n"," encoder = conv_block(input_tensor, num_filters)\n"," pool = layers.MaxPooling2D((2, 2), strides=(2, 2))(encoder)\n"," \n"," return pool, encoder\n"," \n","def upconv_block(lower_input,higher_input,filters):\n"," transpose = layers.Conv2DTranspose(filters, (2, 2), strides=(2, 2), padding='same')(lower_input)\n"," upconv_concat = layers.concatenate([higher_input, transpose], axis=-1)\n"," return upconv_concat\n","\n","def decoder_block(inputs, concat_tensor, filters):\n"," decoder = layers.Conv2DTranspose(filters, (2, 2), strides=(2, 2), padding='same')(inputs)\n"," decoder = layers.concatenate([concat_tensor, decoder], axis=-1)\n"," decoder = layers.BatchNormalization()(decoder)\n"," decoder = layers.Activation('relu')(decoder)\n","\n"," conv11 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(decoder)\n"," bn11 = layers.BatchNormalization(axis=3)(conv11)\n"," a11 = layers.Activation(\"relu\")(bn11)\n","\n"," #filters-line-conv=2 \n"," conv21 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(decoder)\n"," bn21 = layers.BatchNormalization(axis=3)(conv21)\n"," a21 = layers.Activation(\"relu\")(bn21)\n"," conv22 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 
'he_normal')(a21)\n"," bn22 = layers.BatchNormalization(axis=3)(conv22)\n"," a22 = layers.Activation(\"relu\")(bn22)\n","\n"," #filters-line-conv=3\n"," conv31 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(decoder)\n"," bn31 = layers.BatchNormalization(axis=3)(conv31)\n"," a31 = layers.Activation(\"relu\")(bn31)\n"," conv32 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(a31)\n"," bn32 = layers.BatchNormalization(axis=3)(conv32)\n"," a32 = layers.Activation(\"relu\")(bn32)\n"," conv33 = layers.Conv2D(filters, 3, padding = 'same', kernel_initializer = 'he_normal')(a32)\n"," bn33 = layers.BatchNormalization(axis=3)(conv33)\n"," a33 = layers.Activation(\"relu\")(bn33)\n","\n"," \n"," resout = layers.concatenate([a11,a22,a33,decoder])\n"," resout = layers.Activation(\"relu\")(resout)\n","\n"," return resout"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"RnCGgSBV4UDj","colab_type":"text"},"source":["2. Network "]},{"cell_type":"markdown","metadata":{"id":"6Etldx7j4cXs","colab_type":"text"},"source":["2.1 Initital Finter Size "]},{"cell_type":"code","metadata":{"id":"SnTUdEAm4lJy","colab_type":"code","colab":{}},"source":["#initial filter number\n","ifn=16"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"DHgWQ_U24l0c","colab_type":"text"},"source":["2.2 Network Layers "]},{"cell_type":"code","metadata":{"id":"gTrl1yhO4i00","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":105},"outputId":"bc5eb2eb-081c-46d9-825e-cdb950f8cd60","executionInfo":{"status":"ok","timestamp":1564501264682,"user_tz":-330,"elapsed":6676,"user":{"displayName":"AKASH DEBNATH","photoUrl":"","userId":"10443073240143707204"}}},"source":["inputs = layers.Input(shape=img_shape)\n","# 256\n","\n","encoder0_pool, encoder0 = encoder_block(inputs, ifn)\n","# 128\n","\n","encoder1_pool, encoder1 = encoder_block(encoder0_pool, 2*ifn)\n","upconv_concat1 = upconv_block(encoder1,encoder0,ifn)\n","# 64\n","\n","encoder2_pool, encoder2 = encoder_block(encoder1_pool, 4*ifn)\n","upconv_concat2 = upconv_block(encoder2,encoder1,2*ifn)\n","# 32\n","\n","encoder3_pool, encoder3 = encoder_block(encoder2_pool, 8*ifn)\n","upconv_concat3 = upconv_block(encoder3,encoder2,4*ifn)\n","# 16\n","\n","encoder4_pool, encoder4 = encoder_block(encoder3_pool, 16*ifn)\n","upconv_concat4 = upconv_block(encoder4,encoder3,8*ifn)\n","# 8\n","\n","center = conv_block(encoder4_pool, 32*ifn)\n","# center\n","\n","decoder4 = decoder_block(center, encoder4, 16*ifn)\n","# 16\n","\n","decoder3 = decoder_block(decoder4, upconv_concat4, 8*ifn)\n","# 32\n","\n","decoder2 = decoder_block(decoder3, upconv_concat3, 4*ifn)\n","# 64\n","\n","decoder1 = decoder_block(decoder2, upconv_concat2, 2*ifn)\n","# 128\n","\n","decoder0 = decoder_block(decoder1, upconv_concat1, ifn)\n","# 256\n","\n","outputs = layers.Conv2D(1, (1, 1), activation='sigmoid')(decoder0)\n","\n","model = models.Model(inputs=[inputs], outputs=[outputs])"],"execution_count":9,"outputs":[{"output_type":"stream","text":["WARNING: Logging before flag parsing goes to stderr.\n","W0730 15:41:00.324790 140433340405632 deprecation.py:506] From /usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/init_ops.py:1251: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Call initializer instance with the dtype argument instead of passing it to the 
constructor\n"],"name":"stderr"}]},{"cell_type":"markdown","metadata":{"id":"JNMiUhDv5GHV","colab_type":"text"},"source":["Evaluation Matrices"]},{"cell_type":"code","metadata":{"id":"N-MVVRSn44z1","colab_type":"code","colab":{}},"source":["def dice_coeff(y_true, y_pred):\n"," smooth = 1.\n"," # Flatten\n"," y_true_f = tf.reshape(y_true, [-1])\n"," y_pred_f = tf.reshape(y_pred, [-1])\n"," intersection = tf.reduce_sum(y_true_f * y_pred_f)\n"," score = (2. * intersection + smooth) / (tf.reduce_sum(y_true_f) + tf.reduce_sum(y_pred_f) + smooth)\n"," return score\n","\n","def dice_loss(y_true, y_pred):\n"," loss = 1 - dice_coeff(y_true, y_pred)\n"," return loss\n"," \n","def bce_dice_loss(y_true, y_pred):\n"," loss = losses.binary_crossentropy(y_true, y_pred) + dice_loss(y_true, y_pred)\n"," return loss\n"," \n","def bce(y_true, y_pred):\n"," loss = losses.binary_crossentropy(y_true, y_pred)\n"," return loss\n","\n","def sce(y_true, y_pred):\n"," loss = tf.losses.sigmoid_cross_entropy(y_true, y_pred)\n"," return loss\n"," \n","def specificity(y_true, y_pred):\n"," neg_y_true = 1 - y_true\n"," neg_y_pred = 1 - y_pred\n"," fp = K.sum(neg_y_true * y_pred)\n"," tn = K.sum(neg_y_true * neg_y_pred)\n"," specificity = tn / (tn + fp + K.epsilon())\n"," return specificity\n","\n","def dsc(y_true, y_pred):\n"," neg_y_true = 1 - y_true\n"," neg_y_pred = 1 - y_pred\n"," tp = K.sum(y_true * y_pred)\n"," fn = K.sum(y_true * neg_y_pred)\n"," fp = K.sum(neg_y_true * y_pred)\n"," dsc = (2*tp) / ((2*tp) + fn + fp + K.epsilon())\n"," return dsc\n"," \n","def sensitivity(y_true, y_pred):\n"," neg_y_true = 1 - y_true\n"," neg_y_pred = 1 - y_pred\n"," tp = K.sum(y_true * y_pred)\n"," fn = K.sum(y_true * neg_y_pred)\n"," sensitivity = tp / (tp + fn + K.epsilon())\n"," return sensitivity"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"z10NPxVm5T2L","colab_type":"text"},"source":["Compile Model "]},{"cell_type":"code","metadata":{"id":"bI3Aykgg5CIT","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":88},"outputId":"1c0d2a90-1750-43f5-d280-b9a9ce17df61","executionInfo":{"status":"ok","timestamp":1564501265137,"user_tz":-330,"elapsed":1141,"user":{"displayName":"AKASH DEBNATH","photoUrl":"","userId":"10443073240143707204"}}},"source":["#model.compile(optimizer='adam', loss=bce_dice_loss, metrics=[dice_loss,dsc,sensitivity,specificity,'accuracy'])\n","model.compile(optimizer='adam', loss=sce, metrics=[dice_loss,dsc,sensitivity,specificity,'accuracy'])\n","\n","#model.summary()"],"execution_count":11,"outputs":[{"output_type":"stream","text":["W0730 15:41:05.448718 140433340405632 deprecation.py:323] From /usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/nn_impl.py:180: add_dispatch_support..wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use tf.where in 2.0, which has the same broadcast rule as np.where\n"],"name":"stderr"}]},{"cell_type":"markdown","metadata":{"id":"_YsyaUVg5eUv","colab_type":"text"},"source":["Image Data Generator "]},{"cell_type":"markdown","metadata":{"id":"6MDH86TU5qcC","colab_type":"text"},"source":["Training Image "]},{"cell_type":"code","metadata":{"id":"F59UQL_P5y50","colab_type":"code","colab":{}},"source":["def trainset(b_size):\n"," seed = 1337\n"," image_datagen = ImageDataGenerator(rescale=1.0/255)\n"," mask_datagen = ImageDataGenerator(rescale=1.0/255)\n","\n"," image = image_datagen.flow_from_directory(\n"," 
\"dataset_{}/training\".format(modality_type),\n"," classes = [modality_type],\n"," shuffle=True,\n"," class_mode=None,\n"," target_size = (96,96),\n"," batch_size = b_size,\n"," seed=seed\n"," )\n","\n","\n"," mask = mask_datagen.flow_from_directory(\n"," \"dataset_{}/training\".format(modality_type),\n"," classes = ['mask'],\n"," shuffle=True,\n"," class_mode=None,\n"," color_mode = \"grayscale\",\n"," target_size = (96,96),\n"," batch_size = b_size,\n"," seed=seed\n"," )\n"," \n","\n"," train_data_generator = zip(image,mask)\n","\n"," for (image,mask) in train_data_generator:\n"," yield(image-0.5,mask)\n","\n","train_set = trainset(batch_size)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"1xmxkDrn5vwG","colab_type":"text"},"source":["Validation Image "]},{"cell_type":"code","metadata":{"id":"It0yI_4K552U","colab_type":"code","colab":{}},"source":["def validset(b_size):\n"," seed = 1223\n"," image_datagen = ImageDataGenerator(rescale=1.0/255)\n"," mask_datagen = ImageDataGenerator(rescale=1.0/255)\n","\n"," image = image_datagen.flow_from_directory(\n"," \"dataset_{}/validation\".format(modality_type),\n"," classes = [modality_type],\n"," shuffle=True,\n"," class_mode=None,\n"," target_size = (96,96),\n"," batch_size = b_size,\n"," seed=seed\n"," )\n","\n","\n"," mask = mask_datagen.flow_from_directory(\n"," \"dataset_{}/validation\".format(modality_type),\n"," classes = ['mask'],\n"," shuffle=True,\n"," class_mode=None,\n"," color_mode = \"grayscale\",\n"," target_size = (96,96),\n"," batch_size = b_size,\n"," seed=seed\n"," )\n"," \n","\n"," valid_data_generator = zip(image,mask)\n","\n"," for (image,mask) in valid_data_generator:\n"," yield(image-0.5,mask)\n","valid_set = validset(batch_size)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Q74khIYB6JeD","colab_type":"text"},"source":["Learing Rate Optimizer "]},{"cell_type":"code","metadata":{"id":"zvhtt_v56PKN","colab_type":"code","colab":{}},"source":["def step_decay_schedule(initial_lr=1e-3, decay_factor=0.75, step_size=10):\n"," def schedule(epoch):\n"," return initial_lr * (decay_factor ** np.floor(epoch/step_size)) \n"," return tf.keras.callbacks.LearningRateScheduler(schedule,verbose=1)\n","lr = step_decay_schedule(initial_lr=1e-4, decay_factor=0.2, step_size=2)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"HZa_Df2G7Kyo","colab_type":"text"},"source":["Trainig "]},{"cell_type":"markdown","metadata":{"id":"tzvOgE9R6Ujj","colab_type":"text"},"source":["Model name, saving location and callback functions "]},{"cell_type":"code","metadata":{"id":"t-5YraZV6VFd","colab_type":"code","colab":{}},"source":["model_name = 'unet-{}-model-loss-sce-batch_size={}-time={}.hdf5'.format(modality_type,int(batch_size),int(time.time()))\n","save_model_path = '/content/gdrive/My Drive/Colab Notebooks/final/spes/checkpoint/{}/{}'.format(modality_type,model_name)\n","tb = tf.keras.callbacks.TensorBoard(log_dir='/content/gdrive/My Drive/Colab Notebooks/final/spes/logs/{}/{}'.format(modality_type,model_name))\n","cp = tf.keras.callbacks.ModelCheckpoint(filepath=save_model_path, monitor='val_loss', save_best_only=True, verbose=1)\n","es = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10, verbose=1, mode='min', baseline=None, restore_best_weights=False)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"3rPjeyXc7W8-","colab_type":"text"},"source":["Train 
"]},{"cell_type":"code","metadata":{"id":"rnOeV8VE7ZJB","colab_type":"code","colab":{}},"source":["#model.load_weights('/content/gdrive/My Drive/Colab Notebooks/final/checkpoint/unet-model-batch_size=3-time=1559301950.hdf5');\n","history = model.fit_generator(\n"," train_set,\n"," steps_per_epoch=int(np.ceil(num_train_examples / float(batch_size))),\n"," epochs=epochs,\n"," validation_data=valid_set,\n"," validation_steps=int(np.ceil(num_val_examples / float(batch_size))),\n"," callbacks=[tb,cp,es,lr]\n"," )"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"VC5Nl2G87q7B","colab_type":"text"},"source":["Testing "]},{"cell_type":"markdown","metadata":{"id":"I0K9HcLUkQE9","colab_type":"text"},"source":[""]},{"cell_type":"markdown","metadata":{"id":"eylX2Uv0-D2A","colab_type":"text"},"source":["1. Image Viewer "]},{"cell_type":"code","metadata":{"id":"jhXlsheB-KHk","colab_type":"code","colab":{}},"source":["def remove_keymap_conflicts(new_keys_set):\n"," for prop in plt.rcParams:\n"," if prop.startswith('keymap.'):\n"," keys = plt.rcParams[prop]\n"," remove_list = set(keys) & new_keys_set\n"," for key in remove_list:\n"," keys.remove(key)\n","\n","def multi_slice_viewer(volume1,volume2):\n"," remove_keymap_conflicts({'j', 'k'})\n"," fig, (ax1,ax2) = plt.subplots(1,2)\n"," ax1.index = volume1.shape[0] // (volume1.shape[0]+1)\n"," ax2.index = volume2.shape[0] // (volume2.shape[0]+1)\n"," ax1.volume = volume1\n"," ax2.volume = volume2\n"," ax1.set_xlabel(\"Ground Truth \")\n"," ax2.set_xlabel(\"Prediction \")\n"," ax1.imshow(volume1[ax1.index])\n"," ax2.imshow(volume2[ax2.index])\n"," fig.canvas.mpl_connect('key_press_event', process_key)\n","\n","def process_key(event):\n"," fig = event.canvas.figure\n"," ax1 = fig.axes[0]\n"," ax2 = fig.axes[1]\n"," if event.key == 'j':\n"," previous_slice(ax1)\n"," previous_slice(ax2)\n"," elif event.key == 'k':\n"," next_slice(ax1)\n"," next_slice(ax2)\n"," fig.canvas.draw()\n","\n","def previous_slice(ax):\n"," volume = ax.volume\n"," ax.index = (ax.index - 1) # wrap around using %\n"," ax.images[0].set_array(volume[ax.index])\n","\n","def next_slice(ax):\n"," volume = ax.volume\n"," ax.index = (ax.index + 1) \n"," ax.images[0].set_array(volume[ax.index])\n","\n","def showResult():\n"," true = []\n"," predict = []\n","\n"," for i in range(num_test_examples):\n"," image = cv2.imread('/content/gdrive/My Drive/Colab Notebooks/final/spes/test_result/image/{}.jpg'.format(i))\n"," mask = cv2.imread('/content/gdrive/My Drive/Colab Notebooks/final/spes/test_result/mask/{}.jpg'.format(i))\n"," predicted = cv2.imread('/content/gdrive/My Drive/Colab Notebooks/final/spes/test_result/predicted/{}.jpg'.format(i))\n"," masked_image = cv2.addWeighted(image,1,mask,0.6,0)\n"," predicted_image = cv2.addWeighted(image,1,predicted,0.6,0)\n"," true.append(image)\n"," predict.append(predicted)\n","\n"," x = np.array(true)\n"," y = np.array(predict)\n"," print(x.shape)\n"," multi_slice_viewer(x,y)\n"," plt.show()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"jkrpw9at-lNW","colab_type":"text"},"source":["2. 
Image Data Generator "]},{"cell_type":"code","metadata":{"id":"UxxIobHn-pFt","colab_type":"code","colab":{}},"source":["def saveData(image,mask,cnt):\n"," for i in range(image.shape[0]):\n"," img_name = '/content/gdrive/My Drive/Colab Notebooks/final/spes/test_result/image/{}.jpg'.format(str(cnt+i))\n"," mask_name = '/content/gdrive/My Drive/Colab Notebooks/final/spes/test_result/mask/{}.jpg'.format(str(cnt+i))\n"," plt.imsave(img_name,image[i,:,:],cmap='gray')\n"," plt.imsave(mask_name,mask[i,:,:,0],cmap='gray')\n","\n","def testset(b_size):\n"," seed = 1337\n"," image_datagen = ImageDataGenerator(rescale=1.0/255)\n"," mask_datagen = ImageDataGenerator(rescale=1.0/255)\n","\n"," image = image_datagen.flow_from_directory(\n"," \"test_{}\".format(modality_type),\n"," classes = [modality_type],\n"," shuffle=False,\n"," class_mode=None,\n"," target_size = (96,96),\n"," batch_size = b_size,\n"," seed=seed \n"," )\n","\n"," mask = mask_datagen.flow_from_directory(\n"," \"test_{}\".format(modality_type),\n"," classes = ['mask'],\n"," shuffle=False,\n"," class_mode=None,\n"," color_mode = \"grayscale\",\n"," target_size = (96,96),\n"," batch_size = b_size,\n"," seed=seed\n"," )\n"," \n"," train_data_generator = zip(image,mask)\n"," cnt = 0\n"," for (image,mask) in train_data_generator:\n"," saveData(image,mask,cnt)\n"," cnt+=image.shape[0]\n"," yield(image-0.5,mask)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"mf6jtGzm_Bsv","colab_type":"text"},"source":["3. Test "]},{"cell_type":"code","metadata":{"id":"qJUSBDmS_EbC","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":68},"outputId":"07b89f19-3379-4bab-ffdd-c4fa14b788cd","executionInfo":{"status":"ok","timestamp":1564501371727,"user_tz":-330,"elapsed":18934,"user":{"displayName":"AKASH DEBNATH","photoUrl":"","userId":"10443073240143707204"}}},"source":["model.load_weights('/content/gdrive/My Drive/Colab Notebooks/final/spes/checkpoint/dwi/unet-dwi-model-loss-bce_dicebatch_size=15-time=1563470853.hdf5')\n","test_set = testset(batch_size)\n","predicted = model.predict_generator(test_set,steps=int(np.ceil(num_test_examples / float(batch_size))), verbose=1)"],"execution_count":16,"outputs":[{"output_type":"stream","text":["Found 42 images belonging to 1 classes.\n","Found 42 images belonging to 1 classes.\n","3/3 [==============================] - 7s 2s/step\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"Yc22KSdW_1kI","colab_type":"text"},"source":["4. 
Saving and Showing Test Result "]},{"cell_type":"code","metadata":{"id":"PQqgC4jX_8nD","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":286},"outputId":"0b36cca0-72d8-495c-da6f-40e3d5302e4b","executionInfo":{"status":"ok","timestamp":1564501477446,"user_tz":-330,"elapsed":3861,"user":{"displayName":"AKASH DEBNATH","photoUrl":"","userId":"10443073240143707204"}}},"source":["print(predicted.shape)\n","data = np.reshape(predicted,(predicted.shape[0],1,96,96))\n","print(data.shape)\n","\n","for i in tqdm(range(data.shape[0])):\n"," img_name = '/content/gdrive/My Drive/Colab Notebooks/final/spes/test_result/predicted/{}.jpg'.format(str(i))\n"," plt.imsave(img_name,data[i][0],cmap='gray')\n","\n","showResult()"],"execution_count":18,"outputs":[{"output_type":"stream","text":[" 29%|██▊ | 12/42 [00:00<00:00, 118.89it/s]"],"name":"stderr"},{"output_type":"stream","text":["(42, 96, 96, 1)\n","(42, 1, 96, 96)\n"],"name":"stdout"},{"output_type":"stream","text":["100%|██████████| 42/42 [00:00<00:00, 140.78it/s]\n"],"name":"stderr"},{"output_type":"stream","text":["(42, 96, 96, 3)\n"],"name":"stdout"},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAXQAAADJCAYAAAAzQMlMAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJztnXuUZFV977+/enRVv2amp2eYGWaA\nGUAgYBTMgBBcYEDv9RECitGYEMFlwrroVUKuBPRe9OK6WVckKmDEBOUmKBGCRB6RLFEBRSUMMjzk\nNcDMwAwzTDfz7Ol3dVXt+8ep3+7f2bVPPbq7qqtO/z5rzZrq89r7VO3zO7/9e20yxkBRFEVpfxLz\n3QFFURRlblCBriiKEhNUoCuKosQEFeiKoigxQQW6oihKTFCBriiKEhNmJdCJ6D1E9CIRbSaiK+eq\nU4oy3+jYVtoRmmkcOhElAbwE4N0AdgD4DYCPGmOen7vuKUrz0bGttCuz0dBPAbDZGLPVGJMDcDuA\nc+emW4oyr+jYVtqS2Qj01QBeE3/vKG1TlHZHx7bSlqQa3QARXQzg4tKfv9fo9pSFjTGGmtGOO66J\nws1qSQ1ljtljjFle7aDZCPSdAA4Tf68pbQthjLkJwE0AQEQ6ypV2oOrYluM6kUiYVCoVEuIq0JXZ\nIpWEfD6/rZZzZmNy+Q2ANxHROiLqAPAnAO6dxfUUpVWoa2wbY6wAl5/d/SrklXqYybiZsYZujMkT\n0X8HcD+AJID/Z4x5bqbXU5RWQce20q7MOGxxRo2pyUVpMM2yoUuIyKRSKW6/IW34ruva7ZXm06zf\npVAobDTGrK92nGaKKoqixAQV6IqiKDGh4WGLirIQaLTpUs0rrUmr/S6qoSuKosQEFeiKoigxQQW6\noihKizBb050KdEVRlJigTlFFURqKq3U2y5Eo221Um3Mdhz7bfqpAVxSlobRaJMhc0mr3piYXRVGU\nmKAauqIos6IZpg2lNlRDVxRFiQmqoSuK0nDmQ4tfiLMFFeiKoswKn+CMiqeOm5Dl+2yV+1KTi6Io\nSkxQga4oStOI66pNrbIqlZpcFEWZFT77eCKR8O6fb4E317SKqYVRDV1RFCUmqIauKMqsICKrqcr/\n+XO1RbSVuUMFehMhojKvuDEGZ555JgDgF7/4RWi7/AwA6XQaU1NT9no8rS0Wi825AUUpIU0NUkDn\n83kAwdjk8ZlIJFAoFOwxPF7lNfjYQqEAXp+Vz0kmkzMa4wtxHVY1uSiKosQEaub0h4gW9FwrkUiU\naSdnnHGG1UR6e3tx+umnAwCuuuoqr4YhtR6+Fv8vty1UjDFNV8GIyCSTyWY3O69EaehyVsmadrFY\ntMcUCgU7hnlbKpWyz4COYT+FQmGjMWZ9teNUQ1cURYkJVQU6ER1GRA8R0fNE9BwRXVravpSIfkpE\nL5f+72t8d9ubYrGIVCqFVCqF888/H+effz46OjpQLBZRLBbR1dWFTCaDTCaDr371q9axxP9SqZQ9\nNp/P28+MOpvqQ8f2zPHVOCcipNNppNNpO15TqRSSySSy2Syy2SwymYy1r0c5Tqu1pURT1eRCRKsA\nrDLGPEFEvQA2AjgPwEUA9hljvkxEVwLoM8ZcUeVaC/qXkU7Niy66CADw6quv2kG9dOlSnHDCCQCA\n1atXI5fLAQAWL14MAPjYxz6GdDoNAJiamrJTVz5fOp4WKvWYXOZqbC9EkwsQjmiRjnsgGJ/82RiD\nTCZjP/O4dk2GQOBU9TlFF7pQnzOTizFmlzHmidLnYQAvAFgN4FwAt5QOuwXBg6AobYOObSVu1BW2\nSERrAZwEYAOAFcaYXaVdAwBWzGnPYojUqlnjmJyctBpJX18fRkdHAQCbN29Gd3c3AGBkZAQAcMUV\nV+Caa64puy5rMlJTUupDx3b9+ApTcdgiEIxtIJw1SkTg2YxPw2dTTNR+pTI1C3Qi6gHwbwD+yhhz\n0PFymyhzChFdDODi2XY0LvD0Uk47Ozo6AASmFTbJFAoFu52FfHd3d2iQ87X4AVGTy8yYydjWcT2N\nmzgEBKYXFujGGDuuiciOaxnlwvvT6XRIQQEal2cRxzj1mqJciCiNYMD/izHmh6XNgyUbJNsi3/Cd\na4y5yRizvhb7j6I0m5mObR3XSitSVUOn4JV1M4AXjDFfE7vuBXAhgC+X/r+nIT2MGaxNj4+PAwim\nqHv27AEwrYkDgabCf7M2L51DUpPQGN6ZoWN77nDzK5LJpDdngojseJVmFsan7TdKa253bdxHLSaX\n0wH8OYBniOip0rbPIxjsdxDRJwBsA/Dh
--------------------------------------------------------------------------------