├── .ipynb_checkpoints
├── MISA_Project_PreProcesing_Step(1)_Registration-checkpoint.ipynb
├── MISA_Project_PreProcesing_Step(2)_Normalization-checkpoint.ipynb
└── PreparingTestingData-checkpoint.ipynb
├── Evaluation_MISA_Project.ipynb
├── Images
├── 5_2.png
├── Preprocessing_pipelines.PNG
├── architecture.PNG
├── example_preprpcessed.png
├── figures_forreport.png
└── overlay_val14.png
├── Instructios_How_to_run.pdf
├── MISA_Project_PreProcesing_Step(1)_Registration.ipynb
├── MISA_Project_PreProcesing_Step(2)_Normalization.ipynb
├── MISA_Project_Report.pdf
├── PreparingTestingData.ipynb
├── README.md
├── model
├── README.md
├── config_all.json
├── config_fsl_fast.json
├── config_fsl_first.json
├── config_malp_em.json
├── config_malp_em_tissue.json
├── config_spm_tissue.json
├── config_tissue.json
├── deploy.pvpy
├── deploy.py
├── dim.nii
├── eval.ipynb
├── neuronet.py
├── neuronet.pyc
├── parse_csvs.ipynb
├── reader.py
├── reader.pyc
├── reader2.pyc
├── sandbox.ipynb
├── test.csv
├── train.csv
├── train.py
└── val.csv
└── paper_147.pptx
/.ipynb_checkpoints/PreparingTestingData-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "toc": true
7 | },
8 | "source": [
9 | "
Table of Contents
\n",
10 | ""
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "metadata": {},
16 | "source": [
17 | " Description & Instructions: How to use this script.\n",
18 | "\n",
19 | "* 1) Import the libraries by running the Section \"Importing Libraries\"\n",
20 | "* 2) Functions used for histogram matching and normalization\n",
21 | "* 3) Apply the histogram matching\n",
22 | "* 4) Resample the 1mm x 1mm x 1mm predicted segmentation back to the original spacing using the previously saved transformation matrix"
23 | ]
24 | },
25 | {
26 | "cell_type": "markdown",
27 | "metadata": {
28 | "heading_collapsed": true
29 | },
30 | "source": [
31 | "# Importing Libraries"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": 1,
37 | "metadata": {
38 | "hidden": true
39 | },
40 | "outputs": [
41 | {
42 | "name": "stderr",
43 | "output_type": "stream",
44 | "text": [
45 | "C:\\Users\\Fakrul-IslamTUSHAR\\Anaconda2\\envs\\nnet\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
46 | " from ._conv import register_converters as _register_converters\n"
47 | ]
48 | }
49 | ],
50 | "source": [
51 | "import SimpleITK as sitk\n",
52 | "import os\n",
53 | "import pandas as pd\n",
54 | "import numpy as np\n",
55 | "import glob\n",
56 | "import os\n",
57 | "import nibabel as nib\n",
58 | "\n",
59 | "from matplotlib import pyplot as plt\n",
60 | "from dltk.io.augmentation import *\n",
61 | "from dltk.io.preprocessing import *"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {
67 | "heading_collapsed": true
68 | },
69 | "source": [
70 | "# Function"
71 | ]
72 | },
73 | {
74 | "cell_type": "markdown",
75 | "metadata": {},
76 | "source": [
77 | "# Histogram Matching"
78 | ]
79 | },
80 | {
81 | "cell_type": "code",
82 | "execution_count": 2,
83 | "metadata": {},
84 | "outputs": [],
85 | "source": [
86 | "# -------------------------------------------------------\n",
87 | "## These functions were copied from the GitHub repository: https://github.com/sergivalverde/mri_utils\n",
88 | "# If you use this code, please cite that GitHub repository.\n",
89 | "\n",
90 | "# Image processing functions\n",
91 | "# Useful for brain MRI analysis\n",
92 | "#\n",
93 | "# Sergi Valverde 2018\n",
94 | "# svalverde@eia.udg.edu\n",
95 | "#\n",
96 | "# -------------------------------------------------------\n",
97 | "\n",
98 | "import numpy as np\n",
99 | "from scipy.ndimage import label\n",
100 | "from scipy.ndimage import labeled_comprehension as lc\n",
101 | "import SimpleITK as sitk\n",
102 | "\n",
103 | "\n",
104 | "\n",
105 | "def histogram_matching(mov_scan, ref_scan,\n",
106 | " histogram_levels=2048,\n",
107 | " match_points=100,\n",
108 | " set_th_mean=True):\n",
109 | " \"\"\"\n",
110 | " Histogram matching following the method developed on\n",
111 | " Nyul et al 2001 (ITK implementation)\n",
112 | " inputs:\n",
113 | " - mov_scan: np.array containing the image to normalize\n",
114 | " - ref_scan np.array containing the reference image\n",
115 | " - histogram levels\n",
116 | " - number of matched points\n",
117 | " - Threshold Mean setting\n",
118 | " outputs:\n",
119 | " - histogram matched image\n",
120 | " \"\"\"\n",
121 | "\n",
122 | " # convert np arrays into itk image objects\n",
123 | " ref = sitk.GetImageFromArray(ref_scan.astype('float32'))\n",
124 | " mov = sitk.GetImageFromArray(mov_scan.astype('float32'))\n",
125 | "\n",
126 | " # perform histogram matching\n",
127 | " caster = sitk.CastImageFilter()\n",
128 | " caster.SetOutputPixelType(ref.GetPixelID())\n",
129 | "\n",
130 | " matcher = sitk.HistogramMatchingImageFilter()\n",
131 | " matcher.SetNumberOfHistogramLevels(histogram_levels)\n",
132 | " matcher.SetNumberOfMatchPoints(match_points)\n",
133 | " matcher.SetThresholdAtMeanIntensity(set_th_mean)\n",
134 | " matched_vol = matcher.Execute(mov, ref)\n",
135 | "\n",
136 | " return matched_vol"
137 | ]
138 | },
139 | {
140 | "cell_type": "markdown",
141 | "metadata": {},
142 | "source": [
143 | "# Normalization and Histogram Matching"
144 | ]
145 | },
146 | {
147 | "cell_type": "markdown",
148 | "metadata": {},
149 | "source": [
150 | "## Loading Reference data "
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": 4,
156 | "metadata": {},
157 | "outputs": [
158 | {
159 | "name": "stderr",
160 | "output_type": "stream",
161 | "text": [
162 | "C:\\Users\\Fakrul-IslamTUSHAR\\Anaconda2\\envs\\nnet\\lib\\site-packages\\ipykernel_launcher.py:5: FutureWarning: Method .as_matrix will be removed in a future version. Use .values instead.\n",
163 | " \"\"\"\n"
164 | ]
165 | }
166 | ],
167 | "source": [
168 | "mylist = pd.read_csv(\n",
169 | " \"MISAPreorocessingTestReg_info.csv\",\n",
170 | " dtype=object,\n",
171 | " keep_default_na=False,\n",
172 | " na_values=[]).as_matrix()"
173 | ]
174 | },
175 | {
176 | "cell_type": "code",
177 | "execution_count": 5,
178 | "metadata": {},
179 | "outputs": [],
180 | "source": [
181 | "Save_Preprocessed_Test_data=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/save_processed_test_data/\"\n"
182 | ]
183 | },
184 | {
185 | "cell_type": "code",
186 | "execution_count": 6,
187 | "metadata": {},
188 | "outputs": [],
189 | "source": [
190 | "ref_volume=sitk.ReadImage(\"ref_07.nii.gz\", sitk.sitkFloat32)\n",
191 | "ref_array = sitk.GetArrayFromImage(ref_volume)"
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": 8,
197 | "metadata": {},
198 | "outputs": [
199 | {
200 | "name": "stdout",
201 | "output_type": "stream",
202 | "text": [
203 | "IBSR_02\n",
204 | "########Saved#########\n",
205 | "IBSR_10\n",
206 | "########Saved#########\n",
207 | "IBSR_15\n",
208 | "########Saved#########\n"
209 | ]
210 | }
211 | ],
212 | "source": [
213 | "for im in mylist:\n",
214 | "    ### Getting the subject MRI\n",
215 | " img_fn = str(im[1])\n",
216 | " img_name = img_fn.split('/')[-1].split('.')[0]\n",
217 | " print(img_name)\n",
218 | " #Image_Name.append(img_name)\n",
219 | " histoMached_imageName=img_name+'.nii.gz' \n",
220 | "\n",
221 | "# =============================================================================\n",
222 | "# load data\n",
223 | "# =============================================================================\n",
224 | " #Loading the image\n",
225 | " sitk_t1 = sitk.ReadImage(img_fn, sitk.sitkFloat32)\n",
226 | " t1 = sitk.GetArrayFromImage(sitk_t1)\n",
227 | " normalized_vol=normalise_zero_one(t1)\n",
228 | " \n",
229 | " \n",
230 | " Histo_mached_vol=histogram_matching(normalized_vol,ref_array)\n",
231 | " Histo_mached_vol.CopyInformation(sitk_t1)\n",
232 | " \n",
233 | " sitk.WriteImage(Histo_mached_vol, os.path.join(Save_Preprocessed_Test_data,histoMached_imageName))\n",
234 | " print(\"########Saved#########\")"
235 | ]
236 | },
237 | {
238 | "cell_type": "code",
239 | "execution_count": 9,
240 | "metadata": {},
241 | "outputs": [
242 | {
243 | "name": "stdout",
244 | "output_type": "stream",
245 | "text": [
246 | "[['IBSR_02'\n",
247 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/IBSR_02.nii.gz'\n",
248 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_02.tfm']\n",
249 | " ['IBSR_10'\n",
250 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/IBSR_10.nii.gz'\n",
251 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_10.tfm']\n",
252 | " ['IBSR_15'\n",
253 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/IBSR_15.nii.gz'\n",
254 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_15.tfm']]\n"
255 | ]
256 | }
257 | ],
258 | "source": [
259 | "print(mylist)"
260 | ]
261 | },
262 | {
263 | "cell_type": "markdown",
264 | "metadata": {
265 | "heading_collapsed": true
266 | },
267 | "source": [
268 | "# Converting the Labels Back to the Original Spacing"
269 | ]
270 | },
271 | {
272 | "cell_type": "markdown",
273 | "metadata": {
274 | "hidden": true
275 | },
276 | "source": [
277 | "For this process, the following steps need to be done.\n",
278 | "\n",
279 | "* 1) Put the path of the prediction (segmented NIfTI file) in Section 5.1 \"Final_Seg_test_path\"\n",
280 | "* 2) Put the path of the RAW DATA (NIfTI file) in Section 5.2 \"Test_data_raw_path\"\n",
281 | "* 3) Put the path of the saved transformation matrices (.tfm files) in Section 5.3 \"tmf_path\"\n",
282 | "* 4) Put the desired folder path where you want to save the segmented NIfTI with the original spacing in \"path_to_save_images\" and RUN."
283 | ]
284 | },
285 | {
286 | "cell_type": "markdown",
287 | "metadata": {
288 | "heading_collapsed": true,
289 | "hidden": true
290 | },
291 | "source": [
292 | "## Segmented Results"
293 | ]
294 | },
295 | {
296 | "cell_type": "code",
297 | "execution_count": 35,
298 | "metadata": {
299 | "hidden": true
300 | },
301 | "outputs": [
302 | {
303 | "name": "stdout",
304 | "output_type": "stream",
305 | "text": [
306 | "['IBSR_02_seg.nii.gz', 'IBSR_10_seg.nii.gz', 'IBSR_15_seg.nii.gz']\n"
307 | ]
308 | }
309 | ],
310 | "source": [
311 | "Final_Seg_test_path=\"H:/f_r/final_test/\"\n",
312 | "My_Predicted_Seg_list=os.listdir(Final_Seg_test_path)\n",
313 | "print(My_Predicted_Seg_list)\n",
314 | "\n",
315 | "#complete_Segmented_data=Final_Seg_test_path+My_Predicted_Seg_list[0]\n",
316 | "#print(Segmented_data)"
317 | ]
318 | },
319 | {
320 | "cell_type": "markdown",
321 | "metadata": {
322 | "heading_collapsed": true,
323 | "hidden": true
324 | },
325 | "source": [
326 | "## Raw Test Data "
327 | ]
328 | },
329 | {
330 | "cell_type": "code",
331 | "execution_count": 36,
332 | "metadata": {
333 | "hidden": true
334 | },
335 | "outputs": [
336 | {
337 | "name": "stdout",
338 | "output_type": "stream",
339 | "text": [
340 | "['IBSR_02.nii.gz', 'IBSR_10.nii.gz', 'IBSR_15.nii.gz']\n"
341 | ]
342 | }
343 | ],
344 | "source": [
345 | "Test_data_raw_path=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/\"\n",
346 | "Test_data_list=os.listdir(Test_data_raw_path)\n",
347 | "print(Test_data_list)\n",
348 | "\n",
349 | "#complete_RAW_data=Test_data_raw_path+Test_data_list[0]\n",
350 | "#print(complete_RAW_data)"
351 | ]
352 | },
353 | {
354 | "cell_type": "markdown",
355 | "metadata": {
356 | "heading_collapsed": true,
357 | "hidden": true
358 | },
359 | "source": [
360 | "## Transformation Matrix"
361 | ]
362 | },
363 | {
364 | "cell_type": "code",
365 | "execution_count": 37,
366 | "metadata": {
367 | "hidden": true
368 | },
369 | "outputs": [
370 | {
371 | "name": "stdout",
372 | "output_type": "stream",
373 | "text": [
374 | "['IBSR_02.tfm', 'IBSR_10.tfm', 'IBSR_15.tfm']\n"
375 | ]
376 | }
377 | ],
378 | "source": [
379 | "tmf_path=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/\"\n",
380 | "tmf_data_list=os.listdir(tmf_path)\n",
381 | "print(tmf_data_list)"
382 | ]
383 | },
384 | {
385 | "cell_type": "markdown",
386 | "metadata": {
387 | "heading_collapsed": true,
388 | "hidden": true
389 | },
390 | "source": [
391 | "## Inverse Registration"
392 | ]
393 | },
394 | {
395 | "cell_type": "code",
396 | "execution_count": 41,
397 | "metadata": {
398 | "hidden": true
399 | },
400 | "outputs": [
401 | {
402 | "name": "stdout",
403 | "output_type": "stream",
404 | "text": [
405 | "H:/f_r/final_test/IBSR_02_seg.nii.gz\n",
406 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/IBSR_02.nii.gz\n",
407 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_02.tfm\n",
408 | "IBSR_02\n",
409 | "H:/f_r/final_test/IBSR_10_seg.nii.gz\n",
410 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/IBSR_10.nii.gz\n",
411 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_10.tfm\n",
412 | "IBSR_10\n",
413 | "H:/f_r/final_test/IBSR_15_seg.nii.gz\n",
414 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/IBSR_15.nii.gz\n",
415 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_15.tfm\n",
416 | "IBSR_15\n"
417 | ]
418 | }
419 | ],
420 | "source": [
421 | "path_to_save_images=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/submission/\"\n",
422 | "\n",
423 | "for im in range(0,3):\n",
424 | " \n",
425 | " ##Creating Data Path\n",
426 | " complete_Segmented_data=Final_Seg_test_path+My_Predicted_Seg_list[im]\n",
427 | " complete_RAW_data=Test_data_raw_path+Test_data_list[im]\n",
428 | " complete_tmf_data=tmf_path+tmf_data_list[im]\n",
429 | " \n",
430 | " print(complete_Segmented_data)\n",
431 | " print(complete_RAW_data)\n",
432 | " print(complete_tmf_data)\n",
433 | " \n",
434 | "    ### Getting the subject MRI\n",
435 | " img_fn = str(complete_RAW_data)\n",
436 | " img_name = img_fn.split('/')[-1].split('.')[0]\n",
437 | " label_fn=str(complete_Segmented_data)\n",
438 | " \n",
439 | " \n",
440 | " \n",
441 | " print(img_name)\n",
442 | " \n",
443 | " ##Creating Name\n",
444 | " Registered_imageName=img_name+\"_seg\"+'.nii.gz'\n",
445 | " \n",
446 | " ##The Original Spaced Image\n",
447 | " Original_fixed=sitk.ReadImage(img_fn, sitk.sitkFloat32)\n",
448 | " \n",
449 | " ####Segmented Prediction\n",
450 | " label_Registered_moving=sitk.ReadImage(label_fn, sitk.sitkFloat32)\n",
451 | " \n",
452 | " ######Load the Transformation\n",
453 | " initial_transform_for_InterTransformation=sitk.ReadTransform(complete_tmf_data)\n",
454 | " inverse_Transformation=initial_transform_for_InterTransformation.GetInverse()\n",
455 | " \n",
456 | " Original_resampled_label = sitk.Resample(label_Registered_moving, Original_fixed, \n",
457 | " inverse_Transformation, sitk.sitkNearestNeighbor, 0.0, \n",
458 | " label_Registered_moving.GetPixelID())\n",
459 | " \n",
460 | " sitk.WriteImage(Original_resampled_label, os.path.join(path_to_save_images, Registered_imageName))\n",
461 | " \n",
462 | " \n",
463 | " "
464 | ]
465 | },
466 | {
467 | "cell_type": "code",
468 | "execution_count": null,
469 | "metadata": {
470 | "hidden": true
471 | },
472 | "outputs": [],
473 | "source": []
474 | }
475 | ],
476 | "metadata": {
477 | "kernelspec": {
478 | "display_name": "Python 3",
479 | "language": "python",
480 | "name": "python3"
481 | },
482 | "language_info": {
483 | "codemirror_mode": {
484 | "name": "ipython",
485 | "version": 3
486 | },
487 | "file_extension": ".py",
488 | "mimetype": "text/x-python",
489 | "name": "python",
490 | "nbconvert_exporter": "python",
491 | "pygments_lexer": "ipython3",
492 | "version": "3.6.7"
493 | },
494 | "toc": {
495 | "base_numbering": 1,
496 | "nav_menu": {},
497 | "number_sections": true,
498 | "sideBar": true,
499 | "skip_h1_title": false,
500 | "title_cell": "Table of Contents",
501 | "title_sidebar": "Contents",
502 | "toc_cell": true,
503 | "toc_position": {},
504 | "toc_section_display": true,
505 | "toc_window_display": true
506 | },
507 | "varInspector": {
508 | "cols": {
509 | "lenName": 16,
510 | "lenType": 16,
511 | "lenVar": 40
512 | },
513 | "kernels_config": {
514 | "python": {
515 | "delete_cmd_postfix": "",
516 | "delete_cmd_prefix": "del ",
517 | "library": "var_list.py",
518 | "varRefreshCmd": "print(var_dic_list())"
519 | },
520 | "r": {
521 | "delete_cmd_postfix": ") ",
522 | "delete_cmd_prefix": "rm(",
523 | "library": "var_list.r",
524 | "varRefreshCmd": "cat(var_dic_list()) "
525 | }
526 | },
527 | "types_to_exclude": [
528 | "module",
529 | "function",
530 | "builtin_function_or_method",
531 | "instance",
532 | "_Feature"
533 | ],
534 | "window_display": false
535 | }
536 | },
537 | "nbformat": 4,
538 | "nbformat_minor": 2
539 | }
540 |
--------------------------------------------------------------------------------
/Images/5_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/Images/5_2.png
--------------------------------------------------------------------------------
/Images/Preprocessing_pipelines.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/Images/Preprocessing_pipelines.PNG
--------------------------------------------------------------------------------
/Images/architecture.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/Images/architecture.PNG
--------------------------------------------------------------------------------
/Images/example_preprpcessed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/Images/example_preprpcessed.png
--------------------------------------------------------------------------------
/Images/figures_forreport.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/Images/figures_forreport.png
--------------------------------------------------------------------------------
/Images/overlay_val14.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/Images/overlay_val14.png
--------------------------------------------------------------------------------
/Instructios_How_to_run.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/Instructios_How_to_run.pdf
--------------------------------------------------------------------------------
/MISA_Project_PreProcesing_Step(1)_Registration.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "toc": true
7 | },
8 | "source": [
9 | "Table of Contents
\n",
10 | ""
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "metadata": {},
16 | "source": [
17 | "This notebook performs the 1st preprocessing step for the MISA project: registering the training and validation data to\n",
18 | " the MNI 1mm Template (MNI152_T1_1mm_Brain.nii.gz) "
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {
24 | "heading_collapsed": true
25 | },
26 | "source": [
27 | "# Import Libraries"
28 | ]
29 | },
30 | {
31 | "cell_type": "code",
32 | "execution_count": 1,
33 | "metadata": {
34 | "hidden": true
35 | },
36 | "outputs": [
37 | {
38 | "name": "stderr",
39 | "output_type": "stream",
40 | "text": [
41 | "C:\\Users\\Fakrul-IslamTUSHAR\\Anaconda2\\envs\\nnet\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
42 | " from ._conv import register_converters as _register_converters\n"
43 | ]
44 | }
45 | ],
46 | "source": [
47 | "import os\n",
48 | "import numpy as np\n",
49 | "import nibabel as nib\n",
50 | "import matplotlib.pyplot as plt\n",
51 | "from sklearn.cluster import KMeans\n",
52 | "from numpy.linalg import inv, det, norm\n",
53 | "from math import sqrt, pi\n",
54 | "from functools import partial\n",
55 | "from scipy.spatial.distance import dice\n",
56 | "import time\n",
57 | "import operator\n",
58 | "import matplotlib.pyplot as plt\n",
59 | "import SimpleITK as sitk\n",
60 | "import pandas as pd\n",
61 | "import seaborn as sns\n",
62 | "%matplotlib inline"
63 | ]
64 | },
65 | {
66 | "cell_type": "markdown",
67 | "metadata": {
68 | "heading_collapsed": true
69 | },
70 | "source": [
71 | "# Reading the Training and validation Data"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": 2,
77 | "metadata": {
78 | "hidden": true
79 | },
80 | "outputs": [
81 | {
82 | "name": "stderr",
83 | "output_type": "stream",
84 | "text": [
85 | "C:\\Users\\Fakrul-IslamTUSHAR\\Anaconda2\\envs\\nnet\\lib\\site-packages\\ipykernel_launcher.py:5: FutureWarning: Method .as_matrix will be removed in a future version. Use .values instead.\n",
86 | " \"\"\"\n"
87 | ]
88 | }
89 | ],
90 | "source": [
91 | "mylist = pd.read_csv(\n",
92 | " \"TrainAndValidation.csv\",\n",
93 | " dtype=object,\n",
94 | " keep_default_na=False,\n",
95 | " na_values=[]).as_matrix()"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": 3,
101 | "metadata": {
102 | "hidden": true
103 | },
104 | "outputs": [
105 | {
106 | "name": "stdout",
107 | "output_type": "stream",
108 | "text": [
109 | "['1'\n",
110 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/TrainingValidationTestSets/Training_Set/IBSR_01/IBSR_01.nii'\n",
111 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/TrainingValidationTestSets/Training_Set/IBSR_01/IBSR_01_seg.nii']\n"
112 | ]
113 | }
114 | ],
115 | "source": [
116 | "print(mylist[0])"
117 | ]
118 | },
119 | {
120 | "cell_type": "markdown",
121 | "metadata": {
122 | "heading_collapsed": true
123 | },
124 | "source": [
125 | "# Function For registration"
126 | ]
127 | },
128 | {
129 | "cell_type": "markdown",
130 | "metadata": {
131 | "hidden": true
132 | },
133 | "source": [
134 | "Helper functions for the registration"
135 | ]
136 | },
137 | {
138 | "cell_type": "code",
139 | "execution_count": 4,
140 | "metadata": {
141 | "hidden": true
142 | },
143 | "outputs": [],
144 | "source": [
145 | "from ipywidgets import interact, fixed\n",
146 | "from IPython.display import clear_output\n",
147 | "\n",
148 | "# =============================================================================\n",
149 | "# Function Definitions\n",
150 | "# =============================================================================\n",
151 | "\n",
152 | "# Callback invoked by the interact IPython method for scrolling through the image stacks of\n",
153 | "# the two images (moving and fixed).\n",
154 | "def display_images(fixed_image_z, moving_image_z, fixed_npa, moving_npa):\n",
155 | " # Create a figure with two subplots and the specified size.\n",
156 | " plt.subplots(1,2,figsize=(10,8))\n",
157 | " \n",
158 | " # Draw the fixed image in the first subplot.\n",
159 | " plt.subplot(1,2,1)\n",
160 | " plt.imshow(fixed_npa[fixed_image_z,:,:],cmap=plt.cm.Greys_r);\n",
161 | " plt.title('fixed image')\n",
162 | " plt.axis('off')\n",
163 | " \n",
164 | " # Draw the moving image in the second subplot.\n",
165 | " plt.subplot(1,2,2)\n",
166 | " plt.imshow(moving_npa[moving_image_z,:,:],cmap=plt.cm.Greys_r);\n",
167 | " plt.title('moving image')\n",
168 | " plt.axis('off')\n",
169 | " \n",
170 | " plt.show()\n",
171 | "\n",
172 | "# Callback invoked by the IPython interact method for scrolling and modifying the alpha blending\n",
173 | "# of an image stack of two images that occupy the same physical space. \n",
174 | "def display_images_with_alpha(image_z, alpha, fixed, moving):\n",
175 | " img = (1.0 - alpha)*fixed[:,:,image_z] + alpha*moving[:,:,image_z] \n",
176 | " plt.imshow(sitk.GetArrayViewFromImage(img),cmap=plt.cm.Greys_r);\n",
177 | " plt.axis('off')\n",
178 | " plt.show()\n",
179 | " \n",
180 | "# Callback invoked when the StartEvent happens, sets up our new data.\n",
181 | "def start_plot():\n",
182 | " global metric_values, multires_iterations\n",
183 | " \n",
184 | " metric_values = []\n",
185 | " multires_iterations = []\n",
186 | "\n",
187 | "# Callback invoked when the EndEvent happens, do cleanup of data and figure.\n",
188 | "def end_plot():\n",
189 | " global metric_values, multires_iterations\n",
190 | " \n",
191 | " del metric_values\n",
192 | " del multires_iterations\n",
193 | " # Close figure, we don't want to get a duplicate of the plot latter on.\n",
194 | " plt.close()\n",
195 | "\n",
196 | "# Callback invoked when the IterationEvent happens, update our data and display new figure. \n",
197 | "def plot_values(registration_method):\n",
198 | " global metric_values, multires_iterations\n",
199 | " \n",
200 | " metric_values.append(registration_method.GetMetricValue()) \n",
201 | " # Clear the output area (wait=True, to reduce flickering), and plot current data\n",
202 | " clear_output(wait=True)\n",
203 | " # Plot the similarity metric values\n",
204 | " plt.plot(metric_values, 'r')\n",
205 | " plt.plot(multires_iterations, [metric_values[index] for index in multires_iterations], 'b*')\n",
206 | " plt.xlabel('Iteration Number',fontsize=12)\n",
207 | " plt.ylabel('Metric Value',fontsize=12)\n",
208 | " plt.show()\n",
209 | " \n",
210 | "# Callback invoked when the sitkMultiResolutionIterationEvent happens, update the index into the \n",
211 | "# metric_values list. \n",
212 | "def update_multires_iterations():\n",
213 | " global metric_values, multires_iterations\n",
214 | " multires_iterations.append(len(metric_values))\n",
215 | " "
216 | ]
217 | },
218 | {
219 | "cell_type": "markdown",
220 | "metadata": {
221 | "heading_collapsed": true
222 | },
223 | "source": [
224 | "# Performing Registration"
225 | ]
226 | },
227 | {
228 | "cell_type": "code",
229 | "execution_count": 10,
230 | "metadata": {
231 | "hidden": true
232 | },
233 | "outputs": [],
234 | "source": [
235 | "# Defining the output folders\n",
236 | "Output_Registered_image_path='C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/'\n",
237 | "Output_Registered_label_path='C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/'\n",
238 | "Output_Registered_transformation_path='C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/'"
239 | ]
240 | },
241 | {
242 | "cell_type": "code",
243 | "execution_count": 12,
244 | "metadata": {
245 | "hidden": true
246 | },
247 | "outputs": [
248 | {
249 | "data": {
250 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZcAAAENCAYAAADDmygoAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3XucVXW9//HXhxmQmyjIRRQQFJg0RZQRcRjwilrHwguWpkmdjDynTp1j+NOysuOlvJR6Op0stBQt01LyVqaIKCNxV0cRNBARkYugcpU7n98f37VlGPee2TOz9qy997yfj8d+rL3XrLXXZ7ayP/O9rM/X3B0REZE4tUo6ABERKT5KLiIiEjslFxERiZ2Si4iIxE7JRUREYqfkIiIisVNyERGR2Cm5iIhI7JRcREQkdqVJB5CUrl27et++fZMOQ0SkoMybN2+tu3er77gWm1z69u3L3Llzkw5DRKSgmNnb2RynbjEREYmdkouIiMROyUVERGKn5CIiIrFTchERkdgpuYiISOyUXBpo5Uo48URYtSrpSESkLvq3mqwWe59LY113HbzwgnPttcavfpV0NCKSSfi3CtdeS+7+re7aBS++CO+/Dx06QPv2e287dIB99gGzcOyGDfDhh+Gxbt3e2+3boVcv6NMHDjkkPG/TpmHx7NwJ7lBaGq6ZIHP3RANISnl5uTfkJsp27WDr1k/ub9sWtmyJMTARaZKM/1bZypbeA6FVKygpCds2bWDQIBg2LDwGDw7JoC5LlsDkyeExZUpIDnVp1WrPF0VDvm/N4KCD9iSbQw4J71M7KdV8vnnz3tctKQmJprR07+fTpsGAAdnHsldYNs/dy+s7Ti2XLC1ZAuPHwyN/2s5HO9vQvp1zzrnGz36WdGQiUtOSJTD+O9t55M87+IgOtC/Zxjl9X+JnQ/8EbU8LLYjdu8Nj82aYPh0eeCCc3KYNHHvsnmQzbBjstx9MnQpPPx0SyptvhmN794Zzz4XTTw8JYPNm+Oijvbep51u2hFZM586w//5hW/t5aSksXw5vvw3LloVt6vmcOfDww7BjB3TqtPe5Awbseb7ffiGp7NoVWjE7d+79PPV6331z/t9BySVLPXuG/6Zbd7WmLVvYurUtnTrBgQcmHZmI1NSzJ3Ta/j5b6U7b1rvYumsfOp0+jAN/NSzzSe++C7NmwcyZ4fGb38Dtt+99TMeOcPLJ8J3vhIQycGD8XU/9+4dHOrt3h5ZPSUm818wRJZcGWL0aLrtkM+MmDmfCCRNZuWpw0iGJSBqrl37EZfyacU+MYcKjPVi5sp4TDj44tELOPTe83rEDXn01JJr334eTTgqtmNatcx16Zq0Ka/5V4mMuZtYFeBDoCywFvuDuH6Y5bhfwavRymbt/Ptp/D3AisD762Vfc/eX6rtvQMZe9HHYYHH00TJrUuPNFJLfGjQvdSGvXJj6wXWyyHXPJh1R4FTDF3QcAU6LX6Wxx98HR4/O1fnZFjZ/Vm1iarLIyTENpoZMhRPJedXX4A1CJJTH5kFxGAxOj5xOBsxOMJTsjRsCaNfDPfyYdiYjUtmtX6NI6+uikI2nR8iG59HD3lQDRtnuG49qa2Vwzm2lmtRPQDWb2ipndZmb1zCOMQWVl2FZV5fxSItJAixeH2VlKLolqluRiZs+Y2fw0j9ENeJs+UT/fl4DbzeywaP/3gE8BxwFdgCvriGNclKDmrlmzprG/DpSVQdeuoWtMRPJLdXXYKrkkqllmi7n7aZl+Zmarzaynu680s57AexneY0W0XWJmzwHHAG+mWj3ANjO7GxhfRxwTgAkQBvQb9cuEoPeMu4hIfqmuDveMHHFE0pG0aPnQLfYYMDZ6PhZ4tPYBZtY51d1lZl2B4cCC6HXPaGuE8Zr5zRBzGHd5803qn+MoIs2quho+9an677SXnMqH5HIjMMrMFgGjoteYWbmZ3RUdczgw18yqganA
je6+IPrZH8zsVcI05a7A9c0SdWrcRa0XkfySmikmiUr8Jkp3fx84Nc3+ucCl0fN/AEdlOP+UnAaYyTHHhOJ0VVVw/vmJhCAitXzwQSihouSSuHxouRSm1q3DHbtquYjkDw3m5w0ll6YYMSL8z7xhQ9KRiAgoueQRJZemqKwMxeRmzEg6EhGBkFx69AgPSZSSS1MMGxYqlOpmSpH8oMH8vKHk0hQdO4aBfY27iCRvxw547TUllzyh5NJUlZVhHYht25KORKRle+ONsFSwkkteUHJpqhEjwpqqL76YdCQiLZsG8/OKkktTDR8etuoaE0lWdXVYprisLOlIBCWXpuvRIyx3qkF9kWRVV8OnP53sapHyMSWXOFRWwvTpYVqyiCRDM8XyipJLHEaMCGUnFi5MOhKRlmn16vBQcskbSi5xUBFLkWRpMD/vKLnE4bDDwtiLxl1EkqHkkneUXOJgFrrG1HIRSUZ1NfTqBV26JB2JRJRc4lJZCW+/De+8k3QkIi2PBvPzjpJLXEaMCFu1XkSa17Zt8PrrSi55JvHkYmZdzGyymS2Ktp0zHNfHzJ42s4VmtsDM+kb7+5nZrOj8B82sTXPG/7FBg0KtMY27FIWVK+HEE2HVqqQjkXotWAA7dyq55JnEkwtwFTDF3QcAU6LX6dwL3OLuhwNDgfei/TcBt0Xnfwh8LcfxpldaChUVarkUieuuC/8pr7026UikXhrMz0v5kFxGAxOj5xOBs2sfYGZHAKXuPhnA3Te5+0dmZsApwEN1nd9sKith/nz48MPEQpCmadcuzM+4445wT+wdd4TX7dolHZlkVF0d/gP17590JFJDPiSXHu6+EiDadk9zzEBgnZlNMrOXzOwWMysBDgDWufvO6LjlwMHNEnU6I0aAO/zjH4mFIE2zZAl86UKnPR8B0L49XHQRvPVWwoFJZtXVcNRRYW0lyRvNklzM7Bkzm5/mMTrLtygFRgDjgeOAQ4GvAJbmWK8jjnFmNtfM5q5Zs6aBv0UWhg4NdY3UNVawevaETh12sZV9aMsWtm51OnWCAw9MOjJJy10zxfJUsyQXdz/N3Y9M83gUWG1mPQGi7Xtp3mI58JK7L4laKY8AxwJrgf3NrDQ6rhewoo44Jrh7ubuXd+vWLc5fMWjfHoYM0aB+gVv97g4u49fMZBiXnfSGBvXz2bvvhtJLSi55Jx+6xR4DxkbPxwKPpjlmDtDZzFIZ4RRggbs7MBUYU8/5zaeyEubMCWu8SEGadOvb/B/f4mhe4f9Kvs2kSUlHJBlpMD9v5UNyuREYZWaLgFHRa8ys3MzuAnD3XYQusSlm9iqhO+zO6PwrgcvNbDFhDOa3zRz/3iorw2p4c+YkGoY0wbp1YVteDlOmwIqMjWFJWiq5DBqUbBzyCYknF3d/391PdfcB0faDaP9cd7+0xnGT3X2Qux/l7l9x9+3R/iXuPtTd+7v7+e6e7HrDlZVhetFtt4U1vaXwrF8ftt/8Zpgy9sc/JhuPZFZdDf36QadOSUcitSSeXIrOAQfAzTfDX/4CX/xiuHtYCksquRx3HBx/PNx3X7LxSGYazM9bSi65MH48/OIXIcGccw5s2ZJ0RNIQqW6x/faDiy8OX2CvvppsTPJJH30EixYpueQpJZdc+Y//gAkT4O9/h7POgs2bk45IspVquey3X2h9lpbC73+fbEzySfPnh25LJZe8pOSSS1//Otx7Lzz3HJxxBmzYkHREko1166BVq1Arrls3OPNM+MMftIx1vtFMsbym5JJrF18MDz4Is2bBaaeFOfmS39avD60Wi+7R/fKXw/0Uzz2XaFhSS3U17Lsv9O2bdCSShpJLcxgzBiZNCv8YTj4Z3kt3n6jkjXXrYP/997z+3OfCbCR1jeWX6uowBbmVvsbykf6rNJfPfQ6eeCIMQJ50ku6dyGeplktKu3bhD4SHHgqDyJI8d3jlFXWJ5TEll+Y0ahQ8+WRYrXLkSI3B5Kv16/duuUDo3ty4ER5/PJmY
ZG9Ll4Z/P0oueUvJpbmdeCI88AC8+SY880zS0Ug669bt3XKB8N+tVy/d85IvNJif95RckjBqVOhqmTYt6UgkndrdYhD69S+6KEwtj6Oi9tq1MHky3HorvPhi09+vPp6xWHhhevnlMOHiyCOTjkQyUHJJQps2MGyYqifnq9oD+ikXXwy7doWWZ7bc4e234ZFH4Jpr4POfh969wxTn00+H7343lAzKZSt2xYpQaWDYsHBvSKF75ZWQlCsqoEOHpKORDJRckjJyZPjrK3XDnuSH3bvD2ErtlguEv5IHD86ua2zHDrj6aujaNUyVPeccuP56WLw4/Le/5ZaQUN54I6ygeNZZoVUUt/nzQ1JZsCB0xQ4ZAj/9aVhzvhAtXw6f/WyYvdeQJC/NTsklKSNHhi8yrVqZXzZuDK2NdC0XCK2XOXNCUsjknXfCjMCf/CRMPf/Vr2DGjPDeCxaEGzLHj4dTT4WBA2HqVDjiCBg9Ot4JA88+C8OHh0RSVRWuPXo0fP/7cMIJ8Npr8V2rOaxfHxLLxo3wt7+FMTDJW0ouSRk2LJQV0bhLfqlZVyydCy8M4y+Z7nn5619D6+aVV+D++8P05X/7t/Dfu3379OcccEAo7X/00XDuuaEmXVPdd1+oLNC7N8ycCcccE7ri/vSn8Fi6FI49tnBaMdu3w3nnwcKF8PDDKrFfAJRcktK+fVgvROMu+aVmXbF0DjootDh+//u9B8l37ID/9/9C91bv3jBvXkhE2ercOQzwH3ccnH9+SACN4Q7XXQeXXBLGcl54Afr02fuY888PrZbPfz60YioqQqsmX7mHUkpTpsBdd4VKF5L3lFySNHIkzJ6tqsn5JNVyydQtBqEczNKlMH16eL1sWZiqfMstcNlloaUwcGDDr73ffvDUU6HL6sILQ/dZQ+zYEb6Ef/SjEOPf/5759+jeHf7851Ca6K23Qsvmpz8NU3zfeCNMQli9OiTbbduSnW32ox+FGn3XXgtjx9Z/vOQHd2+RjyFDhnjinnjCHdynTk06Ekl57LHw32T27MzHbNzo3r69+ze+4f744+5durjvu6/7Aw/EE8PGje4nneRu5n7PPdmds369+xlnhNh/+EP33buzv97q1e7nnRfOrevRtq37gQe6jxnj/qtfub/xRsOu0xh33hmufemlub+WZAWY61l8x5YmndzMrAvwINAXWAp8wd0/THNcH+AuoDfgwGfdfamZ3QOcCKSmXX3F3V/OfeQxGD48zNWfNi0MAEvyUt1idbVcOnYMs7/uvht+85vwV/+DD8KAAfHE0LFjGLsZPRq++tXQcvjCF2DTprB0Q81t6vn//E+YGXbXXfC1rzXseqlWzMyZsGoVbN2a+bFiRZiA8NBD4dxeveCUU0JX4amnwsEH73nfHTvC5Ia33tr7sWxZmEF30knhcdhhe4qE1vTkk6EleOaZYVJEumMkf2WTgXL5AG4GroqeXwXclOG454BR0fOOQPvo+T3AmIZeNy9aLu7uRx/tftppSUchKf/7v+Ev5dWr6z5u6tTQsvj3f3ffsiU3sWzZ4v6Zz9TfooDQcvr733MTR227d4dWyx13hFbMAQfsiWPgQPeTT3bv29e9pGTvGEtKwv4RI9y7d9+z/6CD3L/0Jfff/GZPa2jePPcOHdyPOcZ9w4bm+b0kKxRKywUYDZwUPZ9ISCJX1jzAzI4ASt19MoC7b2rG+HJr5Ej47W/DX3mtWycdjdQ3oJ9y0klhfCaXa7e3bRtmjv3ud6HV0KFDaNV07LjneWrbrVvz3VBoFsaUBg4MLYvdu8PsuGefDYPu69aFVnm/fuHRt2/Y9u4dZkhCSCuvvw7PPx+WMnj22TC7DuDAA8PssAMOCC24ffdtnt9LYmUhESUYgNk6d9+/xusP3b1zrWPOBi4FtgP9gGcIrZ1dUbfYCcA2YEq0v96F68vLy33u3Lnx/SKN9dBDYfbOzJnhLmpJ1hVXwC9/qUkWzc09VAx/7rmQcBYvDt2ORxyR
dGRSi5nNc/fy+o5rlpaLmT0DHJjmR1dn+RalwAjgGGAZYYzmK8Bvge8Bq4A2wARCq+faDHGMA8YB9Kk9PTMpI0aE7bRpSi75IF1dMcm9mq2hceOSjkZi0CxTkd39NHc/Ms3jUWC1mfUEiLbpVtJaDrzk7kvcfSfwCHBs9N4ro67AbcDdwNA64pjg7uXuXt6tW7e4f83G6dEj/IPS/S75IV25fRFpsHy4z+UxIDV5fSzwaJpj5gCdzSyVEU4BFsDHCQkzM+BsoPAq840cGZKL1mhPXrpy+yLSYPmQXG4ERpnZImBU9BozKzezuwDcfRcwHphiZq8CBtwZnf+HaN+rQFfg+maOv+lGjgxfasVQsbbQqeUiEousx1zM7HBgDHCgu3/TzD4FtHH3V5oSgLu/D5yaZv9cwiB+6vVk4BMFhdz9lKZcPy+MHBm2VVWqmZS0des+WS5FRBosq5aLmZ0PPA8cDHw52t0RuDVHcbUshxwSpmmqiGXyNKAvEotsu8WuBU5398uAXdG+akBrjMZl5MiQXBKeGt7iqVtMJBbZJpfuhGQCofRKaqtvwriMHBlKbyxenHQkLdf27eH+FrVcRJos2+Qyjz3dYSkXALPjDacFqznuIsnIpq6YiGQl2+TybeB6M3se6GBmTwHXAf+Vs8hamrKyUMJD4y7JqW+hMBHJWlazxdz99Wh22FnAE8A7wBNFVeMraWbhbn0ll+RkW1dMROqV9VRkd/8IaOTyeJKVkSNh0iRYvlzrgydB3WIisckquZhZFRkG7919ZKwRtWQ1x10askSuxEPdYiKxybblclet1wcCXwN+H284LdygQaGE+7RpSi5JUMtFJDbZjrlMrL3PzB4mFIpMW4FYGqGkJKyDoXGXZKjlIhKbptQWe5c05VikiUaOhAULYO3apCNpedavDxMrcrkAmEgLke2Yy7/W2tUeOBeYGXtELV1q3OWFF+Dss5ONpaVZty6setgqH+q5ihS2bMdcat9AuRn4B3BbvOEI5eVhedtp05RcmpvqionEJtsxl5NzHYhE2rSBYcM07pIE1RUTiU3G5GJmh2bzBu6+JL5wBAhdY9dfDxs2qP+/OWmhMJHY1NVyWUy4t8XqOMaBklgjkpBcdu+GGTPgjDOSjqblWL8eDj446ShEikLGkUt3b+XuJdE206PJicXMupjZZDNbFG07pznmZDN7ucZjq5mdHf2sn5nNis5/0MzaNDWmxA0bBqWl6hprbmq5iMQmH6bFXAVMcfcBwJTo9V7cfaq7D3b3wcApwEfA09GPbwJui87/kHBzZ2Hr0AGGDFFyaW4a0BeJTbYrUZaa2bfN7GEze97MpqUeMcQwGkjdpDkRqG+K1BjgSXf/yMyMkGweasD5hWHECJg9G7ZuTTqSlsFdA/oiMcq25XIb8A1gGjAEeJiwgNizMcTQw91XAkTb7vUcfwHwx+j5AcA6d98ZvV5OWIq58FVWhsWr5s1LOpKWYfNm2LVLLReRmGSbXM4FPuPu/wPsjLZnA1lNUTazZ8xsfprH6IYEa2Y9gaOAp1K70hyWcXVMMxtnZnPNbO6aNWsacunmV1ERti+8kGwcLYXqionEKtubKNsT1nAB2GJm7aM1Xo7J5mR3Py3Tz8xstZn1dPeVUfJ4r463+gLwF3ffEb1eC+xvZqVR66UXsKKOOCYAEwDKy8vze4nmbt3CAmLTpycdScugumIiscq25bIQOC56Phf4sZn9gFBfrKkeA8ZGz8cCj9Zx7IXs6RLD3R2YShiHyeb8wlJZGZLL7t1JR1L81HIRiVW2yeU7QGpc43LgWOBzwLgYYrgRGGVmi4BR0WvMrNzMPi71b2Z9gd7A87XOvxK43MwWE8ZgfhtDTPmhshI++ABefz3pSIqfVqEUiVWd3WJmtr+7r3P3Oal97r4IyNjN1VDu/j5wapr9c4FLa7xeSprB+qhCwNC44skrw4eH7fTpcMQRycZS7NQtJhKr+louq8zsz2b2L2amO/Gb
W//+0L27BvWbg7rFRGJVX3IZCiwlDIKvMLPbzOzonEclgVlovSi55J5aLiKxqjO5uPsr7n4FYazjEqAbMN3MXjGz75rZgc0RZItWWQlLlsDKlUlHUtzWr4fWraFdu6QjESkKWQ3ou/tud3/K3S8GegA/A74FLMtlcEJILqApybmWKv1iddVpFZFsNai2mJkdBHyTMEPrAOAPuQhKajjmmPDXtLrGcktFK0ViVW9yMbN2ZnaxmU0G3gJOJ0wXPtDdv5rrAFu81q3h+OPVcsk11RUTiVWdycXMJgKrgasJNyv2d/fT3P0+d/+oOQIUwqD+Sy/Bpk1JR1K81HIRiVV9LZctwOnufri7/8Td36nneMmFyspQVHHWrKQjKV5quYjEqr7ZYpe5+8zmCkYyOOGEMNCsrrHc0VouIrHKh8XCpD777QdHHaVB/Vxat04tF5EYKbkUispKmDEDdu6s/1hpmJ07w3iWWi4isVFyKRSVleEL8NVXk46k+GzYELZKLiKxyXaZ40vMbFCtfUeb2ZdzE5Z8QqqIpbrG4qe6YiKxy7blch17FgtLeQe4Pt5wJKM+faB3bw3q54LK7YvELtvk0gnYUGvfekB/6jWn4cOhqgo8vxfRLDipopVquYjEJtvksgA4r9a+cwgrVEpzqayEFSvg7beTjqS4qOUiErs6Fwur4Urgb2b2ReBNoD9hga/P5iowSaNmEcu+fRMNpaio3L5I7LKtivwCcCQwB+gAzAaOdPcmDwCYWRczm2xmi6Jt5zTHnGxmL9d4bDWzs6Of3WNmb9X42eCmxpS3jjwSOnXSoH7cNKAvErtsWy64+zKi9e1jdhUwxd1vNLOrotdX1rr2VGAwhGQELAaernHIFe7+UA5iyy8lJeFufSWXeKVaLp06JRuHSBHJmFzMbIK7j4ue3wekHUV290uaGMNo4KTo+UTgOWoll1rGAE+22MKZw4fDNdfAhx9C50808qQx1q+HDh1CBWoRiUVd3WJv1Xi+mDDWku7RVD3cfSVAtO1ez/EXAH+ste+GaHXM28xsn0wnmtk4M5trZnPXrFnTtKiTUlkZZovNmJF0JMVDdcVEYpex5eLuPwUwsxLCPS33u/vWxlzEzJ4B0i2JfHUD36cncBTwVI3d3wNWAW2ACYRWz7Xpznf3CdExlJeXF+Z83qFDobQ0dI19VvMpYqG6YiKxq3fMxd13mdmt7v67xl7E3U/L9DMzW21mPd19ZZQ83qvjrb4A/MXdd9R479Ti8tvM7G5gfGPjLAgdOoTVKXUzZXzUchGJXbb3uTxuZp/LUQyPAWOj52OBR+s49kJqdYlFCQkzM+BsYH4OYswvlZUwezZs25Z0JMVBC4WJxC7b5NIWeMjMnjOz+8zs3tQjhhhuBEaZ2SJgVPQaMys3s7tSB5lZX6A38Hyt8/9gZq8CrwJdaQklaSorYetWePHFpCMpDlooTCR22U5Fnk+OWgTu/j7hhsza++cCl9Z4vRQ4OM1xp+QirryWKmI5fXqYmixNo24xkdhlm1x+4+6rau80s3SD9JJrPXpA//5hUH98cQ8xNQsN6IvELttusX9m2L8grkCkgYYPDy0XFbFsmq1bYft2tVxEYpZtcrFP7DDrBOyONxzJWmUlrF0L/8yU9yUrqismkhN1douZ2TuEO/PbmdmyWj8+gE/ezCjNJVXE8oUXoKws2VgKmeqKieREfWMuFxNaLX8Daq466cBqd38jV4FJPcrKQvmXWbPga19LOprCpXL7IjlRZ3Jx9+cBzKxri63lla/M4PDD4Q3l9ybRQmEiOZHtmMsuM7vBzJaY2XoAMzvdzL6Vw9ikPmVlSi5NpZaLSE5km1xuJ6znchF7qiO/BvxbLoKSLJWVwerVe74gpeE0oC+SE9kml7OBL7n7DKIZYu7+LmluapRmNHBg2GrGWONpQF8kJ7JNLtupNT5jZt2A92OPSLKXmiWmrrHGW78eWrWCjh2TjkSkqGSbXP4MTDSzfvBxschf
Ag/kKjDJwmGHhS9GJZfGSxWttE/cyiUiTZBtcvk+sJRQHHJ/YBGwAvjv3IQlWdlnH+jXT8mlKVRXTCQnsqot5u7bgf8E/jPqDlvrrrojeUEzxppGdcVEcqK+O/T7ZPhRb4u6Edy99p370pzKymDqVNi9O3SRScOo5SKSE/W1XJayZ+pxuk5pB0riDEgaaOBA2LIFli+HPpn+FpCM1q+HQw5JOgqRolPfn7qvEMZXfgAcArSu9WiT0+ikfpox1jTqFhPJiTqTi7sPBsYAXYAXCDXGLgDauPsud98VRxBm1sXMJpvZomjbOcNxN5vZa2a20Mx+ES1tjJkNMbNXzWxxzf0tgpJL06hbTCQn6u2kd/f57n4F0A+4FTgLWGlmx8YYx1XAFHcfAEyJXu/FzCqA4cAgQrWA44ATox/fAYwDBkSPM2OMLb/17Bnu0VByabjdu7XEsUiONGQEeADhy/wE4CXgwxjjGA1MjJ5PJFQEqM2BtoSuuH0I3XKro3tuOrn7jGgG270Zzi9OZqH1orv0G27TprDYmlouIrGrM7lE3VXfNLPZwCPAJmCku5/s7m/FGEcPd18JEG271z4gKj0zFVgZPZ5y94WEEjTLaxy6nJZWlkbTkRtHRStFcqa+2WIrgLeA+4CZ0b7+ZtY/dYC7P5vNhczsGeDAND+6Osvz+wOHA72iXZPNbCSwJc3hae/BMbNxhO4z+hTTzKqBA+GPfwyzxtq1SzqawqFy+yI5U19yWUXoivp69KjNgUOzuZC7n5bpZ2a22sx6uvvKqJvrvTSHnQPMdPdN0TlPAsMIia9XjeN6EZJiuhgmABMAysvLi+cm0LKy0L2zaBEMGpR0NIVDLReRnKlvtlhfd+9XxyOrxJKFx4Cx0fOxwKNpjlkGnGhmpWbWmjD+szDqRttoZsOiWWKXZDi/eGnGWOOo5SKSM/lyS/eNwCgzWwSMil5jZuVmdld0zEPAm4T6ZtVAtbs/Hv3s34C7gMXRMU82Y+zJS5XeV3JpGLVcRHImq9piuebu7wOnptk/F7g0er4L+EaG8+cSpie3TB06QK9emjHWUGq5iORMvrRcpKk0Y6zh1HIRyRkll2IxcGBILipWnb3168OyBfvsk3QkIkVHyaVYlJWFL8tLtYBsAAATp0lEQVT30k20k7RUV0wkZ5RcioVmjDWc6oqJ5IySS7FIJRcN6mdPLReRnFFyKRZ9+oSxA7VcsqeWi0jOKLkUi5ISGDBAyaUhlFxEckbJpZikZoxJdtQtJpIzSi7FpKwMliyBHTuSjqQwqOUikjNKLsWkrAx27gwJRuq2Ywd89JFaLiI5ouRSTDRjLHu6O18kp5RcionudcleKrmo5SKSE0ouxaRzZ+jWTcklG6milWq5iOSEkkuxKeYZY6+9BuedB7NnN/291C0mklNKLsWmGKsj794Nt90GQ4bApEnwu981/T1Vbl8kp5Rcik1ZWShemfryLHTvvAOjRsHll8Ppp8MJJ8A//tH091XLRSSnlFyKTbHMGHOH+++Ho46CWbPgzjvh0UfhzDNh/vw9yaGxNKAvklOJJxcz62Jmk81sUbTtnOG4m83sNTNbaGa/MDOL9j9nZm+Y2cvRo3vz/gZ5phhmjH3wAVxwAVx0ERxxBFRXw6WXghlUVITEM2tW066Ratntu2/T4xWRT0g8uQBXAVPcfQAwJXq9FzOrAIYDgwjLGR8HnFjjkIvcfXD0aNkLmhx6aKgzVqjJ5emnQ2tl0iS44QaYNg0OO2zPz4cOhVatmt41tn59SCwlJU17HxFJKx+Sy2hgYvR8InB2mmMcaAu0AfYBWgOrmyW6QtOmDfTrV5jJ5Wc/gzPOCOMgs2bB978PpaV7H9OpExx5ZNOTi+qKieRUPiSXHu6+EiDafqJby91nAFOBldHjKXdfWOOQu6MusR+musvSMbNxZjbXzOauWbMm3t8inxTijDF3uP12OPlkmDcPjj0287EVFTBzJuza1fjrqa6YSE41S3Ix
s2fMbH6ax+gsz+8PHA70Ag4GTjGzkdGPL3L3o4AR0ePLmd7H3Se4e7m7l3fr1q1pv1Q+KyuDRYvCFN5CsXQpvPtuuI+lXbu6j62ogI0bYcGCxl9v/Xq1XERyqFmSi7uf5u5Hpnk8Cqw2s54A0TbdmMk5wEx33+Tum4AngWHRe78bbTcC9wNDm+N3ymtlZbB1a5jGWyiqqsJ2xIj6j62oCNumdI2tW6eWi0gO5UO32GPA2Oj5WODRNMcsA040s1Iza00YzF8Yve4KEO0/C5jfDDHnt0KcMVZVFVoSRx5Z/7GHHgrduzctuajlIpJT+ZBcbgRGmdkiYFT0GjMrN7O7omMeAt4EXgWqgWp3f5wwuP+Umb0CvAy8C9zZzPHnn0JNLsOHh5lg9UlNSVbLRSRvldZ/SG65+/vAqWn2zwUujZ7vAr6R5pjNwJBcx1hwevQI02wLJbm8916I9atfzf6cigp45JFwbvcG3trkrgF9kRzLh5aLxM0stF4K5S79F14I22zGW1JS4y4zZjT8elu2hEXV1C0mkjNKLsWqkKYjV1VB27ZQXp79OUOGQOvWjesaU7l9kZxTcilWZWWwbFlYyjffVVXB8ceHG0Cz1bZtSDCNSS6qKyaSc0ouxSo1qL9oUbJx1GfjRnjppYZ1iaVUVMCcObB9e8POU8tFJOeUXIpVocwYmzEj3OzZmORywgmwbRu8/HLDzlO5fZGcU3IpVv37h22+J5eqqjD9+IQTGn5uY2+m1EJhIjmn5FKsOnSA3r3zf8ZYVRUcc0zjSt8fdBAcckjDk4taLiI5p+RSzPJ9xti2baH6cWO6xFIqKmD69HDvSrY0oC+Sc0ouxSyVXGp/8b73Hvz1r3DNNfCZz8C55zatwnBjzZsXaqA1NbmsWNGwOmrr1oVS/vUVyBSRRkv8Dn3JobIy2LAhLA+8eDHMnh1mVy1dGn7eqlXoVnrrLXjsMTjnnOaNL1WssrKy8e9Rc9ylT5/szkndnZ95dQYRaSK1XIrZpz4VtuecA1dcERLL0KFwyy3w/PPhS/af/4S+fcNCXc2tqiokwIaWb6lp0CBo375h4y7vvqsuMZEcU8ulmJ18MtxxRxjYP+64zF/il18O3/52+IJOtQRybffuMFYyZkzT3qe0NNyAmW1ymT8fHn8cvvOdpl1XROqklksxKy2Fyy6Df/mXulsH//qv0KVLaNE0l/nzw9hHU8ZbUioqwr0umzfXf+z48aFL7Ac/aPp1RSQjJRcJ05b//d/D2ExzTV1uyOJg9amoCBMS5syp+7inngqPH/4wJFMRyRklFwm+9a1Q2+vWW5vnelVVcPDBYbynqYYNC9u6KiTv2hVaLYceGhKpiOSUkosEPXrAJZfAPfeEqcq55B6Sy4gR8czY6tIlTF6oa9zl7rtDV9xNN8E++zT9miJSp8STi5l1MbPJZrYo2nbOcNxNZjY/enyxxv5+ZjYrOv9BM2tAaV3Zy3e/G4pA/vKXub3OW2+Fe1Pi6BJLSa1Mme5myk2bQlfY8OFw3nnxXVNEMko8uQBXAVPcfQAwJXq9FzP7F+BYYDBwPHCFmXWKfnwTcFt0/ofA15ol6mJUVgaf/zz83/9lNzjeWHGOt6RUVMAHH6QfM7r5Zli1Cn7+c93bItJM8iG5jAYmRs8nAmenOeYI4Hl33xktbVwNnGlmBpwCPFTP+ZKtK64IX9L33JO7a1RVQefO8OlPx/eemYpYLl8e7uG54IIwZVlEmkU+JJce7r4SINqmmzNbDXzGzNqbWVfgZKA3cACwzt13RsctBw5uhpiLV0VFGCC/9dbclYSpqgpdVK1i/N+vrCwkrNrJ5Qc/CL/HT34S37VEpF7NklzM7Jka4yU1H6OzOd/dnwb+BvwD+CMwA9gJpOvjyFjB0MzGmdlcM5u7Zs2aRvwmLYBZaL0sWQKTJsX//qtXh66rOLvEYE/Z/prJ5aWX4N57ww2T/frFez0RqVOzJBd3P83dj0zzeBRYbWY9AaJt
2qlK7n6Duw9291GEpLIIWAvsb2apSgO9gBV1xDHB3cvdvbxbt25x/orFZfTosB7MLbc0rNpwNqZPD9um1BPLpKICFiyADz8McY8fH2aSff/78V9LROqUD91ijwFjo+djgUdrH2BmJWZ2QPR8EDAIeNrdHZgKjKnrfGmgkpIwc2zOHJg2Ld73rqqCtm2hvDze94U94y4zZ4aqz88+Cz/+seqIiSTAPO6/TBsaQEgafwL6AMuA8939AzMrBy5z90vNrC3wYnTKhmj/y9H5hwIPAF2Al4CL3X1bfdctLy/3uXPnxv8LFYstW0KV4WHDQi2uuJSXQ8eO8Nxz8b1nyqZNIZFccQU88kioXzZ/PrRuHf+1RFooM5vn7vX+dZh44Up3fx84Nc3+ucCl0fOthBlj6c5fAgzNZYwtUrt24a79H/84dDUdkfbjb5iNG8M4SK66qTp2DFWSb789rBPzyCNKLCIJyYduMclX3/xmSDI//3k87zdjRmhNxD2YX1NFRUgsJ54Y7tkRkUQouUhmXbvCV78Kv/89rFzZ9PerqtozqytXzjgj1EjTDZMiiVJykbpdfjns3Anf+EbTKyZXVcExx8C++8YTWzpnnQVr18KQIbm7hojUS8lF6nbYYfDf/w1PPx2KQ55zTsNWfUzZtg1mzcptlxiE1kouk5eIZEXJRer3gx/A22/D1VeHqcnDh4exjUmTsruLf/dumDw5jIXkOrmISF5IfCpyUjQVuZE2bw51x269NdzF379/6DobOzaMdbz5ZphdtmABLFwYtq+/HqY2l5aGasi6gVWkYGU7FVnJRRpn1y74y1/CXfyzZ4euqK1bYceOPcf06QOHHx6mMR9xBAwdGqYKi0jBKpj7XKRAlZTAmDFhfZTp00MNry5dQhI5/PAwPqOxD5EWS8lFmsYs1AnLRa0wESlYGtAXEZHYKbmIiEjslFxERCR2Si4iIhI7JRcREYmdkouIiMROyUVERGKn5CIiIrFrseVfzGwN8HYjT+8KrI0xnFwrpHgVa+4UUryFFCsUVrxNjfUQd6+3QGCLTS5NYWZzs6mtky8KKV7FmjuFFG8hxQqFFW9zxapuMRERiZ2Si4iIxE7JpXEmJB1AAxVSvIo1dwop3kKKFQor3maJVWMuIiISO7VcREQkdkouDWRmZ5rZG2a22MyuSjqeupjZUjN71cxeNrO8W3bTzH5nZu+Z2fwa+7qY2WQzWxRtOycZY0qGWH9sZu9Gn+/LZvbZJGNMMbPeZjbVzBaa2Wtm9p1of75+tpnizbvP18zamtlsM6uOYv3vaH8/M5sVfbYPmlmbpGOFOuO9x8zeqvHZDo792uoWy56ZlQD/BEYBy4E5wIXuviDRwDIws6VAubvn5fx7MxsJbALudfcjo303Ax+4+41R8u7s7lcmGWcUV7pYfwxscvefJRlbbWbWE+jp7i+a2b7APOBs4Cvk52ebKd4vkGefr5kZ0MHdN5lZa+AF4DvA5cAkd3/AzH4NVLv7HUnGCnXGexnwhLs/lKtrq+XSMEOBxe6+xN23Aw8AoxOOqWC5+zTgg1q7RwMTo+cTCV8yicsQa15y95Xu/mL0fCOwEDiY/P1sM8WbdzzYFL1sHT0cOAVIfVHn02ebKd6cU3JpmIOBd2q8Xk6e/iOIOPC0mc0zs3FJB5OlHu6+EsKXDtA94Xjq8y0zeyXqNsuLbqaazKwvcAwwiwL4bGvFC3n4+ZpZiZm9DLwHTAbeBNa5+87okLz6Xqgdr7unPtsbos/2NjPbJ+7rKrk0jKXZl8/9isPd/VjgM8A3o64dic8dwGHAYGAl8PNkw9mbmXUEHgb+0903JB1PfdLEm5efr7vvcvfBQC9Cb8bh6Q5r3qgyqx2vmR0JfA/4FHAc0AWIvXtUyaVhlgO9a7zuBaxIKJZ6ufuKaPse8BfCP4R8tzrqg0/1xb+XcDwZufvq6B/ubuBO8ujzjfrXHwb+4O6Tot15+9mmizefP18Ad18HPAcMA/Y3
s9LoR3n5vVAj3jOjrkh3923A3eTgs1VyaZg5wIBoZkgb4ALgsYRjSsvMOkSDo5hZB+B0YH7dZ+WFx4Cx0fOxwKMJxlKn1Bd15Bzy5PONBnF/Cyx091tr/CgvP9tM8ebj52tm3cxs/+h5O+A0whjRVGBMdFg+fbbp4n29xh8ZRhgfiv2z1WyxBoqmQ94OlAC/c/cbEg4pLTM7lNBaASgF7s+3WM3sj8BJhCqtq4FrgEeAPwF9gGXA+e6e+EB6hlhPInTZOLAU+EZqTCNJZlYJVAGvAruj3d8njGPk42ebKd4LybPP18wGEQbsSwh/nP/J3a+N/r09QOhiegm4OGoVJKqOeJ8FuhG6+l8GLqsx8B/PtZVcREQkbuoWExGR2Cm5iIhI7JRcREQkdkouIiISOyUXERGJnZKLSIzMbFM0LbWgRVVzr086DilcSi5SNCwsMXBa9PwrZvZCjq/3nJldWnOfu3d09yU5uNZSM1sd3RCb2nepmT0X97VE4qDkIpJGjVIe+aSUUC69oERLVUgLo+QiRcfMDgd+DZwQdVOti/bvY2Y/M7NlUSvg11FJDMzsJDNbbmZXmtkq4G4z62xmT5jZGjP7MHreKzr+BmAE8MvoGr+M9ruZ9Y+e72dm90bnv21mPzCzVtHPvmJmL0TxfGhh4abP1POr3QKMT5XzqPU7942uXVpj38ctq+h606MKuOvMbImZVUT737GwENrYWm/b1cKiYhvN7HkzO6TGe38q+tkHFhbP+0KNn91jZneY2d/MbDNwchb/2aTIKLlI0XH3hYTFkGZE3VSpL+ObgIGEkiL9CWXRf1Tj1AMJ5TsOAcYR/n3cHb3uA2wBfhld42pCyZJvRdf4VppQ/hfYDzgUOBG4BPhqjZ8fD7xBKClzM/DbqNZTJnMJhQfH1/shpHc88ApwAHA/oVzJcYTP4mJCouxY4/iLgOui+F4G/gAf16qbHL1Hd0KZll+Z2adrnPsl4AZgX8ICVdLCKLlIixB9aX8d+C93/yBalOonhOKjKbuBa9x9m7tvcff33f1hd/8oOv4GQpLI5nolwBeB77n7RndfSigZ/+Uah73t7ne6+y5C/aeeQI963vpHwH+YWbds4qjlLXe/O7reg4QK39dGv+/TwHZCokn5q7tPi2pkXU1oCfYGzgKWRu+1M1ro62H2FG4EeNTdp7v7bnff2ohYpcDlY7+ySC50A9oD82o0DoxQ0C9lTc0vQjNrD9wGnAmkFqra18xKoi/ounQF2gBv19j3NnsvIrUq9cTdP4riqtly+AR3n29mTwBXEarxNsTqGs+3RO9Xe1/N63+8MF60TO4HwEGEltzxqe7GSClwX7pzpWVScpFiVbsi61rCl+en3f3dLM/5LlAGHO/uq8xsMKHirWU4vvb1dhC+iBdE+/oAma7dENcAL7L34lmbo217ILUw2IFNvM7HaxdF3WVdCOuUvAM87+6j6jhXFXFbOHWLSbFaDfSysO4ONRacus3MugOY2cFmdkYd77EvISGtM7MuhC/12tdIe09L1LL5E2Ep2X2jwfDLgd834XdKvfdiQrfWt2vsW0NIXBdbWNb2XwmrODbFZ82sMvoMrwNmufs7wBPAQDP7spm1jh7HRRMpRAAlFylezwKvAavMbG2070pgMTDTzDYAzxBaJpncDrQjtEJmAn+v9fP/AcZEs71+keb8/yC0KJYQBrXvB37XuF/nE64FOtTa93XgCuB94NPAP5p4jfsJCfUDYAhhgJ9o/Ol0wnjVCkL33k1A7OuwS+HSei4iIhI7tVxERCR2Si4iIhI7JRcREYmdkouIiMROyUVERGKn5CIiIrFTchERkdgpuYiISOyUXEREJHb/H4bEOorTxqTnAAAAAElFTkSuQmCC\n",
251 | "text/plain": [
252 | ""
253 | ]
254 | },
255 | "metadata": {},
256 | "output_type": "display_data"
257 | },
258 | {
259 | "name": "stdout",
260 | "output_type": "stream",
261 | "text": [
262 | "Final metric value: -0.5335141688363023\n",
263 | "Optimizer's stopping condition, GradientDescentOptimizerv4Template: Convergence checker passed at iteration 9.\n"
264 | ]
265 | },
266 | {
267 | "data": {
268 | "application/vnd.jupyter.widget-view+json": {
269 | "model_id": "2d49e3267df847f99c5aa0f920d56fb1",
270 | "version_major": 2,
271 | "version_minor": 0
272 | },
273 | "text/plain": [
274 | "interactive(children=(IntSlider(value=91, description='image_z', max=182), FloatSlider(value=0.5, description=…"
275 | ]
276 | },
277 | "metadata": {},
278 | "output_type": "display_data"
279 | },
280 | {
281 | "data": {
282 | "application/vnd.jupyter.widget-view+json": {
283 | "model_id": "1fba8c45bb544702ab8e09d4259ee492",
284 | "version_major": 2,
285 | "version_minor": 0
286 | },
287 | "text/plain": [
288 | "interactive(children=(IntSlider(value=91, description='image_z', max=182), FloatSlider(value=0.5, description=…"
289 | ]
290 | },
291 | "metadata": {},
292 | "output_type": "display_data"
293 | }
294 | ],
295 | "source": [
296 | "fixed_image = sitk.ReadImage('MNI152_T1_1mm_Brain.nii.gz', sitk.sitkFloat32)\n",
297 | "for im in mylist:\n",
298 | "    ###Getting Subject MRI\n",
299 | " img_fn = str(im[1])\n",
300 | " label_fn=str(im[2])\n",
301 | " img_name = img_fn.split('/')[-1].split('.')[0]\n",
302 | " label_name = label_fn.split('/')[-1].split('.')[0]\n",
303 | " print(img_name)\n",
304 | " print(label_name)\n",
305 | " \n",
306 | " ########### Output Name & Folder ##########\n",
307 | " Registered_imageName=img_name+'.nii.gz'\n",
308 | " Registered_labelName=label_name+'.nii.gz'\n",
309 | " Transformation_imageName=img_name+'.tfm'\n",
310 | " \n",
311 | " # =============================================================================\n",
312 | " # Loading the data\n",
313 | " # =============================================================================\n",
314 | " moving_image = sitk.ReadImage(img_fn, sitk.sitkFloat32)\n",
315 | " interact(display_images, fixed_image_z=(0,fixed_image.GetSize()[2]-1), moving_image_z=(0,moving_image.GetSize()[2]-1), fixed_npa = fixed(sitk.GetArrayViewFromImage(fixed_image)), moving_npa=fixed(sitk.GetArrayViewFromImage(moving_image)));\n",
316 | " # =============================================================================\n",
317 | "    # Initial alignment\n",
318 | " # =============================================================================\n",
319 | " initial_transform = sitk.CenteredTransformInitializer(fixed_image, \n",
320 | " moving_image, \n",
321 | " sitk.Euler3DTransform(), \n",
322 | " sitk.CenteredTransformInitializerFilter.GEOMETRY)\n",
323 | "\n",
324 | " moving_resampled = sitk.Resample(moving_image, fixed_image, initial_transform, sitk.sitkNearestNeighbor, 0.0, moving_image.GetPixelID())\n",
325 | "\n",
326 | " interact(display_images_with_alpha, image_z=(0,fixed_image.GetSize()[2]), alpha=(0.0,1.0,0.05), fixed = fixed(fixed_image), moving=fixed(moving_resampled));\n",
327 | " # =============================================================================\n",
328 | " # Registration\n",
329 | " # =============================================================================\n",
330 | " registration_method = sitk.ImageRegistrationMethod()\n",
331 | "\n",
332 | " # Similarity metric settings.\n",
333 | " registration_method.SetMetricAsMattesMutualInformation(numberOfHistogramBins=50)\n",
334 | " registration_method.SetMetricSamplingStrategy(registration_method.RANDOM)\n",
335 | " registration_method.SetMetricSamplingPercentage(0.01)\n",
336 | "\n",
337 | " registration_method.SetInterpolator(sitk.sitkNearestNeighbor)\n",
338 | "\n",
339 | " # Optimizer settings.\n",
340 | " registration_method.SetOptimizerAsGradientDescent(learningRate=1.0, numberOfIterations=100, convergenceMinimumValue=1e-6, convergenceWindowSize=10)\n",
341 | " registration_method.SetOptimizerScalesFromPhysicalShift()\n",
342 | "\n",
343 | " # Setup for the multi-resolution framework. \n",
344 | " registration_method.SetShrinkFactorsPerLevel(shrinkFactors = [4,2,1])\n",
345 | " registration_method.SetSmoothingSigmasPerLevel(smoothingSigmas=[2,1,0])\n",
346 | " registration_method.SmoothingSigmasAreSpecifiedInPhysicalUnitsOn()\n",
347 | "\n",
348 | " # Don't optimize in-place, we would possibly like to run this cell multiple times.\n",
349 | " registration_method.SetInitialTransform(initial_transform, inPlace=False)\n",
350 | "\n",
351 | " # Connect all of the observers so that we can perform plotting during registration.\n",
352 | " registration_method.AddCommand(sitk.sitkStartEvent, start_plot)\n",
353 | " registration_method.AddCommand(sitk.sitkEndEvent, end_plot)\n",
354 | " registration_method.AddCommand(sitk.sitkMultiResolutionIterationEvent, update_multires_iterations) \n",
355 | " registration_method.AddCommand(sitk.sitkIterationEvent, lambda: plot_values(registration_method))\n",
356 | "\n",
357 | " final_transform = registration_method.Execute(sitk.Cast(fixed_image, sitk.sitkFloat32), \n",
358 | " sitk.Cast(moving_image, sitk.sitkFloat32))\n",
359 | "\n",
360 | " # =============================================================================\n",
361 | " # post processing Analysis\n",
362 | " # =============================================================================\n",
363 | " print('Final metric value: {0}'.format(registration_method.GetMetricValue()))\n",
364 | " print('Optimizer\\'s stopping condition, {0}'.format(registration_method.GetOptimizerStopConditionDescription()))\n",
365 | " #Visualize Expected Results\n",
366 | "\n",
367 | " moving_resampled = sitk.Resample(moving_image, fixed_image, final_transform, sitk.sitkNearestNeighbor, 0.0, moving_image.GetPixelID())\n",
368 | "\n",
369 | " interact(display_images_with_alpha, image_z=(0,fixed_image.GetSize()[2]), alpha=(0.0,1.0,0.05), fixed = fixed(fixed_image), moving=fixed(moving_resampled));\n",
370 | "\n",
371 | " sitk.WriteImage(moving_resampled, os.path.join(Output_Registered_image_path, Registered_imageName))\n",
372 | " sitk.WriteTransform(final_transform,os.path.join(Output_Registered_transformation_path,Transformation_imageName))\n",
373 | " \n",
374 | " # =============================================================================\n",
375 | " # Label Registration With the Previous Transformation\n",
376 | " # =============================================================================\n",
377 | " ###Read The label\n",
378 | " moving_label=sitk.ReadImage(label_fn, sitk.sitkFloat32)\n",
379 | " ###Get the Transformation\n",
380 | " transform=final_transform\n",
381 | " \n",
382 | " ###resampling the labels\n",
383 | " moving_resampled_label = sitk.Resample(moving_label, fixed_image, transform, sitk.sitkNearestNeighbor, 0.0, moving_label.GetPixelID())\n",
384 | "\n",
385 | " interact(display_images_with_alpha, image_z=(0,fixed_image.GetSize()[2]), alpha=(0.0,1.0,0.05), fixed = fixed(fixed_image), moving=fixed(moving_resampled_label));\n",
386 | "\n",
387 | " sitk.WriteImage(moving_resampled_label, os.path.join(Output_Registered_label_path,Registered_labelName))"
388 | ]
389 | },
390 | {
391 | "cell_type": "markdown",
392 | "metadata": {
393 | "heading_collapsed": true
394 | },
395 | "source": [
396 | "# Dice Similarity Function"
397 | ]
398 | },
399 | {
400 | "cell_type": "code",
401 | "execution_count": 16,
402 | "metadata": {
403 | "hidden": true
404 | },
405 | "outputs": [],
406 | "source": [
407 | "def dice_similarity(Seg_img, GT_img,state):\n",
408 | " \"\"\" \n",
409 | " Inputs:\n",
410 | " Seg_img (numpy.ndarray): Segmented Image.\n",
411 | " GT_img (numpy.ndarray): Ground Truth Image.\n",
412 | " State: \"nifti\" if the images are nifti file\n",
413 | " \"arr\" if the images are an ndarray\n",
414 | " output:\n",
415 | " Dice Similarity Coefficient: dice_CSF, dice_GM, dice_WM.\"\"\"\n",
416 | " \n",
417 | " if (state==\"nifti\"):\n",
418 | " segmented_data = Seg_img.get_data().copy()\n",
419 | " groundtruth_data = GT_img.get_data().copy()\n",
420 | " elif (state==\"arr\"):\n",
421 | " segmented_data = Seg_img.copy()\n",
422 | " groundtruth_data = GT_img.copy()\n",
423 | " \n",
424 | "    #Calculate DICE\n",
425 | " def dice_coefficient(SI,GT):\n",
426 | " # 2 * TP / (FN + (2 * TP) + FP)\n",
427 | " intersection = np.logical_and(SI, GT)\n",
428 | " return 2. * intersection.sum() / (SI.sum() + GT.sum())\n",
429 | " \n",
430 | " #Dice for CSF\n",
431 | " Seg_CSF = (segmented_data == 1) * 1\n",
432 | " GT_CSF = (groundtruth_data == 1) * 1\n",
433 | " dice_CSF = dice_coefficient(Seg_CSF, GT_CSF)\n",
434 | " #Dice for GM\n",
435 | " Seg_GM = (segmented_data == 2) * 1\n",
436 | " GT_GM = (groundtruth_data == 2) * 1\n",
437 | " dice_GM = dice_coefficient(Seg_GM, GT_GM)\n",
438 | " #Dice for WM\n",
439 | " Seg_WM = (segmented_data == 3) * 1\n",
440 | " GT_WM = (groundtruth_data == 3) * 1\n",
441 | " dice_WM = dice_coefficient(Seg_WM, GT_WM)\n",
442 | " \n",
443 | " return dice_CSF, dice_GM, dice_WM"
444 | ]
445 | },
446 | {
447 | "cell_type": "markdown",
448 | "metadata": {
449 | "heading_collapsed": true
450 | },
451 | "source": [
452 | "# Checking whether inverse registration works"
453 | ]
454 | },
455 | {
456 | "cell_type": "code",
457 | "execution_count": 13,
458 | "metadata": {
459 | "hidden": true
460 | },
461 | "outputs": [
462 | {
463 | "name": "stdout",
464 | "output_type": "stream",
465 | "text": [
466 | "['IBSR_01_seg.nii.gz', 'IBSR_03_seg.nii.gz', 'IBSR_04_seg.nii.gz', 'IBSR_05_seg.nii.gz', 'IBSR_06_seg.nii.gz', 'IBSR_07_seg.nii.gz', 'IBSR_08_seg.nii.gz', 'IBSR_09_seg.nii.gz', 'IBSR_11_seg.nii.gz', 'IBSR_12_seg.nii.gz', 'IBSR_13_seg.nii.gz', 'IBSR_14_seg.nii.gz', 'IBSR_16_seg.nii.gz', 'IBSR_17_seg.nii.gz', 'IBSR_18_seg.nii.gz']\n"
467 | ]
468 | }
469 | ],
470 | "source": [
471 | "My_Registered_label_list=os.listdir(Output_Registered_label_path)\n",
472 | "print(My_Registered_label_list)"
473 | ]
474 | },
475 | {
476 | "cell_type": "code",
477 | "execution_count": 14,
478 | "metadata": {
479 | "hidden": true
480 | },
481 | "outputs": [
482 | {
483 | "name": "stdout",
484 | "output_type": "stream",
485 | "text": [
486 | "['IBSR_01.tfm', 'IBSR_03.tfm', 'IBSR_04.tfm', 'IBSR_05.tfm', 'IBSR_06.tfm', 'IBSR_07.tfm', 'IBSR_08.tfm', 'IBSR_09.tfm', 'IBSR_11.tfm', 'IBSR_12.tfm', 'IBSR_13.tfm', 'IBSR_14.tfm', 'IBSR_16.tfm', 'IBSR_17.tfm', 'IBSR_18.tfm']\n"
487 | ]
488 | }
489 | ],
490 | "source": [
491 | "My_Registered_Trans_File=os.listdir(Output_Registered_transformation_path)\n",
492 | "print(My_Registered_Trans_File)"
493 | ]
494 | },
495 | {
496 | "cell_type": "code",
497 | "execution_count": 24,
498 | "metadata": {
499 | "hidden": true,
500 | "scrolled": true
501 | },
502 | "outputs": [
503 | {
504 | "name": "stdout",
505 | "output_type": "stream",
506 | "text": [
507 | "#############Loading Results If we Do inverse Registration############### \n",
508 | "data: IBSR_01\n",
509 | "CSF DICE = 0.9825621309899767 GM DICE = 0.9912496715782244 WM DICE = 0.9880310024988352\n",
510 | "############################ \n",
511 | "data: IBSR_03\n",
512 | "CSF DICE = 0.9697968972494941 GM DICE = 0.991009179006726 WM DICE = 0.9870380013130924\n",
513 | "############################ \n",
514 | "data: IBSR_04\n",
515 | "CSF DICE = 0.9775035832242787 GM DICE = 0.991514231804458 WM DICE = 0.987651804670913\n",
516 | "############################ \n",
517 | "data: IBSR_05\n",
518 | "CSF DICE = 0.9825619414371867 GM DICE = 0.9900341642668938 WM DICE = 0.9893874286823152\n",
519 | "############################ \n",
520 | "data: IBSR_06\n",
521 | "CSF DICE = 0.9862122788761707 GM DICE = 0.9866776751765345 WM DICE = 0.9871989311140422\n",
522 | "############################ \n",
523 | "data: IBSR_07\n",
524 | "CSF DICE = 0.998586537992884 GM DICE = 0.9994064054436964 WM DICE = 0.9993687508790062\n",
525 | "############################ \n",
526 | "data: IBSR_08\n",
527 | "CSF DICE = 0.9986510373057959 GM DICE = 0.9985595970632075 WM DICE = 0.9984952139443508\n",
528 | "############################ \n",
529 | "data: IBSR_09\n",
530 | "CSF DICE = 0.9968720448097766 GM DICE = 0.9984399123036154 WM DICE = 0.9983404133068171\n",
531 | "############################ \n",
532 | "data: IBSR_16\n",
533 | "CSF DICE = 0.9507329161241548 GM DICE = 0.9824083459155735 WM DICE = 0.9735339391230945\n",
534 | "############################ \n",
535 | "data: IBSR_18\n",
536 | "CSF DICE = 0.9591194295136732 GM DICE = 0.9817603190815379 WM DICE = 0.9746477068175066\n",
537 | "############################ \n",
538 | "data: IBSR_11\n",
539 | "CSF DICE = 0.997624703087886 GM DICE = 0.9985968288140679 WM DICE = 0.9986385574634562\n",
540 | "############################ \n",
541 | "data: IBSR_12\n",
542 | "CSF DICE = 0.9964391691394658 GM DICE = 0.9980941459737718 WM DICE = 0.9977821939864278\n",
543 | "############################ \n",
544 | "data: IBSR_13\n",
545 | "CSF DICE = 0.9747929583201339 GM DICE = 0.9918151106915374 WM DICE = 0.9866391946408345\n",
546 | "############################ \n",
547 | "data: IBSR_14\n",
548 | "CSF DICE = 0.978958385967445 GM DICE = 0.9904732572623514 WM DICE = 0.9878724958249686\n",
549 | "############################ \n",
550 | "data: IBSR_17\n",
551 | "CSF DICE = 0.9666451820818562 GM DICE = 0.9812091136119974 WM DICE = 0.9711705490642771\n",
552 | "############################ \n"
553 | ]
554 | }
555 | ],
556 | "source": [
557 | "print(\"#############Loading Results If we Do inverse Registration############### \")\n",
558 | "\n",
559 | "for im in mylist:\n",
560 | "    ###Getting Subject MRI\n",
561 | " img_fn = str(im[1])\n",
562 | " label_fn=str(im[2])\n",
563 | " img_name = img_fn.split('/')[-1].split('.')[0]\n",
564 | " label_name = label_fn.split('/')[-1].split('.')[0]\n",
565 | " \n",
566 | " ##The Original Spaced Image\n",
567 | " label_Original_fixed=sitk.ReadImage(label_fn, sitk.sitkFloat32)\n",
568 | "    ###Registered MNI label\n",
569 | " label_Registered_moving=sitk.ReadImage(Output_Registered_label_path+label_name+'.nii.gz', sitk.sitkFloat32)\n",
570 | " #print(Output_Registered_label_path+label_name+'.nii.gz')\n",
571 | " \n",
572 | " ######Load the Transformation\n",
573 | " initial_transform_for_InterTransformation=sitk.ReadTransform(Output_Registered_transformation_path+img_name+'.tfm')\n",
574 | " inverse_Transformation=initial_transform_for_InterTransformation.GetInverse()\n",
575 | " \n",
576 | " Original_resampled_label = sitk.Resample(label_Registered_moving, label_Original_fixed, inverse_Transformation, sitk.sitkNearestNeighbor, 0.0, label_Registered_moving.GetPixelID())\n",
577 | " \n",
578 | " np_img3 = sitk.GetArrayFromImage(Original_resampled_label)\n",
579 | " np_img4 = sitk.GetArrayFromImage(label_Original_fixed)\n",
580 | " \n",
581 | " \n",
582 | " print(\"data:\",img_name)\n",
583 | " dice_CSF, dice_GM, dice_WM = dice_similarity(np_img3,np_img4,\"arr\")\n",
584 | " print(\"CSF DICE = {}\".format(dice_CSF), \"GM DICE = {}\".format(dice_GM), \"WM DICE = {}\".format(dice_WM))\n",
585 | " print(\"############################ \")\n",
586 | " \n",
587 | " "
588 | ]
589 | },
590 | {
591 | "cell_type": "markdown",
592 | "metadata": {
593 | "heading_collapsed": true
594 | },
595 | "source": [
596 | "# Creating the Excel file of the registered training and validation paths."
597 | ]
598 | },
599 | {
600 | "cell_type": "code",
601 | "execution_count": 28,
602 | "metadata": {
603 | "hidden": true
604 | },
605 | "outputs": [
606 | {
607 | "name": "stdout",
608 | "output_type": "stream",
609 | "text": [
610 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_01.nii.gz\n",
611 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_01_seg.nii.gz\n",
612 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_01.tfm\n",
613 | "###############################\n",
614 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_03.nii.gz\n",
615 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_03_seg.nii.gz\n",
616 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_03.tfm\n",
617 | "###############################\n",
618 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_04.nii.gz\n",
619 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_04_seg.nii.gz\n",
620 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_04.tfm\n",
621 | "###############################\n",
622 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_05.nii.gz\n",
623 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_05_seg.nii.gz\n",
624 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_05.tfm\n",
625 | "###############################\n",
626 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_06.nii.gz\n",
627 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_06_seg.nii.gz\n",
628 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_06.tfm\n",
629 | "###############################\n",
630 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_07.nii.gz\n",
631 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_07_seg.nii.gz\n",
632 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_07.tfm\n",
633 | "###############################\n",
634 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_08.nii.gz\n",
635 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_08_seg.nii.gz\n",
636 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_08.tfm\n",
637 | "###############################\n",
638 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_09.nii.gz\n",
639 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_09_seg.nii.gz\n",
640 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_09.tfm\n",
641 | "###############################\n",
642 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_16.nii.gz\n",
643 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_16_seg.nii.gz\n",
644 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_16.tfm\n",
645 | "###############################\n",
646 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_18.nii.gz\n",
647 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_18_seg.nii.gz\n",
648 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_18.tfm\n",
649 | "###############################\n",
650 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_11.nii.gz\n",
651 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_11_seg.nii.gz\n",
652 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_11.tfm\n",
653 | "###############################\n",
654 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_12.nii.gz\n",
655 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_12_seg.nii.gz\n",
656 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_12.tfm\n",
657 | "###############################\n",
658 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_13.nii.gz\n",
659 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_13_seg.nii.gz\n",
660 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_13.tfm\n",
661 | "###############################\n",
662 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_14.nii.gz\n",
663 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_14_seg.nii.gz\n",
664 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_14.tfm\n",
665 | "###############################\n",
666 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_data/IBSR_17.nii.gz\n",
667 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_label/IBSR_17_seg.nii.gz\n",
668 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Transformation/IBSR_17.tfm\n",
669 | "###############################\n"
670 | ]
671 | }
672 | ],
673 | "source": [
674 | "id_list=[]\n",
675 | "sub_t1=[]\n",
676 | "label_list=[]\n",
677 | "tfm_list=[]\n",
678 | "\n",
679 | "\n",
680 | "for im in mylist:\n",
681 |     "    ###Getting Subject MRI\n",
682 | " img_fn = str(im[1])\n",
683 | " label_fn=str(im[2])\n",
684 | " img_name = img_fn.split('/')[-1].split('.')[0]\n",
685 | " id_list.append(img_name)\n",
686 | " label_name = label_fn.split('/')[-1].split('.')[0]\n",
687 | " \n",
688 | " ######Getting Path\n",
689 | " reg_img_path=Output_Registered_image_path+img_name+'.nii.gz'\n",
690 | " sub_t1.append(reg_img_path)\n",
691 | " \n",
692 | " reg_label_path=Output_Registered_label_path+label_name+'.nii.gz'\n",
693 | " label_list.append(reg_label_path)\n",
694 | " \n",
695 | " trans_tfm_file_path=Output_Registered_transformation_path+img_name+'.tfm'\n",
696 | " tfm_list.append(trans_tfm_file_path)\n",
697 | " \n",
698 | " \n",
699 | " \n",
700 | " \n",
701 | " print(reg_img_path)\n",
702 | " print(reg_label_path)\n",
703 | " print(trans_tfm_file_path)\n",
704 | " print(\"###############################\")\n",
705 | "\n",
706 | "Inf0_data=pd.DataFrame(list(zip(id_list, sub_t1, label_list,tfm_list)),\n",
707 | "columns=['id','subj_folder','subj_label','registartion_tmf'])\n",
708 | "Inf0_data.to_csv(\"MISAPreorocessingReg_info.csv\", encoding='utf-8', index=False) "
709 | ]
710 | },
711 | {
712 | "cell_type": "markdown",
713 | "metadata": {
714 | "heading_collapsed": true
715 | },
716 | "source": [
717 |     "# Registration For Test Images"
718 | ]
719 | },
720 | {
721 | "cell_type": "code",
722 | "execution_count": 29,
723 | "metadata": {
724 | "hidden": true
725 | },
726 | "outputs": [
727 | {
728 | "name": "stdout",
729 | "output_type": "stream",
730 | "text": [
731 | "['IBSR_02.nii.gz', 'IBSR_10.nii.gz', 'IBSR_15.nii.gz']\n"
732 | ]
733 | }
734 | ],
735 | "source": [
736 | "Test_data_raw_path=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/\"\n",
737 | "Test_data_list=os.listdir(Test_data_raw_path)\n",
738 | "print(Test_data_list)"
739 | ]
740 | },
741 | {
742 | "cell_type": "code",
743 | "execution_count": 31,
744 | "metadata": {
745 | "hidden": true
746 | },
747 | "outputs": [
748 | {
749 | "name": "stdout",
750 | "output_type": "stream",
751 | "text": [
752 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/IBSR_02.nii.gz\n"
753 | ]
754 | }
755 | ],
756 | "source": [
757 | "print(Test_data_raw_path+Test_data_list[0])"
758 | ]
759 | },
760 | {
761 | "cell_type": "code",
762 | "execution_count": 34,
763 | "metadata": {
764 | "hidden": true,
765 | "scrolled": true
766 | },
767 | "outputs": [
768 | {
769 | "data": {
770 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZAAAAENCAYAAAAhRzNRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3XmcU9X5x/HPwzIMiygjKLhBrVq3upW6YBVQ0eqvFa27oqIC4tbaSoXWpValggtatWqRqohWq9WqteOCCAhaW0dxq7ZqEUUZYVjEjZ3n98e5KeMwSyaT5Ca53/frlddNbu4kDwHynXvOPeeYuyMiItJcreIuQEREipMCREREMqIAERGRjChAREQkIwoQERHJiAJEREQyogAREZGMKEBERCQjChAREclIm7gLyKWuXbt6r1694i5DRKSovPzyywvdvVtTx5V0gPTq1Yuqqqq4yxARKSpm9kE6x6kJS0REMqIAERGRjChAREQkIwoQERHJiAJEREQyogAREZGMKEBERCQjChARkVxbsgTuuANKbAlxBYiISK7deCOccQZMnRp3JVmlABERybXKyrC9885468gyBYiISC4tWAAvvQQdO8JDD8Fnn8VdUdYoQEREcumpp0Lfx9ixsGwZPPhg3BVljQJERCSXnngCNtkEzjoLtt8e7ror89dyh0GD4P77s1ZeSyhARERyZc0aePJJOPRQaNUKBg+GmTPhvfcye70nnoB774URI2DFiqyWmgkFiIhIrvzjH+ES3sMOC48HDQpBMnFiZq83dix06AAffwz33JO9OjOkABERyZXKSmjdGgYMCI833xwOPjgEyNq1zXutF1+E556D0aNh993h6qvDGU6MFCAiIrlSWQl9+kCXLuv2DR4Mc+c2f0zI1VeH1xkyBEaNgnfegb/8JavlNpcCREQkF+bNg1mz1jVfpQwcCBtu2LzO9P/8Bx55BM49Fzp1gqOOgm22gTFjYh3drgAREcmFJ58M20MP/fr+8nI44YQwJmTp0vRe65proF07OO+88Lh1a7jwQnj5ZXjmmezV3EwKEBGRXKishM02g112Wf+5wYPTHxMybx5MmgSnnw7duq3bf8op0KNHOAuJiQJERCTbVq2CyZND85XZ+s/vuWf6Y0J++1tYvRouuODr+9u1g5/9DJ59Fv75z6yU3VwKEBGRbHvhhTBlSd3+jxSzcBby/PPw7rsNv87SpXDbbXDssbD11us/f+aZsNFGcNVVWSm7uRQgIiLZVlkJbdvCgQc2fMzJJzc9JuT3vw9BdOGF9T+/wQahY/2RR+Dtt1tWcwYUICIi2VZZCfvtB507N3zMZpvBIYeEAKlvPMeKFXDDDWEMye67N/w6P/4xtG8fBhnmmQJERCSbPvwQ3nyz4ear2gYPho8+qn9MyKRJUF0NI0c2/hrduoWxIffeG947jxQgIiLZ9MQTYZtOgBx+eOjDqNuZvnZtuHR3jz3ggAOafp0RI8L2uuuaVWpLKUBERLKpshJ69QpXWTUlNSbk4Ye/Pibk0UfDSPORI+u/iquurbaCk06C22+HhQszLr25Yg8QM6sws8lm9m607dLIsZ3N7GMzuzmfNYqIpGXFCpgypeHLd+uTGhPywAPhcWrtkK23DiPO03XhheF1bryx2WVnKvYAAUYBU9x9W2BK9LghVwDT81KViEhzzZgBX36ZXvNVyne/CzvssK4Za8aMMIvviBFhxHm6dtwRjjgCbroJPv+8WWVnqhACZCCQuo5tInBEfQeZ2XeATYGn81SXiEjzVFaGAX79+6f/M6kxIS+8EJqtxo4NC1ANHtz89x81Cj79FMaPb/7PZqAQAmRTd68GiLab1D3AzFoB1wE/b+rFzGyYmVWZWVVNTU3WixURaVBlJfTrF9bsaI7UOiEXXhheI3VpbnPttVcIr3Hj8rLgVF4CxMyeMbM367kNTPMlzgYq3X1uUwe6
+3h37+3uvbvVnjdGRCSX/vvfMGtuc5qvUlJjQh59FDp2DMvfZuoXv1g3f1aOtcn5OwDuflBDz5nZfDPr4e7VZtYDWFDPYfsA+5nZ2UAnoMzMvnD3xvpLRETypzmX79Zn8ODwGsOGQUVF5nUcdFC4/HfcODjjjPQ78zOQlwBpwmPAqcCYaPto3QPc/aTUfTMbDPRWeIhIQamshG23Det0ZOLII8PYj9NPb1kdZuFy3oqKnIYHFEYfyBhggJm9CwyIHmNmvc1sQqyViYik46uvwmjyTM8+IMydNWJEy84+UvbYI4xFybHYz0DcfRGw3oxj7l4FDKln/13AXTkvTEQkXdOmwfLlLQuQIlQIZyAiIsXtiSfClVf77x93JXmlABERaQn30P9x4IFhapIEUYCIiLTEO+/A7NmJa74CBYiISMs89FDYHnpovHXEQAEiIpKJ1avhkkvg4ouhb1/o2TPuivIu9quwRESKzrx5cOKJMH16GLdx001xVxQLBYiISHM8/XSYu+rLL8NytKecEndFsVETlohIOlavDs1V3/9+mC23qirR4QE6AxERadq8eWHlwOeeC/NL3Xhj82fcLUE6AxGRolFdHfqrP/kkj2/69NOw227hjOPuu2HCBIVHRGcgIlI0rrgCZs50Lv813HJrFicKXL0a5s8PZxrV1eu2770H990HO+0UlpzdYYfsvWcJMHePu4ac6d27t1dVVcVdhoi0UPv2YaqpuspZxrIum4eJCMvKvr5t3TrcWrVad7/2YzNYtCiERU1NGFFem1no6zjySLjuukSddZjZy+7eu6njdAYiIgVv9mwYcYHzyP3L+crb06HNSo7c7l9cu9+jUDYIVq6EVau+vl2zBtauDdvULfV4xYpwf4stwprkm20GPXp8fbvJJtBGX5GN0acjIgWvRw/ovHIhy72C8rarWb6mjM59d6f7LbvHXVqiqRNdRIrC/FfnMbzV7bw45SuGD89zR7rUS2cgIlL4Vq3i4c8GwI/6wn7D+d1+cRckoDMQESkGTz8dOrpPPjnuSqQWBYiIFL5Jk2DjjcMocCkYChARKWxLl8Ijj4SR4GVlcVcjtShARKSw/fnP4bJbNV8VHAWIiBS2SZNgu+3CeA0pKAoQESlcc+aENTdOOSWMDJeCogARkcJ1771he9JJ8dYh9VKAiEhhcg/NV/vvD716xV2N1EMBIiKF6aWX4D//SfyiTYVMASIihWnSJCgvh6OPjrsSaYACREQKz8qVcP/9cPjhsOGGcVcjDVCAiEjhefJJWLhQYz8KnAJERArPpEnQrRscckjclUgjFCAiUlg+/RT++tcwdUnbtnFXI41QgDSguhr69tWaAyJ59+CDmrqkSChAGnDF5c7MmXD55XFXIpIwd98NO+wA3/lO3JVIExQgdbRvH2ZMuPU2Y+1auPXW8Lh9+7grE0mA99+HmTPD2YemLil4CpA6Zs+GE49eSQdbBkCH9s5JJ4V/1yKSY/fcE7aauqQoxB4gZlZhZpPN7N1o26WB49aY2avR7bFc1dOjB3TuWsZyyilnGcuXOZ03cLp3z9U7iggQpi65+27o3x+22iruaiQNaQeIme1gZpeY2e+ix9ub2S5ZqGEUMMXdtwWmRI/rs8zdd4tuh2fhfRs0fz4MP8t4ceQjDOdWPpnxbi7fTkQA/vEPeO89dZ4XkbQCxMyOAaYDmwOpv91OwLgs1DAQmBjdnwgckYXXbJGHH4bf/Q52vep4fnf0NB7+947wwgtxlyVFSlf0peHTT+E3vwlTlxx1VNzVSJrSPQO5HDjY3YcDa6J9rwG7ZqGGTd29GiDabtLAceVmVmVmL5pZgyFjZsOi46pqampaVpkZTJgAPXvCccfBokUtez1JpCuuQFf0NWTFChg3DrbeGh5/HH7xC+jcOe6qJE3m7k0fZLYI6OrubmaL3b3CzNoA89y9oS/82j//DFBfL8JFwER336jWsUvcfb1+EDPbzN3nmdnWwLPAge7+38be
t3fv3l5VVdVUeU17+WXo0wcOOigMcGoVe9eRFIH27WH58vX3l5fDsmX5r6egrF0L990HF10EH3wQRpyPHQu7ZuN3UmkpM3vZ3Xs3dVy634Qvs67pKuV44J/p/LC7H+TuO9dzexSYb2Y9oqJ7AAsaeI150XY2MA3YPc3aW+473wm/JVVWwrXX5u1tpbjNng0nnggd2q4EoIMt46Q+7/P+u6tjrixmzzwDvXvDoEFQUQGTJ4e5rxQeRSfdAPkxcKWZTQc6mtlTwBXAT7NQw2PAqdH9U4FH6x5gZl3MrF10vyuwL/BWFt47fWefDcccA7/8JTz/fF7fWopTjx6hNWb5qjbhij5vR+cXnqB7v+3h9ttD802SvPpqONMYMACWLAmrDVZVhTN7KUppNWEBmFkH4AdAT2Au8Li7f9HiAsw2Bh4AtgI+BI5x98Vm1hsY7u5DzKwP8HtgLSH0bnD3PzT12llrwkpZujScjSxfHv4zdO2avdeWkvSjH0GPWZUMa3MH4wc8SPWsah5ePTB8cW6+OYwYAUOHQseOcZf6datWwTvvhDXJ27aFdu3Crbz869t27cLzn38e+ggXL67/NncuPPUUdOkCF18cfiFr1y7uP6U0IN0mrLQDpBhlPUAAXnkF9tkHDjwwdPqpP0Sa0qdP6BCZMiU8dg/NOKNHw/Tp4ReR88+Hc86BTp3CFUlLltR/++yz0OzTs2e4bbUVdO+e+b9Dd/j4Y3jjjXW311+Hf/87rMnRUm3bwsYbh5p/+EMYNQo22qjpn5NYZTVAzGwGUO+B7r5/88vLj5wECMAtt4T/7GPGwMiR2X99KS3bbhvOXO+/f/3nnn8+XL5aWQmtW8OaNesfU1ubNrC6Th9K27aw5ZbrAqVnT+jQIQTAihVhm7rVfvzJJyEwlixZ91qbbw7f/jbsskvYbrNNqGnFinBbvnz9+ytXhra6iopwSwVGRUWoQ1OSFJ10A6RNmq83oc7j7sAZwD3NLawknHUWTJsWriDZd1/43vfirkgKWU1NWNuiPvvuC3/7W2gSfeCB0DTUpUv4Lb1Ll/Vv5eWhKfXDD8Ptgw++vn3mGZg3L5xZQDgzadcOysrWv1VUhH69VGDsvHPYJ5KmjJuwzGwb4E533y+7JWVPzs5AIDQl7LZb+M1r1iz9liX1W7kyfIFffjlcckl+3nPVqnCWUlYWzmpEminbl/HW52MgG1OZFKfOneHSS+G110Lzg0h9Fi4M24bOQHKhbdvQ56LwkBxLqwnLzE6vs6sD8CPgxaxXVExOOgkuuwyuvBIOO0xnIbK+1GwI+QwQkTxJtw+k7iDCL4EXgOuzW06Rads2dKKffTZMnQoHHBB3RVJoFCBSwtIKEHfvn+tCitZpp4XJjq68UgEi61OASAlrMECiOaeaFE0tklzl5WEw2AUXhBl7+/SJuyIpJAoQKWGNdaK/B7wbbRu6aaEMgDPPDNe+jx4ddyVSaGpqwqW0ujxWSlCDAeLurdy9dbRt6KbLPCBMQ/HTn4arsWbNirsaKSQ1NeGXC81YICVI/6qz5ZxzwqW9OguR2hYsUPOVlKx0L+NtA5wN9AW6Av+7XrWQpzLJq402gvPOC9NSvPUW7Lhj3BVJIWhsFLpIkUv3DOR64EzgOeA7wEOElQOfzVFdxen888MArquuirsSKRQKEClh6QbIj4BD3f23wOpoewSgy3tr69oVhg8PK63NTvbFaRKpqYFNmly0U6QopRsgHQhrgAAsM7MO7v5v8rkqYLG44IIwhcTYsXFXInFbvTqshaEzEClR6QbI28B3o/tVwGVmdjFhPiypbbPN4Iwz4M474aOP4q5G4rRoUdgqQKREpRsgPwFSixD8DNgD+CEwLBdFFb0LL4S1a7V+etJpEKGUuEYDxMw2AnD3l9z9lej+u+5+kLvv5e4z8lFk0enVC04+GcaPD5dxSjIpQKTENXUG8omZPWhm/2dmGjTYHKNGhRXbrk/2fJOJpgCREtdUgOwJzAHG
A/PM7Hoz2zXnVZWCb30rrPb2u999fclQSQ4FiJS4RgPE3V93958DWwKnAN2A583sdTO7wMy656PIonXRRfD553DTTXFXInFIBcjGG8dbh0iOpNWJ7u5r3f0pdx8EbApcC5wLfJjL4oreLrvAD38IN94IK1bEXY3k24IFYRLFNukuuyNSXJo1F5aZbQacA4wENgbuzUVRJeWcc8LlnI88Enclkm8ahS4lrskAMbP2ZjbIzCYD7wMHA2OA7u5+Wq4LLHoDBkDPnnD77XFXIvmmAJES19RlvBOB+cBFwFRgm+gS3knu/lU+Cix6rVqFgYVTpsB//xt3NZJPmsZESlxTZyDLgIPdfQd3/427z23ieKnPaaeFIPnDH+KuRPJJZyBS4pq6Cmu4u7+Yr2JK1hZbwGGHhelNVq2KuxrJh7VrQ9+XAkRKmBaUypehQ+GTT+Bvf4u7EsmHxYtDiChApIQpQPLlsMPCRIvqTE8GDSKUBFCA5EubNqEv5Ikn4EMNnyl5ChBJgLQCxMxOMbNd6uzb1cxOzk1ZJeqMM8L2jjvirUNyTwEiCZDuGcgVrFtQKmUucGV2yylx3/hGGBdyxx2wZk3c1UguKUAkAdINkM7AZ3X2LQU2ym45CTB0KMydC089FXclkkupAOnaNd46RHIo3QB5Cziqzr4jCSsVSnMcfnj4rVSd6aVtwQLYcEMoK4u7EpGcSXeWt5FApZkdB/wX2AY4EDispQWYWQXwJ6AXYer4Y919vfnPzWwrYAJhZmAHDnP3OS19/7wrK4PBg2HcOKiuhh494q5IckGDCCUB0p2NdyawM/AS0BH4J7Czuz+fhRpGAVPcfVtgSvS4PncD17j7DoR1Sop3qb8hQ0IfyF13xV2J5IqmMZEESPsyXnf/0N3HuPs50TZb05oMBCZG9ycCR9Q9wMx2BNq4++Soli+Kei6u7baDvn1hwoQw2ExKj85AJAEaDBAzG1/r/iQzu7u+WxZq2NTdqwGibX2/tm0HfGpmD5vZLDO7pqElds1smJlVmVlVTaojsxANHQqzZ8Ozz8ZdieSCAkQSoLEzkPdr3X+P0PdR361JZvaMmb1Zz21gmnW2AfYDRgDfBbYGBtd3oLuPd/fe7t67WyH/Bz7qKOjSRZ3ppcgdFi5UgEjJa7AT3d2vAoh+058L/NHdl2fyJu5+UEPPmdl8M+vh7tVm1oP6+zY+Ama5++zoZx4B9gaKd3rb8nI45RS45Rb9tlpqPv0UVq/W36mUvCb7QNx9DTAu0/BIw2PAqdH9U4FH6znmJaCLmaX+Rx5AuLS4uA0dGmbnvTsbLYFSMDSIUBIi3U70v5rZD3NUwxhggJm9CwyIHmNmvc1sAvwvxEYAU8zsDcCA4m/72Wkn2Gef0IzlHnc1ki0KEEmIdMeBlAN/NrO/E5qz/vdt5+6ntKQAd19EGFNSd38VMKTW48nALnWPK3pDh8Lpp8PMmbDffnFXI9mgAJGESPcM5E3gN4Rlbet2qEtLHHssdO6szvRSogCRhEj3DOT37v5J3Z1m1j3L9SRPx45w4olhUOHNN4cwkeK2ILoORAEiJS7dM5B3Gthf/B3ZheDoo2H58tCMJcWvpgY6dQpX2omUsHQDxNbbYdYZ0DDqbNhnH2jbFqZNi7sSyQZNYyIJ0WgTlpmlOszbm1ndZfQ2Bu7LVWGJ0qED7LWXAqRUaFyPJERTfSCDCGcflUDt1QcdmO/u/8lVYYnTvz+MHg2ffaZ+kGJXUwNbbBF3FSI512gTlrtPd/dpQNfofur2nMIjy/r1CxMrzpgRdyXSUjoDkYRItw9kjZmNNrPZZrYUwMwONrNzc1hbsuy9d1grRM1Yxc1dASKJkW6A3EBYD+Qk1g0i/BdwVi6KSiT1g5SGzz+HlSsVIJII6QbIEcCJ7v53oiuv3P1jYPNcFZZI/fvDK6/A0qVxVyKZ0iBCSZB0A2QldTrco4kNF2W9oiRL9YNoPEjxUoBIgqQbIA8CE83sGwDRtOs3
A/fnqrBESvWDTJ0adyWSKQWIJEi6AfJLYA7wBrAR8C4wD/h1bspKqPbtQ4ioH6R4KUAkQdIKEHdf6e7nu3snYFNgA3f/qbuvzG15CdS/P8yaFRYlkuKjebAkQRoNEDPbqu4NaA9sWeuxZJP6QYpbTU24oq5jx7grEcm5pkaiz2HdZbvrzYcVPdc6mwUl3t57Q7t2oR/kBz+IuxppLo0BkQRpqgnrdUJ/x8VAT6BtnVtZTqtLovJy9YMUMwWIJEhTU5nsBhwNVAAzCXNiHQ+UufuaaKlZybZ+/dQPUqwUIJIgTXaiu/ub7v5z4BvAOOAHQLWZ7ZHr4hKrf/8wJYbmxSo+ChBJkHQv4wXYFugL7APMApbkpCIJU5q0a6dmrGKkAJEEaWo9kArgBOBUYANgErC/u9ddG0Syqbw8LDKlAYXF5csvYdkyBYgkRlNXYc0D3icEx4vRvm3MbJvUAe7+bI5qS7Z+/eDXv4YlS6BLl7irkXRoEKEkTFMB8glQDgyNbnU5sHW2ixJCP8hll4V+kMMPj7saSYcCRBKm0QBx9155qkPq2nPP0JQ1bZoCpFgoQCRhmtOJLvmU6gdRR3rxSE1jsskm8dYhkicKkELWrx+8+iosXhx3JZIOnYFIwihACpnGgxSXmppw+XWnTnFXIpIXCpBCVrsfRApfagyI1TdtnEjpUYAUsnbtoE8fBUix0CBCSRgFSKHr1w9ee039IMVAASIJowApdP36hX6Q556LuxJpigJEEkYBUuj23DMsdatmrMKnAJGEUYAUOvWDFIfly+GLLxQgkigKkGLQrx+8/rr6QQqZxoBIAsUeIGZWYWaTzezdaLvezIFm1t/MXq11W25mR8RRbyxS/SDTp8ddiTREASIJFHuAAKOAKe6+LTAlevw17j7V3XeLVkg8APgKeDq/ZcZI/SCFLxUgmsZEEqQQAmQgMDG6PxFo6sziaOAJd/8qp1UVkrIy2HdfBUghS82DpTMQSZBCCJBN3b0aINo29Svc8cB9DT1pZsPMrMrMqmpSvxWWglQ/yKJFcVci9VETliRQXgLEzJ4xszfruQ1s5uv0AL4NPNXQMe4+3t17u3vvbqX0n/mAA8J2/Ph465D61dRA27aw4YZxVyKSN00tKJUV7n5QQ8+Z2Xwz6+Hu1VFALGjkpY4F/uLuq7JeZKHbe2845hi45BLYf//QpCWFo6YGunbVPFiSKIXQhPUYYc11ou2jjRx7Ao00X5U0M7j9dujVC447DhYujLsiqU2DCCWBCiFAxgADzOxdYED0GDPrbWYTUgeZWS9gSyC517JuuCE88ED4sjr5ZFi7Nu6KJEUBIgkUe4C4+yJ3P9Ddt422i6P9Ve4+pNZxc9x9c3dP9rfmHnvADTfAk0/C2LFxVyMpChBJoNgDRDIwfHhoxrr4Yk2yWCgUIJJACpBiZBauxvrmN+GEE9aNQZB4rFwJS5cqQCRxFCDFqnNnePDBMC5E/SHxSl3QoACRhFGAFLNdd4Ubb4Snn4bf/CbuapJL05hIQilAit3QoXDiifCrX8HUqXFXk0yaxkQSSgFS7Mzg97+HbbcNQTJ/ftwVJY+mMZGEUoCUgk6dQn/I0qVw0kmwZk3cFSWLAkQSSgFSKr79bbj5ZpgyBa67Lu5qkqWmBlq3hi7rLWUjUtIUIKXktNPg8MPhyit1aW8+1dTAxhtDK/13kmTRv/hSYgbXXAPLloVOdckPDSKUhFKAlJrttgsj1cePh7feiruaZFCASEIpQErRr34FG2wAF14YdyXJoACRhFKAlKKuXeGii+Bvfwud6pJbChBJKAVIqTrvPOjZEy64QJf15tLq1bB4sQJEEkkBUqrKy2HMGHjtNZg0Ke5qSldqjXpNYyIJpAApZccdB3vtFZqzvvwy7mpKkwYRSoIpQEqZWRhUOG+eBhfmiubBkgRTgJS6ffeFo46Cq6+G6uq4qyk9OgORBFOAJMHYsWHRo0svjbuS0qMAkQRTgCTBN78J554L
d9wBb7wRdzWlpaYmNBVuvHHclYjknQIkKS6+GDbcEEaMiLuS0lJTAxUVYTJFkYRRgCRFRQVccklYvfCpp+KupnQsWKDmK0ksBUiSnHNOaM4aMUKDC1uqujrMfvzww7DTTnFXIxKLNnEXIHlUVhYGFx5zDPz0p2ENkVatwq1163X3U4832gj23jvMqyXB8uVw/fVhDfoVK0IYX3xx3FWJxEIBkjRHHQWHHAI33ZTe8a1bQ+/e0K9fuO27bzIDxT2cbfz85/D++zBwIFx7LWyzTdyVicRGAZI0ZlBZGdZOX7s23Nasqf/+vHkwfTpMmwbjxoXLgZMYKK++CuefHz6LnXeGZ56BAw+MuyqR2Jm7x11DzvTu3durqqriLqM0fPkl/P3vIUymTYN//hNWrQrNYj/+8bqrvErJggXhzzVhQrgI4corYcgQaKPfu6S0mdnL7t67yeMUIJKRVKDccw/cfXcYB3H55TB0aGl8wf7pT2Fhri++CDMbX3pp6BMSSYB0A0RXYUlmOnaEgw6Cu+6CqirYcUc4+2zYbbfivkx46VI4+WQ4/nj41rfCwMtx4xQeIvVQgEjL7bFHaNZ66KGwHvv3vw+HHQZvvx13Zc0zYwbsuivcdx9cdhnMnAnbbx93VSIFSwEi2WEGP/pRWIf9mmvg+efDZcLnngsLF8ZdXeNWroRf/hL69g3NbzNnhmWBS6EpTiSHFCCSXe3ahbER770Hw4bBrbeGS10ffTTuyur3739Dnz5w1VVw+ukwa1YY+yIiTVKASG506wa33AKvvw7bbQdHHw0PPhh3Veu4h3DbYw+YMyeM8ZgwofQvSRbJIp2jS27ttBNMmRL6RI4/PozeHjQonlqWLoU33wyh9sgjYV6wQw6BO++EHj3iqUmkiMUeIGZWAfwJ6AXMAY519yX1HHc18H+Es6bJwE+8lK9BLiUbbABPPgmHHw6nnBL6HE4/PXfvt2oVvPNOCIo33gi311+HDz9cd0yXLvDb34Y+mlY6ERfJROwBAowCprj7GDMbFT0eWfsAM+sD7AvsEu2aCfQFpuWxTmmJjh3h8cfhyCPhjDNCiAwfnv7Pu8PUqWE0+OefN35bsiSECISO8O23DyPmhw+HXXYJnftbbhk6/kUkY4Xsu3QjAAAJ5ElEQVQQIAOBftH9iYRQGFnnGAfKgTLAgLbA/PyUJ1nTvn1oOjr2WDjrrNCc9ZOfNP1zU6eGq6JmzAiPO3UKZzW1b1tuue5+RUVoOvv2t0N4lJXl9s8lklCFECCbuns1gLtXm9kmdQ9w97+b2VSgmhAgN7t7vYMMzGwYMAxgq622yl3Vkpnycvjzn+HEE8P8UitWwIUX1n/s9OkhOKZPh802CxNADhkSXkNEYpeXADGzZ4Du9Tx1UZo/vw2wA7BFtGuyme3v7s/VPdbdxwPjIUxlklnFklNlZXD//aE/ZOTIECKXXLLu+RkzQnBMnQrdu4e+imHDFBwiBSYvAeLuBzX0nJnNN7Me0dlHD2BBPYcdCbzo7l9EP/MEsDewXoBIkWjTBiZNCuNGLr00hMihh4bgmDIFNt00rLtx5pmh6UtECk4hXH7yGHBqdP9UoL4RZx8Cfc2sjZm1JXSgF9k8GbKe1q3hD38IZxejR8P3vheumLruOpg9OzRxKTxEClYh9IGMAR4wszMIQXEMgJn1Boa7+xDgz8ABwBuEDvUn3f2vMdUr2dSqFdx2G2y9NbRtG844OnaMuyoRSYOmcxcRka/RdO4iIpJTChAREcmIAkRERDKiABERkYwoQEREJCMKEBERyYgCREREMqIAERGRjJT0QEIzqwE+aMFLdAUWZqmcfFLd+aW680t1515Pd+/W1EElHSAtZWZV6YzGLDSqO79Ud36p7sKhJiwREcmIAkRERDKiAGnc+LgLyJDqzi/VnV+qu0CoD0RERDKiMxAREcmIAqQeZvZ9M/uPmb1nZqPiriddZjbHzN4ws1fNrKAXQjGzO8xsgZm9WWtfhZlN
NrN3o22XOGusTwN1X2ZmH0ef+6tmdlicNdbHzLY0s6lm9raZ/cvMfhLtL+jPvJG6C/ozN7NyM/unmb0W1f3raP83zOwf0ef9JzMri7vWllATVh1m1hp4BxgAfAS8BJzg7m/FWlgazGwO0NvdC/5aczPbH/gCuNvdd472XQ0sdvcxUXB3cfeRcdZZVwN1XwZ84e7XxllbY8ysB9DD3V8xsw2Al4EjgMEU8GfeSN3HUsCfuZkZ0NHdv4iW4Z4J/AT4GfCwu99vZrcBr7n7rXHW2hI6A1nfnsB77j7b3VcC9wMDY66p5Lj7c8DiOrsHAhOj+xMJXxQFpYG6C567V7v7K9H9z4G3gc0p8M+8kboLmgdfRA/bRjcnLM3952h/wX3ezaUAWd/mwNxajz+iCP7BRhx42sxeNrNhcReTgU3dvRrCFwewScz1NMe5ZvZ61MRVUM1AdZlZL2B34B8U0Wdep24o8M/czFqb2avAAmAy8F/gU3dfHR1STN8t9VKArM/q2Vcs7Xz7uvsewKHAOVFzi+TercA3gd2AauC6eMtpmJl1Ah4Cznf3z+KuJ1311F3wn7m7r3H33YAtCC0bO9R3WH6ryi4FyPo+Aras9XgLYF5MtTSLu8+LtguAvxD+0RaT+VGbd6rte0HM9aTF3edHXxZrgdsp0M89aot/CLjX3R+Odhf8Z15f3cXymQO4+6fANGBvYCMzaxM9VTTfLQ1RgKzvJWDb6GqJMuB44LGYa2qSmXWMOhkxs47AwcCbjf9UwXkMODW6fyrwaIy1pC31BRw5kgL83KNO3T8Ab7v7uFpPFfRn3lDdhf6Zm1k3M9sout8eOIjQfzMVODo6rOA+7+bSVVj1iC4JvAFoDdzh7qNjLqlJZrY14awDoA3wx0Ku28zuA/oRZiidD/wKeAR4ANgK+BA4xt0LqsO6gbr7EZpSHJgDnJnqVygUZvY9YAbwBrA22v1LQn9CwX7mjdR9AgX8mZvZLoRO8taEX9QfcPfLo/+n9wMVwCxgkLuviK/SllGAiIhIRtSEJSIiGVGAiIhIRhQgIiKSEQWIiIhkRAEiIiIZUYCINJOZfRFdjlnUzOwuM7sy7jqkeClApKhEU9YfFN0fbGYzc/x+08xsSO197t7J3Wfn4L3mmNn8aCBoat8QM5uW7fcSyQYFiCRWrSklCkkbwrTfRSVaBkESRgEiRcnMdgBuA/aJmpQ+jfa3M7NrzezD6Lf526KpJDCzfmb2kZmNNLNPgDvNrIuZPW5mNWa2JLq/RXT8aGA/4OboPW6O9ruZbRPd39DM7o5+/gMzu9jMWkXPDTazmVE9S8zsfTM7tIk/2jXAiNQ0GHX+zL2i925Ta9//zpCi93vezK43s0/NbLaZ9Yn2z7WwENapdV62q4WFpD43s+lm1rPWa28fPbfYwgJrx9Z67i4zu9XMKs3sS6B/Gn9tUmIUIFKU3P1tYDjw96hJKfWFOxbYjjDNxTaE6bIvrfWj3QnTSPQEhhH+D9wZPd4KWAbcHL3HRYRpNM6N3uPcekq5CdgQ2BroC5wCnFbr+b2A/xCmPrka+EM0v1NDqggT741o8kOo317A68DGwB8J02Z8l/BZDCKEYadax58EXBHV9ypwL/xvPrXJ0WtsQpg65BYz26nWz54IjAY2ICyYJAmjAJGSEX0xDwV+6u6LowWIfkOYEDNlLfArd1/h7svcfZG7P+TuX0XHjyYEQTrv1xo4DviFu3/u7nMI04qfXOuwD9z9dndfQ5gbqQewaRMvfSlwnpl1S6eOOt539zuj9/sTYWbpy6M/79PASkKYpPzN3Z+L5mO6iHBGtyXwA2BO9Fqro0WdHmLdRIAAj7r78+6+1t2XZ1CrFLlCbAMWyVQ3oAPwcq1f8o0woV1KTe0vOzPrAFwPfB9ILUq0gZm1jr6EG9MVKAM+qLXvA76+SNAnqTvu/lVUV+0zgPW4+5tm9jgwijCDa3PMr3V/WfR6dffVfv//LZ4WLb+6GNiMcEa2
V6ppMNIGmFTfz0oyKUCkmNWdCXQh4QtyJ3f/OM2fuQD4FrCXu39iZrsRZkm1Bo6v+36rCF+2b0X7tgIaeu/m+BXwCl9fKOnLaNsBSC0G1b2F7/O/tW+ipq0KwhoVc4Hp7j6gkZ/VTKwJpyYsKWbzgS2idVuotbjQ9Wa2CYCZbW5mhzTyGhsQQudTM6sgfHHXfY96x3xEZygPAKPNbIOoA/pnwD0t+DOlXvs9QhPUj2vtqyGE0yALy6WeTliVryUOM7PvRZ/hFcA/3H0u8DiwnZmdbGZto9t3o4sXRAAFiBS3Z4F/AZ+Y2cJo30jgPeBFM/sMeIZwhtGQG4D2hLOJF4En6zz/W+Do6CqqG+v5+fMIZwazCR3JfwTuyOyPs57LgY519g0Ffg4sAnYCXmjhe/yREJqLge8QOtWJ+oMOJvQfzSM0xY0F2rXw/aSEaD0QERHJiM5AREQkIwoQERHJiAJEREQyogAREZGMKEBERCQjChAREcmIAkRERDKiABERkYwoQEREJCP/DwT77E68a5slAAAAAElFTkSuQmCC\n",
771 | "text/plain": [
772 | ""
773 | ]
774 | },
775 | "metadata": {},
776 | "output_type": "display_data"
777 | },
778 | {
779 | "name": "stdout",
780 | "output_type": "stream",
781 | "text": [
782 | "Final metric value: -0.3417778050822575\n",
783 | "Optimizer's stopping condition, GradientDescentOptimizerv4Template: Convergence checker passed at iteration 9.\n"
784 | ]
785 | },
786 | {
787 | "data": {
788 | "application/vnd.jupyter.widget-view+json": {
789 | "model_id": "0222ebe7be7747e8a8a6dfbbf52fde65",
790 | "version_major": 2,
791 | "version_minor": 0
792 | },
793 | "text/plain": [
794 | "interactive(children=(IntSlider(value=91, description='image_z', max=182), FloatSlider(value=0.5, description=…"
795 | ]
796 | },
797 | "metadata": {},
798 | "output_type": "display_data"
799 | }
800 | ],
801 | "source": [
802 | "REegistered_Test_output_path=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/\"\n",
803 | "Registered_Testdata_tmf_file=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/\"\n",
804 | "\n",
805 | "fixed_image = sitk.ReadImage('MNI152_T1_1mm_Brain.nii.gz', sitk.sitkFloat32)\n",
806 | "\n",
807 | "for im in range(0,3):\n",
808 | " \n",
809 | " complete_test_img_path=Test_data_raw_path+Test_data_list[im]\n",
810 | " \n",
811 |     "    ###Getting Subject MRI\n",
812 | " img_fn = str(complete_test_img_path)\n",
813 | " img_name = img_fn.split('/')[-1].split('.')[0]\n",
814 | " \n",
815 | " print(img_name)\n",
816 | " \n",
817 | " ########### Output Name & Folder ##########\n",
818 | " Registered_imageName=img_name+'.nii.gz'\n",
819 | " Transformation_imageName=img_name+'.tfm'\n",
820 | " \n",
821 | " # =============================================================================\n",
822 | " # Loading the data\n",
823 | " # =============================================================================\n",
824 | " moving_image = sitk.ReadImage(img_fn, sitk.sitkFloat32)\n",
825 | " interact(display_images, fixed_image_z=(0,fixed_image.GetSize()[2]-1), moving_image_z=(0,moving_image.GetSize()[2]-1), fixed_npa = fixed(sitk.GetArrayViewFromImage(fixed_image)), moving_npa=fixed(sitk.GetArrayViewFromImage(moving_image)));\n",
826 | " # =============================================================================\n",
827 |     "    # Initial Alignment \n",
828 | " # =============================================================================\n",
829 | " initial_transform = sitk.CenteredTransformInitializer(fixed_image, \n",
830 | " moving_image, \n",
831 | " sitk.Euler3DTransform(), \n",
832 | " sitk.CenteredTransformInitializerFilter.GEOMETRY)\n",
833 | "\n",
834 | " moving_resampled = sitk.Resample(moving_image, fixed_image, initial_transform, sitk.sitkNearestNeighbor, 0.0, moving_image.GetPixelID())\n",
835 | "\n",
836 | " interact(display_images_with_alpha, image_z=(0,fixed_image.GetSize()[2]), alpha=(0.0,1.0,0.05), fixed = fixed(fixed_image), moving=fixed(moving_resampled));\n",
837 | " # =============================================================================\n",
838 | " # Registration\n",
839 | " # =============================================================================\n",
840 | " registration_method = sitk.ImageRegistrationMethod()\n",
841 | "\n",
842 | " # Similarity metric settings.\n",
843 | " registration_method.SetMetricAsMattesMutualInformation(numberOfHistogramBins=50)\n",
844 | " registration_method.SetMetricSamplingStrategy(registration_method.RANDOM)\n",
845 | " registration_method.SetMetricSamplingPercentage(0.01)\n",
846 | "\n",
847 | " registration_method.SetInterpolator(sitk.sitkNearestNeighbor)\n",
848 | "\n",
849 | " # Optimizer settings.\n",
850 | " registration_method.SetOptimizerAsGradientDescent(learningRate=1.0, numberOfIterations=100, convergenceMinimumValue=1e-6, convergenceWindowSize=10)\n",
851 | " registration_method.SetOptimizerScalesFromPhysicalShift()\n",
852 | "\n",
853 | " # Setup for the multi-resolution framework. \n",
854 | " registration_method.SetShrinkFactorsPerLevel(shrinkFactors = [4,2,1])\n",
855 | " registration_method.SetSmoothingSigmasPerLevel(smoothingSigmas=[2,1,0])\n",
856 | " registration_method.SmoothingSigmasAreSpecifiedInPhysicalUnitsOn()\n",
857 | "\n",
858 | " # Don't optimize in-place, we would possibly like to run this cell multiple times.\n",
859 | " registration_method.SetInitialTransform(initial_transform, inPlace=False)\n",
860 | "\n",
861 | " # Connect all of the observers so that we can perform plotting during registration.\n",
862 | " registration_method.AddCommand(sitk.sitkStartEvent, start_plot)\n",
863 | " registration_method.AddCommand(sitk.sitkEndEvent, end_plot)\n",
864 | " registration_method.AddCommand(sitk.sitkMultiResolutionIterationEvent, update_multires_iterations) \n",
865 | " registration_method.AddCommand(sitk.sitkIterationEvent, lambda: plot_values(registration_method))\n",
866 | "\n",
867 | " final_transform = registration_method.Execute(sitk.Cast(fixed_image, sitk.sitkFloat32), \n",
868 | " sitk.Cast(moving_image, sitk.sitkFloat32))\n",
869 | "\n",
870 | " # =============================================================================\n",
871 | " # post processing Analysis\n",
872 | " # =============================================================================\n",
873 | " print('Final metric value: {0}'.format(registration_method.GetMetricValue()))\n",
874 | " print('Optimizer\\'s stopping condition, {0}'.format(registration_method.GetOptimizerStopConditionDescription()))\n",
875 | " #Visualize Expected Results\n",
876 | "\n",
877 | " moving_resampled = sitk.Resample(moving_image, fixed_image, final_transform, sitk.sitkNearestNeighbor, 0.0, moving_image.GetPixelID())\n",
878 | "\n",
879 | " interact(display_images_with_alpha, image_z=(0,fixed_image.GetSize()[2]), alpha=(0.0,1.0,0.05), fixed = fixed(fixed_image), moving=fixed(moving_resampled));\n",
880 | "\n",
881 | " sitk.WriteImage(moving_resampled, os.path.join(REegistered_Test_output_path, Registered_imageName))\n",
882 | " sitk.WriteTransform(final_transform,os.path.join(Registered_Testdata_tmf_file,Transformation_imageName))"
883 | ]
884 | },
885 | {
886 | "cell_type": "markdown",
887 | "metadata": {
888 | "heading_collapsed": true
889 | },
890 | "source": [
891 |     "# Creating An Excel File For Registered Test Data"
892 | ]
893 | },
894 | {
895 | "cell_type": "code",
896 | "execution_count": 35,
897 | "metadata": {
898 | "hidden": true
899 | },
900 | "outputs": [
901 | {
902 | "name": "stdout",
903 | "output_type": "stream",
904 | "text": [
905 | "IBSR_02\n",
906 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/IBSR_02.nii.gz\n",
907 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_02.tfm\n",
908 | "###############################\n",
909 | "IBSR_10\n",
910 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/IBSR_10.nii.gz\n",
911 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_10.tfm\n",
912 | "###############################\n",
913 | "IBSR_15\n",
914 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/IBSR_15.nii.gz\n",
915 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_15.tfm\n",
916 | "###############################\n"
917 | ]
918 | }
919 | ],
920 | "source": [
921 | "test_id_list=[]\n",
922 | "test_sub_t1=[]\n",
923 | "test_tfm_list=[]\n",
924 | "\n",
925 | "\n",
926 | "REegistered_Test_output_path=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/\"\n",
927 | "Registered_Testdata_tmf_file=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/\"\n",
928 | "\n",
929 | "fixed_image = sitk.ReadImage('MNI152_T1_1mm_Brain.nii.gz', sitk.sitkFloat32)\n",
930 | "\n",
931 | "for im in range(0,3):\n",
932 | " \n",
933 | " complete_test_img_path=Test_data_raw_path+Test_data_list[im]\n",
934 | " \n",
935 |     "    ###Getting Subject MRI\n",
936 | " img_fn = str(complete_test_img_path)\n",
937 | " img_name = img_fn.split('/')[-1].split('.')[0]\n",
938 | " print(img_name)\n",
939 | " test_id_list.append(img_name)\n",
940 | " \n",
941 | "\n",
942 | " ######Getting Path\n",
943 | " reg_img_path=REegistered_Test_output_path+img_name+'.nii.gz'\n",
944 | " test_sub_t1.append(reg_img_path)\n",
945 | " \n",
946 | " \n",
947 | " trans_tfm_file_path=Registered_Testdata_tmf_file+img_name+'.tfm'\n",
948 | " test_tfm_list.append(trans_tfm_file_path)\n",
949 | " \n",
950 | " \n",
951 | " \n",
952 | " \n",
953 | " print(reg_img_path)\n",
954 | " print(trans_tfm_file_path)\n",
955 | " print(\"###############################\")\n",
956 | "\n",
957 | "Inf0_data=pd.DataFrame(list(zip(test_id_list, test_sub_t1, test_tfm_list)),\n",
958 | "columns=['id','subj_folder','registartion_tmf'])\n",
959 | "Inf0_data.to_csv(\"MISAPreorocessingTestReg_info.csv\", encoding='utf-8', index=False)"
960 | ]
961 | },
962 | {
963 | "cell_type": "code",
964 | "execution_count": null,
965 | "metadata": {
966 | "hidden": true
967 | },
968 | "outputs": [],
969 | "source": []
970 | }
971 | ],
972 | "metadata": {
973 | "kernelspec": {
974 | "display_name": "Python 3",
975 | "language": "python",
976 | "name": "python3"
977 | },
978 | "language_info": {
979 | "codemirror_mode": {
980 | "name": "ipython",
981 | "version": 3
982 | },
983 | "file_extension": ".py",
984 | "mimetype": "text/x-python",
985 | "name": "python",
986 | "nbconvert_exporter": "python",
987 | "pygments_lexer": "ipython3",
988 | "version": "3.6.7"
989 | },
990 | "toc": {
991 | "base_numbering": 1,
992 | "nav_menu": {},
993 | "number_sections": true,
994 | "sideBar": true,
995 | "skip_h1_title": false,
996 | "title_cell": "Table of Contents",
997 | "title_sidebar": "Contents",
998 | "toc_cell": true,
999 | "toc_position": {},
1000 | "toc_section_display": true,
1001 | "toc_window_display": true
1002 | },
1003 | "varInspector": {
1004 | "cols": {
1005 | "lenName": 16,
1006 | "lenType": 16,
1007 | "lenVar": 40
1008 | },
1009 | "kernels_config": {
1010 | "python": {
1011 | "delete_cmd_postfix": "",
1012 | "delete_cmd_prefix": "del ",
1013 | "library": "var_list.py",
1014 | "varRefreshCmd": "print(var_dic_list())"
1015 | },
1016 | "r": {
1017 | "delete_cmd_postfix": ") ",
1018 | "delete_cmd_prefix": "rm(",
1019 | "library": "var_list.r",
1020 | "varRefreshCmd": "cat(var_dic_list()) "
1021 | }
1022 | },
1023 | "types_to_exclude": [
1024 | "module",
1025 | "function",
1026 | "builtin_function_or_method",
1027 | "instance",
1028 | "_Feature"
1029 | ],
1030 | "window_display": false
1031 | }
1032 | },
1033 | "nbformat": 4,
1034 | "nbformat_minor": 2
1035 | }
1036 |
--------------------------------------------------------------------------------
/MISA_Project_Report.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/MISA_Project_Report.pdf
--------------------------------------------------------------------------------
/PreparingTestingData.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "toc": true
7 | },
8 | "source": [
9 | "<h1>Table of Contents<span class=\"tocSkip\"></span></h1>\n",
10 | "<div class=\"toc\"><ul class=\"toc-item\"></ul></div>"
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "metadata": {},
16 | "source": [
17 | " Description & Instructions How to use this Script.\n",
18 | "\n",
19 | "* 1) Import the libraries by running the Section \"Importing Libraries\"\n",
20 | "* 2) Functions used for histogram matching and Normalization\n",
21 | "* 3) Apply the Histogram Matching\n",
22 | "* 4) Resampling the 1mm x 1mm x 1mm predicted segmentation back to the original spacing using the previously saved Transformation Matrix"
23 | ]
24 | },
25 | {
26 | "cell_type": "markdown",
27 | "metadata": {
28 | "heading_collapsed": true
29 | },
30 | "source": [
31 | "# Importing Libraries"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": 1,
37 | "metadata": {
38 | "hidden": true
39 | },
40 | "outputs": [
41 | {
42 | "name": "stderr",
43 | "output_type": "stream",
44 | "text": [
45 | "C:\\Users\\Fakrul-IslamTUSHAR\\Anaconda2\\envs\\nnet\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
46 | " from ._conv import register_converters as _register_converters\n"
47 | ]
48 | }
49 | ],
50 | "source": [
51 | "import SimpleITK as sitk\n",
52 | "import os\n",
53 | "import pandas as pd\n",
54 | "import numpy as np\n",
55 | "import glob\n",
56 | "import os\n",
57 | "import nibabel as nib\n",
58 | "\n",
59 | "from matplotlib import pyplot as plt\n",
60 | "from dltk.io.augmentation import *\n",
61 | "from dltk.io.preprocessing import *"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {
67 | "heading_collapsed": true
68 | },
69 | "source": [
70 | "# Function"
71 | ]
72 | },
73 | {
74 | "cell_type": "markdown",
75 | "metadata": {},
76 | "source": [
77 | "# Histogram Matching"
78 | ]
79 | },
80 | {
81 | "cell_type": "code",
82 | "execution_count": 2,
83 | "metadata": {},
84 | "outputs": [],
85 | "source": [
86 | "# -------------------------------------------------------\n",
87 | "##This functions were coppied from the github reprocesoty:https://github.com/sergivalverde/mri_utils\n",
88 | "# if you this code please refer this github.\n",
89 | "\n",
90 | "# Image processing functions\n",
91 | "# Useful for brain MRI analysis\n",
92 | "#\n",
93 | "# Sergi Valverde 2018\n",
94 | "# svalverde@eia.udg.edu\n",
95 | "#\n",
96 | "# -------------------------------------------------------\n",
97 | "\n",
98 | "import numpy as np\n",
99 | "from scipy.ndimage import label\n",
100 | "from scipy.ndimage import labeled_comprehension as lc\n",
101 | "import SimpleITK as sitk\n",
102 | "\n",
103 | "\n",
104 | "\n",
105 | "def histogram_matching(mov_scan, ref_scan,\n",
106 | " histogram_levels=2048,\n",
107 | " match_points=100,\n",
108 | " set_th_mean=True):\n",
109 | " \"\"\"\n",
110 | " Histogram matching following the method developed on\n",
111 | " Nyul et al 2001 (ITK implementation)\n",
112 | " inputs:\n",
113 | " - mov_scan: np.array containing the image to normalize\n",
114 | " - ref_scan np.array containing the reference image\n",
115 | " - histogram levels\n",
116 | " - number of matched points\n",
117 | " - Threshold Mean setting\n",
118 | " outputs:\n",
119 | " - histogram matched image\n",
120 | " \"\"\"\n",
121 | "\n",
122 | " # convert np arrays into itk image objects\n",
123 | " ref = sitk.GetImageFromArray(ref_scan.astype('float32'))\n",
124 | " mov = sitk.GetImageFromArray(mov_scan.astype('float32'))\n",
125 | "\n",
126 | " # perform histogram matching\n",
127 | " caster = sitk.CastImageFilter()\n",
128 | " caster.SetOutputPixelType(ref.GetPixelID())\n",
129 | "\n",
130 | " matcher = sitk.HistogramMatchingImageFilter()\n",
131 | " matcher.SetNumberOfHistogramLevels(histogram_levels)\n",
132 | " matcher.SetNumberOfMatchPoints(match_points)\n",
133 | " matcher.SetThresholdAtMeanIntensity(set_th_mean)\n",
134 | " matched_vol = matcher.Execute(mov, ref)\n",
135 | "\n",
136 | " return matched_vol"
137 | ]
138 | },
139 | {
140 | "cell_type": "markdown",
141 | "metadata": {},
142 | "source": [
143 | "# Normalization and Histogram Matching"
144 | ]
145 | },
146 | {
147 | "cell_type": "markdown",
148 | "metadata": {},
149 | "source": [
150 | "## Loading Reference data "
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": 4,
156 | "metadata": {},
157 | "outputs": [
158 | {
159 | "name": "stderr",
160 | "output_type": "stream",
161 | "text": [
162 | "C:\\Users\\Fakrul-IslamTUSHAR\\Anaconda2\\envs\\nnet\\lib\\site-packages\\ipykernel_launcher.py:5: FutureWarning: Method .as_matrix will be removed in a future version. Use .values instead.\n",
163 | " \"\"\"\n"
164 | ]
165 | }
166 | ],
167 | "source": [
168 | "mylist = pd.read_csv(\n",
169 | " \"MISAPreorocessingTestReg_info.csv\",\n",
170 | " dtype=object,\n",
171 | " keep_default_na=False,\n",
172 | " na_values=[]).as_matrix()"
173 | ]
174 | },
175 | {
176 | "cell_type": "code",
177 | "execution_count": 5,
178 | "metadata": {},
179 | "outputs": [],
180 | "source": [
181 | "Save_Preprocessed_Test_data=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/save_processed_test_data/\"\n"
182 | ]
183 | },
184 | {
185 | "cell_type": "code",
186 | "execution_count": 6,
187 | "metadata": {},
188 | "outputs": [],
189 | "source": [
190 | "ref_volume=sitk.ReadImage(\"ref_07.nii.gz\", sitk.sitkFloat32)\n",
191 | "ref_array = sitk.GetArrayFromImage(ref_volume)"
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": 8,
197 | "metadata": {},
198 | "outputs": [
199 | {
200 | "name": "stdout",
201 | "output_type": "stream",
202 | "text": [
203 | "IBSR_02\n",
204 | "########Saved#########\n",
205 | "IBSR_10\n",
206 | "########Saved#########\n",
207 | "IBSR_15\n",
208 | "########Saved#########\n"
209 | ]
210 | }
211 | ],
212 | "source": [
213 | "for im in mylist:\n",
214 | " ###Getting Suvject MRI\n",
215 | " img_fn = str(im[1])\n",
216 | " img_name = img_fn.split('/')[-1].split('.')[0]\n",
217 | " print(img_name)\n",
218 | " #Image_Name.append(img_name)\n",
219 | " histoMached_imageName=img_name+'.nii.gz' \n",
220 | "\n",
221 | "# =============================================================================\n",
222 | "# load data\n",
223 | "# =============================================================================\n",
224 | " #Loading the image\n",
225 | " sitk_t1 = sitk.ReadImage(img_fn, sitk.sitkFloat32)\n",
226 | " t1 = sitk.GetArrayFromImage(sitk_t1)\n",
227 | " normalized_vol=normalise_zero_one(t1)\n",
228 | " \n",
229 | " \n",
230 | " Histo_mached_vol=histogram_matching(normalized_vol,ref_array)\n",
231 | " Histo_mached_vol.CopyInformation(sitk_t1)\n",
232 | " \n",
233 | " sitk.WriteImage(Histo_mached_vol, os.path.join(Save_Preprocessed_Test_data,histoMached_imageName))\n",
234 | " print(\"########Saved#########\")"
235 | ]
236 | },
237 | {
238 | "cell_type": "code",
239 | "execution_count": 9,
240 | "metadata": {},
241 | "outputs": [
242 | {
243 | "name": "stdout",
244 | "output_type": "stream",
245 | "text": [
246 | "[['IBSR_02'\n",
247 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/IBSR_02.nii.gz'\n",
248 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_02.tfm']\n",
249 | " ['IBSR_10'\n",
250 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/IBSR_10.nii.gz'\n",
251 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_10.tfm']\n",
252 | " ['IBSR_15'\n",
253 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata/IBSR_15.nii.gz'\n",
254 | " 'C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_15.tfm']]\n"
255 | ]
256 | }
257 | ],
258 | "source": [
259 | "print(mylist)"
260 | ]
261 | },
262 | {
263 | "cell_type": "markdown",
264 | "metadata": {
265 | "heading_collapsed": true
266 | },
267 | "source": [
268 | "# Making the labels back to original Spacing"
269 | ]
270 | },
271 | {
272 | "cell_type": "markdown",
273 | "metadata": {
274 | "hidden": true
275 | },
276 | "source": [
277 | "For This Process Following steps need to be done.\n",
278 | "\n",
279 | "* 1) Put path of the Prediction(Segmented Nifti file) in Section 5.1 \"Final_Seg_test_path\"\n",
280 | "* 2) Put path of the RAW DATA(Nifti file) in Section 5.2 \"Test_data_raw_path\"\n",
281 | "* 3) Put path of the Saved Transformation Matrics(Nifti file) in Section 5.3 \"tmf_path\"\n",
282 | "* 4) put the desired folder path were you want to save the Segmented nifti with the original spacing \"path_to_save_images\" and RUN."
283 | ]
284 | },
285 | {
286 | "cell_type": "markdown",
287 | "metadata": {
288 | "heading_collapsed": true,
289 | "hidden": true
290 | },
291 | "source": [
292 | "## Segmented Results"
293 | ]
294 | },
295 | {
296 | "cell_type": "code",
297 | "execution_count": 35,
298 | "metadata": {
299 | "hidden": true
300 | },
301 | "outputs": [
302 | {
303 | "name": "stdout",
304 | "output_type": "stream",
305 | "text": [
306 | "['IBSR_02_seg.nii.gz', 'IBSR_10_seg.nii.gz', 'IBSR_15_seg.nii.gz']\n"
307 | ]
308 | }
309 | ],
310 | "source": [
311 | "Final_Seg_test_path=\"H:/f_r/final_test/\"\n",
312 | "My_Predicted_Seg_list=os.listdir(Final_Seg_test_path)\n",
313 | "print(My_Predicted_Seg_list)\n",
314 | "\n",
315 | "#complete_Segmented_data=Final_Seg_test_path+My_Predicted_Seg_list[0]\n",
316 | "#print(Segmented_data)"
317 | ]
318 | },
319 | {
320 | "cell_type": "markdown",
321 | "metadata": {
322 | "heading_collapsed": true,
323 | "hidden": true
324 | },
325 | "source": [
326 | "## Raw Test Data "
327 | ]
328 | },
329 | {
330 | "cell_type": "code",
331 | "execution_count": 36,
332 | "metadata": {
333 | "hidden": true
334 | },
335 | "outputs": [
336 | {
337 | "name": "stdout",
338 | "output_type": "stream",
339 | "text": [
340 | "['IBSR_02.nii.gz', 'IBSR_10.nii.gz', 'IBSR_15.nii.gz']\n"
341 | ]
342 | }
343 | ],
344 | "source": [
345 | "Test_data_raw_path=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/\"\n",
346 | "Test_data_list=os.listdir(Test_data_raw_path)\n",
347 | "print(Test_data_list)\n",
348 | "\n",
349 | "#complete_RAW_data=Test_data_raw_path+Test_data_list[0]\n",
350 | "#print(complete_RAW_data)"
351 | ]
352 | },
353 | {
354 | "cell_type": "markdown",
355 | "metadata": {
356 | "heading_collapsed": true,
357 | "hidden": true
358 | },
359 | "source": [
360 | "## Transformation Matrix"
361 | ]
362 | },
363 | {
364 | "cell_type": "code",
365 | "execution_count": 37,
366 | "metadata": {
367 | "hidden": true
368 | },
369 | "outputs": [
370 | {
371 | "name": "stdout",
372 | "output_type": "stream",
373 | "text": [
374 | "['IBSR_02.tfm', 'IBSR_10.tfm', 'IBSR_15.tfm']\n"
375 | ]
376 | }
377 | ],
378 | "source": [
379 | "tmf_path=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/\"\n",
380 | "tmf_data_list=os.listdir(tmf_path)\n",
381 | "print(tmf_data_list)"
382 | ]
383 | },
384 | {
385 | "cell_type": "markdown",
386 | "metadata": {
387 | "heading_collapsed": true,
388 | "hidden": true
389 | },
390 | "source": [
391 | "## Inverse Registration"
392 | ]
393 | },
394 | {
395 | "cell_type": "code",
396 | "execution_count": 41,
397 | "metadata": {
398 | "hidden": true
399 | },
400 | "outputs": [
401 | {
402 | "name": "stdout",
403 | "output_type": "stream",
404 | "text": [
405 | "H:/f_r/final_test/IBSR_02_seg.nii.gz\n",
406 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/IBSR_02.nii.gz\n",
407 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_02.tfm\n",
408 | "IBSR_02\n",
409 | "H:/f_r/final_test/IBSR_10_seg.nii.gz\n",
410 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/IBSR_10.nii.gz\n",
411 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_10.tfm\n",
412 | "IBSR_10\n",
413 | "H:/f_r/final_test/IBSR_15_seg.nii.gz\n",
414 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Test_Data_Raw/IBSR_15.nii.gz\n",
415 | "C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/Registered_Testdata_tmf_file/IBSR_15.tfm\n",
416 | "IBSR_15\n"
417 | ]
418 | }
419 | ],
420 | "source": [
421 | "path_to_save_images=\"C:/Users/Fakrul-IslamTUSHAR/Documents/MISAProject/MisaPreProcessing/submission/\"\n",
422 | "\n",
423 | "for im in range(0,3):\n",
424 | " \n",
425 | " ##Creating Data Path\n",
426 | " complete_Segmented_data=Final_Seg_test_path+My_Predicted_Seg_list[im]\n",
427 | " complete_RAW_data=Test_data_raw_path+Test_data_list[im]\n",
428 | " complete_tmf_data=tmf_path+tmf_data_list[im]\n",
429 | " \n",
430 | " print(complete_Segmented_data)\n",
431 | " print(complete_RAW_data)\n",
432 | " print(complete_tmf_data)\n",
433 | " \n",
434 | " ###Getting Suvject MRI\n",
435 | " img_fn = str(complete_RAW_data)\n",
436 | " img_name = img_fn.split('/')[-1].split('.')[0]\n",
437 | " label_fn=str(complete_Segmented_data)\n",
438 | " \n",
439 | " \n",
440 | " \n",
441 | " print(img_name)\n",
442 | " \n",
443 | " ##Creating Name\n",
444 | " Registered_imageName=img_name+\"_seg\"+'.nii.gz'\n",
445 | " \n",
446 | " ##The Original Spaced Image\n",
447 | " Original_fixed=sitk.ReadImage(img_fn, sitk.sitkFloat32)\n",
448 | " \n",
449 | " ####Segmented Prediction\n",
450 | " label_Registered_moving=sitk.ReadImage(label_fn, sitk.sitkFloat32)\n",
451 | " \n",
452 | " ######Load the Transformation\n",
453 | " initial_transform_for_InterTransformation=sitk.ReadTransform(complete_tmf_data)\n",
454 | " inverse_Transformation=initial_transform_for_InterTransformation.GetInverse()\n",
455 | " \n",
456 | " Original_resampled_label = sitk.Resample(label_Registered_moving, Original_fixed, \n",
457 | " inverse_Transformation, sitk.sitkNearestNeighbor, 0.0, \n",
458 | " label_Registered_moving.GetPixelID())\n",
459 | " \n",
460 | " sitk.WriteImage(Original_resampled_label, os.path.join(path_to_save_images, Registered_imageName))\n",
461 | " \n",
462 | " \n",
463 | " "
464 | ]
465 | },
466 | {
467 | "cell_type": "code",
468 | "execution_count": null,
469 | "metadata": {
470 | "hidden": true
471 | },
472 | "outputs": [],
473 | "source": []
474 | }
475 | ],
476 | "metadata": {
477 | "kernelspec": {
478 | "display_name": "Python 3",
479 | "language": "python",
480 | "name": "python3"
481 | },
482 | "language_info": {
483 | "codemirror_mode": {
484 | "name": "ipython",
485 | "version": 3
486 | },
487 | "file_extension": ".py",
488 | "mimetype": "text/x-python",
489 | "name": "python",
490 | "nbconvert_exporter": "python",
491 | "pygments_lexer": "ipython3",
492 | "version": "3.6.7"
493 | },
494 | "toc": {
495 | "base_numbering": 1,
496 | "nav_menu": {},
497 | "number_sections": true,
498 | "sideBar": true,
499 | "skip_h1_title": false,
500 | "title_cell": "Table of Contents",
501 | "title_sidebar": "Contents",
502 | "toc_cell": true,
503 | "toc_position": {},
504 | "toc_section_display": true,
505 | "toc_window_display": true
506 | },
507 | "varInspector": {
508 | "cols": {
509 | "lenName": 16,
510 | "lenType": 16,
511 | "lenVar": 40
512 | },
513 | "kernels_config": {
514 | "python": {
515 | "delete_cmd_postfix": "",
516 | "delete_cmd_prefix": "del ",
517 | "library": "var_list.py",
518 | "varRefreshCmd": "print(var_dic_list())"
519 | },
520 | "r": {
521 | "delete_cmd_postfix": ") ",
522 | "delete_cmd_prefix": "rm(",
523 | "library": "var_list.r",
524 | "varRefreshCmd": "cat(var_dic_list()) "
525 | }
526 | },
527 | "types_to_exclude": [
528 | "module",
529 | "function",
530 | "builtin_function_or_method",
531 | "instance",
532 | "_Feature"
533 | ],
534 | "window_display": false
535 | }
536 | },
537 | "nbformat": 4,
538 | "nbformat_minor": 2
539 | }
540 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Brain Tissue Segmentation Using NeuroNet With Different Pre-processing Techniques
2 | This Repository is for the MISA Course final project which was Brain tissue segmentation. we adopt NeuroNet which is a comprehensive brain image segmentation tool based on a novel multi-output CNN architecture which has been trained and tuned using IBSR18 data. If you use this model in your work please refer to the Original NeuroNet Paper and DLTK at https://github.com/DLTK/models/tree/master/ukbb_neuronet_brain_segmentation
3 |
4 | If our work helps in your task or project, please cite the work at https://ieeexplore.ieee.org/document/8858515 (Pre-print https://arxiv.org/abs/1904.00068 ). This work has been accepted for presentation at the 3rd International Conference on Imaging, Vision & Pattern Recognition (IVPR), 2019.
5 |
6 | # Citation
7 | ```
8 | F. I. Tushar, B. Alyafi, M. K. Hasan and L. Dahal,
9 | "Brain Tissue Segmentation Using NeuroNet With Different Pre-processing Techniques,"
10 | 2019 Joint 8th International Conference on Informatics, Electronics & Vision (ICIEV)
11 | and 2019 3rd International Conference on Imaging, Vision & Pattern Recognition (icIVPR),
12 | Spokane, WA, USA, 2019, pp. 223-227.
13 |
14 | @INPROCEEDINGS{8858515, author={F. I. {Tushar} and B. {Alyafi} and M. K. {Hasan} and L. {Dahal}},
15 | booktitle={2019 Joint 8th International Conference on Informatics, Electronics Vision (ICIEV) and
16 | 2019 3rd International Conference on Imaging, Vision Pattern Recognition (icIVPR)},
17 | title={Brain Tissue Segmentation Using NeuroNet With Different Pre-processing Techniques},
18 | year={2019},pages={223-227},
19 | }
20 | ```
21 | # Overview
22 | Automatic segmentation of MRI brain images is one of the vital steps for quantitative analysis of brain for further inspection. Since manual segmentation of brain tissues (white matter (WM), gray matter (GM) and cerebrospinal fluid (CSF)) is a time-consuming and tedious task that engages valuable human resources, hence, automatic brain tissue segmentation draws an enormous amount of attention in medical imaging. In this project, NeuroNet has been adopted to segment the brain which uses Residual Network (ResNet) in encoder and Fully Convolution Network (FCN) in the decoder. To achieve the best performance, various hyper-parameters have been tuned, while, network parameters (kernel and bias) were initialized using the NeuroNet pre-trained model. Different pre-processing pipelines have also been introduced to get best a robust trained model. The performance of the segmented validation images were measured quantitatively using Dice Similarity Co-efficient (DSC) and were reported in the best case as 0.8986±0.0174 for CSF, 0.9412 ± 0.0086 for GM, and 0.9335 ± 0.0166 for WM. We worked out that keeping the original patch size and using histogram preprocessing with 4000 steps had the highest achievable performance.
23 | 
24 |
25 | In this work two different pre-processing pipelines were implemented. To see the effect on the performance of the deep CNN with different pre-processing scheme. Figure below shown the overview of the pre-processing pipelines.
26 |
27 | 
28 |
29 | 
30 |
31 | # How to Run the Code
32 | To run the model and reproduce the best results, the following steps need to be performed.
33 |
34 | 1. Run Notebook “MISA_Project_PreProcesing_Step(1)_Registration.ipynb” to perform the registration of the Volumes to MNI template.
35 | 2. Run Notebook “MISA_Project_PreProcesing_Step(2)_Normalization.ipynb” to Perform Preprocessing (Pre-processing pipeline-2 mentioned in report) and to create the excel files that containing the path of the training , validation and testing data. Network Read the data from excel files that have the path of the data.
36 | 3. Folder “Model” Contain the pretrained model, Download the Weights from here https://goo.gl/VmhGYc
37 | 4. To run the code please run the command “python train.py --config config_spm_tissue.json”
38 | 5. In the file “config_spm_tissue.json” to maintain and configure model
39 |
40 | model_path": put ur model weights path (spm_tissue folder)
41 |
42 | 6. To prepare the Testing Data and After segmentation to bring it back to the original spacing use this Notebook “PreparingTestingData.ipynb”
43 | 7. To run the testing ““python deploy.py --config config_spm_tissue.json””
44 | 8. Finally, to compute the Dice and Box plot, run the “Evaluation_MISA_Project.ipynb”
45 |
46 |
47 | 
48 | 
49 |
50 |
51 |
--------------------------------------------------------------------------------
/model/README.md:
--------------------------------------------------------------------------------
1 | ## Fast and Robust Reproduction of Multiple Brain Image Segmentation Pipelines
2 |
3 | 
4 |
5 | ### Contact and referencing this work
6 | If there are any issues please contact the corresponding author of this implementation. If you employ this model in your work, please refer to this citation of the [paper](https://openreview.net/pdf?id=Hks1TRisM).
7 | ```
8 | @inproceedings{rajchl2018neuronet,
9 | title={NeuroNet: Fast and Robust Reproduction of Multiple Brain Image Segmentation Pipelines},
10 | author={Martin Rajchl and Nick Pawlowski and Daniel Rueckert and Paul M. Matthews and Ben Glocker},
11 | booktitle={International conference on Medical Imaging with Deep Learning (MIDL)},
12 | year={2018}
13 | }
14 | ```
15 |
16 |
17 | ### Data
18 | The data can be downloaded after registration from the [UK Biobank Imaging Enhancement Study website](https://imaging.ukbiobank.ac.uk/).
19 |
20 | Images and segmentations are read from a csv file in the format below. The original files (*.csv) is provided in this repo.
21 |
22 | These are parsed and extract tf.Tensor examples for training and evaluation in `reader.py` using a [SimpleITK](http://www.simpleitk.org/) for i/o of the .nii files.
23 |
24 |
25 | ### Usage
26 | Files:
27 | - `parse_csvs.ipynb` creates training/validation/testing .csv files from data paths and splits the subject ids into categories.
28 | - `sandbox.ipynb` visually assesses the outputs of the `reader.py` for a visual check of the inputs
29 | - `eval.ipynb` computes the visual and numerical results for the paper
30 |
31 | - `reader.py` dltk reader, containing the label mappings to and from consecutive ids and the python generator creating input tensors to the network, using a SimpleITK interface
32 | - `train.py` main training script to run all experiments with
33 | - `deploy.py` generic deploy script for all experiments
34 |
35 | - `config*.json` are configuration files to determine the dataset(s) to train on, scaling the flexible NeuroNet architecture and a few exposed training parameters.
36 | - `*.csv` csv files generated with `parse_csvs.ipynb`, containing the paths to all .nii image files
37 |
38 |
39 | #### Data Preprocessing
40 | We did not apply any data preprocessing, such as brain stripping or additional bias correction, etc. The input to the network is a single MNI registered 1mm isotropic T1-weighted MR image (as produced by the UK Biobank). Please refer to the [UKB Neuroimaging documentation](https://biobank.ctsu.ox.ac.uk/crystal/docs/brain_mri.pdf) for additional information.
41 |
42 | #### Training
43 | You can use the code (train.py) to train the model on the data yourself. Alternatively, we provide pretrained models from the paper here:
44 | - [neuronet_all](http://www.doc.ic.ac.uk/~mrajchl/dltk_models/model_zoo/neuronet/neuronet_all.tar.gz)
45 | - [neuronet_tissue](http://www.doc.ic.ac.uk/~mrajchl/dltk_models/model_zoo/neuronet/neuronet_tissue.tar.gz)
46 | - [neuronet_single fsl fast](http://www.doc.ic.ac.uk/~mrajchl/dltk_models/model_zoo/neuronet/fsl_fast.tar.gz)
47 | - [neuronet_single fsl first](http://www.doc.ic.ac.uk/~mrajchl/dltk_models/model_zoo/neuronet/fsl_first.tar.gz)
48 | - [neuronet_single spm tissue](http://www.doc.ic.ac.uk/~mrajchl/dltk_models/model_zoo/neuronet/spm_tissue.tar.gz)
49 | - [neuronet_single malp_em tissue](http://www.doc.ic.ac.uk/~mrajchl/dltk_models/model_zoo/neuronet/malp_em_tissue.tar.gz)
50 | - [neuronet_single malp_em](http://www.doc.ic.ac.uk/~mrajchl/dltk_models/model_zoo/neuronet/malp_em.tar.gz)
51 |
52 |
53 | Depending on the model, the number of output volumes will correspond with the number of segmentation tasks (i.e. neuronet_single will produce one volume, neuronet_all will produce 5 segmentation volumes).
54 |
55 | You can start a basic training with
56 | ```
57 | python train.py -c CUDA_DEVICE --config MY_CONFIG
58 | ```
59 | that will load the file paths from the previously created csvs, according to the config parameters.
60 |
61 | #### Deploy
62 | To deploy a model and run inference, run the deploy.py script and point to the model save_path:
63 |
64 | ```
65 | python deploy.py -p path/to/saved/model -c CUDA_DEVICE --config MY_CONFIG
66 | ```
--------------------------------------------------------------------------------
/model/config_all.json:
--------------------------------------------------------------------------------
1 | {
2 | "protocols": ["fsl_fast", "fsl_first", "spm_tissue", "malp_em", "malp_em_tissue"],
3 | "num_classes": [4, 16, 4, 139, 6],
4 | "model_path": "/tmp/neuronet/models/neuronet_all",
5 | "out_segm_path": "/tmp/neuronet/out/neuronet_all",
6 | "learning_rate": 0.001,
7 | "network": {
8 | "filters": [16, 32, 64, 128],
9 | "strides": [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2]],
10 | "num_residual_units": 2
11 | }
12 | }
--------------------------------------------------------------------------------
/model/config_fsl_fast.json:
--------------------------------------------------------------------------------
1 | {
2 | "protocols": ["fsl_fast"],
3 | "num_classes": [4],
4 | "model_path": "/tmp/neuronet/models/fsl_fast",
5 | "out_segm_path": "/tmp/neuronet/out/fsl_fast",
6 | "learning_rate": 0.001,
7 | "network": {
8 | "filters": [16, 32, 64, 128],
9 | "strides": [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2]],
10 | "num_residual_units": 2
11 | }
12 | }
--------------------------------------------------------------------------------
/model/config_fsl_first.json:
--------------------------------------------------------------------------------
1 | {
2 | "protocols": ["fsl_first"],
3 | "num_classes": [16],
4 | "model_path": "/tmp/neuronet/models/fsl_first",
5 | "out_segm_path": "/tmp/neuronet/out/fsl_first",
6 | "learning_rate": 0.001,
7 | "network": {
8 | "filters": [16, 32, 64, 128],
9 | "strides": [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2]],
10 | "num_residual_units": 2
11 | }
12 | }
--------------------------------------------------------------------------------
/model/config_malp_em.json:
--------------------------------------------------------------------------------
1 | {
2 | "protocols": ["malp_em"],
3 | "num_classes": [139],
4 | "model_path": "/tmp/neuronet/models/malp_em",
5 | "out_segm_path": "/tmp/neuronet/out/malp_em",
6 | "learning_rate": 0.001,
7 | "network": {
8 | "filters": [16, 32, 64, 128],
9 | "strides": [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2]],
10 | "num_residual_units": 2
11 | }
12 | }
--------------------------------------------------------------------------------
/model/config_malp_em_tissue.json:
--------------------------------------------------------------------------------
1 | {
2 | "protocols": ["malp_em_tissue"],
3 | "num_classes": [6],
4 | "model_path": "/tmp/neuronet/models/malp_em_tissue",
5 | "out_segm_path": "/tmp/neuronet/out/malp_em_tissue",
6 | "learning_rate": 0.001,
7 | "network": {
8 | "filters": [16, 32, 64, 128],
9 | "strides": [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2]],
10 | "num_residual_units": 2
11 | }
12 | }
--------------------------------------------------------------------------------
/model/config_spm_tissue.json:
--------------------------------------------------------------------------------
1 | {
2 | "protocols": ["spm_tissue"],
3 | "num_classes": [4],
4 | "model_path": "/home/maia_kbf/MISA_FIT/nnet_fit/spm_tissue",
5 | "out_segm_path": "/tmp/neuronet/out/spm_tissue",
6 | "learning_rate": 0.001,
7 | "network": {
8 | "filters": [16, 32, 64, 128],
9 | "strides": [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2]],
10 | "num_residual_units": 2
11 | }
12 | }
--------------------------------------------------------------------------------
/model/config_tissue.json:
--------------------------------------------------------------------------------
1 | {
2 | "protocols": ["fsl_fast", "spm_tissue", "malp_em_tissue"],
3 | "num_classes": [4, 4, 6],
4 | "model_path": "/tmp/neuronet/models/neuronet_tissue",
5 | "out_segm_path": "/tmp/neuronet/out/neuronet_tissue",
6 | "learning_rate": 0.001,
7 | "network": {
8 | "filters": [16, 32, 64, 128],
9 | "strides": [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2]],
10 | "num_residual_units": 2
11 | }
12 | }
--------------------------------------------------------------------------------
/model/deploy.pvpy:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import division
3 | from __future__ import print_function
4 |
5 | import argparse
6 | import os
7 | import time
8 | import numpy as np
9 | import pandas as pd
10 | import tensorflow as tf
11 | from keras import backend as K
# ---- TensorFlow / Keras GPU session setup ----
# Cap GPU memory usage so the process can share the device:
# grow allocations on demand and never exceed 23% of the card.
_gpu_config = tf.ConfigProto()
_gpu_config.gpu_options.allow_growth = True
_gpu_config.gpu_options.per_process_gpu_memory_fraction = 0.23
K.tensorflow_backend.set_session(tf.Session(config=_gpu_config))
17 |
18 |
19 |
20 | import SimpleITK as sitk
21 | import json
22 |
23 | from tensorflow.contrib import predictor
24 |
25 | from dltk.utils import sliding_window_segmentation_inference
26 |
27 | from reader import read_fn, map_labels
28 |
29 |
def predict(args, config):
    """Run sliding-window inference on every volume listed in ``args.csv``.

    Restores the most recently exported estimator model found under
    ``config["model_path"]``, predicts one probability volume per protocol
    and writes each result as ``"<img_id>_Seg.nii.gz"``.

    Args:
        args: argparse namespace with at least ``csv`` (path to the csv
            listing the files to predict on).
        config: dict parsed from the json run config; must provide
            ``"model_path"``, ``"out_segm_path"`` and ``"protocols"``.
    """

    # Read in the csv with the file names you would want to predict on.
    # .as_matrix() is deprecated (removed in pandas 1.0); .values is the
    # drop-in replacement recommended by the deprecation warning.
    file_names = pd.read_csv(args.csv,
                             dtype=object,
                             keep_default_na=False,
                             na_values=[]).values

    # From the model model_path, parse the latest saved estimator model
    # and restore a predictor from it. Exports live in integer-named
    # subdirectories; the last one in sorted order is the newest.
    export_dir = [os.path.join(config["model_path"], o)
                  for o in os.listdir(config["model_path"])
                  if os.path.isdir(os.path.join(config["model_path"], o)) and o.isdigit()][-1]
    print('Loading from {}'.format(export_dir))
    my_predictor = predictor.from_saved_model(export_dir)

    protocols = config["protocols"]
    # Fetch the output probability ops of the trained network
    y_probs = [my_predictor._fetch_tensors['y_prob_{}'.format(p)] for p in protocols]

    # Iterate through the files and predict on the full volumes
    for output in read_fn(file_references=file_names,
                          mode=tf.estimator.ModeKeys.PREDICT,
                          params={'extract_examples': False,
                                  'protocols': protocols}):

        print('Running file {}'.format(output['img_id']))
        t0 = time.time()

        # Parse the read function output and add a dummy batch dimension
        # as required by the network's input placeholder
        img = np.expand_dims(output['features']['x'], axis=0)

        # Do a sliding window inference with our DLTK wrapper
        preds = sliding_window_segmentation_inference(
            session=my_predictor.session,
            ops_list=y_probs,
            sample_dict={my_predictor._feed_tensors['x']: img},
            batch_size=2)

        # NOTE(review): the argmax/label-mapping steps below are disabled,
        # so `preds` still holds raw probabilities when cast to int32
        # further down — confirm this is intentional for this pipeline.
        # preds = [np.squeeze(np.argmax(pred, -1), axis=0) for pred in preds]
        # for i in range(len(protocols)):
        #     preds[i] = map_labels(preds[i],
        #                           protocol=protocols[i],
        #                           convert_to_protocol=True)

        # Save the file as .nii.gz using the header information from the
        # original sitk image
        out_folder = os.path.join(config["out_segm_path"], '{}'.format(output['img_id']))
        # Portable replacement for os.system('mkdir -p ...'): no shell,
        # no injection risk via config paths, works on Windows too.
        if not os.path.isdir(out_folder):
            os.makedirs(out_folder)

        for i in range(len(protocols)):
            # NOTE(review): output_fn (inside out_folder) is computed but
            # never used — the volume is written to the current working
            # directory instead. Kept as-is to preserve behaviour.
            output_fn = os.path.join(out_folder, protocols[i] + '.nii.gz')
            new_sitk = sitk.GetImageFromArray(preds[i].astype(np.int32))
            new_sitk.CopyInformation(output['sitk'])
            sitk.WriteImage(new_sitk, "{}_Seg.nii.gz".format(output['img_id']))

        # Print outputs
        print('ID={}; input_dim={}; time={};'.format(
            output['img_id'], img.shape, time.time() - t0))
96 |
if __name__ == '__main__':
    # Command line interface for running inference with a trained model.
    parser = argparse.ArgumentParser(description='Neuronet deploy script')
    parser.add_argument('--verbose', default=False, action='store_true')
    # Forwarded verbatim to CUDA_VISIBLE_DEVICES below.
    parser.add_argument('--cuda_devices', '-c', default='0')

    # csv listing the images to predict on; json run configuration.
    parser.add_argument('--csv', default='val.csv')
    parser.add_argument('--config', default='config_all.json')

    args = parser.parse_args()

    # Set TensorFlow logging verbosity ('1' keeps INFO, '3' errors only)
    if args.verbose:
        os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'
        tf.logging.set_verbosity(tf.logging.INFO)
    else:
        os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
        tf.logging.set_verbosity(tf.logging.ERROR)

    # GPU allocation options: restrict TF to the requested device(s)
    os.environ["CUDA_VISIBLE_DEVICES"] = args.cuda_devices

    # Parse the json run config (model path, protocols, output path)
    with open(args.config) as f:
        config = json.load(f)

    # Run inference (despite the historical comment, this predicts)
    predict(args, config)
125 |
--------------------------------------------------------------------------------
/model/deploy.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import division
3 | from __future__ import print_function
4 |
5 | import argparse
6 | import os
7 | import time
8 | import numpy as np
9 | import pandas as pd
10 | import tensorflow as tf
11 | from keras import backend as K
####Tensorflow wizard
# Limit TensorFlow's GPU usage at import time: grow allocations on demand
# and cap this process at 23% of the GPU memory so the card can be shared.
config=tf.ConfigProto()
config.gpu_options.allow_growth=True
config.gpu_options.per_process_gpu_memory_fraction=0.23
# Install the capped session as the Keras backend session.
# NOTE(review): this module-level `config` is rebound to the json run
# configuration inside the __main__ block below — confirm nothing reads
# the ConfigProto after that point.
K.tensorflow_backend.set_session(tf.Session(config=config))
17 |
18 |
19 |
20 | import SimpleITK as sitk
21 | import json
22 |
23 | from tensorflow.contrib import predictor
24 |
25 | from dltk.utils import sliding_window_segmentation_inference
26 |
27 | from reader import read_fn, map_labels
28 |
29 |
def predict(args, config):
    """Run sliding-window inference on every file referenced in a csv.

    Restores the newest exported estimator model found under
    ``config['model_path']``, predicts a segmentation for each protocol in
    ``config['protocols']`` and writes one ``<protocol>.nii.gz`` per
    protocol into ``config['out_segm_path']/<img_id>/``.

    Args:
        args: Parsed command line arguments; ``args.csv`` points to the
            csv with the file references to predict on.
        config (dict): Run configuration with keys ``model_path``,
            ``protocols`` and ``out_segm_path``.
    """

    # Read in the csv with the file names you would want to predict on.
    # .values replaces the long-deprecated DataFrame.as_matrix().
    file_names = pd.read_csv(args.csv,
                             dtype=object,
                             keep_default_na=False,
                             na_values=[]).values

    # From the model model_path, parse the latest saved estimator model
    # and restore a predictor from it. Exports live in digit-named
    # subdirectories (timestamps); os.listdir() order is arbitrary, so
    # sort before taking the last (newest) one.
    export_dir = [os.path.join(config["model_path"], o)
                  for o in sorted(os.listdir(config["model_path"]))
                  if os.path.isdir(os.path.join(config["model_path"], o)) and o.isdigit()][-1]
    print('Loading from {}'.format(export_dir))
    my_predictor = predictor.from_saved_model(export_dir)

    protocols = config["protocols"]
    # Fetch the output probability ops of the trained network
    y_probs = [my_predictor._fetch_tensors['y_prob_{}'.format(p)] for p in protocols]

    # Iterate through the files and predict on the full volumes
    for output in read_fn(file_references=file_names,
                          mode=tf.estimator.ModeKeys.PREDICT,
                          params={'extract_examples': False,
                                  'protocols': protocols}):

        print('Running file {}'.format(output['img_id']))
        t0 = time.time()

        # Parse the read function output and add a dummy batch dimension
        # as required
        img = np.expand_dims(output['features']['x'], axis=0)

        # Do a sliding window inference with our DLTK wrapper
        preds = sliding_window_segmentation_inference(
            session=my_predictor.session,
            ops_list=y_probs,
            sample_dict={my_predictor._feed_tensors['x']: img},
            batch_size=2)

        # Calculate the hard prediction from the class probabilities
        preds = [np.squeeze(np.argmax(pred, -1), axis=0) for pred in preds]

        # Map the consecutive integer label ids back to the original ones
        for i in range(len(protocols)):
            preds[i] = map_labels(preds[i],
                                  protocol=protocols[i],
                                  convert_to_protocol=True)

        # Save one file per protocol as .nii.gz using the header
        # information from the original sitk image
        out_folder = os.path.join(config["out_segm_path"], '{}'.format(output['img_id']))
        # Portable (and py2-compatible) replacement for os.system('mkdir -p')
        if not os.path.isdir(out_folder):
            os.makedirs(out_folder)

        for i in range(len(protocols)):
            output_fn = os.path.join(out_folder, protocols[i] + '.nii.gz')
            new_sitk = sitk.GetImageFromArray(preds[i].astype(np.int32))
            new_sitk.CopyInformation(output['sitk'])
            # BUG FIX: previously every protocol was written to the same
            # '<img_id>_Seg.nii.gz' in the cwd, overwriting earlier
            # protocols and leaving out_folder empty; write to the
            # per-protocol path instead.
            sitk.WriteImage(new_sitk, output_fn)

        # Print outputs
        print('ID={}; input_dim={}; time={};'.format(
            output['img_id'], img.shape, time.time() - t0))
95 |
96 |
if __name__ == '__main__':
    # Build the command line interface.
    cli = argparse.ArgumentParser(description='Neuronet deploy script')
    cli.add_argument('--verbose', default=False, action='store_true')
    cli.add_argument('--cuda_devices', '-c', default='0')
    cli.add_argument('--csv', default='test.csv')
    cli.add_argument('--config', default='config_all.json')
    args = cli.parse_args()

    # Pick TensorFlow logging verbosity from the --verbose flag
    # ('1' keeps INFO messages, '3' shows errors only).
    if args.verbose:
        log_level, tf_level = '1', tf.logging.INFO
    else:
        log_level, tf_level = '3', tf.logging.ERROR
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = log_level
    tf.logging.set_verbosity(tf_level)

    # Restrict TensorFlow to the requested GPU device(s).
    os.environ["CUDA_VISIBLE_DEVICES"] = args.cuda_devices

    # Load the json run configuration.
    with open(args.config) as config_file:
        config = json.load(config_file)

    # Run inference.
    predict(args, config)
125 |
--------------------------------------------------------------------------------
/model/dim.nii:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/model/dim.nii
--------------------------------------------------------------------------------
/model/neuronet.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 | from __future__ import print_function
3 | from __future__ import division
4 | from __future__ import absolute_import
5 |
6 | import tensorflow as tf
7 |
8 | from dltk.core.residual_unit import vanilla_residual_unit_3d
9 | from dltk.core.upsample import linear_upsample_3d
10 |
11 |
def upscore_layer_3d(inputs,
                     inputs2,
                     out_filters,
                     in_filters=None,
                     strides=(2, 2, 2),
                     mode=tf.estimator.ModeKeys.EVAL, use_bias=False,
                     kernel_initializer=tf.initializers.variance_scaling(distribution='uniform'),
                     bias_initializer=tf.zeros_initializer(),
                     kernel_regularizer=None,
                     bias_regularizer=None):
    """Upscore layer according to [1].

    Upsamples `inputs` by `strides` and adds a 1x1x1-projected,
    batch-normalised skip connection from the higher-resolution encoder
    features `inputs2`.

    [1] J. Long et al. Fully convolutional networks for semantic segmentation.
    CVPR 2015.

    Args:
        inputs (tf.Tensor): Input features to be upscored.
        inputs2 (tf.Tensor): Higher resolution features from the encoder to add.
        out_filters (int): Number of output filters (typically, number of
            segmentation classes)
        in_filters (None, optional): None or number of input filters.
        strides (tuple, optional): Upsampling factor for a strided transpose
            convolution.
        mode (TYPE, optional): One of the tf.estimator.ModeKeys strings: TRAIN,
            EVAL or PREDICT
        use_bias (bool, optional): Boolean, whether the layer uses a bias.
        kernel_initializer (TYPE, optional): An initializer for the convolution
            kernel.
        bias_initializer (TYPE, optional): An initializer for the bias vector.
            If None, no bias will be applied.
        kernel_regularizer (None, optional): Optional regularizer for the
            convolution kernel.
        bias_regularizer (None, optional): Optional regularizer for the bias
            vector.

    Returns:
        tf.Tensor: Upscore tensor

    """
    # Shared keyword arguments for both 1x1x1 projection convolutions.
    conv_params = {'use_bias': use_bias,
                   'kernel_initializer': kernel_initializer,
                   'bias_initializer': bias_initializer,
                   'kernel_regularizer': kernel_regularizer,
                   'bias_regularizer': bias_regularizer}

    # Compute an upsampling shape dynamically from the input tensor. Input
    # filters are required to be static.
    if in_filters is None:
        in_filters = inputs.get_shape().as_list()[-1]

    assert len(inputs.get_shape().as_list()) == 5, \
        'inputs are required to have a rank of 5.'
    assert len(inputs.get_shape().as_list()) == len(inputs2.get_shape().as_list()), \
        'Ranks of input and input2 differ'

    # Account for differences in the number of input and output filters
    # by projecting to out_filters with a 1x1x1 convolution first.
    if in_filters != out_filters:
        x = tf.layers.conv3d(inputs=inputs,
                             filters=out_filters,
                             kernel_size=(1, 1, 1),
                             strides=(1, 1, 1),
                             padding='same',
                             name='filter_conversion',
                             **conv_params)
    else:
        x = inputs

    # Upsample inputs by `strides` (linear interpolation upsampling)
    x = linear_upsample_3d(inputs=x, strides=strides)

    # Skip connection: project the encoder features to out_filters so the
    # element-wise sum below is shape-compatible.
    x2 = tf.layers.conv3d(inputs=inputs2,
                          filters=out_filters,
                          kernel_size=(1, 1, 1),
                          strides=(1, 1, 1),
                          padding='same',
                          **conv_params)

    # Batch statistics are only updated in TRAIN mode.
    x2 = tf.layers.batch_normalization(
        x2, training=mode == tf.estimator.ModeKeys.TRAIN)

    # Return the element-wise sum
    return tf.add(x, x2)
95 |
96 |
def neuronet_3d(inputs,
                num_classes,
                protocols,
                num_res_units=2,
                filters=(16, 32, 64, 128),
                strides=((1, 1, 1), (2, 2, 2), (2, 2, 2), (2, 2, 2)),
                mode=tf.estimator.ModeKeys.EVAL,
                use_bias=False,
                activation=tf.nn.relu6,
                kernel_initializer=tf.initializers.variance_scaling(distribution='uniform'),
                bias_initializer=tf.zeros_initializer(),
                kernel_regularizer=None,
                bias_regularizer=None):
    """
    NeuroNet [1] is a multi-task image segmentation network based on an FCN
    architecture [2] using residual units [3] as feature extractors.
    Downsampling and upsampling of features is done via strided convolutions
    and transpose convolutions, respectively. On each resolution scale s
    are num_residual_units with filter size = filters[s]. strides[s] determine
    the downsampling factor at each resolution scale.

    A single shared residual encoder feeds one decoder ("tail") per
    labelling protocol, so one forward pass yields logits and predictions
    for every protocol.

    [1] M. Rajchl et al. NeuroNet: Fast and Robust Reproduction of Multiple
    Brain Image Segmentation Pipelines. MIDL 2018.

    [2] J. Long et al. Fully convolutional networks for semantic segmentation.
    CVPR 2015.
    [3] K. He et al. Identity Mappings in Deep Residual Networks. ECCV 2016.

    Args:
        inputs (tf.Tensor): Input feature tensor to the network (rank 5
            required).
        num_classes (list of int): Number of output classes for each
            protocol tail; must have the same length as `protocols`.
        protocols (list of str): Names of the labelling protocols; used to
            key the entries of the returned output dictionary.
        num_res_units (int, optional): Number of residual units at each
            resolution scale.
        filters (tuple, optional): Number of filters for all residual units at
            each resolution scale.
        strides (tuple, optional): Stride of the first unit on a resolution
            scale.
        mode (TYPE, optional): One of the tf.estimator.ModeKeys strings:
            TRAIN, EVAL or PREDICT
        use_bias (bool, optional): Boolean, whether the layer uses a bias.
        activation (optional): NOTE(review): accepted but not referenced
            anywhere in this body — currently unused.
        kernel_initializer (TYPE, optional): An initializer for the convolution
            kernel.
        bias_initializer (TYPE, optional): An initializer for the bias vector.
            If None, no bias will be applied.
        kernel_regularizer (None, optional): Optional regularizer for the
            convolution kernel.
        bias_regularizer (None, optional): Optional regularizer for the bias
            vector.

    Returns:
        dict: dictionary of output tensors: 'encoder_out' plus, for each
        protocol p, 'logits_p', 'y_prob_p' (softmax) and 'y_p' (argmax).
    """
    outputs = {}
    assert len(strides) == len(filters)
    assert len(inputs.get_shape().as_list()) == 5, \
        'inputs are required to have a rank of 5.'
    assert len(protocols) == len(num_classes)

    # Shared keyword arguments for every convolution in the network.
    conv_params = {'use_bias': use_bias,
                   'kernel_initializer': kernel_initializer,
                   'bias_initializer': bias_initializer,
                   'kernel_regularizer': kernel_regularizer,
                   'bias_regularizer': bias_regularizer}

    x = inputs

    # Inital convolution with filters[0]
    x = tf.layers.conv3d(inputs=x,
                         filters=filters[0],
                         kernel_size=(3, 3, 3),
                         strides=strides[0],
                         padding='same',
                         **conv_params)

    tf.logging.info('Init conv tensor shape {}'.format(x.get_shape()))

    # Residual feature encoding blocks with num_res_units at different
    # resolution scales res_scales
    res_scales = [x]
    saved_strides = []
    with tf.variable_scope('encoder'):
        for res_scale in range(1, len(filters)):

            # Features are downsampled via strided convolutions. These are defined
            # in `strides` and subsequently saved
            with tf.variable_scope('unit_{}_0'.format(res_scale)):

                x = vanilla_residual_unit_3d(
                    inputs=x,
                    out_filters=filters[res_scale],
                    strides=strides[res_scale],
                    mode=mode)
            # Remember the stride so the decoder can upsample symmetrically.
            saved_strides.append(strides[res_scale])

            for i in range(1, num_res_units):

                with tf.variable_scope('unit_{}_{}'.format(res_scale, i)):

                    x = vanilla_residual_unit_3d(
                        inputs=x,
                        out_filters=filters[res_scale],
                        strides=(1, 1, 1),
                        mode=mode)
                # NOTE(review): this append sits inside the `for i` loop; with
                # the default num_res_units=2 the loop body runs once, so one
                # feature map is stored per scale as the decoder expects, but
                # num_res_units > 2 would append duplicates and shift the
                # decoder's skip indexing — confirm whether it should be one
                # indentation level out.
                res_scales.append(x)

            tf.logging.info('Encoder at res_scale {} tensor shape: {}'.format(
                res_scale, x.get_shape()))

    outputs['encoder_out'] = x

    tails = []
    for tail in range(len(num_classes)):
        # Create a separate prediction tail for each labeling protocol to learn
        with tf.variable_scope('tail_{}'.format(tail)):
            # Every tail decodes from the same shared encoder output.
            x = outputs['encoder_out']

            for res_scale in range(len(filters) - 2, -1, -1):
                # Upscore layers [2] reconstruct the predictions to
                # higher resolution scales
                with tf.variable_scope('upscore_{}'.format(res_scale)):
                    x = upscore_layer_3d(
                        inputs=x,
                        inputs2=res_scales[res_scale],
                        out_filters=num_classes[tail],
                        strides=saved_strides[res_scale],
                        mode=mode,
                        **conv_params)

                tf.logging.info('Decoder at res_scale {} tensor shape: {}'.format(
                    res_scale, x.get_shape()))

            # Last convolution maps to this tail's number of classes
            with tf.variable_scope('last'):
                tails.append(tf.layers.conv3d(inputs=x,
                                              filters=num_classes[tail],
                                              kernel_size=(1, 1, 1),
                                              strides=(1, 1, 1),
                                              padding='same',
                                              **conv_params))

            tf.logging.info('Output tensor shape {}'.format(x.get_shape()))

    # Define the outputs, keyed by protocol name
    for i in range(len(tails)):
        outputs['logits_{}'.format(protocols[i])] = tails[i]

        with tf.variable_scope('pred'):
            # Class probabilities and hard argmax prediction per protocol
            outputs['y_prob_{}'.format(protocols[i])] = tf.nn.softmax(tails[i])
            outputs['y_{}'.format(protocols[i])] = tf.argmax(tails[i], axis=-1)

    return outputs
249 |
--------------------------------------------------------------------------------
/model/neuronet.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/model/neuronet.pyc
--------------------------------------------------------------------------------
/model/parse_csvs.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 10,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import os\n",
10 | "import pandas as pd\n",
11 | "import numpy as np"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 8,
17 | "metadata": {},
18 | "outputs": [
19 | {
20 | "name": "stdout",
21 | "output_type": "stream",
22 | "text": [
23 | "['fsl_fast']\n"
24 | ]
25 | }
26 | ],
27 | "source": [
28 | "import json\n",
29 | "config_fn = 'fsl_fast_config.json'\n",
30 | "with open(config_fn) as f:\n",
31 | " config = json.load(f)\n",
32 | "print (config['targets']['protocols'])"
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": 35,
38 | "metadata": {
39 | "collapsed": true
40 | },
41 | "outputs": [],
42 | "source": [
43 | "# paths\n",
44 | "base_path = '/vol/biobank/12579/brain/images'\n",
45 | "\n",
46 | "# list all ids \n",
47 | "all_ids = sorted(os.listdir(base_path))\n",
48 | "\n",
49 | "# check if all of them contain a T1w image and required segmentation(s)\n",
50 | "valid_ids = []\n",
51 | "for i in all_ids:\n",
52 | " if (os.path.isfile(os.path.join(base_path, i,'T1.nii.gz')) and \n",
53 | " os.path.isfile(os.path.join(base_path, i,'T1_first_all_fast_firstseg.nii.gz')) and\n",
54 | " os.path.isfile(os.path.join(base_path, i,'T1_brain_seg.nii.gz')) and\n",
55 | " os.path.isfile(os.path.join(base_path, i,'T1_brain_seg_spm.nii.gz')) and\n",
56 | " os.path.isfile(os.path.join(base_path, i,'T1_MALPEM.nii.gz')) and\n",
57 | " os.path.isfile(os.path.join(base_path, i,'T1_MALPEM_tissues.nii.gz'))):\n",
58 | " valid_ids.append(i)\n"
59 | ]
60 | },
61 | {
62 | "cell_type": "code",
63 | "execution_count": 36,
64 | "metadata": {},
65 | "outputs": [
66 | {
67 | "name": "stdout",
68 | "output_type": "stream",
69 | "text": [
70 | "5723\n"
71 | ]
72 | }
73 | ],
74 | "source": [
75 | "print(len(valid_ids))"
76 | ]
77 | },
78 | {
79 | "cell_type": "code",
80 | "execution_count": 37,
81 | "metadata": {
82 | "collapsed": true
83 | },
84 | "outputs": [],
85 | "source": [
86 | "def get_full_paths(_id, fn):\n",
87 | " return os.path.join(base_path, _id, fn)\n",
88 | "\n",
89 | "hdr = ['id', 't1', 'fsl_fast', 'fsl_first', 'spm_tissue', 'malp_em', 'malp_em_tissue']\n",
90 | "valid_df = []\n",
91 | "for i in valid_ids:\n",
92 | " valid_df.append([i, \n",
93 | " get_full_paths(i, 'T1.nii.gz'),\n",
94 | " get_full_paths(i, 'T1_brain_seg.nii.gz'),\n",
95 | " get_full_paths(i, 'T1_first_all_fast_firstseg.nii.gz'),\n",
96 | " get_full_paths(i, 'T1_brain_seg_spm.nii.gz'),\n",
97 | " get_full_paths(i, 'T1_MALPEM.nii.gz'),\n",
98 | " get_full_paths(i, 'T1_MALPEM_tissues.nii.gz')])\n",
99 | "\n"
100 | ]
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": 38,
105 | "metadata": {},
106 | "outputs": [
107 | {
108 | "name": "stdout",
109 | "output_type": "stream",
110 | "text": [
111 | "['1000845', '/vol/biobank/12579/brain/images/1000845/T1.nii.gz', '/vol/biobank/12579/brain/images/1000845/T1_brain_seg.nii.gz', '/vol/biobank/12579/brain/images/1000845/T1_first_all_fast_firstseg.nii.gz', '/vol/biobank/12579/brain/images/1000845/T1_brain_seg_spm.nii.gz', '/vol/biobank/12579/brain/images/1000845/T1_MALPEM.nii.gz', '/vol/biobank/12579/brain/images/1000845/T1_MALPEM_tissues.nii.gz']\n",
112 | "(5723, 7)\n"
113 | ]
114 | }
115 | ],
116 | "source": [
117 | "print(valid_df[0])\n",
118 | "print(np.array(valid_df).shape)\n"
119 | ]
120 | },
121 | {
122 | "cell_type": "code",
123 | "execution_count": 39,
124 | "metadata": {},
125 | "outputs": [
126 | {
127 | "name": "stdout",
128 | "output_type": "stream",
129 | "text": [
130 | "5000\n",
131 | "10\n",
132 | "713\n"
133 | ]
134 | }
135 | ],
136 | "source": [
137 | "\n",
138 | "# 5k training ids\n",
139 | "write_df = valid_df[:5000]\n",
140 | "pd.DataFrame(write_df).to_csv('train.csv', index=False, header=hdr)\n",
141 | "print(len(write_df))\n",
142 | "\n",
143 | "# 10 validation ids\n",
144 | "write_df = valid_df[5000:5010]\n",
145 | "pd.DataFrame(write_df).to_csv('val.csv', index=False, header=hdr)\n",
146 | "print(len(write_df))\n",
147 | "\n",
148 | "# 713 test ids\n",
149 | "write_df = valid_df[5010:]\n",
150 | "pd.DataFrame(write_df).to_csv('test.csv', index=False, header=hdr)\n",
151 | "print(len(write_df))"
152 | ]
153 | },
154 | {
155 | "cell_type": "code",
156 | "execution_count": null,
157 | "metadata": {},
158 | "outputs": [],
159 | "source": []
160 | }
161 | ],
162 | "metadata": {
163 | "kernelspec": {
164 | "display_name": "Python 3",
165 | "language": "python",
166 | "name": "python3"
167 | },
168 | "language_info": {
169 | "codemirror_mode": {
170 | "name": "ipython",
171 | "version": 3
172 | },
173 | "file_extension": ".py",
174 | "mimetype": "text/x-python",
175 | "name": "python",
176 | "nbconvert_exporter": "python",
177 | "pygments_lexer": "ipython3",
178 | "version": "3.5.2"
179 | }
180 | },
181 | "nbformat": 4,
182 | "nbformat_minor": 2
183 | }
184 |
--------------------------------------------------------------------------------
/model/reader.py:
--------------------------------------------------------------------------------
1 | import SimpleITK as sitk
2 | import tensorflow as tf
3 | import numpy as np
4 |
5 | from dltk.io.augmentation import extract_random_example_array
6 | from dltk.io.preprocessing import whitening
7 |
# Supported labelling protocols; read_fn uses the position of a protocol in
# this list to locate its label column in the csv rows (offset by 2 for the
# id and t1 columns).
# BUG FIX: this entry was previously 'fast_tissue', but every other
# component names the protocol 'fsl_fast' (map_labels, the csv header
# written by parse_csvs.ipynb, config_fsl_fast.json), so
# ALL_PROTOCOLS.index('fsl_fast') in read_fn raised ValueError.
ALL_PROTOCOLS = ['spm_tissue', 'fsl_first', 'fsl_fast', 'malp_em', 'malp_em_tissue']
# Number of label classes per protocol, aligned with ALL_PROTOCOLS order.
NUM_CLASSES = [4, 16, 4, 139, 6]
10 |
11 |
def map_labels(lbl, protocol=None, convert_to_protocol=False):
    """Translate a label map between protocol ids and consecutive ints.

    Training uses consecutive integer class ids [0, num_classes); the
    stored segmentations use each protocol's own label numbering. This
    converts a label volume in either direction.

    Parameters
    ----------
    lbl : np.array
        a label map
    protocol : str
        a string describing the labeling protocol; one of 'fsl_fast',
        'fsl_first', 'spm_tissue', 'malp_em' or 'malp_em_tissue'
    convert_to_protocol : bool
        flag to determine to convert from or to the protocol ids

    Returns
    -------
    np.array with converted labels, or -1 if the protocol is unknown.
    """

    # Per-protocol label id tables. The position of an id in its sequence
    # is the consecutive integer it maps to for training.
    #   spm_tissue / fsl_fast: 0 background, 1 CSF, 2 GM, 3 WM
    #   fsl_first: background + 15 FIRST subcortical structures
    #       (thalamus, caudate, putamen, pallidum, brain stem,
    #        hippocampus, amygdala, accumbens; left/right)
    #   malp_em: 139 MALP-EM structure labels (0 is background)
    #   malp_em_tissue: 0 background, 1 ventricles, 2 sub-cortical and
    #       cerebellum GM, 3 WM, 4 cortical GM, 5 (unnamed)
    protocol_label_ids = {
        'fsl_fast': range(4),
        'fsl_first': [0, 10, 11, 12, 13, 16, 17, 18, 26,
                      49, 50, 51, 52, 53, 54, 58],
        'spm_tissue': range(4),
        'malp_em': range(139),
        'malp_em_tissue': range(6),
    }

    if protocol not in protocol_label_ids:
        print("Method is not recognised. Exiting.")
        return -1

    ids = protocol_label_ids[protocol]
    out_lbl = np.zeros_like(lbl)

    if convert_to_protocol:
        # map from consecutive ints to protocol labels
        for consecutive_id, protocol_id in enumerate(ids):
            out_lbl[lbl == consecutive_id] = protocol_id
    else:
        # map from protocol labels to consecutive ints
        for consecutive_id, protocol_id in enumerate(ids):
            out_lbl[lbl == protocol_id] = consecutive_id

    return out_lbl
245 |
246 |
def read_fn(file_references, mode, params=None):
    """A custom python read function for interfacing with nii image files.

    Args:
        file_references (list): A list of lists containing file references,
            such as [['id_0', 'image_filename_0', target_value_0], ...,
            ['id_N', 'image_filename_N', target_value_N]].
        mode (str): One of the tf.estimator.ModeKeys strings: TRAIN, EVAL
            or PREDICT.
        params (dict, optional): A dictionary to parameterise read_fn ouputs
            (e.g. reader_params = {'n_examples': 10, 'example_size':
            [64, 64, 64], 'extract_examples': True}, etc.).

    Yields:
        dict: A dictionary of reader outputs for dltk.io.abstract_reader.
    """

    # Shuffle in place so every training epoch sees a fresh file order.
    if mode == tf.estimator.ModeKeys.TRAIN:
        np.random.shuffle(file_references)

    for file_ref in file_references:

        # Load the T1 image referenced by this row with sitk.
        img_id, img_fn = file_ref[0], file_ref[1]
        img_sitk = sitk.ReadImage(str(img_fn))
        image = sitk.GetArrayFromImage(img_sitk)

        # Zero-mean / unit-variance normalisation of the whole volume.
        image = whitening(image)

        # Append a channel axis: [x, y, z] -> [x, y, z, 1].
        image = np.expand_dims(image, axis=-1).astype(np.float32)

        if mode == tf.estimator.ModeKeys.PREDICT:
            # At prediction time no labels are read.
            yield {'features': {'x': image},
                   'labels': None,
                   'sitk': img_sitk,
                   'img_id': img_id}
            continue

        # Read one label volume per requested protocol and map its ids
        # to consecutive integers for training.
        label_maps = []
        for protocol in params['protocols']:
            lbl_fn = file_ref[2 + ALL_PROTOCOLS.index(protocol)]
            lbl = sitk.GetArrayFromImage(sitk.ReadImage(str(lbl_fn))).astype(np.int32)
            label_maps.append(map_labels(lbl, protocol=protocol))

        # Either cut training patches or return the full volumes.
        if params['extract_examples']:
            # Extract patches jointly from the image and all label maps so
            # they stay spatially aligned.
            patches = extract_random_example_array(
                [image] + label_maps,
                example_size=params['example_size'],
                n_examples=params['n_examples'])

            for e in range(params['n_examples']):
                yield {'features': {'x': patches[0][e].astype(np.float32)},
                       'labels': {p: patches[1 + i][e]
                                  for i, p in enumerate(params['protocols'])}}
        else:
            yield {'features': {'x': image},
                   'labels': {p: label_maps[i]
                              for i, p in enumerate(params['protocols'])},
                   'sitk': img_sitk,
                   'img_id': img_id}
    return
321 |
--------------------------------------------------------------------------------
/model/reader.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/model/reader.pyc
--------------------------------------------------------------------------------
/model/reader2.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/model/reader2.pyc
--------------------------------------------------------------------------------
/model/sandbox.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import tensorflow as tf\n",
10 | "import numpy as np\n",
11 | "import pandas as pd\n",
12 | "import json"
13 | ]
14 | },
15 | {
16 | "cell_type": "code",
17 | "execution_count": 100,
18 | "metadata": {},
19 | "outputs": [
20 | {
21 | "name": "stdout",
22 | "output_type": "stream",
23 | "text": [
24 | "['4453610' '/vol/biobank/12579/brain/images/4453610/T1.nii.gz'\n",
25 | " '/vol/biobank/12579/brain/images/4453610/T1_brain_seg.nii.gz'\n",
26 | " '/vol/biobank/12579/brain/images/4453610/T1_first_all_fast_firstseg.nii.gz'\n",
27 | " '/vol/biobank/12579/brain/images/4453610/T1_brain_seg_spm.nii.gz'\n",
28 | " '/vol/biobank/12579/brain/images/4453610/T1_MALPEM.nii.gz'\n",
29 | " '/vol/biobank/12579/brain/images/4453610/T1_MALPEM_tissues.nii.gz']\n",
30 | "debug\n",
31 | "(64, 64, 64, 1)\n",
32 | "debug\n",
33 | "malp_em_tissue\n",
34 | "spm_tissue\n",
35 | "fsl_fast\n",
36 | "(64, 64, 64)\n",
37 | "(64, 64, 64)\n",
38 | "(64, 64, 64)\n"
39 | ]
40 | },
41 | {
42 | "data": {
43 | "text/plain": [
44 | "[None, None, None]"
45 | ]
46 | },
47 | "execution_count": 100,
48 | "metadata": {},
49 | "output_type": "execute_result"
50 | }
51 | ],
52 | "source": [
53 | "import reader as reader\n",
54 | "import importlib\n",
55 | "importlib.reload(reader)\n",
56 | "\n",
57 | "all_filenames = pd.read_csv(\n",
58 | " 'train.csv',\n",
59 | " dtype=object,\n",
60 | " keep_default_na=False,\n",
61 | " na_values=[]).as_matrix()\n",
62 | "\n",
63 | "# Parse the run config\n",
64 | "with open(\"config_tissue.json\") as f:\n",
65 | " config = json.load(f)\n",
66 | " \n",
67 | "# Set up a data reader to handle the file i/o. \n",
68 | "reader_params = {\n",
69 | " 'n_examples': 16,\n",
70 | " 'example_size': [64, 64, 64],\n",
71 | " 'extract_examples': True,\n",
72 | " 'protocols' : config[\"protocols\"]}\n",
73 | "\n",
74 | "# Create a generator with the read file_references `all_filenames` and \n",
75 | "# `reader_params` in TRAIN mode:\n",
76 | "it = reader.read_fn(file_references=all_filenames,\n",
77 | " mode=tf.estimator.ModeKeys.TRAIN,\n",
78 | " params=reader_params)\n",
79 | "\n",
80 | "# If you call `next`, the `read_fn` will yield an output dictionary as designed\n",
81 | "# by you:\n",
82 | "ex_dict = next(it)\n",
83 | "\n",
84 | "# Print that output dict to debug\n",
85 | "np.set_printoptions(edgeitems=1)\n",
86 | "print('debug')\n",
87 | "print(ex_dict['features']['x'].shape)\n",
88 | "print('debug')\n",
89 | "[print(l) for l in ex_dict['labels']]\n",
90 | "[print(ex_dict['labels'][l].shape) for l in ex_dict['labels']]"
91 | ]
92 | },
93 | {
94 | "cell_type": "code",
95 | "execution_count": 93,
96 | "metadata": {},
97 | "outputs": [
98 | {
99 | "data": {
100 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAA6IAAAE1CAYAAAD9BF1VAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzt3XuQZOd53/fnFYnr3ndnL9gFsIsL\nCSzBC8SLaJNSFoIZknaYkkouLF2RFKccJnZSqcRRyVaiRCKQxLacpCLbSlIu04qtkDYtUKmQLKfA\nIkUDMIgSSZEEDYIEQRIQgMXeZ2+zu1iQIHDyxzTkeb/nbL/dMz1vX+b7qWIJz3T3Oe95z+nWnO35\nvU9qmiYkSZIkSarlJ8Y9AEmSJEnS2uKNqCRJkiSpKm9EJUmSJElVeSMqSZIkSarKG1FJkiRJUlXe\niEqSJEmSqvJGVJKkClJKv5hS+vy4xyFptqWU/mlK6X8a9ziWI6X0QErpL497HKrDG1H1lVJ6NqX0\nvgr7uTel9InV3o8k1ZBS2pdSalJKr3/tZ03T/LOmad4/znFJ0qTo+t2vaZo/3zTN741rTKrLG1FJ\nkiRJUlXeiGpgKaX/KKX0pZTS/5pSOpNS+pOU0p9f8vhDKaW/k1L6akppIaX0mZTS1t5jd6WUXsD2\nnk0pvS+l9MGI+PWI+HBK6UJK6d8MMJZbUkqnU0pv79W7U0onU0p3jfSgJU21lNKvpZQOp5TOp5Se\nSin9ud6/wv9BSun3ez//RkrpbUte82xK6W+klB5PKV1MKf1uSmln70/GzqeU/jCltKWw63/d+79n\ne59rf/a1z9DePlJK6bdTSid6n5ffSim9uffYX0gpfae3r8MppV/t/fxPX79krE1K6dbef1/V+3x+\nPqV0PKX0D1NK14xqLiWtrmE+e1JKn0opHUspnUsp/euU0h2X2eZdKaUXUkq/nlKa7+3jFwcYy2U/\nT5Zs82/2PsOOppR+vvfZ9b3e72e/Xth+5+9+vd8lP9L771tTSg/3jnE+pfT7vZ/3+/z809f36uxz\nM6V0e0rpC70xPpVSOliaC60eb0Q1rHdHxFMRMRcR/3NE/G5KKS15/D+MiL8SEddFxI8j4h+UNtg0\nzeci4m9HxO83TbO+aZq3RUSklP6blNK/vMxrno6IX4uIT6SUro2IfxIRv9c0zUPLPTBJsyWldFtE\n/BcR8a6maTZExAci4tnewz8XEZ+KiK0R8c8j4tMppSuWvPwvRsS/GxFvjIh/PyIeiMVfmrbH4v/v\n/C8Lu/93ev93c+9z7Y/w+Pt7z3ljRGyKiIMRcar32O9GxF/tjfnNEfGvBjzk3+pt786IuDUi9kTE\nbw74WkmTYdDPngci4g0RsSMivhER/6zPNnfF4u9teyLiL0fEP+p9PvZT+jzZFRFXL/n5xyLilyLi\nHRHxMxHxGymlmy638cv97gf/Y0R8PiK2RMT1EfE7vZ/3+/y8rJTSuoj4Qix+5u+IiL8UEf9nSulN\npddqdXgjqmE91zTNx5qmeSUifi8Wbzh3Lnn8403TPNE0zcWI+I2IOJhSet1ydtQ0zW81TfOhPo9/\nLCJ+EBFf6Y3jv1vOfiTNrFci4qqIeFNK6YqmaZ7t/SNWRMTXm6b5g6ZpXo6I/y0Wf6H6M0te+ztN\n0xxvmuZwRDwSEV9pmuaxpmleioj/NyJ+coVjezkiNkTE7RGRmqZ5smmao0see1NKaWPTNGeapvlG\naWO9fxD8TyPiv26a5nTTNOdj8Ze8v7TCcUqqa6DPnqZp/q+mac43TfPDiLg3It6WUtrUZ7u/0TTN\nD5umeTgi/r9YvHnrNODnycsR8bd6n6H/IhZvdP9+b0zfjojvRETXzeUwXo6IvRGxu2mal5qm+dKS\nn1/u87OfD0XEs03T/JOmaX7cNM1jEf
H/RMQ9KxynlskbUQ3r2Gv/0TTNi73/XL/k8UNL/vu5iLgi\nFj+cVsvHYvEbg9/pfRhLUkRENE3zg4j467H4S9qJlNK/SCnt7j18aMnzXo2IFyJi95KXH1/y35c6\n6qWfe8sZ27+KiP89Iv6P3tj+UUppY+/hvxgRfyEinuv9WdqfHWCT2yPi2oj4ekrpbErpbER8rvdz\nSdOj+NmTUnpdSum3UkpPp5QW4t/+pcflft860/uC4DXPRf55R4N8npzqfSnx2ri6xr6iz8mI+JsR\nkSLiqymlb6eU/kpE8fOzn70R8e7Xjql3XL8Yi9/uagy8EdWo3bDkv2+MxX+1mo+Ii7H4oRYREb1v\nSZd+oDXD7iiltD4i/l4s/hnbvamXR5Wk1zRN88+bpvnpWPwFpImIv9t76E8/q1JKPxGLf/Z1ZJS7\nHmBs/6BpmndExJti8U/M/kbv53/cNM3PxeKfjn06Iu7vvYSfo0t/eZqPxV/87miaZnPvf5uaplnp\nL4KSJs9/EIvxgvfF4p+m7uv9PF3m+Vt6f5b6mhuj/+ddrc+Tvp+TTdMca5rmP2maZndE/NVY/DPa\nW3uPdX5+Bj4nI7/JPBQRDy85pteiE//ZqA5Iw/FGVKP2SymlN/Vym/9DRPxB71/MvhcRV6eU/r1e\nDuu/j8U/mXvN8YjY1/uFcFB/PyK+1jTNR2Lxz0z+4WgOQdIsSCndllK6O6V0VUS8FIu/WL3ae/gd\nKaVfSIvtVf56RPwwIr48wt2f7O3r5suM7V0ppXf3Pg8v9sb3akrpyrTYb3RT70/eFpaM+d9ExB0p\npTtTSlfH4je9EfGn3+p+LCJ+O6W0o7ePPSmlD4zwmCRNhg2x+Jl1KhZvuv72AK+5r/f58jOx+Ceq\nn7rcEyt+nvT93S+ldE9K6fpeeSYWb1xfvdznZ+9534yIX0gpXdu7af2Pl2zyX0bEG1NKv5xSuqL3\nv3ellPaP+Lg0IG9ENWofj4h/Got/wnt19EL1TdOci4j/PCL+cUQcjsUPjqWr6L72gXgqpfSNiIi0\nuMLbA107SSn9XER8MCJe+1esX4mIt6cBVoKTtGZcFYsLbszH4mfSjoj4b3uPfSYiPhyLv9z8ckT8\nQu/GbyR60YW/FRGP9v4E7M/gKRtj8Re9M7H4Z3KnIuJ/6T32yxHxbO9P7v5aLP7pWDRN871Y/Ae+\nP4yI70fEl7DNX4vF3PyXe6/9w4goLUgiafr837H4uXE4FrOYpX9EOxaLnzVHYnFRo7/WNM13C6+p\n8XnS+t0P3hURX0kpXYiIz0bEf9U0zTPR//PztyPiR7F4k/t7sWQRp17W9f2xmHU9Eovz8ncj/2JE\nFaWmGfovIqVOKaWHIuITTdP843GPRZIuJ6V0b0Tc2jTNL417LJK0mtJiW7tPNE1zfem5Um1+IypJ\nkiRJqsobUUmSlqGX5bzQ8b9vj3tsklTSW4m26zNsZDGnlNIDl9nHr49qH5pe/mmuJEmSJKkqvxGV\nJEmSJFX1+po7u/fee7OvX3/0ox/1ff4rr7yS1S+/3F7Q8Nprr83q178+P6SU8pZKW7Zsyepz585l\n9Ze+lC9CeORI/7ZyP/7xj1s/u+GGG7L6pptuyuoNGzZk9TXXXJPVL730UlaXjmlhYSGrT5061RoT\nv/nmPumHP/xhVr/66qtZzTHwXF26dCmreZ5efPHF1j4PHTqU1RcuXOg7pje/+c1Z/da3vjWrr7ji\niqy+6qp8UTRef13X10/8RP5vNZxHzgO97nWvy2peL6Vz3XUuX3jhhazmcZw/fz6reQxXX311VvOY\nrrzyytY+l+K57doG54XHtX593orsgQce6D+R02fi/9Tk4MGDI9/mPffcs6LHSz71qct2G5gqKz2O\n+++/v/ykGVCap9LjK73eVsM999wzU591991338R/1t17770r3sZDDz001PMPHDiwov3dd999K3r9\nOA
w7R4N48MEHR77NcXv44YfHPYQqDhw4MNBnnd+ISpIkSZKq8kZUkiRJklSVN6KSJEmSpKqqZkSZ\nE2NejrkyZttK2bWIdpaRGT3mAJnf5PM/+9nPZjUze+vWrWuN4fjx41m9bdu2rOZxMPvIbCPHzHm5\nePFi3+13/awr57cUc6d8PXOup0+f7jtGHgOPMaKdG2TWkdvguaLSvJXynV24z9I2mJ0lvieI8xwR\nsWnTpqzmNcnjZB6X75ESHmPX67lPjpvHOewYtHI8j8z1jiIzysweM3qzkvGsbRKzjtNgGjOkGj1m\nRJeTGb3rrruyupSHXCs5wKU4RxGrkxuddl354bV4vbzGb0QlSZIkSVV5IypJkiRJqsobUUmSJElS\nVVUzoswkMbvG3CBzZl19R/ka5qC4D/ZQZA5x48aNWf3+978/q7/85S9ndVefUY6bmVHOw9atW7Oa\n2Ub202Rek8fcNU/M5DGfyx6bnAc+n5lA9iXlvJb6lka0j4u5wtK5HrYfJp/PY4poZzz5Gl5PnGee\ny1KmlM/vOgZeX8z7cp6Yaz179mzfx4c9DxHtPrGcl665VV28dom9KWtkRqV+ShljM8hajnFkRiV1\n87dDSZIkSVJV3ohKkiRJkqryRlSSJEmSVNVY+4gyG8kMU6nPaBfm3Zh9JGYX2dNzx44dWX333Xdn\n9aOPPtra5hNPPJHVc3NzfcfAjB/7jjJvV8qE8vldrynlK0uZUs4TMcfI7XVlBvkaPofnls9nT1eO\nkflLXhtdYyq9hmPicXblKfs9zvPC/Ue0+4jy/LOn65YtW/rukzlqbo/HyPxwl0Hyt5psZkZXj9nG\n1VG6njjPXo+KGE1mVN3M0g6GvUXXUl9RfzuUJEmSJFXljagkSZIkqSpvRCVJkiRJVVXNiDKzxzwc\ns2jMlA7SH5P5yFIPRWbwmM8s9fzcv39/a0w8TmZEDx06lNUnTpzIavZkZC/L9evXt/a51EsvvdT6\nGbOwnFvmVDmvpSwkcYx8ftcYeT1wHpjP5Lku5Vz5+CA5Rs4bs7KlbZbGwMd5Xri/iHbG87rrruv7\nmosXL2Y1s7WlHq+lPqMR7eMqZbVLPS01eZgZjRhNblQa1EqztWZANYiujGgpN8osJJmN1DDWUmbU\nb0QlSZIkSVV5IypJkiRJqsobUUmSJElSVd6ISpIkSZKqqrpYERc54UItXCyGi9esW7eutU0ugsKF\nWroWe1nqxRdfzGoussMxc+EgLqgTEXHnnXdm9ZEjR7Kai8OcP38+q48ePdpnxBELCwt9x9C1kBDn\ngWPg4jFcNKe06A4XDiot6sPzFlFeMGnTpk1ZzeuB++C54+Nc/IoL7HThIku8ZkuLOHFMvMa5eFbX\nAkocN8/V7t27s3p+fj6rL1y4kNWcF46B89I1Jh53aWEpLuil6cQFjIZdvMjFZgazVo+7ZKWLF0mD\n4mJFpcWLqLSY0bDbm1UPPvjguIcwkSZx8aJRjclvRCVJkiRJVXkjKkmSJEmqyhtRSZIkSVJVY82I\nMm/J7Buzasw1dm2DGb5S9pE4BmbdrrjiiqxmHi8i4tChQ1nNDCgznSdOnMhqZvrm5uaymvlLZv44\nJxHtfCUx98d54rwS55nZR+rKCF68eDGrmU3kvHGMPDfM/w6SU6VSxph5Xc5TaR54TTNL2ZXH5HP4\nvuL7htfPli1bsvrMmTNZXcrB8j0SEbFhw4asHmRupWEtJwM4ifnKacwyrnTMNc5DaR+rMe/j2Kdm\nz3IyotOQK33ooYfGPYShjTp/ySxljW2O4hhWY9xd/EZUkiRJklSVN6KSJEmSpKq8EZUkSZIkVVU1\nI8psGfNzzASy7sqdMX9Z6uvIPCXzchxjKd/ZleFjdpHjZp/QUv9MjpnzwkxgV4avlF3ctWtX3zGX\n+l0yf8m8JsfUNW9d2daltm3bltXMRvIYS31rqauPKLOsp06dympm
RDkv3Cf3UcrWch4j2r1uWZey\ntDy3HBPnkcfQNY/cJvfJrG3XNSpp0STkWkedbRzF9lY6L5Mwr9KorLS3qRatdk/O5Wx/pfnMWvnO\nUfAbUUmSJElSVd6ISpIkSZKq8kZUkiRJklRV1YwoM4DsoVjKOnZl05gDLGUT+Tj3yYwex8xsY1eu\nkc9hDpV9Q3kMpfwcM6UcA3s6RpR7T3b1aF2K88R9MutY6jvaNW/MFW7cuDGrOWZmG7lN5hJ57ll3\n5VZL1wMzpNwnt8kxl7bfNU88FydPnsxqzuP27duzmhlSziuvH77vuuaplL/l4zwGzQZm8OyhqFEq\nXU9mQFWL+UytplKudJoyoCV+IypJkiRJqsobUUmSJElSVd6ISpIkSZKqqpoRZf/Mm2++OauZY2Ru\nkXm6LszoMZPHfFyp92SpL2nXmJjrYx6O+2SG76abbspqzgvHxHl98cUXW2NiTvW6667LamY8mX3k\nGFgT54nb6xojM5u7d+/O6j179mR1KWdYyoAOklvk+eU1Wcqt8no8e/ZsVjMPzO0x7xnRnnseJ68v\nZkh5TKX3XSmHHdE+Do6b2yj1jJ12zLKZXes27LzMSua0dNyzeL3UeE+s9PqYxXnXZBg2QzrI82ch\nl/rggw+OewhTadjepJOcKfUbUUmSJElSVd6ISpIkSZKq8kZUkiRJklRVYjZwNX3oQx/Kdvae97wn\ne3zdunVZzQzfVVddVdzHhQsXsvr8+fNZzeNlLpAZv66eif2eH9HOyx06dCirDx8+nNWbN2/Oamb0\nOIZSPrMrV1iaO2YXmelkxq+UM+TjnJOuPCaPc9u2bX3HwPzl+vXrW9tciv1Xu84dlfK9PJe83kr5\nTc4z5+3ixYutMXEu2W+VeeDSvJV6gJZyrxHtzCevQV7jfB9+8pOfLAfAp8jBgwezA7z//vvHNZSq\nmNEzKzuYaZiX1c7nDrL9aZinknvuuWemPutSStln3SzkFjU6Dz30UFZPQyZ02Pyluh04cGCgzzq/\nEZUkSZIkVeWNqCRJkiSpKm9EJUmSJElVVe0jyt6RCwsLWc0sG/NxXRlAZvBY79ixI6uZ0WNukI8z\nj8e8XFfOkD/jcW/atCmrmafjGHbu3JnVzBHy9cwMdo2BWUZmIZnX5Lnh81mX+oh2nctdu3b13Se3\nUep3yeeX+oZ25YF5Lpm1LWVnuc0NGzZkNc8Vr7euDDdz0Owjy8eZtS31pS1lZ7vyvZxb5kiPHTuW\n1XxfajbNQqZvNTgvy1PKkTqv0vgwDxoxHZnQSXTXXXdlddfczgq/EZUkSZIkVeWNqCRJkiSpKm9E\nJUmSJElVVc2Istcgs5DMnjGP15VdYxaNGVFm15jhY83nM6NXyhlGRJw+fbrva5jJ45iZIS1lQjkH\nzN5GtPOSzN8Oe5x8nOeG22OWkn0lI9rzwvPPTCjzvczBlvqv8vW8PiPK1wfPRSkzSpwnjqGr/yuz\ntGfOnMlq9oRlZpRzz3kpHQPzxRHteWBPYOJ7f9bNaj/N1e4tOQtm5VyvtkHmqXS9DXs9lvbZtT3P\np9TNPOjqYWa0ZJoypX4jKkmSJEmqyhtRSZIkSVJV3ohKkiRJkqqqmhFlLpE1s2rMTg7S55EZO+6D\nWTZm8NjHsdRz8ciRI60xfe973+u7zz179mR16Tj5emJmryuXyHli1rHUL5UZ09KYOO/sn8m6S6lv\nKMfAvCVziMzSMgs5CM4Lt8Fzx7o0z7wWus4lr1keN4+T18f8/HzfMfDclHqERrRzp8zflt5nmnxd\neTn7OqomXk8rzSibcZaW76Mf/ei4hzCzmPEcNiM6TX1I/UZUkiRJklSVN6KSJEmSpKq8EZUkSZIk\nVeWNqCRJkiSpqqqLFXEhocOHD2f1TTfdlNXbtm3Lai6AE9FeaIWLonChFS6AU1qEh4sVnTt3LquP\nHTvWGtPCwkJW79q1q++YuSAN
j+Hs2bN9X19aIKdrH1xwprToExe8oa1bt2Y1zx3nsWsRntKiOHwN\nFwrimLlYER/n67sWYOKiOtxGafEijoFK8855i2jPAxcrIm6DCwlx3q+99tq+Y+L1GdFe4IjvK76G\n25x1XBSFi64Me06X+5rVNguLE5XOVckszMG0GPXiRaXtq+zee+/tW6ueUS9Q8+CDD450exrcShcv\nmmR+IypJkiRJqsobUUmSJElSVd6ISpIkSZKqqpoRZV7uxIkTWf3CCy9k9Y4dO7K6K/v44osv9n0O\n98mcKXNW69aty2rmFJlrZR0RsWfPnqzevHlzVjOTxwzfmTNnsprzxOczl3jNNde0xsSM56lTp7Ka\nWUjmJV9++eWsZh6Nx8jMIMfE7UW05+WKK67oW/PcMJfIMXKfvDY4j126sq39Huf1OGyurytLyddw\nXjZu3JjVpRw1zzW3x2PguY1ozy23WXpfrjWjyLKtdiaUY+wa86gzdCvNZw6yzZW+nmMyRzg5SplR\nz5UmRSm/uZwM4KgzoWvVgQMHsvrhhx8e00gub5bOtd+ISpIkSZKq8kZUkiRJklSVN6KSJEmSpKqq\nZkTZg5OZwMcffzyrmTN75zvf2doms4fs4cnM56ZNm/qOiX1BmaXk4/v27WuNiT01mV1kBu/06dNZ\n/dhjj2U1M6HMzjLf2ZW/47w8//zzfbfBzCd7S3Jema/kmJlT7Boj54XnlnlJ9sNk/pf5YeZk+Tiv\nja4xcR6Yt2RWkplQPs7tl7bXtU3OC/OYHDPPBd9nzN4SX981Tm6TY2QOeq1hdu3+++/P6lIWOWLl\nGdFSdrJGvm7YMWgwpXlbzrlc7Z6dq2ESMqFrPadqX9FFw2b6JiEDOIl9Q4fNazLvqcnjN6KSJEmS\npKq8EZUkSZIkVeWNqCRJkiSpqqoZUWb+mEucn5/P6kceeSSrmXOMiHjjG9/Yd5/Ms3EfzIgyu3b0\n6NGs3r59e1Zv27attU9mF0u9JZlV3LlzZ1Yzb8nMHx/vmidmMpkv45iYp2SWkTlY5iuZdSzlGrue\nwzEzh8hM6NmzZ7Oax8Dzwhwjr8eI9nExC8t547ngPpkX5jzwvHCMl/vZUsx4cl5L54Z1Vya0pNSH\n9sKFC0Nvc5YNkiNjjnSl+1jOGGpbzhhGnV2chHmgUfdGjZjM49TsGSQjuto50knMrdo3tNtKe3h2\nvd7c6GTxG1FJkiRJUlXeiEqSJEmSqvJGVJIkSZJUVdWM6IYNG7Ka+Uxm05gje/TRR1vbfOqpp7Ka\nmT3m5ZhtZL9M5g75OOuurBszdcwVMrvIzN5tt92W1czXMcvIXCLznF1jYNaVx83+qcz3lvqEcnvM\nDHaNkeeOGVEeN7OwzGMyb8m+tbw2eF66XrNx48asLuUnmQHlMfBxnidmSiPKmeNS71Kei1LmlMfc\nheeK2+T1sJzc6Vp38ODBcQ9h5FYjl1ja5krzlePIUtbo2TmJGWGtTSvNbJZeP45M6HIyoMNuc6WZ\n0UnsG7oamBs1MzpefiMqSZIkSarKG1FJkiRJUlXeiEqSJEmSqqqaEWVWbf369X2fz2xaV3/MZ555\nJquZQ73xxhuzmn0/mQHk65m/K/UI7dpmqYdmaR/MFZbqrswfe4/yXDAzevPNN2c1M4DM+DFvWcpG\ndo2RWdlSppPHUOrRyWNgHrNrTLzm2PO1dL3wuDlGjoE4B11KGU9ef6VepevWrctq5nm7ctE8Du6T\n17QZ0frM/C1a6TyYpRxMKdc6q/NWI8+r3CT0AZ0EzIx+9KMfHc9AVmClfUM1Prz++Lvm5fiNqCRJ\nkiSpKm9EJUmSJElVeSMqSZIkSaqqaka0tXNk00p5uq5M6XXXXZfVO3fuzGrm3Zhd4z6YI2S2jWPo\n6ofJLCNzg3wNn88MHrOQzNcN+nfY/cbEfTBTyrwux8x55TGW8poR7eMo1ZyHUnaWz+/q0UnMrT
Iz\nyozo3Nxc3+dzexwDe+ty+xHtc8W55XGWMqTMd7Iuzesgr+H7qOt9o9U1bHZtVjN8GkwpC8u69Hyz\nk1otK82Isv/mSvtxTor77rsvq6cxM1pDqa8oa3Oso+U3opIkSZKkqrwRlSRJkiRV5Y2oJEmSJKmq\nqhlR9mBkzpC5xF27dmX1nj17Wtu85pprsrqUReTjx48fz+ojR45kNbORzCl2ZfiYvyxlRjkvpbwl\ne0syA3j11Ve3xsSfcR6YTeS8bNmyJas3bdrUd8zMMZb6ZUZEXLx4MauZp+Tcl84tcXvM4vI8RbSv\nSY6R2+DzeX3w3J08eTKrT58+ndXMOEeU+85yrvkeYc3rp9QTlvvv+tmpU6eymlnYrm1ostgvs9s0\nzsMg+czaxzUr15fZ1+nH/oez2pfUzOh48PpajQwyc6y0GrlWHtdy+dugJEmSJKkqb0QlSZIkSVV5\nIypJkiRJqqpqRpT9DZm/fMMb3pDVzIh29ctktrGUQ2UG77nnnsvqUr6TuUTmDiPax8mar2EGlHnL\nUp6OGb/z58+3nlPqNclcIfdZ6tlZyhEyU9qF+UnmCkv9M5l95LkunRfmPSMitm/f3neMfA2PkxlR\nXk88V/Pz81l99uzZ4ph4HKVzwXPN5/N91pWdJb4PDx8+nNVbt27N6q4+stI0KGUCR5F1nMTc4Wr3\nFe065mnNjUqaXWu1r+ioMqHkN6KSJEmSpKq8EZUkSZIkVeWNqCRJkiSpKm9EJUmSJElVVV2siIv0\ncPGZhYWFrOaCOKwj2gvacPEYPs6FWbhwCxej4aIqfPzSpUutMc3NzWU1FyPicXOxIY6Z1q1b13f7\nXYvLcO62bNmS1Zx74rxxER3OCxcOGmSMnAcu2sS55uN8Pc8dF+259tprs7pr3jlOXsOla5T74Dyv\nX78+qzdv3pzVJ0+ebI3p6quvzmoukMTjYM1zxXkrvQe4OFJEe2Eozhv3wfeAJp+LyUyulS4kpME5\nd5oV9913X9/HP/rRj1YayWTh4kNcnGhYDz30UFZ3LfrD5wxrmhdM8htRSZIkSVJV3ohKkiRJkqry\nRlSSJEmSVFXVjChzY8xzMgPIXBkzfhHt/Br3sXXr1r7bJGYh+Xw+3pXhY86PucKLFy9mNY+LGb1S\nLpEZQD4/op2n5LxxG8wBckxd56LfGJqm6bv/iHaesjRv3Ab3wRwizxXzmczeRrSvJ16jzGfycR4D\nM8m0ffv2rOZ7JCLi6aefzuqbb745qzmPvF44j8ycMs/La6Hr3PNnGzZsyOpSblWaVePI1k5DjnGQ\n3Gop+yqthnvvvXegn6m+rnzlUivNWo4Lj2taj2M5/EZUkiRJklSVN6KSJEmSpKq8EZUkSZIkVVU1\nI8rsGntNEjOAXb0umcFjfer6b5jkAAAgAElEQVTUqb77YD6OWUrmDkt5zYiIEydOZDVzf6UMaKnn\nIsdEXfm7Uq9IZvxKNcdU6k1JXb0ouQ3OLfO5HBMfZ13qK9qVrS311OT1U+oRyxzquXPnsprX+J49\ne1pjOnr0aFYfOXIkq9nHtpRTLeU1S/Pc9ZxSD1jOm6aTGT5dzrC9S7uuHb5mOduQ1K2Utxx1P80a\nxpG15LyUenp2jYnjXkuZUb8RlSRJkiRV5Y2oJEmSJKkqb0QlSZIkSVVVzYgyFzY/P5/VzOMxQ8qe\njRHt/BuzaTRs3pKPl7KTEe08XCnryMeZnyxlG0u9KbuUcqrMDbKfZVeecqkXX3yx7+Nd887jZg9P\nHncp98rHS31peW10PYc1zx3PFa9hnqsdO3ZkNa+drutr//79WX38+PGs5rniudi4cWPfMXLeqasH\nLHuTcq673rvSLJqGHp6jMGw+eNjM6HJeM4mZ5bVyPcw69hGdxL6ipcznLGBWchKPedjMaEQ5AzqO\n/qm15tZvRCVJkiRJVXkjKkmSJEmqyhtRSZIkSVJVVTOixJ
wg+4yWel1GRKxfv77vPph17OqB2A+z\nbswEduUzOc5SXpLZyPPnz2c1M3s85kGOqZRtHPb1nFc+zpwidWVMS7lTZhOZp+Q8smcn88Ocg67z\nVMrfch54zV5xxRVZXTqX119/fVZ39cHl+2bLli19t3ns2LGsfuGFF/o+n+9DzsHZs2dbY2JGlNnX\n3bt3Z3VXHlfTzz6PWk2l62fYvqOl7S8n32kmdG2YxGziaitlHZfTZ3SQ/ORKdJ2nYfOUK+2nupzM\nKJWysdPcd9RvRCVJkiRJVXkjKkmSJEmqyhtRSZIkSVJVqdRHc5R++qd/uu/OmEXj2DZs2NB6DbOJ\nzL+dPHmy7zZZl3pNMo/3lre8pTUm5iOPHDmS1cwuMhvJutRPk89ndrLrNTzOQfpXLsUsJDOCzEZu\n27at7+Nd+2R/S2ZEz5w5k9XMHZayuXw9jyminXUs9aG97rrrspqZz1I2kvPCDGpExMLCQlYzR8qs\nLLfx9NNPZzUzn7w+OaZz5861xsQ+ocydckxzc3NZ/bnPfa5/A+Apc/DgwXofrFPEjGi3UWQTV/r6\nlZ6b1Ti345iHUeMYmqaZqc+6lJKfdTFdmbzVspyMKK00MzpIdnel52oUx1ky7DxMYmZ50M86vxGV\nJEmSJFXljagkSZIkqSpvRCVJkiRJVVXtI7p///6sZl6u1L+wq/cleySyByd7R3blAJdidpIZwA99\n6ENZ3fW34idOnMjqr3/961n9ta99Las3b96c1czTMevILCXnsSvfWeo1yn0yl8pzwedfffXVWd2V\nAe33+oj23JfOxbDnlhljXitdmVLug3Nbyt4y18rt8bzw+V19TEt5W2aS+fjNN9+c1c8//3xWM4M6\nPz+f1V25cmZpmc3mNcrnz5pS5m85ebraebdR7G/Y4xzFPGn0mdNxGbZvaOn1pecPMk9ek7l77713\nKraptaHUb3NaDNt7dJqP229EJUmSJElVeSMqSZIkSarKG1FJkiRJUlVVM6Lsqch8HTN+7H/IHp4R\n7R6IzJEy68iMXilnyDGz/yH7J0ZE3HbbbVnN7OFXv/rVvmNkTpDZRs4LX9+VlWROsJRDZdaRmBPk\nPJQyoqU8Z0T7uFiXeuAyf8nrjfPalRHlPtmztXT98PWcF/ZfZW61K+/LTPHGjRuzmtcHM57Mnd5y\nyy19n0/Mcke0M8I8Lu6DfUZnDXNkw+bKeF0tZxvTwEzoaEzDPI6jt2lpe9OY3dZkYiZvFvuK1uif\nOYsG6QlamtuVZkZpkjKkfiMqSZIkSarKG1FJkiRJUlXeiEqSJEmSqqqaEd21a1dWs7fgmTNnsppZ\ntK7cIX/G1zAvx2wkM6XMZ37nO9/J6i984QtZfccdd7TGxJzft771rb5jOnz4cFYzZ8hjKvWaZPYx\nop015DaYfezq87lUqR/mNddck9XMc3ZlUPkabpPXSwnPLXOL3F5XbvXSpUtZzXkp1cxCnjt3LquP\nHz/edwxdOdhS7nndunVZzeM8depUVvNcbN26NauZw+b2IyIuXLiQ1Zz77du3993nrOnKeE6b1ehF\nOYo+jiWTmI+k1R7jcuZxEnOmpTGt9BqdlX6r41Tq+blWe4KWMnizmCFdjkHyk+M2bDZ2OcfE14w6\nMzrJ/EZUkiRJklSVN6KSJEmSpKq8EZUkSZIkVVU1qMXsJLONzEIuLCxkNbNxEe3+heypePr06awu\n9eDkmJhb/MY3vpHVv/mbv9kaE/NwPG7mDktjOnr0aFYzo8ftdWX4OLfMjLIPKOeRmT9m/DhvHBOz\nvF0ZOvbxZD6y1KOzlPnkPBOvpUF09flcimMsjYEZ066MKM/FsWPHspp9RnluOQbmhUt9bLvynUeO\nHMnq6667LquZQS71gFXbuPNsk5AZHMRKc4I1rPTcTUOWcTXGWNrmuN8jalurGdGS1ejjaO60bVrn\nZNjM6LA4L+PsK+o3op
IkSZKkqrwRlSRJkiRV5Y2oJEmSJKkqb0QlSZIkSVVVXazo29/+dlZzQRMu\nwsIFcLpwQSMupMJFcrh4DBeo4eJEXDyGCw89/vjjrTFxcRhug4vFcJ8XLlzIai7sc/Lkyb6vP3Pm\nTGtMFy9ezGrOy/z8fFbfeOONWc3FiziPXGiIi9PwmLsWKzp79mxWv/LKK323WTq3fD1rjqFrAZ3S\nNjkGLl5UWsyI1yvnke+JiPZiVBwjr1Eu4sRzweNmzcWKeP1FtN+Ht956a1ZzHkqLNk270qIp999/\nf1YfPHhw1cekbjw307Ao0yQsyjOJ81RjHlwASZOqtODMShfu4QI6NMiCOqVtlKz2MU6qlc7bJPMb\nUUmSJElSVd6ISpIkSZKq8kZUkiRJklRV1Yzo5z//+ax+85vfnNX79u3L6pdeeimrmX2LaGf2mPlk\nho95Oe6DeTu+nvu7/vrrW2PiGLgPZka5zVI2lpnQV199te/zI9q5v3PnzmX15s2bs5qZP2ZIeYys\nt2zZktXr16/P6kuXLrXGyLwkMS/J3CGzsaw5Bs47s5AR7Xnha0rnkjXxeiOep4h23nfnzp1ZzXni\nXDMbyzHyWuF7pmtMpfPP9xGzsLOulKdjZrSL2bQ6hp3ncWQlV+NaGPY4StnaYXOsgxzTqOfa95TW\nklK+kiYhbznsmPn8STgG6jqm2uMcdl5Xk9+ISpIkSZKq8kZUkiRJklSVN6KSJEmSpKqqZkSZwTt2\n7FhWM4s2NzeX1ddee21rm6X+lKyZG2QWkpk99s9kHvPQoUOtMTFXSKVca6kPJDOnzIx29ejkPtiL\ncvv27X2f/+yzz2Y1c4ocE+eAmUHOa9c+mb/kcXGe2IeUY+TrmRnlGCPavUuZlWWulblVnjseEzEf\nzDmJ6L7mlmIPWJ4bHhOPgc9nxrQr18p9lvqjluZh1i2nd2XpOebdxmM5877SPOZKTWIP0EFMY89X\nrRzzc5OUb5tlw/bsnOVel6PUlQedhmzravEbUUmSJElSVd6ISpIkSZKq8kZUkiRJklRV1Yzo/v37\ns5o5sVJPz65+mfwZ82/M7DEXePTo0aw+ffp0VjOXyjHt3bu3NSbmL7kN5ueYU92xY0drm0sxW8ss\nJHOzXc9h/9MLFy5kNbOQzHQy81fKX/Jcs9dl18+4Tfa35LyxVyVrHiNzrF19THmc3CYzncxT8twz\nz8t9MlPa1duU22A2ltcfx8h5PHXqVN/nc545jxERt99+e1aXzmVX9nWWDdtTcRT7oBoZ0nFnH6fF\nWj3ufrqunVHP07Db8zyVjSPLVtrnsNnGcZiFnOtq5BpnYV5GYdRzO8nz6jeikiRJkqSqvBGVJEmS\nJFXljagkSZIkqaqqGVHmN5kjYzaN2H8zop3ZZM6U2URm7tg7kmNilo3H0IWvYU6VmJ9jDpHHwDEz\n//n888+39sEc4blz57KauUJmF7kP5gRvuOGGrGZmkGPmeYqI+Na3vtV3H3xNKWfI64XzyixtV3/M\n0nOYt2RGlBnm0rnl9rtyq8y2MjP8zDPPZHXpGucYeC2cOHEiq5fTA5a68t6zpJSVHEf/w1JOdRLG\nNKxJyPDxGA4ePDj0a7Q8486QajJNQga0ZNgxTnLG7zVdY6zdA3Y1zj37ox44cGDk+yjN0zSc/+Xy\nG1FJkiRJUlXeiEqSJEmSqvJGVJIkSZJUVdWMKPN0w2b2urKWzKIxb8ksGrOO3Aezbqy5/fPnz7fG\nxJwgs688bm6TGT4eN+eRvSr37NnTGhOPm9vkPMzNzWU185rM2jIj+oMf/CCr3/Wud2V1V76TGdBS\nP0s+n/ld1ps2bcpq5i/ZMzSinaXlPDFPyT6gzOZyXjlGvge6MqKlc3PkyJGs5vXEMe/evbvv63m9\nsQdtRPt9xn2W+qdKy1Gj92Rpn9OYK1zOmIftUzuKHGxprich5yzV0JV9nIbc4DSMcdSY
IWXGVDm/\nEZUkSZIkVeWNqCRJkiSpKm9EJUmSJElVVQ1qlXp6MvPHvBxzZ12Yv2Rmr9TTk1k3ZtmY/+zqK7p9\n+/asLuUzuY9SJpTbY7/Mrj6PzHCy9+T69euzen5+vrWNfh577LGs5rn+/Oc/n9XMDEa0c6PMY/Lc\nMjN68uTJrP7Jn/zJvq8/dOhQawzE8808Js/dzp07s7qUWeYxs2dnV29dbpPXCzPDvB6YCeU8Myu7\nbdu2rO46dzwuXk+c+9L7UJpUpXzlrOYUJzGPOQt9aDWbhu01uZwcYe0enZOoNAfLydZOQh/atXRu\n/W1QkiRJklSVN6KSJEmSpKq8EZUkSZIkVVU1I8q8JjOh7G/IXBnzeRHtDF+phyfzlsz4cQzsE8rn\nMwsZ0c7wcZt8nNssZSVLfUe7bNmyJauZAS1lX/l8HjePkZnB0nmKaGeGmX1lzXP7Uz/1U1ldyl8y\nS8ntR7R7j+7atavvGLgNXuN8nOeW2+vKUvJnvB6Yrd6xY0dWc+55je/du7fv4109QDlunn+OsSv7\nqromMeM3iszeLPT5nASTcH1Ik2LYzOdKt2/vycEsJzs5CRlQ/Vt+IypJkiRJqsobUUmSJElSVd6I\nSpIkSZKqqpoRZe6QubEzZ85kNTODrCPamU8+h7lS5ueYI2S2jWNmD0XuP6KdnyzlBrkP1swEcvsc\nM3OHEe3jZL/L48eP9x0z98l9MKfK1zNDyl6VEe1zxblm/1QeA/GYOE8c89atW1vbYA/NrvPd73Fm\nc5l77jpXS3XlVjluZq/Zx5bH/dRTT2U1s7LMwfI9w2OIaF+jpffRqVOnWtuQND4rzYROYqbUfLCm\n1XIyo2up9+SsW0vnzm9EJUmSJElVeSMqSZIkSarKG1FJkiRJUlXeiEqSJEmSqqq6WBEXm+GiJ69/\nff/hdC0Uw0VRiIv0XHnllVnNxWK4MAufT1zoJaK9wMylS5f67rO02BAX7eGY+PjJkydbY+JiMjt2\n7MhqLhR17ty5rOYxcVEojpk1Fyc6duxYa4xcJIeLF/H64LyeOHEiq9evX5/VnDeeOz6/awxctInX\nFxdl4pjPnj2b1QsLC1nNhYe6Fgbia3j98DiOHj2a1Vwsi68/ffp0Vt9www1Z3fWe4PnmGHj9dJ1/\nrT01FpPhIjouYFNnYaFRzLPnavZwEZ4ugyzMs5pGMcZBtqHR44JNozDsuRz39Ttt/EZUkiRJklSV\nN6KSJEmSpKq8EZUkSZIkVVU1I8ocGfNvrFNKWb158+bWNvma48ePZzWzaVTK6LH+0Y9+lNVXXXVV\ncZvMMhLzmzwm7pO5RNbMb3Ztg7lA1twms5E8lzxXzAgyezs3N9caYyk7y4wx85ubNm3KauZct2zZ\n0neM3F5EO3/LeWDOdOvWrVnN4y7lM0s52IiIU6dO9R0Dr1k+nzlWPr+U1eb1GtE+Dj7n/Pnzfbep\nXFeGb9R5OW6vRm5Qs2MSrxczpZNnOVlJvmYSMnfDjmHUGdKu50/CvMyiceR777rrrur7nBR+IypJ\nkiRJqsobUUmSJElSVd6ISpIkSZKqqpoRLfVgLGVEmSGMaOfZmI9kNpI5Q2b22COR22cmlGPs+hn7\nn7KnZikj2tU/dalSr9OI9jwwN8hzwzwuj5tj4utZM6vL/GZEO1tb6k3JXCJ7dPL64jwzG8nnd42B\nOdONGzf2HQNzp8yMlvrWcnsR7bndvXt3Vj/77LNZzeuR88Z9dvVTXYrvmYj2+4TXGzOid9xxR999\nqG2l/TDNhGolVrsfq/lOXU7tzGiN7CX3sRq5xNXoqTmsacg+2id0vPxGVJIkSZJUlTeikiRJkqSq\nvBGVJEmSJFVVNSNKzLoxm1bKa0a0+ywyD8c8Jmtukz0ZiVlH5u0i2vm4UvaR+UvmDpkz5Lwxb9k1\nJuYhmZVlv0yeC+ZWOQb2z2SvSh7Dtm3bWmNkbvXk
yZNZzXljfpLXAnOs7DPKTGhXFpfHTcw+8lwz\nT8mcKued56UrI8q54za5T557uvHGG7Oa576U941ov494HHTbbbf1fXzWjSOfOY59TkIutZQ9XOmY\nlpOdrJ2HHMW8m+HUuKyFTF4pMzqtc8Cc6qgzozVysKOe+2nIzdbkN6KSJEmSpKq8EZUkSZIkVeWN\nqCRJkiSpqrFmRJl/YzaNWUnmOyPa2UPmBEv5OPYdZW6QeThmSJkHjWj3hmR+jhk+ZhOZrytlGfk4\n5ySifBzz8/N9n88sI3OonGdmJXnuunKGxHnjcTEzyiwje34yW8kxdPWE5XHzOaVMKPdRyhgfPXo0\nq7t6xG7fvj2rma3l9bBz586s5ntkbm4uq3mMHHPXe4rj5PXEMe/du7e1DfU3Cxm9cWRG12L/y2no\nETuJ8yZpda00M2omdPb4jagkSZIkqSpvRCVJkiRJVXkjKkmSJEmqqmpGlJk+ZtGYjTx9+nRWMwMY\n0e7zyJ6KpZ6czHgyl8gen4Pk5ThOjok1M6XM27EnJ4+Z8zoIZhk5T8RcKrONzEryGPj6rnnjNpll\n5Dzx+TwG9g0t9ULtwnFy3jj3Xb1u+2GvVB7zzTff3HoN85fc5913353VTz31VFYfPnw4q0vzxDng\neyKi/b7g+d+3b1/ffa41y8lKrnbWca2YhvzkLFqN69X3gCYVc4GlbCP7hs6qScxLTmuP1lnhN6KS\nJEmSpKq8EZUkSZIkVeWNqCRJkiSpqqoZUWZAmRNjr0n2Q9yxY0drm8yvMbPHTB8zosy2sZcplXKu\nEe0encwyMpvIx5kh5TFyzMzsDZK/ZKaTY2bfR+YQuT3OK3tb8vXsIRvR3ZO13z55/bAnLM8NrwVm\nIbt6dnKueW54jXJemWvl48xBX3/99X3HGNHuj3rLLbdk9dNPP53V3/zmN7P6Xe96V1Yzv8tj5jXf\nNU98Tqlv6KxnRMfRL3MSTcJxl3KEKx3jJOYUx3H91ZiHSZxr9cf83VrJQg7b73JWc4qTmAkdtbVw\njKvJb0QlSZIkSVV5IypJkiRJqsobUUmSJElSVVUzoswRMl/Hx5nHm5uba22TOUDm3ZhdY061qydi\nv+0zH8fcYkS7FyTzkRwT85I87lKGj6/vyvBxnNdcc01W81wwr8kendwe98msLftlcn8R5Xwvt8n8\nJa8fYjaX/VkHyT7SmTNnspqZT14LPCZmQpmt5PYiIvbs2ZPVTz75ZFZ/97vfzeo777wzq3lNlzLL\nnPeua57XC9+rt912W1ZzXta61cj0TWJOdRLGNOp9rpX+ruM4rlmdy1kybL/MtZoZXQvWSlZyrRxn\nLX4jKkmSJEmqyhtRSZIkSVJV3ohKkiRJkqqqmhFln9Dz589nNbNozJExjxfR7rPIPCa3wTwcc4HM\nXzLLyMwgc4oR5d6lHCP3ee7cuaxm30jmO7k9HmPXODn3zIDSsWPH+o6JfUj5OHONXXnO5557LquZ\nK2UOkVlanmueK85TKV8c0c7f8txyn+yfynO7b9++rC5laW+66abWmNgnlmN85zvfmdV8j/Ba4DyW\ndM0T57ZUz8/PD7XPaTcJ2chJNI3zUhrjIMfA456ELOS4xzDu/Uuradgs7aTuo59B9rfSfOU45lGr\ny29EJUmSJElVeSMqSZIkSarKG1FJkiRJUlXeiEqSJEmSqqq6WBEX2eHiQ1zYZcOGDVnNRVq6fsZt\ncJ9caIULuXChHy6Qc/HixazmAkxd2yRuk/vk4jGcBy42w2PuWqzoxIkTWc3FY7goE+uFhYWsPnLk\nSFZv2rQpq7kIz1VXXdW3joi44YYbsprzcPz48azmcZcW8eExcMEmXitduAASt8lFm/bu3ZvVr776\nalbzXL3jHe/Iah5jxGALGi3Fa5bnlmPgPLLuWqyIc8kFt3jcfA+sNTUW6ZmEhX/GsQDNau+ztP1B\n5p3PGfeYx2ES
x6SVG/eCOZOitODNKOZpGhfV4XGOevEiTR+/EZUkSZIkVeWNqCRJkiSpKm9EJUmS\nJElVVc2IMjfGrNqWLVuyuitHSMyaMT+ZUur7eubdmMljJpDP5+MR7Twcs4eXLl3KaubrNm/enNXM\nBDLzx3l66aWXWmPizzj3fJw5Vx7nhQsXsppZyY0bN2Y1M6Scoy48l8RzwXm48sors5p5XuZk161b\n19pHV0ZzKZ4rYk719ttvz+r9+/dnNa+Fo0ePtrbJueNxM9PJxzlvnGdeG6X3RBdmhDnXpXM765aT\n31wLmbpJPMZJHNM0cN6kwZl11FrlN6KSJEmSpKq8EZUkSZIkVeWNqCRJkiSpqqoZUeY52TeSmT4+\nn49HtLONpb6fzOyVsovssch8XVfWkflKZvaYEeXzT58+ndXM17G/Juegq7cpt0HM73KMzA1SKUPK\n/TMPHNHOMnKfpT60PLcc0+7du/turwvHwHPF64v7vPvuu7Oa1/Djjz+e1exD2tVvsysDvBSvj1Lv\n3FJOmvvryl2z1+2ePXuymnN96tSp1jZm2Th6etboVTqNRtEHdJjtL6evaGmbk2gaxqjVZ9/QybHS\nXqVrtdep6vIbUUmSJElSVd6ISpIkSZKq8kZUkiRJklRV1YzosHk79iLsyqaxHyGziMy/se8j98Ht\nMQM6SP9L5t+Yl2TmjtlIZh+3bt2a1bt27cpqZkrZdzSinV0sZfheeOGF1jaWYtaRucQTJ05k9ZEj\nR7Ka+eCIiB07dmQ1rxfmJ3k9MHfKc8m85SB9RLlPnlu+5v3vf39WM3v72GOPZTXPPfuIduG8MBPM\n9xWPgRlQziPfh8xJd+WN+Ro+h/vsyntrtGpkQsedCxxk/8POw6iPqWt7k5jXHTbbOu5zr+l04MCB\ncQ+haFZzjSs9rlmdF42X34hKkiRJkqryRlSSJEmSVJU3opIkSZKkqqpmRLdv357VzLoxi8behF35\nTOYC+RpmI5llZMaP/TOZCeQYmEOMaOfhmE1kbpUZUh4Tn3/s2LGs3rZtW1a/6U1vao2JmU3iPHEe\nmAFkDpHzcOONN/bd/uHDh1tjYDaW88KsLM8dn88xzc3NZfXGjRuzmtdGRMRb3/rWrL7++uuzmtcw\nj+vJJ5/Mal4/nGdmK0v9WyPaGVBeL9wHc6l8nK/nvDP3GtF+77JmLrqrj+xaNgm9Jqcx87caWcth\n52E5Yxj3XA+y/3GPUdNhGvuGmnWUJoffiEqSJEmSqvJGVJIkSZJUlTeikiRJkqSqqmZEmRtjFo1Z\nR/a2fPnll1vbZD9C9lRkJo95OmYduQ9mK5n37Optyn3wuLkN9jLlmHiM7DXJnp/ve9/7WmN673vf\nm9Xf+c53spr5zJ07d2b1c889l9U8V8w2LiwsZDXPZRfOPc8d87rcB3OpzEJyHt/whjdk9b59+1pj\n2rt3b1az/ymzr88++2xW89wyh8rsLcfIvHFEO/vKMXAbnAfuk5iz5vXL7UWUc9DHjx/Pao551pRy\ng6PINprhW94cDDv3k9jzc1heKxqXaegbKml8/EZUkiRJklSVN6KSJEmSpKq8EZUkSZIkVTVRGdFS\nL8KuPqLsT8gsGvNwzCpyDF2Zz6VKWbiIdq6PfRhLx8lcIXtXMp/5/PPPZ/X3v//91phuu+22rP7I\nRz6S1V/84hez+o/+6I+ymj1gN2/enNXsd3n27NmsZu9J5hAjIjZt2pTVzKkyh7p///6svvXWW7Oa\nWUrOM88L86AR7Tzu/Px8VjMvyeczs8zrldc0r6+u3qbcJ5/Dx7typkvx3JW2xz62Ee18L4+D1wPz\nvJo8NfKXo9jnSvfBMY/6GMwDS5LUzW9EJUmSJElVeSMqSZIkSarKG1FJkiRJUlVVM6LMkbFfJrNp\nzNOxjmjnAJnxZD6OfUG7eiIuxZ6MXTlV4nERs46cF84D+1syC/nBD34wq7/yla
+09skc6c/8zM9k\n9Xve856sZg6Qr+e5uHDhQt/t79q1K6uZIezaJs/dunXrsppZWmZAS/1cee6PHDnSGhPPDbfxxBNP\nZPXhw4ezmsdQeg+UctRd2+A1yfcE52Fubi6r2Z+V1yfnmY9HtMfN42Ym+JZbbmltQ/2VcoKrkU0s\nWQs9NkvHOOx5kSRJi/xGVJIkSZJUlTeikiRJkqSqvBGVJEmSJFVVNSP64x//OKuZf2PWjc/vyojy\nOaU+n+yZyDzdpUuXspoZ0dL2Ito9NpmfI2bumBHdsWNHVt95551Z/fa3vz2rf/7nf761j0984hNZ\n/dRTT2U1c4TcB+vHH388q5mN5DHzGLt6djJXyN6jhw4d6vs4e1XymN761rdmNXOLzLlGtHu28vpa\nWFjIah4nr49SvpPXV1cek9vk+4Jj3LhxY9/ns2bPWG6P75mIdk9WbnPLli1Z3fW+UV3DZhdnIQ+6\nHGY8pboefvjhcQ9BUiV+IypJkiRJqsobUUmSJElSVd6ISpIkSZKq8kZUkiRJklRV1cWKrrrqqqzm\nQkMvvvhi39dzoZcu58+f77vNG2+8MauvueaarD569GjfffL5O3fubI2BC7NwERwu/sIFadavX9+3\nvummm7J6kAWUuIDRZ4m0OR4AAASWSURBVD/72aw+ffp0VnPhnzvuuCOr77777qx+5JFHsprz+Nxz\nz2X1H//xH7fGyHl7y1vektVcMOlrX/taVj/66KNZvX///qzmok8XL17M6nXr1rXGVMLFrVJKWc3r\n78yZM1nNxZC4UFXXYkU837yeuAAS3xN8PcfMhaZ4DFyYKCJi3759Wc33yXXXXZfVnHvlBlkgZ6WL\n6HDxIW5vkMWJStsocSGg5VnpvEuTwoWJpLXNb0QlSZIkSVV5IypJkiRJqsobUUmSJElSVVUzoldc\ncUVWM8vIrBqzbl0Z0pMnT2Y1s4mbNm3KauZUmV1jJo/5u2uvvTarmWuMaGdf5+fnW89ZihnQ9773\nvVnNbOSVV16Z1ceOHcvq73//+619cJzM+R0/fjyrOeZPf/rTWf2rv/qrWf3hD384qx988MGsnpub\ny+of/OAHrTF+9atfzeo/+ZM/6bsPjpHbZN6Sx7xx48as5rUQEXHrrbdm9TPPPJPVvN6IGeJSvrOU\no+56zQ9/+MOs5rlm5pOZ0Q0bNmQ1c6/c3/XXX98aE983zFbzGj9y5EhrG1pdBw8ezGpeizWYZRwN\n51FryV133ZXVDz300Iq2x9dz+5Lq8RtRSZIkSVJV3ohKkiRJkqryRlSSJEmSVFXVjOgrr7yS1cyu\nlXRlmhYWFrKa+TZmRM+dO9d3TKW+oYNk+Eq9SDkm5uf27t2b1bt3785qZiN5zE8++WRrTBwna84L\n87hPPPFEVn/yk5/M6l/5lV/J6ptvvjmr2S/zwIEDrTG++93vzuoHHnggqz/+8Y9nNTPHmzdvzurD\nhw9n9VNPPZXVb3vb27KaOdaI9vXFc8tcM68n7oNZyu9+97tZzWuDPWgj2tlp5pZ5PfF9w0wp+6d2\n7XOprlw0c6fcJq+vZ599tu8+1LbSXOD9998/0u2NahuSJGlt8htRSZIkSVJV3ohKkiRJkqryRlSS\nJEmSVFXVjCgx6/bSSy9l9cWLF/s+HtHuFcncKfOTp0+fzmrm4Zj5Y76TNTOBEe1cIfulMsPHzB0f\nZ77y29/+dlYfOnQoq5k7jIg4ceJE62dLMcPHHCqzjd/85jez+otf/GJWf+ADH8hq5hK7zuXP/uzP\nZjVzpp/5zGey+pFHHuk7Rnruueey+vbbb89q5n8j2nPN3qbMEPOa5fXGc80enuzZ2ZWjZr6X2+A1\nfOrUqazm3PP65BiYUe7CMfHcMUPadY2uJdOYrVzOmKfxOCWtnocffnjF21hpX1H7hkqTw29EJUmS\nJElVeSMqSZIkSarKG1FJkiRJUlWpqzenJE
mSJEmrxW9EJUmSJElVeSMqSZIkSarKG1FJkiRJUlXe\niEqSJEmSqvJGVJIkSZJUlTeikiRJkqSqvBGVJEmSJFXljagkSZIkqSpvRCVJkiRJVXkjKkmSJEmq\nyhtRSZIkSVJV3ohKkiRJkqryRlSSJEmSVJU3opIkSZKkqrwRlSRJkiRV5Y2oJEmSJKkqb0QlSZIk\nSVV5IypJkiRJqsobUUmSJElSVd6ISpIkSZKq8kZUkiRJklSVN6KSJEmSpKq8EZUkSZIkVfX/Aw1q\naNY/SIKgAAAAAElFTkSuQmCC\n",
101 | "text/plain": [
102 | ""
103 | ]
104 | },
105 | "metadata": {},
106 | "output_type": "display_data"
107 | }
108 | ],
109 | "source": [
110 | "# We can visualise the `batch_features` using matplotlib.\n",
111 | "%matplotlib inline\n",
112 | "import matplotlib.pyplot as plt\n",
113 | "\n",
114 | "f, axarr = plt.subplots(1, 3, figsize=(16,8))\n",
115 | "axarr[0].imshow(np.squeeze(ex_dict['features']['x'][32, :, :, 0]), cmap='gray')\n",
116 | "axarr[0].set_title('Input: x')\n",
117 | "axarr[0].axis('off')\n",
118 | "\n",
119 | "axarr[1].imshow(np.squeeze(ex_dict['labels']['spm_tissue'][32, :, :]), cmap='gray')\n",
120 | "axarr[1].set_title('spm_tissue')\n",
121 | "axarr[1].axis('off')\n",
122 | "\n",
123 | "axarr[2].imshow(np.squeeze(ex_dict['labels']['malp_em_tissue'][32, :, :]), cmap='gray')\n",
124 | "axarr[2].set_title('malp_em_tissue')\n",
125 | "axarr[2].axis('off')\n",
126 | "\n",
127 | "plt.show()"
128 | ]
129 | },
130 | {
131 | "cell_type": "code",
132 | "execution_count": null,
133 | "metadata": {},
134 | "outputs": [],
135 | "source": []
136 | }
137 | ],
138 | "metadata": {
139 | "kernelspec": {
140 | "display_name": "Python 3",
141 | "language": "python",
142 | "name": "python3"
143 | },
144 | "language_info": {
145 | "codemirror_mode": {
146 | "name": "ipython",
147 | "version": 3
148 | },
149 | "file_extension": ".py",
150 | "mimetype": "text/x-python",
151 | "name": "python",
152 | "nbconvert_exporter": "python",
153 | "pygments_lexer": "ipython3",
154 | "version": "3.5.2"
155 | }
156 | },
157 | "nbformat": 4,
158 | "nbformat_minor": 2
159 | }
160 |
--------------------------------------------------------------------------------
/model/test.csv:
--------------------------------------------------------------------------------
1 | id,t1,spm_tissue
2 | 2,/home/maia_kbf/MISA_FIT/Preprocessing/Test_Set/IBSR_02.nii,/home/maia_kbf/MISA_FIT/Preprocessing/Test_Set/IBSR_02_seg.nii
3 | 10,/home/maia_kbf/MISA_FIT/Preprocessing/Test_Set/IBSR_10.nii,/home/maia_kbf/MISA_FIT/Preprocessing/Test_Set/IBSR_10_seg.nii
4 | 15,/home/maia_kbf/MISA_FIT/Preprocessing/Test_Set/IBSR_15.nii,/home/maia_kbf/MISA_FIT/Preprocessing/Test_Set/IBSR_15_seg.nii
5 | 11,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_11.nii,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_11_seg.nii
6 | 12,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_12.nii,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_12_seg.nii
7 | 13,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_13.nii,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_13_seg.nii
8 | 14,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_14.nii,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_14_seg.nii
9 | 17,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_17.nii,/home/maia_kbf/MISA_FIT/Preprocessing/Validation_Set/IBSR_17_seg.nii
10 |
11 |
--------------------------------------------------------------------------------
/model/train.csv:
--------------------------------------------------------------------------------
1 | id,subj_t1,subj_label
2 | IBSR_01,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_01.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_01_seg.nii
3 | IBSR_03,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_03.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_03_seg.nii
4 | IBSR_04,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_04.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_04_seg.nii
5 | IBSR_05,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_05.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_05_seg.nii
6 | IBSR_06,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_06.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_06_seg.nii
7 | IBSR_07,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_07.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_07_seg.nii
8 | IBSR_08,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_08.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_08_seg.nii
9 | IBSR_09,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_09.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_09_seg.nii
10 | IBSR_16,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_16.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_16_seg.nii
11 | IBSR_18,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_18.nii,/home/maia_kbf/MISA_FIT/FinalData/TrainingSet/IBSR_18_seg.nii
12 |
--------------------------------------------------------------------------------
/model/train.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import division
3 | from __future__ import print_function
4 |
5 | import argparse
6 | import os
7 |
8 | import numpy as np
9 | import pandas as pd
10 | import tensorflow as tf
11 | from keras import backend as K
12 | #### TensorFlow session setup: cap GPU memory so one card can be shared by several jobs
13 | config=tf.ConfigProto()
14 | config.gpu_options.allow_growth=True
15 | config.gpu_options.per_process_gpu_memory_fraction=0.23
16 | K.tensorflow_backend.set_session(tf.Session(config=config))
17 |
18 |
19 | from dltk.core.metrics import dice
20 | from dltk.core.activations import leaky_relu
21 | from dltk.io.abstract_reader import Reader
22 |
23 | from neuronet import neuronet_3d
24 |
25 | from reader import read_fn
26 | import json
27 |
28 | # PARAMS
29 | EVAL_EVERY_N_STEPS = 100
30 | EVAL_STEPS = 10
31 |
32 | NUM_CHANNELS = 1
33 |
34 | BATCH_SIZE = 1
35 | SHUFFLE_CACHE_SIZE = 10
36 |
37 | MAX_STEPS = 4000
38 |
39 |
40 | # MODEL
def model_fn(features, labels, mode, params):
    """Build the NeuroNet graph and return an EstimatorSpec for `mode`.

    Args:
        features: dict with key 'x' holding the input image batch
            (indexed as [B, D, H, W, C] by the summary code below).
        labels: dict mapping each protocol name to its integer label
            volume; ignored in PREDICT mode.
        mode: a `tf.estimator.ModeKeys` value.
        params: run configuration. Must provide "protocols",
            "num_classes" (one entry per protocol), "learning_rate" and
            a "network" sub-dict with "num_residual_units", "filters"
            and "strides".

    Returns:
        A `tf.estimator.EstimatorSpec` wired for prediction, or for
        training/evaluation with the averaged cross-entropy loss.
    """

    # 1. create a model and its outputs
    def lrelu(x):
        return leaky_relu(x, 0.1)

    protocols = params["protocols"]

    net_output_ops = neuronet_3d(features['x'],
                                 num_classes=params["num_classes"],
                                 protocols=protocols,
                                 num_res_units=params["network"]["num_residual_units"],
                                 filters=params["network"]["filters"],
                                 strides=params["network"]["strides"],
                                 activation=lrelu,
                                 mode=mode)

    # 1.1 Generate predictions only (for `ModeKeys.PREDICT`)
    if mode == tf.estimator.ModeKeys.PREDICT:
        return tf.estimator.EstimatorSpec(
            mode=mode,
            predictions=net_output_ops,
            export_outputs={'out': tf.estimator.export.PredictOutput(net_output_ops)})

    # 2. Loss: one sparse softmax cross-entropy term per protocol, summed and
    # divided by the protocol count so single- and multi-task configs are on
    # a comparable scale.
    ce = [tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
              logits=net_output_ops['logits_{}'.format(p)],
              labels=labels[p]))
          for p in protocols]
    loss = tf.div(tf.add_n(ce), tf.constant(len(protocols), dtype=tf.float32))

    # 3. Training op, with UPDATE_OPS as a dependency so batch-norm moving
    # averages are refreshed on every step.
    global_step = tf.train.get_global_step()
    optimiser = tf.train.AdamOptimizer(learning_rate=params["learning_rate"], epsilon=1e-5)

    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
        train_op = optimiser.minimize(loss, global_step=global_step)

    # 4.1 (optional) image summaries for tensorboard: a single central slice
    # (depth index 64 of the first batch element) of the input and of each
    # protocol's label and prediction.
    my_image_summaries = {}
    my_image_summaries['feat_t1'] = features['x'][0, 64, :, :, 0]
    for p in protocols:
        my_image_summaries['{}/lbl'.format(p)] = tf.cast(labels[p], tf.float32)[0, 64, :, :]
        my_image_summaries['{}/pred'.format(p)] = tf.cast(net_output_ops['y_{}'.format(p)], tf.float32)[0, 64, :, :]

    expected_output_size = [1, 128, 128, 1]  # [B, W, H, C]
    # Plain loop instead of a side-effect list comprehension.
    for name, image in my_image_summaries.items():
        tf.summary.image(name, tf.reshape(image, expected_output_size))

    # 4.2 (optional) metric summaries: mean Dice over foreground classes only
    # (the [1:] slice drops class 0 / background).
    for i, p in enumerate(protocols):
        c = tf.constant(params["num_classes"][i])
        mean_dice = tf.reduce_mean(tf.py_func(
            dice, [net_output_ops['y_{}'.format(p)], labels[p], c], tf.float32)[1:])
        tf.summary.scalar('dsc_{}'.format(p), mean_dice)

    # 5. Return EstimatorSpec object
    return tf.estimator.EstimatorSpec(mode=mode,
                                      predictions=None,
                                      loss=loss,
                                      train_op=train_op,
                                      eval_metric_ops=None)
109 |
110 |
def train(args, config):
    """Run the NeuroNet training loop and export a SavedModel.

    Alternates `EVAL_EVERY_N_STEPS` of training with `EVAL_STEPS` of
    validation until `MAX_STEPS`, then exports the trained estimator to
    `config["model_path"]`. A KeyboardInterrupt stops training early but
    still exports the current model.

    Args:
        args: parsed CLI namespace providing `train_csv` and `val_csv`.
        config: parsed JSON run config providing "protocols",
            "model_path" and the model_fn parameters.
    """

    # Fixed seeds for reproducible patch sampling and weight init.
    np.random.seed(42)
    tf.set_random_seed(42)

    print('Setting up...')
    # Parse csv files for file names. `.values` replaces the long-removed
    # DataFrame.as_matrix() (deprecated in pandas 0.23, removed in 1.0) and
    # works on every pandas version.
    train_filenames = pd.read_csv(args.train_csv,
                                  dtype=object,
                                  keep_default_na=False,
                                  na_values=[]).values

    val_filenames = pd.read_csv(args.val_csv,
                                dtype=object,
                                keep_default_na=False,
                                na_values=[]).values

    # Set up a data reader to handle the file i/o.
    reader_params = {
        'n_examples': 5,
        'example_size': [128, 128, 128],
        'extract_examples': True,
        'protocols': config["protocols"]}

    reader_example_shapes = {
        'features': {'x': reader_params['example_size'] + [NUM_CHANNELS, ]},
        'labels': {p: reader_params['example_size'] for p in config["protocols"]}}

    reader = Reader(read_fn,
                    {'features': {'x': tf.float32},
                     'labels': {p: tf.int32 for p in config["protocols"]}})

    # Get input functions and queue initialisation hooks for training and validation data
    train_input_fn, train_qinit_hook = reader.get_inputs(
        train_filenames,
        tf.estimator.ModeKeys.TRAIN,
        example_shapes=reader_example_shapes,
        batch_size=BATCH_SIZE,
        shuffle_cache_size=SHUFFLE_CACHE_SIZE,
        params=reader_params)

    val_input_fn, val_qinit_hook = reader.get_inputs(
        val_filenames,
        tf.estimator.ModeKeys.EVAL,
        example_shapes=reader_example_shapes,
        batch_size=BATCH_SIZE,
        shuffle_cache_size=SHUFFLE_CACHE_SIZE,
        params=reader_params)

    # Instantiate the neural network estimator
    nn = tf.estimator.Estimator(model_fn=model_fn,
                                model_dir=config["model_path"],
                                params=config,
                                config=tf.estimator.RunConfig(session_config=tf.ConfigProto()))

    # Hooks for validation summaries
    val_summary_hook = tf.contrib.training.SummaryAtEndHook(
        os.path.join(config["model_path"], 'eval'))
    step_cnt_hook = tf.train.StepCounterHook(
        every_n_steps=EVAL_EVERY_N_STEPS, output_dir=config["model_path"])

    print('Starting training...')
    try:
        # Interleave training and validation in EVAL_EVERY_N_STEPS chunks.
        for _ in range(MAX_STEPS // EVAL_EVERY_N_STEPS):
            nn.train(input_fn=train_input_fn,
                     hooks=[train_qinit_hook, step_cnt_hook],
                     steps=EVAL_EVERY_N_STEPS)

            results_val = nn.evaluate(input_fn=val_input_fn,
                                      hooks=[val_qinit_hook, val_summary_hook],
                                      steps=EVAL_STEPS)
            print('Step = {}; val loss = {:.5f};'.format(results_val['global_step'], results_val['loss']))

    except KeyboardInterrupt:
        # Allow the user to stop early; fall through to the export below.
        pass

    print('Stopping now.')
    export_dir = nn.export_savedmodel(
        export_dir_base=config["model_path"],
        serving_input_receiver_fn=reader.serving_input_receiver_fn(reader_example_shapes))
    print('Model saved to {}.'.format(export_dir))
192 |
193 |
if __name__ == '__main__':

    # Set up argument parser
    parser = argparse.ArgumentParser(description='NeuroNet training script')
    parser.add_argument('--restart', default=False, action='store_true')
    parser.add_argument('--verbose', default=False, action='store_true')
    parser.add_argument('--cuda_devices', '-c', default='0')

    parser.add_argument('--train_csv', default='train.csv')
    parser.add_argument('--val_csv', default='val.csv')
    parser.add_argument('--config', default='config_all.json')

    args = parser.parse_args()

    # Set verbosity
    if args.verbose:
        os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'
        tf.logging.set_verbosity(tf.logging.INFO)
    else:
        os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
        tf.logging.set_verbosity(tf.logging.ERROR)

    # GPU allocation options
    os.environ["CUDA_VISIBLE_DEVICES"] = args.cuda_devices

    # Parse the run config
    with open(args.config) as f:
        config = json.load(f)

    # Handle restarting and resuming training.
    # shutil.rmtree/os.makedirs instead of os.system('rm -rf'/'mkdir -p'):
    # portable, and safe when model_path contains spaces or shell metacharacters.
    import shutil

    if args.restart:
        print('Restarting training from scratch.')
        shutil.rmtree(config["model_path"], ignore_errors=True)

    if not os.path.isdir(config["model_path"]):
        os.makedirs(config["model_path"])
    else:
        print('Resuming training on model_path {}'.format(config["model_path"]))

    # Call training
    train(args, config)
235 |
--------------------------------------------------------------------------------
/model/val.csv:
--------------------------------------------------------------------------------
1 | id,subj_t1,subj_label
2 | IBSR_11,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_11.nii,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_11_seg.nii
3 | IBSR_12,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_12.nii,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_12_seg.nii
4 | IBSR_13,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_13.nii,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_13_seg.nii
5 | IBSR_14,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_14.nii,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_14_seg.nii
6 | IBSR_17,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_17.nii,/home/maia_kbf/MISA_FIT/FinalData/ValidationSet/IBSR_17_seg.nii
7 |
--------------------------------------------------------------------------------
/paper_147.pptx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitushar/Brain-Tissue-Segmentation-Using-Deep-Learning-Pipeline-NeuroNet/e3d33136d34a9ea4b55b3e210c78271980b683e2/paper_147.pptx
--------------------------------------------------------------------------------