├── .gitattributes
├── .github
└── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
├── .gitignore
├── CODE_OF_CONDUCT.md
├── GWR
├── README.md
├── groupwise_niftyreg_params.sh
└── groupwise_niftyreg_run.sh
├── HelperFunctions
├── matlab
│ ├── cropPad
│ │ ├── ReadMe.md
│ │ ├── cropPad.m
│ │ ├── cropPadNii.m
│ │ └── cropPadNiiBatch.m
│ └── reorient
│ │ ├── reorientNii.m
│ │ └── reorientNiiBatch.m
└── python
│ ├── ReadME.md
│ ├── bash_function_generators.py
│ ├── segmentation_propagation.py
│ └── utils.py
├── LICENSE
├── MASHelperFunctions.sh
├── MBSHelperFunctions.sh
├── MRM_NeAT_Atlas_Label.txt
├── README.md
├── _config.yml
├── demo
├── mas_demo.sh
└── mbs_demo.sh
├── depreciated
├── ReadMe.md
├── for_cluster
│ ├── LR_separation.sh
│ ├── STEPS_optimisation.sh
│ ├── STEPS_optimisation_dice.sh
│ ├── brain_extraction_batch.sh
│ ├── dice_batch.sh
│ ├── dice_batch_step2.sh
│ ├── dilate_batch.sh
│ ├── labfusion.sh
│ ├── labfusion_STAPLE.sh
│ ├── labfusion_STEPS.sh
│ ├── labfusion_batch.sh
│ ├── labfusion_batch_STAPLE.sh
│ ├── leave_one_out_STAPLE.sh
│ ├── leave_one_out_dice.sh
│ ├── leave_one_out_dice_STAPLE.sh
│ ├── leave_one_out_labfusion.sh
│ ├── leave_one_out_parcellate.sh
│ ├── mask.sh
│ ├── mask_batch.sh
│ ├── parcellation.sh
│ ├── parcellation_batch.sh
│ ├── parcellation_merge_4D.sh
│ ├── parcellation_merge_4D_batch.sh
│ ├── single_atlas_dice.sh
│ └── single_atlas_dice_step2.sh
└── for_single_workstation
│ ├── brain_extraction_batch.sh
│ ├── dilate_batch.sh
│ ├── labfusion.sh
│ ├── labfusion_batch.sh
│ ├── mask.sh
│ ├── mask_batch.sh
│ ├── parcellation.sh
│ └── parcellation_batch.sh
├── docs
├── _config.yml
├── groupwise_ex-vivo.png
├── groupwise_in_vivo.png
├── journal.pone.0086576.g001.png
└── quickcheckdemo.png
└── parameters_samples
├── old_version
├── default_parameter_for_ex_vivo_coarse_step.sh
├── default_parameter_for_ex_vivo_fine_step.sh
├── default_parameter_for_in_vivo_coarse_step.sh
├── default_parameter_for_in_vivo_fine_step.sh
└── sample_parameter.sh
└── parameter_sample.sh
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
4 | # Explicitly declare text files we want to always be normalized and converted
5 | # to native line endings on checkout.
6 | *.sh text eol=lf
7 |
8 | # Custom for Visual Studio
9 | *.cs diff=csharp
10 | *.sln merge=union
11 | *.csproj merge=union
12 | *.vbproj merge=union
13 | *.fsproj merge=union
14 | *.dbproj merge=union
15 |
16 | # Standard to msysgit
17 | *.doc diff=astextplain
18 | *.DOC diff=astextplain
19 | *.docx diff=astextplain
20 | *.DOCX diff=astextplain
21 | *.dot diff=astextplain
22 | *.DOT diff=astextplain
23 | *.pdf diff=astextplain
24 | *.PDF diff=astextplain
25 | *.rtf diff=astextplain
26 | *.RTF diff=astextplain
27 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 |
5 | ---
6 |
7 | **Describe the bug**
8 | A clear and concise description of what the bug is.
9 |
10 | **To Reproduce**
11 | Steps to reproduce the behavior:
12 | 1. Go to '...'
13 | 2. Click on '....'
14 | 3. Scroll down to '....'
15 | 4. See error
16 |
17 | **Expected behavior**
18 | A clear and concise description of what you expected to happen.
19 |
20 | **Screenshots**
21 | If applicable, add screenshots to help explain your problem.
22 |
23 | **Desktop (please complete the following information):**
24 | - OS: [e.g. iOS]
25 | - Browser [e.g. chrome, safari]
26 | - Version [e.g. 22]
27 |
28 | **Smartphone (please complete the following information):**
29 | - Device: [e.g. iPhone6]
30 | - OS: [e.g. iOS8.1]
31 | - Browser [e.g. stock browser, safari]
32 | - Version [e.g. 22]
33 |
34 | **Additional context**
35 | Add any other context about the problem here.
36 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 |
5 | ---
6 |
7 | **Is your feature request related to a problem? Please describe.**
8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
9 |
10 | **Describe the solution you'd like**
11 | A clear and concise description of what you want to happen.
12 |
13 | **Describe alternatives you've considered**
14 | A clear and concise description of any alternative solutions or features you've considered.
15 |
16 | **Additional context**
17 | Add any other context or screenshots about the feature request here.
18 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | .ipynb_checkpoints
3 |
4 | #################
5 | ## Eclipse
6 | #################
7 |
8 | *.pydevproject
9 | .project
10 | .metadata
11 | bin/
12 | tmp/
13 | *.tmp
14 | *.bak
15 | *.swp
16 | *~.nib
17 | local.properties
18 | .classpath
19 | .settings/
20 | .loadpath
21 |
22 | # External tool builders
23 | .externalToolBuilders/
24 |
25 | # Locally stored "Eclipse launch configurations"
26 | *.launch
27 |
28 | # CDT-specific
29 | .cproject
30 |
31 | # PDT-specific
32 | .buildpath
33 |
34 |
35 | #################
36 | ## Visual Studio
37 | #################
38 |
39 | ## Ignore Visual Studio temporary files, build results, and
40 | ## files generated by popular Visual Studio add-ons.
41 |
42 | # User-specific files
43 | *.suo
44 | *.user
45 | *.sln.docstates
46 |
47 | # Build results
48 | [Dd]ebug/
49 | [Rr]elease/
50 | *_i.c
51 | *_p.c
52 | *.ilk
53 | *.meta
54 | *.obj
55 | *.pch
56 | *.pdb
57 | *.pgc
58 | *.pgd
59 | *.rsp
60 | *.sbr
61 | *.tlb
62 | *.tli
63 | *.tlh
64 | *.tmp
65 | *.vspscc
66 | .builds
67 | *.dotCover
68 |
69 | ## TODO: If you have NuGet Package Restore enabled, uncomment this
70 | #packages/
71 |
72 | # Visual C++ cache files
73 | ipch/
74 | *.aps
75 | *.ncb
76 | *.opensdf
77 | *.sdf
78 |
79 | # Visual Studio profiler
80 | *.psess
81 | *.vsp
82 |
83 | # ReSharper is a .NET coding add-in
84 | _ReSharper*
85 |
86 | # Installshield output folder
87 | [Ee]xpress
88 |
89 | # DocProject is a documentation generator add-in
90 | DocProject/buildhelp/
91 | DocProject/Help/*.HxT
92 | DocProject/Help/*.HxC
93 | DocProject/Help/*.hhc
94 | DocProject/Help/*.hhk
95 | DocProject/Help/*.hhp
96 | DocProject/Help/Html2
97 | DocProject/Help/html
98 |
99 | # Click-Once directory
100 | publish
101 |
102 | # Others
103 | [Bb]in
104 | [Oo]bj
105 | sql
106 | TestResults
107 | *.Cache
108 | ClientBin
109 | stylecop.*
110 | ~$*
111 | *.dbmdl
112 | # added for RIA/Silverlight projects
112 | Generated_Code
113 |
114 | # Backup & report files from converting an old project file to a newer
115 | # Visual Studio version. Backup files are not needed, because we have git ;-)
116 | _UpgradeReport_Files/
117 | Backup*/
118 | UpgradeLog*.XML
119 |
120 |
121 |
122 | ############
123 | ## Windows
124 | ############
125 |
126 | # Windows image file caches
127 | Thumbs.db
128 |
129 | # Folder config file
130 | Desktop.ini
131 |
132 |
133 | #############
134 | ## Python
135 | #############
136 |
137 | *.py[co]
138 |
139 | # Packages
140 | *.egg
141 | *.egg-info
142 | dist
143 | build
144 | eggs
145 | parts
146 | bin
147 | var
148 | sdist
149 | develop-eggs
150 | .installed.cfg
151 |
152 | # Installer logs
153 | pip-log.txt
154 |
155 | # Unit test / coverage reports
156 | .coverage
157 | .tox
158 |
159 | #Translations
160 | *.mo
161 |
162 | #Mr Developer
163 | .mr.developer.cfg
164 |
165 | # Mac crap
166 | .DS_Store
167 | for_single_workstation/depreciated/mas_fusion_batch_pbs_qsub.sh
168 | for_single_workstation/depreciated/mas_quickcheck.sh
169 | for_single_workstation/depreciated/mas_fusion.sh
170 | for_single_workstation/depreciated/mas_mapping.sh
171 | for_single_workstation/depreciated/mas_mapping.sh
172 | for_single_workstation/depreciated/mas_mapping_batch_pbs_qsub.sh
173 | mas_masking_batch.sh
174 | docs/Pipeline_ReadMe.pptx
175 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, sex characteristics, gender identity and expression,
9 | level of experience, education, socio-economic status, nationality, personal
10 | appearance, race, religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies both within project spaces and in public spaces
49 | when an individual is representing the project or its community. Examples of
50 | representing a project or community include using an official project e-mail
51 | address, posting via an official social media account, or acting as an appointed
52 | representative at an online or offline event. Representation of a project may be
53 | further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the project team at da_ma@sfu.ca. All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
75 | For answers to common questions about this code of conduct, see
76 | https://www.contributor-covenant.org/faq
77 |
--------------------------------------------------------------------------------
/GWR/README.md:
--------------------------------------------------------------------------------
1 | # Group-wise Registration (Average Template Creation) pipeline
2 |
3 | This Group-wise Registration pipeline is modified from NiftyReg's original groupwise registration pipeline, which was designed for the SGE/PBS cluster architecture, with added SLURM cluster compatibility.
4 |
5 | This script can be run either locally or on SLURM cluster.
6 |
7 | # Sample results on mouse model:
8 |
9 |
10 |
11 | # Reference:
12 | Original NiftyReg Groupwise registration pipeline:
13 | - http://cmictig.cs.ucl.ac.uk/wiki/index.php/Niftyreg_Groupwise
14 |
15 | Python wrapper using Nipype:
16 | - https://nipype.readthedocs.io/en/1.1.7/interfaces/generated/workflows.smri/niftyreg.groupwise.html
17 |
18 | # Paper Citation:
19 | - Ma, D., Cardoso, M. J., Zuluaga, M. A., Modat, M., Powell, N. M., Wiseman, F. K., Cleary, J. O., Sinclair, B., Harrison, I. F., Siow, B., Popuri, K., Lee, S., Matsubara, J. A., Sarunic, M. V, Beg, M. F., Tybulewicz, V. L. J., Fisher, E. M. C., Lythgoe, M. F., & Ourselin, S. (2020). **Substantially thinner internal granular layer and reduced molecular layer surface in the cerebellum of the Tc1 mouse model of Down Syndrome – a comprehensive morphometric analysis with active staining contrast-enhanced MRI**. NeuroImage, 117271. https://doi.org/https://doi.org/10.1016/j.neuroimage.2020.117271
20 | - Ma, D., Cardoso, M. J., Zuluaga, M. A., Modat, M., Powell, N., Wiseman, F., Tybulewicz, V., Fisher, E., Lythgoe, M. F., & Ourselin, S. (2015). **Grey Matter Sublayer Thickness Estimation in the Mouse Cerebellum**. In Medical Image Computing and Computer Assisted Intervention 2015 (pp. 644–651). https://doi.org/10.1007/978-3-319-24574-4_77
21 | - Holmes HE, Powell NM, Ma D, Ismail O, Harrison IF, Wells JA, Colgan N, O'Callaghan JM, Johnson RA, Murray TK, Ahmed Z, Heggenes M, Fisher A, Cardoso MJ, Modat M, O'Neill MJ, Collins EC, Fisher EM, Ourselin S, Lythgoe MF. **Comparison of In Vivo and Ex Vivo MRI for the Detection of Structural Abnormalities in a Mouse Model of Tauopathy. Front Neuroinform**. 2017 Mar 31;11:20. doi: 10.3389/fninf.2017.00020. PMID: 28408879; PMCID: PMC5374887.
22 | - Powell NM, Modat M, Cardoso MJ, Ma D, Holmes HE, Yu Y, O’Callaghan J, Cleary JO, Sinclair B, Wiseman FK, Tybulewicz VL. **Fully-automated μMRI morphometric phenotyping of the Tc1 mouse model of Down syndrome**. PLoS One. 2016 Sep 22;11(9):e0162974.
23 |
--------------------------------------------------------------------------------
/GWR/groupwise_niftyreg_params.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | ############################################################################
4 | ###################### PARAMETERS THAT CAN BE CHANGED ######################
5 | ############################################################################
6 | # Array that contains the input images used to create the atlas
7 | export IMG_INPUT=(`ls /home/arbaza1/scratch/morph_data/Tc1_Nick_TBM_Paper/1.1_FILES/*.nii.gz`)
8 | export IMG_INPUT_MASK= # leave empty to not use floating masks
9 |
10 | # template image used to initialise the atlas creation
11 | # (the array entry is already a full path, so no `ls` subshell is needed)
12 | export TEMPLATE=${IMG_INPUT[0]}
13 | export TEMPLATE_MASK= # leave empty to not use a reference mask
14 |
15 | # folder where the result images will be saved
16 | export RES_FOLDER=`pwd`/groupwise_result_Sba
17 |
18 | # arguments to use for the affine registration (reg_aladin)
19 | export AFFINE_args="-omp 8"
20 | # arguments to use for the non-rigid registration (reg_f3d)
21 | export NRR_args="-omp 8"
22 |
23 | # number of affine loops to perform - Note that the first step is always rigid
24 | export AFF_IT_NUM=10
25 | # number of non-rigid loops to perform
26 | export NRR_IT_NUM=10
27 |
28 | # SLURM submission command and arguments used by the run script
29 | # (duplicate --export=ALL removed; it only needs to be given once)
30 | export SBATCH_CMD="sbatch --account=rrg-mfbeg-ad --time=15:30:00 --ntasks=8 --mem-per-cpu=20G --export=ALL --mail-user=arbaza@sfu.ca --mail-type=BEGIN --mail-type=END --mail-type=FAIL --requeue"
31 | ############################################################################
32 |
--------------------------------------------------------------------------------
/GWR/groupwise_niftyreg_run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | #### What could be done ##################################################################
4 | # - add a preprocessing step in order to intensity normalise all the input images ???
5 | # - Any other ?
6 | ##########################################################################################
7 |
8 |
9 | if [ $# -lt 1 ]
10 | then
11 | echo ""
12 | echo "*******************************************************************************"
13 | echo "One argument is expected to run this script:"
14 | echo "- File with contains the altas creation parameters"
15 | echo "example: $0 param_groupwise_niftyreg.sh "
16 | echo "*******************************************************************************"
17 | echo ""
18 | exit
19 | fi
20 |
21 |
22 | #############################################################################
23 | # read the input parameters
24 | . $1
25 |
26 | #############################################################################
27 | ## the argument value are checked
28 | if [ ${#IMG_INPUT[@]} -lt 2 ]
29 | then
30 | echo "Less than 2 images have been specified"
31 | echo "Exit ..."
32 | exit
33 | fi
34 |
35 | if [ ! -e ${TEMPLATE} ]
36 | then
37 | echo "The template image (${TEMPLATE}) does not exist"
38 | echo "Exit ..."
39 | exit
40 | fi
41 |
42 | if [ "${TEMPLATE_MASK}" != "" ] && [ ! -f ${TEMPLATE_MASK} ]
43 | then
44 | echo "The template image mask (${TEMPLATE_MASK}) does not exist"
45 | echo "Exit ..."
46 | fi
47 |
48 | IMG_NUMBER=${#IMG_INPUT[@]}
49 | MASK_NUMBER=${#IMG_INPUT_MASK[@]}
50 | if [ ${MASK_NUMBER} -gt 0 ] && [ ! -f ${IMG_INPUT_MASK[0]} ] \
51 | && [ ${MASK_NUMBER} != ${IMG_NUMBER} ]
52 | then
53 | echo "The number of images is different from the number of floating masks"
54 | echo "Exit ..."
55 | exit
56 | fi
57 |
58 | #############################################################################
59 | ## SET UP THE NIFTYREG EXECUTABLES
60 | AFFINE=reg_aladin
61 | NRR=reg_f3d
62 | RES=reg_resample
63 | AVERAGE=reg_average
64 | TRANS=reg_transform
65 | TOOLS=reg_tools
66 |
67 | #############################################################################
68 | echo ""
69 | echo "************************************************************"
70 | echo ">>> There are ${IMG_NUMBER} input images to groupwise register <<<"
71 | echo ">>> The template image to initialise the registration is ${TEMPLATE} <<<"
72 | echo "************************************************************"
73 | echo ""
74 | #############################################################################
75 | # CREATE THE RESULT FOLDER
76 | if [ ! -d ${RES_FOLDER} ]
77 | then
78 | echo "The output image folder (${RES_FOLDER}) does not exist"
79 | mkdir ${RES_FOLDER}
80 | if [ ! -d ${RES_FOLDER} ]
81 | then
82 | echo "Unable to create the ${RES_FOLDER} folder"
83 | echo "Exit ..."
84 | exit
85 | else
86 | echo "The output image folder (${RES_FOLDER}) has been created"
87 | fi
88 | fi
89 |
90 | #############################################################################
91 | #############################################################################
92 | # PERFORM THE RIGID/AFFINE REGISTRATION
93 |
94 | # The initial average image is as specified by the user
95 | averageImage=${TEMPLATE}
96 |
97 | # Loop over all iterations
98 | for (( CUR_IT=1; CUR_IT<=${AFF_IT_NUM}; CUR_IT++ ))
99 | do
100 | # Check if the iteration has already been performed
101 | if [ ! -f ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz ]
102 | then
103 | #############################
104 | # Create a folder to store the result
105 | if [ ! -d ${RES_FOLDER}/aff_${CUR_IT} ]
106 | then
107 | mkdir ${RES_FOLDER}/aff_${CUR_IT}
108 | fi
109 |
110 | #############################
111 | # Run the rigid or affine registration
112 | if [ "`which sbatch 2> /dev/null`" == "" ]
113 | then
114 | # All registration are performed serially
115 | for (( i=0 ; i<${IMG_NUMBER}; i++ ))
116 | do
117 | name=`basename ${IMG_INPUT[${i}]} .gz`
118 | name=`basename ${name} .nii`
119 | name=`basename ${name} .hdr`
120 | name=`basename ${name} .img`
121 | # Check if the registration has already been performed
122 | if [ ! -f ${RES_FOLDER}/aff_${CUR_IT}/aff_mat_${name}_it${CUR_IT}.txt ]
123 | then
124 | aladin_args=""
125 | # Registration is forced to be rigid for the first step
126 | if [ ${CUR_IT} == 1 ]
127 | then
128 | aladin_args="-rigOnly"
129 | else
130 | # Check if a previous affine can be use for initialisation
131 | if [ -f ${RES_FOLDER}/aff_`expr ${CUR_IT} - 1`/aff_mat_${name}_it`expr ${CUR_IT} - 1`.txt ]
132 | then
133 | aladin_args="-inaff \
134 | ${RES_FOLDER}/aff_`expr ${CUR_IT} - 1`/aff_mat_${name}_it`expr ${CUR_IT} - 1`.txt"
135 | fi
136 | fi
137 | # Check if a mask has been specified for the reference image
138 | if [ "${TEMPLATE_MASK}" != "" ]
139 | then
140 | aladin_args="${aladin_args} -rmask ${TEMPLATE_MASK}"
141 | fi
142 | # Check if a mask has been specified for the floating image
143 | if [ ${MASK_NUMBER} == ${IMG_NUMBER} ]
144 | then
145 | aladin_args="${aladin_args} -fmask ${IMG_INPUT_MASK[${i}]}"
146 | fi
147 | result="/dev/null"
148 | if [ "${CUR_IT}" == "${AFF_IT_NUM}" ]
149 | then
150 | result="${RES_FOLDER}/aff_${CUR_IT}/aff_res_${name}_it${CUR_IT}.nii.gz"
151 | fi
152 | # Perform the registration
153 | reg_aladin ${AFFINE_args} ${aladin_args} \
154 | -ref ${averageImage} \
155 | -flo ${IMG_INPUT[${i}]} \
156 | -aff ${RES_FOLDER}/aff_${CUR_IT}/aff_mat_${name}_it${CUR_IT}.txt \
157 | -res ${result} > ${RES_FOLDER}/aff_${CUR_IT}/aff_log_${name}_it${CUR_IT}.txt
158 | if [ ! -f ${RES_FOLDER}/aff_${CUR_IT}/aff_mat_${name}_it${CUR_IT}.txt ]
159 | then
160 | echo "Error when creating \
161 | ${RES_FOLDER}/aff_${CUR_IT}/aff_mat_${name}_it${CUR_IT}.txt"
162 | exit
163 | fi
164 | fi
165 | done
166 | else
167 | # Create shell script to run all jobs in an array
168 | echo \#\!/bin/bash > ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
169 |
170 | if [ $[avg_aff_`expr ${CUR_IT} - 1`_${$}] -ne 0 ]; then
171 | echo "#SBATCH --dependency=afterok:$[avg_aff_`expr ${CUR_IT} - 1`_${$}]" \
172 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
173 | fi
174 |
175 | # Define the current image index
176 | echo "img_number=\`expr \$SLURM_ARRAY_TASK_ID - 1\`" \
177 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
178 | echo ". `readlink -f $1`" \
179 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
180 | # Extract the name of the file without the path and the extension
181 | echo "name=\`basename \${IMG_INPUT[\$img_number]} .gz\`" \
182 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
183 | echo "name=\`basename \$name .nii\`" \
184 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
185 | echo "name=\`basename \$name .hdr\`" \
186 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
187 | echo "name=\`basename \$name .img\`" \
188 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
189 | # Check that the registration has not already been performed
190 | echo "if [ ! -e ${RES_FOLDER}/aff_${CUR_IT}/aff_mat_\${name}_it${CUR_IT}.txt ]" >> \
191 | ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
192 | echo "then" >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
193 | # Check if an input affine is available
194 | echo "trans_affine=${RES_FOLDER}/aff_`expr ${CUR_IT} - 1`/aff_mat_\${name}_it`expr ${CUR_IT} - 1`.txt" >> \
195 | ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
196 | # Set up the registration argument
197 | echo "${AFFINE} ${AFFINE_args} \\" \
198 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
199 | echo "-ref ${averageImage} \\" \
200 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
201 | echo "-flo \${IMG_INPUT[img_number]} \\" \
202 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
203 | echo "-aff ${RES_FOLDER}/aff_${CUR_IT}/aff_mat_\${name}_it${CUR_IT}.txt \\" >> \
204 | ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
205 | result="/dev/null"
206 | if [ "${CUR_IT}" == "${AFF_IT_NUM}" ]
207 | then
208 | result="${RES_FOLDER}/aff_${CUR_IT}/aff_res_\${name}_it${CUR_IT}.nii.gz"
209 | fi
210 | echo "-res ${result} \\" >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
211 | if [ "${TEMPLATE_MASK}" != "" ]; then
212 | echo "-rmask ${TEMPLATE_MASK} \\" \
213 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
214 | fi
215 | if [ ${MASK_NUMBER} == ${IMG_NUMBER} ]; then
216 | echo "-fmask \${IMG_INPUT_MASK[\$img_number]} \\" \
217 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
218 | fi
219 | # If this is the first iteration. The registration is forced to be rigid
220 | # Otherwise the previous affine is used for initialisation
221 | if [ ${CUR_IT} == 1 ]
222 | then
223 | echo "-rigOnly" \
224 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
225 | else
226 | echo "-inaff \${trans_affine}" \
227 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
228 | fi
229 | echo "fi" >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh
230 | ## Run the rigid/affine registration - Submit the job array
231 | # Wait to see if the previous iteration average has been created
232 | ID=$(${SBATCH_CMD} \
233 | --output=${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.out \
234 | --job-name=aladin_${CUR_IT}_${$} \
235 | --array=1-${IMG_NUMBER} \
236 | ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_aladin_${CUR_IT}_${$}.sh)
237 | echo $ID
238 | declare "aladin_${CUR_IT}_${$}=${ID##* }"
239 | fi
240 |
241 | #############################
242 | if [ "${CUR_IT}" != "${AFF_IT_NUM}" ]
243 | then
244 | # The transformation are demean'ed to create the average image
245 | # Note that this is not done for the last iteration step
246 | list_average=""
247 | for img in ${IMG_INPUT[@]}
248 | do
249 | name=`basename ${img} .gz`
250 | name=`basename ${name} .nii`
251 | name=`basename ${name} .hdr`
252 | name=`basename ${name} .img`
253 | list_average="${list_average} \
254 | ${RES_FOLDER}/aff_${CUR_IT}/aff_mat_${name}_it${CUR_IT}.txt ${img}"
255 | done
256 | if [ "`which sbatch 2> /dev/null`" == "" ]
257 | then
258 | # The average is created on the host
259 | echo "reg_average \
260 | ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz \
261 | -demean ${averageImage} \
262 | ${list_average}"
263 | reg_average \
264 | ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz \
265 | -demean ${averageImage} \
266 | ${list_average}
267 | if [ ! -f ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz ]
268 | then
269 | echo "Error when creating \
270 | ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz"
271 | exit
272 | fi
273 | else # if [ "`which qsub 2> /dev/null`" == "" ]
274 | # The average is performed through the cluster
275 |
276 | echo \#\!/bin/bash > ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh
277 | echo "${AVERAGE} \\" \
278 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh
279 | echo "${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz \\" \
280 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh
281 | echo "-demean ${averageImage} \\" \
282 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh
283 | echo "${list_average}" \
284 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh
285 |
286 | ID=$(${SBATCH_CMD} \
287 | --dependency=afterok:$[aladin_${CUR_IT}_${$}] \
288 | --output=${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.out \
289 | --job-name=avg_aff_${CUR_IT}_${$} \
290 | ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh)
291 | echo $ID
292 | declare "avg_aff_${CUR_IT}_${$}=${ID##* }"
293 | fi # if [ "`which qsub 2> /dev/null`" == "" ]
294 | else # if [ "${CUR_IT}" != "${AFF_IT_NUM}" ]
295 | # All the result images are directly averaged during the last step
296 | if [ "`which sbatch 2> /dev/null`" == "" ]
297 | then
298 | reg_average \
299 | ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz \
300 | -avg \
301 | `ls ${RES_FOLDER}/aff_${CUR_IT}/aff_res_*_it${CUR_IT}.nii*`
302 | if [ ! -f ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz ]
303 | then
304 | echo "Error when creating \
305 | ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz"
306 | exit
307 | fi
308 | else # if [ "`which qsub 2> /dev/null`" == "" ]
309 | # The average is performed through the cluster
310 |
311 | echo \#\!/bin/bash > ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh
312 | echo "${AVERAGE} \\" \
313 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh
314 | echo "${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz \\" \
315 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh
316 | echo "-avg ${RES_FOLDER}/aff_${CUR_IT}/aff_res_*_it${CUR_IT}.nii*" \
317 | >> ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh
318 |
319 | ID=$(${SBATCH_CMD} \
320 | --dependency=afterok:$[aladin_${CUR_IT}_${$}] \
321 | --output=${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.out \
322 | --job-name=avg_aff_${CUR_IT}_${$} \
323 | ${RES_FOLDER}/aff_${CUR_IT}/run_gw_niftyReg_avg_aff_${CUR_IT}_${$}.sh)
324 | echo $ID
325 | declare "avg_aff_${CUR_IT}_${$}=${ID##* }"
326 | fi # if [ "`which qsub 2> /dev/null`" == "" ]
327 | fi # if [ "${CUR_IT}" != "${AFF_IT_NUM}" ]
328 | else # if [ ! -f ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz ]
329 | echo "${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz already exists"
330 | fi # if [ ! -f ${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz ]
331 | # Update the average image used as a reference
332 | averageImage=${RES_FOLDER}/aff_${CUR_IT}/average_affine_it_${CUR_IT}.nii.gz
333 | done # Loop over affine iteration
334 |
335 |
336 | #############################################################################
337 | #############################################################################
338 | ### Non rigid registration loop
339 |
340 | for (( CUR_IT=1; CUR_IT<=${NRR_IT_NUM}; CUR_IT++ ))
341 | do
342 |
343 | #############################
344 | # Check if the current average image has already been created
345 | if [ ! -f ${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz ]
346 | then
347 |
348 | #############################
349 | # Create a folder to store the current results
350 | if [ ! -d ${RES_FOLDER}/nrr_${CUR_IT} ]
351 | then
352 | mkdir ${RES_FOLDER}/nrr_${CUR_IT}
353 | fi
354 |
355 | #############################
356 | # Run the nonrigid registrations
357 | if [ "`which sbatch 2> /dev/null`" == "" ]
358 | then
359 | for (( i=0 ; i<${IMG_NUMBER}; i++ ))
360 | do
361 | name=`basename ${IMG_INPUT[${i}]} .gz`
362 | name=`basename ${name} .nii`
363 | name=`basename ${name} .hdr`
364 | name=`basename ${name} .img`
365 | # Check if the registration has already been performed
366 | if [ ! -f ${RES_FOLDER}/nrr_${CUR_IT}/nrr_cpp_${name}_it${CUR_IT}.nii* ]
367 | then
368 | f3d_args=""
369 | # Check if a mask has been specified for the reference image
370 | if [ "${TEMPLATE_MASK}" != "" ]
371 | then
372 | f3d_args="${f3d_args} -rmask ${TEMPLATE_MASK}"
373 | fi
374 | # Check if a mask has been specified for the floating image
375 | if [ ${MASK_NUMBER} == ${IMG_NUMBER} ]
376 | then
377 | f3d_args="${f3d_args} -fmask ${IMG_INPUT_MASK[${i}]}"
378 | fi
379 | if [ ${AFF_IT_NUM} -gt 0 ]
380 | then
381 | f3d_args="${f3d_args} -aff \
382 | ${RES_FOLDER}/aff_${AFF_IT_NUM}/aff_mat_${name}_it${AFF_IT_NUM}.txt"
383 | fi
384 | result="/dev/null"
385 | if [ "${CUR_IT}" == "${NRR_IT_NUM}" ]
386 | then
387 | result="${RES_FOLDER}/nrr_${CUR_IT}/nrr_res_${name}_it${CUR_IT}.nii.gz"
388 | fi
389 | # Perform the registration
390 | reg_f3d ${NRR_args} ${f3d_args} \
391 | -ref ${averageImage} \
392 | -flo ${IMG_INPUT[${i}]} \
393 | -cpp ${RES_FOLDER}/nrr_${CUR_IT}/nrr_cpp_${name}_it${CUR_IT}.nii.gz \
394 | -res ${result} > ${RES_FOLDER}/nrr_${CUR_IT}/nrr_log_${name}_it${CUR_IT}.txt
395 | fi
396 | done
397 | else
398 | echo \#\!/bin/bash > ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
399 |
400 | if [ $[avg_nrr_`expr ${CUR_IT} - 1`_${$}] -ne 0 ]
401 | then
402 | echo "#SBATCH --dependency=afterok:$[avg_aff_${AFF_IT_NUM}_${$}],$[avg_nrr_`expr ${CUR_IT} - 1`_${$}]" \
403 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
404 | else
405 | echo "#SBATCH --dependency=afterok:$[avg_aff_${AFF_IT_NUM}_${$}]" \
406 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
407 | fi
408 |
409 | # Define the current image index
410 | echo "img_number=\`expr \$SLURM_ARRAY_TASK_ID - 1\`" \
411 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
412 | echo ". `readlink -f $1`" \
413 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
414 | # Extract the name of the file without the path and the extension
415 | echo "name=\`basename \${IMG_INPUT[\$img_number]} .gz\`" \
416 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
417 | echo "name=\`basename \$name .nii\`" \
418 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
419 | echo "name=\`basename \$name .hdr\`" \
420 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
421 | echo "name=\`basename \$name .img\`" \
422 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
423 | # Check that the registration has not already been performed
424 | echo "if [ ! -e ${RES_FOLDER}/nrr_${CUR_IT}/nrr_cpp_\${name}_it${CUR_IT}.nii* ]" >> \
425 | ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
426 | echo "then" >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
427 | # Set up the registration argument
428 | echo "${NRR} ${NRR_args} \\" \
429 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
430 | echo "-ref ${averageImage} \\" \
431 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
432 | if [ "${TEMPLATE_MASK}" != "" ]; then
433 | echo "-rmask ${TEMPLATE_MASK} \\" \
434 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
435 | fi
436 | echo "-flo \${IMG_INPUT[\$img_number]} \\" \
437 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
438 | if [ ${AFF_IT_NUM} -gt 0 ]
439 | then
440 | echo "-aff ${RES_FOLDER}/aff_${AFF_IT_NUM}/aff_mat_\${name}_it${AFF_IT_NUM}.txt \\" >> \
441 | ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
442 | fi
443 | if [ ${MASK_NUMBER} == ${IMG_NUMBER} ]; then
444 | echo "-fmask \${IMG_INPUT_MASK[\$img_number]} \\" \
445 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
446 | fi
447 | echo "-cpp ${RES_FOLDER}/nrr_${CUR_IT}/nrr_cpp_\${name}_it${CUR_IT}.nii.gz \\" >> \
448 | ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
449 | result="/dev/null"
450 | if [ "${CUR_IT}" == "${NRR_IT_NUM}" ]
451 | then
452 | result="${RES_FOLDER}/nrr_${CUR_IT}/nrr_res_\${name}_it${CUR_IT}.nii.gz"
453 | fi
454 | echo "-res ${result}" >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
455 | echo "fi" >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh
456 | ## Run the nonrigid registrations - Submit the job array
457 | ID=$(${SBATCH_CMD} \
458 | --output=${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.out \
459 | --job-name=f3d_${CUR_IT}_${$} \
460 | --array=1-${IMG_NUMBER} \
461 | ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_f3d_${CUR_IT}_${$}.sh)
462 | echo $ID
463 | declare "f3d_${CUR_IT}_${$}=${ID##* }"
464 |
465 | fi
466 |
467 | #############################
468 | # The transformation are demean'ed to create the average image
469 | # Note that this is not done for the last iteration step
470 | if [ "${CUR_IT}" != "${NRR_IT_NUM}" ]
471 | then
472 | list_average=""
473 | for img in ${IMG_INPUT[@]}
474 | do
475 | name=`basename ${img} .gz`
476 | name=`basename ${name} .nii`
477 | name=`basename ${name} .hdr`
478 | name=`basename ${name} .img`
479 | list_average="${list_average} \
480 | ${RES_FOLDER}/aff_${AFF_IT_NUM}/aff_mat_${name}_it${AFF_IT_NUM}.txt \
481 | ${RES_FOLDER}/nrr_${CUR_IT}/nrr_cpp_${name}_it${CUR_IT}.nii.gz ${img}"
482 | done
483 | if [ "`which sbatch 2> /dev/null`" == "" ]
484 | then
485 | # The average is created on the host
486 | reg_average \
487 | ${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz \
488 | -demean_noaff ${averageImage} \
489 | ${list_average}
490 | if [ ! -f ${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz ]
491 | then
492 | echo "Error when creating \
493 | ${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz"
494 | exit
495 | fi
496 | else # if [ "`which qsub 2> /dev/null`" == "" ]
497 | # The average is performed through the cluster
498 |
499 | echo \#\!/bin/bash > ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh
500 | echo "${AVERAGE} \\" \
501 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh
502 | echo "${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz \\" \
503 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh
504 | echo "-demean_noaff ${averageImage} \\" \
505 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh
506 | echo "${list_average}" \
507 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh
508 |
509 | ID=$(${SBATCH_CMD} \
510 | --dependency=afterok:$[f3d_${CUR_IT}_${$}] \
511 | --output=${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.out \
512 | --job-name=avg_nrr_${CUR_IT}_${$} \
513 | ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh)
514 | echo $ID
515 | declare "avg_nrr_${CUR_IT}_${$}=${ID##* }"
516 | fi # if [ "`which qsub 2> /dev/null`" == "" ]
517 | else # if [ "${CUR_IT}" != "${NRR_IT_NUM}" ]
518 | # All the result images are directly averaged during the last step
519 | if [ "`which sbatch 2> /dev/null`" == "" ]
520 | then
521 | reg_average \
522 | ${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz \
523 | -avg \
524 | `ls ${RES_FOLDER}/nrr_${CUR_IT}/nrr_res_*_it${CUR_IT}.nii*`
525 | if [ ! -f ${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz ]
526 | then
527 | echo "Error when creating \
528 | ${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz"
529 | exit
530 | fi
531 | else # if [ "`which qsub 2> /dev/null`" == "" ]
532 | # The average is performed through the cluster
533 |
534 | echo \#\!/bin/bash > ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh
535 | echo "${AVERAGE} \\" \
536 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh
537 | echo "${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz \\" \
538 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh
539 | echo "-avg ${RES_FOLDER}/nrr_${CUR_IT}/nrr_res_*_it${CUR_IT}.nii*" \
540 | >> ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh
541 |
542 | ID=$(${SBATCH_CMD} \
543 | --dependency=afterok:$[f3d_${CUR_IT}_${$}] \
544 | --output=${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.out \
545 | --job-name=avg_nrr_${CUR_IT}_${$} \
546 | ${RES_FOLDER}/nrr_${CUR_IT}/run_gw_niftyReg_avg_nrr_${CUR_IT}_${$}.sh)
547 | echo $ID
548 | declare "avg_nrr_${CUR_IT}_${$}=${ID##* }"
549 | fi # if [ "`which qsub 2> /dev/null`" == "" ]
550 | fi # if [ "${CUR_IT}" != "${NRR_IT_NUM}" ]
551 | else # if [ ! -f ${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz ]
552 | echo "${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz already exists"
553 | fi # if [ ! -f ${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz ]
554 | # Update the average image
555 | averageImage=${RES_FOLDER}/nrr_${CUR_IT}/average_nonrigid_it_${CUR_IT}.nii.gz
556 | done
557 | #############################################################################
558 |
--------------------------------------------------------------------------------
/HelperFunctions/matlab/cropPad/ReadMe.md:
--------------------------------------------------------------------------------
1 | # Crop the zero-valued (black) voxels surrounding a volume, optionally keeping a given number of padding voxels around the cropped region
2 |
3 | Author: Da Ma [da_ma@sfu.ca](da_ma@sfu.ca)
4 |
5 | - Padding is yet to be implemented
6 |
7 |
--------------------------------------------------------------------------------
/HelperFunctions/matlab/cropPad/cropPad.m:
--------------------------------------------------------------------------------
function vol = cropPad(vol,padNum)
%% Crop the zero border around a multi-dimensional volume.
% Author: Da Ma (da_ma@sfu.ca)
% vol:    N-dimensional numeric volume
% padNum: number of voxels to keep around the non-zero bounding box
%         (default: 0, i.e. crop tight); the padding is clipped at the
%         original volume boundaries, so no new voxels are created.

% default: crop tight to the non-zero bounding box
if nargin < 2 || isempty(padNum); padNum = 0; end

%%
% get volume size
volSize = size(vol);
% get number of dimension
numDim = numel(volSize);

%% Initialize cropping parameter (one [min,max] pair per dimension)
cropParam = nan(numDim,2);

for dim = 1:numDim
    % find the remaining dimension numbers
    dimRemain = setdiff(1:numDim,dim);
    % indices along 'dim' that contain at least one non-zero voxel
    % (vector-dimension form of 'any', same requirement as the original
    % vecdim call to 'max': R2018b or later)
    nonZeroIdx = find(any(vol>0, dimRemain));
    if isempty(nonZeroIdx)
        % all-zero volume: keep the full extent instead of erroring out
        cropParam(dim,:) = [1, volSize(dim)];
        continue
    end
    % extend the bounding box by padNum voxels, clipped to the volume
    cropParam(dim,1) = max(nonZeroIdx(1)   - padNum, 1);
    cropParam(dim,2) = min(nonZeroIdx(end) + padNum, volSize(dim));
end

%% Crop all dimensions at once with subscript cell-indexing.
% This is correct for any number of dimensions; the original
% shiftdim-based loop collapsed trailing dimensions for ndims > 3 and
% mis-permuted the result when a cropped dimension became singleton.
subs = arrayfun(@(d) cropParam(d,1):cropParam(d,2), 1:numDim, ...
    'UniformOutput', false);
vol = vol(subs{:});
--------------------------------------------------------------------------------
/HelperFunctions/matlab/cropPad/cropPadNii.m:
--------------------------------------------------------------------------------
function cropPadNii(niiIn,niiOut,compressFlag,normalizeFlag,padNum)
%% Crop the zero border around a nifti file and save the result.
% Author: Da Ma (da_ma@sfu.ca)
% niiIn         : input nifti file (.nii or .nii.gz)
% niiOut        : output nifti file; a '.nii.gz' name forces compression
% compressFlag  : whether to compress the output nifti file (Default: false)
% normalizeFlag : whether/how to normalize the input volume
%                 0 (default): do not normalize
%                 1          : normalize intensities to [0,1] (cast to single)
% padNum        : number of voxels to keep around the cropped region
%                 (default: 0, i.e. crop tight to the non-zero bounding box)
%%

% By default, don't compress .nii file to .nii.gz
if ~exist('compressFlag','var'); compressFlag=false;end
% By default, do not normalize the intensities
if ~exist('normalizeFlag','var'); normalizeFlag=0;end
% By default, crop tight with no extra border voxels
if ~exist('padNum','var'); padNum=0;end

%% Read input nifti
disp('loading nifti file ...')
% read nifti volume
vol = niftiread(niiIn);
% read nifti header
disp('read nifti header ...')
niiHead = niftiinfo(niiIn);

%% crop zero pad (forward padNum, which the original version ignored)
disp('cropping nifti volume ...')
vol = cropPad(vol,padNum);

%% normalize
if normalizeFlag == 1
    vol = mat2gray(vol);
    vol = single(vol);
    % NOTE(review): the volume is now single -- confirm niiHead.Datatype
    % is compatible with niftiwrite for normalized output
end

%% update header dimension (support any number of dimensions, not just 3)
niiHead.ImageSize = size(vol);
niiHead.raw.dim(1+(1:ndims(vol))) = size(vol);

%% save nifti
disp('saving nifti file ...')
% determine if need to save as a '.nii.gz' compression file
% (strcmp is robust for both char and string inputs, unlike '==')
[niiPath,niiName,niiExt] = fileparts(niiOut);
[~,~,internalExt] = fileparts(niiName);
if strcmp(internalExt,'.nii') || strcmp(niiExt,'.gz')
    compressFlag = true;
end
savePath = fullfile(niiPath,niiName);
%% save nii
niftiwrite(vol,savePath,niiHead,'Compressed',compressFlag);
49 |
50 |
--------------------------------------------------------------------------------
/HelperFunctions/matlab/cropPad/cropPadNiiBatch.m:
--------------------------------------------------------------------------------
function cropPadNiiBatch(inNiiDir,outNiiDir)
%% Batch-crop the zero border of every nifti file in a folder.
% Author: Da Ma (da_ma@sfu.ca)
% inNiiDir  : folder containing the input .nii/.nii.gz files
% outNiiDir : folder where the cropped files are written (created if missing)
% Files whose output already exists are skipped, mirroring reorientNiiBatch.

% collect both uncompressed and compressed nifti files
targetlist = [dir(fullfile(inNiiDir,'*.nii')); dir(fullfile(inNiiDir,'*.nii.gz'))];

% make sure the output folder exists
if ~exist(outNiiDir,'dir')
    mkdir(outNiiDir);
end

for id = 1:length(targetlist)
    target = targetlist(id);
    outPath = fullfile(outNiiDir,target.name);
    % skip files that have already been processed
    if exist(outPath,'file')
        fprintf('%s exist, skipping ...\n', target.name);
        continue
    end
    cropPadNii(fullfile(target.folder,target.name), outPath);
end
--------------------------------------------------------------------------------
/HelperFunctions/matlab/reorient/reorientNii.m:
--------------------------------------------------------------------------------
% Matlab function to reorient a .nii image and save it under output_folder.
% Requires the Matlab NIfTI toolbox available at:
% (https://www.mathworks.com/matlabcentral/fileexchange/8797-tools-for-nifti-and-analyze-image)
% @author: MA, Da, da_ma@sfu.ca, d.ma.11@ucl.ac.uk

function reorientNii(input,output_folder)

[~,input_name,input_ext]=fileparts(input);
% load the image with maximum tolerance (last argument of load_nii)
A=load_nii(input,'','','','','',1);
% display the image (a figure is opened; closed again at the end)
view_nii(A);
% reorient the image
A=rri_orient(A);

% M=[A.hdr.dime.pixdim(2) A.hdr.dime.pixdim(3) A.hdr.dime.pixdim(4)];
% A=make_nii(A.img,M);

% create the output folder when it does not exist yet
if ~exist(output_folder,'dir')
    mkdir(output_folder);
end
% save reoriented image under its original name
% (fullfile builds the path portably instead of hard-coding '/')
output = fullfile(output_folder, strcat(input_name,input_ext));
save_nii(A,output);
% close the figure opened by view_nii
close(gcf);
--------------------------------------------------------------------------------
/HelperFunctions/matlab/reorient/reorientNiiBatch.m:
--------------------------------------------------------------------------------
function reorientNiiBatch(inNiiDir,outNiiDir)
%% Batch-reorient all '*_degibb.nii' images from inNiiDir into outNiiDir.
% Images whose reoriented output already exists are skipped.
targetlist = dir(fullfile(inNiiDir,'*_degibb.nii'));
for id = 1:length(targetlist)
    target = targetlist(id);
    reoriented_path = fullfile(outNiiDir,target.name);
    % skip images that have already been reoriented
    % (fixed the 'skippping' typo and added the missing newline)
    if exist(reoriented_path,'file')
        fprintf('%s exist, skipping ...\n', target.name);
        continue
    end
    % the original re-assigned target here a second time; removed as redundant
    target_path = fullfile(target.folder, target.name);
    reorientNii(target_path, outNiiDir);
end
--------------------------------------------------------------------------------
/HelperFunctions/python/ReadME.md:
--------------------------------------------------------------------------------
1 | # Python Helper Functions for the `MASMAT` toolbox
2 |
3 | ## Main functions
4 | Main functions are stored in `segmentation_propagation.py`:
5 |
6 | ### `reg_aladin`:
7 | > affine registration using NiftyReg package
8 |
9 | ### `reg_resample`:
10 | > resample nifti image using NiftyReg package
11 |
12 | ### `reorient`
13 | > reorient nifti image using nibabel library
14 |
15 | ### `N4_correction`
16 | > N4 Bias Field Correction using nipype
17 |
18 | ### `N4_correction_slicer`
19 | > N4 Bias Field Correction using 3D Slicer
20 |
21 | ### `N4_correction_itk`
22 | > N4 Bias Field Correction using SimpleITK
23 |
24 | ### `mas_quickcheck`
25 | > generate quickcheck files using FSL
26 |
27 | ### `affine_mask_propagation`
28 | > generate slurm sbatch file for affine mask propagation
29 |
30 | ### `affine_label_fusion`
31 | > [SLURM] affine label fusion (after slurm_affine_mask_propagation)
32 |
33 | ### `nonrigid_label_propagation`
34 | > [SLURM] nonrigid label fusion
35 |
36 | ### `nonrigid_label_fusion`
37 | > [SLURM] nonrigid label/brain-mask fusion (after slurm_nonrigid_label_propagation)
38 |
39 | ### `extract_label_volumes`
40 | > extract label volumes from nifti files of segmentation labels
41 |
42 |
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/HelperFunctions/python/bash_function_generators.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import os
3 |
4 | #%% function to write to a sbatch file
def get_default_conda_env(conda_env_sh):
    """Return the shell lines that activate the default conda environment.

    conda_env_sh: path of the shell script that sets up conda.
    Returns a newline-terminated multi-line string ready to embed in a
    job script.
    """
    activation_lines = [
        '## activate the virtual environment',
        f'source {conda_env_sh}',
        'conda_init > /dev/null 2>&1',
        'conda deactivate > /dev/null 2>&1',
        'conda activate fastai > /dev/null 2>&1',
        '',  # trailing newline, matching the original literal
    ]
    return '\n'.join(activation_lines)
13 |
14 | #%%
15 | def generate_slurm_boilerplate(time="3:00:00", ntasks=1, account='rrg-mfbeg-ad', mem=8000, singleline=False, **kwargs):
16 | '''
17 | Generate slurm sbatch job submission biolerplate
18 | kwargs:
19 | - mem-per-cpu="8000"
20 | - array="1-4"
21 | -------------
22 | bol: begin-of-line
23 | eol: end-of-line
24 | '''
25 |
26 | if singleline is False:
27 | sbatch_str = "#!/bin/bash\n"
28 | bol = '#SBATCH'
29 | eol = '\n'
30 | else:
31 | sbatch_str = "sbatch"
32 | bol = ''
33 | eol = ' '
34 |
35 | # if speficied account as None, use user's own linux group
36 | if account is None:
37 | account = os.getenv('USER')
38 |
39 | args = ['time','ntasks','account', 'mem']
40 | vals = [ time , ntasks , account , mem]
41 | #% add default sbatch arguments (convert '_' to '-')
42 | for i,arg in enumerate(args):
43 | sbatch_str += f"{bol} --{arg.replace('_','-')}={vals[i]}{eol}"
44 | #% add other sbatch arguments
45 | for key in kwargs.keys():
46 | sbatch_str += (f"{bol} --{key.replace('_','-')}={kwargs[key]}{eol}")
47 |
48 | # sbatch_str += eol
49 | return sbatch_str
50 |
def generate_slurm_conda_boilerplate(conda_env_sh='', **kwargs):
    '''Generate the slurm sbatch boilerplate followed by the conda activation lines.

    - conda_env_sh: path of the shell script that sets up conda, forwarded to
      get_default_conda_env. (The original called get_default_conda_env() with
      no argument, which always raised a TypeError since the argument is
      required.)
    - kwargs: forwarded to generate_slurm_boilerplate.
    '''
    sbatch_str = generate_slurm_boilerplate(**kwargs) + get_default_conda_env(conda_env_sh)
    return sbatch_str
54 |
55 | #%%
def write_slurm_script(cmd_lines, cmd_path, slurm=False, conda=False, conda_env_sh='', **kwargs):
    '''Write command lines to a (slurm sbatch) shell script.

    Parameters:
    - cmd_lines: iterable of command lines; a plain string also works (its
      content is written back unchanged)
    - cmd_path: path of the script file to create (overwritten if present)
    - slurm: prepend the slurm sbatch boilerplate (kwargs are forwarded)
    - conda: prepend the conda activation lines
    - conda_env_sh: conda setup script path, needed when conda=True (the
      original called get_default_conda_env() with no argument, which raised
      a TypeError)
    '''
    with open(cmd_path, "w") as f:
        if slurm is True:
            f.write(generate_slurm_boilerplate(**kwargs))
        if conda is True:
            f.write(get_default_conda_env(conda_env_sh))
        # writelines accepts any iterable of strings (and writes a plain
        # string unchanged, like the original per-item loop did)
        f.writelines(cmd_lines)
64 |
65 |
--------------------------------------------------------------------------------
/HelperFunctions/python/segmentation_propagation.py:
--------------------------------------------------------------------------------
1 | import os, subprocess, multiprocessing, shutil
2 | import nipype, pandas as pd
3 | import bash_function_generators as slurm
4 | from pathlib import Path
5 | import SimpleITK as sitk
6 |
7 | #%% Change this to your local location that store MASHelperFunctions.sh
8 | # mas_helpfunctions_path = f'../../MASHelperFunctions.sh'
9 | mas_helpfunctions_path = f'{os.getenv("HOME")}/Codes/Github/multi-atlas-segmentation/MASHelperFunctions.sh'
10 |
11 | #%%
def reg_aladin(ref_file, flo_file, res_file, aff_file=None, fmask_file=None, verbose=False, n_cpu=None, args='', **kwargs):
    ''' affine registration using NiftyReg package
    > ref: https://nipype.readthedocs.io/en/latest/api/generated/nipype.interfaces.niftyreg.regutils.html

    Parameters
    ----------
    ref_file / flo_file / res_file : reference, floating and result image paths
    aff_file : output affine transformation file (optional)
    fmask_file : floating-image mask file (optional)
    n_cpu : number of OpenMP cores; defaults to all available cores
    args : extra command-line arguments passed verbatim to reg_aladin

    Returns
    -------
    The configured (not yet run) nipype RegAladin node.
    '''
    node = nipype.interfaces.niftyreg.RegAladin()
    node.inputs.ref_file = ref_file
    node.inputs.flo_file = flo_file
    node.inputs.res_file = res_file

    if aff_file is not None:
        node.inputs.aff_file = aff_file
    if fmask_file is not None:
        node.inputs.fmask_file = fmask_file
    if args is not None:
        node.inputs.args = args  # ' '.join([arg for arg in args])
    # honour an explicit n_cpu; fall back to all cores otherwise
    # (the original only set omp_core_val when n_cpu was None, silently
    # ignoring a caller-supplied value)
    node.inputs.omp_core_val = multiprocessing.cpu_count() if n_cpu is None else n_cpu
    if verbose is True: print(node.cmdline)

    return node
39 |
40 | #%%
def reg_resample(ref_file, flo_file, trans_file, res_file, inter=0, verbose=False, n_cpu=None, args='', **kwargs):
    '''resample a nifti image using the NiftyReg package
    -inter
       Interpolation order (0, 1, 3, 4)[3] (0=NN, 1=LIN; 3=CUB, 4=SINC)

    Parameters
    ----------
    ref_file / flo_file : reference and floating image paths
    trans_file : transformation file to apply
    res_file : output (resampled) image path
    n_cpu : number of OpenMP cores; defaults to all available cores
    args : extra command-line arguments passed verbatim to reg_resample

    Returns
    -------
    The configured (not yet run) nipype RegResample node.
    '''
    node = nipype.interfaces.niftyreg.RegResample()
    node.inputs.ref_file = ref_file
    node.inputs.flo_file = flo_file
    node.inputs.trans_file = trans_file
    node.inputs.out_file = res_file
    node.inputs.inter = inter

    if args is not None:
        node.inputs.args = args  # ' '.join([arg for arg in args])
    # honour an explicit n_cpu; fall back to all cores otherwise
    # (the original only set omp_core_val when n_cpu was None, silently
    # ignoring a caller-supplied value)
    node.inputs.omp_core_val = multiprocessing.cpu_count() if n_cpu is None else n_cpu
    if verbose is True: print(node.cmdline)

    return node
69 |
def reorient(src_fname, dest_fname, old_orient="PIR", new_orient="RAS", verbose=False):
    '''reorient a nifti image from old_orient to new_orient and save it.

    The reoriented volume is written to dest_fname with the source image's
    affine and header; nothing is done when dest_fname already exists.
    '''
    # nibabel is not imported at module level in this file; import it here so
    # the function does not raise a NameError on 'nib'
    import nibabel as nib
    # skip if result file already exist
    if os.path.isfile(dest_fname):
        if verbose==True: print(f" -- {dest_fname} exist, skipping ...")
        return
    # load the raw volume
    vol_nii = nib.load(src_fname)
    # reorient the image
    # NOTE(review): 'image_io' is not imported or defined anywhere in this
    # file, so this call raises a NameError -- confirm which module provides
    # reorient_vol and import it accordingly
    vol_reorient = image_io.reorient_vol(vol_nii.get_fdata(), old_orient,new_orient)
    # save the reoriented images
    #%% save reoriented images # https://bic-berkeley.github.io/psych-214-fall-2016/saving_images.html
    vol_reorient_nii = nib.Nifti1Image(vol_reorient, vol_nii.affine, vol_nii.header)
    vol_reorient_nii.to_filename(dest_fname)
    return
84 |
def N4_correction(input_fname, n4_fname, mask_fname=None, exe=True, verbose=True, bspline_fitting_distance=20, bspline_order=4, shrink_factor=4, n_iterations = [150,100,80,50], convergence_threshold = 1e-12, **kwargs):
    '''N4 Bias Field Correction using nipype
    # Ref: https://nipype.readthedocs.io/en/latest/api/generated/nipype.interfaces.ants.segmentation.html
    # Parameter options: https://www.programcreek.com/python/example/122771/nipype.interfaces.ants.N4BiasFieldCorrection
    # kwarg example:
       n4.inputs: dimension = 3
       n4.inputs: bspline_fitting_distance = 10
       n4.inputs: bspline_order = 4
       n4.inputs: shrink_factor = 3
       n4.inputs: n_iterations = [150,100,50,30]
       n4.inputs: convergence_threshold = 1e-11
    Returns the assembled command line (or None when n4_fname already exists).
    '''
    from nipype.interfaces.ants import N4BiasFieldCorrection
    # nothing to do when the corrected image is already on disk
    if os.path.isfile(n4_fname):
        if verbose==True: print(f" -- {n4_fname} exist, skipping ...")
        return

    n4 = N4BiasFieldCorrection()
    # collect every input trait in one mapping, then assign in a single pass
    settings = {
        'input_image': input_fname,
        'output_image': n4_fname,
        'bspline_fitting_distance': bspline_fitting_distance,
        'bspline_order': bspline_order,
        'shrink_factor': shrink_factor,
        'n_iterations': n_iterations,
        'convergence_threshold': convergence_threshold,
    }
    # use mask if specified
    if mask_fname is not None:
        settings['mask_image'] = mask_fname
    # caller-supplied keywords are applied last, as in the original
    settings.update(kwargs)
    for trait_name, trait_value in settings.items():
        setattr(n4.inputs, trait_name, trait_value)

    if exe == True:
        n4.run()

    return n4.cmdline
122 |
def N4_correction_slicer(input_fname, n4_fname, mask_fname=None, exe=True, verbose=False):
    '''N4 Bias Field Correction using the 3D Slicer interface of nipype
    # Ref: https://nipype.readthedocs.io/en/latest/api/generated/nipype.interfaces.slicer.filtering.n4itkbiasfieldcorrection.html
    '''
    # the interface class lives inside the n4itkbiasfieldcorrection module;
    # the original instantiated the module itself, which raised a TypeError
    from nipype.interfaces.slicer.filtering.n4itkbiasfieldcorrection import N4ITKBiasFieldCorrection
    # skip if result file already exist
    if os.path.isfile(n4_fname):
        if verbose==True:
            print(f" -- {n4_fname} exist, skipping ...")
        return
    n4 = N4ITKBiasFieldCorrection()
    n4.inputs.inputimage = input_fname
    # write the corrected image to n4_fname
    # (the original set outputimage to input_fname, overwriting the input)
    n4.inputs.outputimage = n4_fname

    # mask -- NOTE(review): the slicer interface names this trait 'maskimage',
    # not 'mask_image'; confirm against the installed nipype version
    if mask_fname is not None: n4.inputs.maskimage = mask_fname

    if exe == True: n4.run()
    return n4.cmdline
142 |
def N4_correction_itk(input_fname, n4_fname, mask_fname=None, exe=True, verbose=False, image_type=sitk.sitkFloat64, mask_type=sitk.sitkUInt8):
    '''N4 bias field correction with SimpleITK
    # Parameter options: https://www.programcreek.com/python/example/122771/nipype.interfaces.ants.N4BiasFieldCorrection
    Returns the absolute path of the corrected image (or None when it
    already exists).
    '''
    import SimpleITK as sitk
    # skip if result file already exist
    if os.path.isfile(n4_fname):
        if verbose==True: print(f" -- {n4_fname} exist, skipping ...")
        return

    input_image = sitk.ReadImage(input_fname, outputPixelType=image_type)
    input_image = sitk.Cast(input_image,sitk.sitkFloat32)

    # run the correction once, with a mask when provided
    # (the original additionally ran N4BiasFieldCorrectionImageFilter here,
    # correcting the image a second time, discarding that result, and raising
    # a NameError on 'mask_image' whenever mask_fname was None)
    if mask_fname == None:
        output_image = sitk.N4BiasFieldCorrection(input_image)
    else:
        mask_image = sitk.ReadImage(mask_fname, outputPixelType=mask_type)
        output_image = sitk.N4BiasFieldCorrection(input_image, mask_image)

    sitk.WriteImage(output_image, n4_fname)
    return os.path.abspath(n4_fname)
167 |
168 | #%% ===================
169 |
def mas_quickcheck(bg_img, qc_dir, qc_filename=None, overlay_img="''", exe=True, exe_mode='local', job_dir=None, **kwargs):
    '''generate quickcheck files

    - bg_img: background image; its basename is the default qc_filename
    - overlay_img: overlay image path ("''" passes an empty bash argument)
    - exe_mode: 'local' runs the command; 'slurm' writes an sbatch script
    - kwargs: forwarded to slurm.write_slurm_script in slurm mode (the
      original body referenced **kwargs without declaring it, raising a
      NameError in that branch)
    Returns (mas_quickcheck_cmd, cmd_path); cmd_path is None unless an
    sbatch script was written.
    '''
    # default quickcheck name: background image basename without extension
    if qc_filename == None:
        bg_name = ".".join(bg_img.split('/')[-1].split('.')[:-1])
        qc_filename = f"{bg_name}"
    # MASHelpfunction-specific lines
    src_line = f'source {mas_helpfunctions_path} > /dev/null'
    mas_quickcheck_cmd = f"{src_line}; mas_quickcheck {bg_img} {overlay_img} {qc_dir} {qc_filename}"
    cmd_path = None

    if exe == True:
        if exe_mode == 'local':
            returned_value = subprocess.call(mas_quickcheck_cmd, shell=True)
            print('returned value:', returned_value)
        elif exe_mode == 'slurm':
            if job_dir is None:
                job_dir = Path(qc_dir)/'job'
            job_out_dir = f"{job_dir}/output"
            Path(job_out_dir).mkdir(exist_ok=True, parents=True)
            cmd_path = f'{job_dir}/{qc_filename}_mask_labelfusion.sh'
            slurm_output=f"{job_out_dir}/{qc_filename}_%j.out\n"
            slurm_error=f"{job_out_dir}/{qc_filename}_%j.error\n"
            print(f"=== writing cmd to {cmd_path} ===")
            slurm.write_slurm_script(mas_quickcheck_cmd, cmd_path, slurm=True,
                                     output=slurm_output, error=slurm_error, **kwargs)

    return mas_quickcheck_cmd, cmd_path
199 |
200 |
# [slurm] affine mask propagation
def affine_mask_propagation(target_dir, target_id, atlas_dir, result_dir, job_dir=None, verbose=False, mas_helpfunctions_path=mas_helpfunctions_path, affine_param='', **kwargs):
    '''generate slurm sbatch file for affine mask propagation
    - target_dir / target_id: folder and id of the target image
    - atlas_dir: atlas folder containing a 'template' sub-folder
    - result_dir: output folder passed to mas_masking
    - job_dir: when given, the sbatch script and its output folder are
      written there
    Returns (slurm_cmd_path, slurm_cmd).
    '''
    # get template list (file names without extension)
    templatelist = os.listdir(f'{atlas_dir}/template/')
    templatelist = [t.split('.')[0] for t in templatelist]

    # initialize slurm cmd; the job array is 0-based, so the last index is
    # len(templatelist)-1 (the original used len(templatelist), which spawned
    # one extra task whose atlas_id was empty)
    slurm_cmd = slurm.generate_slurm_boilerplate(array=f'0-{len(templatelist)-1}', **kwargs)
    if job_dir is not None:
        job_out_dir = f"{job_dir}/output"
        Path(job_out_dir).mkdir(exist_ok=True, parents=True)
        slurm_cmd += f"#SBATCH --output={job_out_dir}/{target_id}_%j_%a.out\n"
        slurm_cmd += f"#SBATCH --error={job_out_dir}/{target_id}_%j_%a.error\n\n"

    # MASHelpfunction-specific lines
    src_line = f'source {mas_helpfunctions_path} > /dev/null\n\n'
    slurm_cmd += src_line

    # job array: each task processes one atlas template
    # (templatelist entries were already stripped of their extensions above)
    templatelist_str = ' '.join(templatelist)
    slurm_cmd += f"templatelist=({templatelist_str})\n"
    slurm_cmd += "atlas_id=${templatelist[$SLURM_ARRAY_TASK_ID]}\n"

    # command line
    slurm_cmd += f"mas_masking -T {target_dir} -t {target_id} -A {atlas_dir} -a $atlas_id -r {result_dir} -f {affine_param}"

    # print command
    if verbose is True:
        print(slurm_cmd)

    # write command
    slurm_cmd_path = None
    if job_dir is not None:
        slurm_cmd_path = f'{job_dir}/{target_id}_affine_mask.sh'
        slurm.write_slurm_script(slurm_cmd, slurm_cmd_path)

    return slurm_cmd_path, slurm_cmd
244 |
245 |
246 | #%% ===================
247 | # [slurm] affine label/mask fusion
def affine_label_fusion(target_dir, target_id, atlas_dir, result_dir, exe_mode='local', parallel=False, job_dir=None, verbose=False, mas_helpfunctions_path=mas_helpfunctions_path, **kwargs):
    '''[SLURM] affine label fusion (after slurm_affine_mask_propagation)
    parallel: (only for local run)
    - True: call subprocess.Popen to run cmds in parallel
    - False: call subprocess.call to run cmds in sequential
    Returns the command string in local mode, or (command, cmd_path) in
    slurm mode.
    '''
    # MASHelpfunction-specific lines
    src_line = f'source {mas_helpfunctions_path} > /dev/null'

    mas_masking_fusion_cmd = f"{src_line}; mas_masking_fusion {target_dir} {target_id} {result_dir} {atlas_dir}"

    # print command
    if verbose is True:
        print(mas_masking_fusion_cmd)

    # execute the command
    if exe_mode == 'local':
        print("=== running locally ===")
        if parallel is True:
            returned_value = subprocess.Popen(mas_masking_fusion_cmd, shell=True)
        else: # run things in sequential order
            returned_value = subprocess.call(mas_masking_fusion_cmd, shell=True)
        print('returned value:', returned_value)
        return mas_masking_fusion_cmd
    elif exe_mode == 'slurm':
        cmd_path = None
        # default the job folder under the result folder, consistent with
        # nonrigid_label_fusion (the original used job_dir unchecked and
        # created a literal "None/output" folder when it was not supplied)
        if job_dir is None:
            job_dir = Path(result_dir)/'jobs'
        job_out_dir = f"{job_dir}/output"
        Path(job_out_dir).mkdir(exist_ok=True, parents=True)
        cmd_path = f'{job_dir}/{target_id}_mask_labelfusion.sh'
        slurm_output=f"{job_out_dir}/{target_id}_%j.out\n"
        slurm_error=f"{job_out_dir}/{target_id}_%j.error\n"
        print(f"=== writing cmd to {cmd_path} ===")
        slurm.write_slurm_script(mas_masking_fusion_cmd, cmd_path, slurm=True,
                                 output=slurm_output, error=slurm_error, **kwargs)
        return mas_masking_fusion_cmd, cmd_path
285 |
286 |
287 | #%% =================
288 | # non-rigid label propagation
def nonrigid_label_propagation(target_dir, target_id, target_mask, atlas_dir, result_dir, exe_mode='slurm', job_dir=None, verbose=False, mas_helpfunctions_path=mas_helpfunctions_path, **kwargs):
    '''[SLURM] nonrigid label propagation (one job-array task per atlas template)
    - target_dir / target_id: folder and id of the target image
    - target_mask: target brain mask passed to mas_mapping
    - atlas_dir: atlas folder containing a 'template' sub-folder
    - job_dir: when given, the sbatch script and its output folder are
      written there
    Returns (slurm_cmd_path, slurm_cmd).
    '''
    # get template list (file names without extension)
    templatelist = os.listdir(f'{atlas_dir}/template/')
    templatelist = [t.split('.')[0] for t in templatelist]

    # initialize slurm cmd; the job array is 0-based, so the last index is
    # len(templatelist)-1 (the original used len(templatelist), which spawned
    # one extra task whose atlas_id was empty)
    slurm_cmd = slurm.generate_slurm_boilerplate(array=f'0-{len(templatelist)-1}', **kwargs)
    if job_dir is not None:
        job_out_dir = f"{job_dir}/output"
        Path(job_out_dir).mkdir(exist_ok=True, parents=True)
        slurm_cmd += f"#SBATCH --output={job_out_dir}/{target_id}_%j_%a.out\n"
        slurm_cmd += f"#SBATCH --error={job_out_dir}/{target_id}_%j_%a.error\n\n"

    # MASHelpfunction-specific lines
    src_line = f'source {mas_helpfunctions_path} > /dev/null\n\n'
    slurm_cmd += src_line

    # job array: each task processes one atlas template
    # (templatelist entries were already stripped of their extensions above)
    templatelist_str = ' '.join(templatelist)
    slurm_cmd += f"templatelist=({templatelist_str})\n\n"
    slurm_cmd += "atlas_id=${templatelist[$SLURM_ARRAY_TASK_ID]}\n\n"

    # command line
    slurm_cmd += f"mas_mapping -T {target_dir} -t {target_id} -m {target_mask} -A {atlas_dir} -a $atlas_id -r {result_dir}"

    # print command
    if verbose is True:
        print(slurm_cmd)

    # write command
    slurm_cmd_path = None
    if job_dir is not None:
        Path(job_dir).mkdir(exist_ok=True, parents=True)
        slurm_cmd_path = f'{job_dir}/{target_id}_nonrigid_label.sh'
        slurm.write_slurm_script(slurm_cmd, slurm_cmd_path)

    return slurm_cmd_path, slurm_cmd
328 |
329 | #%% ===================
# [slurm] non-rigid label fusion
def nonrigid_label_fusion(target_dir, target_id, atlas_name, atlas_list, result_dir, target_mask=None, exe_mode='local', execution=True, parallel=False, job_dir=None, mas_helpfunctions_path=mas_helpfunctions_path, verbose=False, **kwargs):
    '''[SLURM/local] non-rigid label fusion (run after nonrigid_label_propagation).

    Builds a ``mas_fusion`` command and either runs it locally or writes it as
    a SLURM job script.

    Parameters:
        target_dir: directory containing the target image
        target_id: target image id (file name without extension)
        atlas_name: atlas name passed to mas_fusion -A
        atlas_list: template list passed to mas_fusion -a
        result_dir: directory for the fusion result
        target_mask: optional target mask (appended as -m)
        exe_mode: 'local' to run now, 'slurm' to write a job script
        execution: when exe_mode='local', actually run the command if True
        parallel: when running locally, use non-blocking Popen if True
        job_dir: job-script directory (default: {result_dir}/jobs)
        mas_helpfunctions_path: path of MASHelperFunctions.sh to source
        verbose: print the command when True
        **kwargs: forwarded to slurm.write_slurm_script

    Returns:
        local mode with execution: (slurm_cmd, returned_value)
        otherwise: (cmd_path, slurm_cmd), cmd_path is None unless a script
        was written
    '''
    # MASHelpfunction-specific lines
    src_line = f'source {mas_helpfunctions_path} > /dev/null'

    slurm_cmd = f"{src_line}; mas_fusion -T {target_dir} -t {target_id} -A {atlas_name} -a {atlas_list} -r {result_dir}"
    if target_mask is not None:
        slurm_cmd += f" -m {target_mask}"

    # bugfix: cmd_path was only assigned inside the 'slurm' branch, so the
    # final return raised NameError for exe_mode='local' with execution=False
    cmd_path = None
    if exe_mode == 'local':
        if execution == True:
            print("=== running locally ===")
            if parallel == True:
                # non-blocking: leave the fusion running in the background
                returned_value = subprocess.Popen(slurm_cmd, shell=True)
            else:
                # blocking call (bugfix: a truthy non-bool `parallel` left
                # returned_value unbound in the original if/elif)
                returned_value = subprocess.call(slurm_cmd, shell=True)
            print('returned value:', returned_value)
            return slurm_cmd, returned_value
    elif exe_mode == 'slurm':
        if job_dir is None:
            job_dir = Path(result_dir)/'jobs'
        job_out_dir = f"{job_dir}/output"
        Path(job_out_dir).mkdir(exist_ok=True, parents=True)
        cmd_path = f'{job_dir}/{target_id}_labelfusion.sh'
        slurm_output = f"{job_out_dir}/{target_id}_%j.out\n"
        slurm_error = f"{job_out_dir}/{target_id}_%j.error\n"
        print(f"=== writing cmd to {cmd_path} ===")
        # bugfix: write the script to cmd_path (the path that is returned);
        # the original wrote to a stale '{target_id}_affine_mask.sh' name, so
        # the returned path was never created
        slurm.write_slurm_script(slurm_cmd, cmd_path, slurm=True,
                                 output=slurm_output, error=slurm_error, **kwargs)

    if verbose is True:
        print(slurm_cmd)

    return cmd_path, slurm_cmd
365 |
366 |
367 |
def extract_label_volumes(label_dir, targetlist, vol_dir, vol_csv_fname, ext='.nii.gz', tmp_subdir="tmp", structure_list=None):
    '''Extract per-label volumes for a list of targets into one collated csv.

    For each target id, NiftySeg ``seg_stats -Vl`` writes the label volumes to
    an individual csv; each is appended to the master csv as one row
    "<target_id>,<volumes...>", which is then loaded into a DataFrame.

    Parameters:
        label_dir: directory containing the label images
        targetlist: iterable of target ids (file names without extension)
        vol_dir: directory where the collated csv is written
        vol_csv_fname: file name of the collated csv
        ext: label image extension (default '.nii.gz')
        tmp_subdir: temp sub-directory holding the individual csv files
        structure_list: optional column names; either a sequence of names, or
            the path of a csv with a 'structure_name' column

    Returns:
        pandas.DataFrame indexed by target id (columns named when
        structure_list is given)
    '''
    import shutil  # local import: only needed for the final cleanup

    # make directory for individual volume csv
    vol_individuals = f"{vol_dir}/{tmp_subdir}"
    Path(vol_individuals).mkdir(exist_ok=True, parents=True)
    # remove result file if it already exists (rows are appended below)
    vol_csv = f"{vol_dir}/{vol_csv_fname}"
    if os.path.isfile(vol_csv):
        os.remove(vol_csv)

    # add individual volumes one target at a time
    for target_id in targetlist:
        vol_csv_individual = f"{vol_individuals}/{target_id}.csv"
        # extract the volumes of all labels for this target
        cmd = f"seg_stats {label_dir}/{target_id}{ext} -Vl {vol_csv_individual}"
        subprocess.call(cmd, shell=True)
        # append "<target_id>,<volumes>" to the master csv
        cmd = f'echo -e "{target_id},$(cat {vol_csv_individual})" >> {vol_csv}'
        subprocess.call(cmd, shell=True)

    # read structure list if it's a file path
    if isinstance(structure_list, (str, Path)):
        structure_list = pd.read_csv(structure_list).structure_name
    if structure_list is not None:
        # bugfix: apply the structure names whenever they are provided; the
        # original only named the columns when structure_list was a path,
        # silently ignoring a plain sequence of names
        volume_df = pd.read_csv(vol_csv, names=structure_list, header=None, index_col=0)
        volume_df.to_csv(vol_csv)
    else:
        volume_df = pd.read_csv(vol_csv, header=None, index_col=0)

    # bugfix: remove the temp folder including the per-target csv files;
    # os.rmdir only handles empty directories and raised OSError here
    shutil.rmtree(vol_individuals)
    return volume_df
403 |
--------------------------------------------------------------------------------
/HelperFunctions/python/utils.py:
--------------------------------------------------------------------------------
1 | # %%
2 | import os
3 | import numpy as np
4 | import nibabel as nib
5 |
6 | # %%
def get_nii_orientation(target):
    '''Return the axis-code orientation string (e.g. 'RAS') of a nifti file.'''
    best_affine = nib.load(target).header.get_best_affine()
    axcodes = nib.orientations.aff2axcodes(best_affine)
    return ''.join(axcodes)
12 |
13 | # %%
def reorient_vol(vol: np.ndarray, old_orient, new_orient='LPS'):
    '''Reorient a volume from one axis-code orientation to another.

    Note: the original annotated old_orient/new_orient as ``(str, tuple)``,
    which is a tuple expression, not a valid type; the accepted formats are
    documented here instead.

    Parameters:
        vol: image volume to reorient
        old_orient: current orientation, as an axis-code string ('LPS') or
            tuple ('L', 'P', 'S')
        new_orient: desired orientation, same formats (default 'LPS')

    Returns:
        np.ndarray: the reoriented volume
    '''
    # convert source/destination axcodes into orientation arrays
    # (expressed with respect to the standard RAS axes); tuple() accepts
    # both the string and tuple input forms
    src_ornt = nib.orientations.axcodes2ornt(tuple(old_orient))
    dest_ornt = nib.orientations.axcodes2ornt(tuple(new_orient))
    # derive the transform array taking src_ornt to dest_ornt
    ornt_trans = nib.orientations.ornt_transform(src_ornt, dest_ornt)
    # apply the orientation transform on the loaded volume
    vol_reoriented = nib.orientations.apply_orientation(vol, ornt_trans)
    return vol_reoriented
29 |
def reorient_nii(src_fname, dest_fname, old_orient="PIR", new_orient="RAS", verbose=False):
    '''Reorient a nifti file and save the result; skipped when dest_fname exists.'''
    # skip if the result file already exists
    if os.path.isfile(dest_fname):
        if verbose==True: print(f" -- {dest_fname} exist, skipping ...")
        return
    # load the raw volume
    src_nii = nib.load(src_fname)
    # reorient the image data array
    reoriented = reorient_vol(src_nii.get_fdata(), old_orient, new_orient)
    # save the reoriented image, keeping the original affine and header
    # (see https://bic-berkeley.github.io/psych-214-fall-2016/saving_images.html)
    out_nii = nib.Nifti1Image(reoriented, affine=src_nii.affine, header=src_nii.header)
    out_nii.to_filename(dest_fname)
    return
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 dancebean
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MBSHelperFunctions.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ##########################
4 | # Multi-Brain-Seperation #
5 | # Author: Da MA #
6 | # da_ma@sfu.ca #
7 | # d.ma.11@ucl.ac.uk #
8 | ##########################
9 |
10 | source MASHelperFunctions.sh > /dev/null 2>&1
11 |
function getOrientation(){
	# Print the orientation string of an image, parsed from mri_info output.
	# Usage: getOrientation [image]

	if [ "$#" -lt "1" ]; then
		return 1
	fi
	IMG=$1

	# keep the "Orientation" line, then strip everything up to the last space
	ORIENT=$(mri_info $IMG | grep Orientation)
	ORIENT=${ORIENT##*\ }
	echo $ORIENT
}
25 |
function convert_dcm_to_nifti_batch(){
	# Batch-convert dicom folders to gzipped nifti with dcm2niix_afni.
	# Each line of the list file is "id,dicom_input_dir".
	# bugfix: the list file is now an explicit argument - the original
	# iterated an undefined global $targetlist_raw, required 3 arguments
	# while documenting only 2, and immediately shadowed its $1.
	local function_name=${FUNCNAME[0]}
	if [[ $# -lt 2 ]]; then
		echo "Usage: $function_name [targetlist_raw] [dcm_nii_dir]"
		return 1
	fi

	local targetlist_raw=$1
	local dcm_nii_dir=$2

	local line
	for line in $(cat $targetlist_raw); do
		local id=$(echo $line | cut -d',' -f1)
		local input_dir=$(echo $line | cut -d',' -f2)

		# -z i: gzip via internal compressor; -f $id: name output after the id
		dcm2niix_afni -9 -f $id -t -o $dcm_nii_dir -v 1 -z i $input_dir
		# move the converted file in case it was written next to the input
		mv $input_dir/$id.nii.gz $dcm_nii_dir
	done
}
45 |
# Separate a single scan containing several brains into one image per brain,
# by thresholding/eroding a multi-brain mask and peeling off the largest
# connected component repeatedly.
function multi_brain_seperation(){
	local function_name=${FUNCNAME[0]}
	if [[ $# -lt 3 ]]; then
		echo "Usage: $function_name [target_dir] [target_id] [result_dir] (Optional) [brain_no] [threshold]"
		return 1
	fi

	local target_dir=$1
	local target_id=$2
	local result_dir=$3
	## Number of brains to separate
	local brain_no
	if [[ ! -z $4 ]]; then
		brain_no=$4
	else
		# default number of brains = 3
		brain_no=3
	fi
	## Threshold value to extract the brain region
	if [[ ! -z $5 ]]; then
		local thr=$5
	else
		# default intensity threshold = 5000
		local thr=5000
	fi

	echo "target_dir=$target_dir"
	echo "target_id=$target_id"
	echo "result_dir=$result_dir"

	# morphology radii: erode to break thin bridges between brains, dilate
	# back and fill holes, then a final erode
	local ero=2
	local dil=4
	local ero2=2
	# scratch directory; ${RANDOM} avoids collisions between concurrent runs
	local tmp_dir=$result_dir/tmp_${RANDOM}
	mkdir -p $tmp_dir
	# create multi-brain mask: threshold, binarize, erode/dilate/fill/erode
	seg_maths $target_dir/$target_id -thr $thr -bin -ero $ero -dil $dil -fill -ero $ero2 $tmp_dir/${target_id}_multimask_1.nii.gz
	local i=1
	# peel off one brain per iteration (inside [[ ]], a bare `i` with -le is
	# evaluated arithmetically, so this reads the counter)
	while [[ i -le $brain_no ]]; do
		echo "extract ${i}th brain"
		# extract the ${i}th mask: -lconcomp keeps the largest connected component
		seg_maths $tmp_dir/${target_id}_multimask_$i.nii.gz -lconcomp $tmp_dir/${target_id}_mask_$i.nii.gz
		# apply the (slightly dilated) mask to the original image intensities
		seg_maths $tmp_dir/${target_id}_mask_$i.nii.gz -dil 1 -mul $target_dir/$target_id $result_dir/${target_id}_${i}.nii.gz
		# subtract the ${i}th extracted mask to form the next multimask
		if [[ i -lt $brain_no ]]; then
			# echo "substract ${i}th extracted mask (-sub)"
			seg_maths $tmp_dir/${target_id}_multimask_$i.nii.gz -sub $tmp_dir/${target_id}_mask_$i.nii.gz $tmp_dir/${target_id}_multimask_$(( i + 1 )).nii.gz
		fi
		i=$(( $i + 1 ))
	done
	rm -rf $tmp_dir
}
99 |
function multi_brain_seperation_batch(){
	# Run multi_brain_seperation for every target id listed in a file
	# (one id per line), with the default brain count and threshold.
	local function_name=${FUNCNAME[0]}
	if [[ $# -lt 3 ]]; then
		echo "Usage: $function_name [target_dir] [targetlist] [result_dir]"
		return 1
	fi

	local target_dir=$1 targetlist=$2 result_dir=$3

	for target_id in $(cat $targetlist); do
		multi_brain_seperation $target_dir $target_id $result_dir
	done
}
124 |
function fix_header_info(){
	# Rewrite an image's header orientation with FreeSurfer's mri_convert.
	# The same orientation is passed as both --in_orientation and
	# --out_orientation, so only the header fields are rewritten.
	local function_name=${FUNCNAME[0]}
	if [[ $# -lt 3 ]]; then
		echo "$function_name Usage: $function_name [Input_file with wrong header] [orientation] [Output_file] [(Optional) output_type (analyze/nii)]"
		return 1
	fi

	local input_file=$1
	local orientation=$2
	local output_file=$3
	# optional 4th argument; default output type is nii
	local output_type=${4:-nii}

	# consistency fix: use the named $output_file instead of the raw
	# positional $3 (same value, but readable and uniform with the rest)
	mri_convert --in_orientation $orientation --out_orientation $orientation -ot $output_type $input_file $output_file # -odt float
}
144 |
function reorder_brain(){
	# Reorient input_nii into orient_out with mri_convert, then rewrite the
	# header with fix_header_info.
	# NOTE(review): the original was marked "not working properly".
	local function_name=${FUNCNAME[0]}
	if [[ $# -lt 2 ]];then
		echo "Usage: $function_name [input_nii] [output_nii] [out_orientation (Optional,default=RAS)]"
		return 1
	fi

	local input_nii=$1
	local output_nii=$2
	# bugfix: apply the documented default; the original left orient_out
	# empty when $3 was omitted, producing a malformed mri_convert call
	local orient_out=${3:-RAS}

	local orient_in=$(getOrientation $input_nii)
	mri_convert --in_orientation $orient_in $input_nii $output_nii $orient_out
	fix_header_info $output_nii $orient_out $output_nii
}
161 |
162 |
function reorient_brain(){
	# Fix the header orientation of one separated brain based on its
	# physical location in the multi-brain scan (L/R/S), then optionally
	# reorient the data to RAS with FSL's fslswapdim.
	local function_name=${FUNCNAME[0]}
	if [[ $# -lt 4 ]]; then
		echo "Usage: $function_name [target_dir] [target_id] [location (L/R/S)] [result_dir] [(Optional) convert_RAS]"
		echo "	convert_RAS: 0) skip (not) convert to LAS MNI152 standatd orientation (no resampling) [Default]"
		echo "	Otherwise) convert to LAS MNI152 standatd orientation (involve resampling)"
		return 1
	fi

	local target_dir=$1
	local target_id=$2
	local location=$3
	local result_dir=$4
	local convert_RAS=$5

	local out_orientation="LR PA IS" # = RAS

	# map the brain's physical location to its acquisition orientation
	if [[ "$location" = "S" ]]; then
		# orientation=RSA
		orientation=RSP
	elif [[ "$location" = "R" ]]; then
		# orientation=ILA
		orientation=ILP
	elif [[ "$location" = "L" ]]; then
		# orientation=SRA
		orientation=SRP
	else
		# bugfix: the original fell through with an empty (or stale global)
		# $orientation on an unrecognized location
		echo "$function_name: unknown location '$location' (expected L/R/S)"
		return 1
	fi
	# use the first file matching this target id
	target_file=$(ls $target_dir/${target_id}* | cut -d' ' -f1)

	# Fix nifti header info using FreeSurfer
	fix_header_info $target_file $orientation $result_dir/$target_id.nii.gz

	# If FSL is installed, reorient into LAS MNI152 space
	# (FSL presence checked via the $FSLDIR variable)
	if [[ -z $convert_RAS ]]; then convert_RAS=1; fi
	echo "convert_RAS = $convert_RAS"
	if [[ $convert_RAS -eq 1 ]] && [[ ! -z $FSLDIR ]]; then
		echo "using fslswapdim to convert data to RAS"
		fslswapdim $result_dir/$target_id.nii.gz $out_orientation $result_dir/$target_id.nii.gz
	fi

}
206 |
function reorient_brain_batch_3brain(){
	# For every scan and every separated brain, try all three location
	# guesses (L/R/S): reorient each copy and rename it with the guessed
	# location suffix so the correct one can be picked by visual inspection.
	local function_name=${FUNCNAME[0]}
	if [[ $# -lt 3 ]]; then
		echo "Usage: $function_name [target_dir] [scan_list] [result_dir] [(Optional) brain_no]"
		return 1
	fi

	local target_dir=$1
	local scan_list=$2
	local result_dir=$3
	# generalized: optional number of brains per scan (default 3, matching
	# the original hard-coded behavior)
	local brain_no=${4:-3}

	echo "target_dir=$target_dir"
	echo "scan_list=$scan_list"
	echo "result_dir=$result_dir"

	local scan_id
	local target_id
	local target_id_oriented
	local convert_RAS=1
	local location
	local orientation
	local brain_id

	local qc_orientation="LAS"
	for scan_id in $(cat $scan_list); do
		for brain_id in $(seq 1 $brain_no); do
			target_id=${scan_id}_$brain_id
			# make three orientation guesses
			for location in L R S; do

				# orientation is computed here only for the progress message;
				# reorient_brain performs the same mapping itself
				if [[ "$location" = "S" ]]; then
					orientation=RSP
				elif [[ "$location" = "R" ]]; then
					orientation=ILP
				elif [[ "$location" = "L" ]]; then
					orientation=SRP
				fi

				echo ".......... reorienting: $target_id to $orientation (guessing location: $location) .........."
				if [[ -e $target_dir/$target_id.nii.gz ]]; then
					echo ".......... reorienting ......"
					reorient_brain $target_dir $target_id $location $result_dir
					# rename to append the orientation guess
					target_id_oriented=${target_id}_$location
					mv $result_dir/$target_id.nii.gz $result_dir/$target_id_oriented.nii.gz
					# generate quickcheck (not working as expected yet, due to the limitation of FSL's slicer command)
					# echo ".......... generating quickcheck ......"
					# mas_quickcheck $result_dir/$target_id_oriented.nii.gz '' $result_dir $target_id_oriented $qc_orientation
				fi
			done
		done
	done
}
262 |
function extract_label(){
	# Extract a single label from a parcellation image into its own file by
	# thresholding the label image to [label-0.5, label+0.5].
	# bugfix: was $[FUNCNAME[0]] (arithmetic expansion), not ${FUNCNAME[0]}
	local function_name=${FUNCNAME[0]}
	if [[ $# -lt 4 ]]; then
		echo "Usage: $function_name [target_dir] [target_id] [label] [result_dir]"
		return 1
	fi

	local target_dir=$1
	# bugfix: $2 is the target id, as documented in the usage; the original
	# stored it in $target_list and then used the undefined $target_id
	local target_id=$2
	local label=$3
	local result_dir=$4

	# bugfix: bash integer arithmetic cannot evaluate $(($label-0.5));
	# compute the float thresholds with awk instead
	local thr_low=$(awk -v l=$label 'BEGIN{print l-0.5}')
	local thr_high=$(awk -v l=$label 'BEGIN{print l+0.5}')
	seg_maths $target_dir/$target_id.nii.gz -thr $thr_low -uthr $thr_high $result_dir/$target_id.nii.gz
}
277 |
278 | # function masking_batch_nii(){
279 | # local function_name=${FUNCNAME[0]}
280 | # if [[ $# -lt 3 ]]; then
281 | # echo "Usage: $function_name [target_dir] [target_list] [result_dir] [(optional) atlas_dir]"
282 | # return 1
283 | # fi
284 |
285 | # local target_dir=$1
286 | # local target_list=$2
287 | # local result_dir=$3
288 | # local atlas_dir=$4
289 |
290 | # echo "target_dir=$target_dir"
291 | # echo "target_list=$target_list"
292 | # echo "result_dir=$result_dir"
293 | # echo "atlas_dir=$atlas_dir"
294 |
295 | # local target_id
296 |
297 | # for target_id in $(cat $target_list); do
298 | # # $$AtlasListFileName=template_list.cfg
299 | # for atlas_id in $(cat $atlas_dir/$AtlasListFileName); do
300 | # # cloud process need to run on rcg-queen
301 | # # local target_result_dir=$result_dir/$target_id
302 | # # mkdir -p $target_result_dir
303 | # mas_masking -T $target_dir -t $target_id -A $atlas_dir -a $atlas_id -r $result_dir
304 | # done
305 | # done
306 | # }
307 |
--------------------------------------------------------------------------------
/MRM_NeAT_Atlas_Label.txt:
--------------------------------------------------------------------------------
1 | ====== Atlas_index-anatomical_label mapping =====
2 |
3 | 1 Left Hippocampus
4 | 2 External Capsule
5 | 3 Left Caudate Putamen
6 | 4 Left Ant Commissure
7 | 5 Left Globus Pallidus
8 | 6 Left Internal Capsule
9 | 7 Left Thalamus
10 | 8 Left Cerebellum
11 | 9 Left Superior Colliculi
12 | 10 Ventricles
13 | 11 Left Hypothalamus
14 | 12 Left Inferior Colliculi
15 | 13 Left Central Gray
16 | 14 Left Neocortex
17 | 15 Left Amygdala
18 | 16 Left Olfactory bulb
19 | 17 Brain Stem
20 | 18 Left Rest of Midbrain
21 | 19 Left Basal Forebrain Septum
22 | 20 Left Fimbria
23 | 21 Right Hippocampus
24 | 22 None (Right External Capsule label merged into label 2, covering the entire Corpus Callosum region)
25 | 23 Right Caudate Putamen
26 | 24 Right Ant Commissure
27 | 25 Right Globus Pallidus
28 | 26 Right Internal Capsule
29 | 27 Right Thalamus
30 | 28 Right Cerebellum
31 | 29 Right Superior Colliculi
32 | 30 None (Right Ventricles label merged with label 10)
33 | 31 Right Hypothalamus
34 | 32 Right Inferior Colliculi
35 | 33 Right Central Gray
36 | 34 Right Neocortex
37 | 35 Right Amygdala
38 | 36 Right Olfactory bulb
39 | 37 None (Right Brain Stem label merged with label 17)
40 | 38 Right Rest of Midbrain
41 | 39 Right Basal Forebrain Septum
42 | 40 Right Fimbria
43 |
44 | ========================
45 | The left/right hemispheres are separated using an automatic mirroring/registration method.
46 | The left/right parts of the ventricles (10/30), brain stem (17/37), and External Capsule (2/22) were merged into single labels, as they're usually not considered anatomically separable.
47 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Multi Atlas Segmentation and Morphometric Analysis Toolkit (MASMAT)
2 |
3 | > Originally designed for mouse brain MRI, but is applicable to any species (e.g. non-human primate, or even human neuroimages)
4 |
5 | Author: Da Ma (dma@wakehealth.edu; da_ma@sfu.ca; d.ma.11@ucl.ac.uk)
6 |
7 | ## Description
8 |
9 | > Automatic brain structural parcellation through registration-based segmentation-propagation and multi-atlas-based label-fusion
10 | This bash scripts is created for `Multi-atlas based automatic brain structural parcellation`, mainly for mouse brain MRI.
11 |
12 | This script achieves automatic brain MRI image segmentation with given [__mouse brain MRI atlases__](https://github.com/dancebean/mouse-brain-atlas) - which are a set of pairs of template images along with their manual labels. Sample atlases can be downloaded from the GitHub repository [here](https://github.com/dancebean/mouse-brain-atlas). For a detailed description of the pipeline, please refer to the papers [[1]](http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0086576) [[2]](https://www.frontiersin.org/articles/10.3389/fnins.2019.00011). [Citations](#citation) of the two papers are listed at the bottom of this page.
13 | The MASMAT tool has been extensively tested to segment mouse brain MRI. It should also be capable of handling the multi-atlas-based parcellation/segmentation for other types of images, organs, or species (e.g. CT, heart, embryo, human, macaque, _etc._), provided appropriate atlases are given.
14 |
15 | **[Updates] Please refer to [HelperFunctions/python](HelperFunctions/python) for the python library to call/use the MASMAT tools**
16 |
17 | ## Installation
18 | - Pre-requisite package installation: [NiftyReg](https://github.com/KCL-BMEIS/niftyreg/wiki), [NiftySeg](https://github.com/KCL-BMEIS/NiftySeg), and [FSL](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki) (FSL is optional, but recommended as it is used to generate QuickCheck figures which will make the inspection of the results much easier).
19 | - The easiest and recommended way to install `NiftyReg` and `NiftySeg` is by installing [`NifTK`](https://github.com/NifTK/NifTK/releases) which includes both of the two packages, as well as other useful tools, including a 3D nifti file viewer.
20 |
21 | For example, to download and install NifTK version v18.05.4 on ubuntu to your `home` folder, using the following lines:
22 |
23 | cd $HOME
24 | # Download binary files
25 | wget https://github.com/NifTK/NifTK/releases/download/v18.05.4/niftk-v18.05.4-ubuntu-14.04-x64.tar.bz2
26 | # extracct the binary files
27 | tar -xvjf niftk-v18.05.4-ubuntu-14.04-x64.tar.bz2
28 | # A folder called 'niftk-18.5.4' will be created under your $HOME folder
29 |
30 | - If you choose to compile the `NiftyReg` and `NiftySeg` from source code instead, please make sure you have downloaded and installed the latest version, since the earlier version might not be compatible with this tool.
31 |
32 | - For NiftyReg, please use the command line below to download the lastest version of the source code, and follow the compile instruction from the NiftyReg's [install page](https://github.com/KCL-BMEIS/niftyreg/wiki/install) to build and compile the binary files.
33 |
34 | git clone git://git.code.sf.net/p/niftyreg/git niftyreg
35 | or
36 |
37 | git clone git@cmiclab.cs.ucl.ac.uk:mmodat/niftyreg.git niftyreg
38 |
39 | - For NiftySeg, please use the command line below to download the latest version of the source code for compile, and follow the compile instruction from the NiftySeg's [install page](https://github.com/KCL-BMEIS/NiftySeg) to build and compile the binary files.
40 |
41 | `git clone https://github.com/KCL-BMEIS/NiftySeg.git`
42 |
43 | - [**Important**] After install or compilation the executable binary files, do remember to add the directories of the executable binary files - which is the `bin` subdirectory within directory where ther packages are installed) - to the system `$PATH` variable.
44 |
45 | - For example, if you're using Linux, and installed the NifTK at: `/home/YourUserName/niftk-18.5.4`, then add the following 2 lines to the file `/home/YourUserName/.bashrc` (e.g. by typing: `gedit $HOME/.bashrc` or `nano $HOME/.bashrc` or `emacs $HOME/.bashrc`):
46 |
47 | export PATH=${PATH}:"$HOME/niftk-18.5.4/bin"
48 | export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:"$HOME/niftk-18.5.4/bin"
49 |
50 | - Otherwise, if you download and compiled NiftyReg and NiftySeg separately at: `/home/YourUserName/nifty_reg` and `/home/YourUserName/nifty_seg`, then add the following 4 lines to the file `~/.bashrc`:
51 |
52 | export PATH=${PATH}:"$HOME/nifty_reg/bin"
53 | export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}/"$HOME/nifty_reg/lib"
54 | export PATH=${PATH}:"$HOME/nifty_seg/bin"
55 | export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:"$HOME/nifty_seg/lib"
56 |
57 | (`$HOME` represent your home directory, which is located at `/home/YourUserName/`).
58 |
59 | - After setting up the system variables, refresh your bash environment by simply type `bash` in the terminal, or open a new termina, or logout/login again. Type the following two command in the refreshed terminal to check whether the `NiftyReg` and `NiftySeg` has been installed and loaded successfully in the correct locations:
60 |
61 | `which reg_resample`
62 |
63 | `which seg_LabFusion`
64 |
65 |
66 | - If correct installation location is given with the two command above, now you're ready to use the [`MASHelperFunctions.sh`](MASHelperFunctions.sh) script, or test the [demo](demo/mas_demo.sh) script. The bash script is compatible with Linux/Windows/Mac system.
67 |
68 |
69 | ## Usage
70 |
71 | - The core functions of *MASMAT* toolbox are in the main script: the [*MASHelperFunctions.sh*](MASHelperFunctions.sh), which is capable of handling batch brain parcellation (functions with suffix `_batch`) either on the local workstation or on PBS cluster by simply specifying the `-e` flag as either `local` or `cluster`).
72 | To load the script, simply type `source MASHelperFunctions.sh` to load all corresponding functions.
73 |
74 | To get help for each function, type `function_name -h`.
75 | For example: `mas_mapping -h`
76 |
77 | - To run the *MASMAT* in python, and take advantage of the cluster-based parallel computation pipeline (i.e. on `SLURM` cluster), import and load the python helper function `segmentation_propagation.py` in [HelperFunctions/python](HelperFunctions/python) folder.
78 |
79 | **[Important]**
80 |
81 | **Please make sure the orientation information in the header of your test image is correct before process**. Sometimes, it is a bit tricky to get the correct orientation for nifty images (please see the detailed explanation at FSL website [Ref1](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Orientation%20Explained) and [Ref 2](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Fslutils#Orientation-related_Utilities). Additional information in the answer of the first question in the Q/A session.
82 |
83 | **Please make sure the voxel dimension (voxel size) of the image is correct**. If the images are reconstructed using tools for human brain MRI, sometimes the voxel dimension will be set to 1mm isotropic, which is incorrect and will affect the registration algorithm. A typical resolution for *in vivo* mouse brain MRI would be around 0.1-0.2mm, and for *ex vivo* can be as small as 0.05mm.
84 | - You can check the voxel dimension using:
85 | - the `fslinfo` command in the [FSL](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki) package (field name: `pixdim`);
86 | - the `mri_info` command in the [FreeSurfer](https://surfer.nmr.mgh.harvard.edu/) package (field name: `voxel sizes`),
87 | - the `nifti_tool` command in the [AFNI](https://afni.nimh.nih.gov/) package: `nifti_tool -disp_hdr -infiles $input_filename`.
88 | - or other GUI tools such as [ITK-SNAP](http://www.itksnap.org/pmwiki/pmwiki.php) (in: Tools - Image Information).
89 | - A convenient tool to change the voxel dimension (field name: `pixeldim`) is: [`nifti_tool`](https://afni.nimh.nih.gov/pub/dist/doc/program_help/nifti_tool.html) from the [AFNI](https://afni.nimh.nih.gov/) package. Here is an example to change an input with incorrect voxel size (e.g. 1mm) into the correct one (0.1mm): `nifti_tool -mod_hdr -mod_field pixdim '0.0 0.1 0.1 0.1 0.1 0.1 0.1 0.1' -infiles $input_filename -prefix $output_filename`
90 | - Sometimes, the image origin in the nifti header will be misplaced after the pixeldim change, which will make the affine registration fail (for both the mas_masking step and the mas_parcellation). A quick solution is to load the image into the [`NifTK`](https://github.com/NifTK/NifTK/releases), and "save as" a `.nii` or `.nii.gz` file, which will effectively reinitialize the image origin information in the nifti file header. We will post a more elegant solution with future update.
91 |
92 |
93 | ## Pipeline example
94 | ### Processing pipeline schematic diagram
95 | [ "Click here for the paper with detailed description of the processing pipeline"](https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0086576)
96 | ### pipeline demo
97 |
98 | A [demo script](demo/mas_demo.sh) is provided, which contains a complete end-to-end demonstration showing how to build and run the pipeline. It first download the atlas and test image from the [mouse brain atlas](https://github.com/dancebean/mouse-brain-atlas). It then run the following steps in sequential: brain extraction => N4 Bias Field Correction => brain structural parcellation (which itself consists of 3 sub-steps: atlas-to-test image registration (using dilated mask and bias-field-corrected brain image) -> atlas label propagation -> and multi-atlas label fusion).
99 |
100 | ### Function demo
101 | - Load script:
102 |
103 | `source MASHelperFunctions.sh`
104 |
105 | - Step 1: __*brain extraction*__ (masking)
106 |
107 | `mas_masking_batch -T "target_dir" -t "target_list" -A "atlas_dir" -r "result_dir"`
108 | - `-h`: Use mas_masking_batch -h to show help for usage
109 | - `-T`: specify folder contain the target image to be segmented (images should be in nifty format: nii or nii.gz. The image orientation should be correctly indicated in the nifti header. Please refer to the Q&A section *What image orientation should my test image be?* for more details about image orientation.)
110 | - `-t`: specify text file contain a list of target image file names inside the target_dir. (Each line contains the name of one image file. User can just provide file name without the `.nii` or '.nii.gz' extension. The algorithm will automatically figure out the correct file extension.)
111 | - `-A`: folder contains the atlas (sample atlas containing multiple templates can be downloaded here)
112 |
113 | [Optional argument]
114 | - `-a`: text file list the templates inside the atlas folder to be used (default: `template_list.cfg` file within the atlas folder)
115 | - `-p`: configuration file to tune the parameters for the registration and label fusion algorithms
116 | - `-e`: specify to run locally (`local`) or on a `cluster`. Specifying `cluster` will submit parallel pbs jobs to the cluster; specifying `local` will run jobs sequentially on the local machine. `cluster` is set by default
117 |
118 | - Step 2. __*bias field correction*__
119 | > This is an important step before the parcellation. It is skipped in the demo as the images are already "bias-corrected" using the N4 algorithm
120 | `mas_N4_batch`
121 |
122 | - Step 3. __*brain structure parcellation*__
123 |
124 | `mas_parcellation_batch -T "target_dir" -t "target_list" -A "atlas_dir" -r "result_dir" -M "targetmask_dir" -M "dilate_mask_dir" -m "mask_suffix" -e "exe_mode"`
125 | - `-h`: Use mas_masking_batch -h to show help for usage
126 | - `-T`: specify folder contain the test image to be segmented (please use the bias-field corrected image)
127 | - `-t`: specify text file contain a list of target image file names inside the target_dir (in nifty format: nii or nii.gz, can only provide file name without extension)
128 | - `-A`: folder contains the atlas (sample atlas containing multiple templates can be downloaded here)
129 | - `-M`: folder contain the dilated brain mask
130 |
131 | [optional argument]
132 | - `-M`: folder containing the brainmask file of the test images
133 | - `-m`: suffix (e.g. for `test1.nii.gz` with mask file `test1.mask.nii.gz`: `-m ".mask"`)
134 | - `-a`: text file list the templates inside the atlas folder to be used (default: `template_list.cfg` file within the atlas folder)
135 | - `-p`: configuration file to tune the parameters for the registration and label fusion algorithms
136 | - `-a`: text file list the templates inside the atlas folder to be used (default: `template_list.cfg` file within the atlas folder)
137 | - `-p`: configuration file to tune the parameters for the registration and label fusion algorithms
138 |
139 | ### Sample image of the pipeline output
140 | [ "Click for sample quality control image of the parcellation output (generated using mas_quickcheck)."](docs/quickcheckdemo.png) The similar color between the olfactory bulb and the cortex is due to the limited colormap of `jet`.
141 |
142 | ## List of functions
143 |
144 | [Basic functions]
145 | - `check_image_file`
146 | - `check_atlas_file`
147 | - `check_mapping_file`
148 | - `check_label_fusion_file`
149 |
150 | [Single image processing functions]
151 | - `mas_masking` (prerequisite: NiftyReg): single atlas brain masking (affine image registration)
152 | - `mas_masking_fusion` (prerequisite: NiftySeg): multi atlas brain masking (fuse the result from mas_masking)
153 | - `mas_mapping` (prerequisite: NiftyReg): single atlas label propagation
154 | - `mas_fusion` (prerequisite: NiftySeg): multi atlas label fusion
155 | - `mas_quickcheck` (prerequisite: FSL): quality control (quickcheck) image generator
156 | - `mas_label_volume` (prerequisite: NiftySeg): extract label volume (into a .csv file)
157 | - `mas_template_function`: template functions for advanced user to develop your own additional functions
158 |
159 | [Batch image processing functions]:
160 | - `mas_masking_batch`
161 | - `mas_mask_dilate_batch`
162 | - `mas_mapping_batch`
163 | - `mas_fusion_batch`
164 | - `mas_parcellation_batch` (label propagations + fusions)
165 | - `mas_quickcheck_batch`
166 | (The parallel brain structure parcellation on PBS cluster is achieved through PBS array and PBS dependency.)
167 |
168 | [ Pre-processing functions ]:
169 | - `mas_fix_header_info`
170 | - `mas_smooth_batch`
171 | - `mas_N4_batch` (prerequisite: ANT)
172 |
173 | [ Post-processing functions ]:
174 | - `mas_extract_label`
175 | - `mas_extract_label_batch`
176 | - `mas_extract_volume`
177 | - `mas_extract_volume_batch`
178 | - `mas_quickcheck_panorama`
179 |
180 | ## Version History and Roadmap
181 | - Older implementation in previous version (will be removed in future release)
182 | (Code repository move from the [original page](http://cmic.cs.ucl.ac.uk/staff/da_ma/multi_atlas/) that is stated in the paper.)
183 | - for_single_workstation: to be used on a single PC.
184 | - for_cluster: to be run on computer cluster, use parallel image registration to speed-up the process.
185 | - parameter_samples: sample parameter files that can be fed to the command when running the script [optional].
186 | - Future releases will also provide support for Slurm-based clusters.
187 |
188 | ## Q/A
189 |
190 | ### Q. What image orientation should my test image be?
191 |
192 | A. The orientation of the default atlas is: RAS, although the algorithms should be able to identify any correctly oriented images.
193 |
194 | - A simple but effective script [`orient_nii.m`](https://github.com/dama-lab/multi-atlas-segmentation/tree/master/HelperFunctions/matlab/reorient) is provided in this package. It uses the Matlab NIfTI toolbox (https://www.mathworks.com/matlabcentral/fileexchange/8797-tools-for-nifti-and-analyze-image) to interactively visualize and determine the orientation, as well as reorient it.
195 |
196 | - Alternatively, you can use the reorientation function provided by the latest version of [ITK-SNAP](http://www.itksnap.org/) to reorient the image (Tools - Reorient Image).
197 |
198 | - If you have FSL installed, use `fslorient` to check the image orientation, and use `fslswapdim` to change the image orientation (swap the image dimension).
199 | - If you have FreeSurfer installed, use `mri_info` to check the image orientations in the nifti header. use `mri_convert --in_orientation $input_orientation --out_orientation $output_orientation -ot nifti -odt float $input_image $output_image` to change the image orientation.
200 |
201 | ### Q. Why is part of my parcellation not properly overlayed with the original image?
202 |
203 | A. Check if your MR image has been properly oriented to RAS (See the Q/A above). If that's not the problem, then make sure your MR image has been preprocessed to correct for the intensity inhomogeneity (also called bias field correction). There are several tools that can perform the bias field correction. We provide the implementation for several of them in the python helpfunctions [`segmentation_propagation.py`](https://github.com/dama-lab/multi-atlas-segmentation/tree/master/HelperFunctions/python) including: `N4_correction`, `N4_correction_slicer`, and `N4_correction_itk`. You shall have the corresponding toolbox installed in the system (i.e. ANTs, 3D-Slicer, SimpleITK).
204 |
205 | Alternatively, you can also use the following tools directly to achieve bias field correction:
206 |
207 | - (1) If you have the [ANTs](http://stnava.github.io/ANTs/) tools installed, the function `mas_N4_batch` uses the handy bias field correction function `N4BiasFieldCorrection` provided by the ANTs package, which uses an upgraded version of the N3 algorithm used in FreeSurfer's nu_correct, and it can handle the nifti format out-of-the-box as it's using the ITK framework.
208 |
209 | - (2) If you have [3D-slicer](https://www.slicer.org/wiki/Documentation/4.3/Modules/N4ITKBiasFieldCorrection) installed, it also provides the N4ITK implementation of `N4BiasFieldCorrection` through its command line interface (CLI).
210 |
211 | - (3) The [FreeSurfer](https://surfer.nmr.mgh.harvard.edu/) package provide a tool `nu_correct` which uses N3 bias field correction algorithm.
212 |
213 | - (4) The [NiftySeg](http://cmic.cs.ucl.ac.uk/staff/da_ma/Multi_Atlas/) package provide bias field correction using automatic tissue segmentation (**seg_EM**).
214 |
215 | ## Citation
216 |
217 | If you used our code in your study, we ask you to kindly cite the following papers:
218 |
219 | - Ma D, Cardoso MJ, Modat M, Powell N, Wells J, Holmes H, Wiseman F, Tybulewicz V, Fisher E, Lythgoe MF, Ourselin S. **Automatic structural parcellation of mouse brain MRI using multi-atlas label fusion.** PLOS ONE. 2014 Jan 27;9(1):e86576.
220 | http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0086576
221 |
222 | - Ma D, Holmes HE, Cardoso MJ, Modat M, Harrison IF, Powell NM, O'Callaghan J, Ismail O, Johnson RA, O’Neill MJ, Collins EC., Mirza F. Beg, Karteek Popuri, Mark F. Lythgoe, and Sebastien Ourselin. **Study the longitudinal in vivo and cross-sectional ex vivo brain volume difference for disease progression and treatment effect on mouse model of tauopathy using automated MRI structural parcellation.** Frontiers in Neuroscience. 2019;13:11.
223 | https://www.frontiersin.org/articles/10.3389/fnins.2019.00011
224 |
225 |
226 | If you're using our [mouse MRI Cerebellar atlas](https://github.com/dancebean/mouse-brain-atlas/tree/master/Tc1_Cerebellum), we ask you to please kindly cite our following papers:
227 | - Ma, D., Cardoso, M. J., Zuluaga, M. A., Modat, M., Powell, N. M., Wiseman, F. K., Cleary, J. O., Sinclair, B., Harrison, I. F., Siow, B., Popuri, K., Lee, S., Matsubara, J. A., Sarunic, M. V, Beg, M. F., Tybulewicz, V. L. J., Fisher, E. M. C., Lythgoe, M. F., & Ourselin, S. (2020). **Substantially thinner internal granular layer and reduced molecular layer surface in the cerebellum of the Tc1 mouse model of Down Syndrome – a comprehensive morphometric analysis with active staining contrast-enhanced MRI**. NeuroImage, 117271. https://doi.org/https://doi.org/10.1016/j.neuroimage.2020.117271
228 | - Ma, D., Cardoso, M. J., Zuluaga, M. A., Modat, M., Powell, N., Wiseman, F., Tybulewicz, V., Fisher, E., Lythgoe, M. F., & Ourselin, S. (2015). **Grey Matter Sublayer Thickness Estimation in the Mouse Cerebellum**. In Medical Image Computing and Computer Assisted Intervention 2015 (pp. 644–651). https://doi.org/10.1007/978-3-319-24574-4_77
229 |
230 | ## Funding
231 | The works in this repository received multiple funding from EPSRC, UCL School of Engineering, Alzheimer's Society Research Program (Alzheimer's Society of Canada), UCL Leonard Wolfson Experimental Neurology center, Medical Research Council (MRC), the NIHR Biomedical Research Unit (Dementia) at UCL and the National Institute for Health Research University College London Hospitals Biomedical Research center, the UK Regenerative Medicine Platform Safety Hub, and the Kings College London and UCL Comprehensive Cancer Imaging center CRUK & EPSRC in association with the MRC and DoH (England), UCL Faculty of Engineering funding scheme, Alzheimer Society Research Program from Alzheimer Society Canada, NSERC, CIHR, MSFHR Canada, Eli Lilly and Company, Wellcome Trust, the Francis Crick Institute, Cancer Research UK, and University of Melbourne McKenzie Fellowship.
232 |
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-slate
--------------------------------------------------------------------------------
/demo/mas_demo.sh:
--------------------------------------------------------------------------------
#!/bin/bash

#########################################
# Demonstrate the common usage of the
# Multi-Atlas Segmentation (MAS) pipeline
#########################################


# Add niftk installation location to system paths: `PATH` and `LD_LIBRARY_PATH`.
# This will only work if user followed the installation instruction, and installed packages in the recommended location.
# If you installed the packages in other locations, please change the variable `$HOME` to your own installed locations.

# Option 1: if user installed the default niftk package
export PATH=${PATH}:"$HOME/niftk-18.5.4/bin"
# Bug fix: path-list entries must be separated by ':' (the separator was missing here).
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:"$HOME/niftk-18.5.4/bin"

# Option 2: if you choose to compile the niftyreg/niftyseg from the source code.
export PATH=${PATH}:"$HOME/nifty_reg/bin"
# Bug fix: the separator here was '/' instead of ':', which corrupted LD_LIBRARY_PATH.
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:"$HOME/nifty_reg/lib"
export PATH=${PATH}:"$HOME/nifty_seg/bin"
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:"$HOME/nifty_seg/lib"

####################################
######## prepare demo data #########
####################################

# (Optional) create demo directory within the current folder (preferably an empty folder)
mkdir -p ./demo
cd ./demo

# Download the "in_vivo" atlas from: https://github.com/dancebean/mouse-brain-atlas
mkdir -p Atlas
cd Atlas
svn export https://github.com/dancebean/mouse-brain-atlas/trunk/FVB_NCrl/in_vivo FVB_NCrl_in_vivo
# only use three atlases for fast processing and demonstration purpose
cat FVB_NCrl_in_vivo/template_list.cfg | head -n 3 > FVB_NCrl_in_vivo/template_list_demo.cfg
cd ..

# create input directory, file, and target list
mkdir -p ./input
cd ./input
target_id="A0"
svn export https://github.com/dancebean/mouse-brain-atlas/trunk/NeAt/in_vivo/template/$target_id.nii.gz
echo $target_id > targetlist.txt
cd ..

# create targetlist (only 1 file)
ls ./input | head -n 1 | cut -d. -f1 > targetlist.txt

# create output directory
mkdir -p ./output


####################################
######## start demo script #########
####################################

# Download the main script if not yet done
svn export --force https://github.com/dancebean/multi-atlas-segmentation/trunk/MASHelperFunctions.sh
# (Optional) Download the sample parameter configuration file
# NOTE(review): confirm the remote file name — the repository tree also contains
# "parameters_samples/old_version/sample_parameter.sh"; the name below must match upstream.
svn export --force https://github.com/dancebean/multi-atlas-segmentation/trunk/parameters_samples/parameter_sample.sh
# You can edit the advanced parameters to fine-tune the algorithm

# source the main script (or use the location of your own copy)
source ./MASHelperFunctions.sh
# Alternatively, if you want to mute the listing of all the available functions, use:
# source ./MASHelperFunctions.sh > /dev/null 2>&1

# define parameters
atlas_name="FVB_NCrl_in_vivo"
atlas_dir="Atlas/$atlas_name"
target_dir="input"
result_dir="output"
target_id="A0"
target_list="input/targetlist.txt"
atlas_list=$atlas_dir/template_list_demo.cfg
exe_mode=local
parameter_cfg=./parameter_sample.sh

# brain-mask dilation settings (dilated masks improve registration robustness)
dil_voxel=1
raw_mask_dir=$result_dir/mask
dilate_mask_dir=$result_dir/mask_dilate_$dil_voxel
mask_suffix=".mask.$atlas_name"


####################################
####        Demo script         ####
####################################

# 1. ~~~~~ brain extracting/masking ~~~~~~
mas_masking_batch -T $target_dir -t $target_list -A $atlas_dir -a $atlas_list -r $result_dir -e $exe_mode # -p $parameter_cfg

# 2. ~~~~~ brain mask dilation (not always necessary, check the quickcheck images to decide) ~~~~~
mas_mask_dilate_batch $target_list $raw_mask_dir $dilate_mask_dir $mask_suffix $dil_voxel $exe_mode # -p $parameter_cfg
# generate quickcheck for dilated mask
mas_quickcheck $target_dir/$target_id $dilate_mask_dir/$target_id$mask_suffix $result_dir/quickcheck/ \
 $target_id$mask_suffix.d_$dil_voxel # -p $parameter_cfg

# 3. ~~~~~ bias field correction ~~~~~
# [Skipped] This is an important step before the parcellation. It is skipped in the demo as the images are already "bias-corrected" using the N4 algorithm

# 4. ~~~~~ parcellation ~~~~~
mas_parcellation_batch -T $target_dir -t $target_list -A $atlas_dir -a $atlas_list -r $result_dir \
 -M $dilate_mask_dir -m $mask_suffix -e $exe_mode # -p $parameter_cfg

# # alternatively, if using non-dilated mask:
# mas_parcellation_batch -T $target_dir -t $target_list -A $atlas_dir -a $atlas_list -r $result_dir \
#  -M $raw_mask_dir -m $mask_suffix -e local # -p $parameter_cfg
--------------------------------------------------------------------------------
/demo/mbs_demo.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #########################################
4 | # Demonstrate the comman usage of the
5 | # Multi-Brain Separation (MBS) pipeline
6 | #########################################
7 |
8 | # Add niftk installation location to system paths: `PATH` and `LD_LIBRARY_PATH`.
9 | # This will only work if user followed the installation instruction, and installed packages in the recommended location.
10 | # If you installed the packages in other locations, please change the variable `$HOME` to your own installed locations.
11 |
12 | # Option 1: if user installed the default niftk package
13 | export PATH=${PATH}:"$HOME/niftk-18.5.4/bin"
14 | export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}"$HOME/niftk-18.5.4/bin"
15 |
16 | # option 2: if use choose to compile the niftyreg/niftyseg from the source code.
17 | export PATH=${PATH}:"$HOME/nifty_reg/bin"
18 | export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}/"$HOME/nifty_reg/lib"
19 | export PATH=${PATH}:"$HOME/nifty_seg/bin"
20 | export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:"$HOME/nifty_seg/lib"
21 |
22 | ####################################
23 | ######## prepare demo data #########
24 | ####################################
25 | # Ref: https://figshare.com/collections/Tc1_and_WT_data/3258139
26 | # DOI: 10.6084/m9.figshare.c.3258139
27 | # Cohort 1: https://figshare.com/articles/Tc1_and_WT_brains_cohort_1_C1_/3382693
28 | # Cohort 2: https://figshare.com/articles/Tc1_and_WT_brains_cohort_2_C2_/3394786
29 | HOME=$HOME
30 | WORK_DIR=$HOME/Data/TC1
31 | RAW_DIR=$WORK_DIR/RAW_DATA
32 | script=$WORK_DIR/script
33 | mkdir -p $RAW_DIR
34 | mkdir -p $script
35 |
36 | # get to the current directory
37 | cd $RAW_DIR
38 |
39 | # Get the data if not already existed/downloaded
40 | tc1_269455=$RAW_DIR/'tc1_269455.nii.gz'
41 | tc1_269455=$RAW_DIR/'010913_02.nii.gz'
42 | if [[ ! -e $tc1_269455 ]]; then
43 | wget --content-disposition -P $RAW_DIR https://ndownloader.figshare.com/files/5275453
44 | wget --content-disposition -P $RAW_DIR https://ndownloader.figshare.com/files/5303806
45 |
46 | fi
47 |
48 | ####################################
49 | ######## start demo script #########
50 | ####################################
51 |
52 | # Download the main script if not yet done
53 | (cd $script && svn export --force https://github.com/dancebean/multi-atlas-segmentation/trunk/MultiBrainSepsrationHelperFunctions.sh)
54 | (cd $script && svn export --force https://github.com/dancebean/multi-atlas-segmentation/trunk/MASHelperFunctions.sh)
55 |
56 | # source the main script (or use the location of your own copy)
57 | source $script/MultiBrainSepsrationHelperFunctions.sh
58 | source $script/MASHelperFunctions.sh > /dev/null 2>&1
59 |
60 | # Alternatively, if you want to show the listing of all the available functions, use:
61 | # source ./MASHelperFunctions.sh
62 |
63 |
64 |
65 | ## Three brain separation
66 |
67 |
68 |
69 | ## Quickcheck
70 | mas_quickcheck [bg_img] [(optional) overlay_img] [qc_dir] [qc_filename]
71 |
72 |
73 |
74 |
75 | ###################################
76 |
--------------------------------------------------------------------------------
/depreciated/ReadMe.md:
--------------------------------------------------------------------------------
1 | Old version (< 1.0) with implementation of cluster/local_single_workstation separately
2 |
--------------------------------------------------------------------------------
/depreciated/for_cluster/LR_separation.sh:
--------------------------------------------------------------------------------
# Separate the labels into left-right hemisphere
# Step 1: make a left-right flipped version of the template image and
#         corresponding mask (Done in Matlab NIFTI Tool)
# Step 2: register the image to the flipped counterpart to get affine matrix
# Step 3: half the affine matrix
# Step 4: Apply the halved affine matrix to the original image to make it sit
#         perfectly in the middle (reg_transfer -updSform)
# Step 5: Call Matlab script to add value on the right hemisphere
#
# @Author: Ma Da (d.ma.11@ucl.ac.uk) 2013.08.06
#
# usage: LR_separation.sh $1
# $1: atlas folder (including all the flipped folder)
#     Folder structure: template - template_flipped - mask - mask_flipped
# $2: image used for LR_separation (right half equal to 1)

# Create working/output directories if missing (mkdir -p also creates parents,
# so $1/updSform is implicitly created by the template subfolder line).
mkdir -p job_output
mkdir -p job_error
mkdir -p $1/updSform/template
mkdir -p $1/temp
mkdir -p $1/template_middle
mkdir -p $1/label_middle
mkdir -p $1/mask_middle
mkdir -p $1/label_LR

ROOT_DIR=$(pwd)
QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=9.9G -l tmem=9.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error "
MATLAB_CMD="matlab -nosplash -logfile matlab_output.txt -r " # -nodesktop

# update template/label/mask
jid=$$ # use the shell PID as a quasi-unique job identification number
for G in `ls $1/template/`
do
NAME=`echo "$G" | cut -d'.' -f1` # extract file name
jname=${NAME}_${jid}
affine=affine_${jname} # compute the affine to transform to the flipped image
${QSUB_CMD} -N ${affine} reg_aladin -rigOnly -flo $1/template/$G -fmask $1/mask/$G -ref $1/template_flipped/$G -rmask $1/mask_flipped/$G -res $1/updSform/$G -aff $1"/updSform/aff_"${NAME}.txt
half_affine=half_${jname} # half the affine matrix
${QSUB_CMD} -N ${half_affine} -hold_jid ${affine} reg_transform -ref $1/template/$G -halfAffine $1"/updSform/aff_"${NAME}.txt $1"/updSform/aff_"${NAME}_half.txt

# updSform=updSform_${jname} # update template label and mask Sform to the middle position
# updSform_template=template_${updSform}
# ${QSUB_CMD} -N ${updSform_template} -hold_jid ${half_affine} reg_transform -ref $1/template/$G -updSform $1/template/$G $1"/updSform/aff_"${NAME}_half.txt $1/template_middle/$G
# updSform_label=label_${updSform} # apply the half matrix to label
# ${QSUB_CMD} -N ${updSform_label} -hold_jid ${half_affine} reg_transform -ref $1/label/$G -updSform $1/label/$G $1"/updSform/aff_"${NAME}_half.txt $1/label_middle/$G
# updSform_mask=mask_${updSform} # apply the half matrix to mask
# ${QSUB_CMD} -N ${updSform_mask} -hold_jid ${half_affine} reg_transform -ref $1/mask/$G -updSform $1/mask/$G $1"/updSform/aff_"${NAME}_half.txt $1/mask_middle/$G
# updSform_LR=LR_${updSform} # apply the half matrix to the LR_seperate image
# ${QSUB_CMD} -N ${updSform_LR} -hold_jid ${half_affine} reg_transform -ref $2 -updSform $2 $1"/updSform/aff_"${NAME}_half.txt $1/temp/LR_${NAME}.nii.gz

resample=resample_${jname} # resample template label and mask to middle position
# Bug fix: this job name previously reused the "label_" prefix, colliding with
# resample_label below; the collision gave two jobs the same SGE name and made
# the *_${resample} hold pattern unable to wait on the template-resample job.
resample_template=template_${resample}
${QSUB_CMD} -N ${resample_template} -hold_jid ${half_affine} reg_resample -NN -ref $1/template/$G -flo $1/template/$G -aff $1"/updSform/aff_"${NAME}_half.txt -res $1/template_middle/$G
resample_label=label_${resample}
${QSUB_CMD} -N ${resample_label} -hold_jid ${half_affine} reg_resample -NN -ref $1/label/$G -flo $1/label/$G -aff $1"/updSform/aff_"${NAME}_half.txt -res $1/label_middle/$G
resample_mask=mask_${resample}
${QSUB_CMD} -N ${resample_mask} -hold_jid ${half_affine} reg_resample -NN -ref $1/mask/$G -flo $1/mask/$G -aff $1"/updSform/aff_"${NAME}_half.txt -res $1/mask_middle/$G

LR_label=LR_label_${jname} # use Matlab to separate labels into left/right hemispheres
${QSUB_CMD} -N ${LR_label} -hold_jid *_${resample} ${MATLAB_CMD} "\"atlas_LR('$1/mask_middle/$G','$1/label_middle/$G','$2','$1/label_LR')\""
# use seg_maths to calculate LR (failed because result nii img type uint18->single)
# right_mask=right_${jname} # calculate the right_only mask - multiply by right_half
# ${QSUB_CMD} -N ${right_mask} -hold_jid *_${resample} seg_maths $1/mask_middle/$G -mul $2 $1/temp/r_$G
# right_mask_20=right20_${jname} # right_only mask * 20
# ${QSUB_CMD} -N ${right_mask_20} -hold_jid ${right_mask} seg_maths $1/temp/r_$G -mul 20 $1/temp/r20_$G
# LR_label=LR_label_${jname} # original label + right_only mask = LR seperated label
# ${QSUB_CMD} -N ${LR_label} -hold_jid ${right_mask_20} seg_maths $1/temp/r20_$G -add $$1/label_middle/$G $1/label_LR/$G

done
--------------------------------------------------------------------------------
/depreciated/for_cluster/STEPS_optimisation.sh:
--------------------------------------------------------------------------------
# STEPS parameter grid-search driver (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29
#
# Runs a leave-one-out label fusion for every combination of the STEPS kernel
# size k (in half-voxel steps from KERNAL_MIN to KERNAL_MAX) and the number of
# selected templates n (3 .. SAMPLE_NUMBER-1), by sourcing
# leave_one_out_labfusion.sh once per (k, n) pair.

# to test parameters used for MLSTEPS
# usage: ./STEPS_optimisation.sh $1 $2
# $1: atlas (in_vivo ex_vivo)
# $2: sample number
# echo "Bash version ${BASH_VERSION}"
export QSUB_CMD="qsub -l h_rt=10:00:00 -l h_vmem=3.5G -l vf=3.5G -l s_stack=10240 -j y -S /bin/sh -b y -cwd -V"

# LABEL_NUMBER=20
KERNAL_MIN=1
KERNAL_MAX=9
export SAMPLE_NUMBER=10 # total number of sample in the atlas
# Read user defined sample number
if [ ! -z $2 ]; then # check if the 2nd argument (sample number) was given
SAMPLE_NUMBER=$2 # assign user-defined total number of sample in the atlas
fi

# Output directories for the Dice scores produced by the downstream scripts.
if [ ! -d $1"/Dice_Score" ]
then mkdir $1"/Dice_Score"
fi
if [ ! -d $1"/Dice_Score/temp" ]
then mkdir $1"/Dice_Score/temp"
fi

# kX2 iterates over 2*k so that k can advance in 0.5-voxel steps (bc below
# divides by 2 with one decimal place).
for ((kX2=$KERNAL_MIN*2;kX2<=$KERNAL_MAX*2;kX2+=1)) #{1..6}
do
for ((n=3;n<$SAMPLE_NUMBER;n+=1)) # or: for i in {1..20}
do
k=$(echo "scale=1;$kX2/2"|bc)
echo "begin leave one out for kernal=$k and $n label selected"
. leave_one_out_labfusion.sh $1 $k $n # generate parcellation for specific STEPS parameter
done
done

--------------------------------------------------------------------------------
/depreciated/for_cluster/STEPS_optimisation_dice.sh:
--------------------------------------------------------------------------------
# STEPS parameter grid-search Dice export (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29
#
# Companion to STEPS_optimisation.sh: for every (k, n) combination it sources
# leave_one_out_dice.sh to export the Dice scores of the corresponding
# leave-one-out parcellation.

# to test parameters used for MLSTEPS
# usage: ./STEPS_optimisation_dice.sh $1 $2
# $1: atlas (in_vivo ex_vivo)
# $2: sample number
# echo "Bash version ${BASH_VERSION}"
export QSUB_CMD="qsub -l h_rt=10:00:00 -l h_vmem=3.5G -l vf=3.5G -l s_stack=10240 -j y -S /bin/sh -b y -cwd -V"

# Load default value for parameter
KERNAL_MIN=1
KERNAL_MAX=9
SAMPLE_NUMBER=10 # total number of sample in the atlas
# Read user defined sample number
if [ ! -z $2 ]; then # check if the 2nd argument (sample number) was given
SAMPLE_NUMBER=$2 # assign user-defined total number of sample in the atlas
fi

# TITLE_LINE=$(date +"%m-%d-%y")
# kX2 iterates over 2*k so that k advances in 0.5-voxel steps (bc divides by 2).
for ((kX2=$KERNAL_MIN*2;kX2<=$KERNAL_MAX*2;kX2+=1)) #{1..6}
do
for ((n=3;n<$SAMPLE_NUMBER;n+=1)) # or: for i in {1..20}
do
k=$(echo "scale=1;$kX2/2"|bc)
. leave_one_out_dice.sh $1 $k $n # exporting dice score
done
done

31 |
--------------------------------------------------------------------------------
/depreciated/for_cluster/brain_extraction_batch.sh:
--------------------------------------------------------------------------------
# Batch brain extraction (SGE): multiply each image by its brain mask and
# submit one seg_maths job per image.
#
# $1: folder containing the original images
# $2: folder containing the brain masks
# $3: folder to put the extracted brain images
export QSUB_CMD="qsub -l h_rt=1:00:00 -pe smp 1 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # -l s_stack=128M

# Make sure the job-log folders and the output folder exist.
[ -d job_output ] || mkdir job_output
[ -d job_error ] || mkdir job_error
[ -d $3 ] || mkdir -p $3

# Submit one masking job per image file found in the input folder.
for image_file in `ls $1`
do
base_name=`echo "$image_file" | cut -d'.' -f1` # file name without extension
${QSUB_CMD} -N extract_$image_file seg_maths -in $1/${base_name} -mul $2/${base_name} -out $3/${base_name}.nii.gz
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/dice_batch.sh:
--------------------------------------------------------------------------------
# Dice score batch script, step 1 (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29
#
# Submits one seg_stats job per template image to compute its Dice scores
# against the STEPS-fused label, writing one temporary CSV per image
# (collected later by dice_batch_step2.sh).

# usage: ./dice_batch.sh $1 $2 $3 $4 $5
# $1: destination atlas folder to be tested (only use label subfolder)
# $2: source atlas folder containing multiple manual labels
# $3: folder containing the parcellated structures
# $4: STEPS parameter k (kernel size in terms of voxel number)
# $5: STEPS parameter n (number of top ranked local atlas to select for label fusion)
ROOT_DIR=$(pwd)
# echo "Bash version ${BASH_VERSION}..."
export QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=9.9G -l tmem=9.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
ATLAS=$(basename $2)
LABEL_NUMBER=40

if [ ! -d Dice_Score/temp ]
then mkdir -p Dice_Score/temp
fi
# Set STEPS parameters
if [[ ! -z $5 ]] && [[ ! -z $4 ]]; then # if both STEPS parameters were passed on the command line
export k=$4
export n=$5
else # otherwise fall back to the default STEPS parameters "3 8"
export k=3
export n=8
fi
export STEPS_PARAMETER="${k} ${n} "

# begin dice score calculation
for H in `ls $1/template/`
do
TEST_IMAGE=`echo "$H" | cut -d'.' -f1`
echo "******************************************"
echo "* Dice score OF ${TEST_IMAGE} step 1 (STEPS $k $n) *"
# echo "******************************************"
# Step 1: Calculating Dice score for each sample
# Bug fix: removed the dead line `ATLAS_IMAGE=\`echo "$G" | cut -d'.' -f1\``
# — $G is never set in this script (the loop variable is $H) and ATLAS_IMAGE
# was never used afterwards.
j_stats=${TEST_IMAGE}_${ATLAS}_label_STEPS_${k}_${n}
${QSUB_CMD} -N ${j_stats} seg_stats $1/label/$H -D "\"label/${TEST_IMAGE}_${ATLAS}_label_STEPS_${k}_${n}.nii.gz\"" "\"Dice_Score/temp/${TEST_IMAGE}_${ATLAS}_label_STEPS_${k}_${n}.csv\""
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/dice_batch_step2.sh:
--------------------------------------------------------------------------------
# Dice score batch script, step 2 (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29
#
# Collects the per-image Dice CSVs produced by dice_batch.sh (step 1) into a
# single CSV per (k, n) STEPS setting: one header row listing the label
# indices 1..LABEL_NUMBER, then one row per template image.

# usage: ./dice_batch_step2.sh $1 $2 $3 $4 $5
# $1: destination atlas folder to be tested (only use label subfolder)
# $2: source atlas folder containing multiple manual labels
# $3: folder containing the parcellated structures
#     NOTE(review): $3 is documented but not referenced below — confirm whether
#     the temp CSVs were intended to be read from $3 instead of Dice_Score/temp.
# $4: STEPS parameter k (kernel size in terms of voxel number)
# $5: STEPS parameter n (number of top ranked local atlas to select for label fusion)
ROOT_DIR=$(pwd)
# echo "Bash version ${BASH_VERSION}..."
ATLAS=$(basename $2)
LABEL_NUMBER=40

if [ ! -d Dice_Score/temp ]
then mkdir -p Dice_Score/temp
fi
# Set STEPS parameters
if [[ ! -z $5 ]] && [[ ! -z $4 ]]; then # if both STEPS parameters were passed on the command line
export k=$4
export n=$5
else # otherwise fall back to the default STEPS parameters "3 8"
export k=3
export n=8
fi
export STEPS_PARAMETER="${k} ${n} "

# Title line: "k=..+n=..,1,2,...,LABEL_NUMBER" (echo -e '\c' suppresses the newline)
echo -e "k=${k}+n=${n}\c" >> "Dice_Score/${ATLAS}_Dice_Score_STEPS_${k}_${n}.csv"
for ((m=1;m<=$LABEL_NUMBER;m+=1))
do
echo -e ",$m\c" >> "Dice_Score/${ATLAS}_Dice_Score_STEPS_${k}_${n}.csv"
done
echo -e "\n\c" >> "Dice_Score/${ATLAS}_Dice_Score_STEPS_${k}_${n}.csv"

# begin dice score calculation
for H in `ls $1/template/`
do
TEST_IMAGE=`echo "$H" | cut -d'.' -f1`
echo "******************************************"
echo "* Dice score OF ${TEST_IMAGE} step 2 (STEPS $k $n) *"
# echo "******************************************"
# Step 2: append one CSV row per template image: the image name followed by
# the per-label Dice scores computed in step 1.
echo -e "${TEST_IMAGE},\c" >> "Dice_Score/${ATLAS}_Dice_Score_STEPS_${k}_${n}.csv"
cat "Dice_Score/temp/${TEST_IMAGE}_${ATLAS}_label_STEPS_${k}_${n}.csv" >> "Dice_Score/${ATLAS}_Dice_Score_STEPS_${k}_${n}.csv"
echo -e "\n\c" >> "Dice_Score/${ATLAS}_Dice_Score_STEPS_${k}_${n}.csv"
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/dilate_batch.sh:
--------------------------------------------------------------------------------
# Batch mask dilation (SGE): dilate every mask in a folder by a fixed number
# of voxels, submitting one seg_maths job per mask.
#
# $1: folder containing the original mask
# $2: folder containing the dilated mask
# $3: pixel to dilate
QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=2.9G -l tmem=2.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"

# Make sure the job-log folders and the output folder exist.
[ -d job_output ] || mkdir job_output
[ -d job_error ] || mkdir job_error
[ -d $2 ] || mkdir -p $2

DILATE=$3

# Submit one dilation job per mask file found in the input folder.
for mask_file in `ls $1`
do
mask_name=`echo "$mask_file" | cut -d'.' -f1` # file name without extension
${QSUB_CMD} -N dil_$mask_file seg_maths $1/$mask_file -dil ${DILATE} $2/${mask_name}_d${DILATE}.nii.gz
done
23 |
--------------------------------------------------------------------------------
/depreciated/for_cluster/labfusion.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Structural Parcellation shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# BUGFIX: the shebang was on line 3 where it has no effect; it must be the first line.
# echo "Bash version ${BASH_VERSION}..."

# $1: enquiry image
# $2: atlas folder "in_vivo" or "ex_vivo"
# $3: if exist, read user defined parameters
echo "***************************************************"
echo "* CAUTION!! DO NOT use the same name as the atlas *"
echo "* if it is not for leave-one-out testing *"
echo "***************************************************"
echo "usage: parcellation.sh new_image corresponding_mask atlas_folder"

# setup default value for parameters
ROOT_DIR=$(pwd)
export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 1 -R y -l h_vmem=2G -l tmem=2G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # old flag: -l s_stack=128M
export QSUB_CMD_ONE_CORE="qsub -l h_rt=5:00:00 -pe smp 1 -R y -l h_vmem=2G -l tmem=2G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # old flag: -l s_stack=128M
export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 1 -R y -l h_vmem=9G -l tmem=9G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # -l s_stack=128M
PARCELLATION_NNR="-ln 4 -lp 4 -sx -3"
DILATE=3 # value to be dilated for the result mask
LABFUSION="-STEPS"
LABFUSION_OPTION="-v 1" # parameter options for STAPLE or STEPS in seg_LabFusion
MASK_AFF=""

# Set STEPS parameters (k: kernel size; n: number of top-ranked local atlases)
if [[ -z $k ]] && [[ -z $n ]]; then # if STEPS parameter is not set
	# set default STEPS parameter to: "3 8 "
	export k=3
	export n=8
fi
export STEPS_PARAMETER="${k} ${n} "

FULL_TEST_NAME=$(basename $1)
TEST_NAME=`echo "$FULL_TEST_NAME" | cut -d'.' -f1`
echo "Creating parcellation label for: "$TEST_NAME
ATLAS=$(basename $2)

# Read user-defined parameters
if [ ! -z $3 ]; then # check if there is a 3rd argument (NOT 4th: the parameter file is $3)
	if [ -f $3 ]; then # check if the file specified by the 3rd argument exists
		. $3 # if so, source it to override the defaults above
	fi
fi

# create the SGE log folders and per-atlas working folders if missing
if [ ! -d job_output ]; then mkdir job_output; fi
if [ ! -d job_error ]; then mkdir job_error; fi
if [ ! -d temp/${ATLAS} ]; then mkdir -p temp/${ATLAS}; fi
if [ ! -d mask/${ATLAS} ]; then mkdir -p mask/${ATLAS}; fi
if [ ! -d label/${ATLAS} ]; then mkdir -p label/${ATLAS}; fi
# echo "*********************************************"
# echo "* Segmentation pipeline for mouse brain MRI *"
# echo "* for ${TEST_NAME} *"
# echo "* using multi-atlas label fusion methods *"
# echo "* step 2 - structural parcellation *"
# echo "*********************************************"
# echo "usage: parcellation new_image mask atlas_type (in_vivo/ex_vivo)"

# start structural parcellation
echo "Creating label for: "$TEST_NAME
PARAMETER_NUMBER=0
jid="$$" # use the PID as a pseudo-random job ID
jid_reg="reg_${jid}"
TEST_NAME=`echo "$(basename $1)" | cut -d'.' -f1`
MASK=${MASK_FOLDER}/${TEST_NAME}${MASK_SUFFIX}
# initialise all three merge lists so stale values from the environment cannot leak in
MERGE_TEMPLATE=""
MERGE_MASK=""
MERGE_LABEL=""
for G in `ls $2/template/`
do
	NAME=`echo "$G" | cut -d'.' -f1`
	jname=${jid_reg}_${TEST_NAME}_${NAME}
	# Check testing image name is different from atlas template. If same, skip (for leave-one-out)
	# BUGFIX: the atlas folder is $2 — the original compared against ${3} (the optional
	# parameter file), so the self-exclusion test could never match
	if [[ ${2}/template/${NAME} != $1 ]] && [[ ${2}/template/${NAME}.nii != $1 ]] && [[ ${2}/template/${NAME}.nii.gz != $1 ]] && [[ ${2}/template/${NAME}.hdr != $1 ]]
	then
		# the first usable atlas seeds the 4D merge; the rest are appended to the lists
		if (( $PARAMETER_NUMBER==0 )); then
			FIRST_TEMPLATE="temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
			FIRST_MASK="mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz"
			FIRST_LABEL="label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
		else
			MERGE_TEMPLATE="${MERGE_TEMPLATE} temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
			MERGE_MASK="${MERGE_MASK} mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz"
			MERGE_LABEL="${MERGE_LABEL} label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
		fi
		let PARAMETER_NUMBER+=1
	else
		# BUGFIX: report the atlas name that was skipped, not the test image name
		echo -e "Atlas image name ${NAME} is same as test image, skipped"
	fi
done
# seg_maths -merge expects the count of images being merged onto the first one
let PARAMETER_NUMBER-=1
91 |
# Prepare 4D images for label fusion
jid_4d="merge4d_${TEST_NAME}"

# create average rough mask to reduce memory usage for label fusion
if [ ! -f mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg_bin.nii.gz ]; then
	jid_4d_nrr_mask_avg="${jid_4d}_nrr_mask_avg"
	${QSUB_CMD} -hold_jid ${jid_reg}_* -N ${jid_4d_nrr_mask_avg} \
	reg_average mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg.nii.gz -avg $FIRST_MASK $MERGE_MASK
	jid_4d_nrr_mask_avg_bin="${jid_4d}_nrr_mask_avg_bin"
	# BUGFIX: "\ seg_maths" on one line was an escaped space, not a continuation, and the
	# hold pattern "${jid_4d_nrr_mask_avg}_*" could never match the job named exactly
	# ${jid_4d_nrr_mask_avg}; hold on the exact name and continue the line properly
	${QSUB_CMD} -hold_jid ${jid_4d_nrr_mask_avg} -N ${jid_4d_nrr_mask_avg_bin} \
	seg_maths mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg.nii.gz -bin -dil ${DILATE} mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg_bin.nii.gz
fi

MASK="mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg_bin.nii.gz"
105 |
# merge the masks into a single 4D volume, unless that file is already present
if [ -f mask/${ATLAS}/${TEST_NAME}_nrr_mask_4D.nii.gz ]; then
	jid_4d_nrr_mask="${jid_4d}_skip"
	${QSUB_CMD} -N ${jid_4d_nrr_mask} echo "4D mask already exist, skip merging again"
else
	jid_4d_nrr_mask="${jid_4d}_nrr_mask"
	${QSUB_SEG_MATH} -N ${jid_4d_nrr_mask} seg_maths $FIRST_MASK -v -merge $PARAMETER_NUMBER 4 $MERGE_MASK mask/${ATLAS}/${TEST_NAME}_nrr_mask_4D.nii.gz
fi
# merge the labels into a single 4D volume, unless that file is already present
jid_4d_label="${jid_4d}_label"
if [ -f label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz ]; then
	${QSUB_CMD} -N ${jid_4d_label} echo "4D label already exist, skip merging again"
else
	${QSUB_SEG_MATH} -N ${jid_4d_label} seg_maths $FIRST_LABEL -v -merge $PARAMETER_NUMBER 4 $MERGE_LABEL label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz
fi
124 |
# Start label fusion
export jid_LabFusion="LabFusion_${TEST_NAME}"
# Dispatch on the requested fusion method; anything unrecognised falls back to majority vote
case ${LABFUSION} in
"-STEPS")
	# STEPS also needs the 4D template stack; merge it first unless already present
	if [ -f label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz ]; then
		jid_4d_tempate="${jid_4d}_template"
		${QSUB_CMD} -N ${jid_4d_tempate} echo "4D template already exist, skip merging again"
	else
		jid_4d_tempate="${jid_4d}_template"
		${QSUB_SEG_MATH} -N ${jid_4d_tempate} seg_maths $FIRST_TEMPLATE -v -merge $PARAMETER_NUMBER 4 $MERGE_TEMPLATE label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz
	fi
	# create final label using STEPS label fusion, held on all merge jobs
	${QSUB_SEG_MATH} -hold_jid ${jid_4d}_* -N ${jid_LabFusion} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -mask ${MASK} -STEPS ${k} ${n} $1 label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz ${LABFUSION_OPTION} -out label/${TEST_NAME}_label_${ATLAS}_STEPS_${k}_${n}.nii.gz
	# jid_NRR_mask="NRR_mask_${TEST_NAME}"
	# ${QSUB_CMD} -hold_jid ${jid_4d}_* -N ${jid_NRR_mask} seg_LabFusion -in mask/${ATLAS}/${TEST_NAME}_nrr_mask_4D.nii.gz -mask ${MASK} -STEPS ${k} ${n} $1 label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz ${LABFUSION_OPTION} -out mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}.nii.gz
	# jid_NRR_mask_dilate="dil_NRR_mask_${TEST_NAME}"
	# ${QSUB_CMD} -hold_jid ${jid_NRR_mask} -N ${jid_NRR_mask_dilate} seg_maths mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}.nii.gz -dil ${DILATE} mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}_d${DILATE}.nii.gz
	;;
"-STAPLE")
	${QSUB_SEG_MATH} -hold_jid ${jid_4d}_* -N ${jid_LabFusion} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -mask ${MASK} -STAPLE ${LABFUSION_OPTION} -out label/${TEST_NAME}_label_${ATLAS}_STAPLE.nii.gz
	;;
"-SBA")
	${QSUB_SEG_MATH} -hold_jid ${jid_4d}_* -N ${jid_LabFusion} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -mask ${MASK} -SBA ${LABFUSION_OPTION} -out label/${TEST_NAME}_label_${ATLAS}_SBA.nii.gz
	;;
*)
	# default: majority vote ("-MV")
	${QSUB_SEG_MATH} -hold_jid ${jid_4d}_* -N ${jid_LabFusion} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -mask ${MASK} -MV ${LABFUSION_OPTION} -out label/${TEST_NAME}_label_${ATLAS}_MV.nii.gz
	;;
esac
--------------------------------------------------------------------------------
/depreciated/for_cluster/labfusion_STAPLE.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Structural Parcellation shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.09.11
# BUGFIX: the shebang was on line 4 where it has no effect; it must be the first line.
# echo "Bash version ${BASH_VERSION}..."

# $1: enquiry image
# $2: atlas folder "in_vivo" or "ex_vivo"
# $3: if exist, read user defined parameters

# echo "***************************************************"
# echo "* CAUTION!! DO NOT use the same name as the atlas *"
# echo "* if it is not for leave-one-out testing *"
# echo "***************************************************"
# echo "usage: labfusion_STAPLE.sh test_image atlas_folder"

# setup default value for parameters
ROOT_DIR=$(pwd)
QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=1.5G -l vf=1.5G -l s_stack=10240 -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -l h_vmem=10G -l vf=10G -l s_stack=256M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
DILATE=3 # value to be dilated for the result mask
LABFUSION="-STAPLE" # may be overridden by the user specified parameter file
if [ -z $STEPS_PARAMETER ]; then # if STEPS parameter has not been defined (e.g. not called by fine_tune.sh)
	export STEPS_PARAMETER="4 6" # set up the optimized value for STEPS_PARAMETER
fi
# Read user-defined parameters
if [ ! -z $3 ]; then # check if there is a 3rd argument
	if [ -f $3 ]; then # check if the file specified by the 3rd argument exists
		. $3 # if so, source it to override the defaults above
	fi
fi

FULL_TEST_NAME=$(basename $1)
TEST_NAME=`echo "$FULL_TEST_NAME" | cut -d'.' -f1`
echo "Creating parcellation label for: "$TEST_NAME
ATLAS=$(basename $2)

# create the SGE log folders if not already exist
if [ ! -d job_output ]
then mkdir job_output
fi
if [ ! -d job_error ]
then mkdir job_error
fi

jid=STAPLE_"$$" # use the PID as a pseudo-random job ID

# Start label fusion
export jid_LabFusion="${jid}_LabFusion"
# Determine which label fusion method to use
# BUGFIX: "[[ ${LABFUSION}=="-STAPLE" ]]" had no spaces around "==", so the whole
# expression was one non-empty word and the test was always true
if [[ ${LABFUSION} == "-STAPLE" ]]; then
	${QSUB_SEG_MATH} -N ${jid_LabFusion} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -STAPLE -out "\"label/${TEST_NAME}_${ATLAS}_label_STAPLE.nii.gz\""
fi
--------------------------------------------------------------------------------
/depreciated/for_cluster/labfusion_STEPS.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Structural Parcellation shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.09.11
# BUGFIX: the shebang was on line 4 where it has no effect; it must be the first line.
# echo "Bash version ${BASH_VERSION}..."

# $1: enquiry image
# $2: atlas folder "in_vivo" or "ex_vivo"
# $3: STEPS parameter k (kernel size in terms of voxel number)
# $4: STEPS parameter n (number of top ranked local atlas to select for label fusion)
# $5: file that contains other LabFusion parameters

# echo "***************************************************"
# echo "* CAUTION!! DO NOT use the same name as the atlas *"
# echo "* if it is not for leave-one-out testing *"
# echo "***************************************************"
# echo "usage: labfusion_STEPS.sh test_image atlas_folder"

# setup default value for parameters
ROOT_DIR=$(pwd)
QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=1.5G -l vf=1.5G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -l h_vmem=8G -l vf=8G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
DILATE=3 # value to be dilated for the result mask

# Set STEPS parameters
if [[ ! -z $3 ]] && [[ ! -z $4 ]]; then # if both STEPS parameters were given on the command line
	export k=$3
	export n=$4
else # otherwise fall back to the defaults
	export k=5
	export n=8
fi
export STEPS_PARAMETER="${k} ${n} $5"

FULL_TEST_NAME=$(basename $1)
TEST_NAME=`echo "$FULL_TEST_NAME" | cut -d'.' -f1`
echo "Creating parcellation label for: "$TEST_NAME
ATLAS=$(basename $2)

# create the SGE log folders if not already exist
if [ ! -d job_output ]
then mkdir job_output
fi
if [ ! -d job_error ]
then mkdir job_error
fi

jid=STEPS_"$$" # use the PID as a pseudo-random job ID

# Start label fusion
export jid_LabFusion="${jid}_LabFusion"
# BUGFIX: "-STEPS" requires its parameters (k, n, the target image, and the 4D template);
# the original call passed none of them and wrote the result to a *_STAPLE.nii.gz name
${QSUB_SEG_MATH} -N ${jid_LabFusion} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -STEPS ${k} ${n} $1 label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz -out "\"label/${TEST_NAME}_${ATLAS}_label_STEPS_${k}_${n}.nii.gz\""
--------------------------------------------------------------------------------
/depreciated/for_cluster/labfusion_batch.sh:
--------------------------------------------------------------------------------
# Brain extraction shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29
# Runs STEPS label fusion over a folder of images whose registrations are already done.

# usage: ./labfusion_batch.sh atlas $1 $2 $3 $4
# $1: folder include all the images for label fusion
# $2: atlas (in_vivo ex_vivo)
# $3: STEPS parameter k (kernel size in terms of voxel number)
# $4: STEPS parameter n (number of top ranked local atlas to select for label fusion)
# $5: file that contains other LabFusion parameters
ROOT_DIR=$(pwd)
# echo "Bash version ${BASH_VERSION}"
ATLAS=$(basename $2)
export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 4 -R y -l h_vmem=2.5G -l tmem=2.5G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"

# Use the supplied STEPS parameters, falling back to k=5 n=8 when either is absent
if [[ -z $3 ]] || [[ -z $4 ]]; then
	export k=5
	export n=8
else
	export k=$3
	export n=$4
fi

# Source the optional parameter file if the 5th argument names an existing file
if [[ ! -z $5 ]]; then
	if [ -f $5 ]; then
		. $5
	fi
fi
export STEPS_PARAMETER="${k} ${n} "

echo "***********************************************"
echo "* batch STEPS label fusion (STEPS) ${k} ${n} *"
echo "***********************************************"
# submit one label fusion job per image in the input folder
for IMAGE in $(ls $1)
do
	TEST_NAME=$(echo "$IMAGE" | cut -d'.' -f1)
	jid_LabFusion=labfusion_"$$"
	${QSUB_CMD} -N ${jid_LabFusion} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -STEPS ${STEPS_PARAMETER} $1/$IMAGE label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz -out label/${TEST_NAME}_${ATLAS}_label_STEPS_${k}_${n}.nii.gz
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/labfusion_batch_STAPLE.sh:
--------------------------------------------------------------------------------
# Brain extraction shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29
# Runs STAPLE label fusion over a folder of images whose registrations are already done.

# usage: ./labfusion_batch.sh atlas $1 $2 $3 $4
# $1: folder include all the images for label fusion
# $2: atlas (in_vivo ex_vivo)
ROOT_DIR=$(pwd)
# echo "Bash version ${BASH_VERSION}"
ATLAS=$(basename $2)
export QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=14.9G -l vf=14.9G -l s_stack=512M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"

echo "***********************************************"
echo "* batch STEPS label fusion (STAPLE) *"
echo "***********************************************"
# submit one STAPLE fusion job per image in the input folder
for IMAGE in $(ls $1)
do
	TEST_NAME=$(echo "$IMAGE" | cut -d'.' -f1)
	jid_LabFusion=labfusion_"$$"
	${QSUB_CMD} -N ${jid_LabFusion} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -STAPLE -out label/${TEST_NAME}_${ATLAS}_label_STAPLE.nii.gz
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/leave_one_out_STAPLE.sh:
--------------------------------------------------------------------------------
# Brain extraction shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.09.12

# usage: ./leave_one_out_STAPLE.sh atlas (in_vivo ex_vivo)
# $1: atlas (in_vivo ex_vivo)
ROOT_DIR=$(pwd)
echo "Bash version ${BASH_VERSION}..."
ATLAS=$(basename $1)
export QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=9.9G -l tmem=9.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"

# export STEPS_PARAMETER="4 6"

# make sure the Dice score output folders exist
[ -d $1"/Dice_Score" ] || mkdir $1"/Dice_Score"
[ -d $1"/Dice_Score/temp" ] || mkdir $1"/Dice_Score/temp"

jid=$$
# for each template: submit a STAPLE fusion job, then a Dice-score job held on it
for TEMPLATE in $(ls $1/template/)
do
	TEST_NAME=$(echo "$TEMPLATE" | cut -d'.' -f1)
	echo "***********************************************************************"
	echo "* Segmentation performance evaluation step 3 - leave one out (STAPLE) *"
	echo "***********************************************************************"
	# Calculate Dice Score for template image
	j_staple=STAPLE_${jid}
	${QSUB_CMD} -N ${j_staple} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -STAPLE -out "\"label/${TEST_NAME}_${ATLAS}_label_STAPLE.nii.gz\""
	jstat=stat_${jid}
	${QSUB_CMD} -hold_jid ${j_staple} -N ${jstat} seg_stats $1/label/$TEMPLATE -D "\"label/${TEST_NAME}_${ATLAS}_label_STAPLE.nii.gz\"" "\"$1/Dice_Score/temp/${TEST_NAME}_${ATLAS}_label_STAPLE.csv\""
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/leave_one_out_dice.sh:
--------------------------------------------------------------------------------
# Brain extraction shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29

# usage: ./leave_one_out_dice.sh $1 $2 $3
# $1: atlas (in_vivo ex_vivo)
# $2: STEPS parameter k (kernel size in terms of voxel number)
# $3: STEPS parameter n (number of top ranked local atlas to select for label fusion)
ROOT_DIR=$(pwd)
# echo "Bash version ${BASH_VERSION}..."
ATLAS=$(basename $1)
LABEL_NUMBER=80

# Use the supplied STEPS parameters, falling back to k=4 n=6 when either is absent
if [[ -z $2 ]] || [[ -z $3 ]]; then
	export k=4
	export n=6
else
	export k=$2
	export n=$3
fi
export STEPS_PARAMETER="${k} ${n} "

CSV="$1/Dice_Score/Dice_Score_STEPS_${k}_${n}.csv"

# write the CSV header row: the parameter pair, then one column index per label
printf 'k=%s+n=%s' "${k}" "${n}" >> "$CSV"
for ((m = 1; m <= LABEL_NUMBER; m++))
do
	printf ',%s' "$m" >> "$CSV"
done
printf '\n' >> "$CSV"

# append one row of Dice scores per template image
for H in $(ls $1/template/)
do
	TEST_NAME=$(echo "$H" | cut -d'.' -f1)
	echo "******************************************"
	echo "* leave one out (STEPS $k $n) Dice score *"
	# echo "******************************************"
	printf '%s,' "$TEST_NAME" >> "$CSV"
	cat "$1/Dice_Score/temp/${TEST_NAME}_STEPS_${k}_${n}.csv" >> "$CSV"
	printf '\n' >> "$CSV"
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/leave_one_out_dice_STAPLE.sh:
--------------------------------------------------------------------------------
# Brain extraction shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29

# usage: ./leave_one_out_dice_STAPLE.sh $1
# $1: atlas (in_vivo ex_vivo)

ROOT_DIR=$(pwd)
# echo "Bash version ${BASH_VERSION}..."
ATLAS=$(basename $1)
LABEL_NUMBER=80

CSV="$1/Dice_Score/Dice_Score_STAPLE.csv"

# write the CSV header row: the method name, then one column index per label
printf 'STAPLE' >> "$CSV"
for ((m = 1; m <= LABEL_NUMBER; m++))
do
	printf ',%s' "$m" >> "$CSV"
done
printf '\n' >> "$CSV"

# append one row of Dice scores per template image
for H in $(ls $1/template/)
do
	TEST_NAME=$(echo "$H" | cut -d'.' -f1)
	echo "******************************************"
	echo "* leave one out (STAPLE) Dice score *"
	# echo "******************************************"
	# seg_stats $1/label/$H -D "\"label/${TEST_NAME}_${ATLAS}_label_STAPLE.nii.gz\"" "\"$1/Dice_Score/temp/${TEST_NAME}_${ATLAS}_label_STAPLE.csv\""
	printf '%s,' "$TEST_NAME" >> "$CSV"
	cat "$1/Dice_Score/temp/${TEST_NAME}_${ATLAS}_label_STAPLE.csv" >> "$CSV"
	printf '\n' >> "$CSV"
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/leave_one_out_labfusion.sh:
--------------------------------------------------------------------------------
# Brain extraction shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29
# usage: ./leave_one_out_labfusion.sh atlas $1 $2 $3
# $1: atlas (in_vivo ex_vivo)
# $2: STEPS parameter k (kernel size in terms of voxel number)
# $3: STEPS parameter n (number of top ranked local atlas to select for label fusion)
ROOT_DIR=$(pwd)
# echo "Bash version ${BASH_VERSION}"
ATLAS=$(basename $1)
export QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=9.9G -l tmem=9.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"

# Use the supplied STEPS parameters, falling back to k=4 n=6 when either is absent
if [[ -z $2 ]] || [[ -z $3 ]]; then
	export k=4
	export n=6
else
	export k=$2
	export n=$3
fi
export STEPS_PARAMETER="${k} ${n} "

# make sure the Dice score output folders exist
[ -d $1"/Dice_Score" ] || mkdir $1"/Dice_Score"
[ -d $1"/Dice_Score/temp" ] || mkdir $1"/Dice_Score/temp"

# make sure the mask folders exist
if [ ! -d $1/mask ]
then
	echo "create mask for every template image if not done yet"
	mkdir $1/mask
fi
if [ ! -d $1/mask_dilate ]
then
	echo "create dilated mask for every template image if not done yet"
	mkdir $1/mask_dilate
fi

# build a (dilated) mask for any template that does not have one yet
for G in $(ls $1/template/)
do
	if [ ! -f $1/mask_dilate/$G ] && [ ! -f $1/mask_dilate/$G".nii" ] && [ ! -f $1/mask_dilate/$G".nii.gz" ] && [ ! -f $1/mask_dilate/$G".hdr" ]
	then
		if [ ! -f $1/mask/$G ] && [ ! -f $1/mask/$G".nii" ] && [ ! -f $1/mask/$G".nii.gz" ] && [ ! -f $1/mask/$G".hdr" ]
		then reg_tools -in $1/label/$G -bin -out $1/mask/$G
		fi
		seg_maths $1/mask/$G -dil 3 $1/mask_dilate/$G
	fi
done

echo "***********************************************"
echo "* leave one out cross validation (STEPS) ${k} ${n}*"
echo "***********************************************"

# for each template: submit a STEPS fusion job, then a Dice-score job held on it
for H in $(ls $1/template/)
do
	TEST_NAME=$(echo "$H" | cut -d'.' -f1)
	jid_LabFusion=labfusion_"$$"
	${QSUB_CMD} -N ${jid_LabFusion} seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -STEPS ${STEPS_PARAMETER} $1/template/$H label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz -out "\"label/${TEST_NAME}_${ATLAS}_label_STEPS_${k}_${n}.nii.gz\""
	jid=leave_one_out_"$$"
	${QSUB_CMD} -hold_jid ${jid_LabFusion} -N ${jid} seg_stats $1/label/$H -D "\"label/${TEST_NAME}_${ATLAS}_label_STEPS_${k}_${n}.nii.gz\"" "\"$1/Dice_Score/temp/${TEST_NAME}_STEPS_${k}_${n}.csv\""
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/leave_one_out_parcellate.sh:
--------------------------------------------------------------------------------
# Brain extraction shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.08.29
# usage: ./leave_one_out_parcellation.sh $1 $2 $3
# $1: atlas (in_vivo ex_vivo)
# $2: STEPS parameter k (kernel size in terms of voxel number)
# $3: STEPS parameter n (number of top ranked local atlas to select for label fusion)
ROOT_DIR=$(pwd)
# echo "Bash version ${BASH_VERSION}..."
ATLAS=$(basename $1)
export QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=1.5G -l vf=1.5G -l s_stack=10240 -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"

# Use the supplied STEPS parameters, falling back to k=4 n=6 when either is absent
if [[ -z $2 ]] || [[ -z $3 ]]; then
	export k=4
	export n=6
else
	export k=$2
	export n=$3
fi
export STEPS_PARAMETER="${k} ${n} "

# make sure the Dice score output folders exist
[ -d $1"/Dice_Score" ] || mkdir $1"/Dice_Score"
[ -d $1"/Dice_Score/temp" ] || mkdir $1"/Dice_Score/temp"

# make sure the mask folders exist
if [ ! -d $1/mask ]
then
	echo "create mask for every template image if not done yet"
	mkdir $1/mask
fi
if [ ! -d $1/mask_dilate ]
then
	echo "create dilated mask for every template image if not done yet"
	mkdir $1/mask_dilate
fi
# build a (dilated) mask for any template that does not have one yet
for G in $(ls $1/template/)
do
	if [ ! -f $1/mask_dilate/$G ] && [ ! -f $1/mask_dilate/$G".nii" ] && [ ! -f $1/mask_dilate/$G".nii.gz" ] && [ ! -f $1/mask_dilate/$G".hdr" ]
	then
		if [ ! -f $1/mask/$G ] && [ ! -f $1/mask/$G".nii" ] && [ ! -f $1/mask/$G".nii.gz" ] && [ ! -f $1/mask/$G".hdr" ]
		then reg_tools -in $1/label/$G -bin -out $1/mask/$G
		fi
		seg_maths $1/mask/$G -dil 3 $1/mask_dilate/$G
	fi
done

# parcellate each template with itself left out, then submit a held Dice-score job
for H in $(ls $1/template/)
do
	TEST_NAME=$(echo "$H" | cut -d'.' -f1)
	. parcellation.sh $1/template/$H $1/mask_dilate/$H $1
	echo "***************************************"
	echo "* Segmentation performance evaluation *"
	echo "* step 3 - leave one out (STEPS) *"
	echo "***************************************"
	# Dice score aggregation now lives in a separate script: "leave_one_out_dice.sh"
	jid=leave_one_out_"$$"
	${QSUB_CMD} -hold_jid ${jid_LabFusion} -N ${jid} seg_stats $1/label/$H -D "\"label/${TEST_NAME}_${ATLAS}_label_STEPS_${k}_${n}.nii.gz\"" "\"$1/Dice_Score/temp/${TEST_NAME}_${ATLAS}_label_STEPS_${k}_${n}.csv\""
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/mask.sh:
--------------------------------------------------------------------------------
1 | # Brain extraction shell script (SGE)
2 | # Author: Ma Da (d.ma.11@ucl.ac.uk)
3 | # Version 0.7_2013.04.15 (add non-rigid registration for accurate brain extraction)
4 | # echo "Bash version ${BASH_VERSION}..."
5 | #!/bin/bash
6 |
7 |
8 | if [ $# -lt 2 ]
9 | then
10 | echo ""
11 | echo "*********************************************"
12 | echo "* Segmentation pipeline for mouse brain MRI *"
13 | echo "* using multi-atlas label fusion methods *"
14 | echo "* step 1 - brain extraction *"
15 | echo "*********************************************"
16 | echo "usage: mask 'new_image' 'atlas_type(in_vivo/ex_vivo)' 'parameter_file (optional)'"
17 | echo ""
18 | exit
19 | fi
20 |
21 | # $1: enquiry image
22 | # $2: atlas folder "in_vivo" or "ex_vivo"
23 | # $3: if exist, read the file to load user defined parameters (see file under sample_parameters for examples)
24 |
25 | # echo "*********************************************"
26 | # echo "* Segmentation pipeline for mouse brain MRI *"
27 | # echo "* for ${TEST_NAME} *"
28 | # echo "* using multi-atlas label fusion methods *"
29 | # echo "* step 1 - brain extraction *"
30 | # echo "*********************************************"
31 | # echo "usage: mask.sh new_image atlas_folder"
32 |
# Setup default value for parameters
ROOT_DIR=$(pwd)
# QSUB_CMD="qsub -l h_rt=2:00:00 -pe smp 4 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # -l s_stack=128M
# QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 4 -R y -l h_vmem=2G -l tmem=2G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # -l s_stack=128M

# BUGFIX: "-pe" requires a parallel-environment name and slot count ("-pe smp 4");
# the bare "-pe -l ..." made qsub reject every submission. Restored "smp 4 -R y"
# to match the commented originals above and the "-omp 4" used in MASK_AFF below.
QSUB_CMD="qsub -l h_rt=2:00:00 -pe smp 4 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # -l s_stack=128M
QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 4 -R y -l h_vmem=2G -l tmem=2G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # -l s_stack=128M

DILATE=2 # value to be dilated for the result mask
INITIAL_AFFINE="initial_affine.txt"
MASK_AFF="-ln 4 -lp 4 -omp 4 -speeeeed"
LABFUSION_OPTION="-v 1"
45 | # Read user defined parameters # need to add a line to check if $3 exist ...
46 | if [ ! -z $3 ]; then # check if there is a 3rd argument
47 | if [ -f $3 ]; then # check if the file specified by 3rd argument exist
48 | . $3 # if file of 4th argument exist, read the parameters from the file
49 | fi
50 | fi
51 | FULL_TEST_NAME=$(basename $1) # basename: truncate path name from the string
52 | TEST_NAME=`echo "$FULL_TEST_NAME" | cut -d'.' -f1`
53 | ATLAS=$(basename $2)
54 |
55 | jid="mask_$$" # generate a random number as job ID "$$"
56 | jid_folder="${jid}_folder" # creating various folders if not exist
57 | if [ ! -f $1 ] && [ ! -f $1".nii" ] && [ ! -f $1".nii.gz" ] && [ ! -f $1".hdr" ]
58 | then echo "test image not exist"
59 | fi
60 | if [ ! -d temp ]
61 | then mkdir temp
62 | fi
63 | if [ ! -d temp/${ATLAS} ]
64 | then mkdir temp/${ATLAS}
65 | fi
66 | if [ ! -d mask ]
67 | then
68 | mkdir mask
69 | fi
70 | if [ ! -d mask/${ATLAS} ]
71 | then mkdir mask/${ATLAS}
72 | fi
73 | # create mask for every template image if not done yet
74 | if [ ! -d $2/mask ]
75 | then mkdir $2/mask
76 | fi
77 | # create dilated mask for every template image if not done yet
78 | if [ ! -d $2/mask_dilate ]
79 | then mkdir $2/mask_dilate
80 | fi
81 | if [ ! -d job_output ]
82 | then mkdir job_output
83 | fi
84 | if [ ! -d job_error ]
85 | then mkdir job_error
86 | fi
87 |
88 | # Mask back propagation
89 | echo "Creating mask for: ${TEST_NAME}"
90 | PARAMETER_NUMBER=0
91 | jid_reg="${jid}_reg"
92 | for G in `ls $2/template/`
93 | do
94 | jname=${jid_reg}_${G}
95 | NAME=`echo "$G" | cut -d'.' -f1`
96 | # Check testing image name is different from atlas template. If same, skip (for leave-one-out)
97 | if [[ $2/template/$NAME != $1 ]] && [[ $2/template/$NAME.nii != $1 ]] && [[ $2/template/$NAME.nii.gz != $1 ]] && [[ $2/template/$NAME.hdr != $1 ]]
98 | then
99 | if [ ! -f $2/mask/$G ] && [ ! -f $2/mask/$G".nii" ] && [ ! -f $2/mask/$G".nii.gz" ] && [ ! -f $2/mask/$G".hdr" ] # if no mask for atlas, create from labels
100 | then
101 | jbinary="${jname}_mask_binary"
102 | ${QSUB_CMD} -N ${jbinary} reg_tools -in $2/label/$G -bin -out $2/mask/$G
103 | fi
104 | if [ ! -f $2/mask_dilate/$G ] # if no dilated mask for atlas, create one
105 | then
106 | jdilate="${jname}_mask_dilate"
107 | ${QSUB_CMD} -N ${jdilate} seg_maths $2/mask/$G -dil $DILATE $2/mask_dilate/$G
108 | fi
109 | 	# if mask & dilated mask exist, ready to proceed to the affine registration step
110 | j_mask_ready="${jname}_mask_ready"
111 | ${QSUB_CMD} -N ${j_mask_ready} echo "get binary mask and dilated mask ready before registration"
112 |
113 | job_aladin="${jname}_aladin" # start create affine registration matrix
114 | if [ ! -f ${INITIAL_AFFINE} ] # if no initial affine matrix file
115 | then
116 | ${QSUB_CMD} -hold_jid $"${jname}_mask_*" -N ${job_aladin} reg_aladin \
117 | -flo $1 \
118 | -ref $2/template/$G \
119 | -rmask $2/mask_dilate/$G \
120 | -aff temp/${ATLAS}/${TEST_NAME}_${NAME}_aff \
121 | -res temp/${ATLAS}/${TEST_NAME}_${NAME}_aff.nii.gz \
122 | ${MASK_AFF}
123 | else # if initial affine matrix file exist, use it
124 | ${QSUB_CMD} -hold_jid $"${jname}_mask_*" -N ${job_aladin} reg_aladin -flo $1 -ref $2/template/$G -rmask $2/mask_dilate/$G -inaff ${INITIAL_AFFINE} -aff temp/${ATLAS}/${TEST_NAME}_${NAME}_aff -res temp/${ATLAS}/${TEST_NAME}_${NAME}_aff.nii.gz ${MASK_AFF}
125 | fi
126 | job_transform="${jname}_transform"
127 | ${QSUB_CMD} -hold_jid ${job_aladin} -N ${job_transform} reg_transform -ref $2/template/$G -invAff temp/${ATLAS}/${TEST_NAME}_${NAME}_aff temp/${ATLAS}/${NAME}_${TEST_NAME}_aff
128 | # generate mask from affine registration
129 | job_resample="${jname}_resample"
130 | ${QSUB_CMD} -hold_jid ${job_transform} -N ${job_resample} reg_resample -flo $2/mask/$G -ref $1 -aff temp/${ATLAS}/${NAME}_${TEST_NAME}_aff -NN -res mask/${ATLAS}/$TEST_NAME"_mask_"$G
131 |
132 |
133 | if (( $PARAMETER_NUMBER==0 ))
134 | then FIRST_PARAMETER=mask/${ATLAS}/$TEST_NAME"_mask_"$G
135 | else
136 | MERGE_PARAMETERS=$MERGE_PARAMETERS" "mask/${ATLAS}/$TEST_NAME"_mask_"$G
137 | fi
138 | let PARAMETER_NUMBER+=1
139 | fi
140 | done
141 | let PARAMETER_NUMBER-=1
142 |
143 | # Label Fusion
144 | jname_avg_mask="${jid}_mask_avg"
145 | ${QSUB_CMD} -hold_jid ${jid_reg}_* -N ${jname_avg_mask} reg_average mask/${ATLAS}/${TEST_NAME}_mask_avg.nii.gz -avg $FIRST_PARAMETER $MERGE_PARAMETERS
146 | jname_bin_avg_mask="${jid}_mask_avg_bin_dil"
147 | ${QSUB_CMD} -hold_jid ${jname_avg_mask} -N ${jname_bin_avg_mask} seg_maths mask/${ATLAS}/${TEST_NAME}_mask_avg.nii.gz -bin -dil ${DILATE} mask/${ATLAS}/${TEST_NAME}_mask_avg_bin_dil.nii.gz
148 | jname_merge_mask="${jid}_mask_merge"
149 | ${QSUB_SEG_MATH} -hold_jid ${jid_reg}_* -N ${jname_merge_mask} seg_maths $FIRST_PARAMETER -merge $PARAMETER_NUMBER 4 $MERGE_PARAMETERS mask/${ATLAS}/${TEST_NAME}_mask_4D.nii.gz
150 | jname_seg_LabFusion="${jid}_seg_LabFusion"
151 | ${QSUB_SEG_MATH} -hold_jid ${jid}_mask_* -N ${jname_seg_LabFusion} seg_LabFusion -in mask/${ATLAS}/${TEST_NAME}_mask_4D -STAPLE ${LABFUSION_OPTION} -mask mask/${ATLAS}/${TEST_NAME}_mask_avg_bin_dil.nii.gz -out mask/${TEST_NAME}_mask_${ATLAS}_STAPLE.nii.gz
152 | export jname_dilate="${jid}_dilate"
153 | ${QSUB_CMD} -hold_jid ${jname_seg_LabFusion} -N ${jname_dilate} seg_maths mask/${TEST_NAME}_mask_${ATLAS}_STAPLE.nii.gz -dil ${DILATE} mask/${TEST_NAME}_mask_${ATLAS}_STAPLE_d${DILATE}.nii.gz
154 | echo "creating mask at: mask/${TEST_NAME}_mask_${ATLAS}_STAPLE_d${DILATE}.nii.gz"
155 |
156 | # rm mask/temp/*.*
157 | # rm temp/*.*
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
--------------------------------------------------------------------------------
/depreciated/for_cluster/mask_batch.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Structural Parcellation shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.8_2013.10.19
#
# Batch driver: run mask.sh for every image found in a folder.
#   $1: folder containing all the images to be masked
#   $2: atlas folder
#   $3: optional parameter file, forwarded verbatim to mask.sh
#
# NOTE(review): the shebang now sits on the first line; it previously appeared
# twice mid-file (after comments), where it had no effect on the interpreter.

DILATE=4             # kept for compatibility; mask.sh applies its own value/defaults
ATLAS=$(basename $2) # atlas name, kept for parity with the sibling batch scripts

for G in `ls $1`
do
	# The per-image TEST_NAME/NAME variables the old version computed here
	# were never used, so they have been removed.
	mask.sh $1/$G $2 $3
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/parcellation.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Structural Parcellation shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
# Version 0.9_2013.10.21
# NOTE(review): the shebang now sits on the first line; it previously appeared
# after the header comments, where it had no effect.
# echo "Bash version ${BASH_VERSION}..."

# $1: enquiry image
# $2: mask for enquiry image. if no mask just type "no_mask"
# $3: atlas folder "in_vivo" or "ex_vivo"
# $4: if exist, read the file containing user defined parameters
if [ $# -lt 3 ]
then
	echo "********************************************************************"
	echo "* CAUTION!! DO NOT use the same subject name as the atlas template *"
	echo "* if it is not for leave-one-out testing *"
	echo "********************************************************************"
	echo "usage: parcellation.sh new_image corresponding_mask atlas_folder"
	exit 1 # BUGFIX: exit non-zero so batch callers can detect the usage error
fi

# setup default value for parameters
ROOT_DIR=$(pwd)
# Old SGE submission commands, kept for reference:
# export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 4 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # old flag: -l s_stack=128M
# export QSUB_CMD_ONE_CORE="qsub -l h_rt=5:00:00 -pe smp 1 -R y -l h_vmem=2G -l tmem=2G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # old flag: -l s_stack=128M
# export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 4 -R y -l h_vmem=2G -l tmem=2G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # -l s_stack=128M

# PBS-style submission commands currently in use.
export QSUB_CMD="qsub -V -I -l walltime=5:00:00,pmem=1G -W group_list=ensc-image -o job_output -e job_error" # old flag: -l s_stack=128M
export QSUB_CMD_ONE_CORE="qsub -V -I -l walltime=5:00:00,pmem=1G -W group_list=ensc-image -o job_output -e job_error" # old flag: -l s_stack=128M
export QSUB_SEG_MATH="qsub -V -I -l walltime=5:00:00,pmem=1G -W group_list=ensc-image -o job_output -e job_error" # -l s_stack=128M

PARCELLATION_NNR="-ln 4 -lp 4 -sx -3" # reg_f3d non-rigid registration options
DILATE=1 # value to be dilated for the result mask
LABFUSION="-STEPS"
LABFUSION_OPTION="-v 1" # parameter options for STAPLE or STEPS in seg_LabFusion
MASK_AFF=""

# Set STEPS parameters: honour $k/$n inherited from the environment,
# otherwise fall back to the default "3 8".
if [[ -z $k ]] && [[ -z $n ]]; then # if STEPS parameter is not set
	export k=3
	export n=8
fi
export STEPS_PARAMETER="${k} ${n} "

FULL_TEST_NAME=$(basename $1)
TEST_NAME=`echo "$FULL_TEST_NAME" | cut -d'.' -f1`
echo "Creating parcellation label for: "$TEST_NAME
export ATLAS=$(basename $3)
MASK=$2

# Read user-defined parameters: source the 4th argument when it names an
# existing file, allowing any of the defaults above to be overridden.
if [ ! -z $4 ]; then
	if [ -f $4 ]; then
		. $4
	fi
fi
57 |
# Ensure atlas-side mask folders exist, then build (and dilate) a brain mask
# for every atlas template that does not have one yet.
if [ ! -d $3/mask ]; then
	echo "create mask for every template image if not done yet"
	mkdir $3/mask
fi
if [ ! -d $3/mask_dilate ]; then
	echo "create dilated mask for every template image if not done yet"
	mkdir $3/mask_dilate
fi
# Working folders for the qsub logs and intermediate per-atlas results.
for d in job_output job_error temp/${ATLAS} mask/${ATLAS}; do
	if [ ! -d $d ]; then mkdir -p $d; fi
done

# Return success when $1 exists under any of the supported image extensions.
image_exists() {
	[ -f $1 ] || [ -f $1".nii" ] || [ -f $1".nii.gz" ] || [ -f $1".hdr" ]
}

for G in $(ls $3/template/)
do
	if ! image_exists $3/mask_dilate/$G; then
		# No dilated mask yet: derive a binary mask from the label image first if needed.
		if ! image_exists $3/mask/$G; then
			reg_tools -in $3/label/$G -bin -out $3/mask/$G
		fi
		seg_maths $3/mask/$G -dil ${DILATE} $3/mask_dilate/$G
	fi
done

if [ ! -d label ]; then mkdir label; fi
if [ ! -d label/${ATLAS} ]; then mkdir label/${ATLAS}; fi
if [ ! -d mask ]; then mkdir mask; fi
84 |
jmask="mask_${TEST_NAME}" # qsub job name that gates parcellation on mask availability
if [ ! -f $2 ] && [ ! -f $2".nii" ] && [ ! -f $2".nii.gz" ] && [ ! -f $2".hdr" ]
then
	# No usable mask supplied: build one by sourcing mask.sh in the current
	# shell ("." equals "source"), forwarding the parameter file when given.
	# (If mask.sh is not on PATH via bashrc, use "./mask.sh" instead.)
	if [ ! -z $4 ] && [ -f $4 ]; then
		. mask.sh $1 $3 $4
	else
		. mask.sh $1 $3
	fi
	# mask.sh leaves the fused mask here; jname_dilate is exported by it so we
	# can chain on the dilation job.
	MASK=mask/${TEST_NAME}_mask_${ATLAS}_STAPLE_d${DILATE}.nii.gz
	${QSUB_CMD} -hold_jid ${jname_dilate} -N ${jmask} echo -e "Pre-defined mask ${MASK} NOT found, parcellation will start after the mask is generated"
else
	${QSUB_CMD} -N ${jmask} echo -e "Pre-defined mask ${MASK} found, start to search/generate initial affine registration from atlas to test image now"
fi
99 |
100 | # echo "*********************************************"
101 | # echo "* Segmentation pipeline for mouse brain MRI *"
102 | # echo "* for ${TEST_NAME} *"
103 | # echo "* using multi-atlas label fusion methods *"
104 | # echo "* step 2 - structural parcellation *"
105 | # echo "*********************************************"
106 | # echo "usage: parcellation new_image mask atlas_type (in_vivo/ex_vivo)"
107 |
# start structural parcellation: register every atlas template to the test
# image and propagate its mask/label through the deformation.
echo "Creating label for: "$TEST_NAME
PARAMETER_NUMBER=0
jid="$$" # PID of this shell, used as a pseudo-unique job-ID prefix
jid_reg="reg_${jid}"
TEST_NAME=`echo "$(basename $1)" | cut -d'.' -f1`
for G in $(ls $3/template/)
do
	NAME=$(echo "$G" | cut -d'.' -f1)
	jname=${jid_reg}_${TEST_NAME}_${NAME}
	# Leave-one-out safety: skip the atlas entry that IS the test image.
	if [[ ${3}/template/${NAME} == $1 ]] || [[ ${3}/template/${NAME}.nii == $1 ]] || [[ ${3}/template/${NAME}.nii.gz == $1 ]] || [[ ${3}/template/${NAME}.hdr == $1 ]]
	then
		echo -e "Atlas image name ${TEST_NAME} is same as test image, skipped"
		continue
	fi
	# 1) affine step: reuse a previously computed atlas->test matrix when present
	job_affine="${jname}_affine_matrix"
	if [ ! -f temp/${ATLAS}/${NAME}_${TEST_NAME}_aff ]; then
		${QSUB_CMD} -hold_jid ${jmask} -N ${job_affine} reg_aladin \
			-flo ${3}/template/${NAME} -ref $1 \
			-fmask ${3}/mask_dilate/${NAME} -rmask ${MASK} \
			-res temp/${ATLAS}/${NAME}_${TEST_NAME}_aff.nii.gz \
			-aff temp/${ATLAS}/${NAME}_${TEST_NAME}_aff ${MASK_AFF} -omp 4
	else
		${QSUB_CMD} -hold_jid ${jmask} -N ${job_affine} echo -e "Pre-defined affine transformation matrix ${NAME}_${TEST_NAME}_aff found, begin non-rigid registration now"
	fi
	# 2) non-rigid refinement seeded by the affine matrix
	job_reg="${jname}_reg"
	${QSUB_CMD} -hold_jid ${job_affine} -N ${job_reg} reg_f3d \
		-flo ${3}/template/${NAME} -fmask ${3}/mask_dilate/${NAME} \
		-ref ${1} -rmask ${MASK} \
		-aff temp/${ATLAS}/${NAME}_${TEST_NAME}_aff \
		-res temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz \
		-cpp temp/${ATLAS}/${NAME}_${TEST_NAME}_cpp.nii.gz ${PARCELLATION_NNR} -omp 4
	# 3) propagate the atlas mask and label through the control-point grid (nearest neighbour)
	job_resample="${jname}_resample"
	job_resample_nrr_mask=${job_resample}_nrr_mask
	${QSUB_CMD} -hold_jid ${job_reg} -N ${job_resample_nrr_mask} reg_resample -flo ${3}/mask/${NAME} -ref ${1} -cpp temp/${ATLAS}/${NAME}_${TEST_NAME}_cpp.nii.gz -NN -res mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz
	job_resample_label=${job_resample}_label
	${QSUB_CMD} -hold_jid ${job_reg} -N ${job_resample_label} reg_resample -flo ${3}/label/${NAME} -ref ${1} -cpp temp/${ATLAS}/${NAME}_${TEST_NAME}_cpp.nii.gz -NN -res label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz
	# 4) accumulate the argument lists consumed later by seg_maths -merge
	if (( $PARAMETER_NUMBER==0 )); then
		FIRST_TEMPLATE="temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
		FIRST_MASK="mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz"
		FIRST_LABEL="label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
	else
		MERGE_TEMPLATE="${MERGE_TEMPLATE} temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
		MERGE_MASK="${MERGE_MASK} mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz"
		MERGE_LABEL="${MERGE_LABEL} label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
	fi
	let PARAMETER_NUMBER+=1
done
let PARAMETER_NUMBER-=1
155 |
# Prepare 4D images for label fusion
jid_4d="merge4d_${TEST_NAME}"
# Average of the warped atlas masks -> rough brain region, binarised below and
# passed to seg_LabFusion via -mask to reduce memory usage.
jid_4d_nrr_mask_avg="${jid_4d}_nrr_mask_avg"
${QSUB_CMD} -hold_jid ${jid_reg}_* -N ${jid_4d_nrr_mask_avg} reg_average mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg.nii.gz -avg $FIRST_MASK $MERGE_MASK
jid_4d_nrr_mask_avg_bin="${jid_4d}_nrr_mask_avg_bin"
${QSUB_CMD} -hold_jid ${jid_4d_nrr_mask_avg} -N ${jid_4d_nrr_mask_avg_bin} seg_maths mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg.nii.gz -bin mask/${ATLAS}/${TEST_NAME}_mask_avg_bin.nii.gz
# Stack the propagated masks and labels into 4D volumes.
jid_4d_nrr_mask="${jid_4d}_nrr_mask"
${QSUB_SEG_MATH} -hold_jid ${jid_reg}_* -N ${jid_4d_nrr_mask} seg_maths $FIRST_MASK -merge $PARAMETER_NUMBER 4 $MERGE_MASK mask/${ATLAS}/${TEST_NAME}_nrr_mask_4D.nii.gz
jid_4d_label="${jid_4d}_label"
${QSUB_SEG_MATH} -hold_jid ${jid_reg}_* -N ${jid_4d_label} seg_maths $FIRST_LABEL -merge $PARAMETER_NUMBER 4 $MERGE_LABEL label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz
# Start label fusion
export jid_LabFusion="LabFusion_${TEST_NAME}"
# Determine which label fusion method to use.
# BUGFIX: the old test was [[ ${LABFUSION}=="-STEPS" ]] (no spaces around ==),
# which bash evaluates as a single non-empty string, i.e. always true.
if [[ ${LABFUSION} == "-STEPS" ]]; then
	# STEPS additionally needs the warped atlas intensities stacked in 4D.
	jid_4d_template="${jid_4d}_template"
	${QSUB_SEG_MATH} -hold_jid ${jid_reg}_* -N ${jid_4d_template} seg_maths $FIRST_TEMPLATE -merge $PARAMETER_NUMBER 4 $MERGE_TEMPLATE label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz
	${QSUB_SEG_MATH} -hold_jid ${jid_4d}_* -N ${jid_LabFusion} seg_LabFusion \
		-in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz \
		-STEPS ${k} ${n} $1 label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz ${LABFUSION_OPTION} \
		-mask mask/${ATLAS}/${TEST_NAME}_mask_avg_bin.nii.gz \
		-out label/${TEST_NAME}_label_${ATLAS}_STEPS_${k}_${n}.nii.gz

	# Creating NRR mask: fuse the non-rigidly propagated masks the same way.
	jid_NRR_mask="NRR_mask_${TEST_NAME}"
	${QSUB_CMD} -hold_jid ${jid_4d}_* -N ${jid_NRR_mask} seg_LabFusion \
		-in mask/${ATLAS}/${TEST_NAME}_nrr_mask_4D.nii.gz \
		-STEPS ${k} ${n} $1 label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz ${LABFUSION_OPTION} \
		-mask mask/${ATLAS}/${TEST_NAME}_mask_avg_bin.nii.gz \
		-out mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}.nii.gz
	jid_NRR_mask_dilate="dil_NRR_mask_${TEST_NAME}"
	${QSUB_CMD} -hold_jid ${jid_NRR_mask} -N ${jid_NRR_mask_dilate} seg_maths mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}.nii.gz -dil ${DILATE} mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}_d${DILATE}.nii.gz
fi
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
--------------------------------------------------------------------------------
/depreciated/for_cluster/parcellation_batch.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Structural Parcellation shell script (SGE)
# Author: Ma Da (d.ma.11@ucl.ac.uk)
#
# Batch driver: run parcellation.sh for every image in a folder.
#   $1: folder including all the images to be parcellated
#   $2: atlas folder
#   $3: optional parcellation parameter file
#
# NOTE(review): the shebang now sits on the first line; it previously appeared
# twice mid-file, where it had no effect.

export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 1 -R y -l h_vmem=4G -l tmem=4G -l s_stack=1024M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -l h_vmem=8G -l tmem=8G -l s_stack=1024M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"

DILATE=1
ATLAS=$(basename $2)
MASK_FOLDER="mask" # default mask folder
MASK_SUFFIX="_mask_${ATLAS}_STAPLE_d${DILATE}" # default mask suffix
# (the redundant MASK_SUFFIX="" assignment that was immediately overwritten,
#  and the unused per-image NAME variable, have been removed)

# Read user defined parameters: source $3 when it names an existing file.
if [ ! -z $3 ]; then
	if [ -f $3 ]; then
		. $3
	fi
fi

for G in `ls $1`
do
	TEST_NAME=`echo "$G" | cut -d'.' -f1`
	parcellation.sh $1/$G "${MASK_FOLDER}/${TEST_NAME}${MASK_SUFFIX}.nii.gz" $2 $3
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/parcellation_merge_4D.sh:
--------------------------------------------------------------------------------
1 | # Structural Parcellation shell script (SGE)
2 | # Author: Ma Da (d.ma.11@ucl.ac.uk)
3 | # Version 0.8_2013.08.29 to be modified ...
4 | #!/bin/bash
5 | # echo "Bash version ${BASH_VERSION}..."
6 |
7 | # $1: enquiry image
8 | # $2: atlas folder "in_vivo" or "ex_vivo"
9 |
10 | if [ ! -d job_output ]
11 | then mkdir job_output
12 | fi
13 | if [ ! -d job_error ]
14 | then mkdir job_error
15 | fi
16 | if [ ! -d temp/${ATLAS} ]
17 | then mkdir -p temp/${ATLAS}
18 | fi
19 | if [ ! -d label/${ATLAS} ]
20 | then mkdir - label/${ATLAS}
21 | fi
22 |
23 | # setup default value for parameters
24 | ROOT_DIR=$(pwd)
25 | QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=9.9G -l tmem=9.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
26 | QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -l h_vmem=12G -l tmem=12G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
27 | PARCELLATION_NNR="-ln 4 -lp 4 -sx 3 -sy 3 -sz 3"
28 | DILATE=4 # value to be dilated for the result mask
29 | LABFUSION="-STEPS"
30 | MASK_AFF=" -rigOnly "
31 |
32 | FULL_TEST_NAME=$(basename $1)
33 | TEST_NAME=`echo "$FULL_TEST_NAME" | cut -d'.' -f1`
34 | ATLAS=$(basename $2)
35 |
36 | # start structural parcellation
37 | echo "Creating 4D-label for: "$TEST_NAME
38 | PARAMETER_NUMBER=0
39 | for G in `ls $2/template/`
40 | do
41 | jname=${jid_reg}_${G}
42 | NAME=`echo "$G" | cut -d'.' -f1`
43 | # Check testing image name is different from atlas template. If same, skip (for leave-one-out)
44 | if [[ ${3}/template/${NAME} != $1 ]] && [[ ${3}/template/${NAME}.nii != $1 ]] && [[ ${3}/template/${NAME}.nii.gz != $1 ]] && [[ ${3}/template/${NAME}.hdr != $1 ]]
45 | then
46 | # prepare parameters for label fusion
47 | if (( $PARAMETER_NUMBER==0 )); then
48 | FIRST_TEMPLATE="temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
49 | FIRST_LABEL="label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
50 | else
51 | MERGE_TEMPLATE="${MERGE_TEMPLATE} temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
52 | MERGE_LABEL="${MERGE_LABEL} label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
53 | fi
54 | let PARAMETER_NUMBER+=1
55 | else
56 | echo -e "Atlas image name ${TEST_NAME} is same as test image, skipped"
57 | fi
58 | done
59 | let PARAMETER_NUMBER-=1
60 |
61 | # Prepare 4D images for label fusion
62 | jid="$$" # generate random number as job ID
63 | jid_4d="${jid}_4d"
64 | jid_4d_label="label_${jid_4d}"
65 | ${QSUB_SEG_MATH} -N ${jid_4d_label} seg_maths $FIRST_LABEL -merge $PARAMETER_NUMBER 4 $MERGE_LABEL label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz
66 | jid_4d_template="template_${jid_4d}"
67 | ${QSUB_SEG_MATH} -N ${jid_4d_template} seg_maths $FIRST_TEMPLATE -merge $PARAMETER_NUMBER 4 $MERGE_TEMPLATE label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
--------------------------------------------------------------------------------
/depreciated/for_cluster/parcellation_merge_4D_batch.sh:
--------------------------------------------------------------------------------
# Merge 4D labels for every enquiry image found in a folder.
#   $1: folder containing the enquiry images
#   $2: atlas folder "in_vivo" or "ex_vivo"

for image in $(ls $1)
do
	parcellation_merge_4D.sh $1/$image $2
done
--------------------------------------------------------------------------------
/depreciated/for_cluster/single_atlas_dice.sh:
--------------------------------------------------------------------------------
1 | # start from splash path:
2 | # usage: single_atlas_dice.sh $1 $2
3 | # $1 Test atlas folder
4 | # $2 Source atlas folder
# Submits one seg_stats job per (test image, atlas image) pair; each job writes
# its Dice scores to Dice_Score/temp/<test>_label_<atlas>.csv, which
# single_atlas_dice_step2.sh later collects into a summary table.
5 | ROOT_DIR=$(pwd)
6 | # echo "Bash version ${BASH_VERSION}"
7 | ATLAS=$(basename $2)
8 | export QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=9.9G -l tmem=9.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
9 |
10 | if [ ! -d "Dice_Score/temp" ]
11 | then mkdir -p "Dice_Score/temp"
12 | fi
13 |
14 | for H in `ls $1/template/`
15 | do
16 | 	TEST_IMAGE=`echo "$H" | cut -d'.' -f1`
17 | 	for G in `ls $2/template/`
18 | 	do
# NOTE(review): $$ is the PID of THIS script, so jid -- and therefore the job
# name stats_<jid> -- is identical for every submitted job; SGE accepts
# duplicate -N names, but the names are not unique per pair.
19 | 	jid=$$
20 | 	ATLAS_IMAGE=`echo "$G" | cut -d'.' -f1`
21 | 	j_stats=stats_${jid}
# The escaped \" pairs are meant to keep the file arguments quoted when qsub
# (-b y) re-parses the command line -- TODO confirm against the installed SGE.
22 | 	${QSUB_CMD} -N ${j_stats} seg_stats $1/label/$H -D "\"label/${ATLAS}/${TEST_IMAGE}_label_${ATLAS_IMAGE}.nii.gz\"" "\"Dice_Score/temp/${TEST_IMAGE}_label_${ATLAS_IMAGE}.csv\""
23 | 	# j_dice=dice_${jid}
24 | 	# ${QSUB_CMD} -N ${j_dice} -hold_jid ${j_stats}
25 | 	done
26 | done
27 |
28 |
29 | ############# old script ###########
30 | # echo -e "A0_label_A0_flip,\c" >> "dice_score.csv"
31 | # seg_stats ../../in_vivo_double/label/A0.nii.gz -D in_vivo_double/A0_label_A0_flip.nii.gz "dice_score.csv"
32 | # echo -e "\n\c" >> "dice_score.csv"
--------------------------------------------------------------------------------
/depreciated/for_cluster/single_atlas_dice_step2.sh:
--------------------------------------------------------------------------------
1 | # start from splash path:
2 | # usage: single_atlas_dice.sh $1 $2
3 | # $1 Test atlas folder
4 | # $2 Source atlas folder
5 | ROOT_DIR=$(pwd)
6 | # echo "Bash version ${BASH_VERSION}"
7 | ATLAS=$(basename $2)
8 | export QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=9.9G -l tmem=9.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
9 |
10 | if [ ! -d "Dice_Score/temp" ]
11 | then mkdir -p "Dice_Score/temp"
12 | fi
13 |
14 | for H in `ls $1/template/`
15 | do
16 | TEST_IMAGE=`echo "$H" | cut -d'.' -f1`
17 | echo -e ${TEST_IMAGE}"," >> "Dice_Score/Dice_Score_single.csv"
18 | for G in `ls $2/template/`
19 | do
20 | ATLAS_IMAGE=`echo "$G" | cut -d'.' -f1`
21 | echo -e ${ATLAS_IMAGE}",\c" >> "Dice_Score/Dice_Score_single.csv"
22 | cat "Dice_Score/temp/${TEST_IMAGE}_label_${ATLAS_IMAGE}.csv" >> "Dice_Score/Dice_Score_single.csv"
23 | echo -e "\n\c" >> "Dice_Score/Dice_Score_single.csv"
24 | done
25 | echo -e "\n\c" >> "Dice_Score/Dice_Score_single.csv"
26 | done
27 |
28 |
29 | ############# old script ###########
30 | # echo -e "A0_label_A0_flip,\c" >> "dice_score.csv"
31 | # seg_stats ../../in_vivo_double/label/A0.nii.gz -D in_vivo_double/A0_label_A0_flip.nii.gz "dice_score.csv"
32 | # echo -e "\n\c" >> "dice_score.csv"
--------------------------------------------------------------------------------
/depreciated/for_single_workstation/brain_extraction_batch.sh:
--------------------------------------------------------------------------------
# Multiply each original image by its brain mask to produce a brain-extracted
# image (single-workstation version, runs seg_maths directly).
# $1: folder containing the original images
# $2: folder containing the brain masks
# $3: folder to put the extracted brain images

# export QSUB_CMD="qsub -l h_rt=1:00:00 -pe smp 1 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error" # -l s_stack=128M

for d in job_output job_error; do
	if [ ! -d $d ]; then mkdir $d; fi
done
if [ ! -d $3 ]; then mkdir -p $3; fi

for image in $(ls $1)
do
	stem=$(echo "$image" | cut -d'.' -f1)
	# NOTE(review): the stem is passed without its extension -- assumes
	# seg_maths resolves it against the files on disk; confirm with NiftySeg.
	seg_maths $1/${stem} -mul $2/${stem} $3/${stem}.nii.gz
done
22 |
--------------------------------------------------------------------------------
/depreciated/for_single_workstation/dilate_batch.sh:
--------------------------------------------------------------------------------
# Dilate every mask in a folder with seg_maths (single-workstation version).
# $1: folder containing the original masks
# $2: output folder for the dilated masks
# $3: number of voxels to dilate by
#
# NOTE(review): the unused QSUB_CMD definition left over from the cluster
# version has been removed -- nothing in this script submitted jobs with it.

# Fail early with a usage message instead of calling seg_maths with an
# empty -dil argument when $3 is missing.
if [ $# -lt 3 ]; then
	echo "usage: dilate_batch.sh mask_folder output_folder dilate_voxels"
	exit 1
fi

if [ ! -d $2 ]
then mkdir -p $2
fi

DILATE=$3

for G in `ls $1`
do
	MASK=`echo "$G" | cut -d'.' -f1` # file name without extension
	seg_maths $1/$G -dil ${DILATE} $2/${MASK}_d${DILATE}.nii.gz
done
25 |
--------------------------------------------------------------------------------
/depreciated/for_single_workstation/labfusion.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Structural Parcellation shell script (SGE)
# Author: Da Ma (d.ma.11@ucl.ac.uk)
#
# Fuse already-registered atlas labels into a final parcellation.
#   $1: enquiry image
#   $2: atlas folder "in_vivo" or "ex_vivo"
#   $3: optional file with user defined parameters (sourced below)

if [[ $# -lt 2 ]]
then
	echo "***************************************************"
	echo "* CAUTION!! DO NOT use the same name as the atlas *"
	echo "* if it is not for leave-one-out testing *"
	echo "***************************************************"
	echo "usage: labfusion.sh new_image atlas_folder"
	exit
fi

# Default parameters; any of these can be overridden by the sourced $3 file.
ROOT_DIR=$(pwd)
PARCELLATION_NNR="-ln 4 -lp 4 -sx -3"
DILATE=3 # value to be dilated for the result mask
LABFUSION="-STEPS"
LABFUSION_OPTION="-v 1" # parameter options for STAPLE or STEPS in seg_LabFusion
MASK_AFF=""

# STEPS parameters: honour $k/$n from the environment, else default to "3 8".
if [[ -z $k ]] && [[ -z $n ]]; then
	export k=3
	export n=8
fi
export STEPS_PARAMETER="${k} ${n} "

FULL_TEST_NAME=$(basename $1)
TEST_NAME=$(echo "$FULL_TEST_NAME" | cut -d'.' -f1)
echo "Creating parcellation label for: "$TEST_NAME
ATLAS=$(basename $2)

# Source user-defined parameters from the 3rd argument when it names a file.
if [ ! -z $3 ] && [ -f $3 ]; then
	. $3
fi

# Working folders for logs and intermediate results.
for d in job_output job_error temp/${ATLAS} mask/${ATLAS} label/${ATLAS}; do
	if [ ! -d $d ]; then mkdir -p $d; fi
done
54 |
55 | # echo "*********************************************"
56 | # echo "* Segmentation pipeline for mouse brain MRI *"
57 | # echo "* for ${TEST_NAME} *"
58 | # echo "* using multi-atlas label fusion methods *"
59 | # echo "* step 2 - structural parcellation *"
60 | # echo "*********************************************"
61 | # echo "usage: parcellation new_image mask atlas_type (in_vivo/ex_vivo)"
62 |
# start structural parcellation: gather the per-atlas registration outputs
# (produced earlier by parcellation.sh) into merge argument lists.
echo "Creating label for: "$TEST_NAME
PARAMETER_NUMBER=0
jid="$$" # PID of this shell, used as a pseudo-random job ID
jid_reg="reg_${jid}"
TEST_NAME=`echo "$(basename $1)" | cut -d'.' -f1`
MASK=${MASK_FOLDER}/${TEST_NAME}${MASK_SUFFIX}
MERGE_LABEL=""
for G in `ls $2/template/`
do
	NAME=`echo "$G" | cut -d'.' -f1`
	# Check testing image name is different from atlas template. If same, skip (for leave-one-out)
	# BUGFIX: the atlas folder is $2 in this script -- the old code compared
	# against ${3}/template/... ($3 is the parameter file), so the
	# leave-one-out exclusion could never match.
	if [[ ${2}/template/${NAME} != $1 ]] && [[ ${2}/template/${NAME}.nii != $1 ]] && [[ ${2}/template/${NAME}.nii.gz != $1 ]] && [[ ${2}/template/${NAME}.hdr != $1 ]]
	then
		# prepare parameters (first element + space-separated remainder) for seg_maths -merge
		if (( $PARAMETER_NUMBER==0 )); then
			FIRST_TEMPLATE="temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
			FIRST_MASK="mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz"
			FIRST_LABEL="label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
		else
			MERGE_TEMPLATE="${MERGE_TEMPLATE} temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
			MERGE_MASK="${MERGE_MASK} mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz"
			MERGE_LABEL="${MERGE_LABEL} label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
		fi
		let PARAMETER_NUMBER+=1
	else
		echo -e "Atlas image name ${TEST_NAME} is same as test image, skipped"
	fi
done
let PARAMETER_NUMBER-=1
93 |
# Prepare 4D images for label fusion
jid_4d="merge4d_${TEST_NAME}"

# Rough average mask: cuts down memory usage during label fusion.
if [ ! -f mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg_bin.nii.gz ]; then
	reg_average mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg.nii.gz -avg $FIRST_MASK $MERGE_MASK
	seg_maths mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg.nii.gz -bin -dil ${DILATE} mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg_bin.nii.gz
fi

MASK="mask/${ATLAS}/${TEST_NAME}_nrr_mask_avg_bin.nii.gz"

# Stack the propagated masks into a 4D volume (skip when already present).
if [ ! -f mask/${ATLAS}/${TEST_NAME}_nrr_mask_4D.nii.gz ]; then
	seg_maths $FIRST_MASK -v -merge $PARAMETER_NUMBER 4 $MERGE_MASK mask/${ATLAS}/${TEST_NAME}_nrr_mask_4D.nii.gz
else
	echo "4D mask already exist, skip merging again"
fi
# Stack the propagated labels into a 4D volume (skip when already present).
if [ ! -f label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz ]; then
	seg_maths $FIRST_LABEL -v -merge $PARAMETER_NUMBER 4 $MERGE_LABEL label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz
else
	echo "4D label already exist, skip merging again"
fi

# Start label fusion; dispatch on the requested method (default: majority vote).
export jid_LabFusion="LabFusion_${TEST_NAME}"
case ${LABFUSION} in
-STEPS)
	# STEPS additionally needs the warped atlas intensities stacked in 4D.
	if [ ! -f label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz ]; then
		seg_maths $FIRST_TEMPLATE -v -merge $PARAMETER_NUMBER 4 $MERGE_TEMPLATE label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz
	else
		echo "4D template already exist, skip merging again"
	fi
	seg_LabFusion \
		-in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz \
		-mask ${MASK} \
		-STEPS ${k} ${n} $1 label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz ${LABFUSION_OPTION} \
		-out label/${TEST_NAME}_label_${ATLAS}_STEPS_${k}_${n}.nii.gz
	# (the cluster variant additionally fused an NRR brain mask at this point)
	;;
-STAPLE)
	seg_LabFusion \
		-in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz \
		-mask ${MASK} \
		-STAPLE ${LABFUSION_OPTION} \
		-out label/${TEST_NAME}_label_${ATLAS}_STAPLE.nii.gz
	;;
-SBA)
	seg_LabFusion \
		-in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz \
		-mask ${MASK} \
		-SBA ${LABFUSION_OPTION} \
		-out label/${TEST_NAME}_label_${ATLAS}_SBA.nii.gz
	;;
*)
	# fall back to majority voting
	seg_LabFusion \
		-in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz \
		-mask ${MASK} \
		-MV ${LABFUSION_OPTION} \
		-out label/${TEST_NAME}_label_${ATLAS}_MV.nii.gz
	;;
esac
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
--------------------------------------------------------------------------------
/depreciated/for_single_workstation/labfusion_batch.sh:
--------------------------------------------------------------------------------
1 | # Batch STEPS label fusion shell script (single workstation)
2 | # Author: Ma Da (d.ma.11@ucl.ac.uk)
3 | # for STEPS label fusion on folder of images with registration already done
4 |
5 | # usage: ./labfusion_batch.sh $1 $2 $3 $4
6 | # $1: folder include all the images for label fusion
7 | # $2: atlas (in_vivo ex_vivo)
8 | # $3: STEPS parameter k (kernel size in terms of voxel number)
9 | # $4: STEPS parameter n (number of top ranked local atlas to select for label fusion)
10 | ROOT_DIR=$(pwd)
11 | # echo "Bash version ${BASH_VERSION}"
12 | ATLAS=$(basename $2)
13 | export QSUB_CMD="qsub -l h_rt=1:00:00 -l h_vmem=9.9G -l tmem=9.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
14 |
15 | # Set STEPS parameters
16 | if [[ ! -z $3 ]] && [[ ! -z $4 ]]; then # if STEPS parameter is set (-z: zero = not set), so ! -z = set
17 | export k=$3
18 | export n=$4
19 | else # if [[ -z "${STEPS_PARAMETER}" ]] set default STEPS parameter to: "4 6"
20 | export k=8
21 | export n=8
22 | fi
23 | export STEPS_PARAMETER="${k} ${n} "
24 |
25 | echo "***********************************************"
26 | echo "* batch STEPS label fusion (STEPS) ${k} ${n} *"
27 | echo "***********************************************"
28 | # begin parcellation and dice score calculation
29 | for H in `ls $1`
30 | do
31 | TEST_NAME=`echo "$H" | cut -d'.' -f1`
32 | # jid_LabFusion=labfusion_"$$"
33 |
34 | seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz \
35 | -STEPS ${STEPS_PARAMETER} $1/$H label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz \
36 | -out label/${TEST_NAME}_label_${ATLAS}_STEPS_${k}_${n}.nii.gz \
37 | -unc -v 1
38 | done
39 |
--------------------------------------------------------------------------------
/depreciated/for_single_workstation/mask.sh:
--------------------------------------------------------------------------------
1 | # Brain Extraction shell script (non-SGE)
2 | # Author: Ma Da (d.ma.11@ucl.ac.uk)
3 |
4 | #!/bin/bash
5 | # echo "Bash version ${BASH_VERSION}..."
6 | # export QSUB_CODE="qsub -l h_rt=1:00:00 -l h_vmem=1.9G -l tmem=1.9G -l s_stack=10240 -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error "
7 |
8 | if [ $# -lt 2 ]
9 | then
10 | echo ""
11 | echo "*********************************************"
12 | echo "* Segmentation pipeline for mouse brain MRI *"
13 | echo "* using multi-atlas label fusion methods *"
14 | echo "* step 1 - brain extraction *"
15 | echo "*********************************************"
16 | echo "usage: bash/sh mask.sh <[optional] user-specified-parameter-file>"
17 | echo ""
18 | exit
19 | fi
20 |
21 |
22 | # $1: enquiry image
23 | # $2: atlas folder. e.g. "in_vivo" or "ex_vivo"
24 | # $3: if exist, read the file to load user defined parameters (see file under sample_parameters for examples)
25 | # start brain extraction
26 | ROOT_DIR=$(pwd)
27 | FULL_TEST_NAME=$(basename $1)
28 | TEST_NAME=`echo "$FULL_TEST_NAME" | cut -d'.' -f1`
29 | ATLAS=$(basename $2)
30 | DILATE=2 # value to be dilated for the result mask
31 | INITIAL_AFFINE="initial_affine.txt"
32 | MASK_AFF=" "
33 |
34 | # Read user-defined parameters
35 | if [ ! -z $3 ]; then # check if there is a 3rd argument
36 | if [ -f $3 ]; then # check if the file specified by 3rd argument exist
37 | . $3 # if file of 3rd argument exist, read the parameters from the file
38 | fi
39 | fi
40 |
41 | if [ ! -f $1 ] && [ ! -f $1".nii" ] && [ ! -f $1".nii.gz" ] && [ ! -f $1".hdr" ]
42 | then echo "test image not exist"
43 | fi
44 | if [ ! -d temp/${ATLAS} ]
45 | then mkdir -p temp/${ATLAS}
46 | fi
47 | if [ ! -d mask ]
48 | then mkdir mask
49 | fi
50 | if [ ! -d mask/${ATLAS} ]
51 | then mkdir -p mask/${ATLAS}
52 | fi
53 | if [ ! -d $2/mask ]
54 | then mkdir $2/mask
55 | fi
56 | if [ ! -d $2/mask_dilate ]
57 | then mkdir $2/mask_dilate
58 | fi
59 |
60 | MASK_AFF=" -rigOnly "
61 |
62 | # Mask back propagation
63 |
64 | echo "Creating mask for: "$TEST_NAME
65 | PARAMETER_NUMBER=0
66 | for G in `ls $2/template/`
67 | do
68 | NAME=`echo "$G" | cut -d'.' -f1`
69 | if [[ $2/template/$NAME != $1 ]] && [[ $2/template/$NAME.nii != $1 ]] && [[ $2/template/$NAME.nii.gz != $1 ]] && [[ $2/template/$NAME.hdr != $1 ]]
70 | then
71 | if [ ! -f $2/mask/$G ] && [ ! -f $2/mask/$G".nii" ] && [ ! -f $2/mask/$G".nii.gz" ] && [ ! -f $2/mask/$G".hdr" ]
72 | then reg_tools -in $2/label/$G -bin -out $2/mask/$G
73 | fi
74 | if [ ! -f $2/mask_dilate/$G ]
75 | then seg_maths $2/mask/$G -dil 3 $2/mask_dilate/$G
76 | fi
77 | reg_aladin -flo $1 -ref $2/template/$G -rmask $2/mask_dilate/$G -aff temp/${ATLAS}/$TEST_NAME"_"$NAME"_aff" -res temp/${ATLAS}/$TEST_NAME"_"$NAME"_aff".nii.gz ${MASK_AFF}
78 | reg_transform -ref $2/template/$G -invAffine temp/${ATLAS}/$TEST_NAME"_"$NAME"_aff" temp/${ATLAS}/${NAME}_${TEST_NAME}_aff
79 | reg_resample -flo $2/mask/$G -ref $1 -aff temp/${ATLAS}/${NAME}_${TEST_NAME}_aff -res mask/${ATLAS}/$TEST_NAME"_mask_"$G -NN
80 |
81 | # change non-rigid registration for more accurate masking (not always working)
82 | # reg_f3d -flo $2/template/$G -ref $1 -aff temp/$TEST_NAME"_"$NAME"_inv_aff" -res temp/${TEST_NAME}_${NAME}_NRR.nii.gz -cpp temp/${TEST_NAME}_${NAME}_NRR_cpp.nii.gz
83 | # Resample using cpp to obtain mask candidate
84 | # reg_resample -flo $2/mask/$G -ref $1 -cpp temp/${TEST_NAME}_${NAME}_NRR_cpp.nii.gz -NN -res mask/temp/$TEST_NAME"_mask_"$G
85 |
86 | if (( $PARAMETER_NUMBER==0 ))
87 | then FIRST_PARAMETER=mask/${ATLAS}/$TEST_NAME"_mask_"$G
88 | else
89 | MERGE_PARAMETERS=$MERGE_PARAMETERS" "mask/${ATLAS}/$TEST_NAME"_mask_"$G
90 | fi
91 | let PARAMETER_NUMBER+=1
92 | fi
93 | done
94 | let PARAMETER_NUMBER-=1
95 |
96 | # Label Fusion
97 | seg_maths $FIRST_PARAMETER -merge $PARAMETER_NUMBER 4 $MERGE_PARAMETERS mask/${ATLAS}/${TEST_NAME}_mask_4D.nii.gz
98 | seg_LabFusion -in mask/${ATLAS}/${TEST_NAME}"_mask_4D" -STAPLE -out mask/${TEST_NAME}_mask_${ATLAS}_STAPLE.nii.gz
99 | seg_maths mask/${TEST_NAME}_mask_${ATLAS}_STAPLE.nii.gz -dil ${DILATE} mask/${TEST_NAME}_mask_${ATLAS}_STAPLE_d${DILATE}.nii.gz
100 | echo "create mask at: mask/${TEST_NAME}_mask_${ATLAS}_STAPLE_d${DILATE}.nii.gz"
101 |
102 | # rm mask/temp/*.*
103 | # rm temp/*.*
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
--------------------------------------------------------------------------------
/depreciated/for_single_workstation/mask_batch.sh:
--------------------------------------------------------------------------------
1 | # Brain masking batch shell script (single workstation)
2 | # Author: Ma Da (d.ma.11@ucl.ac.uk)
3 | # Version 0.8_2013.10.19 to be modified ...
4 | #!/bin/bash
5 | # echo "Bash version ${BASH_VERSION}..."
6 | # $1: folder include all the images to be masked
7 | # $2: atlas folder
8 | # $3: parcellation parameter file (if exist)
9 |
10 | #!/bin/bash
11 | DILATE=4
12 | ATLAS=$(basename $2)
13 |
14 | for G in `ls $1`
15 | do
16 | TEST_NAME=`echo "$G" | cut -d'.' -f1`
17 | NAME=`echo "$G" | cut -d'.' -f1`
18 | mask.sh $1/$G $2 $3
19 | done
--------------------------------------------------------------------------------
/depreciated/for_single_workstation/parcellation.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # echo "Bash version ${BASH_VERSION}..."
3 |
4 | # Structural Parcellation shell script (SGE)
5 | # Author: Ma Da (d.ma.11@ucl.ac.uk)
6 |
7 | # $1: enquiry image
8 | # $2: mask for enquiry image. if no mask just type "no_mask"
9 | # $3: atlas folder. e.g. "in_vivo" or "ex_vivo"
10 | # $4: if exist, read the file to load user defined parameters (see file under sample_parameters for examples)
11 |
12 | if [ $# -lt 3 ]
13 | then
14 | echo ""
15 | echo "********************************************************************"
16 | echo "* CAUTION!! DO NOT use the same subject name as the atlas template *"
17 | echo "* if it is not for leave-one-out testing *"
18 | echo "********************************************************************"
19 | echo "usage: bash/sh parcellation.sh <[optional] user-specified-parameter-file>"
20 | echo ""
21 | exit
22 | fi
23 |
24 | # if [ ! -d job_output ]; then mkdir job_output; fi
25 | # if [ ! -d job_error ]; then mkdir job_error; fi
26 |
27 | # setup default value for parameters
28 | ROOT_DIR=$(pwd)
29 | export QSUB_CMD="qsub -l h_rt=5:00:00 -l h_vmem=4G -l tmem=4G -l s_stack=1024M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
30 | export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -l h_vmem=8G -l tmem=8G -l s_stack=1024M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
31 | PARCELLATION_NNR="-vel" # -ln 4 -lp 4 -sx 0.6 would take much longer time
32 | DILATE=3 # value to be dilated for the result mask
33 | LABFUSION="-STEPS"
34 | MASK_AFF="-ln 4 -lp 4"
35 |
36 | # Set STEPS parameters
37 | if [[ -z $k ]] && [[ -z $n ]]; then # if STEPS parameter is not set
38 | # set default STEPS parameter to: "3 8 "
39 | export k=3
40 | export n=8
41 | fi
42 | export STEPS_PARAMETER="${k} ${n} "
43 |
44 | # Read user-defined parameters
45 | if [ ! -z $4 ]; then # check if there is a 4th argument
46 | if [ -f $4 ]; then # check if the file specified by 4th argument exist
47 | . $4 # if file of 4th argument exist, read the parameters from the file
48 | fi
49 | fi
50 |
51 | FULL_TEST_NAME=$(basename $1)
52 | TEST_NAME=`echo "$FULL_TEST_NAME" | cut -d'.' -f1`
53 | echo "Creating parcellation label for: "$TEST_NAME
54 | ATLAS=$(basename $3)
55 | MASK=$2
56 |
57 | if [ ! -d temp/${ATLAS} ]; then mkdir -p temp/${ATLAS}; fi
58 | if [ ! -d mask/${ATLAS} ]; then mkdir -p mask/${ATLAS}; fi
59 |
60 | # create dilated mask for every template image if not already exist
61 | if [ ! -d $3/mask ]; then
62 | echo "create mask for every template image if not done yet"
63 | mkdir $3/mask
64 | fi
65 | if [ ! -d $3/mask_dilate ]; then
66 | echo "create dilated mask for every template image if not done yet"
67 | mkdir $3/mask_dilate
68 | fi
69 |
70 | for G in `ls $3/template/`
71 | do
72 | if [ ! -f $3/mask_dilate/$G ] && [ ! -f $3/mask_dilate/$G".nii" ] && [ ! -f $3/mask_dilate/$G".nii.gz" ] && [ ! -f $3/mask_dilate/$G".hdr" ]; then
73 | if [ ! -f $3/mask/$G ] && [ ! -f $3/mask/$G".nii" ] && [ ! -f $3/mask/$G".nii.gz" ] && [ ! -f $3/mask/$G".hdr" ]; then
74 | reg_tools -in $3/label/$G -bin -out $3/mask/$G
75 | fi
76 | seg_maths $3/mask/$G -dil ${DILATE} $3/mask_dilate/$G
77 | fi
78 | done
79 |
80 | if [ ! -d label ]; then mkdir label; fi
81 | if [ ! -d label/${ATLAS} ]; then mkdir label/${ATLAS}; fi
82 | if [ ! -d mask ]; then mkdir mask; fi
83 |
84 | # if no mask has been created yet, evoke mask.sh
85 | if [ ! -f $MASK ] && [ ! -f $MASK".nii" ] && [ ! -f $MASK".nii.gz" ] && [ ! -f $MASK".hdr" ]
86 | then
87 | echo -e "Pre-defined mask ${MASK} NOT found, parcellation will start after the mask is generated"
88 | # create mask for the test image first
89 | if [ ! -z $4 ] && [ -f $4 ]; # check that a 4th argument is given and that the file it names exists
90 | then . mask.sh $1 $3 $4 # if the file of the 4th argument exists, read the parameters from the file
91 | else . mask.sh $1 $3 # if there's no 4th argument or the file does not exist ("." equals "source")
92 | fi # if path of the script is not defined in bashrc, use "./mask.sh" instead
93 | # Mask for the test image created
94 | MASK=mask/${TEST_NAME}_mask_${ATLAS}_STAPLE_d${DILATE}.nii.gz
95 |
96 | else
97 | echo -e "Pre-defined mask ${MASK} found, start to search/generate initial affine registration from atlas to test image now"
98 | fi
99 |
100 | # echo "*********************************************"
101 | # echo "* Segmentation pipeline for mouse brain MRI *"
102 | # echo "* for ${TEST_NAME} *"
103 | # echo "* using multi-atlas label fusion methods *"
104 | # echo "* step 2 - structural parcellation *"
105 | # echo "*********************************************"
106 | # echo "usage: parcellation new_image mask atlas_type (in_vivo/ex_vivo)"
107 |
108 | # start structural parcellation
109 | echo "Creating label for: "$TEST_NAME
110 | PARAMETER_NUMBER=0
111 | TEST_NAME=`echo "$(basename $1)" | cut -d'.' -f1`
112 | for G in `ls $3/template/`
113 | do
114 | NAME=`echo "$G" | cut -d'.' -f1`
115 | jname=${jid_reg}_${TEST_NAME}_${NAME}
116 | # Check testing image name is different from atlas template. If same, skip (for leave-one-out)
117 | if [[ ${3}/template/${NAME} != $1 ]] && [[ ${3}/template/${NAME}.nii != $1 ]] && [[ ${3}/template/${NAME}.nii.gz != $1 ]] && [[ ${3}/template/${NAME}.hdr != $1 ]]
118 | then
119 | # 1)check if affine matrix exists
120 | if [ ! -f temp/${ATLAS}/${NAME}_${TEST_NAME}_aff ]; then
121 | # 1.1) if affine matrix not found, generate affine atlas->test
122 | reg_aladin -flo ${3}/template/${NAME} -ref $1 -fmask ${3}/mask_dilate/${NAME} -rmask ${MASK} -res temp/${ATLAS}/${NAME}_${TEST_NAME}_aff.nii.gz -aff temp/${ATLAS}/${NAME}_${TEST_NAME}_aff ${MASK_AFF}
123 | else
124 | echo -e "Pre-defined affine transformation matrix ${NAME}_${TEST_NAME}_aff found, begin non-rigid registration now"
125 | fi
126 |
127 | # 1.2) use affine transform matrix to initialize non-rigid registration (coarse step)
128 | reg_f3d -flo ${3}/template/${NAME} -fmask ${3}/mask_dilate/${NAME} -ref ${1} -rmask ${MASK} -aff temp/${ATLAS}/${NAME}_${TEST_NAME}_aff -res temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz -cpp temp/${ATLAS}/${NAME}_${TEST_NAME}_cpp.nii.gz ${PARCELLATION_NNR}
129 |
130 | # 1.3) apply control point to generate transformed mask/label from atlas to test image
131 | reg_resample -flo ${3}/mask/${NAME} -ref ${1} -cpp temp/${ATLAS}/${NAME}_${TEST_NAME}_cpp.nii.gz -NN -res mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz
132 | reg_resample -flo ${3}/label/${NAME} -ref ${1} -cpp temp/${ATLAS}/${NAME}_${TEST_NAME}_cpp.nii.gz -NN -res label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz
133 |
134 | # 2) prepare parameters for label fusion
135 | if (( $PARAMETER_NUMBER==0 )); then
136 | FIRST_TEMPLATE="temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
137 | FIRST_MASK="mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz"
138 | FIRST_LABEL="label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
139 | else
140 | MERGE_TEMPLATE="${MERGE_TEMPLATE} temp/${ATLAS}/${NAME}_${TEST_NAME}_f3d.nii.gz"
141 | MERGE_MASK="${MERGE_MASK} mask/${ATLAS}/${TEST_NAME}_nrr_mask_${NAME}.nii.gz"
142 | MERGE_LABEL="${MERGE_LABEL} label/${ATLAS}/${TEST_NAME}_label_${NAME}.nii.gz"
143 | fi
144 | let PARAMETER_NUMBER+=1
145 | else
146 | echo -e "Atlas image name ${TEST_NAME} is same as test image, skipped"
147 | fi
148 | done
149 | let PARAMETER_NUMBER-=1
150 |
151 | # Prepare 4D images for label fusion
152 | seg_maths $FIRST_MASK -merge $PARAMETER_NUMBER 4 $MERGE_MASK mask/${ATLAS}/${TEST_NAME}_nrr_mask_4D.nii.gz
153 | seg_maths $FIRST_LABEL -merge $PARAMETER_NUMBER 4 $MERGE_LABEL label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz
154 |
155 | # Start label fusion
156 | # Determine which label fusion method to use
157 | if [[ "${LABFUSION}" == "-STEPS" ]]; then # spaces around == are required: "${LABFUSION}=="-STEPS"" is one non-empty word and the test would ALWAYS be true
158 | seg_maths $FIRST_TEMPLATE -merge $PARAMETER_NUMBER 4 $MERGE_TEMPLATE label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz
159 | seg_LabFusion -in label/${ATLAS}/${TEST_NAME}_label_4D.nii.gz -STEPS ${k} ${n} $1 label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz -out "label/${TEST_NAME}_label_${ATLAS}_STEPS_${k}_${n}.nii.gz"
160 | # potential suffix: _NNG_${PARCELLATION_NNR} ?
161 |
162 | # Creating NRR mask
163 | # seg_maths label/${TEST_NAME}_label_${ATLAS}_STEPS_${k}_${n}.nii.gz -bin mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}.nii.gz
164 | seg_LabFusion -in mask/${ATLAS}/${TEST_NAME}_nrr_mask_4D.nii.gz -STEPS ${k} ${n} $1 label/${ATLAS}/${TEST_NAME}_template_4D.nii.gz ${LABFUSION_OPTION} -out mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}.nii.gz
165 | seg_maths mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}.nii.gz -dil ${DILATE} mask/${TEST_NAME}_mask_${ATLAS}_NRR_STEPS_${k}_${n}_d${DILATE}.nii.gz
166 | fi
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
--------------------------------------------------------------------------------
/depreciated/for_single_workstation/parcellation_batch.sh:
--------------------------------------------------------------------------------
1 | # Structural Parcellation batch shell script (single workstation)
2 | # Author: Ma Da (d.ma.11@ucl.ac.uk)
3 |
4 | # $1: folder include all the images to be parcellated
5 | # $2: atlas folder
6 | # $3: If exist, read the file to load user defined parameters (see file under sample_parameters for examples)
7 |
8 | #!/bin/bash
9 | # echo "Bash version ${BASH_VERSION}..."
10 | # export QSUB_CMD="qsub -l h_rt=2:00:00 -l h_vmem=9.9G -l tmem=9.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
11 | # export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -l h_vmem=14.9G -l tmem=14.9G -l s_stack=128M -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
12 | #!/bin/bash
13 | DILATE=3
14 | ATLAS=$(basename $2)
15 | MASK_FOLDER="mask_f3d_manual_correct_d1/wildtype" # default mask folder
16 | MASK_SUFFIX="_mask_${ATLAS}_STAPLE_d${DILATE}" # default mask suffix
17 | MASK_SUFFIX=""
18 |
19 | # Read user defined parameters # need to add a line to check if $3 exist ...
20 | if [ ! -z $3 ]; then # check if there is a 3rd argument
21 | if [ -f $3 ]; then # check if the file specified by 3rd argument exist
22 | . $3 # if the file named by the 3rd argument exists, read the parameters from it
23 | fi
24 | fi
25 |
26 | for G in `ls $1`
27 | do
28 | TEST_NAME=`echo "$G" | cut -d'.' -f1`
29 | NAME=`echo "$G" | cut -d'.' -f1`
30 | bash parcellation.sh $1/$G "${MASK_FOLDER}/${TEST_NAME}${MASK_SUFFIX}.nii.gz" $2 $3
31 | done
--------------------------------------------------------------------------------
/docs/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-slate
--------------------------------------------------------------------------------
/docs/groupwise_ex-vivo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dama-lab/multi-atlas-segmentation/7e2a76d66a0d970c7271204dbf70af8d1c1bf8df/docs/groupwise_ex-vivo.png
--------------------------------------------------------------------------------
/docs/groupwise_in_vivo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dama-lab/multi-atlas-segmentation/7e2a76d66a0d970c7271204dbf70af8d1c1bf8df/docs/groupwise_in_vivo.png
--------------------------------------------------------------------------------
/docs/journal.pone.0086576.g001.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dama-lab/multi-atlas-segmentation/7e2a76d66a0d970c7271204dbf70af8d1c1bf8df/docs/journal.pone.0086576.g001.png
--------------------------------------------------------------------------------
/docs/quickcheckdemo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dama-lab/multi-atlas-segmentation/7e2a76d66a0d970c7271204dbf70af8d1c1bf8df/docs/quickcheckdemo.png
--------------------------------------------------------------------------------
/parameters_samples/old_version/default_parameter_for_ex_vivo_coarse_step.sh:
--------------------------------------------------------------------------------
1 | export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 4 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
2 |
3 | export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 1 -R y -l h_vmem=12G -l tmem=12G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
4 |
5 | export PARCELLATION_NNR="-ln 4 -lp 4 -sx 0.6"
6 | export MASK_FOLDER="mask"
7 | export MASK_SUFFIX=""
8 | # export MASK_FOLDER="mask/mask_ex_vivo_LR_STEPS_3_8_d3"
9 | #export MASK_SUFFIX="_ex_vivo_LR_label_STEPS_3_8"
10 | export MASK_AFF="-ln 4 -lp 4"
11 | export k=10
12 | export n=8
13 | export LABFUSION_OPTION="-v 1 -MRF_beta 4"
14 |
--------------------------------------------------------------------------------
/parameters_samples/old_version/default_parameter_for_ex_vivo_fine_step.sh:
--------------------------------------------------------------------------------
1 | export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 4 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
2 |
3 | export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 1 -R y -l h_vmem=12G -l tmem=12G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
4 |
5 | export PARCELLATION_NNR="-ln 3 -lp 3 -sx 0.3"
6 | export MASK_FOLDER="mask"
7 | export MASK_SUFFIX=""
8 | # export MASK_FOLDER="mask/mask_ex_vivo_LR_STEPS_3_8_d3"
9 | #export MASK_SUFFIX="_ex_vivo_LR_label_STEPS_3_8"
10 | export MASK_AFF="-ln 4 -lp 4"
11 | export k=8
12 | export n=8
13 | export LABFUSION_OPTION="-v 1 -MRF_beta 4"
14 |
--------------------------------------------------------------------------------
/parameters_samples/old_version/default_parameter_for_in_vivo_coarse_step.sh:
--------------------------------------------------------------------------------
1 | export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 4 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
2 |
3 | export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 1 -R y -l h_vmem=8G -l tmem=8G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
4 |
5 | export PARCELLATION_NNR="-ln 4 -lp 4 -sx 0.6"
6 | export MASK_FOLDER="mask"
7 | export MASK_SUFFIX=""
8 | # export MASK_FOLDER="mask/mask_ex_vivo_LR_STEPS_3_8_d3"
9 | #export MASK_SUFFIX="_ex_vivo_LR_label_STEPS_3_8"
10 | export MASK_AFF="-ln 4 -lp 4"
11 | export k=10
12 | export n=8
13 | export LABFUSION_OPTION="-v 1 -MRF_beta 4"
14 |
--------------------------------------------------------------------------------
/parameters_samples/old_version/default_parameter_for_in_vivo_fine_step.sh:
--------------------------------------------------------------------------------
1 | export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 4 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
2 |
3 | export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 1 -R y -l h_vmem=8G -l tmem=8G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
4 |
5 | export PARCELLATION_NNR="-ln 3 -lp 3 -sx 0.4"
6 | export MASK_FOLDER="mask"
7 | export MASK_SUFFIX=""
8 | # export MASK_FOLDER="mask/mask_ex_vivo_LR_STEPS_3_8_d3"
9 | #export MASK_SUFFIX="_ex_vivo_LR_label_STEPS_3_8"
10 | export MASK_AFF="-ln 4 -lp 4"
11 | export k=5
12 | export n=8
13 | export LABFUSION_OPTION="-v 1 -MRF_beta 4"
14 |
--------------------------------------------------------------------------------
/parameters_samples/old_version/sample_parameter.sh:
--------------------------------------------------------------------------------
1 | ########### job submission parameters (only needed for cluster version) ###############
2 | # Recommended job submission parameters for large FOV (e.g. 256*512*256), normally an ex vivo scan
3 | export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 4 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
4 | export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 1 -R y -l h_vmem=12G -l tmem=12G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
5 | # Recommended job submission parameters for small matrix (e.g. 192*256*96), normally in vivo scan (only necessary for cluster version)
6 | # export QSUB_CMD="qsub -l h_rt=5:00:00 -pe smp 4 -R y -l h_vmem=1G -l tmem=1G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
7 | # export QSUB_SEG_MATH="qsub -l h_rt=1:00:00 -pe smp 1 -R y -l h_vmem=8G -l tmem=8G -j y -S /bin/sh -b y -cwd -V -o job_output -e job_error"
8 |
9 | ################## image registration parameters #####################
10 | # MASK_AFF: affine registration parameter for "mask.sh" (in "reg_aladin" command)
11 | # PARCELLATION_NNR: non-rigid registration parameter for "parcellation.sh" (in "reg_f3d" command)
12 | #######################################################################
13 | # Recommended parameters for images with high resolution (e.g. ~50µm), normally for ex vivo high resolution images
14 | export MASK_AFF="-ln 4 -lp 4"
15 | export PARCELLATION_NNR="-vel -ln 4 -lp 4 -sx 0.6"
16 | # Recommended parameters for images with lower resolution (e.g. ~100µm), normally for in vivo scans
17 | # export PARCELLATION_NNR="-ln 3 -lp 3 -sx 0.4"
18 |
19 | ############# parameters to specify brain mask files #################
20 | # MASK_FOLDER: the folder contains brain mask files
21 | # MASK_SUFFIX: suffix of the mask files. For instance, if the test image is "A.nii", the corresponding mask file is "${MASK_FOLDER}/A${MASK_SUFFIX}.nii"
22 | ######################################################################
23 | # default mask file name pattern, following the output of the mask.sh
24 | export MASK_FOLDER="mask"
25 | export MASK_SUFFIX=""
26 |
27 | ################## label fusion parameters for parcellation.sh ###################
28 | # k: kernel size of local normalised cross correlation for atlas ranking (in seg_LabFusion -STEPS)
29 | # n: number of top-ranked atlases selected for label fusion
30 | # LABFUSION_OPTION: other parameters to pass to "seg_LabFusion"
31 | #############################################################
32 | export k=10
33 | export n=8
34 | export LABFUSION_OPTION="-v 1 -MRF_beta 4"
35 |
--------------------------------------------------------------------------------
/parameters_samples/parameter_sample.sh:
--------------------------------------------------------------------------------
1 | # affine transformation parameter for masking (for reg_aladin)
2 | # -nac: initialize transformation with the NIfTI header (default: centre of image)
3 | # -cog: centre of mass/gravity of mask to initialize transformation (default: centre of image)
4 | affine_param="" # e.g. -rigOnly -nac
5 |
6 | # parameter for reg_resample
7 | resamp_param=""
8 |
9 | # parameter for seg_LabFusion in mas_masking
10 | labfusion_param=""
11 |
12 | # parameter for reg_f3d in mas_mapping
13 | nrr_param=""
14 |
15 |
16 | ################################################################
17 | # AVOID define parameters for the following internal variable:
18 | # -mas_mapping_param, mas_fusion_param, mas_parcell_fusion_param
19 | # Otherwise, may cause program error
20 | ################################################################
--------------------------------------------------------------------------------