├── README ├── check_all_featdirs.py ├── check_featdir.py ├── data ├── facemask.nii.gz └── mean_reg2mean.nii.gz ├── deface.py ├── fsld └── fsld_raw.R ├── launch.py ├── launch_qsub.py ├── run_shell_cmd.py └── setup_subject.py /README: -------------------------------------------------------------------------------- 1 | This is various python code related to data processing and analysis. 2 | 3 | deface.py: this performing defacing of an anatomical image using a 4 | custom mask developed at UT. you will need to grab the template 5 | and mask from the data directory and then change the paths to those 6 | images in the code 7 | 8 | launch.py: this is an experimental front-end for the launch_qsub 9 | function to allow it to be used from the command line. not well 10 | tested yet. 11 | 12 | launch_qsub.py: code to launch SGE jobs from within python. This is 13 | customized for the TACC system and will require modification to run 14 | on any other system. 15 | 16 | run_shell_cmd.py: simple front end to Popen to run shell command 17 | and wait for it to finish 18 | 19 | setup_subject.py: a script to perform preprocessing and setup for 20 | a full fMRI study. this is a recent port of the setup_subject 21 | script from UCLA, and has not been thoroughly tested yet. 22 | It makes a number of assumptions about the DICOM structure which 23 | are true of GE data but probably will not work without modification 24 | for any other systems. 25 | 26 | fsld: this is an R script used by setup_subject.py to perform data 27 | diagnostics, but it could also be used in standalone mode. 28 | -------------------------------------------------------------------------------- /check_all_featdirs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ check_all_featdirs.py - check all first-level feat dirs 3 | 4 | USAGE: check_featdir 5 | """ 6 | 7 | ## Copyright 2011, Russell Poldrack. All rights reserved. 8 | 9 | ## Redistribution and use in source and binary forms, with or without modification, are 10 | ## permitted provided that the following conditions are met: 11 | 12 | ## 1. Redistributions of source code must retain the above copyright notice, this list of 13 | ## conditions and the following disclaimer. 14 | 15 | ## 2. Redistributions in binary form must reproduce the above copyright notice, this list 16 | ## of conditions and the following disclaimer in the documentation and/or other materials 17 | ## provided with the distribution. 18 | 19 | ## THIS SOFTWARE IS PROVIDED BY RUSSELL POLDRACK ``AS IS'' AND ANY EXPRESS OR IMPLIED 20 | ## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 | ## FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL RUSSELL POLDRACK OR 22 | ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 | ## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | ## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ## ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 26 | ## NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 27 | ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | 29 | import os,sys 30 | from check_featdir import * 31 | 32 | 33 | def usage(): 34 | """ print the docstring and exit""" 35 | sys.stdout.write(__doc__) 36 | sys.exit(2) 37 | 38 | 39 | def main(): 40 | if len(sys.argv)<2: 41 | usage() 42 | 43 | check_all_featdirs(sys.argv[1]) 44 | 45 | def check_all_featdirs(basedir): 46 | import numpy as N 47 | 48 | if not os.path.exists(basedir): 49 | print basedir+' does not exist!' 50 | usage() 51 | if basedir[-1]=='/': 52 | basedir=basedir[:-1] 53 | 54 | featdirs=[] 55 | for d in os.listdir(basedir): 56 | if d[0:3]=='sub': 57 | for m in os.listdir('%s/%s/model/'%(basedir,d)): 58 | if m[-5:]=='.feat': 59 | #print 'found %s/%s/model/%s'%(basedir,d,m) 60 | featdirs.append('%s/%s/model/%s'%(basedir,d,m)) 61 | 62 | badness={} 63 | status={} 64 | print 'checking %d featdirs...'%len(featdirs) 65 | for f in featdirs: 66 | badness[f],status[f]=check_featdir(f) 67 | if N.sum(badness.values())==0: 68 | print 'no problems found' 69 | else: 70 | for f in featdirs: 71 | if badness[f]>0: 72 | print 'problem with %s'%f 73 | print status[f] 74 | 75 | return badness,status 76 | 77 | 78 | if __name__ == '__main__': 79 | main() 80 | -------------------------------------------------------------------------------- /check_featdir.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ check_featdir.py - check a first-level feat dir 3 | 4 | USAGE: check_featdir 5 | """ 6 | 7 | ## Copyright 2011, Russell Poldrack. All rights reserved. 8 | 9 | ## Redistribution and use in source and binary forms, with or without modification, are 10 | ## permitted provided that the following conditions are met: 11 | 12 | ## 1. Redistributions of source code must retain the above copyright notice, this list of 13 | ## conditions and the following disclaimer. 14 | 15 | ## 2. Redistributions in binary form must reproduce the above copyright notice, this list 16 | ## of conditions and the following disclaimer in the documentation and/or other materials 17 | ## provided with the distribution. 18 | 19 | ## THIS SOFTWARE IS PROVIDED BY RUSSELL POLDRACK ``AS IS'' AND ANY EXPRESS OR IMPLIED 20 | ## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 | ## FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL RUSSELL POLDRACK OR 22 | ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 | ## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | ## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ## ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 26 | ## NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 27 | ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | 29 | import os,sys 30 | from mvpa.misc.fsl.base import read_fsl_design 31 | 32 | 33 | def usage(): 34 | """ print the docstring and exit""" 35 | sys.stdout.write(__doc__) 36 | sys.exit(2) 37 | 38 | def main(): 39 | if len(sys.argv)<2: 40 | usage() 41 | 42 | badness,status=check_featdir(sys.argv[1]) 43 | if badness==0: 44 | print 'no problems found' 45 | else: 46 | print 'problem found' 47 | print status 48 | 49 | def check_featdir(featdir): 50 | 51 | if not os.path.exists(featdir+'/design.fsf'): 52 | print featdir+'/design.fsf does not exist!' 53 | usage() 54 | 55 | design=read_fsl_design(featdir+'/design.fsf') 56 | 57 | status={} 58 | badness=0 59 | 60 | status['subdirs']={} 61 | subdirs_to_check=['reg','stats'] 62 | for s in subdirs_to_check: 63 | if not os.path.exists(featdir+'/'+s): 64 | status['subdirs'][s]=0 65 | print 'missing: '+featdir+'/'+s 66 | badness+=1 67 | else: 68 | status['subdirs'][s]=1 69 | 70 | status['files']={} 71 | files_to_check=['filtered_func_data.nii.gz','stats/res4d.nii.gz','reg/example_func2standard.mat','reg/highres2standard_warp.nii.gz'] 72 | for s in files_to_check: 73 | if not os.path.exists(featdir+'/'+s): 74 | status['files'][s]=0 75 | print 'missing: '+featdir+'/'+s 76 | badness+=1 77 | else: 78 | status['files'][s]=1 79 | 80 | 81 | status['zstats']={} 82 | ncontrasts=design['fmri(ncon_orig)'] 83 | for c in range(ncontrasts): 84 | if not os.path.exists(featdir+'/stats/zstat%d.nii.gz'%int(c+1)): 85 | status['zstats'][c+1]=0 86 | badness+=1 87 | else: 88 | status['zstats'][c+1]=1 89 | 90 | if badness>0: 91 | print 'found %d problems'%badness 92 | 93 | return badness,status 94 | 95 | if __name__ == '__main__': 96 | main() 97 | -------------------------------------------------------------------------------- /data/facemask.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/poldrack/python/c3fbb940be81e9bff770be382d2682cf282f0795/data/facemask.nii.gz -------------------------------------------------------------------------------- /data/mean_reg2mean.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/poldrack/python/c3fbb940be81e9bff770be382d2682cf282f0795/data/mean_reg2mean.nii.gz -------------------------------------------------------------------------------- /deface.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ deface an image using FSL 3 | USAGE: deface 4 | """ 5 | 6 | import nibabel 7 | import os,sys 8 | import numpy as N 9 | 10 | import subprocess 11 | 12 | def run_shell_cmd(cmd,cwd=[]): 13 | """ run a command in the shell using Popen 14 | """ 15 | if cwd: 16 | process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,cwd=cwd) 17 | else: 18 | process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) 19 | for line in process.stdout: 20 | print line.strip() 21 | process.wait() 22 | 23 | def usage(): 24 | """ print the docstring and exit""" 25 | sys.stdout.write(__doc__) 26 | sys.exit(2) 27 | 28 | 29 | template='/corral/utexas/poldracklab/data/facemask/mean_reg2mean.nii.gz' 30 | facemask='/corral/utexas/poldracklab/data/facemask/facemask.nii.gz' 31 | 32 | if len(sys.argv)<2: 33 | # usage() 34 | infile='mprage.nii.gz' 35 | else: 36 | infile=sys.argv[1] 37 | 38 | if os.environ.has_key('FSLDIR'): 39 | FSLDIR=os.environ['FSLDIR'] 40 | else: 41 | print 'FSLDIR environment variable must be defined' 42 | sys.exit(2) 43 | 44 | 45 | #temp=nibabel.load(template) 46 | 47 | #tempdata=temp.get_data() 48 | 49 | #facemask=N.ones((91,109,91)) 50 | 51 | #facemask[:,71:,:18]=0 52 | 53 | #facemaskimg=nibabel.Nifti1Image(facemask,temp.get_affine()) 54 | #facemaskimg.to_filename('facemask.nii.gz') 55 | 56 | cmd='flirt -in %s -ref %s -omat tmp_mask.mat'%(template,infile) 57 | print 'Running: '+cmd 58 | run_shell_cmd(cmd) 59 | 60 | cmd='flirt -in %s -out facemask_tmp -ref %s -applyxfm -init tmp_mask.mat'%(facemask,infile) 61 | print 'Running: '+cmd 62 | run_shell_cmd(cmd) 63 | 64 | 65 | cmd='fslmaths %s -mul facemask_tmp %s'%(infile,infile.replace('.nii.gz','_defaced.nii.gz')) 66 | print 'Running: '+cmd 67 | run_shell_cmd(cmd) 68 | 69 | os.remove('facemask_tmp.nii.gz') 70 | os.remove('tmp_mask.mat') 71 | -------------------------------------------------------------------------------- /fsld/fsld_raw.R: -------------------------------------------------------------------------------- 1 | fsld_raw <- function(fname,maskfname='') { 2 | # fmri diagnosis for raw fmri timeseries data 3 | 4 | .libPaths('/corral/utexas/poldracklab/software_lonestar/R_libs/') 5 | MCTHRESH=2.0 6 | 7 | if (!exists('maskfname')) { 8 | maskfname='' 9 | } 10 | 11 | library(matlab) 12 | library(lattice) 13 | library(MASS) 14 | library(fmri) 15 | library(gplots) 16 | 17 | si=Sys.info() 18 | MYEMAIL=sprintf('%s@vone.psych.ucla.edu',si['user']) 19 | 20 | # currently this code makes some pretty shaky assumptions that .img 21 | # and .nii.gz are the only file types 22 | is_gzipped=0 23 | 24 | if (!file.exists(fname)) { 25 | stop(sprintf('%s does not exist - exiting!',fname)) 26 | } 27 | 28 | fileinfo=fileparts(fname) 29 | if (fileinfo$pathstr=='' | fileinfo$pathstr=='/') { 30 | fileinfo$pathstr='.' 31 | } 32 | if (fileinfo$ext == '.gz') { 33 | print(sprintf('unzipping %s...',fname)) 34 | is_gzipped=1; 35 | system(sprintf('gunzip %s',fname)) 36 | fname=sprintf('%s/%s',fileinfo$pathstr,fileinfo$name) 37 | fileinfo=fileparts(fname) 38 | if (fileinfo$pathstr==''| fileinfo$pathstr=='/') { 39 | fileinfo$pathstr='.' 40 | } 41 | 42 | } 43 | #maskfname=sprintf('%s/%s_brain_mask%s',fileinfo$pathstr,fileinfo$name,fileinfo$ext) 44 | 45 | if (maskfname == '') { 46 | maskfname=sprintf('%s/%s_brain_mask.nii.gz',fileinfo$pathstr,fileinfo$name) 47 | print(sprintf('checking for %s',maskfname)) 48 | if (!file.exists(maskfname)) { 49 | print('hold on...') 50 | if (is_gzipped) { 51 | system(sprintf('gzip %s',fname)) 52 | } 53 | stop('no default or specified mask; you need to run betfunc first!') 54 | } 55 | } 56 | if (!file.exists(maskfname)) { 57 | stop(sprintf('%s does not exist - exiting!',maskfname)) 58 | } 59 | maskfileinfo=fileparts(maskfname) 60 | if (maskfileinfo$pathstr=='' | maskfileinfo$pathstr=='/') { 61 | maskfileinfo$pathstr='.' 62 | } 63 | if (maskfileinfo$ext == '.gz') { 64 | print(sprintf('unzipping %s...',maskfname)) 65 | is_gzipped=1; 66 | system(sprintf('gunzip %s',maskfname)) 67 | maskfname=sprintf('%s/%s',maskfileinfo$pathstr,maskfileinfo$name) 68 | maskfileinfo=fileparts(maskfname) 69 | } 70 | if (maskfileinfo$pathstr=='' | maskfileinfo$pathstr=='/') { 71 | maskfileinfo$pathstr='.' 72 | } 73 | 74 | 75 | # load fmri data 76 | if (file.exists(fname)) { 77 | print(sprintf('loading image data (%s)...',fname)) 78 | func_data<-read.NIFTI(fname) 79 | img<-extract.data(func_data) 80 | 81 | } else { 82 | print('image data already loaded - skipping load') 83 | } 84 | 85 | 86 | # load mask 87 | if (file.exists(maskfname)) { 88 | print(sprintf('loading mask data (%s)...',maskfname)) 89 | mask_data<-read.NIFTI(maskfname) 90 | maskimg<-extract.data(mask_data) 91 | 92 | } else { 93 | print('mask_data already loaded - skipping load') 94 | } 95 | 96 | if (is_gzipped) { 97 | print('recompressing data files...') 98 | system(sprintf('gzip %s',fname)) 99 | system(sprintf('gzip %s',maskfname)) 100 | } 101 | 102 | # load motion data 103 | mcpar_file=sprintf('%s/%s.par',fileinfo$pathstr,fileinfo$name) 104 | if (file.exists(mcpar_file)) { 105 | print(sprintf('loading motion parameters from %s',mcpar_file)) 106 | mcpar=read.table(mcpar_file) 107 | } else { 108 | print(sprintf('no motion parameters present in %s',mcpar_file)) 109 | mcpar=0 110 | } 111 | 112 | 113 | # compute displacement for motion data 114 | if (length(mcpar) > 1) { 115 | ntp=length(mcpar[,1]) 116 | mcdisp=mcpar[2:ntp,]-mcpar[1:ntp-1,] 117 | mean_mcdisp=mean(mcdisp) 118 | } 119 | 120 | maxdisp=max(abs(mcdisp[,4:6])) 121 | maxrot=max(abs(mcdisp[,1:3])) 122 | 123 | img.size<-size(img) 124 | img.std<-zeros(img.size[1:3]) 125 | img.mean<-zeros(img.size[1:3]) 126 | 127 | print('computing voxelwise statistics') 128 | 129 | for (x in 1:img.size[1]) { 130 | for (y in 1:img.size[2]) { 131 | for (z in 1:img.size[3]) { 132 | img.std[x,y,z]=std(img[x,y,z,]) 133 | img.mean[x,y,z]=mean(img[x,y,z,]) 134 | } 135 | } 136 | } 137 | 138 | 139 | 140 | img.mean_stdunits=img.mean/img.std 141 | img.mask_mean=zeros(1,img.size[4]) 142 | img.mask_std=zeros(1,img.size[4]) 143 | img.n_outliers=zeros(1,img.size[4]) 144 | img.mean_resid=zeros(1,img.size[4]) 145 | out_cut=2 146 | img.mad=zeros(1,img.size[4]) 147 | img.cv=zeros(1,img.size[4]) 148 | img.snr=zeros(1,img.size[4]) 149 | 150 | slice.mean=zeros(img.size[3],img.size[4]) 151 | 152 | print('computing timepoint statistics') 153 | 154 | for (t in 1:img.size[4]) { 155 | tmp<-img[,,,t] 156 | tmp_nonbrain=tmp[maskimg==0] 157 | tmp<-tmp[maskimg>0] 158 | img.mask_mean[t]=mean(tmp) 159 | # img.mask_std[t]=std(tmp) 160 | img.mad[t]=mad(tmp)/1.4826 # normalization factor to approximate stdev estimate 161 | img.cv[t]=img.mad[t]/median(tmp) 162 | med=median(tmp) 163 | outliers=find(tmp<(med - img.mad[t]*out_cut ) | tmp>(med + img.mad[t]*out_cut )) 164 | img.n_outliers[t]=length(outliers) 165 | img.snr[t]=med/median(tmp_nonbrain) 166 | # compute slicewise statistics at each timepoint 167 | for (z in 1:img.size[3]) { 168 | tmpslice=img[,,z,t] 169 | maskslice=maskimg[,,z,1] 170 | tmpslice=tmpslice[maskslice>0] 171 | if (is.nan(mean(tmpslice))) {slice.mean[z,t]=0} 172 | else {slice.mean[z,t]=mean(tmpslice)} 173 | 174 | } 175 | } 176 | 177 | img.n_outliers=img.n_outliers/length(tmp) 178 | 179 | 180 | # make figures 181 | 182 | # first, make data diagnostics figure 183 | pdf(file=sprintf('%s/%s_diag.pdf',fileinfo$pathstr,fileinfo$name),width=8,height=10) 184 | 185 | layout(matrix(c(1,2,3,3,4,4,5,5),4,2,byrow=TRUE)) 186 | plotinfo=c(sprintf('%s',getwd()),sprintf('Mean SNR: %0.3f',mean(img.snr)),sprintf('Maximum translation: %0.3f',maxdisp),sprintf('Maximum rotation: %0.3f',maxrot)) 187 | textplot(plotinfo,halign="left",valign="top",cex=1) 188 | img.hist.mean<-truehist(img.mean[img.mean > 0],xlab='Intensity') 189 | title(main='Signal Histogram') 190 | #img.hist.std<-truehist(img.std[img.mean > 0],xlab='Stdev') 191 | #title(main='Stdev Histogram') 192 | #plot(img.mean[img.mean > 0],img.std[img.mean > 0],xlab='Intensity',ylab='stdev') 193 | #title(main='Stdev vs. mean') 194 | 195 | plot(1:t,img.mask_mean,type='l',xlab='Timepoints',ylab='Mean intensity') 196 | title(main='Global in-mask signal: Mean') 197 | 198 | s=spectrum(t(img.mask_mean),spans=10,main='Log power spectrum of global signal timecourse',ylab='log power') 199 | 200 | plot(1:t,img.snr,type='l',xlab='Timepoints',ylab='SNR') 201 | title('Signal to noise ratio (based on BET mask)') 202 | 203 | layout(matrix(c(1,2,3,4),4,1,byrow=TRUE)) 204 | 205 | imagesc(slice.mean,xlab='Timepoints',ylab='Slices') 206 | title(main='Mean slice intensity by time') 207 | 208 | plot(1:t,img.cv,type='l',xlab='Timepoints',ylab='Robust CV (MAD/median)') 209 | title(main='Robust coefficient of variation on in-mask voxels') 210 | 211 | if (length(mcpar)>1) { 212 | matplot(mcdisp[,4:6],type='l',xlab='timepoints',ylab='translational displacement (mm)',lty=c(1,1,1)) 213 | title(sprintf('Motion parameters: Translation (max: X = %0.3f mm, Y = %0.3f mm, Z = %0.3f mm)',max(mcdisp[,4]),max(mcdisp[,5]), max(mcdisp[,6]))) 214 | matplot(mcdisp[,1:3],type='l',xlab='timepoints',ylab='rotational displacement (degrees)',lty=c(1,1,1)) 215 | title(sprintf('Motion parameters: Rotation (max: X = %0.3f mm, Y = %0.3f mm, Z = %0.3f mm)',max(mcdisp[,1]),max(mcdisp[,2]), max(mcdisp[,3]))) 216 | 217 | } 218 | 219 | dev.off() 220 | 221 | if (maxdisp > MCTHRESH) { 222 | print('motion threshold exceeded - sending email') 223 | mailmsg=sprintf('Filename: %s\nDirectory: %s\nMaximum translational displacement = %0.3f\nMaximum rotational displacement = %0.3f',fname,getwd(),maxdisp,maxrot) 224 | 225 | mailcmd=sprintf('echo "%s" | mailx -a %s -r %s -s "Motion threshold exceeded: %s", %s',mailmsg,sprintf('%s_diag.pdf',fileinfo$name),MYEMAIL,getwd(),MYEMAIL) 226 | 227 | system(mailcmd) 228 | } 229 | 230 | } 231 | -------------------------------------------------------------------------------- /launch.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ launch.py - a python function to launch SGE jobs on TACC Lonestar 3 | this is a function version of the launch command line script 4 | 5 | """ 6 | 7 | ## Copyright 2011, Russell Poldrack. All rights reserved. 8 | 9 | ## Redistribution and use in source and binary forms, with or without modification, are 10 | ## permitted provided that the following conditions are met: 11 | 12 | ## 1. Redistributions of source code must retain the above copyright notice, this list of 13 | ## conditions and the following disclaimer. 14 | 15 | ## 2. Redistributions in binary form must reproduce the above copyright notice, this list 16 | ## of conditions and the following disclaimer in the documentation and/or other materials 17 | ## provided with the distribution. 18 | 19 | ## THIS SOFTWARE IS PROVIDED BY RUSSELL POLDRACK ``AS IS'' AND ANY EXPRESS OR IMPLIED 20 | ## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 | ## FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL RUSSELL POLDRACK OR 22 | ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 | ## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | ## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ## ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 26 | ## NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 27 | ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | 29 | 30 | 31 | import argparse 32 | import sys,os 33 | from tempfile import * 34 | import subprocess 35 | from launch_qsub import * 36 | 37 | 38 | def main(): 39 | if len(sys.argv)<2: 40 | usage() 41 | 42 | c,parser=SetupParser() 43 | argdata=parser.parse_known_args(sys.argv[1:],namespace=c) 44 | 45 | # first check for .launch file in home directory 46 | if os.path.exists(os.path.expanduser('~')+'/.launch_user') and not c.ignoreuser: 47 | f=open(os.path.expanduser('~')+'/.launch_user') 48 | for cmd in f.readlines(): 49 | print cmd 50 | #sys.argv.append(cmd.strip()) 51 | parser.parse_args([cmd.strip()],namespace=c) 52 | f.close() 53 | 54 | 55 | if not c.projname: 56 | print 'You must specify a project name using the -j flag - exiting' 57 | sys.exit(0) 58 | 59 | if len(argdata[1])>0: 60 | cmd=' '.join(argdata[1]) 61 | else: 62 | cmd='' 63 | 64 | 65 | launch_qsub(cmd,script_name=c.script_name,runtime=c.runtime,ncores=c.ncores,parenv=c.parenv,jobname=c.jobname,projname=c.projname,queue=c.queue,email=c.email,qsubfile=c.qsubfile,keepqsubfile=c.keepqsubfile,ignoreuser=c.ignoreuser,test=c.test,compiler=c.compiler,verbose=1,parser=parser,c=c) 66 | 67 | 68 | 69 | if __name__ == '__main__': 70 | main() 71 | -------------------------------------------------------------------------------- /launch_qsub.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ launch_qsub.py - a python function to launch SGE jobs on TACC Lonestar 3 | 4 | this is a function version of the launch command line script 5 | 6 | """ 7 | 8 | ## Copyright 2011, Russell Poldrack. All rights reserved. 9 | 10 | ## Redistribution and use in source and binary forms, with or without modification, are 11 | ## permitted provided that the following conditions are met: 12 | 13 | ## 1. Redistributions of source code must retain the above copyright notice, this list of 14 | ## conditions and the following disclaimer. 15 | 16 | ## 2. Redistributions in binary form must reproduce the above copyright notice, this list 17 | ## of conditions and the following disclaimer in the documentation and/or other materials 18 | ## provided with the distribution. 19 | 20 | ## THIS SOFTWARE IS PROVIDED BY RUSSELL POLDRACK ``AS IS'' AND ANY EXPRESS OR IMPLIED 21 | ## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 22 | ## FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL RUSSELL POLDRACK OR 23 | ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 24 | ## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | ## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 26 | ## ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 27 | ## NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 28 | ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | 30 | 31 | 32 | import argparse 33 | import sys,os 34 | from tempfile import * 35 | import subprocess 36 | 37 | MAXCORES=4104 38 | 39 | 40 | def usage(): 41 | """ print the docstring and exit""" 42 | sys.stdout.write(__doc__) 43 | sys.exit(2) 44 | 45 | class C(object): 46 | pass 47 | 48 | def SetupParser(): 49 | c=C() 50 | parser = argparse.ArgumentParser(description='process SGE job.') 51 | 52 | parser.add_argument('-s','--script',help='name of parallel script to run',dest='script_name') 53 | parser.add_argument('-d','--cwd',help='name of working directory',dest='directory') 54 | parser.add_argument('-r','--runtime',help='maximum runtime for job',default='01:00:00',dest='runtime') 55 | parser.add_argument('-o','--outfile',help='outputfile',dest='outfile') 56 | parser.add_argument('-p','--numproc',help='number of cores',dest='ncores') 57 | parser.add_argument('-e','--parenv',help='name of parallel environment (use smaller way for more memory per job)',default='12way',dest='parenv') 58 | parser.add_argument('-n','--jobname',help='job name',default='launch',dest='jobname') 59 | parser.add_argument('-j','--projname',help='name of project',dest='projname') 60 | parser.add_argument('-q','--queue',help='name of queue',default='normal',dest='queue') 61 | parser.add_argument('-m','--email',help='email address for notification',dest='email') 62 | parser.add_argument('-f','--qsubfile',help='name of qsub file',dest='qsubfile') 63 | parser.add_argument('-k','--keepqsubfile',help='keep qsub file',dest='keepqsubfile', action="store_true",default=False) 64 | parser.add_argument('-u','--ignoreuser',help='ignore ~/.launch_user',dest='ignoreuser', action="store_true",default=False) 65 | parser.add_argument('-t','--test',help='do not actually launch job',dest='test', action="store_true",default=False) 66 | parser.add_argument('-c','--compiler',help='compiler (default=intel, use gcc for numpy)',dest='compiler', default='intel') 67 | parser.add_argument('-i','--hold_jid',help='job ID to wait for before starting this job',dest='hold', default=[]) 68 | 69 | return c,parser 70 | 71 | 72 | def launch_qsub(serialcmd='',script_name='',runtime='01:00:00',ncores=0,parenv='12way',jobname='launch',projname='',queue='normal',email=False,qsubfile='',keepqsubfile=False,ignoreuser=False,test=False,compiler='intel',parser=[],c=[],verbose=0,hold=[],outfile=[],cwd=[]): 73 | """ function to launch SGE job 74 | 75 | launch(serialcmd='',script_name='',runtime='01:00:00',ncores=0,parenv='12way',jobname='launch',projname='',queue='normal',email,qsubfile='',keepqsubfile=False,ignoreuser=False,test=False,compiler='intel',parser=[],verbose=0,hold=[]) 76 | """ 77 | 78 | if parser==[]: 79 | c,parser=SetupParser() 80 | 81 | # set email prior to reading .launch file 82 | # so that setting overrides default 83 | if c.__dict__.has_key('email'): 84 | email=c.email 85 | 86 | # first check for .launch file in home directory 87 | if os.path.exists(os.path.expanduser('~')+'/.launch_user') and not ignoreuser: 88 | f=open(os.path.expanduser('~')+'/.launch_user') 89 | for cmd in f.readlines(): 90 | if verbose: 91 | print cmd 92 | parser.parse_args([cmd.strip()],namespace=c) 93 | f.close() 94 | 95 | if c.projname: 96 | projname=c.projname 97 | if not projname: 98 | print 'You must specify a project name' 99 | sys.exit(0) 100 | #return [] 101 | 102 | if len(serialcmd)>0: 103 | parametric=0 104 | print 'Running serial command: '+serialcmd 105 | ncores=12 106 | parenv='1way' 107 | queue='serial' 108 | elif script_name: 109 | parametric=1 110 | if verbose: 111 | print 'Submitting parametric job file: '+script_name 112 | try: 113 | f=open(script_name,'r') 114 | except: 115 | print '%s does not exist -e!'%script_name 116 | sys.exit(0) 117 | script_cmds=f.readlines() 118 | f.close() 119 | ncmds=len(script_cmds) 120 | if verbose: 121 | print 'found %d commands'%ncmds 122 | # need to check for empty lines 123 | for s in script_cmds: 124 | if s.strip()=='': 125 | print 'command file contains empty lines - please remove them first' 126 | sys.exit() 127 | if not ncores: 128 | ncores=(ncmds/12+1)*12 129 | if verbose: 130 | print 'Number of processors not specified - estimating as %d'%ncores 131 | 132 | if int(ncores)>MAXCORES: 133 | ncores=MAXCORES 134 | else: 135 | print 'ERROR: you must either specify a script name (using -s) or a command to run\n\n' 136 | parser.print_help() 137 | sys.exit() 138 | 139 | if not qsubfile: 140 | qsubfile_fd,qsubfile=mkstemp(prefix=jobname+"_",dir='.',suffix='.qsub',text=True) 141 | os.close(qsubfile_fd) 142 | 143 | if verbose: 144 | print 'Outputting qsub commands to %s'%qsubfile 145 | qsubfile_fd=open(qsubfile,'w') 146 | if parametric: 147 | qsubfile_fd.write('#!/bin/csh\n#\n') 148 | else: 149 | qsubfile_fd.write('#!/bin/bash\n#\n') 150 | qsubfile_fd.write('# SGE control file automatically created by launch\n') 151 | if parametric==1: 152 | qsubfile_fd.write('# Using parametric launcher with control file: %s\n'%script_name) 153 | else: 154 | qsubfile_fd.write('# Launching single command: %s\n#\n#\n'%serialcmd) 155 | 156 | qsubfile_fd.write('#$ -V #Inherit the submission environment\n') 157 | qsubfile_fd.write('#$ -cwd # Start job in submission directory\n') 158 | qsubfile_fd.write('#$ -j y # Combine stderr and stdout\n') 159 | qsubfile_fd.write('#$ -N %s # Job Name\n'%jobname) 160 | qsubfile_fd.write('#$ -A %s\n'%projname) 161 | qsubfile_fd.write('#$ -o $JOB_NAME.o$JOB_ID # Name of the output file (eg. myMPI.oJobID)\n') 162 | qsubfile_fd.write('#$ -pe %s %d\n'%(parenv,int(ncores))) 163 | qsubfile_fd.write('#$ -q %s\n'%queue) 164 | qsubfile_fd.write('#$ -l h_rt=%s\n'%runtime) 165 | 166 | if email: 167 | qsubfile_fd.write('#$ -M %s\n'%email) 168 | qsubfile_fd.write('#$ -m be\n') 169 | if c.hold: 170 | hold=int(c.hold) 171 | if hold: 172 | if verbose: 173 | print 'will hold until completion of job %d'%hold 174 | qsubfile_fd.write('#$ -hold_jid %d\n'%hold) 175 | 176 | 177 | qsubfile_fd.write('#----------------\n# Job Submission\n#----------------\n') 178 | 179 | if not parametric: 180 | qsubfile_fd.write('\n\nset -x # Echo commands, use "set echo" with csh\n') 181 | qsubfile_fd.write(serialcmd+'\n') 182 | 183 | else: 184 | qsubfile_fd.write('module load launcher\n') 185 | if compiler=='intel': 186 | qsubfile_fd.write('module swap gcc intel\n') 187 | if compiler=='gcc': 188 | qsubfile_fd.write('module swap intel gcc\n') 189 | 190 | qsubfile_fd.write('setenv EXECUTABLE $TACC_LAUNCHER_DIR/init_launcher\n') 191 | qsubfile_fd.write('setenv CONTROL_FILE %s\n'%script_name) 192 | qsubfile_fd.write('setenv WORKDIR .\n') 193 | 194 | qsubfile_fd.write('cd $WORKDIR/\n') 195 | qsubfile_fd.write('echo " WORKING DIR: $WORKDIR/"\n') 196 | qsubfile_fd.write('$TACC_LAUNCHER_DIR/paramrun $EXECUTABLE $CONTROL_FILE\n') 197 | 198 | qsubfile_fd.write('echo " "\necho " Parameteric Job Complete"\necho " "\n') 199 | 200 | qsubfile_fd.close() 201 | 202 | 203 | output=[] 204 | jobid=0 205 | if c.directory: 206 | cwd=c.directory 207 | if not test: 208 | if cwd: 209 | print 'outputting to %s'%cwd 210 | process = subprocess.Popen('qsub %s'%qsubfile, shell=True, stdout=subprocess.PIPE,cwd=cwd) 211 | else: 212 | process = subprocess.Popen('qsub %s'%qsubfile, shell=True, stdout=subprocess.PIPE) 213 | for line in process.stdout: 214 | if verbose: 215 | print line.strip() 216 | output.append(line.strip()) 217 | if line.find('Your job')>-1 and line.find('has been submitted')>-1: 218 | jobid=int(line.split(' ')[2]) 219 | print 'job id: ',jobid 220 | process.wait() 221 | 222 | if not keepqsubfile: 223 | if verbose: 224 | print 'Deleting qsubfile: %s'%qsubfile 225 | os.remove(qsubfile) 226 | 227 | return jobid,output 228 | 229 | 230 | -------------------------------------------------------------------------------- /run_shell_cmd.py: -------------------------------------------------------------------------------- 1 | """ 2 | utility functions to run shell commands 3 | """ 4 | 5 | import subprocess 6 | 7 | def run_shell_cmd(cmd,echo=False,cwd=[]): 8 | """ run a command in the shell using Popen 9 | """ 10 | stdout_holder=[] 11 | if cwd: 12 | process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,cwd=cwd) 13 | else: 14 | process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) 15 | for line in process.stdout: 16 | if echo: 17 | print line.strip() 18 | stdout_holder.append(line.strip()) 19 | process.wait() 20 | return stdout_holder 21 | 22 | def run_logged_cmd(cmd,cmdfile): 23 | outfile=open(cmdfile,'a') 24 | subcode=cmdfile.split('/')[-2] 25 | outfile.write('\n%s: Running:'%subcode+cmd+'\n') 26 | p = sub.Popen(cmd.split(' '),stdout=sub.PIPE,stderr=sub.PIPE) 27 | output, errors = p.communicate() 28 | outfile.write('%s: Output: '%subcode+output+'\n') 29 | if errors: 30 | outfile.write('%s: ERROR: '%subcode+errors+'\n') 31 | print '%s: ERROR: '%subcode+errors 32 | outfile.close() 33 | 34 | -------------------------------------------------------------------------------- /setup_subject.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | process data from Skyra 4 | Russ Poldrack, 8/31/2012 5 | 6 | """ 7 | 8 | import subprocess as sub 9 | import argparse 10 | import os,sys 11 | from datetime import datetime 12 | import dicom 13 | import pickle 14 | import socket 15 | import xnat_tools 16 | 17 | def run_logged_cmd(cmd,cmdfile): 18 | outfile=open(cmdfile,'a') 19 | subcode=cmdfile.split('/')[-2] 20 | outfile.write('\n%s: Running:'%subcode+cmd+'\n') 21 | p = sub.Popen(cmd.split(' '),stdout=sub.PIPE,stderr=sub.PIPE) 22 | output, errors = p.communicate() 23 | outfile.write('%s: Output: '%subcode+output+'\n') 24 | if errors: 25 | outfile.write('%s: ERROR: '%subcode+errors+'\n') 26 | print '%s: ERROR: '%subcode+errors 27 | outfile.close() 28 | 29 | def log_message(message,cmdfile): 30 | outfile=open(cmdfile,'a') 31 | outfile.write(message+'\n') 32 | outfile.close() 33 | 34 | 35 | 36 | def parse_command_line(): 37 | parser = argparse.ArgumentParser(description='setup_subject') 38 | #parser.add_argument('integers', metavar='N', type=int, nargs='+',help='an integer for the accumulator') 39 | # set up boolean flags 40 | 41 | 42 | parser.add_argument('--getdata', dest='getdata', action='store_true', 43 | default=False,help='get data from XNAT') 44 | parser.add_argument('--keepdata', dest='keepdata', action='store_true', 45 | default=False,help='keep DICOMs after conversion') 46 | parser.add_argument('--dcm2nii', dest='dcm2nii', action='store_true', 47 | default=False,help='perform dicom conversion') 48 | parser.add_argument('-o', dest='overwrite', action='store_true', 49 | default=False,help='overwrite existing files') 50 | parser.add_argument('-t', dest='testmode', action='store_true', 51 | default=False,help='run in test mode (do not execute commands)') 52 | parser.add_argument('--motcorr', dest='motcorr', action='store_true', 53 | default=False,help='run motion correction') 54 | parser.add_argument('--betfunc', dest='betfunc', action='store_true', 55 | default=False,help='run BET on func data') 56 | parser.add_argument('--qa', dest='qa', action='store_true', 57 | default=False,help='run QA on func data') 58 | parser.add_argument('--fm', dest='fm', action='store_true', 59 | default=False,help='process fieldmap') 60 | parser.add_argument('--dtiqa', dest='dtiqa', action='store_true', 61 | default=False,help='run QA on DTI data') 62 | parser.add_argument('--topup', dest='topup', action='store_true', 63 | default=False,help='run topup on DTI data') 64 | parser.add_argument('--melodic', dest='melodic', action='store_true', 65 | default=False,help='run melodic on func data') 66 | parser.add_argument('--unzip', dest='unzip', action='store_true', 67 | default=False,help='unzip data file') 68 | parser.add_argument('--fsrecon', dest='fsrecon', action='store_true', 69 | default=False,help='run freesurfer autorecon1') 70 | parser.add_argument('-v', dest='verbose',action='store_true', 71 | help='give verbose output') 72 | parser.add_argument('--bet-inplane', dest='bet_inplane', action='store_true', 73 | default=False,help='run bet on inplane') 74 | parser.add_argument('--all', dest='doall', action='store_true', 75 | default=False,help='run all steps') 76 | 77 | # set up flags with arguments 78 | 79 | parser.add_argument('--xnat_server', dest='xnat_server', 80 | help='URL for xnat server',default="https://xnat.irc.utexas.edu/xnat-irc") 81 | parser.add_argument('--xnat_username', dest='xnat_username', 82 | help='user name for xnat server',default='') 83 | parser.add_argument('--xnat_password', dest='xnat_password', 84 | help='password for xnat server',default='') 85 | parser.add_argument('-f', dest='filename', 86 | help='path to zipped data file') 87 | parser.add_argument('--studyname', dest='studyname', 88 | help='name of study',required=True) 89 | parser.add_argument('-b', dest='basedir', 90 | help='base directory for data file', default='/corral-repl/utexas/poldracklab/data/') 91 | parser.add_argument('-s','--subcode', dest='subcode', 92 | help='subject code',required=True) 93 | parser.add_argument('--subdir', dest='subdir', 94 | help='subject dir (defaults to subject code)',default='') 95 | parser.add_argument('--mcflirt-args', dest='mcflirt_args', 96 | help='arguments for mcflirt',default='-plots -sinc_final') 97 | parser.add_argument('--xnat-project', dest='xnat_project', 98 | help='project in XNAT',default='poldrack') 99 | parser.add_argument('--mricrondir', dest='mricrondir', 100 | help='directory for mricron',default='') 101 | parser.add_argument('--fs-subdir', dest='fs_subdir', 102 | help='subject directory for freesurfer',default='/corral-repl/utexas/poldracklab/data/subdir') 103 | 104 | args = parser.parse_args() 105 | arglist={} 106 | for a in args._get_kwargs(): 107 | arglist[a[0]]=a[1] 108 | 109 | return arglist 110 | 111 | def setup_dir(args): 112 | 113 | # basedir for data 114 | studyname=args['studyname'] 115 | subcode=args['subcode'] 116 | if args['verbose']: 117 | print subcode 118 | 119 | studydir=os.path.join(args['basedir'],studyname) 120 | if not os.path.exists(studydir): 121 | print 'ERROR: study dir %s does not exist!'%studydir 122 | sys.exit() 123 | #subcode=sys.argv[1] 124 | 125 | subdir=os.path.join(studydir,args['subdir']) 126 | 127 | if not os.path.exists(subdir): 128 | os.mkdir(subdir) 129 | else: 130 | print 'subdir %s already exists'%subdir 131 | if args['overwrite']==False: 132 | sys.exit() 133 | else: 134 | print 'overwriting...' 135 | 136 | subdirs=['BOLD','DTI','anatomy','logs','raw','model','behav','fieldmap'] 137 | subdir_names={} 138 | 139 | for s in subdirs: 140 | subdir_names[s]=os.path.join(subdir,s) 141 | if not os.path.exists(subdir_names[s]): 142 | os.mkdir(subdir_names[s]) 143 | 144 | return subdir,subdir_names 145 | 146 | def setup_outfiles(): 147 | timestamp = datetime.now().strftime('%Y_%m_%d_%H_%M_%S') 148 | outfile={} 149 | outfile['main']=os.path.join(subdir,'logs/cmd_'+timestamp+'.log') 150 | outfile['dcm2nii']=os.path.join(subdir,'logs/dcm2nii_cmd_'+timestamp+'.log') 151 | outfile['unzip']=os.path.join(subdir,'logs/unzip_'+timestamp+'.log') 152 | 153 | log_message("#command file automatically generated by setup_subject.py\n#Started: %s\n\n"%timestamp,outfile['main']) 154 | return outfile 155 | 156 | def load_dcmhdrs(subdir_names): 157 | TR={} 158 | hdrfile=os.path.join(subdir_names['logs'],'dicom_headers.pkl') 159 | f=open(hdrfile,'rb') 160 | dcmhdrs=pickle.load(f) 161 | f.close() 162 | for d in dcmhdrs.iterkeys(): 163 | if dcmhdrs[d].SequenceName.find('epfid')>-1: 164 | TR[dcmhdrs[d].ProtocolName.replace(' ','_')]=float(dcmhdrs[d].RepetitionTime)/1000.0 165 | 166 | return dcmhdrs,TR 167 | 168 | def save_dcmhdrs(dcmhdrs,subdir_names): 169 | hdrfile=os.path.join(subdir_names['logs'],'dicom_headers.pkl') 170 | f=open(hdrfile,'wb') 171 | pickle.dump(dcmhdrs,f) 172 | f.close() 173 | 174 | 175 | def fs_setup(args,subdir_names): 176 | if args['verbose']: 177 | print 'running freesurfer setup' 178 | # set up subdir 179 | if not os.path.exists(args['fs_subdir']): 180 | print 'fs_subdir %s does not exist - skipping fs_setup'%args['fs_subdir'] 181 | sub_fsdir=os.path.join(args['fs_subdir'],args['fs_subcode']) 182 | if os.path.exists(sub_fsdir): 183 | if not args['overwrite']: 184 | print 'subject dir %s already exists - skipping fs_setup' 185 | return 186 | else: 187 | print 'subject dir %s already exists - overwriting' 188 | else: 189 | cmd='recon-all -i %s -subjid %s -sd %s'%(os.path.join(subdir_names['anatomy'],'highres001.nii.gz'),args['fs_subcode'],args['fs_subdir']) 190 | print cmd 191 | if not args['testmode']: 192 | run_logged_cmd(cmd,outfile['main']) 193 | 194 | def run_autorecon1(args,subdir_names): 195 | if args['verbose']: 196 | print 'running freesurfer autorecon1' 197 | # set up subdir 198 | sub_fsdir=os.path.join(args['fs_subdir'],args['fs_subcode']) 199 | brainmask=os.path.join(sub_fsdir,'mri/brainmask.mgz') 200 | 201 | if not os.path.exists(sub_fsdir): 202 | print 'subject dir %s does not exist - skipping autorecon1'%sub_fsdir 203 | return 204 | elif os.path.exists(brainmask): 205 | print 'brainmask %s already exists - skipping autorecon1'%brainmask 206 | return 207 | else: 208 | cmd='recon-all -autorecon1 -subjid %s -sd %s'%(args['fs_subcode'],args['fs_subdir']) 209 | print cmd 210 | if not args['testmode']: 211 | run_logged_cmd(cmd,outfile['main']) 212 | 213 | def copy_stripped_T1(args,subdir_names): 214 | if args['verbose']: 215 | print 'copying stripped T1' 216 | # set up subdir 217 | sub_fsdir=os.path.join(args['fs_subdir'],args['fs_subcode']) 218 | brainmask=os.path.join(sub_fsdir,'mri/brainmask.mgz') 219 | 220 | if not os.path.exists(brainmask): 221 | print 'brainmask %s does not exist - skipping copy'%brainmask 222 | return 223 | else: 224 | cmd='mri_convert --out_orientation LAS %s --reslice_like %s/highres001.nii.gz %s/highres001_brain.nii'%(brainmask, 225 | subdir_names['anatomy'],subdir_names['anatomy']) 226 | print cmd 227 | if not args['testmode']: 228 | run_logged_cmd(cmd,outfile['main']) 229 | cmd='gzip %s/highres001_brain.nii'%subdir_names['anatomy'] 230 | print cmd 231 | if not args['testmode']: 232 | run_logged_cmd(cmd,outfile['main']) 233 | cmd='fslmaths %s/highres001_brain.nii.gz -thr 1 -bin %s/highres001_brain_mask.nii.gz'%(subdir_names['anatomy'],subdir_names['anatomy']) 234 | print cmd 235 | if not args['testmode']: 236 | run_logged_cmd(cmd,outfile['main']) 237 | 238 | def bet_inplane(args,subdir_names): 239 | if args['verbose']: 240 | print 'running bet on inplane' 241 | inplane_file=os.path.join(subdir_names['anatomy'],'inplane001.nii.gz') 242 | if not os.path.exists(inplane_file): 243 | print 'inplane file %s does not exist - skippping bet_inplane'%inplane_file 244 | return 245 | cmd='bet %s %s -f 0.3 -R'%(os.path.join(subdir_names['anatomy'],'inplane001.nii.gz'), 246 | os.path.join(subdir_names['anatomy'],'inplane001_brain.nii.gz')) 247 | print cmd 248 | if not args['testmode']: 249 | run_logged_cmd(cmd,outfile['main']) 250 | 251 | 252 | def process_fieldmap(args,subdir_names): 253 | if args['verbose']: 254 | print 'processing field maps' 255 | magfile='%s/fieldmap_mag.nii.gz'%subdir_names['fieldmap'] 256 | phasefile='%s/fieldmap_mag.nii.gz'%subdir_names['fieldmap'] 257 | if not os.path.exists(magfile) or not os.path.exists(phasefile): 258 | print 'field map does not exist, skippping process_fieldmap' 259 | return 260 | 261 | 262 | cmd='bet %s/fieldmap_mag.nii.gz %s/fieldmap_mag_brain -f 0.3 -F'%(subdir_names['fieldmap'],subdir_names['fieldmap']) 263 | print cmd 264 | if not args['testmode']: 265 | run_logged_cmd(cmd,outfile['main']) 266 | 267 | 268 | cmd='fsl_prepare_fieldmap SIEMENS %s/fieldmap_phase.nii.gz %s/fieldmap_mag_brain.nii.gz %s/fm_prep 2.46'%(subdir_names['fieldmap'],subdir_names['fieldmap'],subdir_names['fieldmap']) 269 | print cmd 270 | if not args['testmode']: 271 | run_logged_cmd(cmd,outfile['main']) 272 | 273 | 274 | def dtiqa(args,subdir_names): 275 | if args['verbose']: 276 | print 'running QC on DTI' 277 | dtifiles=[i.strip() for i in os.listdir(subdir_names['DTI']) if (i.find('DTI_')==0 and i.find('.nii.gz')>0)] 278 | for dtifile in dtifiles: 279 | print 'found DTI file: %s'%dtifile 280 | 281 | cmd='dtiqa.py %s'%os.path.join(subdir_names['DTI'],dtifile) 282 | print cmd 283 | if not args['testmode']: 284 | run_logged_cmd(cmd,outfile['main']) 285 | 286 | def topup(args,subdir_names): 287 | if args['verbose']: 288 | print 'running topup on DTI' 289 | if (not os.path.exists(os.path.join(subdir_names['DTI'],'DTI_1.nii.gz'))) or (not os.path.exists(os.path.join(subdir_names['DTI'],'DTI_2.nii.gz'))): 290 | print 'topup requires two DTI files - skipping' 291 | return 292 | 293 | 294 | cmd='run_topup.py %s %s'%(os.path.join(subdir_names['DTI'],'DTI_1.nii.gz'),os.path.join(subdir_names['DTI'],'DTI_2.nii.gz')) 295 | print cmd 296 | if not args['testmode']: 297 | run_logged_cmd(cmd,outfile['main']) 298 | 299 | 300 | def download_from_xnat(args,subdir): 301 | args['keepdata'] = True 302 | #use default download username 303 | if (len(args['xnat_username'])<1) or (len(args['xnat_password'])<1): 304 | xnat_tools.down_subject_dicoms(args['xnat_server'], 305 | os.path.join(subdir,'raw'), 306 | args['xnat_project'], args['subcode']) 307 | else: 308 | xnat_tools.down_subject_dicoms(args['xnat_server'], 309 | os.path.join(subdir,'raw'), 310 | args['xnat_project'], args['subcode'], 311 | xnat_username=args['xnat_username'], 312 | xnat_password=args['xnat_password']) 313 | 314 | def do_unzipping(args,subdir): 315 | if not args['filename'] or not os.path.exists(args['filename']): 316 | print 'filename %s not found for unzipping - exiting'%args['filename'] 317 | sys.exit() 318 | cmd='unzip %s -d %s'%(args['filename'],subdir) 319 | print cmd 320 | if not args['testmode']: 321 | run_logged_cmd(cmd,outfile['unzip']) 322 | 323 | def convert_dicom_to_nifti(args, subdir): 324 | 325 | TR={} 326 | scan_keys = { 327 | 'anatomy': ['MPRAGE','FSE','T1w','T2w','PDT','PD-T2','tse2d','mprage','t1w','t2w','t2spc','t2_spc'], 328 | 'BOLD':['epfid'], 329 | 'DTI':['ep_b'], 330 | 'fieldmap':['fieldmap','field_mapping','FieldMap'], 331 | 'localizer':['localizer','Localizer','Scout','scout'], 332 | 'reference':['SBRef'] 333 | } 334 | 335 | if args['unzip']: 336 | dcmbase=os.path.join(subdir,args['subcode'],'SCANS') 337 | else: 338 | dcmbase=os.path.join(subdir,'raw',args['subcode']) 339 | dcmdirs=os.listdir(dcmbase) 340 | dcmhdrs={} 341 | for d in dcmdirs: 342 | if args['unzip']: 343 | dcmdir=os.path.join(dcmbase,d,'DICOM') 344 | else: 345 | dcmdir=os.path.join(dcmbase,d) 346 | dcmfiles=[i for i in os.listdir(dcmdir) if i.find('.dcm')>0] 347 | try: 348 | dcmhdrs[d]=dicom.read_file(os.path.join(dcmdir,dcmfiles[0])) 349 | except: 350 | continue 351 | file_type='raw' 352 | #print d, dcmhdrs[d].ImageType 353 | if not dcmhdrs[d].ImageType[0]=='ORIGINAL': 354 | print 'skipping derived series ',d 355 | file_type='derived' 356 | # first look for anatomy 357 | if file_type=='raw': 358 | for key in scan_keys['reference']: 359 | if (dcmhdrs[d].ProtocolName.find(key)>-1) or (dcmhdrs[d].SeriesDescription.find(key)>-1) : 360 | file_type='reference' 361 | if file_type=='raw': 362 | for key in scan_keys['BOLD']: 363 | if dcmhdrs[d].SequenceName.find(key)>-1: 364 | file_type='BOLD' 365 | TR[dcmhdrs[d].ProtocolName.replace(' ','_')+'_'+d]=float(dcmhdrs[d].RepetitionTime)/1000.0 366 | if file_type=='raw': 367 | for key in scan_keys['anatomy']: 368 | if (dcmhdrs[d].ProtocolName.find(key)>-1) or (dcmhdrs[d].SequenceName.find(key)>-1) : 369 | file_type='anatomy' 370 | if file_type=='raw': 371 | for key in scan_keys['localizer']: 372 | if dcmhdrs[d].ProtocolName.find(key)>0: 373 | file_type='localizer' 374 | if file_type=='raw': 375 | for key in scan_keys['DTI']: 376 | if dcmhdrs[d].SequenceName.find(key)>-1: 377 | file_type='DTI' 378 | if file_type=='raw': 379 | for key in scan_keys['fieldmap']: 380 | if (dcmhdrs[d].ProtocolName.find(key)>-1) or (dcmhdrs[d].SequenceName.find(key)>-1): 381 | file_type='fieldmap' 382 | 383 | 384 | print 'detected %s: (%s) %s %s'%(file_type,d,dcmhdrs[d].ProtocolName,dcmhdrs[d].SeriesDescription) 385 | if (not file_type=='localizer') and (not file_type=='derived') and (not file_type=='reference'): 386 | cmd='%sdcm2nii -d n -i n -o %s %s'%(args['mricrondir'],subdir_names[file_type],dcmdir) 387 | print cmd 388 | if not args['testmode']: 389 | run_logged_cmd(cmd,outfile['main']) 390 | # save dicom headers to pickle file 391 | save_dcmhdrs(dcmhdrs,subdir_names) 392 | 393 | if not args['keepdata']: 394 | cmd='rm -rf %s'%dcmbase 395 | print cmd 396 | if not args['testmode']: 397 | run_logged_cmd(cmd,outfile['main']) 398 | 399 | # save the 400 | # rename data appropriately 401 | boldfiles=[i for i in os.listdir(subdir_names['BOLD']) if i.find('.nii.gz')>0] 402 | for f in boldfiles: 403 | runnum=f.rsplit('a')[-2].rsplit('s')[-1].lstrip('0') 404 | runname=dcmhdrs[runnum].ProtocolName.replace(' ','_') 405 | rundir=os.path.join(subdir_names['BOLD'],'%s_%s'%(runname,runnum)) 406 | if not os.path.exists(rundir): 407 | os.mkdir(rundir) 408 | cmd='mv %s %s/bold.nii.gz'%(os.path.join(subdir_names['BOLD'],f),rundir) 409 | print cmd 410 | if not args['testmode']: 411 | run_logged_cmd(cmd,outfile['dcm2nii']) 412 | 413 | anatfiles=[i for i in os.listdir(subdir_names['anatomy']) if i.find('.nii.gz')>0] 414 | other_anat_dir=os.path.join(subdir_names['anatomy'],'other') 415 | if not os.path.exists(other_anat_dir): 416 | os.mkdir(other_anat_dir) 417 | 418 | highresctr=1 419 | inplanectr=1 420 | for a in anatfiles: 421 | print a 422 | runnum=a.rsplit('a')[-2].rsplit('s')[-1].lstrip('0') 423 | mprage=0 424 | for key in ['MPRAGE','mprage','t1w','T1w']: 425 | if a.find(key)>0: 426 | mprage=1 427 | if a.find('o')==0 and mprage==1 : 428 | cmd='mv %s %s/highres%03d.nii.gz'%(os.path.join(subdir_names['anatomy'],a),subdir_names['anatomy'],highresctr) 429 | print cmd 430 | if not args['testmode']: 431 | run_logged_cmd(cmd,outfile['main']) 432 | highresctr+=1 433 | print 'highresctr is at ',highresctr 434 | if (mprage != 1) and a.find('PDT2')>0: 435 | cmd='fslroi %s %s/inplane%03d.nii.gz 1 1'%(os.path.join(subdir_names['anatomy'],a),subdir_names['anatomy'],inplanectr) 436 | print cmd 437 | if not args['testmode']: 438 | run_logged_cmd(cmd,outfile['main']) 439 | inplanectr+=1 440 | cmd='mv %s %s'%(os.path.join(subdir_names['anatomy'],a),other_anat_dir) 441 | print cmd 442 | if not args['testmode']: 443 | run_logged_cmd(cmd,outfile['main']) 444 | elif not mprage == 1: 445 | cmd='mv %s %s'%(os.path.join(subdir_names['anatomy'],a),other_anat_dir) 446 | print cmd 447 | if not args['testmode']: 448 | run_logged_cmd(cmd,outfile['main']) 449 | 450 | # process fieldmap files 451 | # TBD: need to distinguish between SE and gradient field maps 452 | # we are making a brittle assumption here taht the first series is the 453 | # magnitude image 454 | fmfiles=[i for i in os.listdir(subdir_names['fieldmap']) if i.find('fieldmap')>0] 455 | fmctr=0 456 | fmtypes=['mag','phase'] 457 | for f in fmfiles: 458 | cmd='mv %s/%s %s/fieldmap_%s.nii.gz'%(subdir_names['fieldmap'],f,subdir_names['fieldmap'],fmtypes[fmctr]) 459 | print cmd 460 | if not args['testmode']: 461 | run_logged_cmd(cmd,outfile['main']) 462 | fmctr+=1 463 | 464 | 465 | 466 | # process DTI files 467 | dtifiles=[i for i in os.listdir(subdir_names['DTI']) if i.find('.nii.gz')>0] 468 | dtictr=1 469 | for f in dtifiles: 470 | cmd='mv %s/%s %s/DTI_%d.nii.gz'%(subdir_names['DTI'],f,subdir_names['DTI'],dtictr) 471 | print cmd 472 | if not args['testmode']: 473 | run_logged_cmd(cmd,outfile['main']) 474 | 475 | cmd='mv %s/%s %s/DTI_%d.bvec'%(subdir_names['DTI'],f.replace('.nii.gz','.bvec'),subdir_names['DTI'],dtictr) 476 | print cmd 477 | if not args['testmode']: 478 | run_logged_cmd(cmd,outfile['main']) 479 | 480 | cmd='mv %s/%s %s/DTI_%d.bval'%(subdir_names['DTI'],f.replace('.nii.gz','.bval'),subdir_names['DTI'],dtictr) 481 | print cmd 482 | if not args['testmode']: 483 | run_logged_cmd(cmd,outfile['main']) 484 | dtictr+=1 485 | 486 | 487 | return subdir_names, TR 488 | 489 | def execute_commands(args, subdir_names, TR): 490 | 491 | bolddirs=[d for d in os.listdir(subdir_names['BOLD']) if os.path.isdir(os.path.join(subdir_names['BOLD'],d))] 492 | 493 | print 'bolddirs:' 494 | print bolddirs 495 | 496 | command_dict={'motcorr':"'mcflirt -in %s/bold.nii.gz %s'%(__import__('os').path.join(subdir_names['BOLD'],b),args['mcflirt_args'])", 497 | 'qa':"'fmriqa.py %s/bold_mcf.nii.gz %f'%(__import__('os').path.join(subdir_names['BOLD'],b),TR[b])", 498 | 'betfunc':"'bet %s/bold_mcf.nii.gz %s/bold_mcf_brain.nii.gz -F'%(__import__('os').path.join(subdir_names['BOLD'],b),__import__('os').path.join(subdir_names['BOLD'],b))", 499 | 'melodic':"'melodic -i %s/bold_mcf.nii.gz --Oall --report'%__import__('os').path.join(subdir_names['BOLD'],b)"} 500 | 501 | bold_commands=['motcorr','betfunc','qa','melodic'] 502 | if bolddirs: 503 | for k in bold_commands: 504 | if args[k]: 505 | if args['verbose']: 506 | print 'running %s'%k 507 | # run betfunc 508 | else: 509 | for b in bolddirs: 510 | print k 511 | print command_dict 512 | print command_dict[k] 513 | # NO O_O 514 | cmd=eval(command_dict[k]) 515 | print cmd 516 | if not args['testmode']: 517 | run_logged_cmd(cmd,outfile['main']) 518 | 519 | 520 | if __name__ == "__main__": 521 | 522 | args=parse_command_line() 523 | 524 | # parse doall 525 | doall_cmds=['dcm2nii','motcorr','betfunc','qa','melodic','bet_inplane','fsrecon'] 526 | if args['doall']: 527 | for c in doall_cmds: 528 | args[c]=True 529 | 530 | # this is for debugging - detect whether I am 531 | # on my macbook or on lonestar 532 | if socket.gethostname().find('tacc')>0: 533 | USE_MAC=0 534 | else: 535 | USE_MAC=1 536 | 537 | if USE_MAC: 538 | args['basedir']='/Users/poldrack/data2/setup_subject_ut/data' 539 | args['mricrondir']='/Applications/fmri_progs/mricronmac/' 540 | args['filename']='/Users/poldrack/data2/setup_subject_ut/Skyra_testing/rpo-BOOST-pilot1_8_30_2012_17_32_1.zip' 541 | 542 | if args['subdir']=='': 543 | args['subdir']=args['subcode'] 544 | args['fs_subcode']='%s_%s'%(args['studyname'],args['subdir']) 545 | 546 | subdir,subdir_names=setup_dir(args) 547 | 548 | outfile=setup_outfiles() 549 | 550 | if args['getdata']: 551 | download_from_xnat(args,subdir) 552 | 553 | if args['unzip']: 554 | do_unzipping(args,subdir) 555 | 556 | 557 | 558 | if args['dcm2nii']: 559 | subdir_names, TR = convert_dicom_to_nifti(args, subdir) 560 | 561 | else: # try to load dicom headers 562 | try: 563 | dcmhdrs,TR=load_dcmhdrs(subdir_names) 564 | except: 565 | print "can't load dicom headers from pickle, exiting (ignore this if just running --getdata)" 566 | sys.exit() 567 | ##from here 568 | execute_commands(args,subdir_names, TR) 569 | ## to here 570 | if args['bet_inplane']: 571 | bet_inplane(args,subdir_names) 572 | 573 | # run freesurfer autorecon1 574 | if args['fsrecon']: 575 | fs_setup(args,subdir_names) 576 | run_autorecon1(args,subdir_names) 577 | copy_stripped_T1(args,subdir_names) 578 | 579 | if args['dtiqa']: 580 | dtiqa(args,subdir_names) 581 | 582 | if args['fm']: 583 | process_fieldmap(args,subdir_names) 584 | 585 | log_message("completed: %s"%datetime.now().strftime('%Y_%m_%d_%H_%M_%S'),outfile['main']) 586 | 587 | --------------------------------------------------------------------------------