├── README.md
├── SUMA
│   ├── RunSUMAGroupAnalysis.sh
│   ├── SUMA_afni_proc.sh
│   ├── SUMA_proc_MNI.sh
│   └── runFreeSurfer.sh
├── cmd.ap.AFNI_TENT
├── createSphere.sh
├── extractBetas.sh
├── make_AFNI_Timings_TENT.sh
├── make_Gamble_Timings.sh
├── make_Timings.sh
├── proc_Flanker.sh
├── runAFNIproc.sh
├── runGroupAnalysis_GainPM.sh
├── runGroupAnalysis_LossPM.sh
├── sub_08_afni_proc.sh
└── ttest_TENT.sh

/README.md:
--------------------------------------------------------------------------------
# AFNI_Scripts
Scripts used for fMRI data analysis in AFNI
--------------------------------------------------------------------------------
/SUMA/RunSUMAGroupAnalysis.sh:
--------------------------------------------------------------------------------
#!/bin/tcsh

#Left Hemisphere

set dirA = $PWD

# specify and possibly create results directory
set results_dir = test.results
if ( ! -d $results_dir ) mkdir $results_dir

# ------------------------- process the data -------------------------

3dttest++ -prefix $results_dir/Flanker_Inc-Con_ttest.lh.niml.dset \
          -setA Inc-Con \
             01 "$dirA/sub-01/sub-01.results_SUMA/stats.sub-01.lh.niml.dset[10]" \
             02 "$dirA/sub-02/sub-02.results_SUMA/stats.sub-02.lh.niml.dset[10]" \
             03 "$dirA/sub-03/sub-03.results_SUMA/stats.sub-03.lh.niml.dset[10]" \
             04 "$dirA/sub-04/sub-04.results_SUMA/stats.sub-04.lh.niml.dset[10]" \
             05 "$dirA/sub-05/sub-05.results_SUMA/stats.sub-05.lh.niml.dset[10]" \
             06 "$dirA/sub-06/sub-06.results_SUMA/stats.sub-06.lh.niml.dset[10]" \
             07 "$dirA/sub-07/sub-07.results_SUMA/stats.sub-07.lh.niml.dset[10]" \
             08 "$dirA/sub-08/sub-08.results_SUMA/stats.sub-08.lh.niml.dset[10]" \
             09 "$dirA/sub-09/sub-09.results_SUMA/stats.sub-09.lh.niml.dset[10]" \
             10 "$dirA/sub-10/sub-10.results_SUMA/stats.sub-10.lh.niml.dset[10]" \
             11 "$dirA/sub-11/sub-11.results_SUMA/stats.sub-11.lh.niml.dset[10]" \
             12 "$dirA/sub-12/sub-12.results_SUMA/stats.sub-12.lh.niml.dset[10]" \
             13 "$dirA/sub-13/sub-13.results_SUMA/stats.sub-13.lh.niml.dset[10]" \
             14 "$dirA/sub-14/sub-14.results_SUMA/stats.sub-14.lh.niml.dset[10]" \
             15 "$dirA/sub-15/sub-15.results_SUMA/stats.sub-15.lh.niml.dset[10]" \
             16 "$dirA/sub-16/sub-16.results_SUMA/stats.sub-16.lh.niml.dset[10]" \
             17 "$dirA/sub-17/sub-17.results_SUMA/stats.sub-17.lh.niml.dset[10]" \
             18 "$dirA/sub-18/sub-18.results_SUMA/stats.sub-18.lh.niml.dset[10]" \
             19 "$dirA/sub-19/sub-19.results_SUMA/stats.sub-19.lh.niml.dset[10]" \
             20 "$dirA/sub-20/sub-20.results_SUMA/stats.sub-20.lh.niml.dset[10]" \
             21 "$dirA/sub-21/sub-21.results_SUMA/stats.sub-21.lh.niml.dset[10]" \
             22 "$dirA/sub-22/sub-22.results_SUMA/stats.sub-22.lh.niml.dset[10]" \
             23 "$dirA/sub-23/sub-23.results_SUMA/stats.sub-23.lh.niml.dset[10]" \
             24 "$dirA/sub-24/sub-24.results_SUMA/stats.sub-24.lh.niml.dset[10]" \
             25 "$dirA/sub-25/sub-25.results_SUMA/stats.sub-25.lh.niml.dset[10]" \
             26 "$dirA/sub-26/sub-26.results_SUMA/stats.sub-26.lh.niml.dset[10]"

#Right Hemisphere

3dttest++ -prefix $results_dir/Flanker_Inc-Con_ttest.rh.niml.dset \
          -setA Inc-Con \
             01 "$dirA/sub-01/sub-01.results_SUMA/stats.sub-01.rh.niml.dset[10]" \
             02 "$dirA/sub-02/sub-02.results_SUMA/stats.sub-02.rh.niml.dset[10]" \
             03 "$dirA/sub-03/sub-03.results_SUMA/stats.sub-03.rh.niml.dset[10]" \
             04 "$dirA/sub-04/sub-04.results_SUMA/stats.sub-04.rh.niml.dset[10]" \
"$dirA/sub-05/sub-05.results_SUMA/stats.sub-05.rh.niml.dset[10]" \ 51 | 06 "$dirA/sub-06/sub-06.results_SUMA/stats.sub-06.rh.niml.dset[10]" \ 52 | 07 "$dirA/sub-07/sub-07.results_SUMA/stats.sub-07.rh.niml.dset[10]" \ 53 | 08 "$dirA/sub-08/sub-08.results_SUMA/stats.sub-08.rh.niml.dset[10]" \ 54 | 09 "$dirA/sub-09/sub-09.results_SUMA/stats.sub-09.rh.niml.dset[10]" \ 55 | 10 "$dirA/sub-10/sub-10.results_SUMA/stats.sub-10.rh.niml.dset[10]" \ 56 | 11 "$dirA/sub-11/sub-11.results_SUMA/stats.sub-11.rh.niml.dset[10]" \ 57 | 12 "$dirA/sub-12/sub-12.results_SUMA/stats.sub-12.rh.niml.dset[10]" \ 58 | 13 "$dirA/sub-13/sub-13.results_SUMA/stats.sub-13.rh.niml.dset[10]" \ 59 | 14 "$dirA/sub-14/sub-14.results_SUMA/stats.sub-14.rh.niml.dset[10]" \ 60 | 15 "$dirA/sub-15/sub-15.results_SUMA/stats.sub-15.rh.niml.dset[10]" \ 61 | 16 "$dirA/sub-16/sub-16.results_SUMA/stats.sub-16.rh.niml.dset[10]" \ 62 | 17 "$dirA/sub-17/sub-17.results_SUMA/stats.sub-17.rh.niml.dset[10]" \ 63 | 18 "$dirA/sub-18/sub-18.results_SUMA/stats.sub-18.rh.niml.dset[10]" \ 64 | 19 "$dirA/sub-19/sub-19.results_SUMA/stats.sub-19.rh.niml.dset[10]" \ 65 | 20 "$dirA/sub-20/sub-20.results_SUMA/stats.sub-20.rh.niml.dset[10]" \ 66 | 21 "$dirA/sub-21/sub-21.results_SUMA/stats.sub-21.rh.niml.dset[10]" \ 67 | 22 "$dirA/sub-22/sub-22.results_SUMA/stats.sub-22.rh.niml.dset[10]" \ 68 | 23 "$dirA/sub-23/sub-23.results_SUMA/stats.sub-23.rh.niml.dset[10]" \ 69 | 24 "$dirA/sub-24/sub-24.results_SUMA/stats.sub-24.rh.niml.dset[10]" \ 70 | 25 "$dirA/sub-25/sub-25.results_SUMA/stats.sub-25.rh.niml.dset[10]" \ 71 | 26 "$dirA/sub-26/sub-26.results_SUMA/stats.sub-26.rh.niml.dset[10]" 72 | -------------------------------------------------------------------------------- /SUMA/SUMA_afni_proc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/tcsh 2 | 3 | # set subject and group identifiers 4 | set subj = sub-08 5 | set gname = Flanker 6 | 7 | # set data directories 8 | set top_dir = /Users/ajahn/Desktop/${gname}/sub-08 9 | set anat_dir = $top_dir/anat 10 | set epi_dir = $top_dir/func 11 | set stim_dir = $top_dir/func 12 | 13 | # run afni_proc.py to create a single subject processing script 14 | afni_proc.py -subj_id $subj \ 15 | -script proc.$subj -scr_overwrite \ 16 | -blocks tshift align tlrc volreg surf blur scale regress \ 17 | -copy_anat $anat_dir/sub-08_T1w.nii.gz \ 18 | -dsets \ 19 | $epi_dir/sub-08_task-flanker_run-1_bold.nii.gz \ 20 | $epi_dir/sub-08_task-flanker_run-2_bold.nii.gz \ 21 | -tcat_remove_first_trs 0 \ 22 | -align_opts_aea -giant_move \ 23 | -tlrc_base MNI_avg152T1+tlrc \ 24 | -volreg_align_to MIN_OUTLIER \ 25 | -volreg_align_e2a \ 26 | -volreg_tlrc_warp \ 27 | -surf_anat ~/Desktop/Flanker/FS/sub-08_T1w/surf/SUMA/sub-08_SurfVol+orig \ 28 | -surf_spec ~/Desktop/Flanker/FS/sub-08_T1w/surf/SUMA/std.141.sub-08_?h.spec \ 29 | -blur_size 4.0 \ 30 | -regress_stim_times \ 31 | $stim_dir/congruent.1D \ 32 | $stim_dir/incongruent.1D \ 33 | -regress_stim_labels congruent incongruent \ 34 | -regress_basis 'GAM' \ 35 | -regress_censor_motion 0.3 \ 36 | -regress_motion_per_run \ 37 | -regress_opts_3dD \ 38 | -jobs 8 \ 39 | -gltsym 'SYM: congruent -incongruent' -glt_label 1 Con-Inc \ 40 | -gltsym 'SYM: incongruent -congruent' -glt_label 2 Inc-Con \ 41 | -regress_reml_exec \ 42 | -regress_make_ideal_sum sum_ideal.1D 43 | -------------------------------------------------------------------------------- /SUMA/SUMA_proc_MNI.sh: -------------------------------------------------------------------------------- 1 | 
/SUMA/SUMA_proc_MNI.sh:
--------------------------------------------------------------------------------
#!/bin/tcsh -xef

echo "auto-generated by afni_proc.py, Tue Oct 15 15:45:40 2019"
echo "(version 6.47, August 27, 2019)"
echo "execution started: `date`"

# to execute via tcsh:
#   tcsh -xef proc.${subj} |& tee output.proc.${subj}
# to execute via bash:
#   tcsh -xef proc.${subj} 2>&1 | tee output.proc.${subj}

# =========================== auto block: setup ============================
# script setup

# take note of the AFNI version
afni -ver

# check that the current AFNI version is recent enough
afni_history -check_date 27 Jun 2019
if ( $status ) then
    echo "** this script requires newer AFNI binaries (than 27 Jun 2019)"
    echo "   (consider: @update.afni.binaries -defaults)"
    exit
endif

# the user may specify a single subject to run with
if ( $#argv > 0 ) then
    set subj = $argv[1]
else
    set subj = sub-08
endif

# assign output directory name
set output_dir = $subj.results_SUMA

# set directory variables
set surface_dir = ${PWD}/FS/${subj}_T1w/surf/SUMA

# verify that the results directory does not yet exist
if ( -d $output_dir ) then
    echo output dir "$output_dir" already exists
    exit
endif

# set list of runs
set runs = (`count -digits 2 1 2`)

# create results and stimuli directories
mkdir $output_dir
mkdir $output_dir/stimuli

# copy stim files into stimulus directory
cp ${PWD}/${subj}/func/congruent.1D \
   ${PWD}/${subj}/func/incongruent.1D \
   $output_dir/stimuli

# copy anatomy to results dir
3dcopy ${subj}/anat/${subj}_T1w.nii.gz $output_dir/${subj}_T1w

# ============================ auto block: tcat ============================
# apply 3dTcat to copy input dsets to results dir,
# while removing the first 0 TRs
3dTcat -prefix $output_dir/pb00.$subj.r01.tcat \
    ${subj}/func/${subj}_task-flanker_run-1_bold.nii.gz'[0..$]'
3dTcat -prefix $output_dir/pb00.$subj.r02.tcat \
    ${subj}/func/${subj}_task-flanker_run-2_bold.nii.gz'[0..$]'

# and make note of repetitions (TRs) per run
set tr_counts = ( 146 146 )

# -------------------------------------------------------
# enter the results directory (can begin processing data)
cd $output_dir


# ========================== auto block: outcount ==========================
# data check: compute outlier fraction for each volume
touch out.pre_ss_warn.txt
foreach run ( $runs )
    3dToutcount -automask -fraction -polort 2 -legendre \
                pb00.$subj.r$run.tcat+orig > outcount.r$run.1D

    # outliers at TR 0 might suggest pre-steady state TRs
    if ( `1deval -a outcount.r$run.1D"{0}" -expr "step(a-0.4)"` ) then
        echo "** TR #0 outliers: possible pre-steady state TRs in run $run" \
            >> out.pre_ss_warn.txt
    endif
end

# catenate outlier counts into a single time series
cat outcount.r*.1D > outcount_rall.1D

# get run number and TR index for minimum outlier volume
set minindex = `3dTstat -argmin -prefix - outcount_rall.1D\'`
set ovals = ( `1d_tool.py -set_run_lengths $tr_counts \
                          -index_to_run_tr $minindex` )
# save run and TR indices for extraction of vr_base_min_outlier
set minoutrun = $ovals[1]
set minouttr  = $ovals[2]
echo "min outlier: run $minoutrun, TR $minouttr" | tee out.min_outlier.txt
# ================================= tshift =================================
# time shift data so all slice timing is the same
foreach run ( $runs )
    3dTshift -tzero 0 -quintic -prefix pb01.$subj.r$run.tshift \
             pb00.$subj.r$run.tcat+orig
end

# --------------------------------
# extract volreg registration base
3dbucket -prefix vr_base_min_outlier \
    pb01.$subj.r$minoutrun.tshift+orig"[$minouttr]"

# ================================= align ==================================
# for e2a: compute anat alignment transformation to EPI registration base
# (new anat will be intermediate, stripped, ${subj}_T1w_ns+orig)
align_epi_anat.py -anat2epi -anat ${subj}_T1w+orig \
       -save_skullstrip -suffix _al_junk \
       -epi vr_base_min_outlier+orig -epi_base 0 \
       -epi_strip 3dAutomask \
       -giant_move \
       -volreg off -tshift off

# ================================== tlrc ==================================
# warp anatomy to standard space
@auto_tlrc -base MNI_avg152T1+tlrc -input ${subj}_T1w_ns+orig -no_ss -init_xform AUTO_CENTER

# store forward transformation matrix in a text file
cat_matvec ${subj}_T1w_ns+tlrc::WARP_DATA -I > warp.anat.Xat.1D

# ================================= volreg =================================
# align each dset to base volume, to anat, warp to tlrc space

# verify that we have a +tlrc warp dataset
if ( ! -f ${subj}_T1w_ns+tlrc.HEAD ) then
    echo "** missing +tlrc warp dataset: ${subj}_T1w_ns+tlrc.HEAD"
    exit
endif

# register and warp
foreach run ( $runs )
    # register each volume to the base image
    3dvolreg -verbose -zpad 1 -base vr_base_min_outlier+orig \
             -1Dfile dfile.r$run.1D -prefix rm.epi.volreg.r$run \
             -cubic \
             -1Dmatrix_save mat.r$run.vr.aff12.1D \
             pb01.$subj.r$run.tshift+orig

    # create an all-1 dataset to mask the extents of the warp
    3dcalc -overwrite -a pb01.$subj.r$run.tshift+orig -expr 1 \
           -prefix rm.epi.all1

    # catenate volreg/epi2anat/tlrc xforms
    cat_matvec -ONELINE \
               ${subj}_T1w_ns+tlrc::WARP_DATA -I \
               ${subj}_T1w_al_junk_mat.aff12.1D -I \
               mat.r$run.vr.aff12.1D > mat.r$run.warp.aff12.1D

    # apply catenated xform: volreg/epi2anat/tlrc
    3dAllineate -base ${subj}_T1w_ns+tlrc \
                -input pb01.$subj.r$run.tshift+orig \
                -1Dmatrix_apply mat.r$run.warp.aff12.1D \
                -mast_dxyz 3 \
                -prefix rm.epi.nomask.r$run

    # warp the all-1 dataset for extents masking
    3dAllineate -base ${subj}_T1w_ns+tlrc \
                -input rm.epi.all1+orig \
                -1Dmatrix_apply mat.r$run.warp.aff12.1D \
                -mast_dxyz 3 -final NN -quiet \
                -prefix rm.epi.1.r$run

    # make an extents intersection mask of this run
    3dTstat -min -prefix rm.epi.min.r$run rm.epi.1.r$run+tlrc
end

# make a single file of registration params
cat dfile.r*.1D > dfile_rall.1D

# ----------------------------------------
# create the extents mask: mask_epi_extents+tlrc
# (this is a mask of voxels that have valid data at every TR)
3dMean -datum short -prefix rm.epi.mean rm.epi.min.r*.HEAD
3dcalc -a rm.epi.mean+tlrc -expr 'step(a-0.999)' -prefix mask_epi_extents
# and apply the extents mask to the EPI data
# (delete any time series with missing data)
foreach run ( $runs )
    3dcalc -a rm.epi.nomask.r$run+tlrc -b mask_epi_extents+tlrc \
           -expr 'a*b' -prefix pb02.$subj.r$run.volreg
end

# warp the volreg base EPI dataset to make a final version
cat_matvec -ONELINE \
           ${subj}_T1w_ns+tlrc::WARP_DATA -I \
           ${subj}_T1w_al_junk_mat.aff12.1D -I > mat.basewarp.aff12.1D

3dAllineate -base ${subj}_T1w_ns+tlrc \
            -input vr_base_min_outlier+orig \
            -1Dmatrix_apply mat.basewarp.aff12.1D \
            -mast_dxyz 3 \
            -prefix final_epi_vr_base_min_outlier

# create an anat_final dataset, aligned with stats
3dcopy ${subj}_T1w_ns+tlrc anat_final.$subj

# record final registration costs
3dAllineate -base final_epi_vr_base_min_outlier+tlrc -allcostX \
            -input anat_final.$subj+tlrc |& tee out.allcostX.txt

# -----------------------------------------
# warp anat follower datasets (affine)
3dAllineate -source ${subj}_T1w+orig \
            -master anat_final.$subj+tlrc \
            -final wsinc5 -1Dmatrix_apply warp.anat.Xat.1D \
            -prefix anat_w_skull_warped

# ======================= surf (map data to surface) =======================
# map EPI data to the surface domain


# align the surface anatomy with the current experiment anatomy
@SUMA_AlignToExperiment -exp_anat anat_final.$subj+tlrc \
                        -surf_anat $surface_dir/${subj}_SurfVol+orig \
                        -wd -strip_skull surf_anat \
                        -atlas_followers -overwrite_resp S \
                        -prefix ${subj}_SurfVol_Alnd_Exp

# map volume data to the surface of each hemisphere
foreach hemi ( lh )
    foreach run ( $runs )
        3dVol2Surf -spec $surface_dir/std.141.${subj}_${hemi}.spec \
                   -sv ${subj}_SurfVol_Alnd_Exp+tlrc \
                   -surf_A smoothwm \
                   -surf_B pial \
                   -f_index nodes \
                   -f_steps 10 \
                   -map_func ave \
                   -oob_value 0 \
                   -grid_parent pb02.$subj.r$run.volreg+tlrc \
                   -out_niml pb03.$subj.$hemi.r$run.surf.niml.dset
    end
end

# make local script for running suma, and make it executable
echo suma -spec $surface_dir/std.141.${subj}_lh.spec \
          -sv ${subj}_SurfVol_Alnd_Exp+tlrc > run_suma
chmod 755 run_suma
# =========================== blur (on surface) ============================
foreach hemi ( lh )
    foreach run ( $runs )
        # to save time, estimate blur parameters only once
        if ( ! -f surf.smooth.params.1D ) then
            SurfSmooth -spec $surface_dir/std.141.${subj}_${hemi}.spec \
                       -surf_A smoothwm \
                       -input pb03.$subj.$hemi.r$run.surf.niml.dset \
                       -met HEAT_07 \
                       -target_fwhm 4.0 \
                       -blurmaster pb03.$subj.$hemi.r$run.surf.niml.dset \
                       -detrend_master \
                       -output pb04.$subj.$hemi.r$run.blur.niml.dset \
                       | tee surf.smooth.params.1D
        else
            set params = `1dcat surf.smooth.params.1D`
            SurfSmooth -spec $surface_dir/std.141.${subj}_${hemi}.spec \
                       -surf_A smoothwm \
                       -input pb03.$subj.$hemi.r$run.surf.niml.dset \
                       -met HEAT_07 \
                       -Niter $params[1] \
                       -sigma $params[2] \
                       -output pb04.$subj.$hemi.r$run.blur.niml.dset
        endif
    end
end

# ================================= scale ==================================
# scale each voxel time series to have a mean of 100
# (be sure no negatives creep in)
# (subject to a range of [0,200])
foreach hemi ( lh )
    foreach run ( $runs )
        3dTstat -prefix rm.$hemi.mean_r$run.niml.dset \
                pb04.$subj.$hemi.r$run.blur.niml.dset
        3dcalc -a pb04.$subj.$hemi.r$run.blur.niml.dset \
               -b rm.$hemi.mean_r$run.niml.dset \
               -expr 'min(200, a/b*100)*step(a)*step(b)' \
               -prefix pb05.$subj.$hemi.r$run.scale.niml.dset
    end
end

# ================================ regress =================================

# compute de-meaned motion parameters (for use in regression)
1d_tool.py -infile dfile_rall.1D -set_nruns 2 \
           -demean -write motion_demean.1D

# compute motion parameter derivatives (just to have)
1d_tool.py -infile dfile_rall.1D -set_nruns 2 \
           -derivative -demean -write motion_deriv.1D

# convert motion parameters for per-run regression
1d_tool.py -infile motion_demean.1D -set_nruns 2 \
           -split_into_pad_runs mot_demean

# create censor file motion_${subj}_censor.1D, for censoring motion
1d_tool.py -infile dfile_rall.1D -set_nruns 2 \
    -show_censor_count -censor_prev_TR \
    -censor_motion 0.3 motion_${subj}

# note TRs that were not censored
set ktrs = `1d_tool.py -infile motion_${subj}_censor.1D \
                       -show_trs_uncensored encoded`

# ------------------------------
# run the regression analysis
foreach hemi ( lh )
    3dDeconvolve -input pb05.$subj.$hemi.r*.scale.niml.dset \
        -censor motion_${subj}_censor.1D \
        -ortvec mot_demean.r01.1D mot_demean_r01 \
        -ortvec mot_demean.r02.1D mot_demean_r02 \
        -polort 2 \
        -num_stimts 2 \
        -stim_times 1 stimuli/congruent.1D 'GAM' \
        -stim_label 1 congruent \
        -stim_times 2 stimuli/incongruent.1D 'GAM' \
        -stim_label 2 incongruent \
        -jobs 8 \
        -gltsym 'SYM: congruent -incongruent' \
        -glt_label 1 Con-Inc \
        -gltsym 'SYM: incongruent -congruent' \
        -glt_label 2 Inc-Con \
        -fout -tout -x1D X.xmat.1D -xjpeg X.jpg \
        -x1D_uncensored X.nocensor.xmat.1D \
        -fitts fitts.$subj.$hemi.niml.dset \
        -errts errts.${subj}.$hemi.niml.dset \
        -bucket stats.$subj.$hemi.niml.dset

    # -- execute the 3dREMLfit script, written by 3dDeconvolve --
    tcsh -x stats.REML_cmd
end


# display any large pairwise correlations from the X-matrix
1d_tool.py -show_cormat_warnings -infile X.xmat.1D |& tee out.cormat_warn.txt
# display degrees of freedom info from X-matrix
1d_tool.py -show_df_info -infile X.xmat.1D |& tee out.df_info.txt

# create an all_runs dataset to match the fitts, errts, etc.
foreach hemi ( lh )
    3dTcat -prefix all_runs.$subj.$hemi.niml.dset \
           pb05.$subj.$hemi.r*.scale.niml.dset
end

# --------------------------------------------------
# create a temporal signal to noise ratio dataset
#    signal: if 'scale' block, mean should be 100
#    noise : compute standard deviation of errts
foreach hemi ( lh )
    3dTstat -mean -prefix rm.signal.all.$hemi.niml.dset \
            all_runs.$subj.$hemi.niml.dset"[$ktrs]"
    3dTstat -stdev -prefix rm.noise.all.$hemi.niml.dset \
            errts.${subj}.$hemi.niml.dset"[$ktrs]"
    3dcalc -a rm.signal.all.$hemi.niml.dset \
           -b rm.noise.all.$hemi.niml.dset \
           -expr 'a/b' -prefix TSNR.$subj.$hemi.niml.dset
end

# create ideal files for fixed response stim types
1dcat X.nocensor.xmat.1D'[6]' > ideal_congruent.1D
1dcat X.nocensor.xmat.1D'[7]' > ideal_incongruent.1D

# --------------------------------------------------
# extract non-baseline regressors from the X-matrix,
# then compute their sum
1d_tool.py -infile X.nocensor.xmat.1D -write_xstim X.stim.xmat.1D
3dTstat -sum -prefix sum_ideal.1D X.stim.xmat.1D

# ========================== auto block: finalize ==========================

# remove temporary files
\rm -f rm.*

--------------------------------------------------------------------------------
/SUMA/runFreeSurfer.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Written by Andrew Jahn, 09.12.2019
# For use with the SUMA part of the AFNI short course, found here:
# https://andysbrainbook.readthedocs.io/en/latest/AFNI/AFNI_Short_Course/AFNI_09_SurfaceAnalysis.html

# Check whether the file "subjList.txt" exists; if it doesn't, create a file containing all of the subject names in our study

if [ ! -f subjList.txt ]; then
	ls .. | grep ^sub- > subjList.txt
fi

# Copy each subject's anatomical file into the current directory, unzip the file, and set the current directory as FreeSurfer's SUBJECTS_DIR. Then process each of the anatomical files with recon-all using the "parallel" command

for sub in `cat subjList.txt`; do
	cp ../${sub}/anat/*.gz .
done

gunzip *.gz

# export so that recon-all (run in child processes by parallel) sees it
export SUBJECTS_DIR=`pwd`

ls *.nii | parallel --jobs 8 recon-all -s {.} -i {} -all -qcache

# Clean up

rm *.nii
--------------------------------------------------------------------------------
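Note: recon-all alone does not produce the SUMA/ directories (the ${subj}_SurfVol and std.141 spec files) that SUMA_afni_proc.sh and SUMA_proc_MNI.sh read. A minimal sketch of that conversion step, assuming AFNI's @SUMA_Make_Spec_FS (which, in recent AFNI versions, also writes the standard-mesh std.141 surfaces) and the SUBJECTS_DIR layout created above:

    # convert each FreeSurfer subject to AFNI/SUMA format
    for sub in `cat subjList.txt`; do
        @SUMA_Make_Spec_FS -sid ${sub}_T1w -fspath $SUBJECTS_DIR/${sub}_T1w
    done
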
/cmd.ap.AFNI_TENT:
--------------------------------------------------------------------------------
#!/usr/bin/env tcsh

# created by uber_subject.py: version 0.37 (April 14, 2015)
# creation date: Tue Mar 22 11:56:09 2016

# set data directories
set top_dir = $PWD

# set subject and group identifiers
set subj     = $1
set group_id = ToneCounting

# run afni_proc.py to create a single subject processing script
afni_proc.py -subj_id $subj \
        -script proc.$subj -scr_overwrite \
        -blocks tshift align tlrc volreg blur mask scale regress \
        -copy_anat $top_dir/anat/${subj}_T1w.nii.gz \
        -tcat_remove_first_trs 0 \
        -dsets $top_dir/func/${subj}_task-tonecounting_bold.nii.gz \
        -volreg_align_to third \
        -volreg_align_e2a \
        -volreg_tlrc_warp \
        -blur_size 4.0 \
        -regress_stim_times \
            $top_dir/func/tone_counting_onset_times.txt \
            $top_dir/func/tone_counting_probe_onsets.txt \
        -regress_stim_labels \
            tone_counting probe \
        -regress_basis_multi \
            'TENT(0,12,7)' 'TENT(0,14,8)' \
        -regress_censor_motion 0.3 \
        -regress_make_ideal_sum sum_ideal.1D \
        -regress_run_clustsim no \
        -html_review_style none

tcsh -xef proc.$subj |& tee output.proc.$subj
--------------------------------------------------------------------------------
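Note on the TENT parameters: TENT(b,c,n) fits n piecewise-linear "tent" basis functions spanning b to c seconds after stimulus onset, spaced (c-b)/(n-1) seconds apart. So 'TENT(0,12,7)' estimates the response at 0, 2, 4, ..., 12 s post-onset ((12-0)/(7-1) = 2 s spacing), and 'TENT(0,14,8)' at 0, 2, ..., 14 s.
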
/createSphere.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Written by Andrew Jahn, University of Michigan, 03.25.2019

display_usage() {
	echo "$(basename $0) [sphere radius] [master dataset] [x] [y] [z] [ROI name]"
	echo "This script creates a sphere for ROI analysis. It requires 6 arguments:
	1) The size of the radius of the sphere (in millimeters);
	2) A master dataset, which determines the dimensions and voxel resolution of the ROI [I recommend using the errts file created by 3dDeconvolve];
	3-5) X, Y, and Z coordinates (assumed to be MNI and in LPI orientation);
	6) A label for the ROI"
	echo "This command also extracts a time series from the ROI, labeled [ROI name]_TimeSeries.txt, and performs a functional connectivity analysis on that time series."
}

if [ $# -le 5 ]
then
	display_usage
	exit 1
fi


SRAD=$1
MASTER=$2
X=$3
Y=$4
Z=$5
OUTPUT=$6

##################
# Create the ROI #
##################

echo $X $Y $Z | 3dUndump -prefix $OUTPUT -srad $SRAD -orient LPI -master $MASTER -xyz -


########################################
# Extract the time series from the ROI #
########################################

3dmaskave -quiet -mask ${OUTPUT}+tlrc $MASTER > ${OUTPUT}_TimeSeries.txt

#Examine the time series using 1dplot

1dplot ${OUTPUT}_TimeSeries.txt


####################################
# Functional connectivity analysis #
####################################

3dfim+ -bucket ${OUTPUT}_corr -out Correlation -ideal_file ${OUTPUT}_TimeSeries.txt -input $MASTER

#Convert the correlation map to a z-map (Fisher's r-to-z)

3dcalc -a ${OUTPUT}_corr+tlrc -expr 'atanh(a)' -prefix ${OUTPUT}_corr_r2z
--------------------------------------------------------------------------------
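A hypothetical invocation (the radius, errts file name, coordinates, and label below are illustrative only):

    bash createSphere.sh 5 errts.sub-01+tlrc 0 -52 26 PCC
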
/extractBetas.sh:
--------------------------------------------------------------------------------
#!/bin/bash

#This script extracts the betas for use with 3dmaskave for an ROI analysis
#Replace the number in the brackets with the sub-brick containing the beta you wish to extract, and change the name of the output dataset as well

for subj in `cat subjList.txt`; do
	3dbucket -aglueto Congruent_betas+tlrc.HEAD ${subj}/${subj}.results/stats.${subj}+tlrc'[1]'
	3dbucket -aglueto Incongruent_betas+tlrc.HEAD ${subj}/${subj}.results/stats.${subj}+tlrc'[4]'
done
--------------------------------------------------------------------------------
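Before editing the bracketed sub-brick indices above, the label of each stats sub-brick can be checked with 3dinfo (sub-01 shown as an example):

    3dinfo -label sub-01/sub-01.results/stats.sub-01+tlrc
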
/make_AFNI_Timings_TENT.sh:
--------------------------------------------------------------------------------
#!/bin/bash

#Check whether the file subjList.txt exists; if not, create it
if [ ! -f subjList.txt ]; then
	ls -d sub-?? > subjList.txt
fi

#Loop over all subjects and extract onset times in AFNI format
for subj in `cat subjList.txt` ; do
	cd $subj/func #Navigate to the subject's func directory, which contains the timing files

	#Extract onset times for tone count and probe
	cat ${subj}_task-tonecounting_events.tsv | awk '{if (NR!=1 && $5=="") {print $1}}' > tone_counting_onset_times.txt
	cat ${subj}_task-tonecounting_events.tsv | awk '{if ($5=="probe") {print $1}}' > tone_counting_probe_onsets.txt

	cd ../..
done
--------------------------------------------------------------------------------
/make_Gamble_Timings.sh:
--------------------------------------------------------------------------------
#!/bin/bash

#Check whether the file subjList.txt exists; if not, create it
if [ ! -f subjList.txt ]; then
	ls | grep ^sub- > subjList.txt
fi

#Loop over all subjects and format the timing files into AFNI's "married" amplitude-modulated format,
#concatenating the three runs (runs 2 and 3 are offset by 480 and 960 seconds, respectively)
for subj in `cat subjList.txt`; do
	cd $subj/func
	cat ${subj}_task-mixedgamblestask_run-01_events.tsv | awk 'NR>1 {print $1"*"$5","$3}' > gamble_Timings.1D
	cat ${subj}_task-mixedgamblestask_run-02_events.tsv | awk 'NR>1 {print $1+480"*"$5","$3}' >> gamble_Timings.1D
	cat ${subj}_task-mixedgamblestask_run-03_events.tsv | awk 'NR>1 {print $1+960"*"$5","$3}' >> gamble_Timings.1D

	cd ../..

done
--------------------------------------------------------------------------------
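Each output line has AFNI's married-timing form onset*amp1,amp2 — an onset time decorated with two amplitude modulators (columns 5 and 3 of the events file), e.g. a line like 4.07*24,12 (values illustrative). This pairs with -regress_stim_types AM2 in runAFNIproc.sh below, which produces a separate parameter estimate per modulator — the stats sub-bricks later pulled out by runGroupAnalysis_GainPM.sh and runGroupAnalysis_LossPM.sh.
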
/make_Timings.sh:
--------------------------------------------------------------------------------
#!/bin/bash


#Check whether the file subjList.txt exists; if not, create it
if [ ! -f subjList.txt ]; then
	ls | grep ^sub- > subjList.txt
fi

#Loop over all subjects and format timing files into FSL format
for subj in `cat subjList.txt`; do
	cd $subj/func
	cat ${subj}_task-flanker_run-1_events.tsv | awk '{if ($3=="incongruent_correct") {print $1, $2, 1}}' > incongruent_run1.txt
	cat ${subj}_task-flanker_run-1_events.tsv | awk '{if ($3=="congruent_correct") {print $1, $2, 1}}' > congruent_run1.txt

	cat ${subj}_task-flanker_run-2_events.tsv | awk '{if ($3=="incongruent_correct") {print $1, $2, 1}}' > incongruent_run2.txt
	cat ${subj}_task-flanker_run-2_events.tsv | awk '{if ($3=="congruent_correct") {print $1, $2, 1}}' > congruent_run2.txt

	#Now convert to AFNI format
	timing_tool.py -fsl_timing_files congruent*.txt -write_timing congruent.1D
	timing_tool.py -fsl_timing_files incongruent*.txt -write_timing incongruent.1D

	cd ../..

done
--------------------------------------------------------------------------------
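For reference, the intermediate files use FSL's 3-column format, one event per line (onset, duration, weight), e.g. (values illustrative):

    0.0   2.0   1
    12.5  2.0   1

timing_tool.py then collapses each condition into a single AFNI timing file with one row of onset times per run; the event duration re-enters the model through the basis function chosen in afni_proc.py (e.g. BLOCK(2,1) in proc_Flanker.sh below).
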
/proc_Flanker.sh:
--------------------------------------------------------------------------------
#!/bin/tcsh -xef

echo "auto-generated by afni_proc.py, Fri Sep 28 14:08:08 2018"
echo "(version 6.14, May 25, 2018)"
echo "execution started: `date`"

# execute via :
#   tcsh -xef proc.s01 |& tee output.proc.s01

# =========================== auto block: setup ============================
# script setup

# take note of the AFNI version
afni -ver

# check that the current AFNI version is recent enough
afni_history -check_date 3 May 2018
if ( $status ) then
    echo "** this script requires newer AFNI binaries (than 3 May 2018)"
    echo "   (consider: @update.afni.binaries -defaults)"
    exit
endif

# the user may specify a single subject to run with
if ( $#argv > 0 ) then
    set subj = $argv[1]
else
    set subj = s01
endif

# assign output directory name
set output_dir = $subj.results

# verify that the results directory does not yet exist
if ( -d $output_dir ) then
    echo output dir "$subj.results" already exists
    exit
endif

# set list of runs
set runs = (`count -digits 2 1 2`)

# create results and stimuli directories
mkdir $output_dir
mkdir $output_dir/stimuli

# copy stim files into stimulus directory
cp \
    $PWD/{$subj}/func/congruent.1D \
    $PWD/{$subj}/func/incongruent.1D \
    $output_dir/stimuli

# copy anatomy to results dir
3dcopy \
    $PWD/{$subj}/anat/{$subj}_T1w.nii.gz \
    $output_dir/{$subj}_T1w

# ============================ auto block: tcat ============================
# apply 3dTcat to copy input dsets to results dir,
# while removing the first 0 TRs
3dTcat -prefix $output_dir/pb00.$subj.r01.tcat \
    $PWD/{$subj}/func/{$subj}_task-flanker_run-1_bold.nii.gz'[0..$]'
3dTcat -prefix $output_dir/pb00.$subj.r02.tcat \
    $PWD/{$subj}/func/{$subj}_task-flanker_run-2_bold.nii.gz'[0..$]'

# and make note of repetitions (TRs) per run
set tr_counts = ( 146 146 )

# -------------------------------------------------------
# enter the results directory (can begin processing data)
cd $output_dir


# ========================== auto block: outcount ==========================
# data check: compute outlier fraction for each volume
touch out.pre_ss_warn.txt
foreach run ( $runs )
    3dToutcount -automask -fraction -polort 2 -legendre \
                pb00.$subj.r$run.tcat+orig > outcount.r$run.1D

    # outliers at TR 0 might suggest pre-steady state TRs
    if ( `1deval -a outcount.r$run.1D"{0}" -expr "step(a-0.4)"` ) then
        echo "** TR #0 outliers: possible pre-steady state TRs in run $run" \
            >> out.pre_ss_warn.txt
    endif
end

# catenate outlier counts into a single time series
cat outcount.r*.1D > outcount_rall.1D

# get run number and TR index for minimum outlier volume
set minindex = `3dTstat -argmin -prefix - outcount_rall.1D\'`
set ovals = ( `1d_tool.py -set_run_lengths $tr_counts \
                          -index_to_run_tr $minindex` )
# save run and TR indices for extraction of vr_base_min_outlier
set minoutrun = $ovals[1]
set minouttr  = $ovals[2]
echo "min outlier: run $minoutrun, TR $minouttr" | tee out.min_outlier.txt
# ================================= tshift =================================
# time shift data so all slice timing is the same
foreach run ( $runs )
    3dTshift -tzero 0 -quintic -prefix pb01.$subj.r$run.tshift \
             pb00.$subj.r$run.tcat+orig
end

# --------------------------------
# extract volreg registration base
3dbucket -prefix vr_base_min_outlier \
    pb01.$subj.r$minoutrun.tshift+orig"[$minouttr]"

# ================================= align ==================================
# for e2a: compute anat alignment transformation to EPI registration base
# (new anat will be intermediate, stripped, {$subj}_T1w_ns+orig)
align_epi_anat.py -anat2epi -anat {$subj}_T1w+orig \
       -save_skullstrip -suffix _al_junk \
       -epi vr_base_min_outlier+orig -epi_base 0 \
       -epi_strip 3dSkullStrip \
       -giant_move \
       -volreg off -tshift off

# ================================== tlrc ==================================
# warp anatomy to standard space
@auto_tlrc -base MNI_avg152T1+tlrc -input {$subj}_T1w_ns+orig -init_xform AUTO_CENTER -no_ss

# store forward transformation matrix in a text file
cat_matvec {$subj}_T1w_ns+tlrc::WARP_DATA -I > warp.anat.Xat.1D

# ================================= volreg =================================
# align each dset to base volume, align to anat, warp to tlrc space

# verify that we have a +tlrc warp dataset
if ( ! -f {$subj}_T1w_ns+tlrc.HEAD ) then
    echo "** missing +tlrc warp dataset: {$subj}_T1w_ns+tlrc.HEAD"
    exit
endif

# register and warp
foreach run ( $runs )
    # register each volume to the base image
    3dvolreg -verbose -zpad 1 -base vr_base_min_outlier+orig \
             -1Dfile dfile.r$run.1D -prefix rm.epi.volreg.r$run \
             -cubic \
             -1Dmatrix_save mat.r$run.vr.aff12.1D \
             pb01.$subj.r$run.tshift+orig

    # create an all-1 dataset to mask the extents of the warp
    3dcalc -overwrite -a pb01.$subj.r$run.tshift+orig -expr 1 \
           -prefix rm.epi.all1

    # catenate volreg/epi2anat/tlrc xforms
    cat_matvec -ONELINE \
               {$subj}_T1w_ns+tlrc::WARP_DATA -I \
               {$subj}_T1w_al_junk_mat.aff12.1D -I \
               mat.r$run.vr.aff12.1D > mat.r$run.warp.aff12.1D

    # apply catenated xform: volreg/epi2anat/tlrc
    3dAllineate -base {$subj}_T1w_ns+tlrc \
                -input pb01.$subj.r$run.tshift+orig \
                -1Dmatrix_apply mat.r$run.warp.aff12.1D \
                -mast_dxyz 3 \
                -prefix rm.epi.nomask.r$run

    # warp the all-1 dataset for extents masking
    3dAllineate -base {$subj}_T1w_ns+tlrc \
                -input rm.epi.all1+orig \
                -1Dmatrix_apply mat.r$run.warp.aff12.1D \
                -mast_dxyz 3 -final NN -quiet \
                -prefix rm.epi.1.r$run

    # make an extents intersection mask of this run
    3dTstat -min -prefix rm.epi.min.r$run rm.epi.1.r$run+tlrc
end

# make a single file of registration params
cat dfile.r*.1D > dfile_rall.1D

# ----------------------------------------
# create the extents mask: mask_epi_extents+tlrc
# (this is a mask of voxels that have valid data at every TR)
3dMean -datum short -prefix rm.epi.mean rm.epi.min.r*.HEAD
3dcalc -a rm.epi.mean+tlrc -expr 'step(a-0.999)' -prefix mask_epi_extents

# and apply the extents mask to the EPI data
# (delete any time series with missing data)
foreach run ( $runs )
    3dcalc -a rm.epi.nomask.r$run+tlrc -b mask_epi_extents+tlrc \
           -expr 'a*b' -prefix pb02.$subj.r$run.volreg
end

# warp the volreg base EPI dataset to make a final version
cat_matvec -ONELINE \
           {$subj}_T1w_ns+tlrc::WARP_DATA -I \
           {$subj}_T1w_al_junk_mat.aff12.1D -I > mat.basewarp.aff12.1D

3dAllineate -base {$subj}_T1w_ns+tlrc \
            -input vr_base_min_outlier+orig \
            -1Dmatrix_apply mat.basewarp.aff12.1D \
            -mast_dxyz 3 \
            -prefix final_epi_vr_base_min_outlier

# create an anat_final dataset, aligned with stats
3dcopy {$subj}_T1w_ns+tlrc anat_final.$subj

# record final registration costs
3dAllineate -base final_epi_vr_base_min_outlier+tlrc -allcostX \
            -input anat_final.$subj+tlrc |& tee out.allcostX.txt

# -----------------------------------------
# warp anat follower datasets (affine)
3dAllineate -source {$subj}_T1w+orig \
            -master anat_final.$subj+tlrc \
            -final wsinc5 -1Dmatrix_apply warp.anat.Xat.1D \
            -prefix anat_w_skull_warped
# ================================== blur ==================================
# blur each volume of each run
foreach run ( $runs )
    3dmerge -1blur_fwhm 4.0 -doall -prefix pb03.$subj.r$run.blur \
            pb02.$subj.r$run.volreg+tlrc
end

# ================================== mask ==================================
# create 'full_mask' dataset (union mask)
foreach run ( $runs )
    3dAutomask -prefix rm.mask_r$run pb03.$subj.r$run.blur+tlrc
end

# create union of inputs, output type is byte
3dmask_tool -inputs rm.mask_r*+tlrc.HEAD -union -prefix full_mask.$subj

# ---- create subject anatomy mask, mask_anat.$subj+tlrc ----
#      (resampled from tlrc anat)
3dresample -master full_mask.$subj+tlrc -input {$subj}_T1w_ns+tlrc \
           -prefix rm.resam.anat

# convert to binary anat mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.anat+tlrc \
            -prefix mask_anat.$subj

# compute tighter EPI mask by intersecting with anat mask
3dmask_tool -input full_mask.$subj+tlrc mask_anat.$subj+tlrc \
            -inter -prefix mask_epi_anat.$subj

# compute overlaps between anat and EPI masks
3dABoverlap -no_automask full_mask.$subj+tlrc mask_anat.$subj+tlrc \
            |& tee out.mask_ae_overlap.txt

# note Dice coefficient of masks, as well
3ddot -dodice full_mask.$subj+tlrc mask_anat.$subj+tlrc \
      |& tee out.mask_ae_dice.txt

# ---- create group anatomy mask, mask_group+tlrc ----
#      (resampled from tlrc base anat, MNI_avg152T1+tlrc)
3dresample -master full_mask.$subj+tlrc -prefix ./rm.resam.group \
           -input /Users/${USER}/abin/MNI_avg152T1+tlrc

# convert to binary group mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.group+tlrc \
            -prefix mask_group

# ================================= scale ==================================
# scale each voxel time series to have a mean of 100
# (be sure no negatives creep in)
# (subject to a range of [0,200])
foreach run ( $runs )
    3dTstat -prefix rm.mean_r$run pb03.$subj.r$run.blur+tlrc
    3dcalc -a pb03.$subj.r$run.blur+tlrc -b rm.mean_r$run+tlrc \
           -c mask_epi_extents+tlrc \
           -expr 'c * min(200, a/b*100)*step(a)*step(b)' \
           -prefix pb04.$subj.r$run.scale
end

# ================================ regress =================================

# compute de-meaned motion parameters (for use in regression)
1d_tool.py -infile dfile_rall.1D -set_nruns 2 \
           -demean -write motion_demean.1D

# compute motion parameter derivatives (just to have)
1d_tool.py -infile dfile_rall.1D -set_nruns 2 \
           -derivative -demean -write motion_deriv.1D

# convert motion parameters for per-run regression
1d_tool.py -infile motion_demean.1D -set_nruns 2 \
           -split_into_pad_runs mot_demean

# create censor file motion_${subj}_censor.1D, for censoring motion
1d_tool.py -infile dfile_rall.1D -set_nruns 2 \
    -show_censor_count -censor_prev_TR \
    -censor_motion 0.3 motion_${subj}

# note TRs that were not censored
set ktrs = `1d_tool.py -infile motion_${subj}_censor.1D \
                       -show_trs_uncensored encoded`

# ------------------------------
# run the regression analysis
3dDeconvolve -input pb04.$subj.r*.scale+tlrc.HEAD \
    -censor motion_${subj}_censor.1D \
    -mask mask_group+tlrc \
    -polort 2 \
    -num_stimts 14 \
    -stim_times 1 stimuli/congruent.1D 'BLOCK(2,1)' \
    -stim_label 1 congruent \
    -stim_times 2 stimuli/incongruent.1D 'BLOCK(2,1)' \
    -stim_label 2 incongruent \
    -stim_file 3 mot_demean.r01.1D'[0]' -stim_base 3 -stim_label 3 roll_01 \
    -stim_file 4 mot_demean.r01.1D'[1]' -stim_base 4 -stim_label 4 pitch_01 \
    -stim_file 5 mot_demean.r01.1D'[2]' -stim_base 5 -stim_label 5 yaw_01 \
    -stim_file 6 mot_demean.r01.1D'[3]' -stim_base 6 -stim_label 6 dS_01 \
    -stim_file 7 mot_demean.r01.1D'[4]' -stim_base 7 -stim_label 7 dL_01 \
    -stim_file 8 mot_demean.r01.1D'[5]' -stim_base 8 -stim_label 8 dP_01 \
    -stim_file 9 mot_demean.r02.1D'[0]' -stim_base 9 -stim_label 9 roll_02 \
    -stim_file 10 mot_demean.r02.1D'[1]' -stim_base 10 -stim_label 10 pitch_02 \
    -stim_file 11 mot_demean.r02.1D'[2]' -stim_base 11 -stim_label 11 yaw_02 \
    -stim_file 12 mot_demean.r02.1D'[3]' -stim_base 12 -stim_label 12 dS_02 \
    -stim_file 13 mot_demean.r02.1D'[4]' -stim_base 13 -stim_label 13 dL_02 \
    -stim_file 14 mot_demean.r02.1D'[5]' -stim_base 14 -stim_label 14 dP_02 \
    -jobs 8 \
    -gltsym 'SYM: incongruent -congruent' \
    -glt_label 1 incongruent-congruent \
    -gltsym 'SYM: congruent -incongruent' \
    -glt_label 2 congruent-incongruent \
    -fout -tout -x1D X.xmat.1D -xjpeg X.jpg \
    -x1D_uncensored X.nocensor.xmat.1D \
    -fitts fitts.$subj \
    -errts errts.${subj} \
    -bucket stats.$subj


# if 3dDeconvolve fails, terminate the script
if ( $status != 0 ) then
    echo '---------------------------------------'
    echo '** 3dDeconvolve error, failing...'
    echo '   (consider the file 3dDeconvolve.err)'
    exit
endif
# display any large pairwise correlations from the X-matrix
1d_tool.py -show_cormat_warnings -infile X.xmat.1D |& tee out.cormat_warn.txt

# -- execute the 3dREMLfit script, written by 3dDeconvolve --
tcsh -x stats.REML_cmd

# create an all_runs dataset to match the fitts, errts, etc.
3dTcat -prefix all_runs.$subj pb04.$subj.r*.scale+tlrc.HEAD

# --------------------------------------------------
# create a temporal signal to noise ratio dataset
#    signal: if 'scale' block, mean should be 100
#    noise : compute standard deviation of errts
3dTstat -mean -prefix rm.signal.all all_runs.$subj+tlrc"[$ktrs]"
3dTstat -stdev -prefix rm.noise.all errts.${subj}+tlrc"[$ktrs]"
3dcalc -a rm.signal.all+tlrc \
       -b rm.noise.all+tlrc \
       -c full_mask.$subj+tlrc \
       -expr 'c*a/b' -prefix TSNR.$subj

# ---------------------------------------------------
# compute and store GCOR (global correlation average)
# (sum of squares of global mean of unit errts)
3dTnorm -norm2 -prefix rm.errts.unit errts.${subj}+tlrc
3dmaskave -quiet -mask full_mask.$subj+tlrc rm.errts.unit+tlrc \
          > gmean.errts.unit.1D
3dTstat -sos -prefix - gmean.errts.unit.1D\' > out.gcor.1D
echo "-- GCOR = `cat out.gcor.1D`"

# ---------------------------------------------------
# compute correlation volume
# (per voxel: average correlation across masked brain)
# (now just dot product with average unit time series)
3dcalc -a rm.errts.unit+tlrc -b gmean.errts.unit.1D -expr 'a*b' -prefix rm.DP
3dTstat -sum -prefix corr_brain rm.DP+tlrc

# create ideal files for fixed response stim types
1dcat X.nocensor.xmat.1D'[6]' > ideal_congruent.1D
1dcat X.nocensor.xmat.1D'[7]' > ideal_incongruent.1D

# --------------------------------------------------------
# compute sum of non-baseline regressors from the X-matrix
# (use 1d_tool.py to get list of regressor colums)
set reg_cols = `1d_tool.py -infile X.nocensor.xmat.1D -show_indices_interest`
3dTstat -sum -prefix sum_ideal.1D X.nocensor.xmat.1D"[$reg_cols]"

# also, create a stimulus-only X-matrix, for easy review
1dcat X.nocensor.xmat.1D"[$reg_cols]" > X.stim.xmat.1D

# ============================ blur estimation =============================
# compute blur estimates
touch blur_est.$subj.1D   # start with empty file

# create directory for ACF curve files
mkdir files_ACF

# -- estimate blur for each run in epits --
touch blur.epits.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask full_mask.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.epits.r$run.1D \
            all_runs.$subj+tlrc"[$trs]" >> blur.epits.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{0..$(2)}'\'` )
echo average epits FWHM blurs: $blurs
echo "$blurs   # epits FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{1..$(2)}'\'` )
echo average epits ACF blurs: $blurs
echo "$blurs   # epits ACF blur estimates" >> blur_est.$subj.1D

# -- estimate blur for each run in errts --
touch blur.errts.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask full_mask.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.errts.r$run.1D \
            errts.${subj}+tlrc"[$trs]" >> blur.errts.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{0..$(2)}'\'` )
echo average errts FWHM blurs: $blurs
echo "$blurs   # errts FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{1..$(2)}'\'` )
echo average errts ACF blurs: $blurs
echo "$blurs   # errts ACF blur estimates" >> blur_est.$subj.1D


# ================== auto block: generate review scripts ===================

# generate a review script for the unprocessed EPI data
gen_epi_review.py -script @epi_review.$subj \
    -dsets pb00.$subj.r*.tcat+orig.HEAD

# generate scripts to review single subject results
# (try with defaults, but do not allow bad exit status)
gen_ss_review_scripts.py -mot_limit 0.3 -exit0

# ========================== auto block: finalize ==========================

# remove temporary files
\rm -f rm.*

# if the basic subject review script is here, run it
# (want this to be the last text output)
if ( -e @ss_review_basic ) ./@ss_review_basic |& tee out.ss_review.$subj.txt

# return to parent directory
cd ..

mv {$subj}.results {$subj}

echo "execution finished: `date`"




# ==========================================================================
# script generated by the command:
#
# afni_proc.py -subj_id s01 -script proc.s01 -scr_overwrite -blocks tshift \
#     align tlrc volreg blur mask scale regress -copy_anat \
#     $PWD/{$subj}/anat/{$subj}_T1w.nii.gz \
#     -dsets \
#     $PWD/{$subj}/func/{$subj}_task-flanker_run-1_bold.nii.gz \
#     $PWD/{$subj}/func/{$subj}_task-flanker_run-2_bold.nii.gz \
#     -tcat_remove_first_trs 0 -align_opts_aea -giant_move -tlrc_base \
#     MNI_avg152T1+tlrc -volreg_align_to MIN_OUTLIER -volreg_align_e2a \
#     -volreg_tlrc_warp -blur_size 4.0 -regress_stim_times \
#     $PWD/{$subj}/func/congruent.1D \
#     $PWD/{$subj}/func/incongruent.1D \
#     -regress_stim_labels congruent incongruent -regress_basis 'BLOCK(2,1)' \
#     -regress_censor_motion 0.3 -regress_motion_per_run -regress_opts_3dD \
#     -jobs 8 -regress_make_ideal_sum sum_ideal.1D -regress_est_blur_epits \
#     -regress_est_blur_errts -regress_run_clustsim no
--------------------------------------------------------------------------------
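proc_Flanker.sh takes the subject ID as an optional first argument (defaulting to s01). A minimal sketch for looping it over the subjList.txt convention used by the other scripts here, mirroring the bash execution line documented in SUMA_proc_MNI.sh:

    for subj in `cat subjList.txt`; do
        tcsh -xef proc_Flanker.sh $subj 2>&1 | tee output.proc.$subj
    done
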
/runAFNIproc.sh:
--------------------------------------------------------------------------------
#!/bin/tcsh
set subj = $argv[1]

afni_proc.py -subj_id ${subj} -script proc.${subj} -scr_overwrite -blocks tshift \
        align tlrc volreg blur mask scale regress -copy_anat \
        $PWD/{$subj}/anat/{$subj}_T1w.nii.gz \
        -dsets \
        $PWD/{$subj}/func/{$subj}_task-mixedgamblestask_run-01_bold.nii.gz \
        $PWD/{$subj}/func/{$subj}_task-mixedgamblestask_run-02_bold.nii.gz \
        $PWD/{$subj}/func/{$subj}_task-mixedgamblestask_run-03_bold.nii.gz \
        -tcat_remove_first_trs 0 -align_opts_aea -giant_move -tlrc_base \
        MNI_avg152T1+tlrc -volreg_align_to MIN_OUTLIER -volreg_align_e2a \
        -volreg_tlrc_warp -blur_size 4.0 -regress_stim_times \
        $PWD/{$subj}/func/gamble_Timings.1D \
        -regress_stim_labels Gamble -regress_stim_types AM2 -regress_basis 'BLOCK(3,1)' \
        -regress_censor_motion 0.3 -regress_motion_per_run -regress_opts_3dD \
        -jobs 8 -regress_make_ideal_sum sum_ideal.1D \
        -regress_run_clustsim no

tcsh proc.${subj}
--------------------------------------------------------------------------------
/runGroupAnalysis_GainPM.sh:
--------------------------------------------------------------------------------
#!/bin/tcsh -xef

# created by uber_ttest.py: version 2.0 (December 28, 2017)
# creation date: Wed Feb 19 11:33:21 2020

# ---------------------- set process variables ----------------------

set mask_dset = $PWD/sub-01.results/mask_group+tlrc

set dirA = $PWD

# specify and possibly create results directory
set results_dir = test.results_GainPM
if ( ! -d $results_dir ) mkdir $results_dir

# ------------------------- process the data -------------------------

3dttest++ -prefix $results_dir/GainPM \
          -mask $mask_dset \
          -setA GainPM \
             01 "$dirA/sub-01.results/stats.sub-01+tlrc[3]" \
             02 "$dirA/sub-02.results/stats.sub-02+tlrc[3]" \
             03 "$dirA/sub-03.results/stats.sub-03+tlrc[3]" \
             04 "$dirA/sub-04.results/stats.sub-04+tlrc[3]" \
             05 "$dirA/sub-05.results/stats.sub-05+tlrc[3]" \
             06 "$dirA/sub-06.results/stats.sub-06+tlrc[3]" \
             07 "$dirA/sub-07.results/stats.sub-07+tlrc[3]" \
             08 "$dirA/sub-08.results/stats.sub-08+tlrc[3]" \
             09 "$dirA/sub-09.results/stats.sub-09+tlrc[3]" \
             10 "$dirA/sub-10.results/stats.sub-10+tlrc[3]" \
             11 "$dirA/sub-11.results/stats.sub-11+tlrc[3]" \
             12 "$dirA/sub-12.results/stats.sub-12+tlrc[3]" \
             13 "$dirA/sub-13.results/stats.sub-13+tlrc[3]" \
             14 "$dirA/sub-14.results/stats.sub-14+tlrc[3]" \
             15 "$dirA/sub-15.results/stats.sub-15+tlrc[3]" \
             16 "$dirA/sub-16.results/stats.sub-16+tlrc[3]"
--------------------------------------------------------------------------------
/runGroupAnalysis_LossPM.sh:
--------------------------------------------------------------------------------
#!/bin/tcsh -xef

# created by uber_ttest.py: version 2.0 (December 28, 2017)
# creation date: Wed Feb 19 11:33:21 2020

# ---------------------- set process variables ----------------------

set mask_dset = $PWD/sub-01.results/mask_group+tlrc

set dirA = $PWD

# specify and possibly create results directory
set results_dir = test.results_LossPM
if ( ! -d $results_dir ) mkdir $results_dir

# ------------------------- process the data -------------------------

3dttest++ -prefix $results_dir/LossPM \
          -mask $mask_dset \
          -setA LossPM \
             01 "$dirA/sub-01.results/stats.sub-01+tlrc[5]" \
             02 "$dirA/sub-02.results/stats.sub-02+tlrc[5]" \
             03 "$dirA/sub-03.results/stats.sub-03+tlrc[5]" \
             04 "$dirA/sub-04.results/stats.sub-04+tlrc[5]" \
             05 "$dirA/sub-05.results/stats.sub-05+tlrc[5]" \
             06 "$dirA/sub-06.results/stats.sub-06+tlrc[5]" \
             07 "$dirA/sub-07.results/stats.sub-07+tlrc[5]" \
             08 "$dirA/sub-08.results/stats.sub-08+tlrc[5]" \
             09 "$dirA/sub-09.results/stats.sub-09+tlrc[5]" \
             10 "$dirA/sub-10.results/stats.sub-10+tlrc[5]" \
             11 "$dirA/sub-11.results/stats.sub-11+tlrc[5]" \
             12 "$dirA/sub-12.results/stats.sub-12+tlrc[5]" \
             13 "$dirA/sub-13.results/stats.sub-13+tlrc[5]" \
             14 "$dirA/sub-14.results/stats.sub-14+tlrc[5]" \
             15 "$dirA/sub-15.results/stats.sub-15+tlrc[5]" \
             16 "$dirA/sub-16.results/stats.sub-16+tlrc[5]"
--------------------------------------------------------------------------------
/sub_08_afni_proc.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env tcsh

# created by uber_subject.py: version 1.2 (April 5, 2018)
# creation date: Mon Nov 18 12:30:05 2019

# set subject and group identifiers
set subj  = sub_08
set gname = Flanker

# set data directories
set top_dir  = ${PWD}/sub-08
set anat_dir = $top_dir/anat
set epi_dir  = $top_dir/func
set stim_dir = $top_dir/func

# run afni_proc.py to create a single subject processing script
afni_proc.py -subj_id $subj \
        -script proc.$subj -scr_overwrite \
        -blocks tshift align tlrc volreg blur mask scale regress \
        -copy_anat $anat_dir/sub-08_T1w.nii.gz \
        -dsets \
            $epi_dir/sub-08_task-flanker_run-1_bold.nii.gz \
            $epi_dir/sub-08_task-flanker_run-2_bold.nii.gz \
        -tcat_remove_first_trs 0 \
        -align_opts_aea -giant_move \
        -tlrc_base MNI_avg152T1+tlrc \
        -volreg_align_to MIN_OUTLIER \
        -volreg_align_e2a \
        -volreg_tlrc_warp \
        -blur_size 4.0 \
        -regress_stim_times \
            $stim_dir/congruent.1D \
            $stim_dir/incongruent.1D \
        -regress_stim_labels \
            congruent incongruent \
        -regress_basis 'GAM' \
        -regress_censor_motion 0.3 \
        -regress_motion_per_run \
        -regress_opts_3dD \
            -jobs 8 \
            -gltsym 'SYM: incongruent -congruent' -glt_label 1 \
                incongruent-congruent \
            -gltsym 'SYM: congruent -incongruent' -glt_label 2 \
                congruent-incongruent \
        -regress_reml_exec \
        -regress_make_ideal_sum sum_ideal.1D \
        -regress_est_blur_epits \
        -regress_est_blur_errts \
        -regress_run_clustsim no

--------------------------------------------------------------------------------
/ttest_TENT.sh:
--------------------------------------------------------------------------------
#!/bin/tcsh -xef

# created by uber_ttest.py: version 2.0 (December 28, 2017)
# creation date: Wed Feb 19 11:33:21 2020

# ---------------------- set process variables ----------------------

set mask_dset = $PWD/sub-01/sub-01.results/mask_group+tlrc

set dirA = $PWD

# specify and possibly create results directory
set results_dir = test.results_6s-0s
if ( ! -d $results_dir ) mkdir $results_dir

# ------------------------- process the data -------------------------

3dttest++ -prefix $results_dir/results_6s-0s -paired \
          -mask $mask_dset \
          -setA SixSeconds \
             01 "$dirA/sub-01/sub-01.results/stats.sub-01+tlrc[7]" \
             02 "$dirA/sub-02/sub-02.results/stats.sub-02+tlrc[7]" \
             03 "$dirA/sub-03/sub-03.results/stats.sub-03+tlrc[7]" \
             04 "$dirA/sub-04/sub-04.results/stats.sub-04+tlrc[7]" \
             05 "$dirA/sub-05/sub-05.results/stats.sub-05+tlrc[7]" \
             06 "$dirA/sub-06/sub-06.results/stats.sub-06+tlrc[7]" \
             07 "$dirA/sub-07/sub-07.results/stats.sub-07+tlrc[7]" \
             08 "$dirA/sub-08/sub-08.results/stats.sub-08+tlrc[7]" \
             09 "$dirA/sub-09/sub-09.results/stats.sub-09+tlrc[7]" \
             10 "$dirA/sub-10/sub-10.results/stats.sub-10+tlrc[7]" \
             11 "$dirA/sub-11/sub-11.results/stats.sub-11+tlrc[7]" \
             12 "$dirA/sub-12/sub-12.results/stats.sub-12+tlrc[7]" \
             13 "$dirA/sub-13/sub-13.results/stats.sub-13+tlrc[7]" \
             14 "$dirA/sub-14/sub-14.results/stats.sub-14+tlrc[7]" \
          -setB ZeroSeconds \
             01 "$dirA/sub-01/sub-01.results/stats.sub-01+tlrc[1]" \
             02 "$dirA/sub-02/sub-02.results/stats.sub-02+tlrc[1]" \
             03 "$dirA/sub-03/sub-03.results/stats.sub-03+tlrc[1]" \
             04 "$dirA/sub-04/sub-04.results/stats.sub-04+tlrc[1]" \
             05 "$dirA/sub-05/sub-05.results/stats.sub-05+tlrc[1]" \
             06 "$dirA/sub-06/sub-06.results/stats.sub-06+tlrc[1]" \
             07 "$dirA/sub-07/sub-07.results/stats.sub-07+tlrc[1]" \
             08 "$dirA/sub-08/sub-08.results/stats.sub-08+tlrc[1]" \
             09 "$dirA/sub-09/sub-09.results/stats.sub-09+tlrc[1]" \
             10 "$dirA/sub-10/sub-10.results/stats.sub-10+tlrc[1]" \
             11 "$dirA/sub-11/sub-11.results/stats.sub-11+tlrc[1]" \
             12 "$dirA/sub-12/sub-12.results/stats.sub-12+tlrc[1]" \
             13 "$dirA/sub-13/sub-13.results/stats.sub-13+tlrc[1]" \
             14 "$dirA/sub-14/sub-14.results/stats.sub-14+tlrc[1]"
--------------------------------------------------------------------------------