├── .codespellrc ├── .gitattributes ├── .github └── workflows │ └── codespell.yml ├── .gitignore ├── .zenodo.json ├── AUTHOR ├── Dockerfile ├── LICENSE ├── README.md ├── Singularity ├── bin └── reproin ├── docs ├── Makefile ├── source │ └── images │ │ ├── dbic-conversions.png │ │ ├── dbic-flow.png │ │ └── walkthrough-1 │ │ ├── Makefile │ │ ├── crop.py │ │ ├── orig │ │ ├── wt1-1-topmost.png │ │ ├── wt1-2.png │ │ ├── wt1-3.png │ │ ├── wt1-4.png │ │ ├── wt1-5-newregion.png │ │ ├── wt1-6.png │ │ ├── wt1-7-newexam.png │ │ ├── wt1-8.png │ │ ├── wt1-9.png │ │ ├── wt1-a.png │ │ ├── wt1-b.0.png │ │ ├── wt1-b.1-save.png │ │ ├── wt1-b.2-save2_crop-dot+save.png │ │ ├── wt1-b.2-saved3.png │ │ ├── wt1-c.1-ses02.png │ │ ├── wt1-c.2-saveas.png │ │ ├── wt1-c.3_crop-dot+save.png │ │ ├── wt1-c.4.png │ │ ├── wt1-d.1.register_crop-patientreg.png │ │ ├── wt1-d.2.choose-investigator_crop-patientconf.png │ │ ├── wt1-d.3.chose-investigator_crop-patientconf.png │ │ ├── wt1-d.4.chose-exam_crop-patientconf.png │ │ ├── wt1-d.5-endofdescription_crop-patientconf.png │ │ ├── wt1-e.1-scan-interrupt-func_crop-exam.png │ │ ├── wt1-e.1-scan-interrupt-repeat_crop-exam+menu.png │ │ ├── wt1-e.1-scan1_crop-exam.png │ │ ├── wt1-e.3-scan-interrupt-repeated_crop-exam.png │ │ ├── wt1-f.1-repeatscout_crop-exam+menu.png │ │ ├── wt1-f.2-hadtorepeatfun-run02_crop-exam.png │ │ ├── wt1-f.3-repeatfmap_crop-exam+menu.png │ │ ├── wt1-f.4-skip_origrun02_crop-exam.png │ │ ├── wt1-f.5-renamedfmap-run02_crop-exam.png │ │ ├── wt1-f.5-renamefmap_crop-exam.png │ │ └── wt1-g-done_crop-exam.png │ │ ├── wt1-1-topmost.png │ │ ├── wt1-2.png │ │ ├── wt1-3.png │ │ ├── wt1-4.png │ │ ├── wt1-5-newregion.png │ │ ├── wt1-6.png │ │ ├── wt1-7-newexam.png │ │ ├── wt1-8.png │ │ ├── wt1-9.png │ │ ├── wt1-a.png │ │ ├── wt1-b.0.png │ │ ├── wt1-b.1-save.png │ │ ├── wt1-b.2-save2.png │ │ ├── wt1-b.2-saved3.png │ │ ├── wt1-c.1-ses02.png │ │ ├── wt1-c.2-saveas.png │ │ ├── wt1-c.3.png │ │ ├── wt1-c.4.png │ │ ├── wt1-d.1.register.png │ │ ├── wt1-d.2.choose-investigator.png │ │ ├── wt1-d.3.chose-investigator.png │ │ ├── wt1-d.4.chose-exam.png │ │ ├── wt1-d.5-endofdescription.png │ │ ├── wt1-e.1-scan-interrupt-func.png │ │ ├── wt1-e.1-scan-interrupt-repeat.png │ │ ├── wt1-e.1-scan1.png │ │ ├── wt1-e.3-scan-interrupt-repeated.png │ │ ├── wt1-f.1-repeatscout.png │ │ ├── wt1-f.2-hadtorepeatfun-run02.png │ │ ├── wt1-f.3-repeatfmap.png │ │ ├── wt1-f.4-skip_origrun02.png │ │ ├── wt1-f.5-renamedfmap-run02.png │ │ ├── wt1-f.5-renamefmap.png │ │ └── wt1-g-done.png └── walkthrough-1.md ├── generate_container.sh ├── resources └── cfg_reproin_bids.py └── tests └── test_run.sh /.codespellrc: -------------------------------------------------------------------------------- 1 | [codespell] 2 | skip = .git,*.pdf,*.svg 3 | # ignore-words-list = 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * annex.backend=MD5E 2 | * annex.largefiles=(not(mimetype=text/*)) 3 | **/.git* annex.largefiles=nothing 4 | -------------------------------------------------------------------------------- /.github/workflows/codespell.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Codespell 3 | 4 | on: 5 | push: 6 | branches: [master] 7 | pull_request: 8 | branches: [master] 9 | 10 | jobs: 11 | codespell: 12 | name: Check for spelling errors 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout 17 | uses: 
actions/checkout@v3 18 | - name: Codespell 19 | uses: codespell-project/actions-codespell@v1 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | -------------------------------------------------------------------------------- /.zenodo.json: -------------------------------------------------------------------------------- 1 | { 2 | "creators": [ 3 | { 4 | "affiliation": "Dartmouth College", 5 | "name": "Visconti di Oleggio Castello, Matteo", 6 | "orcid": "0000-0001-7931-5272" 7 | }, 8 | { 9 | "affiliation": "Dartmouth College", 10 | "name": "Dobson, James E." 11 | }, 12 | { 13 | "affiliation": "Dartmouth College", 14 | "name": "Sackett, Terry" 15 | }, 16 | { 17 | "affiliation": "Dartmouth College", 18 | "name": "Kodiweera, Chandana" 19 | }, 20 | { 21 | "affiliation": "Dartmouth College", 22 | "name": "Haxby, James V." 23 | }, 24 | { 25 | "affiliation": "MIT", 26 | "name": "Goncalves, Mathias" 27 | }, 28 | { 29 | "affiliation": "MIT, HMS", 30 | "name": "Ghosh, Satrajit", 31 | "orcid": "0000-0002-5312-6729" 32 | }, 33 | { 34 | "affiliation": "Dartmouth College", 35 | "name": "Halchenko, Yaroslav O.", 36 | "orcid": "0000-0003-3456-2493" 37 | } 38 | ] 39 | } 40 | -------------------------------------------------------------------------------- /AUTHOR: -------------------------------------------------------------------------------- 1 | Yaroslav O. Halchenko 2 | Matteo Visconti di Oleggio Castello 3 | Mathias Goncalves 4 | Satrajit Ghosh 5 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Generated by: Neurodocker version 0.7.0+0.gdc97516.dirty 2 | # Timestamp: 2023/06/05 22:47:49 UTC 3 | # 4 | # Thank you for using Neurodocker. If you discover any issues 5 | # or ways to improve this software, please submit an issue or 6 | # pull request on our GitHub repository: 7 | # 8 | # https://github.com/ReproNim/neurodocker 9 | 10 | FROM neurodebian:bookworm 11 | 12 | USER root 13 | 14 | ARG DEBIAN_FRONTEND="noninteractive" 15 | 16 | RUN apt-get update -qq \ 17 | && apt-get install -y -q --no-install-recommends neurodebian-freeze \ 18 | && nd_freeze 20230604 \ 19 | && apt-get clean \ 20 | && rm -rf /var/lib/apt/lists/* 21 | 22 | ENV LANG="en_US.UTF-8" \ 23 | LC_ALL="en_US.UTF-8" \ 24 | ND_ENTRYPOINT="/neurodocker/startup.sh" 25 | RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \ 26 | && apt-get update -qq \ 27 | && apt-get install -y -q --no-install-recommends \ 28 | apt-utils \ 29 | bzip2 \ 30 | ca-certificates \ 31 | curl \ 32 | locales \ 33 | unzip \ 34 | && apt-get clean \ 35 | && rm -rf /var/lib/apt/lists/* \ 36 | && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \ 37 | && dpkg-reconfigure --frontend=noninteractive locales \ 38 | && update-locale LANG="en_US.UTF-8" \ 39 | && chmod 777 /opt && chmod a+s /opt \ 40 | && mkdir -p /neurodocker \ 41 | && if [ ! 
-f "$ND_ENTRYPOINT" ]; then \ 42 | echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" \ 43 | && echo 'set -e' >> "$ND_ENTRYPOINT" \ 44 | && echo 'export USER="${USER:=`whoami`}"' >> "$ND_ENTRYPOINT" \ 45 | && echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; \ 46 | fi \ 47 | && chmod -R 777 /neurodocker && chmod a+s /neurodocker 48 | 49 | ENTRYPOINT ["/neurodocker/startup.sh"] 50 | 51 | RUN apt-get update -qq \ 52 | && apt-get install -y -q --no-install-recommends \ 53 | vim \ 54 | wget \ 55 | strace \ 56 | time \ 57 | ncdu \ 58 | gnupg \ 59 | curl \ 60 | procps \ 61 | datalad \ 62 | pigz \ 63 | less \ 64 | tree \ 65 | git-annex \ 66 | python3-nibabel \ 67 | python3-nipype \ 68 | virtualenv \ 69 | shellcheck \ 70 | python3-dcmstack \ 71 | python3-funcsigs \ 72 | python3-etelemetry \ 73 | python3-pytest \ 74 | dcmtk \ 75 | python3-pip \ 76 | python3-wheel \ 77 | python3-setuptools \ 78 | python3-datalad \ 79 | python3-filelock \ 80 | dcm2niix \ 81 | python3-pytest \ 82 | python3-nose \ 83 | python3-venv \ 84 | heudiconv=0.13.1-1~nd120+1 \ 85 | && apt-get clean \ 86 | && rm -rf /var/lib/apt/lists/* 87 | 88 | RUN : 89 | 90 | RUN apt-get update && apt-get -y dist-upgrade 91 | 92 | RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - 93 | 94 | RUN apt-get update -qq \ 95 | && apt-get install -y -q --no-install-recommends \ 96 | nodejs \ 97 | npm \ 98 | && apt-get clean \ 99 | && rm -rf /var/lib/apt/lists/* 100 | 101 | RUN npm install -g bids-validator@1.11.0 102 | 103 | RUN mkdir /afs /inbox 104 | 105 | COPY ["bin/reproin", "/usr/local/bin/reproin"] 106 | 107 | RUN chmod a+rx /usr/local/bin/reproin 108 | 109 | RUN test "$(getent passwd reproin)" || useradd --no-user-group --create-home --shell /bin/bash reproin 110 | USER reproin 111 | 112 | ENTRYPOINT ["/usr/local/bin/reproin"] 113 | 114 | RUN echo '{ \ 115 | \n "pkg_manager": "apt", \ 116 | \n "instructions": [ \ 117 | \n [ \ 118 | \n "base", \ 119 | \n "neurodebian:bookworm" \ 120 | \n ], \ 121 | \n [ \ 122 | \n "ndfreeze", \ 123 | \n { \ 124 | \n "date": "20230604" \ 125 | \n } \ 126 | \n ], \ 127 | \n [ \ 128 | \n "install", \ 129 | \n [ \ 130 | \n "vim", \ 131 | \n "wget", \ 132 | \n "strace", \ 133 | \n "time", \ 134 | \n "ncdu", \ 135 | \n "gnupg", \ 136 | \n "curl", \ 137 | \n "procps", \ 138 | \n "datalad", \ 139 | \n "pigz", \ 140 | \n "less", \ 141 | \n "tree", \ 142 | \n "git-annex", \ 143 | \n "python3-nibabel", \ 144 | \n "python3-nipype", \ 145 | \n "virtualenv", \ 146 | \n "shellcheck", \ 147 | \n "python3-dcmstack", \ 148 | \n "python3-funcsigs", \ 149 | \n "python3-etelemetry", \ 150 | \n "python3-pytest", \ 151 | \n "dcmtk", \ 152 | \n "python3-pip", \ 153 | \n "python3-wheel", \ 154 | \n "python3-setuptools", \ 155 | \n "python3-datalad", \ 156 | \n "python3-filelock", \ 157 | \n "dcm2niix", \ 158 | \n "python3-pytest", \ 159 | \n "python3-nose", \ 160 | \n "python3-venv", \ 161 | \n "heudiconv=0.13.1-1~nd120+1" \ 162 | \n ] \ 163 | \n ], \ 164 | \n [ \ 165 | \n "run", \ 166 | \n ":" \ 167 | \n ], \ 168 | \n [ \ 169 | \n "run", \ 170 | \n "apt-get update && apt-get -y dist-upgrade" \ 171 | \n ], \ 172 | \n [ \ 173 | \n "run", \ 174 | \n "curl -sL https://deb.nodesource.com/setup_16.x | bash - " \ 175 | \n ], \ 176 | \n [ \ 177 | \n "install", \ 178 | \n [ \ 179 | \n "nodejs", \ 180 | \n "npm" \ 181 | \n ] \ 182 | \n ], \ 183 | \n [ \ 184 | \n "run", \ 185 | \n "npm install -g bids-validator@1.11.0" \ 186 | \n ], \ 187 | \n [ \ 188 | \n "run", \ 189 | \n "mkdir /afs /inbox" \ 190 | \n ], \ 
191 | \n [ \ 192 | \n "copy", \ 193 | \n [ \ 194 | \n "bin/reproin", \ 195 | \n "/usr/local/bin/reproin" \ 196 | \n ] \ 197 | \n ], \ 198 | \n [ \ 199 | \n "run", \ 200 | \n "chmod a+rx /usr/local/bin/reproin" \ 201 | \n ], \ 202 | \n [ \ 203 | \n "user", \ 204 | \n "reproin" \ 205 | \n ], \ 206 | \n [ \ 207 | \n "entrypoint", \ 208 | \n "/usr/local/bin/reproin" \ 209 | \n ] \ 210 | \n ] \ 211 | \n}' > /neurodocker/neurodocker_specs.json 212 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016-2018 Center for Reproducible Neuroimaging Computation 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![DOI](https://zenodo.org/badge/120343858.svg)](https://zenodo.org/badge/latestdoi/120343858) 2 | 3 | # ReproIn 4 | 5 | This project is a part of the [ReproNim Center](http://ReproNim.org) 6 | suite of tools and frameworks. Its goal is to provide a 7 | turnkey flexible setup for automatic generation of shareable, 8 | version-controlled BIDS datasets from MR scanners. To not reinvent the wheel, 9 | all actual software development is largely done through contribution to 10 | existing software projects: 11 | 12 | - [HeuDiConv]: 13 | a flexible DICOM converter for organizing brain imaging data into structured 14 | directory layouts. 15 | ReproIn [heuristic] was developed and now is shipped within HeuDiConv, 16 | so it could be used independently of the ReproIn setup on any HeuDiConv 17 | installation (specify `-f reproin` to heudiconv call). 18 | - [DataLad]: 19 | a modular version control platform and distribution for both code and 20 | data. DataLad support was contributed to HeuDiConv, and could be 21 | enabled by adding `--datalad` option to the `heudiconv` call. 22 | 23 | ## Specification 24 | 25 | The header of the [heuristic] file describes details of the 26 | specification on how to organize and name study sequences at MR console. 27 | 28 | If you like to use a GUI for crafting the names, consider using [@NPACore's](https://github.com/NPACore) [ReproIn namer](https://npacore.github.io/reproin-namer/#) website. 
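
For orientation, scanner sequence names that follow this convention look roughly like the examples below. These are illustrative only; the heuristic header remains the authoritative grammar, i.e. a `seqtype[-label]` prefix followed by optional `_key-value` entities such as `_task-`, `_run-`, `_acq-`, or `_dir-`:

    anat-T1w
    func-bold_task-rest_run-01
    fmap-epi_dir-PA
    dwi_run-1
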
29 | 30 | ## Overall workflow 31 | 32 | Schematic description of the overall setup: 33 | 34 | ![Setup](docs/source/images/dbic-flow.png) 35 | 36 | **Note:** for your own setup, [dcm2niix](https://github.com/rordenlab/dcm2niix) 37 | [author](https://github.com/neurolabusc) 38 | [recommends](https://github.com/neurolabusc/dcm_qa_agfa) to avoid dcm4che and 39 | choose another PACS. 40 | 41 | ![Setup](docs/source/images/dbic-conversions.png) 42 | 43 | ## Tutorial/HOWTO 44 | 45 | ### Data collection 46 | 47 | #### Making your sequence compatible with ReproIn heuristic 48 | 49 | - [Walkthrough #1](docs/walkthrough-1.md): guides you through 50 | ReproIn approach to organizing exam cards and managing canceled runs/sessions 51 | on Siemens scanner(s) 52 | 53 | #### Renaming sequences to conform the specification needed by ReproIn 54 | 55 | TODO: Describe how sequences could be renamed per study by creating a derived 56 | heuristic 57 | 58 | ### Conversion 59 | 60 | 1. Install [HeuDiConv] and [DataLad]: e.g. 61 | `apt-get update; apt-get install heudiconv datalad` in any NeuroDebian environment. 62 | If you do not have one, you could get either of 63 | - [NeuroDebian Virtual Machine](http://neuro.debian.net/vm.html) 64 | - ReproIn Docker image: `docker run -it --rm -v $PWD:$PWD repronim/reproin` 65 | - ReproIn Singularity image: you can either 66 | - convert from the docker image: `singularity pull docker://repronim/reproin` 67 | - download the most recent version from 68 | http://datasets.datalad.org/?dir=/repronim/containers/images/repronim 69 | which is a DataLad dataset which you can install via `datalad install ///repronim/containers` 70 | (see/subscribe https://github.com/ReproNim/reproin/issues/64 71 | for HOWTO setup YODA style dataset) 72 | 2. Collect a subject/session (or multiple of them) while placing and 73 | naming sequences in the scanner following the [specification]. 74 | But for now we will assume that you have no such dataset yet, and 75 | want to try on phantom data: 76 | 77 | datalad install -J3 -r -g ///dicoms/dartmouth-phantoms/bids_test4-20161014 78 | 79 | to get all subdatasets recursively, while getting the data as well 80 | in parallel 3 streams. 81 | This dataset is a sample of multi-session acquisition with anatomicals and 82 | functional sequences on a friendly phantom impersonating two different 83 | subjects (note: fieldmaps were deficient, without magnitude images). 84 | You could also try other datasets such as [///dbic/QA] 85 | 86 | 3. We are ready to convert all the data at once (heudiconv will sort 87 | into accessions) or one accession at a time. 88 | The recommended invocation for the heudiconv is 89 | 90 | heudiconv -f reproin --bids --datalad -o OUTPUT --files INPUT 91 | 92 | to convert all found in `INPUT` DICOMs and place then within the 93 | hierarchy of DataLad datasets rooted at `OUTPUT`. So we will start 94 | with a single accession of `phantom-1/` 95 | 96 | heudiconv -f reproin --bids --datalad -o OUTPUT --files bids_test4-20161014/phantom-1 97 | 98 | and inspect the result under OUTPUT, probably best with `datalad ls` 99 | command: 100 | 101 | ... WiP ... 102 | 103 | 104 | 105 | #### HeuDiConv options to overload autodetected variables: 106 | 107 | - `--subject` 108 | - `--session` 109 | - `--locator` 110 | 111 | 112 | 113 | ## Sample converted datasets 114 | 115 | You could find sample datasets with original DICOMs 116 | 117 | - [///dbic/QA] is a publicly 118 | available DataLad dataset with historical data on QA scans from DBIC. 
119 | You could use DICOM tarballs under `sourcedata/` for your sample 120 | conversions. 121 | TODO: add information from which date it is with scout DICOMs having 122 | session identifier 123 | - [///dicoms/dartmouth-phantoms](http://datasets.datalad.org/?dir=/dicoms/dartmouth-phantoms) 124 | provides a collection of datasets acquired at [DBIC] to establish 125 | ReproIn specification. Some earlier accessions might not be following 126 | the specification. 127 | [bids_test4-20161014](http://datasets.datalad.org/?dir=/dicoms/dartmouth-phantoms/bids_test4-20161014) 128 | provides a basic example of multi-subject and multi-session acquisition. 129 | 130 | ## Containers/Images etc 131 | 132 | This repository provides a [Singularity](./Singularity) environment 133 | definition file used to generate a complete environment needed to run 134 | a conversion. But also, since all work is integrated within the 135 | tools, any environment providing them would suffice, such as 136 | [NeuroDebian](https://neuro.debian.net) docker or Singularity images, virtual appliances, and 137 | other Debian-based systems with NeuroDebian repositories configured, 138 | which would provide all necessary for ReproIn setup components. 139 | 140 | ## Getting started from scratch 141 | 142 | ### Setup environment 143 | 144 | reproin script relies on having datalad, datalad-containers, and singularity 145 | available. The simplest way to get them all is to install a conda 146 | distribution, e.g. miniforge ([link for 147 | amd64](https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh)), 148 | and setup the environment with all components installed: 149 | 150 | mamba create -n reproin -y datalad datalad-container singularity 151 | 152 | Note that in future sessions you will need to activate this environment: 153 | 154 | mamba activate reproin 155 | 156 | Then make sure you have your git configured. If `git config --list` does not 157 | include these entries, add (adjust to fit your persona) 158 | 159 | git config --global user.name "My Name" 160 | git config --global user.email "MyName@example.com" 161 | 162 | and install the ReproNim/containers 163 | 164 | datalad clone https://github.com/ReproNim/containers repronim-containers 165 | cd repronim-containers 166 | 167 | which would clone the dataset from GitHub and auto-enable datasets.datalad.org 168 | remote to actually get annexed content of the images. 169 | Now fetch the image for the most recent version of reproin from under images/repronim, e.g. 170 | 171 | datalad get images/repronim/repronim-reproin--0.13.1.sing 172 | cd .. 173 | 174 | ### "Install" reproin script 175 | 176 | The singularity image we fetched already comes with reproin installed inside, 177 | but to "drive" conversion we need to have `reproin` available in the base 178 | environment. Because we do not have it (yet) packaged for conda 179 | distribution, we will just clone this repository and gain access to the script: 180 | 181 | git clone https://github.com/ReproNim/reproin 182 | 183 | To avoid typing the full path to the `reproin` script, can do 184 | 185 | export "PATH=$PWD/reproin/bin/:$PATH" 186 | 187 | to place it in the PATH. 188 | 189 | NB. It is important ATM to not just `cp` that `reproin` script elsewhere 190 | because it relies on being able to find other resources made available in that 191 | repository (e.g., `cfg_reproin_bids.py`). 
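
As a quick sanity check that this clone is the copy being picked up (a minimal sketch; paths and version strings will differ on your system):

    which reproin    # should resolve to .../reproin/bin/reproin from the clone above
    reproin          # with no arguments it only reports the heudiconv and dcm2niix versions it finds
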
192 | 193 | ### "Configure" the reproin setup 194 | 195 | Currently `reproin` script hardcodes the path to DICOMS to reside under 196 | `/inbox/DICOM` and extracted lists and converted data to reside under 197 | `/inbox/BIDS`. 198 | It is possible to overload location for BIDS via `BIDS_DIR` env variable, so 199 | we can do e.g. 200 | 201 | export BIDS_DIR=$HOME/BIDS-demo 202 | 203 | and then let's create the top-level datalad dataset to contain all converted 204 | data, configuring to store text files in git rather than git-annex, 205 | 206 | datalad create -c text2git "$BIDS_DIR" 207 | 208 | ### Collect DICOMs listing 209 | 210 | ATM reproin container has an older version of the script, so to use newer version we would just bind mount our cloned script inside, 211 | 212 | singularity run -e -c \ 213 | --env BIDS_DIR=$BIDS_DIR \ 214 | -B $HOME/reproin/bin/reproin:/usr/local/bin/reproin \ 215 | -B /inbox/DICOM:/inbox/DICOM:ro \ 216 | -B $BIDS_DIR:$BIDS_DIR \ 217 | ~/repronim-containers/images/repronim/repronim-reproin--0.13.1.sing lists-update-study-shows 218 | 219 | which should output summary over the studies it found under /inbox/DICOM, e.g. 220 | 221 | dbic/QA: new=16 no studydir yet 222 | PI/Researcher/1110_SuperCool: new=12 no studydir yet 223 | 224 | and you should see a file appeared for the current year and month under `$BIDS_DIR/reproin/lists`. 225 | 226 | ### Create target dataset 227 | 228 | Now we can create "studydir" for the study of interest, e.g. 229 | 230 | reproin study-create dbic/QA 231 | 232 | which would 233 | 234 | - create target BIDS dataset within the hierarchy 235 | - install repronim/containers borrowing the image from the `~/repronim-containers` 236 | - rerun `study-show` to output summary over the current state like 237 | 238 | todo=4 done=0 /afs/.dbic.dartmouth.edu/usr/haxby/yoh/BIDS-demo/dbic/QA/.git/study-show.sh 2024-11-11 239 | 240 | ### Convert the dataset 241 | 242 | Go to the folder of the dataset, e.g. 243 | 244 | cd "$BIDS_DIR/dbic/QA" 245 | 246 | to see that `reproin` pre-setup everything needed to run conversion (`cat .datalad/config`). 247 | And now you should be able to run conversion for your study via "datalad-container" 248 | extension: 249 | 250 | datalad containers-run -n repronim-reproin study-convert dbic/QA 251 | 252 | 253 | ## Gotchas 254 | 255 | 256 | ## Complete setup at DBIC 257 | 258 | It relies on the hardcoded ATM in `reproin` locations and organization 259 | of DICOMs and location of where to keep converted BIDS datasets. 260 | 261 | - `/inbox/DICOM/{YEAR}/{MONTH}/{DAY}/A00{ACCESSION}` 262 | - `/inbox/BIDS/{PI}/{RESEARCHER}/{ID}_{name}/` 263 | 264 | ### CRON job 265 | 266 | ``` 267 | # m h dom mon dow command 268 | 55 */12 * * * $HOME/reproin-env-0.9.0 -c '~/proj/reproin/bin/reproin lists-update-study-shows' && curl -fsS -m 10 --retry 5 -o /dev/null https://hc-ping.com/61dfdedd-SENSORED 269 | ``` 270 | 271 | NB: that `curl` at the end is to make use of https://healthchecks.io 272 | to ensure that we do have CRON job ran as we expected. 273 | 274 | ATM we reuse a singularity environment based on reproin 0.9.0 produced from this repo and shipped within ReproNim/containers. 
For the completeness sake 275 | 276 | ```shell 277 | (reproin-3.8) [bids@rolando lists] > cat $HOME/reproin-env-0.9.0 278 | #!/bin/sh 279 | 280 | env -i /usr/local/bin/singularity exec -B /inbox -B /afs -H $HOME/singularity_home $(dirname $0)/reproin_0.9.0.simg /bin/bash "$@" 281 | ``` 282 | 283 | which produces emails with content like 284 | 285 | ``` 286 | Wager/Wager/1102_MedMap: new=92 todo=5 done=102 /inbox/BIDS/Wager/Wager/1102_MedMap/.git/study-show.sh 2023-03-30 287 | PI/Researcher/ID_name: new=32 no studydir yet 288 | Haxby/Jane/1073_MonkeyKingdom: new=4 todo=39 done=8 fixups=6 /inbox/BIDS/Haxby/Jane/1073_MonkeyKingdom/.git/study-show.sh 2023-03-30 289 | ``` 290 | 291 | where as you can see it updates on the status for each study which was scanned for from the 292 | beginning of the current month. And it ends with the pointer to `study-show.sh` script which 293 | would provide details on already converted or heudiconv line invocations for what yet to do. 294 | 295 | ### reproin study-create 296 | 297 | For the "no studydir yet" we need first to generate study dataset (and 298 | possibly all leading `PI/Researcher` super-datasets via 299 | 300 | ```shell 301 | reproin study-create PI/Researcher/ID_name 302 | ``` 303 | 304 | ### reproin study-convert 305 | 306 | Unless there are some warnings/conflicts (subject/session already 307 | converted, etc) are found, 308 | 309 | ```shell 310 | reproin study-convert PI/Researcher/ID_name 311 | ``` 312 | 313 | could be used to convert all new subject/sessions for that study. 314 | 315 | ### XNAT 316 | 317 | Anonymization or other scripts might obfuscate "Study Description" thus ruining 318 | "locator" assignment. See 319 | [issue #57](https://github.com/ReproNim/reproin/issues/57) for more information. 320 | 321 | ## TODOs/WiP/Related 322 | 323 | - [ ] add a pre-configured DICOM receiver for fully turnkey deployments 324 | - [ ] [heudiconv-monitor] to fully automate conversion of the incoming 325 | data 326 | - [ ] [BIDS dataset manipulation helper](https://github.com/INCF/bidsutils/issues/6) 327 | 328 | [HeuDiConv]: https://github.com/nipy/heudiconv 329 | [DataLad]: http://datalad.org 330 | [heuristic]: https://github.com/nipy/heudiconv/blob/master/heudiconv/heuristics/reproin.py 331 | [specification]: https://github.com/nipy/heudiconv/blob/master/heudiconv/heuristics/reproin.py 332 | [heudiconv-monitor]: https://github.com/nipy/heudiconv/blob/master/heudiconv/cli/monitor.py 333 | [DBIC]: http://dbic.dartmouth.edu 334 | [///dbic/QA]: http://datasets.datalad.org/?dir=/dbic/QA 335 | -------------------------------------------------------------------------------- /Singularity: -------------------------------------------------------------------------------- 1 | # Generated by: Neurodocker version 0.7.0+0.gdc97516.dirty 2 | # Timestamp: 2023/06/05 22:47:55 UTC 3 | # 4 | # Thank you for using Neurodocker. 
If you discover any issues 5 | # or ways to improve this software, please submit an issue or 6 | # pull request on our GitHub repository: 7 | # 8 | # https://github.com/ReproNim/neurodocker 9 | 10 | Bootstrap: docker 11 | From: neurodebian:bookworm 12 | 13 | %post 14 | apt-get update -qq 15 | apt-get install -y -q --no-install-recommends neurodebian-freeze 16 | nd_freeze 20230604 17 | apt-get clean 18 | rm -rf /var/lib/apt/lists/* 19 | 20 | su - root 21 | 22 | export ND_ENTRYPOINT="/neurodocker/startup.sh" 23 | apt-get update -qq 24 | apt-get install -y -q --no-install-recommends \ 25 | apt-utils \ 26 | bzip2 \ 27 | ca-certificates \ 28 | curl \ 29 | locales \ 30 | unzip 31 | apt-get clean 32 | rm -rf /var/lib/apt/lists/* 33 | sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen 34 | dpkg-reconfigure --frontend=noninteractive locales 35 | update-locale LANG="en_US.UTF-8" 36 | chmod 777 /opt && chmod a+s /opt 37 | mkdir -p /neurodocker 38 | if [ ! -f "$ND_ENTRYPOINT" ]; then 39 | echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" 40 | echo 'set -e' >> "$ND_ENTRYPOINT" 41 | echo 'export USER="${USER:=`whoami`}"' >> "$ND_ENTRYPOINT" 42 | echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; 43 | fi 44 | chmod -R 777 /neurodocker && chmod a+s /neurodocker 45 | 46 | apt-get update -qq 47 | apt-get install -y -q --no-install-recommends \ 48 | vim \ 49 | wget \ 50 | strace \ 51 | time \ 52 | ncdu \ 53 | gnupg \ 54 | curl \ 55 | procps \ 56 | datalad \ 57 | pigz \ 58 | less \ 59 | tree \ 60 | git-annex \ 61 | python3-nibabel \ 62 | python3-nipype \ 63 | virtualenv \ 64 | shellcheck \ 65 | python3-dcmstack \ 66 | python3-funcsigs \ 67 | python3-etelemetry \ 68 | python3-pytest \ 69 | dcmtk \ 70 | python3-pip \ 71 | python3-wheel \ 72 | python3-setuptools \ 73 | python3-datalad \ 74 | python3-filelock \ 75 | dcm2niix \ 76 | python3-pytest \ 77 | python3-nose \ 78 | python3-venv \ 79 | heudiconv=0.13.1-1~nd120+1 80 | apt-get clean 81 | rm -rf /var/lib/apt/lists/* 82 | 83 | : 84 | 85 | apt-get update && apt-get -y dist-upgrade 86 | 87 | curl -sL https://deb.nodesource.com/setup_16.x | bash - 88 | 89 | apt-get update -qq 90 | apt-get install -y -q --no-install-recommends \ 91 | nodejs \ 92 | npm 93 | apt-get clean 94 | rm -rf /var/lib/apt/lists/* 95 | 96 | npm install -g bids-validator@1.11.0 97 | 98 | mkdir /afs /inbox 99 | 100 | chmod a+rx /usr/local/bin/reproin 101 | 102 | test "$(getent passwd reproin)" || useradd --no-user-group --create-home --shell /bin/bash reproin 103 | su - reproin 104 | 105 | echo '{ 106 | \n "pkg_manager": "apt", 107 | \n "instructions": [ 108 | \n [ 109 | \n "base", 110 | \n "neurodebian:bookworm" 111 | \n ], 112 | \n [ 113 | \n "ndfreeze", 114 | \n { 115 | \n "date": "20230604" 116 | \n } 117 | \n ], 118 | \n [ 119 | \n "user", 120 | \n "root" 121 | \n ], 122 | \n [ 123 | \n "_header", 124 | \n { 125 | \n "version": "generic", 126 | \n "method": "custom" 127 | \n } 128 | \n ], 129 | \n [ 130 | \n "install", 131 | \n [ 132 | \n "vim", 133 | \n "wget", 134 | \n "strace", 135 | \n "time", 136 | \n "ncdu", 137 | \n "gnupg", 138 | \n "curl", 139 | \n "procps", 140 | \n "datalad", 141 | \n "pigz", 142 | \n "less", 143 | \n "tree", 144 | \n "git-annex", 145 | \n "python3-nibabel", 146 | \n "python3-nipype", 147 | \n "virtualenv", 148 | \n "shellcheck", 149 | \n "python3-dcmstack", 150 | \n "python3-funcsigs", 151 | \n "python3-etelemetry", 152 | \n "python3-pytest", 153 | \n "dcmtk", 154 | \n "python3-pip", 155 | \n "python3-wheel", 156 | 
\n "python3-setuptools", 157 | \n "python3-datalad", 158 | \n "python3-filelock", 159 | \n "dcm2niix", 160 | \n "python3-pytest", 161 | \n "python3-nose", 162 | \n "python3-venv", 163 | \n "heudiconv=0.13.1-1~nd120+1" 164 | \n ] 165 | \n ], 166 | \n [ 167 | \n "run", 168 | \n ":" 169 | \n ], 170 | \n [ 171 | \n "run", 172 | \n "apt-get update && apt-get -y dist-upgrade" 173 | \n ], 174 | \n [ 175 | \n "run", 176 | \n "curl -sL https://deb.nodesource.com/setup_16.x | bash - " 177 | \n ], 178 | \n [ 179 | \n "install", 180 | \n [ 181 | \n "nodejs", 182 | \n "npm" 183 | \n ] 184 | \n ], 185 | \n [ 186 | \n "run", 187 | \n "npm install -g bids-validator@1.11.0" 188 | \n ], 189 | \n [ 190 | \n "run", 191 | \n "mkdir /afs /inbox" 192 | \n ], 193 | \n [ 194 | \n "copy", 195 | \n [ 196 | \n "bin/reproin", 197 | \n "/usr/local/bin/reproin" 198 | \n ] 199 | \n ], 200 | \n [ 201 | \n "run", 202 | \n "chmod a+rx /usr/local/bin/reproin" 203 | \n ], 204 | \n [ 205 | \n "user", 206 | \n "reproin" 207 | \n ], 208 | \n [ 209 | \n "entrypoint", 210 | \n "/usr/local/bin/reproin \"$@\"" 211 | \n ] 212 | \n ] 213 | \n}' > /neurodocker/neurodocker_specs.json 214 | 215 | %environment 216 | export LANG="en_US.UTF-8" 217 | export LC_ALL="en_US.UTF-8" 218 | export ND_ENTRYPOINT="/neurodocker/startup.sh" 219 | 220 | %files 221 | bin/reproin /usr/local/bin/reproin 222 | 223 | %runscript 224 | /usr/local/bin/reproin "$@" 225 | -------------------------------------------------------------------------------- /bin/reproin: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # A helper to assist using heudiconv with reproin heuristic in a (proto)typical 4 | # setup. 5 | # 6 | # ATM paths and setup is DBIC specific. TODO - make it use a config file 7 | # 8 | set -eu 9 | 10 | # Causes too many pipes overall fail where we actually would just expect 11 | # empty result, so disabling for now 12 | # -o pipefail 13 | 14 | export PS4='+${LINENO}: ' 15 | 16 | function infodir_sourcepath() { 17 | awk -F'"' '/\//{print $2}' .heudiconv/$1/info/filegroup*.json | python3 -c 'import os.path as op, sys; print(op.commonpath(sys.stdin.readlines()))' 18 | } 19 | 20 | # all outputs will be prepended with # so we could just copy/paste it all as a script 21 | 22 | function info() { 23 | echo "# $@" 24 | } 25 | 26 | function error() { 27 | echo "# ERROR: $@" 28 | } 29 | 30 | function debug() { 31 | # echo "# DEBUG: $@" 32 | : 33 | } 34 | 35 | function wc_hits() { 36 | { grep "$1" "$2" || : ; } | wc -l 37 | } 38 | 39 | 40 | function setup_containers() { 41 | if [ -e code/containers ]; then 42 | error "There is already code/containers" 43 | exit 1 44 | fi 45 | mkdir -p code 46 | datalad clone -d . 
--reckless=ephemeral "$local_containers" code/containers 47 | # but make stored url to point to public resource 48 | # TODO -- take it from the ephemeral default location 49 | git config --file ./.gitmodules submodule."code/containers".url https://datasets.datalad.org/repronim/containers/.git 50 | git config --file ./.gitmodules submodule."code/containers".datalad-url https://datasets.datalad.org/repronim/containers/.git 51 | ( 52 | cd code/containers/ 53 | scripts/freeze_versions --save-dataset=../../ repronim-reproin 54 | ) 55 | # add bind mounts since we managed to contain it all nicely due to use 56 | # of datalad to manage/create hierarchy within heudiconv 57 | # TODO: make it so inside would not rely/really need hierarchy but also gets input data 58 | if [ -n "$BIDS_DIR" ]; then 59 | # if custom folder is used, pass that env and use it to bind-mount 60 | bids_mount='-B "$BIDS_DIR" --env "BIDS_DIR=$BIDS_DIR"' 61 | else 62 | bids_mount='-B "$bidsdir"' 63 | fi 64 | cfg=datalad."containers.repronim-reproin".cmdexec ; git config -f .datalad/config "${cfg}" | sed -e "s,{img},-B '$dicomdir' $bids_mount {img},g" | tr -d '\n' | xargs -0 git config -f .datalad/config ${cfg} 65 | datalad save -m "Saving tune ups to enable using the embedded container with reproin" .gitmodules .datalad/config 66 | } 67 | 68 | function setup_devel_reproin() { 69 | # overload reproin in container to use the one from github since we have not released/placed into container yet 70 | if [ ! -e code/containers ]; then 71 | error "Must have setup_containers already" 72 | exit 1 73 | fi 74 | datalad clone -d . https://github.com/ReproNim/reproin code/reproin 75 | cfg=datalad."containers.repronim-reproin".cmdexec 76 | git config -f .datalad/config "${cfg}" | sed -e 's,{img} {cmd}.*,-B {img_dspath}/code/reproin/bin/reproin:/usr/local/bin/reproin {img} /usr/local/bin/reproin {cmd},g' -e 's, run , exec ,g' | tr -d '\n' | xargs -0 git config -f .datalad/config ${cfg} 77 | datalad save -m "Bundle/use development version of reproin script for now inside the container" .gitmodules .datalad/config 78 | } 79 | 80 | # 81 | # A Master run script for a study 82 | # 83 | dcm2niix_version=$(dcm2niix -v|grep '^v' || { dcm2niix -v || : ; } |sed -n -e '/dcm2niiX version v/s,.*X version ,,gp'| sed -e 's,[ \t].*,,g') 84 | if [ "$#" = 0 ]; then 85 | echo -n "heudiconv: " 86 | heudiconv --version 87 | echo "dcm2niix: $dcm2niix_version" 88 | exit 0 89 | fi 90 | 91 | dicomdir="${DICOM_DIR:-/inbox/DICOM}" 92 | bidsdir="${BIDS_DIR:-/inbox/BIDS}" 93 | listdir="$bidsdir/reproin/lists" 94 | # TODO: make it possible to not rely on local clone or prepopulate if not present?! 95 | local_containers="${REPRONIM_CONTAINERS:-$HOME/repronim-containers}" 96 | 97 | # To achieve those, we really need to run this script for "lists-update" 98 | # outside of any container and involve some configured here container for 99 | # invoking heudiconv, and then when doing study-create or study-convert 100 | # or any other command, we would use the container registered in that dataset. 
101 | do_auto_create_ds="yes" 102 | do_auto_conversion="yes" # TODO: not implemented yet fully 103 | 104 | heuristic="reproin" 105 | 106 | heudiconvdir=".heudiconv" 107 | skipfile=".heudiconv/sid-skip" # TODO: check what used now 108 | vallog=".heudiconv/bids-validator.log" 109 | valconfig=".bids-validator-config.json" 110 | # common prefix for heudiconv invocation (reconversion might differ) 111 | heudiconvcmd="heudiconv -c dcm2niix --bids -o $bidsdir -g accession_number" 112 | 113 | self=$(realpath "$0") 114 | selftop=$(dirname "$self" | xargs dirname) 115 | 116 | action="$1" 117 | 118 | # early 119 | case "$action" in 120 | lists-update) 121 | # TODO: get Y and M as args 122 | Y=${2:-`date +%Y`}; 123 | M=${3:-`date +%m`}; 124 | D=${4:-*}; 125 | [ "$D" = "*" ] && DD=xx || DD=$D 126 | mkdir -p "$listdir" 127 | listfile="$listdir/$Y${M}${DD}.txt" 128 | echo "INFO: updating $listfile" >&2 129 | # TODO: make more robust here. Go through each accession folder, 130 | # and ensure we have permission (now we seems just skip), then do not 131 | # rely on seeing "scout" (already had to relax to "cout"). Ideally should be a sample 132 | # of few DICOMs, e.g. first/last ones among subfolders to possibly catch different 133 | # studies dumped into the same folder. Complain then! 134 | eval "$heudiconvcmd -f $heuristic --command ls --files $dicomdir/$Y/$M/$D/*/00*cout*" >| "$listfile" 135 | exit 0 136 | ;; 137 | lists-check) 138 | # TODO: add an option to just update when discrepnancy found 139 | doit=1 140 | Y=${2:-20??}; 141 | M=${3:-??}; 142 | declare -A todo 143 | declare -A groups 144 | for d in $dicomdir/$Y/$M; do 145 | #D=$(basename $d) 146 | M=$(basename $d) 147 | Y=$(dirname $d | xargs basename) 148 | if [ $Y = 2016 ]; then 149 | # skip that early one 150 | continue 151 | fi 152 | listfile="$listdir/$Y${M}xx.txt" 153 | if [ ! -e $listfile ]; then 154 | echo "I: no $listfile" 155 | if /bin/ls -d $dicomdir/$Y/$M/*/*/00*cout*; then 156 | echo "E: there were legitimatish accession folders with scouts!" 157 | todo["${M}_$Y"]="scouts" 158 | fi 159 | continue 160 | fi 161 | missing= 162 | for a in $dicomdir/$Y/$M/*/*; do 163 | # echo "$a" 164 | case "$a" in 165 | *_backup) 166 | if [ -e "${a%_backup}" ] ; then 167 | echo "skip odd backup $a for which original also exists"; 168 | continue 169 | fi 170 | ;; 171 | esac 172 | # ATM we react only to ones having /00*cout* 173 | scouts=$(/bin/ls -1 $a/00*cout* 2>&1 | head -n 1) 174 | if echo "$scouts" | grep -q "No such file or directory"; then 175 | debug "no scouts under $a" 176 | # ls -l $a 177 | continue 178 | fi 179 | if ! grep -q "^$a" "$listfile"; then 180 | debug "scouts: >$scouts<" 181 | if echo "$scouts" | grep -qi 'permission'; then 182 | # no need to try to update list if we have permissions issue 183 | # ATM 184 | reason="permissions? 
$(ls -dl $a)" 185 | groups[$(stat -c '%G' "$a")]+="$a " 186 | else 187 | reason="unknown" 188 | missing+="$a" 189 | fi 190 | echo "$a is missing: $reason" 191 | fi 192 | done 193 | if [ -n "$missing" ]; then 194 | todo["${M}_$Y"]="missing" 195 | fi 196 | done 197 | exit_code=0 198 | if [[ -n "${todo[@]}" ]]; then 199 | echo "List of TODOs:" 200 | for MY in "${!todo[@]}"; do 201 | Y=${MY#*_} 202 | M=${MY%_*} 203 | cmd="'$0' lists-update $Y $M" 204 | if [ -n "$doit" ]; then 205 | $cmd 206 | else 207 | echo $cmd 208 | fi 209 | done 210 | ((exit_code += 1)) 211 | fi 212 | if [[ -n "${groups[@]}" ]]; then 213 | echo "List of groups for which permissions fail: ${!groups[@]}" 214 | for g in "${!groups[@]}"; do 215 | echo " $g: ${groups[$g]}" 216 | done 217 | ((exit_code += 2)) 218 | fi 219 | exit $exit_code 220 | ;; 221 | lists-update-summary) 222 | # to be used on the stderr output from heudiconv --command ls or lists-update ran by cron job 223 | sed -ne "/StudySes/s,.*locator='\([^']*\)'.*,\1,gp" | sort | uniq -c | sort -nr 224 | exit 0 225 | ;; 226 | lists-update-study-shows) 227 | # to be used on the stderr output from heudiconv --command ls or lists-update ran by cron job 228 | # will also produce study-show on those studies 229 | $0 lists-update 2>&1 \ 230 | | $0 lists-update-summary \ 231 | | while read n study; do 232 | echo -n "$study: new=$n " 233 | $0 study-show-save $study 234 | done 235 | exit 0 236 | ;; 237 | reconvert-sourcedata) 238 | # TODO: add option to reconvert from the original location as extracting from available files 239 | shift; 240 | if [ ! -e dataset_description.json ]; then 241 | error "Run from the top directory of BIDS dataset" 242 | exit 1 243 | fi 244 | # Could be pointed to source data - we need to strip it for further analyses, 245 | # and we will re-preprefix with sourcedata/ 246 | paths=( ) 247 | for f in "$@"; do 248 | paths=( ${paths[@]} "${f#sourcedata/}" ) 249 | done 250 | # Sanity check - ensure that all specified folders have *_scans.tsv 251 | for f in "${paths[@]}"; do 252 | if ! /bin/ls "$f"/*_scans.tsv >& /dev/null; then 253 | error "$f lacks a _scans.tsv file. Make sure to point to sub-[/ses-] folders" 254 | exit 1; 255 | fi 256 | if [ ! -e sourcedata/$f ]; then 257 | error "$f lacks entry under sourcedata/" 258 | exit 1; 259 | fi 260 | done 261 | # TODO: deduplicate 262 | if [ -e ".heudiconv/heuristic.py" ]; then 263 | info "Will use study specific heuristic" 264 | heuristic=".heudiconv/heuristic.py" 265 | fi 266 | 267 | info "Will reconvert $# subject session folders" 268 | for f in "${paths[@]}"; do 269 | case ${f} in 270 | sub-*) sub=${f%/*}; sub=${sub#sub-};; 271 | *) error "Folder is not sub-"; exit 1; 272 | esac 273 | opts=( -s "$sub" ) 274 | heudiconv_f=".heudiconv/$sub" 275 | ses=${f#*/} 276 | case $ses in 277 | ses-*) opts=( ${opts[@]} -ss ${ses#ses-} ); 278 | heudiconv_f+="/$ses";; 279 | esac 280 | rm_f=( $(/bin/ls -1d $f/* | grep -v physio) ) # we might have some physio data which we do not convert 281 | # and need to move physio back from moved aside .src before removing 282 | # 283 | # keep invocation across lines for readability and at the beginning of the line to avoid 284 | # all the \t etc. Trailing slashes ensure absence of not needed \n in the command record. 285 | datalad run -m "Reconvert $f" --input sourcedata/$f bash -x -c \ 286 | "rm -rf ${rm_f[*]} $heudiconv_f && \ 287 | mkdir -p "$heudiconv_f" && \ 288 | mv sourcedata/$f sourcedata/$f.src && \ 289 | heudiconv -f $heuristic -c dcm2niix --bids -o . -l . 
${opts[*]} --files sourcedata/$f.src -g all >& $heudiconv_f/heudiconv.log && \ 290 | if [ -e sourcedata/$f.src/physio ] ; then mv sourcedata/$f.src/physio sourcedata/$f/; fi && \ 291 | rm -rf sourcedata/$f.src" 292 | done 293 | exit 0 294 | ;; 295 | study-show-save) 296 | # to be used on the output from heudiconv --command ls ran by cron job 297 | study="$2" 298 | studydir="$bidsdir/$study" 299 | do_study_show=yes 300 | if [ ! -e "$studydir/.git" ]; then 301 | if [ "$do_auto_create_ds" == "yes" ]; then 302 | # TODO: later -- add duct, and redirect away since it would enup in email 303 | echo "creating study directory" 304 | $0 study-create "$study" 305 | fi 306 | fi 307 | if [ ! -e "$studydir/.git" ]; then 308 | echo "no studydir yet" 309 | do_study_show=no 310 | fi 311 | if [ "$do_study_show" == "yes" ]; then 312 | studyshow="$studydir/.git/study-show" 313 | $0 study-show "$study" >| "$studyshow.sh" 2>| "$studyshow.stderr" 314 | [ -s "$studyshow.stderr" ] || rm -f "$studyshow.stderr" 315 | $0 study-show-summary "$study" || echo "study-show-summary $study errored out, continuing" >&2 316 | fi 317 | exit 0 318 | ;; 319 | study-show-summary) 320 | # to be used in conjunction with lists-update-summary-shows or just by itself 321 | studyshow="$bidsdir/$2/.git/study-show" 322 | todo=$(wc_hits '^heudiconv ' "$studyshow.sh") 323 | warnings=$(wc_hits 'WARNING: ' "$studyshow.sh") 324 | fixups=$(wc_hits '!!!' "$studyshow.sh") 325 | don=$(wc_hits '#.* done ' "$studyshow.sh") 326 | echo -n "todo=$todo done=$don" 327 | if [ $fixups -gt 0 ]; then 328 | echo -n " fixups=$fixups" 329 | fi 330 | if [ $warnings -gt 0 ]; then 331 | echo -n " warnings=$warnings" 332 | fi 333 | if [ -s "$studyshow.stderr" ]; then 334 | echo -n " stderrs=$(wc -l $studyshow.stderr)" 335 | fi 336 | date_modified=$(stat "$studyshow.sh" | awk '/^Modify/{print $2;}') 337 | echo " $studyshow.sh $date_modified" 338 | exit 0 339 | ;; 340 | setup-containers) 341 | # just operates in a current folder 342 | setup_containers 343 | exit 0 344 | ;; 345 | setup-devel-reproin) 346 | # just operates in a current folder 347 | setup_devel_reproin 348 | exit 0 349 | ;; 350 | esac 351 | 352 | # The rest of the commands operate on a given study 353 | study=${2#*:} 354 | 355 | # TODO: Add option to remap -- record those remappings somehow! 356 | # For now allowing for ostudy:study mapping in cmdline 357 | ostudy="${2%%:*}" 358 | # TODO: Add option to limit by year/month 359 | # TODO: Add option to "hardcode" add session 360 | 361 | studydir="$bidsdir/$study" 362 | 363 | if [ ! -e "$studydir" ]; then 364 | echo "I: no study directory yet - $studydir" 365 | cd /tmp # to be safe/avoid side-effects 366 | else 367 | cd "$studydir" # implies it exists!!! TODO 368 | fi 369 | 370 | if [ ! -e "$valconfig" ]; then 371 | valconfig=~/heudiconv/heudiconv/heuristics/reproin_validator.cfg 372 | fi 373 | 374 | # Track already seen 375 | subses_ids="" 376 | 377 | case "$action" in 378 | study-create) 379 | if [ -e "$studydir" ]; then 380 | echo "$study already exists, nothing todo" 381 | exit 1; 382 | fi 383 | if [ ! -e "$bidsdir" ] ; then 384 | datalad create -c text2git "$bidsdir" 385 | fi 386 | cd "$bidsdir" 387 | echo "$study" | tr '/' '\n' \ 388 | | while read d; do 389 | if [ ! -e "$d" ] ; then 390 | if [ "$PWD/$d" == "$studydir" ]; then 391 | datalad create --fake-dates -d . "$d" 392 | else 393 | datalad create -c text2git -d . "$d" 394 | fi 395 | if ! 
grep -q "\.nfs" "$d/.gitignore" 2>/dev/null; then 396 | echo ".nfs*" >> "$d/.gitignore" 397 | datalad save -d "$d" -m "ignore .nfs* files" "$d/.gitignore" 398 | fi 399 | fi 400 | cd "$d" 401 | done 402 | cd "$studydir" 403 | # TODO: this would not work within container where we install it under /usr/local/bin 404 | # So think better 405 | datalad -c datalad.locations.user-procedures="$selftop/resources/" run-procedure cfg_reproin_bids 406 | git tag -m "The beginning" 0.0.0 407 | # after creating a dataset tag it with 0.0.0 408 | # This would allow for a sensible git describe output 409 | 410 | # Embrace containerization setup 411 | setup_containers 412 | setup_devel_reproin 413 | $0 study-show-save "$study" 414 | ;; 415 | study-remove-subject) 416 | # TODO: provision for possibly having a specific session not entire subject 417 | git rm -r sub-$sid sourcedata/sub-$sid .heudiconv/$sid 418 | echo "not implemented" 419 | exit 1 420 | ;; 421 | study-remove-subject2redo) 422 | echo "not implemented" 423 | exit 1 424 | # figure out where came from 425 | $0 study-remove-subject 426 | # add original location to skip file 427 | ;; 428 | study-accession-skip) 429 | if [ -L "$skipfile" ]; then 430 | ( 431 | cd "$(dirname $skipfile)" 432 | git annex unlock "$(basename $skipfile)" 433 | ) 434 | fi 435 | echo "$3 ${4:-}" >> "$skipfile" 436 | git annex add "$skipfile" 437 | datalad save -d. -m 'skip an accession' "$skipfile" 438 | ;; 439 | study-show|study-convert) 440 | # TODO: make it do a pass and verify that no duplicate/existing session+subject in 441 | # what to be converted. We might need to remove some older one or mark some as 442 | # to not do 443 | 444 | # Check that version of the dcm2niix is the same 445 | dcm2niixs_study=( $(git grep -h ConversionSoftwareVersion | awk '{print $2;}' | sed -e 's,[",],,g' | sort | uniq) ) 446 | if [ -n "${dcm2niixs_study:-}" ]; then 447 | if [[ ${#dcm2niixs_study[@]} != 1 ]]; then 448 | echo "W: Study already used multiple versions of dcm2niix: ${dcm2niixs_study[@]}" 449 | fi 450 | dcm2niix_study=${dcm2niixs_study[-1]} 451 | if [ ! 
-z "$dcm2niix_study" ] && [ "$dcm2niix_study" != "$dcm2niix_version" ]; then 452 | msg="Wrong environment - dcm2niix $dcm2niix_version when study used $dcm2niix_study" 453 | case "$action" in 454 | study-convert) 455 | if [ -z "${REPROIN_ALLOW_WRONG_ENVIRONMENT:-}" ]; then 456 | echo "E: $msg" >&2 457 | exit 1 458 | fi 459 | esac 460 | echo "W: $msg" 461 | fi 462 | fi 463 | 464 | 465 | targetsub="${3:-}" 466 | if [ -e "$heudiconvdir/heuristic.py" ]; then 467 | info "Will use study specific heuristic" 468 | heuristic=".heudiconv/heuristic.py" 469 | fi 470 | if [ -e "$heudiconvdir/anon-cmd" ]; then 471 | info "Will use study specific anon-cmd" 472 | heudiconvcmd="$heudiconvcmd --anon-cmd '$heudiconvdir/anon-cmd'" 473 | fi 474 | 475 | do_conversion="$do_auto_conversion" 476 | # TODO: use datalad run/containers-run 477 | grep -h -B1 "$ostudy[\"']" $listdir/202[3-9]*xx.txt \ 478 | | grep 'DICOM.*/\(qa\|A\|202[0-9]\)' \ 479 | | sed -e 's,/[^/]*$,,g' \ 480 | | sort \ 481 | | uniq \ 482 | | while read td; do 483 | #debug "TD: <$td>" 484 | # TODO: too ad-hoc, do properly 485 | subses=$(grep -h -A1 $td $listdir/*xx.txt | awk '/StudySess/{print $2, $3}' | uniq | head -n 1); 486 | sub=$(echo "$subses" | sed -e "s,.*subject=',,g" -e "s,'),,g") 487 | ses=$(echo "$subses" | sed -e "s,.*session='*,,g" -e "s/'*, .*//g" -e "s,None,,g") 488 | 489 | if grep -q -R $td "$skipfile" 2>/dev/null; then 490 | info "$td skip # $subses" 491 | continue 492 | fi 493 | if [ -z "$sub" ]; then 494 | error "Empty subject for $td" 495 | exit 1 496 | fi 497 | if [ -n "$targetsub" ] && [ "${sub}" != "$targetsub" ]; then 498 | info "Skipping $subses since ${sub} != $targetsub" 499 | continue 500 | fi 501 | 502 | # We must take anonymized $sub if there is anon-cmd!!! 503 | sub_orig="$sub" 504 | if [ -e "$heudiconvdir/anon-cmd" ]; then 505 | sub_=$("$heudiconvdir/anon-cmd" "$sub" || echo "FAILED") 506 | if [ $sub_ = "FAILED" ]; then 507 | error "failed to get anonymized ID for $sub, skipping" 508 | continue 509 | fi 510 | sub="$sub_" 511 | fi 512 | # Overload subses with a bit neater version reflecting possible anonymization 513 | subses="$sub " 514 | if [ "$sub_orig" != "$sub" ]; then subses+="($sub_orig) "; fi 515 | subses+="session=$ses" 516 | 517 | # make one which uses CLI options so could be just uncommented to be used 518 | subses_opts="-s '$sub'" 519 | if [ -n "$ses" ]; then 520 | subses_opts+=" --ses '$ses'" 521 | fi 522 | 523 | subses_id="$sub" # shortish and only if there ses would have /ses- 524 | if [ ! -z "$ses" ]; then 525 | subses_id+=/ses-$ses 526 | fi 527 | subsesheudiconvdir=$heudiconvdir/$subses_id 528 | 529 | # Analyze what actually was converted into that sub/ses or where that td went into 530 | srcdir=$(infodir_sourcepath "$subses_id" 2>/dev/null || :) 531 | if [ -n "$srcdir" ] && [ "$srcdir" != "$td" ]; then 532 | alert=" !!! came from $srcdir" 533 | else 534 | alert="" 535 | fi 536 | td_found_in=$( 537 | { grep -l -R "$td" "$heudiconvdir"/* 2>/dev/null \ 538 | | grep /info/ \ 539 | | sed -E 's,.*\.heudiconv/(.*)/info/.*,\1,g' \ 540 | | sort \ 541 | | uniq \ 542 | | tr '\n' ' ' \ 543 | | sed -e 's, *$,,g' \ 544 | || : ; } 545 | ) 546 | if [ -n "$td_found_in" ] && [ "$td_found_in" != "$subses_id" ] ; then 547 | alert=" !!! 
was converted into $td_found_in$alert" 548 | # those were already converted to other, not what is in DICOM 549 | # so we will not memorize what we see in dicom but rather what was 550 | # converted into 551 | subses_ids+=" $td_found_in " 552 | else 553 | if [[ " $subses_ids " =~ " ${subses_id} " ]]; then 554 | do_conversion=no 555 | info "WARNING: $subses_id already known or converted" 556 | fi 557 | subses_ids+=" $subses_id" 558 | fi 559 | 560 | 561 | 562 | # so we have converted something into that subject, might be different data 563 | if /bin/ls -d "$subsesheudiconvdir/info" 2>/dev/null | grep -q . ; then 564 | info "$td done $subses $alert" 565 | continue 566 | fi 567 | # we converted but into another subses? 568 | if [ -n "$td_found_in" ]; then 569 | info "$td done $subses $alert" 570 | continue 571 | fi 572 | 573 | cmd="$heudiconvcmd -f $heuristic -l $study --files $td" 574 | case "$action" in 575 | study-show) 576 | echo "$cmd # $subses_opts" 577 | ;; 578 | study-convert) 579 | info "Converting $subses ($studydir) [$td]" 580 | mkdir -p "$subsesheudiconvdir" 581 | logfile="$subsesheudiconvdir/heudiconv.log" 582 | if ! eval "$cmd" > "$logfile" 2>&1; then 583 | error "conversion script exited with $?. Please check details in $studydir/$logfile." 584 | echo " The tail of it is:" 585 | tail "$logfile" 586 | exit 1 587 | fi 588 | 589 | info "Running validator now" 590 | "$self" "validator-save" "$study" || "echo validator failed; check $vallog" 591 | if [ -e "$vallog" ]; then 592 | cp --reflink=auto "$vallog" "$subsesheudiconvdir/bids-validator.log" 593 | fi 594 | datalad save -m "converted subject $sub session $ses" -r . .heudiconv .heudiconv/* 595 | ;; 596 | esac 597 | done 598 | if [ "$action" = "study-convert" ]; then 599 | "$self" "validator-summary" "$study" || echo "WARNING: Failed to provide validator summary: $?" 600 | fi 601 | 602 | ;; 603 | validator) 604 | bids-validator --verbose -c "$valconfig" $PWD || echo "WARNING: validator exited with exit code $?" 605 | ;; 606 | validator-save) 607 | rm -f "$vallog" 608 | "$self" "validator" "$study" > "$vallog" 609 | info "Validator output in $PWD/$vallog" 610 | datalad save -d . 
-m "New BIDS validator output" $vallog 611 | ;; 612 | validator-summary) 613 | info "Errors/warnings from current state of the validator:" 614 | grep -E '[0-9]+: \[[A-Z]+\]' "$vallog" || echo " no messages were found" 615 | ;; 616 | validator-show) 617 | ${PAGER:-vim} $vallog 618 | ;; 619 | *) 620 | info "Unknown action $action" >&2 621 | exit 1 622 | ;; 623 | esac 624 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | walkthrough-%.md: Makefile 2 | for f in source/images/walkthrough-$*/*.png; do \ 3 | bn=$$(basename $$f); \ 4 | echo "### $$bn\n\n![$$bn]($$f)\n"; \ 5 | done >| $@ 6 | 7 | -------------------------------------------------------------------------------- /docs/source/images/dbic-conversions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/dbic-conversions.png -------------------------------------------------------------------------------- /docs/source/images/dbic-flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/dbic-flow.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/Makefile: -------------------------------------------------------------------------------- 1 | all: 2 | ./crop.py orig/*png 3 | -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/crop.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | import os 4 | from os.path import join as opj, basename 5 | import re 6 | 7 | outdir = "." 
8 | def get_crop(filename): 9 | # by default we will do cockpit one 10 | crop = re.search('(?P.*?)(_crop-(?P[^.]*))?\.(?P[^.]+?$)', filename) 11 | groups = crop.groupdict() 12 | if 'crop' in groups: 13 | #print("CROP: %s" % str(groups)) 14 | crop = groups['crop'] 15 | crop_ = { 16 | "dot+save": '736x577+196+890', 17 | "patientreg": '1030x710+100+940', 18 | "patientconf": '1074x857+67+868', 19 | "exam": '505x450+0+1348', 20 | "exam+menu": '505x801+0+997', 21 | }.get( 22 | crop, '639x511+196+890' # default is the cockpit window 23 | ) 24 | filename_ = "{basename}.{ext}".format(**groups) 25 | return crop_, filename_ 26 | 27 | for origfile in sys.argv[1:]: 28 | crop, filename = get_crop(basename(origfile)) 29 | assert origfile != filename, "specify file under orig" 30 | cmd = "convert {origfile} -crop {crop} {filename}".format(**locals()) 31 | print("Running {}".format(repr(cmd))) 32 | os.system(cmd) 33 | -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-1-topmost.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/6M/kM/SHA256E-s213001--060c2d7130053acd5bbae0bf7d9fa8b6abfc9bcd9052e63550ccb4bbbc43e818.png/SHA256E-s213001--060c2d7130053acd5bbae0bf7d9fa8b6abfc9bcd9052e63550ccb4bbbc43e818.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-2.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/VQ/Wk/SHA256E-s211437--aecfc4472857ff84937a7835cb3661dd6685c073ee12ce7725d6c923171fc305.png/SHA256E-s211437--aecfc4472857ff84937a7835cb3661dd6685c073ee12ce7725d6c923171fc305.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-3.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/J2/6p/SHA256E-s211554--2bc660ef79bfaf2c6e6c602bc12b31b62a3c7ebceaae1a371cd475b05a9848b2.png/SHA256E-s211554--2bc660ef79bfaf2c6e6c602bc12b31b62a3c7ebceaae1a371cd475b05a9848b2.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-4.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/jg/mf/SHA256E-s198072--63fd77635f3c61df5be0f4026ac9600abbd0ac6c6f3cbe773cc7f780d4bf4e22.png/SHA256E-s198072--63fd77635f3c61df5be0f4026ac9600abbd0ac6c6f3cbe773cc7f780d4bf4e22.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-5-newregion.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/ZP/jg/SHA256E-s200592--1326bea3ee82c3426bbdee5596b77375ba4540eb26f62d4b64d673a8e084eff3.png/SHA256E-s200592--1326bea3ee82c3426bbdee5596b77375ba4540eb26f62d4b64d673a8e084eff3.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-6.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/W8/j8/SHA256E-s199697--9cc7342e2acdc04a5aedf3441408136fc756f61371496f938c0c0bbd0f42e8c0.png/SHA256E-s199697--9cc7342e2acdc04a5aedf3441408136fc756f61371496f938c0c0bbd0f42e8c0.png 
-------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-7-newexam.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/KJ/4W/SHA256E-s201656--5299f25c0a14d73d43a763cfa76d24bfa2d5022ec11043907379e882dc01cbe4.png/SHA256E-s201656--5299f25c0a14d73d43a763cfa76d24bfa2d5022ec11043907379e882dc01cbe4.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-8.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/8x/Vp/SHA256E-s200712--9deea828a346681aa0872ab7c555a2eb06da5bcf823d2493c96d264187279700.png/SHA256E-s200712--9deea828a346681aa0872ab7c555a2eb06da5bcf823d2493c96d264187279700.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-9.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/2w/zw/SHA256E-s216421--58d47f3df0f865299bcdf1a458a110d299c0a45e865d66355f35c9488ffa87f6.png/SHA256E-s216421--58d47f3df0f865299bcdf1a458a110d299c0a45e865d66355f35c9488ffa87f6.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-a.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/Wk/Qw/SHA256E-s225940--6630a6d5e148c2f4e4dfbcce1dbba2c5433e3862155893d078d85939aeb23e09.png/SHA256E-s225940--6630a6d5e148c2f4e4dfbcce1dbba2c5433e3862155893d078d85939aeb23e09.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-b.0.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/57/j3/SHA256E-s244224--78edc54d00a08353bce3466c2d79eb6bf273c33307834fa867c209a28cd94c66.png/SHA256E-s244224--78edc54d00a08353bce3466c2d79eb6bf273c33307834fa867c209a28cd94c66.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-b.1-save.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/2W/kK/SHA256E-s248466--28faaa63351d73299ec89bdece46ee00c2282fbfd76c96ff6b86b73dc9dee56d.png/SHA256E-s248466--28faaa63351d73299ec89bdece46ee00c2282fbfd76c96ff6b86b73dc9dee56d.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-b.2-save2_crop-dot+save.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/Kp/37/SHA256E-s228209--91c8315212a04a151204e983c732e54c01bff9781606c62b89d1befe7701abb5.png/SHA256E-s228209--91c8315212a04a151204e983c732e54c01bff9781606c62b89d1befe7701abb5.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-b.2-saved3.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/zg/j6/SHA256E-s243261--18cb3d910981bc763869121409ef79323b8c378aa04fabf1f38c79a95a408885.png/SHA256E-s243261--18cb3d910981bc763869121409ef79323b8c378aa04fabf1f38c79a95a408885.png -------------------------------------------------------------------------------- 
/docs/source/images/walkthrough-1/orig/wt1-c.1-ses02.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/k4/f7/SHA256E-s239687--8636064ff75cd74ca74e958c842e5e6425a5d22c817757105a480b0b9a1dc906.png/SHA256E-s239687--8636064ff75cd74ca74e958c842e5e6425a5d22c817757105a480b0b9a1dc906.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-c.2-saveas.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/qK/K9/SHA256E-s244361--fbff47e5a552021b75f4b5147e0f1fc782aff3f058c2e6330bd24db449887d64.png/SHA256E-s244361--fbff47e5a552021b75f4b5147e0f1fc782aff3f058c2e6330bd24db449887d64.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-c.3_crop-dot+save.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/J8/JX/SHA256E-s226777--03161d12d332187ae501d76075cc3ee3e33e50ae3b28e69191a8248616d7ac08.3.png/SHA256E-s226777--03161d12d332187ae501d76075cc3ee3e33e50ae3b28e69191a8248616d7ac08.3.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-c.4.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/kf/M8/SHA256E-s237277--83ac96212995c53c2711773e7b642afb6898b514f1035b57d4928e09b9670934.4.png/SHA256E-s237277--83ac96212995c53c2711773e7b642afb6898b514f1035b57d4928e09b9670934.4.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-d.1.register_crop-patientreg.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/wp/5K/SHA256E-s213312--aecf23f7cdcf7ca63cace18ae8238efdf8c1c01baec80bd566eb7857156e23bd.png/SHA256E-s213312--aecf23f7cdcf7ca63cace18ae8238efdf8c1c01baec80bd566eb7857156e23bd.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-d.2.choose-investigator_crop-patientconf.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/1G/ZJ/SHA256E-s232997--460c1bc1e23685808d3afd0d5825b4ef1382d9fb7a499576ad5514f8d6c1557c.png/SHA256E-s232997--460c1bc1e23685808d3afd0d5825b4ef1382d9fb7a499576ad5514f8d6c1557c.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-d.3.chose-investigator_crop-patientconf.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/w1/jk/SHA256E-s221210--e91fa5468352c5b71be1aab78424494c57863ec5a1a84c21154a4a64a56598a5.png/SHA256E-s221210--e91fa5468352c5b71be1aab78424494c57863ec5a1a84c21154a4a64a56598a5.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-d.4.chose-exam_crop-patientconf.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/pp/p2/SHA256E-s225825--920432d64fb58c6d803de08a75289dd98b6f34cf494cf2d2d41afa8052d809c7.png/SHA256E-s225825--920432d64fb58c6d803de08a75289dd98b6f34cf494cf2d2d41afa8052d809c7.png 
-------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-d.5-endofdescription_crop-patientconf.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/jq/kq/SHA256E-s226143--3d5214af4287a0a1134ea942db8d31cc60f9ca1cbe65e7a62f5becf08f4cbf59.png/SHA256E-s226143--3d5214af4287a0a1134ea942db8d31cc60f9ca1cbe65e7a62f5becf08f4cbf59.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-e.1-scan-interrupt-func_crop-exam.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/JJ/7F/SHA256E-s382352--21c5144f854cbdef45f834867f28bc1c679f744abc6513b957b275efe56fb255.png/SHA256E-s382352--21c5144f854cbdef45f834867f28bc1c679f744abc6513b957b275efe56fb255.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-e.1-scan-interrupt-repeat_crop-exam+menu.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/7J/PJ/SHA256E-s386096--44405c804e0b5a26b01eb7bf1ea980cecdd88640826ffc9ab3bb7caf637ffdb5.png/SHA256E-s386096--44405c804e0b5a26b01eb7bf1ea980cecdd88640826ffc9ab3bb7caf637ffdb5.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-e.1-scan1_crop-exam.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/Mm/27/SHA256E-s232816--c5df46f67dd77135dd0660ff38a0b1e54ad2b62e42de4bc06eb8b020444dcbc7.png/SHA256E-s232816--c5df46f67dd77135dd0660ff38a0b1e54ad2b62e42de4bc06eb8b020444dcbc7.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-e.3-scan-interrupt-repeated_crop-exam.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/Zw/7w/SHA256E-s385407--730b6e15ee56e1c91afa11d875ca43c9b9cef27513ee18e81419b39ad159a8a7.png/SHA256E-s385407--730b6e15ee56e1c91afa11d875ca43c9b9cef27513ee18e81419b39ad159a8a7.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-f.1-repeatscout_crop-exam+menu.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/10/zQ/SHA256E-s377608--838f024c0f068bba8774d815e67def6837cb0af4d1c04722ad55545323577894.png/SHA256E-s377608--838f024c0f068bba8774d815e67def6837cb0af4d1c04722ad55545323577894.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-f.2-hadtorepeatfun-run02_crop-exam.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/zp/k5/SHA256E-s386873--322f7aa1d39a080b07608d8858c025a8397d9f9238854eb4991579230068337f.png/SHA256E-s386873--322f7aa1d39a080b07608d8858c025a8397d9f9238854eb4991579230068337f.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-f.3-repeatfmap_crop-exam+menu.png: -------------------------------------------------------------------------------- 1 | 
../../../../../.git/annex/objects/pP/0v/SHA256E-s377364--dd02dae0e683b444859cd16c02bd072d04fcfff55184bbcffeacb28b2424df73.png/SHA256E-s377364--dd02dae0e683b444859cd16c02bd072d04fcfff55184bbcffeacb28b2424df73.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-f.4-skip_origrun02_crop-exam.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/Gp/mG/SHA256E-s386177--aa8d0689b1dadda43ee942c4a8adabd3c0cbecedd56a5e3bc0d775a3a8da47c3.png/SHA256E-s386177--aa8d0689b1dadda43ee942c4a8adabd3c0cbecedd56a5e3bc0d775a3a8da47c3.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-f.5-renamedfmap-run02_crop-exam.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/6G/75/SHA256E-s385817--9abad0dd9a28964ceca6b1079b832a540a32cb60d0b33274b31d39974ea83e10.png/SHA256E-s385817--9abad0dd9a28964ceca6b1079b832a540a32cb60d0b33274b31d39974ea83e10.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-f.5-renamefmap_crop-exam.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/Jj/vJ/SHA256E-s386747--dc8294d69bc7820afcf93ca375eccf8cd28612abec23557789da97f440ae8508.png/SHA256E-s386747--dc8294d69bc7820afcf93ca375eccf8cd28612abec23557789da97f440ae8508.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/orig/wt1-g-done_crop-exam.png: -------------------------------------------------------------------------------- 1 | ../../../../../.git/annex/objects/mf/pZ/SHA256E-s372205--23285e226601e6279adb3ebd5d3f7b314b9a0436414775dd272ce34939a39a6e.png/SHA256E-s372205--23285e226601e6279adb3ebd5d3f7b314b9a0436414775dd272ce34939a39a6e.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-1-topmost.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-1-topmost.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-2.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-3.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-4.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-5-newregion.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-5-newregion.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-6.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-7-newexam.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-7-newexam.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-8.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-9.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-a.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-a.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-b.0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-b.0.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-b.1-save.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-b.1-save.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-b.2-save2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-b.2-save2.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-b.2-saved3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-b.2-saved3.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-c.1-ses02.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-c.1-ses02.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-c.2-saveas.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-c.2-saveas.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-c.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-c.3.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-c.4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-c.4.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-d.1.register.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-d.1.register.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-d.2.choose-investigator.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-d.2.choose-investigator.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-d.3.chose-investigator.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-d.3.chose-investigator.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-d.4.chose-exam.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-d.4.chose-exam.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-d.5-endofdescription.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-d.5-endofdescription.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-e.1-scan-interrupt-func.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-e.1-scan-interrupt-func.png 
-------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-e.1-scan-interrupt-repeat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-e.1-scan-interrupt-repeat.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-e.1-scan1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-e.1-scan1.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-e.3-scan-interrupt-repeated.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-e.3-scan-interrupt-repeated.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-f.1-repeatscout.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-f.1-repeatscout.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-f.2-hadtorepeatfun-run02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-f.2-hadtorepeatfun-run02.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-f.3-repeatfmap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-f.3-repeatfmap.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-f.4-skip_origrun02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-f.4-skip_origrun02.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-f.5-renamedfmap-run02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-f.5-renamedfmap-run02.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-f.5-renamefmap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-f.5-renamefmap.png -------------------------------------------------------------------------------- /docs/source/images/walkthrough-1/wt1-g-done.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/reproin/2f3d6a6db272d615ce3b243184131050603cfcd8/docs/source/images/walkthrough-1/wt1-g-done.png -------------------------------------------------------------------------------- /docs/walkthrough-1.md: -------------------------------------------------------------------------------- 1 | # Siemens Prisma 3T walkthrough 2 | 3 | ## Organization 4 | 5 | The purpose of the ReproIn effort is to automate as much as possible, 6 | requiring only a minimal amount of information entry at the scanner, 7 | sufficient to have each collected session placed correctly within a 8 | hierarchy of datasets, converted to BIDS, and 9 | (optionally) placed under version control by 10 | [DataLad](http://datalad.org). 11 | 12 | To achieve that, we first need to make the DICOMs carry 13 | information about the Investigator (e.g. a PI of the study or a mentor), 14 | possibly the corresponding Experimenter (a student/assistant, or the 15 | Investigator themselves), and the study (in our case ID-name) itself. 16 | 17 | ### Tree -> Investigator 18 | 19 | To accomplish that, in the Dot Cockpit we first created a dedicated tree 20 | for each Investigator: 21 | 22 | ![wt1-1-topmost.png](source/images/walkthrough-1/wt1-1-topmost.png) 23 | 24 | ![wt1-2.png](source/images/walkthrough-1/wt1-2.png) 25 | 26 | ![wt1-3.png](source/images/walkthrough-1/wt1-3.png) 27 | 28 | ![wt1-4.png](source/images/walkthrough-1/wt1-4.png) 29 | 30 | ### Region -> Experimenter 31 | 32 | Then, for the specific Experimenter responsible for the study, we 33 | defined the 2nd-level (Region) entry as a joint `Investigator_Experimenter` 34 | entry: 35 | 36 | ![wt1-5-newregion.png](source/images/walkthrough-1/wt1-5-newregion.png) 37 | 38 | ![wt1-6.png](source/images/walkthrough-1/wt1-6.png) 39 | 40 | ### Exam -> Study 41 | 42 | And then, at the 3rd level (the Exam), we define the study. 43 | 44 | Note: you could have multiple entries at any of those levels (e.g. 45 | multiple Experimenters working for the same PI; or multiple studies for 46 | the same Experimenter): 47 | 48 | ![wt1-7-newexam.png](source/images/walkthrough-1/wt1-7-newexam.png) 49 | 50 | ![wt1-8.png](source/images/walkthrough-1/wt1-8.png) 51 | 52 | ## New Program 53 | 54 | Now it is finally possible to define Program(s) with the desired 55 | sequence of protocols: 56 | 57 | ![wt1-9.png](source/images/walkthrough-1/wt1-9.png) 58 | 59 | ![wt1-a.png](source/images/walkthrough-1/wt1-a.png) 60 | 61 | Note that the sequence names follow the [reproin 62 | specification](https://github.com/nipy/heudiconv/blob/master/heudiconv/heuristics/reproin.py#L26). 63 | The example below is intended for the first session of a study 64 | collecting a T1 anatomical, a fieldmap, two runs of the task1 functional 65 | sequence, then a diffusion image, and completing with a single run of 66 | the task2 functional sequence. Those sequences could be copied from prior/other 67 | studies which already follow the naming convention and 68 | otherwise have the desired settings: 69 | 70 | ![wt1-b.0.png](source/images/walkthrough-1/wt1-b.0.png) 71 | 72 | ![wt1-b.1-save.png](source/images/walkthrough-1/wt1-b.1-save.png) 73 | 74 | The program should be saved under some descriptive (but otherwise arbitrary) 75 | name.
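For reference, the protocol names in a program such as the one being assembled above might look along the following lines. This is an illustrative sketch only: apart from `func_task-task1_run-01`, which is referred to later in this walkthrough, the exact names are assumed here rather than copied from the screenshots, and the linked reproin specification defines the full naming grammar.

```
anat-scout
anat-T1w
fmap_run-01
func_task-task1_run-01
func_task-task1_run-02
dwi
func_task-task2
```

Roughly, each name starts with the sequence type (`anat`, `func`, `fmap`, `dwi`), optionally followed by a modality label (as in `anat-T1w`) and by BIDS-style `_ses-`, `_task-`, `_acq-`, `_run-` fields, which is what allows heudiconv to derive the BIDS layout automatically.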
Note that if the study requires multiple scanning sessions, it is 76 | useful to use a `_ses-` suffix to indicate right away which session a 77 | particular program is intended for. 78 | 79 | ![wt1-b.2-save2.png](source/images/walkthrough-1/wt1-b.2-save2.png) 80 | 81 | ![wt1-b.2-saved3.png](source/images/walkthrough-1/wt1-b.2-saved3.png) 82 | 83 | ### Next Session 84 | 85 | The program for the next session should carry the session marker within 86 | the name of its scout sequence, and be saved separately: 87 | 88 | ![wt1-c.1-ses02.png](source/images/walkthrough-1/wt1-c.1-ses02.png) 89 | 90 | ![wt1-c.2-saveas.png](source/images/walkthrough-1/wt1-c.2-saveas.png) 91 | 92 | ![wt1-c.3.png](source/images/walkthrough-1/wt1-c.3.png) 93 | 94 | ![wt1-c.4.png](source/images/walkthrough-1/wt1-c.4.png) 95 | 96 | 97 | ## New Accession 98 | 99 | ### Register a (New) Subject 100 | 101 | In the exam card we enter no personal information, to avoid leakage through 102 | DICOMs. All personal data for a given subject id is stored elsewhere 103 | (the `Age` and `Sex` fields from the exam card, though, are used to autofill 104 | the `age` and `sex` fields within `participants.tsv`): 105 | 106 | ![wt1-d.1.register.png](source/images/walkthrough-1/wt1-d.1.register.png) 107 | 108 | ### Choose the Investigator in the Tree 109 | 110 | ![wt1-d.2.choose-investigator.png](source/images/walkthrough-1/wt1-d.2.choose-investigator.png) 111 | 112 | ![wt1-d.3.chose-investigator.png](source/images/walkthrough-1/wt1-d.3.chose-investigator.png) 113 | 114 | ### Choose the desired Program 115 | 116 | This is where some "magic" (i.e. automation) happens: the 117 | `Investigator_Experimenter` and `StudyID_Study-Name` fields get copied 118 | by the UI into the `Study Description` field, which is later included 119 | within the transmitted DICOM headers. 120 | 121 | Note also that **only** the `Investigator_Experimenter` and 122 | `StudyID_Study-Name` fields get copied into the `Study Description`. The 123 | **Program name** (such as `anyname-eg-ses01`) is **not copied**, and 124 | appears nowhere within the DICOMs, precluding its use for automation 125 | (hence the arbitrary `anyname` was chosen here for this example). 126 | 127 | ![wt1-d.4.chose-exam.png](source/images/walkthrough-1/wt1-d.4.chose-exam.png) 128 | 129 | 130 | ![wt1-d.5-endofdescription.png](source/images/walkthrough-1/wt1-d.5-endofdescription.png) 131 | 132 | **Do not edit `Study Description`**, unless you really need to and can 133 | guarantee consistency. This value determines the location of 134 | the dataset on the file system within the hierarchy of (DataLad, if run 135 | with the `--datalad` option) datasets. Keeping it fully automated 136 | guarantees that, for the next subject/session, data will be placed into 137 | the same dataset without the need to specify the target location manually, 138 | thus preventing possible human errors. 139 | 140 | ## Interrupted Scan 141 | 142 | As you can see in the following example, we have interrupted the 143 | `func_task-task1_run-01` functional scan, maybe because our phantom 144 | fell asleep.
Our scanner console is configured to transmit data 145 | immediately after each volume is successfully collected, so those volumes were 146 | already transmitted to PACS: 147 | 148 | ![wt1-e.1-scan-interrupt-func.png](source/images/walkthrough-1/wt1-e.1-scan-interrupt-func.png) 149 | 150 | To let the `heudiconv` `reproin` heuristic figure out that the run was 151 | canceled or otherwise needs to be discarded, just `Repeat` the run 152 | **without changing anything in its name!**: 153 | 154 | ![wt1-e.1-scan-interrupt-repeat.png](source/images/walkthrough-1/wt1-e.1-scan-interrupt-repeat.png) 155 | 156 | Then, upon conversion, the earlier (e.g. canceled) scans will also be 157 | converted, but assigned a `__dupX` suffix (X is an incrementing integer, to 158 | allow for possibly multiple canceled runs). This way the data curator can 159 | inspect those volumes and, if everything matches the notes/sanity checks, 160 | remove those `__dupX` files. Meanwhile you can proceed with completing 161 | your Program of scans: 162 | 163 | ![wt1-e.1-scan1.png](source/images/walkthrough-1/wt1-e.1-scan1.png) 164 | 165 | 166 | ## Interrupted Program 167 | 168 | Sometimes it is necessary to take the subject out of the scanner and 169 | bring them back later to finish the scanning session. Typically some 170 | acquisitions (e.g. at least scouts and fieldmaps) need to be re-run. Because 171 | it is desirable to keep both versions of the files -- from the original 172 | scanning session and from the continued one -- you would need to `Repeat` 173 | those scans but this time assign them a new suffix. E.g., you could assign 174 | a `_run-` suffix matching the `_run-` suffix of the next functional run, 175 | which makes it easier later on to associate fieldmaps with the corresponding 176 | functional run file(s). 177 | 178 | ![wt1-e.3-scan-interrupt-repeated.png](source/images/walkthrough-1/wt1-e.3-scan-interrupt-repeated.png) 179 | 180 | ![wt1-f.1-repeatscout.png](source/images/walkthrough-1/wt1-f.1-repeatscout.png) 181 | 182 | ![wt1-f.2-hadtorepeatfun-run02.png](source/images/walkthrough-1/wt1-f.2-hadtorepeatfun-run02.png) 183 | 184 | ![wt1-f.3-repeatfmap.png](source/images/walkthrough-1/wt1-f.3-repeatfmap.png) 185 | 186 | ![wt1-f.4-skip_origrun02.png](source/images/walkthrough-1/wt1-f.4-skip_origrun02.png) 187 | 188 | ![wt1-f.5-renamedfmap-run02.png](source/images/walkthrough-1/wt1-f.5-renamedfmap-run02.png) 189 | 190 | ![wt1-f.5-renamefmap.png](source/images/walkthrough-1/wt1-f.5-renamefmap.png) 191 | 192 | ### Done again: 193 | 194 | ![wt1-g-done.png](source/images/walkthrough-1/wt1-g-done.png) 195 | 196 | The data has been transmitted to PACS and is ready for processing using 197 | `heudiconv -f reproin`.
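For example -- a minimal sketch with hypothetical paths (the actual DICOM location and output directory depend on your site), using the same flags as this repository's `tests/test_run.sh` -- the conversion could be invoked as:

```
heudiconv -f reproin --bids --datalad \
    --files /inbox/DICOM/2023/06/05/A001234 \
    -o /data/bids
```

With `--datalad`, the converted data is additionally placed under DataLad version control, and since the target location within the output hierarchy is derived from `Study Description`, subsequent subjects/sessions of the same study land in the same dataset.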
-------------------------------------------------------------------------------- /generate_container.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu 4 | 5 | # Either to build against to-be-released heudiconv 6 | dev_build= 7 | 8 | generate() { 9 | if [ "$dev_build" = "1" ]; then 10 | apt_pkgs=python3-pip 11 | run_cmd="pip install git+https://github.com/nipy/heudiconv@master" 12 | else 13 | apt_pkgs="heudiconv=0.13.1-1~nd120+1" 14 | run_cmd=":" 15 | fi 16 | # more details might come on https://github.com/ReproNim/neurodocker/issues/330 17 | [ "$1" == singularity ] && add_entry=' "$@"' || add_entry='' 18 | #neurodocker generate "$1" \ 19 | ndversion=0.7.0 20 | #ndversion=master 21 | docker run --rm repronim/neurodocker:$ndversion generate "$1" \ 22 | --base=neurodebian:bookworm \ 23 | --ndfreeze date=20230604 \ 24 | --pkg-manager=apt \ 25 | --install vim wget strace time ncdu gnupg curl procps datalad pigz less tree \ 26 | git-annex python3-nibabel \ 27 | python3-nipype virtualenv shellcheck \ 28 | python3-dcmstack python3-funcsigs python3-etelemetry \ 29 | python3-pytest dcmtk python3-pip python3-wheel \ 30 | python3-setuptools python3-datalad python3-filelock \ 31 | dcm2niix python3-pytest python3-nose python3-venv $apt_pkgs \ 32 | --run "$run_cmd" \ 33 | --run "apt-get update && apt-get -y dist-upgrade" \ 34 | --run "curl -sL https://deb.nodesource.com/setup_16.x | bash - " \ 35 | --install nodejs npm \ 36 | --run "npm install -g bids-validator@1.11.0" \ 37 | --run "mkdir /afs /inbox" \ 38 | --copy bin/reproin /usr/local/bin/reproin \ 39 | --run "chmod a+rx /usr/local/bin/reproin" \ 40 | --user=reproin \ 41 | --entrypoint "/usr/local/bin/reproin$add_entry" 42 | } 43 | 44 | generate docker > Dockerfile 45 | generate singularity > Singularity 46 | -------------------------------------------------------------------------------- /resources/cfg_reproin_bids.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Procedure for default configuration of ReproIn BIDS dataset 3 | 4 | It slightly diverges from original datalad-neuroimaging cfg_bids in 5 | assuring that .heudiconv/ content (which might be subdataset or not) 6 | would go under git-annex. 
7 | """ 8 | 9 | import os 10 | import sys 11 | from pathlib import Path # for compatibility with older DataLad lacking .pathobj 12 | from datalad.consts import DATASET_CONFIG_FILE 13 | from datalad.distribution.dataset import require_dataset 14 | from datalad.support import path as op 15 | from datalad.utils import ensure_tuple_or_list 16 | #from datalad.support.external_versions import external_versions as ev 17 | 18 | ds = require_dataset( 19 | sys.argv[1], 20 | check_installed=True, 21 | purpose='ReproIn BIDS dataset configuration') 22 | 23 | # unless taken care of by the template already, each item in here 24 | # will get its own .gitattributes entry to keep it out of the annex 25 | # give relative path to dataset root (use platform notation) 26 | force_in_git = [ 27 | 'README*', 28 | 'CHANGES*', 29 | 'dataset_description.json', 30 | '.bidsignore', 31 | 'code/**', 32 | '*.tsv', 33 | '*.json', 34 | '*.txt', 35 | ] 36 | # just to be sure + _scans.tsv could contain dates 37 | force_in_annex = [ 38 | '*.nii.gz', 39 | '*.tgz', 40 | '*_scans.tsv', 41 | ] 42 | # make an attempt to discover the prospective change in .gitattributes 43 | # to decide what needs to be done, and make this procedure idempotent 44 | # (for simple cases) 45 | attr_fpath = op.join(ds.path, '.gitattributes') 46 | if op.lexists(attr_fpath): 47 | with open(attr_fpath, 'rb') as f: 48 | attrs = f.read().decode() 49 | else: 50 | attrs = '' 51 | 52 | for paths, largefile in [ 53 | (force_in_git, 'nothing'), 54 | (force_in_annex, 'anything'), 55 | # not sufficient since order matters (last match wins) and 56 | # heudiconv ATM also would add a line 57 | # for * and base on the size and also the one for _scans.tsv 58 | # so we end up with _scans.tsv, small they are, being added 59 | # to git.... TODO: fix in heudiconv 60 | ]: 61 | # amend gitattributes, if needed 62 | ds.repo.set_gitattributes([ 63 | (path, {'annex.largefiles': largefile}) 64 | for path in paths 65 | if '{} annex.largefiles={}'.format(path, largefile) not in attrs 66 | ]) 67 | 68 | 69 | def add_line_to_file(subpath, line): 70 | pathobj = Path(ds.path) 71 | f = pathobj / subpath 72 | if not f.parent.exists(): 73 | f.parent.mkdir() 74 | content = f.read_text().split(os.linesep) if f.exists() else [] 75 | if line not in content: 76 | f.write_text(os.linesep.join(content + [line])) 77 | 78 | 79 | # Everything under .heudiconv should go into annex. 
80 | # But it might be a subdataset or not, so we will 81 | # just adjust it directly 82 | for l in [ 83 | "* annex.largefiles=anything", 84 | "**/.git* annex.largefiles=nothing", 85 | ]: 86 | add_line_to_file( 87 | op.join(".heudiconv", ".gitattributes"), 88 | l) 89 | 90 | add_line_to_file( 91 | op.join(".heudiconv", ".gitignore"), 92 | "*.pyc") 93 | 94 | # leave clean 95 | ds.save( 96 | path=['.gitattributes', 97 | op.join(".heudiconv", ".gitattributes"), 98 | op.join(".heudiconv", ".gitignore")], 99 | message="Apply default ReproIn BIDS dataset setup", 100 | ) 101 | 102 | existing_types = ensure_tuple_or_list( 103 | ds.config.get('datalad.metadata.nativetype', [], get_all=True)) 104 | for nt in 'bids', 'nifti1': 105 | if nt in existing_types: 106 | # do not duplicate 107 | continue 108 | ds.config.add( 109 | 'datalad.metadata.nativetype', 110 | nt, 111 | scope='branch', 112 | reload=False) 113 | 114 | ds.save( 115 | path=op.join(ds.path, DATASET_CONFIG_FILE), 116 | message="Configure metadata type(s)", 117 | result_renderer='disabled' 118 | ) 119 | -------------------------------------------------------------------------------- /tests/test_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | #emacs: -*- mode: shell-script; c-basic-offset: 4; tab-width: 4; indent-tabs-mode: t -*- 3 | #ex: set sts=4 ts=4 sw=4 noet: 4 | # 5 | # A sample script to test correct operation for a prototypical/recommended 6 | # setup etc 7 | # 8 | # COPYRIGHT: Yaroslav Halchenko 2019 9 | # 10 | # LICENSE: MIT 11 | # 12 | # Permission is hereby granted, free of charge, to any person obtaining a copy 13 | # of this software and associated documentation files (the "Software"), to deal 14 | # in the Software without restriction, including without limitation the rights 15 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 16 | # copies of the Software, and to permit persons to whom the Software is 17 | # furnished to do so, subject to the following conditions: 18 | # 19 | # The above copyright notice and this permission notice shall be included in 20 | # all copies or substantial portions of the Software. 21 | # 22 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 23 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 24 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 25 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 26 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 27 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 28 | # THE SOFTWARE. 29 | # 30 | 31 | set -eu 32 | 33 | INPUT=( ~/datalad/dbic/QA/sourcedata/sub-qa/ses-20180904/ ) 34 | 35 | STUDY=$(mktemp -u /tmp/reproin.XXXXXX) 36 | 37 | #HEUDICONV=datalad containers-run containers/reproin 38 | 39 | # This a study oriented scenario. Will override 40 | # --locator 41 | 42 | # requires datalad >= 0.12b* 43 | datalad create -c text2git "$STUDY" 44 | # requires datalad-neuroimaging 45 | datalad create -c bids -d "$STUDY" "$STUDY/data/bids" 46 | 47 | # Reuse our containers collection 48 | # datalad install -d $STUDY/data/bids -s ///repronim/containers .containers 49 | 50 | cd "$STUDY/data/bids" 51 | HEUDICONV="docker run -it --workdir $PWD -v $HOME/datalad:$HOME/datalad -v $PWD:$PWD -u $(id -u):$(id -g) -e HOME=$HOME -v $HOME:$HOME 723a01d04689" 52 | 53 | $HEUDICONV -f reproin --bids --datalad -o . 
--files "${INPUT[@]}" 54 | --------------------------------------------------------------------------------