├── dax
│   ├── tests
│   │   ├── __init__.py
│   │   ├── test_dax.py
│   │   ├── unit_test_assessor_wrapper.py
│   │   ├── unit_test_xnatutils.py
│   │   ├── unit_test_common_pyxnat_objects.py
│   │   ├── unit_test_utilities.py
│   │   ├── common_session_tools.py
│   │   ├── unit_test_xnatinterface.py
│   │   ├── unit_test_autoprocessor.py
│   │   └── unit_test_processor_graph.py
│   ├── version.py
│   ├── schema
│   │   ├── example.txt
│   │   ├── project_processor.yaml
│   │   ├── processor.yaml
│   │   └── subject_processor.yaml
│   ├── __init__.py
│   ├── yaml_doc.py
│   ├── validate.py
│   ├── test_suppdf.py
│   ├── assessor_utils.py
│   ├── lockfiles.py
│   ├── rcq
│   │   ├── __init__.py
│   │   ├── project_job_template.txt
│   │   ├── trialbuilder.py
│   │   └── projectinfo.py
│   ├── log.py
│   ├── errors.py
│   ├── dax_settings.py
│   ├── utilities.py
│   ├── BidsToXnat.py
│   └── processor_graph.py
├── test
│   ├── pipelines
│   │   ├── test_pipeline.py
│   │   ├── Proc_A
│   │   │   └── v1.0.0
│   │   │       └── Proc_A_v1_0_0.py
│   │   └── Proc_B
│   │       └── v1.0.0
│   │           └── Proc_B_v1_0_0.py
│   ├── job_template.txt
│   ├── settings
│   │   ├── test_upload.yaml
│   │   └── test.yaml
│   ├── test_subjgenproc.py
│   ├── test_rcq_all.py
│   ├── processors
│   │   ├── processor_proc_b.yaml
│   │   ├── processor_proc_e.yaml
│   │   ├── processor_proc_a.yaml
│   │   ├── processor_proc_c.yaml
│   │   └── processor_proc_d.yaml
│   ├── README.txt
│   └── test_job_runner.py
├── docs
│   ├── tutorials.rst
│   ├── requirements.txt
│   ├── build_sphinx.sh
│   ├── images
│   │   ├── dax_logo.png
│   │   ├── dax_manager
│   │   │   ├── slant_args.PNG
│   │   │   ├── slant_file.PNG
│   │   │   ├── dcm2niix_args.PNG
│   │   │   ├── dcm2niix_file.PNG
│   │   │   ├── dax_manager_home.png
│   │   │   └── dax_manager_module_field_note.png
│   │   ├── BIDS_walkthrough
│   │   │   ├── Step1.1.PNG
│   │   │   ├── Step11.1.PNG
│   │   │   ├── Step14.1.PNG
│   │   │   ├── Step17.1.PNG
│   │   │   ├── Step3.1.PNG
│   │   │   └── Step8.1.PNG
│   │   ├── manage_project
│   │   │   └── assessor_list.png
│   │   ├── dax_executables
│   │   │   └── life_cycle_of_dax_task.png
│   │   └── README
│   ├── files
│   │   └── README
│   ├── release.txt
│   ├── contributors.rst
│   ├── requirements_for_dax_on_accre.rst
│   ├── index.rst
│   ├── dax.rst
│   ├── assessors_in_vuiis_xnat.rst
│   ├── how_to_contribute.rst
│   ├── faq.rst
│   ├── dax_xnat_dataypes_install.rst
│   ├── installing_dax_in_a_virtual_environment.rst
│   ├── dax_executables.rst
│   └── dax_manager.rst
├── .settings
│   └── .gitignore
├── requirements.txt
├── misc
│   ├── README.md
│   ├── templates
│   │   └── SLURM
│   │       └── README.md
│   └── xnat-plugins
│       ├── dax-plugin-genProcData-1.4.1.jar
│       └── dax-plugin-genProcData-1.4.2.jar
├── MANIFEST.in
├── .readthedocs.yaml
├── .gitignore
├── LICENSE
├── README.md
├── bin
│   ├── supplemental_tools
│   │   ├── Xnatreport_assessor.py
│   │   ├── XnatChangePrearchiveProject
│   │   ├── XnatRepushExamcards.py
│   │   ├── dax-testrun.sh
│   │   ├── Xnatreport_assessors
│   │   └── XnatDetailedReport.m
│   ├── Xnat_tools
│   │   ├── XnatAssessorFile
│   │   ├── XnatRandomSessionList
│   │   ├── XnatCheckLogin
│   │   ├── XnatBOND
│   │   ├── Xnatsetvar
│   │   └── Xnatquery
│   └── freesurfer_tools
│       ├── fsview
│       ├── fs6download
│       └── fs6upload
└── setup.py
/dax/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/pipelines/test_pipeline.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dax/version.py:
--------------------------------------------------------------------------------
1 | VERSION = '2.17.0'
2 |
--------------------------------------------------------------------------------
/docs/tutorials.rst:
--------------------------------------------------------------------------------
1 | Tutorials
2 | ==========
3 |
--------------------------------------------------------------------------------
/test/pipelines/Proc_A/v1.0.0/Proc_A_v1_0_0.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/pipelines/Proc_B/v1.0.0/Proc_B_v1_0_0.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.settings/.gitignore:
--------------------------------------------------------------------------------
1 | /org.eclipse.core.resources.prefs
2 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx~=4.0
2 | pandas==1.1.5
3 |
--------------------------------------------------------------------------------
/docs/build_sphinx.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | sphinx-build -b html . _build
3 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pyxnat
2 | pyyaml
3 | pycap
4 | fpdf2
5 | PyPDF2
6 | pandas
7 |
--------------------------------------------------------------------------------
/misc/README.md:
--------------------------------------------------------------------------------
1 | The misc directory contains miscellaneous supporting files for dax.
2 |
--------------------------------------------------------------------------------
/test/job_template.txt:
--------------------------------------------------------------------------------
1 | python ./test_job_runner.py ${job_cmds} ${job_output_file}
2 |
--------------------------------------------------------------------------------
/docs/images/dax_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/dax_logo.png
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.md LICENSE
2 |
3 | graft docs
4 |
5 | include dax/schema/*.yaml
6 |
--------------------------------------------------------------------------------
/misc/templates/SLURM/README.md:
--------------------------------------------------------------------------------
1 | Templates now stored at https://github.com/VUIIS/dax_templates .
2 |
--------------------------------------------------------------------------------
/docs/images/dax_manager/slant_args.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/dax_manager/slant_args.PNG
--------------------------------------------------------------------------------
/docs/images/dax_manager/slant_file.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/dax_manager/slant_file.PNG
--------------------------------------------------------------------------------
/docs/images/BIDS_walkthrough/Step1.1.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/BIDS_walkthrough/Step1.1.PNG
--------------------------------------------------------------------------------
/docs/images/BIDS_walkthrough/Step11.1.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/BIDS_walkthrough/Step11.1.PNG
--------------------------------------------------------------------------------
/docs/images/BIDS_walkthrough/Step14.1.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/BIDS_walkthrough/Step14.1.PNG
--------------------------------------------------------------------------------
/docs/images/BIDS_walkthrough/Step17.1.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/BIDS_walkthrough/Step17.1.PNG
--------------------------------------------------------------------------------
/docs/images/BIDS_walkthrough/Step3.1.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/BIDS_walkthrough/Step3.1.PNG
--------------------------------------------------------------------------------
/docs/images/BIDS_walkthrough/Step8.1.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/BIDS_walkthrough/Step8.1.PNG
--------------------------------------------------------------------------------
/docs/images/dax_manager/dcm2niix_args.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/dax_manager/dcm2niix_args.PNG
--------------------------------------------------------------------------------
/docs/images/dax_manager/dcm2niix_file.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/dax_manager/dcm2niix_file.PNG
--------------------------------------------------------------------------------
/docs/images/dax_manager/dax_manager_home.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/dax_manager/dax_manager_home.png
--------------------------------------------------------------------------------
/docs/images/manage_project/assessor_list.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/manage_project/assessor_list.png
--------------------------------------------------------------------------------
/misc/xnat-plugins/dax-plugin-genProcData-1.4.1.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/misc/xnat-plugins/dax-plugin-genProcData-1.4.1.jar
--------------------------------------------------------------------------------
/misc/xnat-plugins/dax-plugin-genProcData-1.4.2.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/misc/xnat-plugins/dax-plugin-genProcData-1.4.2.jar
--------------------------------------------------------------------------------
/docs/images/dax_executables/life_cycle_of_dax_task.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/dax_executables/life_cycle_of_dax_task.png
--------------------------------------------------------------------------------
/test/settings/test_upload.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | settings:
3 | - host: http://10.1.1.17
4 | username:
5 | password:
6 | projects:
7 | - proj1
8 |
--------------------------------------------------------------------------------
/docs/images/dax_manager/dax_manager_module_field_note.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VUIIS/dax/HEAD/docs/images/dax_manager/dax_manager_module_field_note.png
--------------------------------------------------------------------------------
/docs/files/README:
--------------------------------------------------------------------------------
1 | Each folder here should be named exactly the same as a .rst file (without extension). All files referenced in that .rst
2 | file should exist in that directory
--------------------------------------------------------------------------------
/docs/images/README:
--------------------------------------------------------------------------------
1 | Each folder here should be named exactly the same as a .rst file (without extension). All images referenced in that .rst
2 | file should exist in that directory
--------------------------------------------------------------------------------
/dax/tests/test_dax.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import dax
4 |
5 |
6 | class TestJoke(TestCase):
7 | def test_is_string(self):
 8 |         s = 'TEST_STRING'
9 | self.assertTrue(isinstance(s, str))
10 |
--------------------------------------------------------------------------------
/dax/schema/example.txt:
--------------------------------------------------------------------------------
1 | yamale -s processor.yaml FS7_v1.1.0.yaml
2 | Validating /gpfs52/home/boydb1/dev-yamale/FS7_v1.1.0.yaml...
3 | Validation success! 👍
4 |
5 | for i in ~/git/analyses/processors/*/processor.yaml;do yamale -s project_processor.yaml $i;done
6 |
--------------------------------------------------------------------------------
/docs/release.txt:
--------------------------------------------------------------------------------
 1 | Note that version.py must be manually updated just before and just after creating a release.
2 |
3 | To create the new release, run these steps on a workstation:
4 | git clone --branch vX.Y.Z https://github.com/VUIIS/dax.git --single-branch
5 | cd dax
6 | python setup.py sdist bdist_wheel
7 | twine upload dist/*
8 |
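 9 | For example, before tagging vX.Y.Z, set the version in dax/version.py to match the tag:
10 |
11 |     VERSION = 'X.Y.Z'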
--------------------------------------------------------------------------------
/dax/tests/unit_test_assessor_wrapper.py:
--------------------------------------------------------------------------------
1 |
2 | from unittest import TestCase
3 |
4 | from dax import assessor_wrapper
5 |
6 | # TODO BenM/assessor_of_assessor/pick this up later; it isn't required for the
7 | # initial working solution
8 | class AssessorWrapperUnitTests(TestCase):
9 |
10 | def test_fn(self):
11 | pass
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | version: 2
2 | build:
3 | os: "ubuntu-20.04"
4 | tools:
5 | python: "3.8"
6 | sphinx:
7 | configuration: docs/conf.py
8 | formats: all
9 | python:
10 | install:
11 | - method: pip
12 | path: .
13 | extra_requirements:
14 | - docs
15 | - method: setuptools
16 | path: .
17 |
--------------------------------------------------------------------------------
/docs/contributors.rst:
--------------------------------------------------------------------------------
1 | Contributors
2 | ============
3 | DAX is a multi-institution collaborative effort of the following labs:
4 |
5 | `MASI `_ at Vanderbilt University, Nashville, Tennessee, USA
6 |
7 | `Center for Cognitive Medicine `_ at Vanderbilt University Medical Center, Nashville, Tennessee, USA
8 |
9 | `TIG `_ at UCL (University College London), London, UK
10 |
11 |
--------------------------------------------------------------------------------
/docs/requirements_for_dax_on_accre.rst:
--------------------------------------------------------------------------------
1 | Requirements for DAX in the ACCRE Environment
2 | =============================================
3 |
4 | Table of Contents
5 | ~~~~~~~~~~~~~~~~~
6 |
7 | 1. `Required Modules <#required-modules>`__
8 |
9 | ----------------
10 | Required Modules
11 | ----------------
12 |
13 | When running dax on ACCRE, there are some required modules that need to be loaded. They can be loaded with the following:
14 |
15 | ::
16 |
17 | module load GCCcore/.10.2.0 git/2.28.0-nodocs Python/3.8.6 pbzip2/1.1.13
18 |
--------------------------------------------------------------------------------
/test/test_subjgenproc.py:
--------------------------------------------------------------------------------
1 | # TODO: daxlauncher.upload()
2 |
3 | import logging
4 |
5 | from dax import bin, XnatUtils
6 |
7 |
8 | # Create the launcher
9 | settingspath = '/Users/boydb1/.dax/settings/settings-subjgenproc.yaml'
10 | daxlauncher = bin.read_yaml_settings(settingspath, logging.getLogger())
11 |
12 | # Get xnat connection
13 | xnat = XnatUtils.get_interface(host=daxlauncher.xnat_host)
14 |
15 | # Build
16 | print('building')
17 | project = 'DepMIND2'
18 | daxlauncher.build_project_subjgenproc(xnat, project)
19 |
20 | print('All Done!')
21 |
--------------------------------------------------------------------------------
/test/settings/test.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | attrs:
3 | priority_project:
4 | queue_limit: 400
5 | job_email: benjamin.murray@ucl.ac.uk
6 | job_email_options: a
7 | xnat_host: http://10.1.1.17
8 | #modules:
9 | #processors:
10 | yamlprocessors:
11 | - name: Proc_A
12 | filepath: processors/processor_proc_a.yaml
13 | - name: Proc_B
14 | filepath: processors/processor_proc_b.yaml
15 | - name: Proc_C
16 | filepath: processors/processor_proc_c.yaml
17 | - name: Proc_D
18 | filepath: processors/processor_proc_d.yaml
19 | - name: Proc_E
20 | filepath: processors/processor_proc_e.yaml
21 | projects:
22 | - project: proj1
23 | yamlprocessors: Proc_A, Proc_B, Proc_C, Proc_D, Proc_E
24 |
--------------------------------------------------------------------------------
/test/test_rcq_all.py:
--------------------------------------------------------------------------------
1 | '''Test rcq REDCap Queue sync by running an update.'''
2 | import logging
3 | import os
4 |
5 | import redcap
6 |
7 | from dax import XnatUtils
8 | from dax import rcq
9 |
10 | # dax manager will run builder update, launcher update, then queue sync
11 |
12 | if __name__ == "__main__":
13 | logging.basicConfig(
14 | format='%(asctime)s - %(levelname)s:%(module)s:%(message)s',
15 | level=logging.DEBUG,
16 | datefmt='%Y-%m-%d %H:%M:%S')
17 |
18 | api_url = 'https://redcap.vanderbilt.edu/api/'
19 | rc = redcap.Project(api_url, os.environ['API_KEY_DAX_RCQ'])
20 | instances = redcap.Project(api_url, os.environ['API_KEY_DAX_INSTANCES'])
21 |
22 | rcq.update(rc, instances)
23 |
--------------------------------------------------------------------------------
/dax/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | # -*- coding: utf-8 -*-
3 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
4 | # vi: set ft=python sts=4 ts=4 sw=4 et:
5 |
6 | from . import bin
7 | from . import dax_tools_utils
8 | from . import log
9 | from . import xnat_tools_utils
10 | from . import XnatUtils
11 |
12 | from .task import Task
13 | from .cluster import PBS
14 | from .launcher import Launcher
15 | from .dax_settings import DAX_Settings, DAX_Netrc
16 | from .version import VERSION as __version__
17 | from .git_revision import git_revision as __git_revision__
18 | from .XnatUtils import SpiderProcessHandler, AssessorHandler
19 | from .modules import ScanModule, SessionModule
20 | from .processors import AutoProcessor
21 |
--------------------------------------------------------------------------------
/test/processors/processor_proc_b.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | inputs:
3 | default:
4 | spider_path: pipelines/Proc_B/v1.0.0/Proc_B_v1_0_0.py
5 | working_dir: /home/ben/dax/scratch
6 | nipype_exe: proc_b.py
7 | # gmatrix: /cluster/project0/SegBiASM/DataToTryBaMoS/GMatrix4_Low3.txt
8 | # rule: /cluster/project0/SegBiASM/DataToTryBaMoS/GenericRule_CSF.txt
9 | # icbm: /cluster/project0/SegBiASM/ICBM_Priors
10 | xnat:
11 | assessors:
12 | - name: asr1
13 | proctypes: Proc_A_v1
14 | resources:
15 | - resource: SEG
16 | varname: seg
17 | command: python {spider_path} --seg {seg}
18 | attrs:
19 | suffix:
20 | xsitype: proc:genProcData
21 | walltime: 24:00:00
22 | memory: 8048
23 | ppn: 4
24 | #env: /share/apps/cmic/NiftyPipe/v2.0/setup_v2.0.sh
25 | type: session
26 |
--------------------------------------------------------------------------------
/test/processors/processor_proc_e.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | inputs:
3 | default:
4 | spider_path: pipelines/Proc_E/v1.0.0/Proc_E_v1_0_0.py
5 | working_dir: /home/ben/dax/scratch
6 | nipype_exe: proc_e.py
7 | # gmatrix: /cluster/project0/SegBiASM/DataToTryBaMoS/GMatrix4_Low3.txt
8 | # rule: /cluster/project0/SegBiASM/DataToTryBaMoS/GenericRule_CSF.txt
9 | # icbm: /cluster/project0/SegBiASM/ICBM_Priors
10 | xnat:
11 | assessors:
12 | - name: asr1
13 | proctypes: Proc_X_v1
14 | resources:
15 | - resource: SEG
16 | varname: seg
17 | command: python {spider_path} --seg {seg}
18 | attrs:
19 | suffix:
20 | xsitype: proc:genProcData
21 | walltime: 24:00:00
22 | memory: 8048
23 | ppn: 4
24 | #env: /share/apps/cmic/NiftyPipe/v2.0/setup_v2.0.sh
25 | type: session
26 |
--------------------------------------------------------------------------------
/dax/yaml_doc.py:
--------------------------------------------------------------------------------
1 |
2 | # TODO: BenM/general refactor/missing project requirement
3 | import yaml
4 | from io import StringIO
5 |
6 | from . import utilities
7 |
8 |
9 | # TODO: BenM/general refactor/document if this is staying
10 | class YamlDoc:
11 | def __init__(self):
12 | self.source_type = None
13 | self.source_id = None
14 | self.contents = None
15 |
16 | def from_string(self, source):
17 | contents = yaml.load(
18 | (StringIO.StringIO(source)), Loader=yaml.FullLoader)
19 | self.source_type = "string"
20 | self.source_id = "string source"
21 | self.contents = contents
22 | return self
23 |
24 | def from_file(self, source):
25 | contents = utilities.read_yaml(source)
26 | self.source_type = "file"
27 | self.source_id = source
28 | self.contents = contents
29 | return self
30 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Gedit backup files
2 | *.py~
3 |
4 | # Byte-compiled / optimized / DLL files
5 | __pycache__/
6 | *.py[cod]
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | .idea/
14 | env/
15 | #bin/
16 | build/
17 | develop-eggs/
18 | dist/
19 | eggs/
20 | lib/
21 | lib64/
22 | parts/
23 | sdist/
24 | var/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 |
29 | # Installer logs
30 | pip-log.txt
31 | pip-delete-this-directory.txt
32 |
33 | # Unit test / coverage reports
34 | htmlcov/
35 | .tox/
36 | .coverage
37 | .cache
38 | nosetests.xml
39 | coverage.xml
40 |
41 | # Translations
42 | *.mo
43 |
44 | # Mr Developer
45 | .mr.developer.cfg
46 | .project
47 | .pydevproject
48 |
49 | # Rope
50 | .ropeproject
51 |
52 | # Django stuff:
53 | *.log
54 | *.pot
55 |
56 | # Sphinx documentation
57 | docs/_build/
58 |
59 | # Test directory
60 | test/log_test_job_runner.txt
61 |
62 | .DS_Store
63 | dax/git_revision.py
64 |
--------------------------------------------------------------------------------
/test/processors/processor_proc_a.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | inputs:
3 | default:
4 | spider_path: pipelines/Proc_A/v1.0.0/Proc_A_v1_0_0.py
5 | working_dir: /home/ben/dax/scratch
6 | nipype_exe: proc_a.py
7 | #gmatrix: /cluster/project0/SegBiASM/DataToTryBaMoS/GMatrix4_Low3.txt
8 | #rule: /cluster/project0/SegBiASM/DataToTryBaMoS/GenericRule_CSF.txt
9 | #icbm: /cluster/project0/SegBiASM/ICBM_Priors
10 | xnat:
11 | scans:
12 | - name: scan1
13 | types: T1
14 | resources:
15 | - resource: NIFTI
16 | varname: t1
17 | - name: scan2
18 | types: FLAIR
19 | resources:
20 | - resource: NIFTI
21 | varname: flair
22 | command: python {spider_path} --t1 {t1} --flair {flair}
23 | attrs:
24 | suffix:
25 | xsitype: proc:genProcData
26 | walltime: 24:00:00
27 | memory: 8048
28 | ppn: 4
29 | #env: /share/apps/cmic/NiftyPipe/v2.0/setup_v2.0.sh
30 | type: session
31 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | Welcome to DAX's documentation!
2 | ===============================
3 |
4 | DAX is Distributed Automation for `XNAT `_
5 |
6 | DAX allows you to:
7 |
8 | - store analyzed imaging data on XNAT (datatypes)
9 | - extract information from XNAT via scripts (Xnat_tools)
10 | - run pipelines on your data in XNAT via a cluster (processors)
11 |
12 |
13 | Installation
14 | ------------
15 |
16 | To install please reference our `Install Page `_
17 |
18 | Contents:
19 |
20 | .. toctree::
21 | :maxdepth: 3
22 |
23 | installing_dax_in_a_virtual_environment
24 | dax_xnat_dataypes_install
25 | dax
26 | dax_manager
27 | contributors
28 | how_to_contribute
29 | faq
30 | processors
31 | processors_v3
32 | assessors_in_vuiis_xnat
33 | dax_command_line_tools
34 | dax_executables
35 | manage_a_project
36 | BIDS_walkthrough
37 |
--------------------------------------------------------------------------------
/test/processors/processor_proc_c.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | inputs:
3 | default:
4 | spider_path: pipelines/Proc_C/v1.0.0/Proc_C_v1_0_0.py
5 | working_dir: /home/ben/dax/scratch
6 | nipype_exe: proc_c.py
7 | # gmatrix: /cluster/project0/SegBiASM/DataToTryBaMoS/GMatrix4_Low3.txt
8 | # rule: /cluster/project0/SegBiASM/DataToTryBaMoS/GenericRule_CSF.txt
9 | # icbm: /cluster/project0/SegBiASM/ICBM_Priors
10 | xnat:
11 | scans:
12 | - name: scan1
13 | types: T1
14 | resources:
15 | - resource: NIFTI
16 | varname: cur
17 | - name: scan2
18 | types: T1
19 | select-session: prior(1)
20 | resources:
21 | - resource: NIFTI
22 | varname: prev
23 | command: python {spider_path} --cur {cur} --prev {prev}
24 | attrs:
25 | suffix:
26 | xsitype: proc:genProcData
27 | walltime: 24:00:00
28 | memory: 8048
29 | ppn: 4
30 | #env: /share/apps/cmic/NiftyPipe/v2.0/setup_v2.0.sh
31 | type: session
32 |
--------------------------------------------------------------------------------
/test/processors/processor_proc_d.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | inputs:
3 | default:
4 | spider_path: pipelines/Proc_D/v1.0.0/Proc_D_v1_0_0.py
5 | working_dir: /home/ben/dax/scratch
6 | nipype_exe: proc_c.py
7 | # gmatrix: /cluster/project0/SegBiASM/DataToTryBaMoS/GMatrix4_Low3.txt
8 | # rule: /cluster/project0/SegBiASM/DataToTryBaMoS/GenericRule_CSF.txt
9 | # icbm: /cluster/project0/SegBiASM/ICBM_Priors
10 | xnat:
11 | scans:
12 | - name: scan1
13 | types: T1
14 | select: all
15 | resources:
16 | - resource: NIFTI
17 | varname: cur
18 | - name: scan2
19 | types: T1
20 | select: all
21 | select-session: prior(1)
22 | resources:
23 | - resource: NIFTI
24 | varname: prev
25 | command: python {spider_path} --cur {cur} --prev {prev}
26 | attrs:
27 | suffix:
28 | xsitype: proc:genProcData
29 | walltime: 24:00:00
30 | memory: 8048
31 | ppn: 4
32 | #env: /share/apps/cmic/NiftyPipe/v2.0/setup_v2.0.sh
33 | type: session
34 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 VUIIS
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 | Distributed Automation for XNAT
3 | ===
4 |
5 | # Install DAX
6 |
7 | Install it with:
8 |
9 | ~~~~~~~~
10 | pip install dax
11 | ~~~~~~~~
12 |
13 | Use `--upgrade` to overwrite a previous installation.
14 |
15 | To install current main branch:
16 |
17 | ~~~~~~~~
18 | pip install git+https://github.com/VUIIS/dax.git --upgrade
19 | ~~~~~~~~
20 |
21 | or a specific branch:
22 |
23 | ~~~~~~~~
24 | pip install git+https://github.com/VUIIS/dax.git@branch_name --upgrade
25 | ~~~~~~~~
26 |
27 | # Docs
28 | [](http://dax.readthedocs.org/en/latest/?badge=latest)
29 |
30 |
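31 | # Verify
32 |
33 | A quick sanity check that the install worked (prints the installed version):
34 |
35 | ~~~~~~~~
36 | python -c "import dax; print(dax.__version__)"
37 | ~~~~~~~~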
--------------------------------------------------------------------------------
/docs/dax.rst:
--------------------------------------------------------------------------------
1 | Source Documentation
2 | ====================
3 |
4 | :mod:`dax` -- Root package
5 | **************************
6 |
7 | .. automodule:: dax
8 | :members:
9 |
10 | :mod:`dax.task` -- Task class
11 | *****************************
12 |
13 | .. automodule:: dax.task
14 | :members:
15 |
16 | :mod:`dax.spiders` -- Spider class
17 | **********************************
18 |
19 | .. automodule:: dax.spiders
20 | :members:
21 |
22 | :mod:`dax.processors` -- Processor class
23 | ****************************************
24 |
25 | .. automodule:: dax.processors
26 | :members:
27 |
28 | :mod:`dax.log` -- Logging utility
29 | *********************************
30 |
31 | .. automodule:: dax.log
32 | :members:
33 |
34 | :mod:`dax.bin` -- Responsible for launching, building and updating a Task
35 | *************************************************************************
36 |
37 | .. automodule:: dax.bin
38 | :members:
39 |
40 | :mod:`dax.XnatUtils` -- Collection of utilities for upload/download and general access
41 | **************************************************************************************
42 |
43 | .. automodule:: dax.XnatUtils
44 | :members:
45 |
--------------------------------------------------------------------------------
/dax/schema/project_processor.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | containers: list(include('container'))
3 | requirements:
4 | walltime: str()
5 | memory: str()
6 | inputs:
7 | vars: map(required=False)
8 | xnat:
9 | subjects:
10 | assessors: list(include('assessor'), required=False)
11 | sessions: list(include('session'), required=False)
12 | pre: include('command', required=False)
13 | command: include('command')
14 | post: include('command', required=False)
15 | description: str(required=False)
16 | ---
17 | command:
18 | type: enum('singularity_run', 'singularity_exec')
19 | container: str()
20 | opts: str(required=False)
21 | extraopts: str(required=False)
22 | args: str(required=False)
23 | container:
24 | name: str()
25 | path: str()
26 | source: str(required=False)
27 | session:
28 | types: str(required=False)
29 | select: str(required=False)
30 | assessors: list(include('assessor'), required=False)
31 | scans: list(include('scan'), required=False)
32 | assessor:
33 | types: str(required=False)
34 | proctypes: str(required=False)
35 | nifti: str(required=False)
36 | resources: list(include('resource'), required=False)
37 | scan:
38 | types: str()
39 | nifti: str(required=False)
40 | resources: list(include('resource'), required=False)
41 | resource:
42 | resource: str()
43 | ftype: enum('FILE', 'DIR', 'DIRJ', required=False)
44 | fmatch: str(required=False)
45 | fdest: str(required=False)
46 |
--------------------------------------------------------------------------------
/test/README.txt:
--------------------------------------------------------------------------------
1 | Dax Integration Test README
2 |
3 | Version 1.0 - 27/06/2014
4 |
5 |
6 | This folder contains a set of tools that make it simpler to integration test DAX
7 | installs. As of version 1.0 of this document, these tools can be used to test
8 | DAX manually, without the need for most of the additional periphery that this
9 | would require, such as work queues and so forth. Subsequently, it will become
10 | part of a continuous integration / continuous delivery test suite.
11 |
12 |
13 | Contents:
14 |
15 | ./job_template.txt
16 | The template that invokes test_job_runner.py. This must be copied into
17 | ~/.dax_templates/job_template.txt (see the example at the end of this file)
18 |
19 |
20 | ./test_job_runner.py
21 | The test job runner. It is invoked by the command generated through
22 | job_template.txt and in turn invokes the pipeline processor for the appropriate
23 | pipeline
24 |
25 |
26 | ./processors/
27 | A series of processor yaml definitions to cover the various relationships that
28 | can be created between scans and processor types with their corresponding
29 | assessors
30 |
31 | ./pipelines/test_pipeline.py
32 | Shared functionality for pipelines that is used by the different processor
33 | pipelines to generate completed jobs and result artefacts
34 |
35 | ./pipelines/
36 | Processor pipelines corresponding to the various test processors, that generate
37 | outputs for the assessors when they are mock executed
38 |
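39 |
40 | For example, installing the template is a one-time step:
41 |
42 |     mkdir -p ~/.dax_templates
43 |     cp ./job_template.txt ~/.dax_templates/job_template.txt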
--------------------------------------------------------------------------------
/docs/assessors_in_vuiis_xnat.rst:
--------------------------------------------------------------------------------
1 | Assessors in VUIIS XNAT
2 | =======================
3 |
 4 | An assessor represents processed data on XNAT. All files produced by a script, whether from one scan, multiple scans, or other processed data, need to be uploaded to an assessor.
 5 |
 6 | The VUIIS XNAT uses two kinds of assessors:
 7 |
 8 | - proc:genProcData : the generic assessor type
 9 | - fs:fsData : the specific FreeSurfer assessor type that we created (deprecated)
10 |
11 | We use these statuses for assessors:
12 |
13 | - NO_DATA : no data exists on the sessions to be able to run
14 | - NEED_INPUTS : input data has not been created yet for a scan, multiple scans, or another assessor; sometimes this means the inputs it needs aren't present, other times everything is present but the assessor hasn't been built yet
15 | - NEED_TO_RUN : ready to be launched on the cluster (ACCRE). All input data for the process to run exists
16 | - JOB_RUNNING : the assessor is built and the job is running on ACCRE or the job is completed and is waiting to be uploaded
17 | - JOB_FAILED : the job failed on the cluster
18 | - READY_TO_UPLOAD : Job done, waiting for the results to be uploaded to XNAT from the cluster
19 | - UPLOADING : in the process of uploading the resources on XNAT
20 | - READY_TO_COMPLETE : the assessor contains all the files but we still need to finish up (this includes getting the walltime and memory used on ACCRE)
21 | - COMPLETE : all finished
22 |
23 | There is a QA status that is managed by the project owner. This field defaults to "Needs QA". Other values can be set as desired. If set to "Rerun", the assessor will automatically be deleted and rerun.
24 |
25 |
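26 | Assessor labels follow the convention PROJECT-x-SUBJECT-x-SESSION-x-ID. A minimal
27 | sketch of building and parsing a label with dax.assessor_utils (illustrative values):
28 |
29 | ::
30 |
31 |     from dax import assessor_utils
32 |
33 |     # Build a full assessor label from its components
34 |     label = assessor_utils.full_label(
35 |         'proj1', 'subj1', 'sess1', '01234567-89ab-cdef-0123-456789abcdef')
36 |
37 |     # Split a full label back into a dict of its components
38 |     info = assessor_utils.parse_full_assessor_name(label)
39 |     print(info['project_id'], info['subject_label'], info['session_label'])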
--------------------------------------------------------------------------------
/test/test_job_runner.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import os
3 | import sys
4 | import time
5 |
6 | from dax import dax_settings
7 |
8 |
 9 | if __name__ == '__main__':
10 |
11 |
12 |     with open('./log_test_job_runner.txt', 'a') as f:
13 |         ts = datetime.datetime.fromtimestamp(time.time())
14 |         f.write('{}: {}\n'.format(ts, sys.argv))
15 |
16 |         settings = dax_settings.DAX_Settings()
17 |         results_dir = settings.get_results_dir()
18 |         try:
19 |             i = sys.argv.index('-a')
20 |             folder_to_create = os.path.join(results_dir, sys.argv[i+1])
21 |             if not os.path.exists(folder_to_create):
22 |                 os.makedirs(folder_to_create)
23 |             file_to_create = os.path.join(folder_to_create, 'READY_TO_UPLOAD.txt')
24 |             print('file_to_create =', file_to_create)
25 |             with open(file_to_create, 'w') as g:
26 |                 g.write('')
27 |         except Exception as e:
28 |             f.write('{}: {}\n'.format(ts, e))
29 |
30 |
31 |
32 | # 2018-06-27 15:38:24.683594: [
33 | # '/home/ben/git/refactordax/bin/dax_tools/test_job_runner.py',
34 | # 'python',
35 | # '/home/ben/git/comic100_dax_config/pipelines/Proc_A/v1.0.0/Proc_A_v1_0_0.py',
36 | # '--t1',
37 | # '/projects/proj1/subjects/subj1/experiments/sess1/scans/1/resources/NIFTI',
38 | # '--flair',
39 | # '/projects/proj1/subjects/subj1/experiments/sess1/scans/11/resources/NIFTI',
40 | # '--working_dir',
41 | # '/home/ben/dax/scratch',
42 | # '--nipype_exe',
43 | # 'proc_a.py',
44 | # '-a',
45 | # 'proj1-x-subj1-x-sess1-x-54bd8ffc-0a2a-49c4-b0d1-00c159192d2a',
46 | # '-d',
47 | # '/home/ben/dax/scratch/proj1-x-subj1-x-sess1-x-54bd8ffc-0a2a-49c4-b0d1-00c159192d2a',
48 | # '/home/ben/dax/upload/OUTLOG/proj1-x-subj1-x-sess1-x-54bd8ffc-0a2a-49c4-b0d1-00c159192d2a.output']
49 |
--------------------------------------------------------------------------------
/bin/supplemental_tools/Xnatreport_assessor.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import json
4 | import sys
5 | import pandas
6 | import pyxnat
7 | from dax import XnatUtils
8 | from dax import utilities
9 |
10 | if len(sys.argv) != 4:
11 |     print('Usage:')
12 |     print('python Xnatreport_assessor.py <project> <proctype> <outfile>')
13 | sys.exit()
14 |
15 |
16 | project = sys.argv[1]
17 | proctype = sys.argv[2]
18 | outfile = sys.argv[3]
19 |
20 | xnat = XnatUtils.get_interface()
21 |
22 | Assrs = xnat.list_project_assessors(project)
23 |
24 | R = list()
25 | for assr in Assrs :
26 |
27 | if assr.get('proctype') != proctype : continue
28 |
29 | #print(assr['assessor_label'])
30 |
31 | # Get desired fields
32 | thisR = {}
33 | for key in ('project_label','subject_label','session_label','proctype',
34 | 'assessor_id','procstatus','qcstatus','assessor_label') :
35 | thisR[key] = assr[key]
36 |
37 | # Clean up the inputs field, split on / and keep the last bit
38 | inps = utilities.decode_url_json_string(assr['inputs'])
39 |     #inps = json.loads(assr['inputs'].replace('&quot;', '"'))
40 | for key in inps.keys() :
41 | thisR[key] = inps[key].split('/')[-1]
42 |
43 | # We need to explicitly copy here to avoid overwriting R
44 | R.append(thisR.copy())
45 |
46 |
47 | D = pandas.DataFrame(R)
48 |
49 | # Reorder columns
50 | colorder = ('project_label','subject_label','session_label','proctype',
51 | 'assessor_id','procstatus','qcstatus')
52 | oldcols = D.columns.tolist()
53 | newcols = list()
54 | for col in colorder :
55 | newcols.append(col)
56 | oldcols.remove(col)
57 | newcols.extend(oldcols)
58 |
59 | D.to_csv(outfile,index=False,columns=newcols)
60 |
61 |
--------------------------------------------------------------------------------
/docs/how_to_contribute.rst:
--------------------------------------------------------------------------------
1 | How To Contribute
2 | =================
3 |
 4 | We encourage all collaborations! However, we follow a pull-request workflow to keep the code-review process simple.
5 | If you would like to contribute, we kindly request that any of your work be done in
6 | a branch. Rules for branching and merging are outlined below:
7 |
 8 | #. Branches - The scope of your branch should be narrow. Do not make one branch that changes documentation and also refactors how task.py works; those should be two totally separate branches.
9 |
10 | #. Testing - You should test your branch before making a pull request. Do not make a pull request with untested code.
11 |
12 | #. Committing - Use helpful commit messages; avoid messages like "updates", "bug fix", or "updated a few files". Make many small commits rather than one bulk commit of all of your changes, since bulk commits are hard for others to review.
13 |
14 | #. Pull request - When you are ready to make a pull request, please try to itemize all of the changes that you made in at least moderate depth. This will alert everyone reviewing the code of possible things to check to make sure that you didn't break anything.
15 |
16 | #. Merging - Do NOT merge your own pull request. Contributors should review each and every pull request before it is merged into the master branch. Please allow at least a few days before commenting and asking for status. If the changes are extensive, please allow at least a few weeks.
17 |
18 | #. Master branch - NEVER commit to the master branch directly unless there is a serious bug fix.
19 |
20 | If you are unfamiliar with branches in GitHub, please see the link below:
21 |
22 | `Working with Branches `_
23 |
--------------------------------------------------------------------------------
/dax/validate.py:
--------------------------------------------------------------------------------
1 | import os
2 | import yaml
3 |
4 | import yamale
5 |
6 | from .errors import DaxError
7 |
8 |
9 | def validate(filename):
10 | schema_file = None
11 | schema = None
12 | data = None
13 | contents = {}
14 |
15 | # Determine which schema should be used based on yaml contents
16 | try:
17 | with open(filename, 'r') as f:
18 | contents = yaml.safe_load(f)
19 |
20 |         if contents.get('inputs', {}).get('xnat', {}).get('subjects'):
21 |             schema_file = os.path.realpath(os.path.join(
22 |                 os.path.dirname(__file__),
23 |                 'schema',
24 |                 'project_processor.yaml'))
25 |         elif contents.get('inputs', {}).get('xnat', {}).get('sessions'):
26 |             schema_file = os.path.realpath(os.path.join(
27 |                 os.path.dirname(__file__),
28 |                 'schema',
29 |                 'subject_processor.yaml'))
30 |         else:
31 |             schema_file = os.path.realpath(os.path.join(
32 |                 os.path.dirname(__file__),
33 |                 'schema',
34 |                 'processor.yaml'))
35 |     except Exception as err:
36 |         raise DaxError('failed to determine processor type of yaml, cannot validate: {}'.format(err))
37 |
38 | # Load the schema
39 | try:
40 | schema = yamale.make_schema(schema_file)
41 | except Exception as err:
42 | raise DaxError('failed to read schema:{}:{}'.format(schema_file, err))
43 |
44 | # Load the file to be validated
45 | try:
46 | data = yamale.make_data(filename)
47 | except Exception as err:
48 | raise DaxError('failed to read file:{}:{}'.format(filename, err))
49 |
50 | # Validate data against the schema
51 | try:
52 | yamale.validate(schema, data)
53 | except ValueError as err:
54 | raise DaxError('validate failed:{}:{}'.format(schema_file, err))
55 |
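56 |
57 | # A minimal usage sketch (hypothetical path): returns None when the yaml
58 | # conforms to the schema selected from its contents, raises DaxError otherwise:
59 | #
60 | #     validate('/path/to/my_processor.yaml')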
--------------------------------------------------------------------------------
/bin/Xnat_tools/XnatAssessorFile:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from argparse import ArgumentParser
4 | from dax import XnatUtils
5 | import os
6 | import requests
7 |
8 | __exe__ = os.path.basename(__file__)
9 | __purpose__ = "Download file from specific assessor resource on given project"
10 |
11 |
12 | def parse_args():
13 | ap = ArgumentParser(prog=__exe__, description=__purpose__)
14 | ap.add_argument('--project', dest='project', default=None, help='Project')
15 | ap.add_argument('--assessor', dest='ass_name', default=None, help='Assessor Name')
16 | ap.add_argument('--resource', dest='res_name', default=None, help='Assessor Resource')
17 | ap.add_argument('--file', dest='file_name', default=None, help='File Name')
18 | ap.add_argument('--dir', dest='download_dir', default=None, help='Download Directory')
19 | return ap.parse_args()
20 |
21 |
22 | if __name__ == '__main__':
23 | args = parse_args()
24 |
25 | with XnatUtils.get_interface() as xnat:
26 | assessors = xnat.list_project_assessors(args.project)
27 |
28 | if not os.path.exists(args.download_dir):
29 | os.makedirs(args.download_dir)
30 |
31 | for assessor in assessors:
32 | if args.ass_name in assessor['proctype'] and 'COMPLETE' in assessor['procstatus']:
33 | req = f"https://xnat.vanderbilt.edu/xnat/data/projects/{assessor['project_id']}/subjects/{assessor['subject_label']}/experiments/\
34 | {assessor['session_label']}/assessors/{assessor['assessor_label']}/out/resources/{args.res_name}/files/{args.file_name}"
35 | filename = f"{assessor['assessor_label']}_{args.file_name}"
36 | file_path = os.path.join(args.download_dir, filename)
37 | with requests.get(req) as r:
38 | r.raise_for_status()
39 | with open(file_path,'wb') as f:
40 | for chunk in r.iter_content(chunk_size=8192):
41 | f.write(chunk)
42 |
--------------------------------------------------------------------------------
/docs/faq.rst:
--------------------------------------------------------------------------------
1 | FAQ
2 | ===
3 |
 4 | These FAQs assume that you have read the XNAT documentation and/or are familiar with navigating through the web UI.
5 | If you are not, you can read the XNAT documentation `here `_.
6 |
7 | #. What is DAX?
8 | DAX is an open source project that uses the pyxnat wrapper for the REST api to automate pipeline running on a DRMAA compliant grid.
9 |
10 | #. What are Modules?
11 | Modules are a special class in DAX. They represent, generally, a task that should not be performed on the grid. The purpose for this was to not fill up the grid queue with jobs that take 20-30 seconds. Examples of such tasks could be converting a DICOM to a NIfTI file, changing the scan type, archiving a session from the prearchive, or performing skull-stripping. As you can see, these tasks can all be considered "light-weight" and thus probably don't have a place on the grid.
12 |
13 | #. What are Spiders?
14 | Spiders are a python script. The purpose of the script is to download data from XNAT, run an image processing pipeline, and then prepare the data to be uploaded to XNAT. Spiders are run on the grid because they can take hours to days.
15 |
16 | #. How do I know the EXACT command line call that was made?
17 | The PBS resource contains the script that was submitted to the grid scheduler for execution. You can view this file for the exact command line call(s) that were executed on the grid.
18 |
19 | #. I think I found a bug, what should I do?
20 | The easiest way to get a bug fixed is to post as much information as you can on the `DAX github issue tracker `_. If possible, please post the command line call you made (with any sensitive information removed) and the stack trace or error log in question.
21 |
22 | #. I have an idea of something I want to add. How do I go about adding it?
23 | Great! We'd love to see what you have to include! Please read the guidelines on how to contribute.
24 |
--------------------------------------------------------------------------------
/dax/tests/unit_test_xnatutils.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import json
4 |
5 | from dax import XnatUtils
6 | from dax import assessor_utils
7 |
8 | from dax.tests import unit_test_entity_common as common
9 |
10 |
11 | class XnatUtilsUnitTest(TestCase):
12 |
13 | def test_get_proctype(self):
14 | proctype, version = XnatUtils.get_proctype(common.spider_tiv_from_gif)
15 | self.assertEqual(proctype, 'BrainTivFromGIF_v1')
16 | self.assertEqual(version, '1.0.0')
17 |
18 | def test_get_assessor_inputs(self):
19 | class TestAssessor:
20 | class TestAttrs:
21 |
22 | def __init__(self, datatype, property):
23 | self.datatype = datatype
24 | self.property = property
25 |
26 | def get(self, name):
27 | if name == self.datatype + '/' + self.property:
28 | return json.dumps({'a': 'b'})
29 | else:
30 | raise IndexError("it's an index error")
31 |
32 | def __init__(self, datatype, property):
33 | self.attrs = TestAssessor.TestAttrs(datatype, property)
34 | self.datatype_ = datatype
35 |
36 | def datatype(self):
37 | return self.datatype_
38 |
39 | assr = TestAssessor('proc:genProcData', 'inputs')
40 | assr2 = TestAssessor('something', 'else')
41 | self.assertEqual(XnatUtils.get_assessor_inputs(assr), {'a': 'b'})
42 | self.assertEqual(XnatUtils.get_assessor_inputs(assr2), None)
43 |
44 | def test_create_full_assessor_name(self):
45 | test_entries = [
46 | ['proj1', 'subj1', 'sess1', '01234567-89ab-cdef-0123-456789abcdef']
47 | ]
48 |
49 | test_names = ['-x-'.join(t) for t in test_entries]
50 |
51 | for t in range(len(test_entries)):
52 | name = assessor_utils.full_label(*test_entries[t])
53 | self.assertEqual(test_names[t], name)
54 |
--------------------------------------------------------------------------------
/dax/tests/unit_test_common_pyxnat_objects.py:
--------------------------------------------------------------------------------
1 |
2 | # TODO BenM/assessor_of_assessor/pick this up later; it isn't required for the
3 | # initial working solution
4 |
5 | class TestPyxnatSession:
6 | def __init__(self, project, subject, session, scans, assessors):
7 | self.scans_ = scans
8 | self.assessors_ = assessors
9 | self.project = project
10 | self.subject = subject
11 | self.session = session
12 |
13 | def scans(self):
14 | return self.scans_
15 |
16 | def assessors(self):
17 | return self.assessors_
18 |
19 |
20 | class TestAttrs:
21 | def __init__(self, properties):
22 | pass
23 |
24 |
25 | class TestPyxnatScan:
26 | def __init__(self, project, subject, session, scanjson):
27 | self.scanjson = scanjson
28 | self.project = project
29 | self.subject = subject
30 | self.session = session
31 | uristr = '/data/project/{}/subjects/{}/experiments/{}/scans/{}'
32 | self._uri = uristr.format(project,
33 | subject,
34 | session,
35 | self.scanjson['label'])
36 |
37 | def id(self):
38 | return self.scanjson['id']
39 |
40 | def label(self):
41 | return self.scanjson['label']
42 |
43 |
44 | class TestPyxnatAssessor:
45 | def __init__(self, project, subject, session, asrjson):
46 | self.asrjson = asrjson
47 | self.project = project
48 | self.subject = subject
49 | self.session = session
50 | uristr = '/data/project/{}/subjects/{}/experiments/{}/assessors/{}'
51 | self._uri = uristr.format(project,
52 | subject,
53 | session,
54 | self.asrjson['label'])
55 |
56 | def id(self):
57 | return self.asrjson['id']
58 |
59 | def label(self):
60 | return self.asrjson['label']
61 |
62 | def inputs(self):
63 | return self.asrjson['xsitype'] + '/' + self.asrjson['inputs']
64 |
--------------------------------------------------------------------------------
/dax/test_suppdf.py:
--------------------------------------------------------------------------------
1 | from .suppdf import make_suppdf
2 |
3 | if __name__ == '__main__':
4 | infile = '/Users/boydb1/TEST-fmri_msit/report_test3b.pdf'
5 | outfile = '/Users/boydb1/TEST-fmri_msit/report_test3b_sup.pdf'
6 | info = {}
7 |
8 | info['assessor'] = 'REMBRANDT-x-28034-x-28034a-x-fmri_msit_v2-x-6a8dcef0'
9 | info['proctype'] = 'fmri_msit_v2'
10 | info['procversion'] = '2.0.0'
11 | info['procdate'] = '2021/10/31'
12 | info['description'] = '''
13 | 1. Write conditions file
14 | 2. Write contrasts file
15 | 3. Realign/Motion-correction of FMRI
16 | 4. Coregister/Estimate mean FMRI to original T1
17 | 5. Segment & Normalize T1 to MNI space and segment
18 | 6. Apply warps and reslice FMRI and T1
19 | 7. ART outliers using CONN liberal settings
20 | 8. Skull-strip mean functional and apply to others
21 | 9. Create smoothed fmri'''
22 |
23 | info['inputs'] = (
24 | ('scan_anat', '3', '/projects/REMBRANDT/subjects/28043/experiments/28043a/scans/3'),
25 | ('scan_fmri', '14', '/projects/REMBRANDT/subjects/28043/experiments/28043a/scans/14'))
26 |
27 | info['outputs'] = [
28 | {
29 | 'path': 'report*.pdf',
30 | 'type': 'FILE',
31 | 'resource': 'PDF'
32 | },
33 | {
34 | 'path': 'stats.txt',
35 | 'type': 'FILE',
36 | 'resource': 'STATS',
37 | },
38 | {
39 | 'path': 'PREPROC',
40 | 'type': 'DIR',
41 | 'resource': 'PREPROC',
42 | },
43 | {
44 | 'path': '1stLEVEL',
45 | 'type': 'DIR',
46 | 'resource': '1stLEVEL'
47 | }
48 | ]
49 |
50 | info['session'] = {
51 | 'PROJECT': 'REMBRANDT',
52 | 'SUBJECT': '28043',
53 | 'SESSION': '28043a'}
54 |
55 | info['proc'] = {
56 | 'dax_version': '2.3.0',
57 | 'dax_manager': 'vuiis_daily_singularity@hickory'}
58 |
59 | info['job'] = {
60 | 'jobid': '33269491',
61 | 'duration': '00:34:07',
62 | 'memory': '2296336'}
63 |
64 | make_suppdf(infile, outfile, info)
65 |
--------------------------------------------------------------------------------
/dax/schema/processor.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | procyamlversion: enum('3.0.0-dev.0')
3 | containers: list(include('container'))
4 | requirements:
5 | walltime: str()
6 | memory: str()
7 | inputs:
8 | vars: map(required=False)
9 | xnat:
10 | scans: list(include('scan'), required=False)
11 | assessors: list(include('assessor'), required=False)
12 | attrs: list(include('attr'), required=False)
13 | filters: list(include('filter'), required=False)
14 | outputs: list(include('output'))
15 | pre: include('command', required=False)
16 | command: include('command')
17 | post: include('command', required=False)
18 | description: str(required=False)
19 | jobtemplate: str(required=False)
20 | ---
21 | command:
22 | type: enum('singularity_run', 'singularity_exec')
23 | container: str()
24 | opts: str(required=False)
25 | extraopts: str(required=False)
26 | args: str(required=False)
27 | container:
28 | name: str()
29 | path: str()
30 | source: str(required=False)
31 | assessor:
32 | name: str()
33 | types: str(required=False)
34 | proctypes: str(required=False)
35 | nifti: str(required=False)
36 | resources: list(include('resource'), required=False)
37 | needs_qc: bool(required=False)
38 | scan:
39 | name: str()
40 | types: str()
41 | nifti: str(required=False)
42 | resources: list(include('resource'), required=False)
43 | needs_qc: bool(required=False)
44 | skip_unusable: bool(required=False)
45 | keep_multis: str(required=False)
46 | output:
47 | path: str(required=False)
48 | resource: str(required=False)
49 | type: enum('FILE', 'DIR', required=False)
50 | dir: str(required=False)
51 | stats: str(required=False)
52 | pdf: str(required=False)
53 | resource:
54 | resource: str()
55 | ftype: enum('FILE', 'DIR', 'DIRJ', required=False)
56 | fmatch: str(required=False)
57 | fdest: str(required=False)
58 | varname: str(required=False)
59 | fmulti: enum('any1', required=False)
60 | attr:
61 | varname: str()
62 | object: enum('subject', 'session', 'scan', 'assessor')
63 | attr: str()
64 | ref: str(required=False)
65 | filter:
66 | type: enum('match')
67 | inputs: str()
68 |
--------------------------------------------------------------------------------
/bin/freesurfer_tools/fsview:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | '''
5 | Load freesurfer subject in freeview
6 |
7 | @author: Brian D. Boyd, Psychiatry, Vanderbilt University
8 | '''
9 |
10 | import os
11 | import sys
12 |
13 |
14 | __copyright__ = 'Copyright 2013 Vanderbilt University. All Rights Reserved'
15 | __exe__ = os.path.basename(__file__)
16 | __author__ = 'Brian Boyd'
17 | __purpose__ = "Load FreeSurfer subject in freeview"
18 | FREEVIEW_CMD = '''freeview -v {ls_path}/mri/T1.mgz:visible=1 {ls_path}/mri/\
19 | aparc+aseg.mgz:colormap=lut:opacity=0.7:visible=0 {ls_path}/mri/\
20 | wm.mgz:colormap=heat:opacity=0.7:visible=1 {ls_path}/mri/\
21 | brainmask.mgz:visible=1 -f {ls_path}/surf/\
22 | lh.white:edgecolor=blue:edgethickness=1 {ls_path}/surf/\
23 | lh.pial:edgecolor=red:edgethickness=1 {ls_path}/surf/\
24 | rh.white:edgecolor=blue:edgethickness=1 {ls_path}/surf/\
25 | rh.pial:edgecolor=red:edgethickness=1'''
26 |
27 |
28 | def parse_args():
29 | """
30 | Method to parse arguments base on ArgumentParser
31 |
32 | :return: parser object parsed
33 | """
34 | from argparse import ArgumentParser
35 | ap = ArgumentParser(prog=__exe__, description=__purpose__)
36 | ap.add_argument('session', help='Session Label')
37 | ap.add_argument('-sd', '--subjects_dir', dest='subjects_dir',
38 | help='Subjects Directory',
39 | default=os.environ.get('SUBJECTS_DIR', '/tmp'))
40 | return ap.parse_args()
41 |
42 |
43 | if __name__ == '__main__':
44 | args = parse_args()
45 | sess = args.session
46 | subjects_dir = args.subjects_dir
47 |
48 | local_subj_path = os.path.join(subjects_dir, sess)
49 |
50 | if not os.path.exists(local_subj_path):
51 | print('ERROR:cannot load, %s not found in local FreeSurfer subjects \
52 | directory.' % (sess))
53 | sys.exit(1)
54 |
55 | cmd = FREEVIEW_CMD.format(ls_path=local_subj_path)
56 |
57 | cp_file_path = os.path.join(local_subj_path, 'tmp', 'control.dat')
58 | if os.path.isfile(cp_file_path):
59 | cmd += ' -c {}:radius=1'.format(cp_file_path)
60 |
61 | print('Launching freeview with command: {}'.format(cmd))
62 | os.system(cmd)
63 |
--------------------------------------------------------------------------------
/dax/assessor_utils.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 | SGP_PATTERN = r'^(?!.*-x-.*-x-.*-x-.*-x-)[\w-]+(?:-x-[\w-]+){2}_v[0-9]+-x-[0-9a-f]+$'
4 |
5 | def full_label(project, subject, session, assessor):
6 | return '-x-'.join([project, subject, session, assessor])
7 |
8 |
9 | def full_label_from_assessor(assessor):
10 | assessor_label = assessor.label()
11 | if '-x-' in assessor_label:
12 | return assessor_label
13 |
14 | components = list()
15 | components.append(assessor_label)
16 | entity = assessor
17 | while True:
18 | entity = entity.parent()
19 | if entity is None:
20 | break
21 | components.append(entity.label())
22 | return '-x-'.join(reversed(components))
23 |
24 |
25 | def parse_full_assessor_name(assessor_name):
26 | elements = assessor_name.split('-x-')
27 | assrdict = dict()
28 |
29 | if is_sgp_assessor(assessor_name):
30 | assrdict = dict(list(zip([
31 | 'project_id', 'subject_label', 'session_label', 'label'],
32 | [elements[0], elements[1], '', assessor_name])))
33 | elif len(elements) == 5:
34 | # relabel is in use or old style with scan id in label
35 | assrdict = dict(list(zip(
36 | ['project_id', 'subject_label', 'session_label', 'label'],
37 | [elements[0], elements[1], elements[2], assessor_name])))
38 | elif len(elements) == 4:
39 | if len(elements[3]) == 36:
40 | # new style label with uuid
41 | assrdict = dict(list(zip(
42 | ['project_id', 'subject_label', 'session_label', 'label'],
43 | elements)))
44 | else:
45 | # old style label
46 | assrdict = dict(list(zip(
47 | ['project_id', 'subject_label', 'session_label', 'label'],
48 | [elements[0], elements[1], elements[2], assessor_name])))
49 | else:
50 | raise ValueError(("'assessor_name' parameter '{}' is not a valid full "
51 | "assessor name".format(assessor_name)))
52 |
53 | return assrdict
54 |
55 |
56 | def is_sgp_assessor(assessor):
57 |     # Return True if the assessor label matches the SGP pattern
58 |     return re.match(SGP_PATTERN, assessor) is not None
59 |
--------------------------------------------------------------------------------
/dax/schema/subject_processor.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | procyamlversion: enum('3.0.0-dev.0')
3 | containers: list(include('container'))
4 | requirements:
5 | walltime: str()
6 | memory: str()
7 | inputs:
8 | vars: map(required=False)
9 | xnat:
10 | sessions: list(include('session'), required=False)
11 | attrs: list(include('attr'), required=False)
12 | filters: list(include('filter'), required=False)
13 | outputs: list(include('output'))
14 | pre: include('command', required=False)
15 | command: include('command')
16 | post: include('command', required=False)
17 | description: str(required=False)
18 | jobtemplate: str(required=False)
19 | ---
20 | command:
21 | type: enum('singularity_run', 'singularity_exec')
22 | container: str()
23 | opts: str(required=False)
24 | extraopts: str(required=False)
25 | args: str(required=False)
26 | container:
27 | name: str()
28 | path: str()
29 | source: str(required=False)
30 | session:
31 | type: str()
32 | scans: list(include('scan'), required=False)
33 | assessors: list(include('assessor'), required=False)
34 | assessor:
35 | name: str()
36 | types: str(required=False)
37 | proctypes: str(required=False)
38 | nifti: str(required=False)
39 | resources: list(include('resource'), required=False)
40 | needs_qc: bool(required=False)
41 | scan:
42 | name: str()
43 | types: str()
44 | nifti: str(required=False)
45 | resources: list(include('resource'), required=False)
46 | needs_qc: bool(required=False)
47 | skip_unusable: bool(required=False)
48 | keep_multis: str(required=False)
49 | output:
50 | path: str(required=False)
51 | resource: str(required=False)
52 | type: enum('FILE', 'DIR', required=False)
53 | dir: str(required=False)
54 | stats: str(required=False)
55 | pdf: str(required=False)
56 | resource:
57 | resource: str()
58 | ftype: enum('FILE', 'DIR', 'DIRJ', required=False)
59 | fmatch: str(required=False)
60 | fdest: str(required=False)
61 | varname: str(required=False)
62 | fmulti: enum('any1', required=False)
63 | ddest: str(required=False)
64 | attr:
65 | varname: str()
66 | object: enum('subject', 'session', 'scan', 'assessor')
67 | attr: str()
68 | ref: str(required=False)
69 | filter:
70 | type: enum('match')
71 | inputs: str()
72 |
--------------------------------------------------------------------------------
/bin/supplemental_tools/XnatChangePrearchiveProject:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | from argparse import ArgumentParser, RawDescriptionHelpFormatter
5 | from dax import XnatUtils
6 | import json
7 | import os
8 | import pandas as pd
9 | import pyxnat
10 | import re
11 | import requests
12 | import sys
13 | import time
14 |
15 |
16 | def parse_args():
17 | argp = ArgumentParser(prog='SwitchProjects', formatter_class=RawDescriptionHelpFormatter)
18 | argp.add_argument('--newProj', dest='newProj', default=None, help='Project we want to move to')
19 | argp.add_argument('--txt', dest='txtFile', default=None, help='List of sessions that we want to move')
20 | return argp
21 |
22 |
23 | if __name__ == '__main__':
24 | PARSER = parse_args()
25 | OPTIONS = PARSER.parse_args()
26 |
27 | xnat = XnatUtils.get_interface()
28 | archive_uri = '/data/services/archive'
29 | prearchive_uri = '/data/services/prearchive/move'
30 |     with open(OPTIONS.txtFile, 'r') as txtFile:
31 |         lines = txtFile.readlines()
32 |
33 | req = requests.get("https://xnat.vanderbilt.edu/xnat/data/prearchive/projects")
34 | req_con = json.loads(req.content)
35 |
36 | for line in lines:
37 | splitLine = line.split()
38 | sess_id = splitLine[2].strip()
39 | if sess_id in req.text:
40 | for key, value in req_con.items():
41 | for ke, va in value.items():
42 | for x in va:
43 |                     # each entry in va describes a prearchive session
44 | if sess_id in x['url']:
45 | url = x['url']
46 | subject = splitLine[0].strip()
47 | session = splitLine[1].strip()
48 |                         print('Moving Session {} to project {}'.format(sess_id, OPTIONS.newProj))
49 |                         post_body = 'src={}&newProject={}&async=false'.format(url, OPTIONS.newProj)
50 |                         xnat._exec(prearchive_uri, 'POST', post_body, {'content-type': 'application/x-www-form-urlencoded'})
51 |                         time.sleep(20)
52 |                         post_body = 'src={}&project={}&subject={}&session={}'.format(url, OPTIONS.newProj, subject, session)
53 |                         xnat._exec(archive_uri, 'POST', post_body, {'content-type': 'application/x-www-form-urlencoded'})
54 |
--------------------------------------------------------------------------------
/bin/Xnat_tools/XnatRandomSessionList:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | '''
4 | Gets a list of random sessions from XNAT
5 |
6 | @author: Steve Daemon, Electrical Engineering, Vanderbilt University
7 | '''
8 |
9 |
10 |
11 | import os
12 |
13 | from dax import XnatUtils
14 |
15 |
16 | __copyright__ = 'Copyright 2013 Vanderbilt University. All Rights Reserved'
17 | __exe__ = os.path.basename(__file__)
18 | __author__ = 'sdaemon'
19 | __purpose__ = 'Program to get a random list of sessions'
20 | __description__ = """What is the script doing :
21 | * Return a random list of sessions from XNAT
22 | """
23 |
24 |
25 | def parse_args():
26 |     """
27 |     Method to parse arguments based on ArgumentParser
28 |
29 |     :return: parsed argument namespace
30 |     """
31 | from argparse import ArgumentParser, RawTextHelpFormatter
32 | parser = ArgumentParser(prog=__exe__, description=__description__,
33 | formatter_class=RawTextHelpFormatter)
34 | parser.add_argument('--host', dest='host', default=None,
35 | help='Host for XNAT. Default: env XNAT_HOST.')
36 | parser.add_argument('-u', '--username', dest='username', default=None,
37 | help='Username for XNAT.')
38 | parser.add_argument("-p", "--project",
39 | help="Project to get sessions from", required=True)
40 | parser.add_argument("-n", "--num-sessions",
41 | help="Number of sessions to return (default=10)",
42 | default=10, required=False)
43 |     parser.add_argument("-d", "--delimeter",
44 |                         help="Delimiter to separate session IDs (default=,)",
45 | default=",", required=False)
46 | return parser.parse_args()
47 |
48 |
49 | def main():
50 | """
51 | Main Method
52 |
53 | :return: None
54 | """
55 | args = parse_args()
56 | if args.host:
57 | host = args.host
58 | else:
59 | host = os.environ['XNAT_HOST']
60 | user = args.username
61 |
62 | with XnatUtils.get_interface(host=host, user=user) as xnat:
63 | project_id = args.project
64 | num_sessions = args.num_sessions
65 | delim = args.delimeter
66 | sesses = XnatUtils.get_random_sessions(xnat, project_id, num_sessions)
67 |
68 | sesses = sesses.replace(",", delim)
69 | print(sesses)
70 |
71 |
72 | if __name__ == '__main__':
73 | main()
74 |
--------------------------------------------------------------------------------
/bin/Xnat_tools/XnatCheckLogin:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | '''
5 | Set and Check the logins for XNAT.
6 |
7 | Created on Jan 24, 2013
8 | Edited on February 26,2015
9 | Edited on January 25, 2017
10 |
11 | @author: byvernault
12 | '''
13 |
14 |
15 | import os
16 | import sys
17 |
18 | from dax import DAX_Netrc
19 | from dax.dax_tools_utils import test_connection_xnat
20 | from dax.errors import DaxNetrcError
21 | import dax.xnat_tools_utils as utils
22 |
23 |
24 | __copyright__ = 'Copyright 2013 Vanderbilt University. All Rights Reserved'
25 | __exe__ = os.path.basename(__file__)
26 | __author__ = 'byvernault'
27 | __purpose__ = "Set and Check the logins for XNAT."
28 | BASH_PROFILE_XNAT = """# Xnat Host for default dax executables:
29 | {export_cmd}
30 | """
31 |
32 |
33 | def check_settings(host):
34 | """
35 | Method to check the settings for host in xnatnetrc
36 |
37 | :param host: Xnat Host URL
38 | :return: None
39 | """
40 | if not host:
41 | # use XNAT_HOST
42 | print('No host specified, using XNAT_HOST environment variable.')
43 | try:
44 | host = os.environ['XNAT_HOST']
45 | except KeyError:
46 | print('ERROR:host not found, set environment variable XNAT_HOST')
47 | sys.exit(1)
48 |
49 | print('Checking login for host={}'.format(host))
50 | try:
51 | dax_netrc = DAX_Netrc()
52 | user, pwd = dax_netrc.get_login(host)
53 | except DaxNetrcError:
54 | print('ERROR FOUND - PLEASE CHECK THE FOLLOWING')
55 | print(' 1. Login not found, set in ~/.netrc')
56 | print(' 2. File permissions on the .netrc file MUST be user-only')
57 | print(' - chmod go-rwx ~/.netrc')
58 | sys.exit(1)
59 |
60 | print('Checking connection:host={}, user={}'.format(host, user))
61 | test_connection_xnat(host, user, pwd)
62 |
63 |
64 | def parse_args():
65 |     """
66 |     Method to parse arguments using ArgumentParser
67 |
68 |     :return: parsed argument namespace
69 |     """
70 | from argparse import ArgumentParser
71 | ap = ArgumentParser(prog=__exe__, description=__purpose__)
72 | ap.add_argument('--host', dest='host', default=None, help='Host for XNAT.')
73 | return ap.parse_args()
74 |
75 |
76 | if __name__ == '__main__':
77 | args = parse_args()
78 | utils.print_separators()
79 | print('Checking your settings for XNAT')
80 | check_settings(args.host)
81 | utils.print_separators()
82 |
--------------------------------------------------------------------------------
/dax/tests/unit_test_utilities.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from dax import utilities
3 |
4 |
5 | class GroupbyToDictTest(TestCase):
6 |
7 | def test_find_with_pred(self):
8 | source = [{'a': 1, 'b': 2}, {'a': 2, 'b': 3}, {'a': 1, 'b': 4}]
9 | actual = utilities.find_with_pred(source, lambda x: x['a'] == 1)
10 | expected = {'a': 1, 'b': 2}
11 | self.assertDictEqual(actual, expected)
12 |
13 | def test_groupby_to_dict(self):
14 | source = [{'a': 1, 'b': 1}, {'a': 2, 'b': 2}, {'a': 3, 'b': 1}, {'a': 4, 'b': 2}]
15 |
16 | actual = utilities.groupby_to_dict(source, lambda x: x['b'])
17 | expected = {
18 | 1: [{'a': 1, 'b': 1}, {'a': 3, 'b': 1}],
19 | 2: [{'a': 2, 'b': 2}, {'a': 4, 'b': 2}]
20 | }
21 |
22 | self.assertDictEqual(actual, expected)
23 |
24 | def test_groupby_to_dict_2(self):
25 | source = [{'a': 1, 'b': 1}, {'a': 2, 'b': 2}, {'a': 3, 'b': 2}, {'a': 4, 'b': 1}]
26 |
27 | actual = utilities.groupby_to_dict(source, lambda x: x['b'])
28 | expected = {
29 | 1: [{'a': 1, 'b': 1}, {'a': 4, 'b': 1}],
30 | 2: [{'a': 2, 'b': 2}, {'a': 3, 'b': 2}]
31 | }
32 |
33 | self.assertDictEqual(actual, expected)
34 |
35 | def test_groupby_groupby_to_dict(self):
36 | source = [
37 | {'a': 1, 'b': 10, 'c': 100},
38 | {'a': 1, 'b': 11, 'c': 101},
39 | {'a': 1, 'b': 11, 'c': 102},
40 | {'a': 2, 'b': 12, 'c': 103},
41 | {'a': 1, 'b': 10, 'c': 104}
42 | ]
43 | actual = utilities.groupby_groupby_to_dict(source, lambda x: x['a'], lambda y: y['b'])
44 | expected = {
45 | 1: {
46 | 10: [{'a': 1, 'b': 10, 'c': 100}, {'a': 1, 'b': 10, 'c': 104}],
47 | 11: [{'a': 1, 'b': 11, 'c': 101}, {'a': 1, 'b': 11, 'c': 102}]},
48 | 2: {
49 | 12: [{'a': 2, 'b': 12, 'c': 103}]
50 | }
51 | }
52 |
53 | self.assertDictEqual(actual, expected)
54 |
55 | def test_strip_leading_and_trailing_spaces(self):
56 | tests = [
57 | ('', ''),
58 | (' ', ''),
59 | ('a', 'a'),
60 | (' a', 'a'),
61 | ('a ', 'a'),
62 | (' a ', 'a'),
63 | (' a ', 'a'),
64 | (' a , b ', 'a,b'),
65 | ]
66 |
67 | for t in tests:
68 | actual = utilities.strip_leading_and_trailing_spaces(t[0])
69 | self.assertEqual(actual, t[1])
70 |
--------------------------------------------------------------------------------
/dax/tests/common_session_tools.py:
--------------------------------------------------------------------------------
1 |
2 | import json
3 |
4 | from dax import XnatUtils
5 |
6 |
7 | class SessionTools:
8 |
9 | @staticmethod
10 | def get_connection(host):
11 | return XnatUtils.get_interface(host=host)
12 |
13 | @staticmethod
14 | def prep_project(intf, proj_id, subj_id, sess_id, scans, assessors):
15 | sess = intf.select_experiment(proj_id, subj_id, sess_id)
16 | for scan in scans:
17 | SessionTools.add_scan(sess, scan['name'], scan)
18 | for assessor in assessors:
19 |             SessionTools.add_assessor(sess, assessor['name'], assessor)
20 |
21 | @staticmethod
22 | def add_session(proj, name, parameters):
23 |         sess = proj.session(name)
24 | if sess.exists():
25 | sess.delete()
26 | sess = proj.session(name)
27 | sess.create(session=parameters['xsitype'])
28 | return sess
29 |
30 | @staticmethod
31 | def add_scan(sess, name, parameters):
32 | scn = sess.scan(name)
33 | if scn.exists():
34 | scn.delete()
35 | scn = sess.scan(name)
36 | kwargs = dict()
37 | print(parameters)
38 | print((parameters['xsitype']))
39 | kwargs[parameters['xsitype'] + '/type'] = parameters['type']
40 | kwargs[parameters['xsitype'] + '/quality'] = parameters['quality']
41 | scn.create(scans=parameters['xsitype'], **kwargs)
42 | for output in parameters['files']:
43 | for f in output[1]:
44 | scn.resource(output[0]).file(f).insert('./file.txt')
45 | # print output[0], f
46 | return scn
47 |
48 | @staticmethod
49 | def add_assessor(sess, name, parameters, inputs_policy="empty_if_not_set"):
50 | asr = sess.assessor(name)
51 | if asr.exists():
52 | asr.delete()
53 | asr = sess.assessor(name)
54 | kwargs = dict()
55 | kwargs[parameters['xsitype'] + '/proctype'] = parameters['proctype']
56 | kwargs[parameters['xsitype'] + '/procversion'] = '1.0.0'
57 | kwargs[parameters['xsitype'] + '/validation/status'] = "Needs QA"
58 | if inputs_policy == "empty_if_not_set":
59 | kwargs[parameters['xsitype'] + '/inputs'] =\
60 | json.dumps(parameters.get('inputs', {}))
61 | elif inputs_policy == "not_inputs":
62 | pass
63 |
64 | asr.create(assessors=parameters['xsitype'], **kwargs)
65 | for output in parameters['files']:
66 | for f in output[1]:
67 | asr.resource(output[0]).file(f).insert('./file.txt')
68 | return asr
69 |
--------------------------------------------------------------------------------
/docs/dax_xnat_dataypes_install.rst:
--------------------------------------------------------------------------------
1 | Installation of fs:fsData and proc:genProcData
2 | ----------------------------------------------
3 |
4 | Prerequisites:
5 |
6 | - install an XNAT instance
7 | https://wiki.xnat.org/documentation/getting-started-with-xnat
8 |
9 | On XNAT VM:
10 | ^^^^^^^^^^^
11 |
12 | 1) Make a BACKUP of your $XNAT_HOME, postgres db, and tomcat deployment
13 |
14 | 2) Stop tomcat
15 |
16 | 3) Copy plugins to XNAT
17 |
18 | Copy the files dax-plugin-fsData-X.Y.Z.jar and dax-plugin-genProcData-X.Y.Z.jar to ${XNAT_HOME}/plugins
19 |
20 | The plugins folder is located in the dax package at the path
21 | dax/misc/xnat-plugins/files. You can download the files from the GitHub
22 | repository: https://github.com/VUIIS/dax .
23 |
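24 | For example, assuming the jars were downloaded to the current directory (X.Y.Z is the plugin version):
25 |
26 | ::
27 |
28 | cp dax-plugin-fsData-X.Y.Z.jar dax-plugin-genProcData-X.Y.Z.jar ${XNAT_HOME}/plugins
29 |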
24 | 4) Start tomcat and confirm that plugins are installed
25 |
26 | ON XNAT webapp:
27 | ^^^^^^^^^^^^^^^
28 |
29 | 1) Log onto XNAT as admin
30 |
31 | 2) click Administer > Data types
32 |
33 | 3) click Setup Additional Data Type
34 |
35 | 4) for fs:fsData (NOTE: the fs:fsData datatype is deprecated. Install only if the need is known to exist)
36 |
37 | 4.a) select fs:fsData and validate without adding anything at first.
38 |
39 | 4.b) Come back to the new types and edit the fields:
40 |
41 | ::
42 |
43 | enter "FreeSurfer" in both Singular Name and Plural Name field
44 | enter "FS" in Code field
45 |
46 | 4.c) Edit the "Available Report Actions" by adding delete if you want to
47 | be able to delete assessors, using the following values:
48 |
49 | ::
50 |
51 | Remove Name: delete
52 | Display Name: Delete
53 | Grouping:
54 | Image: delete.gif
55 | Popup:
56 | Secure Access: delete
57 | Feature:
58 | Additional Parameters:
59 | Sequence: 4
60 |
61 | 4.d) click submit and then accept defaults for subsequent screens
62 |
63 | 5) for proc:genProcData
64 |
65 | 5.a) select proc:genProcData and validate without adding anything at first.
66 |
67 | 5.b) Come back to the new types and edit the fields:
68 |
69 | ::
70 |
71 | enter "Processing" in both Singular Name and Plural Name field
72 | enter "Proc" in Code field
73 |
74 | 5.c) Edit the "Available Report Actions" by adding delete if you want to
75 | be able to delete assessors, using the following values:
76 |
77 | ::
78 |
79 | Remove Name: delete
80 | Display Name: Delete
81 | Grouping:
82 | Image: delete.gif
83 | Popup:
84 | Secure Access: delete
85 | Feature:
86 | Additional Parameters:
87 | Sequence: 4
88 |
89 | 5.d) click submit and then accept defaults for subsequent screens
90 |
91 | You are now ready to use the two assessor types fs:fsData and
92 | proc:genProcData.
93 |
--------------------------------------------------------------------------------
/dax/lockfiles.py:
--------------------------------------------------------------------------------
1 | import socket
2 | import os
3 |
4 |
5 | def check_lockfile(file, logger):
6 | # Try to read host-PID from lockfile
7 | try:
8 | with open(file, 'r') as f:
9 | line = f.readline()
10 |
11 | host, pid = line.split('-')
12 | pid = int(pid)
13 |
14 | # Compare host to current host
15 | this_host = socket.gethostname().split('.')[0]
16 | if host != this_host:
17 |             logger.debug('different host, cannot check PID:{}'.format(file))
18 | elif pid_exists(pid):
19 | logger.debug('host matches and PID exists:{}'.format(str(pid)))
20 | else:
21 | logger.debug('host matches and PID not running, deleting lockfile')
22 | os.remove(file)
23 | except IOError:
24 | logger.debug('failed to read from lock file:{}'.format(file))
25 | except ValueError:
26 | logger.debug('failed to parse lock file:{}'.format(file))
27 |
28 |
29 | def clean_lockfiles(lock_dir, logger):
30 | lock_list = os.listdir(lock_dir)
31 |
32 | # Make full paths
33 | lock_list = [os.path.join(lock_dir, f) for f in lock_list]
34 |
35 | # Check each lock file
36 | for file in lock_list:
37 | logger.debug('checking lock file:{}'.format(file))
38 | check_lockfile(file, logger)
39 |
40 |
41 | def pid_exists(pid):
42 | if pid < 0:
43 | return False # NOTE: pid == 0 returns True
44 | try:
45 | os.kill(pid, 0)
46 | except ProcessLookupError: # errno.ESRCH
47 | return False # No such process
48 | except PermissionError: # errno.EPERM
49 | return True # Operation not permitted (i.e., process exists)
50 | else:
51 | return True # no error, we can send a signal to the process
52 |
53 |
54 | def lock_flagfile(lock_file):
55 | """
56 | Create the flagfile to lock the process
57 |
58 | :param lock_file: flag file use to lock the process
59 | :return: True if the file didn't exist, False otherwise
60 | """
61 | if os.path.exists(lock_file):
62 | return False
63 | else:
64 | open(lock_file, 'w').close()
65 |
66 | # Write hostname-PID to lock file
67 | _pid = os.getpid()
68 | _host = socket.gethostname().split('.')[0]
69 | with open(lock_file, 'w') as f:
70 | f.write('{}-{}'.format(_host, _pid))
71 |
72 | return True
73 |
74 |
75 | def unlock_flagfile(lock_file):
76 | """
77 | Remove the flagfile to unlock the process
78 |
79 | :param lock_file: flag file use to lock the process
80 | :return: None
81 | """
82 | if os.path.exists(lock_file):
83 | os.remove(lock_file)
84 |
--------------------------------------------------------------------------------
/bin/supplemental_tools/XnatRepushExamcards.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | from argparse import ArgumentParser, RawDescriptionHelpFormatter
5 | from dax import XnatUtils
6 | import json
7 | import os
8 | import pandas as pd
9 | import pyxnat
10 | import re
11 | import requests
12 |
13 | host = os.environ['XNAT_HOST']
14 | user = 'admin'
15 |
16 |
17 | def parse_args():
18 | argp = ArgumentParser(prog='SwitchProjects', formatter_class=RawDescriptionHelpFormatter)
19 | argp.add_argument('--proj', dest='project', default=None, help='Project we want to push from')
20 | argp.add_argument('--txt', dest='txtFile', default=None, help='Name of file we want to store session list')
21 | return argp
22 |
23 |
24 | if __name__ == '__main__':
25 | PARSER = parse_args()
26 | OPTIONS = PARSER.parse_args()
27 |
28 | project = OPTIONS.project
29 | f = open(OPTIONS.txtFile,'a')
30 |
31 | with XnatUtils.get_interface(host=host, user=user) as XNAT:
32 | subjects = XNAT.get_subjects(project)
33 |
34 | for subject in subjects:
35 | sessions = XNAT.get_sessions(project, subject['label'])
36 | for session in sessions:
37 | scans = XNAT.get_scans(project, subject['label'], session['label'])
38 | df = pd.DataFrame(scans)
39 | df = df[['subject_label','session_label','scan_type','scan_label']]
40 | if 'unknown' in str(df.loc[0]):
41 | print('ExamCard found')
42 | else:
43 | patient_id = session['session_label'].split('_')
44 | request = 'http://10.109.20.19:8080/dcm4chee-arc/aets/DCM4CHEE57/rs/studies/?PatientID=*{}'.format(patient_id[1])
45 | res = requests.get(request)
46 |                 instances = json.loads(res.text)  # the DICOM web service returns JSON
47 | for j, instance in enumerate(instances):
48 | instanceUID = instance['00081190']['Value'][0]
49 | instanceUID = instanceUID + '/series'
50 | res = requests.get(instanceUID)
51 |                     experiment = json.loads(res.text)
52 |                     for k, series in enumerate(experiment):
53 |                         if '0008103E' not in series:
54 |                             url = series['00081190']['Value'][0]
55 | url = url + '/export/VandyXNAT'
56 | res = requests.post(url)
57 | print('*****************************')
58 | print(patient_id[1])
59 | f.write(session['subject_label'] + ' ' + session['session_label'] + ' ' + patient_id[1] + '\n')
60 |
61 | f.close()
62 |
--------------------------------------------------------------------------------
/dax/rcq/__init__.py:
--------------------------------------------------------------------------------
1 | """ rcq implements a job queue for DAX in REDCap. Launch/Update/Finish, no build or upload here."""
2 | import logging
3 |
4 | from ..utilities import get_this_instance
5 | from ..XnatUtils import get_interface
6 |
7 | from .taskbuilder import TaskBuilder
8 | from .tasklauncher import TaskLauncher
9 | from .taskqueue import TaskQueue
10 | from .analysislauncher import AnalysisLauncher
11 |
12 |
13 | logger = logging.getLogger('manager.rcq')
14 |
15 |
16 | # LAUNCH: requires TaskQueue from projects_redcap and slurm
17 | # BUILD: requires Processors and TaskQueue from projects_redcap and xnat
18 | # UPDATE: requires projects_redcap and slurm
19 | # SYNC: requires TaskQueue from projects_redcap and xnat
20 |
21 |
22 | # TaskBuilder:
23 | # instances_redcap
24 | # projects_redcap
25 | # xnat
26 | # build()
27 |
28 |
29 | # TaskQueue:
30 | # projects_redcap
31 | # get_open_tasks()
32 | # add_task()
33 | # apply_updates()
34 |
35 |
36 | def update(rc, instance_settings, build_enabled=True, launch_enabled=True, projects=None):
37 | logger.info('connecting to redcap')
38 | def_field = rc.def_field
39 |
40 | if projects is None:
41 | projects = [x[def_field] for x in rc.export_records(fields=[def_field])]
42 |
43 | logger.debug(f'instance_settings={instance_settings}')
44 |
45 | yamldir = instance_settings['main_processorlib']
46 | logger.debug(f'yamldir={yamldir}')
47 |
48 | with get_interface() as xnat:
49 |
50 | logger.info('Running update of analyses')
51 | try:
52 | AnalysisLauncher(xnat, rc, instance_settings).update(
53 | projects,
54 | launch_enabled=launch_enabled
55 | )
56 | except Exception as err:
57 | logger.error(f'analyses update failed:{err}')
58 |             # keep going; a failed analyses update should not block the queue sync below
59 |
60 | logger.info('Running sync of queue status from XNAT to REDCap')
61 | TaskQueue(rc).sync(xnat, projects)
62 |
63 | logger.info('Running update of queue from REDCap to SLURM')
64 | TaskLauncher(rc, instance_settings).update(
65 | launch_enabled=launch_enabled,
66 | projects=projects)
67 |
68 | if build_enabled:
69 | logger.info('Running build of tasks in XNAT and REDCap queue')
70 | task_builder = TaskBuilder(rc, xnat, yamldir)
71 | for p in projects:
72 | logger.debug(f'building:{p}')
73 | task_builder.update(p)
74 |
75 |
76 | def _load_instance_settings(instance_redcap):
77 | """Load DAX settings for current instance from REDCap"""
78 | instance_name = get_this_instance()
79 | logger.debug(f'instance={instance_name}')
80 |
81 | # Return the record associated with this instance_name
82 | records = instance_redcap.export_records(records=[instance_name], raw_or_label='label')
83 | if len(records) == 0:
84 | settings = {}
85 | else:
86 | settings = records[0]
87 |
88 | return settings
89 |
--------------------------------------------------------------------------------
/dax/log.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | import logging
4 | import sys
5 |
6 |
7 | def setup_debug_logger(name, logfile):
8 | """
9 | Sets up the debug logger
10 |
11 | :param name: Name of the logger
12 |     :param logfile: file to store the log to. sys.stdout if no file defined
13 | :return: logger object
14 |
15 | """
16 | fmt = '%(asctime)s - %(levelname)s - %(module)s - %(message)s'
17 | formatter = logging.Formatter(fmt=fmt)
18 |
19 | if logfile:
20 | handler = logging.FileHandler(logfile)
21 | else:
22 | handler = logging.StreamHandler(sys.stdout)
23 | handler.setFormatter(formatter)
24 |
25 | logger = logging.getLogger(name)
26 | logger.setLevel(logging.DEBUG)
27 | logger.addHandler(handler)
28 | return logger
29 |
30 |
31 | def setup_info_logger(name, logfile):
32 | """
33 | Sets up the info logger
34 |
35 | :param name: Name of the logger
36 |     :param logfile: file to store the log to. sys.stdout if no file defined
37 | :return: logger object
38 |
39 | """
40 | fmt = '%(asctime)s - %(levelname)s - %(module)s - %(message)s'
41 | formatter = logging.Formatter(fmt=fmt)
42 |
43 | if logfile:
44 | handler = logging.FileHandler(logfile)
45 | else:
46 | handler = logging.StreamHandler(sys.stdout)
47 |
48 | handler.setFormatter(formatter)
49 |
50 | logger = logging.getLogger(name)
51 | logger.setLevel(logging.INFO)
52 | logger.addHandler(handler)
53 | return logger
54 |
55 |
56 | def setup_critical_logger(name, logfile):
57 | """
58 | Sets up the critical logger
59 |
60 | :param name: Name of the logger
61 |     :param logfile: file to store the log to. sys.stdout if no file defined
62 | :return: logger object
63 |
64 | """
65 | if logfile:
66 | handler = logging.FileHandler(logfile)
67 | else:
68 | handler = logging.StreamHandler(sys.stdout)
69 |
70 | logger = logging.getLogger(name)
71 | logger.setLevel(logging.CRITICAL)
72 | logger.addHandler(handler)
73 | return logger
74 |
75 |
76 | def setup_warning_logger(name, logfile):
77 | """
78 | Sets up the warning logger
79 |
80 | :param name: Name of the logger
81 |     :param logfile: file to store the log to. sys.stdout if no file defined
82 | :return: logger object
83 |
84 | """
85 | if logfile:
86 | handler = logging.FileHandler(logfile)
87 | else:
88 | handler = logging.StreamHandler(sys.stdout)
89 |
90 | logger = logging.getLogger(name)
91 | logger.setLevel(logging.WARNING)
92 | logger.addHandler(handler)
93 | return logger
94 |
95 |
96 | def setup_error_logger(name, logfile):
97 | """
98 | Sets up the error logger
99 |
100 | :param name: Name of the logger
101 | :param logfile: file to store the log to. sys.stdout if no file define
102 | :return: logger object
103 |
104 | """
105 | if logfile:
106 | handler = logging.FileHandler(logfile)
107 | else:
108 | handler = logging.StreamHandler(sys.stdout)
109 |
110 | logger = logging.getLogger(name)
111 | logger.setLevel(logging.ERROR)
112 | logger.addHandler(handler)
113 | return logger
114 |
--------------------------------------------------------------------------------
/docs/installing_dax_in_a_virtual_environment.rst:
--------------------------------------------------------------------------------
1 | Installing DAX in a Virtual Environment
2 | =======================================
3 |
4 | Table of Contents
5 | ~~~~~~~~~~~~~~~~~
6 |
7 | 1. `Setup <#setup>`__
8 | 2. `Create the Virtual Environment <#create-the-virtual-environment>`__
9 | 3. `Install DAX <#install-dax>`__
10 | 4. `Verify Installation <#verify-installation>`__
11 |
12 | -----
13 | Setup
14 | -----
15 |
16 | To install miniconda3, go to https://docs.conda.io/projects/conda/en/latest/user-guide/install/index.html and follow the procedure described on the miniconda site for your OS. It is very important that you follow the directions closely and do not forget to source conda. The main idea is to download the bash installer for Python 3.7 or newer and open the terminal (Python 3.8 on MacOS is used as the example here). Run the following where the file was downloaded:
17 |
18 | ::
19 |
20 | bash Miniconda3-latest-MacOSX-x86_64.sh
21 |
22 | Follow the prompts until miniconda is installed. Now, source conda and add the path to .bash_profile. Then close and reopen terminal. To display a list of installed packages:
23 |
24 | ::
25 |
26 | conda list
27 |
28 | ------------------------------
29 | Create the Virtual Environment
30 | ------------------------------
31 |
32 | DAX must be installed in a virtual environment running Python 3. To create a new environment in Miniconda with Python 3.8:
33 |
34 | ::
35 |
36 | conda create -n daxvenv python=3.8
37 |
38 | which can then be activated or deactivated with:
39 |
40 | ::
41 |
42 | conda activate daxvenv # Activation of environment
43 | conda deactivate # Deactivation of environment
44 |
45 | After activating the new environment, git version 2.11+ should be installed.
46 |
47 | - For ACCRE users, refer to the instructions here: https://dax.readthedocs.io/en/latest/requirements_for_dax_on_accre.html
48 | - Otherwise, install git using these instructions: https://git-scm.com/book/en/v2/Getting-Started-Installing-Git
49 |
50 | -----------
51 | Install DAX
52 | -----------
53 |
54 | Once the virtual environment with Python 3 is created and the correct version of git is installed, you'll need to install dax itself:
55 |
56 | ::
57 |
58 | (daxvenv) $ pip install dax
59 |
60 |
61 | Configure an environment variable named XNAT_HOST set to the full URL of your XNAT server. This can
62 | be included in your startup file (such as .bashrc or .bash_profile).
63 |
64 | ::
65 |
66 | (daxvenv) $ export XNAT_HOST=https://central.xnat.org
67 |
68 | Configure your credentials in a file named ".netrc" in your home directory.
69 |
70 | ::
71 |
72 | machine SERVER
73 | login USER
74 | password PASSWORD
75 |
76 | Here SERVER is the server name only, for example central.xnat.org, not the
77 | full URL. A value like 'https://xnat.website.com/xnat' instead of
78 | 'xnat.website.com' WILL NOT work properly.
79 |
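80 | For example, for a hypothetical account on XNAT Central, the ~/.netrc entry would look like:
81 |
82 | ::
83 |
84 | machine central.xnat.org
85 | login someusername
86 | password somepassword
87 |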
80 | File permissions on the .netrc file must be user-only, so you need to run:
81 |
82 | ::
83 |
84 | chmod go-rwx ~/.netrc
85 |
86 | -------------------
87 | Verify Installation
88 | -------------------
89 |
90 | Next, run XnatCheckLogin, which will verify that you can log on successfully.
91 |
92 | ::
93 |
94 | (daxvenv) $ XnatCheckLogin
95 | ================================================================
96 | Checking your settings for XNAT
97 | No host specified, using XNAT_HOST environment variable.
98 | Checking login for host=https://central.xnat.org
99 | Checking connection:host=https://central.xnat.org, user=someusername
100 | --> Good login.
101 | ================================================================
102 |
--------------------------------------------------------------------------------
/dax/tests/unit_test_xnatinterface.py:
--------------------------------------------------------------------------------
1 |
2 | from unittest import TestCase
3 |
4 | import itertools
5 |
6 |
7 | from dax import XnatUtils
8 |
9 |
10 | class InterfaceTempUnitTests(TestCase):
11 |
12 |     def test_connection_strings(self):
13 |         # positive tests
14 | self.assertEqual(
15 | XnatUtils.InterfaceTemp.P_XPATH.format(project='proj1'),
16 | '/project/proj1'
17 | )
18 |
19 | self.assertEqual(
20 | XnatUtils.InterfaceTemp.S_XPATH.format(project='proj1',
21 | subject='subj1'),
22 | '/project/proj1/subject/subj1'
23 | )
24 |
25 | self.assertEqual(
26 | XnatUtils.InterfaceTemp.E_XPATH.format(project='proj1',
27 | subject='subj1',
28 | session='sess1'),
29 | '/project/proj1/subject/subj1/experiment/sess1'
30 | )
31 |
32 | self.assertEqual(
33 | XnatUtils.InterfaceTemp.C_XPATH.format(project='proj1',
34 | subject='subj1',
35 | session='sess1',
36 | scan='scan1'),
37 | '/project/proj1/subject/subj1/experiment/sess1/scan/scan1'
38 | )
39 |
40 | self.assertEqual(
41 | XnatUtils.InterfaceTemp.CR_XPATH.format(project='proj1',
42 | subject='subj1',
43 | session='sess1',
44 | scan='scan1',
45 | resource='res1'),
46 | '/project/proj1/subject/subj1/experiment/sess1'
47 | '/scan/scan1/resource/res1'
48 | )
49 |
50 | self.assertEqual(
51 | XnatUtils.InterfaceTemp.A_XPATH.format(project='proj1',
52 | subject='subj1',
53 | session='sess1',
54 | assessor='assr1'),
55 | '/project/proj1/subject/subj1/experiment/sess1/assessor/assr1'
56 | )
57 |
58 | self.assertEqual(
59 | XnatUtils.InterfaceTemp.AR_XPATH.format(project='proj1',
60 | subject='subj1',
61 | session='sess1',
62 | assessor='assr1',
63 | resource='res1'),
64 | '/project/proj1/subject/subj1/experiment/sess1'
65 | '/assessor/assr1/out/resource/res1'
66 |
67 | )
68 |
69 | def test_object_type_from_path(self):
70 | tests = [
71 | ('project/p1', 'project'),
72 | ('project/p1/subject/s1', 'subject'),
73 | ('project/p1/subject/s1/experiment/e1', 'experiment'),
74 | ('project/p1/subject/s1/experiment/e1/scan/sc1', 'scan'),
75 | ('project/p1/subject/s1/experiment/e1/assessor/as1', 'assessor'),
76 | ('project/p1/subject/s1/experiment/e1/scan/sc1/resource/r1',
77 | 'resource'),
78 | ('project/p1/subject/s1/experiment/e1/assessor/as1/in/resource/r1',
79 | 'resource'),
80 | ('project/p1/subject/s1/experiment/e1/assessor/as1/out/resource/r1',
81 | 'resource'),
82 | ]
83 |
84 | prefix = ['', 'data/', 'xnat:/', '/', '/data/']
85 | postfix = ['', '/']
86 |
87 | for prepost in itertools.product(prefix, postfix):
88 | for t in tests:
89 | instr = prepost[0] + t[0] + prepost[1]
90 | print(('testing ', instr))
91 | self.assertEqual(
92 | t[1],
93 | XnatUtils.InterfaceTemp.object_type_from_path(instr),
94 | 'unexpected object type')
95 |
--------------------------------------------------------------------------------
/bin/supplemental_tools/dax-testrun.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # Test a new v3 processor yaml file. Test assessor is automatically named
4 | # "test-NNN" to avoid confusion and for easy cleanup.
5 | #
6 | # This script ties up the terminal while the job is running, so either use
7 | # tmux/screen or nohup &>
8 | #
9 | # Options:
10 | # [--basedir] A subdir is created here to store all working files.
11 | # Defaults to /nobackup/user/${USER}
12 | # [--delay] How often in sec to check whether the job has finished.
13 | # Defaults to 1800 (30min)
14 | # --project XNAT project to test on
15 | # --session XNAT session to test on
16 | # --procyaml The v3 processor yaml file to test (overrides are not implemented)
17 |
18 |
19 | # Defaults
20 | basedir="/nobackup/user/${USER}"
21 | delay=1800
22 |
23 | # Command line args
24 | while [[ $# -gt 0 ]]; do
25 | key="$1"
26 | case $key in
27 | --basedir) basedir="$2"; shift; shift ;;
28 | --project) project="$2"; shift; shift ;;
29 | --session) session="$2"; shift; shift ;;
30 | --procyaml) procyaml="$2"; shift; shift ;;
31 | --delay) delay="$2"; shift; shift ;;
32 | *) echo "Input ${1} not recognized"; shift ;;
33 | esac
34 | done
35 | if [ -z "${project}" -o -z "${session}" -o -z "${procyaml}" ]; then
36 | echo Usage:
37 |     echo "$(basename "${0}") [--basedir BASEDIR] [--delay DELAYSEC] --project PROJECT --session SESSION --procyaml proc.yaml"
38 |     exit 1
39 | fi
40 |
41 | # File and directory names
42 | dstamp=$(date +%Y%m%d-%H%M%S)
43 | tstamp=$(date +%H%M%S)
44 | assessor=$(basename "${procyaml}" .yaml)
45 | filetag="${project}-x-${session}-x-${assessor}-x-${dstamp}"
46 | wkdir="${basedir}/${filetag}" && mkdir "${wkdir}"
47 | yamlfile="test-${tstamp}-$(basename "${procyaml}")"
48 | setfile="${wkdir}/settings.yaml"
49 | buildlog="${wkdir}/build.log"
50 | launchlog="${wkdir}/launch.log"
51 | updatelog="${wkdir}/update.log"
52 | uploadlog="${wkdir}/upload.log"
53 | resdir="${wkdir}/Spider_Upload_Dir" && mkdir -p "${resdir}"/{OUTLOG,PBS,PDF}
54 |
55 | # Copy yaml file to wkdir and name as "test". v3 pulls assessor label from this filename
56 | cp "${procyaml}" "${wkdir}/${yamlfile}"
57 |
58 | # Create dax settings file
59 | cat << EOF > "${setfile}"
60 | ---
61 | processorlib: ${wkdir}
62 | modulelib: /data/mcr/rocky9/dax_modules
63 | # singularity not moved to rocky9 yet
64 | singularity_imagedir: /data/mcr/centos7/singularity
65 | jobtemplate: /data/mcr/rocky9/dax_templates/job_template_v3.txt
66 | resdir: ${resdir}
67 | attrs:
68 | job_rungroup: h_vuiis
69 | xnat_host: ${XNAT_HOST}
70 | skip_lastupdate: Y
71 | yamlprocessors:
72 | - name: PROCESSOR
73 | filepath: ${yamlfile}
74 | projects:
75 | - project: ${project}
76 | yamlprocessors: PROCESSOR
77 | EOF
78 |
79 | echo "Logging to ${wkdir}"
80 |
81 | # Build
82 | echo "Building"
83 | dax build \
84 | --logfile "${buildlog}" \
85 | --project "${project}" \
86 | --sessions "${session}" \
87 | "${setfile}"
88 |
89 | # FIXME Check for successful build or fail with useful info
90 |
91 | # Launch
92 | echo "Launching"
93 | dax launch \
94 | --logfile "${launchlog}" \
95 | --project "${project}" \
96 | --sessions "${session}" \
97 | "${setfile}"
98 |
99 | # Identify and track the job (check every $delay seconds)
100 | jobid=$(grep "INFO - cluster - Submitted batch job" "${launchlog}" | cut -d ' ' -f 11)
101 |
102 | if [ -z "${jobid}" ]; then
103 | echo "Job not launched"
104 | exit 1
105 | else
106 | echo "Job ${jobid} launched"
107 | fi
108 |
109 | jobstate=
110 | while [ "${jobstate}" != "completed" ]; do
111 | sleep "${delay}"
112 |     jobstate=$(rtracejob ${jobid} | grep "State")
113 | jobstate=$(echo ${jobstate##*|})
114 | echo "Job ${jobid} state: ${jobstate}"
115 | done
116 |
117 | # FIXME show the assessor status (JOB_FAILED.txt, READY_TO_COMPLETE.txt, ...)
118 |
119 | # FIXME If failed, report error lines in outlog
120 |
121 | # Update/upload to get results to xnat
122 | echo Updating
123 | dax update \
124 | --logfile "${updatelog}" \
125 | --project "${project}" \
126 | --sessions "${session}" \
127 | "${setfile}"
128 |
129 | echo "Uploading"
130 | dax upload --project "${project}" --resdir "${resdir}"
131 |
132 | # FIXME Can we make it easier to delete test assessors than via XNAT GUI?
133 |
--------------------------------------------------------------------------------
/dax/tests/unit_test_autoprocessor.py:
--------------------------------------------------------------------------------
1 |
2 | from unittest import TestCase
3 |
4 | import io
5 |
6 | import yaml
7 |
8 | from dax.processors import AutoProcessor
9 |
10 | from dax.tests import unit_test_entity_common as common
11 | from dax import XnatUtils
12 | from dax import yaml_doc
13 |
14 |
15 | class ConnectionStringUnitTest(TestCase):
16 |
17 | def test_a_xpath(self):
18 | print((XnatUtils.InterfaceTemp.A_XPATH.format(
19 | project='proj1', subject='subj1',
20 | session='sess1', assessor='assr1')))
21 |
22 |
23 | class AutoProcessorUnitTest(TestCase):
24 |
25 | @staticmethod
26 | def _make_yaml_source(resource):
27 | return yaml_doc.YamlDoc().from_string(resource)
28 |
29 | def _construct_session(self, name):
30 | tpo = common.TestProjectObject(
31 | common.xnat_contents[name]['projects'][0]
32 | )
33 | return tpo.subjects()['subj1'].sessions()['sess1']
34 |
35 | def test_scan_processor_construction(self):
36 | yaml_source = self._make_yaml_source(
37 | common.processor_yamls.scan_brain_tiv_from_gif_yaml)
38 | ap = AutoProcessor(common.FakeXnat, yaml_source)
39 |
40 | yaml_source = self._make_yaml_source(common.git_pct_t1_yaml)
41 | ap = AutoProcessor(common.FakeXnat, yaml_source)
42 |
43 | def test_test(self):
44 | print("hello world")
45 |
46 | def test_get_assessor_input_types(self):
47 | yaml_source = self._make_yaml_source(
48 | common.processor_yamls.scan_brain_tiv_from_gif_yaml)
49 | ap = AutoProcessor(common.FakeXnat, yaml_source)
50 | print((ap.get_assessor_input_types()))
51 |
52 | # def test_scan_assessor_get_assessor_name(self):
53 | # tseo = self._construct_session('brain_tiv_from_gif')
54 | # tsco = tseo.scan_by_key('1')
55 | #
56 | # yaml_source = self._make_yaml_source(
57 | # common.processor_yamls.scan_brain_tiv_from_gif_yaml)
58 | # ap = AutoProcessor(common.FakeXnat, yaml_source)
59 | #
60 | # actual = ap.get_assessor_name(tsco)
61 | # self.assertEquals(actual,
62 | # "proj1-x-subj1-x-sess1-x-1-x-BrainTivFromGIF_v1")
63 |
64 | # def test_scan_assessor_get_assessor(self):
65 | # tseo = self._construct_session('brain_tiv_from_gif')
66 | # tsco = tseo.scan_by_key('1')
67 | #
68 | # yaml_source = self._make_yaml_source(
69 | # common.processor_yamls.scan_brain_tiv_from_gif_yaml)
70 | # ap = AutoProcessor(common.FakeXnat, yaml_source)
71 | #
72 | # actual, name = ap.get_assessor(tsco)
73 | # self.assertEquals(name,
74 | # "proj1-x-subj1-x-sess1-x-1-x-BrainTivFromGIF_v1")
75 |
76 | def test_scan_assessor_should_run(self):
77 | tseo = self._construct_session('brain_tiv_from_gif')
78 | tsco = tseo.scan_by_key('1')
79 |
80 | yaml_source = self._make_yaml_source(
81 | common.processor_yamls.scan_brain_tiv_from_gif_yaml)
82 | ap = AutoProcessor(common.FakeXnat, yaml_source)
83 |
84 | ret = ap.should_run(tseo.info())
85 | self.assertEqual(ret, 1)
86 |
87 | # TODO: BenM/asr_of_asr/this method needs to run off pyxnat assessor
88 | # objects, so create a mocked pyxnat assessor for this (and other) tests
89 | def test_scan_assessor_has_inputs(self):
90 | tseo = self._construct_session('brain_tiv_from_gif')
91 | tsco = tseo.scan_by_key('1')
92 |
93 | yaml_source = self._make_yaml_source(
94 | common.processor_yamls.scan_brain_tiv_from_gif_yaml)
95 | ap = AutoProcessor(common.FakeXnat, yaml_source)
96 |
97 | ret, comment = ap.has_inputs(tsco)
98 | self.assertEqual(ret, 1)
99 |
100 | def test_scan_assessor_build_cmds(self):
101 | tseo = self._construct_session('brain_tiv_from_gif')
102 | tsco = tseo.assessor_by_key('proc1')
103 |
104 | yaml_source = self._make_yaml_source(
105 | common.processor_yamls.scan_brain_tiv_from_gif_yaml)
106 | ap = AutoProcessor(common.FakeXnat, yaml_source)
107 |
108 | tsao = tseo.assessors()
109 | print(tsao)
110 | # TODO:BenM/assessor_of_assessor/we are passing an interface object
111 | # rather than a cached object. Fix and then re-enable
112 | cmds = ap.get_cmds(tsco, '/testdir')
113 | print(("cmds =", cmds))
114 |
--------------------------------------------------------------------------------
/dax/errors.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | """errors.py
5 |
6 | Method related to errors and Custom Exceptions.
7 | """
8 |
9 | import netrc
10 | import sys
11 |
12 |
13 | __copyright__ = 'Copyright 2013 Vanderbilt University. All Rights Reserved'
14 | __all__ = ['DaxError', 'DaxXnatError', 'DaxProcessorError',
15 | 'DaxSetupError', 'DaxNetrcError', 'DaxUploadError',
16 | 'XnatAuthentificationError', 'XnatUtilsError', 'XnatAccessError',
17 | 'XnatToolsError', 'XnatToolsUserError',
18 | 'ClusterLaunchException', 'ClusterCountJobsException',
19 | 'ClusterJobIDException',
20 | 'AutoProcessorError']
21 |
22 |
23 | # DAX error:
24 | class DaxError(Exception):
25 | """Basic exception for errors raised by dax."""
26 |
27 |
28 | # DAX XNAT error:
29 | class DaxXnatError(DaxError):
30 | """Basic exception for errors related to XNAT raised by dax."""
31 |
32 |
33 | # DAX Processor error:
34 | class DaxProcessorError(DaxError):
35 | """Basic exception for errors related to processor raised by dax."""
36 |
37 |
38 | # dax_setup errors
39 | class DaxSetupError(DaxError, ValueError):
40 | """DaxSetup exception."""
41 | def __init__(self, message):
42 | Exception.__init__(self, 'Error in dax setup: %s' % message)
43 |
44 |
45 | # Launcher errors:
46 | class DaxUploadError(DaxError):
47 | """Custom exception raised with dax upload."""
48 | def __init__(self, message):
49 | Exception.__init__(self, 'Error with dax upload: %s' % message)
50 |
51 |
52 | # Dax netrc errors
53 | class DaxNetrcError(netrc.NetrcParseError):
54 | """Basic exception for errors related to DAX_Netrc raised by dax."""
55 |
56 |
57 | # Launcher errors:
58 | class DaxLauncherError(DaxError):
59 | """Custom exception raised with dax launcher."""
60 | def __init__(self, message):
61 | Exception.__init__(self, 'Error with Launcher: %s' % message)
62 |
63 |
64 | # XnatUtils errors
65 | class XnatAuthentificationError(DaxXnatError):
66 | """Custom exception raised when xnat connection failed."""
67 | def __init__(self, host=None, user=None):
68 |         msg = 'ERROR: XNAT authentication failed. Check logins.'
69 | if host:
70 | msg = '%s. Host: %s' % (msg, host)
71 | if user:
72 | msg = '%s / User: %s' % (msg, user)
73 | Exception.__init__(self, msg)
74 |
75 |
76 | class XnatUtilsError(DaxXnatError):
77 | """XnatUtils exception."""
78 | def __init__(self, message):
79 | Exception.__init__(self, 'Error in XnatUtils: %s' % message)
80 |
81 |
82 | class XnatToolsError(DaxError):
83 | """Xnat Tools Exception."""
84 | def __init__(self, message):
85 | Exception.__init__(self, 'Error in xnat_tools: %s' % message)
86 |
87 |
88 | class XnatToolsUserError(DaxError):
89 | """Xnat Tools Exception."""
90 | def __init__(self, script, message):
91 |         print('\n%s: error: %s' % (script, message))
92 |         # exit non-zero, printing this exception as the message
93 |         sys.exit(self)
94 |
95 |
96 | class XnatAccessError(DaxXnatError, ValueError):
97 | """XNAT access exception if item does not exist."""
98 | def __init__(self, message):
99 | Exception.__init__(self, 'Error to access XNAT object: %s' % message)
100 |
101 |
102 | # Cluster errors
103 | class ClusterError(DaxError):
104 | """Basic exception for errors related to cluster raised by dax."""
105 |
106 |
107 | class ClusterLaunchException(ClusterError):
108 | """Custom exception raised when launch on the grid failed"""
109 | def __init__(self):
110 | Exception.__init__(self, 'ERROR: Failed to launch job on the grid.')
111 |
112 |
113 | class ClusterCountJobsException(ClusterError):
114 | """Custom exception raised when attempting to get the number of
115 | jobs fails"""
116 | def __init__(self):
117 | Exception.__init__(self, 'ERROR: Failed to fetch number of '
118 | 'jobs from the grid.')
119 |
120 |
121 | class ClusterJobIDException(ClusterError):
122 | """Custom exception raised when attempting to get the job id failed"""
123 | def __init__(self):
124 | Exception.__init__(self, 'ERROR: Failed to get job id.')
125 |
126 |
127 | # Task:
128 | class NeedInputsException(DaxError):
129 | def __init__(self, value):
130 | self.value = value
131 |
132 | def __str__(self):
133 | return repr(self.value)
134 |
135 |
136 | class NoDataException(DaxError):
137 | def __init__(self, value):
138 | self.value = value
139 |
140 | def __str__(self):
141 | return repr(self.value)
142 |
143 |
144 | # Processor Exception:
145 | class AutoProcessorError(DaxProcessorError):
146 | pass
147 |
--------------------------------------------------------------------------------
/docs/dax_executables.rst:
--------------------------------------------------------------------------------
1 | DAX Executables
2 | ===============
3 |
4 | Table of Contents
5 | ~~~~~~~~~~~~~~~~~
6 |
7 | 1. `DAX Packages <#dax-packages>`__
8 | 2. `How Does it Work? <#how-does-it-work>`__
9 | 3. `DAX Settings <#dax-settings>`__
10 | 4. `How to Write a ProjectSettings.yaml File <#how-to-write-a-projectsettings-yaml-file>`__
11 | 5. `DAX Executables <#dax-executables>`__
12 | 6. `DAX Build <#dax-build>`__
13 | 7. `DAX Update Tasks <#dax-update-tasks>`__
14 | 8. `DAX Launch <#dax-launch>`__
15 | 9. `DAX Upload <#dax-upload>`__
16 | 10. `DAX Manager <#dax-manager>`__
17 |
18 | ------------
19 | DAX Packages
20 | ------------
21 |
22 | We have been developing a high throughput pipeline processing and quality assurance environment based on Washington University's XNAT platform. This system has been deployed as the primary data archival platform for all VUIIS studies. This pipeline has been implemented in a python package called Distributed Automation for XNAT (DAX). Data processing occurs on the Vanderbilt Advanced Computing Center for Research and Education (ACCRE). DAX has been developed with a series of settings making the package portable on any batch scripting system. Each customized module is a spider that performs an image processing task using a variety of open source software.
23 |
24 | DAX is available on github at https://github.com/VUIIS/dax and can be installed with "pip install dax".
25 |
26 | How Does it Work?
27 | ~~~~~~~~~~~~~~~~~
28 |
29 | DAX consists of a set of executables that communicate with an XNAT system to process archived imaging data. XNAT has an object, implemented as a child of a session, called an assessor, which corresponds to processed data. By reading the database on a project basis, the different executables generate the assessors, check for inputs, run the scripts on the cluster as jobs, check on the status of the jobs, and upload data back to XNAT. DAX will also maintain data on REDCap. DAX uses a settings file to specify various customizations of the DAX installation, which processes each project should run, and any customizations to those processes.
30 |
31 | DAX Settings
32 | ~~~~~~~~~~~~
33 |
34 | Inside the package DAX, there is a dax_settings.py file. This file contains variables that can be set by the user such as the commands used by your cluster, the different paths (the upload directory, root job, etc...), email settings, or REDCap settings for dax_manager.
35 |
36 | By default, the package is set to use the settings used by Vanderbilt University, which target a SLURM cluster.
37 |
38 | How to Write a ProjectSettings.yaml File
39 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
40 |
41 | Two of the DAX executables need a ProjectSettings.yaml file to run. This file describes the modules and processors that need to run for a project or a list of projects. You can learn how to write a ProjectSettings.yaml file here: Writing a settings file.
42 |
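43 | As a rough sketch, modeled on the test settings used with dax elsewhere in this repository (the paths, project name, and processor name below are placeholders, not defaults):
44 |
45 | ::
46 |
47 | ---
48 | processorlib: /path/to/processors
49 | modulelib: /path/to/modules
50 | resdir: /path/to/Spider_Upload_Dir
51 | attrs:
52 |   job_rungroup: mygroup
53 |   xnat_host: https://central.xnat.org
54 | yamlprocessors:
55 |   - name: PROC_A
56 |     filepath: /path/to/processors/proc_a.yaml
57 | projects:
58 |   - project: PROJ1
59 |     yamlprocessors: PROC_A
60 |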
43 | ---------------
44 | DAX Executables
45 | ---------------
46 |
47 | The main executables in the DAX package are:
48 |
49 | - dax build
50 | - dax update
51 | - dax launch
52 | - dax upload
53 | - dax manager
54 |
55 | See image below to understand the role of each executable:
56 |
57 | .. image:: images/dax_executables/life_cycle_of_dax_task.png
58 |
59 | DAX Build
60 | ~~~~~~~~~
61 |
62 | dax build will build all the projects in your ProjectSettings.yaml file. It will check each session of your project, run the different modules (e.g. converting DICOM to NIfTI, generating previews, extracting physlogs), and generate the assessors from the processors set in the ProjectSettings.yaml file.
63 |
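64 | For example, to build a single project, restricted to one session (names here are placeholders):
65 |
66 | ::
67 |
68 | dax build --project PROJ1 --sessions Sess1 settings.yaml
69 |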
64 | DAX Update
65 | ~~~~~~~~~~~~~~~~
66 |
67 | dax update handles assessors for all the projects in your ProjectSettings.yaml file. It will get the list of all the assessors that are "open", meaning with a status from the list below, and update each assessor's status.
68 |
69 | Open assessor statuses:
70 |
71 | - NEED_TO_RUN
72 | - UPLOADING
73 | - JOB_RUNNING
74 | - READY_TO_COMPLETE
75 | - JOB_FAILED
76 |
77 | DAX Launch
78 | ~~~~~~~~~~
79 |
80 | dax launch will submit a job to the cluster for each assessor that has the status NEED_TO_RUN.
81 |
82 | DAX Upload
83 | ~~~~~~~~~~
84 |
85 | Jobs on the cluster do not upload data directly to XNAT; each job copies its results to a temporary folder on the computer. dax upload will read the processed data from this folder and upload it to XNAT under the assessor that was previously created by dax build.
86 |
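87 | For example, to upload results for one project from the temporary upload directory (paths here are placeholders):
88 |
89 | ::
90 |
91 | dax upload --project PROJ1 --resdir /path/to/Spider_Upload_Dir
92 |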
87 | DAX Manager
88 | ~~~~~~~~~~~
89 |
90 | dax manager allows users to manage multiple projects from REDCap (https://redcap.vanderbilt.edu). It will automatically generate a ProjectSettings.yaml file from the REDCap database and will run dax build/update/launch/upload from those files.
91 |
92 | On the REDCap project, each record corresponds to a project. Each library is a module or a processor that can be enabled and customized by the user.
93 |
--------------------------------------------------------------------------------
/dax/rcq/project_job_template.txt:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #SBATCH --account=${job_rungroup}
3 | #SBATCH --nodes=1
4 | #SBATCH --ntasks=${job_ppn}
5 | #SBATCH --time=${job_walltime}
6 | #SBATCH --mem=${job_memory}
7 | #SBATCH -o ${job_output_file}
8 |
9 | uname -a # outputs node info (name, date&time, type, OS, etc)
10 | date
11 |
12 | #=============================================================================
13 | # These fields should be added here by replacing job_cmds:
14 | #ANALYSISID=
15 | #PROJECT=
16 | #REPO=
17 | #VERSION=
18 | #INLIST=
19 | #XNATHOST=
20 | #XNATUSER=
21 | #MAINCMD=
22 | ${job_cmds}
23 | #=============================================================================
24 | JOBDIR=$(mktemp -d "/tmp/tmp.XXXXXXXXX") || { echo "mktemp failed"; exit 1; }
25 | INDIR=$JOBDIR/INPUTS
26 | OUTDIR=$JOBDIR/OUTPUTS
27 |
28 | SESSLIMIT=20
29 | SLEEPMAX=300
30 |
31 | echo $ANALYSISID
32 | echo $PROJECT
33 | echo $REPO
34 | echo $VERSION
35 | echo $INDIR
36 | echo $OUTDIR
37 | echo $XNATHOST
38 | echo $XNATUSER
39 | echo $SESSLIMIT
40 | echo $SLEEPMAX
41 |
42 | mkdir -p $INDIR
43 | mkdir -p $OUTDIR
44 |
45 | # Download the code repository
46 | mkdir $OUTDIR/self
47 | curl -sL $REPO | tar zxvf - -C $OUTDIR/self --strip-components=1
48 |
49 | # Check number of open sessions on XNAT before we download
50 | echo "Checking that we can download"
51 | while true;do
52 | SESSID=`curl -s -n "$XNATHOST/data/JSESSION"`
53 | SESSCOUNT=`curl -s -b "JSESSIONID=$SESSID" "$XNATHOST/data/user/$XNATUSER/sessions" | cut -s -f2 -d ":" | cut -f1 -d "}"`
54 | if (( "$SESSCOUNT" > "$SESSLIMIT" )); then
55 | echo -n "Cannot download yet, too many open sessions, sleeping "
56 | curl -s -b "JSESSIONID=$SESSID" -X DELETE "$XNATHOST/data/JSESSION"
57 | SLEEPSECS=$(( RANDOM % SLEEPMAX ))s && echo "$SLEEPSECS" && sleep $SLEEPSECS
58 | else
59 | echo "Looks good, ready to download"
60 | break
61 | fi
62 | done
63 |
64 | # Download XNAT inputs
65 | for IN in "${INLIST[@]}"; do
66 | IFS=',' read -r col1 col2 col3 col4 <<< "$IN"
67 |
68 | if [ -n "$col4" ]; then
69 | # Create the subdir
70 | DDIR="$INDIR/$col4"
71 | mkdir -p $DDIR
72 | else
73 | DDIR="$INDIR"
74 | fi
75 |
76 | if [ "$col2" == "FILE" ]; then
77 | CMD="curl -D - -s -b JSESSIONID=$SESSID \"$col3\" -o $DDIR/$col1"
78 | CMD+=" | head -n 1 | awk '{print \$2}'"
79 | elif [ "$col2" == "DIRJ" ]; then
80 | CMD="curl -D - -s -b JSESSIONID=$SESSID \"$col3\"?format=zip -o $DDIR/${col1}.zip"
81 | CMD+=" | head -n 1 | awk '{print \$2}'"
82 | CMD+=" && unzip -q -j $DDIR/${col1}.zip -d $DDIR/$col1"
83 | else
84 | CMD="curl -D - -s -b JSESSIONID=$SESSID \"${col3}?format=zip&structure=simplified\" -o $DDIR/${col1}.zip"
85 | CMD+=" | head -n 1 | awk '{print \$2}'"
86 | CMD+=" && unzip -q $DDIR/${col1}.zip -d $DDIR/$col1 && rm $DDIR/${col1}.zip"
87 | fi
88 |
89 | # Show the whole command
90 | SAFE_CMD=$(echo ${CMD} | sed "s/$SESSID/SESSID/g")
91 | echo $SAFE_CMD
92 |
93 | # Run the full command
94 | eval result=\$\($CMD\)
95 | echo "result=$result"
96 |
97 | # Check for success
98 | if test "$result" != '200' ; then
99 | echo "Download failed with code:$result, exiting..."
100 | curl -s -b "JSESSIONID=$SESSID" -X DELETE "$XNATHOST/data/JSESSION"
101 | exit 1
102 | fi
103 | done
104 |
105 | # Disconnect XNAT
106 | curl -s -b "JSESSIONID=$SESSID" -X DELETE "$XNATHOST/data/JSESSION"
107 |
108 | # Run main commands
109 | echo $MAINCMD
110 | eval $MAINCMD
111 |
112 | # Get an XNAT session, checking number of open sessions
113 | echo "Checking that we can upload"
114 | while true;do
115 | SESSID=`curl -s -n "$XNATHOST/data/JSESSION"`
116 | SESSCOUNT=`curl -s -b "JSESSIONID=$SESSID" "$XNATHOST/data/user/$XNATUSER/sessions" | cut -s -f2 -d ":" | cut -f1 -d "}"`
117 | echo "$SESSCOUNT"
118 | if (( "$SESSCOUNT" > "$SESSLIMIT" )); then
119 | echo -n "Cannot upload yet, too many open sessions, sleeping "
120 | curl -s -b "JSESSIONID=$SESSID" -X DELETE "$XNATHOST/data/JSESSION"
121 | SLEEPSECS=$(( RANDOM % SLEEPMAX ))s && echo "$SLEEPSECS" && sleep $SLEEPSECS
122 | else
123 | echo "Looks good, ready to upload"
124 | break
125 | fi
126 | done
127 |
128 | # Upload outputs (zipping directories) to the project resource named after this run's analysis id
129 | cd $OUTDIR
130 | for i in *; do
131 | if [ -d $i ]; then
132 | FILE="${i%/}.zip"
133 | zip -r $FILE $i
134 | else
135 | FILE=$i
136 | fi
137 |
138 | # Upload it
139 | CMD="curl --fail -D - -s -b JSESSIONID=$SESSID -X PUT \"$XNATHOST/data/projects/$PROJECT/resources/${ANALYSISID}/files/${FILE}\" -F file=\"@$FILE\""
140 | eval result=\$\($CMD\)
141 | echo "result=$result"
142 | done
143 |
144 | # Disconnect XNAT
145 | curl -s -b "JSESSIONID=$SESSID" -X DELETE "$XNATHOST/data/JSESSION"
146 |
147 | # Delete local files
148 | rm -rf $JOBDIR
149 |
150 | echo "DONE!"
151 |
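152 | # For reference, an illustrative (not real) expansion of job_cmds:
153 | #
154 | #   ANALYSISID=analysis_001
155 | #   PROJECT=MYPROJECT
156 | #   REPO=https://github.com/ORG/REPO/archive/v1.0.0.tar.gz
157 | #   VERSION=v1.0.0
158 | #   INLIST=("t1.nii.gz,FILE,https://xnat.example.org/data/experiments/X/scans/1/resources/NIFTI/files/t1.nii.gz,anat")
159 | #   XNATHOST=https://xnat.example.org
160 | #   XNATUSER=daxuser
161 | #   MAINCMD="singularity run $OUTDIR/self/container.simg $INDIR $OUTDIR"
162 | #
163 | # Each INLIST entry is a comma-separated "name,type,uri,subdir" tuple: type
164 | # FILE downloads a single file, DIRJ downloads a zip and junks paths on
165 | # unzip, and anything else downloads a simplified-structure zip.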
--------------------------------------------------------------------------------
/bin/supplemental_tools/Xnatreport_assessors:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import json
4 | import sys
5 | import pandas
6 | import pyxnat
7 | import argparse
8 | import time
9 |
10 | from dax import XnatUtils
11 | from dax import utilities
12 |
13 |
14 | # Specify and parse command line arguments
15 | parser = argparse.ArgumentParser()
16 | parser.add_argument('-p', dest='project', help='Project name', required=True)
17 | args = parser.parse_args()
18 | print('Project: {}'.format(args.project))
19 |
20 |
21 | with XnatUtils.get_interface() as xnat:
22 | Assrs = xnat.list_project_assessors(args.project)
23 |
24 | outfile1 = f'status_by_assessor-{args.project}.csv'
25 | outfile2 = f'status_by_session-{args.project}.csv'
26 |
27 | # Or timestamp the output files
28 | #timestamp = time.strftime("%Y%m%d%H%M%S")
29 | #outfile1 = 'status_by_assessor_{}_{}.csv'.format(args.project,timestamp)
30 | #outfile2 = 'status_by_session_{}_{}.csv'.format(args.project,timestamp)
31 |
32 | R = list()
33 | for assr in Assrs:
34 |
35 | #print(assr)
36 | #sys.exit(0)
37 |
38 | # Get desired fields
39 | thisR = {}
40 | for key in (
41 | 'project_label',
42 | 'subject_label',
43 | 'session_label',
44 | 'proctype',
45 | 'procstatus',
46 | 'version',
47 | 'dax_docker_version',
48 | 'dax_version',
49 | 'dax_version_hash',
50 | 'assessor_id',
51 | 'jobstartdate',
52 | 'walltimeused',
53 | 'memused',
54 | 'jobnode',
55 | 'jobid',
56 | 'qcstatus',
57 | 'qcnotes',
58 | 'assessor_label',
59 | ):
60 | if key in assr:
61 | thisR[key] = assr[key]
62 | else:
63 | thisR[key] = ''
64 |
65 | # Clean up the inputs field, split on / and keep the last bit
66 | thisR['has_inputs'] = False
67 | if 'inputs' in assr and assr['inputs']:
68 | thisR['has_inputs'] = True
69 | inps = utilities.decode_url_json_string(assr['inputs'])
70 | for key in inps.keys():
71 | if isinstance(inps[key],list):
72 | thisR[key] = '(list)'
73 | else:
74 | thisR[key] = inps[key].split('/')[-1]
75 |
76 | # We need to explicitly copy here to avoid overwriting R
77 | R.append(thisR.copy())
78 |
79 | D = pandas.DataFrame(R)
80 |
81 | # Reorder columns
82 | colorder = (
83 | 'project_label',
84 | 'subject_label',
85 | 'session_label',
86 | 'assessor_label',
87 | 'assessor_id',
88 | 'proctype',
89 | 'procstatus',
90 |     'version',
91 | 'dax_docker_version',
92 | 'qcstatus',
93 | 'qcnotes',
94 | 'dax_version',
95 | 'dax_version_hash',
96 | 'jobstartdate',
97 | 'jobnode',
98 | 'jobid',
99 | 'walltimeused',
100 | 'memused',
101 | )
102 | oldcols = D.columns.tolist()
103 | newcols = list()
104 | for col in colorder:
105 | if col in oldcols:
106 | newcols.append(col)
107 | oldcols.remove(col)
108 | newcols.extend(oldcols)
109 |
110 | # Store the full list by assessor
111 | D.to_csv(outfile1,index=False,columns=newcols)
112 |
113 |
114 | # For each session, count how many assessors are present and how many are complete
115 |
116 | proctypes = D['proctype'].unique()
117 | sessions = D['session_label'].unique()
118 |
119 | # Loop through sessions, loop through uniques, count statuses
120 | S = list()
121 | for session in sessions:
122 |
123 | thisS = {}
124 | thisD = D.loc[D['session_label']==session,:]
125 |
126 | # Check that we only got one session's data
127 | sinfo = thisD[['project_label','subject_label','session_label']].drop_duplicates()
128 | if sinfo.shape[0]!=1:
129 | # Seems to happen when a label is empty
130 | print('Unexpected value - skipping in session report:')
131 |         for s in range(sinfo.shape[0]):
132 | print(' ' + sinfo['project_label'].values[s] + ' - ' +
133 | sinfo['subject_label'].values[s] + ' - ' +
134 | sinfo['session_label'].values[s])
135 | print(' ')
136 |
137 | thisS['project_label'] = sinfo['project_label'].values[0]
138 | thisS['subject_label'] = sinfo['subject_label'].values[0]
139 | thisS['session_label'] = sinfo['session_label'].values[0]
140 |
141 | for proctype in proctypes:
142 | thisthisD = thisD.loc[thisD['proctype']==proctype,:]
143 | total = thisthisD.shape[0]
144 | notcomplete = thisthisD.loc[thisthisD['procstatus']!='COMPLETE',:].shape[0]
145 | thisS[proctype+'_total'] = total
146 | thisS[proctype+'_notcomplete'] = notcomplete
147 |
148 | S.append(thisS.copy())
149 |
150 | DS = pandas.DataFrame(S)
151 |
152 | colorder = ('project_label','subject_label','session_label')
153 | oldcols = DS.columns.tolist()
154 | newcols = list()
155 | for col in colorder:
156 | newcols.append(col)
157 | oldcols.remove(col)
158 | newcols.extend(oldcols)
159 |
160 | DS.to_csv(outfile2,index=False,columns=newcols)
161 |
162 |
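163 | # Typical invocation (project label illustrative):
164 | #
165 | #   Xnatreport_assessors -p MYPROJECT
166 | #
167 | # writes status_by_assessor-MYPROJECT.csv and status_by_session-MYPROJECT.csv
168 | # to the current directory.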
--------------------------------------------------------------------------------
/bin/freesurfer_tools/fs6download:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | '''
5 | Download a FreeSurfer 6 subject from XNAT
6 | '''
7 |
8 | import os
9 | import sys
10 | import shutil
11 |
12 | from dax import XnatUtils
13 |
14 |
15 | __exe__ = os.path.basename(__file__)
16 | __author__ = 'Brian Boyd'
17 | __purpose__ = "Download FreeSurfer subject from XNAT"
18 | CURL_CMD = '''curl -qu {xuser}:{xpass} {xhost}/data/archive/projects/{proj}/\
19 | subjects/{subj}/experiments/{sess}/assessors/{asse}/out/resources/DATA/\
20 | files?format=zip > {zip}'''
21 |
22 |
23 | def parse_args():
24 | """
25 | Method to parse arguments base on ArgumentParser
26 |
27 |     :return: parsed argument namespace
28 | """
29 | from argparse import ArgumentParser
30 | ap = ArgumentParser(prog=__exe__, description=__purpose__)
31 | ap.add_argument(
32 | '--host', dest='host', default=None,
33 | help='Host for XNAT. Default: env XNAT_HOST.')
34 | ap.add_argument(
35 | '-u', '--username', dest='username', default=None,
36 | help='Username for XNAT.')
37 | ap.add_argument('project', help='Project Label')
38 | ap.add_argument('session', help='Session Label')
39 | ap.add_argument(
40 | 'proc_suffix', help='Proc name suffix', nargs='?', default='')
41 | ap.add_argument(
42 | '-sd', '--subjects_dir', dest='subjects_dir',
43 | help='Subjects Directory',
44 | default=os.environ.get('SUBJECTS_DIR', '/tmp'))
45 | return ap.parse_args()
46 |
47 |
48 | if __name__ == '__main__':
49 | args = parse_args()
50 | proj_label = args.project
51 | sess_label = args.session
52 | subjects_dir = args.subjects_dir
53 | fs = None
54 |
55 | # Check for suffix
56 | if not args.proc_suffix:
57 | proc_suffix = ''
58 | else:
59 | proc_suffix = args.proc_suffix
60 |
61 | if os.path.exists(os.path.join(subjects_dir, sess_label)):
62 | print('ERROR:cannot download, session exists in FS subjects dir')
63 | sys.exit(1)
64 |
65 | if args.host:
66 | host = args.host
67 | else:
68 | host = os.environ['XNAT_HOST']
69 |
70 | user = args.username
71 | with XnatUtils.get_interface(host=host, user=user) as xnat:
72 | print('INFO:connection to xnat <{}>:'.format(host))
73 |
74 | # Find the FreeSurfer assessor
75 | flist = []
76 | print('start running list_project_assessors')
77 | alist = xnat.list_project_assessors(proj_label)
78 | print('finish running list_project_assessors')
79 |
80 | alist = [a for a in alist if a['session_label'] == sess_label]
81 | flist = [a for a in alist if a['proctype'].startswith('FS6_')]
82 |
83 | if not flist:
84 | print('ERROR:FreeSurfer not found for session')
85 | sys.exit(1)
86 |
87 | if len(flist) == 1:
88 | fs = flist[0]
89 | elif not proc_suffix:
90 | print('ERROR:multiple FreeSurfers found, specify a suffix')
91 | sys.exit(1)
92 | else:
93 | flist2 = []
94 | flist2 = [a for a in flist if a['label'].endswith(proc_suffix)]
95 | if not flist2:
96 | print('ERROR:FreeSurfer not found with suffix')
97 | sys.exit(1)
98 |
99 | if len(flist2) == 1:
100 | fs = flist2[0]
101 | else:
102 |                 print('ERROR:multiple FreeSurfers found with suffix')
103 | sys.exit(1)
104 |
105 | # Download it
106 | assr_label = fs['assessor_label']
107 | out_zip = os.path.join(subjects_dir, sess_label + '.zip')
108 | if os.path.exists(os.path.join(subjects_dir, assr_label)):
109 | print('ERROR:cannot download, fs already exists locally')
110 | sys.exit(1)
111 |
112 | if os.path.exists(out_zip):
113 | print('ERROR:cannot download, zip already exists locally')
114 | sys.exit(1)
115 |
116 | print('Downloading:{}'.format(assr_label))
117 | cmd = CURL_CMD.format(
118 | xuser=xnat.user, xpass=xnat.pwd, xhost=xnat.host,
119 | proj=fs['project_id'], subj=fs['subject_id'],
120 | sess=fs['session_id'], asse=fs['assessor_id'],
121 | zip=out_zip)
122 | os.system(cmd)
123 |
124 | # Unzip
125 | os.chdir(subjects_dir)
126 | cmd = 'unzip -q {}'.format(out_zip)
127 | os.system(cmd)
128 |
129 | # Build path to data
130 | data_dir = os.path.join(
131 | subjects_dir, assr_label, 'out', 'resources', 'DATA', 'files')
132 |
133 | # Move files
134 | if os.path.exists(os.path.join(data_dir, 'mri')):
135 | # Move subdir containing FS subject up to level of SUBJECTS_DIR,
136 | # effectively renaming to session label
137 | src = data_dir
138 | dst = os.path.join(subjects_dir, sess_label)
139 | shutil.move(src, dst)
140 | else:
141 | print('ERROR:failed to find FreeSurfer data in downloaded files')
142 |
143 | # Delete the downloaded zip and directory
144 | shutil.rmtree(os.path.join(subjects_dir, assr_label))
145 | os.remove(out_zip)
146 |
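147 | # Typical invocation (labels illustrative), downloading into the FreeSurfer
148 | # subjects directory:
149 | #
150 | #   fs6download MYPROJECT SUBJ01_SESS01 -sd /data/freesurfer/subjects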
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | """ setup.py
5 |
6 | Packaging for dax
7 | """
8 |
9 | from glob import glob
10 | import os
11 | from setuptools import setup, find_packages
12 | import subprocess
13 |
14 |
15 | def git_version():
16 | def _minimal_ext_cmd(cmd):
17 | env = {}
18 | for k in ['SYSTEMROOT', 'PATH', 'HOME']:
19 | v = os.environ.get(k)
20 | if v is not None:
21 | env[k] = v
22 |
23 | out = subprocess.check_output(cmd, stderr=subprocess.STDOUT, env=env)
24 | return out
25 |
26 | try:
27 | out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])
28 | GIT_REVISION = out.strip().decode('ascii')
29 |     except (OSError, subprocess.CalledProcessError):
30 | GIT_REVISION = "Unknown"
31 |
32 | return GIT_REVISION
33 |
34 |
35 | def write_git_revision_py(filename='dax/git_revision.py'):
36 | cnt = """
37 | # THIS FILE IS GENERATED BY SETUP.PY
38 | git_revision = '%(git_revision)s'
39 | """
40 |
41 | GIT_REVISION = git_version()
42 |
43 | with open(filename, 'w') as f:
44 | f.write(cnt % {'git_revision': GIT_REVISION})
45 |
46 |
47 | def get_version():
48 | basedir = os.path.dirname(__file__)
49 | with open(os.path.join(basedir, 'dax/version.py')) as f:
50 |         version_ns = {}
51 |         exec(f.read(), version_ns)
52 |         VERSION = version_ns.get('VERSION')
53 |     if VERSION is None:
54 |         raise RuntimeError("No version found")
55 |     return VERSION
56 |
57 |
58 | def readme():
59 | with open('README.md') as f:
60 | return f.read()
61 |
62 |
63 | description = 'Distributed Automation for XNAT'
64 |
65 | # Note: this long_description is actually a copy/paste from the top-level
66 | # README.txt, so that it shows up nicely on PyPI. So please remember to edit
67 | # it only in one place and sync it correctly.
68 | long_description = """
69 | ========================================================
70 | DAX: Distributed Automation for XNAT
71 | ========================================================
72 |
73 | *DAX* is a Python package developed at Vanderbilt University, Nashville, TN,
74 | USA. It is available on GitHub at: https://github.com/VUIIS/dax.
75 |
76 | XNAT provides a flexible imaging informatics software platform to
77 | organize and manage imaging data.
78 |
79 | *DAX*, an open-source initiative under the umbrella of Vanderbilt University
80 | Institute of Imaging Science (VUIIS), is a Python project that provides a
81 | uniform interface to run pipelines on a cluster by grabbing data from an XNAT
82 | database via REST API calls.
83 |
84 | *DAX* allows you to:
85 |
86 | * extract information from XNAT via scripts (bin/Xnat_tools/Xnat...)
87 | * create pipelines (spiders/processors) to run pipelines on your data
88 | * build a project on XNAT with pipelines (assessors)
89 | * launch pipelines on your cluster and upload results back to XNAT
90 | * interact with XNAT via Python using commands in XnatUtils.
91 | """
92 |
93 | NAME = 'dax'
94 | MAINTAINER = 'VUIIS CCI'
95 | MAINTAINER_EMAIL = 'vuiis-cci@googlegroups.com'
96 | DESCRIPTION = description
97 | LONG_DESCRIPTION = long_description
98 | URL = "http://github.com/VUIIS/dax"
99 | DOWNLOAD_URL = "http://github.com/VUIIS/dax"
100 | LICENSE = 'MIT'
101 | CLASSIFIERS = [
102 | # As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
103 | "Development Status :: 5 - Production/Stable",
104 | "Environment :: Console",
105 | "Intended Audience :: Science/Research",
106 | "Operating System :: MacOS :: MacOS X",
107 | "Operating System :: POSIX",
108 | "Operating System :: POSIX :: Linux",
109 | "Operating System :: Unix",
110 | "Programming Language :: Python :: 3.7",
111 | "Topic :: Scientific/Engineering",
112 | "Topic :: Scientific/Engineering :: Information Analysis"]
113 | AUTHOR = MAINTAINER
114 | AUTHOR_EMAIL = MAINTAINER_EMAIL
115 | PLATFORMS = ["MacOs",
116 | "Linux"]
117 | VERSION = get_version()
118 |
119 | # versions
120 | SPHINX_MIN_VERSION = '4'
121 | PYXNAT_MIN_VERSION = '1.1.0.2'
122 |
123 | REQUIRES = [
124 | 'pyxnat>=%s' % PYXNAT_MIN_VERSION,
125 | 'pyyaml',
126 | 'pycap',
127 | 'pandas',
128 | 'fpdf2',
129 | 'PyPDF2',
130 | 'yamale']
131 |
132 | DOCS_REQUIRES = [
133 | 'Sphinx>=%s' % SPHINX_MIN_VERSION]
134 |
135 | TESTS_REQUIRES = ['nose']
136 |
137 | BIDS_REQUIRES = ['cubids-bond-fork', 'nibabel']
138 |
139 |
140 | if __name__ == '__main__':
141 | write_git_revision_py()
142 |
143 | setup(name=NAME,
144 | maintainer=MAINTAINER,
145 | maintainer_email=MAINTAINER_EMAIL,
146 | version=get_version(),
147 | description=DESCRIPTION,
148 | long_description=LONG_DESCRIPTION,
149 | url=URL,
150 | download_url=DOWNLOAD_URL,
151 | author=AUTHOR,
152 | author_email=AUTHOR_EMAIL,
153 | platforms=PLATFORMS,
154 | license=LICENSE,
155 | packages=find_packages(),
156 | include_package_data=True,
157 | test_suite='nose.collector',
158 | tests_require=TESTS_REQUIRES,
159 | install_requires=REQUIRES,
160 | python_requires='~=3.6',
161 | zip_safe=True,
162 | scripts=glob(os.path.join('bin', '*', '*')),
163 | classifiers=CLASSIFIERS,
164 | extras_require={
165 | 'docs': DOCS_REQUIRES,
166 | 'bids': BIDS_REQUIRES,
167 | })
168 |
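169 | # The extras defined above can be installed with, for example:
170 | #
171 | #   pip install .            # core install
172 | #   pip install '.[docs]'    # adds Sphinx for building the docs
173 | #   pip install '.[bids]'    # adds BIDS support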
--------------------------------------------------------------------------------
/bin/Xnat_tools/XnatBOND:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | '''
5 | XNATBond support
6 |
7 | @author: Praitayini Kanakaraj, Electrical Engineering, Vanderbilt University
8 |
9 | Alpha version
10 | '''
11 | import bond
12 | import logging
13 | import argparse
14 | from dax import XnatToBids
15 |
16 | DESCRIPTION = """What the script does:
17 | *Generates the csv files that summarize the key groups and param groups from the
18 | BIDS data, and modifies them in the BIDS data.
19 |
20 | Examples:
21 | *Generate original key and parameter groups:
22 | XnatBOND --bids_dir BIDS_DIR --bond_dir BOND_DIR
23 | *Update the key and parameter groups:
24 | XnatBOND --bids_dir BIDS_DIR --modify_keyparam
25 | """
26 | def add_parser_args():
27 | """
28 | Method to parse all the arguments for the tool on ArgumentParser
29 | :return: parser object
30 | """
31 | argp = argparse.ArgumentParser(prog='XnatBOND', description=DESCRIPTION,
32 | formatter_class=argparse.RawTextHelpFormatter)
33 |     # Script arguments
34 | argp.add_argument('--bids_dir', dest='bids_dir', required=True,
35 | help='BIDS data directory.')
36 | argp.add_argument('-b', '--bond_dir', dest='bond_dir', default=None,
37 | help='BOND data directory.')
38 | argp.add_argument('-m', '--modify_keyparam', dest='modify_keyparam', default=None, action='append',nargs=3,
39 | metavar=('keyparam_edited','keyparam_files','new_keyparam_prefix'),
40 | help='Values to modify the keyparam in bids.')
41 | argp.add_argument("-o", "--logfile", dest="logfile", default=None,
42 | help="Write the display/output in a file given to this OPTIONS.")
43 | return argp
44 |
45 | def get_proper_str(str_option, end=False):
46 | """
47 | Method to shorten a string into the proper size for display
48 |
49 | :param str_option: string to shorten
50 | :param end: keep the end of the string visible (default beginning)
51 | :return: shortened string
52 | """
53 | if len(str_option) > 32:
54 | if end:
55 | return '...' + str_option[-29:]
56 | else:
57 | return str_option[:29] + '...'
58 | else:
59 | return str_option
60 |
61 | def main_display():
62 | """
63 | Main display for the tool
64 | """
65 | print('################################################################')
66 | print('# XNATBOND #')
67 | print('# #')
68 | print('# Developed by the MASI Lab Vanderbilt University, TN, USA. #')
69 | print('# If issues, please start a thread here: #')
70 | print('# https://groups.google.com/forum/#!forum/vuiis-cci #')
71 | print('# Usage: #')
72 | print('# Generate and alternate key params in BIDS using BOND #')
73 | print('# Parameters : #')
74 |
75 | if OPTIONS.bids_dir:
76 | print('# %*s -> %*s#' % (
77 | -20, 'BIDS Directory', -33, get_proper_str(OPTIONS.bids_dir)))
78 | if OPTIONS.bond_dir:
79 | print('# %*s -> %*s#' % (
80 | -20, 'BONDS Directory', -33, get_proper_str(OPTIONS.bond_dir)))
81 | if OPTIONS.modify_keyparam:
82 | print('# %*s -> %*s#' % (
83 | -20, 'Keyparam edited file', -33, get_proper_str(OPTIONS.modify_keyparam[0][0])))
84 | print('# %*s -> %*s#' % (
85 | -20, 'Keyparam files csv', -33, get_proper_str(OPTIONS.modify_keyparam[0][1])))
86 | print('# %*s -> %*s#' % (
87 | -20, 'New Keyparam prefix', -33, get_proper_str(OPTIONS.modify_keyparam[0][2])))
88 | print('################################################################')
89 |
90 | def setup_info_logger(name, logfile):
91 | """
92 | Using logger for the executables output.
93 | Setting the information for the logger.
94 |
95 | :param name: Name of the logger
96 | :param logfile: log file path to write outputs
97 | :return: logging object
98 | """
99 | if logfile:
100 | handler = logging.FileHandler(logfile, 'w')
101 | else:
102 | handler = logging.StreamHandler()
103 |
104 | logger = logging.getLogger(name)
105 | logger.setLevel(logging.INFO)
106 | logger.addHandler(handler)
107 | return logger
108 |
109 | if __name__ == '__main__':
110 | parser = add_parser_args()
111 | OPTIONS = parser.parse_args()
112 | main_display()
113 | LOGGER = setup_info_logger('XnatBOND', OPTIONS.logfile)
114 | if OPTIONS.bond_dir:
115 | LOGGER.info("Detecting Key and Parameter Groups")
116 | XnatToBids.XNATBond(OPTIONS.bids_dir).generate_params(OPTIONS.bond_dir)
117 | LOGGER.info("Finished: Key and Parameter Groups stored in %s" % (OPTIONS.bond_dir))
118 | if OPTIONS.modify_keyparam:
119 | keyparam_edited = OPTIONS.modify_keyparam[0][0]
120 | keyparam_files = OPTIONS.modify_keyparam[0][1]
121 |         new_keyparam_prefix = OPTIONS.modify_keyparam[0][2]
122 | LOGGER.info("Modifying Key and Parameter Group Assignments")
123 | XnatToBids.XNATBond(OPTIONS.bids_dir).edit_params(keyparam_edited,keyparam_files,new_keyparam_prefix)
124 | LOGGER.info("Finished: Key and Parameter Groups from %s modified file stored as %s" % (keyparam_edited,new_keyparam_prefix))
--------------------------------------------------------------------------------
/dax/dax_settings.py:
--------------------------------------------------------------------------------
1 | import os
2 | import stat
3 | import netrc
4 | from string import Template
5 | from urllib.parse import urlparse
6 |
7 | from .errors import DaxNetrcError
8 |
9 | # Assessor datatypes
10 | DEFAULT_FS_DATATYPE = 'fs:fsData'
11 | DEFAULT_DATATYPE = 'proc:genProcData'
12 | DEFAULT_SGP_DATATYPE = 'proc:subjGenProcData'
13 |
14 | SLURM_JOBMEM = "sacct -j ${jobid}.batch --format MaxRss --noheader | awk '{print $1+0}'"
15 |
16 | SLURM_JOBTIME = "sacct -j ${jobid}.batch --format CPUTime --noheader"
17 |
18 | SLURM_COUNTJOBS = 'squeue --me --noheader | wc -l'
19 |
20 | SLURM_COUNTJOBS_LAUNCHED = 'squeue --me --noheader | wc -l'
21 |
22 | SLURM_COUNTJOBS_PENDING = 'squeue --me -t PENDING --noheader | wc -l'
23 |
24 | SLURM_COUNTJOBS_LAUNCHED_RCQ = 'squeue -A ${job_rungroup} --noheader | wc -l'
25 |
26 | SLURM_COUNTJOBS_PENDING_RCQ = 'squeue -A ${job_rungroup} -t PENDING --noheader | wc -l'
27 |
28 | SLURM_COUNT_PENDINGUPLOADS = 'find ${resdir} -maxdepth 2 -name READY_TO_UPLOAD.txt |wc -l'
29 |
30 | SLURM_JOBNODE = "sacct -j ${jobid}.batch --format NodeList --noheader"
31 |
32 | SLURM_JOBSTATUS = "squeue -j ${jobid} --noheader | awk {'print $5'}"
33 |
34 | SLURM_EXT = '.slurm'
35 |
36 | SLURM_SUBMIT = 'sbatch'
37 |
38 | SLURM_COMPLETE = 'slurm_load_jobs error: Invalid job id specified'
39 |
40 | SLURM_RUNNING = 'R'
41 |
42 | SLURM_QUEUED = 'Q'
43 |
44 |
45 | class DAX_Netrc(object):
46 | """Class for DAX NETRC file containing information about XNAT logins """
47 | def __init__(self):
48 | self.netrc_file = os.path.expanduser('~/.netrc')
49 | if not os.path.exists(self.netrc_file):
50 | open(self.netrc_file, 'a').close()
51 | # Setting mode for the file:
52 | os.chmod(self.netrc_file, stat.S_IWUSR | stat.S_IRUSR)
53 | self.is_secured()
54 | self.netrc_obj = netrc.netrc(self.netrc_file)
55 |
56 | def is_secured(self):
57 | """ Check if file is secure."""
58 | st = os.stat(self.netrc_file)
59 | grp_access = bool(st.st_mode & stat.S_IRWXG)
60 | other_access = bool(st.st_mode & stat.S_IRWXO)
61 | if grp_access or other_access:
62 | msg = 'Wrong permissions on %s. Only user should have access.'
63 | raise DaxNetrcError(msg % self.netrc_file)
64 |
65 | def is_empty(self):
66 | """ Return True if no host stored."""
67 | return len(list(self.netrc_obj.hosts.keys())) == 0
68 |
69 | def has_host(self, host):
70 | """ Return True if host present."""
71 | return host in list(self.netrc_obj.hosts.keys())
72 |
73 | def add_host(self, host, user, pwd):
74 | """ Adding host to daxnetrc file."""
75 | netrc_template = \
76 | """machine {host}
77 | login {user}
78 | password {pwd}
79 | """
80 | parsed_host = urlparse(host).hostname
81 |
82 | with open(self.netrc_file, "a") as f_netrc:
83 | lines = netrc_template.format(host=parsed_host, user=user, pwd=pwd)
84 | f_netrc.writelines(lines)
85 |
86 | def get_hosts(self):
87 |         """ Return list of hosts from netrc file."""
88 | return list(self.netrc_obj.hosts.keys())
89 |
90 | def get_login(self, host):
91 | """ Getting login for a host from .netrc file."""
92 | parsed_host = urlparse(host).hostname
93 |
94 | netrc_info = self.netrc_obj.authenticators(parsed_host)
95 | if not netrc_info:
96 | msg = 'Host {} not found in netrc file.'.format(host)
97 | raise DaxNetrcError(msg)
98 |
99 | return netrc_info[0], netrc_info[2]
100 |
101 |
102 | class DAX_Settings(object):
103 | def __init__(self):
104 | pass
105 |
106 | def get_user_home(self):
107 | return os.path.expanduser('~')
108 |
109 | def get_xsitype_include(self):
110 | return ['proc:genProcData']
111 |
112 | def get_cmd_submit(self):
113 | return SLURM_SUBMIT
114 |
115 | def get_prefix_jobid(self):
116 | return 'Submitted batch job'
117 |
118 | def get_suffix_jobid(self):
119 | return ''
120 |
121 | def get_cmd_count_nb_jobs(self):
122 | return SLURM_COUNTJOBS
123 |
124 | def get_cmd_count_jobs_launched(self):
125 | return SLURM_COUNTJOBS_LAUNCHED
126 |
127 | def get_cmd_count_jobs_pending(self):
128 | return SLURM_COUNTJOBS_PENDING
129 |
130 | def get_cmd_count_jobs_launched_rcq(self):
131 | return Template(SLURM_COUNTJOBS_LAUNCHED_RCQ)
132 |
133 | def get_cmd_count_jobs_pending_rcq(self):
134 | return Template(SLURM_COUNTJOBS_PENDING_RCQ)
135 |
136 | def get_cmd_count_pendinguploads(self):
137 | return Template(SLURM_COUNT_PENDINGUPLOADS)
138 |
139 | def get_cmd_get_job_status(self):
140 | return Template(SLURM_JOBSTATUS)
141 |
142 | def get_queue_status(self):
143 | return SLURM_QUEUED
144 |
145 | def get_running_status(self):
146 | return SLURM_RUNNING
147 |
148 | def get_complete_status(self):
149 | return SLURM_COMPLETE
150 |
151 | def get_cmd_get_job_memory(self):
152 | return Template(SLURM_JOBMEM)
153 |
154 | def get_cmd_get_job_walltime(self):
155 | return Template(SLURM_JOBTIME)
156 |
157 | def get_cmd_get_job_node(self):
158 | return Template(SLURM_JOBNODE)
159 |
160 | def get_job_extension_file(self):
161 | return SLURM_EXT
162 |
163 | def get_root_job_dir(self):
164 | return '/tmp'
165 |
166 | def get_launcher_type(self):
167 | return 'diskq-combined'
168 |
169 | def get_use_reference(self):
170 | return False
171 |
172 | def get_email_opts(self):
173 | return 'FAIL'
174 |
175 | def get_job_template(self, filepath):
176 | filepath = os.path.expanduser(filepath)
177 |
178 | with open(filepath, 'r') as f:
179 | data = f.read()
180 |
181 | return Template(data)
182 |
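183 |
184 | # Example (illustrative) of how the Template-based commands are filled in:
185 | #
186 | #   DAX_Settings().get_cmd_get_job_status().substitute(jobid='12345')
187 | #
188 | # yields: squeue -j 12345 --noheader | awk {'print $5'}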
--------------------------------------------------------------------------------
/bin/freesurfer_tools/fs6upload:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | '''
5 | Upload manual edits to FreeSurfer 6 subject on XNAT
6 | '''
7 |
8 | import os
9 | import sys
10 | import subprocess
11 | import time
12 |
13 | from dax import XnatUtils, task
14 |
15 |
16 | __exe__ = os.path.basename(__file__)
17 | __author__ = 'Brian Boyd'
18 | __purpose__ = "Upload FreeSurfer edits to subject on XNAT"
19 |
20 |
21 | def mri_diff(file1, file2):
22 | """
23 | Method to estimate the difference between two files using mri_diff
24 |
25 | :param file1: path to file 1
26 | :param file2: path to file 2
27 | :return: parser object parsed
28 | """
29 | cmd = 'mri_diff {} {}'.format(file1, file2)
30 |
31 | try:
32 | output = subprocess.call(cmd, stderr=subprocess.STDOUT, shell=True)
33 | return output
34 | except (subprocess.CalledProcessError, ValueError):
35 | return -1
36 |
37 |
38 | def parse_args():
39 | """
40 | Method to parse arguments base on ArgumentParser
41 |
42 |     :return: parsed argument namespace
43 | """
44 | from argparse import ArgumentParser
45 | ap = ArgumentParser(prog=__exe__, description=__purpose__)
46 | ap.add_argument(
47 | '--host', dest='host', default=None,
48 | help='Host for XNAT. Default: env XNAT_HOST.')
49 | ap.add_argument(
50 | '-u', '--username', dest='username', default=None,
51 | help='Username for XNAT.')
52 | ap.add_argument('project', help='Project Label')
53 | ap.add_argument('session', help='Session Label')
54 | ap.add_argument(
55 | 'proc_suffix', help='Proc name suffix', nargs='?', default='')
56 | ap.add_argument(
57 | '-sd', '--subjects_dir', dest='subjects_dir',
58 | help='Subjects Directory',
59 | default=os.environ.get('SUBJECTS_DIR', '/tmp'))
60 | return ap.parse_args()
61 |
62 |
63 | if __name__ == '__main__':
64 | args = parse_args()
65 | proj_label = args.project
66 | sess_label = args.session
67 | subjects_dir = args.subjects_dir
68 | fs = None
69 | if not args.proc_suffix:
70 | proc_suffix = ''
71 | else:
72 | proc_suffix = args.proc_suffix
73 |
74 | local_subj_path = os.path.join(subjects_dir, sess_label)
75 | if not os.path.exists(local_subj_path):
76 | print('ERROR:cannot upload, subject not found locally')
77 | sys.exit(1)
78 |
79 | if args.host:
80 | host = args.host
81 | else:
82 | host = os.environ.get('XNAT_HOST', None)
83 |
84 | user = args.username
85 | with XnatUtils.get_interface(host=host, user=user) as xnat:
86 | print('INFO: connection to xnat <{}>:'.format(host))
87 |
88 | # Find the FreeSurfer assessor
89 | fs_list = []
90 | alist = xnat.list_project_assessors(proj_label)
91 | alist = [a for a in alist if a['session_label'] == sess_label]
92 | flist = [a for a in alist if a['proctype'].startswith('FS6_')]
93 |
94 | if not flist:
95 | print('ERROR:FreeSurfer not found on XNAT')
96 | sys.exit(1)
97 |
98 | if len(flist) == 1:
99 | fs = flist[0]
100 | elif not proc_suffix:
101 |             print('ERROR:multiple FreeSurfers found, specify suffix')
102 | sys.exit(1)
103 |
104 | else:
105 | flist2 = []
106 | flist2 = [a for a in flist if a['label'].endswith(proc_suffix)]
107 |
108 | if not flist2:
109 | print('ERROR:FreeSurfer not found with suffix')
110 | sys.exit(1)
111 |
112 | if len(flist2) == 1:
113 | fs = flist2[0]
114 | else:
115 |                 print('ERROR:multiple FreeSurfers found with suffix')
116 | sys.exit(1)
117 |
118 | # TODO: Check for edits saved with original filenames
119 | # res = mri_diff(local_subj_path + '/mri/brainmask.auto.mgz',
120 | # local_subj_path + '/mri/brainmask.mgz')
121 | # print('diff brainmask result='+str(res))
122 |
123 | # res = mri_diff(local_subj_path + '/mri/aseg.auto.mgz',
124 | # local_subj_path + '/mri/aseg.mgz')
125 | # print('diff aeg result='+str(res))
126 |
127 | # Upload the edits - brainmask, wm, aseg, control.dat,...
128 | #assessor_obj = XnatUtils.get_full_object(fs)
129 | assessor_obj = xnat.select_assessor(
130 | fs['project_label'],
131 | fs['subject_label'],
132 | fs['session_label'],
133 | fs['label'])
134 |
135 | resource = assessor_obj.out_resource('EDITS')
136 | curtime = time.strftime("%Y%m%d-%H%M%S")
137 | brainmask_path = os.path.join(
138 | local_subj_path, 'mri', 'brainmask.edited.mgz')
139 | wm_path = os.path.join(local_subj_path, 'mri', 'wm.edited.mgz')
140 | aseg_path = os.path.join(local_subj_path, 'mri', 'aseg.edited.mgz')
141 | control_path = os.path.join(local_subj_path, 'tmp', 'control.dat')
142 |
143 | if os.path.isfile(brainmask_path):
144 | print('Uploading brainmask...')
145 | resource.file(
146 | 'brainmask.edited.mgz.{}'.format(curtime)).put(brainmask_path)
147 | else:
148 | print('No edited brainmask found')
149 |
150 | if os.path.isfile(wm_path):
151 | print('Uploading wm...')
152 | resource.file('wm.edited.mgz.{}'.format(curtime)).put(wm_path)
153 | else:
154 | print('No edited wm found')
155 |
156 | if os.path.isfile(aseg_path):
157 | print('Uploading aseg...')
158 | resource.file('aseg.edited.mgz.{}'.format(curtime)).put(aseg_path)
159 | else:
160 | print('No edited aseg found')
161 |
162 | if os.path.isfile(control_path):
163 | print('Uploading control.dat...')
164 | resource.file('control.dat.{}'.format(curtime)).put(control_path)
165 | else:
166 | print('No edited control points found')
167 |
168 | # Set QC Status to trigger reprocessing
169 | print('Setting qcstatus to trigger reprocessing...')
170 | assessor_obj.attrs.set(
171 | 'xnat:imageAssessorData/validation/status', task.REPROC)
172 |
173 | print('DONE')
174 |
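175 | # Typical workflow (labels illustrative): download the subject with
176 | # fs6download, make manual edits in FreeSurfer, then push the edits back and
177 | # flag the assessor for reprocessing with:
178 | #
179 | #   fs6upload MYPROJECT SUBJ01_SESS01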
--------------------------------------------------------------------------------
/docs/dax_manager.rst:
--------------------------------------------------------------------------------
1 | DAX Manager
2 | ===========
3 |
4 | Table of Contents:
5 | ~~~~~~~~~~~~~~~~~~
6 |
7 | 1. `About <#about>`__
8 | 2. `How to set it up <#how-to-set-it-up>`__
9 | 3. `DAX 1 <#dax-1>`__
10 | 1. `How to add a Module in DAX 1 <#how-to-add-a-module-in-dax-1>`__
11 | 2. `How to add a Process in DAX 1 <#how-to-add-a-process-in-dax-1>`__
12 | 4. `LDAX <#ldax>`__
13 | 1. `How to add a Module in LDAX <#how-to-add-a-module-in-ldax>`__
14 | 2. `How to add a Process in LDAX <#how-to-add-a-process-in-ldax>`__
15 |
16 | --------------
17 |
18 | -----
19 | About
20 | -----
21 | DAX Manager is an optional tool hosted in REDCap which allows you to quickly generate settings files that can be
22 | launched with DAX. This alleviates the need to manually write settings files and makes updating scan types, walltimes,
23 | etc. a much quicker and streamlined process.
24 |
25 | ----------------
26 | How to set it up
27 | ----------------
28 | The main instrument should be called General and contains a number of standard variables that are required for DAX to
29 | interface with DAX Manager appropriately. For convenience, a copy of the latest data dictionary has been included
30 | and can be downloaded below for reference. It is suggested to use this version even if you do not plan on running all of the
31 | spiders because it is currently being used in production:
32 |
33 | https://github.com/VUIIS/dax/blob/master/docs/files/dax_manager/XNATProjectSettings_DataDictionary_2016-01-21.csv
34 |
35 | DAX 1
36 | ~~~~~
37 |
38 | ----------------------------
39 | How to add a Module in DAX 1
40 | ----------------------------
41 | Variables used in a module must all start with the FULL module name. For example, consider "Module dcm2niix". All of the variables for this module must start with "module_dcm2niix_". There are 2 required variables. The first is the "Module File" variable. This variable for "Module dcm2niix" would be called "module_dcm2niix_file". The "Action Tags / Field Annotation" should be @DEFAULT="MODULE_NAME". See below for an example.
42 |
43 | .. image:: images/dax_manager/dcm2niix_file.PNG
44 |
45 | The second required variable is the "Module Arguments" variable. In the case of "Module dcm2niix", this variable would be called "module_dcm2niix_args". See below for an example.
46 |
47 | .. image:: images/dax_manager/dcm2niix_args.PNG
48 |
49 | -----------------------------
50 | How to add a Process in DAX 1
51 | -----------------------------
52 | Processes are set up very similarly to Modules. There are 2 required variables, "Processor YAML File" and "Processor Arguments". The variable names use slightly different naming conventions than Modules. For example, consider "Processor slant_v1". The "Processor YAML File" variable should be named "slant_v1_file" and the "Action Tags / Field Annotation" field should contain the full name of the processor (@DEFAULT="slant_v1.0.0_processor.yaml"). See below for an example.
53 |
54 | .. image:: images/dax_manager/slant_file.PNG
55 |
56 | The second required variable, "Processor Arguments" follows the same naming conventions. See below for an example.
57 |
58 | .. image:: images/dax_manager/slant_args.PNG
59 |
60 | LDAX
61 | ~~~~
62 |
63 | ---------------------------
64 | How to add a Module in LDAX
65 | ---------------------------
66 | Variables used in a module must all start with the text immediately AFTER Module. For example, consider
67 | "Module dcm2nii philips". All of the variables for this module must start with "dcm2nii_philips_". One required variable
68 | is the "on" variable. This variable, again, in the case of "Module dcm2nii philips", would be called "dcm2nii_philips_on".
69 | This is used to check to see if the module related to this record in REDCap should be run for your project or not. It must
70 | also be of the yes/no REDCap type. If you do not have this variable included, you will get errors when you run dax_manager.
71 | The second required variable is the "Module name" variable. In the case of "Module dcm2nii philips", this variable is called
72 | "dcm2nii_philips_mod_name". This relates to the class name of the python module file. This information is stored in the
73 | REDCap "Field Note" (See below).
74 | .. image:: images/dax_manager/dax_manager_module_field_note.png
75 |
76 | This variable must be a REDCap Text Box type (as do all other variables at this point). This must be entered in the
77 | following format: "Default: ". All other variables that are used must also start with the "dcm2nii_philips_"
78 | prefix and must match those of the module init.
79 |
80 | Additionally, for the sake of user-friendliness, all variables should use REDCap's branching logic to only appear if the
81 | module is "on". It is important to note that in all cases, the REDCap "Field Label" is not used in any automated fashion,
82 | but should be something obvious to the users.
83 |
84 | ----------------------------
85 | How to add a Process in LDAX
86 | ----------------------------
87 | Just like in the case of Modules, Processes follow a close formatting pattern. Similarly, all process variables should
88 | start with the text immediately after "Process ". For this example, consider "Process Multi_Atlas". Just like in the case
89 | of the modules, the first variable should be a REDCap yes/no and should be called "multi_atlas_on". The remainder of the
90 | variables should all be of REDCap type "Text Box". The next required variable is the "Processor Name" variable which must
91 | be labeled with the "_proc_name" suffix. In the case of "Process Multi_Atlas", this is called
92 | "multi_atlas_proc_name". Just like in the case of the Module, the class name of the processor should be entered in the REDCap
93 | Field Note after "Default: ".
94 |
95 | There are several other required variables which will be enumerated below (suffix listed first):
96 |
97 | #. _suffix_proc - Used to determine the processor suffix (if any)
98 | #. _version - The version of the spider (1.0.0, 2.0.1 etc)
99 | #. _walltime - The amount of walltime to use for the spider when executed on the grid
100 | #. _mem_mb - The amount of ram to request for the job to run. Note this should be in megabytes
101 | #. _scan_types - If writing a ScanProcessor, this is required. If writing a SessionProcessor, this is not required. This, in the case of a ScanProcessor, is used to filter out the scan types that the processor will accept to run the spider on.
102 |
103 | Just like in the case of a Module, all variables other than the "on" variable should use REDCap branching logic to only
104 | be visible when the process is "on".
105 |
--------------------------------------------------------------------------------
/bin/supplemental_tools/XnatDetailedReport.m:
--------------------------------------------------------------------------------
1 | function [scan_detail_file,sess_detail_file] = XnatDetailedReport( ...
2 | scan_file,assr_file,out_dir)
3 | % MATLAB R2015b+ function to merge XNAT scan and assessor reports for a
4 | % project, to generate human-readable lists of assessor status indexed by
5 | % scan and session. Example function call:
6 | % [scan_detail_file,sess_detail_file] = XnatDetailedReport( ...
7 | % 'report_scan.csv','report_assr.csv','.')
8 | %
9 | % Requires commit b81943a or later of DAX (https://github.com/VUIIS/dax) so
10 | % that Xnatreport quotes fields correctly in its output.
11 | %
12 | % INPUTS
13 | %
14 | % scan_file CSV output of Xnatreport run with the specific command line
15 | % Xnatreport -p -c report_scan.csv --format \
16 | % object_type,project_id,subject_label,session_label,scan_id,type,series_description,quality,note
17 | %
18 | % assr_file CSV output of Xnatreport run with the specific command line
19 | % Xnatreport -p -c report_assr.csv --format \
20 | % object_type,project_id,subject_label,session_label,assessor_label,proctype,procstatus,qcstatus,version
21 | %
22 | % out_dir Where the output files detailed_report_scan.csv and
23 | % detailed_report_sess.csv will be saved.
24 | %
25 | % Multiple projects may be specified on the Xnatreport command lines in the
26 | % usual way using a comma separated list. See the adjacent file
27 | % generate_xnat_reports.sh for a complete example.
28 | %
29 | %
30 | % OUTPUTS
31 | %
32 | % Two detailed reports are produced:
33 | %
34 | % detailed_report_scan.csv For scan assessors. Each scan gets a row, and
35 | % columns are added for the status of each
36 | % associated scan assessor.
37 | %
38 | % detailed_report_sess.csv For session assessors. Each session gets a row,
39 | % and columns are added for the status of each
40 | % associated session assessor.
41 | %
42 | %
43 | % It shouldn't be too hard to refactor this in python using pandas data
44 | % frames if that is needed. Essentially we're just joining tables.
45 |
46 | % Read data from Xnatreport output. We rely on very specific formats here.
47 | disp('Reading Xnatreports')
48 | scan = readtable(scan_file, ...
49 | 'Format','%C%C%C%C%C%q%q%q%q', ...
50 | 'HeaderLines',2);
51 | assr = readtable(assr_file, ...
52 | 'Format','%C%C%C%C%C%q%q%q%q', ...
53 | 'HeaderLines',2);
54 |
55 | % These fields should not be empty. If they are, we replace the contents
56 | % with "MISSING" so they'll be easy to notice.
57 | assr.proctype(cellfun(@isempty,assr.proctype)) = {'MISSING'};
58 | assr.procstatus(cellfun(@isempty,assr.procstatus)) = {'MISSING'};
59 | assr.qcstatus(cellfun(@isempty,assr.qcstatus)) = {'MISSING'};
60 |
61 |
62 | % We need the scan IDs in the assessor table - extract them from assessor
63 | % labels and put them in a new column. While we're at it, label the session
64 | % assessors as such based on their lack of a scan ID in the assessor label.
65 | %
66 | % There is probably some much faster way to do this with cellfun or varfun
67 | % but that code comes out almost unreadable so we'll just be patient.
68 | disp('Extracting assessor scan_id')
69 | warning('off','MATLAB:table:RowsAddedNewVars')
70 | for h = 1:height(assr)
71 | q = strsplit(char(assr.assessor_label(h)),'-x-');
72 | if length(q)==5
73 | assr.scan_id{h} = q{4};
74 | else
75 | assr.scan_id{h} = 'sess';
76 | end
77 | end
78 | assr.scan_id = categorical(assr.scan_id);
79 |
80 | % Identify all proctypes that are present for scan and session assessors.
81 | proctypes_scan = cellstr(unique(assr.proctype(assr.scan_id~='sess')));
82 | proctypes_sess = cellstr(unique(assr.proctype(assr.scan_id=='sess')));
83 |
84 |
85 | % One scan assessor type at a time, make a table whose columns contain the
86 | % assessor status info for each scan, and merge it with the existing scan
87 | % table.
88 | disp('Merging for scans')
89 | newscan = scan;
90 | newscan.object_type = [];
91 | for p = 1:length(proctypes_scan)
92 | thisassr = assr(strcmp(assr.proctype,proctypes_scan{p}),:);
93 | thisassr = thisassr(:, ...
94 | {'project_id','subject_label','session_label','scan_id', ...
95 | 'procstatus','qcstatus','version'});
96 | thisassr.Properties.VariableNames{'procstatus'} = ...
97 | [proctypes_scan{p} '_procstatus'];
98 | thisassr.Properties.VariableNames{'qcstatus'} = ...
99 | [proctypes_scan{p} '_qcstatus'];
100 | thisassr.Properties.VariableNames{'version'} = ...
101 | [proctypes_scan{p} '_version'];
102 |
103 | newscan = outerjoin( ...
104 | newscan, ...
105 | thisassr, ...
106 | 'Keys',{'project_id','subject_label','session_label','scan_id'}, ...
107 | 'MergeKeys', true, ...
108 | 'Type','Left' ...
109 | );
110 |
111 | end
112 |
113 | % Save the scan assessor table to file. We will timestamp it according to
114 | % the modification time of the XNAT assessor report.
115 | disp('Writing scan data to file')
116 | d = dir(assr_file);
117 | timestamp = datestr(d.date,'yyyymmddHHMMSS');
118 | scan_detail_file = fullfile(out_dir, ...
119 | ['detailed_report_scan_' timestamp '.csv']);
120 | writetable(newscan,scan_detail_file,'QuoteStrings',true)
121 |
122 |
123 | % Now for session assessors. One session assessor type at a time, make a
124 | % table whose columns contain the assessor status info for each session,
125 | % and merge it with the existing session table.
126 | if isempty(proctypes_sess)
127 |
128 | disp('No session assessors found')
129 |
130 | else
131 |
132 | disp('Merging for sessions')
133 | for p = 1:length(proctypes_sess)
134 | thisassr = assr(strcmp(assr.proctype,proctypes_sess{p}),:);
135 | thisassr = thisassr(:, ...
136 | {'project_id','subject_label','session_label', ...
137 | 'procstatus','qcstatus','version'});
138 | thisassr.Properties.VariableNames{'procstatus'} = ...
139 | [proctypes_sess{p} '_procstatus'];
140 | thisassr.Properties.VariableNames{'qcstatus'} = ...
141 | [proctypes_sess{p} '_qcstatus'];
142 | thisassr.Properties.VariableNames{'version'} = ...
143 | [proctypes_sess{p} '_version'];
144 |
145 | if p==1
146 | newsess = thisassr;
147 | else
148 | newsess = outerjoin( ...
149 | newsess, ...
150 | thisassr, ...
151 | 'Keys',{'project_id','subject_label','session_label'}, ...
152 | 'MergeKeys', true, ...
153 | 'Type','Full' ...
154 | );
155 | end
156 |
157 | end
158 |
159 | % Save the session assessor table to file
160 | disp('Writing session data to file')
161 | sess_detail_file = fullfile(out_dir, ...
162 | ['detailed_report_sess_' timestamp '.csv']);
163 | writetable(newsess,sess_detail_file,'QuoteStrings',true)
164 |
165 | end
166 |
167 |
168 | % We're done
169 | return
170 |
--------------------------------------------------------------------------------
/dax/utilities.py:
--------------------------------------------------------------------------------
1 | import itertools as it
2 | import json
3 | import html
4 | import fnmatch
5 | import yaml
6 | import os
7 | import shutil
8 | import re
9 | import smtplib
10 | from email.mime.text import MIMEText
11 | import socket
12 |
13 | from .errors import DaxError
14 |
15 |
16 | def parse_list(csv_string):
17 | """
18 | Split string on commas including any leading/trailing spaces with split
19 | """
20 | return re.split(r'\s*,\s*', csv_string)
21 |
22 |
23 | def decode_url_json_string(json_string):
24 | """
25 |     Load a string representing serialised json into a python dictionary
26 |     :param json_string: the (possibly html-escaped) serialised json string
27 |     :return: the parsed json as a dictionary
28 | """
29 | strings = json.loads(html.unescape(json_string),
30 | object_pairs_hook=parse_json_pairs)
31 | return strings
32 |
33 |
34 | # TODO: BenM/assessor_of_assessor/document me!
35 | # useful function for preventing key-value pairs in serialized json from being
36 | # auto-converted to unicode
37 | def parse_json_pairs(pairs):
38 | """
39 | An object hook for the json.loads method. Used in decode_url_json_string.
40 | :param pairs:
41 | :return: A dictionary of parsed json
42 | """
43 | sink_pairs = []
44 | for k, v in pairs:
45 | if isinstance(k, str):
46 | k = k.encode('utf-8').decode()
47 | if isinstance(v, str):
48 | v = v.encode('utf-8').decode()
49 | sink_pairs.append((k, v))
50 | return dict(sink_pairs)
51 |
52 |
53 | def groupby_to_dict(source, group_pred):
54 | """
55 | Given a source iterable and a predicate defining how to group elements of
56 | the source iterable, convert the source iterable into a dictionary grouped
57 | by the key returned by the predicate.
58 | Example:
59 |     source iterable: [{a:1, b:2}, {a:2, b:3}, {a:1, b:4}]
60 | group_pred: lambda x: x[a]
61 |
62 | results in:
63 | {
64 | 1:[{a:1, b:2}, {a:1, b:4}],
65 | 2:[{a:2, b:3}]
66 | }
67 |
68 | :param source: a keyless iterable (list or dictionary values or similar)
69 | :param group_pred: a function to determine the key by which each entry
70 | is grouped
71 | :return: the resulting dictionary of elements
72 | """
73 | results = dict()
74 |
75 | for k, v in it.groupby(source, group_pred):
76 | d = results.get(k, list())
77 | d.extend(list(v))
78 | results[k] = d
79 |
80 | return results
81 |
82 |
83 | def groupby_groupby_to_dict(source, outer_pred, inner_pred):
84 | """
85 | Given a source iterable and two predicates defining how to group elements
86 | of the source iterable, convert the source iterable into a dictionary of
87 | dictionaries grouped by the keys returned by the predicate.
88 |
89 | :param source: a keyless iterable (list or dictionary values or similar)
90 | :param outer_pred: a function to determine the top level key by which each
91 | entry is grouped
92 | :param inner_pred: a function to determine the second level by which each
93 | entry is grouped
94 | :return: the resulting dictionary of dictionaries of elements
95 | """
96 | return {
97 | k: groupby_to_dict(v, inner_pred)
98 | for k, v in list(groupby_to_dict(source, outer_pred).items())
99 | }
100 |
101 |
102 | def find_with_pred(items, pred):
103 | """
104 | Given a source iterable and a predicate defining how to identify an
105 | element, find and return the element if it is present, or None if the
106 | element is not found.
107 |
108 | :param items: a keyless iterable (list or dictionary values or similar)
109 | :param pred:
110 | :return: the element found by the predicate, or None
111 | """
112 | for i in items:
113 | if pred(i):
114 | return i
115 | return None
116 |
117 |
118 | def strip_leading_and_trailing_spaces(list_arg):
119 | return ','.join([x.strip() for x in list_arg.split(',')])
120 |
121 |
122 | def extract_exp(expression, full_regex=False):
123 |     """Extract the expression with or without full_regex.
124 |
125 | :param expression: string to filter
126 | :param full_regex: using full regex
127 | :return: regex Object from re package
128 | """
129 |     if not full_regex:
130 |         expression = fnmatch.translate(expression)
131 |     return re.compile(expression)
132 |
133 |
134 | def clean_directory(directory):
135 | """
136 | Remove a directory tree or file
137 |
138 | :param directory: The directory (with sub directories if desired that you
139 | want to delete). Also works with a file.
140 | :return: None
141 |
142 | """
143 | for fname in os.listdir(directory):
144 | fpath = os.path.join(directory, fname)
145 | if os.path.isdir(fpath):
146 | shutil.rmtree(fpath)
147 | else:
148 | os.remove(fpath)
149 |
150 |
151 | def check_image_format(fpath):
152 | """
153 | Check to see if a NIfTI file or REC file are uncompress and runs gzip via
154 | system command if not compressed
155 |
156 | :param fpath: Filepath of a NIfTI or REC file
157 | :return: the new file path of the gzipped file.
158 |
159 | """
160 | if fpath.endswith('.nii') or fpath.endswith('.rec'):
161 | os.system('gzip %s' % fpath)
162 | fpath = '%s.gz' % fpath
163 | return fpath
164 |
165 |
166 | def read_yaml(yaml_file):
167 |     """Function to read a yaml file and return the document info
168 |
169 | :param yaml_file: yaml file path
170 | """
171 | with open(yaml_file, "r") as yaml_stream:
172 | try:
173 | return yaml.load(yaml_stream, Loader=yaml.FullLoader)
174 | except yaml.error.YAMLError as exc:
175 | err = 'YAML File {} could not be loaded properly. Error: {}'
176 | raise DaxError(err.format(yaml_file, exc))
177 | return None
178 |
179 |
180 | def send_email(smtp_from, smtp_host, smtp_pass, to_addr, subject, content):
181 | """
182 | Send an email
183 | :param content: content of the email
184 | :param to_addr: address to send the email to
185 | :param subject: subject of the email
186 | :return: None
187 | """
188 |
189 | # Create the container (outer) email message.
190 | msg = MIMEText(content)
191 | msg['Subject'] = subject
192 | msg['From'] = smtp_from
193 | msg['To'] = ','.join(to_addr)
194 |
195 | # Send the email via our SMTP server
196 | smtp = smtplib.SMTP(smtp_host)
197 | smtp.starttls()
198 | smtp.login(smtp_from, smtp_pass)
199 | smtp.sendmail(smtp_from, to_addr, msg.as_string())
200 | smtp.quit()
201 |
202 |
203 | def send_email_netrc(smtp_host, to_addr, subject, content):
204 | import netrc
205 |
206 | (smtp_from, _, smtp_pass) = netrc.netrc().authenticators(smtp_host)
207 |
208 | send_email(smtp_from, smtp_host, smtp_pass, to_addr, subject, content)
209 |
210 |
211 | def get_this_instance():
212 | # build the instance name
213 | this_host = socket.gethostname().split('.')[0]
214 | this_user = os.environ['USER']
215 | return '{}@{}'.format(this_user, this_host)
216 |
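217 |
218 | # Example usage (illustrative):
219 | #
220 | #   parse_list('a, b ,c')
221 | #   # -> ['a', 'b', 'c']
222 | #
223 | #   groupby_to_dict([{'a': 1, 'b': 2}, {'a': 2, 'b': 3}, {'a': 1, 'b': 4}],
224 | #                   lambda x: x['a'])
225 | #   # -> {1: [{'a': 1, 'b': 2}, {'a': 1, 'b': 4}], 2: [{'a': 2, 'b': 3}]}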
--------------------------------------------------------------------------------
/dax/rcq/trialbuilder.py:
--------------------------------------------------------------------------------
1 | """ Trial Builder for DAX"""
2 |
3 | import os, shutil
4 | import logging
5 | import tempfile
6 |
7 | from dax.task import NeedInputsException, NoDataException
8 | from dax.task import JOB_PENDING, JOB_RUNNING
9 | from dax.task import NEED_INPUTS, NEED_TO_RUN, NO_DATA
10 | from dax.processors import load_from_yaml, SgpProcessor
11 | from .projectinfo import load_project_info
12 | from .taskqueue import TaskQueue
13 | from dax import validate
14 |
15 |
16 | logger = logging.getLogger('dax')
17 |
18 |
19 | PREFIX = 'TRIAL_'
20 |
21 |
22 | class TrialBuilder(object):
23 | # Builds trial run of a processor on a project by creating assessor in XNAT and adding to queue in REDCap
24 |
25 | def __init__(self, projects_redcap, xnat, xnat_override=None):
26 | self._projects_rc = projects_redcap
27 | self._xnat = xnat
28 | self._queue = TaskQueue(projects_redcap)
29 | self._xnat_override = xnat_override
30 |
31 | def build(self, project, yaml_file, subject):
32 | logger.info(f'{project}:{yaml_file}:{subject}')
33 |
34 | try:
35 | validate.validate(yaml_file)
36 | logger.info('processor yaml is valid!')
37 | except Exception as err:
38 |         logger.error(f'processor yaml is not valid!:{err}')
39 |         raise Exception('Invalid yaml')
40 |
41 | with tempfile.TemporaryDirectory() as tmpdir:
42 | # Configure processor as "TRIAL" by making a temporary copy with prefix
43 | temp_file = os.path.join(tmpdir, PREFIX + os.path.basename(yaml_file))
44 |
45 | try:
46 | shutil.copy(yaml_file, temp_file)
47 | except Exception as err:
48 | raise Exception(f'error copying yaml file:{yaml_file}:{err}')
49 |
50 | # Load the processor object from yaml file
51 | logger.debug(f'loading processor from yaml:{temp_file}')
52 | try:
53 | processor = load_from_yaml(self._xnat, temp_file, job_template='~/job_template.txt')
54 | except Exception as err:
55 | logger.error(f'failed to load, cannot build:{temp_file}:{err}')
56 | raise Exception(f'failed to load processor from yaml:{err}')
57 |
58 | if not processor:
59 | logger.error(f'loading processor:{temp_file}')
60 |                 raise Exception(f'failed to load processor from yaml:{temp_file}')
61 |
62 | # Load project info
63 | info = load_project_info(self._xnat, project)
64 |
65 | if isinstance(processor, SgpProcessor):
66 | if subject not in info['all_subjects']:
67 | raise Exception(f'subject not found:{subject}')
68 |
69 | self._build_subject_trial(processor, subject, info)
70 | else:
71 | session = subject
72 | if session not in info['all_sessions']:
73 | raise Exception(f'session not found:{session}')
74 |
75 | self._build_session_trial(processor, session, info)
76 |
77 | def _build_session_trial(self, processor, session, project_info):
78 | # Get list of inputs sets (not yet matched with existing)
79 | inputsets = processor.parse_session_pd(session, project_info)
80 |
81 | for inputs in inputsets:
82 | if inputs == {}:
83 | # Blank inputs
84 | return
85 |
86 |             # Get (or create) the assessor with the given inputs and proctype
87 | (assr, info) = processor.get_assessor(
88 | session, inputs, project_info)
89 |
90 | if info['PROCSTATUS'] in [NEED_TO_RUN, NEED_INPUTS]:
91 | logger.info(f'building task:{info["ASSR"]}')
92 | (assr, info) = self._build_trial(assr, info, processor, project_info)
93 | logger.debug(f'{info}')
94 | logger.info(info['PROCSTATUS'])
95 | else:
96 | logger.info('already built:{}'.format(info['ASSR']))
97 |
98 | def _build_subject_trial(self, processor, subject, project_info):
99 | # Get list of inputs sets (not yet matched with existing)
100 | inputsets = processor.parse_subject(subject, project_info)
101 |
102 | for inputs in inputsets:
103 | if inputs == {}:
104 | # Blank inputs
105 | return
106 |
107 |             # Get (creating if necessary) the assessor with the given inputs and proctype
108 | (assr, info) = processor.get_assessor(
109 | self._xnat, subject, inputs, project_info)
110 |
111 | if info['PROCSTATUS'] in [NEED_TO_RUN, NEED_INPUTS]:
112 | logger.info(f'building task:{info["ASSR"]}')
113 | (assr, info) = self._build_trial(assr, info, processor, project_info)
114 | logger.debug(f'assr after={info}')
115 | logger.info(info['PROCSTATUS'])
116 | else:
117 | logger.info('already built:{}'.format(info['ASSR']))
118 |
119 | def _build_trial(self, assr, info, processor, project_info):
120 | '''Build a task, create assessor in XNAT, add new record to redcap'''
121 | old_proc_status = info['PROCSTATUS']
122 | old_qc_status = info['QCSTATUS']
123 |
124 | try:
125 | var2val, inputlist = processor.build_var2val(
126 | assr,
127 | info,
128 | project_info)
129 |
130 |             if self._xnat_override:
131 |                 # Replace the xnat host with the override so that the
132 |                 # correct host is used to download the inputs
133 |                 _old = self._xnat.host
134 |                 _new = self._xnat_override
135 |                 for i in inputlist:
136 |                     i['fpath'] = i['fpath'].replace(_old, _new)
137 |
138 | self._queue._add_task(
139 | project_info['name'],
140 | info['ASSR'],
141 | inputlist,
142 | var2val,
143 | processor.walltime_str,
144 | processor.memreq_mb,
145 | processor.yaml_file,
146 | processor.user_inputs,
147 | custom=True
148 | )
149 |
150 | # Set new statuses to be updated
151 | new_proc_status = JOB_RUNNING
152 | new_qc_status = JOB_PENDING
153 | except NeedInputsException as e:
154 | new_proc_status = NEED_INPUTS
155 | new_qc_status = e.value
156 | except NoDataException as e:
157 | new_proc_status = NO_DATA
158 | new_qc_status = e.value
159 |
160 | # Update on xnat
161 | _xsitype = processor.xsitype.lower()
162 | if new_proc_status != old_proc_status:
163 | assr.attrs.set(f'{_xsitype}/procstatus', new_proc_status)
164 | if new_qc_status != old_qc_status:
165 | assr.attrs.set(f'{_xsitype}/validation/status', new_qc_status)
166 |
167 | # Update local info
168 | info['PROCSTATUS'] = new_proc_status
169 | info['QCSTATUS'] = new_qc_status
170 |
171 | return (assr, info)
172 |
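173 | # Minimal usage sketch (hypothetical REDCap/XNAT connections; only
174 | # TrialBuilder itself comes from this module):
175 | #
176 | #     from dax import XnatUtils
177 | #     with XnatUtils.get_interface() as xnat:
178 | #         builder = TrialBuilder(projects_redcap, xnat)
179 | #         builder.build('PROJ1', '/path/to/processor.yaml', 'SUBJ001')
180 | #
181 | # For a session-level (non-SGP) processor, the third argument to build()
182 | # is a session label rather than a subject label.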
--------------------------------------------------------------------------------
/bin/Xnat_tools/Xnatsetvar:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | '''Set variables on XNAT objects from a CSV file.
3 |
4 | The optional -o/--outfile argument may be used to write logging information
5 | to file instead of the terminal.
6 |
7 | No effort is made to identify duplicate or erroneous entries in the CSV file.
8 |
9 | Scan 'quality' workflow
10 |
11 | 1. Create a CSV file containing these labeled columns (in no particular order):
12 | project_id,subject_label,session_label,scan_id,quality
13 |
14 | A starting point for the CSV may be obtained using Xnatreport with
15 | --format project_id,subject_label,session_label,scan_id,quality
16 | The resulting CSV can be edited with the desired changes.
17 |
18 | 2. Run this script (an example CSV sketch appears at the end of this file):
19 |     Xnatsetvar --var quality --csv file.csv
20 |
21 | The optional argument --notes will additionally read a column 'note'
22 | from the csv and put its value in the scan's 'note' field.
23 |
24 | Assessor 'qcstatus' workflow
25 |
26 | 1. Create a CSV file containing these labeled columns (in no particular order):
27 | project_id,subject_label,session_label,assessor_label,qcstatus
28 |
29 | A starting point for the CSV may be obtained using Xnatreport with
30 | --format project_id,subject_label,session_label,assessor_label,qcstatus
31 | The resulting CSV can be edited with the desired changes.
32 |
33 | 2. Run this script:
34 |     Xnatsetvar --var qcstatus --csv file.csv
35 |
36 | The optional argument --validator may be used to set the
37 | assessor's 'validated_by' field. It defaults to 'Xnatsetvar'.
38 |
39 | The optional argument --notes will additionally read a column 'qcnotes'
40 | from the csv and put its value in the assessor's 'notes' field.
41 |
42 | Session 'session_type' workflow
43 |
44 | 1. Create a CSV file containing these labeled columns (in no particular order):
45 | project_id,subject_label,session_label,session_type
46 |
47 | 2. Run this script:
48 |     Xnatsetvar --var session_type --csv file.csv
49 | '''
50 |
51 | import argparse
52 | import logging
53 | import time
54 |
55 | import pandas
56 | from dax import XnatUtils
57 |
58 |
59 | def initialize_logger(name):
60 | if args.outfile:
61 | handler = logging.FileHandler(args.outfile, 'w')
62 | else:
63 | handler = logging.StreamHandler()
64 | lgr = logging.getLogger(name)
65 | lgr.setLevel(logging.INFO)
66 | lgr.addHandler(handler)
67 | return lgr
68 |
69 |
70 | def read_csv(reqs):
71 | # Using keep_default_na allows empty strings to come straight through
72 | info = pandas.read_csv(args.csv, delimiter=',', dtype=str, keep_default_na=False)
73 | for req in reqs:
74 | if req not in info.columns:
75 | raise Exception(f'Column "{req}" not found in CSV file')
76 | return info
77 |
78 |
79 | def set_scanqual():
80 |
81 | # Load the info csv
82 | reqs = [
83 | 'project_id',
84 | 'subject_label',
85 | 'session_label',
86 | 'scan_id',
87 | 'quality',
88 | ]
89 | if args.notes:
90 | reqs.append('note')
91 | info = read_csv(reqs)
92 |
93 | # Set values for scans that exist
94 | with XnatUtils.get_interface() as xnat:
95 | for row in info.itertuples():
96 | scan = xnat.select_scan(
97 | row.project_id,
98 | row.subject_label,
99 | row.session_label,
100 | row.scan_id
101 | )
102 | tag = f'{row.project_id} {row.subject_label} {row.session_label} {row.scan_id}'
103 | if scan.exists():
104 | scan.attrs.set('quality', row.quality)
105 | if args.notes:
106 | scan.attrs.set('note', row.note)
107 | logger.info('Set %s for %s', row.quality, tag)
108 | else:
109 | logger.warning('Scan not found: %s', tag)
110 |
111 |
112 | def set_assrqc():
113 |
114 | # Load the info csv
115 | reqs = [
116 | 'project_id',
117 | 'subject_label',
118 | 'session_label',
119 | 'assessor_label',
120 | 'qcstatus',
121 | ]
122 | if args.notes:
123 | reqs.append('qcnotes')
124 | info = read_csv(reqs)
125 |
126 | # Set values for assessors that exist
127 | datestr = time.strftime('%Y-%m-%d')
128 | with XnatUtils.get_interface() as xnat:
129 | for row in info.itertuples():
130 | assr = xnat.select_assessor(
131 | row.project_id,
132 | row.subject_label,
133 | row.session_label,
134 | row.assessor_label
135 | )
136 | tag = f'{row.project_id} {row.subject_label} {row.session_label} {row.assessor_label}'
137 | if assr.exists():
138 | sdict = {
139 | 'proc:genProcData/validation/status': row.qcstatus,
140 | 'proc:genProcData/validation/date': datestr,
141 | 'proc:genProcData/validation/validated_by': args.validator,
142 | }
143 | if args.notes:
144 | sdict['proc:genProcData/validation/notes'] = row.qcnotes
145 | assr.attrs.mset(sdict)
146 | logger.info('Set %s for %s', row.qcstatus, tag)
147 | else:
148 | logger.warning('Assessor not found: %s', tag)
149 |
150 |
151 | def set_sesstype():
152 |
153 | # Load the info csv
154 | reqs = [
155 | 'project_id',
156 | 'subject_label',
157 | 'session_label',
158 | 'session_type',
159 | ]
160 | info = read_csv(reqs)
161 |
162 | # Set values for sessions that exist
163 | with XnatUtils.get_interface() as xnat:
164 | for row in info.itertuples():
165 | sess = xnat.select_experiment(
166 | row.project_id,
167 | row.subject_label,
168 | row.session_label
169 | )
170 | tag = f'{row.project_id} {row.subject_label} {row.session_label}'
171 | if sess.exists():
172 | sess.attrs.set('session_type', row.session_type)
173 | logger.info('Set %s for %s', row.session_type, tag)
174 | else:
175 | logger.warning('Session not found: %s', tag)
176 |
177 |
178 | if __name__ == '__main__':
179 |
180 | # Arguments. args is global
181 | parser = argparse.ArgumentParser(
182 | description=__doc__,
183 | formatter_class=argparse.RawTextHelpFormatter
184 | )
185 | parser.add_argument('-v', '--var', required=True)
186 | parser.add_argument('-c', '--csv', required=True)
187 | parser.add_argument('--validator', default='Xnatsetvar')
188 | parser.add_argument('--notes', action='store_true')
189 | parser.add_argument('-o', '--outfile', default=None)
190 | args = parser.parse_args()
191 |
192 | # Logging. logger is global
193 | logger = initialize_logger('Xnatsetvar')
194 |
195 | # Call appropriate routine
196 | if args.var == 'quality':
197 | set_scanqual()
198 | elif args.var == 'qcstatus':
199 | set_assrqc()
200 | elif args.var == 'session_type':
201 | set_sesstype()
202 | else:
203 | raise Exception(f'Variable type {args.var} is not handled')
204 |
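205 | # Example CSV for the scan 'quality' workflow (hypothetical values; the
206 | # columns may appear in any order):
207 | #
208 | #     project_id,subject_label,session_label,scan_id,quality
209 | #     PROJ1,SUBJ01,SESS01,301,usable
210 | #     PROJ1,SUBJ01,SESS01,401,unusable
211 | #
212 | # Then run:
213 | #     Xnatsetvar --var quality --csv changes.csv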
--------------------------------------------------------------------------------
/dax/BidsToXnat.py:
--------------------------------------------------------------------------------
1 | '''
2 | BIDS to XNAT
3 |
4 | Extract the information from BIDS for Xnatupload
5 |
6 | @author: Praitayini Kanakaraj, Electrical Engineering, Vanderbilt University
7 |
8 | '''
9 | import os
10 | import sys
11 | import json
12 | import glob
13 |
14 | # check if valid bids
15 | # extract and map to dict.
16 |
17 |
18 | def split_name_with_nii(filename):
19 | """
20 | Returns the clean basename and extension of a file.
21 | Means that this correctly manages the ".nii.gz" extensions.
22 |
23 | Parameters
24 | ----------
25 | filename: str
26 | The filename to clean
27 |
28 | Returns
29 | -------
30 | base, ext : tuple(str, str)
31 | Clean basename and the full extension
32 | """
33 | base, ext = os.path.splitext(filename)
34 |
35 | if ext == ".gz":
36 | # Test if we have a .nii additional extension
37 | temp_base, add_ext = os.path.splitext(base)
38 |
39 | if add_ext == ".nii":
40 | ext = add_ext + ext
41 | base = temp_base
42 |
43 | return base, ext
44 |
45 |
46 | def transform_to_xnat(bids_dir, project):
47 | """
48 |     Method to transform BIDS to XNAT
49 |
50 |     :param bids_dir: BIDS Directory
51 |     :param project: XNAT project ID
52 |     :return: upload_scan -> list of dicts with info of scans to upload
53 | """
54 | # Check bids dir path exists
55 | if not os.path.exists(bids_dir):
56 |         print('ERROR: %s path does not exist' % (bids_dir))
57 |         sys.exit()
58 |
59 | # Extract the values from the bids data
60 | bids_dict = {}
61 | upload_scan = []
62 | unq_scan_id = 1
63 | pre_dir = None
64 | xnat_dataset = dataset_source_xnat(bids_dir)
65 | diffusion_dict = {}
66 | for root, dirs, files in os.walk(bids_dir):
67 |
68 |         # reset the scan id when we move into a different directory
69 | cur_dir = root.rsplit('/', 1)[0]
70 | if pre_dir is not None and cur_dir != pre_dir:
71 | unq_scan_id = 1
72 | pre_dir = cur_dir
73 |
74 | # Sorting the files helps with DWI files, files will be
75 | # grouped together in this order: bval/bvec/nii.gz
76 | for i in sorted(files):
77 | if i.endswith('nii.gz') or i.endswith('.bvec') \
78 | or i.endswith('.bval'):
79 | bids_filename_contents = i.split('_')
80 |
81 | if xnat_dataset:
82 |                     # get info from json file for data derived from XNAT
83 | bids_filename = i.split('.')[0]
84 | json_file = bids_filename + '.json'
85 | with open(os.path.join(root, json_file), 'r') as f:
86 | json_contents = json.load(f)
87 |
88 | # subj from json
89 | subject = json_contents['XNATProvenance'].split('/')[8]
90 | bids_dict['subject_label'] = subject
91 |
92 | # sess from json
93 | session = json_contents['XNATProvenance'].split('/')[10]
94 | bids_dict['session_label'] = session
95 |
96 | # series des (on xnat/bidsmap) from bids
97 | bids_dict['series_description'] = json_contents['SeriesDescription']
98 |
99 | # label -x--x--x-
100 | scan_id = json_contents['XNATProvenance'].split('/')[12]
101 | bids_dict['label'] = '-'.join((project,
102 | subject, session, scan_id))
103 |
104 | # type quality from json
105 | bids_dict['ID'] = scan_id
106 | bids_dict['type'] = json_contents['ScanType']
107 | bids_dict['quality'] = json_contents['ScanQuality']
108 |
109 | # resource and resource path
110 | bids_filepath = os.path.join(root, i)
111 |
112 | if bids_filepath.endswith('nii.gz'):
113 | bids_dict['resource'] = {'NIFTI': [bids_filepath]}
114 | if bids_filepath.endswith('bvec'):
115 | bids_dict['resource'] = {'BVEC': [bids_filepath]}
116 | if bids_filepath.endswith('bval'):
117 | bids_dict['resource'] = {'BVAL': [bids_filepath]}
118 |
119 | else:
120 | # get data from filename for public bids dataset
121 | # sub, sess from bids
122 | subject = [(i.split('-')[1])
123 | for i in bids_filename_contents if i.startswith('sub')][0]
124 | bids_dict['subject_label'] = subject
125 | try:
126 | session = [(i.split('-')[1])
127 | for i in bids_filename_contents if i.startswith('ses')][0]
128 | except IndexError:
129 | session = subject
130 | # xnatupload needs unique session id
131 | bids_dict['session_label'] = subject + '-' + session
132 |
133 | basename, ext = split_name_with_nii(i)
134 | if ext == '.bval' and basename not in diffusion_dict:
135 | diffusion_dict[basename] = unq_scan_id
136 | elif basename in diffusion_dict:
137 | unq_scan_id = diffusion_dict[basename]
138 | # id increment unique value
139 | bids_dict['ID'] = "{0:0=2d}".format(unq_scan_id)
140 |
141 | # label from bids datatype
142 | datatype = root.split('/')[-1]
143 | scan_id = datatype
144 | bids_dict['label'] = '-'.join((project,
145 | subject, session, scan_id))
146 |
147 | # series_des type from last key in bids + run + acq
148 | try:
149 | run_number = [(i.split('-')[1])
150 | for i in bids_filename_contents if i.startswith('run')][0]
151 | except IndexError:
152 | run_number = ''
153 |
154 | bids_dict['series_description'] = i.split(
155 | '.')[0].split('_')[-1] + run_number
156 | bids_dict['type'] = i.split(
157 | '.')[0].split('_')[-1] + run_number
158 |
159 | bids_dict['quality'] = 'questionable'
160 |
161 | bids_filepath = os.path.join(root, i)
162 | if bids_filepath.endswith('nii.gz'):
163 | bids_dict['resource'] = {'NIFTI': [bids_filepath]}
164 | if bids_filepath.endswith('bvec'):
165 | bids_dict['resource'] = {'BVEC': [bids_filepath]}
166 | if bids_filepath.endswith('bval'):
167 | bids_dict['resource'] = {'BVAL': [bids_filepath]}
168 | unq_scan_id += 1
169 |
170 | # other keys
171 | bids_dict['object_type'] = 'scan'
172 | bids_dict['project_id'] = project
173 |                 # check that the datatype dir is one of the MR datatypes
174 |                 if bids_filepath.split('/')[-2] in \
175 |                         ('anat', 'func', 'dwi', 'fmap', 'perf'):
176 |                     bids_dict['session_type'] = 'MR'
177 |                 else:
178 |                     sys.exit('ERROR: unsupported BIDS datatype: %s' % bids_filepath)
179 |
180 | upload_scan.append(bids_dict.copy())
181 |
182 | return upload_scan
183 |
184 |
185 | def dataset_source_xnat(bids_dir):
186 | """
187 | Method to check if the data was downloaded from xnat
188 |
189 | :param bids_dir: BIDS Directory
190 | :return: True or False
191 | """
192 | dataset_description_file = glob.glob(
193 | bids_dir + "/**/dataset_description.json", recursive=True)
194 |     if not dataset_description_file:
195 |         return False
196 | else:
197 | with open(dataset_description_file[0], 'r') as f:
198 | json_contents = json.load(f)
199 | if 'DatasetDOI' not in json_contents:
200 | return False
201 | elif not json_contents['DatasetDOI'].endswith('xnat'):
202 | return False
203 | return True
204 |
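205 | # Minimal usage sketch (hypothetical paths and project ID):
206 | #
207 | #     scans = transform_to_xnat('/data/bids_dataset', 'PROJ1')
208 | #     # each entry is a dict ready for Xnatupload, roughly:
209 | #     # {'object_type': 'scan', 'project_id': 'PROJ1',
210 | #     #  'subject_label': '01', 'session_label': '01-baseline',
211 | #     #  'ID': '01', 'type': 'T1w', 'quality': 'questionable',
212 | #     #  'resource': {'NIFTI': ['.../sub-01_T1w.nii.gz']}, ...}
213 | #
214 | # Note: split_name_with_nii('scan.nii.gz') returns ('scan', '.nii.gz'),
215 | # whereas os.path.splitext alone would return ('scan.nii', '.gz').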
--------------------------------------------------------------------------------
/dax/processor_graph.py:
--------------------------------------------------------------------------------
1 |
2 | import logging
3 | LOGGER = logging.getLogger('dax')
4 |
5 | # TODO: BenM/asr_of_asr/should detect / report cycles
6 | # TODO: BenM/asr_of_asr/needs proper unittesting
7 |
8 |
9 | class ProcessorGraph:
10 |
11 | @staticmethod
12 | def processor_inputs_from_sources(yaml_sources):
13 | sources = dict()
14 | for name, source in yaml_sources:
15 | xnat = source.contents.get('inputs', {}).get('xnat', {})
16 |
17 | asrs = xnat.get('assessors', {})
18 | inputs = set()
19 | for a in asrs:
20 |                 types = a.get('types', '').split(',')
21 |                 # skip empty entries from a missing or blank 'types' value
22 |                 inputs = inputs.union(t for t in types if t)
23 | sources[name] = list(inputs)
24 | return sources
25 |
26 | @staticmethod
27 | def get_forward_edges(nodes_to_src_edges):
28 | sink_edges = dict()
29 | # calculate a list of nodes to sink edges
30 | for k, v in list(nodes_to_src_edges.items()):
31 | if k not in sink_edges:
32 | sink_edges[k] = []
33 | for src in v:
34 | if src not in sink_edges:
35 | sink_edges[src] = []
36 | sink_edges[src].append(k)
37 | for v in sink_edges:
38 | sink_edges[v] = sorted(sink_edges[v])
39 |
40 | return sink_edges
41 |
42 | @staticmethod
43 | def order_processors(processors, log=None):
44 |         """
45 |         Order a list of processors in dependency order.
46 |         This method takes a list of processors and orders them so that:
47 |         . if processor type b takes an input of processor type a, processor a
48 |           appears in the list before processor b
49 |         . processors without a well-formed name are placed at the end of the
50 |           list in no particular order
51 |
52 |         :param processors: list of processor objects to order
53 |         :param log: optional logger used to report ordering problems
54 |         :return: list of processors in dependency order
55 |         """
56 | processor_map = dict([(p.get_proctype(), p) for p in processors])
57 | named_processors = []
58 | unnamed_processors = []
59 | assessor_inputs = {}
60 | for p in processors:
61 | proctype = p.get_proctype()
62 | if not proctype:
63 | unnamed_processors.append(p)
64 | else:
65 | assessor_inputs[p.get_proctype()] =\
66 | p.get_assessor_input_types()
67 |
68 | ordered_names = ProcessorGraph.order_from_inputs(assessor_inputs, log)
69 | for n in ordered_names:
70 | named_processors.append(processor_map[n])
71 | return named_processors + unnamed_processors
72 |
73 | # TODO: BenM/assessor_of_assessor/ideally, this would operate on a list of
74 | # processors rather than a list of yaml_sources, but this would require a
75 | # refactor of existing processors
76 | @staticmethod
77 | def order_from_inputs(artefacts_to_inputs, log=None):
78 | # artefact_type_map is a list of artefacts to their *inputs*.
79 | # consider the following graph:
80 | # a --> b --> d
81 | # \ \ \
82 | # \ \ \
83 | # > c --> e --> f
84 | # this function takes a mapping from a node to its inputs:
85 | # a -> [], b -> [a], c -> [a], d -> [b], e -> [b, c], f -> [d, e]
86 | #
87 | # the algorithm then proceeds as follows:
88 | # 1. calculate a list of nodes to sink edges (fwd_edges)
89 | # 2. calculate in-degrees for the nodes, adding any node with no
90 | # inputs to the satisfied node list
91 | # 3. keep taking nodes from the satisfied node list, following their
92 | # sink edges and lowering the in-degree of each sink node by 1.
93 | # Nodes that hit zero in-degree are added to the satisfied list and
94 | # the current node is added to the ordered list
95 | open_nodes = dict()
96 | satisfied = list()
97 | ordered = list()
98 | fwd_edges = ProcessorGraph.get_forward_edges(artefacts_to_inputs)
99 |
100 | # calculate the 'in-degree' (number of incoming edges) for each
101 | # node; this is more easily done using the mapping of nodes to inputs.
102 | # if a node has an in-degree of zero then it can go onto the satisfied
103 | # list
104 | for k, v in list(artefacts_to_inputs.items()):
105 | open_nodes[k] = len(v)
106 | if len(v) == 0:
107 | satisfied.append(k)
108 |
109 | # keep picking nodes from the front of the satisfied list until there
110 | # are no more. each follow the sink edges for that node and reduce the
111 | # sink node in-degrees by 1. Any node whose in-degree falls to 0 is
112 | # added to the end of the satisfied list
113 | while len(satisfied) > 0:
114 | cur = satisfied[0]
115 | ordered.append(cur)
116 | satisfied = satisfied[1:]
117 | for sink in fwd_edges[cur]:
118 | open_nodes[sink] -= 1
119 | if open_nodes[sink] == 0:
120 | satisfied.append(sink)
121 |
122 | unordered = list()
123 | for k, v in list(open_nodes.items()):
124 | if v > 0:
125 | unordered.append(k)
126 | unordered = sorted(unordered)
127 |
128 | if len(unordered) > 0 and log is not None:
129 | log.warning('Unable to order all processors:')
130 | log.warning(' Unordered: ' + ', '.join(unordered))
131 | regions = ProcessorGraph.tarjan(fwd_edges)
132 | if len(regions) < len(artefacts_to_inputs):
133 | log.warning('Cyclic processor dependencies detected:')
134 | regions.reverse()
135 | for r in [x for x in regions if len(x) > 1]:
136 | log.warning(' Cycle: ' + ', '.join(r))
137 |
138 | return ordered + unordered
139 |
140 | @staticmethod
141 | def tarjan(graph):
142 |
143 | class Vertex:
144 | def __init__(self, v, e):
145 | self.v = v
146 | self.e = e
147 | self.index = None
148 | self.onstack = False
149 |                 self.lowlink = None
150 |
151 | class TarjanImpl:
152 | """
153 | Expecting graph to be in the form of:
154 | {
155 | a: [b, c],
156 | b: [d],
157 | c: [e],
158 | d: [f],
159 | e: [f],
160 | f: []
161 | }
162 |             Returns the strongly connected components of the graph,
163 |             in reverse topological order.
164 | """
165 |
166 | def __init__(self):
167 |                 self.V = dict()     # vertex name -> Vertex
168 |                 self.S = list()     # Tarjan's stack
169 |                 self.index = 0      # next DFS index to assign
170 |                 self.sccs = list()  # strongly connected components found
171 |
172 |             def go(self, graph):
173 |                 # (re)initialize state for this run
174 |                 self.index = 0
175 |                 self.S = list()
176 |                 self.sccs = list()
177 |                 self.V = {v: Vertex(v, w) for (v, w) in graph.items()}
181 |
182 | for v in self.V.values():
183 | if v.index is None:
184 | self.strongconnect(v)
185 |
186 |                 return self.sccs
187 |
188 | def strongconnect(self, v):
189 | v.index = self.index
190 | v.lowlink = self.index
191 | self.index += 1
192 | self.S.append(v)
193 | v.onstack = True
194 |
195 | for d in v.e:
196 | w = self.V[d]
197 | if w.index is None:
198 | self.strongconnect(w)
199 | v.lowlink = min(v.lowlink, w.lowlink)
200 | elif w.onstack is True:
201 | v.lowlink = min(v.lowlink, w.index)
202 |
203 |                 if v.lowlink == v.index:
204 |                     scc = []  # v is the root of a strongly connected component
205 |                     while True:
206 |                         w = self.S.pop()
207 |                         w.onstack = False
208 |                         scc.append(w.v)
209 |                         if w is v:
210 |                             break
211 |                     self.sccs.append(scc)
212 |
213 | t = TarjanImpl()
214 | results = t.go(graph)
216 | return results
217 |
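218 | # Worked example (illustrative): for the graph used in the comments above,
219 | #     {'a': [], 'b': ['a'], 'c': ['a'], 'd': ['b'], 'e': ['b', 'c'], 'f': ['d', 'e']}
220 | # get_forward_edges produces
221 | #     {'a': ['b', 'c'], 'b': ['d', 'e'], 'c': ['e'], 'd': ['f'], 'e': ['f'], 'f': []}
222 | # and order_from_inputs returns ['a', 'b', 'c', 'd', 'e', 'f'].
223 | # tarjan() on an acyclic graph yields one singleton component per node in
224 | # reverse topological order; any multi-node component indicates a cycle.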
--------------------------------------------------------------------------------
/dax/tests/unit_test_processor_graph.py:
--------------------------------------------------------------------------------
1 |
2 | from unittest import TestCase
3 |
4 | from dax.processor_graph import ProcessorGraph
5 | from dax import yaml_doc
6 | from dax.tests import unit_test_common_processor_yamls as yamls
7 | from dax.tests import unit_test_entity_common as common
8 | from dax.processors import AutoProcessor
9 |
10 |
11 | class TestLog:
12 | def __init__(self):
13 | self.warnings = list()
14 |
15 | def warning(self, message):
16 | self.warnings.append(message)
17 |
18 | def clear(self):
19 | self.warnings = list()
20 |
21 |
22 | class ProcessorGraphUnitTests(TestCase):
23 |
24 | @staticmethod
25 | def __getabcdscenario():
26 | proc_a = yamls.generate_yaml(
27 | procname="Proc_A",
28 | scans=[{
29 | 'name': 'scan1', 'types': 'T1',
30 | 'resources': [
31 | {'type': 'NIFTI', 'name': 't1'}
32 | ]}
33 | ])
34 | proc_b = yamls.generate_yaml(
35 | procname="Proc_B",
36 | scans=[{
37 | 'name': 'scan1', 'types': 'T1',
38 | 'resources': [
39 | {'type': 'NIFTI', 'name': 't1'}
40 | ]
41 | }
42 | ])
43 | proc_c = yamls.generate_yaml(
44 | procname="Proc_C",
45 | assessors=[
46 | {
47 | 'name': 'proc1', 'types': 'Proc_A_v1',
48 | 'resources': [
49 | {'type': 'SEG', 'name': 'proc_a'}
50 | ]
51 | },
52 | {
53 | 'name': 'proc2', 'types': 'Proc_B_v1',
54 | 'resources': [
55 | {'type': 'SEG2', 'name': 'proc_b'}
56 | ]
57 | }
58 | ])
59 | proc_d = yamls.generate_yaml(
60 | procname="Proc_D",
61 | assessors=[
62 | {
63 | 'name': 'proc1', 'types': 'Proc_C_v1',
64 | 'resources': [
65 | {'type': 'THING', 'name': 'proc_c'}
66 | ]
67 | },
68 | {
69 | 'name': 'proc2', 'types': 'Proc_B_v1',
70 | 'resources': [
71 | {'type': 'SEG2', 'name': 'proc_b'}
72 | ]
73 | }
74 | ])
75 |
76 | return [
77 | ('Proc_A_v1', yaml_doc.YamlDoc().from_string(proc_a)),
78 | ('Proc_B_v1', yaml_doc.YamlDoc().from_string(proc_b)),
79 | ('Proc_C_v1', yaml_doc.YamlDoc().from_string(proc_c)),
80 | ('Proc_D_v1', yaml_doc.YamlDoc().from_string(proc_d))
81 | ]
82 |
83 |
88 | def test_ordered_processors(self):
89 | log = TestLog()
90 |
91 | graph_description = {
92 | 'a': [],
93 | 'b': ['a'],
94 | 'c': ['a'],
95 | 'd': ['b'],
96 | 'e': ['b', 'c'],
97 | 'f': ['d', 'e']
98 | }
99 | actual = ProcessorGraph.order_from_inputs(graph_description, log)
100 |         self.assertListEqual(actual, ['a', 'b', 'c', 'd', 'e', 'f'])
101 |
102 |
103 | def test_ordered_processors_has_cycle(self):
104 | log = TestLog()
105 |
106 | graph_description = {
107 | 'a': [],
108 | 'b': ['a', 'e'],
109 | 'c': ['a'],
110 | 'd': ['b'],
111 | 'e': ['c', 'd'],
112 | 'f': ['d', 'e']
113 | }
114 | actual = ProcessorGraph.order_from_inputs(graph_description, log)
115 |         self.assertListEqual(actual, ['a', 'c', 'b', 'd', 'e', 'f'])
116 |
117 | def test_processor_inputs_from_sources(self):
118 |
119 | print((ProcessorGraph.processor_inputs_from_sources(
120 | ProcessorGraphUnitTests.__getabcdscenario()
121 | )))
122 |
123 |
124 | def test_ordering_from_sources(self):
125 | log = TestLog()
126 | print((ProcessorGraph.order_from_inputs(
127 | ProcessorGraph.processor_inputs_from_sources(
128 | ProcessorGraphUnitTests.__getabcdscenario()
129 | ),
130 | log
131 | )))
132 |
133 | def test_order_processors(self):
134 | yamldocs = [p[1] for p in ProcessorGraphUnitTests.__getabcdscenario()]
135 | processors = [AutoProcessor(common.FakeXnat, p) for p in yamldocs]
136 | log = TestLog()
137 | print((ProcessorGraph.order_processors(processors, log)))
138 |
139 | def test_order_processors_mocked(self):
140 | class TestProcessor:
141 | def __init__(self, name, inputs):
142 | self.name = name
143 | self.inputs = inputs
144 |
145 | def get_proctype(self):
146 | return self.name
147 |
148 | def get_assessor_input_types(self):
149 | return self.inputs
150 |
151 | log = TestLog()
152 | p = [
153 | TestProcessor('a', []),
154 | TestProcessor('b', []),
155 | TestProcessor('c', ['a', 'b']),
156 | TestProcessor(None, ['b']),
157 | TestProcessor('e', ['c', 'd']),
158 | ]
159 |
160 | actual = ProcessorGraph.order_processors(p, log)
161 | self.assertListEqual(
162 | actual, [p[0], p[1], p[2], p[4], p[3]]
163 | )
164 | self.assertListEqual(
165 | log.warnings,
166 | [
167 | 'Unable to order all processors:',
168 | ' Unordered: e'
169 | ]
170 | )
171 |
172 | log = TestLog()
173 | p = [
174 | TestProcessor('a', []),
175 | TestProcessor('b', ['a', 'd']),
176 | TestProcessor('c', ['b']),
177 | TestProcessor('d', ['c']),
178 | TestProcessor('e', ['b'])
179 | ]
180 |
181 | actual = ProcessorGraph.order_processors(p, log)
182 | self.assertListEqual(
183 | actual, [p[0], p[1], p[2], p[3], p[4]]
184 | )
185 | self.assertListEqual(
186 | log.warnings,
187 | [
188 | 'Unable to order all processors:',
189 | ' Unordered: b, c, d, e',
190 | 'Cyclic processor dependencies detected:',
191 | ' Cycle: d, c, b'
192 | ]
193 | )
194 |
195 | log = TestLog()
196 | p = [
197 | TestProcessor('a', []),
198 | TestProcessor('b', ['a', 'd']),
199 | TestProcessor('c', ['b']),
200 | TestProcessor('d', ['c']),
201 | TestProcessor('e', ['b', 'g']),
202 | TestProcessor('f', ['e']),
203 | TestProcessor('g', ['f']),
204 | TestProcessor('h', ['e'])
205 | ]
206 |
207 | actual = ProcessorGraph.order_processors(p, log)
208 | self.assertListEqual(
209 | actual, [p[0], p[1], p[2], p[3], p[4], p[5], p[6], p[7]]
210 | )
211 | self.assertListEqual(
212 | log.warnings,
213 | [
214 | 'Unable to order all processors:',
215 | ' Unordered: b, c, d, e, f, g, h',
216 | 'Cyclic processor dependencies detected:',
217 | ' Cycle: d, c, b',
218 | ' Cycle: g, f, e'
219 | ]
220 | )
221 |
222 |
223 | def test_tarjan(self):
224 |
225 | def impl(g, expected):
226 | actual = ProcessorGraph.tarjan(g)
227 | print(actual)
228 | self.assertListEqual(actual, expected)
229 |
230 | g = {
231 | 'a': ['b', 'c'],
232 | 'b': ['d'],
233 | 'c': ['e'],
234 | 'd': ['f'],
235 | 'e': ['f'],
236 | 'f': []
237 | }
238 | impl(g, [['f'], ['d'], ['b'], ['e'], ['c'], ['a']])
239 |
240 | g = {
241 | 'a': ['b'],
242 | 'b': ['c', 'e'],
243 | 'c': ['d'],
244 | 'd': ['b'],
245 | 'e': []
246 | }
247 | impl(g, [['e'], ['d', 'c', 'b'], ['a']])
248 |
249 | g = {
250 | 'a': ['b'],
251 | 'b': ['c'],
252 | 'c': ['a']
253 | }
254 | impl(g, [['c', 'b', 'a']])
255 |
256 | g = {
257 | 'a': ['b'],
258 | 'b': ['c', 'e'],
259 | 'c': ['d'],
260 | 'd': ['b'],
261 | 'e': ['f', 'h'],
262 | 'f': ['g'],
263 | 'g': ['e'],
264 | 'h': []
265 | }
266 | impl(g, [['h'], ['g', 'f', 'e'], ['d', 'c', 'b'], ['a']])
267 |
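268 |
269 | if __name__ == '__main__':
270 |     # convenience entry point so this module can be run directly
271 |     import unittest
272 |     unittest.main()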
--------------------------------------------------------------------------------
/dax/rcq/projectinfo.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import json
3 |
4 | from dax.XnatUtils import decode_inputs
5 |
6 |
7 | logger = logging.getLogger('manager.rcq.projectinfo')
8 |
9 | # The scan URI is a hacky way to get a row for each resource of every
10 | # scan, including all modalities. Things go awry when we try to add any
11 | # other columns.
12 | SCAN_URI = '/REST/experiments?xsiType=xnat:imagesessiondata\
13 | &columns=\
14 | project,\
15 | subject_label,\
16 | session_label,\
17 | session_type,\
18 | xnat:imagesessiondata/note,\
19 | xnat:imagesessiondata/date,\
20 | tracer_name,\
21 | xnat:imagesessiondata/acquisition_site,\
22 | xnat:imagesessiondata/label,\
23 | xnat:imagescandata/id,\
24 | xnat:imagescandata/type,\
25 | xnat:imagescandata/quality,\
26 | xnat:imagescandata/frames,\
27 | xnat:imagescandata/file/label'
28 |
29 |
30 | # The assessor URI is a hacky way to get a row for each assessor. We do
31 | # not try to get a row per resource because that query takes too long.
32 | # The column name is: proc:genprocdata/out/file/label
33 | ASSR_URI = '/REST/experiments?xsiType=xnat:imagesessiondata\
34 | &columns=\
35 | project,\
36 | subject_label,\
37 | session_label,\
38 | session_type,\
39 | xnat:imagesessiondata/acquisition_site,\
40 | xnat:imagesessiondata/note,\
41 | xnat:imagesessiondata/date,\
42 | xnat:imagesessiondata/label,\
43 | proc:genprocdata/label,\
44 | proc:genprocdata/procstatus,\
45 | proc:genprocdata/proctype,\
46 | proc:genprocdata/validation/status,\
47 | proc:genprocdata/validation/date,\
48 | proc:genprocdata/validation/validated_by,\
49 | proc:genprocdata/jobstartdate,\
50 | last_modified,\
51 | proc:genprocdata/inputs'
52 |
53 |
54 | SGP_URI = '/REST/subjects?xsiType=xnat:subjectdata\
55 | &columns=\
56 | project,\
57 | label,\
58 | proc:subjgenprocdata/label,\
59 | proc:subjgenprocdata/date,\
60 | proc:subjgenprocdata/procstatus,\
61 | proc:subjgenprocdata/proctype,\
62 | proc:subjgenprocdata/validation/status,\
63 | proc:subjgenprocdata/inputs,\
64 | last_modified'
65 |
66 |
67 | SCAN_RENAME = {
68 | 'project': 'PROJECT',
69 | 'subject_label': 'SUBJECT',
70 | 'session_label': 'SESSION',
71 | 'session_type': 'SESSTYPE',
72 | 'tracer_name': 'TRACER',
73 | 'xnat:imagesessiondata/note': 'NOTE',
74 | 'xnat:imagesessiondata/date': 'DATE',
75 | 'xnat:imagesessiondata/acquisition_site': 'SITE',
76 | 'xnat:imagescandata/id': 'SCANID',
77 | 'xnat:imagescandata/type': 'SCANTYPE',
78 | 'xnat:imagescandata/quality': 'QUALITY',
79 | 'xsiType': 'XSITYPE',
80 | 'xnat:imagescandata/file/label': 'RESOURCES',
81 | 'xnat:imagescandata/frames': 'FRAMES',
82 | }
83 |
84 | ASSR_RENAME = {
85 | 'project': 'PROJECT',
86 | 'subject_label': 'SUBJECT',
87 | 'session_label': 'SESSION',
88 | 'session_type': 'SESSTYPE',
89 | 'xnat:imagesessiondata/note': 'NOTE',
90 | 'xnat:imagesessiondata/date': 'DATE',
91 | 'xnat:imagesessiondata/acquisition_site': 'SITE',
92 | 'proc:genprocdata/label': 'ASSR',
93 | 'proc:genprocdata/procstatus': 'PROCSTATUS',
94 | 'proc:genprocdata/proctype': 'PROCTYPE',
95 | 'proc:genprocdata/jobstartdate': 'JOBDATE',
96 | 'proc:genprocdata/validation/status': 'QCSTATUS',
97 | 'proc:genprocdata/validation/date': 'QCDATE',
98 | 'proc:genprocdata/validation/validated_by': 'QCBY',
99 | 'xsiType': 'XSITYPE',
100 | 'proc:genprocdata/inputs': 'INPUTS',
101 | }
102 |
103 | SGP_RENAME = {
104 | 'project': 'PROJECT',
105 | 'label': 'SUBJECT',
106 | 'proc:subjgenprocdata/date': 'DATE',
107 | 'proc:subjgenprocdata/label': 'ASSR',
108 | 'proc:subjgenprocdata/procstatus': 'PROCSTATUS',
109 | 'proc:subjgenprocdata/proctype': 'PROCTYPE',
110 | 'proc:subjgenprocdata/validation/status': 'QCSTATUS',
111 | 'proc:subjgenprocdata/inputs': 'INPUTS'}
112 |
113 | XSI2MOD = {
114 | 'xnat:eegSessionData': 'EEG',
115 | 'xnat:mrSessionData': 'MR',
116 | 'xnat:petSessionData': 'PET'}
117 |
118 |
119 | def load_project_info(xnat, project):
120 | info = {}
121 |
122 | logger.info(f'loading project info from XNAT:{project}')
123 |
124 | info['name'] = project
125 | info['scans'] = _load_scan_data(xnat, project)
126 | info['assessors'] = _load_assr_data(xnat, project)
127 | info['sgp'] = _load_sgp_data(xnat, project)
128 |
129 | info['all_sessions'] = list(set([x['SESSION'] for x in info['scans']]))
130 | info['all_subjects'] = list(set([x['SUBJECT'] for x in info['scans']]))
131 |
132 | return info
133 |
134 |
135 | def _get_result(xnat, uri):
136 | logger.debug(uri)
137 | json_data = json.loads(xnat._exec(uri, 'GET'), strict=False)
138 | return json_data['ResultSet']['Result']
139 |
140 |
141 | def _scan_info(record):
142 | """Get scan info."""
143 | info = {}
144 |
145 | for k, v in SCAN_RENAME.items():
146 | info[v] = record[k]
147 |
148 | # set_modality
149 | info['MODALITY'] = XSI2MOD.get(info['XSITYPE'], 'UNK')
150 |
151 | # Get the full path
152 | _p = '/projects/{0}/subjects/{1}/experiments/{2}/scans/{3}'.format(
153 | info['PROJECT'],
154 | info['SUBJECT'],
155 | info['SESSION'],
156 | info['SCANID'])
157 | info['full_path'] = _p
158 |
159 | return info
160 |
161 |
162 | def _assessor_info(record):
163 | """Get assessor info."""
164 | info = {}
165 |
166 | for k, v in ASSR_RENAME.items():
167 | info[v] = record[k]
168 |
169 | # Decode inputs into list
170 | info['INPUTS'] = decode_inputs(info['INPUTS'])
171 |
172 | # Get the full path
173 | _p = '/projects/{0}/subjects/{1}/experiments/{2}/assessors/{3}'.format(
174 | info['PROJECT'],
175 | info['SUBJECT'],
176 | info['SESSION'],
177 | info['ASSR'])
178 | info['full_path'] = _p
179 |
180 | # set_modality
181 | info['MODALITY'] = XSI2MOD.get(info['XSITYPE'], 'UNK')
182 |
183 | return info
184 |
185 |
186 | def _sgp_info(record):
187 | """Get subject assessor info."""
188 | info = {}
189 |
190 | # Copy with new var names
191 | for k, v in SGP_RENAME.items():
192 | info[v] = record[k]
193 |
194 | info['XSITYPE'] = 'proc:subjgenprocdata'
195 |
196 | # Decode inputs into list
197 | info['INPUTS'] = decode_inputs(info['INPUTS'])
198 |
199 | # Get the full path
200 | _p = '/projects/{0}/subjects/{1}/assessors/{2}'.format(
201 | info['PROJECT'],
202 | info['SUBJECT'],
203 | info['ASSR'])
204 | info['full_path'] = _p
205 |
206 | return info
207 |
208 |
209 | def _load_scan_data(xnat, project):
210 | # Get main project scans
211 | uri = SCAN_URI + f'&project={project}'
212 | result = _get_result(xnat, uri)
213 |
214 | # Append shared project scans
215 | uri = SCAN_URI + f'&xnat:imagesessiondata/sharing/share/project={project}'
216 | result += _get_result(xnat, uri)
217 |
218 | # Change from one row per resource to one row per scan
219 | scans = {}
220 | for r in result:
221 | # Force project to be requested not parent
222 | r['project'] = project
223 |
224 | k = (r['project'], r['session_label'], r['xnat:imagescandata/id'])
225 |         if k in scans:
226 | # Append to list of resources
227 | _resource = r['xnat:imagescandata/file/label']
228 | scans[k]['RESOURCES'] += ',' + _resource
229 | else:
230 | scans[k] = _scan_info(r)
231 |
232 | # Get just the values in a list
233 | scans = list(scans.values())
234 |
235 | return scans
236 |
237 |
238 | def _load_assr_data(xnat, project):
239 | """Get assessor info from XNAT as list of dicts."""
240 | assessors = []
241 | uri = ASSR_URI
242 | uri += f'&project={project}'
243 |
244 | result = _get_result(xnat, uri)
245 |
246 | # Append shared project assessors
247 | uri = ASSR_URI + f'&xnat:imagesessiondata/sharing/share/project={project}'
248 | result += _get_result(xnat, uri)
249 |
250 | for r in result:
251 | # Force project to be requested not parent
252 | r['project'] = project
253 | assessors.append(_assessor_info(r))
254 |
255 | return assessors
256 |
257 |
258 | def _load_sgp_data(xnat, project):
259 |     """Get subject assessor info from XNAT as list of dicts."""
260 | assessors = []
261 | uri = SGP_URI
262 | uri += f'&project={project}'
263 |
264 | logger.debug(f'get_result uri=:{uri}')
265 | result = _get_result(xnat, uri)
266 |
267 | for r in result:
268 | assessors.append(_sgp_info(r))
269 |
270 | return assessors
271 |
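272 | # Minimal usage sketch (hypothetical project ID):
273 | #
274 | #     from dax import XnatUtils
275 | #     with XnatUtils.get_interface() as xnat:
276 | #         info = load_project_info(xnat, 'PROJ1')
277 | #         print(len(info['scans']), 'scans,', len(info['assessors']), 'assessors')
278 | #         print(sorted(info['all_sessions']))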
--------------------------------------------------------------------------------
/bin/Xnat_tools/Xnatquery:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | '''
5 | Query through Xnat
6 |
7 | @author: Benjamin Yvernault, Electrical Engineering, Vanderbilt University
8 | '''
9 |
10 |
11 |
12 | import os
13 |
14 | from dax import XnatUtils
15 | from dax.errors import XnatToolsUserError
16 | import dax.xnat_tools_utils as utils
17 |
18 |
19 | __copyright__ = 'Copyright 2013 Vanderbilt University. All Rights Reserved'
20 | __exe__ = os.path.basename(__file__)
21 | __author__ = 'byvernault'
22 | __purpose__ = 'Query through XNAT at the level you want.'
23 | __description__ = """What is the script doing:
24 | * Query on Xnat at any level.
25 |
26 | Examples:
27 | *Show all the projects you have access to:
28 | Xnatquery --me
29 | *Show all projects:
30 | Xnatquery --all
31 | *Query a specific level (example scan/assessors for a session):
32 | Xnatquery -p PID -s 109873 -e 109873
33 | *Query a specific level with all objects under it :
34 | Xnatquery -p PID -s 109873 --all"""
35 |
36 |
37 | def query_project(xnat, project, qall=False):
38 | """
39 | Method to query a project
40 |
41 | :param project: project ID on XNAT
42 | :return: None
43 | """
44 | if not project:
45 | err = 'argument project not provided.'
46 | raise XnatToolsUserError(__exe__, err)
47 | print('Project: %s' % (project))
48 | subjects = xnat.get_subjects(project)
49 | if not subjects:
50 | err = 'no subjects found for project %s.' % project
51 | raise XnatToolsUserError(__exe__, err)
52 | for subject in subjects:
53 | print(' + Subject: {}'.format(subject['label']))
54 | if qall:
55 | query_subject(xnat, project, subject['label'], qall)
56 |
57 |
58 | def query_subject(xnat, project, subject, qall=False):
59 | """
60 | Method to query a subject
61 |
62 | :param project: project ID on XNAT
63 | :param subject: subject label on XNAT
64 | :return: None
65 | """
66 | if not project:
67 | err = 'argument project not provided.'
68 | raise XnatToolsUserError(__exe__, err)
69 | if not subject:
70 | err = 'argument subject not provided.'
71 | raise XnatToolsUserError(__exe__, err)
72 | sessions = xnat.get_sessions(project, subject)
73 | if not sessions:
74 |         err = 'no sessions found for subject %s.' % subject
75 | raise XnatToolsUserError(__exe__, err)
76 | for session in sessions:
77 | print(' * Session: {}'.format(session['label']))
78 | if qall:
79 | query_session(xnat, project, subject, session['label'], qall)
80 |
81 |
82 | def query_session(xnat, project, subject, session, qall=False):
83 | """
84 | Method to query a session
85 |
86 | :param project: project ID on XNAT
87 | :param subject: subject label on XNAT
88 | :param session: session label on XNAT
89 | :return: None
90 | """
91 | if not project:
92 | err = 'argument project not provided.'
93 | raise XnatToolsUserError(__exe__, err)
94 | if not subject:
95 | err = 'argument subject not provided.'
96 | raise XnatToolsUserError(__exe__, err)
97 | if not session:
98 | err = 'argument session not provided.'
99 | raise XnatToolsUserError(__exe__, err)
100 | scans = xnat.get_scans(project, subject, session)
101 | if not scans:
102 |         err = 'no scans found for session %s.' % session
103 | raise XnatToolsUserError(__exe__, err)
104 | print(' *** SCANS ***')
105 | for scan in scans:
106 | query_scan(xnat, project, subject, session, scan['ID'], scan['type'],
107 | qall)
108 |
109 | print(' *** PROCESSES ***')
110 | assrs = xnat.get_assessors(project, subject, session)
111 | for assessor in assrs:
112 | query_assessor(xnat, assessor['label'], qall)
113 |
114 |
115 | def query_scan(xnat, project, subject, session, scan, scantype, qall=False):
116 | """
117 | Method to query a scan
118 |
119 | :param project: project ID on XNAT
120 | :param subject: subject label on XNAT
121 | :param session: session label on XNAT
122 | :param scan: scan ID on XNAT
123 | :param scantype: scan type for display
124 | :return: None
125 | """
126 | print(' - %s -- %s' % (scan, scantype))
127 | if qall:
128 | for resource in xnat.get_scan_resources(project, subject,
129 | session, scan):
130 | print(' -> %s' % (resource['label']))
131 |
132 |
133 | def query_assessor(xnat, assessor_label, qall=False):
134 | """
135 |     Method to query an assessor
136 |
137 | :param assessor_label: assessor label on XNAT
138 | :return: None
139 | """
140 | labels = assessor_label.split('-x-')
141 | print(' - %s' % (assessor_label))
142 | if qall:
143 | list_assrs = xnat.get_assessor_out_resources(
144 | labels[0], labels[1], labels[2], assessor_label)
145 | for out_resource in list_assrs:
146 | print(' -> %s' % (out_resource['label']))
147 |
148 |
149 | def run_xnat_query(args):
150 | """
151 | Main function for xnat query.
152 |
153 | :param args: arguments parse by argparse
154 | """
155 | if args.host:
156 | host = args.host
157 | else:
158 | host = os.environ['XNAT_HOST']
159 | user = args.username
160 |
161 | utils.print_separators()
162 |
163 | with XnatUtils.get_interface(host=host, user=user) as xnat:
164 | print('INFO: connection to xnat <%s>:' % host)
165 | if args.me:
166 | print('List of projects on XNAT you have access to:')
167 | print('---------------------------------------')
168 | for proj in xnat.get_projects():
169 | if xnat.get_subjects(proj['ID']):
170 | print('%*s : %*s' % (-20, proj['ID'], -30, proj['name']))
171 | print('---------------------------------------')
172 |
173 | # if all projects
174 | if args.assessor:
175 | utils.print_separators()
176 | labels = args.assessor.split('-x-')
177 | utils.display_item(labels[0], labels[1], labels[2])
178 | query_assessor(xnat, args.assessor, qall=True)
179 |
180 | elif args.project == 'all' or args.all:
181 | utils.print_separators()
182 | projects_list = xnat.get_projects()
183 | for project in projects_list:
184 | query_project(xnat, project['ID'], qall=True)
185 |
186 |         # subject, session and scan
187 | else:
188 | if args.subject:
189 | utils.print_separators()
190 | if args.session:
191 | utils.display_item(args.project, args.subject,
192 | args.session)
193 | if args.scan:
194 | scan_obj = xnat.select_scan(
195 | args.project, args.subject, args.session,
196 | args.scan)
197 | scan_type = scan_obj.attrs.get('type')
198 |                         query_scan(xnat, args.project, args.subject,
199 |                                    args.session, args.scan, scan_type, qall=True)
200 | else:
201 | query_session(xnat, args.project, args.subject, args.session,
202 | args.all)
203 | else:
204 | utils.display_item(args.project, args.subject)
205 | query_subject(xnat, args.project, args.subject, args.all)
206 | elif args.project:
207 | utils.print_separators()
208 | utils.display_item(args.project)
209 | query_project(xnat, args.project, args.all)
210 | elif not args.me:
211 | raise XnatToolsUserError(__exe__, 'No query selected.')
212 |
213 | utils.print_end(__exe__)
214 |
215 |
216 | def add_to_parser(parser):
217 | """
218 | Method to add arguments to default parser for xnat_tools in utils.
219 |
220 | :param parser: parser object
221 | :return: parser object with new arguments
222 | """
223 |     _h = "Project ID on XNAT, or 'all' to see all the projects."
224 | parser.add_argument("-p", "--project", dest="project", default=None,
225 | help=_h)
226 | _h = "Subject label on Xnat"
227 | parser.add_argument("-s", "--subject", dest="subject", default=None,
228 | help=_h)
229 | _h = "Session label on Xnat"
230 | parser.add_argument("-e", "--experiment", dest="session", default=None,
231 | help=_h)
232 |     _h = "Assessor/Process label on XNAT. E.g.: VUSTP-x-VUSTP1-x-VUSTP1a-x-FS"
233 | parser.add_argument("-a", "--assessor", dest="assessor", default=None,
234 | help=_h)
235 | parser.add_argument("-c", "--scan", dest="scan", default=None,
236 | help="Scan ID on Xnat.")
237 | _h = "Print all the objects on XNAT from the level you are at."
238 | parser.add_argument("--all", dest="all", action="store_true",
239 | help=_h)
240 | parser.add_argument("--me", dest="me", action="store_true",
241 |                         help="List the project IDs that you have access to.")
242 | return parser
243 |
244 |
245 | if __name__ == '__main__':
246 | utils.run_tool(__exe__, __description__, add_to_parser, __purpose__,
247 | run_xnat_query)
248 |
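249 | # Note: assessor labels follow the DAX convention
250 | #     project-x-subject-x-session-x-proctype
251 | # (e.g. VUSTP-x-VUSTP1-x-VUSTP1a-x-FS), which is why query_assessor and
252 | # run_xnat_query can recover the project/subject/session by splitting
253 | # on '-x-'.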
--------------------------------------------------------------------------------