├── .editorconfig ├── .flake8 ├── .github └── ISSUE_TEMPLATE.md ├── .gitignore ├── .pre-commit-config.yaml ├── .travis.yml ├── AUTHORS.md ├── CONTRIBUTING.rst ├── Dockerfile-dmriprep ├── HISTORY.rst ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── dmriprep-docker ├── dmriprep ├── __init__.py ├── cli.py ├── data.py ├── dmriprep.py ├── io.py ├── qc.py ├── run.py └── utils.py ├── dmriprepViewer ├── .babelrc ├── .editorconfig ├── .eslintignore ├── .eslintrc.js ├── .gitignore ├── .postcssrc.js ├── .snyk ├── README.md ├── build │ ├── build.js │ ├── check-versions.js │ ├── logo.png │ ├── utils.js │ ├── vue-loader.conf.js │ ├── webpack.base.conf.js │ ├── webpack.dev.conf.js │ └── webpack.prod.conf.js ├── config │ ├── dev.env.js │ ├── index.js │ ├── prod.env.js │ └── test.env.js ├── deploy.sh ├── index.html ├── package-lock.json ├── package.json ├── src │ ├── App.vue │ ├── assets │ │ └── logo.svg │ ├── components │ │ ├── BrainSprite.vue │ │ ├── Bucket.vue │ │ ├── GroupStats.vue │ │ ├── HelloWorld.vue │ │ ├── LineChart.vue │ │ ├── Report.vue │ │ ├── Sprite4D.vue │ │ └── brainsprite.js │ ├── main.js │ └── router │ │ └── index.js ├── static │ └── .gitkeep └── test │ ├── e2e │ ├── custom-assertions │ │ └── elementCount.js │ ├── nightwatch.conf.js │ ├── runner.js │ └── specs │ │ └── test.js │ └── unit │ ├── .eslintrc │ ├── jest.conf.js │ ├── setup.js │ └── specs │ └── HelloWorld.spec.js ├── docker ├── Dockerfile ├── environment.yml └── license.txt ├── docs ├── Makefile ├── authors.rst ├── conf.py ├── contributing.rst ├── history.rst ├── img │ └── dmriprep_icon.svg ├── index.rst ├── installation.rst ├── make.bat └── usage.rst ├── kubernetes ├── create_kube_job.py ├── delete_cluster.sh ├── delete_job.sh ├── docker │ ├── build_tag_push.sh │ ├── dmriprep_all.sh │ └── dockerfile-dmriprep-kube ├── run_dmriprep.yml.tmpl ├── run_job.sh └── setup_gcp_kubernetes_dmriprep.sh ├── requirements.txt ├── requirements_dev.txt ├── setup.cfg ├── setup.py ├── tests ├── test_dmriprep.py 
└── test_utils.py └── tox.ini /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | charset = utf-8 11 | end_of_line = lf 12 | 13 | [*.bat] 14 | indent_style = tab 15 | end_of_line = crlf 16 | 17 | [LICENSE] 18 | insert_final_newline = false 19 | 20 | [Makefile] 21 | indent_style = tab 22 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 80 3 | select = C,E,F,W,B,B950 4 | ignore = E501,N802,N806,W503,E203 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | * dmriprep version: 2 | * Python version: 3 | * Operating System: 4 | 5 | ### Description 6 | 7 | Describe what you were trying to get done. 8 | Tell us what happened, what went wrong, and what you expected to happen. 9 | 10 | ### What I Did 11 | 12 | ``` 13 | Paste the command(s) you ran and the output. 14 | If there was a crash, please include the traceback here. 
15 | ``` 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # Jupyter Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # SageMath parsed files 81 | *.sage.py 82 | 83 | # dotenv 84 | .env 85 | 86 | # virtualenv 87 | .venv 88 | venv/ 89 | ENV/ 90 | 91 | # Spyder project settings 92 | .spyderproject 93 | .spyproject 94 | 95 | # Rope project settings 96 | .ropeproject 97 | 98 | # mkdocs documentation 99 | /site 100 | 101 | # mypy 102 | .mypy_cache/ 103 | 104 | # Mac OS nonsense: 105 | .DS_Store 106 | 107 | #kubernetes stuff 108 | kubernetes/jobs/ 109 | 
-------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/python/black 3 | rev: stable 4 | hooks: 5 | - id: black 6 | language_version: python3.6 7 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: node_js 3 | node_js: 4 | - '6.0' 5 | cache: 6 | directories: 7 | - dmriprepViewer/node_modules 8 | branches: 9 | only: 10 | - master 11 | notifications: 12 | email: false 13 | before_script: 14 | - cd dmriprepViewer 15 | - npm prune 16 | script: 17 | - npm install 18 | - npm run build 19 | after_success: 20 | - bash ./deploy.sh 21 | env: 22 | global: 23 | - GH_USER: nipy 24 | - GH_REPO: dmriprep 25 | - secure: raeexu68fu/Hu4Ey0VkP3ljvAJut2X8xRAkTXO9nL6HlWQGt1pjYhXSxaLLZVQpgpDcDeOziUUCb4y7UALFHTkIrYrzp/Jq3Cn10tmsItEMAhH0cfzTuMr0Huhl/Zeiw67Qn16W7xpYEtV7zHBOYicQIFvzAOhNWDy/A/9c7XkKsEU47FqN/vxodCv+WMHQyvuM3ceaofccF+mVekAg7PztPrukzpwP+uishNfSeoHI3WeUDjX82bQZ3KE/oRHrB+YCB5rFg0wfekfsUnkyhS9HWF9CECM5mKWcPle0AZpjQXODrUIXk8dRSAprAzD1XGxRzmYG3nYPGx9a6gm6ofueHbLPery/0cLVYNX4IHBR2UpW6aA9VsgMBgG8LAdGiMqvvio2CZ+/tGdmIXx5twRCTGaJ4qJUdyyL881ChaUl8rBde7R8EtCBThKeV8Tnnun2PfLPt12qAc+EAc1r6a9zf/Mb8ahjTrZSU1heUn5irs44KvJk+QuftW9hF6hIvBfug7SHBIAdvyCd8TjMNShJNVPqQZU6NWSwTAGAj4Y/QsocvxYRoKHIhpyHbMZ+Lj+P27VfNYdkTe/viQcLEg8x4YYHCaKiYl4iwepEtyevtbtzUtb1Ba5clotgisw/PFao9q6GrWuy57eb/ZM8aKSHfb4JhN0swmi1t0uBOzP0= 26 | -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | # Credits 2 | 3 | ## Development Leads 4 | 5 | - Anisha Keshavan [Corresponding developer] 6 | - Adam Richie-Halford 7 | 8 | ## Contributors 9 | 10 | - Ariel Rokem 11 | - Add your name 
here 12 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Contributing 5 | ============ 6 | 7 | Contributions are welcome, and they are greatly appreciated! Every little bit 8 | helps, and credit will always be given. 9 | 10 | Installing a development version of dmriprep 11 | -------------------------------------------- 12 | 13 | First, you can install a development version of dmriprep by cloning this repository 14 | and then typing:: 15 | 16 | $ pip install -e .[dev] 17 | 18 | Activate the pre-commit formatting hook by typing:: 19 | 20 | $ pre-commit install 21 | 22 | Before committing your work, you can check for formatting issues or error by typing:: 23 | 24 | $ make lint 25 | $ make test 26 | 27 | Types of Contributions 28 | ---------------------- 29 | 30 | You can contribute in many ways: 31 | 32 | Report Bugs 33 | ~~~~~~~~~~~ 34 | 35 | Report bugs at https://github.com/nipy/dmriprep/issues. 36 | 37 | If you are reporting a bug, please include: 38 | 39 | * Your operating system name and version. 40 | * Any details about your local setup that might be helpful in troubleshooting. 41 | * Detailed steps to reproduce the bug. 42 | 43 | Fix Bugs 44 | ~~~~~~~~ 45 | 46 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help 47 | wanted" is open to whoever wants to implement it. 48 | 49 | Implement Features 50 | ~~~~~~~~~~~~~~~~~~ 51 | 52 | Look through the GitHub issues for features. Anything tagged with "enhancement" 53 | and "help wanted" is open to whoever wants to implement it. 54 | 55 | Write Documentation 56 | ~~~~~~~~~~~~~~~~~~~ 57 | 58 | dmriprep could always use more documentation, whether as part of the 59 | official dmriprep docs, in docstrings, or even on the web in blog posts, 60 | articles, and such. 
61 | 62 | Submit Feedback 63 | ~~~~~~~~~~~~~~~ 64 | 65 | The best way to send feedback is to file an issue at https://github.com/nipy/dmriprep/issues. 66 | 67 | If you are proposing a feature: 68 | 69 | * Explain in detail how it would work. 70 | * Keep the scope as narrow as possible, to make it easier to implement. 71 | * Remember that this is a volunteer-driven project, and that contributions 72 | are welcome :) 73 | 74 | Get Started! 75 | ------------ 76 | 77 | Ready to contribute? Here's how to set up `dmriprep` for local development. 78 | 79 | 1. Fork the `dmriprep` repo on GitHub. 80 | 2. Clone your fork locally:: 81 | 82 | $ git clone git@github.com:your_name_here/dmriprep.git 83 | 84 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 85 | 86 | $ mkvirtualenv dmriprep 87 | $ cd dmriprep/ 88 | $ python setup.py develop 89 | 90 | 4. Create a branch for local development:: 91 | 92 | $ git checkout -b name-of-your-bugfix-or-feature 93 | 94 | Now you can make your changes locally. 95 | 96 | 5. When you're done making changes, check that your changes pass flake8 and the 97 | tests, including testing other Python versions with tox:: 98 | 99 | $ flake8 dmriprep tests 100 | $ python setup.py test or py.test 101 | $ tox 102 | 103 | To get flake8 and tox, just pip install them into your virtualenv. 104 | 105 | 6. Commit your changes and push your branch to GitHub:: 106 | 107 | $ git add . 108 | $ git commit -m "Your detailed description of your changes." 109 | $ git push origin name-of-your-bugfix-or-feature 110 | 111 | 7. Submit a pull request through the GitHub website. 112 | 113 | Pull Request Guidelines 114 | ----------------------- 115 | 116 | Before you submit a pull request, check that it meets these guidelines: 117 | 118 | 1. The pull request should include tests. 119 | 2. If the pull request adds functionality, the docs should be updated. 
Put 120 | your new functionality into a function with a docstring, and add the 121 | feature to the list in README.rst. 122 | 3. The pull request should work for Python 2.7, 3.4, 3.5 and 3.6, and for PyPy. Check 123 | https://travis-ci.org/nipy/dmriprep/pull_requests 124 | and make sure that the tests pass for all supported Python versions. 125 | 126 | Tips 127 | ---- 128 | 129 | To run a subset of tests:: 130 | 131 | $ py.test tests.test_dmriprep 132 | 133 | 134 | Deploying 135 | --------- 136 | 137 | A reminder for the maintainers on how to deploy. 138 | Make sure all your changes are committed (including an entry in HISTORY.rst). 139 | Then run:: 140 | 141 | $ bumpversion patch # possible: major / minor / patch 142 | $ git push 143 | $ git push --tags 144 | 145 | Travis will then deploy to PyPI if tests pass. 146 | -------------------------------------------------------------------------------- /Dockerfile-dmriprep: -------------------------------------------------------------------------------- 1 | FROM dmriprep:dev 2 | 3 | ADD . /dmriprep 4 | WORKDIR /dmriprep 5 | RUN /neurodocker/startup.sh python setup.py install 6 | WORKDIR / 7 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | History 3 | ======= 4 | 5 | 0.1.0 (2018-09-06) 6 | ------------------ 7 | 8 | * First release on GitHub. 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | 3 | BSD License 4 | 5 | Copyright (c) 2018, Anisha Keshavan and Adam Richie-Halford 6 | All rights reserved. 
7 | 8 | Redistribution and use in source and binary forms, with or without modification, 9 | are permitted provided that the following conditions are met: 10 | 11 | * Redistributions of source code must retain the above copyright notice, this 12 | list of conditions and the following disclaimer. 13 | 14 | * Redistributions in binary form must reproduce the above copyright notice, this 15 | list of conditions and the following disclaimer in the documentation and/or 16 | other materials provided with the distribution. 17 | 18 | * Neither the name of the copyright holder nor the names of its 19 | contributors may be used to endorse or promote products derived from this 20 | software without specific prior written permission. 21 | 22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 23 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 24 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 25 | IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, 26 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 27 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 28 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 29 | OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 30 | OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED 31 | OF THE POSSIBILITY OF SUCH DAMAGE. 
32 | 33 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include HISTORY.rst 4 | include LICENSE 5 | include README.rst 6 | 7 | recursive-include tests * 8 | recursive-exclude * __pycache__ 9 | recursive-exclude * *.py[co] 10 | 11 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 12 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-test clean-pyc clean-build docs help 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | 7 | try: 8 | from urllib import pathname2url 9 | except: 10 | from urllib.request import pathname2url 11 | 12 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 13 | endef 14 | export BROWSER_PYSCRIPT 15 | 16 | define PRINT_HELP_PYSCRIPT 17 | import re, sys 18 | 19 | for line in sys.stdin: 20 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 21 | if match: 22 | target, help = match.groups() 23 | print("%-20s %s" % (target, help)) 24 | endef 25 | export PRINT_HELP_PYSCRIPT 26 | 27 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 28 | 29 | help: 30 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 31 | 32 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 33 | 34 | clean-build: ## remove build artifacts 35 | rm -fr build/ 36 | rm -fr dist/ 37 | rm -fr .eggs/ 38 | find . -name '*.egg-info' -exec rm -fr {} + 39 | find . -name '*.egg' -exec rm -f {} + 40 | 41 | clean-pyc: ## remove Python file artifacts 42 | find . -name '*.pyc' -exec rm -f {} + 43 | find . -name '*.pyo' -exec rm -f {} + 44 | find . -name '*~' -exec rm -f {} + 45 | find . 
-name '__pycache__' -exec rm -fr {} + 46 | 47 | clean-test: ## remove test and coverage artifacts 48 | rm -fr .tox/ 49 | rm -f .coverage 50 | rm -fr htmlcov/ 51 | rm -fr .pytest_cache 52 | 53 | lint: ## check style with flake8 54 | flake8 dmriprep tests 55 | 56 | test: ## run tests quickly with the default Python 57 | py.test 58 | 59 | test-all: ## run tests on every Python version with tox 60 | tox 61 | 62 | coverage: ## check code coverage quickly with the default Python 63 | coverage run --source dmriprep -m pytest 64 | coverage report -m 65 | coverage html 66 | $(BROWSER) htmlcov/index.html 67 | 68 | docs: ## generate Sphinx HTML documentation, including API docs 69 | rm -f docs/dmriprep.rst 70 | rm -f docs/modules.rst 71 | sphinx-apidoc -o docs/ dmriprep 72 | $(MAKE) -C docs clean 73 | $(MAKE) -C docs html 74 | $(BROWSER) docs/_build/html/index.html 75 | 76 | servedocs: docs ## compile the docs watching for changes 77 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 78 | 79 | release: dist ## package and upload a release 80 | twine upload dist/* 81 | 82 | dist: clean ## builds source and wheel package 83 | python setup.py sdist 84 | python setup.py bdist_wheel 85 | ls -l dist 86 | 87 | install: clean ## install the package to the active Python's site-packages 88 | python setup.py install 89 | 90 | docker-dev: ## build the development environment 91 | docker build -t dmriprep:dev -f docker/Dockerfile docker/. 92 | 93 | docker: docker-dev 94 | docker build -t dmriprep:prod -f Dockerfile-dmriprep . 95 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | DEVELOPMENT OF dmriprep HAS MOVED TO [https://github.com/nipreps/dmriprep](https://github.com/nipreps/dmriprep). 2 | 3 | PLEASE REFER ISSUES AND PULL REQUESTS TO THAT REPO. 
4 | -------------------------------------------------------------------------------- /dmriprep-docker: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z "$1" ]; then 4 | echo "usage: dmriprep-docker /input_dir /output_dir" 5 | exit 6 | fi 7 | 8 | if [ -z "$2" ]; then 9 | echo "usage: dmriprep-docker /input_dir /output_dir" 10 | exit 11 | fi 12 | 13 | if [ -z "$3" ]; then 14 | docker run --rm -ti -v "$1":/inputs -v "$2":/outputs dmriprep:prod dmriprep /inputs /outputs 15 | else 16 | inputDir=$1 17 | outputDir=$2 18 | 19 | shift 2 20 | 21 | docker run --rm -ti -v "$inputDir":/inputs -v "$outputDir":/outputs dmriprep:prod dmriprep /inputs /outputs --participant-label "$@" 22 | fi 23 | -------------------------------------------------------------------------------- /dmriprep/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """Top-level package for dmriprep.""" 4 | 5 | __author__ = """Anisha Keshavan""" 6 | __email__ = 'anishakeshavan@gmail.com' 7 | __version__ = '0.1.0' 8 | 9 | import errno 10 | import logging 11 | import os 12 | import warnings 13 | 14 | # Filter warnings that are visible whenever you import another package that 15 | # was compiled against an older numpy than is installed. 16 | warnings.filterwarnings("ignore", message="numpy.dtype size changed") 17 | warnings.filterwarnings("ignore", message="numpy.ufunc size changed") 18 | 19 | from . import data 20 | from . import io 21 | from . import qc 22 | from . 
import run 23 | 24 | module_logger = logging.getLogger(__name__) 25 | 26 | # get the log level from environment variable 27 | if "DMIRPREP_LOGLEVEL" in os.environ: 28 | loglevel = os.environ['DMRIPREP_LOGLEVEL'] 29 | module_logger.setLevel(getattr(logging, loglevel.upper())) 30 | else: 31 | module_logger.setLevel(logging.WARNING) 32 | 33 | # create a file handler 34 | logpath = os.path.join(os.path.expanduser('~'), '.dmriprep', 'dmriprep.log') 35 | 36 | # Create the config directory if it doesn't exist 37 | logdir = os.path.dirname(logpath) 38 | try: 39 | os.makedirs(logdir) 40 | except OSError as e: 41 | pre_existing = (e.errno == errno.EEXIST and os.path.isdir(logdir)) 42 | if pre_existing: 43 | pass 44 | else: 45 | raise e 46 | 47 | handler = logging.FileHandler(logpath, mode='w') 48 | handler.setLevel(logging.DEBUG) 49 | 50 | # create a logging format 51 | formatter = logging.Formatter( 52 | '%(asctime)s - %(name)s - %(levelname)s - %(message)s' 53 | ) 54 | handler.setFormatter(formatter) 55 | 56 | # add the handlers to the logger 57 | module_logger.addHandler(handler) 58 | module_logger.info('Started new dmriprep session') 59 | -------------------------------------------------------------------------------- /dmriprep/cli.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """Console script for dmriprep.""" 4 | import os 5 | import sys 6 | import warnings 7 | 8 | import click 9 | 10 | from . import io 11 | from . import run 12 | from .data import get_dataset 13 | 14 | # Filter warnings that are visible whenever you import another package that 15 | # was compiled against an older numpy than is installed. 
16 | warnings.filterwarnings("ignore", message="numpy.dtype size changed") 17 | warnings.filterwarnings("ignore", message="numpy.ufunc size changed") 18 | 19 | 20 | @click.command() 21 | @click.option('--participant-label', 22 | help="The label(s) of the participant(s) that should be" 23 | "analyzed. The label corresponds to" 24 | "sub- from the BIDS spec (so it does" 25 | "not include 'sub-'). If this parameter is not provided" 26 | "all subjects will be analyzed. Multiple participants" 27 | "can be specified with a space separated list.", 28 | default=None) 29 | @click.option('--eddy-niter', 30 | help="Fixed number of eddy iterations. See " 31 | "https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/eddy/UsersGuide" 32 | "#A--niter", 33 | default=5, type=(int)) 34 | @click.option('--slice-outlier-threshold', 35 | help="Number of allowed outlier slices per volume. " 36 | "If this is exceeded the volume is dropped from analysis. " 37 | "If an int is provided, it is treated as number of allowed " 38 | "outlier slices. If a float between 0 and 1 " 39 | "(exclusive) is provided, it is treated the fraction of " 40 | "allowed outlier slices.", 41 | default=0.02) 42 | @click.argument('bids_dir', 43 | ) 44 | @click.argument('output_dir', 45 | ) 46 | @click.argument('analysis_level', 47 | type=click.Choice(['participant', 'group']), 48 | default='participant') 49 | def main(participant_label, bids_dir, output_dir, 50 | eddy_niter=5, slice_outlier_threshold=0.02, 51 | analysis_level="participant"): 52 | """ 53 | BIDS_DIR: The directory with the input dataset formatted according to 54 | the BIDS standard. 55 | 56 | OUTPUT_DIR: The directory where the output files should be stored. 57 | If you are running a group level analysis, this folder 58 | should be prepopulated with the results of 59 | the participant level analysis. 60 | 61 | ANALYSIS_LEVEL: Level of the analysis that will be performed. Multiple 62 | participant level analyses can be run independently 63 | (in parallel). 
if analysis_level != 'participant':
Only S3 is supported right now.") 112 | @click.option('--subject', default=None, help="Subject id to upload (optional)") 113 | def upload(output_dir, bucket, access_key, secret_key, provider='s3', subject=None): 114 | """ 115 | OUTPUT_DIR: The directory where the output files were stored. 116 | 117 | BUCKET: The cloud bucket name to upload data to. 118 | """ 119 | import boto3 120 | from dask import compute, delayed 121 | from glob import glob 122 | from tqdm.auto import tqdm 123 | 124 | output_dir = os.path.abspath(output_dir) 125 | if not output_dir.endswith('/'): 126 | output_dir += '/' 127 | 128 | if provider == 's3' or provider == 'S3': 129 | client = boto3.client('s3', aws_access_key_id=access_key, aws_secret_access_key=secret_key) 130 | 131 | if subject is not None: 132 | assert os.path.exists(os.path.join(output_dir, subject)), 'this subject id does not exist!' 133 | subjects = [subject] 134 | else: 135 | subjects = [os.path.split(s)[1] for s in glob(os.path.join(output_dir, 'sub-*'))] 136 | 137 | def upload_subject(sub, sub_idx): 138 | base_dir = os.path.join(output_dir, sub, 'dmriprep') 139 | for root, dirs, files in os.walk(base_dir): 140 | if len(files): 141 | for f in tqdm(files, desc=f"Uploading {sub} {root.split('/')[-1]}", position=sub_idx): 142 | filepath = os.path.join(root, f) 143 | key = root.replace(output_dir, '') 144 | client.upload_file(filepath, bucket, os.path.join(key, f)) 145 | 146 | uploads = [delayed(upload_subject)(s, idx) for idx, s in enumerate(subjects)] 147 | _ = list(compute(*uploads, scheduler="threads")) 148 | else: 149 | raise NotImplementedError('Only S3 is the only supported provider for data uploads at the moment') 150 | 151 | 152 | if __name__ == "__main__": 153 | sys.exit(main()) # pragma: no cover 154 | -------------------------------------------------------------------------------- /dmriprep/dmriprep.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 
"""Main module.""" 4 | 5 | import logging 6 | import os 7 | import os.path as op 8 | import subprocess 9 | 10 | from .run import run_dmriprep 11 | 12 | 13 | mod_logger = logging.getLogger(__name__) 14 | -------------------------------------------------------------------------------- /dmriprep/io.py: -------------------------------------------------------------------------------- 1 | """ 2 | BIDS-functions to return inputs for the run.py functions. 3 | 4 | """ 5 | import os 6 | import os.path as op 7 | from glob import glob 8 | 9 | import bids 10 | 11 | 12 | def get_bids_subject_input_files(subject_id, bids_input_directory): 13 | """ 14 | Function to return the needed files for dmriprep based on subject id and a bids directory. 15 | 16 | :param subject_id: string 17 | :param bids_input_directory: string to bids dir 18 | :return: dict of inputs 19 | """ 20 | layout = bids.layout.BIDSLayout(bids_input_directory, validate=False) 21 | subjects = layout.get_subjects() 22 | assert subject_id in subjects, "subject {} is not in the bids folder".format(subject_id) 23 | 24 | ap_file = layout.get(subject=subject_id, 25 | datatype='fmap', 26 | suffix='epi', 27 | dir='AP', 28 | extensions=['.nii', '.nii.gz']) 29 | assert len(ap_file) == 1, 'found {} ap fieldmap files and we need just 1'.format(len(ap_file)) 30 | 31 | pa_file = layout.get(subject=subject_id, 32 | datatype='fmap', 33 | suffix='epi', 34 | dir='PA', 35 | extensions=['.nii', '.nii.gz']) 36 | assert len(pa_file) == 1, 'found {} pa fieldmap files and we need just 1'.format(len(pa_file)) 37 | 38 | dwi_files = layout.get(subject=subject_id, datatype='dwi', suffix='dwi') 39 | valid_dwi_files = [] 40 | 41 | for d in dwi_files: 42 | if d.path.startswith(op.abspath(op.join(bids_input_directory, 'sub-' + subject_id))): 43 | valid_dwi_files.append(d.path) 44 | 45 | dwi_file = [d for d in valid_dwi_files if d.endswith('.nii.gz') and not "TRACE" in d] 46 | assert len(dwi_file) == 1, 'found {} dwi files and we need just 
1'.format(len(dwi_file)) 47 | 48 | bval_file = [d.path for d in dwi_files if d.filename.endswith('.bval')] 49 | assert len(bval_file) == 1, 'found {} bval files and we need just 1'.format(len(bval_file)) 50 | 51 | bvec_file = [d.path for d in dwi_files if d.filename.endswith('.bvec')] 52 | assert len(bvec_file) == 1, 'found {} bvec files and we need just 1'.format(len(bvec_file)) 53 | 54 | subjects_dir = op.join(bids_input_directory, 'derivatives', 'sub-'+subject_id) 55 | 56 | if not op.exists(op.join(subjects_dir, 'freesurfer')): 57 | raise NotImplementedError('we have not yet implemented a version of dmriprep that runs freesurfer for you.' 58 | 'please run freesurfer and try again' 59 | ) 60 | 61 | outputs = dict(subject_id="sub-"+subject_id, 62 | dwi_file=dwi_file[0], 63 | dwi_file_AP=ap_file[0].path, 64 | dwi_file_PA=pa_file[0].path, 65 | bvec_file=bvec_file[0], 66 | bval_file=bval_file[0], 67 | subjects_dir=op.abspath(subjects_dir)) 68 | return outputs 69 | 70 | 71 | def get_bids_files(subject_id, bids_input_directory): 72 | """ 73 | subject to get all bids files for am optional subject id and bids dir. if subject id is blank then all subjects 74 | are used. 
75 | :param subject_id: 76 | :param bids_input_directory: 77 | :return: 78 | """ 79 | if not subject_id: 80 | subjects = [s.split("/")[-1].replace("sub-", "") for s in glob(os.path.join(bids_input_directory, "sub-*"))] 81 | assert len(subjects), "No subject files found in bids directory" 82 | return [get_bids_subject_input_files(sub, bids_input_directory) for sub in subjects] 83 | else: 84 | return [get_bids_subject_input_files(subject_id, bids_input_directory)] 85 | -------------------------------------------------------------------------------- /dmriprep/qc.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import os.path as op 3 | from io import BytesIO 4 | 5 | import matplotlib 6 | matplotlib.use('agg') 7 | import matplotlib.pyplot as plt 8 | import nibabel as nib 9 | import numpy as np 10 | 11 | from dipy.segment.mask import median_otsu 12 | from nipype.utils.filemanip import save_json, load_json 13 | 14 | 15 | def reorient_array(data, aff): 16 | # rearrange the matrix to RAS orientation 17 | orientation = nib.orientations.io_orientation(aff) 18 | data_RAS = nib.orientations.apply_orientation(data, orientation) 19 | # In RAS 20 | return nib.orientations.apply_orientation( 21 | data_RAS, 22 | nib.orientations.axcodes2ornt("IPL") 23 | ) 24 | 25 | 26 | def mplfig(data, outfile=None, as_bytes=False): 27 | fig = plt.figure(frameon=False, dpi=data.shape[0]) 28 | fig.set_size_inches(float(data.shape[1])/data.shape[0], 1) 29 | ax = plt.Axes(fig, [0., 0., 1., 1.]) 30 | ax.set_axis_off() 31 | fig.add_axes(ax) 32 | ax.imshow(data, aspect=1, cmap=plt.cm.Greys_r) # previous aspect="normal" 33 | if outfile: 34 | fig.savefig(outfile, dpi=data.shape[0], transparent=True) 35 | plt.close() 36 | return outfile 37 | if as_bytes: 38 | IObytes = BytesIO() 39 | plt.savefig(IObytes, format='png', dpi=data.shape[0], transparent=True) 40 | IObytes.seek(0) 41 | base64_jpgData = base64.b64encode(IObytes.read()) 42 | return 
def mplfigcontour(data, outfile=None, as_bytes=False):
    """Render ``data`` as a red contour plot on a transparent background.

    :param data: 2D array (e.g. a brain-mask slice mosaic) to contour.
    :param outfile: if given, save a PNG there and return the path.
    :param as_bytes: if True (and no ``outfile``), return the PNG as a
        base64-encoded ASCII string.
    :return: ``outfile``, a base64 string, or None if neither was requested.
    """
    fig = plt.figure(frameon=False)
    fig.set_size_inches(float(data.shape[1]) / data.shape[0], 1)
    ax = plt.Axes(fig, [0., 0., 1., 1.])
    ax.set_axis_off()
    fig.add_axes(ax)

    # All-NaN background: imshow establishes the axes extent without
    # painting anything, so only the contour lines are visible.
    bg = np.full(data.shape, np.nan)
    ax.imshow(bg, aspect=1, cmap=plt.cm.Greys_r)  # used to be aspect="normal"
    ax.contour(data, colors="red", linewidths=0.1)
    if outfile:
        fig.savefig(outfile, dpi=data.shape[0], transparent=True)
        plt.close()
        return outfile
    if as_bytes:
        io_bytes = BytesIO()
        plt.savefig(io_bytes, format='png', dpi=data.shape[0], transparent=True)
        io_bytes.seek(0)
        encoded = base64.b64encode(io_bytes.read())
        # Fix: the original leaked the figure on this path (plt.close was
        # only called in the outfile branch), accumulating open figures.
        plt.close()
        return encoded.decode("ascii")
    # Fix: also release the figure when no output was requested.
    plt.close()


def load_and_reorient(filename):
    """Load a NIfTI file and return its data reoriented via reorient_array."""
    img = nib.load(filename)
    # NOTE(review): img.get_data() is deprecated in newer nibabel in favor
    # of get_fdata()/np.asanyarray(img.dataobj); kept as-is because
    # get_fdata() changes the returned dtype — confirm nibabel version.
    data, aff = img.get_data(), img.affine
    return reorient_array(data, aff)


def _pad_bounds(target, size):
    """Return (before, after) pad widths centering ``size`` samples in ``target``."""
    before = (target - size) // 2
    # Fix: the original computed the trailing pad as
    # ((target-size) + (size % 2 > 0)) // 2, which under-pads by one sample
    # whenever (target - size) is odd and size is even. Deriving it as the
    # remainder guarantees before + after == target - size in every case.
    return before, (target - size) - before


def reshape3D(data, n=256):
    """Zero-pad the first two axes of a 3D array to ``n`` x ``n``, centered.

    The third axis (slices) is left untouched.
    """
    pad0 = _pad_bounds(n, data.shape[0])
    pad1 = _pad_bounds(n, data.shape[1])
    return np.pad(data, (pad0, pad1, (0, 0)),
                  "constant", constant_values=(0, 0))


def reshape4D(data, n=256):
    """Zero-pad the first two axes of a 4D array to ``n`` x ``n``, centered.

    The last two axes (e.g. channel and volume) are left untouched.
    """
    pad0 = _pad_bounds(n, data.shape[0])
    pad1 = _pad_bounds(n, data.shape[1])
    return np.pad(data, (pad0, pad1, (0, 0), (0, 0)),
                  "constant", constant_values=(0, 0))
def nearest_square(limit):
    """Return the smallest integer m with m**2 >= limit (ceil of sqrt).

    Used to pick the number of mosaic rows for ``limit`` tiles.
    """
    root = 0
    while (root + 1) ** 2 < limit:
        root += 1
    if root ** 2 == limit:
        return root
    return root + 1


def create_sprite_from_tiles(tile, out_file=None, as_bytes=False):
    """Assemble square tiles into a single sprite mosaic.

    :param tile: 3D array (x, y, slice) or 4D array (x, y, channel, slice);
        the x/y tiles are assumed square (``tile.shape[0]`` pixels).
    :param out_file: if given, also save the mosaic as a PNG via ``mplfig``.
    :param as_bytes: if True, return the mosaic rendered to base64 PNG
        (under key ``img``) instead of the raw array (key ``mosaic``).
    :return: dict with mosaic/img plus layout metadata N (rows), M (cols),
        pix (tile edge length) and num_slices.
    """
    num_slices = tile.shape[-1]
    N = nearest_square(num_slices)
    M = int(np.ceil(num_slices / N))
    pix = tile.shape[0]  # tiles are square, so one edge length suffices

    # NaN-filled canvas: cells with no tile stay transparent when rendered.
    if tile.ndim == 3:
        mosaic = np.full((N * pix, M * pix), np.nan)
    else:
        mosaic = np.full((N * pix, M * pix, tile.shape[-2]), np.nan)

    # Map slice index -> (row, col) cell via a helper index grid.
    helper = np.arange(N * M).reshape((N, M))
    for t in range(num_slices):
        row, col = np.nonzero(helper == t)
        xmin, xmax = row[0] * pix, (row[0] + 1) * pix
        ymin, ymax = col[0] * pix, (col[0] + 1) * pix
        if tile.ndim == 3:
            mosaic[xmin:xmax, ymin:ymax] = tile[:, :, t]
        else:
            mosaic[xmin:xmax, ymin:ymax, :] = tile[:, :, :, t]

    if as_bytes:
        img = mplfig(mosaic, out_file, as_bytes=as_bytes)
        return dict(img=img, N=N, M=M, pix=pix, num_slices=num_slices)

    if out_file:
        # Fix: the original assigned a never-used tuple
        # ``img = mplfig(mosaic, out_file), N, M, pix, num_slices``;
        # only the save side effect was intended, so call mplfig directly.
        mplfig(mosaic, out_file)

    return dict(mosaic=mosaic, N=N, M=M, pix=pix, num_slices=num_slices)
def createB0_ColorFA_Mask_Sprites(b0_file, colorFA_file, mask_file):
    """Build brainsprite dicts for the b0 volume, masked color-FA and mask.

    :param b0_file: NIfTI with the (first) b0 volume.
    :param colorFA_file: NIfTI with the RGB color-FA map.
    :param mask_file: NIfTI with the anatomical brain mask.
    :return: tuple (b0_sprite, colorFA_sprite, mask_sprite) of dicts.
    """
    color_fa = load_and_reorient(colorFA_file)
    b0_vol = load_and_reorient(b0_file)[:, :, :, 0]
    brain_mask = load_and_reorient(mask_file)

    pad_to = max(b0_vol.shape[:2])

    # b0 sprite: pad to a square, Otsu-mask it for re-use on the FA map.
    padded_b0 = reshape3D(b0_vol, pad_to)
    _, otsu_mask = median_otsu(padded_b0)
    b0_sprite = create_sprite_from_tiles(padded_b0, as_bytes=True)
    b0_sprite['img_type'] = 'brainsprite'

    # color-FA sprite, hidden (NaN) outside the b0-derived mask.
    fa_padded = reshape4D(color_fa, pad_to)
    fa_padded[np.logical_not(otsu_mask)] = np.nan
    fa_padded = np.moveaxis(fa_padded, -2, -1)
    fa_sprite = create_sprite_from_tiles(fa_padded, as_bytes=True)
    fa_sprite['img_type'] = 'brainsprite'

    # anatomical mask rendered as a contour overlay.
    mask_sprite = create_sprite_from_tiles(reshape3D(brain_mask, pad_to))
    mask_sprite['img'] = mplfigcontour(mask_sprite.pop("mosaic"),
                                       as_bytes=True)

    return b0_sprite, fa_sprite, mask_sprite


def create_report_json(dwi_corrected_file, eddy_rms, eddy_report,
                       color_fa_file, anat_mask_file,
                       outlier_indices,
                       eddy_qc_file,
                       outpath=op.abspath('./report.json')):
    """Aggregate QC artifacts into a single JSON report for the viewer.

    :param dwi_corrected_file: eddy-corrected DWI NIfTI.
    :param eddy_rms: eddy movement RMS text file (loaded with genfromtxt).
    :param eddy_report: eddy text report (included line by line).
    :param color_fa_file: color-FA NIfTI for the sprite images.
    :param anat_mask_file: anatomical mask NIfTI for the contour sprite.
    :param outlier_indices: array of outlier volume indices.
    :param eddy_qc_file: eddy QUAD JSON file.
    :param outpath: destination for the report JSON. NOTE(review): the
        default is resolved with abspath at import time, so it reflects the
        cwd when the module was imported, not when this is called — confirm
        that is intended.
    :return: path of the written report JSON.
    """
    report = {'dwi_corrected': createSprite4D(dwi_corrected_file)}

    b0_sprite, fa_sprite, mask_sprite = createB0_ColorFA_Mask_Sprites(
        dwi_corrected_file, color_fa_file, anat_mask_file)
    report['b0'] = b0_sprite
    report['colorFA'] = fa_sprite
    report['anat_mask'] = mask_sprite
    report['outlier_volumes'] = outlier_indices.tolist()

    with open(eddy_report, 'r') as report_fh:
        report['eddy_report'] = report_fh.readlines()

    report['eddy_params'] = np.genfromtxt(eddy_rms).tolist()
    report['eddy_quad'] = load_json(eddy_qc_file)
    save_json(outpath, report)
    return outpath
"""
Utility functions for other submodules

"""
import itertools
# Fix: ``logging`` was used below (mod_logger) but never imported,
# making the module raise NameError at import time.
import logging

import numpy as np


mod_logger = logging.getLogger(__name__)


def is_hemispherical(vecs):
    """Test whether all points on a unit sphere lie in the same hemisphere.

    Parameters
    ----------
    vecs : numpy.ndarray
        2D numpy array with shape (N, 3) where N is the number of points.
        All points must lie on the unit sphere.

    Returns
    -------
    is_hemi : bool
        If True, one can find a hemisphere that contains all the points.
        If False, then the points do not lie in any hemisphere

    pole : numpy.ndarray
        If `is_hemi == True`, then pole is the "central" pole of the
        input vectors. Otherwise, pole is the zero vector.

    References
    ----------
    https://rstudio-pubs-static.s3.amazonaws.com/27121_a22e51b47c544980bad594d5e0bb2d04.html # noqa
    """
    if vecs.shape[1] != 3:
        raise ValueError("Input vectors must be 3D vectors")
    if not np.allclose(1, np.linalg.norm(vecs, axis=1)):
        raise ValueError("Input vectors must be unit vectors")

    # Generate all pairwise cross products.
    # NOTE(review): antiparallel input pairs give a zero cross product and
    # a divide-by-zero NaN below — callers must not pass antipodal vectors.
    v0, v1 = zip(*itertools.permutations(vecs, 2))
    cross_prods = np.cross(v0, v1)

    # Normalize them
    cross_prods /= np.linalg.norm(cross_prods, axis=1)[:, np.newaxis]

    # `cross_prods` now contains all candidate vertex points for "the
    # polygon" in the reference. "The polygon" is a subset. Find which
    # points belong to the polygon using a dot product test with each of
    # the original vectors.
    angles = np.arccos(np.dot(cross_prods, vecs.transpose()))

    # And test whether it is orthogonal or less
    dot_prod_test = angles <= np.pi / 2.0

    # If there is at least one candidate that is within 90 degrees of every
    # input vector, then the points lie on some hemisphere.
    coverage = np.sum(dot_prod_test.astype(int), axis=1)
    is_hemi = len(vecs) in coverage

    if is_hemi:
        # The pole is the mean of all candidates covering every input.
        vertices = cross_prods[coverage == len(vecs)]
        pole = np.mean(vertices, axis=0)
        pole /= np.linalg.norm(pole)
    else:
        pole = np.array([0.0, 0.0, 0.0])
    return is_hemi, pole
-------------------------------------------------------------------------------- /dmriprepViewer/.eslintrc.js: -------------------------------------------------------------------------------- 1 | // https://eslint.org/docs/user-guide/configuring 2 | 3 | module.exports = { 4 | root: true, 5 | parserOptions: { 6 | parser: 'babel-eslint' 7 | }, 8 | env: { 9 | browser: true, 10 | }, 11 | // https://github.com/vuejs/eslint-plugin-vue#priority-a-essential-error-prevention 12 | // consider switching to `plugin:vue/strongly-recommended` or `plugin:vue/recommended` for stricter rules. 13 | extends: ['plugin:vue/essential', 'airbnb-base'], 14 | // required to lint *.vue files 15 | plugins: [ 16 | 'vue' 17 | ], 18 | // check if imports actually resolve 19 | settings: { 20 | 'import/resolver': { 21 | webpack: { 22 | config: 'build/webpack.base.conf.js' 23 | } 24 | } 25 | }, 26 | // add your custom rules here 27 | rules: { 28 | // don't require .vue extension when importing 29 | 'import/extensions': ['error', 'always', { 30 | js: 'never', 31 | vue: 'never' 32 | }], 33 | // disallow reassignment of function parameters 34 | // disallow parameter object manipulation except for specific exclusions 35 | 'no-param-reassign': ['error', { 36 | props: true, 37 | ignorePropertyModificationsFor: [ 38 | 'state', // for vuex state 39 | 'acc', // for reduce accumulators 40 | 'e' // for e.returnvalue 41 | ] 42 | }], 43 | // allow optionalDependencies 44 | 'import/no-extraneous-dependencies': ['error', { 45 | optionalDependencies: ['test/unit/index.js'] 46 | }], 47 | // allow debugger during development 48 | 'no-debugger': process.env.NODE_ENV === 'production' ? 
'error' : 'off' 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /dmriprepViewer/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules/ 3 | /dist/ 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | /test/unit/coverage/ 8 | /test/e2e/reports/ 9 | selenium-debug.log 10 | 11 | # Editor directories and files 12 | .idea 13 | .vscode 14 | *.suo 15 | *.ntvs* 16 | *.njsproj 17 | *.sln 18 | -------------------------------------------------------------------------------- /dmriprepViewer/.postcssrc.js: -------------------------------------------------------------------------------- 1 | // https://github.com/michael-ciniawsky/postcss-load-config 2 | 3 | module.exports = { 4 | "plugins": { 5 | "postcss-import": {}, 6 | "postcss-url": {}, 7 | // to edit target browsers: use "browserslist" field in package.json 8 | "autoprefixer": {} 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /dmriprepViewer/.snyk: -------------------------------------------------------------------------------- 1 | # Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. 
2 | version: v1.13.5 3 | ignore: {} 4 | # patches apply the minimum changes required to fix a vulnerability 5 | patch: 6 | SNYK-JS-LODASH-450202: 7 | - lodash: 8 | patched: '2019-07-04T07:28:09.204Z' 9 | - bootstrap-vue > opencollective > inquirer > lodash: 10 | patched: '2019-07-04T07:28:09.204Z' 11 | -------------------------------------------------------------------------------- /dmriprepViewer/README.md: -------------------------------------------------------------------------------- 1 | # dmriprep-viewer 2 | 3 | > Viewer for dmriprep reports 4 | 5 | ## Build Setup 6 | 7 | ``` bash 8 | # install dependencies 9 | npm install 10 | 11 | # serve with hot reload at localhost:8080 12 | npm run dev 13 | 14 | # build for production with minification 15 | npm run build 16 | 17 | # build for production and view the bundle analyzer report 18 | npm run build --report 19 | 20 | # run unit tests 21 | npm run unit 22 | 23 | # run e2e tests 24 | npm run e2e 25 | 26 | # run all tests 27 | npm test 28 | ``` 29 | 30 | For a detailed explanation on how things work, check out the [guide](http://vuejs-templates.github.io/webpack/) and [docs for vue-loader](http://vuejs.github.io/vue-loader). 
31 | -------------------------------------------------------------------------------- /dmriprepViewer/build/build.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | require('./check-versions')() 3 | 4 | process.env.NODE_ENV = 'production' 5 | 6 | const ora = require('ora') 7 | const rm = require('rimraf') 8 | const path = require('path') 9 | const chalk = require('chalk') 10 | const webpack = require('webpack') 11 | const config = require('../config') 12 | const webpackConfig = require('./webpack.prod.conf') 13 | 14 | const spinner = ora('building for production...') 15 | spinner.start() 16 | 17 | rm(path.join(config.build.assetsRoot, config.build.assetsSubDirectory), err => { 18 | if (err) throw err 19 | webpack(webpackConfig, (err, stats) => { 20 | spinner.stop() 21 | if (err) throw err 22 | process.stdout.write(stats.toString({ 23 | colors: true, 24 | modules: false, 25 | children: false, // If you are using ts-loader, setting this to true will make TypeScript errors show up during build. 
26 | chunks: false, 27 | chunkModules: false 28 | }) + '\n\n') 29 | 30 | if (stats.hasErrors()) { 31 | console.log(chalk.red(' Build failed with errors.\n')) 32 | process.exit(1) 33 | } 34 | 35 | console.log(chalk.cyan(' Build complete.\n')) 36 | console.log(chalk.yellow( 37 | ' Tip: built files are meant to be served over an HTTP server.\n' + 38 | ' Opening index.html over file:// won\'t work.\n' 39 | )) 40 | }) 41 | }) 42 | -------------------------------------------------------------------------------- /dmriprepViewer/build/check-versions.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const chalk = require('chalk') 3 | const semver = require('semver') 4 | const packageConfig = require('../package.json') 5 | const shell = require('shelljs') 6 | 7 | function exec (cmd) { 8 | return require('child_process').execSync(cmd).toString().trim() 9 | } 10 | 11 | const versionRequirements = [ 12 | { 13 | name: 'node', 14 | currentVersion: semver.clean(process.version), 15 | versionRequirement: packageConfig.engines.node 16 | } 17 | ] 18 | 19 | if (shell.which('npm')) { 20 | versionRequirements.push({ 21 | name: 'npm', 22 | currentVersion: exec('npm --version'), 23 | versionRequirement: packageConfig.engines.npm 24 | }) 25 | } 26 | 27 | module.exports = function () { 28 | const warnings = [] 29 | 30 | for (let i = 0; i < versionRequirements.length; i++) { 31 | const mod = versionRequirements[i] 32 | 33 | if (!semver.satisfies(mod.currentVersion, mod.versionRequirement)) { 34 | warnings.push(mod.name + ': ' + 35 | chalk.red(mod.currentVersion) + ' should be ' + 36 | chalk.green(mod.versionRequirement) 37 | ) 38 | } 39 | } 40 | 41 | if (warnings.length) { 42 | console.log('') 43 | console.log(chalk.yellow('To use this template, you must update following to modules:')) 44 | console.log() 45 | 46 | for (let i = 0; i < warnings.length; i++) { 47 | const warning = warnings[i] 48 | console.log(' ' + warning) 49 | } 50 | 51 
| console.log() 52 | process.exit(1) 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /dmriprepViewer/build/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/dmriprep/92945a4f552ac980c4dae0b28714c2d7cf7582cf/dmriprepViewer/build/logo.png -------------------------------------------------------------------------------- /dmriprepViewer/build/utils.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const path = require('path') 3 | const config = require('../config') 4 | const ExtractTextPlugin = require('extract-text-webpack-plugin') 5 | const packageConfig = require('../package.json') 6 | 7 | exports.assetsPath = function (_path) { 8 | const assetsSubDirectory = process.env.NODE_ENV === 'production' 9 | ? config.build.assetsSubDirectory 10 | : config.dev.assetsSubDirectory 11 | 12 | return path.posix.join(assetsSubDirectory, _path) 13 | } 14 | 15 | exports.cssLoaders = function (options) { 16 | options = options || {} 17 | 18 | const cssLoader = { 19 | loader: 'css-loader', 20 | options: { 21 | sourceMap: options.sourceMap 22 | } 23 | } 24 | 25 | const postcssLoader = { 26 | loader: 'postcss-loader', 27 | options: { 28 | sourceMap: options.sourceMap 29 | } 30 | } 31 | 32 | // generate loader string to be used with extract text plugin 33 | function generateLoaders (loader, loaderOptions) { 34 | const loaders = options.usePostCSS ? 
[cssLoader, postcssLoader] : [cssLoader] 35 | 36 | if (loader) { 37 | loaders.push({ 38 | loader: loader + '-loader', 39 | options: Object.assign({}, loaderOptions, { 40 | sourceMap: options.sourceMap 41 | }) 42 | }) 43 | } 44 | 45 | // Extract CSS when that option is specified 46 | // (which is the case during production build) 47 | if (options.extract) { 48 | return ExtractTextPlugin.extract({ 49 | use: loaders, 50 | fallback: 'vue-style-loader' 51 | }) 52 | } else { 53 | return ['vue-style-loader'].concat(loaders) 54 | } 55 | } 56 | 57 | // https://vue-loader.vuejs.org/en/configurations/extract-css.html 58 | return { 59 | css: generateLoaders(), 60 | postcss: generateLoaders(), 61 | less: generateLoaders('less'), 62 | sass: generateLoaders('sass', { indentedSyntax: true }), 63 | scss: generateLoaders('sass'), 64 | stylus: generateLoaders('stylus'), 65 | styl: generateLoaders('stylus') 66 | } 67 | } 68 | 69 | // Generate loaders for standalone style files (outside of .vue) 70 | exports.styleLoaders = function (options) { 71 | const output = [] 72 | const loaders = exports.cssLoaders(options) 73 | 74 | for (const extension in loaders) { 75 | const loader = loaders[extension] 76 | output.push({ 77 | test: new RegExp('\\.' 
+ extension + '$'), 78 | use: loader 79 | }) 80 | } 81 | 82 | return output 83 | } 84 | 85 | exports.createNotifierCallback = () => { 86 | const notifier = require('node-notifier') 87 | 88 | return (severity, errors) => { 89 | if (severity !== 'error') return 90 | 91 | const error = errors[0] 92 | const filename = error.file && error.file.split('!').pop() 93 | 94 | notifier.notify({ 95 | title: packageConfig.name, 96 | message: severity + ': ' + error.name, 97 | subtitle: filename || '', 98 | icon: path.join(__dirname, 'logo.png') 99 | }) 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /dmriprepViewer/build/vue-loader.conf.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const utils = require('./utils') 3 | const config = require('../config') 4 | const isProduction = process.env.NODE_ENV === 'production' 5 | const sourceMapEnabled = isProduction 6 | ? config.build.productionSourceMap 7 | : config.dev.cssSourceMap 8 | 9 | module.exports = { 10 | loaders: utils.cssLoaders({ 11 | sourceMap: sourceMapEnabled, 12 | extract: isProduction 13 | }), 14 | cssSourceMap: sourceMapEnabled, 15 | cacheBusting: config.dev.cacheBusting, 16 | transformToRequire: { 17 | video: ['src', 'poster'], 18 | source: 'src', 19 | img: 'src', 20 | image: 'xlink:href' 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /dmriprepViewer/build/webpack.base.conf.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const path = require('path') 3 | const utils = require('./utils') 4 | const config = require('../config') 5 | const vueLoaderConfig = require('./vue-loader.conf') 6 | 7 | function resolve (dir) { 8 | return path.join(__dirname, '..', dir) 9 | } 10 | 11 | const createLintingRule = () => ({ 12 | test: /\.(js|vue)$/, 13 | loader: 'eslint-loader', 14 | enforce: 'pre', 15 | include: 
[resolve('src'), resolve('test')], 16 | options: { 17 | formatter: require('eslint-friendly-formatter'), 18 | emitWarning: !config.dev.showEslintErrorsInOverlay 19 | } 20 | }) 21 | 22 | module.exports = { 23 | context: path.resolve(__dirname, '../'), 24 | entry: { 25 | app: './src/main.js' 26 | }, 27 | output: { 28 | path: config.build.assetsRoot, 29 | filename: '[name].js', 30 | publicPath: process.env.NODE_ENV === 'production' 31 | ? config.build.assetsPublicPath 32 | : config.dev.assetsPublicPath 33 | }, 34 | resolve: { 35 | extensions: ['.js', '.vue', '.json'], 36 | alias: { 37 | 'vue$': 'vue/dist/vue.esm.js', 38 | '@': resolve('src'), 39 | } 40 | }, 41 | module: { 42 | rules: [ 43 | ...(config.dev.useEslint ? [createLintingRule()] : []), 44 | { 45 | test: /\.vue$/, 46 | loader: 'vue-loader', 47 | options: vueLoaderConfig 48 | }, 49 | { 50 | test: /\.js$/, 51 | loader: 'babel-loader', 52 | include: [resolve('src'), resolve('test'), resolve('node_modules/webpack-dev-server/client')] 53 | }, 54 | { 55 | test: /\.(png|jpe?g|gif|svg)(\?.*)?$/, 56 | loader: 'url-loader', 57 | options: { 58 | limit: 10000, 59 | name: utils.assetsPath('img/[name].[hash:7].[ext]') 60 | } 61 | }, 62 | { 63 | test: /\.(mp4|webm|ogg|mp3|wav|flac|aac)(\?.*)?$/, 64 | loader: 'url-loader', 65 | options: { 66 | limit: 10000, 67 | name: utils.assetsPath('media/[name].[hash:7].[ext]') 68 | } 69 | }, 70 | { 71 | test: /\.(woff2?|eot|ttf|otf)(\?.*)?$/, 72 | loader: 'url-loader', 73 | options: { 74 | limit: 10000, 75 | name: utils.assetsPath('fonts/[name].[hash:7].[ext]') 76 | } 77 | } 78 | ] 79 | }, 80 | node: { 81 | // prevent webpack from injecting useless setImmediate polyfill because Vue 82 | // source contains it (although only uses it if it's native). 
83 | setImmediate: false, 84 | // prevent webpack from injecting mocks to Node native modules 85 | // that does not make sense for the client 86 | dgram: 'empty', 87 | fs: 'empty', 88 | net: 'empty', 89 | tls: 'empty', 90 | child_process: 'empty' 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /dmriprepViewer/build/webpack.dev.conf.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const utils = require('./utils') 3 | const webpack = require('webpack') 4 | const config = require('../config') 5 | const merge = require('webpack-merge') 6 | const path = require('path') 7 | const baseWebpackConfig = require('./webpack.base.conf') 8 | const CopyWebpackPlugin = require('copy-webpack-plugin') 9 | const HtmlWebpackPlugin = require('html-webpack-plugin') 10 | const FriendlyErrorsPlugin = require('friendly-errors-webpack-plugin') 11 | const portfinder = require('portfinder') 12 | 13 | const HOST = process.env.HOST 14 | const PORT = process.env.PORT && Number(process.env.PORT) 15 | 16 | const devWebpackConfig = merge(baseWebpackConfig, { 17 | module: { 18 | rules: utils.styleLoaders({ sourceMap: config.dev.cssSourceMap, usePostCSS: true }) 19 | }, 20 | // cheap-module-eval-source-map is faster for development 21 | devtool: config.dev.devtool, 22 | 23 | // these devServer options should be customized in /config/index.js 24 | devServer: { 25 | clientLogLevel: 'warning', 26 | historyApiFallback: { 27 | rewrites: [ 28 | { from: /.*/, to: path.posix.join(config.dev.assetsPublicPath, 'index.html') }, 29 | ], 30 | }, 31 | hot: true, 32 | contentBase: false, // since we use CopyWebpackPlugin. 33 | compress: true, 34 | host: HOST || config.dev.host, 35 | port: PORT || config.dev.port, 36 | open: config.dev.autoOpenBrowser, 37 | overlay: config.dev.errorOverlay 38 | ? 
{ warnings: false, errors: true } 39 | : false, 40 | publicPath: config.dev.assetsPublicPath, 41 | proxy: config.dev.proxyTable, 42 | quiet: true, // necessary for FriendlyErrorsPlugin 43 | watchOptions: { 44 | poll: config.dev.poll, 45 | } 46 | }, 47 | plugins: [ 48 | new webpack.DefinePlugin({ 49 | 'process.env': require('../config/dev.env') 50 | }), 51 | new webpack.HotModuleReplacementPlugin(), 52 | new webpack.NamedModulesPlugin(), // HMR shows correct file names in console on update. 53 | new webpack.NoEmitOnErrorsPlugin(), 54 | // https://github.com/ampedandwired/html-webpack-plugin 55 | new HtmlWebpackPlugin({ 56 | filename: 'index.html', 57 | template: 'index.html', 58 | inject: true 59 | }), 60 | // copy custom static assets 61 | new CopyWebpackPlugin([ 62 | { 63 | from: path.resolve(__dirname, '../static'), 64 | to: config.dev.assetsSubDirectory, 65 | ignore: ['.*'] 66 | } 67 | ]) 68 | ] 69 | }) 70 | 71 | module.exports = new Promise((resolve, reject) => { 72 | portfinder.basePort = process.env.PORT || config.dev.port 73 | portfinder.getPort((err, port) => { 74 | if (err) { 75 | reject(err) 76 | } else { 77 | // publish the new Port, necessary for e2e tests 78 | process.env.PORT = port 79 | // add port to devServer config 80 | devWebpackConfig.devServer.port = port 81 | 82 | // Add FriendlyErrorsPlugin 83 | devWebpackConfig.plugins.push(new FriendlyErrorsPlugin({ 84 | compilationSuccessInfo: { 85 | messages: [`Your application is running here: http://${devWebpackConfig.devServer.host}:${port}`], 86 | }, 87 | onErrors: config.dev.notifyOnErrors 88 | ? 
utils.createNotifierCallback() 89 | : undefined 90 | })) 91 | 92 | resolve(devWebpackConfig) 93 | } 94 | }) 95 | }) 96 | -------------------------------------------------------------------------------- /dmriprepViewer/build/webpack.prod.conf.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const path = require('path') 3 | const utils = require('./utils') 4 | const webpack = require('webpack') 5 | const config = require('../config') 6 | const merge = require('webpack-merge') 7 | const baseWebpackConfig = require('./webpack.base.conf') 8 | const CopyWebpackPlugin = require('copy-webpack-plugin') 9 | const HtmlWebpackPlugin = require('html-webpack-plugin') 10 | const ExtractTextPlugin = require('extract-text-webpack-plugin') 11 | const OptimizeCSSPlugin = require('optimize-css-assets-webpack-plugin') 12 | const UglifyJsPlugin = require('uglifyjs-webpack-plugin') 13 | 14 | const env = process.env.NODE_ENV === 'testing' 15 | ? require('../config/test.env') 16 | : require('../config/prod.env') 17 | 18 | const webpackConfig = merge(baseWebpackConfig, { 19 | module: { 20 | rules: utils.styleLoaders({ 21 | sourceMap: config.build.productionSourceMap, 22 | extract: true, 23 | usePostCSS: true 24 | }) 25 | }, 26 | devtool: config.build.productionSourceMap ? 
config.build.devtool : false, 27 | output: { 28 | path: config.build.assetsRoot, 29 | filename: utils.assetsPath('js/[name].[chunkhash].js'), 30 | chunkFilename: utils.assetsPath('js/[id].[chunkhash].js') 31 | }, 32 | plugins: [ 33 | // http://vuejs.github.io/vue-loader/en/workflow/production.html 34 | new webpack.DefinePlugin({ 35 | 'process.env': env 36 | }), 37 | new UglifyJsPlugin({ 38 | uglifyOptions: { 39 | compress: { 40 | warnings: false 41 | } 42 | }, 43 | sourceMap: config.build.productionSourceMap, 44 | parallel: true 45 | }), 46 | // extract css into its own file 47 | new ExtractTextPlugin({ 48 | filename: utils.assetsPath('css/[name].[contenthash].css'), 49 | // Setting the following option to `false` will not extract CSS from codesplit chunks. 50 | // Their CSS will instead be inserted dynamically with style-loader when the codesplit chunk has been loaded by webpack. 51 | // It's currently set to `true` because we are seeing that sourcemaps are included in the codesplit bundle as well when it's `false`, 52 | // increasing file size: https://github.com/vuejs-templates/webpack/issues/1110 53 | allChunks: true, 54 | }), 55 | // Compress extracted CSS. We are using this plugin so that possible 56 | // duplicated CSS from different components can be deduped. 57 | new OptimizeCSSPlugin({ 58 | cssProcessorOptions: config.build.productionSourceMap 59 | ? { safe: true, map: { inline: false } } 60 | : { safe: true } 61 | }), 62 | // generate dist index.html with correct asset hash for caching. 63 | // you can customize output by editing /index.html 64 | // see https://github.com/ampedandwired/html-webpack-plugin 65 | new HtmlWebpackPlugin({ 66 | filename: process.env.NODE_ENV === 'testing' 67 | ? 
'index.html' 68 | : config.build.index, 69 | template: 'index.html', 70 | inject: true, 71 | minify: { 72 | removeComments: true, 73 | collapseWhitespace: true, 74 | removeAttributeQuotes: true 75 | // more options: 76 | // https://github.com/kangax/html-minifier#options-quick-reference 77 | }, 78 | // necessary to consistently work with multiple chunks via CommonsChunkPlugin 79 | chunksSortMode: 'dependency' 80 | }), 81 | // keep module.id stable when vendor modules does not change 82 | new webpack.HashedModuleIdsPlugin(), 83 | // enable scope hoisting 84 | new webpack.optimize.ModuleConcatenationPlugin(), 85 | // split vendor js into its own file 86 | new webpack.optimize.CommonsChunkPlugin({ 87 | name: 'vendor', 88 | minChunks (module) { 89 | // any required modules inside node_modules are extracted to vendor 90 | return ( 91 | module.resource && 92 | /\.js$/.test(module.resource) && 93 | module.resource.indexOf( 94 | path.join(__dirname, '../node_modules') 95 | ) === 0 96 | ) 97 | } 98 | }), 99 | // extract webpack runtime and module manifest to its own file in order to 100 | // prevent vendor hash from being updated whenever app bundle is updated 101 | new webpack.optimize.CommonsChunkPlugin({ 102 | name: 'manifest', 103 | minChunks: Infinity 104 | }), 105 | // This instance extracts shared chunks from code splitted chunks and bundles them 106 | // in a separate chunk, similar to the vendor chunk 107 | // see: https://webpack.js.org/plugins/commons-chunk-plugin/#extra-async-commons-chunk 108 | new webpack.optimize.CommonsChunkPlugin({ 109 | name: 'app', 110 | async: 'vendor-async', 111 | children: true, 112 | minChunks: 3 113 | }), 114 | 115 | // copy custom static assets 116 | new CopyWebpackPlugin([ 117 | { 118 | from: path.resolve(__dirname, '../static'), 119 | to: config.build.assetsSubDirectory, 120 | ignore: ['.*'] 121 | } 122 | ]) 123 | ] 124 | }) 125 | 126 | if (config.build.productionGzip) { 127 | const CompressionWebpackPlugin = 
require('compression-webpack-plugin') 128 | 129 | webpackConfig.plugins.push( 130 | new CompressionWebpackPlugin({ 131 | asset: '[path].gz[query]', 132 | algorithm: 'gzip', 133 | test: new RegExp( 134 | '\\.(' + 135 | config.build.productionGzipExtensions.join('|') + 136 | ')$' 137 | ), 138 | threshold: 10240, 139 | minRatio: 0.8 140 | }) 141 | ) 142 | } 143 | 144 | if (config.build.bundleAnalyzerReport) { 145 | const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin 146 | webpackConfig.plugins.push(new BundleAnalyzerPlugin()) 147 | } 148 | 149 | module.exports = webpackConfig 150 | -------------------------------------------------------------------------------- /dmriprepViewer/config/dev.env.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const merge = require('webpack-merge') 3 | const prodEnv = require('./prod.env') 4 | 5 | module.exports = merge(prodEnv, { 6 | NODE_ENV: '"development"' 7 | }) 8 | -------------------------------------------------------------------------------- /dmriprepViewer/config/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | // Template version: 1.3.1 3 | // see http://vuejs-templates.github.io/webpack for documentation. 4 | 5 | const path = require('path') 6 | 7 | module.exports = { 8 | dev: { 9 | 10 | // Paths 11 | assetsSubDirectory: 'static', 12 | assetsPublicPath: '/', 13 | proxyTable: {}, 14 | 15 | // Various Dev Server settings 16 | host: 'localhost', // can be overwritten by process.env.HOST 17 | port: 8080, // can be overwritten by process.env.PORT, if port is in use, a free one will be determined 18 | autoOpenBrowser: false, 19 | errorOverlay: true, 20 | notifyOnErrors: true, 21 | poll: false, // https://webpack.js.org/configuration/dev-server/#devserver-watchoptions- 22 | 23 | // Use Eslint Loader? 
24 | // If true, your code will be linted during bundling and 25 | // linting errors and warnings will be shown in the console. 26 | useEslint: true, 27 | // If true, eslint errors and warnings will also be shown in the error overlay 28 | // in the browser. 29 | showEslintErrorsInOverlay: false, 30 | 31 | /** 32 | * Source Maps 33 | */ 34 | 35 | // https://webpack.js.org/configuration/devtool/#development 36 | devtool: 'cheap-module-eval-source-map', 37 | 38 | // If you have problems debugging vue-files in devtools, 39 | // set this to false - it *may* help 40 | // https://vue-loader.vuejs.org/en/options.html#cachebusting 41 | cacheBusting: true, 42 | 43 | cssSourceMap: true 44 | }, 45 | 46 | build: { 47 | // Template for index.html 48 | index: path.resolve(__dirname, '../dist/index.html'), 49 | 50 | // Paths 51 | assetsRoot: path.resolve(__dirname, '../dist'), 52 | assetsSubDirectory: 'static', 53 | assetsPublicPath: '/dmriprep/', 54 | 55 | /** 56 | * Source Maps 57 | */ 58 | 59 | productionSourceMap: true, 60 | // https://webpack.js.org/configuration/devtool/#production 61 | devtool: '#source-map', 62 | 63 | // Gzip off by default as many popular static hosts such as 64 | // Surge or Netlify already gzip all static assets for you. 
65 | // Before setting to `true`, make sure to: 66 | // npm install --save-dev compression-webpack-plugin 67 | productionGzip: false, 68 | productionGzipExtensions: ['js', 'css'], 69 | 70 | // Run the build command with an extra argument to 71 | // View the bundle analyzer report after build finishes: 72 | // `npm run build --report` 73 | // Set to `true` or `false` to always turn it on or off 74 | bundleAnalyzerReport: process.env.npm_config_report 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /dmriprepViewer/config/prod.env.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | module.exports = { 3 | NODE_ENV: '"production"' 4 | } 5 | -------------------------------------------------------------------------------- /dmriprepViewer/config/test.env.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const merge = require('webpack-merge') 3 | const devEnv = require('./dev.env') 4 | 5 | module.exports = merge(devEnv, { 6 | NODE_ENV: '"testing"' 7 | }) 8 | -------------------------------------------------------------------------------- /dmriprepViewer/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e # exit with nonzero exit code if anything fails 4 | 5 | if [[ $TRAVIS_BRANCH == "master" && $TRAVIS_PULL_REQUEST == "false" ]]; then 6 | 7 | echo "Starting to update gh-pages\n" 8 | 9 | #copy data we're interested in to other place 10 | cp -R dist $HOME/dist 11 | 12 | #go to home and setup git 13 | cd $HOME 14 | git config --global user.email "travis@travis-ci.org" 15 | git config --global user.name "Travis" 16 | 17 | #using token clone gh-pages branch 18 | git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/${GH_USER}/${GH_REPO}.git gh-pages > /dev/null 19 | 20 | #go into directory and copy data we're interested in to that 
directory 21 | cd gh-pages 22 | cp -Rf $HOME/dist/* . 23 | 24 | echo "Allow files with underscore https://help.github.com/articles/files-that-start-with-an-underscore-are-missing/" > .nojekyll 25 | echo "[View live](https://${GH_USER}.github.io/${GH_REPO}/)" > README.md 26 | 27 | #add, commit and push files 28 | git add -f . 29 | git commit -m "Travis build $TRAVIS_BUILD_NUMBER" 30 | git push -fq origin gh-pages > /dev/null 31 | 32 | echo "Done updating gh-pages\n" 33 | 34 | else 35 | echo "Skipped updating gh-pages, because build is not triggered from the master branch." 36 | fi; 37 | -------------------------------------------------------------------------------- /dmriprepViewer/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | dmriprep-viewer 7 | 8 | 9 |
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /dmriprepViewer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dmriprep-viewer", 3 | "version": "1.0.0", 4 | "description": "Viewer for dmriprep reports", 5 | "author": "akeshavan ", 6 | "private": true, 7 | "scripts": { 8 | "dev": "webpack-dev-server --inline --progress --config build/webpack.dev.conf.js", 9 | "start": "npm run dev", 10 | "unit": "jest --config test/unit/jest.conf.js --coverage", 11 | "e2e": "node test/e2e/runner.js", 12 | "test": "npm run unit && npm run e2e", 13 | "lint": "eslint --ext .js,.vue src test/unit test/e2e/specs", 14 | "build": "node build/build.js", 15 | "snyk-protect": "snyk protect", 16 | "prepare": "npm run snyk-protect" 17 | }, 18 | "dependencies": { 19 | "axios": "^0.18.0", 20 | "bootstrap-vue": "^2.0.0-rc.11", 21 | "brainsprite.js": "git+https://git@github.com/SIMEXP/brainsprite.js.git", 22 | "d3": "^5.7.0", 23 | "lodash": "^4.17.11", 24 | "vue": "^2.5.2", 25 | "vue-router": "^3.0.1", 26 | "vue-slider-component": "^2.7.7", 27 | "snyk": "^1.189.0" 28 | }, 29 | "devDependencies": { 30 | "autoprefixer": "^7.1.2", 31 | "babel-core": "^6.22.1", 32 | "babel-eslint": "^8.2.1", 33 | "babel-helper-vue-jsx-merge-props": "^2.0.3", 34 | "babel-jest": "^21.0.2", 35 | "babel-loader": "^7.1.1", 36 | "babel-plugin-dynamic-import-node": "^1.2.0", 37 | "babel-plugin-syntax-jsx": "^6.18.0", 38 | "babel-plugin-transform-es2015-modules-commonjs": "^6.26.0", 39 | "babel-plugin-transform-runtime": "^6.22.0", 40 | "babel-plugin-transform-vue-jsx": "^3.5.0", 41 | "babel-preset-env": "^1.3.2", 42 | "babel-preset-stage-2": "^6.22.0", 43 | "babel-register": "^6.22.0", 44 | "chalk": "^2.0.1", 45 | "chromedriver": "^2.27.2", 46 | "copy-webpack-plugin": "^4.0.1", 47 | "cross-spawn": "^5.0.1", 48 | "css-loader": "^0.28.0", 49 | "eslint": "^4.15.0", 50 | "eslint-config-airbnb-base": "^11.3.0", 
51 | "eslint-friendly-formatter": "^3.0.0", 52 | "eslint-import-resolver-webpack": "^0.8.3", 53 | "eslint-loader": "^1.7.1", 54 | "eslint-plugin-import": "^2.7.0", 55 | "eslint-plugin-vue": "^4.0.0", 56 | "extract-text-webpack-plugin": "^3.0.0", 57 | "file-loader": "^1.1.4", 58 | "friendly-errors-webpack-plugin": "^1.6.1", 59 | "html-webpack-plugin": "^2.30.1", 60 | "jest": "^22.0.4", 61 | "jest-serializer-vue": "^0.3.0", 62 | "nightwatch": "^0.9.12", 63 | "node-notifier": "^5.1.2", 64 | "optimize-css-assets-webpack-plugin": "^3.2.0", 65 | "ora": "^1.2.0", 66 | "portfinder": "^1.0.13", 67 | "postcss-import": "^11.0.0", 68 | "postcss-loader": "^2.0.8", 69 | "postcss-url": "^7.2.1", 70 | "rimraf": "^2.6.0", 71 | "selenium-server": "^3.0.1", 72 | "semver": "^5.3.0", 73 | "shelljs": "^0.7.6", 74 | "uglifyjs-webpack-plugin": "^1.1.1", 75 | "url-loader": "^0.5.8", 76 | "vue-jest": "^1.0.2", 77 | "vue-loader": "^13.3.0", 78 | "vue-style-loader": "^3.0.1", 79 | "vue-template-compiler": "^2.5.2", 80 | "webpack": "^3.6.0", 81 | "webpack-bundle-analyzer": "^2.9.0", 82 | "webpack-dev-server": "^2.9.1", 83 | "webpack-merge": "^4.1.0" 84 | }, 85 | "engines": { 86 | "node": ">= 6.0.0", 87 | "npm": ">= 3.0.0" 88 | }, 89 | "browserslist": [ 90 | "> 1%", 91 | "last 2 versions", 92 | "not ie <= 8" 93 | ], 94 | "snyk": true 95 | } 96 | -------------------------------------------------------------------------------- /dmriprepViewer/src/App.vue: -------------------------------------------------------------------------------- 1 | 6 | 7 | 20 | 21 | 31 | -------------------------------------------------------------------------------- /dmriprepViewer/src/assets/logo.svg: -------------------------------------------------------------------------------- 1 | alien science -------------------------------------------------------------------------------- /dmriprepViewer/src/components/BrainSprite.vue: -------------------------------------------------------------------------------- 1 | 17 | 18 | 31 
| 32 | 76 | -------------------------------------------------------------------------------- /dmriprepViewer/src/components/Bucket.vue: -------------------------------------------------------------------------------- 1 | 41 | 42 | 192 | 193 | 196 | -------------------------------------------------------------------------------- /dmriprepViewer/src/components/GroupStats.vue: -------------------------------------------------------------------------------- 1 | 27 | 28 | 94 | 95 | 97 | -------------------------------------------------------------------------------- /dmriprepViewer/src/components/HelloWorld.vue: -------------------------------------------------------------------------------- 1 | 33 | 34 | 92 | 93 | 94 | 115 | -------------------------------------------------------------------------------- /dmriprepViewer/src/components/LineChart.vue: -------------------------------------------------------------------------------- 1 | 4 | 5 | 185 | 186 | 192 | -------------------------------------------------------------------------------- /dmriprepViewer/src/components/Report.vue: -------------------------------------------------------------------------------- 1 | 76 | 77 | 159 | 160 | 163 | -------------------------------------------------------------------------------- /dmriprepViewer/src/components/Sprite4D.vue: -------------------------------------------------------------------------------- 1 | 7 | 8 | 58 | 59 | 64 | -------------------------------------------------------------------------------- /dmriprepViewer/src/components/brainsprite.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | function brainsprite(params) { 3 | 4 | // Function to add nearest neighbour interpolation to a canvas 5 | function setNearestNeighbour(context,flag){ 6 | context.imageSmoothingEnabled = flag; 7 | return context; 8 | } 9 | 10 | // Initialize the brain object 11 | var brain = {}; 12 | 13 | // Initialize the brain object 
14 | var defaultParams = { 15 | // Flag for "NaN" image values, i.e. unable to read values 16 | nanValue: false, 17 | 18 | // Smoothing of the main slices 19 | smooth: false, 20 | 21 | // Draw (or not) the current value 22 | flagValue: false, 23 | 24 | // Background color for the canvas 25 | colorBackground: '#000000', 26 | 27 | // Flag to turn on/off slice numbers 28 | flagCoordinates: false, 29 | 30 | // Origins and voxel size 31 | origin: { X: 0, Y: 0, Z: 0 }, 32 | voxelSize: 1, 33 | 34 | // Affine transformation 35 | affine: false, 36 | 37 | // Colorbar size parameters 38 | heightColorBar: 0.04, 39 | 40 | // Font parameters 41 | sizeFont: 0.075, 42 | colorFont: '#FFFFFF', 43 | 44 | // Number of decimals displayed 45 | nbDecimals: 3, 46 | 47 | // Flag to turn on/off the crosshair 48 | crosshair: false, 49 | 50 | // Color of the crosshair 51 | colorCrosshair: "#0000FF", 52 | 53 | // Size crosshair - percentage of the field of view 54 | sizeCrosshair: 0.9, 55 | 56 | // Optional title for the viewer 57 | title: false, 58 | 59 | // Coordinates for the initial cut 60 | numSlice: false, 61 | } 62 | 63 | var brain = Object.assign({}, defaultParams, params); 64 | 65 | // Build affine, if not specified 66 | if (typeof brain.affine === 'boolean' && brain.affine === false) { 67 | brain.affine = [[brain.voxelSize , 0 , 0 , -brain.origin.X], 68 | [0 , brain.voxelSize , 0 , -brain.origin.Y], 69 | [0 , 0 , brain.voxelSize , -brain.origin.Z], 70 | [0 , 0 , 0 , 1]] 71 | 72 | } 73 | 74 | // The main canvas, where the three slices are drawn 75 | brain.canvas = document.getElementById(params.canvas); 76 | brain.context = brain.canvas.getContext('2d'); 77 | brain.context = setNearestNeighbour(brain.context,brain.smooth); 78 | 79 | // An in-memory canvas to draw intermediate reconstruction 80 | // of the coronal slice, at native resolution 81 | brain.canvasY = document.createElement('canvas'); 82 | brain.contextY = brain.canvasY.getContext('2d'); 83 | 84 | // An in-memory canvas to 
draw intermediate reconstruction 85 | // of the axial slice, at native resolution 86 | brain.canvasZ = document.createElement('canvas'); 87 | brain.contextZ = brain.canvasZ.getContext('2d'); 88 | 89 | // An in-memory canvas to read the value of pixels 90 | brain.canvasRead = document.createElement('canvas'); 91 | brain.contextRead = brain.canvasRead.getContext('2d'); 92 | brain.canvasRead.width = 1; 93 | brain.canvasRead.height = 1; 94 | 95 | // Onclick events 96 | brain.onclick = typeof params.onclick !== 'undefined' ? params.onclick : ""; 97 | 98 | // Font parameters 99 | if (brain.flagCoordinates) { 100 | brain.spaceFont = 0.1; 101 | } else { 102 | brain.spaceFont = 0; 103 | }; 104 | 105 | //******************// 106 | // The sprite image // 107 | //******************// 108 | brain.sprite = document.getElementById(params.sprite); 109 | 110 | // Number of columns and rows in the sprite 111 | brain.nbCol = brain.sprite.width/params.nbSlice.Y; 112 | brain.nbRow = brain.sprite.height/params.nbSlice.Z; 113 | // Number of slices 114 | 115 | brain.nbSlice = { 116 | X: typeof params.nbSlice.X !== 'undefined' ? 
params.nbSlice.X : brain.nbCol*brain.nbRow, 117 | Y: params.nbSlice.Y, 118 | Z: params.nbSlice.Z 119 | }; 120 | 121 | // width and height for the canvas 122 | brain.widthCanvas = {'X':0 , 'Y':0 , 'Z':0 }; 123 | brain.heightCanvas = {'X':0 , 'Y':0 , 'Z':0 , 'max':0}; 124 | 125 | // the slice numbers 126 | if (brain.numSlice == false) { 127 | brain.numSlice = { X: Math.floor(brain.nbSlice.X/2), 128 | Y: Math.floor(brain.nbSlice.Y/2), 129 | Z: Math.floor(brain.nbSlice.Z/2)} 130 | }; 131 | 132 | // Coordinates for current slices - these will get updated when drawing the slices 133 | brain.coordinatesSlice = {'X': 0, 'Y': 0, 'Z': 0 }; 134 | 135 | //*************// 136 | // The planes // 137 | //*************// 138 | brain.planes = {}; 139 | // A master sagital canvas for the merge of background and overlay 140 | brain.planes.canvasMaster = document.createElement('canvas'); 141 | brain.planes.contextMaster = brain.planes.canvasMaster.getContext('2d'); 142 | 143 | //*************// 144 | // The overlay // 145 | //*************// 146 | params.overlay = typeof params.overlay !== 'undefined' ? params.overlay : false; 147 | if (params.overlay) { 148 | // Initialize the overlay 149 | brain.overlay = {}; 150 | // Get the sprite 151 | brain.overlay.sprite = document.getElementById(params.overlay.sprite); 152 | // Ratio between the resolution of the foreground and background 153 | // Number of columns and rows in the overlay 154 | brain.overlay.nbCol = brain.overlay.sprite.width/params.overlay.nbSlice.Y; 155 | brain.overlay.nbRow = brain.overlay.sprite.height/params.overlay.nbSlice.Z; 156 | // Number of slices in the overlay 157 | brain.overlay.nbSlice = { 158 | X: typeof params.overlay.nbSlice.X !== 'undefined' ? params.overlay.nbSlice.X : brain.overlay.nbCol*brain.overlay.nbRow, 159 | Y: params.overlay.nbSlice.Y, 160 | Z: params.overlay.nbSlice.Z 161 | }; 162 | // opacity 163 | brain.overlay.opacity = typeof params.overlay.opacity !== 'undefined' ? 
params.overlay.opacity : 1; 164 | }; 165 | 166 | //**************// 167 | // The colormap // 168 | //**************// 169 | params.colorMap = typeof params.colorMap !== 'undefined' ? params.colorMap: false; 170 | if (params.colorMap) { 171 | // Initialize the color map 172 | brain.colorMap = {}; 173 | // Get the sprite 174 | brain.colorMap.img = document.getElementById(params.colorMap.img); 175 | // Set min / max 176 | brain.colorMap.min = params.colorMap.min; 177 | brain.colorMap.max = params.colorMap.max; 178 | // Set visibility 179 | params.colorMap.hide = typeof params.colorMap.hide !== 'undefined' ? params.colorMap.hide: false; 180 | // An in-memory canvas to store the colormap 181 | brain.colorMap.canvas = document.createElement('canvas'); 182 | brain.colorMap.context = brain.colorMap.canvas.getContext('2d'); 183 | brain.colorMap.canvas.width = brain.colorMap.img.width; 184 | brain.colorMap.canvas.height = brain.colorMap.img.height; 185 | 186 | // Copy the color map in an in-memory canvas 187 | brain.colorMap.context.drawImage(brain.colorMap.img, 188 | 0,0,brain.colorMap.img.width, brain.colorMap.img.height, 189 | 0,0,brain.colorMap.img.width, brain.colorMap.img.height); 190 | }; 191 | 192 | //*******************************************// 193 | // Extract the value associated with a voxel // 194 | //*******************************************// 195 | brain.getValue = function(rgb,colorMap) { 196 | if (!colorMap) { 197 | return NaN; 198 | } 199 | var cv, dist, nbColor, ind, val, voxelValue; 200 | nbColor = colorMap.canvas.width; 201 | ind = NaN; 202 | val = Infinity; 203 | for (let xx=0; xx', 15 | }); 16 | -------------------------------------------------------------------------------- /dmriprepViewer/src/router/index.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue'; 2 | import Router from 'vue-router'; 3 | import HelloWorld from '@/components/HelloWorld'; 4 | import Report from 
'@/components/Report'; 5 | import Bucket from '@/components/Bucket'; 6 | 7 | Vue.use(Router); 8 | 9 | export default new Router({ 10 | routes: [ 11 | { 12 | path: '/', 13 | name: 'HelloWorld', 14 | component: HelloWorld, 15 | }, 16 | { 17 | path: '/report', 18 | name: 'Report', 19 | component: Report, 20 | }, 21 | { 22 | path: '/bucket/:bucket', 23 | name: 'Bucket', 24 | component: Bucket, 25 | }, 26 | ], 27 | }); 28 | -------------------------------------------------------------------------------- /dmriprepViewer/static/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/dmriprep/92945a4f552ac980c4dae0b28714c2d7cf7582cf/dmriprepViewer/static/.gitkeep -------------------------------------------------------------------------------- /dmriprepViewer/test/e2e/custom-assertions/elementCount.js: -------------------------------------------------------------------------------- 1 | // A custom Nightwatch assertion. 2 | // The assertion name is the filename. 
3 | // Example usage: 4 | // 5 | // browser.assert.elementCount(selector, count) 6 | // 7 | // For more information on custom assertions see: 8 | // http://nightwatchjs.org/guide#writing-custom-assertions 9 | 10 | exports.assertion = function (selector, count) { 11 | this.message = 'Testing if element <' + selector + '> has count: ' + count 12 | this.expected = count 13 | this.pass = function (val) { 14 | return val === this.expected 15 | } 16 | this.value = function (res) { 17 | return res.value 18 | } 19 | this.command = function (cb) { 20 | var self = this 21 | return this.api.execute(function (selector) { 22 | return document.querySelectorAll(selector).length 23 | }, [selector], function (res) { 24 | cb.call(self, res) 25 | }) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /dmriprepViewer/test/e2e/nightwatch.conf.js: -------------------------------------------------------------------------------- 1 | require('babel-register') 2 | var config = require('../../config') 3 | 4 | // http://nightwatchjs.org/gettingstarted#settings-file 5 | module.exports = { 6 | src_folders: ['test/e2e/specs'], 7 | output_folder: 'test/e2e/reports', 8 | custom_assertions_path: ['test/e2e/custom-assertions'], 9 | 10 | selenium: { 11 | start_process: true, 12 | server_path: require('selenium-server').path, 13 | host: '127.0.0.1', 14 | port: 4444, 15 | cli_args: { 16 | 'webdriver.chrome.driver': require('chromedriver').path 17 | } 18 | }, 19 | 20 | test_settings: { 21 | default: { 22 | selenium_port: 4444, 23 | selenium_host: 'localhost', 24 | silent: true, 25 | globals: { 26 | devServerURL: 'http://localhost:' + (process.env.PORT || config.dev.port) 27 | } 28 | }, 29 | 30 | chrome: { 31 | desiredCapabilities: { 32 | browserName: 'chrome', 33 | javascriptEnabled: true, 34 | acceptSslCerts: true 35 | } 36 | }, 37 | 38 | firefox: { 39 | desiredCapabilities: { 40 | browserName: 'firefox', 41 | javascriptEnabled: true, 42 | acceptSslCerts: 
true 43 | } 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /dmriprepViewer/test/e2e/runner.js: -------------------------------------------------------------------------------- 1 | // 1. start the dev server using production config 2 | process.env.NODE_ENV = 'testing' 3 | 4 | const webpack = require('webpack') 5 | const DevServer = require('webpack-dev-server') 6 | 7 | const webpackConfig = require('../../build/webpack.prod.conf') 8 | const devConfigPromise = require('../../build/webpack.dev.conf') 9 | 10 | let server 11 | 12 | devConfigPromise.then(devConfig => { 13 | const devServerOptions = devConfig.devServer 14 | const compiler = webpack(webpackConfig) 15 | server = new DevServer(compiler, devServerOptions) 16 | const port = devServerOptions.port 17 | const host = devServerOptions.host 18 | return server.listen(port, host) 19 | }) 20 | .then(() => { 21 | // 2. run the nightwatch test suite against it 22 | // to run in additional browsers: 23 | // 1. add an entry in test/e2e/nightwatch.conf.js under "test_settings" 24 | // 2. 
add it to the --env flag below 25 | // or override the environment flag, for example: `npm run e2e -- --env chrome,firefox` 26 | // For more information on Nightwatch's config file, see 27 | // http://nightwatchjs.org/guide#settings-file 28 | let opts = process.argv.slice(2) 29 | if (opts.indexOf('--config') === -1) { 30 | opts = opts.concat(['--config', 'test/e2e/nightwatch.conf.js']) 31 | } 32 | if (opts.indexOf('--env') === -1) { 33 | opts = opts.concat(['--env', 'chrome']) 34 | } 35 | 36 | const spawn = require('cross-spawn') 37 | const runner = spawn('./node_modules/.bin/nightwatch', opts, { stdio: 'inherit' }) 38 | 39 | runner.on('exit', function (code) { 40 | server.close() 41 | process.exit(code) 42 | }) 43 | 44 | runner.on('error', function (err) { 45 | server.close() 46 | throw err 47 | }) 48 | }) 49 | -------------------------------------------------------------------------------- /dmriprepViewer/test/e2e/specs/test.js: -------------------------------------------------------------------------------- 1 | // For authoring Nightwatch tests, see 2 | // http://nightwatchjs.org/guide#usage 3 | 4 | module.exports = { 5 | 'default e2e tests': function test(browser) { 6 | // automatically uses dev Server port from /config.index.js 7 | // default: http://localhost:8080 8 | // see nightwatch.conf.js 9 | const devServer = browser.globals.devServerURL; 10 | 11 | browser 12 | .url(devServer) 13 | .waitForElementVisible('#app', 5000) 14 | .assert.elementPresent('.hello') 15 | .assert.containsText('h1', 'Welcome to Your Vue.js App') 16 | .assert.elementCount('img', 1) 17 | .end(); 18 | }, 19 | }; 20 | -------------------------------------------------------------------------------- /dmriprepViewer/test/unit/.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "jest": true 4 | }, 5 | "globals": { 6 | } 7 | } 8 | -------------------------------------------------------------------------------- 
/dmriprepViewer/test/unit/jest.conf.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | module.exports = { 4 | rootDir: path.resolve(__dirname, '../../'), 5 | moduleFileExtensions: [ 6 | 'js', 7 | 'json', 8 | 'vue', 9 | ], 10 | moduleNameMapper: { 11 | '^@/(.*)$': '/src/$1', 12 | }, 13 | transform: { 14 | '^.+\\.js$': '/node_modules/babel-jest', 15 | '.*\\.(vue)$': '/node_modules/vue-jest', 16 | }, 17 | testPathIgnorePatterns: [ 18 | '/test/e2e', 19 | ], 20 | snapshotSerializers: ['/node_modules/jest-serializer-vue'], 21 | setupFiles: ['/test/unit/setup'], 22 | mapCoverage: true, 23 | coverageDirectory: '/test/unit/coverage', 24 | collectCoverageFrom: [ 25 | 'src/**/*.{js,vue}', 26 | '!src/main.js', 27 | '!src/router/index.js', 28 | '!**/node_modules/**', 29 | ], 30 | }; 31 | -------------------------------------------------------------------------------- /dmriprepViewer/test/unit/setup.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue'; 2 | 3 | Vue.config.productionTip = false; 4 | -------------------------------------------------------------------------------- /dmriprepViewer/test/unit/specs/HelloWorld.spec.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue'; 2 | import HelloWorld from '@/components/HelloWorld'; 3 | 4 | describe('HelloWorld.vue', () => { 5 | it('should render correct contents', () => { 6 | const Constructor = Vue.extend(HelloWorld); 7 | const vm = new Constructor().$mount(); 8 | expect(vm.$el.querySelector('.hello h1').textContent) 9 | .toEqual('Welcome to Your Vue.js App'); 10 | }); 11 | }); 12 | -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | # Generated by Neurodocker version 0.4.1 2 | # Timestamp: 2018-09-04 17:44:34 
UTC 3 | # 4 | # Thank you for using Neurodocker. If you discover any issues 5 | # or ways to improve this software, please submit an issue or 6 | # pull request on our GitHub repository: 7 | # 8 | # https://github.com/kaczmarj/neurodocker 9 | 10 | FROM debian:stretch 11 | 12 | ARG DEBIAN_FRONTEND="noninteractive" 13 | 14 | ENV LANG="en_US.UTF-8" \ 15 | LC_ALL="en_US.UTF-8" \ 16 | ND_ENTRYPOINT="/neurodocker/startup.sh" 17 | RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \ 18 | && apt-get update -qq \ 19 | && apt-get install -y -q --no-install-recommends \ 20 | apt-utils \ 21 | bzip2 \ 22 | ca-certificates \ 23 | curl \ 24 | locales \ 25 | unzip \ 26 | && apt-get clean \ 27 | && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ 28 | && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \ 29 | && dpkg-reconfigure --frontend=noninteractive locales \ 30 | && update-locale LANG="en_US.UTF-8" \ 31 | && chmod 777 /opt && chmod a+s /opt \ 32 | && mkdir -p /neurodocker \ 33 | && if [ ! -f "$ND_ENTRYPOINT" ]; then \ 34 | echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" \ 35 | && echo 'set -e' >> "$ND_ENTRYPOINT" \ 36 | && echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; \ 37 | fi \ 38 | && chmod -R 777 /neurodocker && chmod a+s /neurodocker 39 | 40 | ENTRYPOINT ["/neurodocker/startup.sh"] 41 | 42 | ENV FSLDIR="/opt/fsl-5.0.11" \ 43 | PATH="/opt/fsl-5.0.11/bin:$PATH" 44 | RUN apt-get update -qq \ 45 | && apt-get install -y -q --no-install-recommends \ 46 | bc \ 47 | dc \ 48 | file \ 49 | libfontconfig1 \ 50 | libfreetype6 \ 51 | libgl1-mesa-dev \ 52 | libglu1-mesa-dev \ 53 | libgomp1 \ 54 | libice6 \ 55 | libmng1 \ 56 | libxcursor1 \ 57 | libxft2 \ 58 | libxinerama1 \ 59 | libxrandr2 \ 60 | libxrender1 \ 61 | libxt6 \ 62 | wget \ 63 | && apt-get clean \ 64 | && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ 65 | && echo "Downloading FSL ..." 
\ 66 | && mkdir -p /opt/fsl-5.0.11 \ 67 | && curl -fsSL --retry 5 https://fsl.fmrib.ox.ac.uk/fsldownloads/fsl-5.0.11-centos6_64.tar.gz \ 68 | | tar -xz -C /opt/fsl-5.0.11 --strip-components 1 \ 69 | && sed -i '$iecho Some packages in this Docker container are non-free' $ND_ENTRYPOINT \ 70 | && sed -i '$iecho If you are considering commercial use of this container, please consult the relevant license:' $ND_ENTRYPOINT \ 71 | && sed -i '$iecho https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence' $ND_ENTRYPOINT \ 72 | && sed -i '$isource $FSLDIR/etc/fslconf/fsl.sh' $ND_ENTRYPOINT \ 73 | && echo "Installing FSL conda environment ..." \ 74 | && bash /opt/fsl-5.0.11/etc/fslconf/fslpython_install.sh -f /opt/fsl-5.0.11 75 | 76 | ENV CONDA_DIR="/opt/miniconda-latest" \ 77 | PATH="/opt/miniconda-latest/bin:$PATH" 78 | RUN export PATH="/opt/miniconda-latest/bin:$PATH" \ 79 | && echo "Downloading Miniconda installer ..." \ 80 | && conda_installer="/tmp/miniconda.sh" \ 81 | && curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \ 82 | && bash "$conda_installer" -b -p /opt/miniconda-latest \ 83 | && rm -f "$conda_installer" \ 84 | && conda update -yq -nbase conda \ 85 | && conda config --system --prepend channels conda-forge \ 86 | && conda config --system --set auto_update_conda false \ 87 | && conda config --system --set show_channel_urls true \ 88 | && sync && conda clean -tipsy && sync 89 | # && conda create -y -q --name dmriprep 90 | 91 | # RUN conda install -y -q --name dmriprep \ 92 | # -c \ 93 | # conda-forge \ 94 | # python=3.6 \ 95 | # nipype \ 96 | # dipy \ 97 | # boto3 \ 98 | # && sync && conda clean -tipsy && sync 99 | 100 | 101 | ENV FREESURFER_HOME="/opt/freesurfer-6.0.0" \ 102 | PATH="/opt/freesurfer-6.0.0/bin:$PATH" 103 | 104 | RUN apt-get update -qq \ 105 | && apt-get install -y -q --no-install-recommends \ 106 | bc \ 107 | libgomp1 \ 108 | libxmu6 \ 109 | libxt6 \ 110 | perl \ 111 | tcsh \ 112 | && apt-get 
clean \ 113 | && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ 114 | && echo "Downloading FreeSurfer ..." \ 115 | && mkdir -p /opt/freesurfer-6.0.0 \ 116 | && curl -fsSL --retry 5 ftp://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.0/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.0.tar.gz \ 117 | | tar -xz -C /opt/freesurfer-6.0.0 --strip-components 1 \ 118 | --exclude='freesurfer/average/mult-comp-cor' \ 119 | --exclude='freesurfer/lib/cuda' \ 120 | --exclude='freesurfer/lib/qt' \ 121 | --exclude='freesurfer/subjects/V1_average' \ 122 | --exclude='freesurfer/subjects/bert' \ 123 | --exclude='freesurfer/subjects/cvs_avg35' \ 124 | --exclude='freesurfer/subjects/cvs_avg35_inMNI152' \ 125 | --exclude='freesurfer/subjects/fsaverage3' \ 126 | --exclude='freesurfer/subjects/fsaverage4' \ 127 | --exclude='freesurfer/subjects/fsaverage5' \ 128 | --exclude='freesurfer/subjects/fsaverage6' \ 129 | --exclude='freesurfer/subjects/fsaverage_sym' \ 130 | --exclude='freesurfer/trctrain' \ 131 | && sed -i '$isource "/opt/freesurfer-6.0.0/SetUpFreeSurfer.sh"' "$ND_ENTRYPOINT" 132 | 133 | COPY ./license.txt /opt/freesurfer-6.0.0/license.txt 134 | 135 | #&& sync && conda clean -tipsy && sync 136 | 137 | RUN apt-get update && apt-get install -y git gcc libopenblas-base 138 | ADD environment.yml environment.yml 139 | RUN conda env create -f environment.yml 140 | 141 | ENV LD_LIBRARY_PATH=/usr/lib/openblas-base/ 142 | 143 | RUN sed -i '$isource activate dmriprep' $ND_ENTRYPOINT 144 | 145 | 146 | RUN echo '{ \ 147 | \n "pkg_manager": "apt", \ 148 | \n "instructions": [ \ 149 | \n [ \ 150 | \n "base", \ 151 | \n "debian:stretch" \ 152 | \n ], \ 153 | \n [ \ 154 | \n "fsl", \ 155 | \n { \ 156 | \n "version": "5.0.11" \ 157 | \n } \ 158 | \n ], \ 159 | \n [ \ 160 | \n "miniconda", \ 161 | \n { \ 162 | \n "conda_install": [ \ 163 | \n "-c", \ 164 | \n "conda-forge", \ 165 | \n "python=3.6", \ 166 | \n "nipype", \ 167 | \n "dipy" \ 168 | \n ], \ 169 | \n "create_env": "dmriprep", \ 
170 | \n "use_env": "dmriprep" \ 171 | \n } \ 172 | \n ] \ 173 | \n ] \ 174 | \n}' > /neurodocker/neurodocker_specs.json 175 | -------------------------------------------------------------------------------- /docker/environment.yml: -------------------------------------------------------------------------------- 1 | name: dmriprep 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.6 6 | - boto3 7 | - dask 8 | - dipy=0.15 9 | - graphviz 10 | - ipython 11 | - pandas 12 | - tqdm 13 | - pip: 14 | - "--editable=git+https://github.com/akeshavan/nipype@87e53615867c9850c359002909c71825343b2e35#egg=nipype" 15 | - "--editable=git+https://git.fmrib.ox.ac.uk/matteob/eddy_qc_release@57bb11da6a634c4195593fbc439ba9f8998157b0#egg=eddy_qc" 16 | - bids 17 | - duecredit 18 | -------------------------------------------------------------------------------- /docker/license.txt: -------------------------------------------------------------------------------- 1 | keshavan@uw.edu 2 | 37716 3 | *C9LImOstH7yw 4 | FSrY9yK/yWp8E 5 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = python -msphinx 7 | SPHINXPROJ = dmriprep 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../AUTHORS.md 2 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # dmriprep documentation build configuration file, created by 5 | # sphinx-quickstart on Fri Jun 9 13:47:02 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another 17 | # directory, add these directories to sys.path here. If the directory is 18 | # relative to the documentation root, use os.path.abspath to make it 19 | # absolute, like shown here. 20 | # 21 | import os 22 | import sys 23 | sys.path.insert(0, os.path.abspath('..')) 24 | 25 | import dmriprep 26 | 27 | # -- General configuration --------------------------------------------- 28 | 29 | # If your documentation needs a minimal Sphinx version, state it here. 30 | # 31 | # needs_sphinx = '1.0' 32 | 33 | # Add any Sphinx extension module names here, as strings. They can be 34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 35 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] 36 | 37 | # Add any paths that contain templates here, relative to this directory. 38 | templates_path = ['_templates'] 39 | 40 | # The suffix(es) of source filenames. 
41 | # You can specify multiple suffix as a list of string: 42 | # 43 | # source_suffix = ['.rst', '.md'] 44 | source_suffix = '.rst' 45 | 46 | # The master toctree document. 47 | master_doc = 'index' 48 | 49 | # General information about the project. 50 | project = u'dmriprep' 51 | copyright = u"2018, Anisha Keshavan" 52 | author = u"Anisha Keshavan" 53 | 54 | # The version info for the project you're documenting, acts as replacement 55 | # for |version| and |release|, also used in various other places throughout 56 | # the built documents. 57 | # 58 | # The short X.Y version. 59 | version = dmriprep.__version__ 60 | # The full version, including alpha/beta/rc tags. 61 | release = dmriprep.__version__ 62 | 63 | # The language for content autogenerated by Sphinx. Refer to documentation 64 | # for a list of supported languages. 65 | # 66 | # This is also used if you do content translation via gettext catalogs. 67 | # Usually you set "language" from the command line for these cases. 68 | language = None 69 | 70 | # List of patterns, relative to source directory, that match files and 71 | # directories to ignore when looking for source files. 72 | # This patterns also effect to html_static_path and html_extra_path 73 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 74 | 75 | # The name of the Pygments (syntax highlighting) style to use. 76 | pygments_style = 'sphinx' 77 | 78 | # If true, `todo` and `todoList` produce output, else they produce nothing. 79 | todo_include_todos = False 80 | 81 | 82 | # -- Options for HTML output ------------------------------------------- 83 | 84 | # The theme to use for HTML and HTML Help pages. See the documentation for 85 | # a list of builtin themes. 86 | # 87 | html_theme = 'alabaster' 88 | 89 | # Theme options are theme-specific and customize the look and feel of a 90 | # theme further. For a list of options available for each theme, see the 91 | # documentation. 
92 | # 93 | # html_theme_options = {} 94 | 95 | # Add any paths that contain custom static files (such as style sheets) here, 96 | # relative to this directory. They are copied after the builtin static files, 97 | # so a file named "default.css" will overwrite the builtin "default.css". 98 | html_static_path = ['_static'] 99 | 100 | 101 | # -- Options for HTMLHelp output --------------------------------------- 102 | 103 | # Output file base name for HTML help builder. 104 | htmlhelp_basename = 'dmriprepdoc' 105 | 106 | 107 | # -- Options for LaTeX output ------------------------------------------ 108 | 109 | latex_elements = { 110 | # The paper size ('letterpaper' or 'a4paper'). 111 | # 112 | # 'papersize': 'letterpaper', 113 | 114 | # The font size ('10pt', '11pt' or '12pt'). 115 | # 116 | # 'pointsize': '10pt', 117 | 118 | # Additional stuff for the LaTeX preamble. 119 | # 120 | # 'preamble': '', 121 | 122 | # Latex figure (float) alignment 123 | # 124 | # 'figure_align': 'htbp', 125 | } 126 | 127 | # Grouping the document tree into LaTeX files. List of tuples 128 | # (source start file, target name, title, author, documentclass 129 | # [howto, manual, or own class]). 130 | latex_documents = [ 131 | (master_doc, 'dmriprep.tex', 132 | u'dmriprep Documentation', 133 | u'Anisha Keshavan', 'manual'), 134 | ] 135 | 136 | 137 | # -- Options for manual page output ------------------------------------ 138 | 139 | # One entry per manual page. List of tuples 140 | # (source start file, name, description, authors, manual section). 141 | man_pages = [ 142 | (master_doc, 'dmriprep', 143 | u'dmriprep Documentation', 144 | [author], 1) 145 | ] 146 | 147 | 148 | # -- Options for Texinfo output ---------------------------------------- 149 | 150 | # Grouping the document tree into Texinfo files. 
List of tuples 151 | # (source start file, target name, title, author, 152 | # dir menu entry, description, category) 153 | texinfo_documents = [ 154 | (master_doc, 'dmriprep', 155 | u'dmriprep Documentation', 156 | author, 157 | 'dmriprep', 158 | 'Preprocessing of neuroimaging data in preparation for AFQ analysis.', 159 | 'Miscellaneous'), 160 | ] 161 | 162 | 163 | 164 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to dmriprep's documentation! 2 | ====================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | :caption: Contents: 7 | 8 | installation 9 | usage 10 | modules 11 | contributing 12 | authors 13 | history 14 | 15 | Indices and tables 16 | ================== 17 | * :ref:`genindex` 18 | * :ref:`modindex` 19 | * :ref:`search` 20 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | 8 | Stable release 9 | -------------- 10 | 11 | To install dmriprep, run this command in your terminal: 12 | 13 | .. code-block:: console 14 | 15 | $ pip install dmriprep 16 | 17 | This is the preferred method to install dmriprep, as it will always install the most recent stable release. 
18 | 19 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide 20 | you through the process. 21 | 22 | .. _pip: https://pip.pypa.io 23 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ 24 | 25 | 26 | From sources 27 | ------------ 28 | 29 | The sources for dmriprep can be downloaded from the `Github repo`_. 30 | 31 | You can either clone the public repository: 32 | 33 | .. code-block:: console 34 | 35 | $ git clone git://github.com/nipy/dmriprep 36 | 37 | Or download the `tarball`_: 38 | 39 | .. code-block:: console 40 | 41 | $ curl -OL https://github.com/nipy/dmriprep/tarball/master 42 | 43 | Once you have a copy of the source, you can install it with: 44 | 45 | .. code-block:: console 46 | 47 | $ python setup.py install 48 | 49 | 50 | .. _Github repo: https://github.com/nipy/dmriprep 51 | .. _tarball: https://github.com/nipy/dmriprep/tarball/master 52 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=dmriprep 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed, 20 | echo.then set the SPHINXBUILD environment variable to point to the full 21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the 22 | echo.Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Usage 3 | ===== 4 | 5 | To use dmriprep in a project:: 6 | 7 | import dmriprep 8 | -------------------------------------------------------------------------------- /kubernetes/create_kube_job.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from boto3 import Session 4 | 5 | if __name__ == "__main__": 6 | if len(sys.argv) > 1: 7 | subject = sys.argv[1] 8 | 9 | session = Session() 10 | credentials = session.get_credentials() 11 | # Credentials are refreshable, so accessing your access key / secret key 12 | # separately can lead to a race condition. Use this to get an actual matched 13 | # set. 
14 | current_credentials = credentials.get_frozen_credentials() 15 | 16 | access_key = current_credentials.access_key 17 | secret_key = current_credentials.secret_key 18 | 19 | with open("run_dmriprep.yml.tmpl", 'r') as template: 20 | with open("jobs/job_{}.yml".format(subject), 'w') as f: 21 | all_text = template.read() 22 | all_text = all_text.replace("{{subject_lower}}", subject.lower()) 23 | all_text = all_text.replace("{{subject}}", subject) 24 | all_text = all_text.replace("{{access_key}}", access_key) 25 | all_text = all_text.replace("{{secret_key}}", secret_key) 26 | f.write(all_text) 27 | 28 | -------------------------------------------------------------------------------- /kubernetes/delete_cluster.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | gcloud container clusters delete dmriprep --zone=us-west1-a 3 | -------------------------------------------------------------------------------- /kubernetes/delete_job.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | kubectl delete -f run_dmriprep.yml 3 | -------------------------------------------------------------------------------- /kubernetes/docker/build_tag_push.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | docker build -t dmriprep:kube4 -f dockerfile-dmriprep-kube . 
3 | docker tag dmriprep:kube4 gcr.io/dmriprep/dmriprep:kube4 4 | docker push gcr.io/dmriprep/dmriprep:kube4 5 | -------------------------------------------------------------------------------- /kubernetes/docker/dmriprep_all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mkdir inputs 3 | mkdir outputs 4 | dmriprep-data --subject $1 $PWD/inputs/ 5 | dmriprep $PWD/inputs $PWD/outputs 6 | dmriprep-upload --access_key $2 --secret_key $3 $PWD/outputs preafq-hbn 7 | -------------------------------------------------------------------------------- /kubernetes/docker/dockerfile-dmriprep-kube: -------------------------------------------------------------------------------- 1 | FROM dmriprep:prod 2 | COPY dmriprep_all.sh /dmriprep_all.sh 3 | CMD ["/neurodocker/startup.sh", "dmriprep_all.sh"] 4 | -------------------------------------------------------------------------------- /kubernetes/run_dmriprep.yml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: batch/v1 2 | kind: Job 3 | metadata: 4 | name: dmriprep-{{subject_lower}} 5 | spec: 6 | template: 7 | spec: 8 | containers: 9 | - name: dmriprep-subject 10 | image: gcr.io/dmriprep/dmriprep:kube4 11 | command: ["/neurodocker/startup.sh", "./dmriprep_all.sh", "{{subject}}", "{{access_key}}", "{{secret_key}}"] 12 | resources: 13 | requests: 14 | memory: "12G" 15 | cpu: "2" 16 | restartPolicy: Never 17 | backoffLimit: 1 18 | -------------------------------------------------------------------------------- /kubernetes/run_job.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | kubectl apply -f run_dmriprep.yml 3 | -------------------------------------------------------------------------------- /kubernetes/setup_gcp_kubernetes_dmriprep.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # login 4 | gcloud auth login 
anishakeshavan@gmail.com 5 | 6 | # set the project to dmriprep 7 | gcloud config set project dmriprep 8 | 9 | # some variables 10 | ZONE=us-west1-a 11 | MAX_NODES=4 12 | CLUSTERNAME=dmriprep 13 | 14 | # set the default compute zone 15 | gcloud config set compute/zone $ZONE 16 | 17 | # start the cluster! 18 | gcloud beta container clusters create $CLUSTERNAME --machine-type n1-highmem-4 --enable-autoscaling --max-nodes=$MAX_NODES --num-nodes 1 --cluster-version latest --node-labels dmriprep/node-purpose=core 19 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Click>=6.0 2 | dask==1.2.2 3 | dipy==0.16.0 4 | nipype==1.2.0 5 | pandas==0.24.2 6 | parse==1.12.0 7 | tqdm==4.32.1 8 | pybids==0.8.0 9 | matplotlib==3.1.0 10 | cytoolz==0.9.0.1 11 | numba==0.43.1 -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | pip==19.1.1 2 | bumpversion==0.5.3 3 | wheel==0.33.4 4 | watchdog==0.9.0 5 | flake8==3.7.7 6 | tox==3.11.1 7 | coverage==4.5.3 8 | Sphinx==2.0.1 9 | twine==1.13.0 10 | 11 | pytest==4.5.0 12 | pytest-runner==4.4 13 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.1.0 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:setup.py] 7 | search = version='{current_version}' 8 | replace = version='{new_version}' 9 | 10 | [bumpversion:file:dmriprep/__init__.py] 11 | search = __version__ = '{current_version}' 12 | replace = __version__ = '{new_version}' 13 | 14 | [bdist_wheel] 15 | universal = 1 16 | 17 | [flake8] 18 | exclude = docs 19 | 20 | [aliases] 21 | # Define setup.py command aliases here 22 | test = pytest 23 | 24 | 
[tool:pytest] 25 | collect_ignore = ['setup.py'] 26 | 27 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """The setup script.""" 5 | 6 | from setuptools import setup, find_packages 7 | 8 | with open("README.md") as readme_file: 9 | readme = readme_file.read() 10 | 11 | with open("HISTORY.rst") as history_file: 12 | history = history_file.read() 13 | 14 | requirements = [ 15 | "Click>=6.0", 16 | "dask", 17 | "dipy", 18 | "nipype", 19 | "pandas", 20 | "parse", 21 | "tqdm", 22 | "pybids", 23 | "matplotlib", 24 | "cytoolz", 25 | "numba", 26 | ] 27 | 28 | extras_require = {"dev": ["flake8", "pytest", "pytest-cov", "pre-commit"]} 29 | 30 | setup_requirements = ["pytest-runner"] 31 | 32 | test_requirements = ["pytest"] 33 | 34 | setup( 35 | author="Anisha Keshavan", 36 | author_email="keshavan@berkeley.edu", 37 | classifiers=[ 38 | "Development Status :: 2 - Pre-Alpha", 39 | "Intended Audience :: Developers", 40 | "License :: OSI Approved :: BSD License", 41 | "Natural Language :: English", 42 | "Programming Language :: Python :: 3", 43 | "Programming Language :: Python :: 3.6", 44 | "Programming Language :: Python :: 3.7", 45 | ], 46 | description="Preprocessing of neuroimaging data in preparation for AFQ analysis", 47 | entry_points={ 48 | "console_scripts": [ 49 | "dmriprep=dmriprep.cli:main", 50 | "dmriprep-data=dmriprep.cli:data", 51 | "dmriprep-upload=dmriprep.cli:upload", 52 | ] 53 | }, 54 | scripts=["./dmriprep-docker"], 55 | install_requires=requirements, 56 | extras_require=extras_require, 57 | license="BSD license", 58 | long_description=readme + "\n\n" + history, 59 | include_package_data=True, 60 | keywords="dmriprep", 61 | name="dmriprep", 62 | packages=find_packages(include=["dmriprep"]), 63 | setup_requires=setup_requirements, 64 | test_suite="tests", 65 | 
tests_require=test_requirements, 66 | url="https://github.com/nipy/dmriprep", 67 | version="0.1.0", 68 | zip_safe=False, 69 | ) 70 | -------------------------------------------------------------------------------- /tests/test_dmriprep.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """Tests for `dmriprep` package.""" 5 | 6 | import pytest 7 | 8 | from click.testing import CliRunner 9 | 10 | from dmriprep import dmriprep 11 | from dmriprep import cli 12 | 13 | 14 | @pytest.fixture 15 | def response(): 16 | """Sample pytest fixture. 17 | 18 | See more at: http://doc.pytest.org/en/latest/fixture.html 19 | """ 20 | # import requests 21 | # return requests.get('https://github.com/audreyr/cookiecutter-pypackage') 22 | 23 | 24 | def test_content(response): 25 | """Sample pytest test function with the pytest fixture as an argument.""" 26 | # from bs4 import BeautifulSoup 27 | # assert 'GitHub' in BeautifulSoup(response.content).title.string 28 | 29 | 30 | def test_command_line_interface(): 31 | """Test the CLI.""" 32 | runner = CliRunner() 33 | result = runner.invoke(cli.main) 34 | assert result.exit_code == 0 35 | assert 'dmriprep.cli.main' in result.output 36 | help_result = runner.invoke(cli.main, ['--help']) 37 | assert help_result.exit_code == 0 38 | assert '--help Show this message and exit.' 
in help_result.output 39 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """Tests for `dmriprep` package.""" 5 | 6 | import numpy as np 7 | 8 | from dmriprep.utils import is_hemispherical 9 | 10 | 11 | def uniform_points_on_sphere(npoints=1, hemisphere=True, rotate=(0, 0, 0)): 12 | """Generate random uniform points on a unit (hemi)sphere.""" 13 | r = 1.0 14 | if hemisphere: 15 | theta = np.random.uniform(0, np.pi / 2, npoints) 16 | else: 17 | theta = np.random.uniform(0, np.pi, npoints) 18 | phi = np.random.uniform(0, 2 * np.pi, npoints) 19 | 20 | x = r * np.sin(theta) * np.cos(phi) 21 | y = r * np.sin(theta) * np.sin(phi) 22 | z = r * np.cos(theta) 23 | 24 | vecs = np.stack([x, y, z]) 25 | 26 | rot_x = np.array([ 27 | [1.0, 0.0, 0.0], 28 | [0.0, np.cos(rotate[0]), -np.sin(rotate[0])], 29 | [0.0, np.sin(rotate[0]), np.cos(rotate[0])] 30 | ]) 31 | 32 | rot_y = np.array([ 33 | [np.cos(rotate[1]), 0.0, np.sin(rotate[1])], 34 | [0.0, 1.0, 0.0], 35 | [-np.sin(rotate[1]), 0.0, np.cos(rotate[1])] 36 | ]) 37 | 38 | rot_z = np.array([ 39 | [np.cos(rotate[2]), -np.sin(rotate[2]), 0.0], 40 | [np.sin(rotate[2]), np.cos(rotate[2]), 0.0], 41 | [0.0, 0.0, 1.0] 42 | ]) 43 | 44 | vecs = np.dot(rot_z, np.dot(rot_y, np.dot(rot_x, vecs))) 45 | 46 | return vecs.transpose() 47 | 48 | 49 | def test_is_hemispherical(): 50 | vecs = uniform_points_on_sphere( 51 | npoints=100, 52 | hemisphere=True, 53 | rotate=(np.random.uniform(0, np.pi), 54 | np.random.uniform(0, np.pi), 55 | np.random.uniform(0, np.pi)) 56 | ) 57 | 58 | assert is_hemispherical(vecs)[0] 59 | vecs = uniform_points_on_sphere(npoints=100, hemisphere=False) 60 | assert not is_hemispherical(vecs)[0] 61 | -------------------------------------------------------------------------------- /tox.ini: 
-------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py27, py34, py35, py36, flake8 3 | 4 | [travis] 5 | python = 6 | 3.6: py36 7 | 3.5: py35 8 | 3.4: py34 9 | 2.7: py27 10 | 11 | [testenv:flake8] 12 | basepython = python 13 | deps = flake8 14 | commands = flake8 dmriprep 15 | 16 | [testenv] 17 | setenv = 18 | PYTHONPATH = {toxinidir} 19 | deps = 20 | -r{toxinidir}/requirements_dev.txt 21 | ; If you want to make tox run the tests with the same versions, create a 22 | ; requirements.txt with the pinned versions and uncomment the following line: 23 | ; -r{toxinidir}/requirements.txt 24 | commands = 25 | pip install -U pip 26 | py.test --basetemp={envtmpdir} 27 | 28 | 29 | --------------------------------------------------------------------------------