├── fsl_seg_to_nidm ├── mapping_data │ ├── __init__.py │ ├── ReproNimCDEs.xlsx │ ├── fslmap.json │ ├── fsl-cde.json │ └── fsl_cde.ttl ├── __init__.py ├── fslutils.py └── fsl_seg_to_nidm.py ├── MANIFEST.in ├── requirements.txt ├── LICENSE ├── examples └── test.json ├── setup.py ├── .gitignore └── README.md /fsl_seg_to_nidm/mapping_data/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include fsl_seg_to_nidm/mapping_data/* 2 | # include segstats_jsonld/tests/testdata/* -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | -e git+https://github.com/incf-nidash/PyNIDM.git#egg=PyNIDM 2 | pandas 3 | numpy 4 | prov 5 | rdflib 6 | xlrd -------------------------------------------------------------------------------- /fsl_seg_to_nidm/mapping_data/ReproNimCDEs.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ReproNim/fsl_seg_to_nidm/master/fsl_seg_to_nidm/mapping_data/ReproNimCDEs.xlsx -------------------------------------------------------------------------------- /fsl_seg_to_nidm/__init__.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil -*- 2 | # ex: set sts=4 ts=4 sw=4 noet: 3 | # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See LICENSE file distributed along with the segstats_jsonld package for the 6 | # license terms. 7 | # 8 | # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | from __future__ import absolute_import 10 | 11 | __version__ = '0.0.1' 12 | 13 | # do imports of all of the functions that should be available here 14 | from .fsl_seg_to_nidm import add_seg_data 15 | from .fslutils import (read_fsl_stats,create_cde_graph,convert_stats_to_nidm) 16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 David Keator 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /examples/test.json: -------------------------------------------------------------------------------- 1 | { 2 | "Background": [ 3 | 11486780, 4 | 13317179.653980732 5 | ], 6 | "Left-Accumbens-area": [ 7 | 687, 8 | 796.4723292589188 9 | ], 10 | "Left-Amygdala": [ 11 | 1083, 12 | 1255.5742832422256 13 | ], 14 | "Left-Caudate": [ 15 | 3838, 16 | 4449.579038858414 17 | ], 18 | "Left-Hippocampus": [ 19 | 3798, 20 | 4403.205104112625 21 | ], 22 | "Left-Pallidum": [ 23 | 1802, 24 | 2089.1457602977753 25 | ], 26 | "Left-Putamen": [ 27 | 5089, 28 | 5899.923848032951 29 | ], 30 | "Left-Thalamus-Proper": [ 31 | 7617, 32 | 8830.75652396679 33 | ], 34 | "Right-Accumbens-area": [ 35 | 585, 36 | 678.2187956571579 37 | ], 38 | "Right-Amygdala": [ 39 | 1187, 40 | 1376.146513581276 41 | ], 42 | "Right-Caudate": [ 43 | 3983, 44 | 4617.684552311897 45 | ], 46 | "Right-Hippocampus": [ 47 | 3568, 48 | 4136.554979324341 49 | ], 50 | "Right-Pallidum": [ 51 | 1797, 52 | 2083.3490184545517 53 | ], 54 | "Right-Putamen": [ 55 | 5158, 56 | 5979.918885469437 57 | ], 58 | "Right-Thalamus-Proper": [ 59 | 7364, 60 | 8537.441386699677 61 | ], 62 | "csf": [ 63 | 221635.0, 64 | 256952.171875 65 | ], 66 | "gray": [ 67 | 686347.0, 68 | 795715.25 69 | ], 70 | "white": [ 71 | 510304.0, 72 | 591620.125 73 | ] 74 | } -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup 4 | from setuptools import find_packages 5 | from os.path import join as opj 6 | from os.path import dirname 7 | 8 | 9 | def get_version(): 10 | """Load version only 11 | """ 12 | with open(opj(dirname(__file__), 'fsl_seg_to_nidm', '__init__.py')) as f: 13 | version_lines = list(filter(lambda x: x.startswith('__version__'), f)) 14 | assert (len(version_lines) == 1) 15 | return version_lines[0].split('=')[1].strip(" '\"\t\n") 16 | 17 | # extension version 18 | version = get_version() 19 | PACKAGES = find_packages() 20 | 21 | README = opj(dirname(__file__), 'README.md') 22 | try: 23 | import pypandoc 24 | long_description = pypandoc.convert(README, 'rst') 25 | except (ImportError, OSError) as exc: 26 | print( 27 | "WARNING: pypandoc failed to import or threw an error while converting" 28 | " README.md to RST: %r .md version will be used as is" %exc 29 | ) 30 | long_description = open(README).read() 31 | 32 | # Metadata 33 | setup( 34 | name='fsl_seg_to_nidm', 35 | version=version, 36 | description='FSL FIRST and FAST segmentation data to NIDM / jsonld', 37 | long_description=long_description, 38 | author='David Keator', 39 | author_email='dbkeator@uci.edu', 40 | url='https://github.com/dbkeator/fsl_seg_to_nidm', 41 | packages=PACKAGES, 42 | install_requires=[ 43 | 'numpy', 44 | 'pynidm', 45 | 'pandas', 46 | ], # Add requirements as necessary 47 | include_package_data=True, 48 | extras_require={ 49 | 'devel-docs': [ 50 | # for converting README.md -> .rst for long description 51 | 'pypandoc', 52 | ]}, 53 | entry_points={ 54 | 'console_scripts': [ 55 | 'fslsegstats2nidm=fsl_seg_to_nidm.fsl_seg_to_nidm:main' # this is where the console entry points are defined 56 | ], 57 | }, 58 | classifiers=[ 59 | "Programming Language :: Python :: 3", 60 | "License :: OSI Approved :: MIT License", 61 | "Operating System :: OS Independent", 62 | ], # Change if necessary 63 | ) 64 | 65 | 
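A minimal usage sketch (not part of the repository) of how the functions exported in fsl_seg_to_nidm/__init__.py can be used programmatically on a stats file shaped like examples/test.json above; the input and output paths are hypothetical, and the graph handling mirrors what main() in fsl_seg_to_nidm/fsl_seg_to_nidm.py does:

```
# illustrative sketch only; assumes the package and its requirements are installed,
# and that "segstats.json" / "fsl_seg.ttl" are placeholder paths
from io import StringIO
from rdflib import Graph
from fsl_seg_to_nidm import read_fsl_stats, convert_stats_to_nidm, create_cde_graph

measures = read_fsl_stats("segstats.json")      # [(CDE id, value), ...] for each structure/measure
entity, doc = convert_stats_to_nidm(measures)   # prov entity + prov document holding the stats
cde_graph = create_cde_graph()                  # rdflib graph of the FSL common data elements

# convert the prov document to an rdflib graph and merge it with the CDE graph, as main() does
stats_graph = Graph()
stats_graph.parse(source=StringIO(doc.serialize(format="rdf", rdf_format="turtle")), format="turtle")
(cde_graph + stats_graph).serialize(destination="fsl_seg.ttl", format="turtle")
```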
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # IPython 80 | profile_default/ 81 | ipython_config.py 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # pipenv 87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 90 | # install all needed dependencies. 
91 | #Pipfile.lock 92 | 93 | # celery beat schedule file 94 | celerybeat-schedule 95 | 96 | # SageMath parsed files 97 | *.sage.py 98 | 99 | # Environments 100 | .env 101 | .venv 102 | env/ 103 | venv/ 104 | ENV/ 105 | env.bak/ 106 | venv.bak/ 107 | 108 | # Spyder project settings 109 | .spyderproject 110 | .spyproject 111 | 112 | # Rope project settings 113 | .ropeproject 114 | 115 | # mkdocs documentation 116 | /site 117 | 118 | # mypy 119 | .mypy_cache/ 120 | .dmypy.json 121 | dmypy.json 122 | 123 | # Pyre type checker 124 | .pyre/ 125 | 126 | #idea stuff 127 | .idea 128 | 129 | #.DS_Store 130 | .DS_Store 131 | -------------------------------------------------------------------------------- /fsl_seg_to_nidm/mapping_data/fslmap.json: -------------------------------------------------------------------------------- 1 | { 2 | "Measures": { 3 | "NVoxels": { 4 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 5 | "hasUnit": "voxel", 6 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 7 | }, 8 | "Volume": { 9 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 10 | "hasUnit": "mm^3", 11 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 12 | } 13 | }, 14 | "Structures": { 15 | "Accumbens-area": { 16 | "fslkey": [ 17 | "Left-Accumbens-area", 18 | "Right-Accumbens-area" 19 | ], 20 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001882" 21 | }, 22 | "Amygdala": { 23 | "fslkey": [ 24 | "Left-Amygdala", 25 | "Right-Amygdala" 26 | ], 27 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001876" 28 | }, 29 | "Background": { 30 | "fslkey": [ 31 | "Background" 32 | ], 33 | "isAbout": null 34 | }, 35 | "Caudate": { 36 | "fslkey": [ 37 | "Left-Caudate", 38 | "Right-Caudate" 39 | ], 40 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001873" 41 | }, 42 | "Hippocampus": { 43 | "fslkey": [ 44 | "Left-Hippocampus", 45 | "Right-Hippocampus" 46 | ], 47 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001954" 48 | }, 49 | "Pallidum": { 50 | "fslkey": [ 51 | "Left-Pallidum", 52 | "Right-Pallidum" 53 | ], 54 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0006514" 55 | }, 56 | "Putamen": { 57 | "fslkey": [ 58 | "Left-Putamen", 59 | "Right-Putamen" 60 | ], 61 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001874" 62 | }, 63 | "Thalamus-Proper": { 64 | "fslkey": [ 65 | "Left-Thalamus-Proper", 66 | "Right-Thalamus-Proper" 67 | ], 68 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001897" 69 | }, 70 | "csf": { 71 | "fslkey": [ 72 | "csf" 73 | ], 74 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001359" 75 | }, 76 | "gray": { 77 | "fslkey": [ 78 | "gray" 79 | ], 80 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0000956" 81 | }, 82 | "white": { 83 | "fslkey": [ 84 | "white" 85 | ], 86 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0002437" 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # fsl_seg_to_nidm 2 | A tool to convert structural segmentation outputs from FSL's FIRST and FAST tool to NIDM. 3 | It takes the output of the "simple1" container and workflow which provides the results of 4 | FSL-based structural segmentation and provides them in a .json format. The ***fslsegstats2nidm*** 5 | program them converts these to NIDM which it can also add to the NIDM of the BIDS data structure. 6 | 7 | # Installation 8 | 1. 
Create a 'clean' environment 9 | 10 | ``` 11 | $ conda create -n my_env python=3 12 | ``` 13 | 14 | 2. Activate this environment 15 | 16 | ``` 17 | $ conda activate my_env 18 | ``` 19 | 20 | 3. Install 'click' (since for some reason it doesn't install with the main setup) 21 | 22 | ``` 23 | $ pip install click 24 | ``` 25 | 26 | 4. Clone this repo 27 | ``` 28 | git clone https://github.com/ReproNim/fsl_seg_to_nidm.git 29 | ``` 30 | 5. Run the setup script 31 | ``` 32 | $ cd fsl_seg_to_nidm 33 | $ python setup.py install 34 | ``` 35 | 6. Done! 36 | 37 | # Usage 38 | You can get information about how to run this tool by executing: 39 | ``` 40 | $ fslsegstats2nidm --help 41 | usage: fsl_seg_to_nidm.py [-h] (-d DATA_FILE | -f SEGFILE) -subjid SUBJID -o 42 | OUTPUT_DIR [-j] [-add_de] [-n NIDM_FILE] 43 | [-forcenidm] 44 | 45 | This program will load in JSON output from FSL's FAST/FIRST 46 | segmentation tool, augment the FSL anatomical region designations with common data element 47 | anatomical designations, and save the statistics + region designations out as 48 | NIDM serializations (i.e. TURTLE, JSON-LD RDF) 49 | 50 | options: 51 | -h, --help show this help message and exit 52 | -d DATA_FILE, --data_file DATA_FILE 53 | Path to FSL FIRST/FAST JSON data file 54 | -f SEGFILE, --seg_file SEGFILE 55 | Path or URL to a specific FSL JSON stats file. Note, 56 | currently this is tested on ReproNim data 57 | -subjid SUBJID, --subjid SUBJID 58 | If a path to a URL or a stats file is supplied via the 59 | -f/--seg_file parameters then -subjid parameter must 60 | be set with the subject identifier to be used in the 61 | NIDM files 62 | -o OUTPUT_DIR, --output OUTPUT_DIR 63 | Output filename with full path 64 | -j, --jsonld If flag set then NIDM file will be written as JSONLD 65 | instead of TURTLE 66 | -add_de, --add_de If flag set then data element data dictionary will be 67 | added to nidm file else it will be written to a separate 68 | file as fsl_cde.ttl in the output directory (or same 69 | directory as nidm file if the -n parameter is used). 70 | -n NIDM_FILE, --nidm NIDM_FILE 71 | Optional NIDM file to add segmentation data to. 72 | -forcenidm, --forcenidm 73 | If adding to NIDM file this parameter forces the data 74 | to be added even if the participant doesn't currently 75 | exist in the NIDM file.
76 | ``` 77 | -------------------------------------------------------------------------------- /fsl_seg_to_nidm/fslutils.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | import json 3 | from pathlib import Path 4 | import os 5 | 6 | # from .fsutils import hemiless 7 | 8 | FSL = namedtuple("FSL", ["structure", "hemi", "measure", "unit"]) 9 | cde_file = Path(os.path.dirname(__file__)) / "mapping_data" / "fsl-cde.json" 10 | map_file = Path(os.path.dirname(__file__)) / "mapping_data" / "fslmap.json" 11 | 12 | def hemiless(key): 13 | return ( 14 | key.replace("-lh-", "-") 15 | .replace("-rh-", "-") 16 | .replace("_lh_", "-") 17 | .replace("_rh_", "-") 18 | .replace("rh", "") 19 | .replace("lh", "") 20 | .replace("Left-", "") 21 | .replace("Right-", "") 22 | ) 23 | 24 | 25 | def read_fsl_stats(stat_file): 26 | with open(stat_file, "r") as fp: 27 | data = json.load(fp) 28 | 29 | with open(cde_file, "r") as fp: 30 | fsl_cde = json.load(fp) 31 | 32 | measures = [] 33 | for key, value in data.items(): 34 | voxkey = FSL( 35 | structure=key, 36 | hemi="Left" if "Left" in key else "Right" if "Right" in key else None, 37 | measure="NVoxels", 38 | unit="voxel", 39 | ) 40 | volkey = FSL(structure=key, hemi=voxkey.hemi, measure="Volume", unit="mm^3") 41 | if str(voxkey) in fsl_cde: 42 | measures.append((f'{fsl_cde[str(voxkey)]["id"]}', str(int(value[0])))) 43 | else: 44 | raise ValueError(f"Key {voxkey} not found in FSL data elements file") 45 | if str(volkey) in fsl_cde: 46 | measures.append((f'{fsl_cde[str(volkey)]["id"]}', str(value[1]))) 47 | else: 48 | raise ValueError(f"Key {volkey} not found in FSL data elements file") 49 | return measures 50 | 51 | 52 | def map_fsl_cdes(): 53 | """Update FSL to ReproNim mapping information 54 | """ 55 | 56 | with open(map_file, "r") as fp: 57 | fsl_map = json.load(fp) 58 | 59 | with open(cde_file, "r") as fp: 60 | fsl_cde = json.load(fp) 61 | 62 | mmap = fsl_map["Measures"] 63 | smap = fsl_map["Structures"] 64 | for key in fsl_cde: 65 | if key == "count": 66 | continue 67 | key_tuple = eval(key) 68 | # Deal with structures 69 | hkey = hemiless(key_tuple.structure) 70 | if hkey in smap: 71 | if smap[hkey]["isAbout"] is not None and ( 72 | "UNKNOWN" not in smap[hkey]["isAbout"] 73 | and "CUSTOM" not in smap[hkey]["isAbout"] 74 | ): 75 | fsl_cde[key]["isAbout"] = smap[hkey]["isAbout"] 76 | if mmap[key_tuple.measure]["measureOf"] is not None: 77 | fsl_cde[key].update(**mmap[key_tuple.measure]) 78 | 79 | with open(map_file, "w") as fp: 80 | json.dump(fsl_map, fp, sort_keys=True, indent=2) 81 | fp.write("\n") 82 | 83 | with open(cde_file, "w") as fp: 84 | json.dump(fsl_cde, fp, indent=2) 85 | fp.write("\n") 86 | 87 | return fsl_map, fsl_cde 88 | 89 | 90 | def create_cde_graph(restrict_to=None): 91 | """Create an RDFLIB graph with the FSL CDEs 92 | 93 | Any CDE that has a mapping will be mapped 94 | """ 95 | import rdflib as rl 96 | from nidm.core import Constants 97 | 98 | with open(cde_file, "r") as fp: 99 | fsl_cde = json.load(fp) 100 | 101 | fsl = Constants.FSL 102 | nidm = Constants.NIDM 103 | 104 | g = rl.Graph() 105 | g.bind("fsl", fsl) 106 | g.bind("nidm", nidm) 107 | 108 | # added by DBK to create subclass relationship 109 | g.add((fsl["DataElement"], rl.RDFS['subClassOf'], nidm['DataElement'])) 110 | 111 | for key, value in fsl_cde.items(): 112 | if key == "count": 113 | continue 114 | if restrict_to is not None: 115 | if value["id"] not in restrict_to: 116 | continue 117 | for 
subkey, item in value.items(): 118 | if subkey == "id": 119 | fslid = "fsl_" + item 120 | g.add((fsl[fslid], rl.RDF.type, fsl["DataElement"])) 121 | continue 122 | if item is None or "unknown" in str(item): 123 | continue 124 | if isinstance(item, str) and item.startswith("fsl:"): 125 | item = fsl[item.replace("fsl:", "")] 126 | if subkey in ["isAbout", "datumType", "measureOf"]: 127 | g.add((fsl[fslid], nidm[subkey], rl.URIRef(item))) 128 | elif subkey in ["hasUnit"]: 129 | g.add((fsl[fslid], nidm[subkey], rl.Literal(item))) 130 | # added by DBK to use rdfs:label 131 | elif subkey in ["label"]: 132 | g.add((fsl[fslid], rl.RDFS['label'], rl.Literal(item))) 133 | else: 134 | if isinstance(item, rl.URIRef): 135 | g.add((fsl[fslid], fsl[subkey], item)) 136 | else: 137 | g.add((fsl[fslid], fsl[subkey], rl.Literal(item))) 138 | key_tuple = eval(key) 139 | for subkey, item in key_tuple._asdict().items(): 140 | if item is None: 141 | continue 142 | if subkey == "hemi": 143 | g.add((fsl[fslid], nidm["hasLaterality"], rl.Literal(item))) 144 | else: 145 | g.add((fsl[fslid], fsl[subkey], rl.Literal(item))) 146 | return g 147 | 148 | 149 | def convert_stats_to_nidm(stats): 150 | """Convert a stats record into a NIDM entity 151 | 152 | Returns the entity and the prov document 153 | """ 154 | from nidm.core import Constants 155 | from nidm.experiment.Core import getUUID 156 | import prov 157 | 158 | fsl = prov.model.Namespace("fsl", str(Constants.FSL)) 159 | niiri = prov.model.Namespace("niiri", str(Constants.NIIRI)) 160 | nidm = prov.model.Namespace("nidm", "http://purl.org/nidash/nidm#") 161 | doc = prov.model.ProvDocument() 162 | e = doc.entity(identifier=niiri[getUUID()]) 163 | e.add_asserted_type(nidm["FSLStatsCollection"]) 164 | e.add_attributes( 165 | { 166 | fsl["fsl_" + val[0]]: prov.model.Literal( 167 | val[1], 168 | datatype=prov.model.XSD["float"] 169 | if "." 
in val[1] 170 | else prov.model.XSD["integer"], 171 | ) 172 | for val in stats 173 | } 174 | ) 175 | return e, doc 176 | -------------------------------------------------------------------------------- /fsl_seg_to_nidm/mapping_data/fsl-cde.json: -------------------------------------------------------------------------------- 1 | { 2 | "count": 36, 3 | "FSL(structure='Background', hemi=None, measure='NVoxels', unit='voxel')": { 4 | "id": "000001", 5 | "label": "Background (voxels)", 6 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 7 | "hasUnit": "voxel", 8 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 9 | }, 10 | "FSL(structure='Background', hemi=None, measure='Volume', unit='mm^3')": { 11 | "id": "000002", 12 | "label": "Background (mm^3)", 13 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 14 | "hasUnit": "mm^3", 15 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 16 | }, 17 | "FSL(structure='Left-Accumbens-area', hemi='Left', measure='NVoxels', unit='voxel')": { 18 | "id": "000003", 19 | "label": "Left-Accumbens-area (voxels)", 20 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001882", 21 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 22 | "hasUnit": "voxel", 23 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 24 | }, 25 | "FSL(structure='Left-Accumbens-area', hemi='Left', measure='Volume', unit='mm^3')": { 26 | "id": "000004", 27 | "label": "Left-Accumbens-area (mm^3)", 28 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001882", 29 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 30 | "hasUnit": "mm^3", 31 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 32 | }, 33 | "FSL(structure='Left-Amygdala', hemi='Left', measure='NVoxels', unit='voxel')": { 34 | "id": "000005", 35 | "label": "Left-Amygdala (voxels)", 36 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001876", 37 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 38 | "hasUnit": "voxel", 39 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 40 | }, 41 | "FSL(structure='Left-Amygdala', hemi='Left', measure='Volume', unit='mm^3')": { 42 | "id": "000006", 43 | "label": "Left-Amygdala (mm^3)", 44 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001876", 45 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 46 | "hasUnit": "mm^3", 47 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 48 | }, 49 | "FSL(structure='Left-Caudate', hemi='Left', measure='NVoxels', unit='voxel')": { 50 | "id": "000007", 51 | "label": "Left-Caudate (voxels)", 52 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001873", 53 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 54 | "hasUnit": "voxel", 55 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 56 | }, 57 | "FSL(structure='Left-Caudate', hemi='Left', measure='Volume', unit='mm^3')": { 58 | "id": "000008", 59 | "label": "Left-Caudate (mm^3)", 60 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001873", 61 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 62 | "hasUnit": "mm^3", 63 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 64 | }, 65 | "FSL(structure='Left-Hippocampus', hemi='Left', measure='NVoxels', unit='voxel')": { 66 | "id": "000009", 67 | "label": "Left-Hippocampus (voxels)", 68 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001954", 69 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 70 | "hasUnit": "voxel", 71 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 72 | }, 73 | 
"FSL(structure='Left-Hippocampus', hemi='Left', measure='Volume', unit='mm^3')": { 74 | "id": "000010", 75 | "label": "Left-Hippocampus (mm^3)", 76 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001954", 77 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 78 | "hasUnit": "mm^3", 79 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 80 | }, 81 | "FSL(structure='Left-Pallidum', hemi='Left', measure='NVoxels', unit='voxel')": { 82 | "id": "000011", 83 | "label": "Left-Pallidum (voxels)", 84 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0006514", 85 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 86 | "hasUnit": "voxel", 87 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 88 | }, 89 | "FSL(structure='Left-Pallidum', hemi='Left', measure='Volume', unit='mm^3')": { 90 | "id": "000012", 91 | "label": "Left-Pallidum (mm^3)", 92 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0006514", 93 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 94 | "hasUnit": "mm^3", 95 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 96 | }, 97 | "FSL(structure='Left-Putamen', hemi='Left', measure='NVoxels', unit='voxel')": { 98 | "id": "000013", 99 | "label": "Left-Putamen (voxels)", 100 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001874", 101 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 102 | "hasUnit": "voxel", 103 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 104 | }, 105 | "FSL(structure='Left-Putamen', hemi='Left', measure='Volume', unit='mm^3')": { 106 | "id": "000014", 107 | "label": "Left-Putamen (mm^3)", 108 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001874", 109 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 110 | "hasUnit": "mm^3", 111 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 112 | }, 113 | "FSL(structure='Left-Thalamus-Proper', hemi='Left', measure='NVoxels', unit='voxel')": { 114 | "id": "000015", 115 | "label": "Left-Thalamus-Proper (voxels)", 116 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001897", 117 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 118 | "hasUnit": "voxel", 119 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 120 | }, 121 | "FSL(structure='Left-Thalamus-Proper', hemi='Left', measure='Volume', unit='mm^3')": { 122 | "id": "000016", 123 | "label": "Left-Thalamus-Proper (mm^3)", 124 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001897", 125 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 126 | "hasUnit": "mm^3", 127 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 128 | }, 129 | "FSL(structure='Right-Accumbens-area', hemi='Right', measure='NVoxels', unit='voxel')": { 130 | "id": "000017", 131 | "label": "Right-Accumbens-area (voxels)", 132 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001882", 133 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 134 | "hasUnit": "voxel", 135 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 136 | }, 137 | "FSL(structure='Right-Accumbens-area', hemi='Right', measure='Volume', unit='mm^3')": { 138 | "id": "000018", 139 | "label": "Right-Accumbens-area (mm^3)", 140 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001882", 141 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 142 | "hasUnit": "mm^3", 143 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 144 | }, 145 | "FSL(structure='Right-Amygdala', hemi='Right', measure='NVoxels', unit='voxel')": { 146 | "id": "000019", 147 | "label": "Right-Amygdala (voxels)", 148 
| "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001876", 149 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 150 | "hasUnit": "voxel", 151 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 152 | }, 153 | "FSL(structure='Right-Amygdala', hemi='Right', measure='Volume', unit='mm^3')": { 154 | "id": "000020", 155 | "label": "Right-Amygdala (mm^3)", 156 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001876", 157 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 158 | "hasUnit": "mm^3", 159 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 160 | }, 161 | "FSL(structure='Right-Caudate', hemi='Right', measure='NVoxels', unit='voxel')": { 162 | "id": "000021", 163 | "label": "Right-Caudate (voxels)", 164 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001873", 165 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 166 | "hasUnit": "voxel", 167 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 168 | }, 169 | "FSL(structure='Right-Caudate', hemi='Right', measure='Volume', unit='mm^3')": { 170 | "id": "000022", 171 | "label": "Right-Caudate (mm^3)", 172 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001873", 173 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 174 | "hasUnit": "mm^3", 175 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 176 | }, 177 | "FSL(structure='Right-Hippocampus', hemi='Right', measure='NVoxels', unit='voxel')": { 178 | "id": "000023", 179 | "label": "Right-Hippocampus (voxels)", 180 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001954", 181 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 182 | "hasUnit": "voxel", 183 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 184 | }, 185 | "FSL(structure='Right-Hippocampus', hemi='Right', measure='Volume', unit='mm^3')": { 186 | "id": "000024", 187 | "label": "Right-Hippocampus (mm^3)", 188 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001954", 189 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 190 | "hasUnit": "mm^3", 191 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 192 | }, 193 | "FSL(structure='Right-Pallidum', hemi='Right', measure='NVoxels', unit='voxel')": { 194 | "id": "000025", 195 | "label": "Right-Pallidum (voxels)", 196 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0006514", 197 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 198 | "hasUnit": "voxel", 199 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 200 | }, 201 | "FSL(structure='Right-Pallidum', hemi='Right', measure='Volume', unit='mm^3')": { 202 | "id": "000026", 203 | "label": "Right-Pallidum (mm^3)", 204 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0006514", 205 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 206 | "hasUnit": "mm^3", 207 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 208 | }, 209 | "FSL(structure='Right-Putamen', hemi='Right', measure='NVoxels', unit='voxel')": { 210 | "id": "000027", 211 | "label": "Right-Putamen (voxels)", 212 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001874", 213 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 214 | "hasUnit": "voxel", 215 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 216 | }, 217 | "FSL(structure='Right-Putamen', hemi='Right', measure='Volume', unit='mm^3')": { 218 | "id": "000028", 219 | "label": "Right-Putamen (mm^3)", 220 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001874", 221 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 222 | "hasUnit": "mm^3", 223 
| "measureOf": "http://uri.interlex.org/base/ilx_0112559" 224 | }, 225 | "FSL(structure='Right-Thalamus-Proper', hemi='Right', measure='NVoxels', unit='voxel')": { 226 | "id": "000029", 227 | "label": "Right-Thalamus-Proper (voxels)", 228 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001897", 229 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 230 | "hasUnit": "voxel", 231 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 232 | }, 233 | "FSL(structure='Right-Thalamus-Proper', hemi='Right', measure='Volume', unit='mm^3')": { 234 | "id": "000030", 235 | "label": "Right-Thalamus-Proper (mm^3)", 236 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001897", 237 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 238 | "hasUnit": "mm^3", 239 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 240 | }, 241 | "FSL(structure='csf', hemi=None, measure='NVoxels', unit='voxel')": { 242 | "id": "000031", 243 | "label": "csf (voxels)", 244 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001359", 245 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 246 | "hasUnit": "voxel", 247 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 248 | }, 249 | "FSL(structure='csf', hemi=None, measure='Volume', unit='mm^3')": { 250 | "id": "000032", 251 | "label": "csf (mm^3)", 252 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0001359", 253 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 254 | "hasUnit": "mm^3", 255 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 256 | }, 257 | "FSL(structure='gray', hemi=None, measure='NVoxels', unit='voxel')": { 258 | "id": "000033", 259 | "label": "gray (voxels)", 260 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0000956", 261 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 262 | "hasUnit": "voxel", 263 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 264 | }, 265 | "FSL(structure='gray', hemi=None, measure='Volume', unit='mm^3')": { 266 | "id": "000034", 267 | "label": "gray (mm^3)", 268 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0000956", 269 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 270 | "hasUnit": "mm^3", 271 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 272 | }, 273 | "FSL(structure='white', hemi=None, measure='NVoxels', unit='voxel')": { 274 | "id": "000035", 275 | "label": "white (voxels)", 276 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0002437", 277 | "datumType": "http://uri.interlex.org/base/ilx_0102597", 278 | "hasUnit": "voxel", 279 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 280 | }, 281 | "FSL(structure='white', hemi=None, measure='Volume', unit='mm^3')": { 282 | "id": "000036", 283 | "label": "white (mm^3)", 284 | "isAbout": "http://purl.obolibrary.org/obo/UBERON_0002437", 285 | "datumType": "http://uri.interlex.org/base/ilx_0738276", 286 | "hasUnit": "mm^3", 287 | "measureOf": "http://uri.interlex.org/base/ilx_0112559" 288 | } 289 | } 290 | -------------------------------------------------------------------------------- /fsl_seg_to_nidm/mapping_data/fsl_cde.ttl: -------------------------------------------------------------------------------- 1 | @prefix fsl: . 2 | @prefix nidm: . 3 | @prefix rdf: . 4 | @prefix rdfs: . 5 | @prefix xml: . 6 | @prefix xsd: . 7 | 8 | fsl:fsl_000001 a fsl:DataElement ; 9 | rdfs:label "Background (voxels)" ; 10 | fsl:measure "NVoxels" ; 11 | fsl:structure "Background" ; 12 | fsl:unit "voxel" ; 13 | nidm:datumType ; 14 | nidm:hasUnit "voxel" ; 15 | nidm:measureOf . 
16 | 17 | fsl:fsl_000002 a fsl:DataElement ; 18 | rdfs:label "Background (mm^3)" ; 19 | fsl:measure "Volume" ; 20 | fsl:structure "Background" ; 21 | fsl:unit "mm^3" ; 22 | nidm:datumType ; 23 | nidm:hasUnit "mm^3" ; 24 | nidm:measureOf . 25 | 26 | fsl:fsl_000003 a fsl:DataElement ; 27 | rdfs:label "Left-Accumbens-area (voxels)" ; 28 | fsl:measure "NVoxels" ; 29 | fsl:structure "Left-Accumbens-area" ; 30 | fsl:unit "voxel" ; 31 | nidm:datumType ; 32 | nidm:hasLaterality "Left" ; 33 | nidm:hasUnit "voxel" ; 34 | nidm:isAbout ; 35 | nidm:measureOf . 36 | 37 | fsl:fsl_000004 a fsl:DataElement ; 38 | rdfs:label "Left-Accumbens-area (mm^3)" ; 39 | fsl:measure "Volume" ; 40 | fsl:structure "Left-Accumbens-area" ; 41 | fsl:unit "mm^3" ; 42 | nidm:datumType ; 43 | nidm:hasLaterality "Left" ; 44 | nidm:hasUnit "mm^3" ; 45 | nidm:isAbout ; 46 | nidm:measureOf . 47 | 48 | fsl:fsl_000005 a fsl:DataElement ; 49 | rdfs:label "Left-Amygdala (voxels)" ; 50 | fsl:measure "NVoxels" ; 51 | fsl:structure "Left-Amygdala" ; 52 | fsl:unit "voxel" ; 53 | nidm:datumType ; 54 | nidm:hasLaterality "Left" ; 55 | nidm:hasUnit "voxel" ; 56 | nidm:isAbout ; 57 | nidm:measureOf . 58 | 59 | fsl:fsl_000006 a fsl:DataElement ; 60 | rdfs:label "Left-Amygdala (mm^3)" ; 61 | fsl:measure "Volume" ; 62 | fsl:structure "Left-Amygdala" ; 63 | fsl:unit "mm^3" ; 64 | nidm:datumType ; 65 | nidm:hasLaterality "Left" ; 66 | nidm:hasUnit "mm^3" ; 67 | nidm:isAbout ; 68 | nidm:measureOf . 69 | 70 | fsl:fsl_000007 a fsl:DataElement ; 71 | rdfs:label "Left-Caudate (voxels)" ; 72 | fsl:measure "NVoxels" ; 73 | fsl:structure "Left-Caudate" ; 74 | fsl:unit "voxel" ; 75 | nidm:datumType ; 76 | nidm:hasLaterality "Left" ; 77 | nidm:hasUnit "voxel" ; 78 | nidm:isAbout ; 79 | nidm:measureOf . 80 | 81 | fsl:fsl_000008 a fsl:DataElement ; 82 | rdfs:label "Left-Caudate (mm^3)" ; 83 | fsl:measure "Volume" ; 84 | fsl:structure "Left-Caudate" ; 85 | fsl:unit "mm^3" ; 86 | nidm:datumType ; 87 | nidm:hasLaterality "Left" ; 88 | nidm:hasUnit "mm^3" ; 89 | nidm:isAbout ; 90 | nidm:measureOf . 91 | 92 | fsl:fsl_000009 a fsl:DataElement ; 93 | rdfs:label "Left-Hippocampus (voxels)" ; 94 | fsl:measure "NVoxels" ; 95 | fsl:structure "Left-Hippocampus" ; 96 | fsl:unit "voxel" ; 97 | nidm:datumType ; 98 | nidm:hasLaterality "Left" ; 99 | nidm:hasUnit "voxel" ; 100 | nidm:isAbout ; 101 | nidm:measureOf . 102 | 103 | fsl:fsl_000010 a fsl:DataElement ; 104 | rdfs:label "Left-Hippocampus (mm^3)" ; 105 | fsl:measure "Volume" ; 106 | fsl:structure "Left-Hippocampus" ; 107 | fsl:unit "mm^3" ; 108 | nidm:datumType ; 109 | nidm:hasLaterality "Left" ; 110 | nidm:hasUnit "mm^3" ; 111 | nidm:isAbout ; 112 | nidm:measureOf . 113 | 114 | fsl:fsl_000011 a fsl:DataElement ; 115 | rdfs:label "Left-Pallidum (voxels)" ; 116 | fsl:measure "NVoxels" ; 117 | fsl:structure "Left-Pallidum" ; 118 | fsl:unit "voxel" ; 119 | nidm:datumType ; 120 | nidm:hasLaterality "Left" ; 121 | nidm:hasUnit "voxel" ; 122 | nidm:isAbout ; 123 | nidm:measureOf . 124 | 125 | fsl:fsl_000012 a fsl:DataElement ; 126 | rdfs:label "Left-Pallidum (mm^3)" ; 127 | fsl:measure "Volume" ; 128 | fsl:structure "Left-Pallidum" ; 129 | fsl:unit "mm^3" ; 130 | nidm:datumType ; 131 | nidm:hasLaterality "Left" ; 132 | nidm:hasUnit "mm^3" ; 133 | nidm:isAbout ; 134 | nidm:measureOf . 
135 | 136 | fsl:fsl_000013 a fsl:DataElement ; 137 | rdfs:label "Left-Putamen (voxels)" ; 138 | fsl:measure "NVoxels" ; 139 | fsl:structure "Left-Putamen" ; 140 | fsl:unit "voxel" ; 141 | nidm:datumType ; 142 | nidm:hasLaterality "Left" ; 143 | nidm:hasUnit "voxel" ; 144 | nidm:isAbout ; 145 | nidm:measureOf . 146 | 147 | fsl:fsl_000014 a fsl:DataElement ; 148 | rdfs:label "Left-Putamen (mm^3)" ; 149 | fsl:measure "Volume" ; 150 | fsl:structure "Left-Putamen" ; 151 | fsl:unit "mm^3" ; 152 | nidm:datumType ; 153 | nidm:hasLaterality "Left" ; 154 | nidm:hasUnit "mm^3" ; 155 | nidm:isAbout ; 156 | nidm:measureOf . 157 | 158 | fsl:fsl_000015 a fsl:DataElement ; 159 | rdfs:label "Left-Thalamus-Proper (voxels)" ; 160 | fsl:measure "NVoxels" ; 161 | fsl:structure "Left-Thalamus-Proper" ; 162 | fsl:unit "voxel" ; 163 | nidm:datumType ; 164 | nidm:hasLaterality "Left" ; 165 | nidm:hasUnit "voxel" ; 166 | nidm:isAbout ; 167 | nidm:measureOf . 168 | 169 | fsl:fsl_000016 a fsl:DataElement ; 170 | rdfs:label "Left-Thalamus-Proper (mm^3)" ; 171 | fsl:measure "Volume" ; 172 | fsl:structure "Left-Thalamus-Proper" ; 173 | fsl:unit "mm^3" ; 174 | nidm:datumType ; 175 | nidm:hasLaterality "Left" ; 176 | nidm:hasUnit "mm^3" ; 177 | nidm:isAbout ; 178 | nidm:measureOf . 179 | 180 | fsl:fsl_000017 a fsl:DataElement ; 181 | rdfs:label "Right-Accumbens-area (voxels)" ; 182 | fsl:measure "NVoxels" ; 183 | fsl:structure "Right-Accumbens-area" ; 184 | fsl:unit "voxel" ; 185 | nidm:datumType ; 186 | nidm:hasLaterality "Right" ; 187 | nidm:hasUnit "voxel" ; 188 | nidm:isAbout ; 189 | nidm:measureOf . 190 | 191 | fsl:fsl_000018 a fsl:DataElement ; 192 | rdfs:label "Right-Accumbens-area (mm^3)" ; 193 | fsl:measure "Volume" ; 194 | fsl:structure "Right-Accumbens-area" ; 195 | fsl:unit "mm^3" ; 196 | nidm:datumType ; 197 | nidm:hasLaterality "Right" ; 198 | nidm:hasUnit "mm^3" ; 199 | nidm:isAbout ; 200 | nidm:measureOf . 201 | 202 | fsl:fsl_000019 a fsl:DataElement ; 203 | rdfs:label "Right-Amygdala (voxels)" ; 204 | fsl:measure "NVoxels" ; 205 | fsl:structure "Right-Amygdala" ; 206 | fsl:unit "voxel" ; 207 | nidm:datumType ; 208 | nidm:hasLaterality "Right" ; 209 | nidm:hasUnit "voxel" ; 210 | nidm:isAbout ; 211 | nidm:measureOf . 212 | 213 | fsl:fsl_000020 a fsl:DataElement ; 214 | rdfs:label "Right-Amygdala (mm^3)" ; 215 | fsl:measure "Volume" ; 216 | fsl:structure "Right-Amygdala" ; 217 | fsl:unit "mm^3" ; 218 | nidm:datumType ; 219 | nidm:hasLaterality "Right" ; 220 | nidm:hasUnit "mm^3" ; 221 | nidm:isAbout ; 222 | nidm:measureOf . 223 | 224 | fsl:fsl_000021 a fsl:DataElement ; 225 | rdfs:label "Right-Caudate (voxels)" ; 226 | fsl:measure "NVoxels" ; 227 | fsl:structure "Right-Caudate" ; 228 | fsl:unit "voxel" ; 229 | nidm:datumType ; 230 | nidm:hasLaterality "Right" ; 231 | nidm:hasUnit "voxel" ; 232 | nidm:isAbout ; 233 | nidm:measureOf . 234 | 235 | fsl:fsl_000022 a fsl:DataElement ; 236 | rdfs:label "Right-Caudate (mm^3)" ; 237 | fsl:measure "Volume" ; 238 | fsl:structure "Right-Caudate" ; 239 | fsl:unit "mm^3" ; 240 | nidm:datumType ; 241 | nidm:hasLaterality "Right" ; 242 | nidm:hasUnit "mm^3" ; 243 | nidm:isAbout ; 244 | nidm:measureOf . 245 | 246 | fsl:fsl_000023 a fsl:DataElement ; 247 | rdfs:label "Right-Hippocampus (voxels)" ; 248 | fsl:measure "NVoxels" ; 249 | fsl:structure "Right-Hippocampus" ; 250 | fsl:unit "voxel" ; 251 | nidm:datumType ; 252 | nidm:hasLaterality "Right" ; 253 | nidm:hasUnit "voxel" ; 254 | nidm:isAbout ; 255 | nidm:measureOf . 
256 | 257 | fsl:fsl_000024 a fsl:DataElement ; 258 | rdfs:label "Right-Hippocampus (mm^3)" ; 259 | fsl:measure "Volume" ; 260 | fsl:structure "Right-Hippocampus" ; 261 | fsl:unit "mm^3" ; 262 | nidm:datumType ; 263 | nidm:hasLaterality "Right" ; 264 | nidm:hasUnit "mm^3" ; 265 | nidm:isAbout ; 266 | nidm:measureOf . 267 | 268 | fsl:fsl_000025 a fsl:DataElement ; 269 | rdfs:label "Right-Pallidum (voxels)" ; 270 | fsl:measure "NVoxels" ; 271 | fsl:structure "Right-Pallidum" ; 272 | fsl:unit "voxel" ; 273 | nidm:datumType ; 274 | nidm:hasLaterality "Right" ; 275 | nidm:hasUnit "voxel" ; 276 | nidm:isAbout ; 277 | nidm:measureOf . 278 | 279 | fsl:fsl_000026 a fsl:DataElement ; 280 | rdfs:label "Right-Pallidum (mm^3)" ; 281 | fsl:measure "Volume" ; 282 | fsl:structure "Right-Pallidum" ; 283 | fsl:unit "mm^3" ; 284 | nidm:datumType ; 285 | nidm:hasLaterality "Right" ; 286 | nidm:hasUnit "mm^3" ; 287 | nidm:isAbout ; 288 | nidm:measureOf . 289 | 290 | fsl:fsl_000027 a fsl:DataElement ; 291 | rdfs:label "Right-Putamen (voxels)" ; 292 | fsl:measure "NVoxels" ; 293 | fsl:structure "Right-Putamen" ; 294 | fsl:unit "voxel" ; 295 | nidm:datumType ; 296 | nidm:hasLaterality "Right" ; 297 | nidm:hasUnit "voxel" ; 298 | nidm:isAbout ; 299 | nidm:measureOf . 300 | 301 | fsl:fsl_000028 a fsl:DataElement ; 302 | rdfs:label "Right-Putamen (mm^3)" ; 303 | fsl:measure "Volume" ; 304 | fsl:structure "Right-Putamen" ; 305 | fsl:unit "mm^3" ; 306 | nidm:datumType ; 307 | nidm:hasLaterality "Right" ; 308 | nidm:hasUnit "mm^3" ; 309 | nidm:isAbout ; 310 | nidm:measureOf . 311 | 312 | fsl:fsl_000029 a fsl:DataElement ; 313 | rdfs:label "Right-Thalamus-Proper (voxels)" ; 314 | fsl:measure "NVoxels" ; 315 | fsl:structure "Right-Thalamus-Proper" ; 316 | fsl:unit "voxel" ; 317 | nidm:datumType ; 318 | nidm:hasLaterality "Right" ; 319 | nidm:hasUnit "voxel" ; 320 | nidm:isAbout ; 321 | nidm:measureOf . 322 | 323 | fsl:fsl_000030 a fsl:DataElement ; 324 | rdfs:label "Right-Thalamus-Proper (mm^3)" ; 325 | fsl:measure "Volume" ; 326 | fsl:structure "Right-Thalamus-Proper" ; 327 | fsl:unit "mm^3" ; 328 | nidm:datumType ; 329 | nidm:hasLaterality "Right" ; 330 | nidm:hasUnit "mm^3" ; 331 | nidm:isAbout ; 332 | nidm:measureOf . 333 | 334 | fsl:fsl_000031 a fsl:DataElement ; 335 | rdfs:label "csf (voxels)" ; 336 | fsl:measure "NVoxels" ; 337 | fsl:structure "csf" ; 338 | fsl:unit "voxel" ; 339 | nidm:datumType ; 340 | nidm:hasUnit "voxel" ; 341 | nidm:isAbout ; 342 | nidm:measureOf . 343 | 344 | fsl:fsl_000032 a fsl:DataElement ; 345 | rdfs:label "csf (mm^3)" ; 346 | fsl:measure "Volume" ; 347 | fsl:structure "csf" ; 348 | fsl:unit "mm^3" ; 349 | nidm:datumType ; 350 | nidm:hasUnit "mm^3" ; 351 | nidm:isAbout ; 352 | nidm:measureOf . 353 | 354 | fsl:fsl_000033 a fsl:DataElement ; 355 | rdfs:label "gray (voxels)" ; 356 | fsl:measure "NVoxels" ; 357 | fsl:structure "gray" ; 358 | fsl:unit "voxel" ; 359 | nidm:datumType ; 360 | nidm:hasUnit "voxel" ; 361 | nidm:isAbout ; 362 | nidm:measureOf . 363 | 364 | fsl:fsl_000034 a fsl:DataElement ; 365 | rdfs:label "gray (mm^3)" ; 366 | fsl:measure "Volume" ; 367 | fsl:structure "gray" ; 368 | fsl:unit "mm^3" ; 369 | nidm:datumType ; 370 | nidm:hasUnit "mm^3" ; 371 | nidm:isAbout ; 372 | nidm:measureOf . 373 | 374 | fsl:fsl_000035 a fsl:DataElement ; 375 | rdfs:label "white (voxels)" ; 376 | fsl:measure "NVoxels" ; 377 | fsl:structure "white" ; 378 | fsl:unit "voxel" ; 379 | nidm:datumType ; 380 | nidm:hasUnit "voxel" ; 381 | nidm:isAbout ; 382 | nidm:measureOf . 
383 | 384 | fsl:fsl_000036 a fsl:DataElement ; 385 | rdfs:label "white (mm^3)" ; 386 | fsl:measure "Volume" ; 387 | fsl:structure "white" ; 388 | fsl:unit "mm^3" ; 389 | nidm:datumType ; 390 | nidm:hasUnit "mm^3" ; 391 | nidm:isAbout ; 392 | nidm:measureOf . 393 | 394 | fsl:DataElement rdfs:subClassOf nidm:DataElement . 395 | 396 | -------------------------------------------------------------------------------- /fsl_seg_to_nidm/fsl_seg_to_nidm.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | #!/usr/bin/env python 3 | #************************************************************************************** 4 | #************************************************************************************** 5 | # fsl_seg_to_nidm.py 6 | # License: GPL 7 | #************************************************************************************** 8 | #************************************************************************************** 9 | # Date: June 6, 2019 Coded by: Brainhack'ers 10 | # Filename: fsl_seg_to_nidm.py 11 | # 12 | # Program description: This program will load in JSON output from FSL's FAST/FIRST 13 | # segmentation tool, augment the FSL anatomical region designations with common data element 14 | # anatomical designations, and save the statistics + region designations out as 15 | # NIDM serializations (i.e. TURTLE, JSON-LD RDF) 16 | # 17 | # 18 | #************************************************************************************** 19 | # Development environment: Python - PyCharm IDE 20 | # 21 | #************************************************************************************** 22 | # System requirements: Python 3.X 23 | # Libraries: PyNIDM, 24 | #************************************************************************************** 25 | # Start date: June 6, 2019 26 | # Update history: 27 | # DATE MODIFICATION Who 28 | # 29 | # 30 | #************************************************************************************** 31 | # Programmer comments: 32 | # 33 | # 34 | #************************************************************************************** 35 | #************************************************************************************** 36 | 37 | 38 | from nidm.core import Constants 39 | from nidm.experiment.Core import getUUID 40 | from nidm.experiment.Core import Core 41 | from prov.model import QualifiedName,PROV_ROLE, ProvDocument, PROV_ATTR_USED_ENTITY,PROV_ACTIVITY,PROV_AGENT,PROV_ROLE 42 | 43 | from prov.model import Namespace as provNamespace 44 | 45 | # standard library 46 | from pickle import dumps 47 | import os 48 | from os.path import join,basename,splitext,isfile,dirname 49 | from socket import getfqdn 50 | import glob 51 | 52 | import prov.model as prov 53 | import json 54 | import urllib.request as ur 55 | from urllib.parse import urlparse 56 | import re 57 | 58 | from rdflib import Graph, RDF, URIRef, util, term,Namespace,Literal,BNode,XSD 59 | from fsl_seg_to_nidm.fslutils import read_fsl_stats, convert_stats_to_nidm, create_cde_graph 60 | from io import StringIO 61 | 62 | import tempfile 63 | 64 | 65 | def url_validator(url): 66 | ''' 67 | Tests whether url is a valide url 68 | :param url: url to test 69 | :return: True for valid url else False 70 | ''' 71 | try: 72 | result = urlparse(url) 73 | return all([result.scheme, result.netloc, result.path]) 74 | 75 | except: 76 | return False 77 | 78 | def add_seg_data(nidmdoc,subjid,fs_stats_entity_id, add_to_nidm=False, forceagent=False): 79 | ''' 80 
| WIP: this function creates a NIDM file of brain volume data and if user supplied a NIDM-E file it will add brain volumes to the 81 | NIDM-E file for the matching subject ID 82 | :param nidmdoc: 83 | :param header: 84 | :param add_to_nidm: 85 | :return: 86 | ''' 87 | 88 | 89 | #for each of the header items create a dictionary where namespaces are freesurfer 90 | niiri=Namespace("http://iri.nidash.org/") 91 | nidmdoc.bind("niiri",niiri) 92 | # add namespace for subject id 93 | ndar = Namespace(Constants.NDAR) 94 | nidmdoc.bind("ndar",ndar) 95 | dct = Namespace(Constants.DCT) 96 | nidmdoc.bind("dct",dct) 97 | sio = Namespace(Constants.SIO) 98 | nidmdoc.bind("sio",sio) 99 | 100 | 101 | software_activity = niiri[getUUID()] 102 | nidmdoc.add((software_activity,RDF.type,Constants.PROV['Activity'])) 103 | nidmdoc.add((software_activity,Constants.DCT["description"],Literal("FSL FAST/FIRST segmentation statistics"))) 104 | fs = Namespace(Constants.FSL) 105 | 106 | 107 | #create software agent and associate with software activity 108 | #search and see if a software agent exists for this software, if so use it, if not create it 109 | for software_uid in nidmdoc.subjects(predicate=Constants.NIDM_NEUROIMAGING_ANALYSIS_SOFTWARE,object=URIRef(Constants.FSL) ): 110 | software_agent = software_uid 111 | break 112 | else: 113 | software_agent = niiri[getUUID()] 114 | 115 | nidmdoc.add((software_agent,RDF.type,Constants.PROV['Agent'])) 116 | neuro_soft=Namespace(Constants.NIDM_NEUROIMAGING_ANALYSIS_SOFTWARE) 117 | nidmdoc.add((software_agent,Constants.NIDM_NEUROIMAGING_ANALYSIS_SOFTWARE,URIRef(Constants.FSL))) 118 | nidmdoc.add((software_agent,RDF.type,Constants.PROV["SoftwareAgent"])) 119 | association_bnode = BNode() 120 | nidmdoc.add((software_activity,Constants.PROV['qualifiedAssociation'],association_bnode)) 121 | nidmdoc.add((association_bnode,RDF.type,Constants.PROV['Association'])) 122 | nidmdoc.add((association_bnode,Constants.PROV['hadRole'],Constants.NIDM_NEUROIMAGING_ANALYSIS_SOFTWARE)) 123 | nidmdoc.add((association_bnode,Constants.PROV['agent'],software_agent)) 124 | 125 | if not add_to_nidm: 126 | 127 | # create a new agent for subjid 128 | participant_agent = niiri[getUUID()] 129 | nidmdoc.add((participant_agent,RDF.type,Constants.PROV['Agent'])) 130 | nidmdoc.add((participant_agent,URIRef(Constants.NIDM_SUBJECTID.uri),Literal(subjid, datatype=XSD.string))) 131 | 132 | else: 133 | # query to get agent id for subjid 134 | #find subject ids and sessions in NIDM document 135 | query = """ 136 | PREFIX ndar: 137 | PREFIX rdf: 138 | PREFIX prov: 139 | PREFIX xsd: 140 | 141 | select distinct ?agent 142 | where { 143 | 144 | ?agent rdf:type prov:Agent ; 145 | ndar:src_subject_id \"%s\"^^xsd:string . 146 | 147 | }""" % subjid 148 | #print(query) 149 | qres = nidmdoc.query(query) 150 | if len(qres) == 0: 151 | print('Subject ID (%s) was not found in existing NIDM file...' %subjid) 152 | ############################################################################## 153 | # added to account for issues with some BIDS datasets that have leading 00's in subject directories 154 | # but not in participants.tsv files. 155 | if (len(subjid) - len(subjid.lstrip('0'))) != 0: 156 | print('Trying to find subject ID without leading zeros....') 157 | query = """ 158 | PREFIX ndar: 159 | PREFIX rdf: 160 | PREFIX prov: 161 | PREFIX xsd: 162 | 163 | select distinct ?agent 164 | where { 165 | 166 | ?agent rdf:type prov:Agent ; 167 | ndar:src_subject_id \"%s\"^^xsd:string . 
168 | 169 | }""" % subjid.lstrip('0') 170 | #print(query) 171 | qres2 = nidmdoc.query(query) 172 | if len(qres2) == 0: 173 | print("Still can't find subject id after stripping leading zeros...") 174 | else: 175 | for row in qres2: 176 | print('Found subject ID after stripping zeros: %s in NIDM file (agent: %s)' %(subjid.lstrip('0'),row[0])) 177 | participant_agent = row[0] 178 | ####################################################################################### 179 | if (forceagent is not False) and (len(subjid) == len(subjid.lstrip('0')) or len(qres2) == 0): # subject still not found after any leading-zero retry 180 | print('Explicitly creating agent in existing NIDM file...') 181 | participant_agent = niiri[getUUID()] 182 | nidmdoc.add((participant_agent,RDF.type,Constants.PROV['Agent'])) 183 | nidmdoc.add((participant_agent,URIRef(Constants.NIDM_SUBJECTID.uri),Literal(subjid, datatype=XSD.string))) 184 | elif (forceagent is False) and (len(subjid) == len(subjid.lstrip('0')) or len(qres2) == 0): 185 | print('Not explicitly adding agent to NIDM file, no output written') 186 | exit() 187 | else: 188 | for row in qres: 189 | print('Found subject ID: %s in NIDM file (agent: %s)' %(subjid,row[0])) 190 | participant_agent = row[0] 191 | 192 | #create a blank node and qualified association with prov:Agent for participant 193 | association_bnode = BNode() 194 | nidmdoc.add((software_activity,Constants.PROV['qualifiedAssociation'],association_bnode)) 195 | nidmdoc.add((association_bnode,RDF.type,Constants.PROV['Association'])) 196 | nidmdoc.add((association_bnode,Constants.PROV['hadRole'],Constants.SIO["Subject"])) 197 | nidmdoc.add((association_bnode,Constants.PROV['agent'],participant_agent)) 198 | 199 | # add association between FSLStatsCollection and computation activity 200 | nidmdoc.add((URIRef(fs_stats_entity_id.uri),Constants.PROV['wasGeneratedBy'],software_activity)) 201 | 202 | # get project uuid from NIDM doc and make association with software_activity 203 | query = """ 204 | prefix nidm: <http://purl.org/nidash/nidm#> 205 | PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> 206 | 207 | select distinct ?project 208 | where { 209 | 210 | ?project rdf:type nidm:Project . 211 | 212 | }""" 213 | 214 | qres = nidmdoc.query(query) 215 | for row in qres: 216 | nidmdoc.add((software_activity, Constants.DCT["isPartOf"], row['project'])) 217 | 218 | def test_connection(remote=False): 219 | """helper function to test whether an internet connection exists. 220 | Used for preventing timeout errors when scraping interlex.""" 221 | import socket 222 | remote_server = 'www.google.com' if not remote else remote # TODO: maybe improve for China 223 | try: 224 | # does the host name resolve? 225 | host = socket.gethostbyname(remote_server) 226 | # can we establish a connection to the host name? 227 | con = socket.create_connection((host, 80), 2) 228 | return True 229 | except: 230 | print("Can't connect to a server...") 231 | pass 232 | return False 233 | 234 | 235 | 236 | def main(): 237 | 238 | import argparse 239 | parser = argparse.ArgumentParser(prog='fsl_seg_to_nidm.py', 240 | description='''This program will load in JSON output from FSL's FAST/FIRST 241 | segmentation tool, augment the FSL anatomical region designations with common data element 242 | anatomical designations, and save the statistics + region designations out as 243 | NIDM serializations (i.e. TURTLE, JSON-LD RDF)''', 244 | formatter_class=argparse.RawDescriptionHelpFormatter) 245 | #DBK: added mutually exclusive arguments to support pulling a named stats file (e.g. aseg.stats) as a URL such as 246 | #data hosted in an amazon bucket or from a mounted filesystem where you don't have access to the original 247 | #subjects directory.
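# Example invocation (illustrative only; the output path below is hypothetical and the URL is the
# WIP ReproNim/ABIDE example noted further down in this file):
#   fslsegstats2nidm -f https://fcp-indi.s3.amazonaws.com/data/Projects/ABIDE/Outputs/mindboggle_swf/simple_workflow/sub-0050002/segstats.json \
#       -subjid 0050002 -o /tmp/sub-0050002_fsl_nidm.ttl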
248 | 249 | group = parser.add_mutually_exclusive_group(required=True) 250 | 251 | group.add_argument('-d', '--data_file', dest='data_file', type=str, 252 | help='Path to FSL FIRST/FAST JSON data file') 253 | group.add_argument('-f', '--seg_file', dest='segfile', type=str,help='Path or URL to a specific FSL JSON ' 254 | 'stats file. Note, currently this is tested on ReproNim data') 255 | parser.add_argument('-subjid','--subjid',dest='subjid',required=True, help='If a path to a URL or a stats file ' 256 | 'is supplied via the -f/--seg_file parameters then -subjid parameter must be set with ' 257 | 'the subject identifier to be used in the NIDM files') 258 | parser.add_argument('-o', '--output', dest='output_dir', type=str, 259 | help='Output filename with full path', required=True) 260 | parser.add_argument('-j', '--jsonld', dest='jsonld', action='store_true', default = False, 261 | help='If flag set then NIDM file will be written as JSONLD instead of TURTLE') 262 | parser.add_argument('-add_de', '--add_de', dest='add_de', action='store_true', default = None, 263 | help='If flag set then data element data dictionary will be added to nidm file else it will be written to a ' 264 | 'separate file as fsl_cde.ttl in the output directory (or same directory as nidm file if the -n parameter ' 265 | 'is used).') 266 | parser.add_argument('-n','--nidm', dest='nidm_file', type=str, required=False, 267 | help='Optional NIDM file to add segmentation data to.') 268 | parser.add_argument('-forcenidm','--forcenidm', action='store_true',required=False, 269 | help='If adding to NIDM file this parameter forces the data to be added even if the participant ' 270 | "doesn't currently exist in the NIDM file.") 271 | 272 | args = parser.parse_args() 273 | 274 | # test whether user supplied stats file directly and if so then the subject id must also be supplied so we 275 | # know which subject the stats file is for 276 | if (args.segfile and (args.subjid is None)) or (args.data_file and (args.subjid is None)): 277 | parser.error("-f/--seg_file and -d/--data_file require -subjid/--subjid to be set!") 278 | 279 | # if output_dir doesn't exist then create it 280 | out_path = os.path.dirname(args.output_dir) 281 | if out_path and not os.path.exists(out_path): 282 | os.makedirs(out_path) 283 | 284 | 285 | # if we set -d or --data_file as parameter on command line...
286 |     if args.data_file is not None:
287 | 
288 | 
289 |         measures = read_fsl_stats(args.data_file)
290 |         [e, doc] = convert_stats_to_nidm(measures)
291 |         g = create_cde_graph()
292 | 
293 |         # for measures we need to create NIDM structures using anatomy mappings
294 |         # If the user has supplied an existing NIDM file as a command line parameter then add to that file for subjects who exist in the NIDM file
295 |         if args.nidm_file is None:
296 | 
297 |             print("Creating NIDM file...")
298 |             # If the user did not choose to add this data to an existing NIDM file then create a new one for the segmentation data
299 | 
300 |             # convert nidm stats graph to rdflib
301 |             g2 = Graph()
302 |             g2.parse(source=StringIO(doc.serialize(format='rdf', rdf_format='turtle')), format='turtle')
303 | 
304 |             if args.add_de is not None:
305 |                 nidmdoc = g + g2
306 |             else:
307 |                 nidmdoc = g2
308 | 
309 |             # print(nidmdoc.serializeTurtle())
310 | 
311 |             # add seg data to new NIDM file
312 |             add_seg_data(nidmdoc=nidmdoc, subjid=args.subjid, fs_stats_entity_id=e.identifier)
313 | 
314 |             # serialize NIDM file
315 |             print("Writing NIDM file...")
316 |             if args.jsonld is not False:
317 |                 # nidmdoc.serialize(destination=join(args.output_dir,splitext(basename(args.data_file))[0]+'.json'),format='jsonld')
318 |                 nidmdoc.serialize(destination=join(args.output_dir), format='jsonld')
319 |             else:
320 |                 # nidmdoc.serialize(destination=join(args.output_dir,splitext(basename(args.data_file))[0]+'.ttl'),format='turtle')
321 |                 nidmdoc.serialize(destination=join(args.output_dir), format='turtle')
322 |             # added to support separate cde serialization
323 |             if args.add_de is None:
324 |                 # serialize cde graph
325 |                 g.serialize(destination=join(dirname(args.output_dir), "fsl_cde.ttl"), format='turtle')
326 | 
327 |         # we're adding these data to an existing NIDM file
328 |         else:
329 |             # read in NIDM file with rdflib
330 |             g1 = Graph()
331 |             g1.parse(args.nidm_file, format=util.guess_format(args.nidm_file))
332 | 
333 |             # convert nidm stats graph to rdflib
334 |             g2 = Graph()
335 |             g2.parse(source=StringIO(doc.serialize(format='rdf', rdf_format='turtle')), format='turtle')
336 | 
337 |             if args.add_de is not None:
338 |                 print("Combining graphs...")
339 |                 nidmdoc = g + g1 + g2
340 |             else:
341 |                 nidmdoc = g1 + g2
342 | 
343 |             if args.forcenidm is not False:
344 |                 add_seg_data(nidmdoc=nidmdoc, subjid=args.subjid, fs_stats_entity_id=e.identifier, add_to_nidm=True, forceagent=True)
345 |             else:
346 |                 add_seg_data(nidmdoc=nidmdoc, subjid=args.subjid, fs_stats_entity_id=e.identifier, add_to_nidm=True)
347 | 
348 | 
349 |             # serialize NIDM file
350 |             print("Writing Augmented NIDM file...")
351 |             if args.jsonld is not False:
352 |                 nidmdoc.serialize(destination=args.nidm_file + '.json', format='jsonld')
353 |             else:
354 |                 nidmdoc.serialize(destination=args.nidm_file, format='turtle')
355 | 
356 |             if args.add_de is None:
357 |                 # serialize cde graph
358 |                 g.serialize(destination=join(dirname(args.output_dir), "fsl_cde.ttl"), format='turtle')
359 | 
360 |     # else if the user didn't set -d/--data_file on the command line then they must have set a segmentation file directly
361 |     elif args.segfile is not None:
362 | 
363 |         # WIP: FSL URL form: https://fcp-indi.s3.amazonaws.com/data/Projects/ABIDE/Outputs/mindboggle_swf/simple_workflow/sub-0050002/segstats.json
364 | 
365 |         # here we're supporting amazon bucket-style file URLs where the expectation is that the last part of the URL path is the file name
366 |         # see if we have a valid url
367 |         url = url_validator(args.segfile)
368 |         # if the user supplied a url as a segfile
369 |         if url is not False:
370 | 
371 |             # try to open the url and get the pointed-to file
372 |             try:
373 |                 # open url and get file
374 |                 opener = ur.urlopen(args.segfile)
375 |                 # write temporary file to disk and use for stats
376 |                 temp = tempfile.NamedTemporaryFile(delete=False)
377 |                 temp.write(opener.read())
378 |                 temp.close()
379 |                 stats_file = temp.name
380 |             except Exception:
381 |                 print("ERROR! Can't open url: %s" % args.segfile)
382 |                 exit()
383 | 
384 |             # since all of the above worked, all we need to do is set the output file name to be
385 |             # args.subjid + "_" + [everything after the last / in the supplied URL]
386 |             url_parts = urlparse(args.segfile)
387 |             path_parts = url_parts[2].rpartition('/')
388 |             output_filename = args.subjid + "_" + splitext(path_parts[2])[0]
389 | 
390 |         # else this must be a path to a stats file
391 |         else:
392 |             if isfile(args.segfile):
393 |                 stats_file = args.segfile
394 |                 # set output_filename to be args.subjid + "_" + args.segfile
395 |                 output_filename = args.subjid + "_" + splitext(basename(args.segfile))[0]
396 |             else:
397 |                 print("ERROR! Can't open stats file: %s " % args.segfile)
398 |                 exit()
399 | 
400 |         measures = read_fsl_stats(stats_file)
401 |         [e, doc] = convert_stats_to_nidm(measures)
402 |         g = create_cde_graph()
403 | 
404 | 
405 |         # for measures we need to create NIDM structures using anatomy mappings
406 |         # If the user has supplied an existing NIDM file as a command line parameter then add to that file for subjects who exist in the NIDM file
407 |         if args.nidm_file is None:
408 | 
409 |             print("Creating NIDM file...")
410 |             # If the user did not choose to add this data to an existing NIDM file then create a new one for the segmentation data
411 | 
412 |             # convert nidm stats graph to rdflib
413 |             g2 = Graph()
414 |             g2.parse(source=StringIO(doc.serialize(format='rdf', rdf_format='turtle')), format='turtle')
415 | 
416 |             if args.add_de is not None:
417 |                 nidmdoc = g + g2
418 |             else:
419 |                 nidmdoc = g2
420 | 
421 |             # print(nidmdoc.serializeTurtle())
422 | 
423 |             # add seg data to new NIDM file
424 |             add_seg_data(nidmdoc=nidmdoc, subjid=args.subjid, fs_stats_entity_id=e.identifier)
425 | 
426 |             # serialize NIDM file
427 |             print("Writing NIDM file...")
428 |             if args.jsonld is not False:
429 |                 # nidmdoc.serialize(destination=join(args.output_dir,splitext(basename(args.data_file))[0]+'.json'),format='jsonld')
430 |                 nidmdoc.serialize(destination=join(args.output_dir), format='jsonld')
431 |             else:
432 |                 # nidmdoc.serialize(destination=join(args.output_dir,splitext(basename(args.data_file))[0]+'.ttl'),format='turtle')
433 |                 nidmdoc.serialize(destination=join(args.output_dir), format='turtle')
434 | 
435 |             # added to support separate cde serialization
436 |             if args.add_de is None:
437 |                 # serialize cde graph
438 |                 g.serialize(destination=join(dirname(args.output_dir), "fsl_cde.ttl"), format='turtle')
439 | 
440 | 
441 |         # we're adding these data to an existing NIDM file
442 |         else:
443 |             # read in NIDM file with rdflib
444 |             g1 = Graph()
445 |             g1.parse(args.nidm_file, format=util.guess_format(args.nidm_file))
446 | 
447 |             # convert nidm stats graph to rdflib
448 |             g2 = Graph()
449 |             g2.parse(source=StringIO(doc.serialize(format='rdf', rdf_format='turtle')), format='turtle')
450 | 
451 |             if args.add_de is not None:
452 |                 print("Combining graphs...")
453 |                 nidmdoc = g + g1 + g2
454 |             else:
455 |                 nidmdoc = g1 + g2
456 | 
457 |             if args.forcenidm is not False:
458 |                 add_seg_data(nidmdoc=nidmdoc, subjid=args.subjid, fs_stats_entity_id=e.identifier, add_to_nidm=True, forceagent=True)
459 |             else:
460 |                 add_seg_data(nidmdoc=nidmdoc, subjid=args.subjid, fs_stats_entity_id=e.identifier, add_to_nidm=True)
461 | 
462 | 
463 |             # serialize NIDM file
464 |             print("Writing Augmented NIDM file...")
465 |             if args.jsonld is not False:
466 |                 nidmdoc.serialize(destination=args.nidm_file + '.json', format='jsonld')
467 |             else:
468 |                 nidmdoc.serialize(destination=args.nidm_file, format='turtle')
469 | 
470 |             if args.add_de is None:
471 |                 # serialize cde graph
472 |                 g.serialize(destination=join(dirname(args.output_dir), "fsl_cde.ttl"), format='turtle')
473 | 
474 | if __name__ == "__main__":
475 |     main()
476 | 
--------------------------------------------------------------------------------
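
For readers who want to call the conversion functions directly rather than through the command line, the -d/--data_file code path in main() above reduces to roughly the sketch below. This is a minimal illustration only: the import paths are assumed from the package layout (main() calls these helpers directly), and the input JSON path, subject ID, and output path are hypothetical.

from io import StringIO
from rdflib import Graph

# assumed import locations for the helpers used by main()
from fsl_seg_to_nidm.fslutils import read_fsl_stats, convert_stats_to_nidm, create_cde_graph
from fsl_seg_to_nidm.fsl_seg_to_nidm import add_seg_data

measures = read_fsl_stats("/data/sub-01/segstats.json")   # hypothetical FSL FIRST/FAST JSON file
[e, doc] = convert_stats_to_nidm(measures)                # prov document plus the stats entity
g = create_cde_graph()                                    # FSL common data element graph

# convert the prov document to an rdflib Graph, as main() does
g2 = Graph()
g2.parse(source=StringIO(doc.serialize(format='rdf', rdf_format='turtle')), format='turtle')

nidmdoc = g + g2                                          # equivalent to running with -add_de
add_seg_data(nidmdoc=nidmdoc, subjid="sub-01", fs_stats_entity_id=e.identifier)

nidmdoc.serialize(destination="/out/sub-01_fsl.ttl", format='turtle')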