├── load_confounds ├── data │ ├── invalid_desc-confounds_regressors.tsv │ ├── invalid_desc-confounds_timeseries.tsv │ ├── __init__.py │ ├── test_space-fsLR_den-91k_bold.dtseries.nii │ ├── test_space-fsaverage5_hemi-L_bold.func.gii │ ├── test_space-fsaverage5_hemi-R_bold.func.gii │ ├── test_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz │ ├── invalid_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz │ ├── missing_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz │ ├── nonss_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz │ ├── noconfound_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz │ ├── test_space-MNI152NLin2009cAsym_desc-smoothAROMAnonaggr_bold.nii.gz │ ├── missing_desc-confounds_regressors.tsv │ └── test_desc-confounds_regressors.tsv ├── tests │ ├── __init__.py │ ├── test_strategies.py │ └── test_parser.py ├── __init__.py ├── compcor.py ├── confounds.py ├── strategies.py └── parser.py ├── .gitignore ├── requirements.txt ├── CONTRIBUTING.md ├── .circleci └── config.yml ├── LICENSE ├── setup.py ├── .all-contributorsrc └── README.md /load_confounds/data/invalid_desc-confounds_regressors.tsv: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /load_confounds/data/invalid_desc-confounds_timeseries.tsv: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | tests/__pycache__/ 2 | __pycache__/ 3 | load_confounds.egg-info/ 4 | -------------------------------------------------------------------------------- /load_confounds/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests all modules in the load_confounds library.""" 2 | -------------------------------------------------------------------------------- /load_confounds/data/__init__.py: -------------------------------------------------------------------------------- 1 | """Example confounds data generated by fmriprep, for testing purposes.""" 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy>=1.17.4 2 | pandas>=0.25.3 3 | scikit-learn>=0.21.3 4 | scipy>=1.3.2 5 | nilearn>=0.7.1 6 | matplotlib>=3.3.2 7 | pytest>=6.0.1 8 | -------------------------------------------------------------------------------- /load_confounds/data/test_space-fsLR_den-91k_bold.dtseries.nii: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SIMEXP/load_confounds/HEAD/load_confounds/data/test_space-fsLR_den-91k_bold.dtseries.nii -------------------------------------------------------------------------------- /load_confounds/data/test_space-fsaverage5_hemi-L_bold.func.gii: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SIMEXP/load_confounds/HEAD/load_confounds/data/test_space-fsaverage5_hemi-L_bold.func.gii -------------------------------------------------------------------------------- /load_confounds/data/test_space-fsaverage5_hemi-R_bold.func.gii: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/SIMEXP/load_confounds/HEAD/load_confounds/data/test_space-fsaverage5_hemi-R_bold.func.gii -------------------------------------------------------------------------------- /load_confounds/data/test_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SIMEXP/load_confounds/HEAD/load_confounds/data/test_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz -------------------------------------------------------------------------------- /load_confounds/data/invalid_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SIMEXP/load_confounds/HEAD/load_confounds/data/invalid_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz -------------------------------------------------------------------------------- /load_confounds/data/missing_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SIMEXP/load_confounds/HEAD/load_confounds/data/missing_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz -------------------------------------------------------------------------------- /load_confounds/data/nonss_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SIMEXP/load_confounds/HEAD/load_confounds/data/nonss_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz -------------------------------------------------------------------------------- /load_confounds/data/noconfound_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SIMEXP/load_confounds/HEAD/load_confounds/data/noconfound_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz -------------------------------------------------------------------------------- /load_confounds/data/test_space-MNI152NLin2009cAsym_desc-smoothAROMAnonaggr_bold.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SIMEXP/load_confounds/HEAD/load_confounds/data/test_space-MNI152NLin2009cAsym_desc-smoothAROMAnonaggr_bold.nii.gz -------------------------------------------------------------------------------- /load_confounds/__init__.py: -------------------------------------------------------------------------------- 1 | """loading fMRIprep confounds into python.""" 2 | from load_confounds.parser import Confounds 3 | from load_confounds.strategies import ( 4 | Minimal, 5 | Scrubbing, 6 | CompCor, 7 | ICAAROMA, 8 | ) 9 | 10 | __all__ = [ 11 | "Confounds", 12 | "Minimal", 13 | "Scrubbing", 14 | "CompCor", 15 | "ICAAROMA", 16 | ] 17 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | The SIMEXP lab openly invites anybody to contribute to this repository. Most conversations take place on open [issues](https://github.com/SIMEXP/load_confounds/issues). We aim to create a welcoming environment where everybody's viewpoints are respected and acknowledged. 2 | 3 | If you are interested in making changes we encourage you to do it as follows: 4 | 1. Comment on an existing issue or add a new issue 5 | 2. 
Fork the **load_confounds** repository 6 | 3. Create a new branch for the issue 7 | 4. Make changes discussed 8 | 5. Test using `pytest` 9 | 6. Ensure that your fork is up to date with master 10 | 7. Submit a pull request 11 | 12 | You can test your changes by running `pytest` or `pytest -vvv`. This will display the number of passes and fails. Please refer to the [pytest documentation](https://docs.pytest.org/en/latest/usage.html) for more details. 13 | 14 | Thank you for your help :smiley: 15 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | orbs: 4 | python: circleci/python@0.2.1 5 | 6 | jobs: 7 | build-and-test: 8 | working_directory: ~/test_load_confounds 9 | docker: 10 | - image: circleci/python:3.7 11 | steps: 12 | - checkout 13 | - run: 14 | command: | 15 | pip install --progress-bar off -r requirements.txt 16 | pip install --progress-bar off pytest coverage 17 | pip install --progress-bar off -e . 18 | - run: 19 | command: | 20 | coverage run --source . -m pytest load_confounds/tests/test_*.py 21 | coverage report 22 | coverage html 23 | name: test_load_confounds 24 | - run: 25 | command: | 26 | bash <(curl -s https://codecov.io/bash) 27 | name: Upload_codecov 28 | - store_artifacts: 29 | path: htmlcov 30 | 31 | workflows: 32 | main: 33 | jobs: 34 | - build-and-test 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 load_confounds contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | from os import path 3 | 4 | here = path.abspath(path.dirname(__file__)) 5 | 6 | # Get the long description from the README file 7 | with open(path.join(here, "README.md"), encoding="utf-8") as f: 8 | long_description = f.read() 9 | 10 | setup( 11 | name="load_confounds", 12 | version="0.12.0", 13 | description="load fMRIprep confounds in python", 14 | license="MIT", 15 | url="https://github.com/simexp/load_confounds", 16 | long_description=long_description, 17 | long_description_content_type="text/markdown", # Optional (see note above) 18 | project_urls={ # Optional 19 | "Bug Reports": "https://github.com/simexp/load_confounds/issues", 20 | "Funding": "https://cneuromod.ca", 21 | "Source": "https://github.com/simexp/load_confounds", 22 | }, 23 | maintainer="Pierre Bellec", 24 | maintainer_email="pierre.bellec@gmail.com", 25 | packages=find_packages(), 26 | package_data={"load_confounds.data": ["*.nii.gz", "*.tsv"]}, 27 | install_requires=[ 28 | "numpy>=1.17.4", 29 | "pandas>=0.25.3", 30 | "scikit-learn>=0.21.3", 31 | "scipy>=1.3.2", 32 | "nilearn>=0.7.1", 33 | ], # external packages as dependencies 34 | classifiers=[ 35 | "Development Status :: 4 - Beta", 36 | "Intended Audience :: Developers", 37 | "Topic :: Software Development :: Build Tools", 38 | "License :: OSI Approved :: MIT License", 39 | "Programming Language :: Python :: 3.5", 40 | "Programming Language :: Python :: 3.6", 41 | "Programming Language :: Python :: 3.7", 42 | "Programming Language :: Python :: 3.8", 43 | "Programming Language :: Python :: 3.9", 44 | ], 45 | python_requires=">=3.5", 46 | ) 47 | -------------------------------------------------------------------------------- /load_confounds/compcor.py: -------------------------------------------------------------------------------- 1 | """Helper function for _load_compcor.""" 2 | 3 | 4 | prefix_compcor = {"full": ["t", "a"], "temp": ["t"], "anat": ["a"]} 5 | anat_masker = {True: ["combined"], False: ["WM", "CSF"], None: None} 6 | 7 | 8 | def _find_compcor(confounds_json, compcor, n_compcor, acompcor_combined): 9 | """Builds list for the number of compcor components.""" 10 | prefix_set, anat_mask = _check_compcor_method(compcor, acompcor_combined) 11 | 12 | collector = [] 13 | for prefix in prefix_set: 14 | # all possible compcor confounds in order, mixing different types of mask 15 | all_compcor_name = [ 16 | comp for comp in confounds_json.keys() if f"{prefix}_comp_cor" in comp 17 | ] 18 | # filter by prefix first (anat vs temp) 19 | compcor_cols_filt = _prefix_confound_filter(prefix, all_compcor_name) 20 | if prefix == "a": 21 | # apply acompor mask option if relevant, and select top components 22 | compcor_cols_filt = _acompcor_mask( 23 | confounds_json, anat_mask, compcor_cols_filt, n_compcor 24 | ) 25 | else: 26 | # select top components 27 | compcor_cols_filt = _select_compcor(compcor_cols_filt, n_compcor) 28 | # Aggregate components across all masks 29 | collector += compcor_cols_filt 30 | return collector 31 | 32 | 33 | def _select_compcor(compcor_cols, n_compcor): 34 | """Retain a specified number of compcor components.""" 35 | # only select if not "auto", or less components are requested than there actually is 36 | if (n_compcor != "auto") and (n_compcor < len(compcor_cols)): 37 | compcor_cols = compcor_cols[0:n_compcor] 38 | return 
compcor_cols 39 | 40 | 41 | def _check_compcor_method(compcor, acompcor_combined): 42 | """Load compcor options and check if method is acceptable.""" 43 | # get relevant prefix from compcor strategy 44 | prefix_set = prefix_compcor[compcor] 45 | # get relevant compcore mask 46 | anat_mask = anat_masker[acompcor_combined] 47 | if ("a" in prefix_set) and (anat_mask is None): 48 | raise ValueError( 49 | f"acompcor_combined must set to True or False. Got {acompcor_combined}" 50 | ) 51 | return prefix_set, anat_mask 52 | 53 | 54 | def _acompcor_mask(confounds_json, anat_mask, compcor_cols_filt, n_compcor): 55 | """Filter according to acompcor mask(s) and select top components.""" 56 | collector = [] 57 | for mask in anat_mask: 58 | cols = _json_mask(compcor_cols_filt, confounds_json, mask) 59 | cols = _select_compcor(cols, n_compcor) 60 | collector += cols 61 | return collector 62 | 63 | 64 | def _json_mask(compcor_cols_filt, confounds_json, mask): 65 | """Extract anat compcor components from a given mask.""" 66 | cols = [] 67 | for compcor_col in compcor_cols_filt: 68 | if confounds_json[compcor_col]["Mask"] in mask: 69 | cols.append(compcor_col) 70 | return cols 71 | 72 | 73 | def _prefix_confound_filter(prefix, all_compcor_name): 74 | """Get confound columns by prefix and acompcor mask.""" 75 | compcor_cols_filt = [] 76 | for nn in range(len(all_compcor_name)): 77 | nn_str = str(nn).zfill(2) 78 | compcor_col = f"{prefix}_comp_cor_{nn_str}" 79 | compcor_cols_filt.append(compcor_col) 80 | return compcor_cols_filt 81 | -------------------------------------------------------------------------------- /.all-contributorsrc: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | "README.md" 4 | ], 5 | "imageSize": 100, 6 | "commit": false, 7 | "contributors": [ 8 | { 9 | "login": "FrancoisPgm", 10 | "name": "François Paugam", 11 | "avatar_url": "https://avatars.githubusercontent.com/u/35327799?v=4", 12 | "profile": "https://github.com/FrancoisPgm", 13 | "contributions": [ 14 | "infra", 15 | "code", 16 | "review", 17 | "test", 18 | "data" 19 | ] 20 | }, 21 | { 22 | "login": "HanadS", 23 | "name": "HanadS", 24 | "avatar_url": "https://avatars.githubusercontent.com/u/26352860?v=4", 25 | "profile": "https://github.com/HanadS", 26 | "contributions": [ 27 | "code", 28 | "test", 29 | "data", 30 | "infra", 31 | "doc", 32 | "ideas" 33 | ] 34 | }, 35 | { 36 | "login": "emdupre", 37 | "name": "Elizabeth DuPre", 38 | "avatar_url": "https://avatars.githubusercontent.com/u/15017191?v=4", 39 | "profile": "http://emdupre.me", 40 | "contributions": [ 41 | "ideas" 42 | ] 43 | }, 44 | { 45 | "login": "htwangtw", 46 | "name": "Hao-Ting Wang", 47 | "avatar_url": "https://avatars.githubusercontent.com/u/13743617?v=4", 48 | "profile": "https://wanghaoting.com/", 49 | "contributions": [ 50 | "ideas", 51 | "code", 52 | "data", 53 | "doc", 54 | "test", 55 | "bug" 56 | ] 57 | }, 58 | { 59 | "login": "pbellec", 60 | "name": "Pierre Bellec", 61 | "avatar_url": "https://avatars.githubusercontent.com/u/1670887?v=4", 62 | "profile": "http://simexp-lab.org", 63 | "contributions": [ 64 | "code", 65 | "bug", 66 | "ideas", 67 | "infra", 68 | "test", 69 | "data", 70 | "eventOrganizing", 71 | "maintenance", 72 | "projectManagement" 73 | ] 74 | }, 75 | { 76 | "login": "smeisler", 77 | "name": "Steven Meisler", 78 | "avatar_url": "https://avatars.githubusercontent.com/u/27028726?v=4", 79 | "profile": "https://scholar.harvard.edu/steven-meisler", 80 | "contributions": [ 81 | "bug", 82 | 
"test", 83 | "data", 84 | "code", 85 | "doc", 86 | "ideas" 87 | ] 88 | }, 89 | { 90 | "login": "effigies", 91 | "name": "Chris Markiewicz", 92 | "avatar_url": "https://avatars.githubusercontent.com/u/83442?v=4", 93 | "profile": "https://github.com/effigies", 94 | "contributions": [ 95 | "ideas" 96 | ] 97 | }, 98 | { 99 | "login": "srastegarnia", 100 | "name": "Shima Rastegarnia", 101 | "avatar_url": "https://avatars.githubusercontent.com/u/64853244?v=4", 102 | "profile": "https://github.com/srastegarnia", 103 | "contributions": [ 104 | "bug" 105 | ] 106 | }, 107 | { 108 | "login": "nuKs", 109 | "name": "Thibault PIRONT", 110 | "avatar_url": "https://avatars.githubusercontent.com/u/1691962?v=4", 111 | "profile": "https://github.com/nuKs", 112 | "contributions": [ 113 | "code" 114 | ] 115 | }, 116 | { 117 | "login": "m-w-w", 118 | "name": "m-w-w", 119 | "avatar_url": "https://avatars.githubusercontent.com/u/36826334?v=4", 120 | "profile": "https://github.com/m-w-w", 121 | "contributions": [ 122 | "doc" 123 | ] 124 | } 125 | ], 126 | "contributorsPerLine": 7, 127 | "projectName": "load_confounds", 128 | "projectOwner": "SIMEXP", 129 | "repoType": "github", 130 | "repoHost": "https://github.com", 131 | "skipCi": true 132 | } 133 | -------------------------------------------------------------------------------- /load_confounds/data/missing_desc-confounds_regressors.tsv: -------------------------------------------------------------------------------- 1 | csf csf_derivative1 csf_power2 csf_derivative1_power2 white_matter white_matter_derivative1 white_matter_derivative1_power2 white_matter_power2 global_signal global_signal_derivative1 global_signal_power2 global_signal_derivative1_power2 std_dvars dvars framewise_displacement t_comp_cor_00 t_comp_cor_01 t_comp_cor_02 t_comp_cor_03 t_comp_cor_04 t_comp_cor_05 a_comp_cor_00 a_comp_cor_01 a_comp_cor_02 a_comp_cor_03 a_comp_cor_04 a_comp_cor_05 a_comp_cor_06 a_comp_cor_07 a_comp_cor_08 a_comp_cor_09 a_comp_cor_10 a_comp_cor_11 a_comp_cor_12 a_comp_cor_13 a_comp_cor_14 a_comp_cor_15 a_comp_cor_16 a_comp_cor_17 a_comp_cor_18 a_comp_cor_19 a_comp_cor_20 a_comp_cor_21 a_comp_cor_22 a_comp_cor_23 a_comp_cor_24 a_comp_cor_25 a_comp_cor_26 a_comp_cor_27 a_comp_cor_28 a_comp_cor_29 a_comp_cor_30 a_comp_cor_31 a_comp_cor_32 a_comp_cor_33 a_comp_cor_34 a_comp_cor_35 a_comp_cor_36 a_comp_cor_37 a_comp_cor_38 a_comp_cor_39 a_comp_cor_40 a_comp_cor_41 a_comp_cor_42 a_comp_cor_43 a_comp_cor_44 a_comp_cor_45 a_comp_cor_46 a_comp_cor_47 a_comp_cor_48 a_comp_cor_49 a_comp_cor_50 a_comp_cor_51 a_comp_cor_52 a_comp_cor_53 a_comp_cor_54 a_comp_cor_55 a_comp_cor_56 a_comp_cor_57 a_comp_cor_58 a_comp_cor_59 a_comp_cor_60 a_comp_cor_61 a_comp_cor_62 a_comp_cor_63 a_comp_cor_64 a_comp_cor_65 a_comp_cor_66 a_comp_cor_67 a_comp_cor_68 a_comp_cor_69 a_comp_cor_70 a_comp_cor_71 a_comp_cor_72 a_comp_cor_73 a_comp_cor_74 a_comp_cor_75 a_comp_cor_76 a_comp_cor_77 a_comp_cor_78 a_comp_cor_79 a_comp_cor_80 a_comp_cor_81 a_comp_cor_82 a_comp_cor_83 a_comp_cor_84 a_comp_cor_85 a_comp_cor_86 a_comp_cor_87 a_comp_cor_88 a_comp_cor_89 a_comp_cor_90 a_comp_cor_91 a_comp_cor_92 a_comp_cor_93 a_comp_cor_94 a_comp_cor_95 a_comp_cor_96 a_comp_cor_97 a_comp_cor_98 a_comp_cor_99 a_comp_cor_100 a_comp_cor_101 a_comp_cor_102 a_comp_cor_103 a_comp_cor_104 a_comp_cor_105 a_comp_cor_106 a_comp_cor_107 a_comp_cor_108 a_comp_cor_109 a_comp_cor_110 a_comp_cor_111 a_comp_cor_112 a_comp_cor_113 a_comp_cor_114 a_comp_cor_115 a_comp_cor_116 a_comp_cor_117 a_comp_cor_118 a_comp_cor_119 a_comp_cor_120 
a_comp_cor_121 a_comp_cor_122 a_comp_cor_123 a_comp_cor_124 a_comp_cor_125 trans_x trans_x_power2 trans_x_derivative1_power2 trans_y_derivative1 trans_y_derivative1_power2 trans_y_power2 trans_z trans_z_derivative1 trans_z_power2 trans_z_derivative1_power2 rot_x rot_x_derivative1 rot_x_derivative1_power2 rot_x_power2 rot_y rot_y_derivative1 rot_y_derivative1_power2 rot_y_power2 rot_z rot_z_derivative1 rot_z_derivative1_power2 2 | 671.153618342538 n/a 450447.179414282 n/a 580.665507538706 n/a n/a 337172.431645183 530.768657905309 n/a 281715.368214603 n/a n/a n/a n/a -0.2226863812 -0.113050616 0.012665124 -0.1108079188 0.163488133 -0.3101784367 -0.2136032405 0.1805062665 -0.1258338043 0.4615960133 -0.1314987203 0.1795279005 -0.3658971697 0.2164046165 -0.0681233238 0.277758336 -0.1585464574 0.0437320656 0.0543286588 -0.0609655548 0.0016228234 -0.1051225071 0.0830414855 -0.0348087941 0.1335789368 -0.0570570553 0.0302598133 -0.1056033085 0.0708471347 -0.1039728232 0.0819081229 -0.1052386201 0.0368123057 -0.0289345498 -0.0950948895 0.0089454873 0.0061295626 -0.0529087759 -0.0091461234 0.0131369018 0.0170389085 -0.0546898811 -0.0911155844 0.1279118221 -0.0851975756 -0.0161044692 -0.0147100112 -0.039790274 0.0713014986 0.053585884 -0.0231722839 -0.0454798065 0.0125404302 0.1062919071 -0.0345799571 -0.0666732272 -0.0328259962 0.0616374556 0.0110585603 -0.0554769626 -0.006128521 -0.0223883564 -0.0118658614 0.01934487 -0.4229998581 0.0072832758 -0.4891379867 0.0954260773 -0.2426541714 0.3379348625 -0.0425466316 0.0907893795 -0.1803523156 0.3068471996 0.1652275486 -0.1012857549 -0.1677140614 0.133136193 -0.1974077136 0.0056496406 -0.0048809137 0.0529552556 -0.0307714247 0.0631189474 -0.0169368693 0.1342995158 -0.2112852073 0.0865245679 -0.0774314987 0.0341471268 -0.1030205292 0.0504206098 -0.1146514182 0.0589888681 -0.0236413099 0.0301646539 -0.0806863501 0.0189077647 -0.0026780651 -0.1187718476 0.1740324755 -0.162994716 0.0435311784 -0.0012686971 -0.2538492568 0.0886468548 -0.1672455871 -0.0721635267 0.2255190417 0.1092055154 -0.0824809281 -0.0488471461 0.004313308 0.0455344479 0.1600177573 -0.0417350641 -0.02264449 0.0267842935 0.1211738511 -0.0328850324 -0.0240413704 -0.0802451727 0.0334896302 0.0366594443 0.012656029 -0.0279206193 -0.0098421191 -0.1306543038 0.1167717346 0.0629070738 -0.0324221941 -0.0648432423 -8.0883E-05 6.542059689E-09 n/a n/a n/a 0.00365882233924 -0.186893 n/a 0.034928993449 n/a 0.00250235 n/a n/a 6.2617555225E-06 -0.000822451 n/a n/a 6.76425647401E-07 -0.000621335 n/a n/a 3 | -------------------------------------------------------------------------------- /load_confounds/tests/test_strategies.py: -------------------------------------------------------------------------------- 1 | """Test predefined denoising strategies.""" 2 | import os 3 | import re 4 | import load_confounds.strategies as lc 5 | import numpy as np 6 | import pandas as pd 7 | import pytest 8 | 9 | 10 | path_data = os.path.join(os.path.dirname(lc.__file__), "data") 11 | file_confounds = os.path.join( 12 | path_data, "test_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz" 13 | ) 14 | file_aroma = os.path.join( 15 | path_data, "test_space-MNI152NLin2009cAsym_desc-smoothAROMAnonaggr_bold.nii.gz" 16 | ) 17 | 18 | 19 | def test_Minimal(): 20 | """Test the Minimal strategy.""" 21 | # Try to load the confounds, whithout PCA reduction 22 | conf = lc.Minimal() 23 | assert conf.strategy == ["high_pass", "motion", "wm_csf", "non_steady_state"] 24 | assert hasattr(conf, "global_signal") == False 25 | 
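    # load() auto-detects the companion _desc-confounds_*.tsv file from the image file name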
conf.load(file_confounds) 26 | 27 | assert isinstance(conf.confounds_, pd.DataFrame) 28 | 29 | # Check that all model categories have been successfully loaded 30 | list_check = [ 31 | "trans_y", 32 | "trans_z", 33 | "rot_z", 34 | "cosine00", 35 | "csf", 36 | "white_matter", 37 | ] 38 | for check in list_check: 39 | assert check in conf.confounds_.columns 40 | 41 | # maker sure global signal works 42 | conf = lc.Minimal(global_signal="basic") 43 | assert conf.strategy == ["high_pass", "motion", "wm_csf", "non_steady_state", "global"] 44 | assert conf.global_signal == "basic" 45 | 46 | 47 | def test_Scrubbing(): 48 | """Test the Scrubbing strategy.""" 49 | conf = lc.Scrubbing(fd_thresh=0.15) 50 | # make sure global signal is not there 51 | assert conf.strategy == ["high_pass", "motion", "wm_csf", "scrub", "non_steady_state"] 52 | assert hasattr(conf, "global_signal") == False 53 | conf.load(file_confounds) 54 | 55 | assert isinstance(conf.confounds_, pd.DataFrame) 56 | 57 | # Check that all model categories have been successfully loaded 58 | list_check = [ 59 | "trans_x", 60 | "trans_y", 61 | "rot_z", 62 | "trans_x_derivative1", 63 | "trans_x_power2", 64 | "trans_x_derivative1_power2", 65 | "trans_y_derivative1", 66 | "trans_y_power2", 67 | "trans_y_derivative1_power2", 68 | "trans_z_derivative1", 69 | "trans_z_power2", 70 | "rot_z_derivative1", 71 | "rot_z_power2", 72 | "rot_z_derivative1_power2", 73 | "cosine00", 74 | "cosine01", 75 | "csf", 76 | "white_matter", 77 | "csf_derivative1", 78 | "csf_power2", 79 | "csf_derivative1_power2", 80 | "white_matter_derivative1", 81 | ] 82 | 83 | for check in list_check: 84 | assert check in conf.confounds_.columns 85 | # out of 30 vols, should have 6 motion outliers from scrubbing, 86 | # and 2 vol removed by srubbing strategy "full" 87 | assert len(conf.sample_mask_) == 22 88 | # shape of confound regressors untouched 89 | assert len(conf.confounds_) == 30 90 | 91 | # also load confounds with very liberal scrubbing thresholds 92 | # this should not produce an error 93 | conf = lc.Scrubbing(fd_thresh=1, std_dvars_thresh=5) 94 | conf.load(file_confounds) 95 | assert len(conf.sample_mask_) == 29 # only non-steady volumes removed 96 | 97 | # maker sure global signal works 98 | conf = lc.Scrubbing(global_signal="full") 99 | assert conf.strategy == ["high_pass", "motion", "wm_csf", "scrub", "non_steady_state", "global"] 100 | assert conf.global_signal == "full" 101 | 102 | 103 | def test_CompCor_anatomical(): 104 | """Test the anatomical CompCor strategy.""" 105 | # Try to load the confounds, whithout PCA reduction 106 | conf = lc.CompCor() 107 | conf.load(file_confounds) 108 | 109 | assert isinstance(conf.confounds_, pd.DataFrame) 110 | 111 | list_check = [ 112 | "trans_x", 113 | "trans_y", 114 | "rot_z", 115 | "trans_x_derivative1", 116 | "trans_x_power2", 117 | "trans_y_derivative1_power2", 118 | "trans_z_derivative1", 119 | "trans_z_power2", 120 | "trans_z_derivative1_power2", 121 | "rot_y_derivative1", 122 | "rot_y_power2", 123 | "rot_z_power2", 124 | "rot_z_derivative1_power2", 125 | "cosine00", 126 | "cosine01", 127 | "a_comp_cor_00", 128 | "a_comp_cor_01", 129 | "a_comp_cor_02", 130 | ] 131 | 132 | for check in list_check: 133 | assert check in conf.confounds_.columns 134 | 135 | compcor_col_str_anat = "".join(conf.confounds_.columns) 136 | assert "t_comp_cor_" not in compcor_col_str_anat 137 | assert ( 138 | "a_comp_cor_57" not in compcor_col_str_anat 139 | ) # this one comes from the WW mask 140 | 141 | 142 | def 
test_CompCor_anatomical_not_combined(): 143 | """Test the anatomical CompCor strategy without combined mask.""" 144 | # Try to load the confounds, whithout PCA reduction 145 | conf = lc.CompCor(acompcor_combined=False, n_compcor=5) 146 | conf.load(file_confounds) 147 | 148 | assert isinstance(conf.confounds_, pd.DataFrame) 149 | 150 | list_check = [ 151 | "trans_x", 152 | "trans_y", 153 | "rot_z", 154 | "trans_x_derivative1", 155 | "trans_x_power2", 156 | "trans_y_derivative1_power2", 157 | "trans_z_derivative1", 158 | "trans_z_power2", 159 | "trans_z_derivative1_power2", 160 | "rot_y_derivative1", 161 | "rot_y_power2", 162 | "rot_z_power2", 163 | "rot_z_derivative1_power2", 164 | "cosine00", 165 | "cosine01", 166 | "a_comp_cor_57", # from CSF mask 167 | "a_comp_cor_61", # from CSF mask 168 | "a_comp_cor_70", # from WM mask 169 | "a_comp_cor_74", # from WM mask 170 | ] 171 | 172 | for check in list_check: 173 | assert check in conf.confounds_.columns 174 | 175 | compcor_col_str_anat = "".join(conf.confounds_.columns) 176 | assert "t_comp_cor_" not in compcor_col_str_anat 177 | assert ( 178 | "a_comp_cor_00" not in compcor_col_str_anat 179 | ) # this one comes from the combined mask 180 | assert ( 181 | "a_comp_cor_62" not in compcor_col_str_anat 182 | ) # this one exceeds the number of requested components 183 | assert ( 184 | "a_comp_cor_75" not in compcor_col_str_anat 185 | ) # this one exceeds the number of requested components 186 | 187 | 188 | def test_CompCor_temporal(): 189 | """Test the temporal ompCor strategy.""" 190 | # Try to load the confounds, whithout PCA reduction 191 | conf = lc.CompCor(compcor="temp") 192 | conf.load(file_confounds) 193 | 194 | assert isinstance(conf.confounds_, pd.DataFrame) 195 | 196 | list_check = [ 197 | "cosine00", 198 | "cosine01", 199 | "cosine02", 200 | "cosine03", 201 | "t_comp_cor_00", 202 | "t_comp_cor_01", 203 | "t_comp_cor_02", 204 | "t_comp_cor_03", 205 | ] 206 | for check in list_check: 207 | assert check in conf.confounds_.columns 208 | 209 | compcor_col_str_anat = "".join(conf.confounds_.columns) 210 | assert "a_comp_cor_" not in compcor_col_str_anat 211 | 212 | 213 | def test_FullCompCor(): 214 | """Test a full compcor strategy.""" 215 | conf = lc.CompCor(compcor="full", acompcor_combined=False) 216 | conf.load(file_confounds) 217 | 218 | assert isinstance(conf.confounds_, pd.DataFrame) 219 | 220 | list_check = [ 221 | "t_comp_cor_00", 222 | "t_comp_cor_01", 223 | "t_comp_cor_02", 224 | "t_comp_cor_03", 225 | "a_comp_cor_57", # from CSF mask 226 | "a_comp_cor_58", # from CSF mask 227 | "a_comp_cor_105", # from WM mask 228 | ] 229 | for check in list_check: 230 | assert check in conf.confounds_.columns 231 | 232 | 233 | def test_ICAAROMA(): 234 | """Test the (non-aggressive) ICA-AROMA strategy.""" 235 | conf = lc.ICAAROMA(global_signal="basic") 236 | assert conf.global_signal == "basic" 237 | assert conf.strategy == ["wm_csf", "high_pass", "ica_aroma", "non_steady_state", "global"] 238 | conf.load(file_aroma) 239 | 240 | # Check that all fixed name model categories have been successfully loaded 241 | list_check = [ 242 | "csf", 243 | "white_matter", 244 | "global_signal", 245 | ] 246 | for c in conf.confounds_.columns: 247 | # Check that all fixed name model categories 248 | fixed = c in list_check 249 | cosines = re.match("cosine+", c) 250 | assert fixed or (cosines is not None) 251 | 252 | 253 | def test_invalid(): 254 | """Test warning raised for invalid keywors.""" 255 | with pytest.warns(UserWarning) as record: 256 | 
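        # "compcor" is not an accepted parameter for the ICAAROMA strategy, so a UserWarning is expected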
lc.ICAAROMA(compcor="anat", global_signal="full") 257 | assert "not taking effect: ['compcor']" in record[0].message.args[0] 258 | -------------------------------------------------------------------------------- /load_confounds/confounds.py: -------------------------------------------------------------------------------- 1 | """Helper functions for the manipulation of confounds. 2 | 3 | Authors: load_confounds team 4 | """ 5 | import numpy as np 6 | import pandas as pd 7 | from sklearn.decomposition import PCA 8 | from sklearn.preprocessing import scale 9 | import os 10 | import json 11 | import re 12 | 13 | 14 | img_file_patterns = { 15 | "aroma": "_desc-smoothAROMAnonaggr_bold", 16 | "nii.gz": "_space-.*_desc-preproc_bold.nii.gz", 17 | "dtseries.nii": "_space-.*_bold.dtseries.nii", 18 | "func.gii": "_space-.*_hemi-[LR]_bold.func.gii", 19 | } 20 | 21 | img_file_error = { 22 | "aroma": "Input must be ~desc-smoothAROMAnonaggr_bold for full ICA-AROMA strategy.", 23 | "nii.gz": "Invalid file type for the selected method.", 24 | "dtseries.nii": "Invalid file type for the selected method.", 25 | "func.gii": "need fMRIprep output with extension func.gii", 26 | } 27 | 28 | 29 | def _check_params(confounds_raw, params): 30 | """Check that specified parameters can be found in the confounds.""" 31 | not_found_params = [] 32 | for par in params: 33 | if not par in confounds_raw.columns: 34 | not_found_params.append(par) 35 | if not_found_params: 36 | raise MissingConfound(params=not_found_params) 37 | return None 38 | 39 | 40 | def _find_confounds(confounds_raw, keywords): 41 | """Find confounds that contain certain keywords.""" 42 | list_confounds, missing_keys = [], [] 43 | for key in keywords: 44 | key_found = [col for col in confounds_raw.columns if key in col] 45 | if key_found: 46 | list_confounds.extend(key_found) 47 | elif key != "non_steady_state": 48 | missing_keys.append(key) 49 | if missing_keys: 50 | raise MissingConfound(keywords=missing_keys) 51 | return list_confounds 52 | 53 | 54 | def _flag_single_gifti(img_files): 55 | """Test if the paired input files are giftis.""" 56 | flag_single_gifti = [] # gifti in pairs 57 | for img in img_files: 58 | ext = ".".join(img.split(".")[-2:]) 59 | flag_single_gifti.append((ext == "func.gii")) 60 | return all(flag_single_gifti) 61 | 62 | 63 | def _sanitize_confounds(img_files): 64 | """Make sure the inputs are in the correct format.""" 65 | # we want to support loading a single set of confounds, instead of a list 66 | # so we hack it 67 | if isinstance(img_files, list) and len(img_files) == 2: 68 | flag_single = _flag_single_gifti(img_files) 69 | else: # single file 70 | flag_single = isinstance(img_files, str) 71 | 72 | if flag_single: 73 | img_files = [img_files] 74 | return img_files, flag_single 75 | 76 | 77 | def _add_suffix(params, model): 78 | """ 79 | Add suffixes to a list of parameters. 
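    For example, ["csf"] with model "derivatives" becomes ["csf", "csf_derivative1"].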
80 | Suffixes includes derivatives, power2 and full 81 | """ 82 | params_full = params.copy() 83 | suffix = { 84 | "basic": {}, 85 | "derivatives": {"derivative1"}, 86 | "power2": {"power2"}, 87 | "full": {"derivative1", "power2", "derivative1_power2"}, 88 | } 89 | for par in params: 90 | for suff in suffix[model]: 91 | params_full.append(f"{par}_{suff}") 92 | return params_full 93 | 94 | 95 | def _pca_motion(confounds_motion, n_components): 96 | """Reduce the motion paramaters using PCA.""" 97 | n_available = confounds_motion.shape[1] 98 | if n_components > n_available: 99 | raise ValueError( 100 | f"User requested n_motion={n_components} motion components, but found only {n_available}." 101 | ) 102 | confounds_motion = confounds_motion.dropna() 103 | confounds_motion_std = scale( 104 | confounds_motion, axis=0, with_mean=True, with_std=True 105 | ) 106 | pca = PCA(n_components=n_components) 107 | motion_pca = pd.DataFrame(pca.fit_transform(confounds_motion_std)) 108 | motion_pca.columns = ["motion_pca_" + str(col + 1) for col in motion_pca.columns] 109 | return motion_pca 110 | 111 | 112 | def _optimize_scrub(fd_outliers, n_scans): 113 | """ 114 | Perform optimized scrub. After scrub volumes, further remove 115 | continuous segments containing fewer than 5 volumes. 116 | Power, Jonathan D., et al. "Methods to detect, characterize, and remove 117 | motion artifact in resting state fMRI." Neuroimage 84 (2014): 320-341. 118 | """ 119 | # Start by checking if the beginning continuous segment is fewer than 5 volumes 120 | if fd_outliers[0] < 5: 121 | fd_outliers = np.asarray(list(range(fd_outliers[0])) + list(fd_outliers)) 122 | # Do the same for the ending segment of scans 123 | if n_scans - (fd_outliers[-1] + 1) < 5: 124 | fd_outliers = np.asarray( 125 | list(fd_outliers) + list(range(fd_outliers[-1], n_scans)) 126 | ) 127 | # Now do everything in between 128 | fd_outlier_ind_diffs = np.diff(fd_outliers) 129 | short_segments_inds = np.where( 130 | np.logical_and(fd_outlier_ind_diffs > 1, fd_outlier_ind_diffs < 6) 131 | )[0] 132 | for ind in short_segments_inds: 133 | fd_outliers = np.asarray( 134 | list(fd_outliers) + list(range(fd_outliers[ind] + 1, fd_outliers[ind + 1])) 135 | ) 136 | fd_outliers = np.sort(np.unique(fd_outliers)) 137 | return fd_outliers 138 | 139 | 140 | def _get_file_raw(nii_file): 141 | """Get the name of the raw confound file.""" 142 | if isinstance(nii_file, list): # catch gifti 143 | nii_file = nii_file[0] 144 | suffix = "_space-" + nii_file.split("space-")[1] 145 | # fmriprep has changed the file suffix between v20.1.1 and v20.2.0 with respect to BEP 012. 146 | # cf. https://neurostars.org/t/naming-change-confounds-regressors-to-confounds-timeseries/17637 147 | # Check file with new naming scheme exists or replace, for backward compatibility. 
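    # e.g. "sub-1_task-rest_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz" maps to
    # "sub-1_task-rest_desc-confounds_timeseries.tsv" (new naming) or "..._desc-confounds_regressors.tsv" (old naming)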
148 | confounds_raw_candidates = [ 149 | nii_file.replace( 150 | suffix, 151 | "_desc-confounds_timeseries.tsv", 152 | ), 153 | nii_file.replace( 154 | suffix, 155 | "_desc-confounds_regressors.tsv", 156 | ), 157 | ] 158 | 159 | confounds_raw = [cr for cr in confounds_raw_candidates if os.path.exists(cr)] 160 | 161 | if not confounds_raw: 162 | raise ValueError("Could not find associated confound file.") 163 | elif len(confounds_raw) != 1: 164 | raise ValueError("Found more than one confound file.") 165 | else: 166 | return confounds_raw[0] 167 | 168 | 169 | def _get_json(confounds_raw, flag_acompcor): 170 | """Load json data companion to the confounds tsv file.""" 171 | # Load JSON file 172 | confounds_json = confounds_raw.replace("tsv", "json") 173 | try: 174 | with open(confounds_json, "rb") as f: 175 | confounds_json = json.load(f) 176 | except OSError: 177 | if flag_acompcor: 178 | raise ValueError( 179 | f"Could not find a json file {confounds_json}. This is necessary for anat compcor" 180 | ) 181 | return confounds_json 182 | 183 | 184 | def _ext_validator(image_file, ext): 185 | """Check image is valid based on extention.""" 186 | try: 187 | valid_img = all( 188 | bool(re.search(img_file_patterns[ext], img)) for img in image_file 189 | ) 190 | error_message = img_file_error[ext] 191 | except KeyError: 192 | valid_img = False 193 | error_message = "Unsupported input." 194 | return valid_img, error_message 195 | 196 | 197 | def _check_images(image_file, flag_full_aroma): 198 | """Validate input file and ICA AROMA related file.""" 199 | if len(image_file) == 2: # must be gifti 200 | valid_img, error_message = _ext_validator(image_file, "func.gii") 201 | elif flag_full_aroma: 202 | valid_img, error_message = _ext_validator([image_file], "aroma") 203 | else: 204 | ext = ".".join(image_file.split(".")[-2:]) 205 | valid_img, error_message = _ext_validator([image_file], ext) 206 | if not valid_img: 207 | raise ValueError(error_message) 208 | 209 | 210 | def _confounds_to_df(image_file, flag_acompcor, flag_full_aroma): 211 | """Load raw confounds as a pandas DataFrame.""" 212 | _check_images(image_file, flag_full_aroma) 213 | confounds_raw = _get_file_raw(image_file) 214 | confounds_json = _get_json(confounds_raw, flag_acompcor) 215 | confounds_raw = pd.read_csv(confounds_raw, delimiter="\t", encoding="utf-8") 216 | return confounds_raw, confounds_json 217 | 218 | 219 | def _get_outlier_cols(confounds_columns): 220 | """Get outlier regressor column names.""" 221 | outlier_cols = { 222 | col 223 | for col in confounds_columns 224 | if "motion_outlier" in col or "non_steady_state" in col 225 | } 226 | confounds_col = set(confounds_columns) - outlier_cols 227 | return outlier_cols, confounds_col 228 | 229 | 230 | def _extract_outlier_regressors(confounds): 231 | """Separate confounds and outlier regressors.""" 232 | outlier_cols, confounds_col = _get_outlier_cols(confounds.columns) 233 | outliers = confounds[outlier_cols] if outlier_cols else pd.DataFrame() 234 | confounds = confounds[confounds_col] 235 | sample_mask = _outlier_to_sample_mask(outliers) 236 | return sample_mask, confounds, outliers 237 | 238 | 239 | def _outlier_to_sample_mask(outlier_flag): 240 | """Generate sample mask from outlier regressors.""" 241 | if outlier_flag.size == 0: # Do not supply sample mask 242 | return None # consistency with nilearn sample_mask 243 | outlier_flag = outlier_flag.sum(axis=1).values 244 | return np.where(outlier_flag == 0)[0].tolist() 245 | 246 | 247 | def _prepare_output(confounds, demean): 
248 | """Demean and create sample mask for the selected confounds.""" 249 | sample_mask, confounds, outliers = _extract_outlier_regressors(confounds) 250 | if confounds.size != 0: # ica_aroma = "full" generate empty output 251 | # Derivatives have NaN on the first row 252 | # Replace them by estimates at second time point, 253 | # otherwise nilearn will crash. 254 | mask_nan = np.isnan(confounds.values[0, :]) 255 | confounds.iloc[0, mask_nan] = confounds.iloc[1, mask_nan] 256 | if demean: 257 | confounds = _demean_confounds(confounds, sample_mask) 258 | return sample_mask, confounds 259 | 260 | 261 | def _demean_confounds(confounds, sample_mask): 262 | """Demean the confounds. The mean is calculated on non-outlier values.""" 263 | confound_cols = confounds.columns 264 | if sample_mask is None: 265 | confounds= scale(confounds, axis=0, with_std=False) 266 | else: # calculate the mean without outliers. 267 | confounds_mean = confounds.iloc[sample_mask, :].mean(axis=0) 268 | confounds -= confounds_mean 269 | return pd.DataFrame(confounds, columns=confound_cols) 270 | 271 | 272 | class MissingConfound(Exception): 273 | """ 274 | Exception raised when failing to find params in the confounds. 275 | 276 | Parameters 277 | ---------- 278 | params : list of missing params 279 | keywords: list of missing keywords 280 | """ 281 | 282 | def __init__(self, params=None, keywords=None): 283 | """Default values are empty lists.""" 284 | self.params = params if params else [] 285 | self.keywords = keywords if keywords else [] 286 | -------------------------------------------------------------------------------- /load_confounds/strategies.py: -------------------------------------------------------------------------------- 1 | """Predefined denoising strategies. 2 | 3 | Authors: load_confounds team 4 | """ 5 | import warnings 6 | from .parser import Confounds 7 | 8 | 9 | class Minimal(Confounds): 10 | """ 11 | Load confounds for a minimal denosing strategy commonly used 12 | in resting state functional connectivity, described in Fox et al., 2005. 13 | Full motion parameters, WM/CSF signals, and high pass filter, 14 | with an option to extract global signal confounds. 15 | 16 | Parameters 17 | ---------- 18 | confounds_raw : Pandas Dataframe or path to tsv file(s), optionally as a list. 19 | Raw confounds from fmriprep 20 | 21 | motion : string, optional 22 | Type of confounds extracted from head motion estimates. 23 | "basic" translation/rotation (6 parameters) 24 | "power2" translation/rotation + quadratic terms (12 parameters) 25 | "derivatives" translation/rotation + derivatives (12 parameters) 26 | "full" translation/rotation + derivatives + quadratic terms + power2d derivatives (24 parameters) 27 | 28 | wm_csf : string, optional 29 | Type of confounds extracted from masks of white matter and cerebrospinal fluids. 30 | "basic" the averages in each mask (2 parameters) 31 | "power2" averages and quadratic terms (4 parameters) 32 | "derivatives" averages and derivatives (4 parameters) 33 | "full" averages + derivatives + quadratic terms + power2d derivatives (8 parameters) 34 | 35 | demean : boolean, optional 36 | If True, the confounds are standardized to a zero mean (over time). 37 | This step is critical if the confounds are regressed out of time series 38 | using nilearn with no or zscore standardization, but should be turned off 39 | with "spc" normalization. 40 | 41 | global_signal : string, optional 42 | Specify type of confounds extracted from the global signal. 
43 | Global signal regressors will not be retrieved if no arguments were applied. 44 | "basic" just the global signal (1 parameter) 45 | "power2" global signal and quadratic term (2 parameters) 46 | "derivatives" global signal and derivative (2 parameters) 47 | "full" global signal + derivatives + quadratic terms + power2d derivatives (4 parameters) 48 | 49 | Returns 50 | ------- 51 | conf : a Confounds object 52 | conf.confounds_ is a reduced version of fMRIprep confounds. 53 | 54 | """ 55 | 56 | def __init__(self, motion="full", wm_csf="basic", demean=True, **kwargs): 57 | """Default parameters.""" 58 | # check if global signal is supplied as a parameter 59 | # if so, add to strategy 60 | global_signal = kwargs.get("global_signal", False) 61 | strategy = ["high_pass", "motion", "wm_csf", "non_steady_state"] 62 | strategy, global_signal = _update_strategy(strategy, global_signal) 63 | # warn user for supplying useless parameter 64 | _check_invalid_parameter(kwargs, valid_keys=["global_signal"]) 65 | 66 | # set attributes 67 | self.strategy = strategy 68 | self.motion = motion 69 | self.n_motion = 0 70 | self.wm_csf = wm_csf 71 | self.demean = demean 72 | if global_signal: 73 | self.global_signal = global_signal 74 | 75 | 76 | class Scrubbing(Confounds): 77 | """ 78 | Load confounds for scrubbing describbed in Power et al., 2012. 79 | Motion parameters, WM/CSF signals, scrub (full), high pass filter. 80 | All noise components are fully expanded (derivatives, squares and squared 81 | derivatives). 82 | 83 | Parameters 84 | ---------- 85 | confounds_raw : Pandas Dataframe or path to tsv file(s), optionally as a list. 86 | Raw confounds from fmriprep 87 | 88 | motion : string, optional 89 | Type of confounds extracted from head motion estimates. 90 | "basic" translation/rotation (6 parameters) 91 | "power2" translation/rotation + quadratic terms (12 parameters) 92 | "derivatives" translation/rotation + derivatives (12 parameters) 93 | "full" translation/rotation + derivatives + quadratic terms + power2d derivatives (24 parameters) 94 | 95 | wm_csf : string, optional 96 | Type of confounds extracted from masks of white matter and cerebrospinal fluids. 97 | "basic" the averages in each mask (2 parameters) 98 | "power2" averages and quadratic terms (4 parameters) 99 | "derivatives" averages and derivatives (4 parameters) 100 | "full" averages + derivatives + quadratic terms + power2d derivatives (8 parameters) 101 | 102 | scrub : string, optional 103 | Type of scrub of frames with excessive motion (Power et al. 2014) 104 | "basic" remove time frames based on excessive FD and DVARS 105 | "full" also remove time windows which are too short after scrubbing. 106 | one-hot encoding vectors are added as regressors for each scrubbed frame. 107 | 108 | fd_thresh : float, optional 109 | Framewise displacement threshold for scrub (default = 0.2 mm) 110 | 111 | std_dvars_thresh : float, optional 112 | Standardized DVARS threshold for scrub (default = 3) 113 | 114 | demean : boolean, optional 115 | If True, the confounds are standardized to a zero mean (over time). 116 | This step is critical if the confounds are regressed out of time series 117 | using nilearn with no or zscore standardization, but should be turned off 118 | with "spc" normalization. 119 | 120 | global_signal : string, optional 121 | Specify type of confounds extracted from the global signal. 122 | Global signal regressors will not be retrieved if no arguments were applied. 
123 | "basic" just the global signal (1 parameter) 124 | "power2" global signal and quadratic term (2 parameters) 125 | "derivatives" global signal and derivative (2 parameters) 126 | "full" global signal + derivatives + quadratic terms + power2d derivatives (4 parameters) 127 | 128 | Returns 129 | ------- 130 | conf : a Confounds object 131 | conf.confounds_ is a reduced version of fMRIprep confounds. 132 | 133 | """ 134 | 135 | def __init__( 136 | self, 137 | motion="full", 138 | wm_csf="full", 139 | scrub="full", 140 | fd_thresh=0.2, 141 | std_dvars_thresh=3, 142 | demean=True, 143 | **kwargs, 144 | ): 145 | """Default parameters.""" 146 | # check if global signal is supplied as a parameter 147 | # if so, add to strategy 148 | global_signal = kwargs.get("global_signal", False) 149 | strategy = ["high_pass", "motion", "wm_csf", "scrub", "non_steady_state"] 150 | strategy, global_signal = _update_strategy(strategy, global_signal) 151 | # warn user for supplying useless parameter 152 | _check_invalid_parameter(kwargs, valid_keys=["global_signal"]) 153 | 154 | # set attributes 155 | self.strategy = strategy 156 | self.motion = motion 157 | self.n_motion = 0 158 | self.wm_csf = wm_csf 159 | self.scrub = scrub 160 | self.fd_thresh = (fd_thresh,) 161 | self.std_dvars_thresh = (std_dvars_thresh,) 162 | self.demean = demean 163 | if global_signal: 164 | self.global_signal = global_signal 165 | 166 | 167 | class CompCor(Confounds): 168 | """ 169 | Load confounds using the CompCor strategy from Behzadi et al., 2007. 170 | Default with motion parameters (fully expanded), high pass filter, and anatomical compcor. 171 | 172 | Parameters 173 | ---------- 174 | confounds_raw : Pandas Dataframe or path to tsv file(s), optionally as a list. 175 | Raw confounds from fmriprep 176 | 177 | motion : string, optional 178 | Type of confounds extracted from head motion estimates. 179 | "basic" translation/rotation (6 parameters) 180 | "power2" translation/rotation + quadratic terms (12 parameters) 181 | "derivatives" translation/rotation + derivatives (12 parameters) 182 | "full" translation/rotation + derivatives + quadratic terms + power2d derivatives (24 parameters) 183 | 184 | n_compcor : int or "auto", optional 185 | The number of noise components to be extracted. For acompcor_combined=False, 186 | this is the number of components per mask. 187 | Default is "auto": select all components (50% variance explained by fMRIPrep defaults) 188 | 189 | acompcor_combined: boolean, optional 190 | If true, use components generated from the combined white matter and csf 191 | masks. Otherwise, components are generated from each mask separately and then 192 | concatenated. 193 | 194 | demean : boolean, optional 195 | If True, the confounds are standardized to a zero mean (over time). 196 | This step is critical if the confounds are regressed out of time series 197 | using nilearn with no or zscore standardization, but should be turned off 198 | with "spc" normalization. 199 | 200 | Returns 201 | ------- 202 | conf : a Confounds object 203 | conf.confounds_ is a reduced version of fMRIprep confounds. 
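    A minimal usage sketch (the image path below is hypothetical; any fMRIprep
    preprocessed bold image with its companion confounds tsv and json should work):

    ```
    conf = CompCor(compcor="anat", n_compcor=5, acompcor_combined=False)
    conf.load("sub-01_task-rest_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz")
    confounds = conf.confounds_  # pandas.DataFrame of the selected regressors
    ```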
204 | 205 | """ 206 | 207 | def __init__( 208 | self, 209 | motion="full", 210 | compcor="anat", 211 | n_compcor="auto", 212 | demean=True, 213 | acompcor_combined=True, 214 | ): 215 | """Default parameters.""" 216 | # set attributes 217 | self.strategy = ["high_pass", "motion", "compcor", "non_steady_state"] 218 | self.motion = motion 219 | self.n_motion = 0 220 | self.compcor = compcor 221 | self.n_compcor = n_compcor 222 | self.acompcor_combined = acompcor_combined 223 | self.demean = demean 224 | 225 | 226 | class ICAAROMA(Confounds): 227 | """ 228 | Load confounds for non-aggresive ICA-AROMA strategy from Pruim et al., 2015. 229 | The strategy requires fMRIprep outputs generated with `--use-aroma`. 230 | 231 | ICA-AROMA is implemented in two steps: 232 | 1. A non-aggressive denoising immediately after ICA classification. 233 | A linear regression estimates signals with all independent components 234 | as predictors. 235 | A partial regression is then applied to remove variance associated 236 | with noise independent components. 237 | fMRIprep perfoms this step and generates files suffixed with 238 | `desc-smoothAROMAnonaggr_bold`. 239 | 2. Confound regression step (mean signals from WM and CSF) 240 | Confound regressors generated by `load_confounds.ICAAROMA`. 241 | The generated confound regressors must only be used on fMRIprep output 242 | suffixed `desc-smoothAROMAnonaggr_bold`. 243 | 244 | `desc-smoothAROMAnonaggr_bold` is generated in `MNI152NLin6Asym` only. 245 | To produce `desc-smoothAROMAnonaggr_bold` in other spatial templates, 246 | use FSL function `fsl_regfilt`. For example, native T1w space: 247 | ``` 248 | fsl_regfilt -i sub-_task-_space-T1w_desc-preproc_bold.nii.gz \ 249 | -f $(cat sub-_task-_AROMAnoiseICs.csv) \ 250 | -d sub-_task-_desc-MELODIC_mixing.tsv \ 251 | -o sub-_task-_space-T1w_desc-AROMAnonaggr_bold.nii.gz 252 | ``` 253 | 254 | Parameters 255 | ---------- 256 | confounds_raw : Pandas Dataframe or path to tsv file(s), optionally as a list. 257 | Raw confounds from fmriprep 258 | 259 | wm_csf : string, optional 260 | Type of confounds extracted from masks of white matter and cerebrospinal fluids. 261 | "basic" the averages in each mask (2 parameters) 262 | "power2" averages and quadratic terms (4 parameters) 263 | "derivatives" averages and derivatives (4 parameters) 264 | "full" averages + derivatives + quadratic terms + power2d derivatives (8 parameters) 265 | 266 | demean : boolean, optional 267 | If True, the confounds are standardized to a zero mean (over time). 268 | This step is critical if the confounds are regressed out of time series 269 | using nilearn with no or zscore standardization, but should be turned off 270 | with "spc" normalization. 271 | 272 | global_signal : string, optional 273 | Specify type of confounds extracted from the global signal. 274 | Global signal regressors will not be retrieved if no arguments were applied. 275 | "basic" just the global signal (1 parameter) 276 | "power2" global signal and quadratic term (2 parameters) 277 | "derivatives" global signal and derivative (2 parameters) 278 | "full" global signal + derivatives + quadratic terms + power2d derivatives (4 parameters) 279 | 280 | Returns 281 | ------- 282 | conf : a Confounds object 283 | conf.confounds_ is a reduced version of fMRIprep confounds. 
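    A minimal usage sketch (the image path below is hypothetical; it must be the
    `desc-smoothAROMAnonaggr_bold` output of an fMRIprep run with `--use-aroma`):

    ```
    conf = ICAAROMA(global_signal="basic")
    conf.load("sub-01_task-rest_space-MNI152NLin6Asym_desc-smoothAROMAnonaggr_bold.nii.gz")
    confounds = conf.confounds_
    ```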
284 | 285 | Notes 286 | ----- 287 | fMRIprep documentation on ICA-AROMA 288 | https://fmriprep.org/en/latest/workflows.html#ica-aroma 289 | 290 | For more discussion regarding choosing the nuisance regressors before or 291 | after denoising with ICA-AROMA has a detriment on outcome measures, 292 | please see notebook 5. 293 | https://github.com/nipreps/fmriprep-notebooks/ 294 | 295 | """ 296 | 297 | def __init__(self, wm_csf="basic", demean=True, **kwargs): 298 | """Default parameters.""" 299 | strategy = ["wm_csf", "high_pass", "ica_aroma", "non_steady_state"] 300 | global_signal = kwargs.get("global_signal", False) 301 | strategy, global_signal = _update_strategy(strategy, global_signal) 302 | # warn user for supplying useless parameter 303 | _check_invalid_parameter(kwargs, valid_keys=["global_signal"]) 304 | 305 | # set attributes 306 | self.strategy = strategy 307 | self.demean = demean 308 | self.wm_csf = wm_csf 309 | self.ica_aroma = "full" 310 | if global_signal: 311 | self.global_signal = global_signal 312 | 313 | 314 | def _check_invalid_parameter(keyword_args, valid_keys): 315 | """Raise warnings if kwargs contains invalid parameters.""" 316 | # supply extra parameter will not effect the behaviour 317 | # but it is good to inform the user 318 | for key in valid_keys: 319 | if isinstance(keyword_args, dict) and key in keyword_args: 320 | keyword_args.pop(key) 321 | if isinstance(keyword_args, dict) and len(keyword_args) > 0: 322 | warnings.warn( 323 | "Supplied paramerters not accepted in the current " 324 | "strategy, hence not taking effect: " 325 | f"{list(keyword_args.keys())}. " 326 | "Please consider customising strategy with using " 327 | "the `Confounds` module." 328 | ) 329 | 330 | 331 | def _update_strategy(strategy, global_signal): 332 | """Update strategy if global signal is supplied as a parameter.""" 333 | strat = strategy.copy() 334 | if isinstance(global_signal, str): 335 | strat.append("global") 336 | return strat, global_signal 337 | -------------------------------------------------------------------------------- /load_confounds/parser.py: -------------------------------------------------------------------------------- 1 | """Flexible method to load confounds generated by fMRIprep. 2 | 3 | Authors: load_confounds team 4 | """ 5 | import numpy as np 6 | import pandas as pd 7 | from . import confounds as cf 8 | from .compcor import _find_compcor 9 | 10 | # Global variables listing the admissible types of noise components 11 | all_confounds = [ 12 | "motion", 13 | "high_pass", 14 | "wm_csf", 15 | "global", 16 | "compcor", 17 | "ica_aroma", 18 | "scrub", 19 | "non_steady_state", 20 | ] 21 | 22 | 23 | def _sanitize_strategy(strategy): 24 | """Defines the supported denoising strategies.""" 25 | if isinstance(strategy, list): 26 | for conf in strategy: 27 | if not conf in all_confounds: 28 | raise ValueError(f"{conf} is not a supported type of confounds.") 29 | else: 30 | raise ValueError("strategy needs to be a list of strings") 31 | # add non steady state if not present 32 | if "non_steady_state" not in strategy: 33 | strategy.append("non_steady_state") 34 | return strategy 35 | 36 | 37 | def _check_error(missing_confounds, missing_keys): 38 | """Consolidate a single error message across multiple missing confounds.""" 39 | if missing_confounds or missing_keys: 40 | error_msg = ( 41 | "The following keys or parameters are missing: " 42 | + f" {missing_confounds}" 43 | + f" {missing_keys}" 44 | + ". You may want to try a different denoising strategy." 
49 | class Confounds: 50 | """ 51 | Confounds from fMRIprep 52 | 53 | Parameters 54 | ---------- 55 | strategy : list of strings 56 | The type of noise confounds to include. 57 | "motion" head motion estimates. 58 | "high_pass" discrete cosines covering low frequencies. 59 | "wm_csf" confounds derived from white matter and cerebrospinal fluid. 60 | "global" confounds derived from the global signal. 61 | "ica_aroma" confounds derived from ICA-AROMA. 62 | "scrub" regressors for the Power et al. (2014) scrubbing approach. 63 | 64 | motion : string, optional 65 | Type of confounds extracted from head motion estimates. 66 | "basic" translation/rotation (6 parameters) 67 | "power2" translation/rotation + quadratic terms (12 parameters) 68 | "derivatives" translation/rotation + derivatives (12 parameters) 69 | "full" translation/rotation + derivatives + quadratic terms + squared derivatives (24 parameters) 70 | 71 | n_motion : float 72 | Number of PCA components to keep from head motion estimates. 73 | If the parameter is strictly between 0 and 1, a principal component 74 | analysis is applied to the motion parameters, and the number of extracted 75 | components is set so that they explain at least a fraction `n_motion` of the 76 | variance of the parameters. If n_motion = 0, no PCA is performed. 77 | 78 | fd_thresh : float, optional 79 | Framewise displacement threshold for scrub (default = 0.2 mm) 80 | 81 | std_dvars_thresh : float, optional 82 | Standardized DVARS threshold for scrub (default = 3) 83 | 84 | wm_csf : string, optional 85 | Type of confounds extracted from masks of white matter and cerebrospinal fluids. 86 | "basic" the averages in each mask (2 parameters) 87 | "power2" averages and quadratic terms (4 parameters) 88 | "derivatives" averages and derivatives (4 parameters) 89 | "full" averages + derivatives + quadratic terms + squared derivatives (8 parameters) 90 | 91 | global_signal : string, optional 92 | Type of confounds extracted from the global signal. 93 | "basic" just the global signal (1 parameter) 94 | "power2" global signal and quadratic term (2 parameters) 95 | "derivatives" global signal and derivative (2 parameters) 96 | "full" global signal + derivatives + quadratic terms + squared derivatives (4 parameters) 97 | 98 | scrub : string, optional 99 | Type of scrubbing of frames with excessive motion (Power et al. 2014) 100 | "basic" remove time frames based on excessive FD and DVARS 101 | "full" also remove time windows which are too short after scrubbing. 102 | One-hot encoding vectors are added as regressors for each scrubbed frame. 103 | 104 | compcor : string, optional 105 | Type of confounds extracted from a component based noise correction method 106 | "anat" noise components calculated using anatomical compcor 107 | "temp" noise components calculated using temporal compcor 108 | "full" noise components calculated using both temporal and anatomical compcor 109 | 110 | n_compcor : int or "auto", optional 111 | The number of noise components to be extracted. For acompcor_combined=False, 112 | and/or compcor="full", this is the number of components per mask. 113 | Default is "auto": select all components (50% variance explained by fMRIPrep defaults) 114 | 115 | acompcor_combined : boolean, optional 116 | If True, use components generated from the combined white matter and CSF 117 | masks. Otherwise, components are generated from each mask separately and then 118 | concatenated.
119 | 120 | ica_aroma : None or string, optional 121 | None: default, not using an ICA-AROMA related strategy 122 | "basic": use noise ICs only. 123 | "full": use fMRIprep output `~desc-smoothAROMAnonaggr_bold.nii.gz`. 124 | 125 | demean : boolean, optional 126 | If True, the confounds are standardized to a zero mean (over time). 127 | This step is critical if the confounds are regressed out of time series 128 | using nilearn with no or zscore standardization, but should be turned off 129 | with "psc" normalization. 130 | 131 | 132 | Attributes 133 | ---------- 134 | `confounds_` : pandas.DataFrame 135 | The confounds loaded using the specified model. The columns of the dataframe 136 | contain the labels. 137 | 138 | `sample_mask_` : list of int 139 | The indices of the niimgs along the time/fourth dimension. 140 | This list includes indices for valid volumes for subsequent analysis. 141 | This attribute should be passed to parameter `sample_mask` of nilearn.NiftiMasker. 142 | Volumes are removed if flagged as follows: 143 | - Non-steady-state volumes (if present) 144 | - Motion outliers detected by scrubbing 145 | 146 | Notes 147 | ----- 148 | The predefined strategies implemented in this class are 149 | adapted from Ciric et al. (2017). The band-pass filter is replaced 150 | by a high-pass filter. Low-pass filters can be implemented, e.g., through 151 | nilearn maskers. Scrubbing is implemented by introducing regressors in the 152 | confounds, rather than eliminating time points. Other aspects of the 153 | preprocessing listed in Ciric et al. (2017) are controlled through fMRIprep, 154 | e.g. distortion correction. 155 | 156 | References 157 | ---------- 158 | Ciric et al., 2017 "Benchmarking of participant-level confound regression 159 | strategies for the control of motion artifact in studies of functional 160 | connectivity" Neuroimage 154: 174-87 161 | https://doi.org/10.1016/j.neuroimage.2017.03.020 162 | """ 163 | 164 | def __init__( 165 | self, 166 | strategy=["motion", "high_pass", "wm_csf"], 167 | motion="full", 168 | n_motion=0, 169 | scrub="full", 170 | fd_thresh=0.2, 171 | std_dvars_thresh=3, 172 | wm_csf="basic", 173 | global_signal="basic", 174 | compcor="anat", 175 | acompcor_combined=True, 176 | n_compcor="auto", 177 | ica_aroma=None, 178 | demean=True, 179 | ): 180 | """Default parameters.""" 181 | self.strategy = _sanitize_strategy(strategy) 182 | self.motion = motion 183 | self.n_motion = n_motion 184 | self.scrub = scrub 185 | self.fd_thresh = fd_thresh 186 | self.std_dvars_thresh = std_dvars_thresh 187 | self.wm_csf = wm_csf 188 | self.global_signal = global_signal 189 | self.compcor = compcor 190 | self.acompcor_combined = acompcor_combined 191 | self.n_compcor = n_compcor 192 | self.ica_aroma = ica_aroma 193 | self.demean = demean 194 | 195 | def load(self, img_files): 196 | """ 197 | Load fMRIprep confounds and sample mask 198 | 199 | Parameters 200 | ---------- 201 | img_files : path to processed image files, optionally as a list. 202 | Processed nii.gz/dtseries.nii/func.gii file from fMRIprep. 203 | `nii.gz` or `dtseries.nii`: path to files, optionally as a list. 204 | `func.gii`: list of a pair of paths to files, optionally as a list of lists. 205 | The companion tsv will be automatically detected. 206 | 207 | Returns 208 | ------- 209 | confounds : pandas.DataFrame or list of pandas.DataFrame 210 | A reduced version of fMRIprep confounds based on the selected strategy and flags. 211 | An intercept is automatically added to the list of confounds.
The columns contain the labels of the regressors. 213 | 214 | sample_mask : list or list of list 215 | Indices of the time points to be preserved in the analysis 216 | """ 217 | return self._parse(img_files) 218 | 219 | def _parse(self, img_files): 220 | """Parse the input images, find the confound files, apply scrubbing, etc.""" 221 | img_files, flag_single = cf._sanitize_confounds(img_files) 222 | 223 | confounds_out = [] 224 | sample_mask_out = [] 225 | self.missing_confounds_ = [] 226 | self.missing_keys_ = [] 227 | 228 | for file in img_files: 229 | sample_mask, conf = self._load_single(file) 230 | confounds_out.append(conf) 231 | sample_mask_out.append(sample_mask) 232 | 233 | # If a single input was provided, 234 | # send back a single output instead of a list 235 | if flag_single: 236 | confounds_out = confounds_out[0] 237 | sample_mask_out = sample_mask_out[0] 238 | 239 | self.confounds_ = confounds_out 240 | self.sample_mask_ = sample_mask_out 241 | return confounds_out, sample_mask_out 242 | 243 | 244 | def _load_single(self, confounds_raw): 245 | """Load a single confounds file from fMRIprep.""" 246 | # Convert the tsv file to a pandas dataframe and 247 | # check if relevant imaging files are present according to the strategy 248 | flag_acompcor = ("compcor" in self.strategy) and (self.compcor == "anat") 249 | flag_full_aroma = ("ica_aroma" in self.strategy) and (self.ica_aroma == "full") 250 | confounds_raw, self.json_ = cf._confounds_to_df( 251 | confounds_raw, flag_acompcor, flag_full_aroma 252 | ) 253 | 254 | confounds = pd.DataFrame() 255 | 256 | for confound in self.strategy: 257 | loaded_confounds = self._load_confound(confounds_raw, confound) 258 | confounds = pd.concat([confounds, loaded_confounds], axis=1) 259 | 260 | _check_error(self.missing_confounds_, self.missing_keys_) 261 | sample_mask, confounds = cf._prepare_output( 262 | confounds, self.demean 263 | ) 264 | return sample_mask, confounds 265 | 266 | def _load_confound(self, confounds_raw, confound): 267 | """Load a single type of confound.""" 268 | try: 269 | loaded_confounds = getattr(self, f"_load_{confound}")(confounds_raw) 270 | except cf.MissingConfound as exception: 271 | self.missing_confounds_ += exception.params 272 | self.missing_keys_ += exception.keywords 273 | loaded_confounds = pd.DataFrame() 274 | return loaded_confounds 275 | 276 | def _load_motion(self, confounds_raw): 277 | """Load the motion regressors.""" 278 | motion_params = cf._add_suffix( 279 | ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"], self.motion 280 | ) 281 | cf._check_params(confounds_raw, motion_params) 282 | confounds_motion = confounds_raw[motion_params] 283 | 284 | # Optionally apply PCA reduction 285 | if self.n_motion > 0: 286 | confounds_motion = cf._pca_motion( 287 | confounds_motion, n_components=self.n_motion 288 | ) 289 | return confounds_motion 290 | 291 | def _load_high_pass(self, confounds_raw): 292 | """Load the high pass filter regressors.""" 293 | high_pass_params = cf._find_confounds(confounds_raw, ["cosine"]) 294 | return confounds_raw[high_pass_params] 295 | 296 | def _load_wm_csf(self, confounds_raw): 297 | """Load the regressors derived from the white matter and CSF masks.""" 298 | wm_csf_params = cf._add_suffix(["csf", "white_matter"], self.wm_csf) 299 | cf._check_params(confounds_raw, wm_csf_params) 300 | return confounds_raw[wm_csf_params] 301 | 302 | def _load_global(self, confounds_raw): 303 | """Load the regressors derived from the global signal.""" 304 | global_params = cf._add_suffix(["global_signal"],
self.global_signal) 305 | cf._check_params(confounds_raw, global_params) 306 | return confounds_raw[global_params] 307 | 308 | def _load_compcor(self, confounds_raw): 309 | """Load compcor regressors.""" 310 | compcor_cols = _find_compcor( 311 | self.json_, self.compcor, self.n_compcor, self.acompcor_combined 312 | ) 313 | cf._check_params(confounds_raw, compcor_cols) 314 | return confounds_raw[compcor_cols] 315 | 316 | def _load_ica_aroma(self, confounds_raw): 317 | """Load the ICA-AROMA regressors.""" 318 | if self.ica_aroma is None: 319 | raise ValueError("Please select an option when using ICA-AROMA strategy") 320 | if self.ica_aroma == "full": 321 | return pd.DataFrame() 322 | if self.ica_aroma == "basic": 323 | ica_aroma_params = cf._find_confounds(confounds_raw, ["aroma"]) 324 | return confounds_raw[ica_aroma_params] 325 | 326 | def _load_scrub(self, confounds_raw): 327 | """Perform basic scrubbing: flag volumes where framewise displacement or standardized DVARS exceed the thresholds.""" 328 | n_scans = len(confounds_raw) 329 | # Get indices of fd outliers 330 | fd_outliers = np.where( 331 | confounds_raw["framewise_displacement"] > self.fd_thresh 332 | )[0] 333 | dvars_outliers = np.where(confounds_raw["std_dvars"] > self.std_dvars_thresh)[0] 334 | combined_outliers = np.sort( 335 | np.unique(np.concatenate((fd_outliers, dvars_outliers))) 336 | ) 337 | # Do full scrubbing if desired, and motion outliers were detected 338 | if self.scrub == "full" and len(combined_outliers) > 0: 339 | combined_outliers = cf._optimize_scrub(combined_outliers, n_scans) 340 | # Make one-hot encoded motion outlier regressors 341 | motion_outlier_regressors = pd.DataFrame( 342 | np.transpose(np.eye(n_scans)[combined_outliers]).astype(int) 343 | ) 344 | column_names = [ 345 | "motion_outlier_" + str(num) 346 | for num in range(np.shape(motion_outlier_regressors)[1]) 347 | ] 348 | motion_outlier_regressors.columns = column_names 349 | return motion_outlier_regressors 350 | 351 | def _load_non_steady_state(self, confounds_raw): 352 | """Find non-steady-state regressors.""" 353 | nss_outliers = cf._find_confounds(confounds_raw, ["non_steady_state"]) 354 | if nss_outliers: 355 | return confounds_raw[nss_outliers] 356 | else: 357 | return pd.DataFrame() 358 | -------------------------------------------------------------------------------- /load_confounds/tests/test_parser.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import load_confounds.parser as lc 4 | import pandas as pd 5 | import numpy as np 6 | from scipy.stats import pearsonr 7 | from sklearn.preprocessing import scale 8 | import pytest 9 | from nibabel import Nifti1Image 10 | from nilearn.input_data import NiftiMasker 11 | 12 | 13 | path_data = os.path.join(os.path.dirname(lc.__file__), "data") 14 | file_confounds = os.path.join( 15 | path_data, "test_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz" 16 | ) 17 | file_no_none_steady = os.path.join( 18 | path_data, "nonss_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz" 19 | ) 20 | 21 | 22 | def _simu_img(demean=True): 23 | """Simulate a nifti image based on the confound file, with some parts driven by confounds and some parts by noise.""" 24 | # set the size of the image matrix 25 | nx = 5 26 | ny = 5 27 | # the actual number of slices will be double that, 28 | # as we will stack slices with confounds on top of slices with noise 29 | nz = 2 30 | # Load a simple 6-parameter motion model as confounds 31 | confounds, _ = lc.Confounds(strategy=["motion"],
motion="basic", demean=demean).load( 32 | file_confounds 33 | ) 34 | X = confounds.values 35 | # the first row is non-steady state, replace it with the imput from the second row 36 | non_steady = X[0, :] 37 | X[0, :] = X[1, :] 38 | # repeat X in length (axis = 0) three times to increase the degree of freedom 39 | X = np.tile(X, (3, 1)) 40 | # put non-steady state volume back at the first sample 41 | X[0, :] = non_steady 42 | # the number of time points is based on the example confound file 43 | nt = X.shape[0] 44 | # initialize an empty 4D volume 45 | vol = np.zeros([nx, ny, 2 * nz, nt]) 46 | vol_conf = np.zeros([nx, ny, 2 * nz]) 47 | vol_rand = np.zeros([nx, ny, 2 * nz]) 48 | 49 | # create a random mixture of confounds 50 | # standardized to zero mean and unit variance 51 | beta = np.random.rand(nx * ny * nz, X.shape[1]) 52 | tseries_conf = scale(np.matmul(beta, X.transpose()), axis=1) 53 | # fill the first half of the 4D data with the mixture 54 | vol[:, :, 0:nz, :] = tseries_conf.reshape(nx, ny, nz, nt) 55 | vol_conf[:, :, 0:nz] = 1 56 | 57 | # create random noise in the second half of the 4D data 58 | tseries_rand = scale(np.random.randn(nx * ny * nz, nt), axis=1) 59 | vol[:, :, range(nz, 2 * nz), :] = tseries_rand.reshape(nx, ny, nz, nt) 60 | vol_rand[:, :, range(nz, 2 * nz)] = 1 61 | 62 | # Shift the mean to non-zero 63 | vol = vol + 100 64 | 65 | # create an nifti image with the data, and corresponding mask 66 | img = Nifti1Image(vol, np.eye(4)) 67 | mask_conf = Nifti1Image(vol_conf, np.eye(4)) 68 | mask_rand = Nifti1Image(vol_rand, np.eye(4)) 69 | 70 | return img, mask_conf, mask_rand, X 71 | 72 | 73 | def _tseries_std(img, mask_img, confounds, sample_mask, standardize): 74 | """Get the std of time series in a mask.""" 75 | masker = NiftiMasker( 76 | mask_img=mask_img, standardize=standardize, sample_mask=sample_mask 77 | ) 78 | tseries = masker.fit_transform(img, confounds=confounds) 79 | return tseries.std(axis=0) 80 | 81 | 82 | def _denoise(img, mask_img, confounds, sample_mask, standardize): 83 | """Extract time series with and without confounds.""" 84 | masker = NiftiMasker( 85 | mask_img=mask_img, standardize=standardize, sample_mask=sample_mask 86 | ) 87 | tseries_raw = masker.fit_transform(img) 88 | tseries_clean = masker.fit_transform(img, confounds=confounds) 89 | return tseries_raw, tseries_clean 90 | 91 | 92 | def _corr_tseries(tseries1, tseries2): 93 | """Compute the correlation between two sets of time series.""" 94 | corr = np.zeros(tseries1.shape[1]) 95 | for ind in range(tseries1.shape[1]): 96 | corr[ind], _ = pearsonr(tseries1[:, ind], tseries2[:, ind]) 97 | return corr 98 | 99 | 100 | def _regression(confounds, sample_mask): 101 | """Simple regression with nilearn.""" 102 | # Simulate data 103 | img, mask_conf, _, _ = _simu_img(demean=True) 104 | confounds = np.tile(confounds, (3, 1)) # matching L29 (_simu_img) 105 | 106 | # Do the regression 107 | masker = NiftiMasker(mask_img=mask_conf, standardize=True, sample_mask=sample_mask) 108 | tseries_clean = masker.fit_transform(img, confounds) 109 | assert tseries_clean.shape[0] == confounds.shape[0] 110 | 111 | 112 | @pytest.mark.filterwarnings("ignore") 113 | def test_nilearn_regress(): 114 | """Try regressing out all motion types in nilearn.""" 115 | # Regress full motion 116 | confounds, _ = lc.Confounds(strategy=["motion"], motion="full").load(file_confounds) 117 | sample_mask = None # not testing sample mask here 118 | _regression(confounds, sample_mask) 119 | 120 | # Regress high_pass 121 | confounds, _ = 
lc.Confounds(strategy=["high_pass"]).load(file_confounds) 122 | _regression(confounds, sample_mask) 123 | 124 | # Regress wm_csf 125 | confounds, _ = lc.Confounds(strategy=["wm_csf"], wm_csf="full").load(file_confounds) 126 | _regression(confounds, sample_mask) 127 | # Regress global 128 | confounds, _ = lc.Confounds(strategy=["global"], global_signal="full").load( 129 | file_confounds 130 | ) 131 | _regression(confounds, sample_mask) 132 | 133 | # Regress AnatCompCor 134 | confounds, _ = lc.Confounds(strategy=["compcor"], compcor="anat").load(file_confounds) 135 | _regression(confounds, sample_mask) 136 | 137 | # Regress TempCompCor 138 | confounds, _ = lc.Confounds(strategy=["compcor"], compcor="temp").load(file_confounds) 139 | _regression(confounds, sample_mask) 140 | 141 | # Regress ICA-AROMA 142 | confounds, _ = lc.Confounds(strategy=["ica_aroma"], ica_aroma="basic").load( 143 | file_confounds 144 | ) 145 | _regression(confounds, sample_mask) 146 | 147 | 148 | @pytest.mark.filterwarnings("ignore") 149 | def test_nilearn_standardize_false(): 150 | """Test removing confounds in nilearn with no standardization.""" 151 | # Simulate data 152 | img, mask_conf, mask_rand, X = _simu_img(demean=True) 153 | 154 | # Check that most variance is removed 155 | # in voxels composed of pure confounds 156 | tseries_std = _tseries_std(img, mask_conf, X, None, False) 157 | assert np.mean(tseries_std < 0.0001) 158 | 159 | # Check that most variance is preserved 160 | # in voxels composed of random noise 161 | tseries_std = _tseries_std(img, mask_rand, X, None, False) 162 | assert np.mean(tseries_std > 0.9) 163 | 164 | 165 | @pytest.mark.filterwarnings("ignore") 166 | def test_nilearn_standardize_zscore(): 167 | """Test removing confounds in nilearn with zscore standardization.""" 168 | # Simulate data 169 | 170 | img, mask_conf, mask_rand, X = _simu_img(demean=True) 171 | 172 | # We now load the time series with vs without confounds 173 | # in voxels composed of pure confounds 174 | # the correlation before and after denoising should be very low 175 | # as most of the variance is removed by denoising 176 | tseries_raw, tseries_clean = _denoise(img, mask_conf, X, None, "zscore") 177 | corr = _corr_tseries(tseries_raw, tseries_clean) 178 | assert corr.mean() < 0.2 179 | 180 | # We now load the time series with zscore standardization 181 | # with vs without confounds in voxels where the signal is uncorrelated 182 | # with confounds. 
The correlation before and after denoising should be very 183 | # high as very little of the variance is removed by denoising 184 | tseries_raw, tseries_clean = _denoise(img, mask_rand, X, None, "zscore") 185 | corr = _corr_tseries(tseries_raw, tseries_clean) 186 | assert corr.mean() > 0.8 187 | 188 | 189 | def test_nilearn_standardize_psc(): 190 | """Test removing confounds in nilearn with psc standardization.""" 191 | # Similar test to test_nilearn_standardize_zscore, but with psc 192 | # Simulate data 193 | 194 | img, mask_conf, mask_rand, X = _simu_img(demean=False) 195 | 196 | # Areas with confound 197 | tseries_raw, tseries_clean = _denoise(img, mask_conf, X, None, "psc") 198 | corr = _corr_tseries(tseries_raw, tseries_clean) 199 | assert corr.mean() < 0.2 200 | 201 | # Areas with random noise 202 | tseries_raw, tseries_clean = _denoise(img, mask_rand, X, None, "psc") 203 | corr = _corr_tseries(tseries_raw, tseries_clean) 204 | assert corr.mean() > 0.8 205 | 206 | 207 | def test_confounds2df(): 208 | """Check auto-detection of confounds from an fMRI nii image.""" 209 | conf = lc.Confounds() 210 | file_confounds_nii = os.path.join( 211 | path_data, "test_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz" 212 | ) 213 | conf.load(file_confounds_nii) 214 | assert "trans_x" in conf.confounds_.columns 215 | 216 | 217 | def test_sanitize_strategy(): 218 | """Check that flawed strategy options generate meaningful error messages.""" 219 | with pytest.raises(ValueError): 220 | lc.Confounds(strategy="string") 221 | 222 | with pytest.raises(ValueError): 223 | lc.Confounds(strategy=["error"]) 224 | 225 | with pytest.raises(ValueError): 226 | lc.Confounds(strategy=[0]) 227 | 228 | conf = lc.Confounds(strategy=["motion"]) 229 | assert "non_steady_state" in conf.strategy 230 | 231 | 232 | def test_motion(): 233 | 234 | conf_basic = lc.Confounds(strategy=["motion"], motion="basic") 235 | conf_basic.load(file_confounds) 236 | conf_derivatives = lc.Confounds(strategy=["motion"], motion="derivatives") 237 | conf_derivatives.load(file_confounds) 238 | conf_power2 = lc.Confounds(strategy=["motion"], motion="power2") 239 | conf_power2.load(file_confounds) 240 | conf_full = lc.Confounds(strategy=["motion"], motion="full") 241 | conf_full.load(file_confounds) 242 | 243 | params = ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"] 244 | for param in params: 245 | # Basic 6 params motion model 246 | assert f"{param}" in conf_basic.confounds_.columns 247 | assert f"{param}_derivative1" not in conf_basic.confounds_.columns 248 | assert f"{param}_power2" not in conf_basic.confounds_.columns 249 | assert f"{param}_derivative1_power2" not in conf_basic.confounds_.columns 250 | 251 | # Use a 6 params + derivatives motion model 252 | assert f"{param}" in conf_derivatives.confounds_.columns 253 | assert f"{param}_derivative1" in conf_derivatives.confounds_.columns 254 | assert f"{param}_power2" not in conf_derivatives.confounds_.columns 255 | assert f"{param}_derivative1_power2" not in conf_derivatives.confounds_.columns 256 | 257 | # Use a 6 params + power2 motion model 258 | assert f"{param}" in conf_power2.confounds_.columns 259 | assert f"{param}_derivative1" not in conf_power2.confounds_.columns 260 | assert f"{param}_power2" in conf_power2.confounds_.columns 261 | assert f"{param}_derivative1_power2" not in conf_power2.confounds_.columns 262 | 263 | # Use a 6 params + derivatives + power2 + squared derivatives motion model 264 | assert f"{param}" in conf_full.confounds_.columns 265 | assert
f"{param}_derivative1" in conf_full.confounds_.columns 266 | assert f"{param}_power2" in conf_full.confounds_.columns 267 | assert f"{param}_derivative1_power2" in conf_full.confounds_.columns 268 | 269 | 270 | def test_n_compcor(): 271 | 272 | conf = lc.Confounds(strategy=["compcor"], compcor="anat", n_compcor=2) 273 | conf.load(file_confounds) 274 | assert "a_comp_cor_00" in conf.confounds_.columns 275 | assert "a_comp_cor_01" in conf.confounds_.columns 276 | assert "a_comp_cor_02" not in conf.confounds_.columns 277 | 278 | 279 | def test_n_motion(): 280 | 281 | conf = lc.Confounds(strategy=["motion"], motion="full", n_motion=0.2) 282 | conf.load(file_confounds) 283 | assert "motion_pca_1" in conf.confounds_.columns 284 | assert "motion_pca_2" not in conf.confounds_.columns 285 | 286 | conf = lc.Confounds(strategy=["motion"], motion="full", n_motion=0.95) 287 | conf.load(file_confounds) 288 | assert "motion_pca_6" in conf.confounds_.columns 289 | 290 | with pytest.raises(ValueError): 291 | conf = lc.Confounds(strategy=["motion"], motion="full", n_motion=50) 292 | conf.load(file_confounds) 293 | 294 | 295 | def test_not_found_exception(): 296 | 297 | conf = lc.Confounds( 298 | strategy=["high_pass", "motion", "global"], global_signal="full", motion="full" 299 | ) 300 | 301 | missing_params = ["trans_y", "trans_x_derivative1", "rot_z_power2"] 302 | missing_keywords = ["cosine"] 303 | 304 | file_missing_confounds = os.path.join( 305 | path_data, "missing_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz" 306 | ) 307 | 308 | with pytest.raises(ValueError) as exc_info: 309 | conf.load(file_missing_confounds) 310 | assert f"{missing_params}" in exc_info.value.args[0] 311 | assert f"{missing_keywords}" in exc_info.value.args[0] 312 | 313 | # loading anat compcor should also raise an error, because the json file is 314 | # missing for that example dataset 315 | with pytest.raises(ValueError): 316 | conf = lc.Confounds(strategy=["compcor"], compcor="anat") 317 | conf.load(file_missing_confounds) 318 | 319 | # catch invalid compcor option 320 | with pytest.raises(KeyError): 321 | conf = lc.Confounds(strategy=["compcor"], compcor="blah") 322 | conf.load(file_confounds) 323 | 324 | # catch invalid compcor option 325 | with pytest.raises(ValueError): 326 | conf = lc.Confounds( 327 | strategy=["compcor"], compcor="full", acompcor_combined=None 328 | ) 329 | conf.load(file_confounds) 330 | 331 | # Aggressive ICA-AROMA strategy requires 332 | # default nifti and noise ICs in confound file 333 | # correct nifti but missing noise regressor 334 | with pytest.raises(ValueError) as exc_info: 335 | conf = lc.Confounds(strategy=["ica_aroma"], ica_aroma="basic") 336 | conf.load(file_missing_confounds) 337 | assert "aroma" in exc_info.value.args[0] 338 | 339 | # Aggressive ICA-AROMA strategy requires 340 | # default nifti 341 | aroma_nii = os.path.join( 342 | path_data, "test_space-MNI152NLin2009cAsym_desc-smoothAROMAnonaggr_bold.nii.gz" 343 | ) 344 | with pytest.raises(ValueError) as exc_info: 345 | conf.load(aroma_nii) 346 | assert "Invalid file type" in exc_info.value.args[0] 347 | 348 | # non aggressive ICA-AROMA strategy requires 349 | # desc-smoothAROMAnonaggr nifti file 350 | with pytest.raises(ValueError) as exc_info: 351 | conf = lc.Confounds(strategy=["ica_aroma"], ica_aroma="full") 352 | conf.load(file_missing_confounds) 353 | assert "desc-smoothAROMAnonaggr_bold" in exc_info.value.args[0] 354 | 355 | 356 | def test_load_non_nifti(): 357 | """Test non-nifti and invalid file type as input.""" 358 | 
conf = lc.Confounds() 359 | 360 | # tsv file - unsupported input 361 | tsv = os.path.join(path_data, "test_desc-confounds_regressors.tsv") 362 | with pytest.raises(ValueError): 363 | conf.load(tsv) 364 | 365 | # cifti file should be supported 366 | cifti = os.path.join(path_data, "test_space-fsLR_den-91k_bold.dtseries.nii") 367 | conf.load(cifti) 368 | assert conf.confounds_.size != 0 369 | 370 | # gifti support 371 | gifti = [ 372 | os.path.join(path_data, f"test_space-fsaverage5_hemi-{hemi}_bold.func.gii") 373 | for hemi in ["L", "R"] 374 | ] 375 | conf.load(gifti) 376 | assert conf.confounds_.size != 0 377 | 378 | 379 | def test_invalid_filetype(): 380 | """Invalid file types/associated files for load method.""" 381 | # invalid fMRIprep version: contains confound files from before and after v20.2.0 382 | conf = lc.Confounds() 383 | 384 | invalid_ver = os.path.join( 385 | path_data, "invalid_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz" 386 | ) 387 | with pytest.raises(ValueError): 388 | conf.load(invalid_ver) 389 | 390 | # nifti with no associated confound file 391 | no_confound = os.path.join( 392 | path_data, "noconfound_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz" 393 | ) 394 | with pytest.raises(ValueError): 395 | conf.load(no_confound) 396 | 397 | 398 | def test_ica_aroma(): 399 | """Test ICA AROMA related file input.""" 400 | aroma_nii = os.path.join( 401 | path_data, "test_space-MNI152NLin2009cAsym_desc-smoothAROMAnonaggr_bold.nii.gz" 402 | ) 403 | # Aggressive strategy 404 | conf = lc.Confounds(strategy=["ica_aroma"], ica_aroma="basic") 405 | conf.load(file_confounds) 406 | for col_name in conf.confounds_.columns: 407 | # only aroma and non-steady state columns will be present 408 | assert re.match("(?:aroma_motion_+|non_steady_state+)", col_name) 409 | 410 | # Non-aggressive strategy 411 | conf = lc.Confounds(strategy=["ica_aroma"], ica_aroma="full") 412 | conf.load(aroma_nii) 413 | assert conf.confounds_.size == 0 414 | 415 | # invalid combination of strategy and option 416 | with pytest.raises(ValueError) as exc_info: 417 | conf = lc.Confounds(strategy=["ica_aroma"], ica_aroma=None) 418 | conf.load(file_confounds) 419 | assert "ICA-AROMA strategy" in exc_info.value.args[0] 420 | 421 | 422 | def test_sample_mask(): 423 | """Test load method and sample mask.""" 424 | # create a version with the scrub mask not applied; 425 | # this is not recommended 426 | conf = lc.Confounds(strategy=["motion", "scrub"], scrub="full", fd_thresh=0.15) 427 | reg, mask = conf.load(file_confounds) 428 | 429 | # the current test data has 6 time points marked as motion outliers, 430 | # and one non-steady-state volume (overlapping with the first motion outlier) 431 | # 2 time points removed due to the "full" scrubbing strategy 432 | assert reg.shape[0] - len(mask) == 8 433 | # nilearn requires unmasked confound regressors 434 | assert reg.shape[0] == 30 435 | 436 | # non-steady-state volumes will always be removed 437 | conf = lc.Confounds(strategy=["motion"]) 438 | reg, mask = conf.load(file_confounds) 439 | assert reg.shape[0] - len(mask) == 1 440 | 441 | # When no non-steady state volumes are present 442 | conf = lc.Confounds(strategy=["motion"]) 443 | reg, mask = conf.load(file_no_none_steady) 444 | assert mask is None 445 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # load_confounds 2 | Load a sensible subset of the fMRI confounds generated with
[fMRIprep](https://fmriprep.readthedocs.io/en/stable/) in python (Esteban et al., 2018). 3 | > ## :warning: **`load_confounds` is now a new feature in NiLearn 0.9.0**. Development of this project will fully migrate to [NiLearn](https://nilearn.github.io/stable/index.html). Please see the following links for the implementation. :warning: 4 | > 5 | > New API: 6 | > - [`nilearn.interfaces.fmriprep.load_confounds`](https://nilearn.github.io/stable/modules/generated/nilearn.interfaces.fmriprep.load_confounds.html#nilearn.interfaces.fmriprep.load_confounds) 7 | > - [`nilearn.interfaces.fmriprep.load_confounds_strategy`](https://nilearn.github.io/stable/modules/generated/nilearn.interfaces.fmriprep.load_confounds_strategy.html#nilearn.interfaces.fmriprep.load_confounds_strategy) 8 | > 9 | > The usage examples: 10 | >- [Temporal filtering with masker](https://nilearn.github.io/stable/manipulating_images/masker_objects.html#temporal-filtering-and-confound-removal) 11 | >- [Extracting signals on a parcellation](https://nilearn.github.io/stable/connectivity/functional_connectomes.html#extracting-signals-on-a-parcellation) 12 | >- [Example Gallery: create a connectome](https://nilearn.github.io/stable/auto_examples/03_connectivity/plot_signal_extraction.html#sphx-glr-auto-examples-03-connectivity-plot-signal-extraction-py) 13 | 14 | 15 | [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/SIMEXP/load_confounds/HEAD?filepath=demo%2Fload_confounds_demo.ipynb) [![All Contributors](https://img.shields.io/badge/all_contributors-10-orange.svg?style=flat-square)](#contributors-) [![collaborate brainhack](https://img.shields.io/badge/collaborate-brainhack-FF69A4.svg)](https://mattermost.brainhack.org/brainhack/channels/fmriprep_denoising) [![Pipy Badge](https://img.shields.io/pypi/v/load_confounds)](https://pypi.org/project/load-confounds/) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/1da186ba5c44489b8af6d96a9c50d3c7)](https://app.codacy.com/gh/SIMEXP/load_confounds?utm_source=github.com&utm_medium=referral&utm_content=SIMEXP/load_confounds&utm_campaign=Badge_Grade_Dashboard) [![Maintainability](https://api.codeclimate.com/v1/badges/ce6f2bf20aa87accaaa4/maintainability)](https://codeclimate.com/github/SIMEXP/load_confounds/maintainability) [![CircleCI](https://circleci.com/gh/SIMEXP/load_confounds.svg?style=svg)](https://circleci.com/gh/SIMEXP/load_confounds) [![codecov](https://codecov.io/gh/SIMEXP/load_confounds/branch/master/graph/badge.svg)](https://codecov.io/gh/SIMEXP/load_confounds) [![black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) 16 | 17 | ## Installation 18 | Install with `pip` (Python >=3.5): 19 | ```bash 20 | pip install load_confounds 21 | ``` 22 | 23 | ## TL;DR 24 | Load confounds for a minimal denosing strategy commonly used in resting state functional connectivity. 
25 | (Full motion parameters, WM/CSF signals, and high pass filter) 26 | ```python 27 | from load_confounds import Minimal 28 | from nilearn.input_data import NiftiMasker 29 | 30 | # load_confounds auto-detects the companion .tsv file (which needs to be in the same directory) 31 | file = "path/to/file/sub-01_ses-001_bold.nii.gz" 32 | confounds = Minimal().load(file) 33 | 34 | # Use the confounds to load preprocessed time series with nilearn 35 | masker = NiftiMasker(smoothing_fwhm=5, standardize=True) 36 | img = masker.fit_transform(file, confounds=confounds) 37 | ``` 38 | It is also possible to fine-tune a subset of noise components and their parameters: 39 | ```python 40 | from load_confounds import Confounds 41 | confounds = Confounds(strategy=['high_pass', 'motion', 'global'], motion="full").load(file) 42 | ``` 43 | You can check our tutorial on MyBinder for more info [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/SIMEXP/load_confounds/HEAD?filepath=demo%2Fload_confounds_demo.ipynb) 44 | 45 | ## Noise components 46 | The following noise components are supported. Check the docstring of `Confounds` for more info on the parameters for each type of noise. 47 | * `motion` the motion parameters including 6 translation/rotation (`basic`), and optionally derivatives, squares, and squared derivatives (`full`). 48 | * `high_pass` basis of discrete cosines covering the slow time drift frequency band. 49 | * `wm_csf` the average signal of white matter and cerebrospinal fluid masks (`basic`), and optionally derivatives, squares, and squared derivatives (`full`). 50 | * `global` the global signal (`basic`), and optionally derivatives, squares, and squared derivatives (`full`). 51 | * `compcor` the results of a PCA applied on a mask based on either anatomy (`anat`), temporal variance (`temp`), or both (`combined`). 52 | * `ica_aroma` the results of an independent component analysis (ICA) followed by identification of noise components. This can be implemented by incorporating ICA regressors (`basic`) or directly loading a denoised file generated by fMRIprep (`full`). 53 | * `scrub` regressors coding for time frames with excessive motion, using thresholds on framewise displacement and standardized DVARS (`basic`) and suppressing short time windows using the Power et al. (2014) approach (`full`). 54 | 55 | ## Predefined strategies 56 | 57 | ### `Minimal` 58 | `Minimal` is suitable for data with minimal motion. It only includes motion parameters, WM and CSF signals, with the option to add the global signal. 59 | 60 | ### `Scrubbing` 61 | Like `Minimal`, but with scrubbing. Pros: the actual impact on the data is pretty limited, but still good, and this offers the most control on what's being discarded. Cons: high loss of degrees of freedom, and it messes with the time axis in a way that may be difficult to handle for downstream analyses. 62 | 63 | ### `CompCor` 64 | `CompCor` includes anatomical or temporal compcor. The default is anatomical compcor with fully expanded motion parameters. Pros: large impact of denoising, efficient denoising, controlled loss of degrees of freedom. Cons: low control on what is being discarded (who knows what signals actually show up in the PCA for a given subject). 65 | 66 | ### `ICAAROMA` 67 | `ICAAROMA` is only applicable to fMRIprep outputs generated with `--use-aroma`. Pros: pretty similar to CompCor, with better control of discarded components (those can be visually reviewed, even though this is time consuming). Cons: may require retraining the noise detector, and it requires trusting that ICA efficiently separates noise from signal, which is not that clear; the quality of the separation may also vary substantially across subjects. 68 |
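All the predefined strategies above follow the same calling pattern as `Minimal` in the TL;DR, and the loaded regressors are also available as attributes after calling `load`. A minimal sketch (the file paths below are placeholders, and only default parameters are shown; see the class docstrings for the tunable options):
```python
from load_confounds import CompCor, ICAAROMA

# placeholder paths; the companion .tsv confound file is auto-detected
preproc = "path/to/file/sub-01_task-rest_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz"
aroma = "path/to/file/sub-01_task-rest_desc-smoothAROMAnonaggr_bold.nii.gz"

# anatomical CompCor with fully expanded motion parameters (the defaults)
conf = CompCor()
conf.load(preproc)
print(conf.confounds_.columns)  # the reduced fMRIprep confounds, as a pandas DataFrame

# non-aggressive ICA-AROMA, to be used only with the desc-smoothAROMAnonaggr output
conf = ICAAROMA()
conf.load(aroma)
```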
69 | ## A note on nifti files and file collections 70 | Note that if a `.nii.gz` file is specified, `load_confounds` will automatically look for the companion `tsv` confound file generated by fMRIprep. It is also possible to specify a list of confound (or imaging) files, in which case `load_confounds` will return a list of numpy ndarrays. 71 | 72 | ## A note on low pass filtering 73 | Low pass filtering is a common operation in resting-state fMRI analysis, and is featured in all preprocessing strategies of the Ciric et al. (2017) paper. fMRIprep does not output the discrete cosines for low pass filtering. Instead, this operation can be implemented directly with the nilearn masker, using the argument `low_pass`. Be sure to also specify the argument `t_r` in the nilearn masker if you use `low_pass` (a code sketch combining this with `load_confounds` appears below). 74 | 75 | ## A note on high pass filtering and detrending 76 | Nilearn maskers feature two arguments to remove slow time drifts: `high_pass` and `detrend`. Both of these operations are redundant with the `high_pass` regressors generated by fMRIprep, and included in all `load_confounds` strategies. Do not use nilearn's `high_pass` or `detrend` options with these strategies. It is however possible to use a flexible `Confounds` loader to exclude the `high_pass` noise components, and then rely on nilearn's high pass filtering or detrending options. This is not advised with `compcor` or `ica_aroma` analyses, which have been generated with the `high_pass` components of fMRIprep. 77 | 78 | ## A note on demeaning confounds 79 | Unless you use the `detrend` or `high_pass` options of nilearn maskers, it may be important to demean the confounds. This is done by default by `load_confounds`, and is required to properly regress out confounds using nilearn with the `standardize=False`, `standardize=True` or `standardize="zscore"` options. If you want to use `standardize="psc"`, you will need to turn off the demeaning in `load_confounds`, which can be achieved using, e.g.: 80 | ```python 81 | from load_confounds import Params6 82 | conf = Params6(demean=False) 83 | ``` 84 | 85 | ## A note on the choice of strategies 86 | We decided to focus our strategy catalogue on a reasonable but limited set of choices, and followed (mostly) the Ciric et al. (2017) reference. However, there are other strategies proposed in benchmarks such as (Parkes et al. 2018, Mascali et al. 2020). Advanced users can still explore these other choices using the flexible `Confounds` API, which can be used to reproduce most denoising strategies in a single short and readable command (see the sketch below). 87 | 88 | ## A note on denoising benchmarks 89 | There have been a number of benchmarks you may want to refer to in order to select a denoising strategy (e.g. Ciric et al., 2017; Parkes et al. 2018; Mascali et al., 2020; Raval et al., 2020). However, a number of caveats do apply and the conclusions of these studies may not directly apply to `load_confounds` strategies. First, the noise regressors generated by fMRIprep do not necessarily follow the same implementations as these papers did. For example, the way `load_confounds` implements scrubbing is by adding regressors, while Ciric et al. (2017) excluded outlier time points prior to regressing other confounds. There are also other aspects of the fMRI preprocessing pipelines which are not controlled by `load_confounds`. For example, Ciric et al. (2017) applied image distortion correction in all preprocessing strategies. This step is controlled by fMRIprep, and cannot be changed through `load_confounds`. 90 |
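Putting the notes above together, here is a minimal sketch combining the flexible `Confounds` API with low-pass filtering in the nilearn masker. The file path, repetition time (`t_r`) and cutoff frequency (`low_pass`) are placeholder values to adapt to your data.
```python
from load_confounds import Confounds
from nilearn.input_data import NiftiMasker

# placeholder path; the companion .tsv confound file is auto-detected
file = "path/to/file/sub-01_task-rest_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz"

# roughly the "Params9Scrub" row of the table below:
# basic motion, WM/CSF and high-pass regressors, plus full scrubbing
conf = Confounds(
    strategy=["high_pass", "motion", "wm_csf", "scrub"],
    motion="basic",
    wm_csf="basic",
    scrub="full",
)
conf.load(file)

# low-pass filtering is done by the nilearn masker, not by load_confounds;
# t_r is the repetition time in seconds, low_pass is the cutoff in Hz
masker = NiftiMasker(standardize=True, low_pass=0.1, t_r=2.0)
tseries = masker.fit_transform(file, confounds=conf.confounds_)
```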
91 | ## A note about ICA-AROMA denoising 92 | 93 | ICA-AROMA related strategies are only applicable to fMRIprep output generated with `--use-aroma`. The approach predefined in `load_confounds` is the non-aggressive approach, which is the recommended way of applying ICA-AROMA. fMRIprep produces files with suffix `desc-smoothAROMAnonaggr_bold`. The other noise regressors needed are retrieved by the predefined strategy in `load_confounds`. For details of the implementation, please refer to the documentation of `load_confounds.ICAAROMA`. 94 | 95 | The aggressive approach was described in Pruim et al. (2015) and achieves denoising in a single step in `load_confounds`. Noise independent components, along with other sources of noise, are included in the confound regressors. The aggressive approach **must** be applied to the regular minimally processed fMRIprep output suffixed `desc-preproc_bold`. The name "aggressive" reflects that this approach does not account for the potentially meaningful signal that is regressed out along with the noise independent components. Please refer to the table [Recreating strategies from Ciric et al. 2017](#Recreating-strategies-from-Ciric-et-al.-2017) for the relevant options. 96 | 97 | ## Recreating strategies from Ciric et al. 2017 98 | 99 | `load_confounds` can recreate the following strategies; the table below highlights the relevant options: 100 | 101 | | Strategy | `high_pass` | `motion` | `wm_csf` | `global` | `compcor` | `ica_aroma` | `scrub` | 102 | | --------------- |:-----------:|:--------:|:--------:|:--------:|:---------:|:-----------:|:-------:| 103 | | `Params2` | x | | `basic` | | | | | 104 | | `Params6` | x | `basic` | | | | | | 105 | | `Params9` | x | `basic` | `basic` | `basic` | | | | 106 | | `Params9Scrub` | x | `basic` | `basic` | | | | `full` | 107 | | `Params24` | x | `full` | | | | | | 108 | | `Params36` | x | `full` | `full` | `full` | | | | 109 | | `Params36Scrub` | x | `full` | `full` | | | | `full` | 110 | | `AnatCompCor` | x | `full` | | | `anat` | | | 111 | | `TempCompCor` | x | | | | `temp` | | | 112 | | `ICAAROMA` | x | | `basic` | | | `full` | | 113 | | `AROMAGSR` | x | | `basic` | `basic` | | `full` | | 114 | | `AggrICAAROMA` | x | | `basic` | `basic` | | `basic` | | 115 | 116 | ## Funding 117 | Development of this library was supported in part by the Canadian Consortium on Neurodegeneration in Aging ([CCNA](https://ccna-ccnv.ca/)) and in part by the Courtois Foundation. 118 | 119 | ## References 120 | 121 | Behzadi Y, Restom K, Liau J, Liu TT. A component based noise correction method (CompCor) for BOLD and perfusion based fMRI. Neuroimage. 2007. doi:[10.1016/j.neuroimage.2007.04.042](https://doi.org/10.1016/j.neuroimage.2007.04.042) 122 | 123 | Ciric R, Wolf DH, Power JD, Roalf DR, Baum GL, Ruparel K, Shinohara RT, Elliott MA, Eickhoff SB, Davatzikos C., Gur RC, Gur RE, Bassett DS, Satterthwaite TD. Benchmarking of participant-level confound regression strategies for the control of motion artifact in studies of functional connectivity. Neuroimage. 2017.
doi:[10.1016/j.neuroimage.2017.03.020](https://doi.org/10.1016/j.neuroimage.2017.03.020) 124 | 125 | Esteban O, Markiewicz CJ, Blair RW, Moodie CA, Isik AI, Erramuzpe A, Kent JD, Goncalves M, DuPre E, Snyder M, Oya H, Ghosh SS, Wright J, Durnez J, Poldrack RA, Gorgolewski KJ. fMRIPrep: a robust preprocessing pipeline for functional MRI. Nat Meth. 2018. doi: [10.1038/s41592-018-0235-4](https://doi.org/10.1038/s41592-018-0235-4) 126 | 127 | Fox MD, Snyder AZ, Vincent JL, Corbetta M, Van Essen DC, Raichle ME. The human brain is intrinsically organized into dynamic, anticorrelated functional networks. Proceedings of the National Academy of Sciences. 2005; doi: [10.1073/pnas.0504136102](https://doi.org/10.1073/pnas.0504136102). 128 | 129 | Mascali, D, Moraschi, M, DiNuzzo, M, et al. Evaluation of denoising strategies for task‐based functional connectivity: Equalizing residual motion artifacts between rest and cognitively demanding tasks. Hum Brain Mapp. 2020; 1– 24. doi: [10.1002/hbm.25332](https://doi.org/10.1002/hbm.25332) 130 | 131 | Parkes, L., Fulcher, B., Yucel, M., & Fornito, A. (2018). An evaluation of the efficacy, reliability, and sensitivity of motion correction strategies for resting-state functional MRI. NeuroImage, 171, 415-436. doi: [10.1016/j.neuroimage.2017.12.073](https://doi.org/10.1016/j.neuroimage.2017.12.073) 132 | 133 | Power JD, Mitra A, Laumann TO, Snyder AZ, Schlaggar BL, Petersen SE. Methods to detect, characterize, and remove motion artifact in resting state fMRI. Neuroimage 2014 84:320-41. doi: [10.1016/j.neuroimage.2013.08.048](https://doi.org/10.1016/j.neuroimage.2013.08.048) 134 | 135 | Pruim, R. H., Mennes, M., van Rooij, D., Llera, A., Buitelaar, J. K., & Beckmann, C. F. (2015). ICA-AROMA: A robust ICA-based strategy for removing motion artifacts from fMRI data. Neuroimage, 112, 267-277. doi: [10.1016/j.neuroimage.2015.02.064](https://doi.org/10.1016/j.neuroimage.2015.02.064) 136 | 137 | V. Raval, K. P. Nguyen, C. Mellema and A. Montillo, "Improved motion correction for functional MRI using an omnibus regression model," 2020 IEEE 17th International Symposium on Biomedical Imaging (ISBI), 2020, pp. 1044-1047, doi: [10.1109/ISBI45749.2020.9098688](https://doi.org/10.1109/ISBI45749.2020.9098688). 138 | 139 | ## Contributors ✨ 140 | 141 | Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)): 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 |

- François Paugam: 🚇 💻 👀 ⚠️ 🔣
- HanadS: 💻 ⚠️ 🔣 🚇 📖 🤔
- Elizabeth DuPre: 🤔
- Hao-Ting Wang: 🤔 💻 🔣 📖 ⚠️ 🐛
- Pierre Bellec: 💻 🐛 🤔 🚇 ⚠️ 🔣 📋 🚧 📆
- Steven Meisler: 🐛 ⚠️ 🔣 💻 📖 🤔
- Chris Markiewicz: 🤔
- Shima Rastegarnia: 🐛
- Thibault PIRONT: 💻
- m-w-w: 📖
162 | 163 | 164 | 165 | 166 | 167 | 168 | This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! 169 | -------------------------------------------------------------------------------- /load_confounds/data/test_desc-confounds_regressors.tsv: -------------------------------------------------------------------------------- 1 | csf csf_derivative1 csf_power2 csf_derivative1_power2 white_matter white_matter_derivative1 white_matter_derivative1_power2 white_matter_power2 global_signal global_signal_derivative1 global_signal_power2 global_signal_derivative1_power2 std_dvars dvars framewise_displacement t_comp_cor_00 t_comp_cor_01 t_comp_cor_02 t_comp_cor_03 t_comp_cor_04 t_comp_cor_05 a_comp_cor_00 a_comp_cor_01 a_comp_cor_02 a_comp_cor_03 a_comp_cor_04 a_comp_cor_05 a_comp_cor_06 a_comp_cor_07 a_comp_cor_08 a_comp_cor_09 a_comp_cor_10 a_comp_cor_11 a_comp_cor_12 a_comp_cor_13 a_comp_cor_14 a_comp_cor_15 a_comp_cor_16 a_comp_cor_17 a_comp_cor_18 a_comp_cor_19 a_comp_cor_20 a_comp_cor_21 a_comp_cor_22 a_comp_cor_23 a_comp_cor_24 a_comp_cor_25 a_comp_cor_26 a_comp_cor_27 a_comp_cor_28 a_comp_cor_29 a_comp_cor_30 a_comp_cor_31 a_comp_cor_32 a_comp_cor_33 a_comp_cor_34 a_comp_cor_35 a_comp_cor_36 a_comp_cor_37 a_comp_cor_38 a_comp_cor_39 a_comp_cor_40 a_comp_cor_41 a_comp_cor_42 a_comp_cor_43 a_comp_cor_44 a_comp_cor_45 a_comp_cor_46 a_comp_cor_47 a_comp_cor_48 a_comp_cor_49 a_comp_cor_50 a_comp_cor_51 a_comp_cor_52 a_comp_cor_53 a_comp_cor_54 a_comp_cor_55 a_comp_cor_56 a_comp_cor_57 a_comp_cor_58 a_comp_cor_59 a_comp_cor_60 a_comp_cor_61 a_comp_cor_62 a_comp_cor_63 a_comp_cor_64 a_comp_cor_65 a_comp_cor_66 a_comp_cor_67 a_comp_cor_68 a_comp_cor_69 a_comp_cor_70 a_comp_cor_71 a_comp_cor_72 a_comp_cor_73 a_comp_cor_74 a_comp_cor_75 a_comp_cor_76 a_comp_cor_77 a_comp_cor_78 a_comp_cor_79 a_comp_cor_80 a_comp_cor_81 a_comp_cor_82 a_comp_cor_83 a_comp_cor_84 a_comp_cor_85 a_comp_cor_86 a_comp_cor_87 a_comp_cor_88 a_comp_cor_89 a_comp_cor_90 a_comp_cor_91 a_comp_cor_92 a_comp_cor_93 a_comp_cor_94 a_comp_cor_95 a_comp_cor_96 a_comp_cor_97 a_comp_cor_98 a_comp_cor_99 a_comp_cor_100 a_comp_cor_101 a_comp_cor_102 a_comp_cor_103 a_comp_cor_104 a_comp_cor_105 a_comp_cor_106 a_comp_cor_107 a_comp_cor_108 a_comp_cor_109 a_comp_cor_110 a_comp_cor_111 a_comp_cor_112 a_comp_cor_113 a_comp_cor_114 a_comp_cor_115 a_comp_cor_116 a_comp_cor_117 a_comp_cor_118 a_comp_cor_119 a_comp_cor_120 a_comp_cor_121 a_comp_cor_122 a_comp_cor_123 a_comp_cor_124 a_comp_cor_125 cosine00 cosine01 cosine02 cosine03 non_steady_state_outlier00 trans_x trans_x_derivative1 trans_x_power2 trans_x_derivative1_power2 trans_y trans_y_derivative1 trans_y_derivative1_power2 trans_y_power2 trans_z trans_z_derivative1 trans_z_power2 trans_z_derivative1_power2 rot_x rot_x_derivative1 rot_x_derivative1_power2 rot_x_power2 rot_y rot_y_derivative1 rot_y_derivative1_power2 rot_y_power2 rot_z rot_z_derivative1 rot_z_power2 rot_z_derivative1_power2 aroma_motion_01 aroma_motion_02 aroma_motion_05 aroma_motion_11 aroma_motion_15 aroma_motion_16 aroma_motion_17 aroma_motion_18 aroma_motion_21 aroma_motion_23 aroma_motion_24 aroma_motion_25 aroma_motion_26 2 | 671.153618342538 n/a 450447.179414282 n/a 580.665507538706 n/a n/a 337172.431645183 530.768657905309 n/a 281715.368214603 n/a n/a n/a n/a 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 n/a 0 n/a 0 n/a n/a 0 0 n/a 0 n/a 0 n/a n/a 0 0 n/a n/a 0 0 n/a 0 n/a 0 0 0 0 0 0 0 0 0 0 0 0 0 3 | 653.900179929014 -17.2534384135246 427585.445311197 297.681137089286 579.194301696899 -1.47120584180664 2.16444662896597 335466.039118159 525.530324895805 -5.23833300950423 276182.122385091 27.4401327184616 1.355317 30.478424 0.2047947273 0.0644595501 -0.0879235241 0.1092472588 -0.0540962064 -0.10829327 -0.1722200746 0.0776486421 0.1361668336 -0.1872822171 0.222726161 0.0322528082 -0.1103685959 -0.106825763 0.150096395 0.0072652719 0.0057591188 -0.060782385 -0.0090386541 -0.0953708754 0.2056514314 0.1532129299 0.0031180453 -0.0719405471 -0.0183721503 -0.0312423037 -0.0014002752 0.0066665153 0.072570397 -0.099206731 0.1190932996 -0.1200373877 0.0568268137 -0.0290388434 -0.1067682717 -0.0171926889 -0.0496688002 0.0026312074 -0.0834533703 -0.0325804019 -0.0751572772 -0.0515706113 0.0232261731 0.1162337553 -0.1188252376 0.0823717271 0.036595931 0.0879895052 0.0699516587 -0.0408538203 0.0243904427 0.1492521276 0.0989776035 -0.1482325763 -0.1557214716 0.0827788382 -0.0331215221 0.1394954171 -0.1291066453 0.01481843 -0.0124995645 -0.020205274 -0.0926916164 -0.0533123202 0.1376721553 -0.0604725237 -0.1065268245 -0.0829286522 -0.1277331748 -0.2205049802 0.1358390032 -0.1409546454 -0.053511241 0.0901784178 0.0013190845 -0.024957492 0.0516058982 0.062094879 0.0959302777 -0.2432689072 0.0498646223 -0.0384398088 -0.0424776053 0.1636355607 0.0327464771 0.0407843002 0.1201621047 -0.1105422991 -0.1155370079 0.0500051458 0.0755488881 -0.0135389172 0.0107794627 -0.0930036699 -0.1740828194 0.0133917281 -0.0595627775 -0.0215611851 -0.0153165573 0.0505003953 0.0461414776 -0.0215042493 0.0681881767 -8.71736E-05 -0.0793109067 -0.0056325625 0.0002311488 0.0716171044 0.0033922459 -0.073041926 -0.1198891844 0.0189075532 0.1237892251 0.0863931004 -0.0528637169 -0.0430280819 -0.0887561825 0.1194799523 0.0942442683 0.0313944941 -0.0610328712 0.0428711293 0.0417418933 0.1135481234 -0.0307177184 0.0911313637 0.0481603648 0.0307306401 -0.2045937996 -0.0838511526 0.1316622344 -0.0256832733 -0.0270133016 0.1117549107 0.1116094882 0.1113672575 0.1110284288 0 -9.46603E-05 -1.37773E-05 8.96057239609E-09 1.8981399529E-10 -0.0177575 0.0427307 0.00182591272249 0.00031532880625 -0.0970591 0.0898339 0.00942046889281 0.00807012958921 0.00161921 -0.00088314 7.799362596E-07 2.6218410241E-06 -0.00096383 -0.000141379 1.99880216410001E-08 9.289682689E-07 -0.000201527 0.000419808 4.0613131729E-08 1.76238756864E-07 -1.542862866 0.4343631651 1.745968236 0.4448163917 1.040828528 4.891977472 0.8344550145 2.016852805 2.362939733 1.444640019 2.988160396 0.01158895605 -2.073736581 4 | 643.429107245049 -10.4710726839651 414001.01605016 109.64336315288 579.071605820252 -0.122695876646958 0.015054278146166 335323.924667245 522.582016981277 -2.94830791452796 273091.96447222 8.6925195588682 1.264208 28.429548 0.0840943474 0.0809620938 -0.115605803 -0.1868604801 0.091734672 -0.0051520662 -0.0680429028 0.1146602074 0.1545484773 0.1430255891 0.1474327418 0.0221005866 -0.1116752874 0.059697821 0.0503404671 0.163452832 0.0779934651 0.0064317847 -0.1891083255 -0.0979269097 0.0341384612 0.0866313799 -0.1074229235 0.0611339385 0.0424046234 -0.0507129461 -0.0621282274 -0.1470483371 0.1117146456 0.0330063809 -0.0014340561 -0.047621569 -0.099951851 -0.0664435625 0.1993474218 0.0585376594 0.025464721 -0.1513371825 0.1127676279 0.0376899406 -0.0151699076 
-0.0913565869 0.0732078002 0.1085295187 0.0146906325 0.0453012251 -0.0676150917 -0.0418781385 -0.0944182797 -0.078854421 0.0007451466 0.0223826543 -0.0406276048 -0.0331433758 -0.0060640094 0.0148954451 0.121076054 -0.0476378318 0.0290148758 0.1288215788 -0.0451180628 0.0776888607 0.1149632256 0.0418559615 0.1420902872 0.0875144353 0.160484622 -0.0046008475 0.0789833135 0.0287925138 0.0316060564 0.0871921547 0.0058936174 -0.0726057325 -0.1707404438 0.0368397782 -0.055087865 0.1015911236 0.2194176625 -0.1372826441 -0.0933248032 -0.0284367428 -0.2613912764 0.0173234975 0.1533914469 0.1221493158 -0.0697884653 0.0132974119 -0.1137795148 -0.0826742947 0.0663565786 -0.0380070539 -0.1230277335 -0.0150534252 0.0262594432 0.0409837839 0.1413748326 -0.0893943097 -0.009457257 -0.0330538265 0.0711394775 0.1338636004 0.0615704328 0.0435412584 -0.1140547874 0.0492801455 -0.0670531151 -0.12600184 0.1100947184 -0.0507429312 -0.0659515009 0.020599598 -0.0060914239 -0.0084150615 0.0090338513 0.0843277108 0.0053276911 0.0848888257 -0.0588349434 0.0222689523 0.0518422752 -0.1261436544 0.0650463963 0.0034293016 -0.0678333663 -0.0042270355 0.0747543912 0.002851904 0.0775022032 -0.0628047543 0.0521206401 -0.0267326037 -0.1696465795 0.1116687268 0.111265035 0.1105932959 0.1096551279 0 -9.87307E-05 -4.07039999999998E-06 9.74775112249E-09 1.65681561599998E-11 0.000538577 0.018296077 0.00033474643359 2.90065184929E-07 -0.0500854 0.0469737 0.00250854729316 0.00220652849169 0.00128775 -0.00033146 1.098657316E-07 1.6583000625E-06 -0.00096383 0 0 9.289682689E-07 -0.000246477 -4.495E-05 6.0750911529E-08 2.0205025E-09 -2.364661251 -0.3890278348 0.8589254943 -0.4342005226 0.9773508581 2.107972395 0.5272597082 1.525494922 -0.1258728553 -2.163467546 0.09985913802 -0.9777267164 -2.41648717 5 | 643.398455564996 -0.030651680052984 413961.572623422 0.00093952549007 578.289190104366 -0.7824157158866 0.612174352466341 334418.387391563 522.611411944933 0.029394963655591 273122.687895076 0.000864063888314 1.100271 24.742943 0.0666586773 0.1958977947 0.0455378126 -0.0689109099 0.0403626414 -0.099818181 0.0059441761 0.1783685621 -0.0297411978 0.0135131335 0.0402089576 0.0091236661 -0.0625179834 0.0241208168 0.0218922972 0.116747918 0.0721604128 -0.1062584271 -0.1215760488 -0.0468691615 0.0442599893 0.0453257805 0.092991446 -0.046238628 -0.0485781248 -0.1976292935 0.114719944 0.0072660525 -0.0063428714 0.0367709287 0.0165422732 -0.0071700891 -0.0219575698 0.0037597469 0.0602014126 0.0344083256 0.1160403869 0.0675695734 -0.0702223138 0.1586841145 -0.055764141 -0.0353863011 -0.042103159 -0.0642856901 -0.1096995936 -0.0363691772 -0.0280959506 -0.067251416 -0.0081833248 -0.0115076174 -0.1658751254 -0.0206152027 0.1327175441 -0.0318158064 0.1991934667 0.0409273684 0.0585682272 0.0180358986 -0.1116553055 0.040170079 0.01853433 0.0161159621 0.0266546724 0.0638209066 0.056165601 0.2080032975 0.0089316998 0.0181347563 0.04227309 -0.0832455705 0.00690807 0.1076409615 0.1106236116 -0.0616720049 0.012211415 -0.0196430067 0.0117592194 0.1695481309 -0.025019474 -0.108964803 -0.0367770619 -0.0154097036 -0.1158257358 0.0860072318 0.0792781257 0.0973524856 0.1028564739 -0.0358811827 -0.0693828325 -0.0218461891 -0.1811736472 -0.0634285352 0.0011236944 -0.0740461926 0.0146818181 0.0004153394 -0.0092878815 0.0522470294 0.0544456617 -0.0851807995 0.0704532957 0.0267238843 -0.0913073767 -0.0234855811 0.0474124104 0.0596310249 -0.0221984081 0.1440353425 0.0147931029 0.0610301434 0.005712111 0.0458671721 0.1197174186 -0.0355624856 -0.0379527305 -0.0434280228 
-0.0995945202 -0.0182680078 -0.0391290656 -0.1435062802 -0.0659861649 0.0292740906 0.0076731719 0.1098061315 -0.0357662278 0.099970555 -0.013955913 -0.0389500279 0.051440616 -0.0130283452 -0.0002570465 0.1141614085 0.0845138841 0.1115394925 0.1107490191 0.1094357105 0.1076057667 0 -0.00692264 -0.0068239093 4.79229445696E-05 4.65657381346265E-05 0.0238341 0.023295523 0.000542681391844 0.00056806432281 -0.0367118 0.0133736 0.00134775625924 0.00017885317696 0.00118991 -9.78400000000001E-05 9.57266560000002E-09 1.4158858081E-06 -0.00088358 8.025E-05 6.4400625E-09 7.807136164E-07 3.87459E-05 0.0002852229 1.50124476681E-09 8.135210268441E-08 -2.286366395 -0.5384935172 1.05804944 0.8514298949 0.6143732569 1.611737185 -0.5105478777 -1.57675939 -1.077255986 0.08536206458 0.5274382932 0.9247914291 -1.773588043 6 | 643.75666798644 0.358212421443795 414422.647577003 0.128316138876627 578.293859483746 0.004669379380971 2.18031038034361E-05 334423.787916607 522.852383884426 0.240971939493193 273374.615333627 0.058067475623111 1.050039 23.613306 0.116312455 0.2078586011 -0.0123208662 -0.0005846209 0.0441622197 0.041473519 0.0452890682 0.1736144327 0.0150966448 -0.0370137834 -0.0272619554 -0.06807681 -0.0330949887 0.0407103882 0.0830067875 -0.0416613996 -0.0537772645 -0.0442331926 0.0668263505 -0.0046446554 0.0309331634 0.0050537453 0.1080159359 -0.1363774648 0.123747077 -0.003656152 0.0863047087 0.0186094575 -0.0207034464 -0.1190762023 0.0052423867 -0.0901824656 -0.0497195605 0.0486481943 0.1199867641 0.0509174687 0.0179910823 0.0136646432 -0.0479349617 0.0244121265 -0.0191644441 -0.0644269797 -0.1453580674 -0.0378780108 0.0823612397 0.1192301492 0.0448556193 0.020026703 0.0104473433 -0.139043447 -0.0549989882 0.008878326 0.0254358999 0.0642800497 0.0133368428 -0.1966230789 0.1009809671 -0.0197628683 -0.078616129 0.0178396596 -0.1320301679 -0.1219972035 0.1157827005 0.0508509475 0.0801772445 0.1835871052 -0.044724527 -0.0196451159 0.0770516335 0.0384174999 -0.0219394056 -0.0313520879 0.0515368729 0.1153645971 0.0316704571 -0.0448409548 -0.1593617797 0.1354596772 -0.0011867312 -0.0287532001 0.0325910523 -0.0248065796 0.0267316002 0.1178231748 -0.0548955158 -0.0580397025 0.0066037429 0.0062746868 -0.0687049155 -0.0008265641 -0.024901932 -0.1057296377 0.0091714075 0.0488413452 -0.0604056651 -0.0292766252 -0.0357380612 0.0353656685 0.1454698397 0.0391393711 0.0010693356 -0.2205090156 0.0152710551 -0.0610078123 0.0227967891 0.2129416915 -0.0049511235 -0.0046735099 -0.0307566555 0.1030090531 -0.0921208159 0.0456086 -0.0363986986 0.0626394895 0.1245968352 -0.0123291499 -0.0533284129 -0.0154809782 0.1451323216 -0.0083130938 0.0811034444 -0.2044652261 0.0537208749 0.0104397792 -0.0202510904 -0.1612418951 -0.009315874 -0.0365961174 0.0113880785 0.0867522609 0.0781747237 -0.1102457037 0.1422654896 0.1113672575 0.1100622363 0.1078985167 0.1048929802 0 0.00768841 0.01461105 5.91116483281E-05 0.000213482782103 0.0164273 -0.0074068 5.486068624E-05 0.00026985618529 0.0156563 0.0523681 0.00024511972969 0.00274241789761 0.00060935 -0.00058056 3.370499136E-07 3.713074225E-07 -0.00096383 -8.025E-05 6.4400625E-09 9.289682689E-07 0.000216466 0.0001777201 4.6857529156E-08 3.158443394401E-08 -2.345299715 -0.7498805077 0.7431887879 -1.444008727 0.5093061011 -0.8910774264 -0.08758168699 -0.684120896 -0.0490486624 -0.07159804817 0.4689382922 1.099151437 -1.90788031 7 | 642.114210011365 -1.64245797507465 412310.658698519 2.69766819988632 578.421916079925 0.128056596178794 0.016398491824899 334571.913001572 522.322027279051 
-0.530356605375346 272820.300180897 0.28127812886526 1.08808 24.468773 0.08079333 0.0981025142 -0.0184577457 -0.1573553567 0.0889834667 -0.0578273128 -0.0345234999 0.1030341943 0.0213943183 0.1501668545 0.0541054912 0.0353188747 -0.0905809583 0.1292087332 0.1099984661 -0.0117659307 -0.0768709067 -0.1694281872 0.0112063356 -0.0132184274 0.0177606429 -0.0023214341 0.0657166118 0.0428154127 -0.0689922112 0.0540958484 0.013285364 0.0706258953 -0.0810984091 0.0715717553 -0.0523050141 -0.0524702425 0.1553503868 0.0372034907 -0.0214569321 0.1777469713 0.0919345641 0.0906298411 0.0241025938 -0.012760576 0.0186966652 0.036977029 0.0094118937 -0.0564092531 -0.0118403482 -0.0391135096 0.0255635885 -0.0018039931 -0.0535736053 -0.0565826045 -0.0123044002 0.0403850976 -0.13872133 -0.0209233707 -0.0633483136 -0.0886841066 -0.0755789289 -0.0590427674 -0.0566622596 -0.0751412316 0.1119169779 0.0758666255 0.0163190585 -0.0423439234 0.0584661213 0.1585547 0.1435022852 0.0464554759 0.0281026549 -0.0836348742 0.0307406554 0.0634098489 -0.0875946668 0.1332355377 0.0254760228 0.0263070233 -0.0379093495 0.0962332681 0.0641448248 -0.0328122759 -0.1485747084 -0.0321153179 -0.1513164905 -0.0017229713 -0.0574264713 -0.1335855376 0.0312294555 0.0696798465 -0.0762542777 0.0810753333 -0.0429839206 0.0045662425 0.0792808645 -0.1082472958 0.0407919837 -0.0432106469 -0.1074870682 0.0185746287 -0.0276717708 0.175664079 0.011814441 0.0246378223 -0.150036491 0.0294566661 0.1126924959 0.0238560111 0.0092881105 0.025025099 -0.0218194801 -0.0565386644 -0.0259836302 0.0092617119 -0.0386632834 -0.0011581698 -0.1319686427 0.0188574687 0.1137307636 0.0648024688 -0.088906717 -0.0479148991 0.1526950752 0.0096389919 0.095413523 -0.1493816856 0.0174792515 -0.0456247062 0.0141467937 0.0140441587 -0.0250224874 0.0326642462 0.0240171376 -0.0827729862 0.2149069938 0.1111520884 0.1092057454 0.1059870468 0.1015334935 0 0.008326 0.00063759 6.9322276E-05 4.06521008100002E-07 -0.00119704 -0.01762434 0.000310617360436 1.4329047616E-06 -0.0322861 -0.0479424 0.00104239225321 0.00229847371776 0.00032688 -0.00028247 7.97893009E-08 1.068505344E-07 -0.00096383 0 0 9.289682689E-07 0.000207156 -9.31E-06 4.2913608336E-08 8.66761000000001E-11 -2.01614769 -0.3611658011 1.414144962 0.1589838983 0.2907975363 1.327440403 -2.330575627 -0.6723163672 0.5989010587 -2.400402949 0.4960315512 0.706311256 -1.932750355 8 | 644.888652482974 2.77444247160918 415881.374101306 7.69753102826884 577.627713581135 -0.794202498790128 0.630757609084483 333653.77549697 522.535062868458 0.213035589407809 273042.891926944 0.045384162354333 1.115871 25.093744 0.113071866 0.1615319258 0.0765039537 -0.018066619 0.1080107379 0.0322972521 -0.0076170024 0.1839216873 -0.0695844891 0.0780303126 -0.0265598699 -0.1218255524 0.0029605983 0.112369306 0.0227343031 -0.0094745386 -0.0863151851 -0.0283802986 0.0053880767 -0.0467384516 -0.1667112322 0.0858921019 -0.1037531269 0.0311315113 -0.146665788 -0.0532294646 -0.0131677561 -0.0606238073 -0.0377642506 0.0371647595 0.0015690675 0.0756174532 -0.0201116005 0.1203827066 -0.0446776671 -0.0808511288 -0.0587239003 -0.0325724685 -0.0537641815 -0.0345168947 -0.0548397539 0.034411589 -0.1532397462 0.0811655946 0.0498633535 0.0024887486 -0.0182917409 0.033556132 0.1261348442 0.0111756022 0.071623218 -0.0253836608 -0.070595446 0.1313344741 -0.0881295889 -0.0213009577 -0.0690971267 -0.0198680052 0.1598109476 0.1251417674 0.1219496197 -0.0469412162 0.0033993662 0.0829648543 0.0003231595 0.1894606456 -0.0313514143 0.0350002826 0.0853859346 0.043607426 
0.0264738605 0.0452327022 0.0187887931 0.1112904601 -0.0251860536 0.1625426776 0.1741407347 0.1664464051 -0.0115059106 0.1030345156 -0.1060303987 -0.1204751369 0.0073121126 -0.076170527 -0.0290142991 -0.0703155181 -0.1343810003 0.0610059144 -0.0840449053 -0.0108600732 -0.0062260407 0.1061967369 0.0925320941 -0.0259194394 -0.0261603123 0.1416236803 0.0813089623 -0.0266776215 -0.0240535708 -0.0573568713 0.0380990309 -0.0169480212 -0.0599592774 0.0214674207 0.0965555195 -0.0614998852 0.0329514835 -0.0628295245 -0.0934832455 0.0486519826 -0.1079141371 0.1429967503 -0.1110632275 0.0110968875 0.10698607 -0.0048011343 0.0327377046 -0.0712470393 0.0327413439 0.0218427452 -0.0698922556 0.0388214888 -0.1533177409 -0.1052777526 -0.0067777133 0.0023880647 -0.0033557336 0.0875303488 0.0612359802 -0.1035807269 0.1576728582 -0.0208500006 -0.1010547534 0.110894068 0.1081808673 0.1037079311 0.0975480191 0 -0.000128276 -0.008454276 1.6454732176E-08 7.1474782684176E-05 0.0402396 0.04143664 0.00171699513449 0.00161922540816 0.0258156 0.0581017 0.00066644520336 0.00337580754289 0.000225295 -0.000101585 1.0319512225E-08 5.0757837025E-08 -0.00096383 0 0 9.289682689E-07 0.000207156 0 4.2913608336E-08 0 -2.409997708 -0.7184141241 0.02633428797 -1.411170205 0.2102017059 1.183159972 -0.4456006933 -2.495696366 0.6441998064 -0.0263057657 1.280880435 1.098630902 0.04001896495 9 | 644.654675775585 -0.233976707389388 415579.650999324 0.054745099600779 578.519440375006 0.891726793871158 0.795176674907734 334684.74289181 523.182875509544 0.647812641085807 273720.321226435 0.419661217950568 1.04134 23.41769 0.094272347 0.0509320611 -0.0857322509 -0.0377539725 0.1775611318 -0.0857633514 0.0621121338 0.1493441469 0.0703531378 0.0764931036 -0.0570088023 0.009843951 -0.1244255803 0.0418471565 -0.0380487866 -0.123277904 0.0125438946 0.0496446369 -0.0104922579 0.0079518469 -0.1424363023 -0.0336348695 0.1065334985 -0.0383482681 -0.0651527274 0.0533771426 -0.1107755351 0.1583714632 0.0630832299 0.0840950632 -0.0788565625 0.0249965805 -0.1122703756 -0.0996309636 -0.0587848186 -0.0802815207 0.122291716 0.0032130504 -0.0921899796 0.1012337507 0.0602824341 0.0243155642 0.0070990642 -0.0205753463 -0.0851770893 -0.0733307276 0.0735861471 -0.1021754954 0.1231017165 0.0015250437 -0.0413994972 -0.1189500193 -0.1410839707 0.0122561871 -0.0213034442 -0.0571785883 -0.0283442418 0.0929982085 -0.0538600048 -0.0329163431 -0.0340177016 -0.0216644139 -0.0589655361 -0.0311274384 0.1049158786 0.0689440575 0.0615302715 0.1567472352 0.0875513362 -0.0053474358 -0.0022309147 -0.1073185086 -0.0751067444 -0.079041156 0.0475661385 0.0571998191 0.0659864186 0.1336891992 0.0982073005 -0.00020386 -0.0504585496 0.0683789605 0.0061061716 0.0396997767 -0.1229757368 -0.0058200328 -0.0856006321 0.1561385447 0.0095765217 -0.0386035154 -0.2032402295 0.0941133783 0.1294765966 0.1306580982 0.0319831627 -0.1381641715 0.1123947496 -0.0160219702 0.0279984778 -0.1125406703 -0.0933365262 0.0275388615 0.0549254953 -0.0789314514 -0.0900692714 -0.0744622033 -0.0291323139 0.1567088308 -0.083584887 -0.026229128 0.0321326596 0.1095467846 -0.1289680622 -0.0790285867 0.0162904029 -0.0789530213 -0.0435557141 -0.0074279917 -0.0248441041 -0.0411942024 0.0139370684 -0.0832534579 0.0511437924 -0.0237781629 0.0383204249 -0.0131251785 -0.104762705 0.0648319772 0.002046659 0.0904899808 -0.0265936763 -0.0024174742 0.0730043193 0.1105932959 0.1069891821 0.1010690755 0.0929611287 0 -0.000120589 7.68700000000001E-06 1.4541706921E-08 5.90899690000002E-11 0.00624946 -0.03399014 0.00115532961722 
3.90557502916E-05 0.00218691 -0.02362869 4.7825753481E-06 0.000558314991116 0.000536803 0.000311508 9.7037234064E-08 2.88157460809E-07 -0.000737839 0.000225991 5.10719320810001E-08 5.44406389921E-07 1.17384E-05 -0.0001954176 1.3779003456E-10 3.818803838976E-08 -1.931377968 -0.3262612641 1.64618611 -0.9192983333 0.2105284952 -0.2982590973 0.517197265 -0.9788128389 1.395971037 -1.586136485 0.7919322971 -0.4021222091 -0.604466876 10 | 647.141883414593 2.48720763900826 418792.617269387 6.18620183954105 578.671195204888 0.151754829881725 0.023029528392431 334860.352159854 523.878538022723 0.695662513179173 274448.722600826 0.483946332242762 1.037634 23.33436 0.105208591 0.0480299893 0.1357579133 0.0184623024 0.1254834789 -0.0356858686 0.0158130207 0.0873627926 -0.1048108084 -0.006065067 -0.0102333104 -0.0850789369 0.0491657691 -0.0645086162 0.0072639033 -0.0115585051 0.052200051 0.1015194952 0.1350997054 0.0945092627 -0.0818303157 -0.083734201 0.0576278911 -0.1018725949 -0.0030608288 -0.0286309121 0.0027133356 -0.0033011003 0.0463801152 0.1045474448 0.0113806327 0.0447383913 0.0693049436 -0.1452306242 0.0638541637 0.1151773401 -0.0272385566 -0.0221429584 -0.0269331681 -0.0481953489 0.0051575042 0.103269968 -0.1184068942 0.070127595 -0.1185805624 0.0773060613 -0.1228446602 0.0897179577 -0.1314936354 -0.0544233751 0.0940726372 -0.0509702073 0.0272743336 0.0607765671 0.1185907649 0.0373966918 0.1374817804 0.1415560907 0.0863670517 -0.052211691 0.1302391942 0.0173659556 -0.0685460293 0.0073102894 -0.0756041328 0.1061357244 -0.0386863762 0.0569140321 0.1149701535 -0.011778046 0.0534169465 -0.0654282916 0.1126638346 -0.0569407357 -0.0371161461 0.0715252519 -0.1141540773 0.0769314643 -0.0823703022 0.0293236246 -0.0147792997 -0.0228860348 0.1234974865 0.0195417105 -0.0305842091 -0.0390292155 -0.0291043955 -0.0795900394 0.1356086341 -0.0589088151 0.0121726674 0.0078018528 0.0838538653 0.1205497452 0.0674573922 -0.063849535 0.0022357525 0.0205326935 -0.0681685794 -0.0543119971 0.076889138 0.0925057398 -0.0726308733 -0.0144099844 0.0680179548 -0.0200284494 0.001843927 0.1015015392 0.1789431767 -0.0022281694 -0.0157427318 0.1221546684 0.1511947883 0.0742784455 -0.0052519123 0.0096118121 0.0366941276 -0.0782136048 0.110318758 0.0050123925 0.0225232974 0.0059789431 -0.0668986205 0.0231900555 -0.0322929553 -0.0253322076 -0.068769547 0.1120707983 0.1345686553 0.0660423955 -0.0105465481 0.0771847066 -0.1681586049 0.1102498879 0.1056325273 0.0980796336 0.0878011021 0 -0.000129645 -9.05600000000005E-06 1.6807826025E-08 8.20111360000008E-11 0.0469517 0.04070224 0.001656672341018 0.00220446213289 0.0199605 0.01777359 0.00039842156025 0.000315900501488 -8.43305E-05 -0.0006211335 3.8580682482225E-07 7.11163323025E-09 -0.000619916 0.000117923 1.3905833929E-08 3.84295847056E-07 0.000207156 0.0001954176 4.2913608336E-08 3.818803838976E-08 -2.148136846 -0.4968954648 1.012393072 0.8316874576 0.2331193117 -0.6122110514 -1.078125456 -1.422566159 -0.1544090373 0.7135154648 1.709858908 -0.330627952 0.7979021084 11 | 648.189027252239 1.0471438376461 420149.015050204 1.09651021672021 578.176263657394 -0.494931547493593 0.244957236704403 334287.791856825 523.794824671277 -0.083713351446022 274361.018352414 0.007007925210325 1.021163 22.963955 0.144019241 0.0699345404 0.0226380037 0.0424885738 0.0955722047 0.0390831987 -0.0950677298 0.1172196212 -0.0069648711 -0.014504502 0.021422991 -0.0184882775 0.0287265186 -0.0067195898 -0.0596876628 -0.0103601484 -0.1123267358 0.0802304168 0.0266843594 0.0556413268 -0.0145770634 
-0.0299256666 0.0305205992 -0.0422019813 0.0830298873 0.0131904184 -0.049504946 -0.1914207745 0.0212160577 0.1192158473 0.0958295222 0.0721703418 0.01456893 -0.006305121 -0.0227839536 -0.0128491748 -0.1660124545 0.1092712119 0.0372086257 -0.1852822102 0.1689158355 0.0946683376 0.0546285339 -0.1696589213 0.0459411407 -0.0212061898 0.0421706596 -0.0261807483 -0.0598630665 -0.0441759849 -0.1048535599 -0.0255588535 -0.0151668358 0.0754474227 0.0276450814 0.0839414576 0.0056271886 -0.1996451727 -0.0417071403 -0.1223730865 -0.041778672 -0.1195059658 -0.007660744 0.0010600252 -0.0019177066 0.0574215166 -0.0446041574 0.0184663103 0.0831272815 -0.0833358575 -0.0378369217 -0.016866459 -0.120895899 0.0731098714 -0.0455937065 -0.0275880035 -0.0403389908 0.1116458928 -0.005482351 -0.0369407311 0.0151650073 -0.0956834728 0.1080877448 -0.0579355579 0.0294112956 -0.0516071094 -0.0570478076 0.0062697577 0.0285432596 -0.0394125045 0.1013396908 0.0368156506 0.0255467827 0.0474656853 0.0176969292 -0.0298747265 0.049950358 -0.0104840661 -0.1560599702 0.0222936127 0.0483968451 -0.0072593687 0.1172020149 0.0867158352 0.1853340675 0.0648895949 -0.0537354582 0.0216293304 -0.1133993582 -0.0192527693 0.2215671961 -0.2487805275 0.0181202258 0.0658531608 0.0553535639 0.0182499279 -0.054657388 -0.0202864419 0.0253758505 0.0731810942 -0.0311304846 0.0063906039 -0.0923358773 0.052817623 -0.1447406259 -0.0760495643 0.1508402173 -0.1080618097 0.0659628133 0.0560846384 -0.0718279351 -0.0436032757 0.1339476575 0.1098639765 0.1041129948 0.094749975 0.0820997524 0 -0.000105169 2.4476E-05 1.1060518561E-08 5.99074576000002E-10 0.0220571 -0.0248946 0.00061974110916 0.00048651566041 -0.0235494 -0.0435099 0.00055457424036 0.00189311139801 -0.000382261 -0.0002979305 8.876258283025E-08 1.46123472121E-07 0.000358022 0.000977938 9.56362731844E-07 1.28179752484E-07 -2.87808E-05 -0.0002359368 8.2833444864E-10 5.566617359424E-08 -1.995688879 -0.3197394923 1.687357424 -1.652759533 0.5801019429 0.05107998092 -0.1765108733 -0.6649030859 0.5394207992 0.555535496 0.7839552758 0.7959592826 -0.2234737981 12 | 647.134968501956 -1.05405875028282 418783.667458028 1.11103984904778 578.944784569589 0.768520912194844 0.590624392480796 335177.063580328 524.872105573115 1.07728090183741 275490.727208755 1.16053414146363 1.021934 22.981293 0.139767237 -0.0359964372 -0.0153332083 -0.0714670155 0.1283752764 0.0028270795 0.013965305 0.0304007965 0.0074896789 0.1361710694 -0.0196717928 -0.0717693644 -0.0164640805 -0.0052757647 -0.1152944347 0.0007341991 0.0166364861 0.0867931044 0.0941186176 -0.0273076045 -0.0816918183 -0.1554225327 -0.1001307173 0.092519515 0.0215590832 0.0214858821 0.0423868077 0.0711276297 -0.1071481875 0.0194080376 0.1532999986 -0.1513149372 0.0828019238 0.0496325269 0.0140198229 -0.1819078258 -0.0352921873 0.1125525072 0.0655480905 -0.1496422783 -0.0707105563 0.051016942 0.011775118 -0.0161075605 -0.0505866462 -0.0312027233 0.0881837855 -0.0540800044 0.038788178 0.0506813644 0.0061032604 -0.0466570323 0.0363780088 -0.1591859194 0.0138547599 0.0587507452 0.0797940911 0.0747543311 0.0581970205 0.0089788822 -0.0041013452 -0.0137114511 0.0192610809 -0.0156198808 -0.0151226234 -0.0089189923 0.0831457781 0.0926454402 0.077211856 0.0671686292 0.0698904931 0.0453264124 -0.0093826657 0.0018892519 -0.0632372077 0.0793204198 -0.0870890881 0.0460161289 0.0506640621 0.0946869382 -0.0573410362 -0.0133935934 0.0707907557 -0.0577061479 -0.0587064995 0.0350142896 -0.0673778516 0.0603896343 0.1637721843 0.1812996444 0.028541116 -0.0194897459 
-0.0232262884 0.0482887413 -0.0555843517 0.1930353454 0.0513218555 -0.1031071382 -0.0706304961 0.0348979245 -0.0486741828 -0.1354442491 -0.1400352309 -0.0724905677 -0.1080667754 -0.0303488133 0.0150356187 0.0462958058 -0.0060054775 -0.083903186 0.0319843891 -0.1440188469 0.0268068522 -0.0950147462 -0.0031204575 -0.0747988359 0.0239426189 0.0342031091 -0.0070496122 0.1603318781 -0.0880085542 0.0254329709 0.0184301602 0.0733483223 0.0526968137 -0.0017394525 -0.0414717945 0.0122536671 -0.0239451812 0.0918015392 0.006604158 -0.1127875711 -0.0571308182 0.1094357105 0.1024329276 0.0910916496 0.0758922305 0 -0.000121166 -1.5997E-05 1.4681199556E-08 2.55904009E-10 0.0466476 0.0245905 0.00060469269025 0.00217599858576 0.0340672 0.0576166 0.00116057411584 0.00331967259556 -0.000691516 -0.000309255 9.5638655025E-08 4.78194378256E-07 -0.00030645 -0.000664472 4.41523038784E-07 9.39116025E-08 0.000148375 0.0001771558 2.2015140625E-08 3.138417747364E-08 -1.901208287 -0.7412255789 1.103925051 -0.9247669648 0.6076839088 -0.9435631119 0.4052189254 -0.1844615897 0.2430469058 -2.661054591 1.647862683 0.5235526379 0.301493338 13 | 648.585404672274 1.45043617031752 420663.027153897 2.10376508416534 578.677825270271 -0.266959299318046 0.071267267492382 334868.025459531 525.378312962477 0.506207389362089 276022.371731298 0.256245921044782 1.089816 24.50783 0.154453876 0.0214476323 0.1038478824 0.0095078262 0.0608775005 -0.0005862552 0.026832389 0.0682281909 -0.100229404 0.0098995869 -0.04053457 -0.0194994711 0.0054759569 0.0177024206 -0.1049410162 0.1420941936 0.0540841742 0.0731093557 0.1291777097 0.1228732277 -0.0528367356 0.0178570018 -0.090048496 0.0209288946 -0.0115093609 0.1062997673 0.0630824858 -0.1028334495 0.0977987977 -0.0796679003 -0.1380819939 -0.109759994 0.0962563242 -0.0397392973 0.006714934 -0.0335002219 0.0448616773 0.0148625408 -0.109797679 0.0712966676 -0.0515465699 -0.097064676 0.0856882592 0.0300035807 0.0348587508 -0.0790717487 0.0090703932 0.0080298402 -0.1110899569 0.0467878964 0.0448371649 0.0768068126 -0.038503597 -0.1238309162 -0.1513658387 0.0924029302 -0.1878620589 0.0075654861 -0.0068936164 -0.140795771 -0.0435607202 -0.0573053876 -0.0384092877 0.1830222976 -0.0813490677 0.0529478287 -0.0256013134 0.0510165942 0.0580050868 -0.0142659251 -0.0056836869 0.1031714078 0.0596102688 0.0023026442 0.0226585067 -0.0005277044 0.1119174887 0.0560765227 -0.0570663329 0.0178058107 0.0225761769 -0.0683766297 0.0635346286 -0.0583113652 -0.0516174673 0.0715527358 -0.1997425194 -0.0234853135 0.1344673734 -0.0552970414 0.0254898446 -0.044551784 -0.0379602698 0.0119648783 -0.0268458735 -0.0508445071 -0.0818957325 0.0747213439 -0.1438041397 -0.0736729475 0.0265839635 -0.1278277132 0.0680776272 0.0491465546 -0.0707708082 -0.0088897986 0.0273502614 0.0329994632 0.0622458758 -0.1099576857 -0.0028170131 0.1044719717 0.1198535533 -0.1211431023 -0.1010188655 0.105464466 0.1705902479 0.2047666376 -0.0482177469 -0.0309255114 0.0194220571 -0.113229542 -0.0714376519 -0.0732800961 0.0583759225 0.0193894561 -0.1303761679 -0.1011871138 -0.0522490087 -0.0238823522 -0.1100228201 -0.0345912518 0.0059814733 0.1089652549 0.1005949163 0.0871173473 0.0692168078 0 0.00973022 0.009851386 9.46771812484E-05 9.7049806120996E-05 0.00934841 -0.03729919 0.001391229574656 8.73927695281E-05 -0.0280749 -0.0621421 0.00078820001001 0.00386164059241 -0.000382261 0.000309255 9.5638655025E-08 1.46123472121E-07 -0.000441638 -0.000135188 1.8275795344E-08 1.95044123044E-07 0.000207156 5.8781E-05 4.2913608336E-08 3.455205961E-09 -2.076179308 
-0.4126570913 0.7196925917 -0.3713194793 0.8083496045 1.22035531 -0.3228103734 -2.26180602 0.5898122335 0.1024084578 0.4845010371 1.237950078 1.12320117 14 | 647.943547853637 -0.641856818636938 419830.841205158 0.411980175630732 579.248860901608 0.571035631336486 0.326081692255859 335529.24285581 526.878995000216 1.50068203773878 277601.475372437 2.25204657839183 1.050606 23.62607 0.132838462 -0.0364581926 -0.0183236506 0.0576516969 0.0782654982 -0.0080821238 0.2318324077 -0.0242187764 0.0080740773 0.0072324833 -0.1579498075 -0.0025971663 -0.0862263151 -0.1119274348 0.0204829056 -0.1166723664 0.0231685456 0.1061025964 0.0886333845 0.0325192639 0.0292681407 0.044442845 -0.003324861 0.0868357882 -0.0054200042 0.0484607683 -0.0772024495 0.0312293604 -0.0607374658 -0.0773690634 0.0396691942 -0.0487929351 -0.0103928661 0.0141551417 0.1125505417 -0.0442943355 -0.0901611507 -0.0926875972 0.0163578743 -0.0330254393 0.0661027042 -0.1940797957 0.0186000005 -0.0461086472 -0.1170624328 0.0278111783 -0.1154520343 0.0082739837 -0.0224179256 -0.0673269119 -0.0163999608 -0.0801361851 0.0557870761 0.009712668 0.027015129 0.0533870687 -0.2178562043 -0.042285358 -0.1264990957 -0.0772277871 0.107105377 -0.0750098687 -0.0087475771 -0.0147169983 0.0083059213 -0.0216017028 -0.0002945663 0.1666025547 0.022806466 0.1366924658 -0.0428875688 -0.1292696416 0.0687609758 -0.0470152634 0.069066371 0.0846964502 0.037463614 -0.0129417659 -0.0126833816 0.0471227619 -0.0199544518 0.1442263394 0.1299828129 0.1623397548 -0.0289802415 -0.0531999409 -0.0056263692 -0.046188434 0.0418177615 -0.0672173702 0.0778820947 0.0795726772 -0.0227570039 0.0743772293 -0.1114155522 0.0140765889 -0.0793810054 -0.0227247862 0.0412377912 -0.0241511361 -0.0851423679 -0.0763600807 -0.064124915 0.0443885483 0.001019901 0.0118698752 -0.1064711187 -0.1177664539 0.0631424314 -0.0974039404 0.0405007449 -0.0459186682 -0.0287766347 -0.0247105286 -0.0496160183 0.0632583425 -0.0948321936 -0.0728338581 0.0314700577 0.0199114148 -0.0195398316 -0.1110009816 -0.0799762757 0.1130701668 -0.038199833 0.0558916864 -0.0548052881 -0.1005421574 0.2240479884 -0.1090335504 -0.1344651618 -0.1516347707 0.0889535884 0.1084527911 0.0986017949 0.0828408542 0.0621146404 0 -0.000125552 -0.009855772 1.5763304704E-08 9.7136241715984E-05 0.0403838 0.03103539 0.000963195432452 0.00163085130244 0.0161222 0.0441971 0.00025992533284 0.00195338364841 -0.000174654 0.000207607 4.3100666449E-08 3.0504019716E-08 -0.00096383 -0.000522192 2.72684484864E-07 9.289682689E-07 0.000432361 0.000225205 1.86936034321E-07 5.0717292025E-08 -1.786948146 -0.5740016271 1.266635799 -0.7947277956 0.785796629 -1.287149091 0.9087377094 -1.120551751 1.752235023 -0.7355303884 1.363145222 -0.3685511962 0.6254360063 15 | 649.794816243476 1.85126838983911 422233.303216893 3.42719465121748 579.118458282293 -0.130402619314509 0.017004843124085 335378.18872326 528.059873294803 1.18087829458773 278847.229784124 1.39447354662842 1.093361 24.587542 0.186950752 -0.1217762552 0.1467755082 -0.0277926957 -0.0214694341 0.0173917099 0.127431538 -0.074513436 -0.1706274665 0.0835740343 -0.0613637468 0.0565672626 0.149846631 -0.0567535695 -0.034743896 -0.0742509381 0.0802530311 0.0176626002 0.021651469 -0.0522153395 0.0566964937 0.0295452679 0.0136113911 0.0622568971 0.0751246732 -0.1607314422 0.0330767335 0.0470491961 0.0616248539 0.0411424593 -0.1055476935 0.0605440213 0.1192325558 -0.0485341676 -0.0187762136 -0.0062722742 -0.1970662942 -0.0038210333 0.0641508985 0.0745937033 0.0136005946 -0.0383741993 0.023845722 
0.1170622441 -0.1051320658 0.0537694589 -0.0174380763 -0.0973728304 0.1633991467 0.074649086 0.0200725027 -0.0230207862 -0.002369173 -0.0528911996 0.0463006815 -0.0317152506 -0.1046815748 -0.0679361095 0.0967947969 -0.0472548187 0.0858951468 0.0443667776 0.0208945449 0.0509948429 -0.1945114493 0.005134718 0.0398044553 0.0258175333 -0.0080048415 -0.0142637812 -0.1432852046 -0.0294475011 0.0873207988 -0.1310607383 0.0148501539 -0.0690557941 0.0019608943 -0.0342303957 -0.145697867 0.1548337892 -0.0646542479 0.1087846347 -0.0009559977 -0.087182836 0.0337314165 0.0203787386 0.0749499848 -0.1233103832 -0.0217742866 0.1191583897 0.1366985017 0.0047430242 0.0514096988 0.0788053127 0.1404960597 0.0219640321 -0.024066387 -0.0144461439 0.0102196251 -0.2357978413 0.0545346082 -0.020543593 -0.0385170837 -0.0329592415 -0.0186997737 -0.0526752333 -0.0680268765 -0.0051168134 -0.0014758238 -0.0266161899 -0.1317596539 -0.1142268952 -0.0998659376 5.41466E-05 0.033774735 -0.0183337428 -0.019950334 0.0213050457 -0.0263079649 -0.0926137005 0.1013959516 0.1776829174 -0.0102869277 -0.0694010479 -0.1603801234 0.1041119388 -0.102886191 -0.0878541126 -0.1163556708 -0.0171622303 0.0218582939 0.0926828179 -0.0776911348 0.1078985167 0.0964566367 0.0782770044 0.0546295156 0 0.00638894 0.006514492 4.08185543236E-05 4.2438606018064E-05 -0.00205121 -0.04243501 0.0018007300737 4.2074624641E-06 -0.0573974 -0.0735196 0.00329446152676 0.00540513158416 0.000116264 0.000290918 8.4633282724E-08 1.3517317696E-08 -0.00019032 0.00077351 5.983177201E-07 3.62217024E-08 0.000207156 -0.000225205 4.2913608336E-08 5.0717292025E-08 -1.847623931 -0.1249772765 1.908258612 1.080511773 1.250593106 -0.5469994191 -0.3173198451 -2.231635855 -0.3624760002 0.3179739173 -0.4438030919 1.153321065 2.155041927 16 | 652.089403671826 2.29458742834981 425220.590381077 5.26513146634101 579.428632007776 0.310173725482286 0.096207739979561 335737.539590402 529.277569335828 1.21769604102417 280134.745402042 1.48278364832594 1.085047 24.400583 0.0555091 -0.1189747503 0.02295754 0.1962481659 -0.1282149608 0.0174101975 0.113764759 -0.0588929128 -0.0865777392 -0.0851264559 -0.2036159113 0.123552864 0.0196082281 -0.1305050004 0.0297462413 0.0224032801 -0.0411810315 -0.0804976187 0.0937903777 0.0436632167 0.0086272176 -0.1482379124 -0.0528553279 0.0510002809 0.0654671964 0.0085909531 -0.0158292741 0.0061375335 0.0027789119 -0.0779750927 0.0587130572 -0.002515855 -0.102479548 0.0042836946 -0.0488938244 -0.0112605121 0.0434602624 0.0286063608 0.0410375445 0.0226466102 -0.0420769191 0.0069634524 0.0370066631 -0.1752451237 -0.0171384573 -0.0782643804 -0.0022366325 0.095588481 -0.1137877821 -0.104737478 0.0227365646 0.020460858 -0.0595138276 0.1413609498 0.1002195811 0.1646795262 0.0620249967 -0.0081099094 0.0491727431 -0.0682525104 -0.0315389807 0.0058702322 -0.0286089129 0.0606900758 -0.0603643745 -0.1197656078 -0.1325872848 0.0825710633 -0.0940311412 0.0726066744 -0.1660247664 0.0282994847 0.0492913004 0.05315227 0.0131086358 -0.1256593642 -0.1872617279 -0.0204023436 -0.1757144303 0.0296400567 0.0716232662 0.1523473183 0.0457063077 0.0617686859 0.0822186173 -0.0979793597 -0.0113645702 -0.0212602963 0.0769068715 0.0732865513 -0.0268399498 -0.1421128062 -0.0893870847 0.0152472197 -0.0235269383 -0.0516582142 -0.0129415996 0.0527624791 -0.0246995226 0.0610815515 0.0173616148 0.0751370731 0.0534376868 0.0320768154 -0.0555343897 -0.0593229399 0.0022608412 -0.0176615994 -0.0260137533 -0.0738036523 0.1290322227 0.0174975119 0.0665483412 0.005925811 -0.136287429 0.0139592342 
-0.0554027824 -0.0487903197 -0.0128119433 -0.0161076165 -0.133124523 0.0878506261 -0.0749865483 -0.1142901576 0.128004915 -0.1177470695 0.0320291398 0.0991222432 0.0558614342 -0.0083881234 0.0600521282 0.1601461011 0.0627800326 0.1073026454 0.0941627493 0.073441629 0.0468075816 0 0.00625593 -0.00013301 3.91366601649E-05 1.76916601000001E-08 0.0221437 0.02419491 0.000585393669908 0.00049034344969 -0.0485208 0.0088766 0.00235426803264 7.879402756E-05 -7.90096E-05 -0.0001952736 3.813177885696E-08 6.24251689216E-09 -0.000137672 5.2648E-05 2.771811904E-09 1.8953579584E-08 0.000405326 0.00019817 1.64289166276E-07 3.92713489E-08 -1.62410432 -0.1126102216 1.915852971 -0.5764218743 1.794431449 -1.806417364 0.702761833 -0.4616307442 2.21946912 1.005250254 -0.1342840471 2.509751357 0.8316402139 17 | 652.42035537999 0.330951708164548 425652.320114153 0.109529033137032 580.153405821886 0.724773814110677 0.525297081620538 336577.974286734 528.733658872404 -0.543910463423458 279559.2820246 0.295838592221521 1.0797 24.280342 0.08207153 -0.2283105377 0.0366968797 -0.0716152836 -0.1541858071 0.1192368779 0.0440008805 -0.1647578107 -0.0189785277 0.1405216428 -0.0887208788 0.0748910164 0.1650920224 -0.0346707513 -0.0160412847 0.042786139 -0.1723731376 -0.0130091515 -0.0021193851 -0.0708723664 0.003456554 -0.0317610425 -0.0379742744 -0.1016934483 0.0056272796 0.0392320887 0.0007209268 -0.1352859137 0.064652361 -0.1240147764 -0.0154984058 -0.0675218735 0.0406973456 -0.1145480047 -0.1651588123 -0.074880587 0.0322689759 -0.03715565 -0.0754120159 0.0672710922 -0.1419152067 0.0701310082 0.1213911956 0.1496961359 -0.0561270119 0.0158665616 -0.1732573551 -0.030556436 0.131681005 0.0364536043 0.0495548247 0.112862048 0.0703554258 0.1221716247 0.0779121391 -0.0455278896 -0.0184665424 -0.0298331959 -0.0470483065 -0.037506128 -0.1716836015 0.0754286664 0.079014658 -0.1352098684 -0.1409948113 -0.1390964074 0.1296589704 -0.052942878 -0.1213728139 0.088273787 -0.1841927757 -0.0435938166 0.0271778073 0.1371131977 -0.0513750935 -0.0106983127 0.1090776587 -0.1158858084 0.0143141751 0.1787115784 0.0043010111 0.0993853264 0.0035684311 -0.1163429054 0.0889190317 -0.0183103557 0.0688891728 -0.0238359168 0.0560107935 -0.0197832628 0.0293170959 -0.1209505081 0.0166093608 -0.072535315 -0.041617844 0.0614413527 -0.0262982184 0.1806454056 -0.0966284545 -0.0403356462 0.0170212159 0.0121520455 0.1109142623 -0.0196985241 -0.2340530087 0.0568669834 0.1835657107 0.1222668566 -0.020349324 0.0789359026 -0.0553344157 0.0196626886 -0.0074885664 0.1160003547 0.1210250397 -0.0523133162 0.0722484113 -0.0984496436 0.0246448416 0.0202151193 0.1089087523 -0.0052738756 0.1009056496 0.0536315283 0.1622547551 0.0445932265 0.1197545102 -0.0130456475 0.0840365586 -0.0383780467 -0.0875140441 -0.0509606498 -0.0558856004 0.1066654069 0.0917236698 0.0683515007 0.0386970634 0 0.0058653 -0.00039063 3.440174409E-05 1.525917969E-07 0.00704793 -0.01509577 0.000227882271893 4.96733172849E-05 -0.078947 -0.0304262 0.006232628809 0.00092575364644 0.000437662 0.0005166716 2.6694954224656E-07 1.91548026244E-07 -0.000129335 8.337E-06 6.9505569E-11 1.6727542225E-08 0.000207156 -0.00019817 4.2913608336E-08 3.92713489E-08 -1.952174826 -0.04621598853 1.322463727 0.3623635204 1.72545079 -0.2306229168 -1.612036476 0.3854989159 0.9439301566 -0.1295399683 -1.028094162 -0.5677371951 1.153995956 18 | 651.397446981345 -1.02290839864531 424318.633933814 1.04634159201912 579.082519927883 -1.07088589400337 1.14679659797539 335336.564886027 528.529570897494 -0.20408797491018 
279343.507313089 0.041651901502939 1.070029 24.062843 0.03578227 -0.0676376196 -0.0081898489 0.1180239272 -0.1190887619 -0.0228472611 0.0103655269 -0.0174835734 -0.0204471404 -0.1059284362 -0.0971649218 0.1866784819 0.0757701414 0.0130218915 -0.0219743804 0.0131442086 -0.0992275705 -0.0764757662 0.0091920612 0.0118605669 0.0788225141 0.1041250569 0.1798656142 0.1778371201 0.0004981251 0.0932173539 0.0131717019 -0.0090620767 -0.0144050627 -0.0093323807 -0.0100120273 0.0085128079 -0.1257616132 -0.0163404074 -0.0206524081 -0.0909879868 -0.0739670022 0.0197671904 0.0483614054 -0.0039151683 -0.0468208505 0.0656451334 0.0239553654 -0.0467168732 0.0262869627 -0.0668762589 0.0339741395 -0.0179547225 0.0530205382 0.0471711822 -0.07966936 -0.0297546842 -0.1251048787 -0.0569007162 -0.0434303999 0.0050112325 -0.0542102061 -0.0765470947 0.0939051183 0.0684509017 -0.0294665664 -0.050857327 0.1279031241 -0.0141711948 -0.0015930698 -0.0716496705 -0.088261609 0.0460403342 -0.1074797605 -0.0363132425 -0.1929862429 0.0019827044 -0.1152124806 0.027062709 -0.0633324552 0.018590897 -0.0165144602 0.0127558108 -0.0937933822 0.0047033523 0.1338891479 0.1074178137 -0.0038025595 -0.0381190083 -0.0192050094 -0.0783830515 0.0098047951 -0.0172777979 -0.1426051896 -0.1002698455 -0.0112504685 0.1155123988 -0.1192468871 -0.0366093645 0.0626377396 -0.0179408283 -0.0554829601 0.0318480314 -0.0465053109 -0.0593309623 -0.0980972221 0.0668635637 -0.0356044055 0.0047539787 -0.068930233 0.0934000722 -0.0512937125 0.0322378429 -0.1656034805 -0.0280979023 0.0994966137 -0.0226914061 -0.062317448 0.0327659045 -0.0560751835 -0.0439385957 0.0433315153 0.0819369391 -0.1532374632 0.1468375965 0.0641554439 0.1294058581 0.0618021905 -0.0597850384 -0.0399647209 0.0264690375 0.0057280116 -0.0863810807 0.0755781672 -0.0010340736 0.1953991282 0.1620766364 0.0094085806 0.1059870468 0.0891431591 0.0630242761 0.0303479649 0 0.0105653 0.0047 0.00011162556409 2.209E-05 0.00636967 -0.00067826 4.600366276E-07 4.05726959089E-05 -0.0748121 0.0041349 0.00559685030641 1.709739801E-05 9.22821E-05 -0.0003453799 1.1928727532401E-07 8.51598598041E-09 5.06673E-05 0.0001800023 3.240082800529E-08 2.56717528929E-09 0.000207156 0 4.2913608336E-08 0 -1.46365734 0.03749691264 2.576071867 -1.391895913 1.96148888 0.4438810108 0.4271855348 -0.5477384963 0.9106799829 1.222393159 0.07498349625 0.256885764 -0.1643125984 19 | 650.357317790653 -1.04012919069237 422964.640803852 1.08186873333036 579.759262601662 0.676742673779472 0.457980646514189 336120.802572423 528.575977576689 0.046406679194774 279392.564071152 0.002153579873887 1.079279 24.270859 0.09104369 -0.1562345628 0.0215768425 -0.0230593856 -0.0978434479 -0.0250230412 0.0523376872 -0.1570884365 -0.0008426059 0.1172667562 -0.0935122931 -0.0350047366 0.0889696298 -0.0041433929 0.0581151524 0.1389277263 -0.0983995145 -0.0412780806 0.0800252089 -0.0194199545 0.0289993969 0.1187097165 -0.0042932268 -0.029320234 0.1698771713 -0.0275493531 -0.0227959407 0.1699468115 0.1731596725 -0.1511081398 -0.0894892323 -0.0420485766 0.0282570598 0.0579045001 -0.1080950094 0.0882156549 0.0924126463 -0.0683550478 0.0939062594 -0.1025514942 -0.0218609737 0.1295205853 0.023316399 0.0444037805 0.0803275891 -0.0088720575 -0.0836413155 0.1726423454 -0.1105028969 0.0497084559 0.0473904611 -0.1367454984 0.0670729924 -0.0627539374 0.0195299287 -0.0827988824 -0.0245504457 -0.0544500182 0.0114627299 0.1279799169 0.0486884616 -0.0613604328 -0.0428519919 -0.0176785684 -0.0913726305 -0.1179159266 0.0608633412 0.0413842123 -0.1096372196 
0.0882592434 -0.0389483977 0.0752534712 0.0620921549 0.1330182941 0.0149425765 -0.0270746601 0.0551002261 -0.1368410417 0.0554829915 0.1601272766 -0.0191654756 0.0207691733 -0.0448081251 -0.0750553478 0.063343076 -0.0662581549 -0.1045837008 -0.1215667875 -0.1629701966 0.043636427 0.0655855323 -0.0904865264 -0.0094323046 0.0430347431 -0.0657465629 -0.278151697 -0.1194956265 -0.0159268901 0.0628658393 0.0483320113 -0.0553714377 -0.0238506273 0.0759667355 -0.0354624499 -0.0144821023 0.0115381653 0.1869844378 -0.1266918468 0.0437541557 -0.0228041026 -0.0414898758 0.041557696 0.0502334189 0.053320717 0.0538678862 -0.0913619698 0.1266863668 -0.0021707563 -0.0437872128 0.0809043767 -0.0113981633 0.0770410592 -0.0324915334 0.0329001726 -0.0622417278 -0.0746278533 -0.0742012516 -0.0966397964 -0.0487911056 0.0037922498 -0.0704825328 0.0264550521 -0.1084603018 0.1052678267 0.0864251961 0.0574784342 0.0218117611 0 0.00658361 -0.00398169 4.33439206321E-05 1.58538552561E-05 0.0303863 0.02401663 0.000576798516557 0.00092332722769 -0.0401351 0.034677 0.00161082625201 0.001202494329 -0.000189029 -0.0002813111 7.913593498321E-08 3.5731962841E-08 -0.000123096 -0.0001737633 3.019368442689E-08 1.5152625216E-08 0.000319449 0.000112293 1.02047663601E-07 1.2609717849E-08 -1.863563149 -0.2269733339 0.6142507573 0.2743780217 1.82890448 -0.3563571692 -1.77547669 0.4049206894 0.9513299888 -0.4382443467 0.05583220239 1.082363203 -0.5809537648 20 | 653.443363512953 3.0860457222999 426988.229319121 9.52367820012551 580.887371059973 1.12810845831052 1.27262869371174 337430.137856966 528.765253548664 0.18927597197569 279592.693360383 0.035825393567342 1.088855 24.48621 0.07327748 -0.0331794913 -0.0373906935 0.1456796413 -0.1475334512 -0.0544221073 -0.0796845857 -0.0999184545 0.0445166865 -0.1948276599 0.0290353076 0.1431049021 0.0669075375 0.1080264931 -0.0419967173 0.0461968435 -0.1679176872 0.012201979 0.0244300779 0.0503017907 0.1840599006 0.0693941143 -0.0256406325 -0.0233558339 -0.0503078607 0.1126576996 0.032773935 0.110691153 0.0069167888 -0.0368552781 -0.0921037135 0.0844436678 0.1270456864 -0.0521586825 0.1973286811 0.0620349738 -0.0586284517 -0.0235650907 0.1884560303 -0.1180133918 0.0302631585 0.0726607521 -0.1034579366 -0.008017545 -0.0758540539 0.0084449412 0.1021904701 0.0263300289 0.0954168341 -0.0010439619 -0.0120167004 -0.1143772856 0.0295488918 0.0600850164 0.0740760228 -0.0207977939 0.0274477423 0.1790556582 -0.0273999776 -0.0194658849 0.013173847 0.1412398394 -0.0785200042 -0.004867761 0.0210594624 -0.0833902842 -0.129951971 -0.0318987882 -0.1619126225 -0.1053456586 -0.1097532492 -0.0094105855 -0.1948023723 -0.0165656062 -0.0331242511 -0.0086747605 0.108409859 -0.13951458 -0.0305273072 -0.1225455437 0.2243854582 -0.0125906068 -0.0412265064 -0.0558795657 0.0170060867 -0.0567440644 0.0177666872 0.0287224834 -0.0104104658 -0.0312514768 0.0505117895 0.0221495872 -0.0202817209 -0.1616539176 0.0623452299 -0.1109840894 -0.0306323614 0.0611110144 0.0497352975 0.0400646459 -0.0696397794 -0.1806304587 -0.006442326 0.0268540552 0.1169621193 -0.0075415757 -0.1043327379 0.0097003155 0.0934611692 -0.1075215557 -0.1839658012 -0.060047749 0.0236841726 -0.1477616132 -0.0426218468 -0.12335646 0.0915610014 -0.1371684699 0.1303328657 0.0311865773 0.1130911731 0.0002573961 0.0195450864 -0.002931891 0.0678888338 -0.0406498773 0.052643533 0.076402943 0.0295973894 0.0421420771 -0.0878606187 0.1015003466 0.043491447 0.1045080238 0.0835739716 0.0517332122 0.0131410805 0 0.00444538 -0.00213823 1.97614033444E-05 4.5720275329E-06 
0.00942095 -0.02096535 0.000439545900623 8.87542989025E-05 -0.0586656 -0.0185305 0.00344165262336 0.00034337943025 0.00031369 0.000502719 2.52726392961E-07 9.84014161E-08 -0.00010524 1.7856E-05 3.18836736E-10 1.10754576E-08 0.000207156 -0.000112293 4.2913608336E-08 1.2609717849E-08 -1.617749059 -0.1561312593 2.06567771 -0.8554071316 1.383428485 1.673869629 0.1268499763 0.09073258146 0.2922538455 1.198502377 0.06482813478 0.4698098496 -0.9882581456 21 | 649.347175384567 -4.09618812838528 421651.754179916 16.7787571831245 581.315786618178 0.428415558204847 0.183539890511971 337928.043771511 528.503548070588 -0.261705478076692 279316.0003232 0.06848975725535 1.061763 23.876974 0.158970199 -0.0956490975 -0.0574202194 -0.0666827929 -0.0990456373 -0.0631482687 -0.0076607453 -0.2056771863 0.0391903664 0.0359466909 0.0595421129 0.063998908 -0.0711611916 0.0501159931 -0.1722106104 0.0237078159 -0.0163762984 -0.0797317328 -0.0616597181 0.0660943251 -0.0762489304 -0.1399604116 -0.0098853922 -0.0252077026 0.0322805022 0.031881359 -0.0228715482 -0.0550964248 -0.0757106611 0.0411367968 0.1059482318 0.0604012777 0.0293043817 -0.0121668492 0.0383394799 0.1755557125 0.1381785999 -0.1378314531 0.0759964982 -0.0986994875 0.0093349788 0.0122757009 0.0260877638 0.0633312177 0.0648471018 -0.0414477945 -0.0614586946 -0.0008353539 0.0606253237 0.0101989947 -0.107027252 0.0169762502 0.0530621475 0.0661582398 -0.1265066492 0.042260949 -0.0414509117 -0.0572071479 -0.0760466119 0.1965894721 0.0157088854 0.0825931906 -0.0298216692 -0.110011964 0.0018719382 -0.088926939 0.0638807232 -0.0075817087 -0.1136992191 0.0636455976 0.0480024195 0.1070512972 -0.0489003852 -0.0076521568 -0.0788716243 -0.0632051469 -0.0442321437 -0.1977261342 0.0521001208 -0.077402901 -0.0308039263 0.0285740453 0.023986423 -0.133024634 -0.0626075873 0.0270590995 0.0755987652 0.1056348011 0.0610042739 -0.00451387 -0.0419205657 -0.0725853142 -0.0482679116 0.0304012886 0.1598317025 0.0188295999 0.0036880807 -0.0585869391 0.0490177033 0.2123834866 0.0125091355 0.0869252142 0.0474958776 -0.0177285268 0.0117417306 0.0188865822 -0.0149823995 -0.0894753686 0.0118462896 -0.0342637241 -0.0128559476 0.0832100719 0.0756658427 -0.0930175906 0.0997115727 -0.1106352203 0.0552573374 -0.0472782072 0.0745725207 -0.0346966144 -0.0361738682 -0.0169692568 -0.0157553604 0.0366306021 -0.0360766188 0.0604292384 0.0693064452 -0.1022285659 0.1042384583 -0.0658132173 -0.1268481248 0.0423479063 0.0566537548 0.1037079311 0.0805938822 0.0458085392 0.0043893808 0 -0.000107169 -0.004552549 1.1485194561E-08 2.0725702397401E-05 0.0458186 0.03639765 0.001324788925523 0.00209934410596 0.0108286 0.0694942 0.00011725857796 0.00482944383364 -0.00022441 -0.0005381 2.8955161E-07 5.03598481E-08 -0.000537656 -0.000432416 1.86983597056E-07 2.89073974336E-07 0.000207156 0 4.2913608336E-08 0 -1.54272092 -0.6774353487 1.413527711 0.2563756846 0.9232149939 0.5051806263 -1.213097713 0.7671755098 -0.1684966193 -1.736413207 0.6024199673 1.225841717 -1.316793891 22 | 653.145922535258 3.79874715069093 426599.596124434 14.4304799148824 580.139941689108 -1.17584492906951 1.38261129721849 336562.351943042 528.574703557566 0.071155486978455 279391.217240969 0.005063103327141 1.095008 24.624592 0.1072340576 -0.0103093498 0.0393947537 0.1101907609 -0.0950152614 0.129254727 0.0012248513 -0.0810984929 -0.0227209358 -0.1256419148 -0.0650612787 0.0263152738 0.133963729 0.1072090238 -0.1874828045 -0.0700066684 -0.0595704539 0.0660263957 -0.018932124 0.0159025368 0.0603525926 0.0622175895 -0.1237927004 
0.0055034585 0.0532278593 0.0352767288 0.0760028254 0.0684044553 0.0357648751 0.052281546 -0.0339509559 -0.1035230236 -0.1019284741 -0.0731342656 0.0344352942 -0.1739295912 0.0242899383 -0.0504081753 -0.1407636222 0.1438604382 -0.0957390364 -0.0055865241 -0.0008867782 -0.1302765454 0.0264451919 -0.0380583038 0.2386420917 -0.0050836956 -0.1451706648 0.0715108742 0.117081128 0.0369883136 0.0219149606 -0.0724906484 -0.0718121182 -0.0157449546 -0.0704586911 -0.0154291187 -0.0448713863 0.0313112733 -0.0747510628 0.0176969027 -0.0586014749 -0.1988202072 -0.0455215352 -0.05589413 -0.1246243574 -0.09602917 0.0072777789 0.0799784104 -0.1017616971 0.0384100765 -0.0752058201 -0.0631793345 -0.047832384 0.023659253 0.1737749168 -0.0771078991 -0.0505539151 0.077127369 0.2050534626 -0.0143404159 0.0788266667 -0.1383194786 -0.0622813017 0.0615549151 -0.0089739143 0.1719093952 -0.0385614659 0.0986683335 0.1215070842 -0.0443498573 -0.1003777159 -0.020288865 -0.0446856316 -0.0471245416 0.0376120902 0.0955324316 0.0683185234 -0.0260390365 -0.0401111492 -0.0224033359 -0.0300138214 0.0246398287 -0.0547425895 0.0670934324 0.0224886343 0.0817027405 0.0284554985 0.0779858833 0.0389366619 -0.0703256781 -0.0445384763 -0.0955268105 -0.1061330753 0.0120338643 -0.2045863308 0.0382892055 -0.1589530445 0.0775939051 -0.0938300221 -0.0337272295 0.0536383854 0.0178787725 -0.0145774702 -0.0897587883 -0.0249819029 0.0654984061 -0.1757741722 0.0092444777 -0.1403738736 -0.0400755253 -0.1064569668 0.1028678571 0.0774895228 0.0397249664 -0.0043893808 0 -9.91814E-05 7.98759999999999E-06 9.83695010596E-09 6.38017537599999E-11 -0.00218477 -0.04800337 0.002304323531357 4.7732199529E-06 -0.0134165 -0.0242451 0.00018000247225 0.00058782487401 0.000225142 0.000449552 2.02097000704E-07 5.0688920164E-08 -0.000287656 0.00025 6.25E-08 8.2745974336E-08 0.000207156 0 4.2913608336E-08 0 -1.590716138 -0.3631373591 1.642336766 -1.680928008 0.7391016724 0.8124021702 0.4481964106 -0.8090902266 -0.5621813272 1.062466155 -0.2933494447 0.2292299393 0.4381065459 23 | 650.521148028506 -2.62477450675192 423177.764032326 6.88944121129478 580.338696315818 0.198754626709388 0.039503401638388 336793.002441543 528.193373626069 -0.381329931497476 278988.239942488 0.145412516655869 1.096114 24.649446 0.1094053556 -0.0937145453 -0.1305425522 0.0686268473 -0.0059443341 -0.128722265 0.0878039677 -0.1135816203 0.057666293 -0.0879867737 -0.0755621602 0.117868866 -0.1233664907 -0.0295438286 -0.0102738086 -0.0950665921 0.0591826984 0.0446606792 -0.1168118896 -0.0565043527 -0.0194496549 0.0565514426 -0.1753026684 0.1258702867 0.0208808631 -0.0439310556 0.0971220021 -0.0057658934 -0.04415511 -0.120351401 -0.0554582524 -0.084158592 0.02559162 0.0250778961 0.0847977115 0.0041156142 0.0281313619 0.1244429949 -0.0977125174 0.0372902651 0.1912041981 0.0483921288 -0.005264707 0.0835465643 0.063326688 0.0325097737 -0.0854243689 0.0568876618 0.1218294481 0.1098030866 -0.0066390251 -0.1262355008 -0.0302190309 0.0361026825 -0.0496048582 -0.108763534 0.2088350224 0.1245359307 0.0191803534 -0.0721774906 -0.0155275471 0.1060274418 -0.047128128 0.0491594908 0.0728099718 -0.1154361495 -0.0146032108 0.1115175914 -0.1127233139 -0.0035893403 0.0522064241 -0.0946522284 -0.042067953 -0.1216074622 0.0248443765 -0.0252490326 0.0320125448 -0.1125217667 -0.0266054432 -0.0173754486 0.0825298089 0.1212300507 -0.0110242673 0.0694860032 0.0166533613 0.0695001689 0.0145276883 0.1440201184 -0.0563564351 0.0005395478 0.1086015505 -0.0057280932 -0.0305692397 -0.0504860382 -0.1661114199 0.0689779845 
0.0328184194 -0.1724001886 -0.0758175831 -0.1112979932 -0.1285492897 -0.0717992147 -0.1074834439 0.0230667974 0.0498440794 -0.0498888973 0.0108135351 -0.0586083638 0.1061638617 0.1255351579 -0.001723742 0.0797229539 -0.0775007407 0.0735947302 0.0719263701 0.0323290968 0.0401926736 -0.0588267711 0.0037447487 -0.1254089349 0.0758227903 0.0731303555 0.0685763162 -0.0222264127 0.0707883725 0.0502466753 0.0082776034 0.1310875647 -0.0225761328 -0.1293982022 -0.1544253587 0.2162796806 0.1225489371 0.1019881255 0.0742656803 0.0335035965 -0.0131410805 0 -0.000117947 -1.87656E-05 1.3911494809E-08 3.5214774336E-10 0.0394024 0.04158717 0.001729492708609 0.00155254912576 -0.00169874 0.01171776 2.8857175876E-06 0.000137305899418 -0.000602124 -0.000827266 6.84369034756E-07 3.62553311376E-07 -0.000287656 0 0 8.2745974336E-08 -8.72112E-05 -0.0002943672 7.60579340544E-09 8.665204843584E-08 -1.282756029 -0.4352482152 1.76348109 0.6554142726 0.6683546181 0.5018611835 0.8140431709 0.5183774245 2.011872922 -1.187661112 1.15706164 0.2853902695 -0.8665127685 24 | 650.504691866222 -0.016456162284271 423156.354139968 0.000270805277126 580.205800338581 -0.132895977236331 0.017661340765599 336638.770746534 527.637759330944 -0.555614295124428 278401.605071779 0.308707244946615 1.149912 25.859257 0.1259848435 -0.0765045232 0.08757227 0.0156044179 -0.0009942499 0.0173864405 -0.0113902126 -0.0825472232 -0.0720331964 0.0045995223 -0.0427538994 0.0324656727 0.1509782856 -0.036430198 -0.0405260341 -0.0049965169 -0.0507507724 0.176293437 -0.1403275887 -0.0198161223 -0.1574082447 -0.1276654433 -0.008489048 -0.0420742591 -0.1093343749 -0.0382404815 -0.0739435989 0.0799966239 -0.1253150119 0.0107268694 0.0476527064 0.1123169934 -0.1090271136 -0.091954723 0.023255659 0.0630989042 -0.0531500162 0.0516425251 -0.0247943255 0.0571962689 -0.0288775729 -0.0149725585 0.0192477441 0.0171042344 -0.129752524 0.2111259382 0.0752186728 -0.1057948778 -0.070470701 0.0458251997 0.1010006843 0.1362874496 0.0121463013 -0.0859640425 -0.0250170815 -0.044467547 0.117314773 0.0533174432 -0.0939229383 -0.0104169197 0.1576665993 -0.0125666828 -0.0466297449 -0.0488963894 -0.0808687798 -0.0288956919 -0.0043011321 -0.0009160202 0.0462673403 0.0174503668 -0.0168999487 -0.0480970419 0.1341042742 -0.144941553 -0.0847987617 -0.0746079054 0.0410552631 -0.0852857891 -0.061260203 0.095607549 0.0530906625 0.0130406507 0.0217811916 -0.1596867428 0.0837368826 -0.068826525 0.053355215 0.0426215189 0.1063320224 0.0187940829 -0.0410936354 0.1247314383 0.0939272304 0.0861953618 0.0023438924 0.148660145 0.0525873844 0.0001646165 0.0529727876 0.054961166 -0.0605820077 0.0825202522 0.0928505609 0.0520508181 -0.0174499332 -0.0310048077 -0.1001490536 0.1505970813 0.0234930895 -0.0047646023 -0.0902817264 -0.0806039932 0.042707797 -0.0244402586 -0.0622433706 0.1608341227 -0.0620065944 -0.0236971307 -0.0176530341 -0.1294667227 -0.0716226901 0.0080041252 0.0958952142 0.0966423849 0.0588132983 -0.0544927726 -0.0083113917 0.1300110529 -0.1017831662 -0.1729264045 0.1184840973 -0.0566556712 -0.0683771787 0.1010690755 0.0709273254 0.02716601 -0.0218117611 0 -7.64535E-05 4.14935E-05 5.84513766225E-09 1.72171054225E-09 0.0359665 -0.0034359 1.18054088099999E-05 0.00129358912225 -0.0183547 -0.01665596 0.00033689501209 0.000277421003522 0.000390099 0.000992223 9.84506481729E-07 1.52177229801E-07 0.000601392 0.000889048 7.90406346304E-07 3.61672337664E-07 -0.00032297 -0.0002357588 1.043096209E-07 5.558221177744E-08 -1.973159727 -0.4080898694 0.9227240526 1.404555597 0.4036665105 
1.192474704 -0.4661763226 -0.02695029199 -1.045218684 0.8983331651 -0.008632115547 -0.785817666 1.689016159 25 | 650.608182842366 0.103490976144258 423291.007581446 0.010710382143291 579.342293823008 -0.863506515573476 0.745643502437846 335637.493412104 527.281169589787 -0.356589741157336 278025.431803974 0.127156243498656 1.092337 24.564526 0.1299975465 -0.0006600206 0.0096802632 0.079765913 -0.027813264 -0.0211733654 -0.1146284772 0.0133778564 -0.037774959 -0.1003199826 -0.0435230686 -0.0012242164 -0.0409996901 -0.0114571106 -0.0680357452 -0.028351566 0.0199988044 -0.0011304908 -0.0794700385 0.0387777282 -0.0624886917 0.0242746586 -0.132040686 -0.0728053728 0.0350047246 0.0130909103 0.1750237564 0.0027813886 -0.1463523552 0.002542231 0.0485864622 0.0790718873 -0.0567120071 0.1671425145 -0.0346092009 0.147026289 -0.0644012413 0.1393463564 0.1879647004 -0.0564059584 0.0754732958 -0.046776547 0.0448864058 0.0384040419 -0.0099751698 0.0059445896 0.0159206945 -0.0302595345 -0.037846996 0.0167369773 -0.0282993158 0.1078883279 -0.1088670716 0.030575391 -0.1869456352 0.131813985 -0.0236297275 0.1109747484 0.0580005057 0.0040892392 -0.1507165529 -0.196752771 0.0295400763 0.0273259827 0.0022294582 -0.0241230726 -0.0699802746 0.0116825881 -0.0350233665 -0.0754979637 -0.0115672959 0.0293398052 0.0036678462 0.0114994934 -0.024282981 -0.1126939533 -0.0943320379 0.0187643758 -0.0976623938 0.0032404284 0.0379971432 -0.0492858041 0.0284530649 0.0396534584 -0.0048949832 0.0954042399 -0.0532755473 0.1738143453 0.0060992382 -0.1174865818 0.102026538 -0.0603328015 0.0078486608 0.0222365087 -0.0608739817 0.0776055081 -0.1002754322 -0.0264017966 0.0374640303 0.1065223595 -0.0125397582 0.1145254493 -0.0154246149 -0.1430445713 0.0150287058 -0.0789318012 -0.1440869288 -0.1078198305 -0.0720701605 -0.0325397157 -0.0846827159 -0.1065400326 0.0275288806 0.0079960756 -0.1028278863 0.0851148016 0.0876158358 -0.0992804273 0.0123118838 -0.114691038 -0.025060519 0.0798408604 -0.0874652323 -0.0059935864 -0.0774970929 0.0600908344 -0.0841432594 0.0004173064 0.0448564363 0.0013784911 0.126186549 -0.0935828541 0.0299751113 0.1001110615 0.0674796057 0.0207341905 -0.0303479649 0 -0.018274 -0.0181975465 0.000333939076 0.00033115069862 0.0462822 0.0103157 0.00010641366649 0.00214204203684 0.0317251 0.0500798 0.00100648197001 0.00250798636804 -0.000345556 -0.000735655 5.41188279025E-07 1.19408949136E-07 0.000308957 -0.000292435 8.5518229225E-08 9.5454427849E-08 -0.00032297 0 1.043096209E-07 0 -1.124604465 -0.8038487921 2.036271001 -0.9500934547 0.560368987 0.4822558899 0.09424319143 0.02504831777 0.7323808452 0.8708165689 0.9543035964 1.125164602 -0.06471091288 26 | 648.175463385144 -2.43271945722222 420131.431334546 5.91812395754757 579.754332553044 0.412038730036215 0.169775915049857 336115.086114025 527.305697519214 0.02452792942745 278051.298636225 0.000601619321998 1.073452 24.139832 0.0352702 -0.0822921698 -0.066989862 -0.0959676932 0.0376155491 0.030046435 -0.0237340154 -0.0494049447 0.0568918214 0.0953365644 -0.0752175845 -0.0225162512 0.0105439653 0.0296136488 -0.0581030966 0.0639254577 0.0663258623 0.0913934959 0.0076198365 0.0454165971 0.0057320782 0.02970124 0.0347180862 -0.0055774992 -0.0346799523 0.0785588147 -0.0671443441 0.0167880095 0.0022332935 0.1153324002 0.0603684564 -0.0582406766 -0.0687971229 0.1581141266 -0.1209640332 0.054589117 -0.0584294801 -0.0127944515 0.0293124351 -0.0304444485 -0.0665979011 -0.0535571108 -0.0875293725 -0.1741355453 -0.0688331231 0.0705727151 -0.0283762804 0.0741079823 0.0880958053 
0.0083556855 0.1129671852 -0.1304578016 -0.0128161984 0.0221590071 -0.0073377394 -0.028650455 0.0825191804 -0.0329299503 -0.1879121701 0.036303618 -0.1467237235 0.0248180467 0.095233592 -0.0360096507 0.0189383178 -0.077452365 0.1061450705 0.0271705221 0.0312521972 0.0795794568 0.0387153006 0.0936153714 0.0044154869 -0.0158950613 -0.0659099026 -0.0256304687 -0.0399117806 -0.0358657822 0.0504732653 0.1469804713 -0.0321932045 -0.0065944334 -0.0241727213 -0.0589111475 0.0114436038 0.0305825657 -0.1475330749 0.044261094 -0.0285833466 -0.0051809354 -0.0969557074 0.106923167 -0.0722802146 0.0532232837 -0.0275763018 -0.0957421919 -0.0914256908 -0.0511935625 0.0597303882 0.1191845658 0.1157818973 -0.0845470333 0.0369681633 -0.0613887651 -0.0623316608 -0.0538978635 -0.0107643734 0.035619978 0.0504399962 0.0661798872 0.0200516469 0.0221081856 -0.0631800492 0.0787240874 -0.0120040111 0.0422897588 -0.1261917482 -0.0847275639 0.1690488621 0.0827024287 -0.0187152589 0.0135484634 0.1350362419 0.1425480001 0.0078989682 0.0222384767 0.0484859186 -0.1487297647 -0.0856322208 0.1131663966 0.0178548422 0.0252448647 0.0365913771 0.0991144528 0.0639278374 0.0142304488 -0.0386970634 0 -0.0182685 5.50000000000203E-06 0.00033373809225 3.02500000000223E-11 0.0303959 -0.0158863 0.00025237452769 0.00092391073681 0.0470331 0.015308 0.00221211249561 0.000234334864 -0.000416945 -7.1389E-05 5.096389321E-09 1.73843133025E-07 0.000318976 1.0019E-05 1.00380361E-10 1.01745688576E-07 -0.00032297 0 1.043096209E-07 0 -1.40005462 -0.7007828638 0.7650429045 -0.3785678621 0.8072633828 -0.4890064144 0.4000196909 0.702243609 -0.05733260874 -2.32810584 0.08247817344 -0.001332616781 0.4337049276 27 | 650.436008166667 2.26054478152332 423067.000719789 5.11006270927232 579.926657585781 0.172325032737376 0.029695916907938 336314.928178616 527.749062067493 0.443364548278964 278519.072513119 0.19657212267061 1.120121 25.189329 0.0475594 0.047246385 0.0625596863 0.0678454973 -0.0459501161 -0.0876250501 -0.0437111168 -0.0114778784 -0.0855843884 -0.0457519736 -0.0216354633 -0.0306444823 -0.0456206473 0.0008952208 0.0058075736 0.1189251288 0.0807542009 0.001652428 -0.0256275354 0.1790540633 -0.0352018615 -0.0626656416 0.0012141025 -0.0952764905 -0.1323762869 -0.035622091 -0.1293969647 -0.0022363786 0.0215440755 -0.0955703865 0.0731047584 0.0373728045 0.104438679 0.0259549099 -0.092396553 0.0523001009 0.063825726 0.0317605469 0.1062649765 -0.0002467043 0.0809690771 -0.0760839242 -0.0795262437 0.0467618093 0.1529100252 -0.0886978601 0.1070055558 -0.2046939514 0.0264410235 -0.0487660977 0.0352608259 0.0852311739 0.046765891 -0.0420445968 0.1889156799 -0.0732968355 0.1014314386 -0.0863574389 0.0903599448 -0.0378922014 0.0463035458 -0.044588849 0.0147757772 0.0281557666 -0.0305231022 0.0226245113 -0.0714543534 0.027775786 -0.0563970337 -0.0595845132 0.0614075094 0.0931214047 0.0698245534 0.0229754442 0.1031258645 -0.1070306785 0.1283341304 -0.0390027594 -0.0949036587 -0.0244052618 -0.0248706177 -0.0766423246 -0.022418751 0.0067621722 0.014729624 0.0387081564 0.024371694 -0.0892248553 0.1908891384 -0.097384035 -0.1477045358 0.0199724966 0.017824162 -0.0814395389 0.030140332 -0.028596771 -0.0437162369 -0.1548408933 0.0430439806 0.0331141372 0.1833472163 0.0391576487 0.0720211292 -0.0971212883 0.060797874 -0.0540880991 -0.1634195139 -0.0097956674 -0.1187545263 0.0666811746 0.0998472258 0.0072522004 -0.0339091961 0.0545104062 0.1331544557 -0.0386420412 0.0939511385 0.079647424 -0.0098301128 0.0280805792 -0.1663505745 -0.1680425146 0.0921429533 
-0.0605802644 0.1001418836 -0.1108329913 -0.0583536842 0.0551863979 0.0149204721 -0.0114546339 -0.0698060315 0.0008587859 -0.1227969542 0.0980796336 0.060277497 0.0076773447 -0.0468075816 0 -0.018271 -2.5000000000025E-06 0.000333829441 6.2500000000125E-12 0.0330906 0.0026947 7.26140808999999E-06 0.00109498780836 0.0169975 -0.0300356 0.00028891500625 0.00090213726736 -0.000525768 -0.000108823 1.1842445329E-08 2.76431989824E-07 0.000131267 -0.000187709 3.5234668681E-08 1.7231025289E-08 -0.00032297 0 1.043096209E-07 0 -1.2542692 -0.7384809464 0.6174004053 0.8758986059 0.5180277917 1.754443423 -0.04523461261 -1.2683525 1.14260992 0.49075313 0.7009874106 2.100017024 0.5773352868 28 | 650.014564188325 -0.421443978342836 422518.933656937 0.177615026881437 580.141551038312 0.214893452530873 0.046179195940639 336564.219241138 527.364460233571 -0.384601833922375 278113.273917446 0.147918570656454 1.090112 24.514484 0.10303625 -0.0065259623 -0.1115978934 0.0726972124 0.0284468945 0.0048081914 0.0925202512 -0.057478162 0.0932237076 -0.0471226756 -0.0703203926 -0.0703633388 -0.0575236012 -0.0373215916 0.0499732811 0.0048201752 -0.0776706554 0.1136519077 -0.1331062521 -0.0703916098 0.0534385026 -0.0375214996 0.1689709148 -0.0327116579 -0.0392000791 0.0709017748 -0.0087999564 0.0491001785 -0.032344269 -0.0005826904 0.0319533584 0.0118214223 0.0943671563 -0.077812487 -0.008247988 0.0103326581 -0.0033043455 -0.0294566285 -0.0093666248 -0.0332150756 -0.0324882021 0.0399022672 0.029594801 0.0616313982 0.0331167727 -0.1279077714 0.0450643524 -0.0043199134 0.0201295135 0.0921571592 0.0082897878 0.1014205987 0.0190442823 0.1367383519 -0.0659987304 -0.0873116645 -0.0680290055 -0.161581258 0.1760773961 -0.0521065344 0.0704622421 -0.0501656453 0.0158061411 -0.0470495332 0.0842181683 -0.0687421586 -0.0253582599 0.046407779 -0.0100192079 0.0937600803 0.0559503972 -0.1516162274 0.0775950969 0.0671990852 -0.0085942732 -0.0690630853 0.1080149749 -0.0648156046 0.0833981973 0.0079549907 0.0209082217 -0.021225005 -0.0092219421 0.1236191327 0.08813438 0.1129507458 0.101166687 -0.0073490859 0.0591632703 -0.0183149961 -0.0519672321 0.1247930138 0.0803980231 0.0843699412 0.0070521471 0.0555368277 -0.0202792019 0.1070463691 -0.0071833343 0.0447152521 -0.0438601253 -0.0121112817 0.0351114194 -0.0715789676 -0.0030889633 0.116909426 0.0217120832 0.0293730216 -0.0260474015 -0.0527798761 0.0201007326 0.0375219482 -0.0780462103 -0.0394009511 0.06625056 -0.1158496804 0.0620104305 -0.0417800533 -0.0195033432 0.0870308881 -0.0412151689 0.0447430864 0.1001114318 -0.1864432046 0.0639520283 -0.0476142779 0.0313312311 0.0264949154 0.0112376466 0.1837631667 0.1012754331 -0.0947948019 -0.0117405921 0.0970070028 0.056534213 0.0010976097 -0.0546295156 0 -0.0365464 -0.0182754 0.00133563935296 0.00033399024516 0.0583741 0.0252835 0.00063925537225 0.00340753555081 0.0696778 0.0526803 0.00485499581284 0.00277521400809 -0.000608452 -8.2684E-05 6.83664385599999E-09 3.70213836304E-07 0.000184524 5.3257E-05 2.836308049E-09 3.4049106576E-08 -0.00032297 0 1.043096209E-07 0 -1.525722719 -0.9055097993 0.2618465329 -0.118780625 0.2126442851 -0.5600490762 1.354404781 0.8886637561 1.168903176 -0.4419801101 0.5997572415 0.1934067937 0.3652155179 29 | 649.059907307194 -0.95465688113029 421278.763273624 0.911369760689414 580.129118249038 -0.012432789274044 0.000154574249133 336549.793840406 526.819262144566 -0.545198089004657 277538.534966545 0.29724095625433 1.120914 25.207161 0.05444385 0.0031215927 0.0898025723 -0.0551838473 -0.0393439768 0.042546517 
-0.0338390388 -0.0778313265 -0.0609277702 -0.0044770854 0.0833823005 0.037875182 0.0255289976 0.0004723665 -0.0995301563 -0.0969003439 0.023667008 0.1597442221 -0.1647032015 0.0514155322 0.093829299 -0.0120832665 0.1405063097 -0.0350405416 0.1361801333 -0.0582443202 0.0817172913 -0.0171580645 0.0296670233 -0.0122443144 0.1364948807 0.1073652173 0.0864604267 0.0187023164 -0.0005762354 0.0855690825 0.1222753351 -0.131828951 0.0005932202 0.0170089325 0.0757715337 -0.1755084757 -0.025814314 -0.0821332173 0.1836705839 0.0341510364 -0.1640387209 -0.0209397245 -0.0395165722 0.0366692308 0.071949149 -0.0045251167 -0.0971937365 -0.1133124554 0.0500388674 -0.061132021 -0.0518317255 0.0095202429 0.0026536402 0.0424868503 0.0383136435 0.0570893388 -0.0537066475 0.0583706595 -0.086028022 0.0537837335 0.041385558 -0.0749393253 -0.0171247913 -0.0190780927 -0.0133325564 -0.100927948 -0.0225075832 -0.1150000132 -0.0679225758 -0.0777746089 -0.0076285732 -0.1099744741 0.0052922806 -0.0982995313 0.0248259973 0.001140908 0.0718440289 -0.0543965462 -0.0206590421 0.0970680171 0.1335735126 -0.0120150784 -0.0374668974 0.0074801942 0.0838504763 0.1331652647 -0.0652304196 0.2227880508 0.0776692529 0.0112068793 -0.0652917685 0.0443896747 0.1858501059 0.0556185823 0.1902661666 0.0329295976 0.0695217721 -0.0373983031 0.1057948241 0.0691047365 0.0819605707 -0.0472251584 0.1049395747 0.0257740971 0.089428461 -0.0770213468 -0.0822956683 -0.0591061866 0.0158092681 0.0331176604 0.1013815532 0.1631092406 -0.0871798637 -0.0019978378 0.0558914458 0.0620167251 -0.005918184 0.0358317966 0.1129651761 0.0027137208 0.0049016507 0.0716050599 0.0619147508 -0.0670333316 0.0348261637 0.0620736563 0.0454919591 0.0958969739 0.0527037574 -0.0054859328 -0.0621146404 0 -0.0350205 0.0015259 0.00122643542025 2.32837081000001E-06 0.0428685 -0.0155056 0.00024042363136 0.00183770829225 0.0556377 -0.0140401 0.00309555366129 0.00019712440801 -0.000608452 0 0 3.70213836304E-07 0.000309905 0.000125381 1.5720395161E-08 9.6041109025E-08 -0.000665034 -0.000342064 4.42270221156E-07 1.17007780096E-07 -1.195733476 -0.9080729235 1.751457928 1.681444015 -0.001439557444 1.601634306 -0.5462170842 -0.1525657575 -1.300746971 0.166324304 1.049066916 -0.3602522976 1.063616812 30 | 648.884773846761 -0.175133460432903 421051.449730163 0.030671728963203 579.987382184624 -0.141736064413863 0.020089111955531 336385.363493373 526.729429963693 -0.0898321808736 277443.892389877 0.008069820720507 1.10969 24.954758 0.18590945 0.0434454955 -4.48138E-05 0.0603933085 -0.0247969338 0.0704835461 -0.0402946861 -0.0528070086 0.0050380651 -0.0758492391 -0.0145366562 -0.1253165894 -0.0079518454 0.0304849911 -0.1077567303 0.0603187624 -0.0591643397 -0.0307164084 -0.08946369 -0.0910560235 -0.0421933002 -0.1374257052 0.0519148131 -0.0059775711 -0.0291876245 0.0374570579 -0.0646871351 -0.1111939729 -0.116772541 0.0066316452 -0.0886082397 -0.0099865747 -0.0687788814 0.0687391216 -0.0893405581 -0.0509334148 0.0606955388 -0.0243279958 -0.0638895551 -0.0477960261 0.2218086878 0.0160996865 0.0334293418 0.147584891 0.0433907754 0.0709609886 0.0448301026 0.1373400625 0.0681777771 -0.0080646779 0.0292617416 -0.0333605408 0.1486238037 -0.0273733437 -0.0493131004 0.1907249972 0.0831606837 -0.0551252875 -0.0250517241 -0.002103549 0.1028726631 0.0102564153 0.0730937264 -0.0056343778 -0.0070901208 0.0062491538 -0.0856019132 -0.0632268091 0.0120852778 0.0740198128 0.0575298588 0.0002100333 -0.0055981958 0.1011555744 -0.1396576176 -0.0626551124 -0.0488065883 -0.0855410588 0.0350320992 0.0368210699 
0.0041733864 -0.1287536585 0.0921773114 0.0038465542 0.0148759668 0.0295927054 0.087200944 0.0852027623 0.0245287806 -0.0111326951 -0.1146047068 0.0361539104 0.0645073635 -0.1047685509 0.1216066332 0.0254172945 0.1031604934 -0.0976046681 -0.1298345247 0.0176840313 -0.1483183523 -0.0646130098 0.0333005783 0.006286241 0.0655086323 -0.0058420899 0.1250129107 -0.1241673614 -0.0072281641 -0.0351276429 0.0380981508 0.0760219858 0.0117788728 0.1137846811 -0.0165218309 0.0359944282 -0.0952614371 0.0391640896 -0.0047234361 -0.131901014 -0.1579907942 0.1930849384 -0.0141362712 -0.0203387282 0.049876613 -0.0359925844 -0.0606762515 -0.1382601054 -0.0251327125 -0.1108687323 0.0157446252 0.0196638047 -0.0407596089 0.094749975 0.0487920364 -0.0120504457 -0.0692168078 0 -0.0610964 -0.0260759 0.00373277009296 0.00067995256081 0.112297 0.0694285 0.00482031661225 0.012610616209 0.111714 0.0560763 0.012480017796 0.00314455142169 -0.00111743 -0.000508978 2.59058604484E-07 1.2486498049E-06 0.000402494 9.2589E-05 8.572722921E-09 1.62001420036E-07 -0.000750042 -8.5008E-05 5.62563001764E-07 7.22636006400001E-09 -1.04081886 -1.591154099 0.3762656451 -0.9898089924 0.1340146431 -0.8136024795 0.5401576829 0.8568933994 -0.3358618162 0.7533919106 1.085993991 0.6009536127 0.04979249387 31 | 645.780975416057 -3.10379843070405 417033.068209314 9.6335646984409 579.665739012702 -0.321643171922801 0.103454330044561 336012.368985141 525.869093790003 -0.860336173689575 276538.303803519 0.740178331758818 1.065129 23.95266 0.10726315 -0.0088635951 -0.0650065016 -0.1620102496 0.0394561316 -0.0149098144 -0.0607255847 -0.0357721783 0.0424865466 0.1244814792 0.0653441483 -0.0203398312 -0.1171700089 0.0929657656 -0.0439473623 -0.0832553985 0.0650953702 -0.0098386492 0.0529232676 0.0396783665 0.0645588862 -0.0644770207 0.0811622073 -0.0505418082 -0.0245079546 -0.0376329929 0.021394593 -0.097250245 0.0083904688 0.022183338 -0.1334866504 0.0534667068 0.0013832165 0.0082557863 0.0731497454 0.0063222839 -0.0032743689 0.0104026774 -0.1190191132 -0.0308518606 -0.1090316287 0.0521343128 0.123893617 -0.0278088738 0.0935222916 0.0475162012 0.0222886446 0.0875648086 0.0229885332 0.1435942691 -0.0170956277 -0.0478093501 0.0215248014 0.0382016763 -0.0267415164 0.0532613309 0.0102847416 0.0392170909 0.1652095105 0.0827882783 0.1196829806 -0.0307573337 -0.0185401323 -0.0106168565 0.0354453865 0.0369088998 0.1539401895 0.0096698307 0.0077052281 0.0092033306 0.0742705073 0.1147644395 -0.1042448724 -0.0437814913 -0.0033612548 -0.0339551537 -0.0599153661 -0.0458627338 0.0804943261 -0.0356075642 -0.1557894136 -0.0257917552 -0.0356390192 0.0294591656 -0.1497805195 0.0092892028 0.0208904067 -0.0053508366 0.0934409147 -0.0331564902 0.0340271089 0.0139647542 -0.0421230362 -0.0739970384 0.0455860412 0.0072739381 0.0927672489 0.0706831127 0.0247320447 -0.0316013605 -0.062484995 -0.0463698077 0.0307790836 0.0923427011 0.0315412707 0.1056568981 0.140043504 0.0500234409 0.1982543362 0.1342796509 0.0339473323 -0.0016465576 -0.02548206 0.0337653015 0.0082609537 -0.0656941157 0.0622331523 0.0473814276 -0.093829532 -0.0258052021 -0.0148296559 0.1157766456 -0.1029133318 -0.0645725529 -0.0898553946 0.093038356 0.014864925 -0.0119028787 0.0895496385 -0.0665598435 -0.0515589085 -0.0020800398 -0.0697527324 0.0935664481 0.0448050816 -0.0185731583 -0.0758922305 0 -0.0573831 0.0037133 0.00329282016561 1.378859689E-05 0.0593881 -0.0529089 0.00279935169921 0.00352694642161 0.0857324 -0.0259816 0.00735004440976 0.00067504353856 -0.000889677 0.000227753 5.18714290090001E-08 
7.91525164329E-07 0.000280318 -0.000122176 1.4926974976E-08 7.8578181124E-08 -0.0008933 -0.000143258 7.9798489E-07 2.0522854564E-08 -0.9353839013 -1.199185544 1.058831558 0.2486787279 -0.1911013529 0.8850572131 -0.870709663 0.4627067409 0.2844926037 -2.387087132 -0.1841354218 0.8174797382 -0.9483718058 32 | --------------------------------------------------------------------------------
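The rows above are raw fMRIPrep confound regressors shipped as a test fixture. As a rough illustration only — a minimal sketch assuming the `load()` entry point exposed by the package's strategy classes, with the fixture path written out for readability — such a file is meant to be consumed through a predefined strategy rather than parsed by hand:

```python
# Minimal sketch (assumptions noted in the lead-in): load the test confounds
# TSV with one of the package's predefined denoising strategies.
from load_confounds import Minimal

# Path to the fixture inside the repository; adjust if running from elsewhere.
tsv = "load_confounds/data/test_desc-confounds_regressors.tsv"

# Select a minimal set of confound regressors (motion, high-pass, WM/CSF)
# from the full table shown above.
confounds = Minimal().load(tsv)
print(confounds.shape)  # rows = time points, columns = selected regressors
```

The returned object can then be passed to nilearn maskers or GLM utilities as the `confounds` argument, which is how the test suite exercises these fixtures.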