├── .gitignore ├── Dockerfile ├── LICENSE ├── readme.md ├── requirements.txt └── src ├── run.py ├── segmentation ├── config.ini.example ├── predict_UKBB.py └── train.py ├── surface ├── BiVFitting │ ├── BiventricularModel.py │ ├── Diffeomorphic_fitting.py │ ├── Frame.py │ ├── GPDataSet.py │ ├── __init__.py │ ├── build_model_tools.py │ ├── fitting_tools.py │ ├── surface_enum.py │ └── visualization.py ├── README.md ├── config.ini.example ├── config_params.py ├── contour2gp_QS.py ├── contour_qc_summary.py ├── mass_volume │ ├── Mass_volume.py │ └── mesh.py ├── mesh_txt_to_vtk.py ├── model │ ├── ETIndicesEpiRVLV.txt │ ├── ETIndicesMaterials.txt │ ├── ETIndicesSorted.txt │ ├── ETIndicesThruWall.txt │ ├── GTSTG_x.txt │ ├── GTSTG_x_stiff_valve.txt │ ├── GTSTG_y.txt │ ├── GTSTG_y_stiff_valve.txt │ ├── GTSTG_z.txt │ ├── GTSTG_z_stiff_valve.txt │ ├── J11.txt │ ├── J12.txt │ ├── J13.txt │ ├── basis_matrix.txt │ ├── boundary.txt │ ├── control_mesh_connectivity.txt │ ├── control_points_patches.txt │ ├── epi_to_septum_ETindices.txt │ ├── etVertexElementNum.txt │ ├── etVertexXi.txt │ ├── fraction.txt │ ├── hermite_CC_derivs_3d.txt │ ├── hermite_derivs_3d.txt │ ├── local_matrix.txt │ ├── mBder_x.txt │ ├── mBder_y.txt │ ├── mBder_z.txt │ ├── model.txt │ ├── patch_coordinates.txt │ ├── phantom_points.txt │ ├── phantom_points_basis.txt │ ├── phantom_points_matrix.txt │ └── subdivision_matrix.txt └── perform_fit.py └── volumetric ├── LVendo_RVseptum_RVendo.par ├── README.md ├── amg_cg_opts ├── carpfunc.py ├── compute_UVC_fiber.py ├── config.ini.example ├── geometrical.py ├── main_testvmesh.py ├── meshIO.py ├── meshtool_func.py ├── py_atrial_fibres.py ├── transmural.par └── v_mesh_generation.py /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11-slim-bookworm 2 | 3 | RUN apt-get update && apt-get install -y wget libglib2.0-0 libgl1 libxrender1 libx11-6 4 | 5 | ENV VIRTUAL_ENV=/opt/venv 6 | RUN python3.11 -m venv $VIRTUAL_ENV 7 | ENV PATH="$VIRTUAL_ENV/bin:$PATH" 8 | 9 | COPY requirements.txt . 10 | RUN pip install --upgrade pip 11 | RUN pip install -r requirements.txt 12 | RUN rm requirements.txt 13 | 14 | RUN wget https://git.opencarp.org/api/v4/projects/16/packages/generic/opencarp-appimage/v11.0/openCARP-v11.0-x86_64_AppImage.tar.gz 15 | RUN tar xf openCARP-v11.0-x86_64_AppImage.tar.gz 16 | RUN ./openCARP-v11.0-x86_64_AppImage/openCARP-v11.0-x86_64.AppImage --appimage-extract 17 | RUN mv squashfs-root /opt/openCARP 18 | RUN rm -rf openCARP-v11.0-x86_64_AppImage* 19 | 20 | WORKDIR src 21 | COPY src/ . 22 | 23 | ENTRYPOINT ["python3", "run.py", "--data-dir", "/data", "--carp-bin-dir", "/opt/openCARP/usr/bin", "--workspace-dir", "/nnunet"] 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # BiV Volumetric Meshing 2 | 3 | ## Overview 4 | 5 | This software pipeline generates finite 6 | element biventricular heart models from cardiovascular magnetic resonance (CMR) images in the 7 | [UK Biobank](https://www.ukbiobank.ac.uk). 8 | The pipeline has four components. 9 | 10 | 1. segmentation: NIfTI images > Segmentations 11 | 2. contour: Segmentations > Contours 12 | 3. surface: Contours > Surface meshes 13 | 4. volumetric: Surface meshes > Volumetric meshes (including universal ventricular coordinates (UVC) and myocardial fiber structure) 14 | 15 | The pipeline may not work as expected on data from other sources. 
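For example, a subject's data passes through the stages as follows. `nnUNet_segs` is the default segmentation output directory; the later stage outputs go to similarly configurable subdirectories, so the remaining names shown are illustrative only.

```
SAX.nii.gz + LAX_2Ch.nii.gz + LAX_3Ch.nii.gz + LAX_4Ch.nii.gz   (input NIfTIs)
  -> nnUNet_segs/          (segmentations)
  -> contours              (ED and ES contours)
  -> surface meshes
  -> volumetric meshes with UVC and fiber
```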
16 | 17 | ## Prerequisites 18 | 19 | + Git (all) 20 | + Python >=3.9 (all) 21 | + openCARP >=10.0 (volumetric mesh, and UVC and fiber) 22 | 23 | #### Notes 24 | 1. nnUNet (segmentation) may not work with Python >=3.12 25 | 26 | ### Installing openCARP 27 | 28 | [openCARP](https://opencarp.org/download/installation) can be installed as an 29 | unprivileged user in the following way. The resulting binary directory will be 30 | `/opt/openCARP/usr/bin` or equivalent. 31 | 32 | ``` 33 | $ wget https://git.opencarp.org/api/v4/projects/16/packages/generic/opencarp-appimage/v11.0/openCARP-v11.0-x86_64_AppImage.tar.gz 34 | $ tar xf openCARP-v11.0-x86_64_AppImage.tar.gz 35 | $ ./openCARP-v11.0-x86_64_AppImage/openCARP-v11.0-x86_64.AppImage --appimage-extract 36 | $ mv squashfs-root /opt/openCARP 37 | ``` 38 | 39 | ## Setup 40 | 41 | ### Source code 42 | 43 | Download the source code from GitHub, cloning it into the `cme-dt-pipeline` directory used in the steps that follow. 44 | 45 | ``` 46 | $ git clone https://www.github.com/cdttk/biv-volumetric-meshing cme-dt-pipeline 47 | ``` 48 | 49 | ### Installing Python dependencies 50 | 51 | Install the necessary Python dependencies in a virtual environment. 52 | 53 | ``` 54 | $ cd cme-dt-pipeline 55 | $ python3 -m venv venv 56 | $ source ./venv/bin/activate 57 | $ pip install -r requirements.txt 58 | ``` 59 | 60 | ### Data directory 61 | 62 | All components of the pipeline operate on a data directory structured in a 63 | particular way. 64 | 65 | 1. Each subject is given a subdirectory in the data directory named after its 66 | identifier. 67 | 2. Data instance (visit) number *n* for a subject is given a subdirectory in the 68 | subject directory, named `Instance_n`, containing the relevant initial NIfTI 69 | files, and any subsequent output subdirectories. 70 | 71 | Each subject requires four input files in NIfTI format: one short axis (SAX) 72 | and three long axis (LAX) — 2, 3 and 4 chamber (Ch) views. These files 73 | must be named as follows. 74 | 75 | ``` 76 | SAX.nii.gz LAX_2Ch.nii.gz LAX_3Ch.nii.gz LAX_4Ch.nii.gz 77 | ``` 78 | 79 | #### Example of initial data directory 80 | 81 | ``` 82 | $ ls -R data 83 | 84 | data/: 85 | subject001 subject002 86 | 87 | data/subject001: 88 | Instance_2 89 | 90 | data/subject001/Instance_2: 91 | LAX_2Ch.nii.gz LAX_3Ch.nii.gz LAX_4Ch.nii.gz SAX.nii.gz 92 | 93 | data/subject002: 94 | Instance_2 Instance_3 95 | 96 | data/subject002/Instance_2: 97 | LAX_2Ch.nii.gz LAX_3Ch.nii.gz LAX_4Ch.nii.gz SAX.nii.gz 98 | 99 | data/subject002/Instance_3: 100 | LAX_2Ch.nii.gz LAX_3Ch.nii.gz LAX_4Ch.nii.gz SAX.nii.gz 101 | ``` 102 | 103 | #### Notes 104 | 1. Log files are also written to the data directory. 105 | 2. No subdirectories other than those for subjects should be present in the data 106 | directory. 107 | 3. Sample data, which would need to be named and structured as above, may be 108 | obtained from [here](https://github.com/baiwenjia/ukbb_cardiac/blob/master/demo_pipeline.py). 109 | 110 | 111 | ### nnUNet workspace (segmentation) 112 | 113 | The segmentation component requires that an nnUNet workspace be set up. 114 | 115 | Download the four required model files from [GitHub](https://www.github.com/cdttk/biv-volumetric-meshing/releases/tag/v1-public). 116 | 117 | ``` 118 | Dataset100_UKBB_Petersen_SAX.20240108.tar 119 | Dataset101_UKBB_LAX_2Ch.20240108.tar 120 | Dataset102_UKBB_LAX_3Ch.20240108.tar 121 | Dataset103_UKBB_LAX_4Ch.20240108.tar 122 | ``` 123 | 124 | Create a directory for the workspace wherever convenient and extract the model files to it.
125 | 126 | ``` 127 | $ mkdir nnunet_workspace 128 | $ for i in Dataset*.tar; do tar xf $i -C nnunet_workspace; done 129 | ``` 130 | 131 | ## Usage 132 | 133 | ``` 134 | $ cd cme-dt-pipeline 135 | $ source ./venv/bin/activate 136 | $ cd src 137 | $ python run.py 138 | ``` 139 | 140 | Select the component or components to be run. 141 | 142 | ``` 143 | --segmentation run segmentation 144 | --contour run contour 145 | --surface run surface 146 | --volumetric run volumetric 147 | --uvc-fiber run UVC and fiber 148 | 149 | --all-components run complete pipeline 150 | ``` 151 | 152 | ### Useful common arguments 153 | 154 | The following arguments are common across components. Selecting the data 155 | directory is required, other arguments are optional. Instance 2 is processed by 156 | default. Input and output directories have default values unless otherwise 157 | specified. All timeframes available are processed at each stage unless 158 | otherwise specified, except during contour extraction, which automatically selects 159 | ED and ES timeframes. 160 | 161 | ``` 162 | --data-dir DATA_DIR, -d DATA_DIR path to data directory 163 | --instance INSTANCE, -i INSTANCE instance to be processed 164 | --timeframe TIMEFRAME, -t TIMEFRAME timeframe to be processed 165 | --input-dir INPUT_DIR, -I INPUT_DIR name of input directories 166 | --output-dir OUTPUT_DIR, -o OUTPUT_DIR name of output directories 167 | --job JOB, -j JOB job identifier 168 | ``` 169 | 170 | Subjects are ordered alphabetically and indexed from zero. 171 | 172 | ``` 173 | --all, -a process all subjects 174 | --subject SUBJECT, -s SUBJECT subject id to be processed 175 | --start START, -S START index of first subject id to be processed 176 | --number NUMBER, -n NUMBER number of subjects to be processed from first subject id 177 | ``` 178 | 179 | #### Notes 180 | 181 | The following limitations currently apply. 182 | 183 | 1. Timeframe selection is not possible for segmentation. 184 | 2. Output directories cannot be specified for volumetric. 185 | 186 | ### Component-specific arguments 187 | 188 | #### Segmentation 189 | 190 | The path to the workspace directory must be specified. The `--gpu` flag may be 191 | used to run inference on a CUDA-capable GPU. 192 | 193 | ``` 194 | --workspace-dir WORKSPACE_DIR path to the nnUNet workspace directory 195 | --gpu run on gpu 196 | ``` 197 | 198 | #### Volumetric and UVC and fiber 199 | 200 | The path to the openCARP binaries directory must be specified. 201 | 202 | ``` 203 | --carp-bin-dir CARP_BIN_DIR the path to the openCARP binaries directory 204 | ``` 205 | 206 | ### Examples 207 | 208 | 1. Run segmentation for 5 subjects starting with the 1st subject 209 | 210 | ``` 211 | $ python run.py --data-dir /path/to/data --workspace-dir /path/to/workspace --segmentation --start 0 --number 5 212 | ``` 213 | 214 | 2. Run contour extraction for ED and ES timeframes for all subjects and write 215 | output to subdirectories named `contours.test1` 216 | 217 | ``` 218 | $ python run.py --data-dir /path/to/data --contour --all --output-dir contours.test1 219 | ``` 220 | 221 | 3. Run surface meshing for subject `subject001`, reading input from subdirectories 222 | named `contours.test1` 223 | 224 | ``` 225 | $ python run.py --data-dir /path/to/data --surface --subject subject001 --input-dir contours.test1 226 | ``` 227 | 228 | 4.
Run contour extraction and surface meshing for the first one hundred subjects and label the log `surface100` 229 | 230 | ``` 231 | $ python run.py --data-dir /path/to/data --contour --surface --start 0 --number 100 --job surface100 232 | ``` 233 | 234 | 5. Run volumetric meshing for timeframe 1 only for 5 subjects starting with the 10th subject 235 | 236 | ``` 237 | $ python run.py --data-dir /path/to/data --carp-bin-dir /opt/openCARP/usr/bin --volumetric --start 9 --number 5 --timeframe 1 238 | ``` 239 | 240 | 6. Run UVC and fiber generation for all timeframes for all subjects 241 | 242 | ``` 243 | $ python run.py --data-dir /path/to/data --carp-bin-dir /opt/openCARP/usr/bin --uvc-fiber --all 244 | ``` 245 | 246 | 7. Run the complete pipeline for subject `subject002` 247 | 248 | ``` 249 | $ python run.py --data-dir /path/to/data --workspace-dir /path/to/workspace --carp-bin-dir /opt/openCARP/usr/bin --all-components --subject subject002 250 | ``` 251 | 252 | ## Docker 253 | 254 | The code may alternatively be built and run using [Docker](https://www.docker.com). 255 | 256 | 257 | ``` 258 | $ cd cme-dt-pipeline 259 | $ docker build . -t cmedt/cdttk 260 | $ docker run --rm -v /path/to/data/directory:/data -v /path/to/nnunet/workspace:/nnunet cmedt/cdttk --all-components --all 261 | ``` 262 | 263 | To run segmentation using a GPU, the [NVIDIA Container Toolkit](https://github.com/NVIDIA/nvidia-container-toolkit) 264 | must be installed. 265 | 266 | ``` 267 | $ docker run --rm -v /path/to/data/directory:/data -v /path/to/nnunet/workspace:/nnunet --gpus=all cmedt/cdttk --all-components --all --gpu 268 | ``` 269 | 270 | ## Credits 271 | 272 | If you find this software useful, please consider giving appropriate credit by 273 | citing one of the papers below. 274 | 275 | 1. Devran Ugurlu, Shuang Qian, Elliot Fairweather et al; Cardiac Digital Twins 276 | at Scale from MRI: Open Tools and Representative Models from ~55000 UK 277 | Biobank Participants; to appear 278 | 279 | 2.
Shuang Qian, Devran Ugurlu, Elliot Fairweather et al; Developing Cardiac 280 | Digital Twins at Scale: Insights from Personalised Myocardial Conduction 281 | Velocity; medRxiv 2023; [DOI](https://doi.org/10.1101/2023.12.05.23299435) 282 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | cvxopt 2 | line_profiler 3 | matplotlib 4 | nibabel 5 | nltk 6 | nnunetv2 7 | numpy 8 | opencv-python 9 | pandas 10 | plotly 11 | pydicom 12 | pyvista 13 | scikit-image 14 | scipy 15 | torch 16 | -------------------------------------------------------------------------------- /src/run.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | 3 | import argparse 4 | import logging 5 | import os 6 | import warnings 7 | 8 | from segmentation import predict_UKBB 9 | from surface import contour2gp_QS 10 | from surface import perform_fit 11 | from surface import mesh_txt_to_vtk 12 | from volumetric import main_testvmesh 13 | 14 | warnings.simplefilter(action='ignore', category=FutureWarning) 15 | 16 | if __name__ == '__main__': 17 | 18 | parser = argparse.ArgumentParser(allow_abbrev=False) 19 | 20 | parser.add_argument('--all-components', action='store_true', help='run all components') 21 | parser.add_argument('--segmentation', action='store_true', help='run segmentation') 22 | parser.add_argument('--contour', action='store_true', help='run contour') 23 | parser.add_argument('--surface', action='store_true', help='run surface') 24 | parser.add_argument('--volumetric', action='store_true', help='run volumetric') 25 | parser.add_argument('--uvc-fiber', action='store_true', help='run UVC and fiber') 26 | 27 | args, _ = parser.parse_known_args() 28 | 29 | base_dir = os.path.dirname(__file__) 30 | 31 | logging.basicConfig(level=logging.DEBUG, format='%(asctime)s | %(name)s | %(levelname)s | %(message)s') 32 | 33 | if args.all_components: 34 | args.segmentation, args.contour, args.surface, args.volumetric, args.uvc_fiber = True, True, True, True, True 35 | 36 | if args.segmentation: 37 | os.chdir(os.path.join(base_dir, 'segmentation')) 38 | predict_UKBB.main() 39 | if args.contour: 40 | os.chdir(os.path.join(base_dir, 'surface')) 41 | contour2gp_QS.main() 42 | if args.surface: 43 | os.chdir(os.path.join(base_dir, 'surface')) 44 | perform_fit.main() 45 | mesh_txt_to_vtk.main() 46 | if args.volumetric: 47 | os.chdir(os.path.join(base_dir, 'volumetric')) 48 | main_testvmesh.main(True, False) 49 | if args.uvc_fiber: 50 | os.chdir(os.path.join(base_dir, 'volumetric')) 51 | main_testvmesh.main(False, True) 52 | -------------------------------------------------------------------------------- /src/segmentation/config.ini.example: -------------------------------------------------------------------------------- 1 | [default] 2 | DataDir = /path/to/data/directory 3 | WorkspaceDir = /path/to/nnunet/workspace/directory 4 | 5 | [myprofile] 6 | DataDir = /path/to/other/data/directory 7 | WorkspaceDir = /path/to/other/nnunet/workspace/directory 8 | -------------------------------------------------------------------------------- /src/segmentation/predict_UKBB.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utility script for nnUNet to predict UKBB NIFTIs (all time frames). 
Since creating a separate 3 | image for each timeframe in the whole UKBB dataset would create millions of files, this script 4 | is designed to work in batches of subjects rather than predicting the whole dataset at once. 5 | 6 | For the given batch and view (SAX, 2Ch etc.), the script will first create a temporary nnUNet 7 | dataset folder under nnUNet_raw. The temp dataset folder name will be either Dataset12345_temp 8 | if a dataset with id 12345 doesn't exist or the first id in the range 12345-12445 that is free to 9 | use. This allows running the script separately on different GPUs at once without manually 10 | determining the temp dataset name. It is recommended you have no prior datasets in the 12345-12445 11 | id range to make sure the script has free ids it can use. The temporary dataset will consist of 12 | single timeframe images created from the full NIFTIs of subjects in the batch for the given view. 13 | 14 | After the nnUNet dataset is created, the script just runs nnUNet prediction as usual. 15 | 16 | After nnUNet prediction is complete, the script will repack the segmentations into NIFTI files 17 | that contain all timeframes in a single file and then delete the temporary dataset. 18 | 19 | Currently, the batch is given as a range of integers such as 1-1000, which would make the 20 | script process the first 1000 subjects (with respect to how Python's sorted function orders the UKBB folder). We 21 | will probably add an alternative way to specify batches later on, like giving a file with a 22 | list of specific subjects. 23 | 24 | The possible views are currently "SAX", "LAX_4Ch", "LAX_3Ch" and "LAX_2Ch". These are not 25 | supplied as a separate argument. Instead, just name the model dataset so that it ends with the 26 | name of the view, e.g. the trained SAX model can be in an nnUNet dataset called 27 | Dataset100_UKBB_ManualSeg_SAX, the trained 2Ch model in a dataset called 28 | Dataset101_UKBB_ManualSeg_LAX_2Ch etc. 29 | """ 30 | 31 | import argparse 32 | import configparser 33 | import logging 34 | import os.path 35 | import re 36 | import shutil 37 | 38 | import nibabel as nib 39 | import numpy as np 40 | import subprocess 41 | 42 | from itertools import groupby 43 | 44 | 45 | logger = logging.getLogger(__name__) 46 | 47 | def split_nifti_t(subject_id: str, src: str, dest: str): 48 | """ 49 | Takes a NIFTI image that includes a time dimension and splits it into multiple images, each 50 | image being one time frame. 51 | 52 | Args: 53 | src: The source NIFTI file that is to be split.
54 | dest: The destination directory where the output files will be created 55 | """ 56 | input_nim = nib.load(src) 57 | 58 | src_split = src.split(os.sep) 59 | if src_split[-1][:3] == 'SAX': 60 | view = 'sax' 61 | elif src_split[-1][:7] == 'LAX_2Ch': 62 | view = 'lax_2ch' 63 | elif src_split[-1][:7] == 'LAX_3Ch': 64 | view = 'lax_3ch' 65 | elif src_split[-1][:7] == 'LAX_4Ch': 66 | view = 'lax_4ch' 67 | else: 68 | logger.error(f'{subject_id}: invalid view type') 69 | raise ValueError('Unexpected view type (Must be SAX, LAX_2Ch, LAX_3Ch or LAX_4Ch)') 70 | 71 | input_data = input_nim.get_fdata() # read the full 4D volume once, outside the loop 72 | for t in range(input_nim.header['dim'][4]): 73 | output_nim = nib.Nifti1Image(input_data[:, :, :, t], input_nim.affine, dtype=np.int16) 74 | # Change the NIFTI header as necessary 75 | output_nim.header.set_intent(1011) # dimless 76 | output_nim.header['pixdim'][:4] = input_nim.header['pixdim'][:4] 77 | output_nim.header.set_xyzt_units(xyz=2, t=16) 78 | output_nim.header['qform_code'] = 1 79 | output_nim.header['sform_code'] = 1 80 | 81 | # write to output file 82 | nib.save(output_nim, os.path.join(dest, f'{subject_id}_{view}_fr_{t:02d}_0000.nii.gz')) 83 | 84 | 85 | def repack_nifti_t(src_list: list[str], dest: str, input_nim: nib.Nifti1Image): 86 | """ 87 | Takes multiple NIFTI files, each file corresponding to a different timeframe of the same 88 | volume and repacks them into one NIFTI file with a time dimension. Also requires the 89 | header of the original input NIFTI file because the time spacing can only be recovered from 90 | there. 91 | 92 | Args: 93 | src_list: Source NIFTI files that are to be repacked. Each file should be a different time 94 | frame of the same volume. 95 | dest: The output NIFTI file to be created. 96 | input_nim: The input NIFTI file, i.e. the NIFTI image before segmentation that 97 | includes all timeframes in the single file. 98 | """ 99 | # sort the source file names. 100 | src_sorted = sorted(src_list) 101 | 102 | # After sorting, the time frames should be in order but let's double-check to make sure. 103 | if int(src_sorted[0][-9:-7]) != 0: 104 | raise ValueError("Time frames not in correct order. Check file names and sorting code.") 105 | for i in range(len(src_sorted) - 1): 106 | if int(src_sorted[i + 1][-9:-7]) - int(src_sorted[i][-9:-7]) != 1: 107 | raise ValueError( 108 | "Time frames not in correct order. Check file names and sorting code.") 109 | 110 | # Determine output array size and initialize to zeros. 111 | pixel_array = np.zeros(input_nim.header['dim'][1:5]) 112 | 113 | # fill the pixel_array 114 | for t in range(len(src_sorted)): 115 | nim = nib.load(src_sorted[t]) 116 | pixel_array[:, :, :, t] = nim.get_fdata() 117 | 118 | # create the output nifti image 119 | output_nim = nib.Nifti1Image(pixel_array, input_nim.affine, dtype=np.int16) 120 | 121 | # Change the NIFTI header as necessary.
122 | output_nim.header.set_intent(2001) # time series 123 | output_nim.header['xyzt_units'] = input_nim.header['xyzt_units'] 124 | output_nim.header['qform_code'] = input_nim.header['qform_code'] 125 | output_nim.header['sform_code'] = input_nim.header['sform_code'] 126 | 127 | # Write the output file 128 | nib.save(output_nim, dest) 129 | 130 | 131 | 132 | def main(): 133 | parser = argparse.ArgumentParser() 134 | 135 | parser.add_argument('--profile', '-p', action='store', default='default', help='config profile to be used') 136 | parser.add_argument('--job', '-j', action='store', default='default', help='job name') 137 | 138 | parser.add_argument('--data-dir', '-d', action='store', help='path to data directory') 139 | parser.add_argument('--workspace-dir', '-w', action='store', help='path to workspace directory') 140 | parser.add_argument('--input-dir', '-I', action='store', help='name of input directories') 141 | parser.add_argument('--output-dir', '-o', action='store', default='nnUNet_segs', help='name of output directories') 142 | 143 | parser.add_argument('--instance', '-i', type=int, action='store', default=2, help='instance to be processed') 144 | 145 | parser.add_argument('--all', '-a', action='store_true', help='process all subjects') 146 | parser.add_argument('--subject', '-s', action='store', help='subject id to be processed') 147 | parser.add_argument('--start', '-S', action='store', type=int, help='index of first subject id to be processed') 148 | parser.add_argument('--number', '-n', action='store', type=int, help='number of subjects to be processed from first subject id') 149 | 150 | parser.add_argument('--model-datasets', nargs='+', default=['100_UKBB_Petersen_SAX', '101_UKBB_LAX_2Ch', '102_UKBB_LAX_3Ch', '103_UKBB_LAX_4Ch'], help='names of the training model datasets to be used for prediction') 151 | parser.add_argument('--gpu', '-g', action='store_true', help='run on gpu') 152 | 153 | args, _ = parser.parse_known_args() 154 | 155 | cfg = configparser.ConfigParser() 156 | cfg.read('config.ini') 157 | 158 | WORKSPACE_DIR = args.workspace_dir if args.workspace_dir else cfg[args.profile]['WorkspaceDir'] 159 | 160 | os.environ['nnUNet_raw'] = os.path.join(WORKSPACE_DIR, 'nnUNet_raw') 161 | os.environ['nnUNet_preprocessed'] = os.path.join(WORKSPACE_DIR, 'nnUNet_preprocessed') 162 | os.environ['nnUNet_results'] = os.path.join(WORKSPACE_DIR, 'nnUNet_results') 163 | 164 | nnunet_raw_dir = os.path.join(WORKSPACE_DIR, 'nnUNet_raw') 165 | ukbb_nifti_dir = args.data_dir if args.data_dir else cfg[args.profile]['DataDir'] 166 | 167 | log_filename = os.path.join(ukbb_nifti_dir, f'segmentation-{args.job}.log') 168 | formatter = logging.Formatter(fmt='%(asctime)s | %(name)s | %(levelname)s | %(message)s') 169 | handler = logging.FileHandler(log_filename) 170 | handler.setFormatter(formatter) 171 | logger.addHandler(handler) 172 | 173 | sids = [name for name in os.listdir(ukbb_nifti_dir) if os.path.isdir(os.path.join(ukbb_nifti_dir, name))] 174 | 175 | if args.all: 176 | subject_ids = sorted(sids) 177 | elif args.subject: 178 | sid = args.subject 179 | if sid in sids: 180 | subject_ids = [sid] 181 | else: 182 | subject_ids = [] 183 | elif args.start is not None and args.start >= 0 and args.start < len(sids): 184 | if args.number is not None and args.number > 0: 185 | end = args.start + args.number - 1 186 | subject_ids = sorted(sids)[args.start:end+1] 187 | else: 188 | subject_ids = sorted(sids)[args.start:] 189 | else: 190 | subject_ids = [] 191 | 192 | logger.debug(f'starting job: 
{args.job}') 193 | 194 | for model_dataset in args.model_datasets: 195 | run(subject_ids, model_dataset, nnunet_raw_dir, ukbb_nifti_dir, args) 196 | 197 | logger.debug(f'finished job: {args.job}') 198 | 199 | def run(subject_ids, model_dataset, nnunet_raw_dir, ukbb_nifti_dir, args): 200 | # determine the view type from the model dataset name 201 | model_dataset_name = [ds for ds in os.listdir(nnunet_raw_dir) 202 | if 203 | ds.startswith("Dataset" + model_dataset) and not ds.endswith(".zip")] 204 | if len(model_dataset_name) != 1: 205 | raise ValueError("Matching dataset dirs for the given model dataset ID is not 1. Check " 206 | "model id and nnUNet dataset names. Something is wrong.") 207 | model_dataset_name = model_dataset_name[0] 208 | 209 | if model_dataset_name[-3:] == 'SAX': 210 | view = 'SAX' 211 | elif model_dataset_name[-7:] == 'LAX_2Ch': 212 | view = 'LAX_2Ch' 213 | elif model_dataset_name[-7:] == 'LAX_3Ch': 214 | view = 'LAX_3Ch' 215 | elif model_dataset_name[-7:] == 'LAX_4Ch': 216 | view = 'LAX_4Ch' 217 | else: 218 | logger.error('invalid dataset type') 219 | raise ValueError('Unexpected view type (The dataset name must end with SAX, LAX_2Ch, ' 220 | 'LAX_3Ch or LAX_4Ch)') 221 | 222 | # Create temporary nnUNet dataset from the given batch of subjects 223 | # existing datasets in nnUNet_raw 224 | existing_datasets = os.listdir(nnunet_raw_dir) 225 | existing_dataset_ids = [dataset.partition("_")[0][7:] for dataset in existing_datasets] 226 | 227 | # determine an id for the temp dataset to be created and create the folder 228 | min_id = 12345 229 | max_id = 12445 230 | temp_dataset_id = "-1" 231 | for i in range(min_id, max_id): 232 | if str(i) in existing_dataset_ids: 233 | i = i + 1 234 | if i == max_id: 235 | raise ValueError("The 12345 - 12445 dataset id range is already used. 
Cannot " 236 | "create temp dataset") 237 | else: 238 | os.makedirs(os.path.join(nnunet_raw_dir, 'Dataset' + str(i) + '_temp', 'imagesTs'), 239 | exist_ok=True) 240 | os.makedirs(os.path.join(nnunet_raw_dir, 'Dataset' + str(i) + '_temp', 'labelsTs'), 241 | exist_ok=True) 242 | temp_dataset_id = str(i) 243 | break 244 | 245 | # split the nifti images and copy to temp dataset folder 246 | for subject_id in subject_ids: 247 | subject_dir = os.path.join(ukbb_nifti_dir, subject_id, f'Instance_{args.instance}') 248 | input_dir = os.path.join(subject_dir, args.input_dir) if args.input_dir else subject_dir 249 | 250 | if not os.path.exists(input_dir): 251 | logger.error(f'{subject_id}: missing input directory') 252 | continue 253 | 254 | files = sorted(os.listdir(input_dir)) 255 | # process the view that corresponds to the model dataset 256 | for f in files: 257 | if f[:3] == view or f[:7] == view: 258 | logger.debug(f'{subject_id}: unpacking {os.path.join(input_dir, f)}') 259 | try: 260 | split_nifti_t(subject_id, os.path.join(input_dir, f), 261 | os.path.join(nnunet_raw_dir, 'Dataset' + temp_dataset_id + '_temp', 262 | 'imagesTs')) 263 | except ValueError: 264 | logger.error(f'{subject_id}: error unpacking {f}') 265 | 266 | logger.debug('finished unpacking') 267 | 268 | # # run nnUNet prediction 269 | temp_input_folder = os.path.join(nnunet_raw_dir, 'Dataset' + temp_dataset_id + '_temp', 270 | 'imagesTs') 271 | temp_output_folder = os.path.join(nnunet_raw_dir, 'Dataset' + temp_dataset_id + '_temp', 272 | 'labelsTs') 273 | try: 274 | subprocess.run(["nnUNetv2_predict", 275 | "-i", temp_input_folder, 276 | "-o", temp_output_folder, 277 | "-d", 'Dataset' + model_dataset, 278 | '-device', 'cuda' if args.gpu else 'cpu', 279 | "-c", "2d"], 280 | check=True) 281 | except subprocess.CalledProcessError as e: 282 | logger.error(f'nnUNet prediction failed: {e.output}') 283 | 284 | # repack the time frames of output segmentations into single file 285 | 286 | # group the segmentation files by subject id 287 | seg_files = sorted([f for f in os.listdir(temp_output_folder) if f.endswith(".nii.gz")]) 288 | 289 | def get_subject_id(filename): 290 | return re.search(r'^(.+)_(sax|lax_[234]ch)_fr_\d+\.nii\.gz$', filename).group(1) 291 | 292 | seg_files_grouped = [(s_id, list(i)) for s_id, i in groupby(seg_files, get_subject_id)] 293 | 294 | # Each group consists of different timeframes belonging to the same image. Repack them 295 | # into a single file. 296 | for subject_id, group in seg_files_grouped: 297 | # read the input NIFTI. We need this to correctly write the header of the segmentation. 298 | subject_dir = os.path.join(ukbb_nifti_dir, subject_id, f'Instance_{args.instance}') 299 | input_dir = os.path.join(subject_dir, args.input_dir) if args.input_dir else subject_dir 300 | 301 | if not os.path.exists(input_dir): 302 | logger.error(f'{subject_id}: missing input directory') 303 | continue 304 | 305 | files = sorted(os.listdir(input_dir)) 306 | 307 | input_filename = None 308 | for f in files: 309 | if f[:3] == view or f[:7] == view: 310 | input_filename = os.path.join(input_dir, f) 311 | 312 | if not input_filename: 313 | logger.error(f'{subject_id}: missing input file') 314 | raise ValueError("The view could not be matched to an input view. 
This should not " 315 | "happen here so either there is a bug or the input directory changed " 316 | "during execution.") 317 | 318 | input_nim = nib.load(input_filename) 319 | 320 | # determine output filename 321 | out_dir = os.path.join(subject_dir, f'{args.output_dir}') 322 | os.makedirs(out_dir, exist_ok=True) 323 | out_f = input_filename.split(os.sep)[-1][:-7] + '_nnUNetSeg.nii.gz' 324 | out_filename = os.path.join(out_dir, out_f) 325 | 326 | logger.debug(f'{subject_id}: packing {out_filename}') 327 | src_files = [os.path.join(temp_output_folder, f) for f in group] 328 | 329 | try: 330 | repack_nifti_t(src_files, out_filename, input_nim) 331 | except ValueError: 332 | logger.error(f'{subject_id}: error packing {out_f}') 333 | 334 | logger.debug('finished packing') 335 | 336 | # Remove temporary directories 337 | shutil.rmtree(os.path.join(nnunet_raw_dir, 'Dataset' + temp_dataset_id + '_temp')) 338 | 339 | if __name__ == "__main__": 340 | main() 341 |
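# Illustrative standalone invocation of this script (not part of the module;
# the paths are placeholders). All flags used below are defined in main() above:
#
#   $ python predict_UKBB.py --data-dir /path/to/data \
#         --workspace-dir /path/to/nnunet_workspace --start 0 --number 100 --gpu
#
# This predicts all four views for the first 100 subjects (in sorted order) and
# writes one repacked segmentation NIfTI per view into each subject's
# nnUNet_segs output directory (the --output-dir default).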
-------------------------------------------------------------------------------- /src/segmentation/train.py: -------------------------------------------------------------------------------- 1 | # utility script for nnUNet V2 to train multiple datasets. 2D configuration only. 2 | 3 | import argparse 4 | import subprocess 5 | import os 6 | import time 7 | import torch 8 | 9 | 10 | def main(): 11 | parser = argparse.ArgumentParser() 12 | 13 | parser.add_argument("--datasets", help="IDs of the datasets to train.", nargs='+') 14 | parser.add_argument("--folds", help="folds to train. If this argument is not passed, " 15 | "5 folds will be trained by default. ", nargs='*') 16 | parser.add_argument("--checkcudadevices", help="If this argument is passed, the script will " 17 | "just print the available cuda devices and " 18 | "exit. You can use this before training to " 19 | "decide which device you want to use", 20 | action="store_true") 21 | 22 | args = parser.parse_args() 23 | 24 | nnunet_preproc_dir = os.path.join(os.sep, "workspace", "nnUNet_workspace", 25 | "nnUNet_preprocessed") 26 | nnunet_results_dir = os.path.join(os.sep, "workspace", "nnUNet_workspace", "nnUNet_results") 27 | 28 | # if checkcudadevices is passed, just print the available cuda devices and exit 29 | if args.checkcudadevices: 30 | for i in range(torch.cuda.device_count()): 31 | print(f"Cuda device {i}: {torch.cuda.get_device_name(i)}") 32 | exit() 33 | 34 | if args.datasets is None: 35 | print("No Dataset given for training. Use --datasets to tell the script which datasets " 36 | "to train.") 37 | exit() 38 | 39 | # folds to train 40 | folds = ["0", "1", "2", "3", "4"] 41 | if args.folds is not None: 42 | folds = args.folds 43 | 44 | for dataset in args.datasets: 45 | 46 | # Check if plan and process was previously run on this dataset by checking the existence 47 | # of the plan file with default name. If the file doesn't exist, run plan and preprocess. 48 | dataset_folder_name = None 49 | for f in os.listdir(nnunet_preproc_dir): 50 | if f.startswith("Dataset" + dataset): 51 | dataset_folder_name = f 52 | 53 | if not dataset_folder_name: 54 | subprocess.run(["nnUNetv2_plan_and_preprocess", "-d", dataset, "-c", "2d", 55 | "--verify_dataset_integrity"]) 56 | else: 57 | default_plan_file = os.path.join(nnunet_preproc_dir, dataset_folder_name, 58 | "nnUNetPlans.json") 59 | 60 | if os.path.exists(default_plan_file): 61 | print(f"Default plan file already exists for dataset {dataset}. Skipping " 62 | f"plan and process.") 63 | else: 64 | subprocess.run(["nnUNetv2_plan_and_preprocess", "-d", dataset, "-c", "2d", 65 | "--clean", "--verify_dataset_integrity"]) 66 | 67 | # Train 68 | # create timer to time how long training takes 69 | timer_file = open(os.path.join(nnunet_results_dir, "Dataset_" + dataset + 70 | "_timerFile.txt"), "a") 71 | for fold in folds: 72 | timer_file.write(f"\nStarted training fold {fold}. \n") 73 | tic = time.perf_counter() 74 | subprocess.run(["nnUNetv2_train", dataset, "2d", fold]) 75 | toc = time.perf_counter() 76 | 77 | timer_file.write(f"Fold {fold} total train time: {toc - tic: 0.4f} seconds. ") 78 | 79 | timer_file.close() 80 | 81 | 82 | if __name__ == "__main__": 83 | main() 84 | -------------------------------------------------------------------------------- /src/surface/BiVFitting/Frame.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import copy 3 | class Point(): 4 | """ 5 | This is a class which holds contour points and the relevant information 6 | we require 7 | 8 | """ 9 | def __init__(self, pixel_coords=None, sop_instance_uid=None, weight = 1): 10 | if pixel_coords is None: 11 | self.pixel = np.empty(2) 12 | else: 13 | self.pixel = pixel_coords 14 | 15 | self.sop_instance_uid = sop_instance_uid 16 | self.coordinates = np.empty(3) 17 | self.weight = weight 18 | 19 | 20 | 21 | 22 | def __eq__(self, other): 23 | # pixel coordinates may be numpy arrays, so compare them with 24 | # np.array_equal; a bare == comparison on arrays is ambiguous 25 | if np.array_equal(self.pixel, other.pixel): 26 | equal = self.sop_instance_uid == other.sop_instance_uid 27 | else: 28 | equal = False 29 | 30 | return equal 31 | 32 | 33 | def deep_copy_point(self): 34 | new_point = Point() 35 | new_point.pixel = copy.deepcopy(self.pixel) 36 | new_point.sop_instance_uid = copy.deepcopy(self.sop_instance_uid) 37 | new_point.coordinates = copy.deepcopy(self.coordinates) 38 | new_point.weight = copy.deepcopy(self.weight) 39 | return new_point 40 | 41 | 42 | class Frame(): 43 | def __init__(self, image_id,position, orientation, pixel_spacing, 44 | image = None, subpixel_resolution = 1): 45 | self.position = position 46 | self.orientation = orientation 47 | self.pixel_spacing = pixel_spacing 48 | self.subpixel_resolution = subpixel_resolution 49 | self.image = image 50 | 51 | self.time_frame = 1 52 | self.slice = None 53 | self.image_id = image_id 54 | 55 | def get_affine_matrix(self, scaling = False): 56 | spacing = self.pixel_spacing 57 | image_position_patient = self.position 58 | image_orientation_patient = self.orientation 59 | # Translation 60 | T = np.identity(4) 61 | T[0:3, 3] = image_position_patient 62 | # Rotation 63 | R = np.identity(4) 64 | R[0:3, 0] = image_orientation_patient[0:3] 65 | R[0:3, 1] = image_orientation_patient[3:6] 66 | R[0:3, 2] = np.cross(R[0:3, 0], R[0:3, 1]) 67 | T = np.dot(T, R) 68 | # scale 69 | if scaling: 70 | S = np.identity(4) 71 | S[0, 0] = spacing[1] 72 | S[1, 1] = spacing[0] 73 | T = np.dot(T, S) 74 | return T
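# Illustrative use of Frame.get_affine_matrix (not part of the module): mapping
# an in-plane pixel index to patient coordinates. The geometry values are
# made-up placeholders following the DICOM convention this class assumes
# (position = ImagePositionPatient, orientation = ImageOrientationPatient,
# pixel_spacing = [row spacing, column spacing]):
#
#   frame = Frame(image_id=0,
#                 position=np.array([-50.0, -120.0, 10.0]),
#                 orientation=np.array([1.0, 0.0, 0.0, 0.0, 1.0, 0.0]),
#                 pixel_spacing=np.array([1.8, 1.8]))
#   T = frame.get_affine_matrix(scaling=True)
#   patient_xyz = (T @ np.array([64, 64, 0, 1]))[:3]   # pixel (col=64, row=64)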
-------------------------------------------------------------------------------- /src/surface/BiVFitting/__init__.py: -------------------------------------------------------------------------------- 1 | from .BiventricularModel import BiventricularModel 2 | from . import build_model_tools 3 | from . import fitting_tools 4 | from .Frame import Frame 5 | from .Frame import Point 6 | from .GPDataSet import GPDataSet 7 | from .surface_enum import Surface 8 | from .surface_enum import ContourType 9 | from .surface_enum import SURFACE_CONTOUR_MAP 10 | from .Diffeomorphic_fitting import * -------------------------------------------------------------------------------- /src/surface/BiVFitting/build_model_tools.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | def generate_gauss_points(nb_points): 4 | ''' 5 | Estimate gauss points and weights for x, y and z direction assuming 6 | interval(a,b) = [0,1] 7 | 8 | Parameters: 9 | ---------- 10 | 11 | 'nb_points' order of point scheme to be used 12 | 13 | Returns: 14 | ------- 15 | 16 | `weights` vector of gauss weights 17 | 18 | `points` vector of gauss points 19 | ''' 20 | # cf Alistair's code 21 | 22 | nx=nb_points 23 | ny=nb_points 24 | nz=nb_points 25 | 26 | # set limits 27 | ax=0 28 | bx=1 29 | ay=0 30 | by=1 31 | az=0 32 | bz=1 33 | 34 | # get gauss points in x direction through the functions points_weights 35 | xgauss_weights, xgauss_points = gauss_points_weights(nx) 36 | # obtain actual values of the gauss points and weights 37 | xpoints=(bx-ax)*0.5*xgauss_points+(bx+ax)*0.5 38 | xweights=(bx-ax)*0.5*xgauss_weights 39 | 40 | # get gauss points in y direction through the functions points_weights 41 | ygauss_weights, ygauss_points =gauss_points_weights(ny) 42 | 43 | # obtain actual values of the gauss points and weights 44 | ypoints=(by-ay)*0.5*ygauss_points+(by+ay)*0.5 45 | yweights=(by-ay)*0.5*ygauss_weights 46 | 47 | # get z points in z direction through the functions points_weights 48 | zgauss_weights, zgauss_points =gauss_points_weights(nz) 49 | 50 | # obtain actual values of the gauss points and weights 51 | zpoints=(bz-az)*0.5*zgauss_points+(bz+az)*0.5 52 | zweights=(bz-az)*0.5*zgauss_weights 53 | 54 | xi = np.zeros((nx*ny*nz,3)) 55 | weights = np.zeros(nx*ny*nz) 56 | count = 0 57 | for i in range(nx): 58 | for j in range(ny): 59 | for k in range(nz): 60 | # position of approximating points xi[xs ys], nx2 61 | xi[count,0] = xpoints[i] 62 | xi[count,1] = ypoints[j] 63 | xi[count,2] = zpoints[k] 64 | weights[count] = xweights[i]*yweights[j]*zweights[k] 65 | count=count+1 66 | return xi, weights 67 | 68 | 69 | 70 | def gauss_points_weights(number): 71 | ''' Generate gauss points and weights assuming interval[a,b] = [-1,1] 72 | 73 | Parameters: 74 | ----------- 75 | 76 | `number` order of point scheme to be used 77 | 78 | Returns: 79 | -------- 80 | 81 | `weights` vector of gauss weights 82 | 83 | `points` vector of gauss points 84 | 85 | ''' 86 | 87 | zero_tolerance = 1 * 10e-10 88 | 89 | # obtain whether odd or even number of points desired 90 | odd_even = number%2 91 | 92 | if (odd_even == 1): 93 | loop = number - 1 94 | else: 95 | loop = number 96 | 97 | eta =np.zeros(int(loop / 2)) 98 | w = np.zeros(int(loop/2)) 99 | #loop over half number of points wanted 100 | for j in range(int(loop *0.5)): 101 | # obtain initial estimate for first root 102 | eta[j] = np.cos(np.pi * (j+1 - 0.25) / (number + 0.5)) 103 | # initialise delta to 1 104 | delta = 1 105 | while (abs(delta) > zero_tolerance): 106 | Pn, dPn = legendreCIM(number, eta[j]) 107 | delta = -Pn / dPn 108 | eta[j] = eta[j] + delta 109 | w[j] = 2 / ((1 - eta[j] ** 2) * dPn ** 2) 110 | 111 | 112 | # record gauss points and weights 113 | points= np.zeros(number) 114 | weights = np.zeros(number) 115 | if (odd_even == 0): 116 |
for j in range(int(loop *0.5)): 117 | points[j] = -eta[j] 118 | points[number-1 - j] = eta[j] 119 | weights[j] = w[j] 120 | weights[number -1 - j] = w[j] 121 | 122 | else: 123 | points[int(loop*0.5) ] = 0 124 | Pn,dPn = legendreCIM(number, points[int(loop*0.5 )]) 125 | weights[int(loop*0.5) ] = 2 / ((1 - points[int(loop*0.5) ]**2) * dPn ** 2) 126 | for j in range(int(loop *0.5)): 127 | points[j] = -eta[j] 128 | points[number-1 - j ] = eta[j] 129 | weights[j] = w[j] 130 | weights[number-1 - j ] = w[j] 131 | 132 | 133 | return weights,points 134 | 135 | 136 | 137 | def legendreCIM(n, eta): 138 | '''Evaluates the Legendre polynomial and its derivative 139 | at a point eta(ith root) 140 | 141 | Parameters: 142 | ------------ 143 | 144 | `n` number of roots / degrees of freedom 145 | 146 | `eta` current estimate of the root i.e gauss point 147 | 148 | Returns: 149 | -------- 150 | 151 | `Pn` value of polynomial at n 152 | 153 | `dPn` derivative of polynomial at n 154 | ''' 155 | P= np.zeros(n+2) 156 | P[0] = 0 157 | P[1] = 1 158 | 159 | for i in range(1,n+1): 160 | P[i + 1] = ((2 * i - 1) * eta * P[i] - (i - 1) * P[i-1]) / i 161 | 162 | 163 | Pn = P[n+1 ] 164 | dPn = n * ((eta * P[n+1] - P[n]) / (eta ** 2 - 1)) 165 | 166 | return Pn, dPn 167 | 168 | def basis_function_bspline(s): 169 | ''' Evaluate the four uniform cubic B-Spline basis functions at a point s 170 | 171 | Parameters: 172 | ----------- 173 | 174 | `s` float point where to evaluate the b-spline basis 175 | 176 | Returns: 177 | -------- 178 | 179 | `bs` 4x1 vector b-spline basis functions value 180 | ''' 181 | bs = np.zeros(4) 182 | bs[0] = (1/6) * (1-3*s+3*s*s-s*s*s) 183 | bs[1] = (1/6) * (4 - 6*s*s+3*s*s*s) 184 | bs[2] = (1/6) * (1+3*s+3*s*s-3*s*s*s) 185 | bs[3] = (1/6) * s*s*s 186 | 187 | return bs 188 | 189 | def der2_basis_function_bspline(s): 190 | ''' Evaluate second derivatives of the four uniform cubic B-Spline 191 | basis functions at point s 192 | 193 | Parameters: 194 | ----------- 195 | 196 | `s` float point where to evaluate the b-spline basis 197 | 198 | Returns: 199 | -------- 200 | 201 | `ds` 4x1 vector with second derivatives of the b-spline basis functions 202 | ''' 203 | 204 | ds = np.zeros(4) 205 | ds[0] = 1-s 206 | ds[1] = 3*s-2 207 | ds[2] = 1-3*s 208 | ds[3] = s 209 | return ds 210 | 211 | def der_basis_function_bspline(s): 212 | ''' Evaluate derivatives of the four uniform cubic B-Spline 213 | basis functions at point s 214 | 215 | Parameters: 216 | ----------- 217 | 218 | `s` float point where to evaluate the b-spline basis 219 | 220 | Returns: 221 | -------- 222 | 223 | `ds` 4x1 vector with derivatives of the b-spline basis functions 224 | ''' 225 | ds = np.zeros(4) 226 | ds[0] = -0.5*s*s + s - 0.5 227 | ds[1] = 1.5*s*s - 2*s 228 | ds[2] = -1.5*s*s + s + 0.5 229 | ds[3] = 0.5*s*s 230 | return ds 231 | 232 | 233 | def adjust_boundary_weights(boundary, sWeights,tWeights): 234 | 235 | if int(boundary) & 1: 236 | tWeights[2] = tWeights[2] - tWeights[0] 237 | tWeights[1] = tWeights[1] + 2 * tWeights[0] 238 | tWeights[0] = 0 239 | 240 | 241 | if int(boundary) & 2: 242 | sWeights[1] = sWeights[1] - sWeights[3] 243 | sWeights[2] = sWeights[2] + 2 * sWeights[3] 244 | sWeights[3] = 0 245 | 246 | if int(boundary)& 4: 247 | tWeights[1] = tWeights[1] - tWeights[3] 248 | tWeights[2] = tWeights[2] + 2 * tWeights[3] 249 | tWeights[3] = 0 250 | 251 | 252 | if int(boundary) & 8: 253 | sWeights[2] = sWeights[2] - sWeights[0] 254 | sWeights[1] = sWeights[1] + 2 * sWeights[0] 255 | sWeights[0] = 0 256 | 257 | return sWeights, 
tWeights -------------------------------------------------------------------------------- /src/surface/BiVFitting/fitting_tools.py: -------------------------------------------------------------------------------- 1 | 2 | import numpy as np 3 | from scipy import optimize 4 | from scipy.spatial import cKDTree 5 | from scipy.spatial import Delaunay 6 | from plotly import graph_objects as go 7 | from operator import mul 8 | 9 | # Auxiliary functions 10 | def fit_circle_2d(x, y, w=[]): 11 | """ This function fits a circle to a set of 2D points 12 | Input: 13 | [x,y]: 2D points coordinates 14 | w: weights for points (optional) 15 | Output: 16 | [xc,yc]: center of the fitted circle 17 | r: radius of the fitted circle 18 | """ 19 | 20 | x = np.array(x) 21 | y = np.array(y) 22 | A = np.array([x, y, np.ones(len(x))]).T 23 | b = x**2 + y**2 24 | 25 | # Modify A,b for weighted least squares 26 | if len(w) == len(x): 27 | W = np.diag(w) 28 | A = np.dot(W,A) 29 | b = np.dot(W,b) 30 | 31 | # Solve by method of least squares 32 | c = np.linalg.lstsq(A,b,rcond=None)[0] 33 | 34 | # Get circle parameters from solution c 35 | xc = c[0]/2 36 | yc = c[1]/2 37 | center = np.array([xc, yc]) 38 | r = np.sqrt(c[2] + xc**2 + yc**2) 39 | return center, r 40 | 41 | 42 | def fit_elipse_2d(points, tolerance=0.01): 43 | """ This function fits an ellipse to a set of 2D points, using the Khachiyan minimum-volume enclosing ellipsoid algorithm 44 | Input: 45 | points: nx2 array of 2D point coordinates 46 | tolerance: convergence tolerance 47 | Output: 48 | center: center of the fitted ellipse 49 | radii, rotation: semi-axis lengths and rotation matrix of the ellipse 50 | """ 51 | 52 | (N, d) = np.shape(points) 53 | d = float(d) 54 | # Q will be our working array 55 | Q = np.vstack([np.copy(points.T), np.ones(N)]) 56 | QT = Q.T 57 | 58 | # initializations 59 | err = 1.0 + tolerance 60 | u = (1.0 / N) * np.ones(N) 61 | 62 | # Khachiyan Algorithm 63 | while err > tolerance: 64 | V = np.dot(Q, np.dot(np.diag(u), QT)) 65 | M = np.diag(np.dot(QT, np.dot(np.linalg.inv(V),Q))) # M the diagonal vector of an NxN matrix 66 | j = np.argmax(M) 67 | maximum = M[j] 68 | step_size = (maximum - d - 1.0) / ((d + 1.0) * (maximum - 1.0)) 69 | new_u = (1.0 - step_size) * u 70 | new_u[j] += step_size 71 | err = np.linalg.norm(new_u - u) 72 | u = new_u 73 | 74 | 75 | # center of the ellipse 76 | center = np.dot(points.T, u) 77 | # the A matrix for the ellipse 78 | A = np.linalg.inv( 79 | np.dot(points.T, np.dot(np.diag(u), points)) - 80 | np.array([[a * b for b in center] for a in center])) / d 81 | # Get the values we'd like to return 82 | U, s, rotation = np.linalg.svd(A) 83 | radii = 1.0 / np.sqrt(s) 84 | 85 | return (center, radii, rotation) 86 |
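# Quick numeric check of fit_circle_2d above (illustrative, not part of the
# module): exact points on a known circle are recovered by the least-squares fit.
#
#   t = np.linspace(0, 2 * np.pi, 50, endpoint=False)
#   center, r = fit_circle_2d(3.0 + 5.0 * np.cos(t), -2.0 + 5.0 * np.sin(t))
#   assert np.allclose(center, [3.0, -2.0]) and np.isclose(r, 5.0)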
Rodrigues rotation is used 90 | to project 3D points onto a fitting plane and get their 2D X-Y coords in the coord system of the plane 91 | Input: 92 | P: 3D points 93 | n0: plane normal 94 | n1: normal of the new XY coordinates system 95 | Output: 96 | P_rot: rotated points 97 | 98 | """ 99 | # If P is only a 1d np.array (coords of a single point), fix it to be a matrix 100 | if P.ndim == 1: 101 | P = P[np.newaxis,:] 102 | 103 | # Get vector of rotation k and angle theta 104 | n0 = n0/np.linalg.norm(n0) 105 | n1 = n1/np.linalg.norm(n1) 106 | k = np.cross(n0,n1) 107 | k = k/np.linalg.norm(k) if np.linalg.norm(k) > 0 else k # guard: if n0 and n1 are parallel, theta is 0 and no rotation is applied 108 | theta = np.arccos(np.clip(np.dot(n0,n1), -1.0, 1.0)) 109 | 110 | # Compute rotated points 111 | P_rot = np.zeros((len(P),3)) 112 | for i in range(len(P)): 113 | P_rot[i] = P[i]*np.cos(theta) + np.cross(k,P[i])*np.sin(theta) + k*np.dot(k,P[i])*(1-np.cos(theta)) 114 | 115 | return P_rot 116 | 117 | def Plot2DPoint(points, color_markers, size_markers, nameplot=" "): 118 | """ Plot 2D points 119 | Input: 120 | points: 2D points 121 | color_markers: color of the markers 122 | size_markers: size of the markers 123 | nameplot: plot name (default: " ") 124 | 125 | Output: 126 | trace: trace for figure 127 | """ 128 | trace = go.Scatter( 129 | x=points[:,0], 130 | y=points[:,1], 131 | name = nameplot, 132 | mode='markers', 133 | marker=dict(size=size_markers,opacity=1.0,color = color_markers) 134 | ) 135 | return [trace] 136 | 137 | def Plot3DPoint(points, color_markers, size_markers, nameplot=" "): 138 | """ Plot 3D points 139 | Input: 140 | points: 3D points 141 | color_markers: color of the markers 142 | size_markers: size of the markers 143 | nameplot: plot name (default: " ") 144 | 145 | Output: 146 | trace: trace for figure 147 | """ 148 | 149 | trace = go.Scatter3d( 150 | x=points[:,0], 151 | y=points[:,1], 152 | z=points[:,2], 153 | name = nameplot, 154 | mode='markers', 155 | marker=dict(size=size_markers,opacity=1.0,color = color_markers) 156 | ) 157 | return [trace] 158 | 159 | #@profile 160 | def LineIntersection(ImagePositionPatient,ImageOrientationPatient,P0,P1): 161 | """ Find the intersection of the line P0-P1 with the MRI image plane. 162 | Input: 163 | P0 and P1 are each a single 3D coordinate point. 164 | Output: 165 | P is the intersection point (if any, see below) on the image plane. 166 | P is in 3D coordinates. Use M.PatientToImage to convert it into 2D coordinates. 167 | 168 | P will return empty if M is empty. 169 | P will also return empty if the P0-P1 line is parallel to the image plane M.
170 | Adapted from Avan Suinesiaputra 171 | """ 172 | 173 | ''' 174 | R = np.identity(4) 175 | 176 | R[0,0:3] = ImageOrientationPatient[0:3] 177 | R[1,0:3] = ImageOrientationPatient[3:6] 178 | R[2,0:3] = np.cross(R[0,0:3],R[1,0:3]) 179 | R[3,0:3] = ImagePositionPatient 180 | 181 | normal = R[2,0:3] 182 | 183 | u = P1-P0 184 | 185 | nu = np.dot(normal,u) 186 | if np.all(nu==0): # normal and u are orthogonal: the line is parallel to the plane 187 | return P0 188 | 189 | # compute how far from P0 to reach the plane 190 | s = (np.dot(normal.T , (R[3,0:3] - P0))) / nu 191 | 192 | # compute P 193 | P = P0 + s * u 194 | 195 | return P 196 | ''' 197 | #LDT (3/11/21): this is faster 198 | def cross(a, b): 199 | c = [a[1]*b[2] - a[2]*b[1], 200 | a[2]*b[0] - a[0]*b[2], 201 | a[0]*b[1] - a[1]*b[0]] 202 | 203 | return c 204 | 205 | normal = cross(ImageOrientationPatient[0:3],ImageOrientationPatient[3:6]) 206 | u = P1-P0 207 | nu = np.dot(normal, u) 208 | #nu = sum(map(mul, normal, u)) 209 | 210 | if nu == 0.0: # normal and u are orthogonal: the line is parallel to the image plane 211 | return P0 212 | 213 | s = (np.dot(np.array(normal).T , (ImagePositionPatient - P0))) / nu 214 | #s = sum(map(mul, np.array(normal).T, (ImagePositionPatient - P0)))/ nu 215 | 216 | P = P0 + s * u 217 | 218 | return P 219 | 220 | 221 | 222 | def generate_2Delipse_by_vectors(t, center, radii, rotation=None): 223 | """ This function generates points on an ellipse 224 | Input: 225 | t: array of point angles on the ellipse 226 | center: center of the ellipse 227 | radii: [a, b] semi-axis lengths; if scalar, a circle is 228 | generated 229 | rotation: 2x2 rotation matrix of the ellipse axes (default: identity) 230 | Output: 231 | nx2 array of points on the ellipse (a circle if radii is scalar) 232 | """ 233 | if np.isscalar(radii): 234 | radii = [radii,radii] 235 | if rotation is None: 236 | rotation = np.array([[1,0],[0,1]]) 237 | 238 | x = radii[0] * np.cos(t) 239 | y = radii[1] * np.sin(t) 240 | for i in range(len(x)): 241 | [x[i], y[i]] = np.dot([x[i], y[i]],rotation) + center 242 | return np.array([x, y]).T 243 | 244 | 245 | def apply_affine_to_points(affine_matrix, points_array): 246 | """ apply affine matrix to 3D points, only in-plane transformation is considered 247 | input: 248 | affine_matrix : 4x4 matrix describing the affine 249 | transformation 250 | points_array: nx3 array with points coordinates 251 | output: 252 | t_points_array: nx3 array with point coordinates in the new 253 | position 254 | """ 255 | points_array_4D = np.ones((len(points_array), 4)) 256 | points_array_4D[:, 0:3] = points_array 257 | t_points_array = np.dot(points_array_4D, affine_matrix.T) 258 | t_points_array = t_points_array[:, 0:3] / ( 259 | np.vstack((t_points_array[:, 3], t_points_array[:, 3], t_points_array[:, 3]))).T 260 | return t_points_array 261 | 262 | 263 | 264 | 265 | def register_group_points_translation_only(source_points, target_points, slice_number, 266 | weights = None, 267 | exclude_outliers = False, 268 | norm = 1): 269 | """ compute the optimal translation between two sets of grouped points 270 | each group for the source points will be projected into the corresponding 271 | group from target points 272 | input: 273 | source_points = array of nx2 arrays with points coordinates, moving 274 | points 275 | target_points = array of nx2 arrays with points coordinates, 276 | fixed points 277 | output: 2D translation vector 278 | """ 279 | # this checks that the number of contours used is the same 280 | if len(source_points) != len(target_points): 281 | return np.array([0,0]) 282 | 283 | def obj_function(x): 284 | f = 0 285 | nb = 0 286 | 287 | if norm not in
[1,2]: 288 | raise ValueError('Register group points: only norm 1 and 2 are ' 289 | 'implemented') 290 | 291 | 292 | for index,target in enumerate(target_points): 293 | 294 | # LDT: generate nearest neighbours tree 295 | tree = cKDTree(target) # provides an index into a set of k-dimensional points which can be used to rapidly look up the nearest neighbors of any point. 296 | new_points = source_points[index]+np.array(x) 297 | # Query the kd-tree for the nearest neighbor, using euclidean distance. 298 | d, indx = tree.query(new_points, k=1, p=2) 299 | # output d is an array of distances to the nearest neighbor 300 | #print('d ', d, ' idx', indx) 301 | if exclude_outliers: 302 | d[d>10] = 0 303 | 304 | nb = nb + len(d) 305 | 306 | 307 | #print('nb', nb) 308 | if weights is None: 309 | f = f + sum(np.power(d,norm)) 310 | else: 311 | f = f + weights[index]*sum(np.power(d,norm)) 312 | 313 | return np.sqrt(f/nb) 314 | 315 | t = optimize.fmin(func=obj_function, x0=[0, 0], disp=False) 316 | 317 | #print('\nshift', t) 318 | #t = optimize.fmin_slsqp(func=obj_function, x0=[0, 0], disp=False) 319 | 320 | return t 321 | 322 | #@profile 323 | def sort_consecutive_points(C): 324 | "added by A.Mira on 01/2020" 325 | if isinstance(C, list): 326 | C = np.array(C) 327 | Cx = C[0, :] 328 | lastP = Cx 329 | C_index = [0] 330 | #index_list = np.array(range(1,C.shape[0])) 331 | index_list = range(1,C.shape[0]) #LDT 10/11 332 | 333 | Cr = np.delete(C, 0, 0) 334 | # iterate through points until all points are taken away 335 | while Cr.shape[0] > 0: 336 | # find the point in Cr closest to the last sorted point 337 | i = (np.square(lastP - Cr)).sum(1).argmin() 338 | lastP = Cr[i] 339 | Cx = np.vstack([Cx, lastP]) 340 | C_index.append(index_list[i]) 341 | Cr = np.delete(Cr, i, 0) 342 | index_list = np.delete(index_list,i) 343 | 344 | return C_index,Cx 345 | 346 | 347 | 348 | 349 | def compute_area_weighted_centroid(points): 350 | 351 | # centroids were calculated using the area-weighted average 352 | # of the barycentre of the triangles from the triangulation 353 | # of the intersection points 354 | # Get triangulation 355 | T = Delaunay(points) 356 | n = len(T.simplices) 357 | W = np.zeros((n, 1)) 358 | C = 0 359 | 360 | for k in range(n): 361 | sp = points[T.simplices[k, :], :] 362 | a = np.linalg.norm(sp[1, :] - sp[0, :]) 363 | b = np.linalg.norm(sp[2, :] - sp[1, :]) 364 | c = np.linalg.norm(sp[2, :] - sp[0, :]) 365 | s = (a + b + c) / 2 366 | w = s * (s - a) * (s - b) * (s - c) 367 | if w < 0: 368 | W[k] = 0 369 | else: 370 | W[k] = np.sqrt(w) # Heron's formula for the triangle area 371 | C = C + np.multiply(W[k], sp.mean(axis=0)) 372 | 373 | C = C / np.sum(W) 374 | 375 | return C 376 | -------------------------------------------------------------------------------- /src/surface/BiVFitting/surface_enum.py: -------------------------------------------------------------------------------- 1 | 2 | from enum import IntEnum, Enum 3 | 4 | class Surface(IntEnum): 5 | ''' 6 | Surface identifiers of the biventricular model. 7 | ''' 8 | LV_ENDOCARDIAL = 0 # LV endocardium, 9 | RV_SEPTUM = 1 # RV septum, 10 | RV_FREEWALL = 2 # RV free wall, 11 | EPICARDIAL = 3 # epicardium, 12 | MITRAL_VALVE = 4 # mitral valve, 13 | AORTA_VALVE = 5 # aorta, 14 | TRICUSPID_VALVE = 6 # tricuspid, 15 | PULMONARY_VALVE = 7 # pulmonary valve, 16 | RV_INSERT = 8 # RV insert.
17 | APEX = 9 18 | 19 | 20 | class ContourType(Enum): 21 | ''' 22 | Contour types read from the guide-point and slice-information files. 23 | ''' 24 | # SAX = short axis 25 | # LAX = long axis 26 | SAX_RV_FREEWALL = "SAX_RV_FREEWALL" # RV free wall, 27 | LAX_RV_FREEWALL = "LAX_RV_FREEWALL" 28 | SAX_RV_SEPTUM = "SAX_RV_SEPTUM" 29 | LAX_RV_SEPTUM = "LAX_RV_SEPTUM" 30 | SAX_RV_OUTLET = 'SAX_RV_OUTLET' 31 | 32 | RV_INSERT = "RV_INSERT" 33 | 34 | LAX_RV_ENDOCARDIAL = "LAX_RV_ENDOCARDIAL" 35 | LAX_RV_EPICARDIAL = 'LAX_RV_EPICARDIAL' 36 | LAX_LV_ENDOCARDIAL = "LAX_LV_ENDOCARDIAL" 37 | LAX_LV_EPICARDIAL = "LAX_LV_EPICARDIAL" 38 | 39 | SAX_RV_ENDOCARDIAL = "SAX_RV_ENDOCARDIAL" 40 | SAX_RV_EPICARDIAL = "SAX_RV_EPICARDIAL" 41 | SAX_LV_ENDOCARDIAL = "SAX_LV_ENDOCARDIAL" 42 | SAX_LV_EPICARDIAL = "SAX_LV_EPICARDIAL" 43 | 44 | MITRAL_VALVE = "MITRAL_VALVE" 45 | MITRAL_PHANTOM = "MITRAL_PHANTOM" 46 | APEX_POINT = "APEX_POINT" 47 | TRICUSPID_VALVE = "TRICUSPID_VALVE" 48 | PULMONARY_VALVE = "PULMONARY_VALVE" 49 | PULMONARY_PHANTOM = "PULMONARY_PHANTOM" 50 | AORTA_VALVE = "AORTA_VALVE" 51 | AORTA_PHANTOM = "AORTA_PHANTOM" 52 | TRICUSPID_PHANTOM = "TRICUSPID_PHANTOM" 53 | LAX_RA = "LAX_RA" 54 | LAX_LA = "LAX_LA" 55 | LAX_RV_EXTENT = 'LAX_RV_EXTENT' 56 | LAX_LV_EXTENT = 'LAX_LV_EXTENT' 57 | 58 | SURFACE_CONTOUR_MAP = { 59 | Surface.LV_ENDOCARDIAL.value : [ 60 | ContourType.SAX_LV_ENDOCARDIAL, ContourType.LAX_LV_ENDOCARDIAL], 61 | Surface.RV_SEPTUM.value: [ 62 | ContourType.LAX_RV_SEPTUM, ContourType.SAX_RV_SEPTUM], 63 | Surface.RV_FREEWALL.value: [ 64 | ContourType.SAX_RV_FREEWALL, ContourType.LAX_RV_FREEWALL, ContourType.SAX_RV_OUTLET], 65 | Surface.EPICARDIAL.value: [ 66 | ContourType.SAX_LV_EPICARDIAL, ContourType.LAX_LV_EPICARDIAL, 67 | ContourType.SAX_RV_EPICARDIAL, ContourType.LAX_RV_EPICARDIAL], 68 | Surface.MITRAL_VALVE.value: [ 69 | ContourType.MITRAL_VALVE, ContourType.MITRAL_PHANTOM], 70 | Surface.AORTA_VALVE.value: [ 71 | ContourType.AORTA_VALVE, ContourType.AORTA_PHANTOM], 72 | Surface.TRICUSPID_VALVE.value: [ 73 | ContourType.TRICUSPID_VALVE,ContourType.TRICUSPID_PHANTOM], 74 | Surface.PULMONARY_VALVE.value: [ 75 | ContourType.PULMONARY_VALVE, ContourType.PULMONARY_PHANTOM], 76 | Surface.RV_INSERT.value: [ 77 | ContourType.RV_INSERT], 78 | Surface.APEX.value: [ContourType.APEX_POINT] 79 | } 80 | -------------------------------------------------------------------------------- /src/surface/BiVFitting/visualization.py: -------------------------------------------------------------------------------- 1 | #import moviepy.editor as mpy 2 | import numpy as np 3 | import warnings 4 | 5 | import matplotlib 6 | import matplotlib.pyplot as plt 7 | cmap = plt.cm.get_cmap('gist_rainbow') 8 | from copy import deepcopy 9 | 10 | 11 | # Some systems have the mayavi2 module referenced by different names.
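# Editorial sketch (not part of the original code): a guarded import along
# these lines would tolerate the differing module names mentioned above; the
# 'enthought.mayavi' fallback path is an assumption. A similar guard would be
# needed for the commented-out moviepy import (mpy) used by make_animation().
# try:
#     from mayavi import mlab
# except ImportError:
#     from enthought.mayavi import mlab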
12 | #from mayavi import mlab 13 | 14 | class Figure: 15 | 16 | def __init__(self, figure='Default', fgcolor=(1, 1, 1), 17 | bgcolor=(0, 0, 0), size=(400, 400)): 18 | 19 | 20 | self.figure = mlab.figure(figure,fgcolor = fgcolor, bgcolor=bgcolor, size=size) 21 | self.plots = {} 22 | #mlab.gcf().scene.renderer.set(use_depth_peeling=True) 23 | 24 | def clear(self, label=None): 25 | if label == None: 26 | labels = self.plots.keys() 27 | else: 28 | labels = [label] 29 | 30 | mlab.figure(self.figure.name) 31 | 32 | for label in labels: 33 | mlab_obj = self.plots.get(label) 34 | if mlab_obj != None: 35 | if mlab_obj.name == 'Surface': 36 | mlab_obj.parent.parent.parent.remove() 37 | else: 38 | mlab_obj.parent.parent.remove() 39 | self.plots.pop(label) 40 | 41 | def hide(self, label): 42 | if label in self.plots.keys(): 43 | self.plots[label].visible = False 44 | 45 | def show(self, label): 46 | if label in self.plots.keys(): 47 | self.plots[label].visible = True 48 | 49 | def plot_surfaces(self, label, verts, facets, scalars=None, vmax = None, 50 | vmin=None, color=None, rep='surface', opacity=1.0): 51 | 52 | 53 | if color == None: 54 | color = (1, 0, 0) 55 | 56 | self.figure.scene.disable_render = True 57 | mlab_obj = self.plots.get(label) 58 | if not (mlab_obj == None): 59 | self.clear(label) 60 | 61 | if scalars is None: 62 | self.plots[label] = mlab.triangular_mesh(verts[:, 0], verts[:, 1], verts[:, 2], 63 | facets, color=color, opacity = opacity, 64 | representation=rep) 65 | 66 | else: 67 | 68 | if (vmax is None): 69 | if (vmin is None): 70 | self.plots[label] = mlab.triangular_mesh(verts[:, 0], 71 | verts[:, 1], 72 | verts[:, 2], 73 | facets, 74 | scalars=scalars, 75 | opacity=opacity) 76 | else: 77 | self.plots[label] = mlab.triangular_mesh(verts[:, 0], 78 | verts[:, 1], 79 | verts[:, 2], 80 | facets, 81 | scalars=scalars, 82 | vmin = vmin, 83 | opacity=opacity) 84 | else: 85 | if vmin is None: 86 | self.plots[label] = mlab.triangular_mesh(verts[:, 0], 87 | verts[:, 1], 88 | verts[:, 2], 89 | facets, 90 | scalars=scalars, 91 | vmax = vmax, 92 | opacity=opacity) 93 | else: 94 | self.plots[label] = mlab.triangular_mesh(verts[:, 0], 95 | verts[:, 1], 96 | verts[:, 2], 97 | facets, 98 | scalars=scalars, 99 | vmin = vmin, 100 | vmax = vmax, 101 | opacity=opacity) 102 | self.plots['colorbar'] = mlab.colorbar(orientation='vertical', 103 | nb_labels=3) 104 | self.figure.scene.disable_render = False 105 | 106 | 107 | def plot_lines(self, label, verts, line, color=None, size=0, opacity=1.): 108 | 109 | if color == None: 110 | color = (1, 1, 1) 111 | if size == None: 112 | size = 1 113 | 114 | connections = np.array(line) 115 | 116 | self.figure.scene.disable_render = True 117 | mlab_obj = self.plots.get(label) 118 | if not (mlab_obj == None): 119 | self.clear(label) 120 | self.plots[label] = mlab.points3d(verts[:, 0], verts[:, 1], verts[:, 2], color=color, scale_factor=0, 121 | opacity=opacity) 122 | self.plots[label].mlab_source.dataset.lines = connections 123 | mlab.pipeline.surface(self.plots[label], color=color, opacity=opacity, 124 | representation='wireframe', 125 | line_width=size, 126 | name='Connections') 127 | 128 | self.figure.scene.disable_render = False 129 | 130 | def plot_points(self, label, X, color=None, size=None, mode=None, opacity=1, 131 | plot_text = False, text_to_plot = []): 132 | 133 | mlab.figure(self.figure.name) 134 | 135 | if color == None: 136 | color = (1, 0, 0) 137 | 138 | if size == None and mode == None or size == 0: 139 | size = 1 140 | mode = 'point' 141 | if size 
== None: 142 | size = 1 143 | if mode == None: 144 | mode = 'sphere' 145 | if len(text_to_plot)==0 and plot_text: 146 | text_to_plot = range(X.shape[0]) 147 | 148 | if isinstance(X, list): 149 | X = np.array(X) 150 | 151 | if len(X.shape) == 1: 152 | X = np.array([X]) 153 | 154 | mlab_obj = self.plots.get(label) 155 | if not mlab_obj == None: 156 | self.clear(label) 157 | if isinstance(color, tuple): 158 | self.plots[label] = mlab.points3d(X[:, 0], X[:, 1], X[:, 2], color=color, scale_factor=size, mode=mode, 159 | opacity=opacity) 160 | else: 161 | self.plots[label] = mlab.points3d(X[:, 0], X[:, 1], X[:, 2], color, scale_factor=size, 162 | scale_mode='none', 163 | mode=mode, opacity=opacity) 164 | if plot_text: 165 | if len(text_to_plot) == len(X): 166 | index_label = label+'_index' 167 | self.plot_text(index_label, X, 168 | [str(index) for index in text_to_plot], size = size) 169 | 170 | def plot_text(self, label, X, text, size=1, color=(1, 1, 1)): 171 | 172 | self.figure.scene.disable_render = True 173 | 174 | scale = (size, size, size) 175 | mlab_objs = self.plots.get(label) 176 | 177 | if mlab_objs != None: 178 | if len(mlab_objs) != len(text): 179 | for obj in mlab_objs: 180 | obj.remove() 181 | self.plots.pop(label) 182 | 183 | mlab_objs = self.plots.get(label) 184 | if not mlab_objs == None: 185 | self.clear(label) 186 | if mlab_objs == None: 187 | text_objs = [] 188 | for x, t in zip(X, text): 189 | if len(x) == 3: 190 | text_objs.append(mlab.text3d(x[0], x[1], x[2], str(t), scale=scale, color=color)) 191 | if len(x) == 2: 192 | text_objs.append( 193 | mlab.text(x[0], x[1], str(t), 194 | color=color, width = size)) 195 | self.plots[label] = text_objs 196 | elif len(mlab_objs) == len(text): 197 | for i, obj in enumerate(mlab_objs): 198 | obj.position = X[i, :] 199 | obj.text = str(text[i]) 200 | obj.scale = scale 201 | 202 | self.figure.scene.disable_render = False 203 | 204 | def plot_element_ids(self, label,mesh, elem_ids_to_plot = [], 205 | size=1, colour=(1, 1, 1)): 206 | if len(elem_ids_to_plot) == 0: 207 | elem_ids_to_plot = range(mesh.elements.shape[0]) 208 | mlab_obj = self.plots.get(label) 209 | if not mlab_obj == None: 210 | self.clear(label) 211 | 212 | for idx, element in enumerate(mesh.elements): 213 | if idx in elem_ids_to_plot: 214 | 215 | position = np.mean(mesh.nodes[element[-4:]], axis=0 ) 216 | self.plot_text('{0}{1}'.format(label, idx), 217 | [position], [idx], size=size, color=colour) 218 | 219 | def plot_dicoms(self, label, scan): 220 | # scan = biv_model._load_dicom_attributes(dicom_files) 221 | 222 | mlab.figure(self.figure.name) 223 | 224 | mlab_objs = self.plots.get(label) 225 | if mlab_objs == None: 226 | src = mlab.pipeline.scalar_field(scan.values) 227 | src.origin = scan.origin 228 | src.spacing = scan.spacing 229 | plane = mlab.pipeline.image_plane_widget(src, 230 | plane_orientation='z_axes', 231 | slice_index=int(0.5 * scan.num_slices), 232 | colormap='black-white') 233 | self.plots[label] = {} 234 | self.plots[label]['src'] = src 235 | self.plots[label]['plane'] = plane 236 | self.plots[label]['filepaths'] = scan.filepaths 237 | else: 238 | self.plots[label]['src'].origin = scan.origin 239 | self.plots[label]['src'].spacing = scan.spacing 240 | self.plots[label]['src'].scalar_data = scan.values 241 | self.plots[label]['plane'].update_pipeline() 242 | self.plots[label]['filepaths'] = scan.filepaths 243 | 244 | 245 | def plot_mesh(self, label, mesh, scalars =None, vmin = None, vmax = None, 246 | face_colour=(1, 0, 0), opacity=0.5, line_colour = (1, 1, 
1), 247 | line_size=1., line_opacity=1., node_colour=(1,0,1), 248 | node_size=0, mode = 'surface'): 249 | 250 | 251 | lines = mesh.get_lines() 252 | verts = mesh.get_nodes() 253 | matlist = mesh.get_materials() 254 | if not( mode == 'wireframe'): 255 | if not( scalars is None) : 256 | elem_list = np.where(matlist == matlist[0])[0] 257 | faces = mesh.get_surface(elem_list) 258 | self.plot_surfaces('{0}_faces'.format(label), verts, 259 | faces, scalars=scalars, vmax=vmax, vmin=vmin, 260 | color=face_colour, opacity=opacity) 261 | elif len(np.unique(matlist)) == 1: 262 | elem_list = np.where(matlist == matlist[0])[0] 263 | faces = mesh.get_surface(elem_list) 264 | self.plot_surfaces('{0}_faces'.format(label), verts, 265 | faces, 266 | color=face_colour, opacity=opacity) 267 | 268 | 269 | else: 270 | if len(np.unique(matlist)) > 1: 271 | norm = matplotlib.colors.Normalize( 272 | vmin=np.min(mesh.materials), 273 | vmax=np.max(mesh.materials)) 274 | for indx, mat in enumerate(np.unique(matlist)): 275 | elem_list = np.where(matlist==mat)[0] 276 | faces = mesh.get_surface(elem_list) 277 | self.plot_surfaces('{0}_{1}_faces'.format(label,mat), verts, faces, 278 | color=cmap(norm(indx))[:3],opacity=opacity) 279 | 280 | 281 | self.plot_lines('{0}_lines'.format(label), verts, lines, color=line_colour, 282 | size=line_size, opacity=line_opacity) 283 | if node_size > 0 : 284 | self.plot_points('{0}_nodes'.format(label), verts, color=node_colour, size=node_size) 285 | 286 | def make_animation(self,output_file,mesh, 287 | node_position, duration, 288 | elem_groups = None, 289 | elem_groups_color = None, 290 | shade_elem = None, 291 | t_scalars = None, 292 | opacity=1, vmax=None, 293 | vmin= None, view =None, annotation = None, 294 | annotation_color = None): 295 | 296 | if (elem_groups_color is not None) and \ 297 | ( not (len(elem_groups) == len(elem_groups_color))): 298 | warnings.warn('elem_groups and elem_groups_color should have the ' 299 | 'same length') 300 | elem_groups_color = None 301 | int_node_position = node_position[0] 302 | ext_node_position = None 303 | if len(node_position) == 2: 304 | ext_node_position = node_position[1] 305 | nb_frames = len(int_node_position) 306 | fps = int(nb_frames/duration) 307 | 308 | if not (t_scalars is None): 309 | if vmax is None: 310 | vmax = np.max(t_scalars) 311 | if vmin is None: 312 | vmin = np.min(t_scalars) 313 | 314 | def make_frame(t): 315 | #mlab.clf() 316 | 317 | 318 | new_mesh = deepcopy(mesh) 319 | new_mesh.set_nodes(int_node_position[int(t*fps)]) 320 | new_mesh_ext = None 321 | if ext_node_position is not None: 322 | new_mesh_ext = deepcopy(mesh) 323 | new_mesh_ext.set_nodes(ext_node_position[int(t*fps)]) 324 | 325 | if t_scalars is None: 326 | if elem_groups is None: 327 | self.plot_mesh('mesh', new_mesh, opacity=opacity) 328 | else: 329 | cmap_mesh = matplotlib.cm.get_cmap('gist_rainbow') 330 | norm_mesh = matplotlib.colors.Normalize(vmin=0, 331 | vmax=len( 332 | elem_groups) - 1) 333 | plot_model = deepcopy(new_mesh) 334 | for index_group, group in enumerate(elem_groups): 335 | plot_model.set_elements(new_mesh.elements[group]) 336 | 337 | if elem_groups_color is None: 338 | group_color = cmap_mesh( norm_mesh(index_group))[:3] 339 | else: 340 | group_color = elem_groups_color[index_group] 341 | 342 | self.plot_mesh(str(index_group), plot_model, 343 | face_colour=group_color, 344 | opacity = opacity) 345 | if new_mesh_ext is not None: 346 | self.plot_mesh(str(index_group), plot_model, 347 | face_colour=group_color, 348 | opacity=opacity, 
line_opacity=0) 349 | new_mesh_ext.set_elements(new_mesh.elements[group]) 350 | self.plot_mesh(str(index_group)+'ext', 351 | new_mesh_ext, mode='wireframe' ) 352 | else: 353 | if shade_elem: 354 | shaded_mesh = deepcopy(new_mesh) 355 | shaded_mesh.set_elements(new_mesh.elements[shade_elem]) 356 | self.plot_mesh('mesh_shaded', shaded_mesh, mode='wireframe', 357 | vmax=vmax, line_opacity=0.5) 358 | elements = [x for x in new_mesh.elements 359 | if x not in shaded_mesh.elements] 360 | new_mesh.set_elements(elements) 361 | self.plot_mesh('mesh', new_mesh, 362 | scalars=t_scalars[int(t * fps)], vmin=vmin, 363 | vmax=vmax, line_opacity=0.1, opacity = 1) 364 | else: 365 | 366 | 367 | self.plot_mesh('mesh', new_mesh, scalars=t_scalars[int(t*fps)], vmin = vmin, 368 | vmax= vmax, opacity=opacity) 369 | if new_mesh_ext is not None: 370 | new_mesh_ext.set_elements(new_mesh.elements) 371 | self.plot_mesh('mesh_ext', 372 | new_mesh_ext, mode='wireframe') 373 | 374 | if not (annotation is None): 375 | if annotation_color is not None: 376 | self.plot_text('annotation', [[0.6,0.9]], 377 | [str(annotation[int(t*fps)])], 378 | color=annotation_color, 379 | size = 0.3) 380 | else: 381 | self.plot_text('annotation', [[0.6, 0.9]], 382 | [str(annotation[int(t * fps)])], 383 | size = 0.3) 384 | 385 | 386 | if not (view is None): 387 | if np.isscalar(view): 388 | mlab.view(view) 389 | elif len(view) == 2: 390 | mlab.view(view[0],view[1]) 391 | elif len(view) == 3: 392 | mlab.view(view[0], view[1], view[2]) 393 | elif len(view)==4: 394 | mlab.view(view[0], view[1], view[2], 395 | roll = view[3]) 396 | elif len(view) == 5: 397 | mlab.view(view[0], view[1], view[2],view[3], 398 | roll=view[4]) 399 | 400 | self.figure.scene._lift() 401 | return mlab.screenshot(antialiased=True) 402 | 403 | animation = mpy.VideoClip(make_frame, duration=duration) 404 | animation.write_gif(output_file, fps = fps) 405 | 406 | 407 | def close_all(self): 408 | mlab.close(all = True) 409 | # def plot_image_stack(biv_model,image_stack): 410 | 411 | def plot_local_cs(self, label, mesh, local_cs = None, axis='xyz', 412 | scale_factor= 3): 413 | ''' 414 | Plot a coordinate system given the positions and precomputed vectors 415 | for the x, y, z axes 416 | `label`: label of the plot as string 417 | `mesh`: mesh object; the element centroids give the arrow positions 418 | `local_cs`: nx3x3 matrix with the vectors defining the CS at each point 419 | `axis`: axis to plot (x,y,z,xy,xz,yz,xyz) 420 | `scale_factor`: scale factor for CS plot 421 | :return: 422 | ''' 423 | if local_cs is None: 424 | local_cs = mesh.get_local_cs() 425 | mlab_obj = self.plots.get(label + 'y_axis') 426 | if not mlab_obj == None: 427 | self.clear(label + 'y_axis') 428 | mlab_obj = self.plots.get(label + 'z_axis') 429 | if not mlab_obj == None: 430 | self.clear(label + 'z_axis') 431 | self.figure.scene.disable_render = True 432 | 433 | position = mesh.nodes[mesh.elements].mean(axis = 1) 434 | 435 | if 'x' in axis: 436 | x = np.array(position[:, 0]) 437 | y = np.array(position[:, 1]) 438 | z = np.array(position[:, 2]) 439 | u = np.array(local_cs[:, 0][:, 0]) 440 | v = np.array(local_cs[:, 0][:, 1]) 441 | w = np.array(local_cs[:, 0][:, 2]) 442 | self.plots[label + 'x_axis'] = mlab.quiver3d(x, y, z, u, v, w, 443 | line_width=5, 444 | color=(1, 0, 0), 445 | scale_factor=scale_factor, 446 | mode='arrow', 447 | resolution=25) 448 | if 'y' in axis: 449 | x = np.array(position[:, 0]) 450 | y = np.array(position[:, 1]) 451 | z = np.array(position[:, 2]) 452 | u = np.array(local_cs[:, 1][:, 0]) 453 | v = np.array(local_cs[:, 1][:, 1]) 454 | w = np.array(local_cs[:,
1][:, 2]) 455 | self.plots[label + 'y_axis'] = mlab.quiver3d(x, y, z, u, v, w, 456 | line_width=5, 457 | color=(0, 1, 0), 458 | scale_factor=scale_factor, 459 | mode='arrow', 460 | resolution=25) 461 | if 'z' in axis: 462 | x = np.array(position[:, 0]) 463 | y = np.array(position[:, 1]) 464 | z = np.array(position[:, 2]) 465 | u = np.array(local_cs[:, 2][:, 0]) 466 | v = np.array(local_cs[:, 2][:, 1]) 467 | w = np.array(local_cs[:, 2][:, 2]) 468 | self.plots[label + 'z_axis'] = mlab.quiver3d(x, y, z, u, v, w, 469 | line_width=5, 470 | color=(0, 0, 1), 471 | scale_factor=scale_factor, 472 | mode='arrow', 473 | resolution=25) 474 | 475 | self.figure.scene.disable_render = False 476 | def plot_mesh_local_cs(self, label, mesh, elem_list=None, axis='xyz', 477 | scale_factor=3): 478 | 479 | ''' 480 | Plot the local coordinate system for each element in the mesh, or for the 481 | elements in elem_list 482 | :param label: label of the plot as string 483 | :param mesh: mesh object 484 | :param elem_list: list of element indices for which to plot the local cs 485 | :param axis: axis to be plotted (x,y,z,xy,yz,xz,xyz) 486 | :param scale_factor: scale factor for cs plot 487 | :return: 488 | ''' 489 | local_cs = mesh.get_local_cs(elem_list) 490 | 491 | self.plot_local_cs(label, mesh, local_cs, axis, scale_factor) 492 | 493 | 494 | 495 | 496 | -------------------------------------------------------------------------------- /src/surface/README.md: -------------------------------------------------------------------------------- 1 | 2 | Biventricular model fitting framework 3 | ----------------------------------------------- 4 | Author: Laura Dal Toso 5 | 6 | Date: 4 Feb 2022 7 | 8 | ----------------------------------------------- 9 | 10 | This code performs patient-specific biventricular mesh customization. 11 | 12 | The process takes place in 2 steps: 13 | 1. correction of breath-hold misregistration between short-axis slices, and 14 | 2. deformation of the subdivided template mesh to fit the manual contours while preserving 15 | the topology of the mesh. 16 | 17 | Documentation: https://github.kcl.ac.uk/pages/YoungLab/BiV_Modelling/ 18 | 19 | 20 | Contents: 21 | ----------------------------------------------- 22 | - BiVFitting: contains the code that performs patient-specific biventricular mesh customization. 23 | - model: contains .txt files required by the fitting modules 24 | - results: output folder 25 | - test_data: contains one subfolder for each patient. Each subfolder contains the GPFile and SliceInfoFile relative to one patient. 26 | - config_params: configuration of parameters for the fitting 27 | - perform_fit: script that contains the routine to perform the biventricular fitting 28 | - run_parallel: allows fitting using parallel CPUs. Each patient is assigned to one CPU. 29 | 30 | Usage: 31 | ----------------------------------------------- 32 | 33 | **Step 1** 34 | 35 | Download the repository, and install the packages listed in requirements.txt. 36 | 37 | **Step 2** 38 | 39 | First, the user needs to set the parameters in config_params.py. The variable 'measure_shift_ed_only' can be set to True if the user wants to measure the shift at the ED frame only, and apply it to the other frames in the time series to correct for breath-hold misalignment. If 'measure_shift_ed_only' is set to False, the shift will be measured and applied at each frame.
The variable 'sampling' can be used to subsample the input guide points (sampling = 2 means every other point will be used). The variable 'workers' defines the number of CPUs to be used for the fitting. 40 | 41 | **Step 3** 42 | 43 | At this point, you can modify the script perform_fit.py, which performs the model fitting. 44 | 45 | - Check that the relative paths defined after __name__ == '__main__' are correct. 46 | - Check that the filename and filenameInfo variables point to the correct guide points and slice information files that you want to process. 47 | 48 | You may need to change other variables in case you want to: 49 | - Fit the model only to a subset of all the available frames. To do so, change the 'frames_to_fit' parameter by assigning the frame numbers that should be fitted (e.g. frames_to_fit = [0,1,2]) 50 | - Output a plot of the fitted models in HTML. Single-frame plots can be generated by uncommenting the lines starting with "plot(go.Figure..". Time series plots that gather all time frames are controlled by the variables TimeSeries_step1 and TimeSeries_step2. To generate time series plots, uncomment all the lines containing these variables. 51 | 52 | 53 | **Step 4** 54 | 55 | After changing the script perform_fit.py according to your needs, there are two options to perform the model fitting. The first option is to fit the list of input patients sequentially, by running perform_fit.py. 56 | 57 | To speed up the fitting, you may want to process different cases on parallel CPUs. To perform the fitting in parallel, you can use run_parallel.py. The relative paths at the bottom of this script need to be changed first, then the script can be launched to generate the fitted models. 58 | 59 | Credits 60 | ------------------------------------ 61 | Based on work by: Anna Mira, Liandong Lee, Richard Burns 62 | -------------------------------------------------------------------------------- /src/surface/config.ini.example: -------------------------------------------------------------------------------- 1 | [default] 2 | DataDir = /path/to/data/directory 3 | 4 | [myprofile] 5 | DataDir = /path/to/other/data/directory 6 | -------------------------------------------------------------------------------- /src/surface/config_params.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------- 2 | # This file was created by LDT on 4 Feb 2022 3 | # ----------------------------------------------- 4 | # This file contains the variables that are most often changed to test the fitting code 5 | # with different settings. 6 | # If deemed redundant (i.e. if the code is always run with the same settings) this script can be incorporated in run_parallel.py 7 | # or in perform_fit.py 8 | # ----------------------------------------------- 9 | 10 | import multiprocessing 11 | 12 | enable_visualizations = False 13 | measure_shift_ed_only = True # measure the breath-hold shift at the ED frame only?
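# Illustrative example (an editorial sketch, not a recommendation): to
# re-measure the shift at every frame and use every second guide point,
# the settings above and below could be changed to e.g.
#   measure_shift_ed_only = False
#   sampling = 2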
14 | 15 | # Set the following weights: 16 | # RB set of weights: [10, 1e6, 500] 17 | # LL set of weights: [200, 500, 500] or [40,1e5,0.01] 18 | 19 | # the following lines contain the set of weights used by the 20 | # MultiThreadSmoothingED() and SolveProblemCVXOPT() functions 21 | weight_gp = 100 # 100 #200 22 | low_smoothing_weight = 1e6 # 1e3 #1e4 23 | transmural_weight = 0.01 # 1e3 #0.01 24 | 25 | # set the sampling to be used by the GPDataset() module 26 | sampling = 1 # sampling = 1 means all guide points are used 27 | 28 | workers = 1 # number of processes to be opened in parallel = number of CPUs that one wants to use 29 | 30 | if workers > multiprocessing.cpu_count(): 31 | raise ValueError('The number of workers exceeds the number of available CPUs') 32 | -------------------------------------------------------------------------------- /src/surface/contour_qc_summary.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from dask import dataframe as dd 3 | import shutil 4 | import os 5 | import numpy as np 6 | import nibabel as nib 7 | from collections import Counter 8 | 9 | 10 | def main(): 11 | qc_file = os.path.join(os.sep, "netapp", "cme_digital_twins", "UKBB_88878", 12 | "contour2gp_QS_all.log") 13 | 14 | passed_subject_ids = [] 15 | num_total_subjects = 0 16 | num_subjects_w_all_files = 0 17 | fails = {"No 2Ch file": [], 18 | "No 3Ch file": [], 19 | "No 4Ch file": [], 20 | "No SAX file": [], 21 | "SAX has less than 5 slices": [], 22 | "SAX QC fail": [], 23 | "LAX 2Ch QC fail": [], 24 | "LAX 3Ch QC fail": [], 25 | "LAX 4Ch QC fail": []} 26 | 27 | with open(qc_file) as f: 28 | lines = f.readlines() 29 | 30 | for line in lines: 31 | if line.startswith("INFO: Processing subject"): 32 | qc_pass = True 33 | all_files_exist = True 34 | current_subject_id = line[25:43] 35 | 36 | if line.startswith("INFO: Completed processing subject"): 37 | num_total_subjects += 1 38 | 39 | if qc_pass: 40 | passed_subject_ids.append(current_subject_id) 41 | 42 | if all_files_exist: 43 | num_subjects_w_all_files += 1 44 | 45 | if "LAX 2 chamber file not found" in line: 46 | fails["No 2Ch file"].append(current_subject_id) 47 | qc_pass = False 48 | all_files_exist = False 49 | 50 | if "LAX 3 chamber file not found" in line: 51 | fails["No 3Ch file"].append(current_subject_id) 52 | qc_pass = False 53 | all_files_exist = False 54 | 55 | if "LAX 4 chamber file not found" in line: 56 | fails["No 4Ch file"].append(current_subject_id) 57 | qc_pass = False 58 | all_files_exist = False 59 | 60 | if "SAX file not found" in line: 61 | fails["No SAX file"].append(current_subject_id) 62 | qc_pass = False 63 | all_files_exist = False 64 | 65 | if "SAX has less than 5 slices" in line: 66 | fails["SAX has less than 5 slices"].append(current_subject_id) 67 | qc_pass = False 68 | 69 | if "SAX slice fail rate" in line: 70 | total = int(line[-3:]) 71 | fail = int(line[46:49]) 72 | success = total - fail 73 | 74 | if success < 5: 75 | fails["SAX QC fail"].append(current_subject_id) 76 | qc_pass = False 77 | 78 | if "LAX 2 Ch failed QC" in line: 79 | fails["LAX 2Ch QC fail"].append(current_subject_id) 80 | qc_pass = False 81 | 82 | # if "LAX 3 Ch failed QC" in line: 83 | # fails["LAX 3Ch QC fail"].append(current_subject_id) 84 | # qc_pass = False 85 | 86 | if "LAX 4 Ch failed QC" in line: 87 | fails["LAX 4Ch QC fail"].append(current_subject_id) 88 | qc_pass = False 89 | 90 | # if line.startswith("ERROR:"): 91 | # qc_pass = False 92 | 93 | for cause,
fail_subjects in fails.items(): 94 | print(f"{cause} rate = {len(fail_subjects) / num_total_subjects * 100:.2f}%") 95 | 96 | print(f"All views exist for {num_subjects_w_all_files} out of {num_total_subjects} subjects.") 97 | 98 | print( 99 | f"Contour QC fail rate = {(num_subjects_w_all_files - len(passed_subject_ids)) / num_subjects_w_all_files * 100:.2f}%") 100 | 101 | print( 102 | f"Contour QC Passed = {len(passed_subject_ids)} out of {num_subjects_w_all_files} subjects") 103 | 104 | output_folder = os.path.join(os.sep, "netapp", "cme_digital_twins", "UKBB_88878", "qc_overview") 105 | 106 | with open(os.path.join(output_folder, "subjects_to_mesh.txt"), 'w') as fp: 107 | fp.write('\n'.join(passed_subject_ids)) 108 | 109 | 110 | if __name__ == "__main__": 111 | main() 112 | -------------------------------------------------------------------------------- /src/surface/mass_volume/Mass_volume.py: -------------------------------------------------------------------------------- 1 | ''' 2 | 15/09/2022 - Laura Dal Toso 3 | Based on A.M's scripts. 4 | Script for the measurement of LV and RV mass and volume from biventricular models. 5 | ''' 6 | 7 | import argparse 8 | import csv 9 | import numpy as np 10 | import os 11 | import re 12 | 13 | from pathlib import Path 14 | 15 | from mesh import Mesh 16 | 17 | 18 | def conv(filepath): 19 | outname = str(filepath.name).replace('model', 'Model').replace('txt', 'csv') 20 | outpath = filepath.parent / outname 21 | 22 | with open(filepath) as f: 23 | inlines = f.read().splitlines() 24 | 25 | outlines = [] 26 | for line in inlines[1:]: 27 | outlines.append(','.join(line.split()[:3]) + '\n') 28 | 29 | with open(outpath, 'w+') as f: 30 | f.write('x,y,z\n') 31 | f.writelines(outlines) 32 | 33 | return outpath 34 | 35 | def find_volume(case_name: str, model_file: os.PathLike, output_file: os.PathLike, biv_model_folder: os.PathLike, precision: int) -> None: 36 | ''' 37 | # Authors: ldt, cm 38 | # Date: 09/22, revised 08/24 by cm 39 | 40 | This function measures the mass and volume of the LV and RV. 41 | #-------------------------------------------------------------- 42 | Inputs: case_name = model case name 43 | model_file = fitted model converted to .csv by conv(), containing only data relative to one frame 44 | output_file = path to the output csv file 45 | biv_model_folder = folder with the template model .txt files; precision = number of decimals in the output 46 | Output: a row of masses and volumes appended to the output csv file 47 | ''' 48 | 49 | # get the frame number 50 | frame_name = int(re.search(r'timeframe(\d+)\.csv', str(model_file))[1]) 51 | 52 | # read GP file 53 | control_points = np.loadtxt(model_file, delimiter=',', skiprows=1, usecols=[0, 1, 2]).astype(float) 54 | 55 | # assign values to dict 56 | results_dict = {k: '' for k in ['lv_vol', 'rv_vol', 'lv_epivol', 'rv_epivol', 'lv_mass', 'rv_mass']} 57 | 58 | subdivision_matrix_file = biv_model_folder / "subdivision_matrix.txt" 59 | assert subdivision_matrix_file.exists(), \ 60 | f"biv_model_folder does not exist. Cannot find {subdivision_matrix_file} file!" 61 | 62 | elements_file = biv_model_folder / 'ETIndicesSorted.txt' 63 | assert elements_file.exists(), \ 64 | f"biv_model_folder does not exist. Cannot find {elements_file} file!" 65 | 66 | material_file = biv_model_folder / 'ETIndicesMaterials.txt' 67 | assert material_file.exists(), \ 68 | f"biv_model_folder does not exist. Cannot find {material_file} file!" 69 | 70 | thru_wall_file = biv_model_folder / 'epi_to_septum_ETindices.txt' 71 | assert thru_wall_file.exists(), \ 72 | f"biv_model_folder does not exist. Cannot find {thru_wall_file} file!"
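    # Editorial summary of what follows (added note, not in the original):
    # the coarse control mesh is lifted to the subdivided surface via
    #     vertices = subdivision_matrix @ control_points
    # and each mass is obtained as
    #     mass [g] = (epicardial volume - endocardial volume) [mL] * 1.05
    # with 1.05 g/mL the commonly assumed myocardial tissue density.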
73 | 74 | if control_points.shape[0] > 0: 75 | subdivision_matrix = (np.loadtxt(subdivision_matrix_file)).astype(float) 76 | faces = np.loadtxt(elements_file).astype(int)-1 77 | mat = np.loadtxt(material_file, dtype='str') 78 | 79 | # A.M. :there is a gap between septum surface and the epicardial 80 | # Which needs to be closed if the RV/LV epicardial volume is needed 81 | # this gap can be closed by using the et_thru_wall facets 82 | et_thru_wall = np.loadtxt(thru_wall_file, delimiter='\t').astype(int)-1 83 | 84 | ## convert labels to integer corresponding to the sorted list 85 | # of unique labels types 86 | unique_material = np.unique(mat[:,1]) 87 | 88 | materials = np.zeros(mat.shape) 89 | for index, m in enumerate(unique_material): 90 | face_index = mat[:, 1] == m 91 | materials[face_index, 0] = mat[face_index, 0].astype(int) 92 | materials[face_index, 1] = [index] * np.sum(face_index) 93 | 94 | # add material for the new facets 95 | new_elem_mat = [list(range(materials.shape[0], materials.shape[0] + et_thru_wall.shape[0])), 96 | [len(unique_material)] * len(et_thru_wall)] 97 | 98 | vertices = np.dot(subdivision_matrix, control_points) 99 | faces = np.concatenate((faces.astype(int), et_thru_wall)) 100 | materials = np.concatenate((materials.T, new_elem_mat), axis=1).T.astype(int) 101 | 102 | model = Mesh('mesh') 103 | model.set_nodes(vertices) 104 | model.set_elements(faces) 105 | model.set_materials(materials[:, 0], materials[:, 1]) 106 | 107 | # components list, used to get the correct mesh components: 108 | # ['0 AORTA_VALVE' '1 AORTA_VALVE_CUT' '2 LV_ENDOCARDIAL' '3 LV_EPICARDIAL' 109 | # ' 4 MITRAL_VALVE' '5 MITRAL_VALVE_CUT' '6 PULMONARY_VALVE' '7 PULMONARY_VALVE_CUT' 110 | # '8 RV_EPICARDIAL' '9 RV_FREEWALL' '10 RV_SEPTUM' '11 TRICUSPID_VALVE' 111 | # '12 TRICUSPID_VALVE_CUT', '13' THRU WALL] 112 | 113 | lv_endo = model.get_mesh_component([0, 2, 4], reindex_nodes=False) 114 | 115 | # Select RV endocardial 116 | rv_endo = model.get_mesh_component([6, 9, 10, 11], reindex_nodes=False) 117 | 118 | # switching the normal direction for the septum 119 | rv_endo.elements[rv_endo.materials == 10, :] = \ 120 | np.array([rv_endo.elements[rv_endo.materials == 10, 0], 121 | rv_endo.elements[rv_endo.materials == 10, 2], 122 | rv_endo.elements[rv_endo.materials == 10, 1]]).T 123 | 124 | lv_epi = model.get_mesh_component([0, 1, 3, 4, 5, 10, 13], reindex_nodes=False) 125 | # switching the normal direction for the thru wall 126 | lv_epi.elements[lv_epi.materials == 13, :] = \ 127 | np.array([lv_epi.elements[lv_epi.materials == 13, 0], 128 | lv_epi.elements[lv_epi.materials == 13, 2], 129 | lv_epi.elements[lv_epi.materials == 13, 1]]).T 130 | 131 | # switching the normal direction for the septum 132 | rv_epi = model.get_mesh_component([6, 7, 8, 10, 11, 12, 13], reindex_nodes=False) 133 | rv_epi.elements[rv_epi.materials == 10, :] = \ 134 | np.array([rv_epi.elements[rv_epi.materials == 10, 0], 135 | rv_epi.elements[rv_epi.materials == 10, 2], 136 | rv_epi.elements[rv_epi.materials == 10, 1]]).T 137 | 138 | lv_endo_vol = lv_endo.get_volume() 139 | rv_endo_vol = rv_endo.get_volume() 140 | lv_epi_vol = lv_epi.get_volume() 141 | rv_epi_vol = rv_epi.get_volume() 142 | 143 | rv_mass = (rv_epi_vol - rv_endo_vol) * 1.05 # mass in grams 144 | lv_mass = (lv_epi_vol - lv_endo_vol) * 1.05 145 | 146 | # assign values to dict 147 | results_dict['lv_vol'] = round(lv_endo_vol, precision) 148 | results_dict['rv_vol'] = round(rv_endo_vol, precision) 149 | results_dict['lv_epivol'] = round(lv_epi_vol, precision) 150 
| results_dict['rv_epivol'] = round(rv_epi_vol, precision) 151 | results_dict['lv_mass'] = round(lv_mass, precision) 152 | results_dict['rv_mass'] = round(rv_mass, precision) 153 | 154 | # append to the output_file 155 | with open(output_file, 'a', newline='') as f: 156 | # print out measurements in spreadsheet 157 | writer = csv.writer(f) 158 | writer.writerow([case_name, frame_name, results_dict['lv_vol'], results_dict['lv_mass'], 159 | results_dict['rv_vol'], results_dict['rv_mass'], 160 | results_dict['lv_epivol'], results_dict['rv_epivol']]) 161 | 162 | def main(): 163 | parser = argparse.ArgumentParser() 164 | parser.add_argument('--job', '-j', action='store', default='default', help='job identifier') 165 | parser.add_argument('--data-dir', '-d', action='store', help='path to data directory') 166 | parser.add_argument('--input-dir', '-I', action='store', default='Mesh_Outputs', help='name of input directories') 167 | parser.add_argument('--model-dir', '-m', action='store', help='path to model directory') 168 | parser.add_argument('--instance', '-i', type=int, action='store', default=2, help='instance to be processed') 169 | parser.add_argument('--all', '-a', action='store_true', help='process all subjects') 170 | parser.add_argument('--subject', '-s', action='store', help='subject id to be processed') 171 | parser.add_argument('--start', '-b', action='store', type=int, help='index of first subject id to be processed') 172 | parser.add_argument('--number', '-n', action='store', type=int, help='number of subjects to be processed from first subject id') 173 | parser.add_argument('--precision', '-p', action='store', type=int, default=2, help='output precision') 174 | args = parser.parse_args() 175 | 176 | output_file = f'mv-{args.job}.csv' 177 | fieldnames = ['subject', 'timeframe', 178 | 'lv_vol', 'lv_mass', 179 | 'rv_vol', 'rv_mass', 180 | 'lv_epivol', 'rv_epivol'] 181 | 182 | with open(output_file, 'w') as f: 183 | writer = csv.DictWriter(f, fieldnames=fieldnames) 184 | writer.writeheader() 185 | 186 | data_dir = args.data_dir 187 | model_dir = Path(args.model_dir) 188 | instance = args.instance 189 | 190 | sids = [n for n in os.listdir(data_dir) if os.path.isdir(os.path.join(data_dir, n))] 191 | if args.all: 192 | subject_ids = sorted(sids) 193 | elif args.subject: 194 | sid = args.subject 195 | if sid in sids: 196 | subject_ids = [sid] 197 | else: 198 | subject_ids = [] 199 | elif args.start is not None and args.start >= 0 and args.start < len(sids): 200 | if args.number is not None and args.number > 0: 201 | end = args.start + args.number - 1 202 | subject_ids = sorted(sids)[args.start:end+1] 203 | else: 204 | subject_ids = sorted(sids)[args.start:] 205 | else: 206 | subject_ids = [] 207 | 208 | for subject_id in subject_ids: 209 | mesh_dir = Path(data_dir) / subject_id / f'Instance_{instance}' / args.input_dir 210 | 211 | model_filepaths = Path(mesh_dir).glob('*model*.txt') 212 | 213 | for model_filepath in model_filepaths: 214 | csv_filepath = conv(model_filepath) 215 | find_volume(subject_id, csv_filepath, output_file, model_dir, args.precision) 216 | 217 | if __name__ == '__main__': 218 | main() 219 | -------------------------------------------------------------------------------- /src/surface/mesh_txt_to_vtk.py: -------------------------------------------------------------------------------- 1 | # This script converts the text model output files from the mesh fitting code to vtk files 2 | 3 | #!/usr/bin/env python3 4 | import argparse 5 | import configparser 6 | import logging 7
| import os 8 | import numpy as np 9 | import time 10 | import pandas as pd 11 | from pathlib import Path 12 | import re 13 | import sys 14 | 15 | from .BiVFitting import BiventricularModel 16 | from .BiVFitting import Surface 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | def write_vtk_polydata(filename, vertices, **kwargs): 21 | """Writes mesh as .vtk file format for Paraview (Kitware (c)) visualisation software. 22 | ---------- 23 | Author: LDT, modified from code given by Debbie Zhao 24 | ---------- 25 | Input: 26 | ---------- 27 | filename: str 28 | path to output file with .vtk extension 29 | vertices: ndarray 30 | numpy Nx3 array of the coordinates of the mesh vertices 31 | (OPTIONAL) faces: ndarray 32 | numpy Nx3 (triangular) or Nx4 (quadrilateral) array of the faces 33 | """ 34 | faces = kwargs.get('faces', None) 35 | vertices = np.asarray(vertices) 36 | nv = vertices.shape[0] 37 | 38 | if faces is not None: 39 | nf = faces.shape[0] 40 | ns = faces.shape[1] 41 | 42 | with open(filename, 'w') as f: 43 | 44 | f.write('# vtk DataFile Version 4.2\n') 45 | f.write('Legacy vtk file generated by python\n') 46 | f.write('ASCII\n') 47 | f.write('DATASET POLYDATA\n') 48 | 49 | # write vertex data 50 | f.write('\nPOINTS %u float\n' % nv) 51 | for vertex in vertices: 52 | f.write('%f %f %f\n' % (vertex[0], vertex[1], vertex[2])) 53 | 54 | # write face (polygon) data 55 | f.write('\nPOLYGONS %u %u\n' % (nf, nf * (ns + 1))) 56 | if ns == 3: # triangle 57 | [f.write('3 %u %u %u\n' % (face[0], face[1], face[2])) for face in faces] 58 | elif ns == 4: # quadrangle 59 | [f.write('4 %u %u %u %u\n' % (face[0], face[1], face[2], face[3])) for face in 60 | faces] 61 | 62 | else: 63 | with open(filename, 'w') as f: 64 | 65 | f.write('# vtk DataFile Version 4.2\n') 66 | f.write('Legacy vtk file generated by python\n') 67 | f.write('ASCII\n') 68 | f.write('DATASET POLYDATA\n') 69 | 70 | # write vertex data 71 | f.write('\nPOINTS %u float\n' % nv) 72 | for vertex in vertices: 73 | f.write('%f %f %f\n' % (vertex[0], vertex[1], vertex[2])) 74 | 75 | 76 | def save_mesh_as_vtk(model_dir: str, mesh_dir: str, timeframe_num: int, force_overwrite: bool): 77 | """ 78 | The mesh files in .txt format in folder will be written as vtk files in the same 79 | folder. A different mesh is created for each label so that they can be visualized separately in 80 | paraview. 81 | 82 | Args: 83 | model_dir: Path to the folder where the heart atlas models are. 84 | mesh_dir: Path to the output folder of the mesh fitting code. This is where the meshes 85 | for an individual heart are. 
86 | 87 | Returns: 88 | 89 | """ 90 | subject_id = mesh_dir.split(os.sep)[-3] 91 | 92 | mesh_files = sorted([filename for filename in os.listdir(mesh_dir) if 93 | subject_id + '_model_timeframe' in filename]) 94 | 95 | for file in mesh_files: 96 | # Extract time frame id 97 | time_frame_id = int((re.search(r'_timeframe(.*).txt', file)).group(1)) 98 | 99 | if timeframe_num is not None and timeframe_num != time_frame_id: 100 | logger.debug('subject {}, timeframe {}: skipping, timeframe not requested'.format(subject_id, time_frame_id)) 101 | continue 102 | 103 | pv_file = os.path.join(mesh_dir, 'Mesh_pulmonary_valve_timeframe' + format(time_frame_id, '03') + '.vtk') 104 | if os.path.exists(pv_file) and not force_overwrite: 105 | logger.debug('subject {}, timeframe {}: skipping, mesh files exist'.format(subject_id, time_frame_id)) 106 | continue 107 | 108 | # read the mesh from text file as a biventricular model 109 | shifting_model = BiventricularModel(model_dir, os.path.join(mesh_dir, file)) 110 | 111 | fitted_nodes = pd.read_csv( 112 | os.path.join(mesh_dir, file), sep=r'\s+', skiprows=0).values[:, :3] 113 | shifting_model.update_control_mesh(fitted_nodes) 114 | 115 | # Extract data for LV endo 116 | faces_lv_endo = shifting_model.get_surface_faces(Surface.LV_ENDOCARDIAL) 117 | x = np.array(shifting_model.et_pos[:, 0]).T 118 | y = np.array(shifting_model.et_pos[:, 1]).T 119 | z = np.array(shifting_model.et_pos[:, 2]).T 120 | data = np.vstack((x, y, z)).T 121 | write_vtk_polydata(os.path.join(mesh_dir, 'Mesh_LV_endo_timeframe' + format(time_frame_id, 122 | "03") + 123 | '.vtk'), 124 | data, faces=faces_lv_endo) 125 | 126 | # RV free wall 127 | faces_rv_fw = shifting_model.get_surface_faces(Surface.RV_FREEWALL) 128 | write_vtk_polydata( 129 | os.path.join(mesh_dir, 'Mesh_RV_FW_timeframe' + format(time_frame_id, "03") + '.vtk'), 130 | data, faces=faces_rv_fw) 131 | 132 | # RV septum 133 | faces_rv_septum = shifting_model.get_surface_faces(Surface.RV_SEPTUM) 134 | write_vtk_polydata(os.path.join(mesh_dir, 135 | 'Mesh_RV_septum_timeframe' + format(time_frame_id, 136 | "03") + '.vtk'), data, 137 | faces=faces_rv_septum) 138 | 139 | # epicardium 140 | faces_epi = shifting_model.get_surface_faces(Surface.EPICARDIAL) 141 | write_vtk_polydata( 142 | os.path.join(mesh_dir, 'Mesh_epi_timeframe' + format(time_frame_id, "03") + '.vtk'), 143 | data, faces=faces_epi) 144 | 145 | # mitral valve 146 | faces_mitral = shifting_model.get_surface_faces(Surface.MITRAL_VALVE) 147 | write_vtk_polydata(os.path.join(mesh_dir, 148 | 'Mesh_mitral_valve_timeframe' + format(time_frame_id, 149 | "03") + '.vtk'), 150 | data, faces=faces_mitral) 151 | 152 | # aorta valve 153 | faces_aorta = shifting_model.get_surface_faces(Surface.AORTA_VALVE) 154 | write_vtk_polydata(os.path.join(mesh_dir, 155 | 'Mesh_aorta_valve_timeframe' + format(time_frame_id, 156 | "03") + '.vtk'), 157 | data, faces=faces_aorta) 158 | 159 | # tricuspid valve 160 | faces_tricuspid = shifting_model.get_surface_faces(Surface.TRICUSPID_VALVE) 161 | write_vtk_polydata(os.path.join(mesh_dir, 162 | 'Mesh_tricuspid_valve_timeframe' + format(time_frame_id, 163 | "03") + '.vtk'), 164 | data, faces=faces_tricuspid) 165 | 166 | # pulmonary valve 167 | faces_pulmonary = shifting_model.get_surface_faces(Surface.PULMONARY_VALVE) 168 | write_vtk_polydata(os.path.join(mesh_dir, 169 | 'Mesh_pulmonary_valve_timeframe' + format(time_frame_id, 170 | "03") + '.vtk'), 171 | data, faces=faces_pulmonary) 172 | 173 | 174 | def main(): 175 | parser = argparse.ArgumentParser() 
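    # Added note (not in the original): the subject-selection flags below
    # follow the same convention as Mass_volume.py (--all, --subject,
    # --start/--number), with an extra --allowlist option here.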
176 | 177 | parser.add_argument('--profile', '-p', action='store', default='default', help='config profile to be used') 178 | parser.add_argument('--job', '-j', action='store', default='default', help='job identifier') 179 | parser.add_argument('--force', '-f', action='store_true', default=False, help='force overwrite') 180 | 181 | parser.add_argument('--data-dir', '-d', action='store', help='path to data directory') 182 | parser.add_argument('--output-dir', '-o', action='store', default='Mesh_Outputs', help='name of output directories') 183 | 184 | parser.add_argument('--instance', '-i', type=int, action='store', default=2, help='instance to be processed') 185 | 186 | parser.add_argument('--all', '-a', action='store_true', help='process all subjects') 187 | parser.add_argument('--subject', '-s', action='store', help='subject id to be processed') 188 | parser.add_argument('--start', '-S', action='store', type=int, help='index of first subject id to be processed') 189 | parser.add_argument('--number', '-n', action='store', type=int, help='number of subjects to be processed from first subject id') 190 | parser.add_argument('--allowlist', '-l', action='store', help='path to subject allowlist') 191 | 192 | parser.add_argument('--timeframe', '-t', type=int, action='store', help='timeframe to be processed') 193 | 194 | args, _ = parser.parse_known_args() 195 | 196 | cfg = configparser.ConfigParser() 197 | cfg.read('config.ini') 198 | 199 | data_dir = args.data_dir if args.data_dir else cfg[args.profile]['DataDir'] 200 | 201 | start_time = time.time() 202 | 203 | # You should have the subject ID as a folder name for each subject on the data_dir and no 204 | # other files or folders on data_dir 205 | if args.allowlist and os.path.exists(args.allowlist): 206 | with open(args.allowlist) as f: 207 | sids = [n for n in f.read().splitlines() if os.path.isdir(os.path.join(data_dir, n))] 208 | else: 209 | sids = [n for n in os.listdir(data_dir) if os.path.isdir(os.path.join(data_dir, n))] 210 | 211 | if args.all: 212 | subject_ids = sorted(sids) 213 | elif args.subject: 214 | sid = args.subject 215 | if sid in sids: 216 | subject_ids = [sid] 217 | else: 218 | subject_ids = [] 219 | elif args.start is not None and args.start >= 0 and args.start < len(sids): 220 | if args.number is not None and args.number > 0: 221 | end = args.start + args.number - 1 222 | subject_ids = sorted(sids)[args.start:end+1] 223 | else: 224 | subject_ids = sorted(sids)[args.start:] 225 | else: 226 | subject_ids = [] 227 | 228 | log_filename = os.path.join(data_dir, f'surface-{args.job}.log') 229 | formatter = logging.Formatter(fmt='%(asctime)s | %(name)s | %(levelname)s | %(message)s') 230 | handler = logging.FileHandler(log_filename) 231 | handler.setFormatter(formatter) 232 | logger.addHandler(handler) 233 | 234 | for subject_id in subject_ids: 235 | logger.debug(f"Processing subject {subject_id}...") 236 | 237 | i_dir = os.path.join(data_dir, subject_id, f'Instance_{args.instance}') 238 | 239 | if not os.path.exists(i_dir): 240 | logger.debug(f'Instance_{args.instance} directory does not exist for {subject_id}') 241 | continue 242 | 243 | input_dir = os.path.join(i_dir, args.output_dir) 244 | 245 | save_mesh_as_vtk('./model', input_dir, args.timeframe, args.force) 246 | 247 | logger.debug("VTK_mesh done!") 248 | logger.debug('TOTAL TIME: {}'.format(time.time() - start_time)) 249 | 250 | 251 | if __name__ == '__main__': 252 | main() 253 | -------------------------------------------------------------------------------- 
/src/surface/model/ETIndicesThruWall.txt: -------------------------------------------------------------------------------- 1 | 1 7 2 2 | 1 6 7 3 | 2 8 3 4 | 2 7 8 5 | 3 9 4 6 | 3 8 9 7 | 4 10 5 8 | 4 9 10 9 | 6 12 7 10 | 6 11 12 11 | 7 13 8 12 | 7 12 13 13 | 8 14 9 14 | 8 13 14 15 | 9 15 10 16 | 9 14 15 17 | 11 17 12 18 | 11 16 17 19 | 12 18 13 20 | 12 17 18 21 | 13 19 14 22 | 13 18 19 23 | 14 20 15 24 | 14 19 20 25 | 21 27 22 26 | 21 26 27 27 | 22 28 23 28 | 22 27 28 29 | 23 29 24 30 | 23 28 29 31 | 24 30 25 32 | 24 29 30 33 | 26 32 27 34 | 26 31 32 35 | 27 33 28 36 | 27 32 33 37 | 28 34 29 38 | 28 33 34 39 | 29 35 30 40 | 29 34 35 41 | 31 37 32 42 | 31 36 37 43 | 32 38 33 44 | 32 37 38 45 | 33 39 34 46 | 33 38 39 47 | 34 40 35 48 | 34 39 40 49 | 41 47 42 50 | 41 46 47 51 | 42 48 43 52 | 42 47 48 53 | 43 49 44 54 | 43 48 49 55 | 44 50 45 56 | 44 49 50 57 | 46 52 47 58 | 46 51 52 59 | 47 53 48 60 | 47 52 53 61 | 48 54 49 62 | 48 53 54 63 | 49 55 50 64 | 49 54 55 65 | 51 57 52 66 | 51 56 57 67 | 52 58 53 68 | 52 57 58 69 | 53 59 54 70 | 53 58 59 71 | 54 60 55 72 | 54 59 60 73 | 61 67 62 74 | 61 66 67 75 | 62 68 63 76 | 62 67 68 77 | 63 69 64 78 | 63 68 69 79 | 64 70 65 80 | 64 69 70 81 | 66 72 67 82 | 66 71 72 83 | 67 73 68 84 | 67 72 73 85 | 68 74 69 86 | 68 73 74 87 | 69 75 70 88 | 69 74 75 89 | 71 77 72 90 | 71 76 77 91 | 72 78 73 92 | 72 77 78 93 | 73 79 74 94 | 73 78 79 95 | 74 80 75 96 | 74 79 80 97 | 81 87 82 98 | 81 86 87 99 | 82 88 83 100 | 82 87 88 101 | 83 89 84 102 | 83 88 89 103 | 84 90 85 104 | 84 89 90 105 | 86 92 87 106 | 86 91 92 107 | 87 93 88 108 | 87 92 93 109 | 88 94 89 110 | 88 93 94 111 | 89 95 90 112 | 89 94 95 113 | 91 97 92 114 | 91 96 97 115 | 92 98 93 116 | 92 97 98 117 | 93 99 94 118 | 93 98 99 119 | 94 100 95 120 | 94 99 100 121 | 101 107 102 122 | 101 106 107 123 | 102 108 103 124 | 102 107 108 125 | 103 109 104 126 | 103 108 109 127 | 104 110 105 128 | 104 109 110 129 | 106 112 107 130 | 106 111 112 131 | 107 113 108 132 | 107 112 113 133 | 108 114 109 134 | 108 113 114 135 | 109 115 110 136 | 109 114 115 137 | 111 117 112 138 | 111 116 117 139 | 112 118 113 140 | 112 117 118 141 | 113 119 114 142 | 113 118 119 143 | 114 120 115 144 | 114 119 120 145 | 121 127 122 146 | 121 126 127 147 | 122 128 123 148 | 122 127 128 149 | 123 129 124 150 | 123 128 129 151 | 124 130 125 152 | 124 129 130 153 | 126 132 127 154 | 126 131 132 155 | 127 133 128 156 | 127 132 133 157 | 128 134 129 158 | 128 133 134 159 | 129 135 130 160 | 129 134 135 161 | 131 137 132 162 | 131 136 137 163 | 132 138 133 164 | 132 137 138 165 | 133 139 134 166 | 133 138 139 167 | 134 140 135 168 | 134 139 140 169 | 141 147 142 170 | 141 146 147 171 | 142 148 143 172 | 142 147 148 173 | 143 149 144 174 | 143 148 149 175 | 144 150 145 176 | 144 149 150 177 | 146 152 147 178 | 146 151 152 179 | 147 153 148 180 | 147 152 153 181 | 148 154 149 182 | 148 153 154 183 | 149 155 150 184 | 149 154 155 185 | 151 157 152 186 | 151 156 157 187 | 152 158 153 188 | 152 157 158 189 | 153 159 154 190 | 153 158 159 191 | 154 160 155 192 | 154 159 160 193 | 161 167 162 194 | 161 166 167 195 | 162 168 163 196 | 162 167 168 197 | 163 169 164 198 | 163 168 169 199 | 164 170 165 200 | 164 169 170 201 | 166 172 167 202 | 166 171 172 203 | 167 173 168 204 | 167 172 173 205 | 168 174 169 206 | 168 173 174 207 | 169 175 170 208 | 169 174 175 209 | 171 177 172 210 | 171 176 177 211 | 172 178 173 212 | 172 177 178 213 | 173 179 174 214 | 173 178 179 215 | 174 180 175 216 | 174 179 180 217 | 181 187 182 218 | 181 186 187 219 | 
182 188 183 220 | 182 187 188 221 | 183 189 184 222 | 183 188 189 223 | 184 190 185 224 | 184 189 190 225 | 186 192 187 226 | 186 191 192 227 | 187 193 188 228 | 187 192 193 229 | 188 194 189 230 | 188 193 194 231 | 189 195 190 232 | 189 194 195 233 | 191 197 192 234 | 191 196 197 235 | 192 198 193 236 | 192 197 198 237 | 193 199 194 238 | 193 198 199 239 | 194 200 195 240 | 194 199 200 241 | 201 202 207 242 | 201 207 206 243 | 202 203 208 244 | 202 208 207 245 | 203 204 209 246 | 203 209 208 247 | 204 205 210 248 | 204 210 209 249 | 206 207 212 250 | 206 212 211 251 | 207 208 213 252 | 207 213 212 253 | 208 209 214 254 | 208 214 213 255 | 209 210 215 256 | 209 215 214 257 | 211 212 217 258 | 211 217 216 259 | 212 213 218 260 | 212 218 217 261 | 213 214 219 262 | 213 219 218 263 | 214 215 220 264 | 214 220 219 265 | 221 222 227 266 | 221 227 226 267 | 222 223 228 268 | 222 228 227 269 | 223 224 229 270 | 223 229 228 271 | 224 225 230 272 | 224 230 229 273 | 226 227 232 274 | 226 232 231 275 | 227 228 233 276 | 227 233 232 277 | 228 229 234 278 | 228 234 233 279 | 229 230 235 280 | 229 235 234 281 | 231 232 237 282 | 231 237 236 283 | 232 233 238 284 | 232 238 237 285 | 233 234 239 286 | 233 239 238 287 | 234 235 240 288 | 234 240 239 289 | 241 242 247 290 | 241 247 246 291 | 242 243 248 292 | 242 248 247 293 | 243 244 249 294 | 243 249 248 295 | 244 245 250 296 | 244 250 249 297 | 246 247 252 298 | 246 252 251 299 | 247 248 253 300 | 247 253 252 301 | 248 249 254 302 | 248 254 253 303 | 249 250 255 304 | 249 255 254 305 | 251 252 257 306 | 251 257 256 307 | 252 253 258 308 | 252 258 257 309 | 253 254 259 310 | 253 259 258 311 | 254 255 260 312 | 254 260 259 313 | 261 262 267 314 | 261 267 266 315 | 262 263 268 316 | 262 268 267 317 | 263 264 269 318 | 263 269 268 319 | 264 265 270 320 | 264 270 269 321 | 266 267 272 322 | 266 272 271 323 | 267 268 273 324 | 267 273 272 325 | 268 269 274 326 | 268 274 273 327 | 269 270 275 328 | 269 275 274 329 | 271 272 277 330 | 271 277 276 331 | 272 273 278 332 | 272 278 277 333 | 273 274 279 334 | 273 279 278 335 | 274 275 280 336 | 274 280 279 337 | 281 282 287 338 | 281 287 286 339 | 282 283 288 340 | 282 288 287 341 | 283 284 289 342 | 283 289 288 343 | 284 285 290 344 | 284 290 289 345 | 286 287 292 346 | 286 292 291 347 | 287 288 293 348 | 287 293 292 349 | 288 289 294 350 | 288 294 293 351 | 289 290 295 352 | 289 295 294 353 | 291 292 297 354 | 291 297 296 355 | 292 293 298 356 | 292 298 297 357 | 293 294 299 358 | 293 299 298 359 | 294 295 300 360 | 294 300 299 361 | 301 302 307 362 | 301 307 306 363 | 302 303 308 364 | 302 308 307 365 | 303 304 309 366 | 303 309 308 367 | 304 305 310 368 | 304 310 309 369 | 306 307 312 370 | 306 312 311 371 | 307 308 313 372 | 307 313 312 373 | 308 309 314 374 | 308 314 313 375 | 309 310 315 376 | 309 315 314 377 | 311 312 317 378 | 311 317 316 379 | 312 313 318 380 | 312 318 317 381 | 313 314 319 382 | 313 319 318 383 | 314 315 320 384 | 314 320 319 385 | 321 322 327 386 | 321 327 326 387 | 322 323 328 388 | 322 328 327 389 | 323 324 329 390 | 323 329 328 391 | 324 325 330 392 | 324 330 329 393 | 326 327 332 394 | 326 332 331 395 | 327 328 333 396 | 327 333 332 397 | 328 329 334 398 | 328 334 333 399 | 329 330 335 400 | 329 335 334 401 | 331 332 337 402 | 331 337 336 403 | 332 333 338 404 | 332 338 337 405 | 333 334 339 406 | 333 339 338 407 | 334 335 340 408 | 334 340 339 409 | 341 342 347 410 | 341 347 346 411 | 342 343 348 412 | 342 348 347 413 | 343 344 349 414 | 343 349 348 415 | 344 345 350 416 | 344 350 
349 417 | 346 347 352 418 | 346 352 351 419 | 347 348 353 420 | 347 353 352 421 | 348 349 354 422 | 348 354 353 423 | 349 350 355 424 | 349 355 354 425 | 351 352 357 426 | 351 357 356 427 | 352 353 358 428 | 352 358 357 429 | 353 354 359 430 | 353 359 358 431 | 354 355 360 432 | 354 360 359 433 | 361 362 367 434 | 361 367 366 435 | 362 363 368 436 | 362 368 367 437 | 363 364 369 438 | 363 369 368 439 | 364 365 370 440 | 364 370 369 441 | 366 367 372 442 | 366 372 371 443 | 367 368 373 444 | 367 373 372 445 | 368 369 374 446 | 368 374 373 447 | 369 370 375 448 | 369 375 374 449 | 371 372 377 450 | 371 377 376 451 | 372 373 378 452 | 372 378 377 453 | 373 374 379 454 | 373 379 378 455 | 374 375 380 456 | 374 380 379 457 | 381 382 387 458 | 381 387 386 459 | 382 383 388 460 | 382 388 387 461 | 383 384 389 462 | 383 389 388 463 | 384 385 390 464 | 384 390 389 465 | 386 387 392 466 | 386 392 391 467 | 387 388 393 468 | 387 393 392 469 | 388 389 394 470 | 388 394 393 471 | 389 390 395 472 | 389 395 394 473 | 391 392 397 474 | 391 397 396 475 | 392 393 398 476 | 392 398 397 477 | 393 394 399 478 | 393 399 398 479 | 394 395 400 480 | 394 400 399 481 | 379 380 344 482 | 380 345 344 483 | 378 379 343 484 | 379 344 343 485 | 377 378 342 486 | 378 343 342 487 | 376 377 341 488 | 377 342 341 489 | 384 4 385 490 | 385 4 5 491 | 383 3 384 492 | 384 3 4 493 | 382 2 383 494 | 383 2 3 495 | 382 1 2 496 | 1 382 381 497 | 24 25 19 498 | 25 20 19 499 | 23 24 18 500 | 24 19 18 501 | 22 23 17 502 | 23 18 17 503 | 21 22 16 504 | 22 17 16 505 | 399 400 364 506 | 400 365 364 507 | 398 399 363 508 | 399 364 363 509 | 397 398 363 510 | 397 363 362 511 | 396 397 362 512 | 396 362 361 513 | 44 45 39 514 | 45 40 39 515 | 43 44 39 516 | 43 39 38 517 | 42 43 38 518 | 42 38 37 519 | 41 42 37 520 | 41 37 36 521 | 64 65 60 522 | 64 60 59 523 | 63 64 59 524 | 63 59 58 525 | 62 63 58 526 | 62 58 57 527 | 61 62 57 528 | 61 57 56 529 | 76 81 77 530 | 81 82 77 531 | 77 82 78 532 | 82 83 78 533 | 78 83 79 534 | 83 84 79 535 | 79 84 80 536 | 84 85 80 537 | 96 101 97 538 | 101 102 97 539 | 97 102 98 540 | 102 103 98 541 | 98 103 99 542 | 103 104 99 543 | 99 104 100 544 | 104 105 100 545 | 339 340 304 546 | 340 305 304 547 | 338 339 303 548 | 339 304 303 549 | 337 338 302 550 | 338 303 302 551 | 336 337 301 552 | 337 302 301 553 | 359 360 324 554 | 360 325 324 555 | 358 359 323 556 | 359 324 323 557 | 357 358 322 558 | 358 323 322 559 | 356 357 321 560 | 357 322 321 561 | 319 320 284 562 | 320 285 284 563 | 318 319 283 564 | 319 284 283 565 | 317 318 282 566 | 318 283 282 567 | 316 317 281 568 | 317 282 281 569 | 299 300 265 570 | 299 265 264 571 | 298 299 264 572 | 298 264 263 573 | 297 298 263 574 | 297 263 262 575 | 296 297 262 576 | 296 262 261 577 | 279 280 245 578 | 279 245 244 579 | 278 279 244 580 | 278 244 243 581 | 277 278 243 582 | 277 243 242 583 | 276 277 242 584 | 276 242 241 585 | 259 260 225 586 | 259 225 224 587 | 258 259 224 588 | 258 224 223 589 | 257 258 223 590 | 257 223 222 591 | 256 257 222 592 | 256 222 221 593 | 239 240 205 594 | 239 205 204 595 | 238 239 204 596 | 238 204 203 597 | 237 238 203 598 | 237 203 202 599 | 236 237 202 600 | 236 202 201 601 | 124 125 120 602 | 124 120 119 603 | 123 124 119 604 | 123 119 118 605 | 122 123 118 606 | 122 118 117 607 | 121 122 117 608 | 121 117 116 609 | 219 220 199 610 | 220 200 199 611 | 218 219 198 612 | 219 199 198 613 | 217 218 197 614 | 218 198 197 615 | 216 217 196 616 | 217 197 196 617 | 184 185 179 618 | 185 180 179 619 | 183 184 178 620 | 184 179 178 621 | 
182 183 177 622 | 183 178 177 623 | 181 182 176 624 | 182 177 176 625 | 164 165 159 626 | 165 160 159 627 | 163 164 158 628 | 164 159 158 629 | 162 163 157 630 | 163 158 157 631 | 161 162 156 632 | 162 157 156 633 | 144 145 140 634 | 144 140 139 635 | 143 144 139 636 | 143 139 138 637 | 142 143 138 638 | 142 138 137 639 | 141 142 137 640 | 141 137 136 641 | -------------------------------------------------------------------------------- /src/surface/model/control_mesh_connectivity.txt: -------------------------------------------------------------------------------- 1 | 45 71 47 73 146 171 113 169 2 | 47 73 65 74 113 169 168 172 3 | 74 72 65 46 172 170 168 142 4 | 75 77 30 32 173 174 149 150 5 | 77 76 32 31 174 175 150 162 6 | 79 49 76 31 176 165 175 162 7 | 78 48 79 49 177 166 176 165 8 | 72 75 46 30 170 173 142 149 9 | 80 79 77 76 178 176 174 175 10 | 81 78 80 79 179 177 178 176 11 | 82 80 75 77 180 178 173 174 12 | 83 81 82 80 181 179 180 178 13 | 73 84 74 72 169 182 172 170 14 | 71 85 73 84 171 183 169 182 15 | 84 82 72 75 182 180 170 173 16 | 85 83 84 82 183 181 182 180 17 | 11 18 86 88 155 156 186 187 18 | 54 89 15 87 164 188 147 184 19 | 15 87 43 91 147 184 144 189 20 | 18 58 88 90 156 151 187 185 21 | 54 11 89 86 164 155 188 186 22 | 90 58 92 61 185 151 190 160 23 | 86 88 94 93 186 187 192 193 24 | 89 86 95 94 188 186 191 192 25 | 87 89 91 95 184 188 189 191 26 | 88 90 93 92 187 185 193 190 27 | 43 91 16 96 144 189 143 194 28 | 92 61 99 59 190 160 197 153 29 | 91 95 96 100 189 191 194 198 30 | 93 92 98 99 193 190 196 197 31 | 94 93 97 98 192 193 195 196 32 | 95 94 100 97 191 192 198 195 33 | 16 96 37 105 143 194 145 203 34 | 37 105 45 71 145 203 146 171 35 | 101 60 78 48 199 158 177 166 36 | 99 59 101 60 197 153 199 158 37 | 105 104 71 85 203 202 171 183 38 | 102 101 81 78 200 199 179 177 39 | 103 102 83 81 201 200 181 179 40 | 104 103 85 83 202 201 183 181 41 | 96 100 105 104 194 198 203 202 42 | 97 98 103 102 195 196 201 200 43 | 100 97 104 103 198 195 202 201 44 | 98 99 102 101 196 197 200 199 45 | 42 44 14 20 241 243 232 234 46 | 13 19 42 44 250 252 241 243 47 | 45 47 3 2 343 331 337 329 48 | 46 23 22 6 315 297 295 293 49 | 17 67 4 50 342 346 338 344 50 | 38 66 17 67 237 247 342 346 51 | 20 9 38 66 234 230 237 247 52 | 44 40 20 9 243 239 234 230 53 | 19 1 44 40 252 248 243 239 54 | 7 8 53 5 340 341 345 339 55 | 56 36 7 8 245 235 340 341 56 | 52 14 56 36 244 232 245 235 57 | 57 42 52 14 246 241 244 232 58 | 51 13 57 42 253 250 246 241 59 | 59 12 60 39 260 231 261 238 60 | 60 39 48 34 261 238 335 334 61 | 58 10 61 41 259 249 262 240 62 | 61 41 59 12 262 240 260 231 63 | 23 24 6 21 297 298 293 294 64 | 24 63 21 62 298 323 294 322 65 | 63 34 62 33 323 334 322 333 66 | 24 23 27 26 298 297 303 302 67 | 31 24 28 27 307 298 304 303 68 | 49 63 31 24 319 323 307 298 69 | 48 34 49 63 335 334 319 323 70 | 47 65 2 64 331 326 329 324 71 | 65 46 64 22 326 315 324 295 72 | 66 37 67 45 247 236 346 343 73 | 9 16 66 37 230 233 247 236 74 | 40 43 9 16 239 242 230 233 75 | 1 15 40 43 248 251 239 242 76 | 67 45 50 3 346 343 344 337 77 | 18 68 58 10 376 256 259 249 78 | 55 54 1 15 255 254 248 251 79 | 11 69 18 68 377 257 376 256 80 | 70 19 51 13 258 252 253 250 81 | 55 1 70 19 255 248 258 252 82 | 23 46 26 25 297 315 302 301 83 | 46 30 25 29 315 306 301 305 84 | 30 32 29 35 306 308 305 314 85 | 32 31 35 28 308 307 314 304 86 | 8 17 5 4 341 342 339 338 87 | 36 38 8 17 235 237 341 342 88 | 14 20 36 38 232 234 235 237 89 | 39 56 34 7 238 245 334 340 90 | 12 52 39 56 231 244 238 245 91 | 41 57 12 52 240 
246 231 244 92 | 10 51 41 57 249 253 240 246 93 | 68 70 10 51 256 258 249 253 94 | 69 55 68 70 257 255 256 258 95 | 11 54 69 55 377 254 257 255 96 | 34 7 33 53 334 340 333 345 97 | 137 136 114 111 273 272 266 264 98 | 134 133 128 121 270 269 356 355 99 | 114 111 134 133 266 264 270 269 100 | 149 142 119 115 317 316 300 296 101 | 150 149 107 119 318 317 292 300 102 | 123 156 106 151 267 374 263 378 103 | 163 148 157 135 289 282 284 271 104 | 111 153 133 158 264 283 269 285 105 | 133 158 121 166 269 285 355 336 106 | 159 140 163 148 286 276 289 282 107 | 161 139 159 140 288 275 286 276 108 | 106 151 136 160 263 378 272 287 109 | 136 160 111 153 272 287 264 283 110 | 127 150 124 107 312 318 309 292 111 | 132 157 110 152 268 284 348 352 112 | 138 163 132 157 274 289 268 284 113 | 141 159 138 163 277 286 274 289 114 | 112 161 141 159 265 288 277 286 115 | 168 113 167 109 327 330 325 328 116 | 142 168 115 167 316 327 296 325 117 | 128 121 208 205 356 355 365 363 118 | 208 205 116 130 365 363 354 358 119 | 205 206 130 117 363 364 358 332 120 | 121 166 205 206 355 336 363 364 121 | 209 207 131 118 362 360 313 299 122 | 209 165 207 162 362 321 360 320 123 | 206 209 117 131 364 362 332 313 124 | 206 166 209 165 364 336 362 321 125 | 140 211 148 210 276 373 282 372 126 | 211 137 210 114 373 273 372 266 127 | 139 213 140 211 275 375 276 373 128 | 148 210 135 212 282 372 271 371 129 | 210 114 212 134 372 266 371 270 130 | 212 134 214 128 371 270 370 356 131 | 135 212 215 214 271 371 369 370 132 | 157 135 152 215 284 271 352 369 133 | 217 216 108 154 367 368 347 353 134 | 110 152 217 216 348 352 367 368 135 | 145 220 146 218 280 379 351 380 136 | 220 132 218 110 379 268 380 348 137 | 143 219 145 220 278 381 280 379 138 | 219 138 220 132 381 274 379 268 139 | 144 221 143 219 279 382 278 381 140 | 221 141 219 138 382 277 381 274 141 | 147 223 144 221 281 383 279 382 142 | 223 112 221 141 383 265 382 277 143 | 223 224 112 161 383 385 265 288 144 | 164 224 147 223 291 385 281 383 145 | 224 225 161 139 385 384 288 275 146 | 164 155 224 225 291 290 385 384 147 | 226 217 222 108 387 367 386 347 148 | 218 110 226 217 380 348 387 367 149 | 225 155 123 156 384 290 267 374 150 | 225 123 139 213 384 267 275 375 151 | 152 215 216 227 352 369 368 349 152 | 215 214 227 120 369 370 349 366 153 | 216 227 154 122 368 349 353 350 154 | 227 127 122 124 349 312 350 309 155 | 227 120 127 204 349 366 312 359 156 | 204 120 125 129 359 366 310 357 157 | 113 226 109 222 330 387 328 386 158 | 146 218 113 226 351 380 330 387 159 | 214 128 120 208 370 356 366 365 160 | 120 208 129 116 366 365 357 354 161 | 213 228 211 137 375 388 373 273 162 | 228 106 137 136 388 263 273 272 163 | 123 106 213 228 267 263 375 388 164 | 229 204 126 125 361 359 311 310 165 | 207 229 118 126 360 361 299 311 166 | 127 204 150 229 312 359 318 361 167 | 150 229 162 207 318 361 320 360 168 | 37 145 45 146 236 280 343 351 169 | 16 143 37 145 233 278 236 280 170 | 43 144 16 143 242 279 233 278 171 | 15 147 43 144 251 281 242 279 172 | 54 164 15 147 254 291 251 281 173 | 151 58 160 61 378 259 287 262 174 | 160 61 153 59 287 262 283 260 175 | 153 59 158 60 283 260 285 261 176 | 158 60 166 48 285 261 336 335 177 | 46 142 30 149 315 316 306 317 178 | 30 149 32 150 306 317 308 318 179 | 162 31 150 32 320 307 318 308 180 | 165 49 162 31 321 319 320 307 181 | 65 168 46 142 326 327 315 316 182 | 47 113 65 168 331 330 326 327 183 | 166 48 165 49 336 335 321 319 184 | 156 18 151 58 374 376 378 259 185 | 164 54 155 11 291 254 290 377 186 | 155 11 156 18 290 377 374 376 
187 | 45 146 47 113 343 351 331 330 -------------------------------------------------------------------------------- /src/surface/model/epi_to_septum_ETindices.txt: -------------------------------------------------------------------------------- 1 | 5733 5803 3273 2 | 3273 5803 5126 3 | 5803 5767 5126 4 | 5126 5767 3602 5 | 5767 5804 3602 6 | 3602 5804 5125 7 | 5804 5734 5125 8 | 5125 5734 3274 9 | 5734 5801 3274 10 | 3274 5801 5130 11 | 5801 5766 5130 12 | 5130 5766 3604 13 | 5766 5802 3604 14 | 3604 5802 5129 15 | 5802 5737 5129 16 | 5129 5737 3276 17 | 5737 5799 3276 18 | 3276 5799 5194 19 | 5799 5765 5194 20 | 5194 5765 3636 21 | 5765 5800 3636 22 | 3636 5800 5193 23 | 5800 5747 5193 24 | 5193 5747 3286 25 | 5747 5797 3286 26 | 3286 5797 5527 27 | 5797 5764 5527 28 | 5527 5764 3803 29 | 5764 5798 3803 30 | 3803 5798 5528 31 | 5798 5742 5528 32 | 5528 5742 3285 33 | 5742 5809 3285 34 | 3285 5809 5558 35 | 5809 5770 5558 36 | 5558 5770 3818 37 | 5770 5810 3818 38 | 3818 5810 5557 39 | 5810 5743 5557 40 | 5557 5743 3369 41 | 5743 5775 3369 42 | 3369 5775 5469 43 | 5775 5753 5469 44 | 5469 5753 3774 45 | 5753 5776 3774 46 | 3774 5776 5470 47 | 5776 5740 5470 48 | 5470 5740 3373 49 | 5740 5781 3373 50 | 3373 5781 5163 51 | 5781 5756 5163 52 | 5163 5756 3621 53 | 5756 5782 3621 54 | 3621 5782 5164 55 | 5782 5745 5164 56 | 5164 5745 3282 57 | 5745 5783 3282 58 | 3282 5783 5169 59 | 5783 5757 5169 60 | 5169 5757 3624 61 | 5757 5784 3624 62 | 3624 5784 5170 63 | 5784 5741 5170 64 | 5170 5741 3278 65 | 5741 5777 3278 66 | 3278 5777 5143 67 | 5777 5754 5143 68 | 5143 5754 3611 69 | 5754 5778 3611 70 | 3611 5778 5144 71 | 5778 5744 5144 72 | 5144 5744 3280 73 | 5744 5779 3280 74 | 3280 5779 5147 75 | 5779 5755 5147 76 | 5147 5755 3613 77 | 5755 5780 3613 78 | 3613 5780 5148 79 | 5780 5749 5148 80 | 5148 5749 3331 81 | 5749 5795 3331 82 | 3331 5795 5326 83 | 5795 5763 5326 84 | 5326 5763 3702 85 | 5763 5796 3702 86 | 3702 5796 5325 87 | 5796 5748 5325 88 | 5325 5748 3316 89 | 5748 5793 3316 90 | 3316 5793 5268 91 | 5793 5762 5268 92 | 5268 5762 3673 93 | 5762 5794 3673 94 | 3673 5794 5267 95 | 5794 5746 5267 96 | 5267 5746 3315 97 | 5746 5791 3315 98 | 3315 5791 5264 99 | 5791 5761 5264 100 | 5264 5761 3671 101 | 5761 5792 3671 102 | 3671 5792 5263 103 | 5792 5739 5263 104 | 5263 5739 3313 105 | 5739 5773 3313 106 | 3313 5773 5247 107 | 5773 5752 5247 108 | 5247 5752 3663 109 | 5752 5774 3663 110 | 3663 5774 5248 111 | 5774 5738 5248 112 | 5248 5738 3312 113 | 5738 5771 3312 114 | 3312 5771 5241 115 | 5771 5751 5241 116 | 5241 5751 3660 117 | 5751 5772 3660 118 | 3660 5772 5242 119 | 5772 5732 5242 120 | 5242 5732 3311 121 | 5732 5787 3311 122 | 3311 5787 5296 123 | 5787 5759 5296 124 | 5296 5759 3687 125 | 5759 5788 3687 126 | 3687 5788 5295 127 | 5788 5750 5295 128 | 5295 5750 3322 129 | 5750 5785 3322 130 | 3322 5785 5310 131 | 5785 5758 5310 132 | 5310 5758 3694 133 | 5758 5786 3694 134 | 3694 5786 5309 135 | 5786 5731 5309 136 | 5309 5731 3325 137 | 5731 5789 3325 138 | 3325 5789 5568 139 | 5789 5760 5568 140 | 5568 5760 3823 141 | 5760 5790 3823 142 | 3823 5790 5567 143 | 5790 5736 5567 144 | 5567 5736 3346 145 | 5736 5807 3346 146 | 3346 5807 5116 147 | 5807 5769 5116 148 | 5116 5769 3597 149 | 5769 5808 3597 150 | 3597 5808 5115 151 | 5808 5735 5115 152 | 5115 5735 3275 153 | 5735 5805 3275 154 | 3275 5805 5122 155 | 5805 5768 5122 156 | 5122 5768 3600 157 | 5768 5806 3600 158 | 3600 5806 5121 159 | 5806 5733 5121 160 | 5121 5733 3273 161 | 
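The ETIndices* files in this model directory (ETIndicesThruWall.txt, epi_to_septum_ETindices.txt and the others) are plain whitespace-separated triangle lists: each row holds the three vertex indices of one surface triangle, and the indexing appears to be 1-based (the lowest index occurring in ETIndicesThruWall.txt is 1). A minimal loading sketch under that assumption — the helper name load_et_indices is illustrative, not part of the repository:

import numpy as np

def load_et_indices(path):
    # Each row is one triangle: three vertex indices into the model's point list.
    tris = np.loadtxt(path, dtype=int)
    # Shift from the file's apparent 1-based indexing to 0-based numpy indexing.
    return tris - 1

faces = load_et_indices('ETIndicesThruWall.txt')
print(faces.shape)  # -> (n_triangles, 3)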
-------------------------------------------------------------------------------- /src/surface/model/model.txt: -------------------------------------------------------------------------------- 1 | -6.431456 1.707481 -1.636837 2 | -2.900815 -2.608039 2.304806 3 | -3.084266 -2.7988 2.23643 4 | -4.088487 -3.528421 2.433011 5 | -4.574467 -3.333792 2.883549 6 | -2.888589 -2.228395 3.907615 7 | -6.514179 -2.197191 3.36706 8 | -6.170715 -2.985478 2.254617 9 | -5.631552 -0.40928 -0.449588 10 | -7.31574 2.452596 0.688369 11 | -5.764734 3.286261 -0.616452 12 | -7.41363 0.900924 2.495155 13 | -7.730681 1.253 -0.420703 14 | -7.430718 -0.718833 0.694043 15 | -5.389464 1.980468 -1.736791 16 | -4.622499 0.090102 -0.667771 17 | -5.192904 -3.237013 1.243716 18 | -6.096625 3.344489 0.131425 19 | -7.261872 1.492306 -1.101532 20 | -6.498163 -0.667882 0.060352 21 | -3.828808 -2.130454 4.264983 22 | -2.606858 -2.168769 3.126579 23 | -2.762624 -2.045488 4.048673 24 | -3.817223 -1.631503 4.752431 25 | -1.788427 -2.034437 3.904711 26 | -2.523214 -2.043368 4.496327 27 | -3.343464 -1.805959 4.958617 28 | -3.317513 -1.040894 4.900172 29 | -2.011096 -1.466655 4.15732 30 | -1.96261 -1.245161 3.434173 31 | -3.689448 -0.744738 4.472806 32 | -2.424721 -0.679786 3.81919 33 | -4.973547 -2.483322 4.440375 34 | -6.226912 -1.150314 4.425484 35 | -2.488513 -0.693512 4.30966 36 | -7.002516 -1.914424 1.435153 37 | -3.870324 -0.872202 -0.047205 38 | -5.994277 -1.9975 0.648403 39 | -7.060722 -0.099971 3.45184 40 | -6.06555 0.687471 -1.08245 41 | -7.429388 1.792602 1.569553 42 | -7.600424 0.414548 0.080952 43 | -5.085412 1.01501 -1.298117 44 | -6.868713 0.36969 -0.49956 45 | -3.074984 -1.667001 0.433625 46 | -2.444093 -1.885023 2.956205 47 | -2.692093 -1.846182 1.110092 48 | -5.678145 -0.170382 4.591999 49 | -4.387074 -0.50237 4.530813 50 | -3.56144 -3.091547 2.269197 51 | -7.84856 1.935452 0.056941 52 | -7.785386 -0.034872 1.660092 53 | -5.068839 -2.972404 3.820169 54 | -5.636542 2.911004 -1.305629 55 | -6.557022 2.751478 -1.669898 56 | -7.368402 -1.158362 2.532226 57 | -7.852853 1.031789 0.844441 58 | -6.446929 2.921993 0.908806 59 | -6.277294 1.587733 2.91391 60 | -6.031352 0.749138 3.965906 61 | -6.509451 2.329194 1.893348 62 | -4.558929 -2.076028 4.534202 63 | -5.054416 -1.273975 4.604852 64 | -2.589723 -2.33571 2.646654 65 | -2.387023 -2.221118 2.148695 66 | -5.071423 -1.582079 0.083752 67 | -4.323009 -2.800109 0.864278 68 | -6.980061 3.087364 -0.158892 69 | -6.632205 3.459746 -0.691369 70 | -7.446797 2.573564 -0.819401 71 | -3.247435 -1.026639 0.958748 72 | -2.508399 -1.082074 2.134625 73 | -2.814716 -1.220639 1.377791 74 | -2.465181 -1.326716 1.798433 75 | -2.739568 -0.507832 2.718292 76 | -3.653233 -0.369374 3.805895 77 | -3.190761 -0.292959 3.275681 78 | -4.885472 0.556938 3.901226 79 | -4.24098 -0.043439 3.822185 80 | -3.716727 0.152531 3.0237 81 | -4.23002 0.558428 2.751705 82 | -3.259903 -0.200721 2.233305 83 | -3.692473 0.364943 1.971959 84 | -3.077665 -0.582391 1.72663 85 | -3.467561 -0.234892 1.336308 86 | -5.119913 2.671256 0.062277 87 | -4.910758 1.82711 -0.971278 88 | -5.507242 2.848556 0.514215 89 | -4.990922 2.281116 -0.59038 90 | -5.817111 2.739056 1.027131 91 | -4.611777 1.241851 -0.533995 92 | -5.715931 2.339695 1.524091 93 | -5.087733 2.232729 1.046886 94 | -4.70586 2.042382 0.538408 95 | -4.596587 1.650908 -0.024072 96 | -4.219725 0.43634 -0.005639 97 | -4.368815 1.477255 1.003477 98 | -4.779515 1.690992 1.648703 99 | -5.519003 1.704252 2.481615 100 | -4.180059 1.025062 0.505929 101 | -5.219717 1.095025 
3.290516 102 | -4.50097 1.130212 2.323722 103 | -4.015718 0.936614 1.468352 104 | -3.852954 0.415587 0.930104 105 | -3.779641 -0.343807 0.522028 106 | -4.990167 3.721094 1.458913 107 | -1.10334 -0.51718 3.544688 108 | 0.197634 -0.584135 1.72206 109 | -1.351998 -1.700302 1.201232 110 | 0.155002 0.454884 0.906957 111 | -4.638523 3.057519 3.361904 112 | -2.741916 3.077076 -1.532752 113 | -1.845875 -1.739641 0.935069 114 | -3.409929 3.303904 2.996571 115 | -1.631193 -1.424701 2.387828 116 | -3.072161 0.347825 7.372669 117 | -4.613428 0.157041 6.750725 118 | -3.744546 -0.597958 6.215916 119 | -1.285728 -1.071677 3.172205 120 | -1.522526 1.442238 5.757557 121 | -4.076205 2.021005 5.564736 122 | -0.036074 0.441395 3.424371 123 | -4.601983 3.962631 0.575345 124 | -0.789187 -0.036834 3.683777 125 | -2.767095 -0.428863 6.822286 126 | -3.114074 -0.636614 6.382889 127 | -1.340948 0.836317 4.431242 128 | -2.635465 2.482392 5.387738 129 | -2.661624 -0.024315 7.105618 130 | -3.858614 0.513749 7.279318 131 | -4.335988 -0.538129 6.154378 132 | -0.311749 1.336125 0.045597 133 | -4.423169 2.556668 4.616928 134 | -3.00774 2.896656 4.26485 135 | -0.985459 2.723426 2.532642 136 | -4.890266 3.336148 2.329967 137 | -3.92187 3.590327 1.920887 138 | -1.20893 2.034277 -0.688897 139 | -3.586869 3.647225 0.004664 140 | -2.802326 3.619114 0.669541 141 | -2.142929 2.551509 -1.26391 142 | -1.810971 -1.320859 2.538369 143 | -3.669053 0.302751 -0.959375 144 | -4.157988 1.256925 -1.502476 145 | -2.921753 -0.64448 -0.408176 146 | -2.214064 -1.460407 0.191137 147 | -4.480234 2.474514 -1.789749 148 | -1.785593 3.255799 1.638791 149 | -1.680438 -0.773272 3.386815 150 | -2.073247 -0.024578 3.945807 151 | -5.767571 3.499179 1.078626 152 | 0.980822 1.678258 1.974465 153 | -5.666859 2.447357 3.233379 154 | 0.630652 0.097726 2.814613 155 | -5.048724 3.694345 -0.590205 156 | -5.526796 3.830363 0.478936 157 | 0.235966 2.350957 1.130617 158 | -5.348891 1.618952 4.344394 159 | -2.041166 3.285864 -0.547978 160 | -5.769701 2.940588 2.125616 161 | -2.951363 3.58648 -1.060906 162 | -2.964765 0.106989 4.468669 163 | -0.81593 2.892131 0.224914 164 | -4.945021 3.326053 -1.556262 165 | -4.174481 -0.217021 4.896567 166 | -5.218885 0.641642 5.389311 167 | -1.684361 -1.734579 1.714348 168 | -1.884076 -1.685316 1.73589 169 | -2.259451 -0.811648 1.142366 170 | -2.052501 -0.668993 2.046966 171 | -2.502957 -0.541222 0.605495 172 | -1.97978 -0.986768 1.576671 173 | -2.323942 -0.113634 2.780784 174 | -2.636616 0.402025 3.44432 175 | -3.127006 0.386163 4.025907 176 | -3.789047 0.579998 4.102009 177 | -4.41994 1.080721 4.092009 178 | -3.240079 0.581747 3.117718 179 | -3.720691 1.06625 2.850467 180 | -2.762315 0.208383 2.287439 181 | -3.052548 0.747742 1.900868 182 | -2.458284 -0.163089 1.711854 183 | -2.871922 0.294553 1.261683 184 | -4.314707 2.257385 -0.986663 185 | -5.188697 3.177366 1.113344 186 | -4.425139 3.04739 0.026895 187 | -4.948724 3.290285 0.633052 188 | -4.371807 2.701134 -0.710088 189 | -3.913892 1.676567 -0.791429 190 | -5.100517 2.811783 1.650092 191 | -3.924689 2.215534 -0.192251 192 | -4.05679 2.617808 0.482776 193 | -4.528687 2.741046 1.111344 194 | -3.549968 0.948361 -0.307726 195 | -3.719343 2.045077 1.009189 196 | -4.24078 2.151592 1.75408 197 | -5.016247 2.265155 2.637001 198 | -3.630037 1.566799 0.306002 199 | -4.790851 1.67244 3.448497 200 | -4.006452 1.622243 2.428212 201 | -3.388449 1.418165 1.569942 202 | -3.236799 0.926064 0.837097 203 | -3.014842 0.129263 0.189513 204 | -1.909135 0.854231 5.572455 205 | -3.691638 1.483978 
6.476068 206 | -4.873255 0.504702 5.981855 207 | -3.50966 -0.067094 5.303614 208 | -2.511249 1.843637 6.663502 209 | -4.312206 -0.03952 5.520249 210 | -2.581149 3.338981 2.407257 211 | -3.265165 3.609776 1.40541 212 | -1.900714 2.891892 3.504063 213 | -3.913445 3.737467 0.749888 214 | -1.320024 2.323581 4.708228 215 | -0.207026 2.127977 3.593919 216 | 0.85554 1.043478 2.782055 217 | 0.402038 -0.117362 1.459361 218 | -1.009315 -0.494506 0.236704 219 | -2.498325 1.062334 -0.920156 220 | -1.703663 0.261428 -0.299874 221 | -3.093422 1.900476 -1.393378 222 | -0.327909 -1.266515 1.249735 223 | -3.614683 2.839946 -1.830638 224 | -3.963071 3.661851 -1.388 225 | -4.377411 3.79633 -0.466634 226 | -0.532119 -0.986573 0.803105 227 | -0.175566 1.54746 4.143629 228 | -4.214118 3.632577 1.302712 229 | -2.559297 0.31152 5.129878 230 | -5.596539 -0.800675 -1.159009 231 | -8.101864 0.926777 3.033529 232 | -8.204963 -1.25644 0.574553 233 | -4.538007 -0.334358 -1.340972 234 | -6.931001 -1.403441 -0.695742 235 | -7.734938 -2.468752 1.373127 236 | -3.793679 -1.287709 -0.68037 237 | -6.343607 -2.746526 0.081809 238 | -7.742014 -0.211741 4.073268 239 | -6.014822 0.288239 -1.738945 240 | -8.182399 1.901324 1.922912 241 | -8.314931 -0.081217 0.001957 242 | -5.016313 0.636875 -1.961454 243 | -7.318596 -0.246319 -1.24371 244 | -8.584446 -0.2458 1.898043 245 | -8.113408 -1.536472 2.876126 246 | -8.731901 0.963222 1.004924 247 | -5.049822 -1.989221 -0.578245 248 | -6.428907 1.418183 -2.19553 249 | -8.0571 2.814462 0.752624 250 | -8.351645 0.976521 -0.572572 251 | -5.409882 1.792743 -2.396488 252 | -7.691793 1.209878 -1.746234 253 | -8.622423 1.993661 -0.081284 254 | -5.872843 3.201638 -1.984265 255 | -6.870555 3.027271 -1.920171 256 | -7.670229 3.485845 -0.334439 257 | -7.026359 3.723598 -1.00982 258 | -8.006834 2.827009 -1.309579 259 | -7.025989 3.322283 1.172478 260 | -6.858468 1.748307 3.495429 261 | -6.610008 0.826558 4.506436 262 | -7.136198 2.555787 2.436573 263 | -5.079212 3.951676 1.536062 264 | -4.761636 3.336114 3.593589 265 | -2.63712 3.079398 -1.779081 266 | -3.336908 3.563929 3.154678 267 | -4.648861 4.246342 0.606704 268 | -0.050374 1.282919 -0.1994 269 | -4.54951 2.863967 4.873672 270 | -2.921751 3.174989 4.47169 271 | -0.789007 3.045921 2.629587 272 | -4.999163 3.572207 2.555155 273 | -3.840859 3.846762 2.057283 274 | -0.956704 1.95047 -0.972268 275 | -3.457698 4.048167 0.054049 276 | -2.652448 3.916167 0.785497 277 | -1.911081 2.506824 -1.537732 278 | -3.533987 0.140367 -1.246255 279 | -3.99582 1.179626 -1.7579 280 | -2.814885 -0.734651 -0.644721 281 | -4.354609 2.501664 -2.058193 282 | -1.621586 3.556257 1.666465 283 | -5.80061 2.6755 3.451446 284 | 0.539589 2.464871 1.066312 285 | -5.523024 1.838381 4.584175 286 | -1.798194 3.51934 -0.640846 287 | -5.911921 3.254425 2.340184 288 | -2.733729 3.813761 -1.200843 289 | -0.535072 3.056302 0.185393 290 | -5.12883 4.126281 -0.724885 291 | -4.876129 3.705588 -1.836182 292 | -1.039324 -0.59972 3.65061 293 | -2.708755 -2.363089 4.187197 294 | -3.787052 -2.299935 4.610667 295 | -2.300322 -2.291265 3.266877 296 | -1.603389 -1.570775 2.483058 297 | -2.67601 -2.206812 4.191661 298 | -3.789545 -1.714504 4.977224 299 | -3.703619 -0.751065 6.1106 300 | -1.268295 -1.130504 3.301581 301 | -1.602509 -2.271182 3.735769 302 | -2.528594 -2.272218 4.494465 303 | -3.519391 -1.817742 5.113086 304 | -3.467445 -0.884797 5.028985 305 | -1.795457 -1.344457 4.155453 306 | -1.773728 -1.268481 3.501999 307 | -3.718579 -0.695954 4.557264 308 | -2.421234 -0.501467 4.063729 309 | 
-0.707939 -0.132244 3.833863 310 | -2.632432 -0.487399 6.754153 311 | -3.072209 -0.769395 6.351677 312 | -1.216895 0.750681 4.569654 313 | -4.491183 -0.490297 6.0113 314 | -2.292353 -0.507085 4.355091 315 | -2.104146 -1.937101 3.111832 316 | -1.788481 -1.520155 2.610233 317 | -1.606009 -0.918199 3.476059 318 | -2.03739 -0.189256 4.08498 319 | -4.504173 -0.712293 4.947207 320 | -2.951011 -0.030602 4.650061 321 | -4.289484 -0.36111 5.1145 322 | -4.538454 -2.186187 4.920786 323 | -5.006201 -1.453479 5.02712 324 | -2.239591 -2.502808 2.692469 325 | -1.69155 -1.927297 1.733813 326 | -1.926102 -2.284801 2.083079 327 | -1.835226 -1.910187 1.716306 328 | -1.307363 -1.883825 1.1939 329 | -2.432293 -2.924234 2.209024 330 | -1.779308 -1.910574 0.786057 331 | -2.258671 -2.347069 0.893645 332 | -4.790517 0.255614 6.653812 333 | -5.255747 -2.576679 4.916392 334 | -6.810011 -1.476331 4.912051 335 | -6.085652 -0.272395 5.19772 336 | -5.476513 0.536366 5.536508 337 | -2.870124 -3.144068 1.897806 338 | -4.320597 -4.064968 2.347826 339 | -4.82511 -3.950439 3.018997 340 | -7.04029 -2.625953 3.708095 341 | -6.617622 -3.651275 2.30166 342 | -5.398385 -3.997298 1.022405 343 | -2.873635 -2.241347 0.02505 344 | -3.401 -3.59532 1.887821 345 | -5.531039 -3.505928 4.049331 346 | -4.209413 -3.315488 0.435431 347 | 0.445109 -0.637464 1.645588 348 | 0.40572 0.407238 0.693813 349 | -0.015363 1.561045 4.368451 350 | 0.104646 0.379174 3.642603 351 | -2.036205 -1.653927 -0.028565 352 | 1.303285 1.795795 1.957586 353 | 0.83672 0.043338 2.983825 354 | -3.101256 0.532482 7.528275 355 | -4.088201 2.285738 5.876509 356 | -2.530032 2.775846 5.553971 357 | -2.504633 -0.035862 7.131205 358 | -3.945188 0.667869 7.401416 359 | -1.796489 0.75078 5.571542 360 | -3.391467 -0.11313 5.171974 361 | -2.516839 0.132564 5.121572 362 | -4.379027 -0.179459 5.502032 363 | -3.69494 1.718065 6.729259 364 | -5.155014 0.514935 6.050732 365 | -2.458023 2.024289 6.855068 366 | -1.339955 1.582418 5.848189 367 | 0.643259 -0.200481 1.380271 368 | 1.092487 1.02048 2.953526 369 | 0.047238 2.438853 3.740847 370 | -1.185697 2.564725 4.882232 371 | -1.811544 3.123984 3.618325 372 | -2.471523 3.59537 2.476527 373 | -3.214597 3.858725 1.488996 374 | -5.662899 4.11691 0.430287 375 | -3.821417 4.05983 0.792299 376 | -6.625558 3.777898 0.191042 377 | -6.055586 3.947275 -0.791849 378 | -5.946659 3.730914 1.413145 379 | -1.50027 0.105627 -0.609877 380 | -0.781784 -0.653148 -0.074151 381 | -2.33356 0.960111 -1.168528 382 | -2.943563 1.789706 -1.62653 383 | -3.568841 2.857488 -2.02388 384 | -4.312343 4.065766 -0.481194 385 | -3.849323 3.919535 -1.529259 386 | -0.223684 -1.400527 1.090268 387 | -0.394614 -1.135218 0.601045 388 | -4.154273 3.945375 1.38058 -------------------------------------------------------------------------------- /src/surface/perform_fit.py: -------------------------------------------------------------------------------- 1 | # Input: 3D contours 2 | # Output: Fitted model 3 | 4 | import argparse 5 | import configparser 6 | import logging 7 | import os 8 | import numpy as np 9 | import time 10 | import pandas as pd 11 | from pathlib import Path 12 | 13 | from . 
import config_params as params
14 | from .BiVFitting import BiventricularModel
15 | from .BiVFitting import GPDataSet
16 | from .BiVFitting import ContourType
17 | from .BiVFitting import MultiThreadSmoothingED, SolveProblemCVXOPT
18 | from .BiVFitting import plot_timeseries
19 |
20 | if params.enable_visualizations:
21 | from plotly.offline import plot
22 | import plotly.graph_objs as go
23 |
24 | logger = logging.getLogger('surface')
25 |
26 | # This list of contours_to_plot was taken from Liandong Lee
27 | contours_to_plot = [ContourType.LAX_RA, ContourType.LAX_RV_ENDOCARDIAL,
28 | ContourType.SAX_RV_FREEWALL, ContourType.LAX_RV_FREEWALL,
29 | ContourType.SAX_RV_SEPTUM, ContourType.LAX_RV_SEPTUM,
30 | ContourType.SAX_LV_ENDOCARDIAL,
31 | ContourType.SAX_LV_EPICARDIAL, ContourType.RV_INSERT,
32 | ContourType.APEX_POINT, ContourType.MITRAL_VALVE,
33 | ContourType.TRICUSPID_VALVE, ContourType.AORTA_VALVE,
34 | ContourType.SAX_RV_EPICARDIAL, ContourType.LAX_RV_EPICARDIAL,
35 | ContourType.LAX_LV_ENDOCARDIAL, ContourType.LAX_LV_EPICARDIAL,
36 | ContourType.LAX_RV_EPICARDIAL, ContourType.SAX_RV_OUTLET,
37 | ContourType.AORTA_PHANTOM, ContourType.TRICUSPID_PHANTOM,
38 | ContourType.MITRAL_PHANTOM
39 | ]
40 |
41 |
42 | def perform_fitting(input_dir, output_dir, gp_points_file='gp_points_file.txt',
43 | gp_frame_info_file='gp_frame_info_file.txt', model_path='./model', **kwargs):
44 | try:
45 | # performs all the BiVentricular fitting operations
46 |
47 | if 'iter_num' in kwargs:
48 | iter_num = kwargs.get('iter_num', None)
49 | pid = os.getpid()
50 | #logger.debug('child PID', pid)
51 | # assign a new process ID and a new CPU to the child process
52 | # iter_num corresponds to the id number of the CPU where the process will be run
53 | os.system("taskset -cp %d %d" % (iter_num, pid))
54 |
55 | if 'id_Frame' in kwargs:
56 | # acquire .csv file containing patient_id, ES frame number, ED frame number if present
57 | case_frame_dict = kwargs.get('id_Frame', None)
58 |
59 | # define the path to gp points file and gp frame info file
60 | filename = os.path.join(input_dir, gp_points_file)
61 | filename_info = os.path.join(input_dir, gp_frame_info_file)
62 |
63 | # extract the subject id from the input directory
64 | subject_id = input_dir.split(os.sep)[-3]
65 |
66 | if not os.path.exists(filename):
67 | logger.error(f'subject {subject_id}: gp points file does not exist')
68 | return
69 | if not os.path.exists(filename_info):
70 | logger.error(f'subject {subject_id}: gp points info file does not exist')
71 | return
72 |
73 | # create a log file to store fitting errors
74 | error_file = Path(os.path.join(output_dir, 'ErrorFile.txt'))
75 | error_file.touch(exist_ok=True)
76 | shift_file = Path(os.path.join(output_dir, 'Shiftfile.txt'))
77 | shift_file.touch(exist_ok=True)
78 | pos_file = Path(os.path.join(output_dir, 'Posfile.txt'))
79 | pos_file.touch(exist_ok=True)
80 |
81 | with open(error_file, 'w') as f:
82 | f.write(f'Log for subject: {subject_id} \n')
83 |
84 | # read all the frames from the GPFile
85 | all_frames = pd.read_csv(filename, sep='\t')
86 |
87 | time_frames = np.unique(all_frames.values[:, 6]).astype(np.uint16)
88 |
89 | # if measure_shift_ed_only, we calculate and write the slice shifts based on the ED frame
90 | if params.measure_shift_ed_only:
91 | logger.debug('Shift measured only at ED frame')
92 |
93 | # time_frames[0] should be the ED frame.
94 | ed_dataset = GPDataSet(filename, filename_info, subject_id, sampling=params.sampling,
95 | time_frame_number=int(time_frames[0]))
96 |
97 | result_ed = ed_dataset.sinclaire_slice_shifting(frame_num=int(time_frames[0]))
98 | shift_ed = result_ed[0]
99 | pos_ed = result_ed[1]
100 |
101 | with open(shift_file, "w") as file:
102 | file.write(f'Shift measured only at ED (time frame {str(time_frames[0])}): \n')
103 | file.write(str(shift_ed))
104 |
105 |
106 | with open(pos_file, "w") as file:
107 | file.write(f'Pos measured only at ED (time frame {str(time_frames[0])}): \n')
108 | file.write(str(pos_ed))
109 |
110 |
111 | # Initialise time series lists
112 | time_series_step1 = []
113 | time_series_step2 = []
114 |
115 | logger.debug(f'Fitting of {subject_id} ----> started \n')
116 |
117 | timeframe_num = kwargs.get('timeframe_num', None)
118 | force_overwrite = kwargs.get('force_overwrite', False)
119 |
120 | # for all time frames
121 | for idx, time_frame_id in enumerate(time_frames):
122 | time_frame_id = int(time_frame_id)
123 |
124 | if timeframe_num is not None and timeframe_num != time_frame_id:
125 | logger.debug('subject {}, timeframe {}: skipping, timeframe not requested'.format(subject_id, time_frame_id))
126 | continue
127 |
128 | model_file = os.path.join(output_dir, f'{subject_id}_model_timeframe{time_frame_id:03}.txt')
129 | if os.path.exists(model_file) and not force_overwrite:
130 | logger.debug('subject {}, timeframe {}: skipping, model file exists'.format(subject_id, time_frame_id))
131 | continue
132 |
133 | logger.debug(f'Time frame id: {time_frame_id}')
134 |
135 | with open(error_file, 'a') as f:
136 | f.write(f"\nTime Frame # {time_frame_id}\n")
137 |
138 | data_set = GPDataSet(filename, filename_info, subject_id, sampling=params.sampling,
139 | time_frame_number=time_frame_id)
140 | biventricular_model = BiventricularModel(model_path, subject_id)
141 |
142 | if params.measure_shift_ed_only:
143 | # apply shift measured previously using ED frame
144 | data_set.apply_slice_shift(shift_ed, pos_ed)
145 | else:
146 | # measure and apply shift to current frame
147 | shifted_slice = data_set.sinclaire_slice_shifting(error_file, time_frame_id)
148 | shift_measure = shifted_slice[0]
149 | pos_measure = shifted_slice[1]
150 |
151 | if idx == 0:
152 | with open(shift_file, "w") as file:
153 | file.write(f'Time frame id: {time_frame_id}\n')
154 | file.write(str(shift_measure))
155 |
156 | with open(pos_file, "w") as file:
157 | file.write(f'Time frame id: {time_frame_id}\n')
158 | file.write(str(pos_measure))
159 |
160 | else:
161 | with open(shift_file, "a") as file:
162 | file.write(f'Time frame id: {time_frame_id}\n')
163 | file.write(str(shift_measure))
164 |
165 | with open(pos_file, "a") as file:  # append so later frames do not overwrite earlier entries
166 | file.write(f'Time frame id: {time_frame_id}\n')
167 | file.write(str(pos_measure))
168 |
169 |
170 | if not hasattr(data_set, 'tricuspid_centroid'):
171 | logger.error('subject {}, timeframe {}: missing attribute, tricuspid_centroid'.format(subject_id, time_frame_id))
172 | continue
173 |
174 | if not hasattr(data_set, 'apex'):
175 | logger.error('subject {}, timeframe {}: missing attribute, apex'.format(subject_id, time_frame_id))
176 | continue
177 |
178 | try:
179 | biventricular_model.update_pose_and_scale(data_set)
180 | except FloatingPointError:
181 | logger.error('subject {}, timeframe {}: failed to update pose and scale'.format(subject_id, time_frame_id))
182 | continue
183 |
184 | if
params.enable_visualizations: 185 | contour_plots = data_set.PlotDataSet(contours_to_plot) 186 | 187 | data = contour_plots 188 | 189 | plot(go.Figure(data), filename=os.path.join(output_dir, 190 | 'pose_fitted_model_timeframe' + str( 191 | time_frame_id) + '.html'), 192 | auto_open=False) 193 | 194 | # Generates RV epicardial points if they have not been contoured 195 | # (can be commented if available) used in LL 196 | try: 197 | rv_epi_points, rv_epi_contour, rv_epi_slice = data_set.create_rv_epicardium(rv_thickness=3) 198 | except Exception: 199 | logger.error('subject {}, timeframe {}: failed to create RV epicardium'.format(subject_id, time_frame_id)) 200 | continue 201 | 202 | # Generate phantom points for the mitral valve, the tricuspid valve, the pulmonary 203 | # artery and the aorta 204 | try: 205 | mitral_points = data_set.create_valve_phantom_points(30, ContourType.MITRAL_VALVE) 206 | tri_points = data_set.create_valve_phantom_points(30, ContourType.TRICUSPID_VALVE) 207 | pulmonary_points = data_set.create_valve_phantom_points(20, ContourType.PULMONARY_VALVE) 208 | aorta_points = data_set.create_valve_phantom_points(20, ContourType.AORTA_VALVE) 209 | except Exception: 210 | logger.error('subject {}, timeframe {}: failed to create phantom points'.format(subject_id, time_frame_id)) 211 | continue 212 | 213 | # Example on how to set different weights for different points group (R.B.) 214 | data_set.weights[data_set.contour_type == ContourType.MITRAL_PHANTOM] = 2 215 | data_set.weights[data_set.contour_type == ContourType.AORTA_PHANTOM] = 2 216 | data_set.weights[data_set.contour_type == ContourType.PULMONARY_PHANTOM] = 2 217 | data_set.weights[data_set.contour_type == ContourType.TRICUSPID_PHANTOM] = 2 218 | 219 | data_set.weights[data_set.contour_type == ContourType.APEX_POINT] = 1 220 | data_set.weights[data_set.contour_type == ContourType.RV_INSERT] = 5 221 | 222 | data_set.weights[data_set.contour_type == ContourType.MITRAL_VALVE] = 2 223 | data_set.weights[data_set.contour_type == ContourType.AORTA_VALVE] = 2 224 | data_set.weights[data_set.contour_type == ContourType.PULMONARY_VALVE] = 2 225 | 226 | # Perform linear fit 227 | MultiThreadSmoothingED(biventricular_model, params.weight_gp, data_set, error_file) 228 | 229 | # Results after linear fit 230 | if params.enable_visualizations: 231 | model = biventricular_model.plot_surface("rgb(0,127,0)", "rgb(0,0,127)", 232 | "rgb(127,0,0)", "all") 233 | data = model + contour_plots 234 | 235 | time_series_step1.append([data, time_frame_id]) 236 | 237 | plot(go.Figure(data), filename=os.path.join(output_dir, 238 | 'linear_fitted_model_timeframe' + str( 239 | time_frame_id) + '.html'), 240 | auto_open=False) 241 | 242 | # Perform diffeomorphic fit (this step can take a while) 243 | SolveProblemCVXOPT(biventricular_model, data_set, params.weight_gp, params.low_smoothing_weight, 244 | params.transmural_weight, error_file) 245 | 246 | # Results after diffeomorphic fit 247 | if params.enable_visualizations: 248 | model = biventricular_model.plot_surface("rgb(0,127,0)", "rgb(0,0,127)", 249 | "rgb(127,0,0)", "all") 250 | 251 | data = model + contour_plots 252 | 253 | time_series_step2.append([data, time_frame_id]) 254 | 255 | plot(go.Figure(data), filename=os.path.join(output_dir, 256 | 'diffeo_fitted_model_time_frame' + str( 257 | time_frame_id) + '.html'), 258 | auto_open=False) 259 | 260 | model_data = {'x': biventricular_model.control_mesh[:, 0], 261 | 'y': biventricular_model.control_mesh[:, 1], 262 | 'z': 
biventricular_model.control_mesh[:, 2],
263 | 'time_frame': [time_frame_id] *
264 | biventricular_model.control_mesh.shape[0]}
265 | model_dataframe = pd.DataFrame(data=model_data)
266 |
267 | with open(model_file, "w") as file:
268 | file.write(model_dataframe.to_string(header=True, index=False))
269 |
270 | if params.enable_visualizations:
271 | # Comment out the following lines if you don't want html time series plots
272 | # (one html file is written per fitting stage)
273 | plot_timeseries(time_series_step1, output_dir, 'TimeSeries_linear_fit.html')
274 |
275 | # Comment if you did not run diffeomorphic fit
276 | plot_timeseries(time_series_step2, output_dir, 'TimeSeries_diffeo_fit.html')
277 |
278 | except KeyboardInterrupt:
279 | raise  # re-raise the KeyboardInterrupt so it propagates to the caller
280 |
281 |
282 | def main():
283 | parser = argparse.ArgumentParser()
284 |
285 | parser.add_argument('--profile', '-p', action='store', default='default', help='config profile to be used')
286 | parser.add_argument('--job', '-j', action='store', default='default', help='job identifier')
287 | parser.add_argument('--force', '-f', action='store_true', default=False, help='force overwrite')
288 |
289 | parser.add_argument('--data-dir', '-d', action='store', help='path to data directory')
290 | parser.add_argument('--input-dir', '-I', action='store', default='Contour_Outputs', help='name of input directories')
291 | parser.add_argument('--output-dir', '-o', action='store', default='Mesh_Outputs', help='name of output directories')
292 |
293 | parser.add_argument('--instance', '-i', type=int, action='store', default=2, help='instance to be processed')
294 |
295 | parser.add_argument('--all', '-a', action='store_true', help='process all subjects')
296 | parser.add_argument('--subject', '-s', action='store', help='subject id to be processed')
297 | parser.add_argument('--start', '-S', action='store', type=int, help='index of first subject id to be processed')
298 | parser.add_argument('--number', '-n', action='store', type=int, help='number of subjects to be processed from first subject id')
299 | parser.add_argument('--allowlist', '-l', action='store', help='path to subject allowlist')
300 |
301 | parser.add_argument('--timeframe', '-t', type=int, action='store', help='timeframe to be processed')
302 |
303 | args, _ = parser.parse_known_args()
304 |
305 | cfg = configparser.ConfigParser()
306 | cfg.read('config.ini')
307 |
308 | data_dir = args.data_dir if args.data_dir else cfg[args.profile]['DataDir']
309 |
310 | start_time = time.time()
311 |
312 | # data_dir should contain one folder per subject, named after the subject ID, and
313 | # nothing else
314 | if args.allowlist and os.path.exists(args.allowlist):
315 | with open(args.allowlist) as f:
316 | sids = [n for n in f.read().splitlines() if os.path.isdir(os.path.join(data_dir, n))]
317 | else:
318 | sids = [n for n in os.listdir(data_dir) if os.path.isdir(os.path.join(data_dir, n))]
319 |
320 | if args.all:
321 | subject_ids = sorted(sids)
322 | elif args.subject:
323 | sid = args.subject
324 | if sid in sids:
325 | subject_ids = [sid]
326 | else:
327 | subject_ids = []
328 | elif args.start is not None and args.start >= 0 and args.start < len(sids):
329 | if args.number is not None and args.number > 0:
330 | end = args.start + args.number - 1
331 | subject_ids = sorted(sids)[args.start:end+1]
332 | else:
333 | subject_ids = sorted(sids)[args.start:]
334 | else:
335 | subject_ids = []
336 |
337 | log_filename =
os.path.join(data_dir, f'surface-{args.job}.log')
338 | formatter = logging.Formatter(fmt='%(asctime)s | %(name)s | %(levelname)s | %(message)s')
339 | handler = logging.FileHandler(log_filename)
340 | handler.setFormatter(formatter)
341 | logger.addHandler(handler)
342 |
343 | np.seterr(all='raise')
344 |
345 | for subject_id in subject_ids:
346 | logger.debug(f"Processing subject {subject_id}...")
347 |
348 | i_dir = os.path.join(data_dir, subject_id, f'Instance_{args.instance}')
349 |
350 | if not os.path.exists(i_dir):
351 | logger.debug(f'Instance_{args.instance} directory does not exist for {subject_id}')
352 | continue
353 |
354 | input_dir = os.path.join(i_dir, args.input_dir)
355 | output_dir = os.path.join(i_dir, args.output_dir)
356 |
357 | # create the output directory if it doesn't exist
358 | if not os.path.exists(output_dir):
359 | os.mkdir(output_dir)
360 |
361 | perform_fitting(input_dir, output_dir, timeframe_num=args.timeframe, force_overwrite=args.force)
362 |
363 | logger.debug(f'Total run time: {time.time() - start_time}')
364 |
365 |
366 | if __name__ == '__main__':
367 | main()
368 | -------------------------------------------------------------------------------- /src/volumetric/LVendo_RVseptum_RVendo.par: --------------------------------------------------------------------------------
1 | num_stim = 3
2 |
3 | bidomain = 1
4 |
5 | tend = 1
6 |
7 | stimulus[0].stimtype = 3
8 |
9 | stimulus[1].stimtype = 2
10 | stimulus[1].strength = 0.5
11 | stimulus[1].duration = 1
12 | stimulus[2].stimtype = 2
13 | stimulus[2].strength = 1
14 | stimulus[2].duration = 1
15 |
16 |
17 | timedt = 1.0
18 | spacedt = 1.0
19 |
20 | dt = 20
21 | vm_per_phie = 1
22 | parab_solve = 1
23 | cg_tol_ellip = 2.8e-6
24 |
25 | num_gregions = 1
26 |
27 | gregion[0].num_IDs = 1
28 | gregion[0].ID[0] = 0
29 | gregion[0].g_il = 1
30 | gregion[0].g_it = 1
31 | gregion[0].g_el = 1
32 | gregion[0].g_et = 1
33 |
34 |
35 | -------------------------------------------------------------------------------- /src/volumetric/README.md: --------------------------------------------------------------------------------
1 | # volumetric_mesh --- Shuang Qian
2 |
3 | #######################################
4 |
5 | The main script is "main_testvmesh.py".
6 | The dependencies include:
7 | 1. carpfunc.py
8 | 2. meshtool_func.py
9 | 3. v_mesh_generation.py
10 | 4. meshIO.py
11 | 5. LVendo_RVseptum_RVendo.par (a parameter file required for running CARP/OpenCARP)
12 |
13 | ########################################
14 |
15 | Step 1: load surface meshes
16 | (1) Input the directory containing the 8 surface meshes in .vtk format.
17 | (2) Sorting them by name gives the following list (see the loading sketch after this list):
18 | # surfacemesh list is:
19 | # 0: LV_endo
20 | # 1: RV_FW
21 | # 2: RV_septum
22 | # 3: aorta_valve
23 | # 4: epi
24 | # 5: mitral_valve
25 | # 6: pulmonary_valve
26 | # 7: tricuspid_valve
27 | *Check this mapping if the input file names change.
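A minimal sketch of this loading step, assuming the eight .vtk surfaces sit directly in a single directory (the variable names mirror the README; the path is hypothetical):

import glob
import os

folder = '/path/to/surface_meshes'  # hypothetical output directory of the surface-fitting step
# Sorting by filename yields the index order listed above (0: LV_endo ... 7: tricuspid_valve).
surfacemesh = sorted(glob.glob(os.path.join(folder, '*.vtk')))
assert len(surfacemesh) == 8, 'expected exactly 8 surface meshes'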
28 |
29 | ########################################
30 |
31 | Step 2: generate valves using the function generate_valve()
32 | Example: aorta=generate_valve(meshtoolloc,folder,Valvename=surfacemesh[3],endoname=surfacemesh[0],outmsh="aortamesh",bdry_step="0.5",ptindex=5)
33 | "folder" is the folder where the surface meshes are.
34 | "Valvename" is the filename of the valve surface (following the sorted filename order above).
35 | "endoname" is the endo surface on which the target valve sits (also following the sorted filename order).
36 | "outmsh" is the folder name and filename of the volumetric valve mesh.
37 | "bdry_step" is how far the mid-point of the lower plane sits from the original valve surface.
38 | "ptindex" is the index of the valve points in the original .pts file (5810 points in total) and also the element tag for the volumetric valve mesh.
39 |
40 | ########################################
41 |
42 | Step 3: merge all volumetric meshes and resample using the function merge_resample():
43 | (See main_testvmesh.py for the exact merge_resample() call.)
44 | Note that the edge length of the resampled mesh can be checked in "edgeinfo.txt".
45 | *The current resample setting is tuned to achieve a mean edge length of ~1 mm.
46 | *It could potentially be refined further using the information in "edgeinfo.txt".
47 |
48 | ########################################
49 |
50 | Step 4: extract surfaces using the function extract_surfacenolabel()
51 | This step extracts three surfaces (LV_endo, RV_endo, RV_septum) for the next step:
52 | surf_endo=vmesh.extract_surfacenolabel(meshtoolloc,mesh_nolabel=folder+"/"+outmsh+"/"+outmsh, pts_5810=pts,surfmeshloc)
53 | (1) Extract allvalves_surf and myo_surf.
54 | (2) "myo_surf - allvalves_surf" then gives the separated epi, RV_endo and LV_endo.
55 | (3) Use points on LV_endo to select the LV_endo.
56 | (4) Use points on RV_endo and "-edge=10" to select the RV_endo.
57 | (5) Use points on RV_sep and "-edge=10" to select the RV_sep.
58 |
59 | ########################################
60 |
61 | Step 5: split/retag the LV and RV using the function split_RVLV():
62 | splitmeshdir=vmesh.split_RVLV(meshtoolloc,OPTS,CARP,parfile,mesh_nolabel+"_i",SIMID,surf_endo,IGBEXTRACT,thres,split_myo_only)
63 | This step requires a Laplace solve run by OpenCARP.
64 | "thres" determines where the LV/RV boundary lies in the Laplace solution; 0.7 is an optimal value to split it neatly (a small numeric illustration of this thresholding follows at the end of this README).
65 |
66 | ########################################
67 |
68 | If it runs correctly, it will write the final mesh to "/mesh_all/mesh_all" in both CARP format (.pts, .elem, .lon) and .vtk format.
69 |
70 | ############################################
71 | The next step is to generate UVCs and fibers.
72 | The main script is "main_UVC_fiber.py".
73 | The dependencies include:
74 | 1. carpfunc.py
75 | 2. meshtool_func.py
76 | 3. compute_UVC_fiber.py
77 | 4. meshIO.py
78 | 5. transmural.par (this is a file required for running CARP/OpenCARP)
79 | ############################################
80 |
81 |
82 | At last, three folders are required:
83 | (1) The final mesh in "/mesh_all/fiberfolder".
84 | (2) The UVCs in '/mesh_all/UVC_i'.
85 | (3) The surfaces in '/mesh_all/surface/biv_i'.
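The Step 5 split ultimately reduces to thresholding the nodal Laplace solution at "thres": material on one side of 0.7 belongs to one ventricle, the rest to the other. A self-contained numeric illustration of that idea (the toy arrays, the node-to-element averaging, and the 0/1 tag values are assumptions for illustration; the real pipeline reads the solution from phie.igb via igb_todata() and retags the CARP .elem file):

import numpy as np

# Toy nodal Laplace solution in [0, 1] and a toy element list (rows of node indices).
phi = np.array([0.05, 0.32, 0.68, 0.74, 0.91])
elem = np.array([[0, 1, 2], [2, 3, 4]])

thres = 0.7  # the empirically chosen LV/RV boundary value from the README
# Tag an element RV (1) when the mean Laplace value over its nodes exceeds thres, else LV (0).
elem_phi = phi[elem].mean(axis=1)
tags = np.where(elem_phi > thres, 1, 0)
print(tags)  # -> [0 1]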
86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | -------------------------------------------------------------------------------- /src/volumetric/amg_cg_opts: -------------------------------------------------------------------------------- 1 | -ksp_type cg 2 | -pc_type hypre 3 | -pc_hypre_type boomeramg 4 | -pc_hypre_boomeramg_max_iter 1 5 | -pc_hypre_boomeramg_strong_threshold 0.0 6 | -options_left 7 | -------------------------------------------------------------------------------- /src/volumetric/carpfunc.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Sun Jul 24 17:22:29 2022 5 | 6 | @author: sq20 7 | """ 8 | 9 | import pandas as pd 10 | import glob 11 | import logging 12 | import math 13 | import random 14 | import numpy as np 15 | import os 16 | 17 | logger = logging.getLogger(__name__) 18 | 19 | def Laplace_solver(MPIEXEC,OPTS,CARP,parfile,mesh,SIMID,*argv): 20 | 21 | # LV,RV_septum and RV Laplace solver 22 | cmd=MPIEXEC+" -np 8 "+CARP+" -experiment 2 +F "+parfile+" -meshname "+mesh+" -simID "+SIMID+" -ellip_options_file "+ OPTS+" " 23 | if not argv==None: 24 | for arg in argv: 25 | cmd=cmd+arg+" " 26 | logger.debug(cmd) 27 | return os.system(cmd) == 0 28 | 29 | def igb_todata(IGBEXTRACT,phieGrad_path): 30 | cmd="%s -o ascii -O %s/phieGrad.dat %s/phie.igb " % (IGBEXTRACT,phieGrad_path,phieGrad_path) 31 | logger.debug(cmd) 32 | os.system(cmd) 33 | gradMag=np.genfromtxt(phieGrad_path+'/phieGrad.dat') 34 | #gradMag=gradMag[0,:] 35 | return gradMag 36 | 37 | def GlGradient_data(GlGradient,MESH,datatype,inputdata,outputdata): 38 | #data types: elem_ctr,vtx 39 | cmd = "%s extract gradient -msh %s -idat %s -odat %s" % (GlGradient, MESH, inputdata, outputdata) 40 | logger.debug(cmd) 41 | return os.system(cmd) == 0 42 | 43 | 44 | -------------------------------------------------------------------------------- /src/volumetric/config.ini.example: -------------------------------------------------------------------------------- 1 | [default] 2 | DataDir = /path/to/data/directory 3 | CarpBinDir = /path/to/opencarp/bin/directory 4 | 5 | [myprofile] 6 | DataDir = /path/to/other/data/directory 7 | CarpBinDir = /path/to/other/opencarp/bin/directory 8 | -------------------------------------------------------------------------------- /src/volumetric/geometrical.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # This file was developed by Martin Bishop 4 | """ 5 | Created on Wed Jul 27 09:40:26 2022 6 | 7 | @author: sq20 8 | """ 9 | import pandas as pd 10 | import glob 11 | import math 12 | from math import acos,asin,cos,sin,sqrt 13 | import random 14 | import numpy as np 15 | 16 | 17 | ######################################### 18 | # Function to rotate one vector to be aligned with another (Marina Strocchi) 19 | ######################################### 20 | def computeRotationMatrix(v_target, v_rotate): 21 | 22 | ax = np.cross(v_target,v_rotate) 23 | 24 | ax = ax/np.linalg.norm(ax) 25 | 26 | cos_theta = np.dot(v_target,v_rotate) 27 | 28 | theta = -acos(cos_theta) 29 | 30 | R = np.zeros((3,3)) 31 | R[0,0] = ax[0]**2 + cos(theta) * (1 - ax[0]**2); 32 | R[0,1] = (1 - cos(theta)) * ax[0] * ax[1] - ax[2] * sin(theta); 33 | R[0,2] = (1 - cos(theta)) * ax[0] * ax[2] + ax[1] * sin(theta); 34 | R[1,0] = (1 - cos(theta)) * ax[0] * ax[1] + ax[2] * sin(theta); 35 | R[1,1] = ax[1]**2 + cos(theta) * (1 - 
ax[1]**2);
36 | R[1,2] = ( 1 - cos(theta)) * ax[1] * ax[2] - ax[0] * sin(theta);
37 | R[2,0] = ( 1 - cos(theta)) * ax[0] * ax[2] - ax[1] * sin(theta);
38 | R[2,1] = ( 1 - cos(theta)) * ax[1] * ax[2] + ax[0] * sin(theta);
39 | R[2,2] = ax[2]**2 + cos(theta) * (1 - ax[2]**2);
40 |
41 | return R
42 |
43 | #########################################
44 | # Function to rotate about a given axis by a given angle (Marina Strocchi)
45 | #########################################
46 | def computeAxisRotationMatrix(u, theta):
47 |
48 | R11 = cos(theta) + u[0]**2*(1-cos(theta))
49 | R12 = u[0]*u[1]*(1-cos(theta)) - u[2]*sin(theta)
50 | R13 = u[0]*u[2]*(1-cos(theta)) + u[1]*sin(theta)
51 | R21 = u[1]*u[0]*(1-cos(theta)) + u[2]*sin(theta)
52 | R22 = cos(theta) + u[1]**2*(1-cos(theta))
53 | R23 = u[1]*u[2]*(1-cos(theta)) - u[0]*sin(theta)
54 | R31 = u[2]*u[0]*(1-cos(theta)) - u[1]*sin(theta)
55 | R32 = u[2]*u[1]*(1-cos(theta)) + u[0]*sin(theta)
56 | R33 = cos(theta) + u[2]**2*(1-cos(theta))
57 |
58 | R = [[R11,R12,R13],[R21,R22,R23],[R31,R32,R33]]
59 |
60 | return R
61 |
62 | #########################################
63 | # Function to compute area of triangle from vertices
64 | #########################################
65 | def computeTriangleArea(a, b, c):
66 |
67 | ab = b-a
68 | ac = c-a
69 |
70 | area = 0.5*np.linalg.norm(np.cross(ab,ac))  # norm of the cross product gives the scalar area
71 |
72 | return area
73 |
74 | #########################################
75 | # Function to compute normal to plane given three points in plane
76 | #########################################
77 | def computeNormalToPlane(a, b, c):
78 |
79 | # defines two vectors linking point A with point B, and point A with point C
80 | v_ab = b-a
81 | v_ac = c-a
82 |
83 | # computes the normal (in a consistent manner)
84 | normal = np.cross(v_ab,v_ac)
85 |
86 | normal = normal/np.linalg.norm(normal)
87 |
88 | return normal
89 | -------------------------------------------------------------------------------- /src/volumetric/meshIO.py: --------------------------------------------------------------------------------
1 | # Some functions included in this file were written by Martin Bishop
2 | import pandas as pd
3 | import glob
4 | import logging
5 | import math
6 | import random
7 | import numpy as np
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 | #########################################
12 | # Function to read in mesh from basename
13 | #########################################
14 | def read_mesh(basename=None, file_pts=None, file_elem=None, file_lon=None):
15 | # Function to read in mesh from basename
16 |
17 | # Defines pts, elem and lon files from basename
18 | if file_pts is None:
19 | file_pts = glob.glob(basename + '*.pts')
20 | if len(file_pts) > 1:
21 | raise ValueError('Too many matching .pts files')
22 | elif len(file_pts) == 0:
23 | raise ValueError('No matching .pts files')
24 | file_pts = file_pts[0]
25 | if file_elem is None:
26 | file_elem = glob.glob(basename + '*.elem')
27 | if len(file_elem) > 1:
28 | raise ValueError('Too many matching .elem files')
29 | elif len(file_elem) == 0:
30 | raise ValueError('No matching .elem files')
31 | file_elem = file_elem[0]
32 | if file_lon is None:
33 | file_lon = glob.glob(basename + '*.lon')
34 | if len(file_lon) > 1:
35 | raise ValueError('Too many matching .lon files')
36 | elif len(file_lon) == 0:
37 | raise ValueError('No matching .lon files')
38 | file_lon = file_lon[0]
39 |
40 | # Read mesh files
41 | try:
42 | pts = pd.read_csv(file_pts, sep=' ', skiprows=1, header=None)
43 | logger.debug("Successfully read
44 |     except ValueError:
45 |         pts = None  # malformed .pts file: leave pts empty and let the caller decide
46 |     elem = pd.read_csv(file_elem, sep=' ', skiprows=1, usecols=(1, 2, 3, 4, 5), header=None)
47 |     logger.debug("Successfully read {}".format(file_elem))
48 |     lon = pd.read_csv(file_lon, sep=' ', skiprows=1, header=None)
49 |     logger.debug("Successfully read {}".format(file_lon))
50 | 
51 |     return pts, elem, lon
52 | 
53 | 
54 | #########################################
55 | # Function to write mesh
56 | #########################################
57 | def write_mesh(basename, pts=None, elem=None, lon=None, shapes=None, precision_pts=None, precision_lon=None):
58 |     # Write pts, elem and lon data to file
59 | 
60 |     # Ensure *something* is being written!
61 |     assert ((pts is not None) or (elem is not None) or (lon is not None)), "No data given to write to file."
62 | 
63 |     # Adapt precision to default formats
64 |     if precision_pts is None:
65 |         precision_pts = '%.12g'
66 |     if precision_lon is None:
67 |         precision_lon = '%.5g'
68 | 
69 |     # Basic error checking on output file name
70 |     if basename[-1] == '.':
71 |         basename = basename[:-1]
72 | 
73 |     #######################
74 |     # Writes-out pts file
75 |     #######################
76 |     if pts is not None:
77 |         with open(basename + '.pts', 'w') as pFile:
78 |             pFile.write('{}\n'.format(len(pts)))
79 |         pts.to_csv(basename + '.pts', sep=' ', header=False, index=False, mode='a', float_format=precision_pts)
80 |         logger.debug("pts data written to file {}".format(basename + '.pts'))
81 | 
82 |     ######################
83 |     # Writes-out elems file
84 |     ######################
85 |     # If we haven't defined a shape for our elements, set to be tets
86 |     if shapes is None:
87 |         shapes = 'Tt'
88 | 
89 |     if elem is not None:
90 |         with open(basename + '.elem', 'w') as pFile:
91 |             pFile.write('{}\n'.format(len(elem)))
92 |         elem.insert(loc=0, value=shapes, column=0)
93 |         elem.to_csv(basename + '.elem', sep=' ', header=False, index=False, mode='a')
94 |         logger.debug("elem data written to file {}".format(basename + '.elem'))
95 |         del elem[0]  # Remove added column to prevent cross-talk problems later
96 | 
97 |     ######################
98 |     # Writes-out lon file
99 |     ######################
100 |     if lon is not None:
101 |         with open(basename + '.lon', 'w') as pFile:
102 |             pFile.write('1\n')
103 |         lon.to_csv(basename + '.lon', sep=' ', header=False, index=False, mode='a', float_format=precision_lon)
104 |         logger.debug("lon data written to file {}".format(basename + '.lon'))
105 | 
106 |     return None
107 | 
108 | 
109 | #########################################
110 | # Function to read UVC data and interpolate onto elements
111 | #########################################
112 | 
113 | #########################################
114 | # Function to read pts file
115 | #########################################
116 | def read_pts(basename=None, file_pts=None):
117 |     # Function to read in mesh from basename
118 | 
119 |     if file_pts is None:
120 |         file_pts = glob.glob(basename + '.pts')
121 |         #if len(file_pts) > 1:
122 |         #    raise ValueError('Too many matching .pts files')
123 |         if len(file_pts) == 0:
124 |             raise ValueError('No matching .pts files')
125 |         file_pts = file_pts[0]
126 | 
127 |     # Read mesh files
128 |     pts = pd.read_csv(file_pts, sep=' ', skiprows=1, header=None)
129 |     logger.debug("Successfully read {}".format(file_pts))
130 |     logger.debug('Mesh has {} nodes'.format(len(pts)))
131 | 
132 | 
133 |     return pts
134 | 
135 | #########################################
136 | # Function to read cpts file
137 | #########################################
138 | def read_cpts(basename=None, file_cpts=None):
139 |     # Function to read in mesh from basename
140 | 
141 |     if file_cpts is None:
142 |         file_cpts = glob.glob(basename + '*.cpts')
143 |         #if len(file_cpts) > 1:
144 |         #    raise ValueError('Too many matching .cpts files')
145 |         if len(file_cpts) == 0:
146 |             raise ValueError('No matching .cpts files')
147 |         file_cpts = file_cpts[0]
148 | 
149 |     # Read mesh files
150 |     cpts = pd.read_csv(file_cpts, sep=' ', skiprows=1, header=None)
151 |     logger.debug("Successfully read {}".format(file_cpts))
152 | 
153 |     return cpts
154 | 
155 | 
156 | 
157 | #########################################
158 | # Function to read elems file
159 | #########################################
160 | def read_elems(basename=None, file_elem=None):
161 |     # Function to read in mesh from basename
162 | 
163 |     if file_elem is None:
164 |         file_elem = glob.glob(basename + '.elem')
165 |         if len(file_elem) > 1:
166 |             raise ValueError('Too many matching .elem files')
167 |         elif len(file_elem) == 0:
168 |             raise ValueError('No matching .elem files')
169 |         file_elem = file_elem[0]
170 | 
171 |     # Read mesh files
172 |     elem = pd.read_csv(file_elem, sep=' ', skiprows=1, usecols=(1, 2, 3, 4, 5), header=None)
173 |     logger.debug("Successfully read {}".format(file_elem))
174 |     logger.debug('Mesh has {} elements'.format(len(elem)))
175 |     return elem
176 | 
177 | 
178 | #########################################
179 | # Function to read lon file
180 | #########################################
181 | def read_fibres(basename=None, file_lon=None):
182 | 
183 |     # Defines lon files from basename
184 |     if file_lon is None:
185 |         file_lon = glob.glob(basename + '.lon')
186 |         if len(file_lon) > 1:
187 |             raise ValueError('Too many matching .lon files')
188 |         elif len(file_lon) == 0:
189 |             raise ValueError('No matching .lon files')
190 |         file_lon = file_lon[0]
191 | 
192 |     # Read mesh files
193 |     lon = pd.read_csv(file_lon, sep=' ', skiprows=1, header=None)
194 |     logger.debug("Successfully read {}".format(file_lon))
195 | 
196 |     return lon
197 | 
198 | 
199 | #########################################
200 | # Function to write element file
201 | #########################################
202 | def write_elems(elemFilename=None, elem=None, shapes=None):
203 |     # Write elem
204 | 
205 |     # Ensure *something* is being written!
206 |     assert ((elem is not None)), "No data given to write to file."
207 | 
208 |     ######################
209 |     # Writes-out elems file
210 |     ######################
211 |     # If we haven't defined a shape for our elements, set to be tets
212 |     if shapes is None:
213 |         shapes = 'Tt'
214 | 
215 |     if elem is not None:
216 |         with open(elemFilename + '.elem', 'w') as pFile:
217 |             pFile.write('{}\n'.format(len(elem)))
218 |         elem.insert(loc=0, value=shapes, column=0)
219 |         elem.to_csv(elemFilename + '.elem', sep=' ', header=False, index=False, mode='a')
220 |         logger.debug("elem data written to file {}".format(elemFilename + '.elem'))
221 |         del elem[0]  # Remove added column to prevent cross-talk problems later
222 | 
223 |     return None
224 | 
225 | 
226 | #########################################
227 | # Function to write lon file
228 | #########################################
229 | def write_lon(lonFilename=None, lon=None):
230 |     # Ensure *something* is being written!
231 |     assert ((lon is not None)), "No data given to write to file."
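    # .lon header note: the first line of a CARP .lon file records how many
    # direction vectors follow per element -- this writer emits '1' (fibre
    # direction only), while write_lon_includesheet below emits '2' (fibre +
    # sheet direction).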
232 | 
233 |     ######################
234 |     # Writes-out lon file
235 |     ######################
236 |     if lon is not None:
237 |         with open(lonFilename + '.lon', 'w') as pFile:
238 |             pFile.write('1\n')
239 |         lon.to_csv(lonFilename + '.lon', sep=' ', header=False, index=False, mode='a')
240 |         logger.debug("lon data written to file {}".format(lonFilename + '.lon'))
241 | 
242 |     return None
243 | 
244 | 
245 | #########################################
246 | def write_lon_includesheet(lonFilename=None, lon=None):
247 |     # Ensure *something* is being written!
248 |     assert ((lon is not None)), "No data given to write to file."
249 | 
250 |     ######################
251 |     # Writes-out lon file
252 |     ######################
253 |     if lon is not None:
254 |         with open(lonFilename + '.lon', 'w') as pFile:
255 |             pFile.write('2\n')
256 |         lon.to_csv(lonFilename + '.lon', sep=' ', header=False, index=False, mode='a')
257 |         logger.debug("lon data written to file {}".format(lonFilename + '.lon'))
258 | 
259 |     return None
260 | 
261 | 
262 | #########################################
263 | # Function to write pts file
264 | #########################################
265 | def write_pts(ptsFilename=None, pts=None):
266 |     # Ensure *something* is being written!
267 |     assert ((pts is not None)), "No data given to write to file."
268 | 
269 |     precision_pts = '%.12g'
270 | 
271 |     ######################
272 |     # Writes-out pts file
273 |     ######################
274 |     if pts is not None:
275 |         with open(ptsFilename + '.pts', 'w') as pFile:
276 |             pFile.write('{}\n'.format(len(pts)))
277 |         pts.to_csv(ptsFilename + '.pts', sep=' ', header=False, index=False, mode='a', float_format=precision_pts)
278 |         logger.debug("pts data written to file {}".format(ptsFilename + '.pts'))
279 | 
280 |     return None
281 | 
282 | #########################################
283 | # Function to write auxgrid pts file
284 | #########################################
285 | def write_auxpts(auxptsFilename=None, pts=None):
286 |     # Ensure *something* is being written!
287 |     assert ((pts is not None)), "No data given to write to file."
288 | 
289 |     precision_pts = '%.12g'
290 | 
291 |     ######################
292 |     # Writes-out pts file
293 |     ######################
294 |     if pts is not None:
295 |         with open(auxptsFilename + '.pts_t', 'w') as pFile:
296 |             pFile.write('{}\n'.format(len(pts)))
297 |             pFile.write("1\n")
298 |         pts.to_csv(auxptsFilename + '.pts_t', sep=' ', header=False, index=False, mode='a', float_format=precision_pts)
299 |         logger.debug("pts data written to file {}".format(auxptsFilename + '.pts_t'))
300 | 
301 |     return None
302 | 
303 | #########################################
304 | # Function to write out points data
305 | #########################################
306 | def write_data(dataFilename=None, data=None):
307 |     # Ensure *something* is being written!
308 |     assert ((data is not None)), "No data given to write to file."
309 | 
310 |     if data is not None:
311 |         with open(dataFilename, 'w') as dFile:
312 |             for i in data:
313 |                 dFile.write("%f\n" % i)
314 | 
315 | 
316 |     return None
317 | 
318 | #########################################
319 | # Function to write out node/element list
320 | #########################################
321 | def write_list(dataFilename=None, data=None):
322 |     # Ensure *something* is being written!
323 |     assert ((data is not None)), "No data given to write to file."
324 | 
325 |     if data is not None:
326 |         with open(dataFilename, 'w') as dFile:
327 |             for i in data:
328 |                 dFile.write("%i\n" % i)
329 | 
330 | 
331 |     return None
332 | 
333 | 
334 | #########################################
335 | # Function to create a centroids file
336 | #########################################
337 | def create_centroids(elems=None, pts=None):
338 | 
339 |     coords_n0 = np.array(pts.iloc[elems.iloc[:,0]])
340 |     coords_n1 = np.array(pts.iloc[elems.iloc[:,1]])
341 |     coords_n2 = np.array(pts.iloc[elems.iloc[:,2]])
342 |     coords_n3 = np.array(pts.iloc[elems.iloc[:,3]])
343 |     mean_coords = (coords_n0 + coords_n1 + coords_n2 + coords_n3)*0.25
344 | 
345 |     centroids = pd.DataFrame(mean_coords)
346 | 
347 |     return centroids
348 | 
349 | #########################################
350 | # Function to read surf file for surface
351 | #########################################
352 | def read_surf(basename=None, file_surf=None):
353 |     # Function to read in mesh from basename
354 | 
355 |     if file_surf is None:
356 |         file_surf = glob.glob(basename + '.surf')
357 |         #if len(file_surf) > 1:
358 |         #    raise ValueError('Too many matching .surf files')
359 |         if len(file_surf) == 0:
360 |             raise ValueError('No matching .surf files')
361 |         file_surf = file_surf[0]
362 | 
363 |     # Read mesh files
364 |     surf = pd.read_csv(file_surf, sep=' ', skiprows=1, header=None)
365 |     logger.debug("Successfully read {}".format(file_surf))
366 |     logger.debug('Surface has {} triangles'.format(len(surf)))
367 | 
368 | 
369 |     return surf
370 | 
371 | #########################################
372 | # Function to read elem file for surface
373 | #########################################
374 | def read_elem(basename=None, file_elem=None):
375 |     # Function to read in mesh from basename
376 | 
377 |     if file_elem is None:
378 |         file_elem = glob.glob(basename + '.elem')
379 |         #if len(file_elem) > 1:
380 |         #    raise ValueError('Too many matching .elem files')
381 |         if len(file_elem) == 0:
382 |             raise ValueError('No matching .elem files')
383 |         file_elem = file_elem[0]
384 | 
385 |     # Read mesh files
386 |     elem = pd.read_csv(file_elem, sep=' ', skiprows=1, header=None)
387 |     logger.debug("Successfully read {}".format(file_elem))
388 |     logger.debug('Mesh has {} elements'.format(len(elem)))
389 | 
390 | 
391 |     return elem
392 | 
393 | #########################################
394 | # Function to write element file for surface
395 | #########################################
396 | def write_surf(surfFilename=None, surf=None, shapes=None):
397 |     # Write surf
398 | 
399 |     # Ensure *something* is being written!
400 |     assert ((surf is not None)), "No data given to write to file."
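    # A .surf row is 'Tr n1 n2 n3' -- a triangle defined by three node
    # indices -- so the shape column is only inserted below when the incoming
    # frame does not already carry one.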
401 | 
402 |     ######################
403 |     # Writes-out elems file
404 |     ######################
405 |     # If we haven't defined a shape for our elements, set to be tris
406 |     if shapes is None:
407 |         shapes = 'Tr'
408 | 
409 |     if surf is not None:
410 |         with open(surfFilename + '.surf', 'w') as pFile:
411 |             pFile.write('{}\n'.format(len(surf)))
412 |         if surf.iloc[0,0]!='Tr':
413 |             surf.insert(loc=0, value=shapes, column=0)
414 |         surf.to_csv(surfFilename + '.surf', sep=' ', header=False, index=False, mode='a')
415 |         logger.debug("surf data written to file {}".format(surfFilename + '.surf'))
416 |         del surf[0]  # Remove shape column to prevent cross-talk problems later
417 | 
418 | 
419 | 
420 | 
421 | 
422 |     return None
423 | 
424 | #########################################
425 | # Function to write element file for surfaces
426 | #########################################
427 | def write_elem(elemFilename=None, elem=None, shapes=None):
428 |     # Write elem
429 | 
430 |     # Ensure *something* is being written!
431 |     assert ((elem is not None)), "No data given to write to file."
432 | 
433 |     ######################
434 |     # Writes-out elems file
435 |     ######################
436 |     # If we haven't defined a shape for our elements, set to be tris
437 |     if shapes is None:
438 |         shapes = 'Tr'
439 | 
440 |     if elem is not None:
441 |         with open(elemFilename + '.elem', 'w') as pFile:
442 |             pFile.write('{}\n'.format(len(elem)))
443 |         if elem.iloc[0,0]!='Tr':
444 |             elem.insert(loc=0, value=shapes, column=0)
445 |         elem.to_csv(elemFilename + '.elem', sep=' ', header=False, index=False, mode='a')
446 |         logger.debug("elem data written to file {}".format(elemFilename + '.elem'))
447 |         del elem[0]  # Remove shape column to prevent cross-talk problems later
448 | 
449 | 
450 | 
451 | 
452 | 
453 |     return None
454 | 
455 | #########################################
456 | # Function to read in vtx file
457 | #########################################
458 | def read_vtx_File(vtxFilename=None):
459 | 
460 |     vtxs = np.loadtxt(vtxFilename,skiprows=2)
461 | 
462 |     return vtxs
463 | 
464 | 
465 | #########################################
466 | # Function to write out vtx file
467 | #########################################
468 | def write_vtx_File(vtxFilename=None, vtx=None):
469 |     # Ensure *something* is being written!
470 |     assert ((vtx is not None)), "No data given to write to file."
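    # .vtx layout: the first line gives the number of node indices, the second
    # names the grid the indices refer to ('extra' = extracellular grid, as
    # used by openCARP), and each remaining line holds one zero-based index.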
471 | 
472 |     if vtx is not None:
473 |         # vtx is expected to be a single-column DataFrame of node indices
474 |         with open(vtxFilename + '.vtx', 'w') as dFile:
475 |             dFile.write("%i\n" % len(vtx))
476 |             dFile.write("extra\n")
477 |             for i in range(len(vtx)):
478 |                 dFile.write("%i\n" % vtx.iloc[i, 0])
479 | 
480 | 
481 | 
482 |         logger.debug("vtx data written to file {}".format(vtxFilename + '.vtx'))
483 |     return None
484 | 
485 | #########################################
486 | # Function to immediately convert surface to list of unique nodes
487 | #########################################
488 | def read_surf_to_nodeList(surf=None,Filename=False):
489 |     # Stack the three node-index columns (column 0 holds the element shape tag)
490 |     if surf is not None:
491 | 
492 |         surf_nodes = []
493 |         surf_nodes = np.append(surf[1],surf_nodes)
494 |         surf_nodes = np.append(surf[2],surf_nodes)
495 |         surf_nodes = np.append(surf[3],surf_nodes)
496 |         surf_nodes = np.unique(surf_nodes)
497 | 
498 |         surf_nodes = surf_nodes.astype(int)
499 | 
500 |         return surf_nodes
501 | 
502 | 
503 | 
504 | 
--------------------------------------------------------------------------------
/src/volumetric/meshtool_func.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Sun Jul 24 11:46:28 2022
5 | 
6 | @author: sq20
7 | """
8 | import pandas as pd
9 | import glob
10 | import logging
11 | import math
12 | import random
13 | import numpy as np
14 | import os
15 | 
16 | logger = logging.getLogger(__name__)
17 | 
18 | def merge_mesh(meshtoolloc,msh1,msh2,outmsh,ifmt=None,ofmt=None):
19 |     #merge two meshes
20 |     if ifmt is None:
21 |         ifmt="carp_txt"
22 | 
23 |     if ofmt is None:
24 |         ofmt="carp_txt"
25 | 
26 |     cmd=meshtoolloc+" merge meshes -msh1="+msh1+" -msh2="+msh2+" -outmsh="+outmsh+" -ifmt="+ifmt+" -ofmt="+ofmt
27 | 
28 |     logger.debug(cmd)
29 |     return os.system(cmd) == 0
30 | 
31 | def convert_mesh(meshtoolloc,imsh,omsh,ifmt=None,ofmt=None):
32 |     #convert a mesh between formats
33 |     if ifmt is None:
34 |         ifmt="carp_txt"
35 | 
36 |     if ofmt is None:
37 |         ofmt="carp_txt"
38 | 
39 |     cmd=meshtoolloc+" convert -imsh="+imsh+" -omsh="+omsh+" -ifmt="+ifmt+" -ofmt="+ofmt
40 | 
41 |     logger.debug(cmd)
42 |     return os.system(cmd) == 0
43 | 
44 | def generate_mesh(meshtoolloc,surf,outmsh,*argv,ifmt=None,ofmt=None):
45 |     if ifmt is None:
46 |         ifmt="carp_txt"
47 | 
48 |     if ofmt is None:
49 |         ofmt="carp_txt"
50 |     cmd="timeout 30s "+meshtoolloc+" generate mesh -surf="+surf+" -outmsh="+ outmsh +" -ifmt="+ifmt+" -ofmt="+ofmt+" "
51 | 
52 |     if argv:  # *argv is always a tuple, so test for emptiness rather than comparing to None
53 |         for arg in argv:
54 |             cmd=cmd+arg+" "
55 | 
56 |     logger.debug(cmd)
57 |     return os.system(cmd) == 0
58 | 
59 | 
60 | def resample_mesh(meshtoolloc,msh,min,max,outmsh,ifmt=None,ofmt=None):
61 |     if ifmt is None:
62 |         ifmt="carp_txt"
63 | 
64 |     if ofmt is None:
65 |         ofmt="carp_txt"
66 | 
67 |     cmd=meshtoolloc+" resample mesh -msh="+msh+" -min="+min+" -max="+max+" -outmsh="+outmsh+" -ifmt="+ifmt+" -ofmt="+ofmt+" "
68 |     # resampling can take a while on fine meshes
69 | 
70 |     logger.debug(cmd)
71 |     return os.system(cmd) == 0
72 | 
73 | def query_edge(meshtoolloc,msh,ifmt=None,file="edgeinfo"):
74 |     if ifmt is None:
75 |         ifmt="carp_txt"
76 |     cmd =meshtoolloc+" query edges -msh="+msh+" -ifmt="+ifmt
77 | 
78 | 
79 |     logger.debug(cmd)
80 |     pipe = os.popen(cmd)
81 | 
82 |     # saving the output
83 |     output = pipe.read()
84 |     with open(file+'.txt', 'w') as f:
85 |         f.write(output)
86 | 
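    # os.popen(...).close() returns None when the child process exited with
    # status 0 and the encoded exit status otherwise, so the comparison below
    # doubles as a success check.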
87 | 
88 |     return pipe.close() is None
89 | 
90 | def extract_surf(meshtoolloc,msh,surf,op,*argv):
91 | 
92 |     cmd=meshtoolloc+" extract surface -msh="+msh+" -surf="+surf+" -op="+op+" "
93 |     if argv:  # *argv is always a tuple, so test for emptiness rather than comparing to None
94 |         for arg in argv:
95 |             cmd=cmd+arg+" "
96 | 
97 |     logger.debug(cmd)
98 |     return os.system(cmd) == 0
99 | 
100 | def extract_mesh(meshtoolloc,msh,tags,submsh,ifmt=None,ofmt=None):
101 | 
102 |     if ifmt is None:
103 |         ifmt="carp_txt"
104 | 
105 |     if ofmt is None:
106 |         ofmt="carp_txt"
107 | 
108 |     cmd=meshtoolloc+" extract mesh -msh="+msh+" -tags="+tags+" -submsh="+submsh+" -ifmt="+ifmt+" -ofmt="+ofmt+" "
109 | 
110 |     logger.debug(cmd)
111 |     return os.system(cmd) == 0
112 | 
113 | def map_file(meshtoolloc,submsh,file,outdir,mode=None):
114 |     #default is mesh to submesh
115 |     if mode is None:
116 |         mode="m2s"
117 |     cmd=meshtoolloc+" map -submsh="+submsh+" -files="+file+" -outdir="+outdir+" -mode="+mode
118 | 
119 |     logger.debug(cmd)
120 |     return os.system(cmd) == 0
121 | 
122 | def insert_submesh(meshtoolloc,submsh,msh,outmsh):
123 | 
124 |     cmd=meshtoolloc+" insert submesh -submsh="+submsh+" -msh="+msh+" -outmsh="+outmsh
125 | 
126 |     logger.debug(cmd)
127 |     return os.system(cmd) == 0
128 | 
129 | def generate_dfield(meshtoolloc,msh,ssurf,odat,*argv):
130 |     cmd=meshtoolloc+" generate distancefield -msh="+msh+" -ssurf="+ssurf+" -odat="+odat+" "
131 |     if argv:  # *argv is always a tuple, so test for emptiness rather than comparing to None
132 |         for arg in argv:
133 |             cmd=cmd+arg+" "
134 | 
135 |     logger.debug(cmd)
136 |     return os.system(cmd) == 0
137 | 
138 | def insert_data(meshtoolloc,submsh,submsh_data,msh,odat,mode):
139 |     #insert data: data defined on a submesh is inserted back into a mesh
140 |     #-mode= (optional) Data mode. 0 = nodal, 1 = element. Default is 0.
141 |     cmd=meshtoolloc+" insert data -submsh="+submsh+" -submsh_data="+submsh_data+" -msh="+msh+" -odat="+odat+" -mode="+mode
142 |     logger.debug(cmd)
143 |     return os.system(cmd) == 0
144 | 
145 | def extract_data(meshtoolloc,submsh,msh_data,submsh_data,mode):
146 | 
147 |     cmd=meshtoolloc+" extract data -submsh="+submsh+" -submsh_data="+submsh_data+" -msh_data="+msh_data+" -mode="+mode
148 |     logger.debug(cmd)
149 |     return os.system(cmd) == 0
150 | 
151 | def insert_fibers(meshtoolloc,submsh,submsh_data,msh,odat,mode):
152 |     #identical to insert_data; kept as a separately named helper for fibre fields
153 |     #-mode= (optional) Data mode. 0 = nodal, 1 = element. Default is 0.
154 |     cmd=meshtoolloc+" insert data -submsh="+submsh+" -submsh_data="+submsh_data+" -msh="+msh+" -odat="+odat+" -mode="+mode
155 |     logger.debug(cmd)
156 |     return os.system(cmd) == 0
157 | 
158 | def extract_gradient(meshtoolloc,msh,idat,odat,mode,ifmt=None):
159 |     #mode= (optional) output mode. 0 == nodal output, 1 == element output. 0 is default.
160 |     if ifmt is None:
161 |         ifmt="carp_txt"
162 | 
163 |     cmd=meshtoolloc+" extract gradient -msh="+msh+" -idat="+idat+" -odat="+odat+" -mode="+mode+" -ifmt="+ifmt
164 |     logger.debug(cmd)
165 |     # run the command once: the previous version called os.system(cmd) twice
166 |     return os.system(cmd) == 0
167 | 
168 | 
169 | def interpolate_elemdata(meshtoolloc,imsh,idat,omsh,odat):
170 |     #-omsh= (input) path to basename of the mesh we interpolate to
171 |     #-imsh= (input) path to basename of the mesh we interpolate from
172 | 
173 |     cmd=meshtoolloc+" interpolate elemdata -imsh="+imsh+" -idat="+idat+" -omsh="+omsh+" -odat="+odat
174 |     logger.debug(cmd)
175 |     # run the command once: the previous version called os.system(cmd) twice
176 |     return os.system(cmd) == 0
177 | 
178 | def collect_nodal(meshtoolloc, imsh, ifmt, omsh, ofmt, nod):
179 |     cmd = '{} collect -imsh={} -ifmt={} -omsh={} -ofmt={} -nod={}'.format(meshtoolloc, imsh, ifmt, omsh, ofmt, nod)
180 |     logger.debug(cmd)
181 |     return os.system(cmd) == 0
182 | 
--------------------------------------------------------------------------------
/src/volumetric/py_atrial_fibres.py:
--------------------------------------------------------------------------------
1 | import random
2 | 
3 | import numpy as np
4 | import pyvista as pv
5 | import vtk
6 | 
7 | def read_pts(filename):
8 |     return np.loadtxt(filename, dtype=float, skiprows=1)
9 | 
10 | def read_elem(filename):
11 |     return np.loadtxt(filename, dtype=int, skiprows=1, usecols=(1,2,3,4))
12 | 
13 | def read_lon(filename):
14 |     return np.loadtxt(filename, dtype=float, skiprows=1)
15 | 
16 | def carp_to_pyvista(meshname, stride=3, tube_radius=0.08, skip=0):
17 |     pts = read_pts(meshname + '.pts')
18 |     elem = read_elem(meshname + '.elem')
19 |     print(elem.shape[0])  # element count before subsampling
20 |     elem = elem if skip == 0 else elem[::skip, :]
21 |     print(elem.shape[0])  # element count after subsampling
22 | 
23 |     tets = np.column_stack((np.ones((elem.shape[0],), dtype=int) * 4, elem)).flatten()
24 |     cell_type = np.ones((elem.shape[0],), dtype=int) * vtk.VTK_TETRA
25 | 
26 |     plt_msh = pv.UnstructuredGrid(tets, cell_type, pts)
27 | 
28 |     lon = read_lon(meshname + ".lon")
29 |     lon = lon if skip == 0 else lon[::skip, :]
30 |     fibres = lon[:, :3]
31 | 
32 |     nelem = lon.shape[0]
33 |     nelem_nofibres = int(nelem * (1 - 1. / stride))
34 |     exclude = random.sample(range(0, nelem), nelem_nofibres)
35 | 
36 |     fibres[exclude, :] = np.zeros((nelem_nofibres, 3), dtype=float)
37 | 
38 |     plt_msh["fibres"] = fibres
39 | 
40 |     line = pv.Line()
41 |     glyphs = plt_msh.glyph(orient='fibres', scale=True, factor=2000.0, geom=line.tube(radius=tube_radius))
42 | 
43 |     return glyphs
44 | 
--------------------------------------------------------------------------------
/src/volumetric/transmural.par:
--------------------------------------------------------------------------------
1 | num_stim = 4
2 | 
3 | bidomain = 1
4 | 
5 | tend = 1
6 | 
7 | stimulus[0].stimtype = 3
8 | 
9 | stimulus[1].stimtype = 3
10 | 
11 | stimulus[2].stimtype = 2
12 | stimulus[2].strength = 1
13 | stimulus[2].duration = 1
14 | 
15 | stimulus[3].stimtype = 2
16 | stimulus[3].strength = 1
17 | stimulus[3].duration = 1
18 | 
19 | 
20 | stimulus[0].stimtype = 3
21 | 
22 | timedt = 1.0
23 | spacedt = 1.0
24 | 
25 | dt = 20
26 | vm_per_phie = 1
27 | parab_solve = 1
28 | cg_tol_ellip = 2.8e-6
29 | 
30 | num_gregions = 1
31 | 
32 | gregion[0].num_IDs = 1
33 | gregion[0].ID[0] = 0
34 | gregion[0].g_il = 1
35 | gregion[0].g_it = 1
36 | gregion[0].g_el = 1
37 | gregion[0].g_et = 1
38 | 
39 | 
40 | 
--------------------------------------------------------------------------------
/src/volumetric/v_mesh_generation.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Thu Jul 28 10:31:59 2022
5 | 
6 | @author: sq20
7 | """
8 | 
9 | import pandas as pd
10 | import numpy as np
11 | import os
12 | import glob
13 | import math
14 | import random
15 | import shutil
16 | 
17 | from . import carpfunc
18 | from . import meshIO
19 | from . import meshtool_func as meshtool
20 | 
21 | def generate_valve(meshtoolloc,input_dir,folder,Valvename,endoname,outmsh,bdry_step,ptindex):
22 |     #Valvename: the valve to generate a mesh for, str
23 |     #endoname: the endo surface where the target valve is located, str
24 |     #outmsh: the name of the valve mesh, str
25 |     #bdry_step: the distance of the lower plane from the original valve plane, typically 0.2-0.5
26 |     #ptindex: the row of et_vertex_start_end below that selects the valve points in the
27 |     #         original .pts (5810 points in total); it is also reused as the element tag value
28 |     if not os.path.exists(folder+'/'+outmsh):
29 |         os.mkdir(folder+'/'+outmsh)
30 |     os.chdir(folder+'/'+outmsh)
31 | 
32 |     if not meshtool.convert_mesh(meshtoolloc,input_dir+"/"+Valvename,Valvename,ifmt="vtk",ofmt=None):
33 |         return False
34 |     pts = meshIO.read_pts(basename=Valvename, file_pts=None)
35 | 
36 |     #to find the middle point in the lower plane of valve
37 |     volumetricname=Valvename+"_volumetricmesh"
38 |     if not meshtool.generate_mesh(meshtoolloc,Valvename,volumetricname,'-bdry_layers=-1','-bdry_step='+bdry_step):
39 |         return False
40 | 
41 | 
42 |     ptsvolume = meshIO.read_pts(basename=volumetricname, file_pts=None)
43 |     indexofnotnan=np.empty((0,1),int)
44 |     for i in range(len(pts.values),len(ptsvolume.values)):
45 |         if not math.isnan(ptsvolume.values[i,1]):
46 |             indexofnotnan=np.append(indexofnotnan,[i])
47 | 
48 |     #endo mesh reading in:
49 |     if not meshtool.convert_mesh(meshtoolloc,input_dir+"/"+endoname,endoname,ifmt="vtk",ofmt=None):
50 |         return False
51 | 
52 |     #add the middle point in the lower plane of valve to the original .pts file
53 |     pts_LVendo=meshIO.read_pts(basename=endoname)
54 | 
55 |     pts_LVendo1= np.append(pts_LVendo.values,[ptsvolume.iloc[indexofnotnan[len(indexofnotnan)-1],:]],axis=0)
56 |     #add middle point in the lower plane of valve to the .pts
57 | 
58 |     meshIO.write_pts(ptsFilename="LV_endo_with_lowplane"+Valvename, pts=pd.DataFrame(pts_LVendo1))
59 | 
60 |     et_vertex_start_end = np.array(
61 |         [[0, 1499], [1500, 2164], [2165, 3223], [3224, 5581],
62 |          [5582, 5630], [5631, 5655], [5656, 5696], [5697, 5729],
63 |          [5730, 5809]])
64 |     '''Class constant, surface index limits for vertices `et_pos`.
65 |     Surfaces are defined in the following order:
66 | 
67 |     LV_ENDOCARDIAL = 0
68 |     RV_SEPTUM = 1
69 |     RV_FREEWALL = 2
70 |     EPICARDIAL = 3
71 |     MITRAL_VALVE = 4
72 |     AORTA_VALVE = 5
73 |     TRICUSPID_VALVE = 6
74 |     PULMONARY_VALVE = 7
75 |     RV_INSERT = 8
76 |     '''
77 |     elem_LVendo=meshIO.read_elem(basename=endoname,file_elem=None)
78 | 
79 |     start_idx= et_vertex_start_end[ptindex][0] #first valve point (e.g. mitral valve for ptindex=4)
80 |     end_idx= et_vertex_start_end[ptindex][1]
81 |     index=list(range(start_idx,end_idx+1)) #+1: the upper bounds in et_vertex_start_end are inclusive
82 |     N=np.where(np.isin(elem_LVendo,index))[0] #find index for elements connected to valve plane
83 |     unique, counts = np.unique(N, return_counts=True) #unique is the value and counts is how many times the value appears
84 |     for i in range(len(unique)):
85 |         if counts[i]==1:
86 |             if np.isin(elem_LVendo.iloc[unique[i],1],index):
87 |                 elem_LVendo.iloc[unique[i],1]=5810
88 |             if np.isin(elem_LVendo.iloc[unique[i],2],index):
89 |                 elem_LVendo.iloc[unique[i],2]=5810
90 |             if np.isin(elem_LVendo.iloc[unique[i],3],index):
91 |                 elem_LVendo.iloc[unique[i],3]=5810
92 | 
93 |     volumetricname=Valvename+"_volumetricmesh"
94 | 
95 | 
96 |     meshIO.write_elem("LV_endo_with_lowplane"+Valvename,elem_LVendo,shapes="Tr")
97 | 
98 |     if not meshtool.convert_mesh(meshtoolloc,"LV_endo_with_lowplane"+Valvename,"LV_endowithlowplane"+Valvename,ofmt="vtk"):
99 |         return False
100 |     if not meshtool.merge_mesh(meshtoolloc,"LV_endowithlowplane"+Valvename,input_dir+"/"+Valvename,"merge1",ifmt="vtk",ofmt="vtk"):
101 |         return False
102 |     if not meshtool.merge_mesh(meshtoolloc,"merge1",input_dir+"/"+endoname,"merge"+Valvename,ifmt="vtk",ofmt="vtk"):
103 |         return False
104 |     if not meshtool.generate_mesh(meshtoolloc,"merge"+Valvename,outmsh,ifmt="vtk",ofmt="vtk"):
105 |         return False
106 |     if not meshtool.convert_mesh(meshtoolloc,outmsh,outmsh,ifmt="vtk"):
107 |         return False
108 | 
109 |     #retag the valve mesh to new tag
110 |     elem=meshIO.read_elems(basename=outmsh,file_elem=None)
111 |     elem.insert(5,'tag',ptindex)
112 |     #print (elem.head)
113 |     elem = elem.drop(5, axis=1) # axis 1 drops columns, 0 will drop rows that match index value in labels
114 |     meshIO.write_elem(outmsh, pd.DataFrame(elem),shapes="Tt")
115 | 
116 |     if not meshtool.convert_mesh(meshtoolloc,outmsh,outmsh,ofmt="vtk"):
117 |         return False
118 | 
119 |     print("new mesh is saved as ", outmsh)
120 |     os.chdir(folder)
121 | 
122 |     return True
123 | 
124 | def merge_resample(meshtoolloc,input_dir,folder,LV_endo,RV_FW,RV_septum,epi,Valves,min,max,outmsh):
125 |     if not os.path.exists(outmsh):
126 |         os.mkdir(outmsh)
127 |     os.chdir(outmsh)
128 |     #merge all myo surfaces and generate mesh
129 |     if not meshtool.merge_mesh(meshtoolloc,input_dir+"/"+LV_endo,input_dir+"/"+RV_FW,"M1",ifmt="vtk",ofmt="vtk"):
130 |         return False
131 |     if not meshtool.merge_mesh(meshtoolloc,"M1",input_dir+"/"+RV_septum,"M2",ifmt="vtk",ofmt="vtk"):
132 |         return False
133 |     if not meshtool.merge_mesh(meshtoolloc,"M2",input_dir+"/"+epi,"M3",ifmt="vtk",ofmt="vtk"):
134 |         return False
135 |     if not meshtool.generate_mesh(meshtoolloc,"M3","M3_volume",ifmt="vtk",ofmt="vtk"):
136 |         return False
137 |     if not meshtool.convert_mesh(meshtoolloc,"M3_volume","M3_volume",ifmt="vtk"):
138 |         return False
139 | 
140 |     #merge myo mesh with valve meshes
141 |     if not meshtool.merge_mesh(meshtoolloc,"M3_volume",folder+'/'+Valves[0]+'/'+Valves[0],"M_valve1",ifmt="vtk",ofmt="vtk"):
142 |         return False
143 |     if not meshtool.merge_mesh(meshtoolloc,"M_valve1",folder+'/'+Valves[1]+'/'+Valves[1],"M_valve12",ifmt="vtk",ofmt="vtk"):
144 |         return False
145 |     if not meshtool.merge_mesh(meshtoolloc,"M_valve12",folder+'/'+Valves[2]+'/'+Valves[2],"M_valve123",ifmt="vtk",ofmt="vtk"):
146 |         return False
147 |     if not meshtool.merge_mesh(meshtoolloc,"M_valve123",folder+'/'+Valves[3]+'/'+Valves[3],"M_valve1234",ifmt="vtk",ofmt="vtk"):
148 |         return False
149 | 
150 |     #resample all mesh and save edge lengths in "edgeinfo.txt"
151 |     if not meshtool.resample_mesh(meshtoolloc,"M_valve1234",min,max,outmsh,ifmt="vtk",ofmt="vtk"):
152 |         return False
153 |     if not meshtool.convert_mesh(meshtoolloc,outmsh,outmsh,ifmt="vtk"):
154 |         return False
155 |     if not meshtool.query_edge(meshtoolloc,outmsh,file="edgeinfo"):
156 |         return False
157 | 
158 |     if not meshtool.extract_mesh(meshtoolloc,outmsh,"0",outmsh+"_i"):
159 |         return False
160 | 
161 |     #rewrite the lon file to (1,0,0) or the Laplace solver won't run
162 |     lon=meshIO.read_fibres(basename=outmsh+"_i",file_lon=None)
163 |     lon.insert(0,'x_axis',1)
164 |     #print (lon.head)
165 |     lonnew = lon.drop(2, axis=1) # axis 1 drops columns, 0 will drop rows that match index value in labels
166 |     meshIO.write_lon(outmsh+"_i", pd.DataFrame(lonnew))
167 |     if not meshtool.convert_mesh(meshtoolloc,outmsh+"_i",outmsh+"_i",ofmt="vtk"):
168 |         return False
169 | 
170 |     os.chdir(folder)
171 | 
172 |     return True
173 | 
174 | 
175 | 
176 | def extract_surfacenolabel(meshtoolloc,mesh, pts5810,surfmesh,input_dir,RV_septum):
177 |     #input mesh with labels as RV and LV:0, valves:4,5,6,7
178 |     #input the original pts file for finding coords in LV_endo, RV_endo and RV_septum
179 |     #output is 3 surf: LV_endo, RV_endo and RV_septum
180 |     if not os.path.exists(surfmesh):
181 |         os.mkdir(surfmesh)
182 |     os.chdir(surfmesh)
183 | 
184 |     et_vertex_start_end = np.array(
185 |         [[0, 1499], [1500, 2164], [2165, 3223], [3224, 5581],
186 |          [5582, 5630], [5631, 5655], [5656, 5696], [5697, 5729],
187 |          [5730, 5809]])
188 |     '''Class constant, surface index limits for vertices `et_pos`.
189 |     Surfaces are defined in the following order:
190 | 
191 |     LV_ENDOCARDIAL = 0
192 |     RV_SEPTUM = 1
193 |     RV_FREEWALL = 2
194 |     EPICARDIAL = 3
195 |     MITRAL_VALVE = 4
196 |     AORTA_VALVE = 5
197 |     TRICUSPID_VALVE = 6
198 |     PULMONARY_VALVE = 7
199 |     RV_INSERT = 8
200 |     '''
201 | 
202 |     LVendo=et_vertex_start_end[0][0]
203 |     LVendocoord=str(pts5810.iloc[LVendo,0])+","+str(pts5810.iloc[LVendo,1])+","+str(pts5810.iloc[LVendo,2])
204 |     RVendo=et_vertex_start_end[2][0]
205 |     RVendocoord=str(pts5810.iloc[RVendo,0])+","+str(pts5810.iloc[RVendo,1])+","+str(pts5810.iloc[RVendo,2])
206 | 
207 | 
208 |     #extract LV_endo and RV_endo. For RV_endo, both carp_txt and .surf are written out
209 |     meshtool.extract_surf(meshtoolloc,mesh,"LV_endo","0-4,5,6,7","-coord="+LVendocoord)
210 |     meshtool.extract_surf(meshtoolloc,mesh,"RV_endo","0-4,5,6,7","-coord="+RVendocoord,'-ofmt=carp_txt')
211 | 
212 | 
213 |     # RV_septum mapping from low res to high res. Credit to Martin Bishop.
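    # The mapping below works in three steps: (1) paint a dummy field of ones
    # onto the low-resolution septal surface, (2) interpolate that field onto
    # the high-resolution RV endocardial surface mesh, and (3) threshold the
    # interpolated field (> 0.9 here) to recover the septal triangles at high
    # resolution.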
214 |     #create RV_septum from original surface mesh in vtk to carp_txt
215 |     meshtool.convert_mesh(meshtoolloc, input_dir+'/'+RV_septum, RV_septum,ifmt='vtk')
216 | 
217 |     #generate a dummy element data file of ones for this septal surface mesh
218 |     elems=meshIO.read_elem(RV_septum)
219 |     data=np.ones([len(elems),1])
220 |     np.savetxt('RV_septum_elemdata.dat',data)
221 | 
222 | 
223 |     #interpolates elemdata across to high res mesh
224 |     omsh='RV_endo.surfmesh'
225 |     meshtool.interpolate_elemdata(meshtoolloc,RV_septum,'RV_septum_elemdata.dat',omsh,'rv_endo_septdata.dat')
226 | 
227 | 
228 |     #thresholds interpolated datafield to define septal surface on highres mesh
229 |     #extract surface from data
230 | 
231 |     surf=meshIO.read_surf('RV_endo')
232 | 
233 |     data= np.loadtxt('rv_endo_septdata.dat')
234 |     thr=0.9
235 |     Septum_surf_hre=surf[data>thr]
236 |     meshIO.write_surf('RV_septumfinal',Septum_surf_hre)
237 |     surf_vtx=np.unique(Septum_surf_hre)
238 |     meshIO.write_vtx_File(vtxFilename='RV_septumfinal.surf', vtx=pd.DataFrame(surf_vtx))
239 | 
240 |     #extract RV free wall as RV endo - RV septum
241 |     meshtool.extract_surf(meshtoolloc,mesh,"RV_endo_FW","RV_endo-RV_septumfinal")
242 | 
243 |     biv_surf_i="surf_i"
244 |     if not os.path.exists(biv_surf_i):
245 |         os.mkdir(biv_surf_i)
246 |     if not meshtool.map_file(meshtoolloc,mesh+"_i","*.surf",biv_surf_i):
247 |         return None
248 |     if not meshtool.map_file(meshtoolloc,mesh+"_i","*.vtx",biv_surf_i):
249 |         return None
250 |     biv_surf_i="/surf_i/"
251 | 
252 |     return [surfmesh+biv_surf_i+"/LV_endo",surfmesh+biv_surf_i+"/RV_septumfinal",surfmesh+biv_surf_i+"/RV_endo_FW"]
253 | 
254 | def split_RVLV(meshtoolloc,MPIEXEC,OPTS,CARP,parfile,mesh_nolabel,SIMID,surf_endo,IGBEXTRACT,thres,outmsh):
255 | 
256 |     #input SIMID: result from Laplace
257 |     #mesh_nolabel: resampled mesh dir plus the name
258 |     #thres: define LV and RV boundary, typically 0.7
259 |     #parfile: the location of the .par file for the Laplace solver
260 |     #surf_endo: the locations of the endocardial surfaces in the resampled mesh
261 |     #output the new labeled .elem file.
262 |     #Labels: RV:0; LV:1; Valves:10-13
263 | 
264 |     args="-stimulus[0].vtx_file"+" "+surf_endo[0]+".surf -stimulus[1].vtx_file"+" "+surf_endo[1]+".surf -stimulus[2].vtx_file"+" "+surf_endo[2]+".surf"
265 |     carpfunc.Laplace_solver(MPIEXEC,OPTS,CARP,parfile,mesh_nolabel,SIMID,args)
266 | 
267 |     gradMag=carpfunc.igb_todata(IGBEXTRACT,SIMID)
268 | 
269 |     elem=meshIO.read_elems(basename=mesh_nolabel)
270 |     #thres=0.51
271 |     print("Start splitting...")
272 |     split = elem[[1, 2, 3, 4]].applymap(lambda x: x < len(gradMag) and gradMag[x] < thres)
273 |     elem[5] = split.any(axis=1).map(int)
274 |     #elem.loc[elem[5]==0,5]=0 #RV:0
275 |     outmsh=mesh_nolabel+"_split" #derived output name (overrides the outmsh argument)
276 |     meshIO.write_elem(elemFilename=outmsh, elem=elem,shapes="Tt")
277 |     print("End splitting!")
278 |     shutil.copyfile(mesh_nolabel+".lon", outmsh+".lon")
279 |     shutil.copyfile(mesh_nolabel+".pts", outmsh+".pts")
280 |     shutil.copyfile(mesh_nolabel+".nod", outmsh+".nod")
281 |     shutil.copyfile(mesh_nolabel+".eidx", outmsh+".eidx")
282 | 
283 |     if not meshtool.convert_mesh(meshtoolloc,outmsh,outmsh,ofmt="vtk"):
284 |         return None
285 | 
286 |     print("Split LV and RV mesh is "+ outmsh)
287 | 
288 |     return outmsh
289 | 
--------------------------------------------------------------------------------
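
For orientation, below is a minimal, illustrative sketch of how the meshIO helpers above can be driven from a standalone script. It is not part of the repository: the mesh basename is a placeholder, and the imports assume the script is run from inside src/volumetric (the package itself uses relative imports).

# Illustrative usage sketch (not a repository file). Assumes the working
# directory is src/volumetric and that /path/to/mesh.pts exists.
import pandas as pd

import meshIO

# Read a CARP points file into a DataFrame (one x/y/z row per node) ...
pts = meshIO.read_pts(basename="/path/to/mesh")

# ... shift every node along x (pandas broadcasts the 3-element list
# column-wise across the x/y/z columns) ...
pts = pts + [10.0, 0.0, 0.0]

# ... and write the result back out with the node-count header restored.
meshIO.write_pts(ptsFilename="/path/to/mesh_shifted", pts=pts)

# To visualise fibre glyphs with py_atrial_fibres (needs a display or
# off-screen rendering), something like the following could be used:
# import pyvista as pv
# from py_atrial_fibres import carp_to_pyvista
# glyphs = carp_to_pyvista("/path/to/mesh", stride=3)
# pv.plot(glyphs)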