├── .github
└── workflows
│ ├── publish.yml
│ └── unit-tests.yml
├── .gitignore
├── LICENSE.txt
├── README.rst
├── dicom_numpy
├── __init__.py
├── combine_slices.py
├── exceptions.py
├── version.py
└── zip_archive.py
├── docs
├── Makefile
├── make.bat
└── source
│ ├── conf.py
│ └── index.rst
├── setup.cfg
├── setup.py
├── tests
├── __init__.py
├── conftest.py
├── create_golden_values.py
├── data-citation.txt
├── dupe-positions.zip
├── golden_values.npz
├── test_combine_from_zip.py
├── test_combine_slices.py
└── test_dicom.zip
└── tox.ini
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish
2 | on:
3 | push:
4 | tags:
5 | - "v*"
6 | jobs:
7 | publish:
8 | name: Publish to PyPI
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v2
12 | - name: Set up Python 3.8
13 | uses: actions/setup-python@v1
14 | with:
15 | python-version: 3.8
16 | - name: Install pypa/build
17 | run: python -m pip install build --user
18 | - name: Build a binary wheel and a source tarball
19 | run: python -m build --sdist --wheel --outdir dist/ .
20 | - name: Publish distribution to Test PyPI
21 | uses: pypa/gh-action-pypi-publish@release/v1
22 | with:
23 | user: __token__
24 | password: ${{ secrets.TEST_PYPI_API_TOKEN }}
25 | repository_url: https://test.pypi.org/legacy/
26 | - name: Publish distribution to PyPI
27 | uses: pypa/gh-action-pypi-publish@release/v1
28 | with:
29 | user: __token__
30 | password: ${{ secrets.PYPI_API_TOKEN }}
31 |
--------------------------------------------------------------------------------
/.github/workflows/unit-tests.yml:
--------------------------------------------------------------------------------
1 | name: Unit tests
2 |
3 | on: [push]
4 |
5 | jobs:
6 | build:
7 | runs-on: ubuntu-latest
8 | strategy:
9 | matrix:
10 | python-version:
11 | - "3.7"
12 | - "3.8"
13 | - "3.9"
14 | - "3.10"
15 | - "3.11"
16 |
17 | steps:
18 | - uses: actions/checkout@v2
19 | - name: Setup Python
20 | uses: actions/setup-python@v4
21 | with:
22 | python-version: "${{ matrix.python-version }}"
23 | - name: Install Tox and build dependencies
24 | run: pip install tox tox-gh-actions
25 | - name: Run Tox
26 | # Run tox using the version of Python in `PATH`
27 | run: tox
28 |
29 | package:
30 | name: Check that binary and source distributions can be successfully formed
31 | runs-on: ubuntu-latest
32 | steps:
33 | - uses: actions/checkout@v2
34 | - name: Setup Python
35 | uses: actions/setup-python@v2
36 | with:
37 | python-version: 3.8
38 | - name: Install pypa/build
39 | run: python -m pip install build --user
40 | - name: Build a binary wheel and a source tarball
41 | run: python -m build --sdist --wheel --outdir dist/ .
42 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # general things to ignore
2 | build/
3 | dist/
4 | *.egg-info/
5 | *.egg
6 | *.py[cod]
7 | __pycache__/
8 | *.so
9 | *~
10 | .tox
11 |
12 | # due to using tox and pytest
13 | .cache
14 |
15 | # JetBrains (PyCharm)
16 | .idea/
17 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2017-2021 Innolitics, LLC.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of
4 | this software and associated documentation files (the "Software"), to deal in
5 | the Software without restriction, including without limitation the rights to
6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
7 | of the Software, and to permit persons to whom the Software is furnished to do
8 | so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 | SOFTWARE.
20 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
.. image:: https://github.com/innolitics/dicom-numpy/actions/workflows/unit-tests.yml/badge.svg
   :target: https://github.com/innolitics/dicom-numpy/actions/workflows/unit-tests.yml
3 |
4 | ===========
5 | DICOM Numpy
6 | ===========
7 |
See `Our Documentation <https://dicom-numpy.readthedocs.io/>`_.
9 |
--------------------------------------------------------------------------------
/dicom_numpy/__init__.py:
--------------------------------------------------------------------------------
"""Public API of the dicom_numpy package.

Re-exports the slice-combining helpers and exception types so users can
simply ``import dicom_numpy``.
"""
from .combine_slices import combine_slices, sort_by_slice_position, sort_by_instance_number
from .exceptions import DicomImportException, MissingInstanceNumberException
from .version import __version__

# Names exported by `from dicom_numpy import *`.
__all__ = [
    'combine_slices',
    'sort_by_instance_number',
    'sort_by_slice_position',
    'DicomImportException',
    'MissingInstanceNumberException',
    '__version__'
]
13 |
--------------------------------------------------------------------------------
/dicom_numpy/combine_slices.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from math import isclose
3 |
4 | import numpy as np
5 |
6 | from .exceptions import DicomImportException, MissingInstanceNumberException
7 |
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 |
def combine_slices(
    datasets,
    rescale=None,
    enforce_slice_spacing=True,
    sort_by_instance=False,
    skip_sorting=False,
    c_order_axes=False,
):
    """
    Given a list of pydicom datasets for an image series, stitch them together into a
    three-dimensional numpy array. Also calculate a 4x4 affine transformation
    matrix that converts the ijk-pixel-indices into the xyz-coordinates in the
    DICOM patient's coordinate system.

    Returns a two-tuple containing the 3D-ndarray and the affine matrix.

    If `rescale` is set to `None` (the default), then the image array dtype
    will be preserved, unless any of the DICOM images contain either the
    Rescale Slope (0028,1053) or the Rescale Intercept (0028,1052)
    attributes. If either of these attributes are present, they will be
    applied to each slice individually.

    If `rescale` is `True` the voxels will be cast to `float32`, if set to
    `False`, the original dtype will be preserved even if DICOM rescaling
    information is present.

    If `enforce_slice_spacing` is set to `True`, `combine_slices` will raise a
    `DicomImportException` if there are missing slices detected in the
    datasets. If `enforce_slice_spacing` is set to `False`, missing slices will
    be ignored.

    If `sort_by_instance` is set to `False`, `combine_slices` will sort the
    image instances by position along the slice axis in increasing order. This
    is the default for backwards-compatibility reasons. If `True`, the image
    instances will be sorted according to decreasing `InstanceNumber`. If
    images in the series do not have an `InstanceNumber` and `sort_by_instance`
    is `True`, a `MissingInstanceNumberException` will be raised.

    If `skip_sorting` is set to `True`, `combine_slices` will not attempt to
    sort the slices. This can be useful if the volume must be ordered on other
    tags besides slice position or instance number. This overrides any value
    passed to `sort_by_instance`.

    If `c_order_axes` is set to `True`, the returned array will have its axes
    returned in the order of `(k, j, i)` rather than `(i, j, k)`, so that each
    slice remains contiguous in memory under C (row-major) ordering. By
    default, the axes are kept as `(i, j, k)` and the array is stored using
    Fortran (column-major) ordering; some serialization libraries that require
    C-ordering, such as HDF5, may need `c_order_axes=True`.

    Datasets produced by reading DICOMDIR files are ignored.

    This function requires that the datasets:

    - Be in same series (have the same Series Instance UID (0020,000E),
      Modality (0008,0060), and SOP Class UID (0008,0016)).
    - The binary storage of each slice must be the same (have the same
      Bits Allocated (0028,0100) and Pixel Representation (0028,0103)).
    - The image slice must approximately form a grid. This means there can not
      be any missing internal slices (missing slices on the ends of the dataset
      are not detected). This requirement is relaxed if `enforce_slice_spacing`
      is set to `False`.
    - Each slice must have the same Rows (0028,0010), Columns (0028,0011),
      Samples Per Pixel (0028,0002), Pixel Spacing (0028,0030), and
      Image Orientation (Patient) (0020,0037) attribute values.
    - The direction cosines derived from the Image Orientation (Patient)
      attribute must, within 1e-4, have a magnitude of 1. The cosines must
      also be approximately perpendicular (their dot-product must be within
      1e-4 of 0). Warnings are displayed if any of these approximations are
      below 1e-8, however, since we have seen real datasets with values up to
      1e-4, we let them pass.
    - The Image Position (Patient) (0020,0032) values must approximately form
      a line.

    If any of these conditions are not met, a `dicom_numpy.DicomImportException` is raised.
    """
    # DICOMDIR datasets carry no pixel data and are silently dropped.
    image_datasets = [ds for ds in datasets if not _is_dicomdir(ds)]

    if not image_datasets:
        raise DicomImportException("Must provide at least one image DICOM dataset")

    # `skip_sorting` takes precedence over `sort_by_instance`.
    if skip_sorting:
        ordered_datasets = image_datasets
    elif sort_by_instance:
        ordered_datasets = sort_by_instance_number(image_datasets)
    else:
        ordered_datasets = sort_by_slice_position(image_datasets)

    _validate_slices_form_uniform_grid(ordered_datasets, enforce_slice_spacing=enforce_slice_spacing)

    voxels = _merge_slice_pixel_arrays(ordered_datasets, rescale, c_order_axes=c_order_axes)
    transform = _ijk_to_patient_xyz_transform_matrix(ordered_datasets)

    return voxels, transform
118 |
119 |
def sort_by_instance_number(slice_datasets):
    """
    Given a list of pydicom Datasets, return the datasets sorted by instance
    number in the image orientation direction.

    This does not require `pixel_array` to be present, and so may be used to
    associate instance Datasets with the voxels returned from `combine_slices`.

    Raises MissingInstanceNumberException if any dataset lacks `InstanceNumber`.
    """
    instance_numbers = [getattr(ds, 'InstanceNumber', None) for ds in slice_datasets]
    if any(number is None for number in instance_numbers):
        raise MissingInstanceNumberException

    # Decorate-sort-undecorate; the key avoids ever comparing datasets to each
    # other. Sorted in reverse so the stack runs in the direction of the
    # increasing slice axis.
    decorated = sorted(
        zip(instance_numbers, slice_datasets),
        key=lambda pair: int(pair[0]),
        reverse=True,
    )
    return [dataset for _, dataset in decorated]
140 |
141 |
def sort_by_slice_position(slice_datasets):
    """
    Given a list of pydicom Datasets, return the datasets sorted in the image orientation direction.

    This does not require `pixel_array` to be present, and so may be used to associate instance Datasets
    with the voxels returned from `combine_slices`.
    """
    positions = _slice_positions(slice_datasets)
    # Sort indices by projected position; the key avoids comparing datasets
    # directly (they may not define ordering). Stable, like the original sort.
    order = sorted(range(len(slice_datasets)), key=lambda index: positions[index])
    return [slice_datasets[index] for index in order]
156 |
157 |
158 | def _is_dicomdir(dataset):
159 | media_sop_class = getattr(dataset, 'MediaStorageSOPClassUID', None)
160 | return media_sop_class == '1.2.840.10008.1.3.10'
161 |
162 |
163 | def _merge_slice_pixel_arrays(sorted_datasets, rescale=None, c_order_axes=False):
164 | if rescale is None:
165 | rescale = any(_requires_rescaling(d) for d in sorted_datasets)
166 |
167 | first_dataset = sorted_datasets[0]
168 | slice_dtype = first_dataset.pixel_array.dtype
169 | num_slices = len(sorted_datasets)
170 | voxels_dtype = np.float32 if rescale else slice_dtype
171 |
172 | if c_order_axes:
173 | slice_shape = first_dataset.pixel_array.shape
174 | voxels_shape = (num_slices,) + slice_shape
175 | voxels = np.empty(voxels_shape, dtype=voxels_dtype)
176 | else:
177 | slice_shape = first_dataset.pixel_array.T.shape
178 | voxels_shape = slice_shape + (num_slices,)
179 | voxels = np.empty(voxels_shape, dtype=voxels_dtype, order='F')
180 |
181 | for k, dataset in enumerate(sorted_datasets):
182 | pixel_array = dataset.pixel_array if c_order_axes else dataset.pixel_array.T
183 | if rescale:
184 | slope = float(getattr(dataset, 'RescaleSlope', 1))
185 | intercept = float(getattr(dataset, 'RescaleIntercept', 0))
186 | pixel_array = pixel_array.astype(np.float32) * slope + intercept
187 | if c_order_axes:
188 | voxels[k, ...] = pixel_array
189 | else:
190 | voxels[..., k] = pixel_array
191 |
192 | return voxels
193 |
194 |
195 | def _requires_rescaling(dataset):
196 | return hasattr(dataset, 'RescaleSlope') or hasattr(dataset, 'RescaleIntercept')
197 |
198 |
def _ijk_to_patient_xyz_transform_matrix(sorted_datasets):
    """
    Build the 4x4 affine matrix mapping (i, j, k) voxel indices to (x, y, z)
    coordinates in the DICOM patient coordinate system.

    All geometry comes from the first (sorted) dataset, except the slice
    spacing, which is estimated across all datasets.
    """
    first_dataset = sorted_datasets[0]
    image_orientation = first_dataset.ImageOrientationPatient
    row_cosine, column_cosine, slice_cosine = _extract_cosines(image_orientation)

    # Pixel Spacing (0028,0030) is ordered (row spacing, column spacing),
    # i.e. (distance between rows, distance between columns) — see DICOM
    # PS3.3 C.7.6.2.1.1.
    row_spacing, column_spacing = first_dataset.PixelSpacing
    slice_spacing = _slice_spacing(sorted_datasets)

    transform = np.identity(4, dtype=np.float32)

    # Incrementing i steps along the row direction by the spacing between
    # columns; incrementing j steps along the column direction by the spacing
    # between rows. This matches the DICOM image-plane equation.
    transform[:3, 0] = row_cosine * column_spacing
    transform[:3, 1] = column_cosine * row_spacing
    transform[:3, 2] = slice_cosine * slice_spacing

    # Origin: position of the first sorted slice's first transmitted voxel.
    transform[:3, 3] = first_dataset.ImagePositionPatient

    return transform
216 |
217 |
def _validate_slices_form_uniform_grid(sorted_datasets, enforce_slice_spacing=True):
    """
    Perform various data checks to ensure that the list of slices forms an
    evenly-spaced grid of data. Optionally, this can be slightly relaxed to
    allow for missing slices in the volume.

    Some of these checks are probably not required if the data follows the
    DICOM specification, however it seems pertinent to check anyway.
    """
    # These attributes must match exactly across every slice in the series.
    for property_name in (
        'Modality',
        'SOPClassUID',
        'SeriesInstanceUID',
        'Rows',
        'Columns',
        'SamplesPerPixel',
        'PixelSpacing',
        'PixelRepresentation',
        'BitsAllocated',
    ):
        _slice_attribute_equal(sorted_datasets, property_name)

    _validate_image_orientation(sorted_datasets[0].ImageOrientationPatient)
    # Orientation cosines are floating point, so compare with a tolerance.
    _slice_ndarray_attribute_almost_equal(sorted_datasets, 'ImageOrientationPatient', 1e-5)

    if enforce_slice_spacing:
        _check_for_missing_slices(_slice_positions(sorted_datasets))
248 |
249 |
def _validate_image_orientation(image_orientation):
    """
    Ensure that the image orientation is supported
    - The direction cosines have magnitudes of 1 (just in case)
    - The direction cosines are perpendicular
    Raises DicomImportException past 1e-4; logs a warning past 1e-8.
    """
    row_cosine, column_cosine, slice_cosine = _extract_cosines(image_orientation)

    dot_product = np.dot(row_cosine, column_cosine)
    if not _almost_zero(dot_product, 1e-4):
        raise DicomImportException(f"Non-orthogonal direction cosines: {row_cosine}, {column_cosine}")
    if not _almost_zero(dot_product, 1e-8):
        logger.warning(f"Direction cosines aren't quite orthogonal: {row_cosine}, {column_cosine}")

    # Same unit-magnitude check for both in-plane direction cosines.
    for label, cosine in (('row', row_cosine), ('column', column_cosine)):
        magnitude = np.linalg.norm(cosine)
        if not _almost_one(magnitude, 1e-4):
            raise DicomImportException(f"The {label} direction cosine's magnitude is not 1: {cosine}")
        if not _almost_one(magnitude, 1e-8):
            logger.warning(f"The {label} direction cosine's magnitude is not quite 1: {cosine}")
272 |
273 |
def _almost_zero(value, abs_tol):
    """Return True when `value` is within `abs_tol` of zero (absolute tolerance only)."""
    return isclose(value, 0.0, abs_tol=abs_tol)
276 |
277 |
def _almost_one(value, abs_tol):
    """Return True when `value` is within `abs_tol` of one."""
    return isclose(value, 1.0, abs_tol=abs_tol)
280 |
281 |
282 | def _extract_cosines(image_orientation):
283 | row_cosine = np.array(image_orientation[:3])
284 | column_cosine = np.array(image_orientation[3:])
285 | slice_cosine = np.cross(row_cosine, column_cosine)
286 | return row_cosine, column_cosine, slice_cosine
287 |
288 |
289 | def _slice_attribute_equal(sorted_datasets, property_name):
290 | initial_value = getattr(sorted_datasets[0], property_name, None)
291 | for dataset in sorted_datasets[1:]:
292 | value = getattr(dataset, property_name, None)
293 | if value != initial_value:
294 | msg = f'All slices must have the same value for "{property_name}": {value} != {initial_value}'
295 | raise DicomImportException(msg)
296 |
297 |
298 | def _slice_ndarray_attribute_almost_equal(sorted_datasets, property_name, abs_tol):
299 | initial_value = getattr(sorted_datasets[0], property_name, None)
300 | for dataset in sorted_datasets[1:]:
301 | value = getattr(dataset, property_name, None)
302 | if not np.allclose(value, initial_value, atol=abs_tol):
303 | msg = (f'All slices must have the same value for "{property_name}" within "{abs_tol}": {value} != '
304 | f'{initial_value}')
305 | raise DicomImportException(msg)
306 |
307 |
def _slice_positions(sorted_datasets):
    """
    Project each slice's Image Position (Patient) onto the slice normal,
    yielding a scalar position along the slice axis per dataset.
    """
    orientation = sorted_datasets[0].ImageOrientationPatient
    _, _, slice_cosine = _extract_cosines(orientation)
    return [np.dot(slice_cosine, dataset.ImagePositionPatient) for dataset in sorted_datasets]
312 |
313 |
314 | def _check_for_missing_slices(slice_positions):
315 | if len(slice_positions) > 1:
316 | slice_positions_diffs = np.diff(sorted(slice_positions))
317 | if not np.allclose(slice_positions_diffs, slice_positions_diffs[0], atol=0, rtol=1e-5):
318 | # TODO: figure out how we should handle non-even slice spacing
319 | msg = f"The slice spacing is non-uniform. Slice spacings:\n{slice_positions_diffs}"
320 | logger.warning(msg)
321 |
322 | if not np.allclose(slice_positions_diffs, slice_positions_diffs[0], atol=0, rtol=1e-1):
323 | raise DicomImportException('It appears there are missing slices')
324 |
325 |
326 | def _slice_spacing(sorted_datasets):
327 | if len(sorted_datasets) > 1:
328 | slice_positions = _slice_positions(sorted_datasets)
329 | slice_positions_diffs = np.diff(slice_positions)
330 | return np.median(slice_positions_diffs)
331 |
332 | return getattr(sorted_datasets[0], 'SpacingBetweenSlices', 0)
333 |
--------------------------------------------------------------------------------
/dicom_numpy/exceptions.py:
--------------------------------------------------------------------------------
class DicomImportException(Exception):
    """Raised when the provided DICOM datasets cannot be combined into a volume."""
    pass
3 |
4 |
class MissingInstanceNumberException(Exception):
    """Raised when instance-number sorting is requested but a dataset lacks `InstanceNumber`."""
    pass
7 |
--------------------------------------------------------------------------------
/dicom_numpy/version.py:
--------------------------------------------------------------------------------
1 | __version__ = '0.6.5'
2 |
--------------------------------------------------------------------------------
/dicom_numpy/zip_archive.py:
--------------------------------------------------------------------------------
1 | import zipfile
2 | import logging
3 | import tempfile
4 |
5 | import pydicom
6 |
7 | from .exceptions import DicomImportException
8 | from .combine_slices import combine_slices
9 |
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
def combined_series_from_zip(zip_filename):
    """
    Read every valid DICOM file inside `zip_filename` and combine them into a
    single volume. Returns a (voxels, ijk_to_xyz affine) two-tuple.

    Raises DicomImportException if the path is not a valid zipfile.
    """
    logger.info(f'Extracting voxel data from "{zip_filename}"')

    if not zipfile.is_zipfile(zip_filename):
        raise DicomImportException(f'Invalid zipfile {zip_filename}')

    with zipfile.ZipFile(zip_filename, 'r') as zip_file:
        datasets = dicom_datasets_from_zip(zip_file)

    return combine_slices(datasets)
25 |
26 |
def dicom_datasets_from_zip(zip_file):
    """
    Read all DICOM datasets contained in an open `zipfile.ZipFile`.

    Directory entries are skipped; files that are not valid DICOM are skipped
    with an info-level log message. Raises DicomImportException when no valid
    DICOM file is found in the archive.
    """
    datasets = []
    for entry in zip_file.namelist():
        if entry.endswith('/'):
            continue  # skip directories

        entry_pseudo_file = zip_file.open(entry)

        # the pseudo file does not support `seek`, which is required by
        # pydicom's lazy loading mechanism; use temporary files to get around this;
        # relies on the temporary files not being removed until the temp
        # file is garbage collected, which should be the case because the
        # pydicom datasets should retain a reference to the temp file
        temp_file = tempfile.TemporaryFile()
        temp_file.write(entry_pseudo_file.read())
        temp_file.flush()
        temp_file.seek(0)

        try:
            # `dcmread` is the supported reader; the old `read_file` alias is
            # deprecated and was removed in pydicom 3.0.
            dataset = pydicom.dcmread(temp_file)
            datasets.append(dataset)
        except pydicom.errors.InvalidDicomError as e:
            msg = f'Skipping invalid DICOM file "{entry}": {e}'
            logger.info(msg)

    if len(datasets) == 0:
        raise DicomImportException('Zipfile does not contain any valid DICOM files')

    return datasets
56 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = DICOM-Numpy
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 | set SPHINXPROJ=DICOM-Numpy
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | echo.installed, then set the SPHINXBUILD environment variable to point
21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | echo.may add the Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | #
4 | # DICOM-Numpy documentation build configuration file, created by
5 | # sphinx-quickstart on Fri Apr 28 10:43:23 2017.
6 | #
7 | # This file is execfile()d with the current directory set to its
8 | # containing dir.
9 | #
10 | # Note that not all possible configuration values are present in this
11 | # autogenerated file.
12 | #
13 | # All configuration values have a default; values that are commented out
14 | # serve to show the default.
15 |
16 | # If extensions (or modules to document with autodoc) are in another directory,
17 | # add these directories to sys.path here. If the directory is relative to the
18 | # documentation root, use os.path.abspath to make it absolute, like shown here.
19 | #
20 | import os
21 | import sys
22 |
23 | from unittest.mock import MagicMock
24 |
25 | sys.path.insert(0, os.path.abspath('../..'))
26 |
27 |
class Mock(MagicMock):
    # Any attribute access yields a fresh MagicMock, which is enough for
    # autodoc to import dicom_numpy without its runtime dependencies.
    @classmethod
    def __getattr__(cls, name):
        return MagicMock()


# Heavy dependencies are mocked out so the docs can build without them installed.
MOCK_MODULES = ['numpy', 'pydicom']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
36 |
37 |
38 | # -- General configuration ------------------------------------------------
39 |
40 | # If your documentation needs a minimal Sphinx version, state it here.
41 | #
42 | # needs_sphinx = '1.0'
43 |
44 | # Add any Sphinx extension module names here, as strings. They can be
45 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
46 | # ones.
47 | extensions = ['sphinx.ext.autodoc']
48 |
49 | # Add any paths that contain templates here, relative to this directory.
50 | templates_path = ['_templates']
51 |
52 | # The suffix(es) of source filenames.
53 | # You can specify multiple suffix as a list of string:
54 | #
55 | # source_suffix = ['.rst', '.md']
56 | source_suffix = '.rst'
57 |
58 | # The master toctree document.
59 | master_doc = 'index'
60 |
61 | # General information about the project.
62 | project = 'DICOM-Numpy'
63 | copyright = '2017, Innolitics'
64 | author = 'J. David Giese'
65 |
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# Kept in sync with dicom_numpy/version.py (0.6.5); the old hard-coded
# '0.1.1' had drifted out of date.
# The short X.Y version.
version = '0.6'
# The full version, including alpha/beta/rc tags.
release = '0.6.5'
74 |
75 | # The language for content autogenerated by Sphinx. Refer to documentation
76 | # for a list of supported languages.
77 | #
78 | # This is also used if you do content translation via gettext catalogs.
79 | # Usually you set "language" from the command line for these cases.
80 | language = None
81 |
82 | # List of patterns, relative to source directory, that match files and
83 | # directories to ignore when looking for source files.
84 | # This patterns also effect to html_static_path and html_extra_path
85 | exclude_patterns = []
86 |
87 | # The name of the Pygments (syntax highlighting) style to use.
88 | pygments_style = 'sphinx'
89 |
90 | # If true, `todo` and `todoList` produce output, else they produce nothing.
91 | todo_include_todos = False
92 |
93 |
94 | # -- Options for HTML output ----------------------------------------------
95 |
96 | # The theme to use for HTML and HTML Help pages. See the documentation for
97 | # a list of builtin themes.
98 | #
99 | html_theme = 'alabaster'
100 |
101 | # Theme options are theme-specific and customize the look and feel of a theme
102 | # further. For a list of options available for each theme, see the
103 | # documentation.
104 | #
105 | # html_theme_options = {}
106 |
107 | # Add any paths that contain custom static files (such as style sheets) here,
108 | # relative to this directory. They are copied after the builtin static files,
109 | # so a file named "default.css" will overwrite the builtin "default.css".
110 | html_static_path = ['_static']
111 |
112 |
113 | # -- Options for HTMLHelp output ------------------------------------------
114 |
115 | # Output file base name for HTML help builder.
116 | htmlhelp_basename = 'DICOM-Numpydoc'
117 |
118 |
119 | # -- Options for LaTeX output ---------------------------------------------
120 |
121 | latex_elements = {
122 | # The paper size ('letterpaper' or 'a4paper').
123 | #
124 | # 'papersize': 'letterpaper',
125 |
126 | # The font size ('10pt', '11pt' or '12pt').
127 | #
128 | # 'pointsize': '10pt',
129 |
130 | # Additional stuff for the LaTeX preamble.
131 | #
132 | # 'preamble': '',
133 |
134 | # Latex figure (float) alignment
135 | #
136 | # 'figure_align': 'htbp',
137 | }
138 |
139 | # Grouping the document tree into LaTeX files. List of tuples
140 | # (source start file, target name, title,
141 | # author, documentclass [howto, manual, or own class]).
142 | latex_documents = [
143 | (master_doc, 'DICOM-Numpy.tex', 'DICOM-Numpy Documentation',
144 | 'J. David Giese', 'manual'),
145 | ]
146 |
147 |
148 | # -- Options for manual page output ---------------------------------------
149 |
150 | # One entry per manual page. List of tuples
151 | # (source start file, name, description, authors, manual section).
152 | man_pages = [
153 | (master_doc, 'dicom-numpy', 'DICOM-Numpy Documentation',
154 | [author], 1)
155 | ]
156 |
157 |
158 | # -- Options for Texinfo output -------------------------------------------
159 |
160 | # Grouping the document tree into Texinfo files. List of tuples
161 | # (source start file, target name, title, author,
162 | # dir menu entry, description, category)
163 | texinfo_documents = [
164 | (master_doc, 'DICOM-Numpy', 'DICOM-Numpy Documentation',
165 | author, 'DICOM-Numpy', 'One line description of project.',
166 | 'Miscellaneous'),
167 | ]
168 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | ***********
2 | DICOM-Numpy
3 | ***********
4 |
5 | This python module provides a set of utilities for extracting data contained in
DICOM files into Numpy ndarrays. It is a higher-level library that builds on
the excellent lower-level `pydicom <https://pydicom.github.io/>`_ library.
8 |
9 | The library is quite small at the moment, however, if you have a DICOM-related
10 | utility function that you think would be appropriate to include, create a
11 | Github Issue!
12 |
13 | Dependencies
14 | ============
15 |
- Python 3.7+
17 | - Numpy
18 | - PyDicom 1.0+
19 |
20 |
21 | Installation
22 | ============
23 |
24 | .. code:: bash
25 |
26 | pip install dicom_numpy
27 |
28 |
29 | Source Code
30 | ===========
31 |
The source code is hosted on `Github <https://github.com/innolitics/dicom-numpy>`_.
33 |
34 |
35 | Combine DICOM Slices
36 | ====================
37 |
The DICOM standard stores MR, CT, and PET scans as a series of images saved in
separate files. A common task is to combine all of the images that make up a
single 3D image into a single scan.
41 |
The function that performs this task is `combine_slices`. Since this library
builds on pydicom, `combine_slices` takes a list of pydicom ``Dataset``
objects.
45 |
46 | Example
47 | -------
48 |
49 | .. code:: python
50 |
51 | import pydicom
52 | import dicom_numpy
53 |
54 | def extract_voxel_data(list_of_dicom_files):
55 | datasets = [pydicom.dcmread(f) for f in list_of_dicom_files]
56 | try:
57 | voxel_ndarray, ijk_to_xyz = dicom_numpy.combine_slices(datasets)
58 | except dicom_numpy.DicomImportException as e:
59 | # invalid DICOM data
60 | raise
61 | return voxel_ndarray
62 |
63 |
64 | Details
65 | -------
66 |
67 | .. autofunction:: dicom_numpy.combine_slices
68 | .. autofunction:: dicom_numpy.sort_by_slice_position
69 |
70 |
71 | Change Log
72 | ==========
73 |
74 | Version 0.6.5
75 | -------------
76 | - Documentation-only release updating incorrect axes labels in the documentation
77 | for `combine_slices`. The `c_order_axes` option converts axis orders from
78 | `[i, j, k]` to `[k, j, i]`, rather than what was stated in the 0.6.4 documentation.
79 |
80 | Version 0.6.4
81 | -------------
82 | - Add a `c_order_axes` option to `combine_slices`. When true, this option returns
83 | a volume with row-major ordering instead of column-major ordering. In order to
84 | keep slices contiguous in memory, using row-major ordering will reverse the order
85 | of the axes; thus, `[j, i, k]` will instead be `[k, i, j]`. This can be useful
86 | when using the volume with other libraries that expect row-major ordering, such
87 | as HDF5.
88 |
89 | Version 0.6.3
90 | -------------
91 |
92 | - Add a `skip_sorting` option to `combine_slices`, allowing slices to be sorted
93 | by the user before being passed in to `combine_slices`.
94 |
95 | Version 0.6.2
96 | -------------
97 | - Add a `sort_by_instance` option to `combine_slices`, allowing slices to be
98 | sorted in the volume by their instance number rather than slice position.
99 | This is useful for series that contain multiple scans over the same physical
100 | space, such as diffusion MRI.
101 |
102 | Version 0.6.1
103 | -------------
104 | - Fix a bug where slice sorting could raise an exception if multiple slices
105 | were located at the same slice position.
106 |
107 | Version 0.6.0
108 | -------------
109 | - Add `enforce_slice_spacing` keyword argument to `combine_slices`, which
110 | defaults to True. When this keyword argument is set to False, slices can be
111 | combined even if some are missing, i.e. the slices do not form a uniform
112 | grid.
113 | - The slice spacing calculation (used in the formation of the image
114 | transformation matrix) has been changed to use the median of the spacing
115 | between slices, rather than the mean. This change was made to make the
116 | calculation less sensitive to large gaps skewing the slice spacing
117 | calculation as a result of missing slices.
118 |
119 | Version 0.5.0
120 | -------------
121 | - Export `sort_by_slice_position`
122 |
123 | Version 0.4.0
124 | -------------
125 | - Ignore DICOMDIR files
126 | - Fix bug that was triggered when using `from dicom_numpy import *`
127 | - Make `combine_slices` work with a single slice
128 | - Add support for "channeled slices" (e.g., RGB slices)
129 | - Allow HighBit and BitsStored DICOM attributes to be non-uniform
130 | - Drop support for Python 3.4; test Python 3.7
131 | - Require the SamplesPerPixel DICOM attribute to be invariant among the slices
132 |
133 | Version 0.3.0
134 | -------------
135 |
136 | - Reverted slice ordering change from v0.2.0, since the DICOM standard defines
137 | the Z-axis direction to be increasing in the direction of the head.
138 | - Added support for both PyDicom 0.X and 1.X
139 |
140 | Version 0.2.0
141 | -------------
142 |
143 | - Changed the behavior of `combine_slices` to stack slices from head (slice 0)
144 | to foot (slice -1). Note that this is the reverse of the behavior in v0.1.*.
145 |
146 | Version 0.1.5
147 | -------------
148 |
149 | - Added the `rescale` option to `combine_slices`
150 | - Made `combine_slices`'s returned ndarray use column-major ordering
151 |
152 | Contributing
153 | ============
154 |
155 | Process
156 | -------
157 |
158 | Contributions are welcome. Please create a Github issue describing the change
159 | you would like to make so that you can discuss your approach with the
160 | maintainers. Assuming the maintainers like your approach, then create a pull
161 | request.
162 |
163 | Tests
164 | -----
165 |
166 | Most new functionality will require unit tests.
167 |
168 | Run all of the tests for each supported python version using:
169 |
170 | .. code:: bash
171 |
172 | tox
173 |
174 | Run all of the tests for the currently active python version using:
175 |
176 | .. code:: bash
177 |
178 | pytest
179 |
180 | Other Contributors
181 | ------------------
182 |
183 | Additional contributions made by:
184 |
185 | - Jonathan Daniel
186 |
187 | Thank you!
188 |
189 |
190 | About Innolitics
191 | ================
192 |
193 | Innolitics is a team of talented software developers with medical and
194 | engineering backgrounds. We help companies produce top quality medical imaging
and workflow applications. If you work with DICOM frequently, our `DICOM
Standard Browser <https://dicom.innolitics.com>`_ may be useful to you.
197 |
If you could use help with DICOM, `let us know <mailto:info@innolitics.com>`_! We offer training sessions and
199 | can provide advice or development services.
200 |
201 |
202 | Licenses
203 | ========
204 |
205 | .. include:: ../../LICENSE.txt
206 |
207 | Indices and tables
208 | ==================
209 |
210 | * :ref:`genindex`
211 | * :ref:`modindex`
212 | * :ref:`search`
213 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
[bdist_wheel]
# The package is Python 3 only (setup.py declares python_requires >= 3.6), so
# the wheel should not be tagged universal (py2.py3).
universal=0
3 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
"""
A setuptools based setup module.
"""

from setuptools import setup, find_packages
from os import path

here = path.abspath(path.dirname(__file__))

# Use the README as the long description displayed on PyPI.
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

# Read __version__ from dicom_numpy/version.py without importing the package
# (importing would require its runtime dependencies to already be installed).
metadata = {}
with open(path.join(here, 'dicom_numpy', 'version.py')) as f:
    exec(f.read(), metadata)

setup(
    name='dicom_numpy',
    version=metadata['__version__'],
    description='Extract image data into a 3D numpy array from a set of DICOM files.',
    long_description=long_description,
    url='https://github.com/innolitics/dicom-numpy',
    author='Innolitics, LLC',
    author_email='info@innolitics.com',
    license='MIT',
    classifiers=[
        'Development Status :: 5 - Production/Stable',

        'Intended Audience :: Developers',
        'Intended Audience :: Healthcare Industry',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Scientific/Engineering :: Medical Science Apps.',

        'License :: OSI Approved :: MIT License',

        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        # Keep in sync with the CI matrix (.github/workflows/unit-tests.yml),
        # which also tests 3.10 and 3.11.
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: 3.11',
    ],

    keywords='dicom numpy',

    packages=find_packages(exclude=['contrib', 'docs', 'tests']),

    install_requires=[
        'pydicom >= 1.0',
        'numpy',
    ],

    python_requires='>= 3.6',

    extras_require={
        'dev': ['check-manifest', 'sphinx', 'sphinx-autobuild'],
        'test': ['coverage', 'pytest']
    }
)
59 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/innolitics/dicom-numpy/98e927ce53b3ca77ae4bbe811e781f87b02aaca8/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import numpy as np
3 |
4 |
# Direction cosines: unit vectors along the DICOM patient axes, used to
# orient the mock slices below.
x_cos = (1, 0, 0)
y_cos = (0, 1, 0)
z_cos = (0, 0, 1)
negative_x_cos = (-1, 0, 0)
negative_y_cos = (0, -1, 0)
negative_z_cos = (0, 0, -1)

# Default (rows, columns) pixel-array shape for mock slices; the RGB variant
# appends a 3-sample channel axis.
arbitrary_shape = (10, 11)
arbitrary_rgb_shape = (10, 11, 3)
15 |
16 |
class MockSlice:
    """
    A minimal stand-in for a DICOM dataset representing a single slice at a
    given location. `slice_position` is the coordinate along the remaining
    unused axis (the axis perpendicular to both direction cosines).

    Any extra keyword arguments are attached verbatim as DICOM attributes.
    """

    def __init__(self, pixel_array, slice_position, row_cosine=None, column_cosine=None, **kwargs):
        row_cosine = x_cos if row_cosine is None else row_cosine
        column_cosine = y_cos if column_cosine is None else column_cosine

        if pixel_array.ndim == 2:
            num_rows, num_columns = pixel_array.shape
            samples_per_pixel = 1
        else:
            num_rows, num_columns, samples_per_pixel = pixel_array.shape
            # TODO: when `combine_slices` takes care of the planar configuration (also in invariant_properties), add
            # self.PlanarConfiguration = 0,
            # which means that the RGB channels are in the last axis.
            # The Planar Configuration tag is required when Samples Per Pixel > 1.
            # It can be 0 - "channels-last" or 1 - "channels-first" (in pixel_array).
            # Usually, it is 0 - just as here. See
            # https://dicom.innolitics.com/ciods/enhanced-mr-image/enhanced-mr-image/00280006

        self.pixel_array = pixel_array

        self.SeriesInstanceUID = 'arbitrary uid'
        self.SOPClassUID = 'arbitrary sopclass uid'
        self.PixelSpacing = [1.0, 1.0]
        self.Rows = num_rows
        self.Columns = num_columns
        self.SamplesPerPixel = samples_per_pixel
        self.Modality = 'MR'

        # assume that the images are centered on the remaining unused axis
        row_offset = [-num_columns/2.0*cosine for cosine in row_cosine]
        column_offset = [-num_rows/2.0*cosine for cosine in column_cosine]
        slice_offset = [
            slice_position if row_c == 0 and col_c == 0 else 0
            for row_c, col_c in zip(row_cosine, column_cosine)
        ]
        self.ImagePositionPatient = [
            sum(components) for components in zip(row_offset, column_offset, slice_offset)
        ]

        self.ImageOrientationPatient = list(row_cosine) + list(column_cosine)

        for attribute, value in kwargs.items():
            setattr(self, attribute, value)
68 |
69 |
@pytest.fixture
def axial_slices():
    # four slices at positions 0-3 whose instance numbers run in reverse
    return [
        MockSlice(randi(*arbitrary_shape), position, InstanceNumber=3 - position)
        for position in range(4)
    ]
78 |
79 |
@pytest.fixture
def axial_slices_mixed_instances():
    # (slice position, instance number) pairs deliberately out of order
    layout = [(2, 1), (0, 3), (3, 2), (1, 0)]
    return [
        MockSlice(randi(*arbitrary_shape), position, InstanceNumber=instance)
        for position, instance in layout
    ]
88 |
89 |
@pytest.fixture
def axial_slices_missing_instance_numbers():
    # slices at positions 0-3 with no InstanceNumber attribute at all
    return [MockSlice(randi(*arbitrary_shape), position) for position in range(4)]
98 |
99 |
@pytest.fixture
def axial_rgb_slices():
    # RGB (channels-last) slices at positions 0-3
    return [MockSlice(randi(*arbitrary_rgb_shape), position) for position in range(4)]
108 |
109 |
def randi(*shape):
    """Return a uint16 array of the given shape with values in [0, 1000)."""
    upper_bound = 1000
    return np.random.randint(upper_bound, size=shape, dtype=np.uint16)
112 |
--------------------------------------------------------------------------------
/tests/create_golden_values.py:
--------------------------------------------------------------------------------
1 | """
2 | Generate a golden NPZ file from a dicom ZIP archive.
3 | """
4 | import argparse
5 |
6 | import numpy as np
7 |
8 | from dicom_numpy.zip_archive import combined_series_from_zip
9 |
10 |
def parse_args(argv=None):
    """
    Parse command-line arguments.

    `argv` is an optional list of argument strings; when None (the default),
    argparse falls back to `sys.argv[1:]`, preserving the original behavior
    while making the function testable.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', '--output', help='Output golden NPZ file', required=False)
    parser.add_argument('input', help="Input DICOM zip archive")
    return parser.parse_args(argv)
16 |
17 |
def generate_golden_values(input_zip, output_path='golden_values'):
    """
    Combine the series in `input_zip` and save the voxels plus the
    ijk-to-xyz affine as a compressed NPZ file at `output_path`.
    """
    voxel_data, transform = combined_series_from_zip(input_zip)
    np.savez_compressed(output_path, voxels=voxel_data, ijk_to_xyz=transform)
24 |
25 |
if __name__ == '__main__':
    cli_args = parse_args()
    # fall back to the default output path when none was supplied
    if not cli_args.output:
        generate_golden_values(cli_args.input)
    else:
        generate_golden_values(cli_args.input, cli_args.output)
32 |
--------------------------------------------------------------------------------
/tests/data-citation.txt:
--------------------------------------------------------------------------------
1 | Data in test_dicom.zip comes from the C4KC-KiTS dataset of The Cancer Imaging
2 | Archive (TCIA) and is licensed under Creative Commons Attribution 3.0 Unported
3 | License (https://creativecommons.org/licenses/by/3.0/)
4 |
5 | The zip file contains a very small subset of the data in this study and is
6 | intended to be used only for the purposes of testing dicom-numpy functionality.
7 |
8 | Please see the following citations for more information:
9 |
10 | Data Citation
11 |
12 | Heller, N., Sathianathen, N., Kalapara, A., Walczak, E., Moore, K., Kaluzniak,
13 | H., Rosenberg, J., Blake, P., Rengel, Z., Oestreich, M., Dean, J., Tradewell,
14 | M., Shah, A., Tejpaul, R., Edgerton, Z., Peterson, M., Raza, S., Regmi, S.,
15 | Papanikolopoulos, N., Weight, C. Data from C4KC-KiTS [Data set]. The Cancer
16 | Imaging Archive. 10.7937/TCIA.2019.IX49E8NX
17 |
18 | TCIA Citation
19 |
20 | Clark K, Vendt B, Smith K, Freymann J, Kirby J, Koppel P, Moore S, Phillips S,
21 | Maffitt D, Pringle M, Tarbox L, Prior F. The Cancer Imaging Archive (TCIA):
22 | Maintaining and Operating a Public Information Repository, Journal of Digital
23 | Imaging, Volume 26, Number 6, December, 2013, pp 1045-1057. DOI:
24 | 10.1007/s10278-013-9622-7
25 |
--------------------------------------------------------------------------------
/tests/dupe-positions.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/innolitics/dicom-numpy/98e927ce53b3ca77ae4bbe811e781f87b02aaca8/tests/dupe-positions.zip
--------------------------------------------------------------------------------
/tests/golden_values.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/innolitics/dicom-numpy/98e927ce53b3ca77ae4bbe811e781f87b02aaca8/tests/golden_values.npz
--------------------------------------------------------------------------------
/tests/test_combine_from_zip.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import numpy as np
4 |
5 | from dicom_numpy.zip_archive import combined_series_from_zip
6 |
TEST_DIR = os.path.dirname(__file__)
# known-good input archive and the precomputed golden output it should produce
TEST_DICOM_ZIP_PATH = os.path.join(TEST_DIR, 'test_dicom.zip')
GOLDEN_FILE_PATH = os.path.join(TEST_DIR, 'golden_values.npz')
10 |
11 |
def test_combine_from_zip():
    """
    Integration test: a known DICOM zip archive must produce the stored
    golden voxel array and affine matrix.
    """
    voxels, ijk_to_xyz = combined_series_from_zip(TEST_DICOM_ZIP_PATH)
    with np.load(GOLDEN_FILE_PATH) as golden:
        np.testing.assert_array_equal(voxels, golden['voxels'])
        np.testing.assert_array_equal(ijk_to_xyz, golden['ijk_to_xyz'])
21 |
--------------------------------------------------------------------------------
/tests/test_combine_slices.py:
--------------------------------------------------------------------------------
1 | # from copy import deepcopy
2 | from glob import glob
3 | import os
4 | from tempfile import TemporaryDirectory
5 | from zipfile import ZipFile
6 |
7 | import numpy as np
8 | import pytest
9 | import pydicom
10 |
11 | from dicom_numpy.combine_slices import (
12 | combine_slices,
13 | sort_by_slice_position,
14 | sort_by_instance_number,
15 | _merge_slice_pixel_arrays,
16 | )
17 | from dicom_numpy.exceptions import DicomImportException, MissingInstanceNumberException
18 | from .conftest import MockSlice
19 |
TEST_DIR = os.path.dirname(__file__)
# archive whose slices include duplicated ImagePositionPatient values
TEST_DICOM_ZIP_PATH = os.path.join(TEST_DIR, 'dupe-positions.zip')
22 |
23 |
def getDatasetsFromZip():
    """Extract the duplicate-position test archive and read every .dcm file."""
    with TemporaryDirectory() as extraction_dir:
        with ZipFile(TEST_DICOM_ZIP_PATH) as archive:
            archive.extractall(extraction_dir)
        paths = glob(os.path.join(extraction_dir, '*.dcm'))
        # the datasets must be read before the temporary directory is removed
        return [pydicom.dcmread(path) for path in paths]
30 |
31 |
class TestSortBySlicePosition:
    def test_slice_sort_order(self):
        """
        Sorting datasets that contain duplicate slice positions must not
        raise any exception.
        """
        sort_by_slice_position(getDatasetsFromZip())
40 |
41 |
class TestCombineSlices:
    def test_simple_axial_set(self, axial_slices):
        volume, _ = combine_slices(axial_slices[:2])
        expected = np.dstack((axial_slices[0].pixel_array.T, axial_slices[1].pixel_array.T))
        assert np.array_equal(volume, expected)

    def test_simple_axial_set_w_dicomdir(self, axial_slices):
        # a DICOMDIR dataset mixed into the input should be ignored
        dicomdir = axial_slices[2]
        dicomdir.MediaStorageSOPClassUID = '1.2.840.10008.1.3.10'
        volume, _ = combine_slices([dicomdir, axial_slices[0], axial_slices[1]])
        expected = np.dstack((axial_slices[0].pixel_array.T, axial_slices[1].pixel_array.T))
        assert np.array_equal(volume, expected)

    def test_single_slice(self, axial_slices):
        only_slice = axial_slices[-1]
        volume, _ = combine_slices([only_slice])
        assert np.array_equal(volume, only_slice.pixel_array.T[:, :, None])

    def test_single_slice_spacing(self, axial_slices):
        spacing = 0.65
        only_slice = axial_slices[0]
        only_slice.SpacingBetweenSlices = spacing
        volume, affine = combine_slices([only_slice])
        assert np.array_equal(volume, only_slice.pixel_array.T[:, :, None])
        # the k-axis column of the affine should have the given spacing
        assert np.isclose(np.linalg.norm(affine[:, 2]), np.abs(spacing))

    def test_rgb_axial_set(self, axial_rgb_slices):
        volume, _ = combine_slices(axial_rgb_slices)
        expected = np.stack([ds.pixel_array for ds in axial_rgb_slices], axis=0).T
        assert np.array_equal(volume, expected)
74 |
75 |
class TestMergeSlicePixelArrays:
    def test_casting_if_only_rescale_slope(self):
        """
        If the `RescaleSlope` DICOM attribute is present, the
        `RescaleIntercept` attribute should also be present, however, we handle
        this case anyway.
        """
        datasets = [
            MockSlice(np.ones((10, 20), dtype=np.uint8), 0, RescaleSlope=2),
            MockSlice(np.ones((10, 20), dtype=np.uint8), 1, RescaleSlope=2),
        ]

        merged = _merge_slice_pixel_arrays(datasets)
        assert merged.dtype == np.dtype('float32')
        assert merged[0, 0, 0] == 2.0

    def test_casting_rescale_slope_and_intercept(self):
        """
        Some DICOM modules carry both `RescaleSlope` and `RescaleIntercept`;
        the merged volume applies both.
        """
        datasets = [
            MockSlice(np.ones((10, 20), dtype=np.uint8), 0, RescaleSlope=2, RescaleIntercept=3),
            MockSlice(np.ones((10, 20), dtype=np.uint8), 1, RescaleSlope=2, RescaleIntercept=3),
        ]

        merged = _merge_slice_pixel_arrays(datasets)
        assert merged.dtype == np.dtype('float32')
        assert merged[0, 0, 0] == 5.0

    def test_robust_to_ordering(self, axial_slices):
        """
        The DICOM slices may be passed in any order; once sorted, the merged
        volumes must be identical.
        """
        in_order = [axial_slices[0], axial_slices[1], axial_slices[2]]
        shuffled = [axial_slices[1], axial_slices[0], axial_slices[2]]
        assert np.array_equal(
            _merge_slice_pixel_arrays(sort_by_slice_position(in_order)),
            _merge_slice_pixel_arrays(sort_by_slice_position(shuffled))
        )

        rotated = [axial_slices[2], axial_slices[0], axial_slices[1]]
        assert np.array_equal(
            _merge_slice_pixel_arrays(sort_by_instance_number(in_order)),
            _merge_slice_pixel_arrays(sort_by_instance_number(rotated))
        )

    def test_rescales_if_forced_true(self):
        # rescale=True forces float32 output even without rescale attributes
        voxels = _merge_slice_pixel_arrays([MockSlice(np.ones((10, 20), dtype=np.uint8), 0)], rescale=True)
        assert voxels.dtype == np.float32

    def test_no_rescale_if_forced_false(self):
        # rescale=False keeps the raw integer dtype despite rescale attributes
        voxels = _merge_slice_pixel_arrays(
            [MockSlice(np.ones((10, 20), dtype=np.uint8), 0, RescaleSlope=2, RescaleIntercept=3)],
            rescale=False,
        )
        assert voxels.dtype == np.uint8

    def test_c_ordering(self):
        # Note that the shape returned by pydicom's `pixel_array` is [j, i]
        datasets = [
            MockSlice(np.ones((10, 20), dtype=np.uint8), 0, RescaleSlope=2, RescaleIntercept=3),
            MockSlice(np.ones((10, 20), dtype=np.uint8), 1, RescaleSlope=2, RescaleIntercept=3),
        ]
        voxels = _merge_slice_pixel_arrays(datasets, c_order_axes=True)
        assert voxels.flags.c_contiguous
        # with C ordering the axes come back as [k, j, i]
        assert voxels.shape == (2, 10, 20)
140 |
141 |
class TestValidateSlicesFormUniformGrid:
    def test_missing_middle_slice_strict(self, axial_slices):
        """
        By default, all slices must be present. Slice position is determined
        using the ImagePositionPatient (0020,0032) tag.
        """
        with pytest.raises(DicomImportException):
            combine_slices([axial_slices[0], axial_slices[2], axial_slices[3]])

    def test_missing_middle_slice_lax(self, axial_slices):
        """
        When `enforce_slice_spacing` is False, the no-missing-slices
        constraint is relaxed and the available slices are stacked as if none
        were missing.
        """
        voxels, _ = combine_slices(
            [axial_slices[0], axial_slices[2], axial_slices[3]],
            enforce_slice_spacing=False,
        )
        assert voxels.shape[2] == 3

    def test_insignificant_difference_in_direction_cosines(self, axial_slices):
        """
        Series seen in the field sometimes have slightly different direction
        cosines between slices; tiny differences must be tolerated.
        """
        axial_slices[0].ImageOrientationPatient[0] += 1e-6
        combine_slices(axial_slices)

    def test_significant_difference_in_direction_cosines(self, axial_slices):
        # a direction-cosine difference this large must be rejected
        axial_slices[0].ImageOrientationPatient[0] += 1e-4
        with pytest.raises(DicomImportException):
            combine_slices(axial_slices, enforce_slice_spacing=False)

    def test_slices_from_different_series(self, axial_slices):
        """
        As a sanity check, slices that don't come from the same DICOM series
        should be rejected.
        """
        axial_slices[2].SeriesInstanceUID += 'Ooops'
        with pytest.raises(DicomImportException):
            combine_slices(axial_slices, enforce_slice_spacing=False)

    @pytest.mark.xfail(reason='Not sure how to detect this in DICOM')
    def test_missing_end_slice(self, axial_slices):
        """
        Ideally missing edge slices would be detected, but since there is no
        known way to determine how many slices a DICOM series contains, this
        seems impossible.
        """
        with pytest.raises(DicomImportException):
            combine_slices(
                [axial_slices[0], axial_slices[1], axial_slices[2]],
                enforce_slice_spacing=False,
            )

    def test_combine_with_instance_number(self, axial_slices):
        """
        A collection of slices should assemble identically whether sorted by
        slice position or by instance number, as long as instance numbers run
        sequentially along the slice axis.
        """
        by_instance, _ = combine_slices(axial_slices, sort_by_instance=True)
        by_position, _ = combine_slices(axial_slices)
        assert np.array_equal(by_instance, by_position)

    def test_instance_combination_fails_when_missing(self, axial_slices_missing_instance_numbers):
        """
        Sorting by instance number must raise when some instance numbers are
        absent.
        """
        with pytest.raises(MissingInstanceNumberException):
            combine_slices(axial_slices_missing_instance_numbers, sort_by_instance=True)

    def test_instance_sorting_with_mixed_positions(self, axial_slices_mixed_instances):
        """
        With instance sorting selected, slices are ordered by instance number
        and not by image position patient.

        In practice, series like this tend to be multiple scans with different
        parameters within a single series, such as in the case of diffusion
        MRI.
        """
        by_instance, _ = combine_slices(axial_slices_mixed_instances, sort_by_instance=True)
        by_position, _ = combine_slices(axial_slices_mixed_instances)
        # instance-sorted index -> position-sorted index
        for instance_index, position_index in enumerate([0, 3, 2, 1]):
            assert np.array_equal(by_instance[:, :, instance_index], by_position[:, :, position_index])

    def test_skip_sorting(self, axial_slices_mixed_instances):
        """
        With `skip_sorting=True`, the slice ordering supplied by the caller is
        preserved unchanged.
        """
        by_position, _ = combine_slices(axial_slices_mixed_instances)
        unsorted_voxels, _ = combine_slices(axial_slices_mixed_instances, skip_sorting=True)
        # unsorted index -> position-sorted index
        for unsorted_index, position_index in enumerate([2, 0, 3, 1]):
            assert np.array_equal(unsorted_voxels[:, :, unsorted_index], by_position[:, :, position_index])
243 |
--------------------------------------------------------------------------------
/tests/test_dicom.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/innolitics/dicom-numpy/98e927ce53b3ca77ae4bbe811e781f87b02aaca8/tests/test_dicom.zip
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
    py{36,37,38,39,310,311}-pydicom
4 |
5 | [gh-actions]
6 | python =
7 | 3.7: py37
8 | 3.8: py38
9 | 3.9: py39
10 | 3.10: py310
11 | 3.11: py311
12 |
13 | [testenv]
14 | basepython =
15 | py36: python3.6
16 | py37: python3.7
17 | py38: python3.8
18 | py39: python3.9
19 | py310: python3.10
20 | py311: python3.11
21 | deps =
22 | readme_renderer
23 | flake8
24 | pytest
25 | pydicom: pydicom>=1.0
26 | numpy
27 | commands =
28 | python setup.py check -m -r -s
29 | flake8 --ignore=E226 .
30 | pytest tests
31 |
32 | [flake8]
33 | exclude = .tox,*.egg,build,data
34 | select = E,W,F
35 | max-line-length = 120
36 |
--------------------------------------------------------------------------------