├── setup.cfg ├── docs ├── source │ ├── imgs │ │ ├── logo.png │ │ ├── pept_centres.png │ │ ├── pept_centres_full.png │ │ └── pept_transformation.png │ ├── _static │ │ ├── logo.png │ │ └── anaconda-symbol.svg │ ├── manual │ │ ├── simulation.rst │ │ ├── plots.rst │ │ ├── scanners.rst │ │ ├── base.rst │ │ ├── utilities.rst │ │ ├── processing.rst │ │ ├── index.rst │ │ └── tracking.rst │ ├── tutorials │ │ ├── interpolating.rst │ │ ├── fpi.rst │ │ ├── reading.rst │ │ ├── birmingham.rst │ │ ├── velocities.rst │ │ ├── converting.rst │ │ ├── tracking_errors.rst │ │ ├── trajectory_separation.rst │ │ ├── adaptive_samples.rst │ │ ├── index.rst │ │ ├── filtering.rst │ │ ├── visualising.rst │ │ ├── basics.md │ │ └── peptml.rst │ ├── getting_started.rst │ ├── citing.rst │ ├── contributing.rst │ └── index.rst ├── Makefile └── make.bat ├── pyproject.toml ├── requirements_extra.txt ├── requirements.txt ├── MANIFEST.in ├── pept ├── __version__.py ├── scanners │ ├── modular_camera │ │ ├── __init__.py │ │ ├── extensions │ │ │ ├── get_pept_event_ext.h │ │ │ ├── get_pept_event.pyx │ │ │ └── get_pept_event_ext.c │ │ └── modular_camera.py │ ├── parallel_screens │ │ ├── extensions │ │ │ ├── __init__.py │ │ │ ├── binary_converter_ext.h │ │ │ └── binary_converter.pyx │ │ ├── __init__.py │ │ └── adac_forte.py │ └── __init__.py ├── utilities │ ├── parallel │ │ ├── __init__.py │ │ └── parallel_map.py │ ├── traverse │ │ └── __init__.py │ ├── cutpoints │ │ ├── __init__.py │ │ ├── find_minpoints_ext.h │ │ └── find_minpoints.pyx │ ├── misc │ │ ├── __init__.py │ │ └── aggregate.py │ └── __init__.py ├── simulation │ ├── __init__.py │ └── .gitignore ├── tracking │ ├── tof │ │ └── __init__.py │ ├── birmingham_method │ │ ├── extensions │ │ │ ├── birmingham_method_ext.h │ │ │ └── birmingham_method.pyx │ │ ├── __init__.py │ │ └── birmingham_method.py │ ├── trajectory_separation │ │ ├── __init__.py │ │ └── distance_matrix_reachable.pyx │ ├── fpi │ │ ├── __init__.py │ │ ├── fpi_ext.pyx │ │ ├── PeptStructures.hpp │ │ └── fpi.py │ ├── __init__.py │ ├── peptml │ │ └── __init__.py │ └── post.py ├── processing │ └── __init__.py ├── plots │ └── __init__.py └── base │ ├── __init__.py │ ├── utilities.py │ └── pixels.py ├── .flake8 ├── .readthedocs.yml ├── .gitignore ├── .github └── workflows │ └── build_wheels.yml ├── prototype └── pipeline_optimise.py ├── tests └── test_processing.py └── CODE_OF_CONDUCT.md /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description_file = README.md 3 | -------------------------------------------------------------------------------- /docs/source/imgs/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uob-positron-imaging-centre/pept/HEAD/docs/source/imgs/logo.png -------------------------------------------------------------------------------- /docs/source/_static/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uob-positron-imaging-centre/pept/HEAD/docs/source/_static/logo.png -------------------------------------------------------------------------------- /docs/source/imgs/pept_centres.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uob-positron-imaging-centre/pept/HEAD/docs/source/imgs/pept_centres.png -------------------------------------------------------------------------------- /docs/source/imgs/pept_centres_full.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/uob-positron-imaging-centre/pept/HEAD/docs/source/imgs/pept_centres_full.png -------------------------------------------------------------------------------- /docs/source/imgs/pept_transformation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uob-positron-imaging-centre/pept/HEAD/docs/source/imgs/pept_transformation.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools", 4 | "wheel", 5 | "cython", 6 | "oldest-supported-numpy", 7 | ] 8 | -------------------------------------------------------------------------------- /requirements_extra.txt: -------------------------------------------------------------------------------- 1 | Sphinx>=4.1 2 | numpydoc>=1.1 3 | ipython>=5.5.0 4 | pydata-sphinx-theme>=0.6.3 5 | numba>=0.50.0 6 | pytest>=3.6.4 7 | myst-parser>=0.18 8 | -------------------------------------------------------------------------------- /docs/source/manual/simulation.rst: -------------------------------------------------------------------------------- 1 | ``pept.simulation`` 2 | =================== 3 | 4 | 5 | .. automodule:: pept.simulation 6 | 7 | 8 | 9 | .. autosummary:: 10 | :toctree: generated/ 11 | 12 | pept.simulation.Simulator 13 | 14 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Cython>=0.29.16 2 | scipy>=1.4.1 3 | hdbscan>=0.8.26 4 | numpy>=1.18.3 5 | joblib>=0.14.1,<1.2 6 | matplotlib>=3.2.1 7 | plotly>=4.4.1 8 | tqdm>=4.41.1 9 | pandas>=1.0.3 10 | cma>=3.0.3 11 | natsort>=5.5.0 12 | konigcell>=0.2.0 13 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md LICENSE CODE_OF_CONDUCT.md 2 | include requirements.txt requirements_extra.txt 3 | include *.rst *.txt 4 | recursive-include pept *.py *.pyx *.pxd *.c* *.h* 5 | global-exclude .DS_Store 6 | global-exclude __pycache__ 7 | global-exclude *.pyc 8 | global-exclude *.so 9 | -------------------------------------------------------------------------------- /pept/__version__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __version__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 23.08.2019 7 | 8 | 9 | VERSION = (0, 5, 2) 10 | 11 | __version__ = '.'.join(map(str, VERSION)) 12 | -------------------------------------------------------------------------------- /docs/source/manual/plots.rst: -------------------------------------------------------------------------------- 1 | Visualisation (``pept.plots``) 2 | ============================== 3 | 4 | 5 | .. automodule:: pept.plots 6 | 7 | 8 | 9 | .. 
autosummary:: 10 | :toctree: generated/ 11 | 12 | pept.plots.format_fig 13 | pept.plots.histogram 14 | pept.plots.make_video 15 | pept.plots.PlotlyGrapher 16 | pept.plots.PlotlyGrapher2D 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /docs/source/manual/scanners.rst: -------------------------------------------------------------------------------- 1 | Initialising Scanner Data (``pept.scanners``) 2 | ============================================= 3 | 4 | 5 | .. automodule:: pept.scanners 6 | 7 | 8 | 9 | .. autosummary:: 10 | :toctree: generated/ 11 | 12 | pept.scanners.adac_forte 13 | pept.scanners.parallel_screens 14 | pept.scanners.ADACGeometricEfficiency 15 | pept.scanners.modular_camera 16 | -------------------------------------------------------------------------------- /docs/source/manual/base.rst: -------------------------------------------------------------------------------- 1 | Base / Abstract Classes (``pept.base``) 2 | ======================================= 3 | 4 | 5 | .. autosummary:: 6 | :toctree: generated/ 7 | 8 | pept.base.PEPTObject 9 | pept.base.IterableSamples 10 | pept.base.Transformer 11 | pept.base.Filter 12 | pept.base.Reducer 13 | pept.base.PointDataFilter 14 | pept.base.LineDataFilter 15 | pept.base.VoxelsFilter 16 | 17 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E303, E271, E272, W504, E251 3 | 4 | # I like whitespace and indentation - the easier my eyes parse, the quicker and 5 | # more productive I am 6 | # E303: too many blank lines 7 | # E271: multiple spaces after keyword 8 | # E272: multiple spaces before keyword 9 | # W504: line break after binary operator 10 | 11 | # Controversial, but I don't like fun(x=2, y=3) - I prefer fun(x = 2, y = 3) 12 | # E251: unexpected spaces around keyword / parameter equals 13 | -------------------------------------------------------------------------------- /pept/scanners/modular_camera/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # File : __init__.py 6 | # License: GNU v3.0 7 | # Author : Sam Manger 8 | # Date : 20.08.2019 9 | 10 | 11 | from .modular_camera import modular_camera 12 | 13 | 14 | __all__ = [ 15 | 'modular_camera', 16 | ] 17 | 18 | 19 | __license__ = "GNU v3.0" 20 | __maintainer__ = "Sam Manger" 21 | __email__ = "s.manger@bham.ac.uk" 22 | __status__ = "Beta" 23 | -------------------------------------------------------------------------------- /pept/utilities/parallel/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 03.02.2020 7 | 8 | 9 | from .parallel_map import parallel_map_file 10 | 11 | 12 | __all__ = [ 13 | "parallel_map_file" 14 | ] 15 | 16 | 17 | __license__ = "GNU v3.0" 18 | __maintainer__ = "Andrei Leonard Nicusan" 19 | __email__ = "a.l.nicusan@bham.ac.uk" 20 | __status__ = "Beta" 21 | -------------------------------------------------------------------------------- /pept/scanners/parallel_screens/extensions/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py
4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 01.04.2021 7 | 8 | 9 | from .binary_converter import convert_adac_forte 10 | 11 | 12 | __all__ = [ 13 | "convert_adac_forte", 14 | ] 15 | 16 | 17 | __license__ = "GNU v3.0" 18 | __maintainer__ = "Andrei Leonard Nicusan" 19 | __email__ = "a.l.nicusan@bham.ac.uk" 20 | __status__ = "Beta" 21 | -------------------------------------------------------------------------------- /pept/simulation/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # File : __init__.py 6 | # License: GNU v3.0 7 | # Author : Andrei Leonard Nicusan 8 | # Date : 21.08.2019 9 | 10 | 11 | from .peptsim import Noise 12 | from .peptsim import Shape 13 | from .peptsim import Simulator 14 | 15 | 16 | __license__ = "GNU v3.0" 17 | __maintainer__ = "Andrei Leonard Nicusan" 18 | __email__ = "a.l.nicusan@bham.ac.uk" 19 | __status__ = "Development" 20 | -------------------------------------------------------------------------------- /docs/source/manual/utilities.rst: -------------------------------------------------------------------------------- 1 | ``pept.utilities`` 2 | ================== 3 | 4 | 5 | .. automodule:: pept.utilities 6 | 7 | 8 | 9 | .. autosummary:: 10 | :toctree: generated/ 11 | 12 | pept.utilities.find_cutpoints 13 | pept.utilities.find_minpoints 14 | pept.utilities.group_by_column 15 | pept.utilities.number_of_lines 16 | pept.utilities.read_csv 17 | pept.utilities.read_csv_chunks 18 | pept.utilities.parallel_map_file 19 | pept.utilities.traverse2d 20 | pept.utilities.traverse3d 21 | pept.utilities.ChunkReader 22 | 23 | 24 | -------------------------------------------------------------------------------- /pept/utilities/traverse/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 14.01.2020 7 | 8 | 9 | from .traverse2d import traverse2d 10 | from .traverse3d import traverse3d 11 | 12 | __all__ = [ 13 | "traverse2d", 14 | "traverse3d", 15 | ] 16 | 17 | 18 | __license__ = "GNU v3.0" 19 | __maintainer__ = "Andrei Leonard Nicusan" 20 | __email__ = "a.l.nicusan@bham.ac.uk" 21 | __status__ = "Beta" 22 | -------------------------------------------------------------------------------- /pept/tracking/tof/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 29.09.2021 7 | 8 | 9 | from .base import TimeOfFlight 10 | from .base import GaussianDensity 11 | from .base import CutpointsToF 12 | 13 | from .cutpoints_tof import find_cutpoints_tof 14 | 15 | 16 | __license__ = "GNU v3.0" 17 | __maintainer__ = "Andrei Leonard Nicusan" 18 | __email__ = "a.l.nicusan@bham.ac.uk" 19 | __status__ = "Beta" 20 | -------------------------------------------------------------------------------- /pept/scanners/parallel_screens/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | # File : __init__.py 5 | # License: GNU v3.0 6 | # Author : Andrei Leonard Nicusan 7 | # Date : 20.08.2019 8 | 9 | 10 | from .parallel_screens import parallel_screens 11 | from
.parallel_screens import ADACGeometricEfficiency 12 | from .adac_forte import adac_forte 13 | from .extensions import convert_adac_forte 14 | 15 | 16 | __license__ = "GNU v3.0" 17 | __maintainer__ = "Andrei Leonard Nicusan" 18 | __email__ = "a.l.nicusan@bham.ac.uk" 19 | -------------------------------------------------------------------------------- /pept/utilities/cutpoints/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 14.01.2020 7 | 8 | 9 | from .find_cutpoints import find_cutpoints 10 | from .find_minpoints import find_minpoints 11 | 12 | 13 | __all__ = [ 14 | "find_cutpoints", 15 | "find_minpoints", 16 | ] 17 | 18 | 19 | __license__ = "GNU v3.0" 20 | __maintainer__ = "Andrei Leonard Nicusan" 21 | __email__ = "a.l.nicusan@bham.ac.uk" 22 | __status__ = "Beta" 23 | -------------------------------------------------------------------------------- /pept/scanners/parallel_screens/extensions/binary_converter_ext.h: -------------------------------------------------------------------------------- 1 | /** 2 | * File : binary_converter_ext.h 3 | * License: GNU v3.0 4 | * Author : Andrei Leonard Nicusan 5 | * Date : 29.03.2021 6 | */ 7 | 8 | #ifndef BINARY_CONVERTER_EXT 9 | #define BINARY_CONVERTER_EXT 10 | 11 | 12 | #if defined(_MSC_VER) 13 | // Support the bloody unconforming mess that MSVC is; allow using fopen and ssize_t 14 | #define _CRT_SECURE_NO_DEPRECATE 15 | #include <BaseTsd.h> 16 | typedef SSIZE_T ssize_t; 17 | #else 18 | #include <sys/types.h> 19 | #endif 20 | 21 | 22 | double* read_adac_binary(const char* filepath, ssize_t* lors_elements); 23 | 24 | #endif -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /pept/utilities/misc/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 10.06.2020 7 | 8 | 9 | from .aggregate import group_by_column 10 | from .read_csv import number_of_lines 11 | from .read_csv import read_csv 12 | from .read_csv import read_csv_chunks 13 | from .read_csv import ChunkReader 14 | 15 | 16 | __all__ = [ 17 | "group_by_column", 18 | "number_of_lines", 19 | "read_csv", 20 | "read_csv_chunks", 21 | "ChunkReader" 22 | ] 23 | 24 | 25 | __license__ = "GNU v3.0" 26 | __maintainer__ = "Andrei Leonard Nicusan" 27 | __email__ = "a.l.nicusan@bham.ac.uk" 28 | __status__ = "Beta" 29 | -------------------------------------------------------------------------------- /pept/utilities/cutpoints/find_minpoints_ext.h: -------------------------------------------------------------------------------- 1 | /** 2 | * File : find_minpoints_ext.h 3 | * License: GNU v3.0 4 | * Author : Andrei Leonard Nicusan 5 | * Date : 20.10.2020 6 | */ 7 | 8 | #ifndef FIND_MINPOINTS_EXT 9 | #define FIND_MINPOINTS_EXT 10 | 11 | 12 | #if defined(_MSC_VER) 13 | // Support the bloody unconforming mess that MSVC is; allow using fopen and ssize_t 14 | #define _CRT_SECURE_NO_DEPRECATE 15 | #include <BaseTsd.h> 16 | typedef SSIZE_T ssize_t; 17 | #else 18 | #include <sys/types.h> 19 | #endif 20 | 21 | 22 | double* find_minpoints_ext( 23 | const double *sample_lines, 24 | const ssize_t nrows, 25 | const ssize_t ncols, 26 | const ssize_t num_lines, 27 | const double max_distance, 28 | const double *cutoffs, 29 | const int append_indices, 30 | ssize_t *mpts_nrows, 31 | ssize_t *mpts_ncols 32 | ); 33 | 34 | #endif 35 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/tutorials/interpolating.rst: -------------------------------------------------------------------------------- 1 | Interpolating Timesteps 2 | ======================= 3 | 4 | When extracting post-processed data from tracer trajectories for e.g. probability distributions, it is often important to **sample data at fixed timesteps**. 
As PEPT is natively a Lagrangian technique where tracers can be tracked more often in more sensitive areas of the gamma scanners, we have to convert those "randomly-sampled" positions into regular timesteps using ``Interpolate``. 5 | 6 | First, ``Segregate`` points into individual, continuous trajectory segments, ``GroupBy`` according to each trajectory's label, then ``Interpolate`` into regular timesteps and finally ``Stack`` them back into a ``PointData``: 7 | 8 | :: 9 | 10 | from pept.tracking import * 11 | 12 | pipe = pept.Pipeline([ 13 | Segregate(window = 20, cut_distance = 10.), 14 | GroupBy("label"), 15 | Interpolate(timestep = 5.), 16 | Stack(), 17 | ]) 18 | 19 | trajectories = pipe.fit(trajectories) 20 | 21 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # File : .readthedocs.yml 2 | # License: GNU v3.0 3 | # Author : Andrei Leonard Nicusan 4 | # Date : 29.06.2020 5 | 6 | 7 | # .readthedocs.yml 8 | # Read the Docs configuration file 9 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 10 | 11 | # Required 12 | version: 2 13 | 14 | build: 15 | image: testing 16 | 17 | # Build documentation in the docs/ directory with Sphinx 18 | sphinx: 19 | configuration: docs/source/conf.py 20 | 21 | # Build documentation with MkDocs 22 | # mkdocs: 23 | # configuration: mkdocs.yml 24 | 25 | # Optionally build your docs in additional formats such as PDF 26 | formats: all 27 | 28 | # Optionally set the version of Python and requirements required to build your docs 29 | python: 30 | install: 31 | - requirements: requirements.txt 32 | - requirements: requirements_extra.txt 33 | - method: pip 34 | path: . 35 | extra_requirements: 36 | - requirements_extra.txt 37 | system_packages: true 38 | 39 | 40 | -------------------------------------------------------------------------------- /pept/utilities/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 14.01.2020 7 | 8 | 9 | '''PEPT-oriented utility functions. 10 | 11 | The utility functions include low-level optimised Cython functions (e.g. 12 | `find_cutpoints`) that are of common interest across the `pept` package, as 13 | well as I/O functions, parallel maps and pixel/voxel traversal algorithms. 14 | 15 | Even though the functions are grouped in directories (subpackages) and files 16 | (modules), unlike the rest of the package, they are all imported into the 17 | `pept.utilities` root, so that their import paths are not too long. 18 | ''' 19 | 20 | 21 | from .cutpoints import * 22 | from .traverse import * 23 | from .parallel import * 24 | from .misc import * 25 | 26 | 27 | __license__ = "GNU v3.0" 28 | __maintainer__ = "Andrei Leonard Nicusan" 29 | __email__ = "a.l.nicusan@bham.ac.uk" 30 | __status__ = "Beta" 31 | -------------------------------------------------------------------------------- /pept/processing/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # File : __init__.py 6 | # License: GNU v3.0 7 | # Author : Andrei Leonard Nicusan 8 | # Date : 22.06.2020 9 | 10 | 11 | '''The PEPT-oriented post-processing suite, including occupancy grid, 12 | vector velocity fields, etc. 
13 | 14 | This module contains fast, robust functions that operate on PEPT-like data 15 | and integrate with the `pept` library's base classes. 16 | 17 | ''' 18 | 19 | 20 | from .grids import DynamicProbability2D, ResidenceDistribution2D 21 | from .grids import DynamicProbability3D, ResidenceDistribution3D 22 | from .grids import VectorField2D, VectorField3D 23 | from .grids import VectorGrid2D, VectorGrid3D 24 | 25 | from .mixing import LaceyColors, LaceyColorsLinear 26 | from .mixing import RelativeDeviations, RelativeDeviationsLinear 27 | from .mixing import AutoCorrelation 28 | from .mixing import SpatialProjections 29 | 30 | 31 | __license__ = "GNU v3.0" 32 | __maintainer__ = "Andrei Leonard Nicusan" 33 | __email__ = "a.l.nicusan@bham.ac.uk" 34 | __status__ = "Beta" 35 | -------------------------------------------------------------------------------- /docs/source/manual/processing.rst: -------------------------------------------------------------------------------- 1 | Post Processing (``pept.processing``) 2 | ===================================== 3 | 4 | 5 | .. automodule:: pept.processing 6 | 7 | 8 | Probability / Residence Distributions 9 | ------------------------------------- 10 | 11 | .. autosummary:: 12 | :toctree: generated/ 13 | 14 | pept.processing.DynamicProbability2D 15 | pept.processing.DynamicProbability3D 16 | pept.processing.ResidenceDistribution2D 17 | pept.processing.ResidenceDistribution3D 18 | 19 | 20 | 21 | 22 | Vector Grids 23 | ------------ 24 | 25 | .. autosummary:: 26 | :toctree: generated/ 27 | 28 | pept.processing.VectorField2D 29 | pept.processing.VectorGrid2D 30 | pept.processing.VectorField3D 31 | pept.processing.VectorGrid3D 32 | 33 | 34 | 35 | 36 | Mixing Quantification 37 | --------------------- 38 | 39 | .. autosummary:: 40 | :toctree: generated/ 41 | 42 | pept.processing.LaceyColors 43 | pept.processing.LaceyColorsLinear 44 | pept.processing.RelativeDeviations 45 | pept.processing.RelativeDeviationsLinear 46 | pept.processing.AutoCorrelation 47 | pept.processing.SpatialProjections 48 | 49 | 50 | -------------------------------------------------------------------------------- /pept/plots/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # File : __init__.py 6 | # License: GNU v3.0 7 | # Author : Andrei Leonard Nicusan 8 | # Date : 22.08.2019 9 | 10 | 11 | '''PEPT-oriented visualisation tools. 12 | 13 | Visualisation functions and classes for PEPT data, transparently working with 14 | both `pept` base classes and raw NumPy arrays (e.g. `PlotlyGrapher.add_lines` 15 | handles both `pept.LineData` and (N, 7) NumPy arrays). 16 | 17 | The `PlotlyGrapher` class creates interactive, publication-ready 3D figures 18 | with optional subplots which can also be exported to portable HTML files. The 19 | `PlotlyGrapher2D` class is its two-dimensional counterpart, handling e.g. 20 | `pept.Pixels`. 
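A minimal usage sketch (``lors`` here stands for any ``pept.LineData`` - or a raw (N, 7) NumPy array, as noted above):

::

    import pept
    from pept.plots import PlotlyGrapher

    # Draw the LoRs in an interactive 3D Plotly figure
    grapher = PlotlyGrapher()
    grapher.add_lines(lors)
    grapher.show()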
21 | 22 | ''' 23 | 24 | 25 | from .plotly_grapher import PlotlyGrapher 26 | from .plotly_grapher2d import PlotlyGrapher2D 27 | from .plotly_grapher2d import format_fig 28 | from .plotly_grapher2d import histogram 29 | from .plotly_grapher2d import make_video 30 | 31 | 32 | __license__ = "GNU v3.0" 33 | __maintainer__ = "Andrei Leonard Nicusan" 34 | __email__ = "a.l.nicusan@bham.ac.uk" 35 | __status__ = "Beta" 36 | -------------------------------------------------------------------------------- /docs/source/tutorials/fpi.rst: -------------------------------------------------------------------------------- 1 | Feature Point Identification 2 | ============================ 3 | 4 | FPI is a modern voxel-based tracer-location algorithm that can reliably work with unknown numbers of tracers in fast and noisy environments. 5 | 6 | It was successfully used to track fast-moving radioactive tracers in pipe flows at the Virginia Commonwealth University. If you use this algorithm in your work, please cite the following paper: 7 | 8 | *Wiggins C, Santos R, Ruggles A. A feature point identification method for positron emission particle tracking with multiple tracers. Nuclear Instruments and Methods in Physics Research Section A: Accelerators, Spectrometers, Detectors and Associated Equipment. 2017 Jan 21; 843:22-8.* 9 | 10 | 11 | 12 | 13 | FPI Recipe 14 | ---------- 15 | 16 | As FPI works on voxelized representations of the LoRs, the ``Voxelize`` filter is first used before ``FPI`` itself: 17 | 18 | :: 19 | 20 | import pept 21 | from pept.tracking import * 22 | 23 | resolution = (100, 100, 100) 24 | 25 | pipeline = pept.Pipeline([ 26 | Voxelize(resolution), 27 | FPI(w = 3, r = 0.4), 28 | Stack(), 29 | ]) 30 | 31 | locations = pipeline.fit(lors) 32 | 33 | 34 | -------------------------------------------------------------------------------- /docs/source/tutorials/reading.rst: -------------------------------------------------------------------------------- 1 | Saving / Loading Data 2 | ===================== 3 | 4 | All PEPT objects can be saved in an efficient binary format using ``pept.save`` and 5 | ``pept.load``: 6 | 7 | :: 8 | 9 | import pept 10 | import numpy as np 11 | 12 | # Create some dummy data 13 | lines_raw = np.arange(70).reshape((10, 7)) 14 | lines = pept.LineData(lines_raw) 15 | 16 | # Save data 17 | pept.save("data.pickle", lines) 18 | 19 | # Load data 20 | lines_loaded = pept.load("data.pickle") 21 | 22 | 23 | The binary approach has the advantage of preserving all your metadata saved in the object 24 | instances - e.g. ``columns``, ``sample_size`` - allowing the full state to be reloaded. 25 | 26 | 27 | Matrix-like data like ``pept.LineData`` and ``pept.PointData`` can also be saved in a slower, 28 | but human-readable CSV format using their class methods ``.to_csv``; such tabular data can then 29 | be reinitialised using ``pept.read_csv``: 30 | 31 | :: 32 | 33 | # Save data in CSV format 34 | lines.to_csv("data.csv") 35 | 36 | # Load data back - *this will be a simple NumPy array!* 37 | lines_raw = pept.read_csv("data.csv") 38 | 39 | # Need to put the array back into a `pept.LineData` 40 | lines = pept.LineData(lines_raw) 41 | 42 | 43 | -------------------------------------------------------------------------------- /docs/source/getting_started.rst: -------------------------------------------------------------------------------- 1 | *************** 2 | Getting Started 3 | *************** 4 | These instructions will help you get started with PEPT data analysis. 
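For a quick taste of a complete session - reading scanner data, tracking a tracer and plotting the results - here is a sketch (the binary file name is hypothetical, and each step is explained in the tutorials):

::

    import pept
    from pept.tracking import *
    from pept.plots import PlotlyGrapher

    lors = pept.scanners.adac_forte("experiment.da01")   # hypothetical file
    lors.sample_size = 200

    pipeline = pept.Pipeline([
        BirminghamMethod(fopt = 0.5),
        Stack(),
    ])
    locations = pipeline.fit(lors)

    PlotlyGrapher().add_points(locations).show()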
5 | 6 | 7 | Prerequisites 8 | ------------- 9 | This package supports Python 3.6 and above - it is built and tested for Python 10 | 3.6, 3.7 and 3.8 on Windows, Linux and macOS (thanks to conda-forge_, which is 11 | awesome!). 12 | 13 | You can install it using the batteries-included Anaconda_ distribution or the 14 | bare-bones Python_ interpreter. You can also check out our Python and `pept` 15 | tutorials_. 16 | 17 | .. _conda-forge: https://conda-forge.org/ 18 | .. _Anaconda: https://www.anaconda.com/products/individual 19 | .. _Python: https://www.python.org/downloads/ 20 | .. _tutorials: https://github.com/uob-positron-imaging-centre/tutorials 21 | 22 | 23 | Installation 24 | ------------ 25 | The easiest and quickest installation, if you are using Anaconda: 26 | 27 | :: 28 | 29 | conda install -c conda-forge pept 30 | 31 | You can also install the latest release version of `pept` from PyPI: 32 | 33 | :: 34 | 35 | pip install --upgrade pept 36 | 37 | Or you can install the development version from the GitHub repository: 38 | 39 | :: 40 | 41 | pip install -U git+https://github.com/uob-positron-imaging-centre/pept 42 | 43 | 44 | -------------------------------------------------------------------------------- /pept/tracking/birmingham_method/extensions/birmingham_method_ext.h: -------------------------------------------------------------------------------- 1 | /** 2 | * File : birmingham_method_ext.h 3 | * License : GNU v3.0 4 | * Author : Sam Manger 5 | * Date : 21.08.2019 6 | */ 7 | 8 | 9 | #ifndef BIRMINGHAM_METHOD_EXT 10 | #define BIRMINGHAM_METHOD_EXT 11 | 12 | 13 | #if defined(_MSC_VER) 14 | // Support the bloody unconforming mess that MSVC is; allow using fopen and ssize_t 15 | #define _CRT_SECURE_NO_DEPRECATE 16 | #include <BaseTsd.h> 17 | typedef SSIZE_T ssize_t; 18 | #else 19 | #include <sys/types.h> 20 | #endif 21 | 22 | 23 | #include <math.h> // for sqrt 24 | #include <float.h> // for DBL_MAX 25 | #include <stdlib.h> // for malloc 26 | 27 | 28 | void birmingham_method_ext( 29 | const double *, const ssize_t nrows, const ssize_t ncols, 30 | double *, int *, const double 31 | ); 32 | 33 | // void calculate(const double *, const double *); 34 | 35 | void calculate( 36 | double *, double *, double *, double *, double *, double *, 37 | double *, double *, double *, double *, double *, double *, 38 | double *, double *, double *, double *, double *, double *, 39 | int *, int, int, double * 40 | ); 41 | 42 | #endif 43 | -------------------------------------------------------------------------------- /docs/source/tutorials/birmingham.rst: -------------------------------------------------------------------------------- 1 | The Birmingham Method 2 | ===================== 3 | 4 | The Birmingham Method is an efficient, analytical technique for tracking tracers using the LoRs from PEPT data. 5 | 6 | If you are using it in your research, you are kindly asked to cite the following paper: 7 | 8 | 9 | *Parker DJ, Broadbent CJ, Fowles P, Hawkesworth MR, McNeil P. Positron emission particle tracking-a technique for studying flow within engineering equipment. Nuclear Instruments and Methods in Physics Research Section A: Accelerators, Spectrometers, Detectors and Associated Equipment. 
1993 Mar 10;326(3):592-607.* 10 | 11 | 12 | 13 | Birmingham Method recipe 14 | ------------------------ 15 | 16 | :: 17 | 18 | import pept 19 | from pept.tracking import * 20 | 21 | pipeline = pept.Pipeline([ 22 | BirminghamMethod(fopt = 0.5), 23 | Stack(), 24 | ]) 25 | 26 | locations = pipeline.fit(lors) 27 | 28 | 29 | 30 | Recipe with Trajectory Separation 31 | --------------------------------- 32 | 33 | :: 34 | 35 | import pept 36 | from pept.tracking import * 37 | 38 | pipeline = pept.Pipeline([ 39 | BirminghamMethod(fopt = 0.5), 40 | Segregate(window = 20, cut_distance = 10), 41 | Stack(), 42 | ]) 43 | 44 | locations = pipeline.fit(lors) 45 | 46 | 47 | -------------------------------------------------------------------------------- /pept/base/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 31.01.2020 7 | 8 | 9 | '''PEPT base classes. 10 | ''' 11 | 12 | 13 | from .line_data import LineData 14 | from .point_data import PointData 15 | 16 | from .iterable_samples import IterableSamples 17 | from .iterable_samples import TimeWindow 18 | from .iterable_samples import AdaptiveWindow 19 | from .iterable_samples import AsyncIterableSamples 20 | from .iterable_samples import PEPTObject 21 | 22 | from .pipelines import Transformer 23 | from .pipelines import Filter 24 | from .pipelines import LineDataFilter 25 | from .pipelines import PointDataFilter 26 | from .pipelines import VoxelsFilter 27 | from .pipelines import Reducer 28 | from .pipelines import Pipeline 29 | 30 | from .utilities import check_iterable 31 | 32 | 33 | # Execute code here to add dynamic methods 34 | from .pixels import Pixels 35 | from .voxels import Voxels 36 | 37 | 38 | 39 | 40 | __license__ = "GNU v3.0" 41 | __maintainer__ = "Andrei Leonard Nicusan" 42 | __email__ = "a.l.nicusan@bham.ac.uk" 43 | __status__ = "Beta" 44 | -------------------------------------------------------------------------------- /docs/source/manual/index.rst: -------------------------------------------------------------------------------- 1 | ****** 2 | Manual 3 | ****** 4 | 5 | All public ``pept`` subroutines are fully documented here, along with copy-pastable examples. The `base` functionality is summarised below; the rest of the library is organised into submodules, which you can access on the left. You can also use the `Search` bar in the top left to go directly to what you need. 6 | 7 | We really appreciate all help with writing useful documentation; if you feel something can be improved, or would like to share some example code, by all means get in contact with us - or be a superhero and click `Edit this page` on the right and submit your changes to the GitHub repository directly! 8 | 9 | 10 | Base Functions 11 | ============== 12 | 13 | .. autosummary:: 14 | :toctree: generated/ 15 | 16 | pept.read_csv 17 | pept.load 18 | pept.save 19 | 20 | 21 | 22 | 23 | Base Classes 24 | ============ 25 | 26 | .. autosummary:: 27 | :toctree: generated/ 28 | 29 | pept.LineData 30 | pept.PointData 31 | pept.Pixels 32 | pept.Voxels 33 | pept.Pipeline 34 | 35 | 36 | 37 | 38 | Auxiliaries 39 | ============ 40 | 41 | .. autosummary:: 42 | :toctree: generated/ 43 | 44 | pept.TimeWindow 45 | pept.AdaptiveWindow 46 | 47 | 48 | 49 | 50 | .. 
toctree:: 51 | :hidden: 52 | :caption: Submodules 53 | 54 | base 55 | scanners 56 | tracking 57 | processing 58 | plots 59 | utilities 60 | simulation 61 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /pept/scanners/modular_camera/extensions/get_pept_event_ext.h: -------------------------------------------------------------------------------- 1 | 2 | /** 3 | * pept is a Python library that unifies Positron Emission Particle 4 | * Tracking (PEPT) research, including tracking, simulation, data analysis 5 | * and visualisation tools 6 | * 7 | * Copyright (C) 2019 Andrei Leonard Nicusan 8 | * 9 | * This program is free software: you can redistribute it and/or modify 10 | * it under the terms of the GNU General Public License as published by 11 | * the Free Software Foundation, either version 3 of the License, or 12 | * (at your option) any later version. 13 | * 14 | * This program is distributed in the hope that it will be useful, 15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 17 | * GNU General Public License for more details. 18 | * 19 | * You should have received a copy of the GNU General Public License 20 | * along with this program. If not, see <https://www.gnu.org/licenses/>. 21 | */ 22 | 23 | /** 24 | * File : get_pept_event_ext.h 25 | * License : GNU v3.0 26 | * Author : Sam Manger 27 | * Date : 01.07.2019 28 | */ 29 | 30 | #ifndef GET_PEPT_EVENT_EXT 31 | #define GET_PEPT_EVENT_EXT 32 | 33 | void get_pept_event_ext(double* result, unsigned int word, int itag, int itime); 34 | 35 | void get_pept_LOR_ext(double* LOR, unsigned int word, int itag, int itime); 36 | 37 | #endif 38 | -------------------------------------------------------------------------------- /pept/tracking/trajectory_separation/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 22.08.2019 7 | 8 | 9 | '''Separate the intertwined points from pre-tracked tracer locations into 10 | individual trajectories. 11 | 12 | Extended Summary 13 | ---------------- 14 | A typical PEPT workflow would involve transforming LoRs into points using some 15 | tracking algorithm. These points include all tracers moving through the system, 16 | being intertwined (e.g. for two tracers A and B, the `point_data` array might 17 | have two entries for A, followed by three entries for B, then one entry for A, 18 | etc.). The points can be segregated based on distance alone using the 19 | `Segregate` filter; for well-defined trajectories of tracers 20 | that do not collide, this may be enough to retrieve individual trajectories. 21 | However, for tracers that do come into contact, the identity of the least 22 | active one is usually lost; in such cases, the `Reconnect` filter 23 | can be used to piece back the trajectories of tracers with gaps in their tracks 24 | using some *tracer signature* (e.g. cluster size in PEPT-ML). 
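A minimal sketch of that workflow, using the ``Segregate`` and ``Reconnect`` filters exported below (the parameter values are only illustrative, mirroring the trajectory separation tutorial):

::

    from pept.tracking import Segregate, Reconnect

    # Label continuous trajectory segments using spatio-temporal distances
    trajectories = Segregate(window = 20, cut_distance = 10.).fit(trajectories)

    # Reconnect(...) can then re-join segments belonging to the same tracer
    # using a tracer signature - see its documentation for the parameters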
25 | ''' 26 | 27 | 28 | from .trajectory_separation import Segregate 29 | from .trajectory_separation import Reconnect 30 | 31 | 32 | __license__ = "GNU v3.0" 33 | __maintainer__ = "Andrei Leonard Nicusan" 34 | __email__ = "a.l.nicusan@bham.ac.uk" 35 | __status__ = "Beta" 36 | -------------------------------------------------------------------------------- /docs/source/citing.rst: -------------------------------------------------------------------------------- 1 | ****** 2 | Citing 3 | ****** 4 | 5 | If you used this codebase or any software making use of it in a scientific publication, we ask you to cite the following paper: 6 | 7 | Nicuşan AL, Windows-Yule CR. Positron emission particle tracking using machine learning. Review of Scientific Instruments. 2020 Jan 1;91(1):013329. 8 | https://doi.org/10.1063/1.5129251 9 | 10 | 11 | Because `pept` is a project bringing together the expertise of many people, it hosts multiple algorithms that were developed and published in other papers. Please check the documentation of the `pept` algorithms you are using in your research and cite the original papers mentioned accordingly. 12 | 13 | 14 | 15 | References 16 | ========== 17 | Papers presenting PEPT algorithms included in this library: [1]_, [2]_, [3]_. 18 | 19 | .. [1] Parker DJ, Broadbent CJ, Fowles P, Hawkesworth MR, McNeil P. Positron 20 | emission particle tracking-a technique for studying flow within engineering 21 | equipment. Nuclear Instruments and Methods in Physics Research Section A: 22 | Accelerators, Spectrometers, Detectors and Associated Equipment. 1993 23 | Mar 10;326(3):592-607. 24 | .. [2] Nicuşan AL, Windows-Yule CR. Positron emission particle tracking using 25 | machine learning. Review of Scientific Instruments. 2020 Jan 1;91(1):013329. 26 | .. [3] Wiggins C, Santos R, Ruggles A. A feature point identification method 27 | for positron emission particle tracking with multiple tracers. Nuclear 28 | Instruments and Methods in Physics Research Section A: Accelerators, 29 | Spectrometers, Detectors and Associated Equipment. 2017 Jan 21;843:22-8. 30 | 31 | -------------------------------------------------------------------------------- /docs/source/tutorials/velocities.rst: -------------------------------------------------------------------------------- 1 | Extracting Velocities 2 | ===================== 3 | 4 | When extracting post-processed data from tracer trajectories for e.g. probability distributions, it is often important to **sample data at fixed timesteps**. As PEPT is natively a Lagrangian technique where tracers can be tracked more often in more sensitive areas of the gamma scanners, we have to convert those "randomly-sampled" positions into regular timesteps using ``Interpolate``. 5 | 6 | First, ``Segregate`` points into individual, continuous trajectory segments, ``GroupBy`` according to each trajectory's label, then ``Interpolate`` into regular timesteps, then compute each point's ``Velocity`` (dimension-wise or absolute) and finally ``Stack`` them back into a ``PointData``: 7 | 8 | :: 9 | 10 | from pept.tracking import * 11 | 12 | pipe_vel = pept.Pipeline([ 13 | Segregate(window = 20, cut_distance = 10.), 14 | GroupBy("label"), 15 | Interpolate(timestep = 5.), 16 | Velocity(window = 7), 17 | Stack(), 18 | ]) 19 | 20 | trajectories = pipe_vel.fit(trajectories) 21 | 22 | 23 | The ``Velocity`` step appends columns ``["vx", "vy", "vz"]`` (default) or ``["v"]`` (if ``absolute = True``). 
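The appended columns can be pulled out by name afterwards - a small sketch, assuming ``trajectories`` was processed by the pipeline above:

::

    import numpy as np

    vx, vy, vz = trajectories["vx"], trajectories["vy"], trajectories["vz"]
    speed = np.sqrt(vx**2 + vy**2 + vz**2)    # same quantity as the "v" column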
You can add both if you wish: 24 | 25 | :: 26 | 27 | from pept.tracking import * 28 | 29 | pept.Pipeline([ 30 | Segregate(window = 20, cut_distance = 10.), 31 | GroupBy("label"), 32 | Interpolate(timestep = 5.), 33 | Velocity(window = 7), # Appends vx, vy, vz 34 | Velocity(window = 7, absolute = True), # Appends v 35 | Stack(), 36 | ]) 37 | 38 | 39 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /docs/source/_static/anaconda-symbol.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pept/simulation/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /docs/source/tutorials/converting.rst: -------------------------------------------------------------------------------- 1 | Initialising PEPT Scanner Data 2 | ============================== 3 | 4 | The ``pept.scanners`` submodule contains converters between scanner specific data formats 5 | (e.g. parallel screens / ASCII, modular camera / binary) and the ``pept`` base classes, 6 | allowing simple initialisation of ``pept.LineData`` from different sources. 7 | 8 | 9 | ADAC Forte 10 | ---------- 11 | 12 | The parallel screens detector used at Birmingham can output binary `list-mode` data, which can 13 | be converted using ``pept.scanners.adac_forte(binary_file)``: 14 | 15 | :: 16 | 17 | import pept 18 | 19 | lines = pept.scanners.adac_forte("binary_file.da01") 20 | 21 | 22 | If you have multiple files from the same experiment, e.g. 
"data.da01", "data.da02", etc., you can stitch them all together using a *glob*, "data.da*": 23 | 24 | :: 25 | 26 | import pept 27 | 28 | # Multiple files starting with `binary_file.da` 29 | lines = pept.scanners.adac_forte("binary_file.da*") 30 | 31 | 32 | 33 | Parallel Screens 34 | ---------------- 35 | 36 | If you have your data as a CSV containing 5 columns `[t, x1, y1, x2, y2]` representing the 37 | coordinates of the two points defining an LoR on two parallel screens, you can use 38 | ``pept.scanners.parallel_screens`` to insert the missing coordinates and get the LoRs into 39 | the general ``LineData`` format `[t, x1, y1, z1, x2, y2, z2]`: 40 | 41 | :: 42 | 43 | import pept 44 | 45 | screen_separation = 500 46 | lines = pept.scanners.parallel_screens(csv_or_array, screen_separation) 47 | 48 | 49 | Modular Camera 50 | -------------- 51 | 52 | Your modular camera data can be initialised using ``pept.scanners.modular_camera``: 53 | 54 | :: 55 | 56 | import pept 57 | 58 | lines = pept.scanners.modular_camera(filepath) 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /pept/scanners/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | # File : __init__.py 5 | # License: License: GNU v3.0 6 | # Author : Andrei Leonard Nicusan 7 | # Date : 20.08.2019 8 | 9 | 10 | '''Convert data from different PET / PEPT scanner geometries and data formats 11 | into the common base classes. 12 | 13 | The PEPT base classes `PointData`, `LineData`, and `VoxelData` are abstractions 14 | over the type of data that may be encountered in the context of PEPT (e.g. LoRs 15 | are `LineData`, trajectory points are `PointData`). Once the raw data is 16 | transformed into the common formats, any tracking, analysis or visualisation 17 | algorithm in the `pept` package can be used interchangeably. 18 | 19 | The `pept.scanners` subpackage provides modules for transforming the raw data 20 | from different PET / PEPT scanner geometries (parallel screens, modular 21 | cameras, etc.) and data formats (binary, ASCII, etc.) into the common base 22 | classes. 23 | 24 | If you'd like to integrate another scanner geometry or raw data format into 25 | this package, you can check out the `pept.scanners.parallel_screens` function 26 | as an example. This usually only involves writing a single function by hand; 27 | then all functionality from `LineData` will be available to your new data 28 | format, for free. 29 | ''' 30 | 31 | 32 | from .parallel_screens import parallel_screens, adac_forte 33 | from .parallel_screens import ADACGeometricEfficiency 34 | from .modular_camera import modular_camera 35 | 36 | 37 | __author__ = ["Andrei Leonard Nicusan", "Sam Manger"] 38 | __credits__ = [ 39 | "Andrei Leonard Nicusan", 40 | "Kit Windows-Yule", 41 | "Sam Manger" 42 | ] 43 | __license__ = "GNU v3.0" 44 | __maintainer__ = "Andrei Leonard Nicusan" 45 | __email__ = "a.l.nicusan@bham.ac.uk" 46 | __status__ = "Beta" 47 | -------------------------------------------------------------------------------- /docs/source/tutorials/tracking_errors.rst: -------------------------------------------------------------------------------- 1 | Tracking Errors 2 | =============== 3 | 4 | When processing more difficult datasets - scattering environments, low tracer activities, etc. - 5 | it is often useful to use some tracer statistics to remove erroneous locations. 
6 | 7 | Most PEPT algorithms will include some measure of the tracer location errors, for example: 8 | 9 | - The ``Centroids(error = True)`` filter appends a column "error" representing the standard 10 | deviation of the distances from the computed centroid to the constituent points. For a 11 | 500 mm scanner, a spread in a tracer location of 100 mm is clearly an erroneous point. 12 | - The ``Centroids(cluster_size = True)`` filter appends a column "cluster_size" representing 13 | the number of points used to compute the centroid. If a sample of 200 LoRs yields a tracer 14 | location computed from 5 points, it is clearly noise. 15 | - The ``BirminghamMethod`` filter includes a column "error" representing the standard 16 | deviation of the distances from the tracer position to the constituent LoRs. 17 | 18 | 19 | Histogram of Tracking Errors 20 | ---------------------------- 21 | 22 | You can select a named column via string indexing, e.g. ``trajectories["error"]``; you can 23 | then plot a histogram of the relative errors with: 24 | 25 | :: 26 | 27 | import plotly.express as px 28 | px.histogram(trajectories["error"]).show() # Large values are noise 29 | px.histogram(trajectories["cluster_size"]).show() # Small values are noise 30 | 31 | 32 | It is often useful to remove points with an error higher than a certain value, e.g. 20 mm: 33 | 34 | :: 35 | 36 | trajectories = Condition("error < 20").fit(trajectories) 37 | 38 | # Or simply append the `Condition` to the `pept.Pipeline` 39 | pipeline = pept.Pipeline([ 40 | ... 41 | Condition("cluster_size > 30, error < 20"), 42 | ... 43 | ]) 44 | 45 | 46 | -------------------------------------------------------------------------------- /pept/scanners/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | # File : __init__.py 5 | # License: GNU v3.0 6 | # Author : Andrei Leonard Nicusan 7 | # Date : 20.08.2019 8 | 9 | 10 | '''Convert data from different PET / PEPT scanner geometries and data formats 11 | into the common base classes. 12 | 13 | The PEPT base classes `PointData`, `LineData`, and `Voxels` are abstractions 14 | over the type of data that may be encountered in the context of PEPT (e.g. LoRs 15 | are `LineData`, trajectory points are `PointData`). Once the raw data is 16 | transformed into the common formats, any tracking, analysis or visualisation 17 | algorithm in the `pept` package can be used interchangeably. 18 | 19 | The `pept.scanners` subpackage provides modules for transforming the raw data 20 | from different PET / PEPT scanner geometries (parallel screens, modular 21 | cameras, etc.) and data formats (binary, ASCII, etc.) into the common base 22 | classes. 23 | 24 | If you'd like to integrate another scanner geometry or raw data format into 25 | this package, you can check out the `pept.scanners.parallel_screens` function 26 | as an example. This usually only involves writing a single function by hand; 27 | then all functionality from `LineData` will be available to your new data 28 | format, for free. 29 | ''' 30 | 31 | 32 | from .parallel_screens import parallel_screens, adac_forte 33 | from .parallel_screens import ADACGeometricEfficiency 34 | from .modular_camera import modular_camera 35 | 36 | 37 | __author__ = ["Andrei Leonard Nicusan", "Sam Manger"] 38 | __credits__ = [ 39 | "Andrei Leonard Nicusan", 40 | "Kit Windows-Yule", 41 | "Sam Manger" 42 | ] 43 | __license__ = "GNU v3.0" 44 | __maintainer__ = "Andrei Leonard Nicusan" 45 | __email__ = "a.l.nicusan@bham.ac.uk" 46 | __status__ = "Beta" 47 | -------------------------------------------------------------------------------- /docs/source/manual/tracking.rst: -------------------------------------------------------------------------------- 1 | Tracking Algorithms (``pept.tracking``) 2 | ======================================= 3 | 4 | 5 | .. automodule:: pept.tracking 6 | 7 | 8 | 9 | Tracking Optimisation 10 | --------------------- 11 | 12 | .. autosummary:: 13 | :toctree: generated/ 14 | 15 | pept.tracking.Debug 16 | pept.tracking.OptimizeWindow 17 | 18 | 19 | 20 | General-Purpose Transformers 21 | ---------------------------- 22 | 23 | .. autosummary:: 24 | :toctree: generated/ 25 | 26 | pept.tracking.Stack 27 | pept.tracking.SplitLabels 28 | pept.tracking.SplitAll 29 | pept.tracking.GroupBy 30 | pept.tracking.Centroids 31 | pept.tracking.LinesCentroids 32 | pept.tracking.Condition 33 | pept.tracking.SamplesCondition 34 | pept.tracking.Remove 35 | pept.tracking.Swap 36 | 37 | 38 | 39 | Space Transformers 40 | ------------------ 41 | 42 | .. autosummary:: 43 | :toctree: generated/ 44 | 45 | pept.tracking.Voxelize 46 | pept.tracking.Interpolate 47 | pept.tracking.Reorient 48 | pept.tracking.OutOfViewFilter 49 | pept.tracking.RemoveStatic 50 | 51 | 52 | 53 | Tracer Locating Algorithms 54 | -------------------------- 55 | 56 | .. autosummary:: 57 | :toctree: generated/ 58 | 59 | pept.tracking.BirminghamMethod 60 | pept.tracking.Cutpoints 61 | pept.tracking.Minpoints 62 | pept.tracking.HDBSCAN 63 | pept.tracking.FPI 64 | 65 | 66 | 67 | Trajectory Separation Algorithms 68 | -------------------------------- 69 | 70 | .. autosummary:: 71 | :toctree: generated/ 72 | 73 | pept.tracking.Segregate 74 | pept.tracking.Reconnect 75 | 76 | 77 | 78 | Time Of Flight Algorithms 79 | ------------------------- 80 | 81 | .. 
autosummary:: 82 | :toctree: generated/ 83 | 84 | pept.tracking.TimeOfFlight 85 | pept.tracking.CutpointsToF 86 | pept.tracking.GaussianDensity 87 | 88 | 89 | 90 | Post Processing Algorithms 91 | -------------------------- 92 | 93 | .. autosummary:: 94 | :toctree: generated/ 95 | 96 | pept.tracking.Velocity 97 | 98 | -------------------------------------------------------------------------------- /docs/source/tutorials/trajectory_separation.rst: -------------------------------------------------------------------------------- 1 | Trajectory Separation 2 | ===================== 3 | 4 | 5 | Segregate Points 6 | ---------------- 7 | 8 | We can separate out trajectory segments / points that are spatio-temporally far away to: 9 | 10 | 1. Remove spurious, noisy points. 11 | 2. Separate out continuous trajectory segments. 12 | 13 | The *spatio-temporal metric* differentiates between points that may be in the same location at different times. This is achieved by allowing points to be connected in a sliding window approach. 14 | 15 | The ``pept.tracking.Segregate`` algorithm works by creating a *Minimum Spanning Tree* (MST, or minimum distance path) connecting all points in a dataset, then *cutting* all paths longer than a ``cut_distance``. All distinct segments are assigned a trajectory ``'label'`` (integer starting from 0); trajectories with fewer than ``min_trajectory_size`` points are considered noise (label `-1`). 16 | 17 | 18 | :: 19 | 20 | from pept.tracking import * 21 | 22 | trajectories = Segregate(window = 20, cut_distance = 10.).fit(trajectories) 23 | 24 | 25 | Consider all trajectories with fewer than 50 points to be noise: 26 | 27 | 28 | :: 29 | 30 | segr = Segregate( 31 | window = 20, 32 | cut_distance = 10., 33 | min_trajectory_size = 50, 34 | ) 35 | 36 | trajectories = segr.fit(trajectories) 37 | 38 | 39 | This step adds a new column "label". We can group each individual trajectory into a list with ``GroupBy``: 40 | 41 | :: 42 | 43 | traj_list = GroupBy("label").fit(trajectories) 44 | traj_list[0] # First trajectory 45 | 46 | 47 | *[New in pept-0.5.2]* Only connect points within a time interval; in other words, disconnect into different trajectories points whose timestamps are further apart than ``max_time_interval``: 48 | 49 | :: 50 | 51 | segr = Segregate( 52 | window = 20, 53 | cut_distance = 10., 54 | min_trajectory_size = 50, 55 | max_time_interval = 2000, # Disconnect tracer with >2s gap 56 | ) 57 | 58 | trajectories = segr.fit(trajectories) 59 | 60 | -------------------------------------------------------------------------------- /pept/tracking/fpi/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : __init__.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 15.04.2021 7 | 8 | 9 | '''The `fpi` package implements the Feature Point Identification (FPI) 10 | algorithm for robust voxel-based multiple tracer tracking, using the original 11 | code kindly shared by [1]_. 12 | 13 | Summary 14 | ------- 15 | A typical workflow for using the `fpi` subpackage would be: 16 | 17 | 1. Read the LoRs into a `pept.LineData` class instance and set the 18 | `sample_size` and `overlap` appropriately. 19 | 2. Voxellise the `pept.LineData` samples with a `pept.VoxelData` class (this 20 | can be done on demand, saving memory). 21 | 3. Instantiate a `pept.tracking.fpi.FPI` class and transform the voxellised 22 | LoRs into tracer locations using the `fit` method. 
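In code, the workflow above might look like the following sketch; it uses the filter-based API shown in the FPI tutorial (the ``sample_size`` and FPI parameters are illustrative values only):

::

    import pept
    from pept.tracking import Voxelize, FPI, Stack

    lors.sample_size = 200                    # step 1: sampled pept.LineData
    pipeline = pept.Pipeline([
        Voxelize((100, 100, 100)),            # step 2: voxellise each sample
        FPI(w = 3, r = 0.4),                  # step 3: locate the tracers
        Stack(),
    ])
    locations = pipeline.fit(lors)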
23 | 24 | Extended Summary 25 | ---------------- 26 | [TODO: add more detailed summary of how FPI works]. 27 | 28 | It was successfully used to track fast-moving radioactive tracers in pipe flows 29 | at Virginia Commonwealth University. If you use this algorithm in your 30 | work, please cite the original paper [1]_. 31 | 32 | Modules Provided 33 | ---------------- 34 | 35 | :: 36 | 37 | pept.tracking.fpi 38 | │ 39 | Functions imported into the subpackage root: 40 | ├── fpi_ext : Low-level C++ FPI subroutine. 41 | │ 42 | Classes imported into the subpackage root: 43 | └── FPI : Find tracer locations from samples of voxellised LoRs. 44 | 45 | References 46 | ---------- 47 | .. [1] Wiggins C, Santos R, Ruggles A. A feature point identification method 48 | for positron emission particle tracking with multiple tracers. Nuclear 49 | Instruments and Methods in Physics Research Section A: Accelerators, 50 | Spectrometers, Detectors and Associated Equipment. 2017 Jan 21;843:22-8. 51 | ''' 52 | 53 | 54 | from .fpi import FPI 55 | from .fpi_ext import fpi_ext 56 | 57 | 58 | __license__ = "GNU v3.0" 59 | __maintainer__ = "Andrei Leonard Nicusan" 60 | __email__ = "a.l.nicusan@bham.ac.uk" 61 | __status__ = "Beta" 62 | -------------------------------------------------------------------------------- /pept/utilities/misc/aggregate.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : aggregate.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 10.06.2020 7 | 8 | 9 | import numpy as np 10 | 11 | 12 | def group_by_column(data_array, column_to_separate): 13 | '''Group the rows in a 2D `data_array` based on the unique values in a 14 | given `column_to_separate`, returning the groups as a list of numpy arrays. 15 | 16 | Parameters 17 | ---------- 18 | data_array : (M, N) numpy.ndarray 19 | A generic 2D numpy array-like (will be converted using numpy.asarray). 20 | column_to_separate : int 21 | The column index in `data_array` from which the unique values will be 22 | used for grouping. 23 | 24 | Returns 25 | ------- 26 | groups : list of numpy.ndarray 27 | A list whose elements are 2D numpy arrays - these are sub-arrays from 28 | `data_array` for which the entries in the column `column_to_separate` 29 | are the same. 30 | 31 | Raises 32 | ------ 33 | ValueError 34 | If data_array does not have exactly 2 dimensions. 35 | 36 | Examples 37 | -------- 38 | Separate a 5x3 numpy array based on the last column: 39 | 40 | >>> x = np.array([ 41 | >>> [1, 2, 1], 42 | >>> [5, 3, 1], 43 | >>> [1, 1, 2], 44 | >>> [5, 2, 1], 45 | >>> [2, 4, 2] 46 | >>> ]) 47 | >>> x_sep = pept.utilities.group_by_column(x, -1) 48 | >>> x_sep 49 | >>> [array([[1, 2, 1], 50 | >>> [5, 3, 1], 51 | >>> [5, 2, 1]]), 52 | >>> array([[1, 1, 2], 53 | >>> [2, 4, 2]])] 54 | 55 | ''' 56 | 57 | data_array = np.asarray(data_array) 58 | if data_array.ndim != 2: 59 | raise ValueError(( 60 | "\n[ERROR]: `data_array` should have exactly 2 dimensions. 
49 | ''' 50 | 51 | 52 | from .birmingham_method import BirminghamMethod 53 | from .extensions.birmingham_method import birmingham_method 54 | 55 | 56 | __all__ = [ 57 | 'BirminghamMethod', 58 | 'birmingham_method', 59 | ] 60 | 61 | 62 | __license__ = "GNU v3.0" 63 | __maintainer__ = "Sam Manger" 64 | __email__ = "s.manger@bham.ac.uk" 65 | __status__ = "Beta" 66 | -------------------------------------------------------------------------------- /.github/workflows/build_wheels.yml: -------------------------------------------------------------------------------- 1 | name: Build and upload pept to PyPI on an OS and Python version matrix 2 | 3 | # env: 4 | # CIBW_BUILD: cp37-* cp38-* cp39-* 5 | # CIBW_TEST_REQUIRES: pytest 6 | # CIBW_TEST_COMMAND: "pytest {project}/tests" 7 | 8 | # Disable building PyPy wheels on all platforms 9 | env: 10 | CIBW_SKIP: pp* cp36-* 11 | 12 | # Build on every branch push, tag push, and pull request change: 13 | # on: [push, pull_request] 14 | # Manually trigger workflow 15 | on: workflow_dispatch 16 | 17 | jobs: 18 | build_wheels: 19 | name: Build pept wheels on ${{ matrix.os }} 20 | runs-on: ${{ matrix.os }} 21 | strategy: 22 | matrix: 23 | os: [ubuntu-latest, windows-latest, macos-latest] 24 | 25 | steps: 26 | - uses: actions/checkout@v2 27 | 28 | - uses: actions/setup-python@v2 29 | name: Install Python 30 | with: 31 | python-version: '3.8' 32 | 33 | - name: Build wheels 34 | uses: pypa/cibuildwheel@v2.1.1 35 | 36 | - uses: actions/upload-artifact@v2 37 | with: 38 | path: ./wheelhouse/*.whl 39 | 40 | build_sdist: 41 | name: Build source distribution 42 | runs-on: ubuntu-latest 43 | steps: 44 | - uses: actions/checkout@v2 45 | 46 | - uses: actions/setup-python@v2 47 | name: Install Python 48 | with: 49 | python-version: '3.8' 50 | 51 | - name: Install package dependencies 52 | uses: py-actions/py-dependency-install@v2 53 | 54 | - name: Build sdist 55 | run: python setup.py sdist 56 | 57 | - uses: actions/upload-artifact@v2 58 | with: 59 | path: dist/*.tar.gz 60 | 61 | upload_pypi: 62 | needs: [build_wheels, build_sdist] 63 | runs-on: ubuntu-latest 64 | # upload to PyPI on every tag starting with 'v' 65 | # if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') 66 | # alternatively, to publish when a GitHub Release is created, use the following rule: 67 | # if: github.event_name == 'release' && github.event.action == 'published' 68 | steps: 69 | - uses: actions/download-artifact@v2 70 | with: 71 | name: artifact 72 | path: dist 73 | 74 | - uses: pypa/gh-action-pypi-publish@v1.4.2 75 | with: 76 | user: __token__ 77 | password: ${{ secrets.PYPI_API_TOKEN }} 78 | # repository_url: https://test.pypi.org/legacy/ 79 | -------------------------------------------------------------------------------- /prototype/pipeline_optimise.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : pipeline_optimise.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 24.11.2021 7 | 8 | 9 | import numpy as np 10 | import pept 11 | from pept.tracking import * 12 | from pept.plots import PlotlyGrapher 13 | from pept.plots import PlotlyGrapher2D 14 | 15 | import plotly.graph_objs as go 16 | from plotly.subplots import make_subplots 17 | 18 | 19 | lors = pept.scanners.parallel_screens( 20 | "https://raw.githubusercontent.com/uob-positron-imaging-centre/" + 21 | "example_data/master/sample_1p_fluidised_bed.csv", 22 | 600, 23 | sample_size = 200, 24 | 
skiprows = 15, 25 | ) 26 | 27 | 28 | pipeline = pept.Pipeline([ 29 | Stack(sample_size = 300, overlap = 150), 30 | BirminghamMethod(fopt = 0.5), 31 | Stack(), 32 | ]) 33 | 34 | 35 | # Create PEPT-ML processing pipeline 36 | pipeline = pept.Pipeline([ 37 | # First pass of clustering 38 | Stack(sample_size = 136, overlap = 68), 39 | Cutpoints(max_distance = 0.4), 40 | HDBSCAN(true_fraction = 0.96), 41 | SplitLabels() + Centroids(error = True), 42 | Stack(), 43 | ]) 44 | 45 | 46 | hist = pipeline.optimise( 47 | lors.lines[:10], 48 | sample_size = [100, 200], 49 | overlap = [0, 190], 50 | max_distance = [0.01, 2.0], 51 | true_fraction = [0, 1], 52 | ) 53 | 54 | 55 | nanhist = ~np.isfinite(hist[:, -1]) 56 | smahist = hist[:, -1] < np.quantile(hist[:, -1], 0.8) 57 | 58 | fig = go.Figure() 59 | 60 | fig.add_trace(go.Scatter3d( 61 | x = hist[smahist, 0], 62 | y = hist[smahist, 1], 63 | z = hist[smahist, -1], 64 | mode = "markers", 65 | marker = dict( 66 | color = np.arange(smahist.sum()), 67 | ) 68 | )) 69 | 70 | fig.add_trace(go.Scatter3d( 71 | x = hist[nanhist, 0], 72 | y = hist[nanhist, 1], 73 | z = np.zeros(nanhist.sum()), 74 | mode = "markers", 75 | marker = dict( 76 | color = "red", 77 | ) 78 | )) 79 | 80 | fig.show() 81 | 82 | 83 | fig2 = make_subplots(3, 1) 84 | ep = np.arange(len(hist)) 85 | fig2.add_trace(go.Scatter(x=ep, y = hist[:, -3]), 1, 1) 86 | fig2.add_trace(go.Scatter(x=ep, y = hist[:, -2]), 2, 1) 87 | fig2.add_trace(go.Scatter(x=ep, y = hist[:, -1]), 3, 1) 88 | fig2.show() 89 | 90 | 91 | traj = pipeline.fit(lors) 92 | 93 | 94 | PlotlyGrapher().add_points(traj).show() 95 | PlotlyGrapher2D().add_timeseries(traj).show() 96 | -------------------------------------------------------------------------------- /pept/base/utilities.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : utilities.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 08.08.2021 7 | 8 | 9 | import textwrap 10 | 11 | 12 | def check_homogeneous_types(iterable): 13 | if len(iterable) == 0: 14 | return 15 | 16 | base_type = type(iterable[0]) 17 | for i, e in enumerate(iterable): 18 | if not isinstance(e, base_type): 19 | raise TypeError(textwrap.fill(( 20 | "The input iterable must have homogeneous types. The first " 21 | f"element was of type `{base_type}`, but the element at index " 22 | f"{i} was of type `{type(e)}`." 23 | ))) 24 | 25 | 26 | def check_iterable(target_type, **kwargs): 27 | '''Check that an iterable has elements of type `target_type`. 28 | 29 | Performance caveat: only checks the first element. If the iterable is empty 30 | it passes. 31 | 32 | Raises 33 | ------ 34 | TypeError 35 | If the first keyword argument value is not an iterable or its first 36 | element is not an object of type `target_type`. 37 | 38 | Examples 39 | -------- 40 | >>> check_iterable(PointData, samples = [PointData(...), PointData(...)]) 41 | ''' 42 | # Extract the first keyword argument name and value 43 | for obj, val in kwargs.items(): 44 | break 45 | 46 | if not hasattr(val, "__iter__"): 47 | raise TypeError(textwrap.fill(( 48 | f"The input `{obj}` must be an iterable (list, tuple, PointData, " 49 | f"LineData, etc.). Received type=`{type(val)}`." 50 | ))) 51 | 52 | if len(val) and not isinstance(val[0], target_type): 53 | raise TypeError(textwrap.fill(( 54 | f"The input `{obj}` must be an iterable containing elements of " 55 | f"type `{target_type}`. 
The first element in `{obj}` was of type " 56 | f"`{type(val[0])}`." 57 | ))) 58 | 59 | 60 | def memoryview_safe(x): 61 | """Make array safe to run in a Cython memoryview-based kernel. These 62 | kernels typically break down with the error ``ValueError: buffer source 63 | array is read-only`` when running in dask distributed or joblib. 64 | 65 | Taken from `https://github.com/dask/distributed/issues/1978`. 66 | """ 67 | if not x.flags.writeable: 68 | if not x.flags.owndata: 69 | x = x.copy(order='C') 70 | x.setflags(write=True) 71 | return x 72 | -------------------------------------------------------------------------------- /docs/source/contributing.rst: -------------------------------------------------------------------------------- 1 | ************ 2 | Contributing 3 | ************ 4 | 5 | The `pept` library is not a one-man project; it is being built, improved and extended continuously (directly or indirectly) by an international team of researchers of diverse backgrounds - including programmers, mathematicians and chemical / mechanical / nuclear engineers. Want to contribute and become a PEPTspert yourself? Great, join the team! 6 | 7 | There are multiple ways to help: 8 | 9 | - Open an issue mentioning any improvement you think `pept` could benefit from. 10 | - Write a tutorial or share scripts you've developed that we can add to the `pept` documentation to help other people in the future. 11 | - Share your PEPT-related algorithms - tracking, post-processing, visualisation, anything really! - so everybody can benefit from them. 12 | 13 | Want to be a superhero and contribute code directly to the library itself? Grand - fork the project, add your code and submit a pull request (if that sounds like gibberish but you're an eager programmer, check `this article 14 | `_). We are more than happy to work with you on integrating your code into the library and, if helpful, we can schedule a screen-to-screen meeting for a more in-depth discussion about the `pept` package architecture. 15 | 16 | Naturally, anything you contribute to the library will respect your authorship - protected by the strong GPL v3.0 open-source license (see the "Licensing" section below). If you include published work, please add a pointer to your publication in the code documentation. 17 | 18 | 19 | Licensing 20 | ========= 21 | 22 | The `pept` package is `GPL v3.0 23 | `_ licensed. In non-lawyer terms, the key points of this license are: 24 | 25 | - You can view, use, copy and modify this code **freely**. 26 | - Your modifications must *also* be licensed with GPL v3.0 or later. 27 | - If you share your modifications with someone, you have to include the source code as well. 28 | 29 | Essentially do whatever you want with the code, but don't try selling it saying it's yours :). This is a community-driven project building upon many other wonderful open-source projects (NumPy, Plotly, even Python itself!) without which `pept` simply would not have been possible. GPL v3.0 is indeed a very strong *copyleft* license; it was deliberately chosen to maintain the openness and transparency of great software and progress, and respect the researchers pushing PEPT forward. Frankly, open collaboration is way more efficient than closed, for-profit competition. 
30 | 31 | -------------------------------------------------------------------------------- /docs/source/tutorials/adaptive_samples.rst: -------------------------------------------------------------------------------- 1 | Adaptive Sampling 2 | ================= 3 | 4 | Perhaps the most important decision a PEPT user must make is how the LoRs are divided into samples. The two most common approaches are: 5 | 6 | **Fixed sample size**: a constant number of elements per sample, with potential overlap between samples. 7 | 8 | - Advantages: effectively adapts spatio-temporal resolution, with higher accuracy in more active PEPT scanner regions. 9 | - Disadvantages: when a tracer exits the field of view, the last LoRs recorded before it leaves are joined in the same sample with the first LoRs recorded when it re-enters the scanner. 10 | 11 | **Fixed time window**: a constant time interval in which LoRs are aggregated, with potential overlap. 12 | 13 | - Advantages: robust to tracers moving out of the field of view. 14 | - Disadvantages: non-adaptive temporal resolution. 15 | 16 | The two approaches can be combined into a single ``pept.AdaptiveWindow``, which works as a fixed time window, except when more LoRs are encountered than a given limit, in which case the time window is shrunk - hence adapting the window length to the local LoR rate. 17 | 18 | 19 | :: 20 | 21 | import pept 22 | 23 | # A time window of 5 ms shrinking when encountering more than 200 LoRs 24 | lors = pept.LineData(..., sample_size = pept.AdaptiveWindow(5.0, 200)) 25 | 26 | # A time window of 12 ms with the number of LoRs capped at 200 and an overlap of 6 ms 27 | lors = pept.scanners.adac_forte( 28 | ..., 29 | sample_size = pept.AdaptiveWindow(12., 200), 30 | overlap = pept.AdaptiveWindow(6.), 31 | ) 32 | 33 | 34 | 35 | Moreover, if an ideal number of LoRs is selected, there exists an optimum time window for which most samples will have roughly this ideal number of LoRs, except when the tracer is out of the field of view or is static. This can be automatically selected using ``pept.tracking.OptimizeWindow``: 36 | 37 | 38 | :: 39 | 40 | import pept 41 | import pept.tracking as pt 42 | 43 | # Find an adaptive time window that is ideal for about 200 LoRs per sample 44 | lors = pept.LineData(...) 45 | lors = pt.OptimizeWindow(ideal_elems = 200).fit(lors) 46 | 47 | 48 | `OptimizeWindow` can be used at the start of a pipeline; an optional `overlap` parameter can be used to define an overlap as a ratio to the ideal time window found. 
For example, if the ideal time window found is 100 ms, an overlap of 0.5 will result in an overlapping time interval of 50 ms: 49 | 50 | :: 51 | 52 | import pept 53 | from pept.tracking import * 54 | 55 | pipeline = pept.Pipeline([ 56 | OptimizeWindow(200), 57 | BirminghamMethod(fopt = 0.5), 58 | Stack(), 59 | ]) 60 | 61 | locations = pipeline.fit(lors) 62 | 63 | -------------------------------------------------------------------------------- /pept/scanners/modular_camera/extensions/get_pept_event.pyx: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # pept is a Python library that unifies Positron Emission Particle 6 | # Tracking (PEPT) research, including tracking, simulation, data analysis 7 | # and visualisation tools 8 | # 9 | # Copyright (C) 2019 Andrei Leonard Nicusan 10 | # 11 | # This program is free software: you can redistribute it and/or modify 12 | # it under the terms of the GNU General Public License as published by 13 | # the Free Software Foundation, either version 3 of the License, or 14 | # (at your option) any later version. 15 | # 16 | # This program is distributed in the hope that it will be useful, 17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | # GNU General Public License for more details. 20 | # 21 | # You should have received a copy of the GNU General Public License 22 | # along with this program. If not, see . 23 | 24 | 25 | # File : get_pept_event.pyx 26 | # License : License: GNU v3.0 27 | # Author : Sam Manger 28 | # Date : 27.06.2019 29 | 30 | 31 | #!python 32 | #cython: language_level=3 33 | 34 | 35 | cdef extern from "get_pept_event_ext.c": 36 | # C is included here so that it doesn't need to be compiled externally 37 | pass 38 | 39 | cdef extern from "get_pept_event_ext.h": 40 | void get_pept_event_ext(double *, unsigned int, int, int) 41 | void get_pept_LOR_ext(double *, unsigned int, int, int) 42 | 43 | import numpy as np 44 | 45 | def get_pept_event(word, itag, itime): 46 | # Lines for a single sample => n x 12 array 47 | # sampleLines row: [word time itag MPnum Bucket1 Bucket2 Block1 Block2 Seg1 Seg2 Plane1 Plane2] 48 | 49 | cdef unsigned int word_C = word 50 | cdef int itag_C = itag 51 | cdef int itime_C = itime 52 | 53 | data_array = np.zeros(12, order='C') # Allocate enough memory 54 | # data_array = np.ravel(data_array, order='C') # Cast into 1D array to send to C 55 | 56 | cdef double[::1] data_array_memview = data_array 57 | 58 | get_pept_event_ext(&data_array_memview[0], word_C, itag_C, itime_C) 59 | 60 | return data_array 61 | 62 | def get_pept_LOR(word, itag, itime): 63 | # Lines for a single sample => n x 8 array 64 | # sampleLines row: [itag time X1 Y1 Z1 X2 Y2 Z2] 65 | 66 | cdef unsigned int word_C = word 67 | cdef int itag_C = itag 68 | cdef int itime_C = itime 69 | 70 | LOR = np.zeros(8, order='C') # Allocate enough memory 71 | # data_array = np.ravel(data_array, order='C') # Cast into 1D array to send to C 72 | 73 | cdef double[::1] LOR_memview = LOR 74 | 75 | get_pept_LOR_ext(&LOR_memview[0], word_C, itag_C, itime_C) 76 | 77 | return LOR 78 | -------------------------------------------------------------------------------- /docs/source/tutorials/index.rst: -------------------------------------------------------------------------------- 1 | ********* 2 | Tutorials 3 | ********* 4 | 5 | The main purpose of the PEPT library is to provide a common, 
consistent 6 | foundation for PEPT-related algorithms, including tracer tracking, 7 | visualisation and post-processing tools - such that they can be used 8 | interchangeably, mixed and matched for any PEPT camera and system. Virtually 9 | any PEPT processing routine follows these steps: 10 | 11 | 1. Convert raw gamma camera / scanner data into 3D lines (i.e. the captured 12 | gamma rays, or lines of response - LoRs). 13 | 2. Take a sample of lines, locate tracer locations, then repeat for the next 14 | samples. 15 | 3. Separate out individual tracer trajectories. 16 | 4. Visualise and post-process trajectories. 17 | 18 | For these algorithm-agnostic steps, PEPT provides four base data structures 19 | upon which the rest of the library is built: 20 | 21 | 1. ``pept.LineData``: general 3D line samples, formatted as *[time, x1, y1, z1, 22 | x2, y2, z2, extra...]*. 23 | 2. ``pept.PointData``: general 3D point samples, formatted as *[time, x, y, z, 24 | extra...]*. 25 | 3. ``pept.Pixels``: single 2D pixellised space with physical dimensions, 26 | including fast line traversal. 27 | 4. ``pept.Voxels``: single 3D voxellised space with physical dimensions, 28 | including fast line traversal. 29 | 30 | For example, once you convert your PEPT data - from any scanner - into 31 | ``pept.LineData``, all the algorithms in this library can be used. 32 | 33 | All the data structures above are built on top of NumPy and integrate natively 34 | with the rest of the Python / SciPy ecosystem. The rest of the PEPT library is 35 | organised into submodules: 36 | 37 | 1. ``pept.scanners``: converters between native scanner data and the base 38 | data structures. 39 | 2. ``pept.tracking``: radioactive tracer tracking algorithms, e.g. the 40 | Birmingham method, PEPT-ML, FPI. 41 | 3. ``pept.plots``: PEPT data visualisation subroutines. 42 | 4. ``pept.utilities``: general-purpose helpers, e.g. ``read_csv``, 43 | ``traverse3d``. 44 | 5. ``pept.processing``: PEPT-oriented post-processing algorithms, e.g. 45 | ``VectorField3D``. 46 | 47 | 48 | ------------ 49 | 50 | 51 | If you are new to the PEPT library, we recommend going through this interactive 52 | online notebook, which introduces all the fundamental concepts of the library: 53 | 54 | https://colab.research.google.com/drive/1G8XHP9zWMMDVu23PXzANLCOKNP_RjBEO?usp=sharing 55 | 56 | 57 | Once you get the idea of ``LineData`` samples, ``Pipeline`` and 58 | ``PlotlyGrapher``, you can use these copy-pastable tutorials to build PEPT data 59 | analysis pipelines tailored to your specific systems. 60 | 61 | 62 | .. toctree:: 63 | :caption: Pre-processing 64 | 65 | basics 66 | reading 67 | visualising 68 | converting 69 | 70 | 71 | 72 | .. toctree:: 73 | :caption: Tracking 74 | 75 | adaptive_samples 76 | birmingham 77 | peptml 78 | fpi 79 | 80 | 81 | 82 | .. toctree:: 83 | :caption: Post-processing 84 | 85 | tracking_errors 86 | trajectory_separation 87 | filtering 88 | velocities 89 | interpolating 90 | 91 | 92 | -------------------------------------------------------------------------------- /tests/test_processing.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : test_processing.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 23.11.2021 7 | 8 | 9 | '''Integration tests to ensure the `pept.processing` algorithms behave correctly and 10 | offer consistent interfaces. 
11 | ''' 12 | 13 | 14 | import numpy as np 15 | import pept 16 | 17 | from pept.processing import * 18 | 19 | 20 | 21 | def test_dynamic_probability2d(): 22 | # Generate tracer locations 23 | num_particles = 10 24 | positions = pept.PointData( 25 | np.random.uniform(0, 500, (num_particles, 5)), 26 | columns = ["t", "x", "y", "z", "v"] 27 | ) 28 | 29 | # Test different uses 30 | pixels = DynamicProbability2D(1., "v", "xy").fit(positions) 31 | assert pixels.pixels.any(), "all pixels are zero!" 32 | 33 | DynamicProbability2D(0.1, "t", "yz").fit(positions) 34 | DynamicProbability2D(0.1, 4, "xy").fit(positions) 35 | DynamicProbability2D(0.1, "v", "xy", xlim = [0, 500]).fit(positions) 36 | DynamicProbability2D(0.1, "v", "xy", resolution = [20, 20]).fit(positions) 37 | DynamicProbability2D(0.1, 4, "xy", max_workers = 1).fit(positions) 38 | 39 | 40 | def test_residence_distribution2d(): 41 | # Generate tracer locations 42 | num_particles = 10 43 | positions = pept.PointData( 44 | np.random.uniform(0, 500, (num_particles, 5)), 45 | columns = ["t", "x", "y", "z", "v"] 46 | ) 47 | 48 | # Test different uses 49 | pixels = ResidenceDistribution2D(1., "v").fit(positions) 50 | assert pixels.pixels.any(), "all pixels are zero!" 51 | 52 | ResidenceDistribution2D(0.1, "t", "yz").fit(positions) 53 | ResidenceDistribution2D(0.1, 0, "xy").fit(positions) 54 | ResidenceDistribution2D(0.1, xlim = [0, 500]).fit(positions) 55 | ResidenceDistribution2D(0.1, resolution = [20, 20]).fit(positions) 56 | ResidenceDistribution2D(0.1, 0, "xy", max_workers = 1).fit(positions) 57 | 58 | 59 | def test_dynamic_probability3d(): 60 | # Generate tracer locations 61 | num_particles = 10 62 | positions = pept.PointData( 63 | np.random.uniform(0, 500, (num_particles, 5)), 64 | columns = ["t", "x", "y", "z", "v"] 65 | ) 66 | 67 | # Test different uses 68 | voxels = DynamicProbability3D(1., "v").fit(positions) 69 | assert voxels.voxels.any(), "all voxels are zero!" 70 | 71 | DynamicProbability3D(0.1, "t", "yzx").fit(positions) 72 | DynamicProbability3D(0.1, 4,).fit(positions) 73 | DynamicProbability3D(0.1, "v", xlim = [0, 500]).fit(positions) 74 | DynamicProbability3D(0.1, "v", resolution = [20, 20, 20]).fit(positions) 75 | DynamicProbability3D(0.1, 4, max_workers = 1).fit(positions) 76 | 77 | 78 | def test_residence_distribution3d(): 79 | # Generate tracer locations 80 | num_particles = 10 81 | positions = pept.PointData( 82 | np.random.uniform(0, 500, (num_particles, 5)), 83 | columns = ["t", "x", "y", "z", "v"] 84 | ) 85 | 86 | # Test different uses 87 | voxels = ResidenceDistribution3D(1., "v").fit(positions) 88 | assert voxels.voxels.any(), "all voxels are zero!" 89 | 90 | ResidenceDistribution3D(0.1, "t", "yzx").fit(positions) 91 | ResidenceDistribution3D(0.1, 0).fit(positions) 92 | ResidenceDistribution3D(0.1, xlim = [0, 500]).fit(positions) 93 | ResidenceDistribution3D(0.1, resolution = [20, 20, 20]).fit(positions) 94 | ResidenceDistribution3D(0.1, 0, max_workers = 1).fit(positions) 95 | -------------------------------------------------------------------------------- /pept/tracking/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # File : __init__.py 6 | # License: License: GNU v3.0 7 | # Author : Andrei Leonard Nicusan 8 | # Date : 21.08.2019 9 | 10 | 11 | '''Tracer location, identification and tracking algorithms. 
12 | 13 | The `pept.tracking` subpackage hosts different tracking algorithms, working 14 | with both the base classes, as well as with generic NumPy arrays. 15 | 16 | All algorithms here are either ``pept.base.Filter`` or ``pept.base.Reducer`` 17 | subclasses, implementing the `.fit` and `.fit_sample` methods; here is an 18 | example using PEPT-ML: 19 | 20 | >>> from pept.tracking import * 21 | >>> 22 | >>> cutpoints = Cutpoints(0.5).fit(lines) 23 | >>> clustered = HDBSCAN(0.15).fit(cutpoints) 24 | >>> centres = (SplitLabels() + Centroids() + Stack()).fit(clustered) 25 | 26 | Once the processing steps have been tuned (see the `Tutorials`), you can chain 27 | all filters into a `pept.Pipeline` for efficient, parallel execution: 28 | 29 | >>> pipeline = ( 30 | >>> Cutpoints(0.5) + 31 | >>> HDBSCAN(0.15) + 32 | >>> SplitLabels() + Centroids() + Stack() 33 | >>> ) 34 | >>> centres = pipeline.fit(lines) 35 | 36 | If you would like to implement a PEPT algorithm, all you need to do is to 37 | subclass a ``pept.base.Filter`` and define the method ``.fit_sample(sample)`` - 38 | and you get parallel execution and pipeline chaining for free! 39 | 40 | >>> import pept 41 | >>> 42 | >>> class NewAlgorithm(pept.base.LineDataFilter): 43 | >>> def __init__(self, setting1, setting2 = None): 44 | >>> self.setting1 = setting1 45 | >>> self.setting2 = setting2 46 | >>> 47 | >>> def fit_sample(self, sample: pept.LineData): 48 | >>> processed_points = ... 49 | >>> return pept.PointData(processed_points) 50 | 51 | ''' 52 | 53 | 54 | from .birmingham_method import BirminghamMethod 55 | from .peptml import Cutpoints, Minpoints 56 | from .peptml import HDBSCAN, HDBSCANClusterer 57 | from .fpi import FPI 58 | 59 | from .transformers import Stack 60 | from .transformers import Debug 61 | 62 | from .transformers import SplitLabels 63 | from .transformers import SplitAll, GroupBy 64 | 65 | from .transformers import Centroids 66 | from .transformers import LinesCentroids 67 | 68 | from .transformers import Condition 69 | from .transformers import SamplesCondition 70 | from .transformers import Remove 71 | from .transformers import Swap 72 | 73 | from .transformers import OptimizeWindow 74 | 75 | from .space_transformers import Voxelize 76 | from .space_transformers import Interpolate 77 | from .space_transformers import Reorient 78 | from .space_transformers import Center 79 | from .space_transformers import OutOfViewFilter 80 | from .space_transformers import RemoveStatic 81 | 82 | from .post import Velocity 83 | 84 | from .tof import TimeOfFlight 85 | from .tof import CutpointsToF 86 | from .tof import GaussianDensity 87 | 88 | from .trajectory_separation import Segregate 89 | from .trajectory_separation import Reconnect 90 | 91 | 92 | __license__ = "GNU v3.0" 93 | __maintainer__ = "Andrei Leonard Nicusan" 94 | __email__ = "a.l.nicusan@bham.ac.uk" 95 | __status__ = "Beta" 96 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to make participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, 
personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies within all project spaces, and it also applies when 49 | an individual is representing the project or its community in public spaces. 50 | Examples of representing a project or community include using an official 51 | project e-mail address, posting via an official social media account, or acting 52 | as an appointed representative at an online or offline event. Representation of 53 | a project may be further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at a.l.nicusan@bham.ac.uk. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | 78 | -------------------------------------------------------------------------------- /pept/tracking/peptml/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # File : __init__.py 6 | # License: GNU v3.0 7 | # Author : Andrei Leonard Nicusan 8 | # Date : 22.08.2019 9 | 10 | 11 | '''The `peptml` package implements an efficient, optionally-parallel 12 | hierarchical density-based clustering algorithm for general Positron Emission 13 | Particle Tracking (PEPT). 14 | 15 | Summary 16 | ------- 17 | A typical workflow for using the `peptml` package would be: 18 | 19 | 1. Read the LoRs into a `pept.LineData` class instance and set the 20 | `sample_size` and `overlap` appropriately. 21 | 2. Compute the cutpoints using the `pept.tracking.peptml.Cutpoints` class. 22 | 3. Instantiate a `pept.tracking.peptml.HDBSCANClusterer` class and cluster the 23 | cutpoints found previously. 24 | 4. Optional: cluster the results from the previous step again for smoother, 25 | tighter trajectories. 26 | 27 | Extended Summary 28 | ---------------- 29 | The PEPT-ML algorithm [1]_ works using the following steps: 30 | 31 | 1. Split the data into a series of individual "samples", each containing a 32 | given number of LoRs. Use the base class pept.LineData for this. 33 | 2. For every sample of LoRs, compute the *cutpoints*, or the points in space 34 | that minimise the distance between every pair of lines. 35 | 3. Cluster every sample using HDBSCAN and extract the centres of the clusters 36 | ("1-pass clustering"). 37 | 4. Split the centres into samples of a given size. 38 | 5. Cluster every sample of centres using HDBSCAN and extract the centres of the 39 | clusters ("2-pass clustering"). 40 | 6. Construct the trajectory of every particle using the centres from the 41 | previous step. 42 | 43 | More tutorials and examples can be found on the University of Birmingham 44 | Positron Imaging Centre's GitHub repository. 45 | 46 | PEPT-ML was successfully used at the University of Birmingham to analyse real 47 | Fluorine-18 tracers in air. 48 | 49 | Modules Provided 50 | ---------------- 51 | 52 | :: 53 | 54 | pept.tracking.peptml 55 | │ 56 | Functions imported into the subpackage root: 57 | ├── find_cutpoints : Find cutpoints from a NumPy array of lines. 58 | ├── find_minpoints : Find MDPs of combinations from a line array. 59 | ├── get_cutoffs : Find cutpoint cutoffs from an array of lines. 60 | │ 61 | Classes imported into the subpackage root: 62 | ├── Cutpoints : Compute cutpoints from samples in a `LineData`. 63 | ├── Minpoints : Compute minpoints from samples in a `LineData`. 64 | └── HDBSCANClusterer : Cluster samples of cutpoints in parallel. 65 | 66 | References 67 | ---------- 68 | .. [1] Nicuşan AL, Windows-Yule CR. Positron emission particle tracking 69 | using machine learning. Review of Scientific Instruments. 70 | 2020 Jan 1;91(1):013329. 
71 | https://doi.org/10.1063/1.5129251 72 | ''' 73 | 74 | 75 | from .cutpoints import find_cutpoints 76 | from .minpoints import find_minpoints 77 | from .cutpoints import get_cutoffs 78 | from .cutpoints import Cutpoints 79 | from .minpoints import Minpoints 80 | 81 | from .peptml import HDBSCANClusterer 82 | from .peptml import HDBSCAN 83 | 84 | 85 | __all__ = [ 86 | "find_cutpoints", 87 | "find_minpoints", 88 | "get_cutoffs", 89 | "Cutpoints", 90 | "Minpoints", 91 | "HDBSCANClusterer", 92 | "HDBSCAN", 93 | ] 94 | 95 | 96 | __license__ = "GNU v3.0" 97 | __maintainer__ = "Andrei Leonard Nicusan" 98 | __email__ = "a.l.nicusan@bham.ac.uk" 99 | __status__ = "Beta" 100 | -------------------------------------------------------------------------------- /docs/source/tutorials/filtering.rst: -------------------------------------------------------------------------------- 1 | Filtering Data 2 | ============== 3 | 4 | There are many filters in ``pept.tracking``; you can check out the Manual at the top of the page for a complete list. Here are examples with the most important ones. 5 | 6 | 7 | Remove 8 | ------ 9 | 10 | Simply remove a column: 11 | 12 | :: 13 | 14 | from pept.tracking import * 15 | 16 | trajectories = Remove("label").fit(trajectories) 17 | 18 | 19 | Or multiple columns: 20 | 21 | :: 22 | 23 | trajectories = Remove("label", "error").fit(trajectories) 24 | 25 | 26 | Condition 27 | --------- 28 | 29 | One of the most important filters, selecting only data that satisfies a condition: 30 | 31 | :: 32 | 33 | from pept.tracking import * 34 | 35 | trajectories = Condition("error < 15").fit(trajectories) 36 | 37 | 38 | Or multiple ones: 39 | 40 | :: 41 | 42 | trajectories = Condition("error < 15, label >= 0").fit(trajectories) 43 | 44 | 45 | In the simplest case, you just use the column name **as the first argument** followed by a comparison. If the column name is not the first argument, you must use single quotes: 46 | 47 | :: 48 | 49 | trajectories = Condition("0 <= 'label'").fit(trajectories) 50 | 51 | 52 | You can also use filtering functions from NumPy in the condition string (i.e. anything returning a boolean mask): 53 | 54 | :: 55 | 56 | # Remove all NaNs and Infs from the 'x' column 57 | trajectories = Condition("np.isfinite('x')").fit(trajectories) 58 | 59 | 60 | Finally, you can supply your own function receiving a NumPy array of the data and returning a boolean mask: 61 | 62 | :: 63 | 64 | def last_column_filter(data): 65 | return data[:, -1] > 10 66 | 67 | trajectories = Condition(last_column_filter).fit(trajectories) 68 | 69 | 70 | Or using inline functions (i.e. ``lambda``): 71 | 72 | :: 73 | 74 | # Select points within a vertical cylinder with radius 10 75 | trajectories = Condition(lambda x: x[:, 1]**2 + x[:, 3]**2 < 10**2).fit(trajectories) 76 | 77 | 78 | SamplesCondition 79 | ---------------- 80 | 81 | While ``Condition`` is applied to individual points, ``SamplesCondition`` lets us filter entire samples - for example, select only trajectories with more than 30 points: 82 | 83 | :: 84 | 85 | import pept.tracking as pt 86 | 87 | long_trajectories_filter = pept.Pipeline([ 88 | # Segregate points - appends "label" column 89 | pt.Segregate(window = 20, cut_distance = 10), 90 | 91 | # Group points into samples; e.g. 
sample 1 contains all points with label 1 92 | pt.GroupBy("label"), 93 | 94 | # Now each sample is an entire trajectory which we can filter 95 | pt.SamplesCondition("sample_size > 30"), 96 | 97 | # And stack all remaining samples back into a single PointData 98 | pt.Stack(), 99 | ]) 100 | 101 | long_trajectories = long_trajectories_filter.fit(trajectories) 102 | 103 | 104 | The condition can be based on the sample itself, e.g. keep only samples that lie completely beyond x=0: 105 | 106 | :: 107 | 108 | # Keep only samples for which all points' X coordinates are bigger than 0 109 | SamplesCondition("np.all(sample['x'] > 0)") 110 | 111 | 112 | 113 | GroupBy 114 | ------- 115 | 116 | Stack all samples (i.e. ``LineData`` or ``PointData``) and split them into a list according to a named / numeric column index: 117 | 118 | :: 119 | 120 | from pept.tracking import * 121 | 122 | group_list = GroupBy("label").fit(trajectories) 123 | 124 | 125 | 126 | RemoveStatic 127 | ------------ 128 | 129 | Remove tracer locations where the tracer spends more than ``time_window`` without moving more than ``max_distance``: 130 | 131 | :: 132 | 133 | from pept.tracking import * 134 | 135 | # Remove positions that spent more than 2 seconds without moving more than 20 mm 136 | nonstatic = RemoveStatic(time_window = 2000, max_distance = 20).fit(trajectories) 137 | 138 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. 2 | File : index.rst 3 | License: GNU v3.0 4 | Author : Andrei Leonard Nicusan 5 | Date : 28.06.2020 6 | 7 | 8 | ================================ 9 | The PEPT Library's Documentation 10 | ================================ 11 | 12 | A Python library that unifies Positron Emission Particle Tracking 13 | (PEPT) research, including tracking, simulation, data analysis and 14 | visualisation tools. 15 | 16 | 17 | Positron Emission Particle Tracking 18 | =================================== 19 | PEPT is a technique developed at the University of Birmingham which allows the 20 | non-invasive, three-dimensional tracking of one or more 'tracer' particles 21 | through particulate, fluid or multiphase systems. The technique allows particle 22 | or fluid motion to be tracked with sub-millimetre accuracy and sub-millisecond 23 | temporal resolution and, due to its use of highly-penetrating 511keV gamma 24 | rays, can be used to probe the internal dynamics of even large, dense, 25 | optically opaque systems - making it ideal for industrial as well as scientific 26 | applications. 27 | 28 | PEPT is performed by radioactively labelling a particle with a positron-emitting 29 | radioisotope such as fluorine-18 (18F) or gallium-68 (68Ga), and using 30 | the back-to-back gamma rays produced by electron-positron annihilation events 31 | in and around the tracer to triangulate its spatial position. Each detected 32 | gamma ray represents a line of response (LoR). 33 | 34 | .. image:: imgs/pept_transformation.png 35 | :alt: Transforming LoRs into trajectories using `pept` 36 | 37 | Transforming gamma rays, or lines of response (left), into individual tracer 38 | trajectories (right) using the `pept` library. Depicted is experimental data of 39 | two tracers rotating at 42 RPM, imaged using the University of Birmingham 40 | Positron Imaging Centre's parallel screens PEPT camera. 
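In code, this whole transformation is a short processing pipeline; the sketch
below mirrors the PEPT-ML example from the ``pept.tracking`` documentation,
with illustrative (not tuned) sample size and clustering parameters::

    import pept
    from pept.tracking import *

    # LoRs from any scanner converter, split into samples of 200 lines each
    lors = pept.LineData(..., sample_size = 200)

    # Locate tracer positions in each sample, then stack them back together
    pipeline = Cutpoints(0.5) + HDBSCAN(0.15) + SplitLabels() + Centroids() + Stack()
    trajectories = pipeline.fit(lors)
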
41 | 42 | 43 | Tutorials and Documentation 44 | =========================== 45 | 46 | A very fast-paced introduction to Python is available `here (Google Colab tutorial link) 47 | `_; it is aimed at engineers whose background might be a few lines written in MATLAB, as well as moderate C/C++ programmers. 48 | 49 | A beginner-friendly tutorial for using the `pept` package is available `here (Google Colab link) 50 | `_. 51 | 52 | The links above point to Google Colaboratory, a Jupyter notebook-hosting website that lets you combine text with Python code, executing it on Google servers. Pretty neat, isn't it? 53 | 54 | 55 | Performance 56 | =========== 57 | Significant effort has been put into making the algorithms in this package as 58 | fast as possible. Most computationally intensive code has been implemented in `Cython`, `C` or `C++` and allows policy-based parallel execution, either on shared-memory machines using `joblib` / `ThreadPoolExecutor`, or on distributed computing clusters using `mpi4py.futures.MPIPoolExecutor`. 59 | 60 | 61 | Copyright 62 | ========= 63 | Copyright (C) 2021 the `pept` developers. Until now, this library was built directly or indirectly through the brain-time of: 64 | 65 | - Andrei Leonard Nicusan (University of Birmingham) 66 | - Dr. Kit Windows-Yule (University of Birmingham) 67 | - Dr. Sam Manger (University of Birmingham) 68 | - Matthew Herald (University of Birmingham) 69 | - Chris Jones (University of Birmingham) 70 | - Mark Al-Shemmeri (University of Birmingham) 71 | - Prof. David Parker (University of Birmingham) 72 | - Dr. Antoine Renaud (University of Edinburgh) 73 | - Dr. Cody Wiggins (Virginia Commonwealth University) 74 | - Dawid Michał Hampel 75 | - Dr. Tom Leadbeater 76 | 77 | Thank you. 78 | 79 | 80 | Indices and tables 81 | ================== 82 | 83 | .. 
toctree:: 84 | :caption: Documentation 85 | :maxdepth: 2 86 | 87 | getting_started 88 | tutorials/index 89 | manual/index 90 | contributing 91 | citing 92 | 93 | 94 | Pages 95 | 96 | * :ref:`genindex` 97 | * :ref:`modindex` 98 | * :ref:`search` 99 | -------------------------------------------------------------------------------- /docs/source/tutorials/visualising.rst: -------------------------------------------------------------------------------- 1 | Plotting 2 | ======== 3 | 4 | 5 | 6 | Interactive 3D Plots 7 | -------------------- 8 | 9 | The easiest method of plotting 3D PEPT-like data is using the ``pept.plots.PlotlyGrapher`` 10 | interactive grapher: 11 | 12 | 13 | :: 14 | 15 | # Plotting some example 3D lines 16 | import pept 17 | from pept.plots import PlotlyGrapher 18 | import numpy as np 19 | 20 | lines_raw = np.arange(70).reshape((10, 7)) 21 | lines = pept.LineData(lines_raw) 22 | 23 | PlotlyGrapher().add_lines(lines).show() 24 | 25 | 26 | :: 27 | 28 | # Plotting some example 3D points 29 | import pept 30 | from pept.plots import PlotlyGrapher 31 | import numpy as np 32 | 33 | points_raw = np.arange(40).reshape((10, 4)) 34 | points = pept.PointData(points_raw) 35 | 36 | PlotlyGrapher().add_points(points).show() 37 | 38 | 39 | The ``PlotlyGrapher`` object allows straightforward subplots creation: 40 | 41 | 42 | :: 43 | 44 | # Plot the example 3D lines and points on separate subplots 45 | grapher = PlotlyGrapher(cols = 2) 46 | 47 | grapher.add_lines(lines) # col = 1 by default 48 | grapher.add_points(points, col = 2) 49 | 50 | grapher.show() 51 | 52 | 53 | :: 54 | 55 | # Plot the example 3D lines and points on separate subplots 56 | grapher = PlotlyGrapher(rows = 2, cols = 2) 57 | 58 | grapher.add_lines(lines, col = 2) # row = 1 by default 59 | grapher.add_points(points, row = 2, col = 2) 60 | 61 | grapher.show() 62 | 63 | 64 | 65 | 66 | Adding Colourbars 67 | ----------------- 68 | 69 | By default, the last column of a dataset is used to colour-code the resulting points: 70 | 71 | :: 72 | 73 | from pept.plots import PlotlyGrapher 74 | PlotlyGrapher().add_points(point_data).show() # Colour-codes by the last column 75 | 76 | 77 | You can change the column used to colour-code points using a numeric index (e.g. first column 78 | ``colorbar_col = 0``, second to last column ``colorbar_col = -2``) or named column (e.g. 79 | ``colorbar_col = "error"``): 80 | 81 | :: 82 | 83 | PlotlyGrapher().add_points(point_data, colorbar_col = -2).show() 84 | PlotlyGrapher().add_points(point_data, colorbar_col = "label").show() # Coloured by trajectory 85 | PlotlyGrapher().add_points(point_data, colorbar_col = "v").show() # Coloured by velocity 86 | 87 | 88 | As a ``PlotlyGrapher`` will often manage multiple subplots, one shouldn't include explicit 89 | colourbars on the sides *for each dataset plotted*. Therefore, colourbars are hidden by default; 90 | add a colourbar by setting its title: 91 | 92 | :: 93 | 94 | PlotlyGrapher().add_points(points, colorbar_title = "Velocity").show() 95 | 96 | 97 | 98 | 99 | Histogram of Tracking Errors 100 | ---------------------------- 101 | 102 | The ``Centroids(error = True)`` filter appends a column "error" representing the relative error 103 | in the tracked position. You can select a named column via indexing, e.g. 
``trajectories["error"]``; 104 | you can then plot a histogram of the relative errors with: 105 | 106 | :: 107 | 108 | import plotly.express as px 109 | px.histogram(trajectories["error"]).show() # Large values are noise 110 | px.histogram(trajectories["cluster_size"]).show() # Small values are noise 111 | 112 | 113 | It is often useful to remove points with an error higher than a certain value, e.g. 20 mm: 114 | 115 | :: 116 | 117 | trajectories = Condition("error < 20").fit(trajectories) 118 | 119 | # Or simply append the `Condition` to the `pept.Pipeline` 120 | pipeline = pept.Pipeline([ 121 | ... 122 | Condition("cluster_size > 30, error < 20"), 123 | ... 124 | ]) 125 | 126 | 127 | 128 | 129 | Exporting Plotly Graphs as Images 130 | --------------------------------- 131 | 132 | The standard output of the Plotly grapher is an interactive HTML webpage; however, this can lead to large file sizes or memory overflows. Plotly allows for graphs to be exported as images to alleviate some of these issues. 133 | 134 | Ensure you have imported: 135 | 136 | :: 137 | 138 | import plotly.express as px 139 | import kaleido 140 | import plotly.io as pio 141 | 142 | 143 | There are two main ways of exporting as images - calling the figure's ``write_image`` method, or passing the figure to ``plotly.io.write_image``; for example: 144 | 145 | :: 146 | 147 | # Save the inner plotly.Figure attribute of a `grapher` 148 | # Format can be changed to other image formats 149 | # Width and height can be adjusted to give the desired image size 150 | grapher.fig.write_image("figure.png", width=2560, height=1440) 151 | 152 | 153 | 154 | 155 | Modifying the Underlying Figure 156 | ------------------------------- 157 | 158 | You can access the Plotly figure wrapped and managed by a PlotlyGrapher using the ``.fig`` 159 | attribute: 160 | 161 | :: 162 | 163 | grapher.fig.update_layout(xaxis_title = "Pipe Length (mm)") 164 | 165 | 166 | -------------------------------------------------------------------------------- /pept/utilities/parallel/parallel_map.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # File : parallel_map.py 4 | # License: GNU v3.0 5 | # Author : Andrei Leonard Nicusan 6 | # Date : 03.02.2020 7 | 8 | 9 | import numpy as np 10 | from multiprocessing import Pool 11 | 12 | 13 | def parallel_map_file( 14 | func, # Called as func(data_chunk, chunk_number, *args, **kwargs) 15 | fname, # File that will be supplied to numpy.loadtxt 16 | start, # Start line 17 | end, # End line 18 | chunksize, # Number of lines per chunk 19 | *args, 20 | dtype = float, 21 | processes = None, 22 | callback = lambda x: None, 23 | error_callback = lambda x: None, 24 | **kwargs 25 | ): 26 | '''Utility for parallelising (read CSV chunk -> process chunk) workflows. 27 | 28 | This function reads individual chunks of data from a CSV-formatted file, 29 | then asynchronously sends them as numpy arrays to an arbitrary function 30 | `func` for processing. In effect, it reads a file in the main process and 31 | processes the chunks in separate worker processes. 32 | 33 | This is especially useful when dealing with very large files (like we do in 34 | PEPT...) that you'd like to process in batches, in parallel. 35 | 36 | Parameters 37 | ---------- 38 | func : callable 39 | The function that will be called with each chunk of data, the chunk 40 | number, the other positional arguments `*args` and keyword arguments 41 | `**kwargs`: `func(data_chunk, chunk_number, *args, **kwargs)`. 42 | `data_chunk` is a numpy array returned by `numpy.loadtxt` and 43 | `chunk_number` is an int. `func` must be picklable for sending to 44 | other worker processes. 45 | fname : file, str, or pathlib.Path 46 | The file, filename, or generator that numpy.loadtxt will be supplied 47 | with. 48 | start : int 49 | The starting line number that the chunks will be read from. 50 | end : int 51 | The ending line number that the chunks will be read from. This is 52 | exclusive. 53 | chunksize : int 54 | The number of lines that will be read for each chunk. 55 | *args : additional positional arguments 56 | Additional positional arguments that will be supplied to `func`. 57 | dtype : type 58 | The data type of the numpy array that is returned by numpy.loadtxt. The 59 | default is `float`. 60 | processes : int 61 | The maximum number of worker processes that will be used for calling 62 | `func`. If left to the default `None`, then the number returned by 63 | `os.cpu_count()` will be used. 64 | callback : callable 65 | When the result from a `func` call becomes ready, `callback` is applied 66 | to it - unless the call failed, in which case the `error_callback` is 67 | applied instead. 68 | error_callback : callable 69 | If the target function `func` fails, then the `error_callback` is called 70 | with the exception instance. 71 | **kwargs : additional keyword arguments 72 | Additional keyword arguments that will be supplied to `func`. 73 | 74 | Returns 75 | ------- 76 | list 77 | A Python list of the `func` call returns. The results are not 78 | necessarily in order, though this can be verified by using the chunk 79 | number that is supplied to each call to `func`. If `func` does not 80 | return anything, it will simply be a list of `None`. 81 | 82 | Notes 83 | ----- 84 | This function uses `numpy.loadtxt` to read chunks of data and 85 | `multiprocessing.Pool.apply_async` to call `func` asynchronously. 86 | 87 | As the calls to `func` happen in different processes, all the usual parallel 88 | processing issues apply. For example, `func` should not save data to the 89 | same file, as it will overwrite results from different processes and may 90 | become corrupt - however, there is a workaround for this particular case: 91 | because the chunk numbers are guaranteed to be unique, any data can be 92 | saved to a file whose name includes this chunk number, making it unique. 
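For instance, a sketch of this per-chunk saving pattern (the file name
format is purely illustrative):

>>> def save_chunk(data, chunk_number):
>>>     # Each chunk gets its own file, so workers never clash
>>>     np.save(f"chunk_{chunk_number}.npy", data)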
93 | 94 | Examples 95 | -------- 96 | For a random file-like CSV data object: 97 | 98 | >>> import io 99 | >>> flike = io.StringIO("1,2,3\\n4,5,6\\n7,8,9") 100 | >>> def func(data, chunk_number): 101 | >>> return (data, chunk_number) 102 | >>> results = parallel_map_file(func, flike, 0, 3, 1) 103 | >>> print(results) 104 | >>> [ ([1, 2, 3], 0), ([4, 5, 6], 1), ([7, 8, 9], 2) ] 105 | 106 | ''' 107 | 108 | nchunks = int((end - start) / chunksize) 109 | 110 | with Pool(processes = processes) as pool: 111 | results = [] 112 | for i in range(nchunks): 113 | data = np.loadtxt( 114 | fname, 115 | skiprows = start + i * chunksize, 116 | max_rows = chunksize, 117 | dtype = dtype 118 | ) 119 | worker = pool.apply_async( 120 | func, 121 | (data, i, *args), 122 | kwargs, 123 | callback, 124 | error_callback 125 | ) 126 | results.append(worker) 127 | 128 | results = [r.get() for r in results] 129 | 130 | return results 131 | -------------------------------------------------------------------------------- /pept/scanners/parallel_screens/extensions/binary_converter.pyx: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | # pept is a Python library that unifies Positron Emission Particle 5 | # Tracking (PEPT) research, including tracking, simulation, data analysis 6 | # and visualisation tools. 7 | # 8 | # If you used this codebase or any software making use of it in a scientific 9 | # publication, we ask you to cite the following paper: 10 | # Nicuşan AL, Windows-Yule CR. Positron emission particle tracking 11 | # using machine learning. Review of Scientific Instruments. 12 | # 2020 Jan 1;91(1):013329. 13 | # https://doi.org/10.1063/1.5129251 14 | # 15 | # Copyright (C) 2021 the pept developers. 16 | # 17 | # This program is free software: you can redistribute it and/or modify 18 | # it under the terms of the GNU General Public License as published by 19 | # the Free Software Foundation, either version 3 of the License, or 20 | # (at your option) any later version. 21 | # 22 | # This program is distributed in the hope that it will be useful, 23 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 24 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 25 | # GNU General Public License for more details. 26 | # 27 | # You should have received a copy of the GNU General Public License 28 | # along with this program. If not, see . 
29 | # pept is a Python library that unifies Positron Emission Particle
30 | # Tracking (PEPT) research, including tracking, simulation, data analysis
31 | # and visualisation tools
32 | 
33 | 
34 | # File : binary_converter.pyx
35 | # License : GNU v3.0
36 | # Author : Andrei Leonard Nicusan
37 | # Date : 01.04.2021
38 | 
39 | 
40 | # cython: language_level=3
41 | # cython: boundscheck=False
42 | # cython: wraparound=False
43 | # cython: initializedcheck=False
44 | # cython: nonecheck=False
45 | # cython: embedsignature=True
46 | # cython: cdivision=True
47 | 
48 | 
49 | import numpy as np # import numpy for Python functions
50 | cimport numpy as np # import numpy for C functions (numpy's C API)
51 | 
52 | 
53 | np.import_array()
54 | 
55 | 
56 | cdef extern from "binary_converter_ext.c":
57 | # C is included here so that it doesn't need to be compiled externally
58 | pass
59 | 
60 | 
61 | cdef extern from "binary_converter_ext.h":
62 | double* read_adac_binary(const char *, Py_ssize_t *) nogil
63 | 
64 | 
65 | cpdef convert_adac_forte(filepath):
66 | '''Convert an ADAC Forte list mode binary file to a general line data
67 | format `[time, x1, y1, z1, x2, y2, z2]`, returned as a NumPy array.
68 | 
69 | ::
70 | 
71 | Function signature:
72 | convert_adac_forte(filepath)
73 | 
74 | Binary converter for the ADAC Forte dual-head gamma camera native list
75 | mode data. Given the `filepath` to such a binary file (usually with
76 | extension ".da01"), this function converts the binary contents to the
77 | general line of response format `[time, x1, y1, z1, x2, y2, z2]`, where
78 | `z1 = 0` and `z2 = screen_separation` (found from the file).
79 | 
80 | The LoRs are returned as a (N, 7) NumPy array, where N is the number of
81 | LoRs that were found in the file.
82 | 
83 | Function parameters
84 | -------------------
85 | filepath: str-like
86 | A string of characters containing the path to the binary file. The
87 | string is not parsed - it is only passed on to the C `fopen`
88 | function, so it can contain any characters allowed by the OS
89 | file system.
90 | 
91 | Returns
92 | -------
93 | lors: (N, 7) NumPy array
94 | The 2D array of LoRs, each row containing the time and coordinates of
95 | the first and second point defining a 3D line, respectively:
96 | `[time, x1, y1, z1, x2, y2, z2]`.
97 | 
98 | Raises
99 | ------
100 | FileNotFoundError
101 | If the `filepath` does not exist or points to an invalid ADAC binary
102 | file, in which case the C converter subroutine (binary_converter_ext.c)
103 | prints a specific message.
104 | 
105 | Examples
106 | --------
107 | 
108 | >>> import numpy as np
109 | >>> from pept.scanners.parallel_screens.extensions import convert_adac_forte
110 | >>>
111 | >>> lines = convert_adac_forte("adac_experiment_data.da01")
112 | 
113 | '''
114 | 
115 | filepath_utf = str(filepath).encode('UTF-8')
116 | 
117 | cdef char *filepath_c = filepath_utf
118 | 
119 | cdef double *lors = NULL
120 | cdef Py_ssize_t lors_elements = 0
121 | cdef np.npy_intp[2] shape
122 | 
123 | cdef np.ndarray[double, ndim=2] lors_arr
124 | 
125 | with nogil:
126 | lors = read_adac_binary(filepath_c, &lors_elements)
127 | 
128 | shape[0] = lors_elements // 7
129 | shape[1] = 7
130 | 
131 | # Use the `lors` pointer as the internal data of a numpy array with
132 | # PyArray_SimpleNewFromData
133 | cdef extern from "numpy/arrayobject.h":
134 | void PyArray_ENABLEFLAGS(np.ndarray arr, int flags)
135 | 
136 | if lors is NULL:
137 | raise FileNotFoundError(
138 | "Could not convert binary file - see above for error message"
139 | )
140 | else:
141 | lors_arr = np.PyArray_SimpleNewFromData(2, shape, np.NPY_FLOAT64, lors)
142 | 
143 | PyArray_ENABLEFLAGS(lors_arr, np.NPY_OWNDATA)
144 | 
145 | return lors_arr
146 | 
-------------------------------------------------------------------------------- /docs/source/tutorials/basics.md: -------------------------------------------------------------------------------- 1 | Absolute Basics
2 | ===============
3 | 
4 | The main purpose of the `pept` library is to provide a common, consistent foundation for PEPT-related algorithms, including tracer tracking, visualisation and post-processing tools - such that they can be used interchangeably, mixed and matched for different systems. Virtually *any* PEPT processing routine follows these steps:
5 | 
6 | 1. Convert raw gamma camera / scanner data into *3D lines* (i.e. the captured gamma rays, or lines of response - LoRs).
7 | 2. Take a *sample* of lines, locate tracer locations, then repeat for the next samples.
8 | 3. Separate out individual tracer trajectories.
9 | 4. Visualise and post-process trajectories.
10 | 
11 | For these algorithm-agnostic steps, `pept` provides four base data structures upon which the rest of the library is built:
12 | 
13 | 1. [`pept.LineData`](https://pept.readthedocs.io/en/latest/manual/generated/pept.LineData.html): general 3D line samples, formatted as *[time, x1, y1, z1, x2, y2, z2, extra...]*.
14 | 2. [`pept.PointData`](https://pept.readthedocs.io/en/latest/manual/generated/pept.PointData.html): general 3D point samples, formatted as *[time, x, y, z, extra...]*.
15 | 3. [`pept.Pixels`](https://pept.readthedocs.io/en/latest/manual/generated/pept.Pixels.html): single 2D pixellised space with physical dimensions, including fast line traversal.
16 | 4. [`pept.Voxels`](https://pept.readthedocs.io/en/latest/manual/generated/pept.Voxels.html): single 3D voxellised space with physical dimensions, including fast line traversal.
17 | 
18 | All the data structures above are built on top of NumPy and integrate natively with the rest of the Python / SciPy ecosystem. The rest of the `pept` library is organised into submodules:
19 | 
20 | - [`pept.scanners`](https://pept.readthedocs.io/en/latest/manual/scanners.html): converters between native scanner data and the base classes.
21 | - [`pept.tracking`](https://pept.readthedocs.io/en/latest/manual/tracking.html): radioactive tracer tracking algorithms, e.g. the Birmingham method, PEPT-ML, FPI.
22 | - [`pept.plots`](https://pept.readthedocs.io/en/latest/manual/plots.html): PEPT data visualisation subroutines. 23 | - [`pept.utilities`](https://pept.readthedocs.io/en/latest/manual/utilities.html): general-purpose helpers, e.g. `read_csv`, `traverse3d`. 24 | - [`pept.processing`](https://pept.readthedocs.io/en/latest/manual/processing.html): PEPT-oriented post-processing algorithms, e.g. `occupancy2d`. 25 | 26 | 27 | [`pept.LineData`](https://pept.readthedocs.io/en/latest/manual/generated/pept.LineData.html) 28 | -------------------------------------------------------------------------------------------- 29 | 30 | Generally, PEPT Lines of Response (LoRs) are lines in 3D space, each 31 | defined by two points, regardless of the geometry of the scanner used. This 32 | class is used to wrap LoRs (or any lines!), efficiently yielding samples of 33 | `lines` of an adaptive `sample_size` and `overlap`. 34 | 35 | It is an abstraction over PET / PEPT scanner geometries and data formats, 36 | as once the raw LoRs (be they stored as binary, ASCII, etc.) are 37 | transformed into the common `LineData` format, any tracking, analysis or 38 | visualisation algorithm in the `pept` package can be used interchangeably. 39 | Moreover, it provides a stable, user-friendly interface for iterating over 40 | LoRs in *samples* - this is useful for tracking algorithms, as they 41 | generally take a few LoRs (a *sample*), produce a tracer position, then 42 | move to the next sample of LoRs, repeating the procedure. Using overlapping 43 | samples is also useful for improving the tracking rate of the algorithms. 44 | 45 | Here are some basic examples of creating and using `LineData` samples - you're 46 | very much invited to copy and run them! 47 | 48 | Initialise a `LineData` instance containing 10 lines with a `sample_size` 49 | of 3. 50 | 51 | ```python 52 | >>> import pept 53 | >>> import numpy as np 54 | >>> lines_raw = np.arange(70).reshape(10, 7) 55 | >>> print(lines_raw) 56 | [[ 0 1 2 3 4 5 6] 57 | [ 7 8 9 10 11 12 13] 58 | [14 15 16 17 18 19 20] 59 | [21 22 23 24 25 26 27] 60 | [28 29 30 31 32 33 34] 61 | [35 36 37 38 39 40 41] 62 | [42 43 44 45 46 47 48] 63 | [49 50 51 52 53 54 55] 64 | [56 57 58 59 60 61 62] 65 | [63 64 65 66 67 68 69]] 66 | 67 | >>> line_data = pept.LineData(lines_raw, sample_size = 3) 68 | >>> line_data 69 | pept.LineData (samples: 3) 70 | -------------------------- 71 | sample_size = 3 72 | overlap = 0 73 | lines = 74 | (rows: 10, columns: 7) 75 | [[ 0. 1. ... 5. 6.] 76 | [ 7. 8. ... 12. 13.] 77 | ... 78 | [56. 57. ... 61. 62.] 79 | [63. 64. ... 68. 69.]] 80 | columns = ['t', 'x1', 'y1', 'z1', 'x2', 'y2', 'z2'] 81 | attrs = {} 82 | ``` 83 | 84 | Access samples using subscript notation. Notice how the samples are 85 | consecutive, as `overlap` is 0 by default. 86 | 87 | ```python 88 | >>> line_data[0] 89 | pept.LineData (samples: 1) 90 | -------------------------- 91 | sample_size = 3 92 | overlap = 0 93 | lines = 94 | (rows: 3, columns: 7) 95 | [[ 0. 1. ... 5. 6.] 96 | [ 7. 8. ... 12. 13.] 97 | [14. 15. ... 19. 20.]] 98 | columns = ['t', 'x1', 'y1', 'z1', 'x2', 'y2', 'z2'] 99 | attrs = {} 100 | 101 | >>> line_data[1] 102 | pept.LineData (samples: 1) 103 | -------------------------- 104 | sample_size = 3 105 | overlap = 0 106 | lines = 107 | (rows: 3, columns: 7) 108 | [[21. 22. ... 26. 27.] 109 | [28. 29. ... 33. 34.] 110 | [35. 36. ... 40. 
41.]]
111 | columns = ['t', 'x1', 'y1', 'z1', 'x2', 'y2', 'z2']
112 | attrs = {}
113 | ```
114 | 
115 | Now set an overlap of 2; notice how the number of samples changes:
116 | 
117 | ```python
118 | >>> len(line_data) # Number of samples
119 | 3
120 | 
121 | >>> line_data.overlap = 2
122 | >>> len(line_data)
123 | 8
124 | ```
125 | 
126 | 
127 | 
128 | 
-------------------------------------------------------------------------------- /pept/scanners/modular_camera/modular_camera.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | 
4 | 
5 | # pept is a Python library that unifies Positron Emission Particle
6 | # Tracking (PEPT) research, including tracking, simulation, data analysis
7 | # and visualisation tools
8 | #
9 | # Copyright (C) 2019 Andrei Leonard Nicusan
10 | #
11 | # This program is free software: you can redistribute it and/or modify
12 | # it under the terms of the GNU General Public License as published by
13 | # the Free Software Foundation, either version 3 of the License, or
14 | # (at your option) any later version.
15 | #
16 | # This program is distributed in the hope that it will be useful,
17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 | # GNU General Public License for more details.
20 | #
21 | # You should have received a copy of the GNU General Public License
22 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
23 | 
24 | 
25 | # File : modular_camera.py
26 | # License: GNU v3.0
27 | # Author : Sam Manger
28 | # Date : 20.08.2019
29 | 
30 | 
31 | import time
32 | 
33 | import numpy as np
34 | 
35 | from pept import LineData
36 | from .extensions.get_pept_event import get_pept_LOR
37 | 
38 | 
39 | def modular_camera(
40 | data_file,
41 | sample_size = None,
42 | overlap = None,
43 | verbose = True
44 | ):
45 | '''Initialise PEPT LoRs from the modular camera DAQ.
46 | 
47 | Can read data from a `.da_1` file or equivalent. The file must contain
48 | the standard datawords from the modular camera output. This will then
49 | be automatically transformed into the standard `LineData` format
50 | with every row being `[time, x1, y1, z1, x2, y2, z2]`, where the geometry
51 | is derived from the C-extension. The current usable geometry is a square
52 | layout with 4 stacks for 4 modules, separated by 250 mm.
53 | 
54 | Parameters
55 | ----------
56 | data_file : str
57 | A string with the (absolute or relative) path to the data file
58 | from which the PEPT data will be read. It should include the
59 | full file name, along with the extension (.da_1).
60 | 
61 | sample_size : int, optional
62 | An `int` that defines the number of lines that should be
63 | returned when iterating over `_lines`. A `sample_size` of 0
64 | yields all the data as one single sample. (Default is 200)
65 | 
66 | overlap : int, optional
67 | An `int` that defines the overlap between two consecutive
68 | samples that are returned when iterating over `_lines`.
69 | An overlap of 0 means consecutive samples, while an overlap
70 | of (`sample_size` - 1) means incrementing the samples by one.
71 | A negative overlap means skipping values between samples. An
72 | error is raised if `overlap` is larger than or equal to
73 | `sample_size`. (Default is 0)
74 | 
75 | verbose : bool, optional
76 | An option that enables printing the time taken for the
77 | initialisation of an instance of the class.
Useful when
78 | reading large files (10 GB files for PEPT data are not unheard
79 | of). (Default is True)
80 | 
81 | Returns
82 | -------
83 | LineData
84 | The initialised LoRs.
85 | 
86 | Raises
87 | ------
88 | ValueError
89 | If `overlap` >= `sample_size`. Overlap has to be smaller than
90 | `sample_size`. Note that it can also be negative.
91 | 
92 | ValueError
93 | If the data file does not have (N, 7) shape.
94 | '''
95 | 
96 | if verbose:
97 | start = time.time()
98 | 
99 | x = 10
100 | 
101 | header_buffer_size = 1000
102 | 
103 | n_events = 0
104 | 
105 | # Modular camera data reader requires 'itag' for timing. We will drop
106 | # this column at the end of initialisation
107 | # Row: [itag, itime, X1, Y1, Z1, X2, Y2, Z2]
108 | if sample_size is None: sample_size = 200 # documented default of 200 lines
109 | lines = np.zeros([sample_size, 8])
110 | 
111 | with open(data_file, "rb") as f:
112 | # Skip over the header and handshake word
113 | f.seek(header_buffer_size)
114 | 
115 | word = f.read(4)
116 | 
117 | if word.hex() == 'cefacefa':
118 | # Skip two words
119 | word = f.read(4)
120 | word = f.read(4)
121 | 
122 | itime = 0
123 | itag = 0
124 | 
125 | # BufTime = 0
126 | # nBuf = 0
127 | 
128 | while word != b'' and (n_events < sample_size):
129 | 
130 | word = f.read(4)
131 | 
132 | # Handshake word
133 | if word.hex() == 'cefacefa':
134 | # Skip two words
135 | word = f.read(4)
136 | word = f.read(4)
137 | 
138 | if word != b'':
139 | word = int.from_bytes(word, "little")
140 | 
141 | lines[n_events, :] = get_pept_LOR(
142 | word, itag, itime
143 | ) # C function
144 | 
145 | itag = lines[n_events, 0] # itag column (see row format above)
146 | itime = lines[n_events, 1] # itime column
147 | 
148 | n_events = n_events + 1
149 | 
150 | if (n_events % x) == 0:
151 | print("Got ", n_events, "\n")
152 | x = x * 10
153 | 
154 | # Remove 'zero' lines
155 | lines = lines[np.all(lines, axis = 1)]
156 | 
157 | # Drop itag column
158 | lines = np.delete(lines, 0, axis=1)
159 | 
160 | if verbose:
161 | end = time.time()
162 | print(f"\nInitialised the PEPT data in {end - start:3.3f} s.\n")
163 | 
164 | return LineData(lines, sample_size = sample_size, overlap = overlap)
165 | 
-------------------------------------------------------------------------------- /pept/tracking/post.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | # File : post.py
4 | # License: GNU v3.0
5 | # Author : Andrei Leonard Nicusan
6 | # Date : 06.09.2021
7 | 
8 | 
9 | import warnings
10 | 
11 | import numpy as np
12 | 
13 | try:
14 | class NotInstalled:
15 | '''Class used to signal that Numba is not available.'''
16 | pass
17 | 
18 | import numba as nb
19 | except ImportError:
20 | nb = NotInstalled()
21 | nb.njit = lambda func: func
22 | 
23 | from pept import PointData
24 | from pept.base import PointDataFilter
25 | 
26 | 
27 | @nb.njit
28 | def polyfit(x, y, deg):
29 | '''Fit polynomial of order `deg` against x, y data points.'''
30 | mat = np.zeros(shape = (x.shape[0], deg + 1))
31 | mat[:, 0] = np.ones_like(x)
32 | for n in range(1, deg + 1):
33 | mat[:, n] = x**n
34 | 
35 | p = np.linalg.lstsq(mat, y)[0]
36 | return p
37 | 
38 | 
39 | @nb.njit
40 | def polyder(p):
41 | '''Differentiate polynomial p.'''
42 | d = np.zeros(shape = (p.shape[0] - 1))
43 | for n in range(d.shape[0]):
44 | d[n] = (n + 1) * p[n + 1]
45 | return d
46 | 
47 | 
48 | @nb.njit
49 | def polyval(p, x):
50 | '''Evaluate polynomial p(x) using Horner's Method.
51 | 
52 | New numpy.polynomial.Polynomial format:
53 | p[0] + p[1] * x + p[2] * x^2 + ...
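For example, p = [1., 2., 3.] represents 1 + 2*x + 3*x**2, so
polyval(np.array([1., 2., 3.]), np.array([2.])) returns array([17.]).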
54 | ''' 55 | result = np.zeros_like(x) 56 | for coeff in p[::-1]: 57 | result = x * result + coeff 58 | return result 59 | 60 | 61 | @nb.njit 62 | def compute_velocities(points, window, deg): 63 | # Pre-allocate velocities matrix, columns [vx, vy, vz] 64 | v = np.zeros((points.shape[0], 3)) 65 | 66 | # Half-window size 67 | hw = (window - 1) // 2 68 | 69 | # Infer velocities of first hw points 70 | w = points[:window] 71 | vx = polyder(polyfit(w[:, 0], w[:, 1], deg)) 72 | vy = polyder(polyfit(w[:, 0], w[:, 2], deg)) 73 | vz = polyder(polyfit(w[:, 0], w[:, 3], deg)) 74 | 75 | for i in range(hw + 1): 76 | v[i, 0] = polyval(vx, w[i, 0:1])[0] 77 | v[i, 1] = polyval(vy, w[i, 0:1])[0] 78 | v[i, 2] = polyval(vz, w[i, 0:1])[0] 79 | 80 | # Compute velocities in a sliding window 81 | for i in range(hw + 1, points.shape[0] - hw): 82 | w = points[i - hw:i + hw + 1] 83 | 84 | vx = polyder(polyfit(w[:, 0], w[:, 1], deg)) 85 | vy = polyder(polyfit(w[:, 0], w[:, 2], deg)) 86 | vz = polyder(polyfit(w[:, 0], w[:, 3], deg)) 87 | 88 | v[i, 0] = polyval(vx, points[i, 0:1])[0] 89 | v[i, 1] = polyval(vy, points[i, 0:1])[0] 90 | v[i, 2] = polyval(vz, points[i, 0:1])[0] 91 | 92 | # Infer velocities of last hw points 93 | for i in range(points.shape[0] - hw, points.shape[0]): 94 | v[i, 0] = polyval(vx, points[i, 0:1])[0] 95 | v[i, 1] = polyval(vy, points[i, 0:1])[0] 96 | v[i, 2] = polyval(vz, points[i, 0:1])[0] 97 | 98 | return v 99 | 100 | 101 | 102 | 103 | class Velocity(PointDataFilter): 104 | '''Append the dimension-wise or absolute velocity to samples of points 105 | using a 2D fitted polynomial in a rolling window mode. 106 | 107 | Filter signature: 108 | 109 | :: 110 | 111 | PointData -> Velocity.fit_sample -> PointData 112 | 113 | If Numba is installed, a fast, natively-compiled algorithm is used. 114 | 115 | If `absolute = False`, the "vx", "vy" and "vz" columns are appended. If 116 | `absolute = True`, then the "v" column is appended. 117 | ''' 118 | 119 | def __init__(self, window, degree = 2, absolute = False): 120 | if isinstance(nb, NotInstalled): 121 | warnings.warn(( 122 | "Numba is not installed, so this function will be very slow. " 123 | "Install Numba to JIT compile the compute-intensive part." 124 | ), RuntimeWarning) 125 | 126 | self.window = int(window) 127 | assert self.window % 2 == 1, "The `window` must be an odd number!" 128 | self.degree = int(degree) 129 | self.absolute = bool(absolute) 130 | 131 | assert self.window > self.degree, "The `window` must be >`degree`!" 
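# A minimal usage sketch (hypothetical `points`; any pept.PointData
# sample with columns ["t", "x", "y", "z", ...] works):
#
#   >>> velocity = Velocity(window = 11, degree = 2)
#   >>> points_v = velocity.fit_sample(points)   # appends vx, vy, vz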
132 | 133 | 134 | def fit_sample(self, samples): 135 | if not isinstance(samples, PointData): 136 | samples = PointData(samples) 137 | 138 | if not len(samples.points): 139 | return self._empty_sample(samples) 140 | 141 | if self.window >= len(samples.points): 142 | return self._invalid_sample(samples) 143 | 144 | vels = compute_velocities(samples.points, self.window, self.degree) 145 | 146 | # Create new object like sample with the extra velocity columns 147 | if self.absolute: 148 | absvels = np.linalg.norm(vels, axis = 1) 149 | points = np.c_[samples.points, absvels] 150 | columns = samples.columns + ["v"] 151 | else: 152 | points = np.c_[samples.points, vels] 153 | columns = samples.columns + ["vx", "vy", "vz"] 154 | 155 | new_sample = samples.copy(data = points, columns = columns) 156 | 157 | return new_sample 158 | 159 | 160 | def _empty_sample(self, samples): 161 | if self.absolute: 162 | columns = samples.columns + ["v"] 163 | else: 164 | columns = samples.columns + ["vx", "vy", "vz"] 165 | 166 | return samples.copy( 167 | data = np.empty((0, len(columns))), 168 | columns = columns, 169 | ) 170 | 171 | 172 | def _invalid_sample(self, samples): 173 | if self.absolute: 174 | columns = samples.columns + ["v"] 175 | vels = np.full(len(samples.points), np.nan) 176 | else: 177 | columns = samples.columns + ["vx", "vy", "vz"] 178 | vels = np.full((len(samples.points), 3), np.nan) 179 | 180 | return samples.copy( 181 | data = np.c_[samples.points, vels], 182 | columns = columns, 183 | ) 184 | -------------------------------------------------------------------------------- /pept/tracking/fpi/fpi_ext.pyx: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | # pept is a Python library that unifies Positron Emission Particle 5 | # Tracking (PEPT) research, including tracking, simulation, data analysis 6 | # and visualisation tools. 7 | # 8 | # If you used this codebase or any software making use of it in a scientific 9 | # publication, you must cite the following paper: 10 | # Nicuşan AL, Windows-Yule CR. Positron emission particle tracking 11 | # using machine learning. Review of Scientific Instruments. 12 | # 2020 Jan 1;91(1):013329. 13 | # https://doi.org/10.1063/1.5129251 14 | # 15 | # Copyright (C) 2019-2021 the pept developers 16 | # 17 | # This program is free software: you can redistribute it and/or modify 18 | # it under the terms of the GNU General Public License as published by 19 | # the Free Software Foundation, either version 3 of the License, or 20 | # (at your option) any later version. 21 | # 22 | # This program is distributed in the hope that it will be useful, 23 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 24 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 25 | # GNU General Public License for more details. 26 | # 27 | # You should have received a copy of the GNU General Public License 28 | # along with this program. If not, see . 
29 | # pept is a Python library that unifies Positron Emission Particle
30 | # Tracking (PEPT) research, including tracking, simulation, data analysis
31 | # and visualisation tools
32 | 
33 | 
34 | # File : fpi_ext.pyx
35 | # License : GNU v3.0
36 | # Author : Andrei Leonard Nicusan
37 | # Date : 06.04.2020
38 | 
39 | 
40 | # distutils: language=c++
41 | 
42 | # cython: language_level=3
43 | # cython: boundscheck=False
44 | # cython: wraparound=False
45 | # cython: initializedcheck=False
46 | # cython: nonecheck=False
47 | # cython: embedsignature=True
48 | # cython: cdivision=True
49 | 
50 | 
51 | import numpy as np # import numpy for Python functions
52 | cimport numpy as np # import numpy for C functions (numpy's C API)
53 | 
54 | from libc.stdint cimport int64_t
55 | 
56 | 
57 | np.import_array()
58 | 
59 | 
60 | cdef extern from "calcPosFPI.hpp":
61 | double* calcPosFPIC(
62 | double *voxels,
63 | int64_t length,
64 | int64_t width,
65 | int64_t depth,
66 | double w,
67 | double r,
68 | double lldCounts,
69 | int64_t *out_rows,
70 | int64_t *out_cols
71 | ) nogil
72 | 
73 | 
74 | cpdef np.ndarray[double, ndim=2] fpi_ext(
75 | double[:, :, :] voxels,
76 | double w,
77 | double r,
78 | double lldCounts = 0.,
79 | ):
80 | '''Find a tracer's location from a pre-computed voxellised sample of LoRs
81 | using the Feature Point Identification (FPI) method.
82 | 
83 | ::
84 | 
85 | Function signature:
86 | 
87 | np.ndarray[double, ndim=2] fpi_ext(
88 | double[:, :, :] voxels,
89 | double w,
90 | double r,
91 | double lldCounts,
92 | )
93 | 
94 | FPI is a modern tracer-location algorithm that was successfully used to
95 | track fast-moving radioactive tracers in pipe flows at the Virginia
96 | Commonwealth University. If you use this algorithm in your work, please
97 | cite the following paper:
98 | 
99 | Wiggins C, Santos R, Ruggles A. A feature point identification method
100 | for positron emission particle tracking with multiple tracers. Nuclear
101 | Instruments and Methods in Physics Research Section A: Accelerators,
102 | Spectrometers, Detectors and Associated Equipment. 2017 Jan 21;
103 | 843:22-8.
104 | 
105 | Permission was granted explicitly by Dr. Cody Wiggins in March 2021 to
106 | publish his code in the `pept` library under the GNU v3.0 license.
107 | 
108 | The points returned by this function are in *voxel dimensions*, without
109 | timestamps. They can be translated into physical dimensions and timestamps
110 | can be added after calling it, e.g. with the `pept.tracking.fpi.FPI`
111 | class.
112 | 
113 | Parameters
114 | ----------
115 | voxels: (L, W, D) numpy.ndarray[ndim = 3, dtype = numpy.float64]
116 | The 3D grid of voxels containing a voxellised sample of LoRs; an
117 | empty grid can be created with `numpy.zeros((length, width, depth))`.
118 | 
119 | w: double
120 | Search range to be used in local maxima calculation. Typical values for
121 | w are 2 - 5 (lower number for more particles or smaller particle
122 | separation).
123 | 
124 | r: double
125 | Fraction of peak value used as threshold. Typical values for r are
126 | usually between 0.3 and 0.6 (lower for more particles, higher for
127 | greater background noise).
128 | 
129 | lldCounts: double, default 0
130 | A secondary lld to prevent assigning local maxima to voxels with very
131 | low values. The parameter lldCounts is not used much in practice -
132 | for most cases, it can be set to zero.
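Returns
-------
points : (N, C) numpy.ndarray
    The tracer locations found, in voxel coordinates and without
    timestamps (the number of columns C is decided by the C subroutine).

Examples
--------
A minimal sketch, assuming `voxels` already holds a voxellised sample of
LoRs (e.g. computed with `pept.Voxels`):

>>> points = fpi_ext(voxels, w = 3., r = 0.4)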
133 | 
134 | '''
135 | 
136 | cdef double *points
137 | cdef int64_t nrows = 0
138 | cdef int64_t ncols = 0
139 | cdef np.npy_intp[2] size
140 | 
141 | with nogil:
142 | points = calcPosFPIC(
143 | &voxels[0, 0, 0],
144 | voxels.shape[0],
145 | voxels.shape[1],
146 | voxels.shape[2],
147 | w,
148 | r,
149 | lldCounts,
150 | &nrows,
151 | &ncols,
152 | )
153 | 
154 | size[0] = nrows
155 | size[1] = ncols
156 | 
157 | # Use the `points` pointer as the internal data of a numpy array
158 | cdef extern from "numpy/arrayobject.h":
159 | void PyArray_ENABLEFLAGS(np.ndarray arr, int flags)
160 | 
161 | cdef np.ndarray[double, ndim=2] points_arr = np.PyArray_SimpleNewFromData(
162 | 2, size, np.NPY_FLOAT64, points
163 | )
164 | PyArray_ENABLEFLAGS(points_arr, np.NPY_OWNDATA)
165 | 
166 | return points_arr
167 | 
-------------------------------------------------------------------------------- /pept/tracking/fpi/PeptStructures.hpp: -------------------------------------------------------------------------------- 1 | /**
2 | * Permission granted explicitly by Cody Wiggins in March 2021 to publish his code in the `pept`
3 | * library under the GNU v3.0 license. If you use the `pept.tracking.fpi` submodule, please cite
4 | * the following paper:
5 | *
6 | * C. Wiggins et al. "A feature point identification method for positron emission particle
7 | * tracking with multiple tracers," Nucl. Instr. Meth. Phys. Res. A, 843:22, 2017.
8 | *
9 | * The original author's copyright notice is included below. A sincere thank you for your work.
10 | */
11 | 
12 | 
13 | //
14 | // PeptStructures.h
15 | // MultiPEPT Code Snippets
16 | //
17 | // Created by Cody Wiggins on 3/17/21.
18 | // Copyright © 2021 Cody Wiggins. All rights reserved.
19 | //
20 | // Code adapted from MultiPEPT v.1.18 by Cody Wiggins
21 | //
22 | // File for holding definition of structures used by MultiPEPT processing codes
23 | 
24 | 
25 | #ifndef PeptStructures_h
26 | #define PeptStructures_h
27 | 
28 | 
29 | #include <vector>
30 | #include <cmath>
31 | #include <cstdint>
32 | 
33 | 
34 | using namespace std;
35 | 
36 | 
37 | // structure for holding points in R3
38 | struct point3{
39 | double u[3]; // contains x, y, z coordinates
40 | // using name "u" due to conventional vector notation
41 | point3(const point3& a) // copy constructor
42 | {
43 | u[0]=a.u[0];
44 | u[1]=a.u[1];
45 | u[2]=a.u[2];
46 | }
47 | point3(int64_t x, int64_t y, int64_t z) // constructor with integers
48 | {
49 | u[0]=x; u[1]=y; u[2]=z;
50 | }
51 | point3(double x, double y, double z) // constructor with doubles
52 | {
53 | u[0]=x; u[1]=y; u[2]=z;
54 | }
55 | point3& operator=(point3& rhs) // copy assignment operator
56 | {
57 | if (this != &rhs)
58 | {
59 | u[0] = rhs.u[0];
60 | u[1] = rhs.u[1];
61 | u[2] = rhs.u[2];
62 | }
63 | 
64 | return *this;
65 | }
66 | };
67 | 
68 | 
69 | 
70 | // structure for cluster of 3d points
71 | struct cluster{
72 | vector<point3> point; // contains a vector of point3's
73 | 
74 | cluster(const cluster& a) // copy constructor
75 | {
76 | for (int64_t i=0; i<(int64_t)a.point.size(); i++){
77 | point.push_back(a.point[i]);
78 | }
79 | }
80 | cluster(point3 a) // constructor with single point3
81 | {
82 | point.push_back(a); // makes a single element vector
83 | }
84 | cluster(vector<point3> a) // constructor with vector of point3's
85 | {
86 | for (int64_t i=0; i<(int64_t)a.size(); i++)
87 | {
88 | point.push_back(a[i]);
89 | }
90 | }
91 | cluster& operator=(cluster& rhs) // copy assignment operator
92 | {
93 | if (this != &rhs)
94 | for (int64_t i=0; i<(int64_t)rhs.point.size(); i++)
95 | 
point.push_back(rhs.point[i]);
96 | 
97 | return *this;
98 | }
99 | };
100 | 
101 | 
102 | 
103 | // structure for holding points in R3 with time and error
104 | // these are similar to point3's, but also contain time and uncertainty info
105 | struct point3time{
106 | double u[3]; // x,y,z position
107 | double t; // time
108 | double err[3]; // x,y,z uncertainty
109 | int64_t nLOR;
110 | point3time(const point3time& a) // copy constructor
111 | {
112 | u[0]=a.u[0];
113 | u[1]=a.u[1];
114 | u[2]=a.u[2];
115 | t=a.t;
116 | err[0]=a.err[0];
117 | err[1]=a.err[1];
118 | err[2]=a.err[2];
119 | nLOR=a.nLOR;
120 | }
121 | point3time(int64_t x, int64_t y, int64_t z, int64_t time, int64_t ex, int64_t ey, int64_t ez, int64_t nLines) // integer constructor
122 | {
123 | u[0]=x; u[1]=y; u[2]=z; t=time; err[0]=ex; err[1]=ey; err[2]=ez; nLOR=nLines;
124 | }
125 | point3time(double x, double y, double z, double time, double ex, double ey, double ez, int64_t nLines) // double constructor
126 | {
127 | u[0]=x; u[1]=y; u[2]=z; t=time; err[0]=ex; err[1]=ey; err[2]=ez; nLOR=nLines;
128 | }
129 | point3time(point3 x, point3 dx, int time, int nLines) // constructor with point3's and int time
130 | {
131 | u[0]=x.u[0]; err[0]=dx.u[0];
132 | u[1]=x.u[1]; err[1]=dx.u[1];
133 | u[2]=x.u[2]; err[2]=dx.u[2];
134 | t=time;
135 | nLOR=nLines;
136 | }
137 | point3time(point3 x, point3 dx, double time, int64_t nLines) // constructor with point3's and double time
138 | {
139 | u[0]=x.u[0]; err[0]=dx.u[0];
140 | u[1]=x.u[1]; err[1]=dx.u[1];
141 | u[2]=x.u[2]; err[2]=dx.u[2];
142 | t=time;
143 | nLOR=nLines;
144 | }
145 | point3time& operator=(point3time& a) // copy assignment operator
146 | {
147 | if (this != &a)
148 | {
149 | u[0]=a.u[0];
150 | u[1]=a.u[1];
151 | u[2]=a.u[2];
152 | t=a.t;
153 | err[0]=a.err[0];
154 | err[1]=a.err[1];
155 | err[2]=a.err[2];
156 | nLOR=a.nLOR;
157 | }
158 | 
159 | return *this;
160 | }
161 | };
162 | 
163 | 
164 | 
165 | // structure for cluster of point3time's
166 | // similar to "cluster", but using point3time's
167 | struct clusterTime{
168 | vector<point3time> point; // contains vector of point3time
169 | int64_t nLikely; // number to be used in post processing for filtering false positives
170 | 
171 | clusterTime(const clusterTime& a) // copy constructor
172 | {
173 | for (int64_t i=0; i<(int64_t)a.point.size(); i++){
174 | point.push_back(a.point[i]);
175 | }
176 | nLikely=a.nLikely;
177 | }
178 | clusterTime(point3time a) // constructor with single point3time
179 | {
180 | point.push_back(a);
181 | nLikely=point.size();
182 | }
183 | clusterTime(vector<point3time> a) // constructor with vector of point3time
184 | {
185 | for (int64_t i=0; i<(int64_t)a.size(); i++)
186 | {
187 | point.push_back(a[i]);
188 | }
189 | nLikely=point.size();
190 | }
191 | clusterTime& operator=(clusterTime& a) // copy assignment operator
192 | {
193 | if (this != &a)
194 | {
195 | for (int64_t i=0; i<(int64_t)a.point.size(); i++){
196 | point.push_back(a.point[i]);
197 | }
198 | nLikely=a.nLikely;
199 | }
200 | 
201 | return *this;
202 | }
203 | 
204 | };
205 | 
206 | 
207 | #endif /* PeptStructures_h */
208 | 
-------------------------------------------------------------------------------- /pept/tracking/birmingham_method/extensions/birmingham_method.pyx: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | 
4 | 
5 | # pept is a Python library that unifies Positron Emission Particle
6 | # Tracking (PEPT) research, including tracking, simulation, data
analysis 7 | # and visualisation tools. 8 | # 9 | # If you used this codebase or any software making use of it in a scientific 10 | # publication, you must cite the following paper: 11 | # Nicuşan AL, Windows-Yule CR. Positron emission particle tracking 12 | # using machine learning. Review of Scientific Instruments. 13 | # 2020 Jan 1;91(1):013329. 14 | # https://doi.org/10.1063/1.5129251 15 | # 16 | # Copyright (C) 2019-2021 the pept developers 17 | # 18 | # This program is free software: you can redistribute it and/or modify 19 | # it under the terms of the GNU General Public License as published by 20 | # the Free Software Foundation, either version 3 of the License, or 21 | # (at your option) any later version. 22 | # 23 | # This program is distributed in the hope that it will be useful, 24 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 25 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 26 | # GNU General Public License for more details. 27 | # 28 | # You should have received a copy of the GNU General Public License 29 | # along with this program. If not, see . 30 | # pept is a Python library that unifies Positron Emission Particle 31 | # Tracking (PEPT) research, including tracking, simulation, data analysis 32 | # and visualisation tools 33 | 34 | 35 | # File : birmingham_method.pyx 36 | # License : GNU v3.0 37 | # Author : Sam Manger 38 | # Date : 21.08.2019 39 | 40 | 41 | # cython: language_level=3 42 | # cython: boundscheck=False 43 | # cython: wraparound=False 44 | # cython: initializedcheck=False 45 | # cython: nonecheck=False 46 | # cython: embedsignature=True 47 | # cython: cdivision=True 48 | 49 | 50 | import numpy as np # import numpy for Python functions 51 | cimport numpy as np # import numpy for C functions (numpy's C API) 52 | 53 | 54 | cdef extern from "birmingham_method_ext.c": 55 | # C is included here so that it doesn't need to be compiled externally 56 | pass 57 | 58 | 59 | cdef extern from "birmingham_method_ext.h": 60 | void birmingham_method_ext( 61 | const double *, const Py_ssize_t, const Py_ssize_t, 62 | double *, int *, const double 63 | ) nogil 64 | 65 | void calculate( 66 | double *, double *, double *, double *, double *, double *, 67 | double *, double *, double *, double *, double *, double *, 68 | double *, double *, double *, double *, double *, 69 | int *, int, int, double * 70 | ) nogil 71 | 72 | 73 | # cpdef means it is defined both for Python and C code; 74 | # cdef means it is defined only for C code; 75 | 76 | # Cython has a cool function to automatically get memoryviews of the input 77 | # parameters => double[:, :] receives a 2D numpy array. 78 | cpdef birmingham_method( 79 | const double[:, :] lines, 80 | const double fopt 81 | ): 82 | '''Use the Birmingham Method to find one tracer location from the LoRs 83 | stored in `lines`. 84 | 85 | Function signature: 86 | birmingham_method( 87 | double[:, :] lines, # LoRs in a sample 88 | double fopt # Fraction of LoRs used to find tracer 89 | ) 90 | 91 | This function receives a numpy array of LoRs (one "sample") from python, 92 | computing the minimum distance point (MDP). A number of lines that lie 93 | outside the standard deviation of the MDP are then removed from the set, 94 | and the MDP is recalculated. This process is repeated until approximately 95 | a fixed fraction (fopt) of the original lines is left. 96 | 97 | The found tracer position is then returned along with a boolean mask of 98 | the LoRs that were used to compute it. 
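A quick usage sketch of this low-level routine (assuming `sample` is a
`pept.LineData`; for a friendlier API, see the `BirminghamMethod` class
mentioned in the Notes below):

>>> location, used = birmingham_method(sample.lines, fopt = 0.5)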
99 | 
100 | Parameters
101 | ----------
102 | lines : (N, M >= 7) numpy.ndarray
103 | A numpy array of the lines of response (LoRs) that will be used to find
104 | a tracer location; each LoR is stored as a timestamp, the 3D
105 | coordinates of two points defining the line, followed by any additional
106 | data. The data columns are then `[time, x1, y1, z1, x2, y2, z2, etc]`.
107 | Note that the extra data is simply ignored by this function.
108 | fopt : float
109 | A float number between 0 and 1 representing the fraction of LoRs that
110 | will be used to compute the tracer location.
111 | 
112 | Returns
113 | -------
114 | location : (5,) numpy.ndarray
115 | The computed tracer location, with data columns formatted as
116 | `[time, x, y, z, error]`.
117 | used : (N,) numpy.ndarray
118 | A boolean mask of the LoRs that were used to compute the tracer
119 | location; that is, a vector of the same length as `lines`, containing 1
120 | for the rows that were used, and 0 otherwise.
121 | 
122 | Notes
123 | -----
124 | This is a low-level Cython function that does not do any checks on the
125 | input data - it is meant to be used in other modules / libraries. For a
126 | normal user, the `pept.tracking.birmingham_method.BirminghamMethod` class
127 | methods `fit_sample` and `fit` are recommended as higher-level APIs. They
128 | do check the input parameters and are easier to use.
129 | '''
130 | 
131 | # Py_ssize_t is the one "strange" type from Cython - it is the type used
132 | # for indexing arrays, as pointers must have a certain number of bits that
133 | # is platform-dependent; that type is stored as a C macro in `ssize_t` that
134 | # Cython also provides as `Py_ssize_t`.
135 | cdef Py_ssize_t nrows = lines.shape[0]
136 | cdef Py_ssize_t ncols = lines.shape[1]
137 | 
138 | # np.float64 == C double ; np.intc == C int ;
139 | cdef np.ndarray[double, ndim = 1] location = np.zeros(5, dtype = np.float64)
140 | cdef np.ndarray[int, ndim = 1] used = np.ones(nrows, dtype = np.intc)
141 | 
142 | # Release the GIL as we're in a thread-safe C function for most of our
143 | # computation time.
144 | with nogil:
145 | birmingham_method_ext(
146 | &lines[0, 0],
147 | nrows,
148 | ncols,
149 | &location[0],
150 | &used[0],
151 | fopt
152 | )
153 | 
154 | return location, used
155 | 
156 | 
157 | 
-------------------------------------------------------------------------------- /docs/source/tutorials/peptml.rst: -------------------------------------------------------------------------------- 1 | PEPT-ML
2 | =======
3 | 
4 | PEPT using Machine Learning is a modern clustering-based tracking method that was developed specifically for noisy, fast applications.
5 | 
6 | If you are using PEPT-ML in your research, you are kindly asked to cite the following paper:
7 | 
8 | *Nicuşan AL, Windows-Yule CR. Positron emission particle tracking using machine learning. Review of Scientific Instruments. 2020 Jan 1;91(1):013329.*
9 | 
10 | 
11 | PEPT-ML one pass of clustering recipe
12 | -------------------------------------
13 | 
14 | The LoRs are first converted into ``Cutpoints``, which are then assigned cluster labels using ``HDBSCAN``; the cutpoints are then grouped into clusters using ``SplitLabels`` and the clusters' ``Centroids`` are taken as the particle locations. Finally, stack all centroids into a single ``PointData``.
15 | 
16 | ::
17 | 
18 | import pept
19 | from pept.tracking import *
20 | 
21 | max_tracers = 1
22 | 
23 | pipeline = pept.Pipeline([
24 | Cutpoints(max_distance = 0.5),
25 | HDBSCAN(true_fraction = 0.15, max_tracers = max_tracers),
26 | SplitLabels() + Centroids(error = True),
27 | Stack(),
28 | ])
29 | 
30 | locations = pipeline.fit(lors)
31 | 
32 | 
33 | 
34 | PEPT-ML second pass of clustering recipe
35 | ----------------------------------------
36 | 
37 | The particle locations will always have a bit of *scatter* to them; we can *tighten* those points into accurate, dense trajectories using a *second pass of clustering*.
38 | 
39 | Set a very small sample size and maximum overlap to minimise temporal smoothing effects, then recluster the tracer locations, split according to cluster label, compute centroids, and stack into a final ``PointData``.
40 | 
41 | 
42 | ::
43 | 
44 | import pept
45 | from pept.tracking import *
46 | 
47 | max_tracers = 1
48 | 
49 | pipeline = pept.Pipeline([
50 | Stack(sample_size = 30 * max_tracers, overlap = 30 * max_tracers - 1),
51 | HDBSCAN(true_fraction = 0.6, max_tracers = max_tracers),
52 | SplitLabels() + Centroids(error = True),
53 | Stack(),
54 | ])
55 | 
56 | locations2 = pipeline.fit(lors)
57 | 
58 | 
59 | 
60 | PEPT-ML complete recipe
61 | -----------------------
62 | 
63 | Including an example ADAC Forte data initialisation, two passes of
64 | clustering, trajectory separation, plotting and saving trajectories as
65 | CSV.
66 | 
67 | 
68 | ::
69 | 
70 | # Import what we need from the `pept` library
71 | import pept
72 | from pept.tracking import *
73 | from pept.plots import PlotlyGrapher, PlotlyGrapher2D
74 | 
75 | 
76 | # Open interactive plots in the web browser
77 | import plotly
78 | plotly.io.renderers.default = "browser"
79 | 
80 | 
81 | # Initialise data from file and set sample size and overlap
82 | filepath = "DS1.da01"
83 | max_tracers = 1
84 | 
85 | lors = pept.scanners.adac_forte(
86 | filepath,
87 | sample_size = 200 * max_tracers,
88 | overlap = 150 * max_tracers,
89 | )
90 | 
91 | 
92 | # Select only the first 1000 samples of LoRs for testing; comment out for all
93 | lors = lors[:1000]
94 | 
95 | 
96 | # Create PEPT-ML processing pipeline
97 | pipeline = pept.Pipeline([
98 | 
99 | # First pass of clustering
100 | Cutpoints(max_distance = 0.2),
101 | HDBSCAN(true_fraction = 0.15, max_tracers = max_tracers),
102 | SplitLabels() + Centroids(error = True),
103 | 
104 | # Second pass of clustering
105 | Stack(sample_size = 30 * max_tracers, overlap = 30 * max_tracers - 1),
106 | HDBSCAN(true_fraction = 0.6, max_tracers = max_tracers),
107 | SplitLabels() + Centroids(),
108 | 
109 | # Trajectory separation
110 | Segregate(window = 20 * max_tracers, cut_distance = 10),
111 | Stack(),
112 | ])
113 | 
114 | 
115 | # Process all samples in `lors` in parallel, using `max_workers` threads
116 | trajectories = pipeline.fit(lors)
117 | 
118 | 
119 | # Save trajectories as CSV
120 | trajectories.to_csv(filepath + ".csv")
121 | 
122 | # Save as a fast binary; you can load them back with `pept.load("path")`
123 | trajectories.save(filepath + ".pickle")
124 | 
125 | 
126 | # Plot trajectories - first a 2D timeseries, then all 3D positions
127 | PlotlyGrapher2D().add_timeseries(trajectories).show()
128 | PlotlyGrapher().add_points(trajectories).show()
129 | 
130 | 
131 | 
132 | 
133 | Example of a Complex Processing Pipeline
134 | ----------------------------------------
135 | 
136 | This is an example of "production code" used for
tracking tracers in pipe flow 137 | imaging, where particles enter and leave the field of view regularly. This 138 | pipeline automatically: 139 | 140 | - Sets an optimum adaptive time window. 141 | - Runs a first pass of clustering, keeping track of the number of LoRs around 142 | the tracers (``cluster_size``) and relative location error (``error``). 143 | - Removes locations with too few LoRs or large errors. 144 | - Sets a new optimum adaptive time window for a second pass of clustering. 145 | - Removes spurious points while the tracer is out of the field of view. 146 | - Separates out different tracer trajectories, removes the ones with too few 147 | points and groups them by trajectory. 148 | - Computes the tracer velocity at each location on each trajectory. 149 | - Removes locations at the edges of the detectors. 150 | 151 | Each individual step could be an entire program on its own; with the PEPT 152 | ``Pipeline`` architecture, they can be chained in 17 lines of Python code, 153 | automatically using all processors available on parallelisable sections. 154 | 155 | :: 156 | 157 | # Create PEPT-ML processing pipeline 158 | pipeline = pept.Pipeline([ 159 | OptimizeWindow(200, overlap = 0.5) + Debug(1), 160 | 161 | # First pass of clustering 162 | Cutpoints(max_distance = 0.2), 163 | HDBSCAN(true_fraction = 0.15), 164 | SplitLabels() + Centroids(cluster_size = True, error = True), 165 | 166 | # Remove erroneous points 167 | Condition("cluster_size > 30, error < 20"), 168 | 169 | # Second pass of clustering 170 | OptimizeWindow(30, overlap = 0.95) + Debug(1), 171 | HDBSCAN(true_fraction = 0.6), 172 | SplitLabels() + Centroids(), 173 | 174 | # Remove sparse points in time 175 | OutOfViewFilter(200.), 176 | 177 | # Trajectory separation 178 | Segregate(window = 20, cut_distance = 20, min_trajectory_size = 20), 179 | Condition("label >= 0"), 180 | GroupBy("label"), 181 | 182 | # Velocity computation 183 | Velocity(11), 184 | Velocity(11, absolute = True), 185 | 186 | # Cutoff points outside this region 187 | Condition("y > 100, y < 500"), 188 | 189 | Stack(), 190 | ]) 191 | 192 | 193 | 194 | 195 | 196 | -------------------------------------------------------------------------------- /pept/scanners/parallel_screens/adac_forte.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | # pept is a Python library that unifies Positron Emission Particle 5 | # Tracking (PEPT) research, including tracking, simulation, data analysis 6 | # and visualisation tools 7 | # 8 | # If you used this codebase or any software making use of it in a scientific 9 | # publication, you must cite the following paper: 10 | # Nicuşan AL, Windows-Yule CR. Positron emission particle tracking 11 | # using machine learning. Review of Scientific Instruments. 12 | # 2020 Jan 1;91(1):013329. 13 | # https://doi.org/10.1063/1.5129251 14 | # 15 | # Copyright (C) 2020 Andrei Leonard Nicusan 16 | # 17 | # This program is free software: you can redistribute it and/or modify 18 | # it under the terms of the GNU General Public License as published by 19 | # the Free Software Foundation, either version 3 of the License, or 20 | # (at your option) any later version. 21 | # 22 | # This program is distributed in the hope that it will be useful, 23 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 24 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 25 | # GNU General Public License for more details. 
26 | 
27 | # You should have received a copy of the GNU General Public License
28 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
29 | 
30 | # File : adac_forte.py
31 | # License: GNU v3.0
32 | # Author : Andrei Leonard Nicusan
33 | # Date : 01.04.2021
34 | 
35 | 
36 | import os
37 | import time
38 | import textwrap
39 | from glob import glob
40 | 
41 | import numpy as np
42 | from natsort import natsorted
43 | from pept import LineData
44 | 
45 | from .extensions import convert_adac_forte
46 | 
47 | 
48 | 
49 | 
50 | def adac_forte(
51 | filepath,
52 | sample_size = None,
53 | overlap = None,
54 | verbose = True,
55 | ):
56 | '''Initialise PEPT lines of response (LoRs) from a binary file outputted by
57 | the ADAC Forte parallel screen detector list mode (common file extension
58 | ".da01").
59 | 
60 | Parameters
61 | ----------
62 | filepath : str
63 | The path to an ADAC Forte-generated binary file from which the LoRs
64 | will be read into the `LineData` format. If you have multiple files,
65 | use a wildcard (*) after their common substring to concatenate them,
66 | e.g. "DS1.da*" will add ["DS1.da01", "DS1.da02", "DS1.da02_02"].
67 | 
68 | sample_size : int, default 0
69 | An `int` that defines the number of lines that should be returned
70 | when iterating over `lines`. A `sample_size` of 0 yields all the
71 | data as one single sample. A good starting value would be 200 times
72 | the maximum number of tracers that would be tracked.
73 | 
74 | overlap : int, default 0
75 | An `int` that defines the overlap between two consecutive samples
76 | that are returned when iterating over `lines`. An overlap of 0
77 | implies consecutive samples, while an overlap of
78 | (`sample_size` - 1) means incrementing the samples by one. A
79 | negative overlap means skipping values between samples. An error is
80 | raised if `overlap` is larger than or equal to `sample_size`.
81 | 
82 | verbose : bool, default True
83 | An option that enables printing the time taken for the
84 | initialisation of an instance of the class. Useful when reading
85 | large files (10 GB files for PEPT data are not unheard of).
86 | 
87 | Returns
88 | -------
89 | LineData
90 | The initialised LoRs.
91 | 
92 | Raises
93 | ------
94 | FileNotFoundError
95 | If the input `filepath` does not exist.
96 | 
97 | ValueError
98 | If `overlap` >= `sample_size`. Overlap has to be smaller than
99 | `sample_size`. Note that it can also be negative.
100 | 
101 | See Also
102 | --------
103 | pept.LineData : Encapsulate LoRs for ease of iteration and plotting.
104 | pept.PointData : Encapsulate points for ease of iteration and plotting.
105 | pept.read_csv : Fast CSV file reading into numpy arrays.
106 | PlotlyGrapher : Easy, publication-ready plotting of PEPT-oriented data.
107 | 
108 | Examples
109 | --------
110 | Initialise LoRs from an ADAC Forte list mode binary file (each line of
111 | response is defined by **two** points, one on each parallel screen of
112 | the scanner):
113 | 
114 | >>> lors = pept.scanners.adac_forte("binary_data_adac.da01")
115 | Initialised the PEPT data in 0.011 s.
116 | 
117 | >>> lors
118 | LineData
119 | --------
120 | sample_size = 0
121 | overlap = 0
122 | samples = 1
123 | lines =
124 | [[0.00000000e+00 1.62250000e+02 3.60490000e+02 ... 4.14770000e+02
125 | 3.77010000e+02 3.10000000e+02]
126 | [4.19512195e-01 2.05910000e+02 2.68450000e+02 ... 3.51640000e+02
127 | 2.95000000e+02 3.10000000e+02]
128 | [8.39024390e-01 3.16830000e+02 1.26260000e+02 ...
2.74350000e+02 129 | 3.95300000e+02 3.10000000e+02] 130 | ... 131 | [1.98255892e+04 2.64320000e+02 2.43080000e+02 ... 2.25970000e+02 132 | 4.01200000e+02 3.10000000e+02] 133 | [1.98263928e+04 3.19780000e+02 3.38660000e+02 ... 2.75530000e+02 134 | 5.19200000e+02 3.10000000e+02] 135 | [1.98271964e+04 2.41310000e+02 4.15360000e+02 ... 2.91460000e+02 136 | 4.63150000e+02 3.10000000e+02]] 137 | lines.shape = (32526, 7) 138 | columns = ['t', 'x1', 'y1', 'z1', 'x2', 'y2', 'z2'] 139 | ''' 140 | 141 | if verbose: 142 | start = time.time() 143 | 144 | # If we have a wildcard (*) in the filepath, find all files 145 | if "*" in filepath: 146 | filepaths = natsorted(glob(filepath)) 147 | if verbose: 148 | print(f"Concatenating files:\n {filepaths}") 149 | 150 | # Otherwise make sure the single file exists 151 | else: 152 | if not os.path.isfile(filepath): 153 | raise FileNotFoundError(textwrap.fill(( 154 | f"The input file path {filepath} does not exist!" 155 | ))) 156 | 157 | filepaths = [filepath] 158 | 159 | lines = convert_adac_forte(filepaths[0]) 160 | 161 | # If there are multiple files, concatenate them (and add up the timestamps) 162 | for i in range(1, len(filepaths)): 163 | new_lines = convert_adac_forte(filepaths[i]) 164 | new_lines[:, 0] += lines[-1, 0] 165 | 166 | lines = np.vstack((lines, new_lines)) 167 | 168 | # Flip Y axis 169 | lines[:, [2, 5]] = 600 - lines[:, [2, 5]] 170 | 171 | if verbose: 172 | end = time.time() 173 | print(f"\nInitialised PEPT data in {end - start:3.3f} s.\n") 174 | 175 | return LineData(lines, sample_size = sample_size, overlap = overlap) 176 | -------------------------------------------------------------------------------- /pept/tracking/birmingham_method/birmingham_method.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # pept is a Python library that unifies Positron Emission Particle 6 | # Tracking (PEPT) research, including tracking, simulation, data analysis 7 | # and visualisation tools 8 | # 9 | # Copyright (C) 2019-2021 the pept developers 10 | # 11 | # This program is free software: you can redistribute it and/or modify 12 | # it under the terms of the GNU General Public License as published by 13 | # the Free Software Foundation, either version 3 of the License, or 14 | # (at your option) any later version. 15 | # 16 | # This program is distributed in the hope that it will be useful, 17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | # GNU General Public License for more details. 20 | # 21 | # You should have received a copy of the GNU General Public License 22 | # along with this program. If not, see . 23 | 24 | 25 | # File : birmingham_method.py 26 | # License: GNU v3.0 27 | # Author : Sam Manger 28 | # Date : 20.08.2019 29 | 30 | 31 | import numpy as np 32 | 33 | import pept 34 | 35 | from .extensions.birmingham_method import birmingham_method 36 | 37 | 38 | class BirminghamMethod(pept.base.LineDataFilter): 39 | '''The Birmingham Method is an efficient, analytical technique for tracking 40 | tracers using the LoRs from PEPT data. 41 | 42 | Two main methods are provided: `fit_sample` for tracking a single numpy 43 | array of LoRs (i.e. a single sample) and `fit` which tracks all the samples 44 | encapsulated in a `pept.LineData` class *in parallel*. 
45 | 
46 | For the given `sample` of LoRs (a numpy.ndarray), this filter minimises
47 | the distance between all of the LoRs, rejecting a fraction of lines that
48 | lie furthest away from the calculated minimum distance point. The process
49 | is repeated iteratively until a specified fraction (`fopt`) of the
50 | original subset of LoRs remains.
51 | 
52 | This class is a wrapper around the `birmingham_method` subroutine
53 | (implemented in C), providing tools for asynchronously tracking samples of
54 | LoRs. It can return `PointData` classes which can be easily manipulated and
55 | visualised.
56 | 
57 | Attributes
58 | ----------
59 | fopt : float
60 | Floating-point number between 0 and 1, representing the target fraction
61 | of LoRs in a sample used to locate a tracer.
62 | 
63 | get_used : bool, default False
64 | If True, attach an attribute ``._lines`` to the output PointData
65 | containing the sample of LoRs used (+ a column `used`).
66 | 
67 | See Also
68 | --------
69 | pept.LineData : Encapsulate LoRs for ease of iteration and plotting.
70 | pept.PointData : Encapsulate points for ease of iteration and plotting.
71 | pept.utilities.read_csv : Fast CSV file reading into numpy arrays.
72 | PlotlyGrapher : Easy, publication-ready plotting of PEPT-oriented data.
73 | pept.scanners.ParallelScreens : Initialise a `pept.LineData` instance from
74 | parallel screens PEPT detectors.
75 | 
76 | Examples
77 | --------
78 | A typical workflow would involve reading LoRs from a file, instantiating a
79 | `BirminghamMethod` class, tracking the tracer locations from the LoRs, and
80 | plotting them.
81 | 
82 | >>> import pept
83 | >>> from pept.tracking.birmingham_method import BirminghamMethod
84 | 
85 | >>> lors = pept.LineData(...) # set sample_size and overlap appropriately
86 | >>> bham = BirminghamMethod()
87 | >>> locations = bham.fit(lors) # this is a `pept.PointData` instance
88 | 
89 | >>> grapher = PlotlyGrapher()
90 | >>> grapher.add_points(locations)
91 | >>> grapher.show()
92 | '''
93 | 
94 | def __init__(self, fopt = 0.5, get_used = False):
95 | '''`BirminghamMethod` class constructor.
96 | 
97 | fopt : float, default 0.5
98 | Float number between 0 and 1, representing the fraction of
99 | remaining LoRs in a sample used to locate the particle.
100 | 
101 | get_used : bool, default False
102 | If True, also attach the LoRs used (+ a column `used`) to the
103 | output.
104 | '''
105 | 
106 | # Type-check the parameters: `fopt` must be a fraction between 0 and
107 | # 1, and `get_used` a boolean deciding whether the used LoRs are
108 | # also attached to the output.
109 | self.fopt = float(fopt)
110 | self.get_used = bool(get_used)
111 | 
112 | 
113 | def fit_sample(self, sample):
114 | '''Use the Birmingham method to track a tracer location from a numpy
115 | array (i.e. one sample) of LoRs.
116 | 
117 | For the given `sample` of LoRs (a numpy.ndarray), this function
118 | minimises the distance between all of the LoRs, rejecting a fraction of
119 | lines that lie furthest away from the calculated minimum distance
120 | point. The process is repeated iteratively until a specified fraction
121 | (`fopt`) of the original subset of LoRs remains.
122 | 
123 | Parameters
124 | ----------
125 | sample : (N, M>=7) numpy.ndarray
126 | The sample of LoRs that will be clustered. Each LoR is expressed as
127 | a timestamp and a line defined by two points; the data columns are
128 | 129 | Returns 130 | ------- 131 | locations : numpy.ndarray or pept.PointData 132 | The tracked locations found. 133 | 134 | used : numpy.ndarray, optional 135 | If `get_used` is true, then also return a boolean mask of the LoRs 136 | used to compute the tracer location - that is, a vector of the same 137 | length as `sample`, containing 1 for the rows that were used, and 0 138 | otherwise. 139 | [Used for multi-particle tracking, not implemented yet]. 140 | 141 | Raises 142 | ------ 143 | ValueError 144 | If `sample` is not a numpy array of shape (N, M), where M >= 7. 145 | ''' 146 | 147 | if not isinstance(sample, pept.LineData): 148 | sample = pept.LineData(sample) 149 | 150 | locations, used = birmingham_method(sample.lines, self.fopt) 151 | 152 | # Propagate any LineData attributes besides `columns` 153 | attrs = sample.extra_attrs() 154 | 155 | locations = pept.PointData( 156 | [locations], 157 | columns = ["t", "x", "y", "z", "error"], 158 | **attrs, 159 | ) 160 | 161 | # If `get_used`, also attach a `._lines` attribute with the lines used 162 | if self.get_used: 163 | locations.attrs["_lines"] = sample.copy( 164 | data = np.c_[sample.lines, used], 165 | columns = sample.columns + ["used"], 166 | ) 167 | 168 | return locations 169 | -------------------------------------------------------------------------------- /pept/tracking/trajectory_separation/distance_matrix_reachable.pyx: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | # pept is a Python library that unifies Positron Emission Particle 5 | # Tracking (PEPT) research, including tracking, simulation, data analysis 6 | # and visualisation tools. 7 | # 8 | # If you used this codebase or any software making use of it in a scientific 9 | # publication, you must cite the following paper: 10 | # Nicuşan AL, Windows-Yule CR. Positron emission particle tracking 11 | # using machine learning. Review of Scientific Instruments. 12 | # 2020 Jan 1;91(1):013329. 13 | # https://doi.org/10.1063/1.5129251 14 | # 15 | # Copyright (C) 2019-2021 the pept developers 16 | # 17 | # This program is free software: you can redistribute it and/or modify 18 | # it under the terms of the GNU General Public License as published by 19 | # the Free Software Foundation, either version 3 of the License, or 20 | # (at your option) any later version. 21 | # 22 | # This program is distributed in the hope that it will be useful, 23 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 24 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 25 | # GNU General Public License for more details. 26 | # 27 | # You should have received a copy of the GNU General Public License 28 | # along with this program. If not, see . 
29 | # pept is a Python library that unifies Positron Emission Particle
30 | # Tracking (PEPT) research, including tracking, simulation, data analysis
31 | # and visualisation tools
32 | 
33 | 
34 | # File    : distance_matrix_reachable.pyx
35 | # License : GNU v3.0
36 | # Author  : Andrei Leonard Nicusan
37 | # Date    : 10.06.2020
38 | 
39 | 
40 | # cython: language_level=3
41 | # cython: boundscheck=False
42 | # cython: wraparound=False
43 | # cython: initializedcheck=False
44 | # cython: nonecheck=False
45 | # cython: embedsignature=True
46 | # cython: cdivision=True
47 | 
48 | 
49 | import numpy as np
50 | from scipy.sparse import csr_matrix
51 | 
52 | from libc.float cimport DBL_MIN, DBL_MAX
53 | from libc.math cimport sqrt
54 | cimport numpy as np
55 | 
56 | 
57 | cpdef distance_matrix_reachable(
58 |     const double[:, :] pts,     # Array of points, cols = [t, x, y, z, ...]
59 |     const int points_window,
60 |     const double max_time_interval,
61 | ):
62 |     '''Compute the distance matrix from a time-sorted array of points `pts`
63 |     based on a sliding `points_window`.
64 | 
65 |     ::
66 | 
67 |         Function signature:
68 |             distance_matrix_reachable(
69 |                 double[:, :] pts,   # Array of points, cols = [t, x, y, z, ...]
70 |                 int points_window,
71 |                 double max_time_interval,
72 |             )
73 | 
74 |     The distance between the points (pts[i], pts[j]) is stored in the distance
75 |     matrix at indices (i, j), making it upper-triangular.
76 | 
77 |     The distance matrix is created and returned using SciPy's sparse CSR
78 |     matrix format. This saves a lot in terms of memory usage, especially for
79 |     time-series data such as moving tracers, as not all points can be
80 |     connected. This format is also closer to the mathematical formulation of
81 |     "reachable points" in terms of undirected (incomplete) graphs - namely
82 |     storing edges as a list of pairs of vertices.
83 | 
84 |     This is a low-level Cython function that does not do any checks on the
85 |     input data - it is meant to be used in other modules / libraries; in
86 |     particular, the `pept.tracking.trajectory_separation` module.
87 | 
88 |     Parameters
89 |     ----------
90 |     pts : (M, N>=4) numpy.ndarray
91 |         The points from multiple trajectories. Each row in `pts` will
92 |         have a timestamp and the 3 spatial coordinates, such that the data
93 |         columns are [time, x_coord, y_coord, z_coord]. Note that `pts`
94 |         can have more data columns and they will simply be ignored.
95 | 
96 |     points_window : int
97 |         Two points are "reachable" (i.e. they can be connected) if and only if
98 |         they are within `points_window` in the time-sorted input `pts`. As the
99 |         points from different trajectories are intertwined (e.g. for two
100 |         tracers A and B, the `pts` array might have two entries for A,
101 |         followed by three entries for B, then one entry for A, etc.), this
102 |         should optimally be the largest number of points in the input array
103 |         between two consecutive points on the same trajectory. If
104 |         `points_window` is too small, consecutive points on the same
105 |         trajectory become unreachable. Naturally, a larger `points_window`
106 |         corresponds to more pairs needing to be checked (and the function
107 |         will take longer to complete).
108 | 
109 |     max_time_interval : double
110 |         The maximum time allowed between two consecutive points for them to be
111 |         reachable / connected. If the interval between two points is larger
112 |         than this, their distance is set to DBL_MAX.
113 | 
114 |     Returns
115 |     -------
116 |     distance_matrix : scipy.sparse.csr_matrix
117 |         A SciPy sparse matrix in the CSR format, containing the distances
118 |         between every pair of reachable points in `pts`.
119 | 
120 |     Notes
121 |     -----
122 |     In order for the `points_window` to act as a sliding window, in effect only
123 |     connecting points which are around the same timeframe, the points should be
124 |     sorted based on the time column (the first column) in `pts`. This should be
125 |     done *prior* to calling this function.
126 |     '''
127 | 
128 |     # Use Py_ssize_t as we will access C arrays (memoryviews on numpy arrays).
129 |     # That is the "proper" type of a C array pointer / index.
130 |     cdef Py_ssize_t n = pts.shape[0]    # Total number of points
131 |     cdef Py_ssize_t p = min(n, points_window)
132 | 
133 |     # Calculate sparse distance matrix between reachable points. The number of
134 |     # pairs we need to check, `ndists`, is given by the formula below:
135 |     # ndists = sum over i of min(p, n - 1 - i) = p * n - p * (p + 1) / 2
136 |     cdef Py_ssize_t ndists = (p + 1) * (n - p) + p * (p + 1) // 2 - n
137 | 
138 |     # Pre-allocate the arrays for creating the sparse distance matrix. In the
139 |     # sparse matrix, every entry in `dists` has an associated row index in
140 |     # `rows` and column index in `cols`.
141 |     cdef np.ndarray[double, ndim = 1] dists_arr = np.zeros(ndists, dtype =
142 |         np.float64)
143 |     cdef np.ndarray[double, ndim = 1] rows_arr = np.zeros(ndists, dtype =
144 |         np.float64)
145 |     cdef np.ndarray[double, ndim = 1] cols_arr = np.zeros(ndists, dtype =
146 |         np.float64)
147 | 
148 |     # We'll work with memoryviews on the above arrays:
149 |     cdef double[:] dists = dists_arr
150 |     cdef double[:] rows = rows_arr
151 |     cdef double[:] cols = cols_arr
152 | 
153 |     # Calculate the distances between reachable points.
154 |     cdef Py_ssize_t ie = 0      # distance index
155 |     cdef Py_ssize_t i, j        # iterators
156 |     cdef double dist            # distance between two points
157 | 
158 |     with nogil:
159 |         for i in range(n - 1):
160 |             for j in range(i + 1, min(i + p, n - 1) + 1):
161 |                 if pts[j, 0] - pts[i, 0] > max_time_interval:
162 |                     dist = DBL_MAX
163 |                 else:
164 |                     # Euclidean distance between points i, j in `pts`
165 |                     # dist = np.linalg.norm(pts[i, 1:4] - pts[j, 1:4])
166 |                     dist = sqrt(
167 |                         (pts[j, 1] - pts[i, 1]) ** 2 +
168 |                         (pts[j, 2] - pts[i, 2]) ** 2 +
169 |                         (pts[j, 3] - pts[i, 3]) ** 2
170 |                     )
171 | 
172 |                 # Work around a quirk of scipy's minimum_spanning_tree where
173 |                 # duplicate points (i.e. dist == 0.0) are omitted from the
174 |                 # MST vertices.
175 |                 dists[ie] = dist if dist != 0.0 else DBL_MIN
176 |                 rows[ie] = i
177 |                 cols[ie] = j
178 |                 ie = ie + 1
179 | 
180 |     # Create the distance matrix from the found points.
181 |     distance_matrix = csr_matrix(
182 |         (dists, (rows, cols)),
183 |         shape = (n, n)
184 |     )
185 | 
186 |     return distance_matrix
187 | 
--------------------------------------------------------------------------------
/pept/utilities/cutpoints/find_minpoints.pyx:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | 
3 | 
4 | # pept is a Python library that unifies Positron Emission Particle
5 | # Tracking (PEPT) research, including tracking, simulation, data analysis
6 | # and visualisation tools.
7 | #
8 | # If you used this codebase or any software making use of it in a scientific
9 | # publication, you must cite the following paper:
10 | #     Nicuşan AL, Windows-Yule CR. Positron emission particle tracking
11 | #     using machine learning. Review of Scientific Instruments.
12 | #     2020 Jan 1;91(1):013329.
13 | #     https://doi.org/10.1063/1.5129251
14 | #
15 | # Copyright (C) 2019-2021 the pept developers
16 | #
17 | # This program is free software: you can redistribute it and/or modify
18 | # it under the terms of the GNU General Public License as published by
19 | # the Free Software Foundation, either version 3 of the License, or
20 | # (at your option) any later version.
21 | #
22 | # This program is distributed in the hope that it will be useful,
23 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
24 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
25 | # GNU General Public License for more details.
26 | #
27 | # You should have received a copy of the GNU General Public License
28 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
29 | # pept is a Python library that unifies Positron Emission Particle
30 | # Tracking (PEPT) research, including tracking, simulation, data analysis
31 | # and visualisation tools
32 | 
33 | 
34 | # File    : find_minpoints.pyx
35 | # License : GNU v3.0
36 | # Author  : Andrei Leonard Nicusan
37 | # Date    : 20.10.2020
38 | 
39 | 
40 | # cython: language_level=3
41 | # cython: boundscheck=False
42 | # cython: wraparound=False
43 | # cython: initializedcheck=False
44 | # cython: nonecheck=False
45 | # cython: embedsignature=True
46 | # cython: cdivision=True
47 | 
48 | 
49 | import numpy as np      # import numpy for Python functions
50 | cimport numpy as np     # import numpy for C functions (numpy's C API)
51 | 
52 | 
53 | np.import_array()
54 | 
55 | 
56 | cdef extern from "find_minpoints_ext.c":
57 |     # C is included here so that it doesn't need to be compiled externally
58 |     pass
59 | 
60 | 
61 | cdef extern from "find_minpoints_ext.h":
62 |     double* find_minpoints_ext(
63 |         const double *, const Py_ssize_t, const Py_ssize_t, const Py_ssize_t,
64 |         const double, const double *, const int, Py_ssize_t *, Py_ssize_t *
65 |     ) nogil
66 | 
67 | 
68 | 
69 | cpdef find_minpoints(
70 |     const double[:, :] sample_lines,    # LoRs in sample
71 |     const Py_ssize_t num_lines,         # Number of LoRs in groups for computing MDP
72 |     const double max_distance,          # Max allowed distance between two LoRs
73 |     const double[:] cutoffs,            # Spatial cutoff for cutpoints
74 |     bint append_indices = 0             # Append LoR indices used for each cutpoint
75 | ):
76 |     '''Compute the minimum distance points (MDPs) from all combinations of
77 |     `num_lines` lines given in an array of lines `sample_lines`.
78 | 
79 |     ::
80 | 
81 |         Function signature:
82 |             find_minpoints(
83 |                 double[:, :] sample_lines,  # LoRs in sample
84 |                 Py_ssize_t num_lines,       # Number of LoRs in combinations
85 |                 double max_distance,        # Max distance from MDP to LoRs
86 |                 double[:] cutoffs,          # Spatial cutoff for minpoints
87 |                 bool append_indices = 0     # Append LoR indices used
88 |             )
89 | 
90 |     Given a sample of lines, this function computes the minimum distance
91 |     points (MDPs) for every possible combination of `num_lines` lines. The
92 |     returned numpy array contains all MDPs that satisfy the following:
93 | 
94 |     1. Are within the `cutoffs`.
95 |     2. Are closer to all the constituent LoRs than `max_distance`.
96 | 
97 |     Parameters
98 |     ----------
99 |     sample_lines : (M, N>=7) numpy.ndarray
100 |         A 2D array of lines, where each line is defined by two points such that
101 |         every row is formatted as `[t, x1, y1, z1, x2, y2, z2, etc.]`. It
102 |         *must* have at least 2 lines and the combination size `num_lines`
103 |         *must* be smaller than or equal to the number of lines. Put
104 |         differently: 2 <= num_lines <= len(sample_lines).
105 | 
106 |     num_lines : int
107 |         The number of lines in each combination of LoRs used to compute the
108 |         MDP. This function considers every combination of `num_lines` lines
109 |         from the input `sample_lines`. It must be smaller than or equal to
110 |         the number of input lines in `sample_lines`.
111 | 
112 |     max_distance : float
113 |         The maximum allowed distance between an MDP and its constituent lines.
114 |         If any distance from the MDP to one of its lines is larger than
115 |         `max_distance`, the MDP is thrown away.
116 | 
117 |     cutoffs : (6,) numpy.ndarray
118 |         An array of spatial cutoff coordinates with *exactly 6 elements* as
119 |         [x_min, x_max, y_min, y_max, z_min, z_max]. If any MDP lies outside
120 |         this region, it is thrown away.
121 | 
122 |     append_indices : bool
123 |         A boolean specifying whether to include the indices of the lines used
124 |         to compute each MDP. If `False`, the output array will only contain
125 |         the columns [time, x, y, z] of the MDPs. If `True`, the output array
126 |         will have extra columns [time, x, y, z, line_idx(1), ..., line_idx(n)]
127 |         where n = `num_lines`.
128 | 
129 |     Returns
130 |     -------
131 |     minpoints : (P, 4) or (P, 4 + num_lines) numpy.ndarray
132 |         A 2D array of `float`s containing the time and coordinates of the MDPs
133 |         [time, x, y, z]. The time is computed as the average of the constituent
134 |         lines. If `append_indices` is `True`, then the `num_lines` indices of
135 |         the constituent lines are appended as extra columns:
136 |         [time, x, y, z, line_idx1, line_idx2, ..].
137 | 
138 |     Notes
139 |     -----
140 |     There must be at least two lines in `sample_lines` and `num_lines` must be
141 |     smaller than or equal to the number of lines (i.e. `len(sample_lines)`).
142 |     Put another way: 2 <= num_lines <= len(sample_lines).
143 | 
144 |     This is a low-level Cython function that does not do any checks on the
145 |     input data - it is meant to be used in other modules / libraries. For a
146 |     normal user, the `pept.tracking.peptml` function `find_minpoints` and
147 |     class `Minpoints` are recommended as higher-level APIs. They do check the
148 |     input data and are easier to use (for example, they automatically compute
149 |     the cutoffs).
150 | 
151 |     Examples
152 |     --------
153 | 
154 |     >>> import numpy as np
155 |     >>> from pept.utilities import find_minpoints
156 |     >>>
157 |     >>> lines = np.random.random((500, 7)) * 500
158 |     >>> num_lines = 3
159 |     >>> max_distance = 0.1
160 |     >>> cutoffs = np.array([0, 500, 0, 500, 0, 500], dtype = float)
161 |     >>>
162 |     >>> minpoints = find_minpoints(lines, num_lines, max_distance, cutoffs)
163 | 
164 |     '''
165 | 
166 |     # Lines for a single sample => (m, n >= 7) array
167 |     # sample_lines row: [time X1 Y1 Z1 X2 Y2 Z2 etc.]
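    # [Commentary added for clarity] The block below delegates the
    # combinatorial search to the C routine `find_minpoints_ext`, which
    # allocates the output buffer itself; ownership of that buffer is then
    # transferred to the returned numpy array via the NPY_OWNDATA flag, so
    # numpy frees it and no explicit free() is needed on the Cython side.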
168 | cdef Py_ssize_t nrows = sample_lines.shape[0] 169 | cdef Py_ssize_t ncols = sample_lines.shape[1] 170 | 171 | cdef Py_ssize_t mpts_nrows = 0 172 | cdef Py_ssize_t mpts_ncols = 0 173 | 174 | cdef double *minpoints 175 | cdef np.npy_intp[2] size 176 | 177 | with nogil: 178 | minpoints = find_minpoints_ext( 179 | &sample_lines[0, 0], 180 | nrows, 181 | ncols, 182 | num_lines, 183 | max_distance, 184 | &cutoffs[0], 185 | append_indices, 186 | &mpts_nrows, 187 | &mpts_ncols 188 | ) 189 | 190 | size[0] = mpts_nrows 191 | size[1] = mpts_ncols 192 | 193 | # Use the `minpoints` pointer as the internal data of a numpy array 194 | cdef extern from "numpy/arrayobject.h": 195 | void PyArray_ENABLEFLAGS(np.ndarray arr, int flags) 196 | 197 | cdef np.ndarray[double, ndim=2] mpts_arr = np.PyArray_SimpleNewFromData( 198 | 2, size, np.NPY_FLOAT64, minpoints 199 | ) 200 | PyArray_ENABLEFLAGS(mpts_arr, np.NPY_OWNDATA) 201 | 202 | # Sort rows based on time (column 0) 203 | mpts_arr = mpts_arr[mpts_arr[:, 0].argsort()] 204 | 205 | return mpts_arr 206 | -------------------------------------------------------------------------------- /pept/scanners/modular_camera/extensions/get_pept_event_ext.c: -------------------------------------------------------------------------------- 1 | 2 | /** 3 | * pept is a Python library that unifies Positron Emission Particle 4 | * Tracking (PEPT) research, including tracking, simulation, data analysis 5 | * and visualisation tools 6 | * 7 | * Copyright (C) 2019 Andrei Leonard Nicusan 8 | * 9 | * This program is free software: you can redistribute it and/or modify 10 | * it under the terms of the GNU General Public License as published by 11 | * the Free Software Foundation, either version 3 of the License, or 12 | * (at your option) any later version. 13 | * 14 | * This program is distributed in the hope that it will be useful, 15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 17 | * GNU General Public License for more details. 18 | * 19 | * You should have received a copy of the GNU General Public License 20 | * along with this program. If not, see . 
21 |  */
22 | 
23 | /**
24 |  * File    : get_pept_event_ext.c
25 |  * License : GNU v3.0
26 |  * Author  : Sam Manger
27 |  * Date    : 01.07.2019
28 |  */
29 | 
30 | 
31 | #include "get_pept_event_ext.h"
32 | 
33 | 
34 | unsigned short int bitrev(unsigned short int i1, int length)
35 | {
36 |     unsigned short int j,k,dum,bit1,bit2,out;
37 |     out=0;
38 |     for(j=0;(int)j<length;j++)
39 |     {
40 |         k=length-1-j;
41 |         bit1=i1&(1<<j);
42 |         if(k>j)
43 |             bit2=bit1<<(k-j);
44 |         else
45 |             bit2=bit1>>(j-k);
46 |         out=out|bit2;
47 |     }
48 |     return out;
49 | }
50 | 
51 | 
52 | 
53 | void get_pept_event_ext(double* result, unsigned int word, int itag, int itime)
54 | {
55 |     int MPnum[112] =
56 |     { 0,5,  0,6,  0,7,  0,8,  0,9,  0,10,  0,11,
57 |       1,6,  1,7,  1,8,  1,9,  1,10,  1,11,  1,12,
58 |       2,7,  2,8,  2,9,  2,10,  2,11,  2,12,  2,13,
59 |       3,8,  3,9,  3,10,  3,11,  3,12,  3,13,  3,14,
60 |       4,9,  4,10,  4,11,  4,12,  4,13,  4,14,  4,15,
61 |       5,10,  5,11,  5,12,  5,13,  5,14,  5,15,
62 |       6,11,  6,12,  6,13,  6,14,  6,15,
63 |       7,12,  7,13,  7,14,  7,15,
64 |       8,13,  8,14,  8,15,
65 |       9,14,  9,15,
66 |       10,15
67 |     };
68 | 
69 |     unsigned short int short1,short2,word1,word2;
70 |     int dtime, modpair,BoardAddr,Bucket[2],Block[2],Seg[2],Plane[2],itagold;
71 | 
72 |     short1 = (word&0xffff);
73 |     word1=bitrev(short1,16);    //reverse bit ordering for PJ card
74 |     short2 = (word&0xffff0000)>>16;
75 |     word2=bitrev(short2,16);
76 | 
77 |     itagold=itag;
78 |     itag=word2&0x0f;
79 |     dtime=itag-itagold;
80 |     if(dtime<0)dtime+=16;
81 |     itime+=dtime*2;
82 | 
83 |     modpair = word1&0x3f;
84 | 
85 |     Bucket[0] = MPnum[2*modpair-2];     //check whether starts from 0 or 1
86 |     Bucket[1] = MPnum[2*modpair-1];
87 | 
88 |     Block[0] = (word2&0x180)>>7;
89 |     Block[1] = (word2&0x6000)>>13;
90 | 
91 |     Seg[0] = (word2&0x1c00)>>10;
92 |     Seg[1] = (word2&0x70)>>4;
93 | 
94 |     BoardAddr = (word1 & 0xc000)>>14;
95 | 
96 |     if(BoardAddr==1||BoardAddr==2)
97 |         BoardAddr=3-BoardAddr;
98 | 
99 |     if(BoardAddr == 0);     // board 0 needs no bucket offset
100 |     else if(BoardAddr == 1)
101 |     {
102 | 
103 |         Bucket[1] += 16;
104 |     }
105 |     else if(BoardAddr == 2)
106 |     {
107 | 
108 |         Bucket[0] += 16;
109 |     }
110 |     else if(BoardAddr == 3)
111 |     {
112 |         Bucket[0] += 16;
113 |         Bucket[1] += 16;
114 |     }
115 | 
116 |     Plane[0] = (word1&0x300)>>8;
117 |     Plane[1] = (word1&0x1800)>>11;
118 | 
119 |     int it;
120 | 
121 |     if (modpair>28)     // should be 56 but for modular camera we cop out at 28
122 |     {
123 |         for (it=0; it<12; it++)
124 |         {
125 |             result[it] = 0;
126 |         }
127 |     }
128 |     else
129 |     {
130 |         result[0] = word;
131 |         result[1] = itag;
132 |         result[2] = itime;
133 |         result[3] = modpair;
134 |         result[4] = Bucket[0];
135 |         result[5] = Bucket[1];
136 |         result[6] = Block[0];
137 |         result[7] = Block[1];
138 |         result[8] = Seg[0];
139 |         result[9] = Seg[1];
140 |         result[10] = Plane[0];
141 |         result[11] = Plane[1];
142 |     }
143 | }
144 | 
145 | 
146 | void get_pept_LOR_ext(double* result, unsigned int word, int itag, int itime)
147 | {
148 |     int MPnum[112] =
149 |     { 0,5,  0,6,  0,7,  0,8,  0,9,  0,10,  0,11,
150 |       1,6,  1,7,  1,8,  1,9,  1,10,  1,11,  1,12,
151 |       2,7,  2,8,  2,9,  2,10,  2,11,  2,12,  2,13,
152 |       3,8,  3,9,  3,10,  3,11,  3,12,  3,13,  3,14,
153 |       4,9,  4,10,  4,11,  4,12,  4,13,  4,14,  4,15,
154 |       5,10,  5,11,  5,12,  5,13,  5,14,  5,15,
155 |       6,11,  6,12,  6,13,  6,14,  6,15,
156 |       7,12,  7,13,  7,14,  7,15,
157 |       8,13,  8,14,  8,15,
158 |       9,14,  9,15,
159 |       10,15
160 |     };
161 | 
162 |     unsigned short int short1,short2,word1,word2;
163 | 
164 |     int dtime, modpair,BoardAddr,Bucket[2],Block[2],Seg[2],Plane[2],itagold;
165 | 
166 |     float x[2],y[2],z[2];
167 | 
168 |     float modHeight=95.;
169 |     float segHeight=13.5;
170 |     float segWidth=6.25;
171 |     float blockWidth=50;
172 |     float blockSep=41;
173 | 
float modSep=250; 174 | 175 | short1 = (word&0xffff); 176 | word1=bitrev(short1,16); //reverse bit ordering for PJ card 177 | short2 = (word&0xffff0000)>>16; 178 | word2=bitrev(short2,16); 179 | 180 | itagold=itag; 181 | itag=word2&0x0f; 182 | dtime=itag-itagold; 183 | if(dtime<0)dtime+=16; 184 | itime+=dtime*2; 185 | 186 | modpair = word1&0x3f; 187 | 188 | Bucket[0] = MPnum[2*modpair-2]; //check whether starts from 0 or 1 189 | Bucket[1] = MPnum[2*modpair-1]; 190 | 191 | Block[0] = (word2&0x180)>>7; 192 | Block[1] = (word2&0x6000)>>13; 193 | 194 | Seg[0] = (word2&0x1c00)>>10; 195 | Seg[1] = (word2&0x70)>>4; 196 | 197 | BoardAddr = (word1 & 0xc000)>>14; 198 | 199 | if(BoardAddr==1||BoardAddr==2) 200 | BoardAddr=3-BoardAddr; 201 | 202 | if(BoardAddr == 0); 203 | else if(BoardAddr == 1) 204 | { 205 | 206 | Bucket[1] += 16; 207 | } 208 | else if(BoardAddr == 2) 209 | { 210 | 211 | Bucket[0] += 16; 212 | } 213 | else if(BoardAddr == 3) 214 | { 215 | Bucket[0] += 16; 216 | Bucket[1] += 16; 217 | } 218 | 219 | Plane[0] = (word1&0x300)>>8; 220 | Plane[1] = (word1&0x1800)>>11; 221 | 222 | int detector; 223 | 224 | for (detector=0; detector<2; detector++) 225 | { 226 | 227 | if(0 <= Bucket[detector] && Bucket[detector] <= 3){ 228 | x[detector] = modSep; 229 | y[detector] = ((3-Bucket[detector])*modHeight) + (Plane[detector] * segHeight); 230 | z[detector] = ((2-Block[detector])*(blockWidth+blockSep) - (blockSep/2) - (segWidth/2) - ((7-Seg[detector])*segWidth)); 231 | } 232 | else if (8 <= Bucket[detector] && Bucket[detector] <= 11){ 233 | x[detector] = -modSep; 234 | y[detector] = ((11-Bucket[detector])*modHeight) + (Plane[detector] * segHeight); 235 | z[detector] = -((2-Block[detector])*(blockWidth+blockSep) - (blockSep/2) - (segWidth/2) - ((7-Seg[detector])*segWidth)); 236 | } 237 | else if (16 <= Bucket[detector] && Bucket[detector] <= 19){ 238 | x[detector] = ((2-Block[detector])*(blockWidth+blockSep) - (blockSep/2) - (segWidth/2) - ((7-Seg[detector])*segWidth)); 239 | y[detector] = ((19-Bucket[detector])*modHeight) + (Plane[detector] * segHeight); 240 | z[detector] = -modSep; 241 | } 242 | else if (24 <= Bucket[detector] && Bucket[detector] <= 27){ 243 | x[detector] = -((2-Block[detector])*(blockWidth+blockSep) - (blockSep/2) - (segWidth/2) - ((7-Seg[detector])*segWidth)); 244 | y[detector] = ((27-Bucket[detector])*modHeight) + (Plane[detector] * segHeight); 245 | z[detector] = +modSep; 246 | } 247 | else 248 | { 249 | continue; 250 | } 251 | 252 | } 253 | 254 | int it; 255 | 256 | if (modpair>28) // should be 56 but for modular camera we cop out at 28 257 | { 258 | for (it=0; it<8; it++) 259 | { 260 | result[it] = 0; 261 | } 262 | } 263 | else 264 | { 265 | result[0] = itag; 266 | result[1] = itime; 267 | result[2] = x[0]; 268 | result[3] = y[0]; 269 | result[4] = z[0]; 270 | result[5] = x[1]; 271 | result[6] = y[1]; 272 | result[7] = z[1]; 273 | } 274 | } 275 | -------------------------------------------------------------------------------- /pept/base/pixels.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # pept is a Python library that unifies Positron Emission Particle 6 | # Tracking (PEPT) research, including tracking, simulation, data analysis 7 | # and visualisation tools. 8 | # 9 | # If you used this codebase or any software making use of it in a scientific 10 | # publication, you must cite the following paper: 11 | # Nicuşan AL, Windows-Yule CR. 
Positron emission particle tracking 12 | # using machine learning. Review of Scientific Instruments. 13 | # 2020 Jan 1;91(1):013329. 14 | # https://doi.org/10.1063/1.5129251 15 | # 16 | # Copyright (C) 2019-2021 the pept developers 17 | # 18 | # This program is free software: you can redistribute it and/or modify 19 | # it under the terms of the GNU General Public License as published by 20 | # the Free Software Foundation, either version 3 of the License, or 21 | # (at your option) any later version. 22 | # 23 | # This program is distributed in the hope that it will be useful, 24 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 25 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 26 | # GNU General Public License for more details. 27 | # 28 | # You should have received a copy of the GNU General Public License 29 | # along with this program. If not, see . 30 | 31 | 32 | # File : pixels.py 33 | # License: GNU v3.0 34 | # Author : Andrei Leonard Nicusan 35 | # Date : 23.11.2021 36 | 37 | 38 | import time 39 | import textwrap 40 | 41 | import numpy as np 42 | 43 | from konigcell import Pixels 44 | from pept.utilities.traverse import traverse2d 45 | 46 | 47 | 48 | def get_cutoff(p1, p2): 49 | '''Return a numpy array containing the minimum and maximum value found 50 | across the two input arrays. 51 | Parameters 52 | ---------- 53 | p1 : (N,) numpy.ndarray 54 | The first 1D numpy array. 55 | p2 : (N,) numpy.ndarray 56 | The second 1D numpy array. 57 | Returns 58 | ------- 59 | (2,) numpy.ndarray 60 | The minimum and maximum value found across `p1` and `p2`. 61 | Notes 62 | ----- 63 | The input parameters *must* be numpy arrays, otherwise an error will 64 | be raised. 65 | ''' 66 | 67 | return np.array([ 68 | min(p1.min(), p2.min()), 69 | max(p1.max(), p2.max()), 70 | ]) 71 | 72 | 73 | def from_lines( 74 | lines, 75 | number_of_pixels, 76 | xlim = None, 77 | ylim = None, 78 | verbose = True, 79 | ): 80 | '''Create a pixel space and traverse / pixellise a given sample of 81 | `lines`. 82 | 83 | The `number_of_pixels` in each dimension must be defined. If the 84 | pixel space boundaries `xlim` or `ylim` are not defined, they 85 | are inferred as the boundaries of the `lines`. 86 | 87 | Parameters 88 | ---------- 89 | lines : (M, N>=5) numpy.ndarray 90 | The lines that will be pixellised, each defined by a timestamp and 91 | two 2D points, so that the data columns are [time, x1, y1, x2, y2]. 92 | Note that extra columns are ignored. 93 | 94 | number_of_pixels : (2,) list[int] 95 | The number of pixels in the x- and y-dimensions, respectively. 96 | 97 | xlim : (2,) list[float], optional 98 | The lower and upper boundaries of the pixellised volume in the 99 | x-dimension, formatted as [x_min, x_max]. If undefined, it is 100 | inferred from the boundaries of `lines`. 101 | 102 | ylim : (2,) list[float], optional 103 | The lower and upper boundaries of the pixellised volume in the 104 | y-dimension, formatted as [y_min, y_max]. If undefined, it is 105 | inferred from the boundaries of `lines`. 106 | 107 | Returns 108 | ------- 109 | pept.Pixels 110 | A new `Pixels` object with the pixels through which the lines were 111 | traversed. 112 | 113 | Raises 114 | ------ 115 | ValueError 116 | If the input `lines` does not have the shape (M, N>=5). If the 117 | `number_of_pixels` is not a 1D list with exactly 2 elements, or 118 | any dimension has fewer than 2 pixels. 
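
    Examples
    --------
    A minimal sketch, with random placeholder lines standing in for real 2D
    LoR data; it assumes the patched class is exposed as ``pept.Pixels`` (it
    is imported here from `konigcell`):

    >>> import numpy as np
    >>> import pept
    >>>
    >>> lines = np.random.random((100, 5)) * 100    # [t, x1, y1, x2, y2]
    >>> pixels = pept.Pixels.from_lines(lines, number_of_pixels = (256, 256))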
119 |     '''
120 | 
121 |     if verbose:
122 |         start = time.time()
123 | 
124 |     # Type-checking inputs
125 |     lines = np.asarray(lines, order = "C", dtype = float)
126 | 
127 |     if lines.ndim != 2 or lines.shape[1] < 5:
128 |         raise ValueError(textwrap.fill((
129 |             "The input `lines` must be a 2D numpy array containing lines "
130 |             "defined by a timestamp and two 2D points, with every row "
131 |             "formatted as [t, x1, y1, x2, y2]. The `lines` must then have "
132 |             f"shape (M, N >= 5). Received array with shape {lines.shape}."
133 |         )))
134 | 
135 |     number_of_pixels = np.asarray(
136 |         number_of_pixels,
137 |         order = "C",
138 |         dtype = int
139 |     )
140 | 
141 |     if number_of_pixels.ndim != 1 or len(number_of_pixels) != 2:
142 |         raise ValueError(textwrap.fill((
143 |             "The input `number_of_pixels` must be a list-like "
144 |             "with exactly two values, corresponding to the "
145 |             "number of pixels in the x- and y-dimension. "
146 |             f"Received parameter with shape {number_of_pixels.shape}."
147 |         )))
148 | 
149 |     if (number_of_pixels < 2).any():
150 |         raise ValueError(textwrap.fill((
151 |             "The input `number_of_pixels` must set at least two "
152 |             "pixels in each dimension (i.e. all elements in "
153 |             "`number_of_pixels` must be larger or equal to two). "
154 |             f"Received `{number_of_pixels}`."
155 |         )))
156 | 
157 |     if xlim is None:
158 |         xlim = get_cutoff(lines[:, 1], lines[:, 3])
159 |     else:
160 |         xlim = np.asarray(xlim, dtype = float)
161 | 
162 |         if xlim.ndim != 1 or len(xlim) != 2:
163 |             raise ValueError(textwrap.fill((
164 |                 "The input `xlim` parameter must be a list with exactly "
165 |                 "two values, corresponding to the minimum and maximum "
166 |                 "coordinates of the pixel space in the x-dimension. "
167 |                 f"Received parameter with shape {xlim.shape}."
168 |             )))
169 | 
170 |     if ylim is None:
171 |         # Use the y-coordinate columns here (the previous code reused the
172 |         # x-coordinate columns 1 and 3 by mistake).
173 |         ylim = get_cutoff(lines[:, 2], lines[:, 4])
174 |     else:
175 |         ylim = np.asarray(ylim, dtype = float)
176 | 
177 |         if ylim.ndim != 1 or len(ylim) != 2:
178 |             raise ValueError(textwrap.fill((
179 |                 "The input `ylim` parameter must be a list with exactly "
180 |                 "two values, corresponding to the minimum and maximum "
181 |                 "coordinates of the pixel space in the y-dimension. "
182 |                 f"Received parameter with shape {ylim.shape}."
183 |             )))
184 | 
185 |     pixels_array = np.zeros(tuple(number_of_pixels))
186 |     pixels = Pixels(
187 |         pixels_array,
188 |         xlim = xlim,
189 |         ylim = ylim,
190 |     )
191 | 
192 |     pixels.add_lines(lines, verbose = False)
193 | 
194 |     if verbose:
195 |         end = time.time()
196 |         print((
197 |             f"Initialised Pixels class in {end - start} s."
198 |         ))
199 | 
200 |     return pixels
201 | 
202 | 
203 | # Dynamically add this as a Pixels method
204 | def add_lines(self, lines, verbose = False):
205 |     '''Pixellise a sample of lines, adding 1 to each pixel traversed, for
206 |     each line in the sample.
207 | 
208 |     Parameters
209 |     ----------
210 |     lines : (M, N >= 5) numpy.ndarray
211 |         The sample of 2D lines to pixellise. Each line is defined as a
212 |         timestamp followed by two 2D points, such that the data columns are
213 |         `[time, x1, y1, x2, y2, ...]`. Note that there can be extra data
214 |         columns which will be ignored.
215 | 
216 |     verbose : bool, default False
217 |         Time the pixel traversal and print it to the terminal.
218 | 
219 |     Raises
220 |     ------
221 |     ValueError
222 |         If `lines` has fewer than 5 columns.
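
    Examples
    --------
    A minimal sketch; `pixels` is assumed to be a `Pixels` instance created
    beforehand (e.g. with `Pixels.from_lines` above), and the random lines
    are placeholders for real data:

    >>> import numpy as np
    >>> lines = np.random.random((10, 5)) * 100     # [t, x1, y1, x2, y2]
    >>> pixels.add_lines(lines, verbose = True)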
221 | ''' 222 | 223 | lines = np.asarray(lines, order = "C", dtype = float) 224 | if lines.ndim != 2 or lines.shape[1] < 5: 225 | raise ValueError(textwrap.fill(( 226 | "The input `lines` must be a 2D array of lines, where each " 227 | "line (i.e. row) is defined by a timestamp and two 2D points, " 228 | "so the data columns are [time, x1, y1, x2, y2]. " 229 | f"Received array of shape {lines.shape}." 230 | ))) 231 | 232 | if verbose: 233 | start = time.time() 234 | 235 | traverse2d( 236 | self.pixels, 237 | lines, 238 | self.pixel_grids[0], 239 | self.pixel_grids[1], 240 | ) 241 | 242 | if verbose: 243 | end = time.time() 244 | print(f"Traversing {len(lines)} lines took {end - start} s.") 245 | 246 | 247 | # Add the `from_lines` function as a static method to Pixels 248 | Pixels.from_lines = staticmethod(from_lines) 249 | Pixels.add_lines = add_lines 250 | -------------------------------------------------------------------------------- /pept/tracking/fpi/fpi.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | # pept is a Python library that unifies Positron Emission Particle 6 | # Tracking (PEPT) research, including tracking, simulation, data analysis 7 | # and visualisation tools. 8 | # 9 | # If you used this codebase or any software making use of it in a scientific 10 | # publication, you must cite the following paper: 11 | # Nicuşan AL, Windows-Yule CR. Positron emission particle tracking 12 | # using machine learning. Review of Scientific Instruments. 13 | # 2020 Jan 1;91(1):013329. 14 | # https://doi.org/10.1063/1.5129251 15 | # 16 | # Copyright (C) 2019-2021 the pept developers 17 | # 18 | # This program is free software: you can redistribute it and/or modify 19 | # it under the terms of the GNU General Public License as published by 20 | # the Free Software Foundation, either version 3 of the License, or 21 | # (at your option) any later version. 22 | # 23 | # This program is distributed in the hope that it will be useful, 24 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 25 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 26 | # GNU General Public License for more details. 27 | # 28 | # You should have received a copy of the GNU General Public License 29 | # along with this program. If not, see . 30 | 31 | 32 | # File : fpi.py 33 | # License: GNU v3.0 34 | # Author : Andrei Leonard Nicusan 35 | # Date : 16.04.2021 36 | 37 | 38 | import warnings 39 | import textwrap 40 | 41 | import numpy as np 42 | 43 | import pept 44 | from .fpi_ext import fpi_ext 45 | 46 | 47 | class FPI(pept.base.VoxelsFilter): 48 | '''FPI is a modern voxel-based tracer-location algorithm that can reliably 49 | work with unknown numbers of tracers in fast and noisy environments. 50 | 51 | It was successfully used to track fast-moving radioactive tracers in pipe 52 | flows at the Virginia Commonwealth University. If you use this algorithm in 53 | your work, please cite the following paper: 54 | 55 | Wiggins C, Santos R, Ruggles A. A feature point identification method 56 | for positron emission particle tracking with multiple tracers. Nuclear 57 | Instruments and Methods in Physics Research Section A: Accelerators, 58 | Spectrometers, Detectors and Associated Equipment. 2017 Jan 21; 59 | 843:22-8. 60 | 61 | Permission was granted by Dr. Cody Wiggins in March 2021 to publish his 62 | code in the `pept` library under the GNU v3.0 license. 
63 | 64 | Two main methods are provided: `fit_sample` for tracking a single voxel 65 | space (i.e. a single `pept.Voxels`) and `fit` which tracks all the samples 66 | encapsulated in a `pept.VoxelData` class *in parallel*. 67 | 68 | Attributes 69 | ---------- 70 | w : double 71 | Search range to be used in local maxima calculation. Typical values for 72 | w are 2 - 5 (lower number for more particles or smaller particle 73 | separation). 74 | 75 | r : double 76 | Fraction of peak value used as threshold. Typical values for r are 77 | usually between 0.3 and 0.6 (lower for more particles, higher for 78 | greater background noise) 79 | 80 | lld_counts : double, default 0 81 | A secondary lld to prevent assigning local maxima to voxels with very 82 | low values. The parameter lld_counts is not used much in practice - 83 | for most cases, it can be set to zero. 84 | 85 | See Also 86 | -------- 87 | pept.LineData : Encapsulate LoRs for ease of iteration and plotting. 88 | pept.PointData : Encapsulate points for ease of iteration and plotting. 89 | pept.utilities.read_csv : Fast CSV file reading into numpy arrays. 90 | PlotlyGrapher : Easy, publication-ready plotting of PEPT-oriented data. 91 | 92 | Examples 93 | -------- 94 | A typical workflow would involve reading LoRs from a file, creating a lazy 95 | `VoxelData` voxellised representation, instantiating an `FPI` class, 96 | tracking the tracer locations from the LoRs, and plotting them. 97 | 98 | >>> import pept 99 | >>> 100 | >>> lors = pept.LineData(...) # set sample_size and overlap appropriately 101 | >>> voxels = pept.tracking.Voxelize((50, 50, 50)).fit(lors) 102 | >>> 103 | >>> fpi = pept.tracking.FPI(w = 3, r = 0.4) 104 | >>> positions = fpi.fit(voxels) # this is a `pept.PointData` instance 105 | 106 | A much more efficient approach would be to create a `pept.Pipeline` 107 | containing a voxelization step and then FPI: 108 | 109 | >>> from pept.tracking import * 110 | >>> 111 | >>> pipeline = Voxelize((50, 50, 50)) + FPI() + Stack() 112 | >>> positions = pipeline.fit(lors) 113 | 114 | Finally, plotting results: 115 | 116 | >>> from pept.plots import PlotlyGrapher 117 | >>> 118 | >>> grapher = PlotlyGrapher() 119 | >>> grapher.add_points(positions) 120 | >>> grapher.show() 121 | 122 | >>> from pept.plots import PlotlyGrapher2D 123 | >>> PlotlyGrapher2D().add_timeseries(positions).show() 124 | ''' 125 | 126 | def __init__( 127 | self, 128 | w = 3., 129 | r = 0.4, 130 | lld_counts = 0., 131 | verbose = False, 132 | ): 133 | '''`FPI` class constructor. 134 | 135 | Parameters 136 | ---------- 137 | w : double 138 | Search range to be used in local maxima calculation. Typical values 139 | for w are 2 - 5 (lower number for more particles or smaller 140 | particle separation). 141 | 142 | r : double 143 | Fraction of peak value used as threshold. Typical values for r are 144 | usually between 0.3 and 0.6 (lower for more particles, higher for 145 | greater background noise) 146 | 147 | lld_counts : double, default 0 148 | A secondary lld to prevent assigning local maxima to voxels with 149 | very low values. The parameter `lld_counts` is not used much in 150 | practice - for most cases, it can be set to zero. 151 | 152 | verbose : bool, default False 153 | Show extra information on class instantiation. 
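
        Examples
        --------
        A minimal construction sketch (mirroring the class-level example
        above; `w = 3, r = 0.4` are just the typical values suggested there):

        >>> import pept
        >>> fpi = pept.tracking.FPI(w = 3, r = 0.4)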
154 |         '''
155 |         self.w = float(w)
156 |         self.r = float(r)
157 |         self.lld_counts = float(lld_counts)
158 | 
159 | 
160 |     def fit_sample(self, voxels: pept.Voxels):
161 |         '''Use the FPI algorithm to locate a tracer from a single voxellised
162 |         space (i.e. from one sample of LoRs).
163 | 
164 |         A sample of LoRs can be voxellised using the `pept.Voxels.from_lines`
165 |         method before calling this function.
166 | 
167 |         Parameters
168 |         ----------
169 |         voxels : pept.Voxels
170 |             A single voxellised space (i.e. from a single sample of LoRs) for
171 |             which the tracers' locations will be found using the FPI method.
172 | 
173 |         Returns
174 |         -------
175 |         locations : pept.PointData
176 |             The tracked locations found, with data columns `[t, x, y, z,
177 |             error_x, error_y, error_z, error]`. They are returned in a
178 |             `pept.PointData` (rather than a plain NumPy array) for ease of
179 |             iteration and visualisation.
180 | 
181 |         Raises
182 |         ------
183 |         TypeError
184 |             If `voxels` is not an instance of `pept.Voxels` (or subclass
185 |             thereof).
186 |         '''
187 | 
188 |         if not isinstance(voxels, pept.Voxels):
189 |             raise TypeError(textwrap.fill((
190 |                 "The input `voxels` must be a Voxels instance. Received type "
191 |                 f"`{type(voxels)}`."
192 |             )))
193 | 
194 |         positions = fpi_ext(
195 |             np.asarray(voxels.voxels, dtype = float, order = "C"),
196 |             self.w,
197 |             self.r,
198 |             self.lld_counts,
199 |         )
200 | 
201 |         # Translate the coordinates from the voxel space to the physical space
202 |         positions[:, :3] *= voxels.voxel_size
203 |         positions[:, :3] += [voxels.xlim[0], voxels.ylim[0], voxels.zlim[0]]
204 | 
205 |         # Convert errors to physical space too
206 |         positions[:, 3:] *= voxels.voxel_size
207 | 
208 |         # Create points array to store [t, x, y, z, xerr, yerr, zerr, err]
209 |         points = np.full((len(positions), 8), np.nan)
210 |         points[:, 1:7] = positions
211 |         points[:, 7] = np.linalg.norm(positions[:, 3:6], axis = 1)
212 | 
213 |         # Set the timestamp if `_lines` exists
214 |         if "_lines" in voxels.attrs:
215 |             points[:, 0] = voxels.attrs["_lines"].lines[:, 0].mean()
216 |         else:
217 |             warnings.warn((
218 |                 "The input `Voxels` did not have a '_lines' attribute, so no "
219 |                 "timestamp can be inferred. The time was set to NaN."
220 |             ), RuntimeWarning)
221 | 
222 |         return pept.PointData(
223 |             points,
224 |             columns = ["t", "x", "y", "z",
225 |                        "error_x", "error_y", "error_z",
226 |                        "error"],
227 |         )
228 | 
--------------------------------------------------------------------------------