├── .github
└── workflows
│ ├── sphinx.yml
│ └── test_and_deploy.yml
├── .gitignore
├── .nojekyll
├── LICENSE
├── MANIFEST.in
├── README.md
├── build.rst
├── docs
├── .gitignore
├── Makefile
├── make.bat
└── source
│ ├── _static
│ └── .gitkeep
│ ├── conf.py
│ ├── guide.rst
│ ├── images
│ ├── output_Example1_1.png
│ ├── output_Example1_2.png
│ ├── output_Example1_3.png
│ ├── output_Example1_4.png
│ ├── output_Example1_5.png
│ ├── output_Example1_6.png
│ ├── output_Example2_1.png
│ ├── output_Example2_2.png
│ └── output_Example2_3.png
│ ├── index.rst
│ ├── install.rst
│ ├── intro.rst
│ ├── modules.rst
│ ├── quickstart.rst
│ ├── stardist_stracking.rst
│ └── stracking.rst
├── examples
├── README.rst
├── detectors
│ ├── README.rst
│ ├── dog_detector_2d.py
│ ├── doh_detector_2d.py
│ └── log_detector_2d.py
├── features
│ ├── README.rst
│ └── length_features_2d.py
├── jupyter_notebooks
│ ├── Test StarDist + Stracking.ipynb
│ └── Test_Stracking.ipynb
├── linkers
│ ├── README.rst
│ └── sp_linker.py
└── properties
│ ├── README.rst
│ └── intensity_properties.py
├── requirements.txt
├── setup.cfg
├── setup.py
├── stracking
├── __init__.py
├── cli
│ ├── __init__.py
│ ├── detector_dog.py
│ ├── detector_doh.py
│ ├── detector_log.py
│ └── linker_shortest_path.py
├── containers
│ ├── __init__.py
│ └── _containers.py
├── data
│ ├── __init__.py
│ ├── fake_tracks1.tif
│ └── tracks1_3d
│ │ ├── track1_t001.tif
│ │ ├── track1_t002.tif
│ │ ├── track1_t003.tif
│ │ ├── track1_t004.tif
│ │ └── track1_t005.tif
├── detectors
│ ├── __init__.py
│ ├── _detector.py
│ ├── _gaussian_detectors.py
│ ├── _seg_detector.py
│ └── tests
│ │ ├── __init__.py
│ │ ├── test_gaussian_detectors.py
│ │ ├── tracks1.tif
│ │ └── tracks1_crop.tif
├── features
│ ├── __init__.py
│ ├── _feature.py
│ ├── _length.py
│ └── tests
│ │ ├── __init__.py
│ │ └── test_length.py
├── filters
│ ├── __init__.py
│ ├── _feature_filter.py
│ ├── _filter.py
│ └── tests
│ │ ├── __init__.py
│ │ └── test_feature_filter.py
├── io
│ ├── __init__.py
│ ├── _csv_io.py
│ ├── _icy_io.py
│ ├── _io.py
│ ├── _isbi_io.py
│ ├── _particles_io.py
│ ├── _reader_function.py
│ ├── _st_io.py
│ ├── _trackmate_io.py
│ └── tests
│ │ ├── FakeTracks_ISBI.xml
│ │ ├── FakeTracks_Icy.xml
│ │ ├── FakeTracks_TrackMate.xml
│ │ ├── __init__.py
│ │ ├── test_reader.py
│ │ └── two_tracks.csv
├── linkers
│ ├── __init__.py
│ ├── _euclidean_cost.py
│ ├── _linker.py
│ ├── _nn_linker.py
│ ├── _sp_linker.py
│ ├── tests
│ │ ├── __init__.py
│ │ └── test_sp_linker.py
│ └── utils.py
├── observers
│ ├── __init__.py
│ └── _observers.py
├── pipelines
│ ├── __init__.py
│ ├── _pipeline.py
│ └── tests
│ │ └── pipeline1.json
└── properties
│ ├── __init__.py
│ ├── _intensity.py
│ ├── _properties.py
│ └── tests
│ ├── __init__.py
│ └── test_intensity_properties.py
└── tox.ini
/.github/workflows/sphinx.yml:
--------------------------------------------------------------------------------
1 | name: tests
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | - main
8 | tags:
9 | - "v*" # Push events to matching v*, i.e. v1.0, v20.15.10
10 | pull_request:
11 | branches:
12 | - master
13 | - main
14 | workflow_dispatch:
15 |
16 | jobs:
17 | release_sphinx:
18 | runs-on: ubuntu-latest
19 | container:
20 | image: python:3.6
21 | steps:
22 |
23 | # check out sources that will be used for autodocs, plus readme
24 | - name: Checkout
25 | uses: actions/checkout@v2.3.1
26 |
27 | - name: Install rsync
28 | run: |
29 | apt-get update && apt-get install -y rsync
30 |
31 | # didn't need to change anything here, but had to add sphinx.ext.githubpages
32 | # to my conf.py extensions list. that fixes the broken uploads
33 | - name: Building documentation
34 | run: |
35 | pip install cython numpy pandas scikit-image scipy
36 | pip install sphinx sphinx_rtd_theme
37 | cd docs
38 | make html
39 |
40 | - name: Deploy
41 | uses: JamesIves/github-pages-deploy-action@4.1.3
42 | with:
43 | branch: gh-pages # The branch the action should deploy to.
44 | folder: docs/build/html # The folder the action should deploy.
45 |
--------------------------------------------------------------------------------
/.github/workflows/test_and_deploy.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3 |
4 | name: tests
5 |
6 | on:
7 | push:
8 | branches:
9 | - master
10 | - main
11 | tags:
12 | - "v*" # Push events to matching v*, i.e. v1.0, v20.15.10
13 | pull_request:
14 | branches:
15 | - master
16 | - main
17 | workflow_dispatch:
18 |
19 | jobs:
20 | test:
21 | name: ${{ matrix.platform }} py${{ matrix.python-version }}
22 | runs-on: ${{ matrix.platform }}
23 | strategy:
24 | matrix:
25 | platform: [ubuntu-latest, windows-latest, macos-latest]
26 | python-version: ['3.8', '3.9', '3.10']
27 |
28 | steps:
29 | - uses: actions/checkout@v2
30 |
31 | - name: Set up Python ${{ matrix.python-version }}
32 | uses: actions/setup-python@v2
33 | with:
34 | python-version: ${{ matrix.python-version }}
35 |
36 | # these libraries, along with pytest-xvfb (added in the `deps` in tox.ini),
37 | # enable testing on Qt on linux
38 | - name: Install Linux libraries
39 | if: runner.os == 'Linux'
40 | run: |
41 | sudo apt-get install -y libdbus-1-3 libxkbcommon-x11-0 libxcb-icccm4 \
42 | libxcb-image0 libxcb-keysyms1 libxcb-randr0 libxcb-render-util0 \
43 | libxcb-xinerama0 libxcb-xinput0 libxcb-xfixes0
44 |
45 | # note: if you need dependencies from conda, consider using
46 | # setup-miniconda: https://github.com/conda-incubator/setup-miniconda
47 | # and
48 | # tox-conda: https://github.com/tox-dev/tox-conda
49 | - name: Install dependencies
50 | run: |
51 | python -m pip install --upgrade pip
52 | pip install setuptools tox numpy scipy cython tox-gh-actions
53 |
54 | # this runs the platform-specific tests declared in tox.ini
55 | - name: Test with tox
56 | run: tox
57 | env:
58 | PLATFORM: ${{ matrix.platform }}
59 |
60 | - name: Coverage
61 | uses: codecov/codecov-action@v1
62 |
63 | deploy:
64 | # this will run when you have tagged a commit, starting with "v*"
65 | # and requires that you have put your twine API key in your
66 | # github secrets (see readme for details)
67 | needs: [test]
68 | runs-on: ubuntu-latest
69 | if: contains(github.ref, 'tags')
70 | steps:
71 | - uses: actions/checkout@v2
72 | - name: Set up Python
73 | uses: actions/setup-python@v2
74 | with:
75 | python-version: "3.x"
76 | - name: Install dependencies
77 | run: |
78 | python -m pip install --upgrade pip
79 | pip install numpy scipy cython
80 | pip install -U setuptools setuptools_scm wheel twine
81 | - name: Build and publish
82 | env:
83 | TWINE_USERNAME: __token__
84 | TWINE_PASSWORD: ${{ secrets.TWINE_API_KEY }}
85 | run: |
86 | git tag
87 | python setup.py sdist bdist_wheel
88 | twine upload dist/*
89 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | *.pyo
3 | *~
4 | \.#*
5 | /build
6 | /dist
7 | *.egg-info
8 | *.so
9 | __pycache__
10 | .DS_Store
11 | _build
12 | /.idea
13 | .*
--------------------------------------------------------------------------------
/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/.nojekyll
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2021, STracking
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include README.md
3 | include requirements.txt
4 | recursive-include stracking/data/ *.tif
5 |
6 | recursive-exclude * __pycache__
7 | recursive-exclude * *.py[co]
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # STracking
2 |
3 | **STracking** is a Python framework for developing particle tracking pipelines. This library has been
4 | developed to track intracellular objects in 2D+t and 3D+t microscopy images, but it can be used for any
5 | spot tracking application in 2D+t and 3D+t images.
6 |
7 | # System Requirements
8 |
9 | ## Software Requirements
10 |
11 | ### OS Requirements
12 |
13 | The `STracking` development version is tested on *Windows 10*, *macOS* and *Linux* operating systems, on the following configurations:
14 |
15 | - Linux: Ubuntu 20.04.4
16 | - macOS: Catalina 10.15.7
17 | - Windows: 10
18 |
19 | # Install
20 |
21 | ## Library installation from PyPI
22 |
23 | 1. Install an [Anaconda](https://www.anaconda.com/download/) distribution of Python -- Choose **Python 3.9** and your operating system. Note you might need to use an anaconda prompt if you did not add anaconda to the path.
24 | 2. Open an anaconda prompt / command prompt with `conda` for **python 3** in the path
25 | 3. Create a new environment with `conda create --name stracking python=3.9`.
26 | 4. To activate this new environment, run `conda activate stracking`
27 | 5. To install the `STracking` library, run `python -m pip install stracking`.
28 |
29 | If you need to update to a new release, use:
30 | ~~~sh
31 | python -m pip install stracking --upgrade
32 | ~~~
33 |
34 | ## Library installation from source
35 |
36 | This installation is for developers and people who want the latest features from the ``main`` branch.
37 |
38 | 1. Install an [Anaconda](https://www.anaconda.com/download/) distribution of Python -- Choose **Python 3.9** and your operating system. Note you might need to use an anaconda prompt if you did not add anaconda to the path.
39 | 2. Open an anaconda prompt / command prompt with `conda` for **python 3** in the path
40 | 3. Create a new environment with `conda create --name stracking python=3.9`.
41 | 4. To activate this new environment, run `conda activate stracking`
42 | 5. Pull the source code from git with `git clone https://github.com/sylvainprigent/stracking.git`
43 | 6. Then install the `STracking` library from your local directory with: `python -m pip install -e ./stracking`.
44 |
45 | ## Use STracking with napari
46 |
47 | The STracking library is embedded in a napari plugin that allows using ``STracking`` with a graphical interface.
48 | Please refer to the [`STracking` napari plugin](https://www.napari-hub.org/plugins/napari-stracking) documentation to install and use it.
49 |
50 | # STracking documentation
51 |
52 | The full documentation, with tutorials and docstrings, is available [here](https://sylvainprigent.github.io/stracking/)
53 |
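54 | # Quick example
55 |
56 | A minimal usage sketch, mirroring the quick start from the documentation:
57 |
58 | ~~~python
59 | from stracking.data import fake_tracks1
60 | from stracking.detectors import DoGDetector
61 | from stracking.linkers import SPLinker, EuclideanCost
62 |
63 | # built-in 2D+t sample image
64 | image = fake_tracks1()
65 |
66 | # detect spots frame by frame, then link them into tracks
67 | detector = DoGDetector(min_sigma=4, max_sigma=5, threshold=0.2)
68 | particles = detector.run(image)
69 | tracks = SPLinker(cost=EuclideanCost(max_cost=3000), gap=1).run(particles)
70 | ~~~
71 |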
--------------------------------------------------------------------------------
/build.rst:
--------------------------------------------------------------------------------
1 | Build locally
2 | =============
3 |
4 | do::
5 |
6 |     python3 setup.py build_ext --inplace
6 |
7 |
8 | Building from source
9 | ====================
10 |
11 | Building from source is required to work on a contribution (bug fix, new
12 | feature, code or documentation improvement).
13 |
14 | .. _git_repo:
15 |
16 | #. Use `Git <https://git-scm.com/>`_ to check out the latest source from the
17 |    `stracking repository <https://github.com/sylvainprigent/stracking>`_ on
18 |    GitHub::
19 |
20 |     git clone https://github.com/sylvainprigent/stracking.git  # add --depth 1 if your connection is slow
21 |     cd stracking
22 |
23 | If you plan on submitting a pull-request, you should clone from your fork
24 | instead.
25 |
26 | #. Install a compiler with OpenMP support for your platform.
27 |
28 | #. Optional (but recommended): create and activate a dedicated virtualenv
29 |    or conda environment.
30 |
31 | #. Install Cython and build the project with pip in editable mode::
32 |
33 | pip install cython
34 | pip install --verbose --no-build-isolation --editable .
35 |
36 | #. Check that the installed ``stracking`` is the one from your checkout::
37 |
38 |     python -c "import stracking; print(stracking.__file__)"
40 |
41 |
42 | .. note::
43 |
44 | You will have to run the ``pip install --no-build-isolation --editable .``
45 | command every time the source code of a Cython file is updated
46 | (ending in `.pyx` or `.pxd`). Use the ``--no-build-isolation`` flag to
47 | avoid compiling the whole project each time, only the files you have
48 | modified.
49 |
50 | Create a wheel
51 | ==============
52 |
53 | do::
54 |
55 | python3 setup.py bdist_wheel
56 |
57 | Testing
58 | =======
59 |
60 | run tests by running::
61 |
62 | pytest stracking
63 |
64 | or
65 |
66 | python3 -m pytest stracking
67 |
68 |
69 | Profiling
70 | =========
71 |
72 | do::
73 |
74 |     python -m cProfile -o out_profile script_name.py
75 |     cprofilev -f out_profile
74 |
75 | Build documentation
76 | ===================
77 |
78 | without example gallery::
79 |
80 |     cd docs
81 |     make
82 |
83 | with the example gallery (may take a while)::
84 |
85 |     cd docs
86 |     make html
87 |
88 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | build/
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/source/_static/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/_static/.gitkeep
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 | sys.path.insert(0, os.path.abspath('../../'))
16 |
17 |
18 | # -- Project information -----------------------------------------------------
19 |
20 | project = 'STracking'
21 | copyright = '2020, STracking'
22 | author = 'Sylvain Prigent, Cesar-Augusto Valades Cruz'
23 |
24 | # The full version, including alpha/beta/rc tags
25 | release = '0.1.9'
26 |
27 |
28 | # -- General configuration ---------------------------------------------------
29 |
30 | # Add any Sphinx extension module names here, as strings. They can be
31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
32 | # ones.
33 | extensions = []
34 | extensions.append('sphinx.ext.autodoc')
35 | extensions.append('sphinx.ext.napoleon')
36 | extensions.append('sphinx.ext.coverage')
37 |
38 | # Add any paths that contain templates here, relative to this directory.
39 | templates_path = ['_templates']
40 |
41 | # List of patterns, relative to source directory, that match files and
42 | # directories to ignore when looking for source files.
43 | # This pattern also affects html_static_path and html_extra_path.
44 | exclude_patterns = []
45 |
46 |
47 | # -- Options for HTML output -------------------------------------------------
48 |
49 | # The theme to use for HTML and HTML Help pages. See the documentation for
50 | # a list of builtin themes.
51 | #
52 | html_theme = 'sphinx_rtd_theme'
53 |
54 | # Add any paths that contain custom static files (such as style sheets) here,
55 | # relative to this directory. They are copied after the builtin static files,
56 | # so a file named "default.css" will overwrite the builtin "default.css".
57 | html_static_path = ['_static']
58 |
--------------------------------------------------------------------------------
/docs/source/guide.rst:
--------------------------------------------------------------------------------
1 | Guide
2 | =====
3 |
4 | **STracking** is a Python framework for developing particle tracking pipelines. This library has been developed to track
5 | intracellular objects in 2D+t and 3D+t microscopy images, but it can be used for any spot tracking application in
6 | 2D+t and 3D+t images.
7 |
8 | A particle tracking pipeline is decomposed into sequential steps. First, the particles are **detected** individually and
9 | independently in each time frame of the image. Then a **linker** algorithm links the particles between frames to
10 | form the tracks. Next, the **properties** of the particles (size, intensity...) and the **features** of the
11 | tracks (length, distance...) are computed for analysis. The final step is track **filtering**, which uses the properties and
12 | features to select the tracks of interest.
13 |
14 |
15 | Library components
16 | ------------------
17 | The **STracking** library provides one module per step of the pipeline, plus one module for data containers and one for pipelines:
18 |
19 | * **Containers**: ``SParticles`` and ``STracks`` containers based on ``napari`` points and track layer data structures to store particles and tracks
20 | * **Detectors**: define a detector interface and implementations of particle detection algorithm for 2D and 3D image sequences
21 | * **Linkers**: define a linker interface and implementation of particle linkers (or trackers) for 2D and 3D image sequences
22 | * **Properties**: define an interface and implementations of algorithms to measure properties of particles (intensity...)
23 | * **Features**: define an interface and implementations of algorithms to measure track features (length, displacement...)
24 | * **Filters**: define an interface and implementations of algorithms to select tracks
25 | * **Pipeline**: define a class to run a STracking pipeline described in a JSON file
26 |
27 | Containers
28 | ~~~~~~~~~~
29 |
30 | The containers module has two classes ``SParticles`` and ``STracks`` to facilitate the management of the *particles* and *tracks*
31 | data and metadata. The containers have been designed to be compatible with the napari layers data structures.
32 |
33 | A ``SParticles`` object contains a list of particles and their metadata in a 2D+t or 3D+t image. It contains 3 attributes:
34 |
35 | data : array (N, D)
36 |     Coordinates for N points in D dimensions: T, (Z), Y, X. D is
37 |     either 3 or 4 for planar or volumetric time series respectively.
39 | properties : dict {str: array (N,)}, DataFrame
40 | Properties for each point. Each property should be an array of length N,
41 | where N is the number of points.
42 | scale : tuple of float
43 | Scale factors for the image data.
44 |
45 | A ``STracks`` object contains a list of tracks and their metadata in a 2D+t or 3D+t image. It contains 5 attributes:
46 |
47 | data : array (N, D+1)
48 | Coordinates for N points in D+1 dimensions. ID,T,(Z),Y,X. The first
49 | axis is the integer ID of the track. D is either 3 or 4 for planar
50 | or volumetric time series respectively.
51 | properties : dict {str: array (N,)}, DataFrame
52 | Properties for each point. Each property should be an array of length N,
53 | where N is the number of points.
54 | graph : dict {int: list}
55 | Graph representing associations between tracks. Dictionary defines the
56 | mapping between a track ID and the parents of the track. This can be
57 | one (the track has one parent, and the parent has >=1 child) in the
58 | case of track splitting, or more than one (the track has multiple
59 | parents, but only one child) in the case of track merging.
60 | See examples/tracks_3d_with_graph.py
61 | features : dict {str: dict}
62 |     Features for each track. Each feature is a map of
63 |     trackID=value, e.g. features['length'][12] = 25.2
64 | scale : tuple of float
65 | Scale factors for the image data.
66 |
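67 | As a concrete illustration, here is a minimal sketch that builds both containers by hand (the
68 | coordinate values are made up for the example):
69 |
70 | .. code-block:: python
71 |
72 |     import numpy as np
73 |     from stracking.containers import SParticles, STracks
74 |
75 |     # three detections in a 2D+t image: columns are T, Y, X
76 |     spots = np.array([[0, 10.0, 12.0],
77 |                       [1, 11.0, 12.5],
78 |                       [2, 12.0, 13.0]])
79 |     particles = SParticles(data=spots, properties={}, scale=(1.0, 1.0, 1.0))
80 |
81 |     # one track (ID 0) linking the three detections: columns are ID, T, Y, X
82 |     track_data = np.column_stack((np.zeros(3), spots))
83 |     # a split could be encoded as graph={1: [0], 2: [0]}: tracks 1 and 2 share parent 0
84 |     tracks = STracks(data=track_data, properties=None, graph={},
85 |                      scale=(1.0, 1.0, 1.0))
86 |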
67 |
68 | Detectors
69 | ~~~~~~~~~
70 |
71 | All detectors implement the ``SDetector`` interface. They have a ``run`` method that takes a numpy array (2D+t or 3D+t image) as
72 | input and returns the detections in a ``SParticles`` object. The parameters of a detector are passed to its constructor.
73 |
74 | .. code-block:: python
75 |
76 | from stracking.detectors import DoGDetector
77 | ...
78 | detector = DoGDetector(min_sigma=4, max_sigma=5, threshold=0.2)
79 | particles = detector.run(image)
80 | ...
81 |
82 |
83 | To create a new detector, developers just need to inherit ``SDetector``:
84 |
85 | .. code-block:: python
86 |
87 | from stracking.detectors import SDetector
88 |
89 | class MyDetector(SDetector):
90 | def __init__(self):
91 | super().__init__()
92 |
93 | def run(self, image, scale=None):
94 | # Implement the detector here
95 | spots_ = ...
96 | return SParticles(data=spots_, properties={}, scale=scale)
97 |
98 |
99 | Linkers
100 | ~~~~~~~
101 |
102 | All linkers implement the ``SLinker`` interface. They have a ``run`` method that takes the detections (in a ``SParticles``
103 | object) and optionally a numpy array (the 2D+t or 3D+t image), and returns the calculated tracks in a ``STracks`` object.
104 | The parameters of a linker are passed to its constructor. For example, the ``SPLinker`` (Shortest Path) linker needs a
105 | cost function and a frame gap parameter:
106 |
107 | .. code-block:: python
108 |
109 | from stracking.linkers import SPLinker, EuclideanCost
110 | ...
111 | euclidean_cost = EuclideanCost(max_cost=3000)
112 | my_tracker = SPLinker(cost=euclidean_cost, gap=1)
113 | tracks = my_tracker.run(particles)
114 | ...
115 |
116 |
117 | To create a new linker, developers just need to inherit ``SLinker``:
118 |
119 | .. code-block:: python
120 |
121 |     from stracking.linkers import SLinker
122 |
123 | class MyLinker(SLinker):
124 | def __init__(self, cost=None):
125 | super().__init__(cost)
126 |
127 | def run(self, particles, image=None):
128 | # Implement the linker here
129 | mydata = ...
130 | return STracks(data=mydata, properties=None, graph={}, scale=particles.scale)
131 |
132 | Properties
133 | ~~~~~~~~~~~
134 |
135 | All properties implement the ``SProperty`` interface. They have a ``run`` method that takes the detections (in a ``SParticles``
136 | object) and a numpy array (the 2D+t or 3D+t image), and returns the input ``SParticles`` where the calculated properties have been added
137 | to the ``SParticles.properties`` dictionary. All the ``SProperty`` parameters are passed to the constructor. Here is an
138 | example with the ``IntensityProperty`` algorithm, which calculates the `min`, `max`, `mean` and `std` intensities inside the spots using a
139 | given radius:
140 |
141 | .. code-block:: python
142 |
143 | from stracking.properties import IntensityProperty
144 | ...
145 | property_calc = IntensityProperty(radius=2)
146 | property_calc.run(particles, image)
147 | ...
148 |
149 |
150 | To create a new property, developers just need to inherit ``SProperty``:
151 |
152 | .. code-block:: python
153 |
154 | from stracking.properties import SProperty
155 |
156 | class MyProperty(SProperty):
157 |         def __init__(self, radius):
158 |             super().__init__()
159 |             self.radius = radius
159 |
160 | def run(self, sparticles, image):
161 | # Calculate here some properties and add them to sparticles.properties
162 | ...
163 | return sparticles
164 |
165 | Features
166 | ~~~~~~~~
167 |
168 | All features implement the ``SFeature`` interface. They have a ``run`` method that takes the tracks (in a ``STracks``
169 | object) and optionally a numpy array (the 2D+t or 3D+t image), and returns the input ``STracks`` object where the calculated
170 | features have been added to the ``STracks.features`` dictionary. Here is an example with the ``DistanceFeature``, which calculates
171 | the distance a particle moved:
172 |
173 | .. code-block:: python
174 |
175 |     from stracking.features import DistanceFeature
176 | ...
177 | feature_calc = DistanceFeature()
178 | feature_calc.run(tracks)
179 | ...
180 |
181 | To create a new feature, developers just need to inherit ``SFeature``:
182 |
183 | .. code-block:: python
184 |
185 | from stracking.features import SFeature
186 |
187 | class MyFeature(SFeature):
188 | def __init__(self):
189 | super().__init__()
190 |
191 | def run(self, stracks, image=None):
192 | # Calculate here some features and add them to stracks.features
193 | ...
194 | return stracks
195 |
196 | Filters
197 | ~~~~~~~
198 |
199 | All filters implement the ``SFilter`` interface. They have a ``run`` method that takes the tracks (in a ``STracks``
200 | object) as input and returns a tracks object from which the filtered-out tracks have been removed:
201 |
202 | .. code-block:: python
203 |
204 | from stracking.filters import FeatureFilter
205 | ...
206 |     filter_calc = FeatureFilter(feature_name='distance', min_val=20, max_val=120)
207 |     tracks = filter_calc.run(tracks)
208 |
209 |
210 | To create a new filter, developers just need to inherit ``SFilter``:
211 |
212 | .. code-block:: python
213 |
214 | from stracking.filters import SFilter
215 |
216 |     class MyFilter(SFilter):
217 | def __init__(self):
218 | super().__init__()
219 |
220 | def run(self, stracks):
221 | # Implement here the algorithm to select some tracks
222 | new_stracks = ...
223 | return new_stracks
224 |
225 |
226 | Read and Write
227 | --------------
228 |
229 | The **STracking** library provides an extra module called **io**. It can read tracks from many formats (JSON, CSV,
230 | Icy xml, ISBI xml, TrackMate xml...) and write tracks in the JSON format.
231 | To read a file, you can use the convenience function ``read_tracks``, which takes the path of an input file and returns a ``STracks``
232 | object:
233 |
234 | .. code-block:: python
235 |
236 | from stracking.io import read_tracks
237 |     tracks = read_tracks('path/to/the/tracks/file.xml')
238 |
239 | Alternatively, you can call the IO class dedicated to the format. The read tracks are then available in the ``stracks``
240 | attribute of the IO object.
241 |
242 | .. code-block:: python
243 |
244 | from stracking.io import TrackMateIO
245 |
246 | trackmate_reader = TrackMateIO('path/to/the/trackmate/model/file.xml')
247 | trackmate_reader.read()
248 | print(trackmate_reader.stracks.data)
249 |
250 |
251 | To write ``STracks`` into a file, the current version of **STracking** only supports the *JSON* format via the native
252 | **stracking** ``StIO`` class:
253 |
254 | .. code-block:: python
255 |
256 | from stracking.io import StIO
257 | ...
258 | writer = StIO('path/to/the/tracks/file.json')
259 | writer.write(mytracks)
260 | ...
261 |
262 | A more convenient alternative is the ``write_tracks`` function:
263 |
264 | .. code-block:: python
265 |
266 | from stracking.io import write_tracks
267 | ...
268 | write_tracks('path/to/the/tracks/file.json', mytracks)
269 | ...
270 |
271 | It is also possible to save the particles in a file. The supported format is a CSV file where each column is a particle property.
272 | Mandatory properties are the 'T', 'Y', 'X' coordinates for 2D+t particles and the 'T', 'Z', 'Y', 'X' coordinates for 3D+t particles.
273 | To write particles to a file you can use the ``write_particles`` function:
274 |
275 | .. code-block:: python
275 |
276 | from stracking.io import write_particles
277 | ...
278 | write_particles('path/to/the/tracks/file.csv', particles)
279 | ...
280 |
281 | And to read particles, the ``read_particles`` function:
282 |
283 | .. code-block:: python
284 |
285 | from stracking.io import read_particles
286 | ...
287 |     particles = read_particles('path/to/the/tracks/file.csv')
288 | ...
289 |
290 |
291 |
292 | Pipeline
293 | --------
294 |
295 | Writing a tracking pipeline with **STracking** is straightforward. You just need to call the different modules in a sequence:
296 |
297 | .. code-block:: python
298 |
299 | from stracking.data import fake_tracks1
300 | from stracking.detectors import DoGDetector
301 | from stracking.linkers import SPLinker, EuclideanCost
302 | from stracking.features import DistanceFeature
303 | from stracking.filters import FeatureFilter
304 | from stracking.io import write_tracks
305 | import napari
306 |
307 | # Load data
308 | image = fake_tracks1()
309 |
310 | # Open napari
311 | viewer = napari.Viewer(axis_labels='tyx')
312 | viewer.add_image(image, contrast_limits=[0, 300])
313 |
314 | # Detection
315 | detector = DoGDetector(min_sigma=3, max_sigma=5, threshold=0.2)
316 | particles = detector.run(image)
317 |
318 | # Display spots
319 | viewer.add_points(particles.data, size=4, face_color="red", edge_color="red", blending='opaque')
320 |
321 | # Linking
322 | euclidean_cost = EuclideanCost(max_cost=3000)
323 | my_tracker = SPLinker(cost=euclidean_cost, gap=1)
324 | tracks = my_tracker.run(particles)
325 |
326 | # Display tracks
327 | viewer.add_tracks(tracks.data, name='Tracks', colormap="hsv")
328 |
329 | # Calculate distance feature
330 | feature_calc = DistanceFeature()
331 | feature_calc.run(tracks)
332 |
333 |     # Keep only tracks that move between 20 and 60 pixels
334 | filter_calc = FeatureFilter(feature_name='distance', min_val=20, max_val=60)
335 | filter_calc.run(tracks)
336 |
337 | # Display filtered tracks
338 | viewer.add_tracks(tracks.data, name='Filtered Tracks',colormap="hsv")
339 | napari.run()
340 |
341 | # Save the tracks
342 | write_tracks('path/to/the/tracks/file.json', tracks)
343 |
344 |
345 | The STracking library also provides a ``STrackingPipeline`` class that runs a tracking pipeline described in
346 | a pipeline description file (JSON format):
347 |
348 | .. code-block:: json
349 |
350 | {
351 | "name": "pipeline1",
352 | "author": "Sylvain Prigent",
353 | "date": "2022-04-13",
354 | "stracking_version": "0.1.8",
355 | "steps": {
356 | "detector": {
357 | "name": "DoGDetector",
358 | "parameters": {
359 | "min_sigma": 4,
360 | "max_sigma": 5,
361 | "sigma_ratio": 1.1,
362 | "threshold": 0.15,
363 | "overlap": 0
364 | }
365 | },
366 | "linker": {
367 | "name": "SPLinker",
368 | "cost": {
369 | "name": "EuclideanCost",
370 | "parameters": {}
371 | },
372 | "parameters": {
373 | "gap": 1,
374 | "min_track_length": 2
375 | }
376 | },
377 | "properties": [
378 | {
379 | "name": "IntensityProperty",
380 | "parameters": {
381 | "radius": 2.5
382 | }
383 | }
384 | ],
385 | "features": [
386 | {
387 | "name": "LengthFeature"
388 | },
389 | {
390 | "name": "DistanceFeature"
391 | },
392 | {
393 | "name": "DisplacementFeature"
394 | }
395 | ],
396 | "filters": [
397 | {
398 | "name": "FeatureFilter",
399 | "parameters": {
400 | "feature_name": "distance",
401 | "min_val": 20,
402 | "max_val": 60
403 | }
404 | }
405 | ]
406 | }
407 | }
408 |
409 | Then, a pipeline can be run with the ``STrackingPipeline`` class:
410 |
411 | .. code-block:: python
412 |
413 | from stracking.data import fake_tracks1
414 | from stracking.io import write_tracks
415 | from stracking.pipelines import STrackingPipeline
416 | import napari
417 |
418 | # Load data
419 | image = fake_tracks1()
420 |
421 | # Run pipeline
422 | pipeline = STrackingPipeline()
423 | pipeline.load('path/to/the/pipeline.json')
424 | tracks = pipeline.run(image)
425 |
426 | # display
427 | viewer = napari.Viewer(axis_labels='tyx')
428 | viewer.add_image(image, contrast_limits=[0, 300])
429 | viewer.add_tracks(tracks.data, name='Pipeline Tracks',colormap="hsv")
430 | napari.run()
431 |
432 | # save
433 | write_tracks('pipeline_tracks.csv', tracks, format_='csv')
434 |
--------------------------------------------------------------------------------
/docs/source/images/output_Example1_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/images/output_Example1_1.png
--------------------------------------------------------------------------------
/docs/source/images/output_Example1_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/images/output_Example1_2.png
--------------------------------------------------------------------------------
/docs/source/images/output_Example1_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/images/output_Example1_3.png
--------------------------------------------------------------------------------
/docs/source/images/output_Example1_4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/images/output_Example1_4.png
--------------------------------------------------------------------------------
/docs/source/images/output_Example1_5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/images/output_Example1_5.png
--------------------------------------------------------------------------------
/docs/source/images/output_Example1_6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/images/output_Example1_6.png
--------------------------------------------------------------------------------
/docs/source/images/output_Example2_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/images/output_Example2_1.png
--------------------------------------------------------------------------------
/docs/source/images/output_Example2_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/images/output_Example2_2.png
--------------------------------------------------------------------------------
/docs/source/images/output_Example2_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/docs/source/images/output_Example2_3.png
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. STracking documentation master file, created by
2 | sphinx-quickstart on Mon May 18 17:15:17 2020.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to STracking's documentation!
7 | ==========================================
8 |
9 | .. toctree::
10 | :maxdepth: 2
11 | :caption: Contents:
12 |
13 | intro
14 | install
15 | quickstart
16 | guide
17 | stracking
18 | stardist_stracking
19 | modules
20 |
21 |
22 | Indices and tables
23 | ==================
24 |
25 | * :ref:`genindex`
26 | * :ref:`modindex`
27 | * :ref:`search`
28 |
--------------------------------------------------------------------------------
/docs/source/install.rst:
--------------------------------------------------------------------------------
1 | Install
2 | =======
3 |
4 | This section contains the instructions to install ``STracking``
5 |
6 | Using PyPI
7 | ----------
8 |
9 | Releases are available in the PyPI repository. We recommend using a virtual environment:
10 |
11 | .. code-block:: shell
12 |
13 | python -m venv .stracking-env
14 | source .stracking-env/bin/activate
15 | pip install stracking
16 |
17 |
18 | From source
19 | -----------
20 |
21 | If you plan to develop ``STracking``, we recommend installing it locally in editable mode:
22 |
23 | .. code-block:: shell
24 |
25 | python -m venv .stracking-env
26 | source .stracking-env/bin/activate
27 | git clone https://github.com/sylvainprigent/stracking.git
28 | cd stracking
29 | pip install -e .
30 |
--------------------------------------------------------------------------------
/docs/source/intro.rst:
--------------------------------------------------------------------------------
1 | Introduction
2 | ============
3 |
4 | STracking is a library for particle tracking in scientific imaging.
5 |
6 | Context
7 | -------
8 | STracking has been developed in the Serpico research team. The goal is to provide a
9 | modular library to track particles in 2D+t and 3D+t microscopy images. A typical application for the team is 3D+t
10 | endosome tracking with lattice light-sheet microscopy.
11 |
12 | Library components
13 | ------------------
14 | STracking is written in Python 3 and uses the scipy library for data structures. The STracking library is organized as a
15 | scikit-style library and provides a module for each particle tracking step:
16 |
17 | * **Containers**: ``SParticles`` and ``STracks`` containers based on ``Napari`` points and track layer data structures to store particles and tracks
18 | * **Detectors**: define a detector interface and implementations of particle detection algorithm for 2D and 3D image sequences
19 | * **Linkers**: define a linker interface and implementation of particle linkers (or trackers) for 2D and 3D image sequences
20 | * **Properties**: define an interface and implementations of algorithms to measure properties of particles (intensity...)
21 | * **Features**: define an interface and implementations of algorithms to measure track features (length, displacement...)
22 | * **Filters**: define an interface and implementations of algorithms to select tracks
23 |
--------------------------------------------------------------------------------
/docs/source/modules.rst:
--------------------------------------------------------------------------------
1 | Available tools
2 | ===============
3 |
4 | Containers
5 | ----------
6 |
7 | .. automodule:: stracking.containers
8 | :members:
9 |
10 | Detectors
11 | ---------
12 |
13 | .. automodule:: stracking.detectors
14 | :members:
15 |
16 | Properties
17 | ----------
18 |
19 | .. automodule:: stracking.properties
20 | :members:
21 |
22 | Linkers
23 | -------
24 |
25 | .. automodule:: stracking.linkers
26 | :members:
27 |
28 | Features
29 | --------
30 |
31 | .. automodule:: stracking.features
32 | :members:
33 |
34 | Filters
35 | -------
36 |
37 | .. automodule:: stracking.filters
38 | :members:
39 |
40 | IO
41 | --
42 |
43 | .. automodule:: stracking.io
44 | :members:
--------------------------------------------------------------------------------
/docs/source/quickstart.rst:
--------------------------------------------------------------------------------
1 | Quick start
2 | ===========
3 |
4 | This is a quick start example of how to use the **STracking** library. This section assumes that you know the principles
5 | of particle tracking. If that is not the case, please refer to the
6 | Guide section.
7 |
8 | Input images
9 | ------------
10 | Input images are 2D+t or 3D+t grayscale images. 2D+t images are represented as numpy arrays with the
11 | ``[T, Y, X]`` axis ordering, and 3D+t images are represented as numpy arrays with the ``[T, Z, Y, X]`` axis ordering.
12 |
13 | First we load the images:
14 |
15 | .. code-block:: python3
16 |
17 | from stracking import data
18 |
19 | image = data.fake_tracks1()
20 |
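21 | Since ``fake_tracks1`` is a 2D+t sample, the first axis of the loaded array is time:
22 |
23 | .. code-block:: python3
24 |
25 |     print(image.shape)  # (T, Y, X) ordering for a 2D+t image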
21 |
22 | Particles detections
23 | --------------------
24 | The first step of particle tracking is to detect individual particles frame by frame.
25 | **STracking** provides the ``SDetector`` interface for particle detectors. In this example we detect particles with the
26 | *Difference of Gaussians* detector:
27 |
28 | .. code-block:: python3
29 |
30 | from stracking.detectors import DoGDetector
31 |
32 | detector = DoGDetector(min_sigma=4, max_sigma=5, threshold=0.2)
33 | particles = detector.run(image)
34 |
35 |
36 | The output ``particles`` is an instance of the ``SParticles`` container. It contains the list of particles as a numpy
37 | array, the properties of the particles as a *dict* and the image scale as a *tuple*.
38 |
39 | Particles linking
40 | -----------------
41 | The second step is linking the particles to create tracks.
42 | **STracking** provides the ``SLinker`` interface to implement multiple linking algorithms. In this quick start, we use the
43 | *Shortest path* graph-based linker, with the Euclidean distance between particles as the link cost function:
44 |
45 | .. code-block:: python3
46 |
47 | from stracking.linkers import SPLinker, EuclideanCost
48 |
49 | euclidean_cost = EuclideanCost(max_cost=3000)
50 | my_tracker = SPLinker(cost=euclidean_cost, gap=1)
51 | tracks = my_tracker.run(particles)
52 |
53 |
54 | The output ``tracks`` is an instance of the ``STracks`` container. It contains the list of tracks as a numpy array and
55 | all the tracks metadata in dictionaries.
56 |
57 | The next steps show the usage of ``SProperty``, ``SFeature`` and ``SFilter`` to analyse the trajectories.
58 |
59 | Particles properties
60 | --------------------
61 | The properties module allows calculating properties of the particles. This quickstart example
62 | shows how to calculate the intensity properties of particles:
63 |
64 | .. code-block:: python3
65 |
66 | from stracking.properties import IntensityProperty
67 |
68 | property_calc = IntensityProperty(radius=2)
69 | property_calc.run(particles, image)
70 |
71 | All the calculated properties are saved in the ``properties`` attribute of the ``SParticles`` container.
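72 |
73 | For instance, the ``mean_intensity`` property computed by ``IntensityProperty`` can be read back
74 | directly (a short sketch; each property is an array with one value per particle):
75 |
76 | .. code-block:: python3
77 |
78 |     mean_intensity = particles.properties['mean_intensity']
79 |     print(mean_intensity)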
72 |
73 | Tracks features
74 | ---------------
75 | The tracks features module allows calculating features of tracks such as length and distance. This quickstart example shows how
76 | to calculate the distance of tracks:
77 |
78 | .. code-block:: python3
79 |
80 | from stracking.features import DistanceFeature
81 |
82 | feature_calc = DistanceFeature()
83 | feature_calc.run(tracks)
84 |
85 | The calculated features are stored in the ``features`` attribute of the ``STracks`` container.
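86 |
87 | Each feature is stored as a dictionary mapping a track ID to a value, so the distances can be read
88 | back with a short sketch like this:
89 |
90 | .. code-block:: python3
91 |
92 |     for track_id, distance in tracks.features['distance'].items():
93 |         print(track_id, distance)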
86 |
87 | Tracks filter
88 | -------------
89 | The last part is the filter module. It allows extracting a subset of tracks based on a defined criterion. In this example, we select the tracks that move less than a distance of 60 pixels:
90 |
91 | .. code-block:: python3
92 |
93 | from stracking.filters import FeatureFilter
94 |
95 |     f_filter = FeatureFilter(feature_name='distance', min_val=0, max_val=60)
96 |     filtered_tracks = f_filter.run(tracks)
97 |
98 | The filtered set of tracks is returned as a ``STracks`` object.
99 |
--------------------------------------------------------------------------------
/docs/source/stardist_stracking.rst:
--------------------------------------------------------------------------------
1 | Example 2: StarDist (Detection) + Stracking (Tracker)
2 | =====================================================
3 |
4 | This example shows a combination of StarDist Detection and Stracking
5 | (Tracker)
6 |
7 | Load trained model
8 | ------------------
9 |
10 | .. code-block:: python3
11 |
12 | from stardist.models import StarDist2D
13 |
14 | # prints a list of available models
15 | StarDist2D.from_pretrained()
16 |
17 | # creates a pretrained model
18 | model = StarDist2D.from_pretrained('2D_versatile_fluo')
19 |
20 |
21 | .. parsed-literal::
22 |
23 | There are 4 registered models for 'StarDist2D':
24 |
25 | Name Alias(es)
26 | ──── ─────────
27 | '2D_versatile_fluo' 'Versatile (fluorescent nuclei)'
28 | '2D_versatile_he' 'Versatile (H&E nuclei)'
29 | '2D_paper_dsb2018' 'DSB 2018 (from StarDist 2D paper)'
30 | '2D_demo' None
31 | Found model '2D_versatile_fluo' for 'StarDist2D'.
32 | Loading network weights from 'weights_best.h5'.
33 | Loading thresholds from 'thresholds.json'.
34 | Using default values: prob_thresh=0.479071, nms_thresh=0.3.
35 |
36 |
37 | StarDist: Prediction and detection
38 | ----------------------------------
39 |
40 | .. code-block:: python3
41 |
42 | from csbdeep.utils import normalize
43 | import matplotlib.pyplot as plt
44 | from tifffile import imread
45 | import numpy as np
46 | from stracking.detectors import SSegDetector
47 |
48 | folder=""
49 | filename="P31-crop2.tif"
50 |
51 |     img = imread(folder + filename)
52 |
53 |     labels = np.zeros(img.shape)
54 |
55 |     img = normalize(img)
56 |
57 |
58 | for i in range(img.shape[0]):
59 | labels[i,:,:],details = model.predict_instances(img[i,:,:])
60 |
61 |     # is_mask=False because the input is a label image, not a binary mask
62 |     sdetector = SSegDetector(is_mask=False)
62 | particles = sdetector.run(labels)
63 |
64 |
65 | Create an empty napari viewer
66 | -----------------------------
67 |
68 | .. code-block:: python3
69 |
70 | %gui qt
71 |
72 | import napari
73 | from napari.utils import nbscreenshot
74 | viewer = napari.Viewer(axis_labels='tyx')
75 |
76 | Display Input and StarDist Prediction
77 | -------------------------------------
78 |
79 | .. code-block:: python3
80 |
81 | viewer.add_image(img, name='Input', multiscale=False,
82 | contrast_limits=[0, 3], colormap='gray',blending='additive');
83 |
84 | viewer.add_image(labels, name='Predictions StarDist', multiscale=False,
85 | colormap='gist_earth',blending='additive',opacity=0.2);
86 |
87 | nbscreenshot(viewer)
88 |
89 |
90 | .. image:: images/output_Example2_1.png
91 | :width: 600
92 |
93 |
94 | Display spots from StarDist
95 | ---------------------------
96 |
97 | .. code-block:: python3
98 |
99 | viewer.add_points(particles.data, size=5, blending='additive')
100 | nbscreenshot(viewer)
101 |
102 |
103 |
104 |
105 | .. image:: images/output_Example2_2.png
106 | :width: 600
107 |
108 |
109 |
110 | Linker
111 | ------
112 |
113 | Shortest path tracking with Euclidean cost
114 |
115 | .. code-block:: python3
116 |
117 | from stracking.linkers import SPLinker, EuclideanCost
118 |
119 |     euclidean_cost = EuclideanCost(max_cost=100)
120 |     my_tracker = SPLinker(cost=euclidean_cost, gap=1)
121 |     tracks = my_tracker.run(particles)
122 |
123 |
124 |
125 | .. parsed-literal::
126 |
127 | detections shape= (2652, 3)
128 | num frames= 30
129 | cost= 60244.0
130 | self.cost.max_cost= 100
131 | cost= 256.0
132 | self.cost.max_cost= 100
133 | cost= 16.0
134 | self.cost.max_cost= 100
135 | cost= 11236.0
136 | self.cost.max_cost= 100
137 | extract track...
138 | dim in track to path= 2
139 | add predecessor...
140 | add predecessor...
141 | add predecessor...
142 |
143 |
144 |
145 | Display tracks
146 | --------------
147 |
148 | .. code-block:: python3
149 |
150 | viewer.add_tracks(tracks.data, name='Tracks')
151 | nbscreenshot(viewer)
152 |
153 |
154 | .. image:: images/output_Example2_3.png
155 | :width: 600
156 |
157 |
158 |
159 |
160 |
--------------------------------------------------------------------------------
/docs/source/stracking.rst:
--------------------------------------------------------------------------------
1 | Example 1: Stracking workflow
2 | =============================
3 |
4 | This example shows a full STracking workflow on a 3D+t image, starting with particle detection
5 | using the LoG detector.
6 |
7 | Load data
8 | ---------
9 |
10 | .. code-block:: python3
11 |
12 | from stracking import data
13 | from tifffile import imread
14 |
15 | folder=""
16 |
17 | filenamePKMR="PKMR_10timepoints_crop.tif"
18 | filenamePMDR="PMDR_10timepoints_crop.tif"
19 |     PKMRimg = imread(folder + filenamePKMR)
20 |     PMDRimg = imread(folder + filenamePMDR)
21 |
22 | Create an empty napari viewer
23 | -----------------------------
24 |
25 | .. code-block:: python3
26 |
27 | %gui qt
28 |
29 | import napari
30 | from napari.utils import nbscreenshot
31 | viewer = napari.Viewer(axis_labels='tzyx')
32 |
33 | Display volumetric timeseries
34 | -----------------------------
35 |
36 | .. code-block:: python3
37 |
38 | viewer.add_image(PKMRimg, name='PKMR', multiscale=False, scale=[4.3,0.316,0.104,0.104],
39 | contrast_limits=[10, 600], colormap='magenta',blending='additive');
40 |
41 | viewer.add_image(PMDRimg, name='PMDR', multiscale=False, scale=[4.3,0.316,0.104,0.104],
42 | contrast_limits=[10, 1_000], colormap='green',blending='additive',gamma=0.8);
43 |
44 | viewer.dims.ndisplay = 3
45 | viewer.scale_bar.visible='true'
46 | viewer.scale_bar.unit='um'
47 | nbscreenshot(viewer)
48 |
49 | .. image:: images/output_Example1_1.png
50 | :width: 600
51 |
52 | LoG 3D+t detection
53 | ------------------
54 |
55 | .. code-block:: python3
56 |
57 | from stracking.detectors import LoGDetector
58 |
59 | detector = LoGDetector(min_sigma=3, max_sigma=5, num_sigma=3, threshold=0.001)
60 | particles = detector.run(PMDRimg)
61 |
62 |
63 | Display spots
64 | ~~~~~~~~~~~~~
65 |
66 | .. code-block:: python3
67 |
68 | viewer.add_points(particles.data, size=5, shading='spherical',scale=[4.3,0.316,0.104,0.104],blending='additive')
69 | nbscreenshot(viewer)
70 |
71 | .. image:: images/output_Example1_2.png
72 | :width: 600
73 |
74 | Spots properties
75 | ----------------
76 |
77 | .. code-block:: python3
78 |
79 | from stracking.properties import IntensityProperty
80 |
81 | property_calc = IntensityProperty(radius=2.5)
82 | property_calc.run(particles,PKMRimg)
83 | y=particles.properties['mean_intensity']
84 | particlesch1=particles
85 | property_calc = IntensityProperty(radius=2.5)
86 | property_calc.run(particles,PMDRimg)
87 | x=particles.properties['mean_intensity']
88 |
89 | Spots statistics
90 | ~~~~~~~~~~~~~~~~
91 |
92 | .. code-block:: python3
93 |
94 | import matplotlib.pyplot as plt
95 | import numpy as np
96 | from matplotlib import colors
97 | from matplotlib.ticker import PercentFormatter
98 | import seaborn as sns
99 |
100 | plt.style.use('_mpl-gallery')
101 | fig, axs = plt.subplots(1, 2, figsize=(15, 4), sharey=False)
102 | axs[0].set_title("Mean Intensity [a.u.]",fontsize=14)
103 | axs[0].set_ylabel("Mitochondria Intensity",fontsize=14)
104 | axs[0].set_xlabel("Endosomes Intensity",fontsize=14)
105 | axs[0].scatter(x,y,alpha=0.5,color='r');
106 | axs[0].tick_params(axis='both', labelsize=14)
107 | sns.set_style("whitegrid")
108 | axs[1] = sns.swarmplot(y=x,alpha=0.9,size=3)
109 | axs[1].set_ylabel("Endosomes Intensity",fontsize=14)
110 | axs[1].set_xlabel("PMDR",fontsize=14)
111 | axs[1].set_title("Endosomes Intensity [a.u.]",fontsize=14)
112 | axs[1].tick_params(axis='both',labelsize=14)
113 |
114 | .. image:: images/output_Example1_3.png
115 | :width: 600
116 |
117 | Tracker
118 | -------
119 |
120 | Shortest path tracking with Euclidean cost
121 |
122 | .. code-block:: python3
123 |
124 | from stracking.linkers import SPLinker, EuclideanCost
125 |
126 | euclidean_cost = EuclideanCost(max_cost=225)
127 | my_tracker = SPLinker(cost=euclidean_cost, gap=1)
128 | tracks = my_tracker.run(particles)
129 |
130 |
131 | .. parsed-literal::
132 |
133 | detections shape= (564, 4)
134 | num frames= 10
135 | cost= 1.0
136 | self.cost.max_cost= 225
137 | cost= 2819.0
138 | self.cost.max_cost= 225
139 | cost= 5433.0
140 | self.cost.max_cost= 225
141 | cost= 8438.0
142 | self.cost.max_cost= 225
143 | extract track...
144 | dim in track to path= 3
145 | add predecessor...
146 | add predecessor...
147 |
148 |
149 | Display tracks
150 | ~~~~~~~~~~~~~~
151 |
152 | .. code-block:: python3
153 |
154 | viewer.add_tracks(tracks.data, name='Tracks',scale=[4.3,0.316,0.104,0.104])
155 | nbscreenshot(viewer)
156 |
157 | .. image:: images/output_Example1_4.png
158 | :width: 600
159 |
160 |
161 | Features - tracking
162 | -------------------
163 |
164 | .. code-block:: python3
165 |
166 | from stracking.features import (LengthFeature, DistanceFeature,
167 | DisplacementFeature)
168 |
169 | tracks.scale=[1,0.316,0.104,0.104];
170 |
171 | # Length feature
172 | feature_calc = LengthFeature()
173 | tracks = feature_calc.run(tracks)
174 |
175 | # Distance feature
176 | feature_calc = DistanceFeature()
177 | tracks = feature_calc.run(tracks)
178 |
179 | # Displacement feature
180 | feature_calc = DisplacementFeature()
181 | tracks = feature_calc.run(tracks)
182 |
183 |
184 |
185 | Display tracks properties
186 | ~~~~~~~~~~~~~~~~~~~~~~~~~
187 |
188 | .. code-block:: python3
189 |
190 | displacement=tracks.features['displacement'];
191 | distance=tracks.features['distance'];
192 | length=tracks.features['length'];
193 |
194 | displacementMat = np.array([displacement[i] for i in range(len(displacement))])
195 | distanceMat = np.array([distance[i] for i in range(len(distance))])
196 | lengthMat = np.array([length[i] for i in range(len(length))])
197 | speed=distanceMat/(4.3*lengthMat)
198 |
199 | fig, axs = plt.subplots(1, 3, figsize=(15, 4), sharey=False)
200 | 
201 | axs[0].hist(4.3 * lengthMat, bins=8, edgecolor="white")
202 | axs[0].set_xlabel("Track Duration [s]", fontsize=14)
203 | axs[0].set_ylabel("Frequency", fontsize=14)
204 | axs[0].tick_params(axis='both', labelsize=14)
205 | 
206 | axs[1].boxplot([speed], patch_artist=True,
207 |                medianprops={"color": "red", "linewidth": 1.5},
208 |                boxprops={"facecolor": "white", "edgecolor": "black",
209 |                          "linewidth": 1.5},
210 |                whiskerprops={"color": "black", "linewidth": 1.5},
211 |                capprops={"color": "black", "linewidth": 1.5}, labels=[''])
212 | axs[1].set_ylabel("Speed [um/s]", fontsize=14)
213 | axs[1].set_xlabel("PMDR", fontsize=14)
214 | axs[1].tick_params(axis='both', labelsize=14)
215 | 
216 | 
217 | axs[2].hist(displacementMat / distanceMat, bins=10, edgecolor="white", facecolor='gray', alpha=0.9)
218 | axs[2].set_xlabel("Persistence [a.u.]", fontsize=14)
219 | axs[2].set_ylabel("Frequency", fontsize=14)
220 | axs[2].tick_params(axis='both', labelsize=14)
221 |
222 |
223 | .. image:: images/output_Example1_5.png
224 | :width: 600
225 |
226 | Track filtering
227 | ---------------
228 |
229 | .. code-block:: python3
230 |
231 | from stracking.filters import FeatureFilter
232 |
233 | f_filter = FeatureFilter(feature_name='length', min_val=10, max_val=100)
234 | filtered_tracks = f_filter.run(tracks)
235 |
236 | viewer.add_tracks(filtered_tracks.data, features=filtered_tracks.features, name='Filtered Tracks', scale=[4.3, 0.316, 0.104, 0.104])
237 | viewer.layers['Tracks'].visible = False
238 | nbscreenshot(viewer)
239 |
240 | .. image:: images/output_Example1_6.png
241 | :width: 600
242 |
243 |
244 |
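245 | Saving tracks
246 | -------------
247 | 
248 | The filtered tracks can be saved with the ``stracking.io`` module. A
249 | minimal sketch, assuming the ``st.json`` format used by the STracking
250 | command line tools (adapt the file name to your data):
251 | 
252 | .. code-block:: python3
253 | 
254 | from stracking.io import write_tracks
255 | 
256 | write_tracks('filtered_tracks.st.json', filtered_tracks, format_='st.json')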
--------------------------------------------------------------------------------
/examples/README.rst:
--------------------------------------------------------------------------------
1 | Examples
2 | ========
--------------------------------------------------------------------------------
/examples/detectors/README.rst:
--------------------------------------------------------------------------------
1 | Detectors
2 | ---------
--------------------------------------------------------------------------------
/examples/detectors/dog_detector_2d.py:
--------------------------------------------------------------------------------
1 | """
2 | DoG 2D detection
3 | ================
4 | 
5 | This example shows how to detect particles in a 2D+t image using the DoG detector
6 | """
7 |
8 | import numpy as np
9 | import napari
10 |
11 | from stracking.detectors import DoGDetector
12 | from stracking.data import fake_tracks1
13 |
14 | # load 2D+t sample
15 | image = fake_tracks1()
16 |
17 | # detect particles
18 | detector = DoGDetector(min_sigma=4, max_sigma=5, threshold=0.2)
19 | particles = detector.run(image)
20 |
21 | # visualize in napari
22 | viewer = napari.view_image(image)
23 | viewer.add_points(particles.data, size=2)
24 | napari.run()
25 |
--------------------------------------------------------------------------------
/examples/detectors/doh_detector_2d.py:
--------------------------------------------------------------------------------
1 | """
2 | DoH 2D detection
3 | ================
4 | 
5 | This example shows how to detect particles in a 2D+t image using the DoH detector
6 | """
7 |
8 | import numpy as np
9 | import napari
10 |
11 | from stracking.detectors import DoHDetector
12 | from stracking.data import fake_tracks1
13 |
14 | # load 2D+t sample
15 | image = fake_tracks1()
16 |
17 | # detect particles
18 | detector = DoHDetector(min_sigma=4, max_sigma=5, threshold=0.015)
19 | particles = detector.run(image)
20 |
21 | # visualize in napari
22 | viewer = napari.view_image(image)
23 | viewer.add_points(particles.data, size=2)
24 | napari.run()
25 |
--------------------------------------------------------------------------------
/examples/detectors/log_detector_2d.py:
--------------------------------------------------------------------------------
1 | """
2 | LoG 2D detection
3 | ================
4 |
5 | This example shows how to detect particles in a 2D+t image using the LoG detector
6 | """
7 |
8 | import numpy as np
9 | import napari
10 |
11 | from stracking.detectors import LoGDetector
12 | from stracking.data import fake_tracks1
13 |
14 |
15 | # load 2D+t sample
16 | image = fake_tracks1()
17 |
18 | # detect particles
19 | detector = LoGDetector(min_sigma=4, max_sigma=5, threshold=0.2)
20 | particles = detector.run(image)
21 |
22 | # visualize in napari
23 | viewer = napari.view_image(image)
24 | viewer.add_points(particles.data, size=2)
25 | napari.run()
26 |
--------------------------------------------------------------------------------
/examples/features/README.rst:
--------------------------------------------------------------------------------
1 | Features
2 | --------
--------------------------------------------------------------------------------
/examples/features/length_features_2d.py:
--------------------------------------------------------------------------------
1 | """
2 | Length features 2D
3 | ==================
4 |
5 | This example shows how to compute length, distance and displacement features on 2D tracks
6 | """
7 |
8 | import numpy as np
9 |
10 | from stracking.features import (LengthFeature, DistanceFeature,
11 | DisplacementFeature)
12 | from stracking.containers import STracks
13 |
14 | # create tracks
15 | data = np.array([[0, 0, 20, 20],
16 | [0, 1, 20, 35],
17 | [0, 2, 20, 50],
18 | [0, 3, 20, 65],
19 | [0, 4, 20, 80],
20 | [1, 0, 100, 25],
21 | [1, 1, 100, 35],
22 | [1, 2, 100, 50],
23 | [1, 3, 100, 65],
24 | [1, 4, 100, 80],
25 | [2, 0, 60, 19],
26 | [2, 2, 65, 50],
27 | [2, 3, 60, 65],
28 | [2, 4, 60, 80]]
29 | )
30 | tracks = STracks(data=data, features=dict())
31 |
32 | # Length feature
33 | feature_calc = LengthFeature()
34 | tracks = feature_calc.run(tracks)
35 |
36 | # Distance feature
37 | feature_calc = DistanceFeature()
38 | tracks = feature_calc.run(tracks)
39 |
40 | # Displacement feature
41 | feature_calc = DisplacementFeature()
42 | tracks = feature_calc.run(tracks)
43 |
44 | # print results
45 | print('features:', tracks.features)
46 |
--------------------------------------------------------------------------------
/examples/linkers/README.rst:
--------------------------------------------------------------------------------
1 | Linkers
2 | -------
--------------------------------------------------------------------------------
/examples/linkers/sp_linker.py:
--------------------------------------------------------------------------------
1 | """
2 | Shortest Path linker example
3 | ============================
4 |
5 | This example shows how to track particles with the shortest path linker
6 | """
7 |
8 | import numpy as np
9 | import napari
10 |
11 | from stracking.linkers import SPLinker, EuclideanCost
12 | from stracking.containers import SParticles
13 | from stracking.data import fake_tracks1
14 |
15 | # load 2D+t sample
16 | image = fake_tracks1()
17 |
18 | # particles list
19 | detections = np.array([[0., 53., 12.],
20 | [0., 93., 11.],
21 | [0., 13., 10.],
22 | [1., 53., 26.],
23 | [1., 93., 26.],
24 | [1., 13., 26.],
25 | [2., 13., 41.],
26 | [2., 93., 41.],
27 | [2., 53., 41.],
28 | [3., 93., 56.],
29 | [3., 13., 55.],
30 | [3., 54., 56.],
31 | [4., 53., 71.],
32 | [4., 94., 71.],
33 | [4., 13., 71.]])
34 | particles = SParticles(data=detections)
35 |
36 | # shortest path tracking with euclidean cost
37 | euclidean_cost = EuclideanCost(max_cost=3000)
38 | my_tracker = SPLinker(cost=euclidean_cost, gap=1)
39 | tracks = my_tracker.run(particles)
40 |
41 | viewer = napari.view_image(np.transpose(image, (2, 0, 1)),
42 | name='fake particles')
43 | viewer.add_tracks(tracks.data, name='stracking')
44 | napari.run()
45 |
--------------------------------------------------------------------------------
/examples/properties/README.rst:
--------------------------------------------------------------------------------
1 | Properties
2 | ----------
--------------------------------------------------------------------------------
/examples/properties/intensity_properties.py:
--------------------------------------------------------------------------------
1 | """
2 | Intensity properties example
3 | ============================
4 |
5 | This example shows how to calculate intensity properties
6 | """
7 |
8 | import numpy as np
9 |
10 | from stracking.properties import IntensityProperty
11 | from stracking.containers import SParticles
12 | from stracking.data import fake_tracks1
13 |
14 | # initialize the input data
15 | image = fake_tracks1()
16 | spots = np.array([[0., 54., 12.],
17 | [0., 94., 12.],
18 | [0., 14., 12.],
19 | [1., 55., 27.],
20 | [1., 94., 27.],
21 | [1., 14., 27.],
22 | [2., 94., 42.],
23 | [2., 54., 42.],
24 | [2., 14., 42.],
25 | [3., 94., 57.],
26 | [3., 14., 57.],
27 | [3., 54., 57.],
28 | [4., 54., 72.],
29 | [4., 94., 72.],
30 | [4., 14., 72.]])
31 | particles = SParticles(data=spots)
32 |
33 | # calculate the intensity properties with a particle radius of 2
34 | property_calc = IntensityProperty(radius=2)
35 | property_calc.run(particles, image)
36 |
37 | # show the calculated properties
38 | print(particles.properties)
39 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | scipy
2 | scikit-image
3 | pandas
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = stracking
3 | version = 0.1.10
4 | author = Sylvain Prigent
5 | author_email = meriadec.prigent@gmail.com
6 | url = https://github.com/sylvainprigent/stracking
7 | license = BSD 3-Clause
8 | description = Implementation of particle tracking algorithms for 2D+t and 3D+t scientific data
9 | long_description = file: README.md
10 | long_description_content_type = text/markdown
11 | classifiers =
12 | Development Status :: 2 - Pre-Alpha
13 | Intended Audience :: Developers
14 | Topic :: Software Development :: Testing
15 | Programming Language :: Python
16 | Programming Language :: Python :: 3
17 | Programming Language :: Python :: 3.9
18 | Operating System :: OS Independent
19 | License :: OSI Approved :: BSD License
20 |
21 | [options]
22 | packages = find:
23 | python_requires = >=3.7
24 |
25 | # add your package requirements here
26 | install_requires =
27 | scipy>=1.6.3
28 | scikit-image>=0.18.1
29 | pandas>=1.2.4
30 |
31 | [options.package_data]
32 | * = */*.tif
33 |
34 | [options.entry_points]
35 | console_scripts =
36 | sdogdetector = stracking.cli.detector_dog:main
37 | slogdetector = stracking.cli.detector_log:main
38 | sdohdetector = stracking.cli.detector_doh:main
39 | ssplinker = stracking.cli.linker_shortest_path:main
40 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup()
4 |
--------------------------------------------------------------------------------
/stracking/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Particle tracking module for Python
3 | ====================================
4 | 
5 | stracking is a Python module for particle tracking based on the scientific
6 | ecosystem in Python (numpy, scipy, scikit-image).
7 |
8 | See http://github.com/sylvainprigent/stracking for complete documentation.
9 | """
10 |
11 | __all__ = []
--------------------------------------------------------------------------------
/stracking/cli/__init__.py:
--------------------------------------------------------------------------------
1 | """CLI module
2 | Set of tools to use STracking from a command line interface
3 | """
4 |
5 | __all__ = []
6 |
--------------------------------------------------------------------------------
/stracking/cli/detector_dog.py:
--------------------------------------------------------------------------------
1 | import os
2 | import argparse
3 | from skimage.io import imread
4 | from stracking.detectors import DoGDetector
5 | from stracking.io import write_particles
6 |
7 |
8 | def main():
9 | parser = argparse.ArgumentParser(description='STracking DoG detector',
10 | conflict_handler='resolve')
11 |     parser.add_argument('-i', '--input', help='Input image file', default='')
12 |     parser.add_argument('-o', '--output', help='Output particles file', default='')
13 |
14 | parser.add_argument('-a', '--minsigma', help='Minimum sigma value', default='4')
15 | parser.add_argument('-b', '--maxsigma', help='Maximum sigma value', default='5')
16 |     parser.add_argument('-r', '--sigmaratio', help='The ratio between the standard '
17 |                                                    'deviations of the Gaussian kernels', default='1.6')
18 | parser.add_argument('-t', '--threshold', help='Detection threshold', default='0.2')
19 | parser.add_argument('-l', '--overlap', help='Allowed detection overlap fraction in [0, 1]',
20 | default='0.5')
21 | args = parser.parse_args()
22 |
23 | if os.path.exists(args.input):
24 | image = imread(args.input)
25 | else:
26 |         print('ERROR: The input image file does not exist')
27 | return
28 | detector = DoGDetector(min_sigma=float(args.minsigma),
29 | max_sigma=float(args.maxsigma),
30 | sigma_ratio=float(args.sigmaratio),
31 | threshold=float(args.threshold),
32 | overlap=float(args.overlap))
33 | particles = detector.run(image)
34 | write_particles(args.output, particles)
35 |
36 |
37 | if __name__ == "__main__":
38 | main()
39 |
--------------------------------------------------------------------------------
/stracking/cli/detector_doh.py:
--------------------------------------------------------------------------------
1 | import os
2 | import argparse
3 | from skimage.io import imread
4 | from stracking.detectors import DoHDetector
5 | from stracking.io import write_particles
6 |
7 |
8 | def main():
9 | parser = argparse.ArgumentParser(description='STracking DoH detector',
10 | conflict_handler='resolve')
11 |     parser.add_argument('-i', '--input', help='Input image file', default='')
12 |     parser.add_argument('-o', '--output', help='Output particles file', default='')
13 | parser.add_argument('-a', '--minsigma', help='Minimum sigma value', default='4')
14 | parser.add_argument('-b', '--maxsigma', help='Maximum sigma value', default='5')
15 |     parser.add_argument('-n', '--numsigma', help='Number of sigma values', default='10')
16 | parser.add_argument('-t', '--threshold', help='Detection threshold', default='0.01')
17 | parser.add_argument('-l', '--overlap', help='Allowed detection overlap fraction in [0, 1]', default='0.5')
18 |
19 | args = parser.parse_args()
20 |
21 | if os.path.exists(args.input):
22 | image = imread(args.input)
23 | else:
24 |         print('ERROR: The input image file does not exist')
25 | return
26 | detector = DoHDetector(min_sigma=float(args.minsigma),
27 | max_sigma=float(args.maxsigma),
28 | num_sigma=int(args.numsigma),
29 | threshold=float(args.threshold),
30 | overlap=float(args.overlap))
31 | particles = detector.run(image)
32 | write_particles(args.output, particles)
33 |
34 |
35 | if __name__ == "__main__":
36 | main()
37 |
--------------------------------------------------------------------------------
/stracking/cli/detector_log.py:
--------------------------------------------------------------------------------
1 | import os
2 | import argparse
3 | from skimage.io import imread
4 | from stracking.detectors import LoGDetector
5 | from stracking.io import write_particles
6 |
7 |
8 | def main():
9 | parser = argparse.ArgumentParser(description='STracking LoG detector',
10 | conflict_handler='resolve')
11 |     parser.add_argument('-i', '--input', help='Input image file', default='')
12 |     parser.add_argument('-o', '--output', help='Output particles file', default='')
13 | parser.add_argument('-a', '--minsigma', help='Minimum sigma value', default='4')
14 | parser.add_argument('-b', '--maxsigma', help='Maximum sigma value', default='5')
15 |     parser.add_argument('-n', '--numsigma', help='Number of sigma values', default='10')
16 | parser.add_argument('-t', '--threshold', help='Detection threshold', default='0.2')
17 | parser.add_argument('-l', '--overlap', help='Allowed detection overlap fraction in [0, 1]',
18 | default='0.5')
19 | args = parser.parse_args()
20 |
21 | if os.path.exists(args.input):
22 | image = imread(args.input)
23 | else:
24 |         print('ERROR: The input image file does not exist')
25 | return
26 | detector = LoGDetector(min_sigma=float(args.minsigma),
27 | max_sigma=float(args.maxsigma),
28 | num_sigma=int(args.numsigma),
29 | threshold=float(args.threshold),
30 | overlap=float(args.overlap))
31 | particles = detector.run(image)
32 | write_particles(args.output, particles)
33 |
34 |
35 | if __name__ == "__main__":
36 | main()
37 |
--------------------------------------------------------------------------------
/stracking/cli/linker_shortest_path.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | from stracking.io import read_particles, write_tracks
3 | from stracking.linkers import SPLinker, EuclideanCost
4 |
5 |
6 | def main():
7 | parser = argparse.ArgumentParser(description='STracking shortest path linker',
8 | conflict_handler='resolve')
9 |     parser.add_argument('-i', '--input', help='Input detections file', default='')
10 | parser.add_argument('-o', '--output', help='Output tracks file', default='')
11 | parser.add_argument('-f', '--format', help='output file format', default='st.json')
12 | parser.add_argument('-c', '--maxcost', help='Maximum connection cost', default='3000')
13 |     parser.add_argument('-g', '--gap', help='Number of frames for gap closing', default='1')
14 | args = parser.parse_args()
15 |
16 | particles = read_particles(args.input)
17 | euclidean_cost = EuclideanCost(max_cost=float(args.maxcost))
18 | my_tracker = SPLinker(cost=euclidean_cost, gap=int(args.gap))
19 | tracks = my_tracker.run(particles)
20 | write_tracks(args.output, tracks, format_=args.format)
21 |
22 |
23 | if __name__ == "__main__":
24 | main()
25 |
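26 | # Example invocation of the installed ``ssplinker`` console script
27 | # (a sketch; the file names are placeholders for your own data):
28 | #
29 | #   ssplinker -i detections_file -o tracks.st.json -f st.json -c 3000 -g 1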
--------------------------------------------------------------------------------
/stracking/containers/__init__.py:
--------------------------------------------------------------------------------
1 | from ._containers import SParticles, STracks
2 |
3 | __all__ = ['SParticles',
4 | 'STracks']
5 |
--------------------------------------------------------------------------------
/stracking/containers/_containers.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | class SParticles:
4 | """Container for particles
5 |
6 |     The container holds two pieces of data: the particle array (N, D+1) of the
7 |     particles and a properties dictionary for the features
8 |
9 | Attributes
10 | ----------
11 | data : array (N, D+1)
12 | Coordinates for N points in D+1 dimensions. T,(Z),Y,X. The first
13 | axis is the integer ID of the time point. D is either 2 or 3 for planar
14 | or volumetric time series respectively.
15 | properties : dict {str: array (N,)}, DataFrame
16 | Properties for each point. Each property should be an array of length N,
17 | where N is the number of points.
18 | scale : tuple of float
19 | Scale factors for the image data.
20 |
21 | """
22 |     def __init__(self, data=None, properties=None, scale=None):
23 |         self.data = data
24 |         self.properties = properties if properties is not None else {}  # avoid a shared mutable default
25 |         self.scale = scale
26 |
27 |
28 | class STracks:
29 | """Container for trajectories
30 |
31 | This container is compatible with the Napari tracks layer
32 |
33 | Attributes
34 | ----------
35 | data : array (N, D+1)
36 | Coordinates for N points in D+1 dimensions. ID,T,(Z),Y,X. The first
37 | axis is the integer ID of the track. D is either 3 or 4 for planar
38 | or volumetric timeseries respectively.
39 | properties : dict {str: array (N,)}, DataFrame
40 | Properties for each point. Each property should be an array of length N,
41 | where N is the number of points.
42 | graph : dict {int: list}
43 | Graph representing associations between tracks. Dictionary defines the
44 | mapping between a track ID and the parents of the track. This can be
45 | one (the track has one parent, and the parent has >=1 child) in the
46 | case of track splitting, or more than one (the track has multiple
47 | parents, but only one child) in the case of track merging.
48 | See examples/tracks_3d_with_graph.py
49 |     features: dict {str: dict}
50 |         Features for each track. Each feature should be a map of
51 |         trackID=value. Ex: features['length'][12]=25.2
52 | scale : tuple of float
53 | Scale factors for the image data.
54 |
55 | """
56 |     def __init__(self, data=None, properties=None,
57 |                  graph=None, features=None, scale=None):
58 |         # None defaults avoid sharing mutable containers between instances
59 |         self.data = data
60 |         self.properties = properties if properties is not None else {}
61 |         self.graph = graph if graph is not None else {}
62 |         self.features = features if features is not None else {}
63 |         self.scale = scale if scale is not None else tuple()
64 |
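65 | 
66 | # Illustrative sketch (not part of the library API): a minimal 2D+t
67 | # STracks with two tracks in the ID,T,Y,X layout and a 'length' feature
68 | # keyed by track ID, matching the docstring above.
69 | #
70 | #   import numpy as np
71 | #   data = np.array([[0, 0, 10., 10.],
72 | #                    [0, 1, 12., 15.],
73 | #                    [1, 0, 50., 50.],
74 | #                    [1, 1, 52., 55.]])
75 | #   tracks = STracks(data=data, features={'length': {0: 2, 1: 2}})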
--------------------------------------------------------------------------------
/stracking/data/__init__.py:
--------------------------------------------------------------------------------
1 | import os.path as osp
2 | import os
3 | import numpy as np
4 |
5 | __all__ = ['fake_tracks1', 'fake_tracks1_3d']
6 |
7 | legacy_data_dir = osp.abspath(osp.dirname(__file__))
8 |
9 |
10 | def _fetch(data_filename):
11 | """Fetch a given data file from the local data dir.
12 |
13 |     This function provides the path location of the data file given
14 |     its name in the stracking data directory.
15 | 
16 |     Parameters
17 |     ----------
18 |     data_filename:
19 |         Name of the file in the stracking data dir
20 |
21 | Returns
22 | -------
23 | Path of the local file as a python string.
24 | """
25 |
26 | filepath = os.path.join(legacy_data_dir, data_filename)
27 |
28 | if os.path.isfile(filepath):
29 | return filepath
30 | else:
31 |         raise FileNotFoundError("Cannot find the file:", filepath)
32 |
33 |
34 | def _load(f):
35 | """Load an image file located in the data directory.
36 | Parameters
37 | ----------
38 | f : string
39 | File name.
40 | Returns
41 | -------
42 | img : ndarray
43 |         Image loaded from the stracking data directory.
44 | """
45 | # importing io is quite slow since it scans all the backends
46 | # we lazy import it here
47 | from skimage.io import imread
48 | return imread(_fetch(f))
49 |
50 |
51 | def fake_tracks1():
52 | """2D+t particles moving horizontally.
53 |
54 | Returns
55 | -------
56 |     fake_tracks1 : (119, 101, 5) float ndarray
57 | Moving points in 2D.
58 | """
59 |
60 | return _load("fake_tracks1.tif")
61 |
62 |
63 | def fake_tracks1_3d():
64 | """3D+t particles moving simultaneously.
65 |
66 | Returns
67 | -------
68 | fake_tracks1_3d: (5, 64, 128, 128) float ndarray
69 | Moving points in 3D.
70 | """
71 |
72 | f1 = _load(os.path.join('tracks1_3d', 'track1_t001.tif'))
73 | f2 = _load(os.path.join('tracks1_3d', 'track1_t002.tif'))
74 | f3 = _load(os.path.join('tracks1_3d', 'track1_t003.tif'))
75 | f4 = _load(os.path.join('tracks1_3d', 'track1_t004.tif'))
76 | f5 = _load(os.path.join('tracks1_3d', 'track1_t005.tif'))
77 |
78 | stack = np.stack([f1, f2, f3, f4, f5])
79 | return stack
80 |
--------------------------------------------------------------------------------
/stracking/data/fake_tracks1.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/data/fake_tracks1.tif
--------------------------------------------------------------------------------
/stracking/data/tracks1_3d/track1_t001.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/data/tracks1_3d/track1_t001.tif
--------------------------------------------------------------------------------
/stracking/data/tracks1_3d/track1_t002.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/data/tracks1_3d/track1_t002.tif
--------------------------------------------------------------------------------
/stracking/data/tracks1_3d/track1_t003.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/data/tracks1_3d/track1_t003.tif
--------------------------------------------------------------------------------
/stracking/data/tracks1_3d/track1_t004.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/data/tracks1_3d/track1_t004.tif
--------------------------------------------------------------------------------
/stracking/data/tracks1_3d/track1_t005.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/data/tracks1_3d/track1_t005.tif
--------------------------------------------------------------------------------
/stracking/detectors/__init__.py:
--------------------------------------------------------------------------------
1 | from ._detector import SDetector
2 | from ._gaussian_detectors import LoGDetector, DoGDetector, DoHDetector
3 | from ._seg_detector import SSegDetector
4 |
5 | __all__ = ['SDetector',
6 | 'LoGDetector',
7 | 'DoGDetector',
8 | 'DoHDetector',
9 | 'SSegDetector']
10 |
--------------------------------------------------------------------------------
/stracking/detectors/_detector.py:
--------------------------------------------------------------------------------
1 | from stracking.observers import SObservable
2 |
3 |
4 | class SDetector(SObservable):
5 | """Interface for a particle detector
6 |
7 |     The parameters must be set in the constructor and the image data
8 |     passed to the run method
9 | Example:
10 | ```
11 | my_detector = MyParticleDetector(threshold=12.0)
12 | particles = my_detector.run(image)
13 | ```
14 |
15 | """
16 | def __init__(self):
17 | super().__init__()
18 |
19 | def run(self, image, scale=None):
20 | """Run the detection on a ND image
21 |
22 | Parameters
23 | ----------
24 | image: ndarray
25 | time frames to analyse
26 | scale: tuple or list
27 | scale of the image in each dimension
28 |
29 | Returns
30 | -------
31 | detections: SParticles
32 |
33 | """
34 | raise Exception('SDetector is abstract')
35 |
--------------------------------------------------------------------------------
/stracking/detectors/_gaussian_detectors.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from skimage.feature import blob
3 |
4 | from stracking.containers import SParticles
5 | from ._detector import SDetector
6 |
7 |
8 | class DoGDetector(SDetector):
9 |     """Detect spots on 2D+t and 3D+t images using the DoG algorithm
10 |
11 | Parameters
12 | ----------
13 | min_sigma : scalar or sequence of scalars, optional
14 | The minimum standard deviation for Gaussian kernel. Keep this low to
15 | detect smaller blobs. The standard deviations of the Gaussian filter
16 | are given for each axis as a sequence, or as a single number, in
17 | which case it is equal for all axes.
18 | max_sigma : scalar or sequence of scalars, optional
19 | The maximum standard deviation for Gaussian kernel. Keep this high to
20 | detect larger blobs. The standard deviations of the Gaussian filter
21 | are given for each axis as a sequence, or as a single number, in
22 | which case it is equal for all axes.
23 |     sigma_ratio : float, optional
24 |         The ratio between the standard deviations of the Gaussian kernels used
25 |         for computing the Difference of Gaussians
26 |     threshold : float, optional.
27 |         The absolute lower bound for scale space maxima. Local maxima smaller
28 |         than threshold are ignored. Reduce this to detect blobs with lower
29 |         intensities.
30 |     overlap : float, optional
31 |         A value between 0 and 1. If the area of two blobs overlaps by a
32 |         fraction greater than `overlap`, the smaller blob is eliminated.
33 |
34 | """
35 | def __init__(self, min_sigma=1, max_sigma=50, sigma_ratio=1.6,
36 | threshold=2.0, overlap=.5):
37 | super().__init__()
38 | self.min_sigma = min_sigma
39 | self.max_sigma = max_sigma
40 | self.sigma_ratio = sigma_ratio
41 | self.threshold = threshold
42 | self.overlap = overlap
43 |
44 | def run(self, image, scale=None):
45 | """Run the detection on a ND image
46 |
47 | Parameters
48 | ----------
49 | image: ndarray
50 | time frames to analyse
51 | scale: tuple or list
52 | scale of the image in each dimension
53 |
54 | Returns
55 | -------
56 | detections: SParticles
57 |
58 | """
59 | self.notify('processing')
60 | self.progress(0)
61 | if image.ndim == 3: # 2D+t
62 | self.notify('processing 2D+t')
63 | spots_ = np.empty((0, 3))
64 | sigma_ = np.empty((0,))
65 | for t in range(image.shape[0]):
66 | self.progress(int(100 * t / image.shape[0]))
67 | frame = image[t, :, :]
68 | blobs = blob.blob_dog(frame, self.min_sigma, self.max_sigma,
69 | self.sigma_ratio, self.threshold,
70 | self.overlap)
71 | spots = t*np.ones((blobs.shape[0], 3))
72 |                 spots[:, 1] = blobs[:, 0]  # y (row)
73 |                 spots[:, 2] = blobs[:, 1]  # x (column)
74 |
75 | if spots.shape[0] > 0:
76 | spots_ = np.concatenate((spots_, spots), axis=0)
77 | sigma_ = np.concatenate((sigma_, blobs[:, 2]), axis=0)
78 | self.notify('done')
79 | self.progress(100)
80 |
81 | return SParticles(data=spots_, properties={'radius': sigma_}, scale=scale)
82 |
83 | elif image.ndim == 4: # 3D+t
84 | self.notify('processing 3D+t')
85 | spots_ = np.empty((0, 4))
86 | sigma_ = np.empty((0,))
87 | for t in range(image.shape[0]):
88 | self.progress(int(100 * t / image.shape[0]))
89 | frame = image[t, :, :, :]
90 | blobs = blob.blob_dog(frame, self.min_sigma, self.max_sigma,
91 | self.sigma_ratio, self.threshold,
92 | self.overlap)
93 | spots = t * np.ones((blobs.shape[0], 4))
94 |                 spots[:, 1] = blobs[:, 0]  # z (plane)
95 |                 spots[:, 2] = blobs[:, 1]  # y (row)
96 |                 spots[:, 3] = blobs[:, 2]  # x (column)
97 |
98 | if spots.shape[0] > 0:
99 | spots_ = np.concatenate((spots_, spots), axis=0)
100 | sigma_ = np.concatenate((sigma_, blobs[:, 3]), axis=0)
101 | self.notify('done')
102 | self.progress(100)
103 | return SParticles(data=spots_, properties={'radius': sigma_}, scale=scale)
104 | else:
105 | raise Exception('DoGDetector: can process only 2D+t or 3D+t images')
106 |
107 |
108 | class LoGDetector(SDetector):
109 | """Laplacian of Gaussian spots detector
110 |
111 |     Detect blobs on an image using the Laplacian of Gaussian method. The
112 |     implementation is from scikit-image
113 |
114 | Parameters
115 | ----------
116 | min_sigma : scalar or sequence of scalars, optional
117 | the minimum standard deviation for Gaussian kernel. Keep this low to
118 | detect smaller blobs. The standard deviations of the Gaussian filter
119 | are given for each axis as a sequence, or as a single number, in
120 | which case it is equal for all axes.
121 | max_sigma : scalar or sequence of scalars, optional
122 | The maximum standard deviation for Gaussian kernel. Keep this high to
123 | detect larger blobs. The standard deviations of the Gaussian filter
124 | are given for each axis as a sequence, or as a single number, in
125 | which case it is equal for all axes.
126 | num_sigma : int, optional
127 | The number of intermediate values of standard deviations to consider
128 | between `min_sigma` and `max_sigma`.
129 |     threshold : float, optional.
130 |         The absolute lower bound for scale space maxima. Local maxima smaller
131 |         than threshold are ignored. Reduce this to detect blobs with lower
132 |         intensities.
133 |     overlap : float, optional
134 |         A value between 0 and 1. If the area of two blobs overlaps by a
135 |         fraction greater than `overlap`, the smaller blob is eliminated.
136 | log_scale : bool, optional
137 | If set intermediate values of standard deviations are interpolated
138 | using a logarithmic scale to the base `10`. If not, linear
139 | interpolation is used.
140 | """
141 |
142 | def __init__(self, min_sigma=1, max_sigma=50, num_sigma=10, threshold=.2,
143 | overlap=.5, log_scale=False):
144 | super().__init__()
145 | self.min_sigma = min_sigma
146 | self.max_sigma = max_sigma
147 | self.num_sigma = num_sigma
148 | self.threshold = threshold
149 | self.overlap = overlap
150 | self.log_scale = log_scale
151 |
152 | def run(self, image, scale=None):
153 | """Run the detection on a ND image
154 |
155 | Parameters
156 | ----------
157 | image: ndarray
158 | time frames to analyse
159 | scale: tuple or list
160 | scale of the image in each dimension
161 |
162 | Returns
163 | -------
164 | detections: SParticles
165 |
166 | """
167 | self.notify('processing')
168 | self.progress(0)
169 | if image.ndim == 3: # 2D+t
170 | self.notify('processing 2D+t')
171 | spots_ = np.empty((0, 3))
172 | sigma_ = np.empty((0,))
173 | for t in range(image.shape[0]):
174 | self.progress(int(100 * t / image.shape[0]))
175 | frame = image[t, :, :]
176 | blobs = blob.blob_log(frame, self.min_sigma,
177 | self.max_sigma,
178 | self.num_sigma, self.threshold,
179 | self.overlap, self.log_scale)
180 | spots = t*np.ones((blobs.shape[0], 3))
181 |                 spots[:, 1] = blobs[:, 0]  # y (row)
182 |                 spots[:, 2] = blobs[:, 1]  # x (column)
183 |
184 | if spots.shape[0] > 0:
185 | spots_ = np.concatenate((spots_, spots), axis=0)
186 | sigma_ = np.concatenate((sigma_, blobs[:, 2]), axis=0)
187 | self.notify('done')
188 | self.progress(100)
189 | return SParticles(data=spots_, properties={'radius': sigma_}, scale=scale)
190 |
191 | elif image.ndim == 4: # 3D+t
192 | self.notify('processing 3D+t')
193 | spots_ = np.empty((0, 4))
194 | sigma_ = np.empty((0,))
195 | for t in range(image.shape[0]):
196 | self.progress(int(100 * t / image.shape[0]))
197 | frame = image[t, :, :, :]
198 | blobs = blob.blob_log(frame, self.min_sigma,
199 | self.max_sigma,
200 | self.num_sigma, self.threshold,
201 | self.overlap, self.log_scale)
202 | spots = t * np.ones((blobs.shape[0], 4))
203 |                 spots[:, 1] = blobs[:, 0]  # z (plane)
204 |                 spots[:, 2] = blobs[:, 1]  # y (row)
205 |                 spots[:, 3] = blobs[:, 2]  # x (column)
206 |
207 | if spots.shape[0] > 0:
208 | spots_ = np.concatenate((spots_, spots), axis=0)
209 | sigma_ = np.concatenate((sigma_, blobs[:, 3]), axis=0)
210 | self.notify('done')
211 | self.progress(100)
212 | return SParticles(data=spots_, properties={'radius': sigma_}, scale=scale)
213 | else:
214 | raise Exception('LoGDetector: can process only 2D+t or 3D+t images')
215 |
216 |
217 | class DoHDetector(SDetector):
218 | """Determinant of Hessian spots detector.
219 |
220 | Implementation from scikit-image
221 |     Blobs are found using the Determinant of Hessian method. For each blob
222 | found, the method returns its coordinates and the standard deviation
223 | of the Gaussian Kernel used for the Hessian matrix whose determinant
224 | detected the blob.
225 |
226 | Parameters
227 | ----------
228 | min_sigma : float, optional
229 | The minimum standard deviation for Gaussian Kernel used to compute
230 | Hessian matrix. Keep this low to detect smaller blobs.
231 | max_sigma : float, optional
232 | The maximum standard deviation for Gaussian Kernel used to compute
233 | Hessian matrix. Keep this high to detect larger blobs.
234 | num_sigma : int, optional
235 | The number of intermediate values of standard deviations to consider
236 | between `min_sigma` and `max_sigma`.
237 |     threshold : float, optional.
238 |         The absolute lower bound for scale space maxima. Local maxima smaller
239 |         than threshold are ignored. Reduce this to detect less prominent blobs.
240 |     overlap : float, optional
241 |         A value between 0 and 1. If the area of two blobs overlaps by a
242 |         fraction greater than `overlap`, the smaller blob is eliminated.
243 | log_scale : bool, optional
244 | If set intermediate values of standard deviations are interpolated
245 | using a logarithmic scale to the base `10`. If not, linear
246 | interpolation is used.
247 |
248 | """
249 |
250 | def __init__(self, min_sigma=1, max_sigma=30, num_sigma=10, threshold=0.01,
251 | overlap=.5, log_scale=False):
252 | super().__init__()
253 | self.min_sigma = min_sigma
254 | self.max_sigma = max_sigma
255 | self.num_sigma = num_sigma
256 | self.threshold = threshold
257 | self.overlap = overlap
258 | self.log_scale = log_scale
259 |
260 | def run(self, image, scale=None):
261 | """Run the detection on a ND image
262 |
263 | Parameters
264 | ----------
265 | image: ndarray
266 | time frames to analyse
267 | scale: tuple or list
268 | scale of the image in each dimension
269 |
270 | Returns
271 | -------
272 | detections: SParticles
273 |
274 | """
275 | self.notify('processing')
276 | self.progress(0)
277 | if image.ndim == 3: # 2D+t
278 | self.notify('processing 2D+t')
279 | spots_ = np.empty((0, 3))
280 | sigma_ = np.empty((0,))
281 | for t in range(image.shape[0]):
282 | self.progress(int(100*t/image.shape[0]))
283 | frame = image[t, :, :]
284 | blobs = blob.blob_doh(frame, self.min_sigma,
285 | self.max_sigma,
286 | self.num_sigma, self.threshold,
287 | self.overlap, self.log_scale)
288 | spots = t*np.ones((blobs.shape[0], 3))
289 |                 spots[:, 1] = blobs[:, 0]  # y (row)
290 |                 spots[:, 2] = blobs[:, 1]  # x (column)
291 |
292 | if spots.shape[0] > 0:
293 | spots_ = np.concatenate((spots_, spots), axis=0)
294 | sigma_ = np.concatenate((sigma_, blobs[:, 2]), axis=0)
295 | self.notify('done')
296 | self.progress(100)
297 | return SParticles(data=spots_, properties={'radius': sigma_}, scale=scale)
298 |
299 | elif image.ndim == 4: # 3D+t
300 | self.notify('processing 3D+t')
301 | spots_ = np.empty((0, 4))
302 | sigma_ = np.empty((0,))
303 | for t in range(image.shape[0]):
304 | self.progress(int(100 * t / image.shape[0]))
305 | frame = image[t, :, :, :]
306 | blobs = blob.blob_doh(frame, self.min_sigma,
307 | self.max_sigma,
308 | self.num_sigma, self.threshold,
309 | self.overlap, self.log_scale)
310 | spots = t * np.ones((blobs.shape[0], 4))
311 |                 spots[:, 1] = blobs[:, 0]  # z (plane)
312 |                 spots[:, 2] = blobs[:, 1]  # y (row)
313 |                 spots[:, 3] = blobs[:, 2]  # x (column)
314 |
315 | if spots.shape[0] > 0:
316 | spots_ = np.concatenate((spots_, spots), axis=0)
317 | sigma_ = np.concatenate((sigma_, blobs[:, 3]), axis=0)
318 | self.notify('done')
319 | self.progress(100)
320 | return SParticles(data=spots_, properties={'radius': sigma_}, scale=scale)
321 | else:
322 | raise Exception('DoHDetector: can process only 2D+t or 3D+t images')
323 |
--------------------------------------------------------------------------------
/stracking/detectors/_seg_detector.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from skimage.measure import label, regionprops
3 |
4 | from stracking.containers import SParticles
5 | from ._detector import SDetector
6 |
7 |
8 | class SSegDetector(SDetector):
9 | """Detections from segmentation image
10 |
11 |     Create a list of particle positions from a segmentation image. The segmentation image can be a
12 |     binary mask or a label image. This detector is useful, for example, to create detections from a
13 |     CellPose segmentation
14 |
15 | Parameters
16 | ----------
17 |     is_mask: bool
18 |         True if the input image is a binary mask, False if it is a label image
19 |
20 | """
21 | def __init__(self, is_mask=False):
22 | super().__init__()
23 | self.is_mask = is_mask
24 |
25 | def run(self, image, scale=None):
26 | """Run the detection on a ND image
27 |
28 | Parameters
29 | ----------
30 | image: ndarray
31 | time frames labels images
32 | scale: tuple or list
33 | scale of the image in each dimension
34 |
35 | Returns
36 | -------
37 | detections: SParticles
38 |
39 | """
40 | self.notify('processing')
41 | self.progress(0)
42 | if image.ndim == 3: # 2D+t
43 | self.notify('processing 2D+t')
44 | spots_ = np.empty((0, 3))
45 | for t in range(image.shape[0]):
46 | self.progress(int(100 * t / image.shape[0]))
47 | frame = np.int16(image[t, :, :])
48 | if self.is_mask:
49 | frame = label(frame, background=0)
50 | props = regionprops(frame)
51 | centroids = np.zeros((len(props), 3)) # [T, Y, X]
52 | for i, prop in enumerate(props):
53 | centroids[i, 0] = t
54 | centroids[i, 1] = prop.centroid[0]
55 | centroids[i, 2] = prop.centroid[1]
56 | if centroids.shape[0] > 0:
57 | spots_ = np.concatenate((spots_, centroids), axis=0)
58 | self.notify('done')
59 | self.progress(100)
60 | return SParticles(data=spots_, properties={}, scale=scale)
61 | elif image.ndim == 4: # 3D+t
62 | self.notify('processing 3D+t')
63 | spots_ = np.empty((0, 4))
64 | for t in range(image.shape[0]):
65 | self.progress(int(100 * t / image.shape[0]))
66 | frame = np.int16(image[t, :, :, :])
67 | if self.is_mask:
68 | frame = label(frame, background=0)
69 | props = regionprops(frame)
70 | centroids = np.zeros((len(props), 4)) # [T, Z, Y, X]
71 | for i, prop in enumerate(props):
72 | centroids[i, 0] = t
73 | centroids[i, 1] = prop.centroid[0]
74 | centroids[i, 2] = prop.centroid[1]
75 | centroids[i, 3] = prop.centroid[2]
76 | if centroids.shape[0] > 0:
77 | spots_ = np.concatenate((spots_, centroids), axis=0)
78 | self.notify('done')
79 | self.progress(100)
80 | return SParticles(data=spots_, properties={}, scale=scale)
81 | else:
82 | raise Exception('SSegDetector: can process only 2D+t or 3D+t images')
83 |
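84 | 
85 | # Usage sketch (assuming `labels` is a 2D+t label image, for example a
86 | # CellPose segmentation computed frame by frame):
87 | #
88 | #   detector = SSegDetector(is_mask=False)
89 | #   particles = detector.run(labels)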
--------------------------------------------------------------------------------
/stracking/detectors/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/detectors/tests/__init__.py
--------------------------------------------------------------------------------
/stracking/detectors/tests/test_gaussian_detectors.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 | from skimage import io
4 |
5 | from stracking.detectors import LoGDetector, DoGDetector, DoHDetector
6 |
7 |
8 | # tmp_path is a pytest fixture
9 | def test_log_detector(tmp_path):
10 |     """Check the LoG detector output on a cropped test sequence."""
11 |
12 | root_dir = os.path.dirname(os.path.abspath(__file__))
13 | my_test_file = os.path.join(root_dir, 'tracks1_crop.tif')
14 |
15 | image = io.imread(my_test_file)
16 |
17 | detector = LoGDetector(min_sigma=4, max_sigma=5, threshold=0.2)
18 | particles = detector.run(image)
19 | # print(particles.data)
20 |
21 | expected_output = [[0., 54., 12.],
22 | [0., 94., 12.],
23 | [0., 14., 12.],
24 | [1., 55., 27.],
25 | [1., 94., 27.],
26 | [1., 14., 27.],
27 | [2., 94., 42.],
28 | [2., 54., 42.],
29 | [2., 14., 42.],
30 | [3., 94., 57.],
31 | [3., 14., 57.],
32 | [3., 54., 57.],
33 | [4., 54., 72.],
34 | [4., 94., 72.],
35 | [4., 14., 72.]]
36 |
37 | np.testing.assert_equal(expected_output, particles.data)
38 |
39 |
40 | def test_dog_detector(tmp_path):
41 |     """Check the DoG detector output on a cropped test sequence."""
42 |
43 | root_dir = os.path.dirname(os.path.abspath(__file__))
44 | my_test_file = os.path.join(root_dir, 'tracks1_crop.tif')
45 |
46 | image = io.imread(my_test_file)
47 |
48 | detector = DoGDetector(min_sigma=4, max_sigma=5, threshold=0.15)
49 | particles = detector.run(image)
50 |
51 | expected_output = np.array([[0., 54., 12.],
52 | [0., 94., 12.],
53 | [0., 14., 12.],
54 | [1., 94., 27.],
55 | [1., 55., 27.],
56 | [1., 14., 27.],
57 | [2., 94., 42.],
58 | [2., 54., 42.],
59 | [2., 14., 42.],
60 | [3., 94., 57.],
61 | [3., 14., 57.],
62 | [3., 54., 57.],
63 | [4., 54., 72.],
64 | [4., 94., 72.],
65 | [4., 14., 72.]])
66 |
67 | np.testing.assert_equal(particles.data.shape, expected_output.shape)
68 |
69 |
70 | def test_doh_detector(tmp_path):
71 |     """Check the DoH detector output on a cropped test sequence."""
72 |
73 | root_dir = os.path.dirname(os.path.abspath(__file__))
74 | my_test_file = os.path.join(root_dir, 'tracks1_crop.tif')
75 |
76 | image = io.imread(my_test_file)
77 |
78 | detector = DoHDetector(min_sigma=4, max_sigma=5, threshold=0.015)
79 | particles = detector.run(image)
80 |
81 | expected_output = [[0., 53., 12.],
82 | [0., 93., 11.],
83 | [0., 13., 10.],
84 | [1., 53., 26.],
85 | [1., 93., 26.],
86 | [1., 13., 26.],
87 | [2., 13., 41.],
88 | [2., 93., 41.],
89 | [2., 53., 41.],
90 | [3., 93., 56.],
91 | [3., 13., 55.],
92 | [3., 54., 56.],
93 | [4., 53., 71.],
94 | [4., 94., 71.],
95 | [4., 13., 71.]]
96 |
97 | np.testing.assert_equal(expected_output, particles.data)
98 |
--------------------------------------------------------------------------------
/stracking/detectors/tests/tracks1.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/detectors/tests/tracks1.tif
--------------------------------------------------------------------------------
/stracking/detectors/tests/tracks1_crop.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/detectors/tests/tracks1_crop.tif
--------------------------------------------------------------------------------
/stracking/features/__init__.py:
--------------------------------------------------------------------------------
1 | from ._feature import SFeature
2 | from ._length import LengthFeature, DisplacementFeature, DistanceFeature
3 |
4 | __all__ = ['SFeature',
5 | 'LengthFeature',
6 | 'DisplacementFeature',
7 | 'DistanceFeature']
8 |
--------------------------------------------------------------------------------
/stracking/features/_feature.py:
--------------------------------------------------------------------------------
1 | # interfaces
2 | from stracking.observers import SObservable
3 |
4 |
5 | class SFeature(SObservable):
6 |     """Interface for a particle feature measurement
7 | 
8 |     The tracks to analyse (an ``STracks`` container) and an optional
9 |     image are passed to the ``run`` method
10 | 
11 |     The ``run`` method returns the tracks with the new feature added
12 | 
13 |     """
14 | def __init__(self):
15 | super().__init__()
16 |
17 | def run(self, stracks, image=None):
18 | """Measure a track property
19 |
20 | Parameters
21 | ----------
22 | stracks : STracks
23 | Track data container
24 | image: ndarray
25 | optional image data
26 |
27 | Returns
28 | -------
29 | stracks: STracks
30 | tracks with new feature
31 |
32 | """
33 | raise Exception('SFeature is abstract')
34 |
--------------------------------------------------------------------------------
/stracking/features/_length.py:
--------------------------------------------------------------------------------
1 | import math
2 | import numpy as np
3 | from ._feature import SFeature
4 |
5 |
6 | class LengthFeature(SFeature):
7 | """Calculate track length features.
8 |
9 |     Length is defined here as the number of points in a track
10 |
11 | """
12 | def __init__(self):
13 | super().__init__()
14 |
15 | def run(self, stracks, image=None):
16 | self.notify('length feature')
17 | self.progress(0)
18 |
19 | data = stracks.data
20 | tracks_ids = np.unique(data[:, 0])
21 | length_features = dict()
22 | t = -1
23 | for t_id in tracks_ids:
24 | t += 1
25 | self.progress(int(100*t/len(tracks_ids)))
26 | length_features[int(t_id)] = \
27 | int(np.count_nonzero(data[:, 0] == t_id))
28 | stracks.features['length'] = length_features
29 |
30 | self.notify('done')
31 |         self.progress(100)
32 | return stracks
33 |
34 |
35 | class DistanceFeature(SFeature):
36 |     """Calculate track distance features.
37 | 
38 |     Distance is defined here as the total path length travelled along a track
39 |
40 | """
41 | def __init__(self):
42 | super().__init__()
43 |
44 | def run(self, stracks, image=None):
45 | self.notify('distance feature')
46 | self.progress(0)
47 | if stracks.data.shape[1] < 5:
48 | return self._run_2d(stracks)
49 | else:
50 | return self._run_3d(stracks)
51 |
52 | def _run_2d(self, stracks):
53 | data = stracks.data
54 | tracks_ids = np.unique(data[:, 0])
55 | distance_features = dict()
56 |
57 | scale_x = 1
58 | scale_y = 1
59 | if stracks.scale and len(stracks.scale) == 3:
60 | scale_x = pow(stracks.scale[1], 2)
61 | scale_y = pow(stracks.scale[2], 2)
62 |
63 | t = -1
64 | for t_id in tracks_ids:
65 | t += 1
66 | self.progress(int(100 * t / len(tracks_ids)))
67 | track = data[data[:, 0] == t_id]
68 | distance = 0
69 | for i in range(track.shape[0]-1):
70 | distance += \
71 | math.sqrt(scale_x*pow(track[i+1, 2]-track[i, 2], 2) +
72 | scale_y*pow(track[i+1, 3]-track[i, 3], 2))
73 | distance_features[int(t_id)] = distance
74 |
75 | stracks.features['distance'] = distance_features
76 | self.notify('done')
77 | self.progress(100)
78 | return stracks
79 |
80 | def _run_3d(self, stracks):
81 | data = stracks.data
82 | tracks_ids = np.unique(data[:, 0])
83 | distance_features = dict()
84 |
85 | scale_x = 1
86 | scale_y = 1
87 | scale_z = 1
88 |         if stracks.scale and len(stracks.scale) == 4:  # scale is (T, Z, Y, X)
89 |             scale_z = pow(stracks.scale[1], 2)
90 |             scale_y = pow(stracks.scale[2], 2)
91 |             scale_x = pow(stracks.scale[3], 2)
92 |
93 | t = -1
94 | for t_id in tracks_ids:
95 | t += 1
96 | self.progress(int(100 * t / len(tracks_ids)))
97 | track = data[data[:, 0] == t_id]
98 | distance = 0
99 | for i in range(track.shape[0]-1):
100 | distance += \
101 |                     math.sqrt(scale_z*pow(track[i+1, 2]-track[i, 2], 2) +
102 |                               scale_y*pow(track[i+1, 3]-track[i, 3], 2) +
103 |                               scale_x*pow(track[i+1, 4]-track[i, 4], 2))
104 | distance_features[int(t_id)] = distance
105 |
106 | stracks.features['distance'] = distance_features
107 | self.notify('done')
108 | self.progress(100)
109 | return stracks
110 |
111 |
112 | class DisplacementFeature(SFeature):
113 |     """Calculate track displacement features.
114 | 
115 |     Displacement is defined here as the distance between the first and last points of a track
116 |
117 | """
118 | def __init__(self):
119 | super().__init__()
120 |
121 | def run(self, stracks, image=None):
122 | self.notify('displacement feature')
123 | self.progress(0)
124 | if stracks.data.shape[1] < 5:
125 | return self._run_2d(stracks)
126 | else:
127 | return self._run_3d(stracks)
128 |
129 | def _run_2d(self, stracks):
130 | data = stracks.data
131 | tracks_ids = np.unique(data[:, 0])
132 | displacement_features = dict()
133 |
134 | scale_x = 1
135 | scale_y = 1
136 | if stracks.scale and len(stracks.scale) == 3:
137 | scale_x = pow(stracks.scale[1], 2)
138 | scale_y = pow(stracks.scale[2], 2)
139 |
140 | t = -1
141 | for t_id in tracks_ids:
142 | t += 1
143 | self.progress(int(100 * t / len(tracks_ids)))
144 | track = data[data[:, 0] == t_id]
145 | i_end = track.shape[0]-1
146 | displacement = \
147 | math.sqrt(scale_x*pow(track[i_end, 2]-track[0, 2], 2) +
148 | scale_y*pow(track[i_end, 3]-track[0, 3], 2))
149 | displacement_features[int(t_id)] = displacement
150 |
151 | stracks.features['displacement'] = displacement_features
152 | self.notify('done')
153 | self.progress(100)
154 | return stracks
155 |
156 | def _run_3d(self, stracks):
157 | data = stracks.data
158 | tracks_ids = np.unique(data[:, 0])
159 | displacement_features = dict()
160 |
161 | scale_x = 1
162 | scale_y = 1
163 | scale_z = 1
164 |         if stracks.scale and len(stracks.scale) == 4:  # scale is (T, Z, Y, X)
165 |             scale_z = pow(stracks.scale[1], 2)
166 |             scale_y = pow(stracks.scale[2], 2)
167 |             scale_x = pow(stracks.scale[3], 2)
168 |
169 | t = -1
170 | for t_id in tracks_ids:
171 | t += 1
172 | self.progress(int(100 * t / len(tracks_ids)))
173 | track = data[data[:, 0] == t_id]
174 | i_end = track.shape[0]-1
175 | displacement = \
176 |                 math.sqrt(scale_z*pow(track[i_end, 2]-track[0, 2], 2) +
177 |                           scale_y*pow(track[i_end, 3]-track[0, 3], 2) +
178 |                           scale_x*pow(track[i_end, 4]-track[0, 4], 2))
179 | displacement_features[int(t_id)] = displacement
180 |
181 | stracks.features['displacement'] = displacement_features
182 | self.notify('done')
183 | self.progress(100)
184 | return stracks
185 |
--------------------------------------------------------------------------------
/stracking/features/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/features/tests/__init__.py
--------------------------------------------------------------------------------
/stracking/features/tests/test_length.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from stracking.containers import STracks
4 | from stracking.features import (LengthFeature, DistanceFeature,
5 | DisplacementFeature)
6 |
7 |
8 | def test_length_feature():
9 | data = np.array([[0, 0, 20, 20],
10 | [0, 1, 20, 35],
11 | [0, 2, 20, 50],
12 | [0, 3, 20, 65],
13 | [0, 4, 20, 80],
14 | [1, 0, 100, 20],
15 | [1, 1, 100, 35],
16 | [1, 2, 100, 50],
17 | [1, 3, 100, 65],
18 | [1, 4, 100, 80],
19 | [2, 0, 60, 20],
20 | [2, 2, 60, 50],
21 | [2, 3, 60, 65],
22 | [2, 4, 60, 80]]
23 | )
24 |
25 | tracks = STracks(data=data)
26 |
27 | feature_calc = LengthFeature()
28 | tracks = feature_calc.run(tracks)
29 |
30 | # print(tracks.data)
31 | # print(tracks.features)
32 |
33 | expected_data = np.array([[0, 0, 20, 20],
34 | [0, 1, 20, 35],
35 | [0, 2, 20, 50],
36 | [0, 3, 20, 65],
37 | [0, 4, 20, 80],
38 | [1, 0, 100, 20],
39 | [1, 1, 100, 35],
40 | [1, 2, 100, 50],
41 | [1, 3, 100, 65],
42 | [1, 4, 100, 80],
43 | [2, 0, 60, 20],
44 | [2, 2, 60, 50],
45 | [2, 3, 60, 65],
46 | [2, 4, 60, 80]]
47 | )
48 |
49 | expected_features = {'length': {0: 5, 1: 5, 2: 4}}
50 |
51 | np.testing.assert_almost_equal(expected_data, tracks.data, decimal=1)
52 | np.testing.assert_equal(expected_features, tracks.features)
53 |
54 |
55 | def test_distance_feature():
56 | data = np.array([[0, 0, 20, 20],
57 | [0, 1, 20, 35],
58 | [0, 2, 20, 50],
59 | [0, 3, 20, 65],
60 | [0, 4, 20, 80],
61 | [1, 0, 100, 25],
62 | [1, 1, 100, 35],
63 | [1, 2, 100, 50],
64 | [1, 3, 100, 65],
65 | [1, 4, 100, 80],
66 | [2, 0, 60, 19],
67 | [2, 2, 60, 50],
68 | [2, 3, 60, 65],
69 | [2, 4, 60, 80]]
70 | )
71 |
72 | tracks2 = STracks(data=data, features=dict())
73 |
74 | print('features before=')
75 | print(tracks2.features)
76 |
77 | feature_calc = DistanceFeature()
78 | tracks2 = feature_calc.run(tracks2)
79 |
80 | print(tracks2.data)
81 | print('features=')
82 | print(tracks2.features)
83 |
84 | expected_data = np.array([[0, 0, 20, 20],
85 | [0, 1, 20, 35],
86 | [0, 2, 20, 50],
87 | [0, 3, 20, 65],
88 | [0, 4, 20, 80],
89 | [1, 0, 100, 25],
90 | [1, 1, 100, 35],
91 | [1, 2, 100, 50],
92 | [1, 3, 100, 65],
93 | [1, 4, 100, 80],
94 | [2, 0, 60, 19],
95 | [2, 2, 60, 50],
96 | [2, 3, 60, 65],
97 | [2, 4, 60, 80]]
98 | )
99 |
100 | expected_features = {'distance': {0: 60.0, 1: 55.0, 2: 61.0}}
101 |
102 | np.testing.assert_almost_equal(expected_data, tracks2.data, decimal=1)
103 | np.testing.assert_equal(expected_features, tracks2.features)
104 |
105 |
106 | def test_displacement_feature():
107 | data = np.array([[0, 0, 20, 20],
108 | [0, 1, 20, 35],
109 | [0, 2, 20, 50],
110 | [0, 3, 20, 65],
111 | [0, 4, 20, 80],
112 | [1, 0, 100, 25],
113 | [1, 1, 100, 35],
114 | [1, 2, 100, 50],
115 | [1, 3, 100, 65],
116 | [1, 4, 100, 80],
117 | [2, 0, 60, 19],
118 | [2, 2, 65, 50],
119 | [2, 3, 60, 65],
120 | [2, 4, 60, 80]]
121 | )
122 |
123 | tracks2 = STracks(data=data, features=dict())
124 |
125 | print('features before=')
126 | print(tracks2.features)
127 |
128 | feature_calc = DisplacementFeature()
129 | tracks2 = feature_calc.run(tracks2)
130 |
131 | print(tracks2.data)
132 | print('features=')
133 | print(tracks2.features)
134 |
135 | expected_data = np.array([[0, 0, 20, 20],
136 | [0, 1, 20, 35],
137 | [0, 2, 20, 50],
138 | [0, 3, 20, 65],
139 | [0, 4, 20, 80],
140 | [1, 0, 100, 25],
141 | [1, 1, 100, 35],
142 | [1, 2, 100, 50],
143 | [1, 3, 100, 65],
144 | [1, 4, 100, 80],
145 | [2, 0, 60, 19],
146 | [2, 2, 65, 50],
147 | [2, 3, 60, 65],
148 | [2, 4, 60, 80]]
149 | )
150 |
151 | expected_features = {'displacement': {0: 60.0, 1: 55.0, 2: 61.0}}
152 |
153 | np.testing.assert_almost_equal(expected_data, tracks2.data, decimal=1)
154 | np.testing.assert_equal(expected_features, tracks2.features)
155 |
--------------------------------------------------------------------------------
/stracking/filters/__init__.py:
--------------------------------------------------------------------------------
1 | from ._filter import STracksFilter
2 | from ._feature_filter import FeatureFilter
3 |
4 | __all__ = ['STracksFilter', 'FeatureFilter']
5 |
--------------------------------------------------------------------------------
/stracking/filters/_feature_filter.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from ._filter import STracksFilter
3 |
4 |
5 | class FeatureFilter(STracksFilter):
6 | """Select trajectories based on feature
7 |
8 |     This filter selects trajectories where a given feature has a value
9 |     between given min and max values
10 |
11 | Parameters
12 | ----------
13 | feature_name: str
14 | Name of the feature to use
15 | min_val: float
16 | Minimum value of the feature to keep the track
17 | max_val: float
18 | Maximum value of the feature to keep the track
19 |
20 | """
21 | def __init__(self, feature_name, min_val, max_val):
22 | super().__init__()
23 | self.feature_name = feature_name
24 | self.min_val = 0
25 | self.max_val = 0
26 | if isinstance(min_val, (float, int)):
27 | self.min_val = min_val
28 | else:
29 | raise Exception(f"FeatureFilter min_val parameter must be a number, not a "
30 | f"{type(min_val)}")
31 | if isinstance(max_val, (float, int)):
32 | self.max_val = max_val
33 | else:
34 | raise Exception(f"FeatureFilter max_val parameter must be a number, not a "
35 | f"{type(max_val)}")
36 |
37 | def run(self, stracks):
38 | if self.feature_name not in stracks.features:
39 | raise Exception('FeatureFilter: feature ' + self.feature_name +
40 | ' not found')
41 | self.notify('processing')
42 | self.progress(0)
43 | tracks_feature = stracks.features[self.feature_name]
44 | graph = stracks.graph
45 | keys = graph.keys()
46 | t = -1
47 | tracks_feature_keys = tracks_feature.keys()
48 | for track_id in tracks_feature_keys:
49 | t += 1
50 | self.progress(int(100*t/len(tracks_feature_keys)))
51 | val = tracks_feature[track_id]
52 | if val < self.min_val or val > self.max_val:
53 | # remove the particles properties
54 | idxs = np.where((stracks.data[:, 0] == track_id))
55 | #print('filter tracks point indexes=', idxs)
56 | for property_ in stracks.properties:
57 | stracks.properties[property_] = np.delete(stracks.properties[property_], idxs)
58 | # remove from data
59 | stracks.data = np.delete(stracks.data,
60 | stracks.data[:, 0] == track_id,
61 | axis=0)
62 | # remove from the graph
63 | if track_id in keys:
64 | graph.pop(track_id)
65 | for key in graph.keys():
66 | if track_id in graph[key]:
67 | graph[key].remove(track_id)
68 | # remove track from features
69 | for key, feature in stracks.features.items():
70 | new_feature = feature.copy()
71 | new_feature.pop(track_id)
72 | stracks.features[key] = new_feature
73 | self.notify('done')
74 | self.progress(100)
75 | return stracks
76 |
--------------------------------------------------------------------------------
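
A usage sketch for FeatureFilter, mirroring the test further below: the [track_id, t, y, x] data layout and the 'distance' feature name come from this repository, while the coordinates and thresholds are illustrative.

import numpy as np

from stracking.containers import STracks
from stracking.features import DistanceFeature
from stracking.filters import FeatureFilter

# two short 2D+t tracks; columns are [track_id, t, y, x]
data = np.array([[0, 0, 20, 20],
                 [0, 1, 20, 35],
                 [0, 2, 20, 50],
                 [1, 0, 100, 25],
                 [1, 1, 100, 90],
                 [1, 2, 100, 160]], dtype=float)
tracks = STracks(data=data)

# compute the 'distance' feature, then keep only the tracks whose
# travelled distance lies in [0, 60]
tracks = DistanceFeature().run(tracks)
tracks = FeatureFilter(feature_name='distance', min_val=0, max_val=60).run(tracks)
print(tracks.data)  # track 0 remains (distance 30); track 1 (distance 135) is removed
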
/stracking/filters/_filter.py:
--------------------------------------------------------------------------------
1 | from stracking.containers import STracks
2 | from stracking.observers import SObservable
3 |
4 |
5 | class STracksFilter(SObservable):
6 | """Interface for a tracks filter
7 |
8 | A filter can select tracks based on properties or features
9 | Must implement the run method
10 |
11 | """
12 | def __init__(self):
13 | super().__init__()
14 |
15 | def run(self, stracks):
16 | """Run the filtering
17 |
18 | Parameters
19 | ----------
20 | stracks: STracks
21 | Tracks to filter
22 |
23 | Returns
24 | -------
25 | stracks: STracks
26 | Filtered tracks
27 |
28 | """
29 | raise Exception("STracksFilter is abstract class")
30 |
--------------------------------------------------------------------------------
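
To add a custom selection criterion, subclass STracksFilter and implement run. A minimal sketch under the usual [track_id, t, ...] data layout; the class name and threshold are hypothetical, and a complete filter would also prune properties, features and the graph as FeatureFilter does above.

import numpy as np

from stracking.filters import STracksFilter


class MinPointsFilter(STracksFilter):
    """Hypothetical filter keeping tracks with at least n_points detections"""
    def __init__(self, n_points=3):
        super().__init__()
        self.n_points = n_points

    def run(self, stracks):
        self.notify('processing')
        ids = np.unique(stracks.data[:, 0])
        # keep a track id only if it has enough rows in the data array
        keep = [i for i in ids
                if np.count_nonzero(stracks.data[:, 0] == i) >= self.n_points]
        stracks.data = stracks.data[np.isin(stracks.data[:, 0], keep)]
        self.notify('done')
        return stracks
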
/stracking/filters/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/filters/tests/__init__.py
--------------------------------------------------------------------------------
/stracking/filters/tests/test_feature_filter.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from stracking.containers import STracks
4 | from stracking.features import DistanceFeature
5 | from stracking.filters import FeatureFilter
6 |
7 |
8 | def test_feature_filter():
9 | # init tracks
10 | data = np.array([[0, 0, 20, 20],
11 | [0, 1, 20, 35],
12 | [0, 2, 20, 50],
13 | [0, 3, 20, 65],
14 | [0, 4, 20, 80],
15 | [1, 0, 100, 25],
16 | [1, 1, 100, 35],
17 | [1, 2, 100, 50],
18 | [1, 3, 100, 65],
19 | [1, 4, 100, 80],
20 | [2, 0, 60, 19],
21 | [2, 2, 65, 50],
22 | [2, 3, 60, 65],
23 | [2, 4, 60, 80]]
24 | )
25 |
26 | tracks = STracks(data=data)
27 |
28 | # calculate the distance feature
29 | feature_calc = DistanceFeature()
30 | tracks = feature_calc.run(tracks)
31 |
32 | # filter tracks on the distance feature
33 | f_filter = FeatureFilter(feature_name='distance', min_val=0, max_val=60)
34 | tracks = f_filter.run(tracks)
35 |
36 | expected_data = np.array([[0, 0, 20, 20],
37 | [0, 1, 20, 35],
38 | [0, 2, 20, 50],
39 | [0, 3, 20, 65],
40 | [0, 4, 20, 80],
41 | [1, 0, 100, 25],
42 | [1, 1, 100, 35],
43 | [1, 2, 100, 50],
44 | [1, 3, 100, 65],
45 | [1, 4, 100, 80]])
46 |
47 | np.testing.assert_almost_equal(expected_data, tracks.data, decimal=1)
48 |
--------------------------------------------------------------------------------
/stracking/io/__init__.py:
--------------------------------------------------------------------------------
1 | from ._reader_function import read_tracks, write_tracks
2 | from ._csv_io import CSVIO
3 | from ._icy_io import ICYIO
4 | from ._isbi_io import ISBIIO
5 | from ._trackmate_io import TrackMateIO
6 | from ._st_io import StIO
7 | from ._particles_io import read_particles, write_particles
8 |
9 | __all__ = ['read_tracks',
10 | 'write_tracks',
11 | 'read_particles',
12 | 'write_particles',
13 | 'StIO',
14 | 'CSVIO',
15 | 'ICYIO',
16 | 'ISBIIO',
17 | 'TrackMateIO']
18 |
--------------------------------------------------------------------------------
/stracking/io/_csv_io.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 |
4 | from ._io import STrackIO
5 | from stracking.containers import STracks
6 |
7 |
8 | class CSVIO(STrackIO):
9 | """Read/write tracks from/to csv file
10 |
11 | This format does not support split/merge events or track features
12 |
13 | Parameters
14 | ----------
15 | file_path: str
16 | Path of the csv file
17 |
18 | """
19 | def __init__(self, file_path):
20 | super().__init__(file_path)
21 | self.__track_id_header = 'track_id'
22 |
23 | def is_compatible(self):
24 | if self.file_path.endswith('.csv'):
25 | return True
26 | return False
27 |
28 | def read(self):
29 | df = pd.read_csv(self.file_path)
30 | headers = list(df.columns.values)
31 | in_tracks = df.to_numpy()
32 |
33 | track_id_header = self.__track_id_header
34 | if 'TrackID' in headers:
35 | track_id_header = 'TrackID'
36 |
37 | tracks = np.zeros((in_tracks.shape[0], 5))
38 | if track_id_header in headers:
39 | index = headers.index(track_id_header)
40 | tracks[:, 0] = in_tracks[:, index]
41 | if 't' in headers:
42 | index = headers.index('t')
43 | tracks[:, 1] = in_tracks[:, index]
44 | if 'z' in headers:
45 | index = headers.index('z')
46 | tracks[:, 2] = in_tracks[:, index]
47 | if 'y' in headers:
48 | index = headers.index('y')
49 | tracks[:, 3] = in_tracks[:, index]
50 | if 'x' in headers:
51 | index = headers.index('x')
52 | tracks[:, 4] = in_tracks[:, index]
53 |
54 | scale = [1, 1, 1]
55 | if 'z' in headers:
56 | scale = [1, 1, 1, 1]
57 |
58 | default_headers = [track_id_header, 't', 'z', 'y', 'x']
59 | properties = {}
60 | for head in headers:
61 | if head not in default_headers:
62 | property_ = []
63 | index_head = headers.index(head)
64 | for i in range(in_tracks.shape[0]):
65 | property_.append(in_tracks[i, index_head])
66 | properties[head] = property_
67 | self.stracks = STracks(data=tracks, properties=properties, graph={}, scale=scale)
68 |
69 | def write(self, tracks):
70 | self.stracks = tracks
71 | # write tracks
72 | columns = [self.__track_id_header, 't', 'y', 'x']
73 | if tracks.data.shape[1] == 5:
74 | columns = [self.__track_id_header, 't', 'z', 'y', 'x']
75 | df = pd.DataFrame(data=tracks.data, index=None, columns=columns)
76 | # write properties
77 | for key, value in tracks.properties.items():
78 | df[key] = value
79 | print(df)
80 | df.to_csv(self.file_path, index=False)
81 |
--------------------------------------------------------------------------------
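
A round-trip sketch for CSVIO; the file path is a placeholder. Note that read() always rebuilds five columns [track_id, t, z, y, x], zero-filling z for 2D data.

import numpy as np

from stracking.containers import STracks
from stracking.io import CSVIO

data = np.array([[0, 0, 20, 20],
                 [0, 1, 20, 35]], dtype=float)  # [track_id, t, y, x]
CSVIO('demo_tracks.csv').write(STracks(data=data))

reader = CSVIO('demo_tracks.csv')
if reader.is_compatible():
    reader.read()
    print(reader.stracks.data)  # (2, 5) array with z zero-filled
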
/stracking/io/_icy_io.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import xml.etree.ElementTree as ET
3 |
4 | from ._io import STrackIO
5 | from stracking.containers import STracks
6 |
7 |
8 | class ICYIO(STrackIO):
9 | """Read a ICY model
10 |
11 | Parameters
12 | ----------
13 | file_path: str
14 | Path of the xml ICY file
15 |
16 | """
17 | def __init__(self, file_path):
18 | super().__init__(file_path)
19 | # read xml into tree
20 | if file_path.endswith('.xml'):
21 | self._tree = ET.parse(file_path)
22 | self._root = self._tree.getroot()
23 | else:
24 | self._root = None
25 |
26 | def is_compatible(self):
27 | if self._root is not None and self._root.tag == 'root':
28 | if len(self._root) >= 1:
29 | if self._root[0].tag == 'trackfile':
30 | return True
31 | return False
32 |
33 | def read(self):
34 | root = self._tree.getroot()
35 | tracks = np.empty((0, 5))
36 |
37 | # get the trackgroup element
38 | idx_trackgroup = 0
39 | for i in range(len(root)):
40 | if root[i].tag == 'trackgroup':
41 | idx_trackgroup = i
42 | break
43 |
44 | # parse tracks
45 | ids_map = {}
46 | graph = {}
47 | track_id = -1
48 | for track_element in root[idx_trackgroup]:
49 | track_id += 1
50 | ids_map[track_element.attrib['id']] = track_id
51 | for detection_element in track_element:
52 | row = [float(track_id),
53 | float(detection_element.attrib['t']),
54 | float(detection_element.attrib['z']),
55 | float(detection_element.attrib['y']),
56 | float(detection_element.attrib['x'])
57 | ]
58 | tracks = np.concatenate((tracks, [row]), axis=0)
59 |
60 | # parse linklist
61 | idx_linklist = 0
62 | for i in range(len(root)):
63 | if root[i].tag == 'linklist':
64 | idx_linklist = i
65 | break
66 |
67 | # print("id map=", ids_map)
68 | for link_element in root[idx_linklist]:
69 | from_idx = ids_map[link_element.attrib['from']]
70 | to_idx = ids_map[link_element.attrib['to']]
71 | if to_idx in graph:
72 | graph[float(to_idx)].append(float(from_idx))
73 | else:
74 | graph[float(to_idx)] = [float(from_idx)]
75 |
76 | self.stracks = STracks(data=tracks, properties=None, graph=graph)
77 |
78 | def write(self, tracks):
79 | raise Exception('STracking cannot write to ICY XML. Please use st.json')
80 |
--------------------------------------------------------------------------------
/stracking/io/_io.py:
--------------------------------------------------------------------------------
1 | # Reader interface and service
2 |
3 | class STrackIO:
4 | """Interface for a tracks reader/writer
5 |
6 | Parameters
7 | ----------
8 | file_path: str
9 | Path of the file to read
10 |
11 | Attributes
12 | ----------
13 | stracks : STracks
14 | Container of the read tracks
15 |
16 | """
17 | def __init__(self, file_path):
18 | self.file_path = file_path
19 | self.stracks = None
20 |
21 | def is_compatible(self):
22 | """Check if the file format and the reader are compatible
23 |
24 | Returns
25 | -------
26 | compatible: bool
27 | True if the reader and the file are compatible, False otherwise
28 |
29 | """
30 | return False
31 |
32 | def read(self):
33 | """Read a track file into STracks
34 |
35 | The parsed data are stored in the stracks attribute
36 |
37 | """
38 | raise Exception('STrackIO is abstract')
39 |
40 | def write(self, tracks):
41 | """Write tracks to file
42 |
43 | Parameters
44 | ----------
45 | tracks: STracks
46 | Tracks to write
47 | """
48 | raise Exception('STrackIO is abstract')
49 |
50 |
--------------------------------------------------------------------------------
/stracking/io/_isbi_io.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import xml.etree.ElementTree as ET
3 |
4 | from ._io import STrackIO
5 | from stracking.containers import STracks
6 |
7 |
8 | class ISBIIO(STrackIO):
9 | """Read/Write a ISBI XML tracks format
10 |
11 | Parameters
12 | ----------
13 | file_path: str
14 | Path of the xml ISBI file
15 |
16 | """
17 | def __init__(self, file_path):
18 | super().__init__(file_path)
19 | # read xml into tree
20 | if file_path.endswith('.xml'):
21 | self._tree = ET.parse(file_path)
22 | self._root = self._tree.getroot()
23 | else:
24 | self._root = None
25 |
26 | def is_compatible(self):
27 | if self._root is not None and self._root.tag == 'root':
28 | if len(self._root) >= 1:
29 | if self._root[0].tag == 'TrackContestISBI2012':
30 | return True
31 | return False
32 |
33 | def read(self):
34 | root = self._tree.getroot()
35 | tracks = np.empty((0, 5))
36 |
37 | # get the trackgroup element
38 | idx_trackcontest = 0
39 | for i in range(len(root)):
40 | if root[i].tag == 'TrackContestISBI2012':
41 | idx_trackcontest = i
42 | break
43 |
44 | # parse tracks=particles
45 | track_id = -1
46 | for particle_element in root[idx_trackcontest]:
47 | track_id += 1
48 | for detection_element in particle_element:
49 | row = [float(track_id),
50 | float(detection_element.attrib['t']),
51 | float(detection_element.attrib['z']),
52 | float(detection_element.attrib['y']),
53 | float(detection_element.attrib['x'])
54 | ]
55 | tracks = np.concatenate((tracks, [row]), axis=0)
56 |
57 | self.stracks = STracks(data=tracks, properties=None, graph={})
58 |
59 | def write(self, tracks):
60 | raise Exception('STracking cannot write to ISBI XML. '
61 | 'Please use st.json')
62 |
--------------------------------------------------------------------------------
/stracking/io/_particles_io.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | from stracking.containers import SParticles
4 |
5 |
6 | def read_particles(file):
7 | """Read particles from a file
8 |
9 | The CSV file must contain columns with headers T, Y and X for 2D data and T, Z, Y and X for 3D
10 | data. All additional columns will be read as particle properties
11 |
12 | Parameters
13 | ----------
14 | file: str
15 | Path of the input file
16 |
17 | Returns
18 | -------
19 | SParticles container with the read file
20 |
21 | Raises
22 | ------
23 | IOError when the file format is not recognised or is not well formatted
24 |
25 | """
26 | reader = SParticlesIO()
27 | return reader.read(file)
28 |
29 |
30 | def write_particles(file, particles):
31 | """Write particles into a file
32 |
33 | Parameters
34 | ----------
35 | file: str
36 | Path the file to be written
37 | particles: SParticles
38 | Particles container
39 |
40 | """
41 | writer = SParticlesIO()
42 | writer.write(file, particles)
43 |
44 |
45 | class SParticlesIO:
46 | """Read and write particles to file"""
47 |
48 | def __init__(self):
49 | pass
50 |
51 | @staticmethod
52 | def read(file):
53 | if file.endswith('.csv') or file.endswith('.CSV'):
54 | return CSVParticlesIO.read_csv_particles(file)
55 | else:
56 | raise IOError('SParticlesIO can only read CSV files')
57 |
58 | @staticmethod
59 | def write(file, particles):
60 | if file.endswith('.csv') or file.endswith('.CSV'):
61 | return CSVParticlesIO.write_csv_particles(file, particles)
62 | else:
63 | raise IOError('SParticlesIO can only write CSV files')
64 |
65 |
66 | class CSVParticlesIO:
67 | """Read and Write particles from CSV files
68 |
69 | The CSV file must contain columns with headers T, Y and X for 2D data and T, Z, Y and X for 3D
70 | data. All additional columns will be read as particle properties
71 |
72 | """
73 | def __init__(self):
74 | pass
75 |
76 | @staticmethod
77 | def _read_properties(df):
78 | properties = {}
79 | header = df.columns.values.tolist()
80 | for h in header:
81 | if h != 'T' and h != 'Z' and h != 'Y' and h != 'X':
82 | properties[h] = df[h].values
83 | return properties
84 |
85 | @staticmethod
86 | def read_csv_particles(file):
87 | df = pd.read_csv(file)
88 | header = df.columns.values.tolist()
89 | if 'T' in header and 'X' in header and 'Y' in header and 'Z' not in header: # 2D+t
90 | particles = SParticles()
91 | data = np.zeros((df.shape[0], 3))
92 | for index, row in df.iterrows():
93 | data[index, 0] = row['T']
94 | data[index, 1] = row['Y']
95 | data[index, 2] = row['X']
96 | particles.data = data
97 | particles.properties = CSVParticlesIO._read_properties(df)
98 | particles.scale = [1, 1, 1]
99 | return particles
100 | elif 'T' in header and 'X' in header and 'Y' in header and 'Z' in header: # 3D+t
101 | particles = SParticles()
102 | data = np.zeros((df.shape[0], 4))
103 | for index, row in df.iterrows():
104 | data[index, 0] = row['T']
105 | data[index, 1] = row['Z']
106 | data[index, 2] = row['Y']
107 | data[index, 3] = row['X']
108 | particles.data = data
109 | particles.properties = CSVParticlesIO._read_properties(df)
110 | particles.scale = [1, 1, 1, 1]
111 | return particles
112 | else:
113 | raise IOError('A CSV particle file must have T, Y, X columns')
114 |
115 | @staticmethod
116 | def write_csv_particles(file, particles):
117 | if particles.data.shape[1] == 3: # 2D+t
118 | data_mat = particles.data.copy()
119 | columns = ['T', 'Y', 'X']
120 | for prop, values in particles.properties.items():
121 | columns.append(prop)
122 | data_mat = np.column_stack((data_mat, values))
123 | df = pd.DataFrame(data_mat, columns=columns)
124 | df.to_csv(file, index=False)
125 | else:
126 | data_mat = particles.data.copy()
127 | columns = ['T', 'Z', 'Y', 'X']
128 | for prop, values in particles.properties.items():
129 | columns.append(prop)
130 | data_mat = np.column_stack((data_mat, values))
131 | df = pd.DataFrame(data_mat, columns=columns)
132 | df.to_csv(file, index=False)
133 |
--------------------------------------------------------------------------------
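
A round-trip sketch for the particle I/O helpers; the path is a placeholder and the 'intensity' property is illustrative. Extra CSV columns survive the trip as properties.

import numpy as np

from stracking.containers import SParticles
from stracking.io import read_particles, write_particles

# 2D+t detections; columns are [T, Y, X]
particles = SParticles(data=np.array([[0., 20., 20.],
                                      [1., 20., 35.]]))
particles.properties = {'intensity': np.array([10., 12.])}

write_particles('demo_particles.csv', particles)
back = read_particles('demo_particles.csv')
print(back.data.shape)         # (2, 3)
print(back.properties.keys())  # dict_keys(['intensity'])
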
/stracking/io/_reader_function.py:
--------------------------------------------------------------------------------
1 | from ._csv_io import CSVIO
2 | from ._trackmate_io import TrackMateIO
3 | from ._icy_io import ICYIO
4 | from ._isbi_io import ISBIIO
5 | from ._st_io import StIO
6 |
7 |
8 | def write_tracks(file_path, tracks, format_='st.json'):
9 | """Write tracks to file
10 |
11 | Parameters
12 | ----------
13 | file_path: str
14 | Path of the destination file
15 | tracks: STracks
16 | Container of tracks to be saved
17 | format_: str
18 | Name of the file format ('st.json', 'CSV', 'ICY', 'Trackmate')
19 |
20 | """
21 | if format_ == 'st.json':
22 | writer = StIO(file_path)
23 | writer.write(tracks)
24 | elif format_ == 'csv':
25 | writer = CSVIO(file_path)
26 | writer.write(tracks)
27 | else:
28 | raise IOError(f'Format {format_} not (yet) supported')
29 |
30 |
31 | def read_tracks(file_path):
32 | """Main track reader
33 |
34 | This method calls the first compatible reader found
35 |
36 | Parameters
37 | ----------
38 | file_path: str
39 | Path of the track file to read
40 |
41 | Returns
42 | -------
43 | tracks: STracks
44 | Container of the trajectories
45 |
46 | """
47 | print("read tracks:", file_path)
48 | # CSV
49 | csv_reader = CSVIO(file_path)
50 | if csv_reader.is_compatible():
51 | csv_reader.read()
52 | return csv_reader.stracks
53 |
54 | # TrackMate
55 | trackmate_reader = TrackMateIO(file_path)
56 | if trackmate_reader.is_compatible():
57 | trackmate_reader.read()
58 | return trackmate_reader.stracks
59 |
60 | # ICY
61 | icy_reader = ICYIO(file_path)
62 | if icy_reader.is_compatible():
63 | print('is compatible ICY :', file_path)
64 | icy_reader.read()
65 | return icy_reader.stracks
66 |
67 | # ISBI
68 | isbi_reader = ISBIIO(file_path)
69 | if isbi_reader.is_compatible():
70 | print('is compatible ISBI :', file_path)
71 | isbi_reader.read()
72 | return isbi_reader.stracks
73 |
74 | # JSON
75 | json_reader = StIO(file_path)
76 | if json_reader.is_compatible():
77 | print('is compatible STracking format :', file_path)
78 | json_reader.read()
79 | return json_reader.stracks
80 |
81 | print('is not compatible at all :', file_path)
82 | return None
83 |
--------------------------------------------------------------------------------
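
A sketch of the dispatch: read_tracks tries each reader in turn and returns the first match, so the caller never names a format; the input path below is a placeholder.

from stracking.io import read_tracks, write_tracks

tracks = read_tracks('FakeTracks_TrackMate.xml')  # placeholder path
if tracks is not None:
    print(tracks.data.shape, len(tracks.graph))
    # re-save in the writable native format
    write_tracks('FakeTracks.st.json', tracks, format_='st.json')
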
/stracking/io/_st_io.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 |
4 | import numpy as np
5 |
6 | from ._io import STrackIO
7 | from stracking.containers import STracks
8 |
9 |
10 | class StIO(STrackIO):
11 | """Read/write tracking with the native stracking format
12 |
13 | This format has been created for this library to easily read and write data
14 | stored in the STracks container
15 |
16 | Parameters
17 | ----------
18 | file_path: str
19 | Path of the .st.json file
20 |
21 | """
22 | def __init__(self, file_path):
23 | super().__init__(file_path)
24 | self.stracks = None
25 | self.indent = None
26 |
27 | def is_compatible(self):
28 | if self.file_path.endswith('.json'):
29 | return True
30 | return False
31 |
32 | def read(self):
33 | if os.path.getsize(self.file_path) > 0:
34 | with open(self.file_path) as json_file:
35 | json_data = json.load(json_file)
36 |
37 | self.stracks = STracks()
38 | if 'tracks' in json_data:
39 | self.stracks.data = np.array(json_data['tracks'])
40 | else:
41 | raise Exception('StIO reader: no tracks found in the input file')
42 |
43 | if 'properties' in json_data:
44 | self.stracks.properties = json_data['properties']
45 |
46 | if 'graph' in json_data:
47 | self.stracks.graph = json_data['graph']
48 |
49 | if 'features' in json_data:
50 | self.stracks.features = json_data['features']
51 |
52 | if 'scale' in json_data:
53 | self.stracks.scale = tuple(json_data['scale'])
54 | if len(self.stracks.scale) < self.stracks.data.shape[1]-1:
55 | self.stracks.scale = list(np.ones(self.stracks.data.shape[1]-1))
56 |
57 | def write(self, tracks):
58 | self.stracks = tracks
59 | json_data = dict()
60 | json_data['tracks'] = self.stracks.data.tolist()
61 |
62 | json_data['properties'] = dict()
63 | if self.stracks.properties is not None:
64 | for key in self.stracks.properties:
65 | if isinstance(self.stracks.properties[key], list):
66 | json_data['properties'][key] = self.stracks.properties[key]
67 | else:
68 | json_data['properties'][key] = self.stracks.properties[key].tolist()
69 |
70 | json_data['graph'] = self.stracks.graph
71 | json_data['features'] = self.stracks.features
72 | if self.stracks.scale is not None:
73 | json_data['scale'] = list(self.stracks.scale)
74 | else:
75 | json_data['scale'] = []
76 |
77 | # write the data to file
78 | with open(self.file_path, 'w') as outfile:
79 | json.dump(json_data, outfile, indent=self.indent)
80 |
--------------------------------------------------------------------------------
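
A round-trip sketch for the native format; the path is a placeholder, and setting indent is optional (the default writes compact json).

import numpy as np

from stracking.containers import STracks
from stracking.io import StIO

data = np.array([[0, 0, 0, 20, 20],
                 [0, 1, 0, 20, 35]], dtype=float)  # [track_id, t, z, y, x]
tracks = STracks(data=data, graph={}, features={}, scale=(1, 1, 1, 1))

writer = StIO('demo.st.json')
writer.indent = 2  # pretty-print the json
writer.write(tracks)

reader = StIO('demo.st.json')
reader.read()
print(reader.stracks.data)
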
/stracking/io/_trackmate_io.py:
--------------------------------------------------------------------------------
1 | import xml.etree.ElementTree as ET
2 | import numpy as np
3 |
4 | from ._io import STrackIO
5 | from stracking.containers import STracks
6 |
7 |
8 | class TrackMateIO(STrackIO):
9 | """Read a TrackMate model
10 |
11 | Parameters
12 | ----------
13 | file_path: str
14 | Path of the xml TrackMate model file
15 |
16 | """
17 |
18 | def __init__(self, file_path):
19 | super().__init__(file_path)
20 | # read xml into tree
21 | if file_path.endswith('.xml'):
22 | self._tree = ET.parse(file_path)
23 | self._root = self._tree.getroot()
24 | else:
25 | self._root = None
26 |
27 | # internal tmp data
28 | self._tracks = None
29 | self._graph = {}
30 | self._properties = dict()
31 | self._features = dict()
32 | self._model_idx = 0
33 | self._track_ids_count = -1
34 | self._starting_sources = []
35 | self._starting_track_idx = []
36 | self._feature_tags = []
37 | self.init_features()
38 | self._props = []
39 | self.init_properties()
40 |
41 | def init_properties(self):
42 | self._props = ['QUALITY',
43 | 'MAX_INTENSITY',
44 | 'MEDIAN_INTENSITY',
45 | 'VISIBILITY',
46 | 'MEAN_INTENSITY',
47 | 'TOTAL_INTENSITY',
48 | 'ESTIMATED_DIAMETER',
49 | 'RADIUS',
50 | 'SNR',
51 | 'STANDARD_DEVIATION',
52 | 'CONTRAST',
53 | 'MANUAL_COLOR',
54 | 'MIN_INTENSITY']
55 |
56 | for prop in self._props:
57 | self._properties[prop] = []
58 |
59 | def init_features(self):
60 | self._feature_tags = ['NUMBER_SPOTS',
61 | 'NUMBER_GAPS',
62 | 'LONGEST_GAP',
63 | 'TRACK_DURATION',
64 | 'TRACK_START',
65 | 'TRACK_STOP',
66 | 'TRACK_DISPLACEMENT',
67 | 'TRACK_X_LOCATION',
68 | 'TRACK_Y_LOCATION',
69 | 'TRACK_Z_LOCATION',
70 | 'TRACK_MEAN_SPEED',
71 | 'TRACK_MAX_SPEED',
72 | 'TRACK_MIN_SPEED',
73 | 'TRACK_MEDIAN_SPEED',
74 | 'TRACK_STD_SPEED',
75 | 'TRACK_MEAN_QUALITY',
76 | 'TRACK_MAX_QUALITY',
77 | 'TRACK_MIN_QUALITY',
78 | 'TRACK_MEDIAN_QUALITY',
79 | 'TRACK_STD_QUALITY']
80 |
81 | for tag in self._feature_tags:
82 | self._features[tag] = dict()
83 |
84 | def _add_property(self, prop):
85 | for key in prop:
86 | if key in self._properties:
87 | self._properties[key].append(prop[key])
88 |
89 | def is_compatible(self):
90 | if self._root is not None and self._root.tag == 'TrackMate':
91 | return True
92 | return False
93 |
94 | def read(self):
95 | self._tracks = np.empty((0, 5))
96 | # find model element
97 | for i in range(len(self._root)):
98 | if self._root[i].tag == 'Model':
99 | self._model_idx = i
100 | break
101 | # parse each track
102 | for filtered_track in self._root[self._model_idx][3]:
103 | # get the edges of each filtered tracks
104 | track_id = int(filtered_track.attrib['TRACK_ID'])
105 | for all_track in self._root[self._model_idx][2]:
106 | if int(all_track.attrib['TRACK_ID']) == track_id:
107 | # if track_id == 0: # remove later
108 | self.get_track_edges(track_id, all_track)
109 | self.stracks = STracks(data=self._tracks, properties=self._properties,
110 | graph=self._graph, features=self._features)
111 |
112 | def get_track_features(self, track_id, xml_element):
113 | for tag in self._feature_tags:
114 | if tag in xml_element.attrib:
115 | self._features[tag][track_id] = float(xml_element.attrib[tag])
116 |
117 | def get_track_edges(self, track_id, xml_element):
118 | sources = []
119 | targets = []
120 | sources_props = []
121 | targets_props = []
122 | for child in xml_element:
123 | source_spot, source_props = \
124 | self.find_spot(child.attrib['SPOT_SOURCE_ID'])
125 | target_spot, target_props = \
126 | self.find_spot(child.attrib['SPOT_TARGET_ID'])
127 | sources.append(source_spot)
128 | targets.append(target_spot)
129 | sources_props.append(source_props)
130 | targets_props.append(target_props)
131 |
132 | sources = np.array(sources)
133 | targets = np.array(targets)
134 |
135 | # sort sources and targets
136 | sort_idxs = sources[:, 1].argsort()
137 | sources = sources[sort_idxs]
138 | targets = targets[sort_idxs]
139 | #sources_props = sources_props[sort_idxs]
140 | #targets_props = targets_props[sort_idxs]
141 |
142 | # search for splits ids
143 | unique, counts = np.unique(sources[:, 0], return_counts=True)
144 | split_idxs = unique[np.where(counts > 1)]
145 |
146 | # search merge ids
147 | uniquet, countst = np.unique(targets[:, 0], return_counts=True)
148 | merge_idxs = uniquet[np.where(countst > 1)]
149 |
150 | self._track_ids_count += 1
151 | self.extract_subtrack(sources, sources_props, targets, targets_props,
152 | sort_idxs, split_idxs, merge_idxs,
153 | sources[0, 0], self._track_ids_count)
154 | self.get_track_features(self._track_ids_count, xml_element)
155 |
156 | def extract_subtrack(self, sources, sources_props, targets, targets_props,
157 | sort_idxs, split_idxs, merge_idxs, source_id,
158 | track_id):
159 |
160 | self._starting_sources.append(source_id)
161 | self._starting_track_idx.append(track_id)
162 | idx = np.where(sources[:, 0] == source_id)[0][0]
163 |
164 | # create new track
165 | # add sources
166 | source = sources[idx, :].copy()
167 |
168 | source[0] = track_id
169 | self._tracks = np.concatenate((self._tracks, [source]), axis=0)
170 | self._add_property(sources_props[sort_idxs[idx]])
171 | # add next targets
172 | while 1:
173 | source = sources[idx, :].copy()
174 | target = targets[idx, :].copy()
175 | if source[0] in split_idxs:
176 | # maybe need to start in the next point
177 | split_sources = np.where(sources[:, 0] == source[0])[0]
178 | for ss_id in split_sources:
179 | self._track_ids_count += 1
180 | next_track_id = self._track_ids_count
181 | self._graph[next_track_id] = [track_id]
182 | next_idx = targets[ss_id, 0].copy()
183 | self.extract_subtrack(sources, sources_props, targets,
184 | targets_props, sort_idxs, split_idxs,
185 | merge_idxs, next_idx, next_track_id)
186 | break
187 | elif target[0] in merge_idxs:
188 | starting_points = np.where(sources[:, 0] == target[0])
189 | for sp_idx in starting_points[0]:
190 | if sources[sp_idx, 0] not in self._starting_sources:
191 | self._track_ids_count += 1
192 | next_track_id = self._track_ids_count
193 | self._graph[next_track_id] = [track_id]
194 |
195 | self.extract_subtrack(sources, sources_props, targets,
196 | targets_props, sort_idxs,
197 | split_idxs, merge_idxs,
198 | sources[sp_idx, 0], next_track_id)
199 | else:
200 | ind = self._starting_sources.index(sources[sp_idx, 0])
201 | merge_id = self._starting_track_idx[ind]
202 | if merge_id in self._graph:
203 | self._graph[merge_id].append(track_id)
204 | else:
205 | self._graph[merge_id] = [track_id]
206 | break
207 | else:
208 | target[0] = track_id
209 | self._tracks = np.concatenate((self._tracks, [target]), axis=0)
210 | self._add_property(targets_props[sort_idxs[idx]])
211 |
212 | # go to the next
213 | idx = np.where(sources[:, 0] == targets[idx, 0])[0]
214 | if len(idx) > 0:
215 | idx = idx[0]
216 | else:
217 | break
218 |
219 | self.stracks = STracks(data=self._tracks, properties=None,
220 | graph=self._graph)
221 |
222 | def extract_spot_properties(self, spot_element):
223 | spot_properties = dict()
224 | for prop in self._props:
225 | if prop in spot_element.attrib:
226 | spot_properties[prop] = float(spot_element.attrib[prop])
227 | return spot_properties
228 |
229 | def find_spot(self, spot_id):
230 | all_spots = self._root[self._model_idx][1]
231 | for spot_in_frame in all_spots:
232 | for spot in spot_in_frame:
233 | if spot.attrib['ID'] == spot_id:
234 | props = self.extract_spot_properties(spot)
235 | return [int(spot_id),
236 | float(spot.attrib['POSITION_T']),
237 | float(spot.attrib['POSITION_Z']),
238 | float(spot.attrib['POSITION_Y']),
239 | float(spot.attrib['POSITION_X'])], props
240 |
241 | def write(self, tracks):
242 | raise Exception('STracking cannot write to TrackMate XML. '
243 | 'Please use st.json')
244 |
--------------------------------------------------------------------------------
/stracking/io/tests/FakeTracks_ISBI.xml:
--------------------------------------------------------------------------------
(XML test fixture; its markup was not preserved in this dump)
--------------------------------------------------------------------------------
/stracking/io/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/io/tests/__init__.py
--------------------------------------------------------------------------------
/stracking/io/tests/test_reader.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 | from stracking.io._reader_function import read_tracks
4 | from stracking.containers import STracks
5 |
6 |
7 | # tmp_path is a pytest fixture
8 | def test_reader_trackmate(tmp_path):
9 | """An example of how you might test your plugin."""
10 |
11 | root_dir = os.path.dirname(os.path.abspath(__file__))
12 | my_test_file = os.path.join(root_dir, 'FakeTracks_TrackMate.xml')
13 |
14 | stracks = read_tracks(my_test_file)
15 | assert isinstance(stracks, STracks)
16 |
17 | data = stracks.data
18 | assert isinstance(data, np.ndarray)
19 |
20 | graph = stracks.graph
21 | assert isinstance(graph, dict)
22 |
23 | np.testing.assert_equal((200, 5), data.shape)
24 | np.testing.assert_equal(8, len(graph))
25 |
26 |
27 | def test_reader_icy(tmp_path):
28 | """An example of how you might test your plugin."""
29 |
30 | root_dir = os.path.dirname(os.path.abspath(__file__))
31 | my_test_file = os.path.join(root_dir, 'FakeTracks_Icy.xml')
32 |
33 | stracks = read_tracks(my_test_file)
34 | assert isinstance(stracks, STracks)
35 |
36 | data = stracks.data
37 | assert isinstance(data, np.ndarray)
38 |
39 | graph = stracks.graph
40 | assert isinstance(graph, dict)
41 |
42 | np.testing.assert_equal((237, 5), data.shape)
43 | np.testing.assert_equal(9, len(graph))
44 |
45 |
46 | def test_reader_isbi(tmp_path):
47 | """An example of how you might test your plugin."""
48 |
49 | root_dir = os.path.dirname(os.path.abspath(__file__))
50 | my_test_file = os.path.join(root_dir, 'FakeTracks_ISBI.xml')
51 |
52 | stracks = read_tracks(my_test_file)
53 | assert isinstance(stracks, STracks)
54 |
55 | data = stracks.data
56 | assert isinstance(data, np.ndarray)
57 |
58 | graph = stracks.graph
59 | assert isinstance(graph, dict)
60 |
61 | np.testing.assert_equal((156, 5), data.shape)
62 | np.testing.assert_equal(0, len(graph))
63 |
64 |
65 | def test_reader_csv(tmp_path):
66 | """An example of how you might test your plugin."""
67 |
68 | root_dir = os.path.dirname(os.path.abspath(__file__))
69 | my_test_file = os.path.join(root_dir, 'two_tracks.csv')
70 |
71 | stracks = read_tracks(my_test_file)
72 | assert isinstance(stracks, STracks)
73 |
74 | data = stracks.data
75 | assert isinstance(data, np.ndarray)
76 |
77 | graph = stracks.graph
78 | assert isinstance(graph, dict)
79 |
80 | np.testing.assert_equal((29, 5), data.shape)
81 | np.testing.assert_equal(0, len(graph))
82 |
--------------------------------------------------------------------------------
/stracking/io/tests/two_tracks.csv:
--------------------------------------------------------------------------------
1 | TrackID,t,x,y,z
2 | 0,16, 41.5828343348868, 47.505930020081664, 0
3 | 0,17, 41.48425270538317, 51.6023835597057, 0
4 | 0,18, 41.481034867980455, 54.54519941534417, 0
5 | 0,19, 41.64010631130603, 56.412158936456485, 0
6 | 0,20, 41.45348007949482, 59.493075392567796, 0
7 | 0,21, 41.48427793603068, 63.487171303379796, 0
8 | 0,22, 41.522612554717504, 67.48191857483229, 0
9 | 0,23, 41.49643414461526, 71.48156069500057, 0
10 | 0,24, 41.55137307580421, 75.54046900963168, 0
11 | 0,25, 44.42802758812339, 79.47175329484698, 0
12 | 0,26, 48.48500802831536, 82.31747794676691, 0
13 | 1,9, 61.46371941203304, 25.451857915848844, 0
14 | 1,10, 62.52032573501999, 29.503322789223674, 0
15 | 1,11, 62.59759565265421, 33.51195575773614, 0
16 | 1,12, 62.61499566572898, 36.63152829485196, 0
17 | 1,13, 62.533356685340756, 38.52097717863964, 0
18 | 1,14, 62.546943735014004, 40.62404332909287, 0
19 | 1,15, 64.10591804132584, 46.354399061407456, 0
20 | 1,16, 63.4831365134461, 51.53871467360241, 0
21 | 1,17, 63.69349901967333, 55.38130315664524, 0
22 | 1,18, 61.54717526309182, 57.48457974917827, 0
23 | 1,19, 59.46700644114774, 62.42209302661032, 0
24 | 1,20, 59.330652590225625, 67.49686927278012, 0
25 | 1,21, 59.53822569942737, 72.48454314779649, 0
26 | 1,22, 59.31800920615935, 76.62252875662888, 0
27 | 1,23, 59.522771113989045, 78.43727917122956, 0
28 | 1,24, 59.43247491434688, 79.62167298768841, 0
29 | 1,25, 59.53680156615469, 79.48271733219865, 0
30 | 1,26, 54.76543582275898, 82.56952403357312, 0
--------------------------------------------------------------------------------
/stracking/linkers/__init__.py:
--------------------------------------------------------------------------------
1 | from ._euclidean_cost import EuclideanCost
2 | from ._sp_linker import SPLinker
3 | from ._nn_linker import SNNLinker
4 | from ._linker import SLinker, SLinkerCost
5 |
6 | __all__ = ['SLinker', 'SLinkerCost', 'SPLinker', 'SNNLinker', 'EuclideanCost']
7 |
--------------------------------------------------------------------------------
/stracking/linkers/_euclidean_cost.py:
--------------------------------------------------------------------------------
1 | from ._linker import SLinkerCost
2 |
3 |
4 | class EuclideanCost(SLinkerCost):
5 | """Calculate the squared euclidean distance between two objects center
6 |
7 | It calculated the squared distance and not the distance to save computation
8 |
9 | """
10 |
11 | def __init__(self, max_cost=1000):
12 | super().__init__(max_cost)
13 |
14 | def run(self, obj1, obj2, dt=1):
15 | # print('obj1=', obj1)
16 | # print('obj2=', obj2)
17 | if len(obj1) == 4: # 3D
18 | return pow(obj1[1] - obj2[1], 2) + \
19 | pow(obj1[2] - obj2[2], 2) + \
20 | pow(obj1[3] - obj2[3], 2)
21 | return pow(obj1[1] - obj2[1], 2) + pow(obj1[2] - obj2[2], 2)
22 |
--------------------------------------------------------------------------------
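
A quick worked example of the cost: for 2D rows [t, y, x] only the spatial components enter, so max_cost is compared against a squared distance.

from stracking.linkers import EuclideanCost

cost = EuclideanCost(max_cost=3000)
p1 = [0., 53., 12.]  # [t, y, x]
p2 = [1., 53., 26.]

# (53 - 53)**2 + (12 - 26)**2 = 196
print(cost.run(p1, p2))                  # 196.0
print(cost.run(p1, p2) < cost.max_cost)  # True: the link is allowed
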
/stracking/linkers/_linker.py:
--------------------------------------------------------------------------------
1 | # interface for detector
2 | import numpy as np
3 | from stracking.observers import SObservable
4 |
5 |
6 | class SLinkerCost:
7 | """Interface for a linker cost
8 |
9 | This calculates the cost between two particles
10 |
11 | """
12 | def __init__(self, max_cost=1000):
13 | self.max_cost = max_cost
14 |
15 | def run(self, particle1, particle2):
16 | """Calculate the cost of linking particle1 and particle2
17 |
18 | Parameters
19 | ----------
20 | particle1 : array
21 | First particle data (t, Y, X) for 2D, (t, Z, Y, X) for 3D
22 | particle2: array
23 | Second particle data (t, Y, X) for 2D, (t, Z, Y, X) for 3D
24 |
25 | Returns
26 | -------
27 | cost: float
28 | Link cost
29 |
30 | """
31 | raise Exception('SLinkerCost: is abstract')
32 |
33 |
34 | class SLinker(SObservable):
35 | """Interface for a particle tracker
36 |
37 | The parameters must be set to the constructor and the image data and
38 | particles to the run method
39 | Example:
40 | ```
41 | euclidean_cost = EuclideanCost(max_cost=5.0)
42 | my_tracker = MyParticleTracker(cost=euclidean_cost, gap=1)
43 | tracks = my_tracker.run(particles, image)
44 | ```
45 |
46 | Parameters
47 | ----------
48 | cost: SLinkerCost
49 | Object defining the linker cost
50 |
51 | """
52 | def __init__(self, cost=None):
53 | super().__init__()
54 | self.cost = cost
55 |
56 | def run(self, particles, image=None):
57 | """Run the tracker
58 |
59 | Parameters
60 | ----------
61 | particles: SParticles
62 | List of particles for each frame
63 | image: ndarray
64 | Optional time frames to analyse
65 |
66 | Returns
67 | -------
68 | tracks: STracks
69 |
70 | """
71 | raise Exception('SLinker is abstract')
72 |
73 |
74 | def calculate_num_obj_per_frame(detections):
75 | """Calculate the number of objects for each frames
76 |
77 | Parameters
78 | ----------
79 | detections : ndarray
80 | 2D array where each line is an object with the following mandatory
81 | features: [t, z, y, x] or [t, y, x].
82 |
83 | Returns
84 | -------
85 | counts : ndarray
86 | Number of objects in each frame
87 | """
88 | first_col = detections[:, 0]
89 | num_index = len(np.unique(first_col))
90 | counts = np.zeros(num_index, dtype=int)
91 | for t in range(num_index):
92 | counts[t] = int(np.count_nonzero(first_col == t))
93 | return counts
94 |
--------------------------------------------------------------------------------
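
A small check of the frame-count helper; it is not re-exported by stracking.linkers, so the import below goes through the private module. Note from the loop that frame indices are assumed contiguous from 0.

import numpy as np

from stracking.linkers._linker import calculate_num_obj_per_frame

detections = np.array([[0., 53., 12.],   # frame 0: two objects
                       [0., 13., 10.],
                       [1., 53., 26.]])  # frame 1: one object
print(calculate_num_obj_per_frame(detections))  # [2 1]
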
/stracking/linkers/_nn_linker.py:
--------------------------------------------------------------------------------
1 | # nearest neighbor tracker
2 | import numpy as np
3 | from scipy.sparse import lil_matrix
4 | from scipy.sparse.csgraph import bellman_ford
5 |
6 | from ._linker import SLinker, calculate_num_obj_per_frame
7 | from stracking.containers import STracks
8 |
9 |
10 | class SNNLinker(SLinker):
11 | """Linker using nearest neighbor algorithm
12 |
13 | Find the trajectories by linking each detection to its nearest neighbor
14 |
15 | This tracker cannot handle split or merge events
16 |
17 | Example:
18 |
19 | particles = SParticles(...)
20 | euclidean_cost = EuclideanCost(max_cost=5.0)
21 | my_tracker = SNNLinker(cost=euclidean_cost, gap=1)
22 | tracks = my_tracker.run(particles)
23 |
24 |
25 | Parameters
26 | ----------
27 | cost: SLinkerCost
28 | Object defining the linker cost
29 | gap: int
30 | Gap (in frame number) of possible missing detections
31 | min_track_length: int
32 | Minimum number of connections in a selected track
33 |
34 | """
35 | def __init__(self, cost=None, gap=1, min_track_length=2):
36 | super().__init__(cost)
37 | self.int_convert_coef = 10000
38 | self._detections = None
39 | self.min_track_length = min_track_length
40 | self.gap_closing = gap
41 | self.tracks_ = None
42 | self.track_count_ = -1
43 | self._dim = 0
44 |
45 | def run(self, particles, image=None):
46 | self._detections = particles.data
47 |
48 | self.notify('processing')
49 | self.progress(0)
50 |
51 | print('detections shape=', self._detections.shape)
52 |
53 | if self._detections.shape[1] == 4:
54 | self._dim = 3
55 | else:
56 | self._dim = 2
57 |
58 | # TODO implement the nearest neighbor algorithm
59 |
60 | self.progress(100)
61 | self.notify('done')
62 | return STracks(data=self.tracks_, properties=None, graph={}, scale=particles.scale)
63 |
--------------------------------------------------------------------------------
/stracking/linkers/_sp_linker.py:
--------------------------------------------------------------------------------
1 | # shortest path tracker
2 | import numpy as np
3 | from scipy.sparse import lil_matrix
4 | from scipy.sparse.csgraph import bellman_ford
5 |
6 | from ._linker import SLinker, calculate_num_obj_per_frame
7 | from .utils import match_properties
8 | from stracking.containers import STracks
9 |
10 |
11 | class SPLinker(SLinker):
12 | """Linker using Shortest Path algorithm
13 |
14 | Find the optimal trajectories by finding iteratively the shortest path in
15 | the graph of all the possible trajectories
16 |
17 | This tracker cannot handle split or merge events
18 |
19 | Example:
20 |
21 | particles = SParticles(...)
22 | euclidean_cost = EuclideanCost(max_cost=5.0)
23 | my_tracker = SPLinker(cost=euclidean_cost, gap=1)
24 | tracks = my_tracker.run(particles)
25 |
26 |
27 | Parameters
28 | ----------
29 | cost: SLinkerCost
30 | Object defining the linker cost
31 | gap: int
32 | Gap (in frame number) of possible missing detections
33 | min_track_length: int
34 | Minimum number of connections in a selected track
35 |
36 | """
37 | def __init__(self, cost=None, gap=1, min_track_length=2):
38 | super().__init__(cost)
39 | self.int_convert_coef = 10000
40 | self._detections = None
41 | self.min_track_length = min_track_length
42 | self.gap_closing = gap
43 | self._jumpEpsilon = 0.01
44 | self.tracks_ = None
45 | self.track_count_ = -1
46 | self._dim = 0
47 |
48 | def run(self, particles, image=None):
49 | self._detections = particles.data
50 |
51 | self.notify('processing')
52 | self.progress(0)
53 |
54 | # print('detections shape=', self._detections.shape)
55 |
56 | if self._detections.shape[1] == 4:
57 | self._dim = 3
58 | else:
59 | self._dim = 2
60 | # get the number of objects per frames
61 | num_obj_per_frame = calculate_num_obj_per_frame(self._detections)
62 | detections_num = self._detections.shape[0]
63 |
64 | # 1- build the graph
65 | self.notify('processing: build graph')
66 | graph = lil_matrix((detections_num + 2, detections_num + 2))
67 | source_idx = 0
68 | target_idx = detections_num + 1
69 |
70 | # 1.1- connect source to each detection and then to target
71 | for i in range(detections_num):
72 | graph[source_idx, i + 1] = 1
73 | graph[i + 1, target_idx] = 1
74 |
75 | # 1.2- connect detections that are close enough
76 | num_frames = len(num_obj_per_frame)
77 | #print('num frames=', num_frames)
78 | for frame in range(num_frames - 1):
79 | for nframe in range(1, self.gap_closing + 1):
80 | n_frame = frame + nframe
81 | if n_frame >= num_frames:
82 | break
83 | for nt in range(num_obj_per_frame[frame]):
84 | for nnt in range(num_obj_per_frame[n_frame]):
85 |
86 | idx_obj1 = nt + num_obj_per_frame[0:frame].sum() + 1
87 | idx_obj2 = nnt + num_obj_per_frame[0:n_frame].sum() + 1
88 |
89 | cost_value = \
90 | self.cost.run(self._detections[idx_obj1 - 1, :],
91 | self._detections[idx_obj2 - 1, :])
92 |
93 | #print('cost=', cost_value)
94 | #print('self.cost.max_cost=', self.cost.max_cost)
95 | if cost_value < self.cost.max_cost:
96 | if frame - n_frame - 1 > 0:
97 | graph[idx_obj1, idx_obj2] = \
98 | int((cost_value / self.cost.max_cost - 1.0
99 | - (frame - n_frame - 1)
100 | + self._jumpEpsilon)
101 | * self.int_convert_coef)
102 | else:
103 | graph[idx_obj1, idx_obj2] = \
104 | int((cost_value / self.cost.max_cost - 1.0)
105 | * self.int_convert_coef)
106 |
107 | # 2- Optimize
108 | self.progress(50)
109 | self.notify('processing: shortest path')
110 | self.tracks_ = np.empty((0, self._detections.shape[1]+1))
111 | while 1:
112 | #print('extract track...')
113 | # 2.1- Short path algorithm
114 | dist_matrix, predecessors = bellman_ford(csgraph=graph,
115 | directed=True,
116 | indices=0,
117 | return_predecessors=True)
118 |
119 | # 2.2- Make track from predecessors and update graph
120 | track = self._path_to_track(graph, predecessors)
121 |
122 | if track.shape[0] <= self.min_track_length:
123 | break
124 | else:
125 | self.tracks_ = np.concatenate((self.tracks_, track), axis=0)
126 |
127 | self.progress(100)
128 | self.notify('done')
129 | #print('Create tracks with scale= ', particles.scale)
130 | stracks = STracks(data=self.tracks_, properties=None,
131 | graph={}, features={}, scale=particles.scale)
132 | return match_properties(particles, stracks)
133 |
134 | def _path_to_track(self, graph, predecessors):
135 | """Transform a predecessor path to a Track
136 |
137 | Parameters
138 | ----------
139 | graph : array
140 | Sparse matrix containing the graph. The track nodes are removed by
141 | this method
142 | predecessors : array
143 | List of the predecessor indices of the objects in the path
144 |
145 | Returns
146 | -------
147 | track : ndarray
148 | Array of track rows representing the estimated trajectory
149 |
150 | """
151 |
152 | track = np.empty((0, self._detections.shape[1]+1))
153 | current = len(predecessors) - 1
154 | self.track_count_ += 1
155 | #print('dim in track to path=', self._dim)
156 | while 1:
157 | pred = predecessors[current]
158 | if pred > 0:
159 | #print("add predecessor...")
160 | # remove the track nodes in the graph
161 | graph[pred, :] = 0
162 | graph[:, pred] = 0
163 |
164 | # create the track data
165 | object_array = self._detections[pred - 1, :]
166 | if self._dim == 2:
167 | spot = [self.track_count_, object_array[0],
168 | object_array[1], object_array[2]]
169 | track = np.concatenate(([spot], track), axis=0)
170 | elif self._dim == 3:
171 | spot = [self.track_count_, object_array[0],
172 | object_array[1], object_array[2], object_array[3]]
173 | track = np.concatenate(([spot], track), axis=0)
174 | else:
175 | raise Exception('Tracker cannot create track with object'
176 | ' of dimension ' + str(self._dim))
177 | current = pred
178 | else:
179 | break
180 | return track
181 |
--------------------------------------------------------------------------------
/stracking/linkers/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sylvainprigent/stracking/b992eeab4abac0b8d6e83c37d530eb9835d4c837/stracking/linkers/tests/__init__.py
--------------------------------------------------------------------------------
/stracking/linkers/tests/test_sp_linker.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 |
4 | from stracking.containers import SParticles
5 | from stracking.linkers import EuclideanCost, SPLinker
6 |
7 |
8 | def test_sp_linker():
9 | """An example of how you might test your plugin."""
10 |
11 | detections = np.array([[0., 53., 12.],
12 | [0., 93., 11.],
13 | [0., 13., 10.],
14 | [1., 53., 26.],
15 | [1., 93., 26.],
16 | [1., 13., 26.],
17 | [2., 13., 41.],
18 | [2., 93., 41.],
19 | [2., 53., 41.],
20 | [3., 93., 56.],
21 | [3., 13., 55.],
22 | [3., 54., 56.],
23 | [4., 53., 71.],
24 | [4., 94., 71.],
25 | [4., 13., 71.]])
26 | particles = SParticles(data=detections)
27 |
28 | euclidean_cost = EuclideanCost(max_cost=3000)
29 | my_tracker = SPLinker(cost=euclidean_cost, gap=1)
30 | tracks = my_tracker.run(particles)
31 |
32 | # print(tracks.data)
33 |
34 | expected_output = [[0., 0., 53., 12.],
35 | [0., 1., 53., 26.],
36 | [0., 2., 53., 41.],
37 | [0., 3., 54., 56.],
38 | [0., 4., 53., 71.],
39 | [1., 0., 93., 11.],
40 | [1., 1., 93., 26.],
41 | [1., 2., 93., 41.],
42 | [1., 3., 93., 56.],
43 | [1., 4., 94., 71.],
44 | [2., 0., 13., 10.],
45 | [2., 1., 13., 26.],
46 | [2., 2., 13., 41.],
47 | [2., 3., 13., 55.],
48 | [2., 4., 13., 71.]]
49 |
50 | np.testing.assert_almost_equal(expected_output, tracks.data, decimal=1)
51 |
52 |
53 | def test_sp_linker_gap():
54 | """An example of how you might test your plugin."""
55 |
56 | detections = np.array([[0, 20, 20],
57 | [0, 60, 20],
58 | [0, 100, 20],
59 | [1, 20, 35],
60 | [1, 100, 35],
61 | [2, 20, 50],
62 | [2, 60, 50],
63 | [2, 100, 50],
64 | [3, 20, 65],
65 | [3, 60, 65],
66 | [3, 100, 65],
67 | [4, 20, 80],
68 | [4, 60, 80],
69 | [4, 100, 80]])
70 | particles = SParticles(data=detections)
71 |
72 | euclidean_cost = EuclideanCost(max_cost=3000)
73 | my_tracker = SPLinker(cost=euclidean_cost, gap=2)
74 | tracks = my_tracker.run(particles)
75 |
76 | # print(tracks.data)
77 |
78 | expected_output = np.array([[0, 0, 20, 20],
79 | [0, 1, 20, 35],
80 | [0, 2, 20, 50],
81 | [0, 3, 20, 65],
82 | [0, 4, 20, 80],
83 | [1, 0, 100, 20],
84 | [1, 1, 100, 35],
85 | [1, 2, 100, 50],
86 | [1, 3, 100, 65],
87 | [1, 4, 100, 80],
88 | [2, 0, 60, 20],
89 | [2, 2, 60, 50],
90 | [2, 3, 60, 65],
91 | [2, 4, 60, 80]]
92 | )
93 |
94 | np.testing.assert_almost_equal(expected_output, tracks.data, decimal=1)
95 |
--------------------------------------------------------------------------------
/stracking/linkers/utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def match_properties(particles, tracks):
5 | """Copy properties calculated from particles to tracks
6 |
7 | Parameters
8 | ----------
9 | particles: SParticles
10 | Set of particles with properties
11 | tracks: STracks
12 | Set of tracks without properties
13 |
14 | Returns
15 | -------
16 | the set of tracks with properties
17 |
18 | """
19 | # add all the properties
20 | properties = {}
21 | for property_ in particles.properties:
22 | properties[property_] = []
23 | # fill properties
24 | for i in range(tracks.data.shape[0]):
25 | x = np.where((particles.data == tracks.data[i, 1:]).all(axis=1))[0]
26 | if len(x) > 0:
27 | for property_ in particles.properties:
28 | properties[property_].append(float(particles.properties[property_][x[0]]))
29 | for property_ in particles.properties:
30 | properties[property_] = np.array(properties[property_])
31 | tracks.properties = properties
32 | return tracks
33 |
--------------------------------------------------------------------------------
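
SPLinker calls match_properties at the end of run, so per-detection properties follow their detections into the track rows. A sketch with an illustrative 'intensity' property on a single three-frame chain.

import numpy as np

from stracking.containers import SParticles
from stracking.linkers import EuclideanCost, SPLinker

particles = SParticles(data=np.array([[0., 53., 12.],
                                      [1., 53., 26.],
                                      [2., 53., 41.]]))
particles.properties = {'intensity': np.array([9., 10., 11.])}

tracks = SPLinker(cost=EuclideanCost(max_cost=3000), gap=1).run(particles)
print(tracks.properties['intensity'])  # values re-ordered to match tracks.data rows
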
/stracking/observers/__init__.py:
--------------------------------------------------------------------------------
1 | from ._observers import SObserver, SObservable, SObserverConsole
2 |
3 | __all__ = ['SObserver',
4 | 'SObservable',
5 | 'SObserverConsole']
6 |
--------------------------------------------------------------------------------
/stracking/observers/_observers.py:
--------------------------------------------------------------------------------
1 |
2 | class SObservable:
3 | """Interface for data processing class
4 |
5 | The observable class can notify the observers for progress
6 |
7 | """
8 | def __init__(self):
9 | self._observers = list()
10 |
11 | def add_observer(self, observer):
12 | """Add an observer
13 |
14 | Parameters
15 | ----------
16 | observer: SObserver
17 | Observer class
18 |
19 | """
20 | self._observers.append(observer)
21 |
22 | def notify(self, message):
23 | """Notify progress to observers
24 |
25 | Parameters
26 | ----------
27 | message: str
28 | Progress message
29 |
30 | """
31 | for obs in self._observers:
32 | obs.notify(message)
33 |
34 | def progress(self, value):
35 | """Notify progress to observers
36 |
37 | Parameters
38 | ----------
39 | value: int
40 | Progress value in [0, 100]
41 |
42 | """
43 | for obs in self._observers:
44 | obs.progress(value)
45 |
46 |
47 | class SObserver:
48 | """Interface of observer to notify progress
49 |
50 | An observer must implement the progress and message
51 |
52 | """
53 | def __init__(self):
54 | pass
55 |
56 | def notify(self, message):
57 | """Notify a progress message
58 |
59 | Parameters
60 | ----------
61 | message: str
62 | Progress message
63 |
64 | """
65 | raise Exception('SObserver is abstract')
66 |
67 | def progress(self, value):
68 | """Notify progress value
69 |
70 | Parameters
71 | ----------
72 | value: int
73 | Progress value in [0, 100]
74 |
75 | """
76 | raise Exception('SObserver is abstract')
77 |
78 |
79 | class SObserverConsole(SObserver):
80 | """print message and progress to console"""
81 | def __init__(self):
82 | super().__init__()
83 | pass
84 |
85 | def notify(self, message):
86 | print(message)
87 |
88 | def progress(self, value):
89 | print('progress:', value, '%')
90 |
--------------------------------------------------------------------------------
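
The classes above are the progress mechanism that detectors, linkers, features, filters and pipelines inherit; any of them accepts add_observer. A minimal sketch with a hypothetical task.

from stracking.observers import SObservable, SObserverConsole


class DemoTask(SObservable):
    """Hypothetical observable used only to illustrate the API"""
    def run(self):
        self.notify('processing')
        self.progress(50)
        self.progress(100)
        self.notify('done')


task = DemoTask()
task.add_observer(SObserverConsole())
task.run()  # prints: processing / progress: 50 % / progress: 100 % / done
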
/stracking/pipelines/__init__.py:
--------------------------------------------------------------------------------
1 | from ._pipeline import STrackingPipeline
2 |
3 | __all__ = ["STrackingPipeline"]
4 |
--------------------------------------------------------------------------------
/stracking/pipelines/_pipeline.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 |
4 | from stracking import detectors
5 | from stracking import linkers
6 | from stracking import properties
7 | from stracking import features
8 | from stracking import filters
9 | from stracking.observers import SObservable
10 |
11 |
12 | class STrackingPipeline(SObservable):
13 | def __init__(self):
14 | super().__init__()
15 | self.name = ''
16 | self.date = ''
17 | self.author = ''
18 | self.stracking_version = ''
19 | self._detector = None
20 | self._linker = None
21 | self._properties = []
22 | self._features = []
23 | self._filters = []
24 |
25 | @staticmethod
26 | def _read_json(file_path: str):
27 | """Read the metadata from the a json file"""
28 | if os.path.getsize(file_path) > 0:
29 | with open(file_path) as json_file:
30 | return json.load(json_file)
31 |
32 | @staticmethod
33 | def _write_json(metadata: dict, file_path: str):
34 | """Write the metadata to the a json file"""
35 | with open(file_path, 'w') as outfile:
36 | json.dump(metadata, outfile, indent=2)
37 |
38 | def load(self, file):
39 | """Load the pipeline from a json file
40 |
41 | Parameters
42 | ----------
43 | file: str
44 | Path of the pipeline json file
45 |
46 | """
47 | json_data = self._read_json(file)
48 | if 'name' in json_data:
49 | self.name = json_data['name']
50 | if 'date' in json_data:
51 | self.date = json_data['date']
52 | if 'author' in json_data:
53 | self.author = json_data['author']
54 | if 'stracking_version' in json_data:
55 | self.stracking_version = json_data['stracking_version']
56 | if 'detector' in json_data['steps']:
57 | if 'name' in json_data['steps']['detector']:
58 | parameters = {}
59 | if 'parameters' in json_data['steps']['detector']:
60 | parameters = json_data['steps']['detector']['parameters']
61 | self._detector = getattr(detectors, json_data['steps']['detector']['name'])(**parameters)
62 | if 'linker' in json_data['steps']:
63 | if 'name' in json_data['steps']['linker']:
64 | cost_fn = None
65 | if 'cost' in json_data['steps']['linker']:
66 | cost_params = json_data['steps']['linker']['cost']['parameters']
67 | cost_fn = getattr(linkers, json_data['steps']['linker']['cost']['name'])(**cost_params)
68 | parameters = {}
69 | if 'parameters' in json_data['steps']['linker']:
70 | parameters = json_data['steps']['linker']['parameters']
71 | self._linker = getattr(linkers, json_data['steps']['linker']['name'])(cost_fn, **parameters)
72 | if 'properties' in json_data['steps']:
73 | for prop in json_data['steps']['properties']:
74 | params = {}
75 | if "parameters" in prop:
76 | params = prop["parameters"]
77 | self._properties.append(getattr(properties, prop['name'])(**params))
78 | if 'features' in json_data['steps']:
79 | for feat in json_data['steps']["features"]:
80 | params = {}
81 | if "parameters" in feat:
82 | params = feat["parameters"]
83 | self._features.append(getattr(features, feat['name'])(**params))
84 | if 'filters' in json_data['steps']:
85 | for filter_ in json_data['steps']['filters']:
86 | params = {}
87 | if "parameters" in filter_:
88 | params = filter_["parameters"]
89 | self._filters.append(getattr(filters, filter_['name'])(**params))
90 |
91 | def run(self, image):
92 | """Run the pipeline on an image
93 |
94 | Parameters
95 | ----------
96 | image: ndarray
97 |
98 | Returns
99 | -------
100 | A STracks container of the extracted tracks
101 |
102 | """
103 | self.notify('Pipeline starts')
104 | self.notify('Pipeline detection...')
105 | self.progress(0)
106 | particles = self._detector.run(image)
107 |
108 | self.notify('Pipeline properties...')
109 | self.progress(20)
110 | for prop in self._properties:
111 | particles = prop.run(particles, image)
112 |
113 | self.notify('Pipeline linking...')
114 | self.progress(40)
115 | tracks = self._linker.run(particles, image)
116 |
117 | self.notify('Pipeline features...')
118 | self.progress(60)
119 | for feat in self._features:
120 | tracks = feat.run(tracks)
121 |
122 | self.notify('Pipeline filters...')
123 | self.progress(80)
124 | for filter_ in self._filters:
125 | tracks = filter_.run(tracks)
126 |
127 | self.notify('Pipeline done')
128 | self.progress(100)
129 | return tracks
130 |
--------------------------------------------------------------------------------
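A minimal usage sketch for STrackingPipeline, assuming a description file like the pipeline1.json fixture that follows. The random stand-in image only demonstrates the call sequence (a real time-lapse is needed for meaningful tracks), and printing tracks.data assumes the returned STracks container exposes its array that way:

import numpy as np
from stracking.pipelines import STrackingPipeline

pipeline = STrackingPipeline()
pipeline.load('pipeline1.json')  # fills _detector, _linker, _properties, ...

# run() expects a time-lapse array, e.g. (t, y, x) for 2D+t data
image = np.random.rand(5, 128, 128).astype(np.float32)
tracks = pipeline.run(image)  # detect -> properties -> link -> features -> filters
print(tracks.data)            # assumed STracks attribute, for illustration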
/stracking/pipelines/tests/pipeline1.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "pipeline1",
3 | "author": "Sylvain Prigent",
4 | "date": "2022-04-13",
5 | "stracking_version": "0.1.8",
6 | "steps": {
7 | "detector": {
8 | "name": "DoGDetector",
9 | "parameters": {
10 | "min_sigma": 4,
11 | "max_sigma": 5,
12 | "sigma_ratio": 1.1,
13 | "threshold": 0.15,
14 | "overlap": 0
15 | }
16 | },
17 | "linker": {
18 | "name": "SPLinker",
19 | "cost": {
20 | "name": "EuclideanCost",
21 | "parameters": {}
22 | },
23 | "parameters": {
24 | "gap": 1,
25 | "min_track_length": 2
26 | }
27 | },
28 | "properties": [
29 | {
30 | "name": "IntensityProperty",
31 | "parameters": {
32 | "radius": 2.5
33 | }
34 | }
35 | ],
36 | "features": [
37 | {
38 | "name": "LengthFeature"
39 | },
40 | {
41 | "name": "DistanceFeature"
42 | },
43 | {
44 | "name": "DisplacementFeature"
45 | }
46 | ],
47 | "filters": [
48 | {
49 | "name": "FeatureFilter",
50 | "parameters": {
51 | "feature_name": "distance",
52 | "min_val": 20,
53 | "max_val": 60
54 | }
55 | }
56 | ]
57 | }
58 | }
--------------------------------------------------------------------------------
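Each "name" in this fixture is resolved by STrackingPipeline.load() with getattr() on the matching stracking module, so a step entry maps one-to-one to a constructor call. A sketch of that resolution for the detector step, with the parameter values copied from the fixture above:

from stracking import detectors

step = {"name": "DoGDetector",
        "parameters": {"min_sigma": 4, "max_sigma": 5, "sigma_ratio": 1.1,
                       "threshold": 0.15, "overlap": 0}}
# equivalent to detectors.DoGDetector(min_sigma=4, max_sigma=5, ...)
detector = getattr(detectors, step["name"])(**step["parameters"])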
/stracking/properties/__init__.py:
--------------------------------------------------------------------------------
1 | from ._properties import SProperty
2 | from ._intensity import IntensityProperty
3 |
4 | __all__ = ['SProperty', 'IntensityProperty']
5 |
--------------------------------------------------------------------------------
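The IntensityProperty exported here is implemented in the file below. A minimal usage sketch, assuming an SParticles container can be built from a bare (t, y, x) data array and that its properties dict is initialized by default (coordinates and image are illustrative):

import numpy as np
from stracking.containers import SParticles
from stracking.properties import IntensityProperty

image = np.random.rand(3, 64, 64)  # (t, y, x) movie
particles = SParticles(data=np.array([[0, 10, 12],
                                      [1, 11, 13]]))  # rows are (t, y, x)
prop = IntensityProperty(radius=2.0)
particles = prop.run(particles, image)
print(particles.properties['mean_intensity'])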
/stracking/properties/_intensity.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from skimage.draw import disk
3 | from ._properties import SProperty
4 |
5 |
6 | def ball(z, x, y, radius):
7 | """Calculate coordinates of points inside a ball
8 |
9 | Parameters
10 | ----------
11 | z, x, y : double
12 | Center coordinates of the ball.
13 | radius : double
14 | Radius of the ball.
15 |
16 | Returns
17 | -------
18 | coords : (zz, xx, yy) tuple
19 | Lists of the coordinates (z, x, y) of the points inside the ball.
20 | """
21 |
22 | xx = []
23 | yy = []
24 | zz = []
25 | r1 = radius*radius
26 | rr = int(round(radius))
27 | for xo in range(int(x-rr), int(x+rr+1)):
28 | for yo in range(int(y - rr), int(y+rr+1)):
29 | for zo in range(int(z-rr), int(z+rr+1)):
30 | euclid = pow(x-xo, 2) + pow(y-yo, 2) + pow(z-zo, 2)
31 | if euclid <= r1:
32 | xx.append(xo)
33 | yy.append(yo)
34 | zz.append(zo)
35 |
36 | return zz, xx, yy
37 |
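# For integer centers, the same coordinates can be computed without the
# triple Python loop; a vectorized NumPy sketch (illustrative alternative,
# not part of the original file):
#
#     rr = int(round(radius))
#     dz, dx, dy = np.mgrid[-rr:rr + 1, -rr:rr + 1, -rr:rr + 1]
#     mask = dz ** 2 + dx ** 2 + dy ** 2 <= radius * radius
#     zz, xx, yy = dz[mask] + z, dx[mask] + x, dy[mask] + y  # arrays, not lists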
38 |
39 | class IntensityProperty(SProperty):
40 | """Calculate the intensity properties of the partices
41 |
42 | This measure adds 5 properties: mean_intensity, min_intensity,
43 | max_intensity, std_intensity and the radius parameter
44 |
45 | """
46 | def __init__(self, radius):
47 | super().__init__()
48 | if radius <= 0:
49 | raise Exception('IntensityProperty: radius must be positive')
50 | self.radius = radius
51 |
52 | def run(self, sparticles, image):
53 | self.notify('processing')
54 | self.progress(0)
55 | if image.ndim != sparticles.data.shape[1]:
56 | raise Exception('IntensityProperty: image and particles dimensions '
57 | 'do not match')
58 |
59 | if image.ndim == 4:
60 | self.notify('done')
61 | self.progress(100)
62 | return self._measure3d(sparticles, image)
63 | elif image.ndim == 3:
64 | self.notify('done')
65 | self.progress(100)
66 | return self._measure2d(sparticles, image)
67 | else:
68 | raise Exception('IntensityProperty: can process only (3D:2D+t) or '
69 | '(4D:3D+t) arrays')
70 |
71 | def _measure2d(self, sparticles, image):
72 |
73 | particles = sparticles.data
74 | rr, cc = disk((0, 0), self.radius)
75 | mean_ = np.zeros((particles.shape[0]))
76 | std_ = np.zeros((particles.shape[0]))
77 | min_ = np.zeros((particles.shape[0]))
78 | max_ = np.zeros((particles.shape[0]))
79 |
80 | for i in range(particles.shape[0]):
81 | self.progress(int(100*i/particles.shape[0]))
82 | x = int(particles[i, 2])
83 | y = int(particles[i, 1])
84 | t = int(particles[i, 0])
85 | # disk offsets: rr indexes rows (y), cc indexes columns (x)
86 | val = image[t, rr + y, cc + x]
87 | mean_[i] = np.mean(val)
88 | std_[i] = np.std(val)
89 | min_[i] = np.min(val)
90 | max_[i] = np.max(val)
91 |
92 | sparticles.properties['mean_intensity'] = mean_
93 | sparticles.properties['std_intensity'] = std_
94 | sparticles.properties['min_intensity'] = min_
95 | sparticles.properties['max_intensity'] = max_
96 | sparticles.properties['radius'] = \
97 | self.radius*np.ones((particles.shape[0]))
98 | return sparticles
99 |
100 | def _measure3d(self, sparticles, image):
101 |
102 | zz, xx, yy = ball(0, 0, 0, self.radius)
103 | particles = sparticles.data
104 | mean_ = np.zeros((particles.shape[0]))
105 | std_ = np.zeros((particles.shape[0]))
106 | min_ = np.zeros((particles.shape[0]))
107 | max_ = np.zeros((particles.shape[0]))
108 |
109 | for i in range(particles.shape[0]):
110 | self.progress(int(100 * i / particles.shape[0]))
111 | x = int(particles[i, 3])
112 | y = int(particles[i, 2])
113 | z = int(particles[i, 1])
114 | t = int(particles[i, 0])
115 |
116 | zzz = [w+z for w in zz]
117 | yyy = [w+y for w in yy]
118 | xxx = [w+x for w in xx]
119 |
120 | xxxx = []
121 | yyyy = []
122 | zzzz = []
123 |
124 | for ii in range(len(zz)):
125 | if zzz[ii]