├── docs ├── history.rst ├── readme.rst ├── contributing.rst ├── contributors.rst ├── index.rst ├── Makefile ├── make.bat ├── installation.rst ├── reference.rst ├── background.rst ├── conf.py └── usage.rst ├── test-data ├── test-data.parquet ├── faros-internal.parquet └── faros-plus-physilog │ ├── faros.csv.gz │ └── physilog.csv.gz ├── .pyup.yml ├── jointly ├── __init__.py ├── log.py ├── synchronization_errors.py ├── abstract_extractor.py ├── types.py ├── helpers_plotting.py ├── helpers.py ├── shake_extractor.py └── synchronizer.py ├── .readthedocs.yaml ├── .pre-commit-config.yaml ├── .github ├── ISSUE_TEMPLATE.md └── workflows │ ├── deploy.yml │ ├── deploy-test.yml │ └── all.yml ├── CONTRIBUTORS.rst ├── CITATION.cff ├── LICENSE ├── pyproject.toml ├── tests ├── test_shake_extractor.py ├── test_helpers_plotting.py ├── parquet_reader.py ├── test_examples.py ├── test_helpers.py └── test_synchronizer.py ├── HISTORY.rst ├── CONTRIBUTING.rst ├── .gitignore ├── CODE_OF_CONDUCT.md ├── README.rst └── poetry.lock /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | 2 | .. include:: ../README.rst -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/contributors.rst: -------------------------------------------------------------------------------- 1 | .. 
include:: ../CONTRIBUTORS.rst 2 | -------------------------------------------------------------------------------- /test-data/test-data.parquet: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hpi-dhc/jointly/HEAD/test-data/test-data.parquet -------------------------------------------------------------------------------- /test-data/faros-internal.parquet: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hpi-dhc/jointly/HEAD/test-data/faros-internal.parquet -------------------------------------------------------------------------------- /test-data/faros-plus-physilog/faros.csv.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hpi-dhc/jointly/HEAD/test-data/faros-plus-physilog/faros.csv.gz -------------------------------------------------------------------------------- /test-data/faros-plus-physilog/physilog.csv.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hpi-dhc/jointly/HEAD/test-data/faros-plus-physilog/physilog.csv.gz -------------------------------------------------------------------------------- /.pyup.yml: -------------------------------------------------------------------------------- 1 | # autogenerated pyup.io config file 2 | # see https://pyup.io/docs/configuration/ for all available options 3 | 4 | schedule: every week 5 | update: false 6 | -------------------------------------------------------------------------------- /jointly/__init__.py: -------------------------------------------------------------------------------- 1 | from .abstract_extractor import * 2 | from .shake_extractor import * 3 | from .synchronizer import * 4 | from .helpers import * 5 | from .helpers_plotting import * 6 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-20.04 5 | tools: 6 | python: "3.9" 7 | 8 | python: 9 | install: 10 | - method: pip 11 | path: . 12 | -------------------------------------------------------------------------------- /jointly/log.py: -------------------------------------------------------------------------------- 1 | """Sets up the logging format""" 2 | import logging 3 | 4 | FORMAT = "[%(lineno)3s - %(funcName)20s() ] %(message)s" 5 | logging.basicConfig(format=FORMAT) 6 | logger = logging.getLogger("jointly") 7 | logger.setLevel(logging.CRITICAL) 8 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. toctree:: 2 | :hidden: 3 | :maxdepth: 2 4 | 5 | readme 6 | installation 7 | usage 8 | background 9 | reference 10 | contributing 11 | contributors 12 | history 13 | 14 | .. 
include:: ../README.rst
15 | 
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | fail_fast: true
2 | 
3 | repos:
4 |   - repo: https://github.com/psf/black
5 |     rev: 21.7b0
6 |     hooks:
7 |       - id: black
8 |         language_version: python3
9 |   - repo: https://github.com/pycqa/flake8
10 |     rev: 3.9.2
11 |     hooks:
12 |       - id: flake8
13 |         entry: pflake8
14 |         additional_dependencies: [ pyproject-flake8 ]
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | * jointly version:
2 | * Python version:
3 | * Operating System:
4 | 
5 | ### Description
6 | 
7 | Describe what you were trying to get done.
8 | Tell us what happened, what went wrong, and what you expected to happen.
9 | 
10 | ### What I Did
11 | 
12 | ```
13 | Paste the command(s) you ran and the output.
14 | If there was a crash, please include the traceback here.
15 | ```
16 | 
--------------------------------------------------------------------------------
/CONTRIBUTORS.rst:
--------------------------------------------------------------------------------
1 | =======
2 | Credits
3 | =======
4 | 
5 | Contributors
6 | ------------
7 | 
8 | * Felix Musmann
9 | * Arne Herdick
10 | * Ariane Morassi Sasso
11 | 
12 | 
13 | Bug Fixes and Error Spotting
14 | ----------------------------
15 | 
16 | * Justin Albert
17 | * Martin Schlegel
18 | 
19 | 
20 | Mentor
21 | ----------------------------
22 | 
23 | * Prof. Dr. Bert Arnrich
24 | 
--------------------------------------------------------------------------------
/.github/workflows/deploy.yml:
--------------------------------------------------------------------------------
1 | name: Deploy to PyPI
2 | 
3 | on:
4 |   push:
5 |     tags:
6 |       - "v*.*.*"
7 | 
8 | jobs:
9 |   build-n-publish:
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |       - uses: actions/checkout@v2
13 |       - name: Build and publish to PyPI
14 |         uses: JRubics/poetry-publish@v1.6
15 |         with:
16 |           python_version: '3.7.1'
17 |           poetry_version: '==1.1.7' # (PIP version specifier syntax)
18 |           pypi_token: ${{ secrets.PYPI_TOKEN }}
19 | 
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 | 
4 | # You can set these variables from the command line.
5 | SPHINXOPTS    =
6 | SPHINXBUILD   = python -msphinx
7 | SPHINXPROJ    = jointly
8 | SOURCEDIR     = .
9 | BUILDDIR      = _build
10 | 
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 | 
15 | .PHONY: help Makefile
16 | 
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 | 
--------------------------------------------------------------------------------
/.github/workflows/deploy-test.yml:
--------------------------------------------------------------------------------
1 | name: Deploy to TestPyPI
2 | 
3 | on:
4 |   push:
5 |     branches-ignore:
6 |       - 'master'
7 | 
8 | jobs:
9 |   build-n-publish-testing:
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |       - uses: actions/checkout@v2
13 |       - name: Build and publish to TestPyPI
14 |         continue-on-error: true
15 |         uses: JRubics/poetry-publish@v1.6
16 |         with:
17 |           python_version: '3.7.1'
18 |           poetry_version: '==1.1.7' # (PIP version specifier syntax)
19 |           pypi_token: ${{ secrets.TEST_PYPI_TOKEN }}
20 |           repository_name: 'testpypi'
21 |           repository_url: 'https://test.pypi.org/legacy/'
22 | 
23 | 
--------------------------------------------------------------------------------
/jointly/synchronization_errors.py:
--------------------------------------------------------------------------------
1 | class ShakeMissingException(Exception):
2 |     """Thrown when a synchronization point is missing, e.g., a second shake could not be found in the signal."""
3 | 
4 |     pass
5 | 
6 | 
7 | class BadThresholdException(Exception):
8 |     """Thrown if the shake threshold lies outside the open interval (0, 1)."""
9 | 
10 |     pass
11 | 
12 | 
13 | class StartEqualsEndError(Exception):
14 |     """
15 |     Thrown when the detected start synchronization point equals the end synchronization point.
16 |     Consider changing the detection window lengths.
17 |     """
18 | 
19 |     pass
20 | 
21 | 
22 | class BadWindowException(Exception):
23 |     """Thrown when the sync point detection window length is longer than the data"""
24 | 
25 |     pass
26 | 
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | cff-version: 1.2.0
2 | message: "If you use this software, please cite it as below."
3 | authors:
4 | - family-names: "Herdick"
5 |   given-names: "Arne"
6 |   orcid: "https://orcid.org/0000-0002-1288-3571"
7 | - family-names: "Musmann"
8 |   given-names: "Felix"
9 |   orcid: "https://orcid.org/0000-0001-5365-0785"
10 | - family-names: "Sasso"
11 |   given-names: "Ariane"
12 |   orcid: "https://orcid.org/0000-0002-3669-4599"
13 | - family-names: "Albert"
14 |   given-names: "Justin"
15 |   orcid: "https://orcid.org/0000-0002-6121-792X"
16 | - family-names: "Arnrich"
17 |   given-names: "Bert"
18 |   orcid: "https://orcid.org/0000-0001-8380-7667"
19 | url: "https://github.com/hpi-dhc/jointly"
20 | doi: "https://doi.org/10.5281/zenodo.5833858"
21 | title: "Jointly: A Python package for synchronizing multiple sensors with accelerometer data"
22 | version: 1.0.4
23 | date-released: 2021-01-10
24 | 
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 | 
3 | pushd %~dp0
4 | 
5 | REM Command file for Sphinx documentation
6 | 
7 | if "%SPHINXBUILD%" == "" (
8 | 	set SPHINXBUILD=python -msphinx
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=jointly
13 | 
14 | if "%1" == "" goto help
15 | 
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | 	echo.
19 | 	echo.The Sphinx module was not found. Make sure you have Sphinx installed,
20 | 	echo.then set the SPHINXBUILD environment variable to point to the full
21 | 	echo.path of the 'sphinx-build' executable. Alternatively you may add the
22 | 	echo.Sphinx directory to PATH.
23 | 	echo.
24 | 	echo.If you don't have Sphinx installed, grab it from
25 | 	echo.http://sphinx-doc.org/
26 | 	exit /b 1
27 | )
28 | 
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 | 
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 | 
35 | :end
36 | popd
37 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2021 Digital Health Center (Hasso Plattner Institute)
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
23 | 
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 | 
3 | ============
4 | Installation
5 | ============
6 | 
7 | 
8 | Stable release
9 | --------------
10 | 
11 | To install jointly, run this command in your terminal:
12 | 
13 | .. code-block:: console
14 | 
15 |     $ pip install jointly
16 | 
17 | This is the preferred method to install jointly, as it will always install the most recent stable release.
18 | 
19 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide
20 | you through the process.
21 | 
22 | .. _pip: https://pip.pypa.io
23 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/
24 | 
25 | 
26 | From sources
27 | ------------
28 | 
29 | The sources for jointly can be downloaded from the `GitHub repo`_.
30 | 
31 | You can either clone the public repository:
32 | 
33 | .. code-block:: console
34 | 
35 |     $ git clone https://github.com/hpi-dhc/jointly.git
36 | 
37 | Or download the `tarball`_:
38 | 
39 | .. code-block:: console
40 | 
41 |     $ curl -OL https://github.com/hpi-dhc/jointly/tarball/master
42 | 
43 | Once you have a copy of the source, you can install it with `poetry`_:
44 | 
45 | .. code-block:: console
46 | 
47 |     $ poetry install
48 | 
49 | 
50 | .. _GitHub repo: https://github.com/hpi-dhc/jointly
51 | .. _tarball: https://github.com/hpi-dhc/jointly/tarball/master
52 | .. _poetry: https://python-poetry.org/docs/#installation
--------------------------------------------------------------------------------
/docs/reference.rst:
--------------------------------------------------------------------------------
1 | =========
2 | Reference
3 | =========
4 | 
5 | 
6 | jointly.synchronizer
7 | --------------------
8 | 
9 | .. automodule:: jointly.synchronizer
10 |     :members:
11 |     :undoc-members:
12 |     :show-inheritance:
13 | 
14 | jointly.shake\_extractor
15 | ------------------------
16 | 
17 | .. automodule:: jointly.shake_extractor
18 |     :members:
19 |     :show-inheritance:
20 | 
21 | jointly.types
22 | -------------
23 | 
24 | .. automodule:: jointly.types
25 |     :members:
26 |     :undoc-members:
27 |     :show-inheritance:
28 | 
29 | jointly.synchronization\_errors
30 | -------------------------------
31 | 
32 | .. automodule:: jointly.synchronization_errors
33 |     :members:
34 |     :undoc-members:
35 |     :show-inheritance:
36 | 
37 | 
38 | jointly.helpers
39 | ---------------
40 | 
41 | .. automodule:: jointly.helpers
42 |     :members:
43 |     :undoc-members:
44 |     :show-inheritance:
45 | 
46 | jointly.helpers_plotting
47 | ------------------------
48 | 
49 | .. automodule:: jointly.helpers_plotting
50 |     :members:
51 |     :undoc-members:
52 |     :show-inheritance:
53 | 
54 | jointly.abstract\_extractor
55 | ---------------------------
56 | 
57 | .. automodule:: jointly.abstract_extractor
58 |     :members:
59 |     :undoc-members:
60 |     :show-inheritance:
61 | 
62 | 
63 | jointly.log
64 | -----------
65 | 
66 | .. automodule:: jointly.log
67 |     :members:
68 |     :undoc-members:
69 |     :show-inheritance:
70 | 
--------------------------------------------------------------------------------
/jointly/abstract_extractor.py:
--------------------------------------------------------------------------------
1 | import abc
2 | from typing import List
3 | 
4 | import pandas as pd
5 | 
6 | from jointly.types import SyncPairs
7 | 
8 | 
9 | class AbstractExtractor(metaclass=abc.ABCMeta):
10 |     """
11 |     Superclass for extractor methods.
12 |     Its first subclass is the shake extractor, which finds the location of shakes in the data.
13 |     """
14 | 
15 |     def __init__(self):
16 |         self.segments = {}
17 | 
18 |     @abc.abstractmethod
19 |     def get_segments(self, signals: pd.DataFrame) -> SyncPairs:
20 |         """
21 |         Detect first and second segments to use for synchronization and
22 |         return a dictionary with start and end timestamps for each signal.
23 | """ 24 | 25 | def _init_segments(self, columns: List[str]): 26 | """Create a SynchronizationPair for each column""" 27 | self.segments = {} 28 | for column_name in columns: 29 | self.segments[column_name] = { 30 | "first": {}, 31 | "second": {}, 32 | } 33 | 34 | def _set_first_segment( 35 | self, column_name: str, start: pd.Timestamp, end: pd.Timestamp 36 | ): 37 | self.segments[column_name]["first"]["start"] = start 38 | self.segments[column_name]["first"]["end"] = end 39 | 40 | def _set_second_segment( 41 | self, column_name: str, start: pd.Timestamp, end: pd.Timestamp 42 | ): 43 | self.segments[column_name]["second"]["start"] = start 44 | self.segments[column_name]["second"]["end"] = end 45 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "jointly" 3 | version = "1.0.4" 4 | description = "Synchronize sensor data from accelerometer shakes" 5 | authors = [ 6 | "Ariane Morassi Sasso ", 7 | "Arne Herdick ", 8 | "Felix Musmann " 9 | ] 10 | license = "MIT" 11 | repository = "https://github.com/hpi-dhc/jointly" 12 | documentation = "https://hpi-dhc.github.io/jointly" 13 | classifiers = [ 14 | 'License :: OSI Approved :: MIT License', 15 | 'Programming Language :: Python', 16 | 'Programming Language :: Python :: 3.7', 17 | 'Programming Language :: Python :: 3.8', 18 | 'Programming Language :: Python :: 3.9' 19 | ] 20 | include = ['*.rst', 'LICENSE', 'tests/*', 'docs/*', '*.jpg', '*.png', '*.gif'] 21 | exclude = ['test-data'] 22 | 23 | [tool.black] 24 | extend-exclude = "^/docs" 25 | 26 | [tool.flake8] 27 | max-line-length = 88 28 | extend-ignore = "E203, E266, E501, W503, F403, F401" 29 | max-complexity = 18 30 | select = "B,C,E,F,W,T4,B9" 31 | 32 | [tool.poetry.dependencies] 33 | python = ">=3.7.1,<3.10" 34 | pandas = "^1.3.1" 35 | scipy = "^1.7.1" 36 | numpy = "^1.21.1" 37 | matplotlib = "^3.4.2" 38 | 39 | [tool.poetry.dev-dependencies] 40 | pytest = "^6.2.4" 41 | coverage = "^5.5" 42 | sphinx-rtd-theme = "^0.5.2" 43 | black = "^21.7b0" 44 | pre-commit = "^2.13.0" 45 | flake8 = "^3.9.2" 46 | pyproject-flake8 = "^0.0.1-alpha.2" 47 | pyarrow = "^5.0.0" 48 | 49 | [build-system] 50 | requires = ["poetry-core"] 51 | build-backend = "poetry.core.masonry.api" 52 | -------------------------------------------------------------------------------- /tests/test_shake_extractor.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import pytest 3 | 4 | from jointly import ShakeExtractor, Synchronizer, BadWindowException 5 | from tests.parquet_reader import get_parquet_test_data 6 | 7 | 8 | def test_window_params(): 9 | e = ShakeExtractor() 10 | with pytest.raises(ValueError): 11 | e.start_window_length = 3 12 | with pytest.raises(ValueError): 13 | e.end_window_length = 3 14 | 15 | e.start_window_length = pd.Timedelta(seconds=3) 16 | e.end_window_length = pd.Timedelta(seconds=3) 17 | 18 | 19 | def test_threshold_param(): 20 | e = ShakeExtractor() 21 | with pytest.raises(ValueError): 22 | e.threshold = 3 23 | with pytest.raises(ValueError): 24 | e.threshold = -1 25 | with pytest.raises(ValueError): 26 | e.threshold = 0 27 | with pytest.raises(ValueError): 28 | e.threshold = 1 29 | e.threshold = 0.5 30 | 31 | 32 | def test_window_length_error(): 33 | base_data = get_parquet_test_data("test-data.parquet") 34 | reference_signal, target_signal = "A", "B" 35 | sources = { 36 | 
reference_signal: {"data": base_data.copy(), "ref_column": "ACCELERATION_Z"}, 37 | target_signal: {"data": base_data, "ref_column": "ACCELERATION_Z"}, 38 | } 39 | extractor = ShakeExtractor() 40 | extractor.start_window_length = pd.Timedelta(seconds=50) 41 | extractor.end_window_length = pd.Timedelta(seconds=3) 42 | extractor.min_length = 3 43 | extractor.threshold = 0.5 44 | 45 | with pytest.raises(BadWindowException): 46 | Synchronizer(sources, reference_signal, extractor).get_sync_params() 47 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | History 3 | ======= 4 | 5 | 1.0.4 (2021-12-27) 6 | ------------------ 7 | 8 | * fix contributing.rst guide to testing 9 | * improve documentation problems 10 | * c.f. https://github.com/hpi-dhc/jointly/pull/12 11 | 12 | 1.0.3 (2021-12-09) 13 | ------------------ 14 | 15 | * remove poetry-core version dependency 16 | * add information on running all tests to CONTRIBUTING.rst 17 | * add a statement of need 18 | * add documentation deep links to README.rst 19 | * hopefully fix RTD build 20 | 21 | 1.0.2 (2021-08-04) 22 | ------------------ 23 | 24 | * Further improve CI/CD 25 | * add documentation 26 | * add badges 27 | * add tests 28 | * add more input validation 29 | * changes some API parameter names and removes some functions 30 | 31 | 0.2.0 (2021-05-06) 32 | ------------------ 33 | 34 | * Improve CI/CD 35 | * Various fixes 36 | 37 | * introduce ``black`` linter in repo 38 | * drastically reduce memory allocation by sequentially resampling 39 | * avoid OOM crash if debug/info plots are too large 40 | * seperately configurable start- and end-windoww lengths 41 | * handle error cases: 42 | 43 | * same start and end window 44 | * first or second shake missing 45 | 46 | * fix off-by-two error in shift index 47 | * add feature: per-sensor data exports in ``pickle`` format 48 | 49 | 50 | 0.1.3 - 0.1.5 (2019-09-10 to 2019-09-24) 51 | ---------------------------------------- 52 | 53 | * Minor Bugfixes 54 | * Only look for peaks in window 55 | 56 | 0.1.2 (2019-08-26) 57 | ------------------ 58 | 59 | * Added parameter to interpolate only between valid values 60 | 61 | 0.1.1 (2019-05-16) 62 | ------------------ 63 | 64 | * Fixed get_synced_data() 65 | 66 | 0.1 (2019-05-09) 67 | ------------------ 68 | 69 | * Renamed methods to indicate internal only usage 70 | * Fix truncation of data and remove auto truncation 71 | * Improve readme 72 | * Initial commit 73 | * First release on PyPI. 
74 | -------------------------------------------------------------------------------- /tests/test_helpers_plotting.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import patch 2 | 3 | import matplotlib 4 | import pandas as pd 5 | import pytest 6 | 7 | from jointly import ShakeExtractor, Synchronizer, get_equidistant_signals 8 | from jointly.helpers_plotting import plot_reference_columns, plot_segments 9 | from tests.parquet_reader import get_parquet_test_data 10 | 11 | 12 | @patch("jointly.helpers_plotting.plt.show") 13 | def test_plot_segments(mocked_show): 14 | base_data = get_parquet_test_data("test-data.parquet") 15 | reference_signal, target_signal = "A", "B" 16 | sources = { 17 | reference_signal: {"data": base_data.copy(), "ref_column": "ACCELERATION_Z"}, 18 | target_signal: {"data": base_data, "ref_column": "ACCELERATION_Z"}, 19 | } 20 | extractor = ShakeExtractor() 21 | extractor.start_window_length = pd.Timedelta(seconds=5) 22 | extractor.end_window_length = pd.Timedelta(seconds=3) 23 | extractor.min_length = 3 24 | extractor.threshold = 0.5 25 | 26 | synchronizer = Synchronizer(sources, reference_signal, extractor) 27 | segments = extractor.get_segments( 28 | get_equidistant_signals(synchronizer.ref_signals, synchronizer.sampling_freq) 29 | ) 30 | 31 | with pytest.raises(ValueError): 32 | plot_segments(synchronizer.ref_signals, segments) 33 | 34 | plot_segments(synchronizer.ref_signals, segments, together=True) 35 | plot_segments(synchronizer.ref_signals, segments, separate=True) 36 | 37 | 38 | @patch("jointly.helpers_plotting.plt.show") 39 | def test_plot_reference_columns(mocked_show): 40 | base_data = get_parquet_test_data("test-data.parquet") 41 | reference_signal, target_signal = "A", "B" 42 | sources = { 43 | reference_signal: {"data": base_data.copy(), "ref_column": "ACCELERATION_Z"}, 44 | target_signal: {"data": base_data, "ref_column": "ACCELERATION_Z"}, 45 | } 46 | plot_reference_columns(sources) 47 | -------------------------------------------------------------------------------- /.github/workflows/all.yml: -------------------------------------------------------------------------------- 1 | name: Testing, linting, and coverage 2 | 3 | on: [ push ] 4 | 5 | jobs: 6 | test-lint-coverage: 7 | runs-on: ${{ matrix.os }} 8 | strategy: 9 | matrix: 10 | python-version: [ 3.7, 3.8, 3.9 ] 11 | os: [ubuntu-latest, macOS-latest, windows-latest] 12 | steps: 13 | - uses: actions/checkout@v2 14 | 15 | - name: Set up Python ${{ matrix.python-version }} 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: ${{ matrix.python-version }} 19 | 20 | - name: Install Poetry 21 | uses: snok/install-poetry@v1 22 | 23 | - name: Lint with black 24 | uses: psf/black@stable 25 | with: 26 | options: "--check --verbose" 27 | src: "jointly tests" 28 | 29 | - name: Install dependencies 30 | run: | 31 | poetry install 32 | 33 | - name: Test with pytest 34 | run: | 35 | poetry run python -m pytest 36 | - if: ${{ matrix.python-version == '3.9' && github.ref == 'refs/heads/master' && matrix.os == 'ubuntu-latest' }} 37 | name: Calculate coverage 38 | run: | 39 | poetry run coverage run --source=jointly -m pytest 40 | poetry run coverage xml 41 | COVERAGE=$(python -c "import xml.etree.ElementTree as ET; print(int(float(ET.parse('coverage.xml').getroot().attrib['line-rate']) * 100))") 42 | COLOR=$(echo $COVERAGE | python -c "import sys; from bisect import bisect; i=bisect([0,60,70,80,95,100], int(sys.stdin.read()))-1; print(['red', 
'orange', 'yellow', 'yellowgreen', 'green', 'brightgreen'][i])")
43 |           echo "COVERAGE=$COVERAGE" >> $GITHUB_ENV
44 |           echo "COLOR=$COLOR" >> $GITHUB_ENV
45 |       - if: ${{ matrix.python-version == '3.9' && github.ref == 'refs/heads/master' && matrix.os == 'ubuntu-latest' }}
46 |         name: Create the coverage gist
47 |         uses: schneegans/dynamic-badges-action@v1.0.0
48 |         with:
49 |           auth: ${{ secrets.GIST_SECRET }}
50 |           gistID: f731de158a21515e2d6c52ed48d406ad
51 |           filename: jointly_coverage_main.json
52 |           label: Test Coverage
53 |           message: ${{ env.COVERAGE }}%
54 |           color: ${{ env.COLOR }}
55 | 
--------------------------------------------------------------------------------
/docs/background.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | Background
3 | ==========
4 | 
5 | The Syncing Process
6 | -------------------
7 | 
8 | To sync two sources with each other, they need a simultaneously recorded
9 | signal with a characteristic signature at two points in time. This could
10 | be, for example, the magnitude of the accelerometer signal if multiple
11 | devices are shaken together.
12 | 
13 | Selecting common segments
14 | ~~~~~~~~~~~~~~~~~~~~~~~~~
15 | 
16 | jointly can detect prominent shakes automatically with the
17 | ``ShakeExtractor``. This is done by detecting the peaks above a certain
18 | ``threshold``. These peaks are then merged into sequences of peaks that
19 | are within ``distance`` milliseconds of each other. Sequence candidates
20 | need to fulfill the following conditions:
21 | 
22 | * they must have at least ``min_length`` peaks
23 | * they must be contained in the ``start_window_length`` or ``end_window_length`` detection window, respectively
24 | 
25 | The sequence with the highest weight, i.e., the ``mean + median`` of the peaks
26 | in the sequence, is selected to represent the start or end segment.
27 | 
28 | Calculation of the timeshift
29 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
30 | 
31 | To compensate for offsets in the system time of different sources,
32 | the timeshift that synchronizes the selected start segments with each other is
33 | calculated. For the automatic computation of the timeshift between two
34 | signals, the cross-correlation of each segment with the reference signal
35 | is calculated. The signals are shifted so that the correlation between
36 | the selected segments is maximized.
37 | 
38 | Adjusting the frequency
39 | ~~~~~~~~~~~~~~~~~~~~~~~
40 | 
41 | As no clock is perfect, an additional issue that arises when using multiple
42 | sensors is that of clocks that run at slightly offset speeds. While clock
43 | speeds can drift over time, these influences are typically very small, and
44 | it can generally be assumed that the offset from the correct speed is constant
45 | for anything but long trials (Zhou, Hui, et al. "Frequency accuracy & stability
46 | dependencies of crystal oscillators." Carleton University, Systems and Computer
47 | Engineering, Technical Report SCE-08-12 (2008)).
48 | 
49 | The result of these differences in running speed is that signals desynchronize
50 | over time. To compensate, a stretch factor is calculated that brings
51 | the difference between the synchronization timeshifts for the start and end
52 | segments to zero. After stretching the signal, the remaining offset between
53 | the signals is removed once more, resulting in the final timeshift
54 | and stretch factor values.
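55 | 
56 | Example
57 | ~~~~~~~
58 | 
59 | The following sketch illustrates the whole process end to end. It mirrors the
60 | calls used in ``tests/test_shake_extractor.py``; ``df_a`` and ``df_b`` are
61 | placeholders for your own recordings, each a pandas ``DataFrame`` with a
62 | ``DateTimeIndex`` and an accelerometer column to detect the shakes on. The
63 | window lengths and threshold below are illustrative values, not recommendations.
64 | 
65 | .. code-block:: python
66 | 
67 |     import pandas as pd
68 | 
69 |     from jointly import ShakeExtractor, Synchronizer
70 | 
71 |     # each source names its data and the column containing the shake signature
72 |     sources = {
73 |         "A": {"data": df_a, "ref_column": "ACCELERATION_Z"},
74 |         "B": {"data": df_b, "ref_column": "ACCELERATION_Z"},
75 |     }
76 | 
77 |     extractor = ShakeExtractor()
78 |     # windows (as pd.Timedelta) that must contain the first and second shake
79 |     extractor.start_window_length = pd.Timedelta(seconds=10)
80 |     extractor.end_window_length = pd.Timedelta(seconds=10)
81 |     extractor.min_length = 3  # minimum number of peaks per shake sequence
82 |     extractor.threshold = 0.5  # normalized peak height, strictly between 0 and 1
83 | 
84 |     # "A" is the reference source against which "B" is synchronized
85 |     synchronizer = Synchronizer(sources, "A", extractor)
86 |     sync_params = synchronizer.get_sync_params()
87 | 
88 | ``get_sync_params`` computes the synchronization parameters, i.e., the
89 | timeshift and stretch factor described above; see the :doc:`reference` for
90 | the full ``Synchronizer`` API.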
--------------------------------------------------------------------------------
/tests/parquet_reader.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | from typing import Optional
3 | 
4 | import pandas as pd
5 | 
6 | 
7 | def read_parquet_sensor_data(
8 |     file: str, device_id: Optional[int] = None
9 | ) -> pd.DataFrame:
10 |     """Read a long-format parquet file into a dataframe"""
11 |     df = pd.read_parquet(file)
12 | 
13 |     df_components = [
14 |         df["timestamp"].explode(ignore_index=True),
15 |         df["type"].explode(ignore_index=True),
16 |         df["value"].explode(ignore_index=True),
17 |     ]
18 | 
19 |     if device_id is None:
20 |         return pd.concat(df_components, axis="columns")
21 |     else:
22 |         device_col = df["deviceId"].explode(ignore_index=True)
23 |         df = pd.concat([*df_components, device_col], axis="columns")
24 |         df = df[df["deviceId"] == device_id]
25 |         return df.drop("deviceId", axis="columns")
26 | 
27 | 
28 | def get_parquet_test_data(file_name: str, device_id: Optional[int] = None):
29 |     """Pivot a long-format dataframe into groups of data points with the same sensor data type group"""
30 |     sensor_data_type_group_map = {
31 |         "ACCELERATION_X": "ACCELERATION",
32 |         "ACCELERATION_Y": "ACCELERATION",
33 |         "ACCELERATION_Z": "ACCELERATION",
34 |         "ECG": "ECG",
35 |         "ORIENTATION_X": "ORIENTATION",
36 |         "ORIENTATION_Y": "ORIENTATION",
37 |         "ORIENTATION_Z": "ORIENTATION",
38 |         "LINEAR_ACCELERATION_X": "LINEAR_ACCELERATION",
39 |         "LINEAR_ACCELERATION_Y": "LINEAR_ACCELERATION",
40 |         "LINEAR_ACCELERATION_Z": "LINEAR_ACCELERATION",
41 |         "MAGNETOMETER_X": "MAGNETOMETER",
42 |         "MAGNETOMETER_Y": "MAGNETOMETER",
43 |         "MAGNETOMETER_Z": "MAGNETOMETER",
44 |         "GRAVITY_X": "GRAVITY",
45 |         "GRAVITY_Y": "GRAVITY",
46 |         "GRAVITY_Z": "GRAVITY",
47 |         "LIGHT": "LIGHT",
48 |     }
49 |     if os.path.isfile(f"../test-data/{file_name}"):
50 |         file = f"../test-data/{file_name}"
51 |     elif os.path.isfile(f"./test-data/{file_name}"):
52 |         file = f"./test-data/{file_name}"
53 |     else:
54 |         raise FileNotFoundError(f"Couldn't find test file `{file_name}`")
55 | 
56 |     data: pd.DataFrame = read_parquet_sensor_data(file, device_id)
57 |     data["typeGroup"] = data["type"].map(sensor_data_type_group_map.get)
58 |     data = (
59 |         data.reset_index(drop=True)
60 |         .pivot(index=["timestamp", "typeGroup"], columns=["type"], values="value")
61 |         .droplevel("typeGroup")
62 |     )
63 |     data.index = pd.to_datetime(data.index, unit="ns", utc=True)
64 |     return data
65 | 
--------------------------------------------------------------------------------
/jointly/types.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Union, List
2 | 
3 | import pandas as pd
4 | 
5 | SourceDict = Dict[str, Dict[str, Union[str, pd.DataFrame, float, pd.Timedelta, None]]]
6 | """
7 | A dictionary of dictionaries.
8 | Each entry defines an input sensor, and points to a dictionary with the keys ``data`` and ``ref_column``.
9 | 
10 | ``data`` is a pandas ``DataFrame`` with a ``DateTimeIndex``.
11 | 
12 | ``ref_column`` specifies the column within ``data`` which should be used to extract synchronization points, e.g., shakes.
13 | """
14 | 
15 | SynchronizationPoint = Dict[str, pd.Timestamp]
16 | """
17 | A dictionary describing a synchronization point, e.g., a shake.
18 | A synchronization point has a start and an end, and thus the properties ``start`` and ``end``.
19 | """ 20 | 21 | SynchronizationPair = Dict[str, SynchronizationPoint] 22 | """ 23 | A dictionary containing both the first and the second synchronization point of a signal. 24 | Two points are required to calculate the distance in between them. 25 | Properties are ``first`` and ``second``. 26 | """ 27 | 28 | SyncPairs = Dict[str, SynchronizationPair] 29 | """ 30 | A dictionary that contains SynchronizationPair instances for a number of sources. 31 | """ 32 | 33 | SyncPairTimeshift = Dict[str, pd.Timedelta] 34 | """Timeshift for a single sync pair, i.e., the shift required to synchronize one pair to the reference signal""" 35 | 36 | ResultTableSpec = Dict[str, Dict[str, List[str]]] 37 | """ 38 | Specification for saving the synchronized results in separated files, with each root key defining a target file. 39 | The second level defines the columns which should be saved from each source file into the given target file. 40 | This can be used to separate the input files into files containing only a single sensor type, e.g., to extract the 41 | PPG signal from two different sensors into a single file. 42 | 43 | 44 | Example: 45 | 46 | .. code:: python 47 | 48 | { 49 | 'ACC': { 50 | 'Faros': ['Accelerometer_X', 'Accelerometer_Y', 'Accelerometer_Z'], 51 | 'Empatica': ['acc_x', 'acc_y', 'acc_z'], 52 | 'Everion': ['accx_data', 'accy_data', 'accz_data'], 53 | }, 54 | 'PPG': { 55 | 'Empatica': ['bvp'], 56 | 'Everion': ['blood_pulse_wave', 'led2_data', 'led3_data'], 57 | }, 58 | 'EDA': { 59 | 'Empatica': ['eda'], 60 | 'Everion': ['gsr_electrode'], 61 | }, 62 | 'ECG': { 63 | 'Faros': ['ECG'], 64 | }, 65 | 'TEMP': { 66 | 'Empatica': ['temp'], 67 | 'Everion': ['temperature_object'], 68 | }, 69 | 'HR': { 70 | 'Empatica': ['hr'], 71 | 'Everion': ['heart_rate', 'heart_rate_quality'], 72 | }, 73 | 'IBI': { 74 | 'Faros': ['HRV'], 75 | 'Empatica': ['ibi'], 76 | 'Everion': ['inter_pulse_interval', 'inter_pulse_interval_deviation'], 77 | } 78 | } 79 | 80 | """ 81 | -------------------------------------------------------------------------------- /jointly/helpers_plotting.py: -------------------------------------------------------------------------------- 1 | """Contains plotting helpers""" 2 | from typing import List, Optional 3 | 4 | import matplotlib.cm 5 | import numpy as np 6 | import pandas as pd 7 | import matplotlib.pyplot as plt 8 | 9 | from jointly import normalize 10 | from jointly.types import SyncPairs, SourceDict 11 | 12 | 13 | def plot_segments( 14 | dataframe: pd.DataFrame, segments: SyncPairs, together=False, separate=False 15 | ): 16 | """ 17 | Plot the segments of a reference signal dataframe 18 | 19 | :param dataframe: the dataframe 20 | :param segments: a SyncPairs instance that specifies the data to be drawn 21 | :param together: true to plot everything together 22 | :param separate: true to plot separate 23 | """ 24 | signal_names = list(segments.keys()) 25 | segment_names = list(segments[signal_names[0]].keys()) 26 | 27 | if together == separate: 28 | raise ValueError("Set either `together` or `separate`") 29 | 30 | if together is True: 31 | # plot signals together 32 | ncols = 1 33 | nrows = len(segment_names) 34 | fig, axes = plt.subplots(nrows, ncols, figsize=(15, 4 * nrows)) 35 | for segment_index, segment in enumerate(segment_names): 36 | axes[segment_index].set_title("{} segment".format(segment)) 37 | signals_with_segment = [ 38 | signal for signal in signal_names if segment in segments[signal] 39 | ] 40 | start = np.amin( 41 | [segments[x][segment]["start"] for x in 
42 |             )
43 |             end = np.amax([segments[x][segment]["end"] for x in signals_with_segment])
44 |             dataframe[start:end].plot(ax=axes[segment_index])
45 |         fig.tight_layout()
46 | 
47 |     if separate is True:
48 |         # plot signals separately, one row per signal and one column per segment
49 |         ncols = len(segment_names)
50 |         nrows = len(segments.keys())
51 |         cmap = matplotlib.cm.get_cmap("tab10")
52 |         fig, axes = plt.subplots(nrows, ncols, figsize=(15, 4 * nrows))
53 |         for signal_index, signal_name in enumerate(segments.keys()):
54 |             for index_seg, segment in enumerate(segment_names):
55 |                 if segment not in segments[signal_name]:
56 |                     continue
57 |                 axes[signal_index, index_seg].set_title(
58 |                     "{} segment of {}".format(segment, signal_name)
59 |                 )
60 |                 segment_info = segments[signal_name][segment]
61 |                 dataframe[signal_name][segment_info["start"] : segment_info["end"]].plot(
62 |                     ax=axes[signal_index, index_seg], color=cmap(signal_index)
63 |                 )
64 |         fig.tight_layout()
65 | 
66 |     plt.show()
67 | 
68 | 
69 | def plot_reference_columns(sources: SourceDict, title: str = ""):
70 |     """
71 |     Plots a normalized version of the reference columns, i.e., what jointly detects shakes on
72 | 
73 |     :param sources: a SourceDict
74 |     :param title: additional title if desired
75 |     """
76 |     plt.figure(f"Test Debug: {title}")
77 | 
78 |     for device in sources:
79 |         ref_column = sources[device]["ref_column"]
80 |         data = sources[device]["data"][ref_column].dropna()
81 |         data = data[data != 0]
82 |         data = pd.Series(normalize(data.values), data.index)
83 |         plt.plot(data.index, data, label=device)
84 | 
85 |     plt.legend()
86 |     plt.show()
87 | 
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 | .. _contributing_label:
3 | 
4 | ============
5 | Contributing
6 | ============
7 | 
8 | Contributions are welcome, and they are greatly appreciated! Every little bit
9 | helps, and credit will always be given.
10 | 
11 | We would appreciate a feature suggestion issue before you create a PR so we can
12 | discuss the feature, its use, and its implementation.
13 | 
14 | You can contribute in many ways:
15 | 
16 | Types of Contributions
17 | ----------------------
18 | 
19 | Report Bugs
20 | ~~~~~~~~~~~
21 | 
22 | Report bugs at https://github.com/hpi-dhc/jointly/issues.
23 | 
24 | If you are reporting a bug, please include:
25 | 
26 | * Your operating system name and version.
27 | * Any details about your local setup that might be helpful in troubleshooting.
28 | * Detailed steps to reproduce the bug.
29 | 
30 | Fix Bugs
31 | ~~~~~~~~
32 | 
33 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
34 | wanted" is open to whoever wants to implement it.
35 | 
36 | Implement Features
37 | ~~~~~~~~~~~~~~~~~~
38 | 
39 | Look through the GitHub issues for features. Anything tagged with "enhancement"
40 | and "help wanted" is open to whoever wants to implement it.
41 | 
42 | Write Documentation
43 | ~~~~~~~~~~~~~~~~~~~
44 | 
45 | jointly could always use more documentation, whether as part of the
46 | official jointly docs, in docstrings, or even on the web in blog posts,
47 | articles, and such.
48 | 
49 | Submit Feedback
50 | ~~~~~~~~~~~~~~~
51 | 
52 | The best way to send feedback is to file an issue at https://github.com/hpi-dhc/jointly/issues.
53 | 
54 | If you are proposing a feature:
55 | 
56 | * Explain in detail how it would work.
57 | * Keep the scope as narrow as possible, to make it easier to implement.
58 | * Remember that this is a volunteer-driven project, and that contributions
59 |   are welcome :)
60 | 
61 | Get Started!
62 | ------------
63 | 
64 | Ready to contribute? Here's how to set up `jointly` for local development.
65 | 
66 | 1. Fork the `jointly` repo on GitHub.
67 | 2. Clone your fork locally::
68 | 
69 |     $ git clone git@github.com:your_name_here/jointly.git
70 | 
71 | 3. Install all dependencies after installing `poetry <https://python-poetry.org/docs/#installation>`_::
72 | 
73 |     $ cd jointly/
74 |     $ poetry install
75 |     $ pre-commit install
76 | 
77 | 4. Create a branch for local development::
78 | 
79 |     $ git checkout -b name-of-your-bugfix-or-feature
80 | 
81 |    Now you can make your changes locally.
82 | 
83 | 5. When you're done making changes, check that your changes pass the tests and linters and that the docs can be built::
84 | 
85 |     $ py.test
86 |     $ pre-commit run --all-files
87 |     $ cd docs && make html
88 | 
89 | 
90 | 6. Commit your changes and then push your branch to GitHub::
91 | 
92 |     $ git add .
93 |     $ git commit -m "Your detailed description of your changes."
94 |     $ git push origin name-of-your-bugfix-or-feature
95 | 
96 | 7. Submit a pull request through the GitHub website.
97 | 
98 | Pull Request Guidelines
99 | -----------------------
100 | 
101 | Before you submit a pull request, check that it meets these guidelines:
102 | 
103 | 1. The pull request should include tests.
104 | 2. If the pull request adds functionality, the docs should be updated. Put
105 |    your new functionality into a function with a docstring, and add the
106 |    feature to the list in README.rst.
107 | 3. The pull request should work for Python 3.7, 3.8 and 3.9, and for PyPI. This will be verified within the PR.
108 | 
109 | Tips
110 | ----
111 | 
112 | To run a subset of tests::
113 | 
114 |     $ py.test tests/test_examples.py
115 | 
116 | 
117 | To run all tests::
118 | 
119 |     $ py.test
120 | 
121 | Deploying
122 | ---------
123 | 
124 | A reminder for the maintainers on how to deploy.
125 | Make sure all your changes are committed, including an entry in HISTORY.rst and an update of the old version code
126 | in ``docs/conf.py`` and ``pyproject.toml``.
127 | Please also link the PR in your history entry.
128 | 
129 | GitHub will then deploy to PyPI if tests pass.
130 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/macos,linux,python,windows,jupyternotebook,visualstudiocode 3 | # Edit at https://www.gitignore.io/?templates=macos,linux,python,windows,jupyternotebook,visualstudiocode 4 | 5 | ### JupyterNotebook ### 6 | .ipynb_checkpoints 7 | */.ipynb_checkpoints/* 8 | 9 | # Remove previous ipynb_checkpoints 10 | # git rm -r .ipynb_checkpoints/ 11 | # 12 | 13 | ### Linux ### 14 | *~ 15 | 16 | # temporary files which can be created if a process still has a handle open of a deleted file 17 | .fuse_hidden* 18 | 19 | # KDE directory preferences 20 | .directory 21 | 22 | # Linux trash folder which might appear on any partition or disk 23 | .Trash-* 24 | 25 | # .nfs files are created when an open file is removed but is still being accessed 26 | .nfs* 27 | 28 | ### macOS ### 29 | # General 30 | .DS_Store 31 | .AppleDouble 32 | .LSOverride 33 | 34 | # Icon must end with two \r 35 | Icon 36 | 37 | # Thumbnails 38 | ._* 39 | 40 | # Files that might appear in the root of a volume 41 | .DocumentRevisions-V100 42 | .fseventsd 43 | .Spotlight-V100 44 | .TemporaryItems 45 | .Trashes 46 | .VolumeIcon.icns 47 | .com.apple.timemachine.donotpresent 48 | 49 | # Directories potentially created on remote AFP share 50 | .AppleDB 51 | .AppleDesktop 52 | Network Trash Folder 53 | Temporary Items 54 | .apdisk 55 | 56 | ### Python ### 57 | # Byte-compiled / optimized / DLL files 58 | __pycache__/ 59 | *.py[cod] 60 | *$py.class 61 | 62 | # C extensions 63 | *.so 64 | 65 | # Distribution / packaging 66 | .Python 67 | build/ 68 | develop-eggs/ 69 | dist/ 70 | downloads/ 71 | eggs/ 72 | .eggs/ 73 | lib/ 74 | lib64/ 75 | parts/ 76 | sdist/ 77 | var/ 78 | wheels/ 79 | pip-wheel-metadata/ 80 | share/python-wheels/ 81 | *.egg-info/ 82 | .installed.cfg 83 | *.egg 84 | MANIFEST 85 | 86 | # PyInstaller 87 | # Usually these files are written by a python script from a template 88 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 89 | *.manifest 90 | *.spec 91 | 92 | # Installer logs 93 | pip-log.txt 94 | pip-delete-this-directory.txt 95 | 96 | # Unit test / coverage reports 97 | htmlcov/ 98 | .tox/ 99 | .nox/ 100 | .coverage 101 | .coverage.* 102 | .cache 103 | nosetests.xml 104 | coverage.xml 105 | *.cover 106 | .hypothesis/ 107 | .pytest_cache/ 108 | 109 | # Translations 110 | *.mo 111 | *.pot 112 | 113 | # Django stuff: 114 | *.log 115 | local_settings.py 116 | db.sqlite3 117 | 118 | # Flask stuff: 119 | instance/ 120 | .webassets-cache 121 | 122 | # Scrapy stuff: 123 | .scrapy 124 | 125 | # Sphinx documentation 126 | docs/_build/ 127 | 128 | # PyBuilder 129 | target/ 130 | 131 | # Jupyter Notebook 132 | 133 | # IPython 134 | profile_default/ 135 | ipython_config.py 136 | 137 | # pyenv 138 | .python-version 139 | 140 | # pipenv 141 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 142 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 143 | # having no cross-platform support, pipenv may install dependencies that don’t work, or not 144 | # install all needed dependencies. 
145 | #Pipfile.lock 146 | 147 | # celery beat schedule file 148 | celerybeat-schedule 149 | 150 | # SageMath parsed files 151 | *.sage.py 152 | 153 | # Environments 154 | .env 155 | .venv 156 | env/ 157 | venv/ 158 | ENV/ 159 | env.bak/ 160 | venv.bak/ 161 | 162 | # Spyder project settings 163 | .spyderproject 164 | .spyproject 165 | 166 | # Rope project settings 167 | .ropeproject 168 | 169 | # mkdocs documentation 170 | /site 171 | 172 | # mypy 173 | .mypy_cache/ 174 | .dmypy.json 175 | dmypy.json 176 | 177 | # Pyre type checker 178 | .pyre/ 179 | 180 | ### VisualStudioCode ### 181 | .vscode/* 182 | !.vscode/settings.json 183 | !.vscode/tasks.json 184 | !.vscode/launch.json 185 | !.vscode/extensions.json 186 | 187 | ### VisualStudioCode Patch ### 188 | # Ignore all local history of files 189 | .history 190 | 191 | ### Windows ### 192 | # Windows thumbnail cache files 193 | Thumbs.db 194 | ehthumbs.db 195 | ehthumbs_vista.db 196 | 197 | # Dump file 198 | *.stackdump 199 | 200 | # Folder config file 201 | [Dd]esktop.ini 202 | 203 | # Recycle Bin used on file shares 204 | $RECYCLE.BIN/ 205 | 206 | # Windows Installer files 207 | *.cab 208 | *.msi 209 | *.msix 210 | *.msm 211 | *.msp 212 | 213 | # Windows shortcuts 214 | *.lnk 215 | 216 | # End of https://www.gitignore.io/api/macos,linux,python,windows,jupyternotebook,visualstudiocode 217 | 218 | .idea 219 | .vscode 220 | 221 | **/tmp_test_file_gen.py -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # jointly documentation build configuration file, created by 5 | # sphinx-quickstart on Fri Jun 9 13:47:02 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another 17 | # directory, add these directories to sys.path here. If the directory is 18 | # relative to the documentation root, use os.path.abspath to make it 19 | # absolute, like shown here. 20 | # 21 | import os 22 | import sys 23 | 24 | sys.path.insert(0, os.path.abspath("..")) 25 | sys.path.insert(0, os.path.abspath("../")) 26 | 27 | # -- General configuration --------------------------------------------- 28 | 29 | # If your documentation needs a minimal Sphinx version, state it here. 30 | # 31 | # needs_sphinx = '1.0' 32 | 33 | # Add any Sphinx extension module names here, as strings. They can be 34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 35 | extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinx_rtd_theme"] 36 | 37 | # Add any paths that contain templates here, relative to this directory. 38 | templates_path = ["_templates"] 39 | 40 | # The suffix(es) of source filenames. 41 | # You can specify multiple suffix as a list of string: 42 | # 43 | # source_suffix = ['.rst', '.md'] 44 | source_suffix = ".rst" 45 | 46 | # The master toctree document. 47 | master_doc = "index" 48 | 49 | # General information about the project. 
50 | project = u"jointly"
51 | copyright = u"2021, Digital Health Center, Hasso Plattner Institute"
52 | author = u"Felix Musmann, Arne Herdick, Ariane Morassi-Sasso"
53 | 
54 | # The version info for the project you're documenting, acts as replacement
55 | # for |version| and |release|, also used in various other places throughout
56 | # the built documents.
57 | #
58 | # The short X.Y version.
59 | version = "1.0.4"
60 | # The full version, including alpha/beta/rc tags.
61 | release = version
62 | 
63 | # The language for content autogenerated by Sphinx. Refer to documentation
64 | # for a list of supported languages.
65 | #
66 | # This is also used if you do content translation via gettext catalogs.
67 | # Usually you set "language" from the command line for these cases.
68 | language = None
69 | 
70 | # List of patterns, relative to source directory, that match files and
71 | # directories to ignore when looking for source files.
72 | # These patterns also affect html_static_path and html_extra_path
73 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
74 | 
75 | # The name of the Pygments (syntax highlighting) style to use.
76 | pygments_style = "sphinx"
77 | 
78 | # If true, `todo` and `todoList` produce output, else they produce nothing.
79 | todo_include_todos = False
80 | 
81 | 
82 | # -- Options for HTML output -------------------------------------------
83 | 
84 | # The theme to use for HTML and HTML Help pages.  See the documentation for
85 | # a list of builtin themes.
86 | #
87 | html_theme = "sphinx_rtd_theme"
88 | 
89 | # Theme options are theme-specific and customize the look and feel of a
90 | # theme further.  For a list of options available for each theme, see the
91 | # documentation.
92 | #
93 | # html_theme_options = {}
94 | 
95 | # Add any paths that contain custom static files (such as style sheets) here,
96 | # relative to this directory. They are copied after the builtin static files,
97 | # so a file named "default.css" will overwrite the builtin "default.css".
98 | html_static_path = ["_static"]
99 | 
100 | 
101 | # -- Options for HTMLHelp output ---------------------------------------
102 | 
103 | # Output file base name for HTML help builder.
104 | htmlhelp_basename = "jointlydoc"
105 | 
106 | 
107 | # -- Options for LaTeX output ------------------------------------------
108 | 
109 | latex_elements = {
110 |     # The paper size ('letterpaper' or 'a4paper').
111 |     #
112 |     # 'papersize': 'letterpaper',
113 |     # The font size ('10pt', '11pt' or '12pt').
114 |     #
115 |     # 'pointsize': '10pt',
116 |     # Additional stuff for the LaTeX preamble.
117 |     #
118 |     # 'preamble': '',
119 |     # Latex figure (float) alignment
120 |     #
121 |     # 'figure_align': 'htbp',
122 | }
123 | 
124 | # Grouping the document tree into LaTeX files. List of tuples
125 | # (source start file, target name, title, author, documentclass
126 | # [howto, manual, or own class]).
127 | latex_documents = [
128 |     (master_doc, "jointly.tex", u"jointly Documentation", u"Arne Herdick", "manual"),
129 | ]
130 | 
131 | 
132 | # -- Options for manual page output ------------------------------------
133 | 
134 | # One entry per manual page. List of tuples
135 | # (source start file, name, description, authors, manual section).
136 | man_pages = [(master_doc, "jointly", u"jointly Documentation", [author], 1)]
137 | 
138 | 
139 | # -- Options for Texinfo output ----------------------------------------
140 | 
141 | # Grouping the document tree into Texinfo files.
List of tuples 142 | # (source start file, target name, title, author, 143 | # dir menu entry, description, category) 144 | texinfo_documents = [ 145 | ( 146 | master_doc, 147 | "jointly", 148 | u"jointly Documentation", 149 | author, 150 | "jointly", 151 | "Synchronize sensor data from accelerometer shakes.", 152 | "Miscellaneous", 153 | ), 154 | ] 155 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | 2 | # Contributor Covenant Code of Conduct 3 | 4 | ## Our Pledge 5 | 6 | We as members, contributors, and leaders pledge to make participation in our 7 | community a harassment-free experience for everyone, regardless of age, body 8 | size, visible or invisible disability, ethnicity, sex characteristics, gender 9 | identity and expression, level of experience, education, socio-economic status, 10 | nationality, personal appearance, race, caste, color, religion, or sexual identity 11 | and orientation. 12 | 13 | We pledge to act and interact in ways that contribute to an open, welcoming, 14 | diverse, inclusive, and healthy community. 15 | 16 | ## Our Standards 17 | 18 | Examples of behavior that contributes to a positive environment for our 19 | community include: 20 | 21 | * Demonstrating empathy and kindness toward other people 22 | * Being respectful of differing opinions, viewpoints, and experiences 23 | * Giving and gracefully accepting constructive feedback 24 | * Accepting responsibility and apologizing to those affected by our mistakes, 25 | and learning from the experience 26 | * Focusing on what is best not just for us as individuals, but for the 27 | overall community 28 | 29 | Examples of unacceptable behavior include: 30 | 31 | * The use of sexualized language or imagery, and sexual attention or 32 | advances of any kind 33 | * Trolling, insulting or derogatory comments, and personal or political attacks 34 | * Public or private harassment 35 | * Publishing others' private information, such as a physical or email 36 | address, without their explicit permission 37 | * Other conduct which could reasonably be considered inappropriate in a 38 | professional setting 39 | 40 | ## Enforcement Responsibilities 41 | 42 | Community leaders are responsible for clarifying and enforcing our standards of 43 | acceptable behavior and will take appropriate and fair corrective action in 44 | response to any behavior that they deem inappropriate, threatening, offensive, 45 | or harmful. 46 | 47 | Community leaders have the right and responsibility to remove, edit, or reject 48 | comments, commits, code, wiki edits, issues, and other contributions that are 49 | not aligned to this Code of Conduct, and will communicate reasons for moderation 50 | decisions when appropriate. 51 | 52 | ## Scope 53 | 54 | This Code of Conduct applies within all community spaces, and also applies when 55 | an individual is officially representing the community in public spaces. 56 | Examples of representing our community include using an official e-mail address, 57 | posting via an official social media account, or acting as an appointed 58 | representative at an online or offline event. 59 | 60 | ## Enforcement 61 | 62 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 63 | reported to the community leaders responsible for enforcement at ariane.sasso@gmail.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 
65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.1, available at 119 | [https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. 120 | 121 | Community Impact Guidelines were inspired by 122 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC]. 123 | 124 | For answers to common questions about this code of conduct, see the FAQ at 125 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available 126 | at [https://www.contributor-covenant.org/translations][translations]. 
127 | 128 | [homepage]: https://www.contributor-covenant.org 129 | [v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html 130 | [Mozilla CoC]: https://github.com/mozilla/diversity 131 | [FAQ]: https://www.contributor-covenant.org/faq 132 | [translations]: https://www.contributor-covenant.org/translations 133 | -------------------------------------------------------------------------------- /jointly/helpers.py: -------------------------------------------------------------------------------- 1 | """Contains various helper functions useful in conjunction with or internally to jointly.""" 2 | import logging 3 | from pprint import pprint 4 | from typing import List, Tuple, Iterable 5 | 6 | import numpy as np 7 | import pandas as pd 8 | 9 | from jointly import ShakeMissingException 10 | from jointly.types import SyncPairs, SynchronizationPair, SyncPairTimeshift 11 | 12 | logger = logging.getLogger("jointly.helpers") 13 | 14 | 15 | def calculate_magnitude( 16 | df: pd.DataFrame, of_cols: List[str], title: str = "Magnitude" 17 | ) -> pd.DataFrame: 18 | """ 19 | Calculate the magnitude of a subset of columns from a DataFrame. 20 | 21 | Will return 0 if the input data is NaN to allow future algorithms 22 | to continue working despite the NaN in the input values. 23 | """ 24 | data = df[of_cols] 25 | result = np.sqrt(np.square(data).sum(axis=1)) 26 | result.name = title 27 | return result.to_frame(name=title) 28 | 29 | 30 | def normalize(x: List[float]): 31 | """Normalizes signal to interval [-1, 1] with mean 0.""" 32 | if len(x) <= 1: 33 | raise ValueError("Cannot normalize list with less than 2 entries") 34 | x_centered = x - np.mean(x) 35 | x_maximum = np.max(np.abs(x_centered)) 36 | if x_maximum == 0: 37 | raise ZeroDivisionError("input vector is all-zero") 38 | x_normalized = x_centered / x_maximum 39 | return x_normalized 40 | 41 | 42 | def get_equidistant_signals(signals: pd.DataFrame, frequency: float): 43 | """ 44 | Returns dataframe with columns from ``signals`` sampled equidistantly at the specified frequency. 45 | 46 | :param signals: the columns of this dataframe will be independently resampled 47 | :param frequency: the target frequency in Hz 48 | :return: equidistantly sampled dataframe 49 | """ 50 | freq = "{}N".format(int(1e9 / frequency)) 51 | df = pd.DataFrame( 52 | {col: signals[col].dropna().resample(freq).nearest() for col in signals.columns} 53 | ) 54 | index = pd.date_range( 55 | start=pd.to_datetime(df.index.min(), unit="s"), 56 | end=pd.to_datetime(df.index.max(), unit="s"), 57 | freq=freq, 58 | ) 59 | return df.set_index(index) 60 | 61 | 62 | def get_max_ref_frequency(signals: pd.DataFrame) -> float: 63 | """ 64 | Get the maximum frequency in the dataframe 65 | 66 | :param signals: input dataframe with the given signals 67 | :return: float describing the maximum frequency in the source data. 68 | """ 69 | if not isinstance(signals, pd.DataFrame): 70 | raise ValueError("Can only find the max frequency of DataFrames") 71 | if len(signals.columns) == 0: 72 | raise ValueError("Can't get the max frequency of 0 columns") 73 | 74 | frequencies = signals.aggregate(infer_freq) 75 | return np.amax(frequencies) 76 | 77 | 78 | def infer_freq(series: pd.Series) -> float: 79 | """ 80 | Infer the frequency of a series by finding the median temporal distance between its elements. 
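For example, an index whose entries are spaced 10 ms apart has a median inter-sample delta of 10 ms, so the inferred frequency is 1 s / 10 ms = 100.0 Hz; using the median keeps the estimate robust against occasional dropped samples.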
81 | 82 | :param series: the frequency of this series will be inferred 83 | :return: frequency, as a float, measured in Hz 84 | """ 85 | index = series.dropna().index 86 | time_deltas = index[1:] - index[:-1] 87 | median = np.median(time_deltas) 88 | return np.timedelta64(1, "s") / median 89 | 90 | 91 | def stretch_signals( 92 | source: pd.DataFrame, factor: float, start_time: pd.Timestamp 93 | ) -> pd.DataFrame: 94 | """ 95 | Returns a copy of DataFrame with stretched DateTimeIndex. 96 | 97 | :param source: the index of this DataFrame will be stretched. 98 | :param factor: the factor by which to stretch the DateTimeIndex 99 | :param start_time: the fixed anchor timestamp from which the index is stretched, usually the first index entry 100 | :return: copy of the dataframe with stretched index 101 | """ 102 | timedelta = source.index - start_time 103 | new_index = timedelta * factor + start_time 104 | df = source.set_index(new_index, verify_integrity=True) 105 | return df 106 | 107 | 108 | def get_stretch_factor( 109 | segments: SynchronizationPair, timeshifts: SyncPairTimeshift 110 | ) -> float: 111 | """ 112 | Get the stretch factor required to stretch the duration between the segments such that the signal, 113 | when shifted by the amounts given in ``timeshifts``, aligns exactly with the reference signal. 114 | 115 | This function should almost exclusively be used within jointly, as the parameters are produced during 116 | the synchronization process. 117 | 118 | :param segments: the segment instance containing the segment info to be stretched 119 | :param timeshifts: the timeshifts that should be applied to make the signal align to the reference signal 120 | :return: the stretch factor, as a float 121 | """ 122 | old_length = segments["second"]["start"] - segments["first"]["start"] 123 | new_length = old_length + timeshifts["second"] - timeshifts["first"] 124 | stretch_factor = new_length / old_length 125 | return stretch_factor 126 | 127 | 128 | def verify_segments(signals: Iterable[str], segments: SyncPairs): 129 | """Verify that two synchronization points (i.e., start and end) have been found for each signal.""" 130 | for signal in signals: 131 | for segment in ["first", "second"]: 132 | for part in ["start", "end"]: 133 | try: 134 | segments[signal][segment][part] 135 | except KeyError: 136 | print("Dumping all detected segments:") 137 | pprint(segments) 138 | raise ShakeMissingException( 139 | f"No {segment} shake detected for {signal}, missing the {part}" 140 | ) 141 | 142 | 143 | def get_segment_data( 144 | dataframe: pd.DataFrame, segments: SyncPairs, col: str, segment: str 145 | ) -> Tuple[pd.Timestamp, pd.Timestamp, pd.DataFrame]: 146 | """ 147 | Return a 3-tuple of start and end indices plus data 148 | within that timeframe of the given column in the given dataframe. 149 | """ 150 | start = segments[col][segment]["start"] 151 | end = segments[col][segment]["end"] 152 | return start, end, dataframe[col][start:end] 153 | -------------------------------------------------------------------------------- /tests/test_examples.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file contains test functions for the examples in the documentation. 3 | 4 | Do not modify the examples from the documentation.
5 | """ 6 | 7 | import os 8 | import pytest 9 | 10 | 11 | @pytest.fixture(scope="function") 12 | def change_to_repo_root(request): 13 | if os.getcwd().endswith("tests"): 14 | os.chdir("..") 15 | yield 16 | os.chdir(request.config.invocation_dir) 17 | 18 | 19 | def test_usage_logging(change_to_repo_root): 20 | import logging 21 | from jointly.log import logger 22 | 23 | logger.setLevel(logging.DEBUG) 24 | 25 | 26 | def test_save_data(change_to_repo_root): 27 | import pandas as pd 28 | import tempfile 29 | import traceback 30 | 31 | import jointly 32 | 33 | # load source dataframes with datetime index 34 | faros_df = pd.read_csv( 35 | "./test-data/faros-plus-physilog/faros.csv.gz", index_col=[0], parse_dates=True 36 | ) 37 | physilog_df = pd.read_csv( 38 | "./test-data/faros-plus-physilog/physilog.csv.gz", 39 | index_col=[0], 40 | parse_dates=True, 41 | ) 42 | 43 | # the magnitude is a common property that keeps shake information without axis relevance 44 | faros_df["Accel Mag"] = jointly.calculate_magnitude( 45 | faros_df, ["Accel X", "Accel Y", "Accel Z"] 46 | ) 47 | physilog_df["Accel Mag"] = jointly.calculate_magnitude( 48 | physilog_df, ["Accel X", "Accel Y", "Accel Z"] 49 | ) 50 | 51 | # create dictionary of source sensors 52 | sources = { 53 | "Faros": { 54 | "data": faros_df, 55 | "ref_column": "Accel Mag", 56 | }, 57 | "Physilog": { 58 | "data": physilog_df, 59 | "ref_column": "Accel Mag", 60 | }, 61 | } 62 | 63 | # set shake extraction parameters 64 | extractor = jointly.ShakeExtractor() 65 | extractor.start_window_length = pd.Timedelta(seconds=15) 66 | extractor.end_window_length = pd.Timedelta(seconds=10) 67 | extractor.min_length = 3 68 | extractor.threshold = 0.55 69 | 70 | # prepare the synchronizer 71 | synchronizer = jointly.Synchronizer( 72 | sources, reference_source_name="Faros", extractor=extractor 73 | ) 74 | 75 | # define output format for two files, one containing all acceleration 76 | # data, the other the ECG data 77 | tables = { 78 | "ACC": { 79 | "Faros": ["Accel X", "Accel Y", "Accel Z"], 80 | "Physilog": ["Accel X", "Accel Y", "Accel Z"], 81 | }, 82 | "ECG": { 83 | "Faros": ["ECG"], 84 | }, 85 | } 86 | 87 | # if the extractor parameters are wrong, print the problem and show the data 88 | try: 89 | # get_synced_data returns a dictionary of sensor names to synced DataFrames 90 | with tempfile.TemporaryDirectory() as tmp_dir: 91 | synchronizer.save_data(tmp_dir, tables=tables, save_total_table=False) 92 | print("test") 93 | except Exception: 94 | traceback.print_exc() 95 | jointly.plot_reference_columns(sources) 96 | 97 | 98 | def test_usage_df_head(change_to_repo_root): 99 | import pandas as pd 100 | 101 | faros_df = pd.read_csv( 102 | "./test-data/faros-plus-physilog/faros.csv.gz", index_col=[0], parse_dates=True 103 | ) 104 | print(faros_df.head()) 105 | 106 | 107 | def test_usage_extractor_params(change_to_repo_root): 108 | import pandas as pd 109 | import jointly 110 | 111 | extractor = jointly.ShakeExtractor() 112 | 113 | # The start window should be long enough to contain 114 | # only the start shake in every data stream 115 | extractor.start_window_length = pd.Timedelta(seconds=15) 116 | 117 | # The end window (measured from the end of data) 118 | # should be exactly long enough to contain 119 | # only the end shake in every data stream 120 | extractor.end_window_length = pd.Timedelta(seconds=3) 121 | 122 | # Set to at most the number of shakes you did 123 | extractor.min_length = 3 124 | 125 | # Shakes are only accepted if they are higher than the 126 | # 
threshold (with all data normalized). 127 | extractor.threshold = 0.5 128 | 129 | 130 | def test_main_readme_example(change_to_repo_root): 131 | import pandas as pd 132 | import tempfile 133 | import traceback 134 | 135 | import jointly 136 | 137 | # load source dataframes with datetime index 138 | faros_df = pd.read_csv( 139 | "./test-data/faros-plus-physilog/faros.csv.gz", index_col=[0], parse_dates=True 140 | ) 141 | physilog_df = pd.read_csv( 142 | "./test-data/faros-plus-physilog/physilog.csv.gz", 143 | index_col=[0], 144 | parse_dates=True, 145 | ) 146 | 147 | # the magnitude is a common property that keeps shake information without axis relevance 148 | faros_df["Accel Mag"] = jointly.calculate_magnitude( 149 | faros_df, ["Accel X", "Accel Y", "Accel Z"] 150 | ) 151 | physilog_df["Accel Mag"] = jointly.calculate_magnitude( 152 | physilog_df, ["Accel X", "Accel Y", "Accel Z"] 153 | ) 154 | 155 | # create dictionary of source sensors 156 | sources = { 157 | "Faros": { 158 | "data": faros_df, 159 | "ref_column": "Accel Mag", 160 | }, 161 | "Physilog": { 162 | "data": physilog_df, 163 | "ref_column": "Accel Mag", 164 | }, 165 | } 166 | 167 | # set shake extraction parameters 168 | extractor = jointly.ShakeExtractor() 169 | extractor.start_window_length = pd.Timedelta(seconds=15) 170 | extractor.end_window_length = pd.Timedelta(seconds=10) 171 | extractor.min_length = 3 172 | extractor.threshold = 0.55 173 | 174 | # prepare the synchronizer 175 | synchronizer = jointly.Synchronizer( 176 | sources, reference_source_name="Faros", extractor=extractor 177 | ) 178 | 179 | # if the extractor parameters are wrong, print the problem and show the data 180 | try: 181 | # get_synced_data returns a dictionary of sensor names to synced DataFrames 182 | synchronizer.get_synced_data() 183 | except Exception: 184 | traceback.print_exc() 185 | jointly.plot_reference_columns(sources) 186 | 187 | # save a file for each input sensor somewhere 188 | with tempfile.TemporaryDirectory() as tmp_dir: 189 | synchronizer.save_pickles(tmp_dir) 190 | -------------------------------------------------------------------------------- /tests/test_helpers.py: -------------------------------------------------------------------------------- 1 | import copy 2 | 3 | import numpy as np 4 | import pandas as pd 5 | import pytest 6 | 7 | from jointly import ShakeMissingException, SyncPairTimeshift 8 | from jointly.helpers import ( 9 | calculate_magnitude, 10 | normalize, 11 | verify_segments, 12 | get_segment_data, 13 | infer_freq, 14 | get_max_ref_frequency, 15 | get_stretch_factor, 16 | stretch_signals, 17 | get_equidistant_signals, 18 | ) 19 | from jointly.types import SynchronizationPair 20 | from tests.parquet_reader import get_parquet_test_data 21 | 22 | 23 | def test_calculate_magnitude(): 24 | df = pd.DataFrame({"x": [1, 2, 3], "y": [-1.5, 2, 0], "z": [-1.5, 25, 1234]}) 25 | magnitude = calculate_magnitude(df, ["x", "y", "z"], "testname") 26 | correct = pd.DataFrame( 27 | {"testname": [2.345207879911715, 25.15949125081825, 1234.0036466720833]} 28 | ) 29 | assert magnitude.equals(correct), "Should have correct magnitude results" 30 | df["Magnitude"] = magnitude 31 | # noinspection PyUnresolvedReferences 32 | assert df["Magnitude"].equals( 33 | magnitude["testname"] 34 | ), "Should be possible to set+rename result to old dataframe" 35 | 36 | 37 | def test_normalize(): 38 | assert np.array_equal(normalize([1, 2, 3]), [-1, 0, 1]), "should be normalized" 39 | assert np.array_equal(normalize([-1, 0, 1]), [-1, 0, 1]), "should be 
normalized" 40 | 41 | with pytest.raises(ValueError): 42 | normalize([]) 43 | with pytest.raises(ValueError): 44 | normalize([1]) 45 | with pytest.raises(ZeroDivisionError): 46 | normalize([0, 0]) 47 | 48 | 49 | def test_get_equidistant_signals(): 50 | test_data = get_parquet_test_data("faros-internal.parquet", 667) 51 | 52 | result = get_equidistant_signals(test_data, frequency=1_000) 53 | for col in result.columns: 54 | assert infer_freq(result[col]) == 1000, f"{col} should have 1000 Hz" 55 | 56 | result = get_equidistant_signals(test_data, frequency=500) 57 | for col in result.columns: 58 | assert infer_freq(result[col]) == 500, f"{col} should have 500 Hz" 59 | 60 | result = get_equidistant_signals(test_data, frequency=1) 61 | for col in result.columns: 62 | assert infer_freq(result[col]) == 1, f"{col} should have 1 Hz" 63 | 64 | 65 | def test_get_max_ref_frequency(): 66 | test_data = get_parquet_test_data("faros-internal.parquet", 667) 67 | 68 | assert get_max_ref_frequency(test_data) == 500, "max(all) should be 500 Hz" 69 | assert ( 70 | get_max_ref_frequency(test_data[["ACCELERATION_X", "ACCELERATION_Y"]]) == 100 71 | ), "max(acc) should be 100 Hz" 72 | assert ( 73 | get_max_ref_frequency(test_data["ACCELERATION_Y"].to_frame()) == 100 74 | ), "max(acc) should be 100 Hz" 75 | 76 | with pytest.raises(ValueError): 77 | get_max_ref_frequency(test_data["ACCELERATION_X"]) 78 | 79 | with pytest.raises(ValueError): 80 | get_max_ref_frequency(pd.DataFrame()) 81 | 82 | 83 | def test_infer_freq(): 84 | test_data = get_parquet_test_data("faros-internal.parquet", 667) 85 | 86 | assert infer_freq(test_data["ECG"]) == 500, "ECG should be 500 Hz" 87 | assert infer_freq(test_data["ACCELERATION_X"]) == 100, "Acc. should be 100 Hz" 88 | assert infer_freq(test_data["ACCELERATION_Y"]) == 100, "Acc. should be 100 Hz" 89 | assert infer_freq(test_data["ACCELERATION_Z"]) == 100, "Acc. 
should be 100 Hz" 90 | 91 | 92 | def test_stretch_signals(): 93 | test_idx = pd.date_range(start="1/1/2018", periods=8) 94 | test_data = [42] * 8 95 | test_df = pd.DataFrame(test_data, test_idx) 96 | 97 | result = stretch_signals(test_df, factor=1, start_time=test_idx.min()) 98 | assert result is not test_df, "must be a copy" 99 | assert result.equals(test_df), "must be equal" 100 | 101 | result = stretch_signals(test_df, factor=2, start_time=test_idx.min()) 102 | assert result is not test_df, "must be a copy" 103 | assert result.index.max() == pd.to_datetime( 104 | "1/15/2018" 105 | ), "index must span double the distance" 106 | 107 | 108 | def test_get_stretch_factor(): 109 | def _ts(seconds: int) -> pd.Timestamp: 110 | return pd.Timestamp(seconds, unit="s") 111 | 112 | def _td(seconds: int) -> pd.Timedelta: 113 | return pd.Timedelta(seconds, unit="s") 114 | 115 | segments: SynchronizationPair = { 116 | "first": {"start": _ts(1), "end": _ts(3)}, 117 | "second": {"start": _ts(11), "end": _ts(14)}, 118 | } 119 | 120 | timeshift: SyncPairTimeshift = {"first": _td(5), "second": _td(0)} 121 | assert ( 122 | get_stretch_factor(segments, timeshift) == 0.5 123 | ), "should double the speed if distance halves" 124 | 125 | timeshift: SyncPairTimeshift = {"first": _td(0), "second": _td(10)} 126 | assert ( 127 | get_stretch_factor(segments, timeshift) == 2 128 | ), "should halve the speed if distance doubles" 129 | 130 | 131 | def test_verify_segments(): 132 | """Delete all parts of a proper SyncPairs instance and check that the verification algorithm throws""" 133 | good_segments = { 134 | "s": { 135 | "first": {"start": 0, "end": 0}, 136 | "second": {"start": 0, "end": 0}, 137 | }, 138 | "d": { 139 | "first": {"start": 0, "end": 0}, 140 | "second": {"start": 0, "end": 0}, 141 | }, 142 | } 143 | columns = ["s", "d"] 144 | # noinspection PyTypeChecker 145 | verify_segments(columns, good_segments) 146 | 147 | with pytest.raises(ShakeMissingException): 148 | verify_segments(columns, {}) 149 | 150 | for signal_name, signal_dict in good_segments.items(): 151 | for segment_name, segment_dict in signal_dict.items(): 152 | for position_name in segment_dict: 153 | copied = copy.deepcopy(good_segments) 154 | del copied[signal_name][segment_name][position_name] 155 | 156 | with pytest.raises(ShakeMissingException): 157 | # noinspection PyTypeChecker 158 | verify_segments(columns, copied) 159 | copied = copy.deepcopy(good_segments) 160 | del copied[signal_name][segment_name] 161 | 162 | with pytest.raises(ShakeMissingException): 163 | # noinspection PyTypeChecker 164 | verify_segments(columns, copied) 165 | copied = copy.deepcopy(good_segments) 166 | del copied[signal_name] 167 | 168 | with pytest.raises(ShakeMissingException): 169 | # noinspection PyTypeChecker 170 | verify_segments(columns, copied) 171 | 172 | 173 | def test_get_segment_data(): 174 | segments = { 175 | "s": {"first": {"start": 0, "end": 2}}, 176 | } 177 | df = pd.DataFrame({"s": [1, 2, 3, 4]}) 178 | result_expected = 0, 2, pd.Series([1, 2], name="s") 179 | # noinspection PyTypeChecker 180 | result_actual = get_segment_data(df, segments, "s", "first") 181 | 182 | assert len(result_actual) == 3, "should have start, end, data" 183 | assert result_expected[0] == result_actual[0], "should find right start" 184 | assert result_expected[1] == result_actual[1], "should find right end" 185 | assert result_expected[2].equals(result_actual[2]), "should extract correct portion" 186 |
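187 | 188 | # An illustrative extra check (a sketch added here, not part of the original suite): 189 | # stretching by the factor from get_stretch_factor should scale the gap between 190 | # the two sync segments to the new gap implied by the timeshifts; here a 20 s 191 | # gap must shrink to 10 s, so the factor is 0.5. 192 | def test_stretch_factor_scales_segment_distance(): 193 | idx = pd.date_range(start="1/1/2018", periods=21, freq=pd.Timedelta(seconds=1)) 194 | df = pd.DataFrame({"s": range(21)}, index=idx) 195 | segments: SynchronizationPair = { 196 | "first": {"start": idx[0], "end": idx[2]}, 197 | "second": {"start": idx[20], "end": idx[20]}, 198 | } 199 | # shifting the first segment forward by 10 s halves the 20 s gap 200 | timeshift: SyncPairTimeshift = { 201 | "first": pd.Timedelta(seconds=10), 202 | "second": pd.Timedelta(seconds=0), 203 | } 204 | factor = get_stretch_factor(segments, timeshift) 205 | assert factor == 0.5, "gap should shrink from 20 s to 10 s" 206 | stretched = stretch_signals(df, factor, idx.min()) 207 | assert stretched.index.max() - stretched.index.min() == pd.Timedelta(seconds=10)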
-------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ============== 2 | Jointly 3 | ============== 4 | 5 | 6 | .. image:: https://img.shields.io/pypi/v/jointly.svg 7 | :target: https://pypi.python.org/pypi/jointly 8 | 9 | .. image:: https://github.com/hpi-dhc/jointly/actions/workflows/deploy.yml/badge.svg 10 | :target: https://github.com/hpi-dhc/jointly/actions/workflows/deploy.yml?query=branch%3Amaster 11 | 12 | .. image:: https://github.com/hpi-dhc/jointly/actions/workflows/all.yml/badge.svg 13 | :target: https://github.com/hpi-dhc/jointly/actions/workflows/all.yml?query=branch%3Amaster 14 | 15 | .. image:: https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/enra64/f731de158a21515e2d6c52ed48d406ad/raw/jointly_coverage_main.json 16 | :target: https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/enra64/f731de158a21515e2d6c52ed48d406ad/raw/jointly_coverage_main.json 17 | 18 | .. image:: https://readthedocs.org/projects/jointly/badge/?version=latest 19 | :target: https://jointly.readthedocs.io/en/latest/?badge=latest 20 | :alt: Documentation Status 21 | 22 | .. image:: https://pyup.io/repos/github/hpi-dhc/jointly/shield.svg 23 | :target: https://pyup.io/repos/github/hpi-dhc/jointly/ 24 | :alt: Updates 25 | 26 | .. image:: https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg 27 | :target: https://github.com/hpi-dhc/jointly/blob/master/CODE_OF_CONDUCT.md 28 | 29 | .. image:: https://zenodo.org/badge/303936309.svg 30 | :target: https://zenodo.org/badge/latestdoi/303936309 31 | 32 | .. image:: https://tinyurl.com/y22nb8up 33 | :target: https://github.com/pyOpenSci/software-review/issues/45 34 | 35 | Jointly is a python package for synchronizing sensors with accelerometer data. You need this package if you're a researcher who has recorded accelerometer data (plus possibly other data) on multiple sensors and wants to synchronize the multiple data streams precisely. Specifically, shake all your sensors together before and after a study and jointly will find these shakes, remove any temporal offset between sensors, and stretch the data so every clock aligns with a reference sensor. Jointly ingests and produces ``pandas DataFrame`` objects. 36 | 37 | * Free software: MIT license 38 | * Documentation: https://jointly.readthedocs.io 39 | 40 | 41 | Features 42 | -------- 43 | 44 | * Detect and compare shakes in multiple sensor data streams 45 | * Remove temporal offsets in the data 46 | * Remove clock speed offsets by stretching the data 47 | 48 | Installation 49 | ------------ 50 | 51 | Install the package from pypi: 52 | 53 | .. code:: bash 54 | 55 | pip install jointly 56 | 57 | You might want to check out our `contributing guide`_ in case you want to edit the package. 58 | 59 | Usage 60 | ----- 61 | 62 | The data has to be provided in pandas ``DataFrame`` instances with a 63 | ``DateTimeIndex`` for each sensor. In the following example, ``Faros`` and ``Empatica`` 64 | are two sensors we want to synchronize, and we have already prepared dataframes for them. 65 | The Empatica is the reference source, and thus the Faros' data will be changed in the output. 66 | The ``ref_column`` is the column that contains the characteristic shake, and all other columns 67 | in the ``DataFrame`` will be synchronized together with that column. 68 | 69 | .. 
code:: python 70 | 71 | import pandas as pd 72 | import tempfile 73 | import traceback 74 | 75 | import jointly 76 | 77 | # load source dataframes with datetime index 78 | faros_df = pd.read_csv( 79 | "./test-data/faros-plus-physilog/faros.csv.gz", 80 | index_col=[0], 81 | parse_dates=True 82 | ) 83 | physilog_df = pd.read_csv( 84 | "./test-data/faros-plus-physilog/physilog.csv.gz", 85 | index_col=[0], 86 | parse_dates=True, 87 | ) 88 | 89 | # the magnitude is a common property that keeps shake information without axis relevance 90 | faros_df["Accel Mag"] = jointly.calculate_magnitude( 91 | faros_df, ["Accel X", "Accel Y", "Accel Z"] 92 | ) 93 | physilog_df["Accel Mag"] = jointly.calculate_magnitude( 94 | physilog_df, ["Accel X", "Accel Y", "Accel Z"] 95 | ) 96 | 97 | # create dictionary of source sensors 98 | sources = { 99 | "Faros": { 100 | "data": faros_df, 101 | "ref_column": "Accel Mag", 102 | }, 103 | "Physilog": { 104 | "data": physilog_df, 105 | "ref_column": "Accel Mag", 106 | }, 107 | } 108 | 109 | # set shake extraction parameters 110 | extractor = jointly.ShakeExtractor() 111 | extractor.start_window_length = pd.Timedelta(seconds=15) 112 | extractor.end_window_length = pd.Timedelta(seconds=10) 113 | extractor.min_length = 3 114 | extractor.threshold = 0.55 115 | 116 | # prepare the synchronizer 117 | synchronizer = jointly.Synchronizer( 118 | sources, reference_source_name="Faros", extractor=extractor 119 | ) 120 | 121 | # if the extractor parameters are wrong, print the problem and show the data 122 | try: 123 | # get_synced_data returns a dictionary of sensor names to synced DataFrames 124 | synchronizer.get_synced_data() 125 | except Exception: 126 | traceback.print_exc() 127 | jointly.plot_reference_columns(sources) 128 | 129 | # save a file for each input sensor somewhere 130 | with tempfile.TemporaryDirectory() as tmp_dir: 131 | synchronizer.save_pickles(tmp_dir) 132 | 133 | Documentation Deep Links 134 | ~~~~~~~~~~~~~~~~~~~~~~~~ 135 | 136 | Here you can find more information on specific topics: 137 | 138 | * `Preparing Data for Ingestion`_ 139 | * `Tuning the Shake Detection`_ 140 | * `Debugging the Shake Detection`_ 141 | * `How to Save the Synchronized Data`_ 142 | * `How to Enable Logging`_ 143 | * `Full Explanation of the Synchronization`_ 144 | 145 | Template Credits 146 | ---------------- 147 | 148 | This package was created with Cookiecutter_ and the `pyOpenSci/cookiecutter-pyopensci`_ project template, based off `audreyr/cookiecutter-pypackage`_. 149 | 150 | .. _Cookiecutter: https://github.com/audreyr/cookiecutter 151 | .. _`pyOpenSci/cookiecutter-pyopensci`: https://github.com/pyOpenSci/cookiecutter-pyopensci 152 | .. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage 153 | .. _`Preparing Data for Ingestion`: https://jointly.readthedocs.io/en/latest/usage.html#preparing-data-for-ingestion 154 | .. _`Tuning the Shake Detection`: https://jointly.readthedocs.io/en/latest/usage.html#tuning-shake-detection 155 | .. _`Debugging the Shake Detection`: https://jointly.readthedocs.io/en/latest/usage.html#debugging 156 | .. _`How to Save the Synchronized Data`: https://jointly.readthedocs.io/en/latest/usage.html#saving-data 157 | .. _`How to Enable Logging`: https://jointly.readthedocs.io/en/latest/usage.html#logging 158 | .. _`Full Explanation of the Synchronization`: https://jointly.readthedocs.io/en/latest/background.html#the-syncing-process 159 | .. 
_`contributing guide`: https://jointly.readthedocs.io/en/latest/contributing.html 160 | 161 | Citation 162 | -------- 163 | 164 | Arne Herdick, Felix Musmann, Ariane Sasso, Justin Albert, & Bert Arnrich. (2022). Jointly: A Python package for synchronizing multiple sensors with accelerometer data (1.0.4). Zenodo. https://doi.org/10.5281/zenodo.5833858 165 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | Usage 3 | ========== 4 | 5 | 6 | Preparing Data for Ingestion 7 | ---------------------------- 8 | 9 | The data has to be provided in a pandas ``DataFrame`` with a 10 | ``DateTimeIndex``. The following example shows how such a dataframe 11 | should look: 12 | 13 | .. code:: python 14 | 15 | import pandas as pd 16 | 17 | faros_df = pd.read_csv( 18 | "./test-data/faros-plus-physilog/faros.csv.gz", 19 | index_col=[0], 20 | parse_dates=True 21 | ) 22 | print(faros_df.head()) 23 | 24 | The output of ``faros_df.head()`` shows that the index is a ``DateTimeIndex``. 25 | The ``NaN`` values due to the different sampling frequencies are ignored during synchronization. 26 | 27 | :: 28 | 29 | Accel X Accel Y Accel Z ECG 30 | 1970-01-01 00:00:01.000 -88.0 771.0 -531.5 -21.0 31 | 1970-01-01 00:00:01.008 NaN NaN NaN -10.0 32 | 1970-01-01 00:00:01.010 -86.0 779.0 -539.5 NaN 33 | 1970-01-01 00:00:01.016 NaN NaN NaN -2.0 34 | 1970-01-01 00:00:01.020 -82.5 781.0 -543.0 NaN 35 | 36 | Each signal source, i.e., each sensor, 37 | is given in a dictionary together with the name of the column 38 | containing the events that should be synchronized, e.g., the 39 | shakes common to all sensors, visible in the acceleration magnitude. 40 | The name of that column and its frequency can be different for 41 | each sensor. 42 | 43 | Finally, given the source dictionary, the synchronizer instance 44 | can be created. 45 | 46 | .. code:: python 47 | 48 | import jointly 49 | 50 | sources = { 51 | "Faros": { 52 | "data": faros_df, 53 | "ref_column": "Accel Mag", 54 | }, 55 | "Physilog": { 56 | "data": physilog_df, 57 | "ref_column": "Accel Mag", 58 | }, 59 | # Any number of sensors can be added 60 | # 'Everion': { 61 | # 'data': everion_dataframe, 62 | # 'ref_column': 'ACCELERATION_MAGNITUDE', 63 | # } 64 | } 65 | 66 | jointly.Synchronizer(sources, reference_source_name="Faros") 67 | 68 | Tuning Shake Detection 69 | ---------------------- 70 | 71 | If the shake detection doesn't find all shakes on the first try, 72 | the following parameters will help: 73 | 74 | .. code:: python 75 | 76 | import pandas as pd 77 | import jointly 78 | 79 | extractor = jointly.ShakeExtractor() 80 | 81 | # The start window should be long enough to contain 82 | # only the start shake in every data stream 83 | extractor.start_window_length = pd.Timedelta(seconds=15) 84 | 85 | # The end window (measured from the end of data) 86 | # should be exactly long enough to contain 87 | # only the end shake in every data stream 88 | extractor.end_window_length = pd.Timedelta(seconds=3) 89 | 90 | # Set to at most the number of shakes you did 91 | extractor.min_length = 3 92 | 93 | # Shakes are only accepted if they are higher than the 94 | # threshold (with all data normalized). 95 | extractor.threshold = 0.5 96 | 97 | Debugging 98 | ~~~~~~~~~ 99 | 100 | To find issues with the shake detection, it often helps to plot the data.
101 | ``plot_reference_columns`` is available to plot the reference columns from 102 | a source table. 103 | 104 | Problems during synchronization throw exceptions, such as a ``BadWindowException``: 105 | 106 | jointly.synchronization_errors.BadWindowException: 107 | 108 | Start (0 days 00:10:00) or end (0 days 00:10:00) window lengths greater than length of signal Faros (0 days 00:00:36.992000). Make it so each window only covers start or end, not both. 109 | 110 | Thus, the following code catches the problem and prints/shows helpful information: 111 | 112 | .. code:: python 113 | 114 | # if the extractor parameters are wrong, print the problem and show the data 115 | try: 116 | # get_synced_data returns a dictionary of sensor names to synced DataFrames 117 | synchronizer.get_synced_data() 118 | except Exception: 119 | traceback.print_exc() 120 | jointly.plot_reference_columns(sources) 121 | 122 | 123 | Saving data 124 | ----------- 125 | 126 | There are two approaches to saving the data. ``save_data()`` can be used 127 | to create an export file for each data category, while ``save_pickles`` 128 | dumps the synchronized dataframes for each individual sensor into a ``.pickle`` 129 | each. 130 | 131 | To run the following examples, you should already have a ``Synchronizer`` instance 132 | called ``synchronizer`` with an extractor configured such that no exceptions are thrown. 133 | Check the readme file for an example. 134 | 135 | ``save_pickles()`` 136 | ~~~~~~~~~~~~~~~~~~~~~~~ 137 | 138 | To save an individual DataFrame for each input source, call ``synchronizer.save_pickles()`` 139 | 140 | 141 | .. code:: python 142 | 143 | synchronizer.save_pickles(sync_dir_path) 144 | 145 | 146 | ``save_data()`` 147 | ~~~~~~~~~~~~~~~~~~~~~~~ 148 | 149 | To use ``save_data()`` create a dictionary as follows: every 150 | key at the root level defines the name of a corresponding file. 151 | In each entry, select the source columns by creating a key (for 152 | example, add ``Faros`` to select data from the ``Faros`` source) 153 | that points to the columns to be extracted from that source, e.g., 154 | ``['Accel X', 'Accel Y', 'Accel Z']``. 155 | 156 | .. 
code:: python 157 | 158 | # define output format for two files, one containing all acceleration 159 | # data, the other the ECG data 160 | tables = { 161 | 'ACC': { 162 | 'Faros': ['Accel X', 'Accel Y', 'Accel Z'], 163 | 'Physilog': ['Accel X', 'Accel Y', 'Accel Z'], 164 | }, 165 | 'ECG': { 166 | 'Faros': ['ECG'], 167 | }, 168 | } 169 | 170 | # if the extractor parameters are wrong, print the problem and show the data 171 | try: 172 | # get_synced_data returns a dictionary of sensor names to synced DataFrames 173 | with tempfile.TemporaryDirectory() as tmp_dir: 174 | synchronizer.save_data(tmp_dir, tables=tables, save_total_table=False) 175 | print("test") 176 | except Exception: 177 | traceback.print_exc() 178 | jointly.plot_reference_columns(sources) 179 | 180 | 181 | In the resulting CSV file, each combination gets a column like this: 182 | ``Faros_Accel X``, or ``Physilog_Accel Z``, etc: 183 | 184 | :: 185 | 186 | Faros_Accel X Faros_Accel Y Faros_Accel Z Physilog_Accel X Physilog_Accel Y Physilog_Accel Z 187 | 1970-01-01 00:00:01.000000000 -88 771 -531.5 188 | 1970-01-01 00:00:01.010000000 -86 779 -539.5 189 | 1970-01-01 00:00:01.020000000 -82.5 781 -543 190 | 1970-01-01 00:00:01.020907696 -0.80457 0.02234 0.61023 191 | 1970-01-01 00:00:01.030000000 -98 787 -521.5 192 | 1970-01-01 00:00:01.040000000 -80.5 777 -557 193 | 1970-01-01 00:00:01.050000000 -94 761.5 -539.5 194 | 1970-01-01 00:00:01.052150462 -0.81104 0.01721 0.59253 195 | 196 | 197 | 198 | Logging 199 | ------- 200 | 201 | To activate logging, simply add the following lines to your code: 202 | 203 | .. code:: python 204 | 205 | import logging 206 | from jointly.log import logger 207 | 208 | logger.setLevel(logging.DEBUG) 209 | 210 | This will give you insight into the shake detection, calculation of the 211 | timeshifts and stretching factor, and output plots of the segments. 212 | -------------------------------------------------------------------------------- /jointly/shake_extractor.py: -------------------------------------------------------------------------------- 1 | from typing import List, Tuple 2 | 3 | import numpy as np 4 | import pandas as pd 5 | import scipy.signal 6 | import scipy.interpolate 7 | import pprint 8 | 9 | from . import SyncPairs 10 | from .abstract_extractor import AbstractExtractor 11 | from .log import logger 12 | from .synchronization_errors import ( 13 | BadThresholdException, 14 | BadWindowException, 15 | ShakeMissingException, 16 | ) 17 | 18 | pp = pprint.PrettyPrinter() 19 | 20 | 21 | def _get_shake_weight(x: List[pd.DatetimeIndex]): 22 | """Returns a shake weight describing the importance of a shake sequence""" 23 | return np.median(x) + np.mean(x) 24 | 25 | 26 | class ShakeExtractor(AbstractExtractor): 27 | def __init__(self): 28 | super().__init__() 29 | self.start_window_length = pd.Timedelta(seconds=600) 30 | self.end_window_length = pd.Timedelta(seconds=600) 31 | self.threshold = 0.6 32 | 33 | @property 34 | def start_window_length(self) -> pd.Timedelta: 35 | """time window as pandas.Timedelta in which to look for peaks from start of signal""" 36 | return self._start_window_length 37 | 38 | @start_window_length.setter 39 | def start_window_length(self, value: pd.Timedelta): 40 | if isinstance(value, pd.Timedelta): 41 | self._start_window_length = value 42 | else: 43 | raise ValueError( 44 | "window lengths are given as e.g. pd.Timedelta(seconds=600)" 45 | ) 46 | 47 | @property 48 | def end_window_length(self) -> pd.Timedelta: 49 | """time window as pandas.Timedelta in which to look for peaks at end of signal""" 50 | return self._end_window_length 51 | 52 | @end_window_length.setter 53 | def end_window_length(self, value: pd.Timedelta): 54 | if isinstance(value, pd.Timedelta): 55 | self._end_window_length = value 56 | else: 57 | raise ValueError( 58 | "window lengths are given as e.g. pd.Timedelta(seconds=600)" 59 | ) 60 | 61 | @property 62 | def threshold(self) -> float: 63 | """min height for peak detection. In range (0, 1), as the data is normalized""" 64 | return self._threshold 65 | 66 | @threshold.setter 67 | def threshold(self, value: float): 68 | if 0 < value < 1: 69 | self._threshold = value 70 | else: 71 | raise ValueError(f"threshold must be given in (0, 1), but you gave {value}") 72 | 73 | distance = 1500 74 | """distance in milliseconds in which the next peak must occur to be considered a sequence""" 75 | 76 | min_length = 6 77 | """minimum number of peaks per sequence""" 78 | 79 | time_buffer = pd.Timedelta(seconds=1) 80 | """time buffer that is padded before the first and after the last peak when computing the segment timestamps""" 81 | 82 | def _merge_peak_sequences( 83 | self, peaks: List[pd.DatetimeIndex], signals: pd.DataFrame 84 | ) -> List[List[pd.DatetimeIndex]]: 85 | """ 86 | Merge the given peaks into peak sequences with inter-peak distances of less than ``self.distance``. 87 | 88 | :param peaks: list of peak indices 89 | :param signals: reference signals dataframe 90 | :return: list of lists, each inner list contains the indices of the peaks in one sequence 91 | """ 92 | sequences = [] 93 | for pos, index in enumerate(peaks): 94 | row = signals.iloc[[index]] 95 | if pos == 0: 96 | # start initial sequence 97 | sequences.append([row.index]) 98 | continue 99 | row_prev = signals.iloc[[peaks[pos - 1]]] 100 | time = pd.to_datetime(row.index) 101 | time_prev = pd.to_datetime(row_prev.index) 102 | if time_prev + pd.Timedelta(milliseconds=self.distance) < time: 103 | # start a new sequence, since this peak is more than ``distance`` ms after the previous one 104 | sequences.append([row.index]) 105 | else: 106 | # append to the current sequence, since this peak lies within ``distance`` ms of the previous one 107 | sequences[-1].append(row.index) 108 | return sequences 109 | 110 | def _get_peak_sequences( 111 | self, 112 | signals: pd.DataFrame, 113 | column: str, 114 | start_window: pd.Timestamp, 115 | end_window: pd.Timestamp, 116 | ) -> List[List[pd.DatetimeIndex]]: 117 | """ 118 | Returns index list of peak sequences from a normalized signal. 119 | Peaks that have no adjacent peaks within ``distance`` ms are ignored. 120 | Sequences with less than ``min_length`` peaks are ignored.
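For example, with the defaults ``distance = 1500`` and ``min_length = 6``, eight peaks spaced roughly 300 ms apart merge into a single accepted sequence, while two isolated peaks 2 s apart form two one-peak sequences and are discarded.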
121 | """ 122 | logger.debug(f"Using peak threshold {self.threshold}") 123 | 124 | # find peaks in start window 125 | start_part = signals[column].truncate(after=start_window) 126 | peaks_start, _ = scipy.signal.find_peaks(start_part, height=self.threshold) 127 | 128 | # find peaks in end window 129 | end_part = signals[column].truncate(before=end_window) 130 | peaks_end, _ = scipy.signal.find_peaks(end_part, height=self.threshold) 131 | peaks_end += signals.index.get_loc(end_part.index[0]) 132 | 133 | peaks = [*peaks_start, *peaks_end] 134 | logger.debug("Found {} peaks for {}".format(len(peaks), column)) 135 | 136 | # merge peaks into peak sequences 137 | sequences = self._merge_peak_sequences(peaks, signals) 138 | logger.debug( 139 | f"Merged peaks within {self.distance} ms to " 140 | f"{len(sequences)} sequences for {column}" 141 | ) 142 | 143 | # filter sequences with less than min_length peaks 144 | sequences_filtered = [seq for seq in sequences if len(seq) >= self.min_length] 145 | logger.debug( 146 | f"{len(sequences_filtered)} sequences satisfy" 147 | f" minimum length of {self.min_length} for {column}" 148 | ) 149 | 150 | return sequences_filtered 151 | 152 | def _choose_sequence( 153 | self, signal: pd.Series, shake_list: List[List[pd.DatetimeIndex]] 154 | ) -> Tuple[pd.Timestamp, pd.Timestamp]: 155 | """ 156 | Choose the sequence with the highest shake weight 157 | 158 | :param signal: the signal from which the shakes were extracted 159 | :param shake_list: list of peak sequence value lists 160 | :return: start and end index values 161 | """ 162 | best_shake = max(shake_list, key=_get_shake_weight) 163 | 164 | segment_start_time = best_shake[0].index[0] - self.time_buffer 165 | start_index = signal.index.get_loc(segment_start_time, method="nearest") 166 | 167 | segment_end_time = best_shake[-1].index[0] + self.time_buffer 168 | end_index = signal.index.get_loc(segment_end_time, method="nearest") 169 | 170 | return signal.index[start_index], signal.index[end_index] 171 | 172 | @staticmethod 173 | def _check_shakes_not_empty(shakes: List[List[int]], label: str): 174 | """Raise an exception if the given list of shakes is empty""" 175 | if len(shakes) <= 0: 176 | raise ShakeMissingException( 177 | f"No {label} shakes detected - " 178 | "check window lengths, " 179 | "detection threshold, " 180 | "minimum sequence length" 181 | ) 182 | 183 | def get_segments(self, signals: pd.DataFrame) -> SyncPairs: 184 | """ 185 | Returns dictionary with start and end for each sensor source, i.e., a ``SyncPairs`` instance 186 | 187 | :param signals: DataFrame containing the reference signals for each source 188 | :return: SyncPairs instance 189 | """ 190 | columns = list(signals.columns) 191 | self._init_segments(columns) 192 | 193 | for column in columns: 194 | last_timestamp = signals[column].last_valid_index() 195 | first_timestamp = signals[column].first_valid_index() 196 | duration = last_timestamp - first_timestamp 197 | if duration < self.start_window_length or duration < self.end_window_length: 198 | raise BadWindowException( 199 | f"Start ({self.start_window_length}) or end ({self.end_window_length}) " 200 | f"window lengths greater than length of signal {column} ({duration}). " 201 | f"Make it so each window only covers start or end, not both."
203 | ) 204 | 205 | start_window = first_timestamp + self.start_window_length 206 | end_window = last_timestamp - self.end_window_length 207 | peak_sequences = self._get_peak_sequences( 208 | signals, column, start_window, end_window 209 | ) 210 | 211 | start_shakes, end_shakes, other_shakes = [], [], [] 212 | for peak_sequence in peak_sequences: 213 | sequence_values = [signals[column][index] for index in peak_sequence] 214 | if sequence_values[0].index[0] < start_window: 215 | start_shakes.append(sequence_values) 216 | elif sequence_values[-1].index[0] > end_window: 217 | end_shakes.append(sequence_values) 218 | else: 219 | other_shakes.append(sequence_values) 220 | 221 | # select sequences in start/end window 222 | logger.debug( 223 | f"{len(start_shakes)} shakes in start window ({start_window}), " 224 | f"{len(end_shakes)} shakes in end window ({end_window}), " 225 | f"{len(other_shakes)} shakes in between, for {column}." 226 | ) 227 | 228 | ShakeExtractor._check_shakes_not_empty(start_shakes, "start") 229 | ShakeExtractor._check_shakes_not_empty(end_shakes, "end") 230 | 231 | # choose sequence with highest weight 232 | start, end = self._choose_sequence(signals[column], start_shakes) 233 | self._set_first_segment(column, start, end) 234 | 235 | start, end = self._choose_sequence(signals[column], end_shakes) 236 | self._set_second_segment(column, start, end) 237 | 238 | logger.info( 239 | f"Shake segments for {column}:\n{pp.pformat(self.segments[column])}" 240 | ) 241 | 242 | return self.segments 243 | -------------------------------------------------------------------------------- /tests/test_synchronizer.py: -------------------------------------------------------------------------------- 1 | """Happy path tests for the synchronizer and shake extractor""" 2 | import os.path 3 | import tempfile 4 | 5 | import pandas as pd 6 | import pytest 7 | 8 | import jointly 9 | from jointly import ShakeExtractor 10 | from jointly.helpers import stretch_signals 11 | from tests.parquet_reader import get_parquet_test_data 12 | 13 | 14 | def test_happy_path_faros_internal(): 15 | ref_data = get_parquet_test_data("faros-internal.parquet", 666) 16 | target_data = get_parquet_test_data("faros-internal.parquet", 667) 17 | reference_signal, target_signal = "Internal", "Faros" 18 | sources = { 19 | reference_signal: {"data": ref_data, "ref_column": "ACCELERATION_Z"}, 20 | target_signal: {"data": target_data, "ref_column": "ACCELERATION_Z"}, 21 | } 22 | extractor = ShakeExtractor() 23 | extractor.start_window_length = pd.Timedelta(seconds=17) 24 | extractor.end_window_length = pd.Timedelta(seconds=10) 25 | extractor.min_length = 3 26 | extractor.threshold = 0.19 27 | 28 | synchronizer = jointly.Synchronizer(sources, reference_signal, extractor) 29 | sync_result = synchronizer.get_sync_params() 30 | 31 | assert ( 32 | sync_result[reference_signal]["timeshift"] is None 33 | ), "Should not have timeshift for reference signal" 34 | assert ( 35 | sync_result[reference_signal]["stretch_factor"] == 1 36 | ), "Should not stretch reference signal" 37 | 38 | assert sync_result[target_signal]["timeshift"] == pd.Timedelta( 39 | "-1 days +23:59:59.070000" 40 | ), "Should find the expected timeshift for the target signal" 41 | assert ( 42 | sync_result[target_signal]["stretch_factor"] == 1.0506424792139077 43 | ), "Should find the expected stretch factor for the target signal" 44 | 45 | 46 | def test_happy_path_equal_data(): 47 | base_data = get_parquet_test_data("test-data.parquet") 48 | reference_signal, target_signal = "A", "B" 49 | sources
= { 50 | reference_signal: {"data": base_data.copy(), "ref_column": "ACCELERATION_Z"}, 51 | target_signal: {"data": base_data, "ref_column": "ACCELERATION_Z"}, 52 | } 53 | extractor = ShakeExtractor() 54 | extractor.start_window_length = pd.Timedelta(seconds=5) 55 | extractor.end_window_length = pd.Timedelta(seconds=3) 56 | extractor.min_length = 3 57 | extractor.threshold = 0.5 58 | 59 | synchronizer = jointly.Synchronizer(sources, reference_signal, extractor) 60 | sync_result = synchronizer.get_sync_params() 61 | 62 | assert ( 63 | sync_result[reference_signal]["timeshift"] is None 64 | ), "Should not have timeshift for reference signal" 65 | assert ( 66 | sync_result[reference_signal]["stretch_factor"] == 1 67 | ), "Should not stretch reference signal" 68 | 69 | assert sync_result[target_signal]["timeshift"] == pd.Timedelta( 70 | seconds=0 71 | ), "Should have timeshift of 0 for equal signal" 72 | assert ( 73 | sync_result[target_signal]["stretch_factor"] == 1 74 | ), "Should have stretching factor of 1 for equal signal" 75 | 76 | 77 | def test_happy_path_shifted_data(): 78 | base_data = get_parquet_test_data("test-data.parquet") 79 | reference_signal, target_signal = "A", "B" 80 | target_df = base_data.shift(-22, freq="100ms") 81 | 82 | sources = { 83 | reference_signal: {"data": base_data.copy(), "ref_column": "ACCELERATION_Z"}, 84 | target_signal: {"data": target_df, "ref_column": "ACCELERATION_Z"}, 85 | } 86 | extractor = ShakeExtractor() 87 | extractor.start_window_length = pd.Timedelta(seconds=5) 88 | extractor.end_window_length = pd.Timedelta(seconds=3) 89 | extractor.min_length = 3 90 | extractor.threshold = 0.5 91 | 92 | synchronizer = jointly.Synchronizer(sources, reference_signal, extractor) 93 | sync_result = synchronizer.get_sync_params() 94 | 95 | assert ( 96 | sync_result[reference_signal]["timeshift"] is None 97 | ), "Should not have timeshift for reference signal" 98 | assert ( 99 | sync_result[reference_signal]["stretch_factor"] == 1 100 | ), "Should not stretch reference signal" 101 | 102 | assert sync_result[target_signal]["timeshift"] == pd.Timedelta( 103 | "0 days 00:00:02.197549725" 104 | ), "Should recover the timeshift introduced by the shift" 105 | assert ( 106 | sync_result[target_signal]["stretch_factor"] == 1 107 | ), "Should have stretch factor of 1 for a shifted but unstretched signal" 108 | 109 | 110 | def test_happy_path_shifted_stretched_data(): 111 | base_data = get_parquet_test_data("test-data.parquet") 112 | reference_signal, target_signal = "A", "B" 113 | target_df = base_data.shift(-22, freq="100ms") 114 | target_df = stretch_signals(target_df, 1.1, target_df.index.min()) 115 | 116 | sources = { 117 | reference_signal: {"data": base_data.copy(), "ref_column": "ACCELERATION_Z"}, 118 | target_signal: {"data": target_df, "ref_column": "ACCELERATION_Z"}, 119 | } 120 | extractor = ShakeExtractor() 121 | extractor.start_window_length = pd.Timedelta(seconds=5) 122 | extractor.end_window_length = pd.Timedelta(seconds=3) 123 | extractor.min_length = 3 124 | extractor.threshold = 0.5 125 | 126 | synchronizer = jointly.Synchronizer(sources, reference_signal, extractor) 127 | sync_result = synchronizer.get_sync_params() 128 | 129 | assert ( 130 | sync_result[reference_signal]["timeshift"] is None 131 | ), "Should not have timeshift for reference signal" 132 | assert ( 133 | sync_result[reference_signal]["stretch_factor"] == 1 134 | ), "Should not stretch reference signal" 135 | 136 | assert sync_result[target_signal]["timeshift"] == pd.Timedelta( 137 | "0 days 00:00:02.197549725" 138 |
), "Should recover the timeshift introduced by the shift" 139 | assert ( 140 | sync_result[target_signal]["stretch_factor"] == 0.9101123595505618 141 | ), "Should find a stretch factor of roughly 1/1.1 for the stretched signal" 142 | 143 | 144 | def test_happy_path_save_pickles(): 145 | ref_data = get_parquet_test_data("faros-internal.parquet", 666) 146 | target_data = get_parquet_test_data("faros-internal.parquet", 667) 147 | reference_signal, target_signal = "Internal", "Faros" 148 | sources = { 149 | reference_signal: {"data": ref_data, "ref_column": "ACCELERATION_Z"}, 150 | target_signal: {"data": target_data, "ref_column": "ACCELERATION_Z"}, 151 | } 152 | extractor = ShakeExtractor() 153 | extractor.start_window_length = pd.Timedelta(seconds=17) 154 | extractor.end_window_length = pd.Timedelta(seconds=10) 155 | extractor.min_length = 3 156 | extractor.threshold = 0.19 157 | 158 | synchronizer = jointly.Synchronizer(sources, reference_signal, extractor) 159 | 160 | with tempfile.TemporaryDirectory() as tmp_dir: 161 | synchronizer.save_pickles(tmp_dir) 162 | synced_data = synchronizer.get_synced_data() 163 | 164 | for signal, signal_df in synced_data.items(): 165 | pickle_path = os.path.join(tmp_dir, f"{signal.upper()}.PICKLE") 166 | assert os.path.isfile(pickle_path) 167 | assert pd.read_pickle(pickle_path).equals(signal_df) 168 | 169 | 170 | def test_bad_table_spec_save_tables(): 171 | ref_data = get_parquet_test_data("faros-internal.parquet", 666) 172 | target_data = get_parquet_test_data("faros-internal.parquet", 667) 173 | reference_signal, target_signal = "Internal", "Faros" 174 | sources = { 175 | reference_signal: {"data": ref_data, "ref_column": "ACCELERATION_Z"}, 176 | target_signal: {"data": target_data, "ref_column": "ACCELERATION_Z"}, 177 | } 178 | extractor = ShakeExtractor() 179 | extractor.start_window_length = pd.Timedelta(seconds=17) 180 | extractor.end_window_length = pd.Timedelta(seconds=10) 181 | extractor.min_length = 3 182 | extractor.threshold = 0.19 183 | 184 | synchronizer = jointly.Synchronizer(sources, reference_signal, extractor) 185 | 186 | with tempfile.TemporaryDirectory() as tmp_dir: 187 | with pytest.raises(ValueError): 188 | synchronizer.save_data( 189 | tmp_dir, tables={"N/A": {"Faros": ["N/A"]}}, save_total_table=False 190 | ) 191 | with pytest.raises(ValueError): 192 | synchronizer.save_data( 193 | tmp_dir, 194 | tables={"N/A": {"N/A": ["ACCELERATION_Y"]}}, 195 | save_total_table=False, 196 | ) 197 | 198 | 199 | def test_happy_path_save_tables(): 200 | ref_data = get_parquet_test_data("faros-internal.parquet", 666) 201 | target_data = get_parquet_test_data("faros-internal.parquet", 667) 202 | reference_signal, target_signal = "Internal", "Faros" 203 | sources = { 204 | reference_signal: {"data": ref_data, "ref_column": "ACCELERATION_Z"}, 205 | target_signal: {"data": target_data, "ref_column": "ACCELERATION_Z"}, 206 | } 207 | extractor = ShakeExtractor() 208 | extractor.start_window_length = pd.Timedelta(seconds=17) 209 | extractor.end_window_length = pd.Timedelta(seconds=10) 210 | extractor.min_length = 3 211 | extractor.threshold = 0.19 212 | 213 | synchronizer = jointly.Synchronizer(sources, reference_signal, extractor) 214 | 215 | acc_columns = ["ACCELERATION_X", "ACCELERATION_Y", "ACCELERATION_Z"] 216 | with tempfile.TemporaryDirectory() as tmp_dir: 217 | tables = { 218 | "ACC": {"Faros": acc_columns, "Internal": acc_columns}, 219 | "ECG": {"Faros": ["ECG"]}, 220 | } 221 | synchronizer.save_data(tmp_dir, tables=tables, save_total_table=False) 222 | for file in ["ACC",
"ECG", "SYNC"]: 223 | file_path = os.path.join(tmp_dir, f"{file}.csv") 224 | 225 | assert os.path.isfile(file_path), f"{file_path} should exist" 226 | df = pd.read_csv(file_path) 227 | 228 | if file == "ACC": 229 | assert len(df) == 4115, "Should have saved all acc values" 230 | assert "timestamp" in df.columns, "Should have saved timestamp column" 231 | for col in acc_columns: 232 | for device in tables["ACC"]: 233 | assert ( 234 | f"{device}_{col}" in df.columns 235 | ), f"Should have saved {device}_{col}" 236 | elif file == "ECG": 237 | assert len(df) == 15100, "Should have saved all ecg values" 238 | assert "timestamp" in df.columns, "Should have saved timestamp column" 239 | elif file == "SYNC": 240 | for source in ["Faros", "Internal"]: 241 | assert ( 242 | source in df.columns 243 | ), f"Should have saved {source} in SYNC.csv" 244 | assert "Unnamed: 0" in df.columns, "Should have saved index column" 245 | 246 | 247 | def test_happy_path_save_total_table(): 248 | ref_data = get_parquet_test_data("faros-internal.parquet", 666) 249 | target_data = get_parquet_test_data("faros-internal.parquet", 667) 250 | reference_signal, target_signal = "Internal", "Faros" 251 | sources = { 252 | reference_signal: {"data": ref_data, "ref_column": "ACCELERATION_Z"}, 253 | target_signal: {"data": target_data, "ref_column": "ACCELERATION_Z"}, 254 | } 255 | extractor = ShakeExtractor() 256 | extractor.start_window_length = pd.Timedelta(seconds=17) 257 | extractor.end_window_length = pd.Timedelta(seconds=10) 258 | extractor.min_length = 3 259 | extractor.threshold = 0.19 260 | 261 | synchronizer = jointly.Synchronizer(sources, reference_signal, extractor) 262 | with tempfile.TemporaryDirectory() as tmp_dir: 263 | synchronizer.save_data(tmp_dir, tables=None, save_total_table=True) 264 | file_path = os.path.join(tmp_dir, "TOTAL.csv") 265 | assert os.path.isfile(file_path), f"{file_path} should exist" 266 | 267 | df = pd.read_csv(file_path) 268 | assert len(df.columns) == 21, "Should save all sensors from internal and faros" 269 | assert len(df) == 18518, "Should create exact number of synced result items" 270 | -------------------------------------------------------------------------------- /jointly/synchronizer.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Dict, Optional 3 | 4 | import numpy as np 5 | import pandas as pd 6 | from scipy.signal import correlate 7 | 8 | from . import ShakeExtractor, helpers 9 | from .abstract_extractor import AbstractExtractor 10 | from .helpers import normalize, get_equidistant_signals 11 | from .log import logger 12 | from .synchronization_errors import StartEqualsEndError 13 | from .types import SourceDict, ResultTableSpec, SyncPairTimeshift, SyncPairs 14 | 15 | 16 | class Synchronizer: 17 | @property 18 | def extractor(self) -> AbstractExtractor: 19 | """Get the current extractor""" 20 | return self._extractor 21 | 22 | @extractor.setter 23 | def extractor(self, value: AbstractExtractor): 24 | if not issubclass(type(value), AbstractExtractor): 25 | raise TypeError("Extractor needs to be a subclass of AbstractExtractor.") 26 | self._extractor = value 27 | 28 | def __init__( 29 | self, 30 | sources: SourceDict, 31 | reference_source_name: str, 32 | extractor: Optional[AbstractExtractor] = None, 33 | sampling_freq: Optional[float] = None, 34 | ): 35 | """ 36 | Create a new synchronizer. 
Synchronizer objects are used to remove constant time offsets and clock-speed differences by stretching and 37 | moving the signals based on reference points detected by an extractor. 38 | 39 | :param sources: A SourceDict to describe the input data 40 | :param reference_source_name: name of the sensor to be used as reference. 41 | Other sensors will be made synchronous to this sensor, and data from this sensor will not be modified. 42 | :param extractor: This will be used to find synchronization points in the source data. If None, it defaults to 43 | a ShakeExtractor instance 44 | :param sampling_freq: Override the frequency used to resample input data. If None, it defaults to the maximum 45 | input frequency 46 | """ 47 | self.sources = sources 48 | self.ref_source_name = reference_source_name 49 | self._check_sources() 50 | 51 | self.extractor = extractor if extractor is not None else ShakeExtractor() 52 | self.ref_signals = self._prepare_ref_signals() 53 | 54 | self.sampling_freq = ( 55 | sampling_freq 56 | if sampling_freq is not None 57 | else helpers.get_max_ref_frequency(self.ref_signals) 58 | ) 59 | 60 | def _check_sources(self): 61 | """Verifies that the source dict adheres to the required format and that the reference source is available""" 62 | for source_name, source in self.sources.items(): 63 | if "data" not in source or "ref_column" not in source: 64 | raise ValueError( 65 | "Each source needs to have a `data` and a `ref_column` property" 66 | ) 67 | if not isinstance(source["data"], pd.DataFrame): 68 | raise ValueError( 69 | "The `data` property of each source must contain a DataFrame" 70 | ) 71 | if not isinstance(source["data"].index, pd.DatetimeIndex): 72 | raise ValueError( 73 | "The `data` DataFrame must have a pd.DatetimeIndex for each source" 74 | ) 75 | if source["data"].index.duplicated().any(): 76 | raise ValueError( 77 | "The input dataframe must not have duplicate index values, " 78 | "convert the data into a normalized wide format" 79 | ) 80 | if ( 81 | not isinstance(source["ref_column"], str) 82 | or source["ref_column"] not in source["data"].columns 83 | ): 84 | raise ValueError( 85 | "Each source must have a string specifying the reference column, and the reference " 86 | "column must be available in the source's DataFrame" 87 | ) 88 | if self.ref_source_name not in self.sources.keys(): 89 | raise ValueError( 90 | "The reference source name must be available in the source dict" 91 | ) 92 | 93 | def _prepare_ref_signals(self) -> pd.DataFrame: 94 | """ 95 | Collect the reference columns from all sources and join them into a single dataframe. 96 | Each reference column is named equal to the name of the source it comes from. 97 | 98 | :return: normalized reference signals 99 | """ 100 | reference_signals = pd.DataFrame() 101 | for source_name, source in self.sources.items(): 102 | signal = source["data"][source["ref_column"]].dropna() 103 | reference_signals = reference_signals.join(signal, how="outer") 104 | reference_signals.rename( 105 | columns={source["ref_column"]: source_name}, inplace=True 106 | ) 107 | reference_signals = reference_signals.apply(normalize) 108 | return reference_signals 109 | 110 | @staticmethod 111 | def _get_timeshift_pair( 112 | dataframe: pd.DataFrame, ref_col: str, sig_col: str, segments: SyncPairs 113 | ) -> SyncPairTimeshift: 114 | """ 115 | Returns timeshifts to synchronize sig_col to ref_col. 116 | Expects equidistantly sampled signals.
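The lag is found by cross-correlating the two segments: the sample offset that maximizes the correlation is converted back into a timestamp difference. Illustratively, if the target segment aligns best with the reference when moved 250 ms earlier, the returned timeshift for that segment is -250 ms.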
117 | 118 | :param dataframe: reference signal dataframe 119 | :param ref_col: name of the reference signal in segments 120 | :param sig_col: name of the target signal in segments 121 | :param segments: all detected synchronization pairs 122 | :return: timeshifts that align the first and second synchronization point 123 | of the target signal to the reference signal 124 | """ 125 | timeshifts = {} 126 | for segment in ["first", "second"]: 127 | logger.debug( 128 | f"Calculate timeshift of {segment} segment " 129 | f"for {sig_col} to {ref_col}." 130 | ) 131 | 132 | # extract the segment data for the reference and the target signal 133 | ref_start, ref_end, ref_data = helpers.get_segment_data( 134 | dataframe, segments, ref_col, segment 135 | ) 136 | sig_start, sig_end, sig_data = helpers.get_segment_data( 137 | dataframe, segments, sig_col, segment 138 | ) 139 | 140 | # calculate cross-correlation of segments 141 | cross_corr = correlate(ref_data, sig_data) 142 | shift_in_samples = np.argmax(cross_corr) - len(sig_data) + 1 143 | 144 | # get timestamp at which sig_segment must start to sync signals 145 | max_corr_ts = dataframe.index[ 146 | dataframe.index.get_loc(ref_start, method="nearest") + shift_in_samples 147 | ] 148 | logger.debug( 149 | f"Highest correlation with start at " 150 | f"{max_corr_ts} with {np.max(cross_corr)}." 151 | ) 152 | 153 | # calculate timeshift to move signal to maximize correlation 154 | timeshifts[segment] = max_corr_ts - sig_start 155 | logger.debug(f"Timeshift is {timeshifts[segment]}.") 156 | 157 | return timeshifts 158 | 159 | def _calculate_stretch_factors(self) -> pd.DataFrame: 160 | """ 161 | Calculate the stretch factor that aligns each reference signal to the reference 162 | signal of the reference source. It immediately applies these stretch factors 163 | to a copy of ``self.ref_signals``. 164 | 165 | :return: a copy of self.ref_signals with the stretch factors applied. 166 | """ 167 | ref_signals = self.ref_signals.copy() 168 | start_time = ref_signals.index.min() 169 | 170 | # Get equidistantly sampled reference signals for the cross correlation to work 171 | df_equidistant = get_equidistant_signals(ref_signals, self.sampling_freq) 172 | sync_pairs = self.extractor.get_segments(df_equidistant) 173 | helpers.verify_segments(ref_signals.columns, sync_pairs) 174 | 175 | for source in df_equidistant.columns: 176 | if source == self.ref_source_name: 177 | continue 178 | 179 | timeshifts = Synchronizer._get_timeshift_pair( 180 | df_equidistant, self.ref_source_name, source, sync_pairs 181 | ) 182 | logger.debug( 183 | f"Timedelta between shifts before stretching: " 184 | f"{timeshifts['first'] - timeshifts['second']}" 185 | ) 186 | try: 187 | stretch_factor = helpers.get_stretch_factor( 188 | sync_pairs[source], timeshifts 189 | ) 190 | except ZeroDivisionError: 191 | raise StartEqualsEndError( 192 | "First and last segment have been identified as exactly the same. Bad window, maybe?"
193 | ) 194 | logger.info(f"Stretch factor for {source}: {stretch_factor}") 195 | 196 | # stretch signal and exchange it in dataframe 197 | signal_stretched = helpers.stretch_signals( 198 | pd.DataFrame(ref_signals[source]).dropna(), 199 | stretch_factor, 200 | start_time, 201 | ) 202 | ref_signals = ( 203 | ref_signals.drop(source, axis="columns") 204 | .join(signal_stretched, how="outer") 205 | .astype(pd.SparseDtype("float")) 206 | ) 207 | self.sources[source]["stretch_factor"] = stretch_factor 208 | 209 | return ref_signals 210 | 211 | def _calculate_timeshifts(self, stretched_ref_signals: pd.DataFrame): 212 | """ 213 | Calculate the shift necessary to align the stretched reference signals to the unstretched reference sensor. 214 | 215 | :param stretched_ref_signals: a copy of self.ref_signals that has been stretched to align the duration between 216 | the synchronization points to the duration between them in the reference sensor 217 | """ 218 | # Resample again with stretched signal 219 | df_equi = get_equidistant_signals(stretched_ref_signals, self.sampling_freq) 220 | segments = self.extractor.get_segments(df_equi) 221 | helpers.verify_segments(stretched_ref_signals.columns, segments) 222 | 223 | for source in df_equi.columns: 224 | if source == self.ref_source_name: 225 | continue 226 | 227 | timeshifts = Synchronizer._get_timeshift_pair( 228 | df_equi, self.ref_source_name, source, segments 229 | ) 230 | timedelta = timeshifts["first"] - timeshifts["second"] 231 | if timedelta > pd.Timedelta(0): 232 | logger.warning( 233 | f"Timedelta between shifts after stretching: {timedelta}. " 234 | f"This should be very small: the timedelta to the reference signal " 235 | f"should be equal for both start and end so that a simple offset aligns the " 236 | f"signals perfectly." 237 | ) 238 | logger.info(f"Timeshift for {source}: {timeshifts['first']}") 239 | self.sources[source]["timeshift"] = timeshifts["first"] 240 | 241 | def _calculate_sync_params(self): 242 | """ 243 | This function calculates the synchronization parameters to sync all signals to the reference signal. 244 | It stores the result in ``self.sources``, in the keys ``timeshift`` and ``stretch_factor``. 245 | """ 246 | self.sources[self.ref_source_name]["timeshift"] = None 247 | self.sources[self.ref_source_name]["stretch_factor"] = 1 248 | 249 | # Firstly, determine stretch factor and get stretched reference signals 250 | stretched_ref_signals = self._calculate_stretch_factors() 251 | 252 | # Secondly, get timeshift for the stretched signals 253 | self._calculate_timeshifts(stretched_ref_signals) 254 | 255 | def get_sync_params(self, recalculate: bool = False): 256 | """ 257 | Get the synchronization params. If they have not been calculated yet, they will be. 258 | 259 | :param recalculate: force calculation, even if it was already done before 260 | :return: the synchronization params for each source, i.e., each timeshift and stretch factor 261 | """ 262 | selected_keys = ["timeshift", "stretch_factor"] 263 | if recalculate or "timeshift" not in self.sources[self.ref_source_name]: 264 | self._calculate_sync_params() 265 | return { 266 | source_name: { 267 | key: value for key, value in source.items() if key in selected_keys 268 | } 269 | for source_name, source in self.sources.items() 270 | } 271 | 272 | def get_synced_data(self, recalculate: bool = False) -> Dict[str, pd.DataFrame]: 273 | """ 274 | Synchronize the input data.
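Each non-reference source is first stretched by its stretch factor around the start of the reference signals and then shifted by its timeshift; the reference source is returned unchanged. A minimal usage sketch (``ref_df``, ``target_df``, and the column name ``"acc_z"`` are placeholders for your own DataFrames with a ``pd.DatetimeIndex``)::

    sources = {
        "Reference": {"data": ref_df, "ref_column": "acc_z"},
        "Target": {"data": target_df, "ref_column": "acc_z"},
    }
    synced = Synchronizer(sources, "Reference").get_synced_data()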
275 | 276 | :param recalculate: force recalculating the synchronization parameters 277 | :return: a dictionary of the shifted and stretched source signals 278 | """ 279 | self.get_sync_params(recalculate) 280 | synced_data = {} 281 | start_time = self.ref_signals.index.min() 282 | for source_name, source in self.sources.items(): 283 | data = source["data"].copy() 284 | stretch_factor, timeshift = source["stretch_factor"], source["timeshift"] 285 | 286 | if stretch_factor != 1: 287 | data = helpers.stretch_signals(data, stretch_factor, start_time) 288 | if timeshift is not None: 289 | data = data.shift(1, freq=timeshift)  # move the DatetimeIndex by the timeshift 290 | synced_data[source_name] = data 291 | return synced_data 292 | 293 | def save_pickles(self, target_dir: str) -> Dict[str, pd.DataFrame]: 294 | """ 295 | Save a pickled, synced dataframe for each source. 296 | Does not save a total table. 297 | Sync parameters are saved as ``SYNC.csv``. 298 | 299 | :param target_dir: target directory for the export files 300 | :return: the synced data, plus a sync parameter dataframe in the dictionary entry with the key "SYNC". 301 | """ 302 | sync_params = pd.DataFrame(self.get_sync_params()) 303 | synced_data = self.get_synced_data() 304 | 305 | sync_params.to_csv(os.path.join(target_dir, "SYNC.csv")) 306 | 307 | for source_name, synced_df in synced_data.items(): 308 | synced_df.to_pickle( 309 | os.path.join(target_dir, f"{source_name.upper()}.PICKLE") 310 | ) 311 | 312 | return {**synced_data, "SYNC": sync_params} 313 | 314 | def save_data( 315 | self, 316 | target_dir: str, 317 | tables: Optional[ResultTableSpec] = None, 318 | save_total_table: bool = True, 319 | ): 320 | """ 321 | Export synchronized data. 322 | Two formats are possible: if ``tables`` is given, a file for each root key is created containing the columns 323 | from the sensors specified as the keys on the second level. This can be used to create a file for each sensor 324 | type; see ``ResultTableSpec`` for an example. 325 | 326 | A ``SYNC.csv`` is always exported to store the synchronization parameters that have been calculated. 327 | 328 | :param target_dir: target directory for the export files 329 | :param tables: ResultTableSpec to specify the export format, or None 330 | :param save_total_table: whether to export an outer join over all synchronized dataframes as ``TOTAL.csv`` 331 | """ 332 | if tables is not None and "SYNC" in tables.keys(): 333 | raise ValueError( 334 | "SYNC must not be one of the table names. " 335 | "It is reserved for the synchronization parameters." 336 | ) 337 | 338 | if save_total_table and tables is not None: 339 | if "TOTAL" in tables.keys(): 340 | raise ValueError( 341 | "TOTAL must not be one of the table names " 342 | "when the table with all data is to be saved."
343 | ) 344 | 345 | sync_params = self.get_sync_params() 346 | synced_data = self.get_synced_data() 347 | 348 | # Save sync params 349 | pd.DataFrame(sync_params).to_csv(os.path.join(target_dir, "SYNC.csv")) 350 | 351 | # Save custom tables 352 | logger.info(f"Exporting tables: {tables}") 353 | if tables is not None: 354 | for table_name, table_spec in tables.items(): 355 | if len(table_spec) == 0: 356 | logger.warning( 357 | f"Table spec {table_name} does not request any columns" 358 | ) 359 | continue 360 | 361 | table_df = pd.DataFrame() 362 | 363 | for source_name, source_columns in table_spec.items(): 364 | # create dataframe for each source 365 | source_df = pd.DataFrame() 366 | for column in source_columns: 367 | try: 368 | data = synced_data[source_name][column] 369 | except KeyError: 370 | raise ValueError( 371 | f"Requested column {column} does not exist in source {source_name}" 372 | ) 373 | # join selected signals to device dataframe 374 | source_df = source_df.join(data, how="outer") 375 | if not source_df.empty: 376 | # add device signals to general dataframe 377 | source_df = source_df.rename( 378 | lambda col_name: f"{source_name}_{col_name}", 379 | axis="columns", 380 | ) 381 | table_df = table_df.join(source_df, how="outer") 382 | 383 | table_df.dropna(axis="index", how="all", inplace=True) 384 | table_df.to_csv(os.path.join(target_dir, f"{table_name}.csv")) 385 | 386 | # Save table with total data 387 | if save_total_table: 388 | total_table = pd.DataFrame() 389 | 390 | for source_name, data in synced_data.items(): 391 | source_df = data.rename( 392 | lambda col_name: f"{source_name}_{col_name}", 393 | axis="columns", 394 | ) 395 | total_table = total_table.join(source_df, how="outer") 396 | total_table.to_csv(os.path.join(target_dir, "TOTAL.csv")) 397 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "alabaster" 3 | version = "0.7.12" 4 | description = "A configurable sidebar-enabled Sphinx theme" 5 | category = "dev" 6 | optional = false 7 | python-versions = "*" 8 | 9 | [[package]] 10 | name = "appdirs" 11 | version = "1.4.4" 12 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 13 | category = "dev" 14 | optional = false 15 | python-versions = "*" 16 | 17 | [[package]] 18 | name = "atomicwrites" 19 | version = "1.4.0" 20 | description = "Atomic file writes."
21 | category = "dev" 22 | optional = false 23 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 24 | 25 | [[package]] 26 | name = "attrs" 27 | version = "21.2.0" 28 | description = "Classes Without Boilerplate" 29 | category = "dev" 30 | optional = false 31 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 32 | 33 | [package.extras] 34 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] 35 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] 36 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] 37 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] 38 | 39 | [[package]] 40 | name = "babel" 41 | version = "2.9.1" 42 | description = "Internationalization utilities" 43 | category = "dev" 44 | optional = false 45 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 46 | 47 | [package.dependencies] 48 | pytz = ">=2015.7" 49 | 50 | [[package]] 51 | name = "backports.entry-points-selectable" 52 | version = "1.1.0" 53 | description = "Compatibility shim providing selectable entry points for older implementations" 54 | category = "dev" 55 | optional = false 56 | python-versions = ">=2.7" 57 | 58 | [package.dependencies] 59 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 60 | 61 | [package.extras] 62 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 63 | testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] 64 | 65 | [[package]] 66 | name = "black" 67 | version = "21.7b0" 68 | description = "The uncompromising code formatter." 69 | category = "dev" 70 | optional = false 71 | python-versions = ">=3.6.2" 72 | 73 | [package.dependencies] 74 | appdirs = "*" 75 | click = ">=7.1.2" 76 | mypy-extensions = ">=0.4.3" 77 | pathspec = ">=0.8.1,<1" 78 | regex = ">=2020.1.8" 79 | tomli = ">=0.2.6,<2.0.0" 80 | typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""} 81 | typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} 82 | 83 | [package.extras] 84 | colorama = ["colorama (>=0.4.3)"] 85 | d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] 86 | python2 = ["typed-ast (>=1.4.2)"] 87 | uvloop = ["uvloop (>=0.15.2)"] 88 | 89 | [[package]] 90 | name = "certifi" 91 | version = "2021.5.30" 92 | description = "Python package for providing Mozilla's CA Bundle." 93 | category = "dev" 94 | optional = false 95 | python-versions = "*" 96 | 97 | [[package]] 98 | name = "cfgv" 99 | version = "3.3.0" 100 | description = "Validate configuration and produce human readable error messages." 101 | category = "dev" 102 | optional = false 103 | python-versions = ">=3.6.1" 104 | 105 | [[package]] 106 | name = "charset-normalizer" 107 | version = "2.0.4" 108 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
109 | category = "dev" 110 | optional = false 111 | python-versions = ">=3.5.0" 112 | 113 | [package.extras] 114 | unicode_backport = ["unicodedata2"] 115 | 116 | [[package]] 117 | name = "click" 118 | version = "8.0.1" 119 | description = "Composable command line interface toolkit" 120 | category = "dev" 121 | optional = false 122 | python-versions = ">=3.6" 123 | 124 | [package.dependencies] 125 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 126 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 127 | 128 | [[package]] 129 | name = "colorama" 130 | version = "0.4.4" 131 | description = "Cross-platform colored terminal text." 132 | category = "dev" 133 | optional = false 134 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 135 | 136 | [[package]] 137 | name = "coverage" 138 | version = "5.5" 139 | description = "Code coverage measurement for Python" 140 | category = "dev" 141 | optional = false 142 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 143 | 144 | [package.extras] 145 | toml = ["toml"] 146 | 147 | [[package]] 148 | name = "cycler" 149 | version = "0.10.0" 150 | description = "Composable style cycles" 151 | category = "main" 152 | optional = false 153 | python-versions = "*" 154 | 155 | [package.dependencies] 156 | six = "*" 157 | 158 | [[package]] 159 | name = "distlib" 160 | version = "0.3.2" 161 | description = "Distribution utilities" 162 | category = "dev" 163 | optional = false 164 | python-versions = "*" 165 | 166 | [[package]] 167 | name = "docutils" 168 | version = "0.16" 169 | description = "Docutils -- Python Documentation Utilities" 170 | category = "dev" 171 | optional = false 172 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 173 | 174 | [[package]] 175 | name = "filelock" 176 | version = "3.0.12" 177 | description = "A platform independent file lock." 
178 | category = "dev" 179 | optional = false 180 | python-versions = "*" 181 | 182 | [[package]] 183 | name = "flake8" 184 | version = "3.9.2" 185 | description = "the modular source code checker: pep8 pyflakes and co" 186 | category = "dev" 187 | optional = false 188 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 189 | 190 | [package.dependencies] 191 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 192 | mccabe = ">=0.6.0,<0.7.0" 193 | pycodestyle = ">=2.7.0,<2.8.0" 194 | pyflakes = ">=2.3.0,<2.4.0" 195 | 196 | [[package]] 197 | name = "identify" 198 | version = "2.2.12" 199 | description = "File identification library for Python" 200 | category = "dev" 201 | optional = false 202 | python-versions = ">=3.6.1" 203 | 204 | [package.extras] 205 | license = ["editdistance-s"] 206 | 207 | [[package]] 208 | name = "idna" 209 | version = "3.2" 210 | description = "Internationalized Domain Names in Applications (IDNA)" 211 | category = "dev" 212 | optional = false 213 | python-versions = ">=3.5" 214 | 215 | [[package]] 216 | name = "imagesize" 217 | version = "1.2.0" 218 | description = "Getting image size from png/jpeg/jpeg2000/gif file" 219 | category = "dev" 220 | optional = false 221 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 222 | 223 | [[package]] 224 | name = "importlib-metadata" 225 | version = "4.6.3" 226 | description = "Read metadata from Python packages" 227 | category = "dev" 228 | optional = false 229 | python-versions = ">=3.6" 230 | 231 | [package.dependencies] 232 | typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} 233 | zipp = ">=0.5" 234 | 235 | [package.extras] 236 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 237 | perf = ["ipython"] 238 | testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] 239 | 240 | [[package]] 241 | name = "iniconfig" 242 | version = "1.1.1" 243 | description = "iniconfig: brain-dead simple config-ini parsing" 244 | category = "dev" 245 | optional = false 246 | python-versions = "*" 247 | 248 | [[package]] 249 | name = "jinja2" 250 | version = "3.0.1" 251 | description = "A very fast and expressive template engine." 252 | category = "dev" 253 | optional = false 254 | python-versions = ">=3.6" 255 | 256 | [package.dependencies] 257 | MarkupSafe = ">=2.0" 258 | 259 | [package.extras] 260 | i18n = ["Babel (>=2.7)"] 261 | 262 | [[package]] 263 | name = "kiwisolver" 264 | version = "1.3.1" 265 | description = "A fast implementation of the Cassowary constraint solver" 266 | category = "main" 267 | optional = false 268 | python-versions = ">=3.6" 269 | 270 | [[package]] 271 | name = "markupsafe" 272 | version = "2.0.1" 273 | description = "Safely add untrusted strings to HTML/XML markup." 
274 | category = "dev" 275 | optional = false 276 | python-versions = ">=3.6" 277 | 278 | [[package]] 279 | name = "matplotlib" 280 | version = "3.4.2" 281 | description = "Python plotting package" 282 | category = "main" 283 | optional = false 284 | python-versions = ">=3.7" 285 | 286 | [package.dependencies] 287 | cycler = ">=0.10" 288 | kiwisolver = ">=1.0.1" 289 | numpy = ">=1.16" 290 | pillow = ">=6.2.0" 291 | pyparsing = ">=2.2.1" 292 | python-dateutil = ">=2.7" 293 | 294 | [[package]] 295 | name = "mccabe" 296 | version = "0.6.1" 297 | description = "McCabe checker, plugin for flake8" 298 | category = "dev" 299 | optional = false 300 | python-versions = "*" 301 | 302 | [[package]] 303 | name = "mypy-extensions" 304 | version = "0.4.3" 305 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 306 | category = "dev" 307 | optional = false 308 | python-versions = "*" 309 | 310 | [[package]] 311 | name = "nodeenv" 312 | version = "1.6.0" 313 | description = "Node.js virtual environment builder" 314 | category = "dev" 315 | optional = false 316 | python-versions = "*" 317 | 318 | [[package]] 319 | name = "numpy" 320 | version = "1.21.1" 321 | description = "NumPy is the fundamental package for array computing with Python." 322 | category = "main" 323 | optional = false 324 | python-versions = ">=3.7" 325 | 326 | [[package]] 327 | name = "packaging" 328 | version = "21.0" 329 | description = "Core utilities for Python packages" 330 | category = "dev" 331 | optional = false 332 | python-versions = ">=3.6" 333 | 334 | [package.dependencies] 335 | pyparsing = ">=2.0.2" 336 | 337 | [[package]] 338 | name = "pandas" 339 | version = "1.3.1" 340 | description = "Powerful data structures for data analysis, time series, and statistics" 341 | category = "main" 342 | optional = false 343 | python-versions = ">=3.7.1" 344 | 345 | [package.dependencies] 346 | numpy = ">=1.17.3" 347 | python-dateutil = ">=2.7.3" 348 | pytz = ">=2017.3" 349 | 350 | [package.extras] 351 | test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"] 352 | 353 | [[package]] 354 | name = "pathspec" 355 | version = "0.9.0" 356 | description = "Utility library for gitignore style pattern matching of file paths." 357 | category = "dev" 358 | optional = false 359 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 360 | 361 | [[package]] 362 | name = "pillow" 363 | version = "8.3.1" 364 | description = "Python Imaging Library (Fork)" 365 | category = "main" 366 | optional = false 367 | python-versions = ">=3.6" 368 | 369 | [[package]] 370 | name = "platformdirs" 371 | version = "2.2.0" 372 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
373 | category = "dev" 374 | optional = false 375 | python-versions = ">=3.6" 376 | 377 | [package.extras] 378 | docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] 379 | test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] 380 | 381 | [[package]] 382 | name = "pluggy" 383 | version = "0.13.1" 384 | description = "plugin and hook calling mechanisms for python" 385 | category = "dev" 386 | optional = false 387 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 388 | 389 | [package.dependencies] 390 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 391 | 392 | [package.extras] 393 | dev = ["pre-commit", "tox"] 394 | 395 | [[package]] 396 | name = "pre-commit" 397 | version = "2.13.0" 398 | description = "A framework for managing and maintaining multi-language pre-commit hooks." 399 | category = "dev" 400 | optional = false 401 | python-versions = ">=3.6.1" 402 | 403 | [package.dependencies] 404 | cfgv = ">=2.0.0" 405 | identify = ">=1.0.0" 406 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 407 | nodeenv = ">=0.11.1" 408 | pyyaml = ">=5.1" 409 | toml = "*" 410 | virtualenv = ">=20.0.8" 411 | 412 | [[package]] 413 | name = "py" 414 | version = "1.10.0" 415 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 416 | category = "dev" 417 | optional = false 418 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 419 | 420 | [[package]] 421 | name = "pyarrow" 422 | version = "5.0.0" 423 | description = "Python library for Apache Arrow" 424 | category = "dev" 425 | optional = false 426 | python-versions = ">=3.6" 427 | 428 | [package.dependencies] 429 | numpy = ">=1.16.6" 430 | 431 | [[package]] 432 | name = "pycodestyle" 433 | version = "2.7.0" 434 | description = "Python style guide checker" 435 | category = "dev" 436 | optional = false 437 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 438 | 439 | [[package]] 440 | name = "pyflakes" 441 | version = "2.3.1" 442 | description = "passive checker of Python programs" 443 | category = "dev" 444 | optional = false 445 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 446 | 447 | [[package]] 448 | name = "pygments" 449 | version = "2.9.0" 450 | description = "Pygments is a syntax highlighting package written in Python." 
451 | category = "dev" 452 | optional = false 453 | python-versions = ">=3.5" 454 | 455 | [[package]] 456 | name = "pyparsing" 457 | version = "2.4.7" 458 | description = "Python parsing module" 459 | category = "main" 460 | optional = false 461 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 462 | 463 | [[package]] 464 | name = "pyproject-flake8" 465 | version = "0.0.1a2" 466 | description = "pyproject-flake8 (`pflake8`), a monkey patching wrapper to connect flake8 with pyproject.toml configuration" 467 | category = "dev" 468 | optional = false 469 | python-versions = "*" 470 | 471 | [package.dependencies] 472 | flake8 = "*" 473 | toml = "*" 474 | 475 | [[package]] 476 | name = "pytest" 477 | version = "6.2.4" 478 | description = "pytest: simple powerful testing with Python" 479 | category = "dev" 480 | optional = false 481 | python-versions = ">=3.6" 482 | 483 | [package.dependencies] 484 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 485 | attrs = ">=19.2.0" 486 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 487 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 488 | iniconfig = "*" 489 | packaging = "*" 490 | pluggy = ">=0.12,<1.0.0a1" 491 | py = ">=1.8.2" 492 | toml = "*" 493 | 494 | [package.extras] 495 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 496 | 497 | [[package]] 498 | name = "python-dateutil" 499 | version = "2.8.2" 500 | description = "Extensions to the standard Python datetime module" 501 | category = "main" 502 | optional = false 503 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 504 | 505 | [package.dependencies] 506 | six = ">=1.5" 507 | 508 | [[package]] 509 | name = "pytz" 510 | version = "2021.1" 511 | description = "World timezone definitions, modern and historical" 512 | category = "main" 513 | optional = false 514 | python-versions = "*" 515 | 516 | [[package]] 517 | name = "pyyaml" 518 | version = "5.4.1" 519 | description = "YAML parser and emitter for Python" 520 | category = "dev" 521 | optional = false 522 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 523 | 524 | [[package]] 525 | name = "regex" 526 | version = "2021.8.3" 527 | description = "Alternative regular expression module, to replace re." 528 | category = "dev" 529 | optional = false 530 | python-versions = "*" 531 | 532 | [[package]] 533 | name = "requests" 534 | version = "2.26.0" 535 | description = "Python HTTP for Humans." 
536 | category = "dev" 537 | optional = false 538 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 539 | 540 | [package.dependencies] 541 | certifi = ">=2017.4.17" 542 | charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} 543 | idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} 544 | urllib3 = ">=1.21.1,<1.27" 545 | 546 | [package.extras] 547 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 548 | use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] 549 | 550 | [[package]] 551 | name = "scipy" 552 | version = "1.7.1" 553 | description = "SciPy: Scientific Library for Python" 554 | category = "main" 555 | optional = false 556 | python-versions = ">=3.7,<3.10" 557 | 558 | [package.dependencies] 559 | numpy = ">=1.16.5,<1.23.0" 560 | 561 | [[package]] 562 | name = "six" 563 | version = "1.16.0" 564 | description = "Python 2 and 3 compatibility utilities" 565 | category = "main" 566 | optional = false 567 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 568 | 569 | [[package]] 570 | name = "snowballstemmer" 571 | version = "2.1.0" 572 | description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 573 | category = "dev" 574 | optional = false 575 | python-versions = "*" 576 | 577 | [[package]] 578 | name = "sphinx" 579 | version = "4.1.2" 580 | description = "Python documentation generator" 581 | category = "dev" 582 | optional = false 583 | python-versions = ">=3.6" 584 | 585 | [package.dependencies] 586 | alabaster = ">=0.7,<0.8" 587 | babel = ">=1.3" 588 | colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} 589 | docutils = ">=0.14,<0.18" 590 | imagesize = "*" 591 | Jinja2 = ">=2.3" 592 | packaging = "*" 593 | Pygments = ">=2.0" 594 | requests = ">=2.5.0" 595 | snowballstemmer = ">=1.1" 596 | sphinxcontrib-applehelp = "*" 597 | sphinxcontrib-devhelp = "*" 598 | sphinxcontrib-htmlhelp = ">=2.0.0" 599 | sphinxcontrib-jsmath = "*" 600 | sphinxcontrib-qthelp = "*" 601 | sphinxcontrib-serializinghtml = ">=1.1.5" 602 | 603 | [package.extras] 604 | docs = ["sphinxcontrib-websupport"] 605 | lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.900)", "docutils-stubs", "types-typed-ast", "types-pkg-resources", "types-requests"] 606 | test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] 607 | 608 | [[package]] 609 | name = "sphinx-rtd-theme" 610 | version = "0.5.2" 611 | description = "Read the Docs theme for Sphinx" 612 | category = "dev" 613 | optional = false 614 | python-versions = "*" 615 | 616 | [package.dependencies] 617 | docutils = "<0.17" 618 | sphinx = "*" 619 | 620 | [package.extras] 621 | dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] 622 | 623 | [[package]] 624 | name = "sphinxcontrib-applehelp" 625 | version = "1.0.2" 626 | description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" 627 | category = "dev" 628 | optional = false 629 | python-versions = ">=3.5" 630 | 631 | [package.extras] 632 | lint = ["flake8", "mypy", "docutils-stubs"] 633 | test = ["pytest"] 634 | 635 | [[package]] 636 | name = "sphinxcontrib-devhelp" 637 | version = "1.0.2" 638 | description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
639 | category = "dev" 640 | optional = false 641 | python-versions = ">=3.5" 642 | 643 | [package.extras] 644 | lint = ["flake8", "mypy", "docutils-stubs"] 645 | test = ["pytest"] 646 | 647 | [[package]] 648 | name = "sphinxcontrib-htmlhelp" 649 | version = "2.0.0" 650 | description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" 651 | category = "dev" 652 | optional = false 653 | python-versions = ">=3.6" 654 | 655 | [package.extras] 656 | lint = ["flake8", "mypy", "docutils-stubs"] 657 | test = ["pytest", "html5lib"] 658 | 659 | [[package]] 660 | name = "sphinxcontrib-jsmath" 661 | version = "1.0.1" 662 | description = "A sphinx extension which renders display math in HTML via JavaScript" 663 | category = "dev" 664 | optional = false 665 | python-versions = ">=3.5" 666 | 667 | [package.extras] 668 | test = ["pytest", "flake8", "mypy"] 669 | 670 | [[package]] 671 | name = "sphinxcontrib-qthelp" 672 | version = "1.0.3" 673 | description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 674 | category = "dev" 675 | optional = false 676 | python-versions = ">=3.5" 677 | 678 | [package.extras] 679 | lint = ["flake8", "mypy", "docutils-stubs"] 680 | test = ["pytest"] 681 | 682 | [[package]] 683 | name = "sphinxcontrib-serializinghtml" 684 | version = "1.1.5" 685 | description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 686 | category = "dev" 687 | optional = false 688 | python-versions = ">=3.5" 689 | 690 | [package.extras] 691 | lint = ["flake8", "mypy", "docutils-stubs"] 692 | test = ["pytest"] 693 | 694 | [[package]] 695 | name = "toml" 696 | version = "0.10.2" 697 | description = "Python Library for Tom's Obvious, Minimal Language" 698 | category = "dev" 699 | optional = false 700 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 701 | 702 | [[package]] 703 | name = "tomli" 704 | version = "1.2.0" 705 | description = "A lil' TOML parser" 706 | category = "dev" 707 | optional = false 708 | python-versions = ">=3.6" 709 | 710 | [[package]] 711 | name = "typed-ast" 712 | version = "1.4.3" 713 | description = "a fork of Python 2 and 3 ast modules with type comment support" 714 | category = "dev" 715 | optional = false 716 | python-versions = "*" 717 | 718 | [[package]] 719 | name = "typing-extensions" 720 | version = "3.10.0.0" 721 | description = "Backported and Experimental Type Hints for Python 3.5+" 722 | category = "dev" 723 | optional = false 724 | python-versions = "*" 725 | 726 | [[package]] 727 | name = "urllib3" 728 | version = "1.26.6" 729 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
730 | category = "dev" 731 | optional = false 732 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 733 | 734 | [package.extras] 735 | brotli = ["brotlipy (>=0.6.0)"] 736 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 737 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 738 | 739 | [[package]] 740 | name = "virtualenv" 741 | version = "20.7.0" 742 | description = "Virtual Python Environment builder" 743 | category = "dev" 744 | optional = false 745 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 746 | 747 | [package.dependencies] 748 | "backports.entry-points-selectable" = ">=1.0.4" 749 | distlib = ">=0.3.1,<1" 750 | filelock = ">=3.0.0,<4" 751 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 752 | platformdirs = ">=2,<3" 753 | six = ">=1.9.0,<2" 754 | 755 | [package.extras] 756 | docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] 757 | testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] 758 | 759 | [[package]] 760 | name = "zipp" 761 | version = "3.5.0" 762 | description = "Backport of pathlib-compatible object wrapper for zip files" 763 | category = "dev" 764 | optional = false 765 | python-versions = ">=3.6" 766 | 767 | [package.extras] 768 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 769 | testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] 770 | 771 | [metadata] 772 | lock-version = "1.1" 773 | python-versions = ">=3.7.1,<3.10" 774 | content-hash = "c1da449158432e82e1f05e137e53db77c2a425ab6d628b0235736fd48aa92ddf" 775 | 776 | [metadata.files] 777 | alabaster = [ 778 | {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, 779 | {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, 780 | ] 781 | appdirs = [ 782 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, 783 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, 784 | ] 785 | atomicwrites = [ 786 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 787 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 788 | ] 789 | attrs = [ 790 | {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, 791 | {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, 792 | ] 793 | babel = [ 794 | {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"}, 795 | {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, 796 | ] 797 | "backports.entry-points-selectable" = [ 798 | {file = 
"backports.entry_points_selectable-1.1.0-py2.py3-none-any.whl", hash = "sha256:a6d9a871cde5e15b4c4a53e3d43ba890cc6861ec1332c9c2428c92f977192acc"}, 799 | {file = "backports.entry_points_selectable-1.1.0.tar.gz", hash = "sha256:988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a"}, 800 | ] 801 | black = [ 802 | {file = "black-21.7b0-py3-none-any.whl", hash = "sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116"}, 803 | {file = "black-21.7b0.tar.gz", hash = "sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219"}, 804 | ] 805 | certifi = [ 806 | {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, 807 | {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, 808 | ] 809 | cfgv = [ 810 | {file = "cfgv-3.3.0-py2.py3-none-any.whl", hash = "sha256:b449c9c6118fe8cca7fa5e00b9ec60ba08145d281d52164230a69211c5d597a1"}, 811 | {file = "cfgv-3.3.0.tar.gz", hash = "sha256:9e600479b3b99e8af981ecdfc80a0296104ee610cab48a5ae4ffd0b668650eb1"}, 812 | ] 813 | charset-normalizer = [ 814 | {file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"}, 815 | {file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"}, 816 | ] 817 | click = [ 818 | {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, 819 | {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, 820 | ] 821 | colorama = [ 822 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 823 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 824 | ] 825 | coverage = [ 826 | {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, 827 | {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, 828 | {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, 829 | {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, 830 | {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, 831 | {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, 832 | {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, 833 | {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, 834 | {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, 835 | {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, 836 | {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, 837 | {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, 838 | {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, 839 | {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, 840 | {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, 841 | {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, 842 | {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, 843 | {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, 844 | {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, 845 | {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, 846 | {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, 847 | {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, 848 | {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, 849 | {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, 850 | {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, 851 | {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, 852 | {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, 853 | {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, 854 | {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, 855 | {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, 856 | {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, 857 | {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, 858 | {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, 859 | {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, 860 | {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, 861 | {file = 
"coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, 862 | {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, 863 | {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, 864 | {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, 865 | {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, 866 | {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, 867 | {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, 868 | {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, 869 | {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, 870 | {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, 871 | {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, 872 | {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, 873 | {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, 874 | {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, 875 | {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, 876 | {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, 877 | {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, 878 | ] 879 | cycler = [ 880 | {file = "cycler-0.10.0-py2.py3-none-any.whl", hash = "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d"}, 881 | {file = "cycler-0.10.0.tar.gz", hash = "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"}, 882 | ] 883 | distlib = [ 884 | {file = "distlib-0.3.2-py2.py3-none-any.whl", hash = "sha256:23e223426b28491b1ced97dc3bbe183027419dfc7982b4fa2f05d5f3ff10711c"}, 885 | {file = "distlib-0.3.2.zip", hash = "sha256:106fef6dc37dd8c0e2c0a60d3fca3e77460a48907f335fa28420463a6f799736"}, 886 | ] 887 | docutils = [ 888 | {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, 889 | {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, 890 | ] 891 | filelock = [ 892 | {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, 893 | {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, 894 | ] 895 | flake8 = [ 896 | {file = 
"flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, 897 | {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, 898 | ] 899 | identify = [ 900 | {file = "identify-2.2.12-py2.py3-none-any.whl", hash = "sha256:a510cbe155f39665625c8a4c4b4f9360cbce539f51f23f47836ab7dd852db541"}, 901 | {file = "identify-2.2.12.tar.gz", hash = "sha256:242332b3bdd45a8af1752d5d5a3afb12bee26f8e67c4be06e394f82d05ef1a4d"}, 902 | ] 903 | idna = [ 904 | {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, 905 | {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, 906 | ] 907 | imagesize = [ 908 | {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"}, 909 | {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, 910 | ] 911 | importlib-metadata = [ 912 | {file = "importlib_metadata-4.6.3-py3-none-any.whl", hash = "sha256:51c6635429c77cf1ae634c997ff9e53ca3438b495f10a55ba28594dd69764a8b"}, 913 | {file = "importlib_metadata-4.6.3.tar.gz", hash = "sha256:0645585859e9a6689c523927a5032f2ba5919f1f7d0e84bd4533312320de1ff9"}, 914 | ] 915 | iniconfig = [ 916 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 917 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 918 | ] 919 | jinja2 = [ 920 | {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, 921 | {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, 922 | ] 923 | kiwisolver = [ 924 | {file = "kiwisolver-1.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9"}, 925 | {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:d3155d828dec1d43283bd24d3d3e0d9c7c350cdfcc0bd06c0ad1209c1bbc36d0"}, 926 | {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5a7a7dbff17e66fac9142ae2ecafb719393aaee6a3768c9de2fd425c63b53e21"}, 927 | {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f8d6f8db88049a699817fd9178782867bf22283e3813064302ac59f61d95be05"}, 928 | {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:5f6ccd3dd0b9739edcf407514016108e2280769c73a85b9e59aa390046dbf08b"}, 929 | {file = "kiwisolver-1.3.1-cp36-cp36m-win32.whl", hash = "sha256:225e2e18f271e0ed8157d7f4518ffbf99b9450fca398d561eb5c4a87d0986dd9"}, 930 | {file = "kiwisolver-1.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cf8b574c7b9aa060c62116d4181f3a1a4e821b2ec5cbfe3775809474113748d4"}, 931 | {file = "kiwisolver-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:232c9e11fd7ac3a470d65cd67e4359eee155ec57e822e5220322d7b2ac84fbf0"}, 932 | {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b38694dcdac990a743aa654037ff1188c7a9801ac3ccc548d3341014bc5ca278"}, 933 | {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ca3820eb7f7faf7f0aa88de0e54681bddcb46e485beb844fcecbcd1c8bd01689"}, 934 | {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:c8fd0f1ae9d92b42854b2979024d7597685ce4ada367172ed7c09edf2cef9cb8"}, 935 | {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:1e1bc12fb773a7b2ffdeb8380609f4f8064777877b2225dec3da711b421fda31"}, 936 | {file = "kiwisolver-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:72c99e39d005b793fb7d3d4e660aed6b6281b502e8c1eaf8ee8346023c8e03bc"}, 937 | {file = "kiwisolver-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:8be8d84b7d4f2ba4ffff3665bcd0211318aa632395a1a41553250484a871d454"}, 938 | {file = "kiwisolver-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:31dfd2ac56edc0ff9ac295193eeaea1c0c923c0355bf948fbd99ed6018010b72"}, 939 | {file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:563c649cfdef27d081c84e72a03b48ea9408c16657500c312575ae9d9f7bc1c3"}, 940 | {file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:78751b33595f7f9511952e7e60ce858c6d64db2e062afb325985ddbd34b5c131"}, 941 | {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a357fd4f15ee49b4a98b44ec23a34a95f1e00292a139d6015c11f55774ef10de"}, 942 | {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:5989db3b3b34b76c09253deeaf7fbc2707616f130e166996606c284395da3f18"}, 943 | {file = "kiwisolver-1.3.1-cp38-cp38-win32.whl", hash = "sha256:c08e95114951dc2090c4a630c2385bef681cacf12636fb0241accdc6b303fd81"}, 944 | {file = "kiwisolver-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:44a62e24d9b01ba94ae7a4a6c3fb215dc4af1dde817e7498d901e229aaf50e4e"}, 945 | {file = "kiwisolver-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50af681a36b2a1dee1d3c169ade9fdc59207d3c31e522519181e12f1b3ba7000"}, 946 | {file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a53d27d0c2a0ebd07e395e56a1fbdf75ffedc4a05943daf472af163413ce9598"}, 947 | {file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:834ee27348c4aefc20b479335fd422a2c69db55f7d9ab61721ac8cd83eb78882"}, 948 | {file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5c3e6455341008a054cccee8c5d24481bcfe1acdbc9add30aa95798e95c65621"}, 949 | {file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:acef3d59d47dd85ecf909c359d0fd2c81ed33bdff70216d3956b463e12c38a54"}, 950 | {file = "kiwisolver-1.3.1-cp39-cp39-win32.whl", hash = "sha256:c5518d51a0735b1e6cee1fdce66359f8d2b59c3ca85dc2b0813a8aa86818a030"}, 951 | {file = "kiwisolver-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b9edd0110a77fc321ab090aaa1cfcaba1d8499850a12848b81be2222eab648f6"}, 952 | {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0cd53f403202159b44528498de18f9285b04482bab2a6fc3f5dd8dbb9352e30d"}, 953 | {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:33449715e0101e4d34f64990352bce4095c8bf13bed1b390773fc0a7295967b3"}, 954 | {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:401a2e9afa8588589775fe34fc22d918ae839aaaf0c0e96441c0fdbce6d8ebe6"}, 955 | {file = "kiwisolver-1.3.1.tar.gz", hash = "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248"}, 956 | ] 957 | markupsafe = [ 958 | {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, 959 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, 960 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash 
= "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, 961 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, 962 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, 963 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, 964 | {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, 965 | {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, 966 | {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, 967 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, 968 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, 969 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, 970 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, 971 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, 972 | {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, 973 | {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, 974 | {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, 975 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, 976 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, 977 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, 978 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, 979 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, 980 | {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, 981 | {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, 982 | {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, 983 | {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, 984 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash 
= "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, 985 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, 986 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, 987 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, 988 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, 989 | {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, 990 | {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, 991 | {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, 992 | ] 993 | matplotlib = [ 994 | {file = "matplotlib-3.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c541ee5a3287efe066bbe358320853cf4916bc14c00c38f8f3d8d75275a405a9"}, 995 | {file = "matplotlib-3.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:3a5c18dbd2c7c366da26a4ad1462fe3e03a577b39e3b503bbcf482b9cdac093c"}, 996 | {file = "matplotlib-3.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a9d8cb5329df13e0cdaa14b3b43f47b5e593ec637f13f14db75bb16e46178b05"}, 997 | {file = "matplotlib-3.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:7ad19f3fb6145b9eb41c08e7cbb9f8e10b91291396bee21e9ce761bb78df63ec"}, 998 | {file = "matplotlib-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:7a58f3d8fe8fac3be522c79d921c9b86e090a59637cb88e3bc51298d7a2c862a"}, 999 | {file = "matplotlib-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6382bc6e2d7e481bcd977eb131c31dee96e0fb4f9177d15ec6fb976d3b9ace1a"}, 1000 | {file = "matplotlib-3.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a6a44f27aabe720ec4fd485061e8a35784c2b9ffa6363ad546316dfc9cea04e"}, 1001 | {file = "matplotlib-3.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1c1779f7ab7d8bdb7d4c605e6ffaa0614b3e80f1e3c8ccf7b9269a22dbc5986b"}, 1002 | {file = "matplotlib-3.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5826f56055b9b1c80fef82e326097e34dc4af8c7249226b7dd63095a686177d1"}, 1003 | {file = "matplotlib-3.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0bea5ec5c28d49020e5d7923c2725b837e60bc8be99d3164af410eb4b4c827da"}, 1004 | {file = "matplotlib-3.4.2-cp38-cp38-win32.whl", hash = "sha256:6475d0209024a77f869163ec3657c47fed35d9b6ed8bccba8aa0f0099fbbdaa8"}, 1005 | {file = "matplotlib-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:21b31057bbc5e75b08e70a43cefc4c0b2c2f1b1a850f4a0f7af044eb4163086c"}, 1006 | {file = "matplotlib-3.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b26535b9de85326e6958cdef720ecd10bcf74a3f4371bf9a7e5b2e659c17e153"}, 1007 | {file = "matplotlib-3.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:32fa638cc10886885d1ca3d409d4473d6a22f7ceecd11322150961a70fab66dd"}, 1008 | {file = "matplotlib-3.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:956c8849b134b4a343598305a3ca1bdd3094f01f5efc8afccdebeffe6b315247"}, 1009 | {file = "matplotlib-3.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:85f191bb03cb1a7b04b5c2cca4792bef94df06ef473bc49e2818105671766fee"}, 1010 | {file = "matplotlib-3.4.2-cp39-cp39-win32.whl", hash = 
"sha256:b1d5a2cedf5de05567c441b3a8c2651fbde56df08b82640e7f06c8cd91e201f6"}, 1011 | {file = "matplotlib-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:df815378a754a7edd4559f8c51fc7064f779a74013644a7f5ac7a0c31f875866"}, 1012 | {file = "matplotlib-3.4.2.tar.gz", hash = "sha256:d8d994cefdff9aaba45166eb3de4f5211adb4accac85cbf97137e98f26ea0219"}, 1013 | ] 1014 | mccabe = [ 1015 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 1016 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 1017 | ] 1018 | mypy-extensions = [ 1019 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 1020 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 1021 | ] 1022 | nodeenv = [ 1023 | {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, 1024 | {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, 1025 | ] 1026 | numpy = [ 1027 | {file = "numpy-1.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38e8648f9449a549a7dfe8d8755a5979b45b3538520d1e735637ef28e8c2dc50"}, 1028 | {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd7d7409fa643a91d0a05c7554dd68aa9c9bb16e186f6ccfe40d6e003156e33a"}, 1029 | {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a75b4498b1e93d8b700282dc8e655b8bd559c0904b3910b144646dbbbc03e062"}, 1030 | {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1412aa0aec3e00bc23fbb8664d76552b4efde98fb71f60737c83efbac24112f1"}, 1031 | {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e46ceaff65609b5399163de5893d8f2a82d3c77d5e56d976c8b5fb01faa6b671"}, 1032 | {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c6a2324085dd52f96498419ba95b5777e40b6bcbc20088fddb9e8cbb58885e8e"}, 1033 | {file = "numpy-1.21.1-cp37-cp37m-win32.whl", hash = "sha256:73101b2a1fef16602696d133db402a7e7586654682244344b8329cdcbbb82172"}, 1034 | {file = "numpy-1.21.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7a708a79c9a9d26904d1cca8d383bf869edf6f8e7650d85dbc77b041e8c5a0f8"}, 1035 | {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95b995d0c413f5d0428b3f880e8fe1660ff9396dcd1f9eedbc311f37b5652e16"}, 1036 | {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:635e6bd31c9fb3d475c8f44a089569070d10a9ef18ed13738b03049280281267"}, 1037 | {file = "numpy-1.21.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a3d5fb89bfe21be2ef47c0614b9c9c707b7362386c9a3ff1feae63e0267ccb6"}, 1038 | {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a326af80e86d0e9ce92bcc1e65c8ff88297de4fa14ee936cb2293d414c9ec63"}, 1039 | {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:791492091744b0fe390a6ce85cc1bf5149968ac7d5f0477288f78c89b385d9af"}, 1040 | {file = "numpy-1.21.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0318c465786c1f63ac05d7c4dbcecd4d2d7e13f0959b01b534ea1e92202235c5"}, 1041 | {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:9a513bd9c1551894ee3d31369f9b07460ef223694098cf27d399513415855b68"}, 1042 | {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91c6f5fc58df1e0a3cc0c3a717bb3308ff850abdaa6d2d802573ee2b11f674a8"}, 1043 | {file = "numpy-1.21.1-cp38-cp38-win32.whl", hash = "sha256:978010b68e17150db8765355d1ccdd450f9fc916824e8c4e35ee620590e234cd"}, 1044 | {file = "numpy-1.21.1-cp38-cp38-win_amd64.whl", hash = "sha256:9749a40a5b22333467f02fe11edc98f022133ee1bfa8ab99bda5e5437b831214"}, 1045 | {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d7a4aeac3b94af92a9373d6e77b37691b86411f9745190d2c351f410ab3a791f"}, 1046 | {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9e7912a56108aba9b31df688a4c4f5cb0d9d3787386b87d504762b6754fbb1b"}, 1047 | {file = "numpy-1.21.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25b40b98ebdd272bc3020935427a4530b7d60dfbe1ab9381a39147834e985eac"}, 1048 | {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a92c5aea763d14ba9d6475803fc7904bda7decc2a0a68153f587ad82941fec1"}, 1049 | {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05a0f648eb28bae4bcb204e6fd14603de2908de982e761a2fc78efe0f19e96e1"}, 1050 | {file = "numpy-1.21.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f01f28075a92eede918b965e86e8f0ba7b7797a95aa8d35e1cc8821f5fc3ad6a"}, 1051 | {file = "numpy-1.21.1-cp39-cp39-win32.whl", hash = "sha256:88c0b89ad1cc24a5efbb99ff9ab5db0f9a86e9cc50240177a571fbe9c2860ac2"}, 1052 | {file = "numpy-1.21.1-cp39-cp39-win_amd64.whl", hash = "sha256:01721eefe70544d548425a07c80be8377096a54118070b8a62476866d5208e33"}, 1053 | {file = "numpy-1.21.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d4d1de6e6fb3d28781c73fbde702ac97f03d79e4ffd6598b880b2d95d62ead4"}, 1054 | {file = "numpy-1.21.1.zip", hash = "sha256:dff4af63638afcc57a3dfb9e4b26d434a7a602d225b42d746ea7fe2edf1342fd"}, 1055 | ] 1056 | packaging = [ 1057 | {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, 1058 | {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, 1059 | ] 1060 | pandas = [ 1061 | {file = "pandas-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1ee8418d0f936ff2216513aa03e199657eceb67690995d427a4a7ecd2e68f442"}, 1062 | {file = "pandas-1.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d9acfca191140a518779d1095036d842d5e5bc8e8ad8b5eaad1aff90fe1870d"}, 1063 | {file = "pandas-1.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e323028ab192fcfe1e8999c012a0fa96d066453bb354c7e7a4a267b25e73d3c8"}, 1064 | {file = "pandas-1.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d06661c6eb741ae633ee1c57e8c432bb4203024e263fe1a077fa3fda7817fdb"}, 1065 | {file = "pandas-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:23c7452771501254d2ae23e9e9dac88417de7e6eff3ce64ee494bb94dc88c300"}, 1066 | {file = "pandas-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7150039e78a81eddd9f5a05363a11cadf90a4968aac6f086fd83e66cf1c8d1d6"}, 1067 | {file = "pandas-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5c09a2538f0fddf3895070579082089ff4ae52b6cb176d8ec7a4dacf7e3676c1"}, 1068 | {file = 
"pandas-1.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:905fc3e0fcd86b0a9f1f97abee7d36894698d2592b22b859f08ea5a8fe3d3aab"}, 1069 | {file = "pandas-1.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ee927c70794e875a59796fab8047098aa59787b1be680717c141cd7873818ae"}, 1070 | {file = "pandas-1.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c976e023ed580e60a82ccebdca8e1cc24d8b1fbb28175eb6521025c127dab66"}, 1071 | {file = "pandas-1.3.1-cp38-cp38-win32.whl", hash = "sha256:22f3fcc129fb482ef44e7df2a594f0bd514ac45aabe50da1a10709de1b0f9d84"}, 1072 | {file = "pandas-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45656cd59ae9745a1a21271a62001df58342b59c66d50754390066db500a8362"}, 1073 | {file = "pandas-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:114c6789d15862508900a25cb4cb51820bfdd8595ea306bab3b53cd19f990b65"}, 1074 | {file = "pandas-1.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:527c43311894aff131dea99cf418cd723bfd4f0bcf3c3da460f3b57e52a64da5"}, 1075 | {file = "pandas-1.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb3b33dde260b1766ea4d3c6b8fbf6799cee18d50a2a8bc534cf3550b7c819a"}, 1076 | {file = "pandas-1.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c28760932283d2c9f6fa5e53d2f77a514163b9e67fd0ee0879081be612567195"}, 1077 | {file = "pandas-1.3.1-cp39-cp39-win32.whl", hash = "sha256:be12d77f7e03c40a2466ed00ccd1a5f20a574d3c622fe1516037faa31aa448aa"}, 1078 | {file = "pandas-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:9e1fe6722cbe27eb5891c1977bca62d456c19935352eea64d33956db46139364"}, 1079 | {file = "pandas-1.3.1.tar.gz", hash = "sha256:341935a594db24f3ff07d1b34d1d231786aa9adfa84b76eab10bf42907c8aed3"}, 1080 | ] 1081 | pathspec = [ 1082 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, 1083 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, 1084 | ] 1085 | pillow = [ 1086 | {file = "Pillow-8.3.1-1-cp36-cp36m-win_amd64.whl", hash = "sha256:fd7eef578f5b2200d066db1b50c4aa66410786201669fb76d5238b007918fb24"}, 1087 | {file = "Pillow-8.3.1-1-cp37-cp37m-win_amd64.whl", hash = "sha256:75e09042a3b39e0ea61ce37e941221313d51a9c26b8e54e12b3ececccb71718a"}, 1088 | {file = "Pillow-8.3.1-1-cp38-cp38-win_amd64.whl", hash = "sha256:c0e0550a404c69aab1e04ae89cca3e2a042b56ab043f7f729d984bf73ed2a093"}, 1089 | {file = "Pillow-8.3.1-1-cp39-cp39-win_amd64.whl", hash = "sha256:479ab11cbd69612acefa8286481f65c5dece2002ffaa4f9db62682379ca3bb77"}, 1090 | {file = "Pillow-8.3.1-1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f156d6ecfc747ee111c167f8faf5f4953761b5e66e91a4e6767e548d0f80129c"}, 1091 | {file = "Pillow-8.3.1-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:196560dba4da7a72c5e7085fccc5938ab4075fd37fe8b5468869724109812edd"}, 1092 | {file = "Pillow-8.3.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c9569049d04aaacd690573a0398dbd8e0bf0255684fee512b413c2142ab723"}, 1093 | {file = "Pillow-8.3.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c088a000dfdd88c184cc7271bfac8c5b82d9efa8637cd2b68183771e3cf56f04"}, 1094 | {file = "Pillow-8.3.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:fc214a6b75d2e0ea7745488da7da3c381f41790812988c7a92345978414fad37"}, 1095 | {file = "Pillow-8.3.1-cp36-cp36m-win32.whl", hash = "sha256:a17ca41f45cf78c2216ebfab03add7cc350c305c38ff34ef4eef66b7d76c5229"}, 1096 | {file = "Pillow-8.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:67b3666b544b953a2777cb3f5a922e991be73ab32635666ee72e05876b8a92de"}, 1097 | {file = "Pillow-8.3.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:ff04c373477723430dce2e9d024c708a047d44cf17166bf16e604b379bf0ca14"}, 1098 | {file = "Pillow-8.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9364c81b252d8348e9cc0cb63e856b8f7c1b340caba6ee7a7a65c968312f7dab"}, 1099 | {file = "Pillow-8.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a2f381932dca2cf775811a008aa3027671ace723b7a38838045b1aee8669fdcf"}, 1100 | {file = "Pillow-8.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d0da39795049a9afcaadec532e7b669b5ebbb2a9134576ebcc15dd5bdae33cc0"}, 1101 | {file = "Pillow-8.3.1-cp37-cp37m-win32.whl", hash = "sha256:2b6dfa068a8b6137da34a4936f5a816aba0ecc967af2feeb32c4393ddd671cba"}, 1102 | {file = "Pillow-8.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a4eef1ff2d62676deabf076f963eda4da34b51bc0517c70239fafed1d5b51500"}, 1103 | {file = "Pillow-8.3.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:660a87085925c61a0dcc80efb967512ac34dbb256ff7dd2b9b4ee8dbdab58cf4"}, 1104 | {file = "Pillow-8.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:15a2808e269a1cf2131930183dcc0419bc77bb73eb54285dde2706ac9939fa8e"}, 1105 | {file = "Pillow-8.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:969cc558cca859cadf24f890fc009e1bce7d7d0386ba7c0478641a60199adf79"}, 1106 | {file = "Pillow-8.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2ee77c14a0299d0541d26f3d8500bb57e081233e3fa915fa35abd02c51fa7fae"}, 1107 | {file = "Pillow-8.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c11003197f908878164f0e6da15fce22373ac3fc320cda8c9d16e6bba105b844"}, 1108 | {file = "Pillow-8.3.1-cp38-cp38-win32.whl", hash = "sha256:3f08bd8d785204149b5b33e3b5f0ebbfe2190ea58d1a051c578e29e39bfd2367"}, 1109 | {file = "Pillow-8.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:70af7d222df0ff81a2da601fab42decb009dc721545ed78549cb96e3a1c5f0c8"}, 1110 | {file = "Pillow-8.3.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:37730f6e68bdc6a3f02d2079c34c532330d206429f3cee651aab6b66839a9f0e"}, 1111 | {file = "Pillow-8.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bc3c7ef940eeb200ca65bd83005eb3aae8083d47e8fcbf5f0943baa50726856"}, 1112 | {file = "Pillow-8.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c35d09db702f4185ba22bb33ef1751ad49c266534339a5cebeb5159d364f6f82"}, 1113 | {file = "Pillow-8.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b2efa07f69dc395d95bb9ef3299f4ca29bcb2157dc615bae0b42c3c20668ffc"}, 1114 | {file = "Pillow-8.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cc866706d56bd3a7dbf8bac8660c6f6462f2f2b8a49add2ba617bc0c54473d83"}, 1115 | {file = "Pillow-8.3.1-cp39-cp39-win32.whl", hash = "sha256:9a211b663cf2314edbdb4cf897beeb5c9ee3810d1d53f0e423f06d6ebbf9cd5d"}, 1116 | {file = "Pillow-8.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:c2a5ff58751670292b406b9f06e07ed1446a4b13ffced6b6cab75b857485cbc8"}, 1117 | {file = "Pillow-8.3.1-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = 
"sha256:c379425c2707078dfb6bfad2430728831d399dc95a7deeb92015eb4c92345eaf"}, 1118 | {file = "Pillow-8.3.1-pp36-pypy36_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:114f816e4f73f9ec06997b2fde81a92cbf0777c9e8f462005550eed6bae57e63"}, 1119 | {file = "Pillow-8.3.1-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8960a8a9f4598974e4c2aeb1bff9bdd5db03ee65fd1fce8adf3223721aa2a636"}, 1120 | {file = "Pillow-8.3.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:147bd9e71fb9dcf08357b4d530b5167941e222a6fd21f869c7911bac40b9994d"}, 1121 | {file = "Pillow-8.3.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1fd5066cd343b5db88c048d971994e56b296868766e461b82fa4e22498f34d77"}, 1122 | {file = "Pillow-8.3.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f4ebde71785f8bceb39dcd1e7f06bcc5d5c3cf48b9f69ab52636309387b097c8"}, 1123 | {file = "Pillow-8.3.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c03e24be975e2afe70dfc5da6f187eea0b49a68bb2b69db0f30a61b7031cee4"}, 1124 | {file = "Pillow-8.3.1.tar.gz", hash = "sha256:2cac53839bfc5cece8fdbe7f084d5e3ee61e1303cccc86511d351adcb9e2c792"}, 1125 | ] 1126 | platformdirs = [ 1127 | {file = "platformdirs-2.2.0-py3-none-any.whl", hash = "sha256:4666d822218db6a262bdfdc9c39d21f23b4cfdb08af331a81e92751daf6c866c"}, 1128 | {file = "platformdirs-2.2.0.tar.gz", hash = "sha256:632daad3ab546bd8e6af0537d09805cec458dce201bccfe23012df73332e181e"}, 1129 | ] 1130 | pluggy = [ 1131 | {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, 1132 | {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, 1133 | ] 1134 | pre-commit = [ 1135 | {file = "pre_commit-2.13.0-py2.py3-none-any.whl", hash = "sha256:b679d0fddd5b9d6d98783ae5f10fd0c4c59954f375b70a58cbe1ce9bcf9809a4"}, 1136 | {file = "pre_commit-2.13.0.tar.gz", hash = "sha256:764972c60693dc668ba8e86eb29654ec3144501310f7198742a767bec385a378"}, 1137 | ] 1138 | py = [ 1139 | {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, 1140 | {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, 1141 | ] 1142 | pyarrow = [ 1143 | {file = "pyarrow-5.0.0-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:e9ec80f4a77057498cf4c5965389e42e7f6a618b6859e6dd615e57505c9167a6"}, 1144 | {file = "pyarrow-5.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b1453c2411b5062ba6bf6832dbc4df211ad625f678c623a2ee177aee158f199b"}, 1145 | {file = "pyarrow-5.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:9e04d3621b9f2f23898eed0d044203f66c156d880f02c5534a7f9947ebb1a4af"}, 1146 | {file = "pyarrow-5.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:64f30aa6b28b666a925d11c239344741850eb97c29d3aa0f7187918cf82494f7"}, 1147 | {file = "pyarrow-5.0.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:99c8b0f7e2ce2541dd4c0c0101d9944bb8e592ae3295fe7a2f290ab99222666d"}, 1148 | {file = "pyarrow-5.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:456a4488ae810a0569d1adf87dbc522bcc9a0e4a8d1809b934ca28c163d8edce"}, 1149 | {file = "pyarrow-5.0.0-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:c5493d2414d0d690a738aac8dd6d38518d1f9b870e52e24f89d8d7eb3afd4161"}, 1150 | {file = "pyarrow-5.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:1832709281efefa4f199c639e9f429678286329860188e53beeda71750775923"}, 1151 | {file = "pyarrow-5.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:b6387d2058d95fa48ccfedea810a768187affb62f4a3ef6595fa30bf9d1a65cf"}, 1152 | {file = "pyarrow-5.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:bbe2e439bec2618c74a3bb259700c8a7353dc2ea0c5a62686b6cf04a50ab1e0d"}, 1153 | {file = "pyarrow-5.0.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5c0d1b68e67bb334a5af0cecdf9b6a702aaa4cc259c5cbb71b25bbed40fcedaf"}, 1154 | {file = "pyarrow-5.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6e937ce4a40ea0cc7896faff96adecadd4485beb53fbf510b46858e29b2e75ae"}, 1155 | {file = "pyarrow-5.0.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:7560332e5846f0e7830b377c14c93624e24a17f91c98f0b25dafb0ca1ea6ba02"}, 1156 | {file = "pyarrow-5.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53e550dec60d1ab86cba3afa1719dc179a8bc9632a0e50d9fe91499cf0a7f2bc"}, 1157 | {file = "pyarrow-5.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2d26186ca9748a1fb89ae6c1fa04fb343a4279b53f118734ea8096f15d66c820"}, 1158 | {file = "pyarrow-5.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7c4edd2bacee3eea6c8c28bddb02347f9d41a55ec9692c71c6de6e47c62a7f0d"}, 1159 | {file = "pyarrow-5.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:601b0aabd6fb066429e706282934d4d8d38f53bdb8d82da9576be49f07eedf5c"}, 1160 | {file = "pyarrow-5.0.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ff21711f6ff3b0bc90abc8ca8169e676faeb2401ddc1a0bc1c7dc181708a3406"}, 1161 | {file = "pyarrow-5.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:ed135a99975380c27077f9d0e210aea8618ed9fadcec0e71f8a3190939557afe"}, 1162 | {file = "pyarrow-5.0.0-cp39-cp39-macosx_10_13_universal2.whl", hash = "sha256:6e1f0e4374061116f40e541408a8a170c170d0a070b788717e18165ebfdd2a54"}, 1163 | {file = "pyarrow-5.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:4341ac0f552dc04c450751e049976940c7f4f8f2dae03685cc465ebe0a61e231"}, 1164 | {file = "pyarrow-5.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c3fc856f107ca2fb3c9391d7ea33bbb33f3a1c2b4a0e2b41f7525c626214cc03"}, 1165 | {file = "pyarrow-5.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:357605665fbefb573d40939b13a684c2490b6ed1ab4a5de8dd246db4ab02e5a4"}, 1166 | {file = "pyarrow-5.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f4db312e9ba80e730cefcae0a05b63ea5befc7634c28df56682b628ad8e1c25c"}, 1167 | {file = "pyarrow-5.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1d9485741e497ccc516cb0a0c8f56e22be55aea815be185c3f9a681323b0e614"}, 1168 | {file = "pyarrow-5.0.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:b3115df938b8d7a7372911a3cb3904196194bcea8bb48911b4b3eafee3ab8d90"}, 1169 | {file = "pyarrow-5.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d8adda1892ef4553c4804af7f67cce484f4d6371564e2d8374b8e2bc85293e2"}, 1170 | {file = "pyarrow-5.0.0.tar.gz", hash = "sha256:24e64ea33eed07441cc0e80c949e3a1b48211a1add8953268391d250f4d39922"}, 1171 | ] 1172 | pycodestyle = [ 1173 | {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, 1174 | {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, 1175 | ] 1176 | pyflakes = [ 1177 | {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, 1178 | {file = "pyflakes-2.3.1.tar.gz", hash = 
"sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, 1179 | ] 1180 | pygments = [ 1181 | {file = "Pygments-2.9.0-py3-none-any.whl", hash = "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"}, 1182 | {file = "Pygments-2.9.0.tar.gz", hash = "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"}, 1183 | ] 1184 | pyparsing = [ 1185 | {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, 1186 | {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, 1187 | ] 1188 | pyproject-flake8 = [ 1189 | {file = "pyproject-flake8-0.0.1a2.tar.gz", hash = "sha256:bdeca37f78ecd34bd64a49d3657d53d099f5445831071a31c46e1fe20cd61461"}, 1190 | {file = "pyproject_flake8-0.0.1a2-py2.py3-none-any.whl", hash = "sha256:e61ed1dc088e9f9f8a7170967ac4ec135acfef3a59ab9738c7b58cc11f294a7e"}, 1191 | ] 1192 | pytest = [ 1193 | {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"}, 1194 | {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, 1195 | ] 1196 | python-dateutil = [ 1197 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 1198 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 1199 | ] 1200 | pytz = [ 1201 | {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, 1202 | {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, 1203 | ] 1204 | pyyaml = [ 1205 | {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, 1206 | {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, 1207 | {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, 1208 | {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, 1209 | {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, 1210 | {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, 1211 | {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, 1212 | {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, 1213 | {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, 1214 | {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, 1215 | {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, 1216 | {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, 1217 | {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, 1218 | {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, 1219 | {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, 1220 | {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, 1221 | {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, 1222 | {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, 1223 | {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, 1224 | {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, 1225 | {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, 1226 | {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, 1227 | {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, 1228 | {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, 1229 | {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, 1230 | {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, 1231 | {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, 1232 | {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, 1233 | {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, 1234 | ] 1235 | regex = [ 1236 | {file = "regex-2021.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8764a78c5464ac6bde91a8c87dd718c27c1cabb7ed2b4beaf36d3e8e390567f9"}, 1237 | {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4551728b767f35f86b8e5ec19a363df87450c7376d7419c3cac5b9ceb4bce576"}, 1238 | {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:577737ec3d4c195c4aef01b757905779a9e9aee608fa1cf0aec16b5576c893d3"}, 1239 | {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c856ec9b42e5af4fe2d8e75970fcc3a2c15925cbcc6e7a9bcb44583b10b95e80"}, 1240 | {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3835de96524a7b6869a6c710b26c90e94558c31006e96ca3cf6af6751b27dca1"}, 1241 | {file = 
"regex-2021.8.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cea56288eeda8b7511d507bbe7790d89ae7049daa5f51ae31a35ae3c05408531"}, 1242 | {file = "regex-2021.8.3-cp36-cp36m-win32.whl", hash = "sha256:a4eddbe2a715b2dd3849afbdeacf1cc283160b24e09baf64fa5675f51940419d"}, 1243 | {file = "regex-2021.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:57fece29f7cc55d882fe282d9de52f2f522bb85290555b49394102f3621751ee"}, 1244 | {file = "regex-2021.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a5c6dbe09aff091adfa8c7cfc1a0e83fdb8021ddb2c183512775a14f1435fe16"}, 1245 | {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff4a8ad9638b7ca52313d8732f37ecd5fd3c8e3aff10a8ccb93176fd5b3812f6"}, 1246 | {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b63e3571b24a7959017573b6455e05b675050bbbea69408f35f3cb984ec54363"}, 1247 | {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fbc20975eee093efa2071de80df7f972b7b35e560b213aafabcec7c0bd00bd8c"}, 1248 | {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14caacd1853e40103f59571f169704367e79fb78fac3d6d09ac84d9197cadd16"}, 1249 | {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb350eb1060591d8e89d6bac4713d41006cd4d479f5e11db334a48ff8999512f"}, 1250 | {file = "regex-2021.8.3-cp37-cp37m-win32.whl", hash = "sha256:18fdc51458abc0a974822333bd3a932d4e06ba2a3243e9a1da305668bd62ec6d"}, 1251 | {file = "regex-2021.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:026beb631097a4a3def7299aa5825e05e057de3c6d72b139c37813bfa351274b"}, 1252 | {file = "regex-2021.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:16d9eaa8c7e91537516c20da37db975f09ac2e7772a0694b245076c6d68f85da"}, 1253 | {file = "regex-2021.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3905c86cc4ab6d71635d6419a6f8d972cab7c634539bba6053c47354fd04452c"}, 1254 | {file = "regex-2021.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937b20955806381e08e54bd9d71f83276d1f883264808521b70b33d98e4dec5d"}, 1255 | {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28e8af338240b6f39713a34e337c3813047896ace09d51593d6907c66c0708ba"}, 1256 | {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c09d88a07483231119f5017904db8f60ad67906efac3f1baa31b9b7f7cca281"}, 1257 | {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:85f568892422a0e96235eb8ea6c5a41c8ccbf55576a2260c0160800dbd7c4f20"}, 1258 | {file = "regex-2021.8.3-cp38-cp38-win32.whl", hash = "sha256:bf6d987edd4a44dd2fa2723fca2790f9442ae4de2c8438e53fcb1befdf5d823a"}, 1259 | {file = "regex-2021.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:8fe58d9f6e3d1abf690174fd75800fda9bdc23d2a287e77758dc0e8567e38ce6"}, 1260 | {file = "regex-2021.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7976d410e42be9ae7458c1816a416218364e06e162b82e42f7060737e711d9ce"}, 1261 | {file = "regex-2021.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9569da9e78f0947b249370cb8fadf1015a193c359e7e442ac9ecc585d937f08d"}, 1262 | {file = "regex-2021.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bbe342c5b2dec5c5223e7c363f291558bc27982ef39ffd6569e8c082bdc83"}, 1263 | {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f421e3cdd3a273bace013751c345f4ebeef08f05e8c10757533ada360b51a39"}, 1264 | {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea212df6e5d3f60341aef46401d32fcfded85593af1d82b8b4a7a68cd67fdd6b"}, 1265 | {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a3b73390511edd2db2d34ff09aa0b2c08be974c71b4c0505b4a048d5dc128c2b"}, 1266 | {file = "regex-2021.8.3-cp39-cp39-win32.whl", hash = "sha256:f35567470ee6dbfb946f069ed5f5615b40edcbb5f1e6e1d3d2b114468d505fc6"}, 1267 | {file = "regex-2021.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:bfa6a679410b394600eafd16336b2ce8de43e9b13f7fb9247d84ef5ad2b45e91"}, 1268 | {file = "regex-2021.8.3.tar.gz", hash = "sha256:8935937dad2c9b369c3d932b0edbc52a62647c2afb2fafc0c280f14a8bf56a6a"}, 1269 | ] 1270 | requests = [ 1271 | {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, 1272 | {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, 1273 | ] 1274 | scipy = [ 1275 | {file = "scipy-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2a0eeaab01258e0870c4022a6cd329aef3b7c6c2b606bd7cf7bb2ba9820ae561"}, 1276 | {file = "scipy-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f52470e0548cdb74fb8ddf06773ffdcca7c97550f903b1c51312ec19243a7f7"}, 1277 | {file = "scipy-1.7.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:787749110a23502031fb1643c55a2236c99c6b989cca703ea2114d65e21728ef"}, 1278 | {file = "scipy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3304bd5bc32e00954ac4b3f4cc382ca8824719bf348aacbec6347337d6b125fe"}, 1279 | {file = "scipy-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:d1388fbac9dd591ea630da75c455f4cc637a7ca5ecb31a6b6cef430914749cde"}, 1280 | {file = "scipy-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:d648aa85dd5074b1ed83008ae987c3fbb53d68af619fce1dee231f4d8bd40e2f"}, 1281 | {file = "scipy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc61e3e5ff92d2f32bb263621d54a9cff5e3f7c420af3d1fa122ce2529de2bd9"}, 1282 | {file = "scipy-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a496b42dbcd04ea9924f5e92be63af3d8e0f43a274b769bfaca0a297327d54ee"}, 1283 | {file = "scipy-1.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d13f31457f2216e5705304d9f28e2826edf75487410a57aa99263fa4ffd792c2"}, 1284 | {file = "scipy-1.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:90c07ba5f34f33299a428b0d4fa24c30d2ceba44d63f8385b2b05be460819fcb"}, 1285 | {file = "scipy-1.7.1-cp38-cp38-win32.whl", hash = "sha256:efdd3825d54c58df2cc394366ca4b9166cf940a0ebddeb87b6c10053deb625ea"}, 1286 | {file = "scipy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:71cfc96297617eab911e22216e8a8597703202e95636d9406df9af5c2ac99a2b"}, 1287 | {file = "scipy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:4ee952f39a4a4c7ba775a32b664b1f4b74818548b65f765987adc14bb78f5802"}, 1288 | {file = "scipy-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:611f9cb459d0707dd8e4de0c96f86e93f61aac7475fcb225e9ec71fecdc5cebf"}, 1289 | {file = "scipy-1.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e101bceeb9e65a90dadbc5ca31283403a2d4667b9c178db29109750568e8d112"}, 1290 | {file = "scipy-1.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4729b41a4cdaf4cd011aeac816b532f990bdf97710cef59149d3e293115cf467"}, 1291 | {file = "scipy-1.7.1-cp39-cp39-win32.whl", hash = "sha256:c9951e3746b68974125e5e3445008a4163dd6d20ae0bbdae22b38cb8951dc11b"}, 1292 | {file = "scipy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:da9c6b336e540def0b7fd65603da8abeb306c5fc9a5f4238665cbbb5ff95cf58"}, 1293 | {file = "scipy-1.7.1.tar.gz", hash = "sha256:6b47d5fa7ea651054362561a28b1ccc8da9368a39514c1bbf6c0977a1c376764"}, 1294 | ] 1295 | six = [ 1296 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 1297 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 1298 | ] 1299 | snowballstemmer = [ 1300 | {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, 1301 | {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, 1302 | ] 1303 | sphinx = [ 1304 | {file = "Sphinx-4.1.2-py3-none-any.whl", hash = "sha256:46d52c6cee13fec44744b8c01ed692c18a640f6910a725cbb938bc36e8d64544"}, 1305 | {file = "Sphinx-4.1.2.tar.gz", hash = "sha256:3092d929cd807926d846018f2ace47ba2f3b671b309c7a89cd3306e80c826b13"}, 1306 | ] 1307 | sphinx-rtd-theme = [ 1308 | {file = "sphinx_rtd_theme-0.5.2-py2.py3-none-any.whl", hash = "sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f"}, 1309 | {file = "sphinx_rtd_theme-0.5.2.tar.gz", hash = "sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a"}, 1310 | ] 1311 | sphinxcontrib-applehelp = [ 1312 | {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, 1313 | {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, 1314 | ] 1315 | sphinxcontrib-devhelp = [ 1316 | {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, 1317 | {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, 1318 | ] 1319 | sphinxcontrib-htmlhelp = [ 1320 | {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, 1321 | {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, 1322 | ] 1323 | sphinxcontrib-jsmath = [ 1324 | {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, 1325 | {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, 1326 | ] 1327 | sphinxcontrib-qthelp = [ 1328 | {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", 
hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, 1329 | {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, 1330 | ] 1331 | sphinxcontrib-serializinghtml = [ 1332 | {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, 1333 | {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, 1334 | ] 1335 | toml = [ 1336 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 1337 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 1338 | ] 1339 | tomli = [ 1340 | {file = "tomli-1.2.0-py3-none-any.whl", hash = "sha256:056f0376bf5a6b182c513f9582c1e5b0487265eb6c48842b69aa9ca1cd5f640a"}, 1341 | {file = "tomli-1.2.0.tar.gz", hash = "sha256:d60e681734099207a6add7a10326bc2ddd1fdc36c1b0f547d00ef73ac63739c2"}, 1342 | ] 1343 | typed-ast = [ 1344 | {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, 1345 | {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, 1346 | {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, 1347 | {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, 1348 | {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, 1349 | {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, 1350 | {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, 1351 | {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, 1352 | {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, 1353 | {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, 1354 | {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, 1355 | {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, 1356 | {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, 1357 | {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, 1358 | {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, 1359 | {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, 1360 | {file = 
"typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, 1361 | {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, 1362 | {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, 1363 | {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, 1364 | {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, 1365 | {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, 1366 | {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, 1367 | {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, 1368 | {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, 1369 | {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, 1370 | {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, 1371 | {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, 1372 | {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, 1373 | {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, 1374 | ] 1375 | typing-extensions = [ 1376 | {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, 1377 | {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, 1378 | {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, 1379 | ] 1380 | urllib3 = [ 1381 | {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"}, 1382 | {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, 1383 | ] 1384 | virtualenv = [ 1385 | {file = "virtualenv-20.7.0-py2.py3-none-any.whl", hash = "sha256:fdfdaaf0979ac03ae7f76d5224a05b58165f3c804f8aa633f3dd6f22fbd435d5"}, 1386 | {file = "virtualenv-20.7.0.tar.gz", hash = "sha256:97066a978431ec096d163e72771df5357c5c898ffdd587048f45e0aecc228094"}, 1387 | ] 1388 | zipp = [ 1389 | {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, 1390 | {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, 1391 | ] 1392 | --------------------------------------------------------------------------------