├── tests
│   ├── __init__.py
│   ├── files
│   │   ├── Activity.fit
│   │   ├── Settings.fit
│   │   ├── Settings2.fit
│   │   ├── DeveloperData.fit
│   │   ├── MonitoringFile.fit
│   │   ├── antfs-dump.63.fit
│   │   ├── event_timestamp.fit
│   │   ├── garmin-fr935-cr.fit
│   │   ├── sample-activity.fit
│   │   ├── activity-settings.fit
│   │   ├── 2013-02-06-12-11-14.fit
│   │   ├── 2015-10-13-08-43-15.fit
│   │   ├── WeightScaleMultiUser.fit
│   │   ├── WorkoutRepeatSteps.fit
│   │   ├── garmin-edge-820-bike.fit
│   │   ├── garmin-fenix-5-bike.fit
│   │   ├── garmin-fenix-5-run.fit
│   │   ├── garmin-fenix-5-walk.fit
│   │   ├── WeightScaleSingleUser.fit
│   │   ├── WorkoutIndividualSteps.fit
│   │   ├── developer-types-sample.fit
│   │   ├── WorkoutCustomTargetValues.fit
│   │   ├── activity-small-fenix2-run.fit
│   │   ├── compressed-speed-distance.fit
│   │   ├── garmin-edge-500-activity.fit
│   │   ├── invalid
│   │   │   ├── activity-filecrc.fit
│   │   │   ├── activity-settings-nodata.fit
│   │   │   ├── activity-unexpected-eof.fit
│   │   │   ├── activity-activity-filecrc.fit
│   │   │   ├── activity-settings-corruptheader.fit
│   │   │   └── elemnt-bolt-no-application-id-inside-developer-data-id.fit
│   │   ├── 20170518-191602-1740899583.fit
│   │   ├── null_compressed_speed_dist.fit
│   │   ├── WorkoutRepeatGreaterThanStep.fit
│   │   ├── sample-activity-indoor-trainer.fit
│   │   ├── activity-large-fenxi2-multisport.fit
│   │   ├── 2019-02-17-062644-ELEMNT-297E-195-0.fit
│   │   ├── Edge810-Vector-2013-08-16-15-35-10.fit
│   │   ├── garmin-edge-820-bike-records.csv
│   │   ├── garmin-fenix-5-walk-records.csv
│   │   ├── garmin-fenix-5-bike-records.csv
│   │   ├── garmin-fenix-5-run-records.csv
│   │   └── compressed-speed-distance-records.csv
│   └── test_reader.py
├── fitdecode
│   ├── cmd
│   │   ├── __init__.py
│   │   ├── fitjson.py
│   │   └── fittxt.py
│   ├── __init__.py
│   ├── __meta__.py
│   ├── exceptions.py
│   ├── utils.py
│   ├── processors.py
│   ├── records.py
│   └── types.py
├── docs
│   ├── history.rst
│   ├── favicon.ico
│   ├── images
│   │   └── logo.jpg
│   ├── reference
│   │   ├── types.rst
│   │   ├── utils.rst
│   │   ├── reader.rst
│   │   ├── records.rst
│   │   ├── exceptions.rst
│   │   └── processors.rst
│   ├── reference.rst
│   ├── index.rst
│   ├── conf.py
│   └── make.py
├── requirements.in
├── cmd
│   ├── fitjson.cmd
│   ├── fittxt.cmd
│   ├── fittxt
│   └── fitjson
├── .pytest.ini
├── pyproject.toml
├── MANIFEST.in
├── clean.cmd
├── .readthedocs.yml
├── .editorconfig
├── requirements-dev.in
├── LICENSE.txt
├── .github
│   └── workflows
│       └── python-test.yml
├── .flake8
├── .gitignore
├── setup.cfg
├── fitdecode.sublime-project
├── README.rst
├── HISTORY.rst
└── tools
    └── generate_profile.py
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/fitdecode/cmd/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/history.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../HISTORY.rst
2 |
--------------------------------------------------------------------------------
/requirements.in:
--------------------------------------------------------------------------------
1 | # this package does not require any extra dependencies
2 |
--------------------------------------------------------------------------------
/cmd/fitjson.cmd:
--------------------------------------------------------------------------------
1 | @call python -B "%~dpn0" %*
2 | @exit /b %ERRORLEVEL%
3 |
--------------------------------------------------------------------------------
/cmd/fittxt.cmd:
--------------------------------------------------------------------------------
1 | @call python -B "%~dpn0" %*
2 | @exit /b %ERRORLEVEL%
3 |
--------------------------------------------------------------------------------
/docs/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/docs/favicon.ico
--------------------------------------------------------------------------------
/.pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | minversion = 8.0
3 | addopts = -ra
4 | testpaths =
5 | tests
6 |
--------------------------------------------------------------------------------
/docs/images/logo.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/docs/images/logo.jpg
--------------------------------------------------------------------------------
/tests/files/Activity.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/Activity.fit
--------------------------------------------------------------------------------
/tests/files/Settings.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/Settings.fit
--------------------------------------------------------------------------------
/tests/files/Settings2.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/Settings2.fit
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools >= 61.0.0"]
3 | build-backend = "setuptools.build_meta"
4 |
--------------------------------------------------------------------------------
/tests/files/DeveloperData.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/DeveloperData.fit
--------------------------------------------------------------------------------
/tests/files/MonitoringFile.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/MonitoringFile.fit
--------------------------------------------------------------------------------
/tests/files/antfs-dump.63.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/antfs-dump.63.fit
--------------------------------------------------------------------------------
/tests/files/event_timestamp.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/event_timestamp.fit
--------------------------------------------------------------------------------
/tests/files/garmin-fr935-cr.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/garmin-fr935-cr.fit
--------------------------------------------------------------------------------
/tests/files/sample-activity.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/sample-activity.fit
--------------------------------------------------------------------------------
/tests/files/activity-settings.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/activity-settings.fit
--------------------------------------------------------------------------------
/tests/files/2013-02-06-12-11-14.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/2013-02-06-12-11-14.fit
--------------------------------------------------------------------------------
/tests/files/2015-10-13-08-43-15.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/2015-10-13-08-43-15.fit
--------------------------------------------------------------------------------
/tests/files/WeightScaleMultiUser.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/WeightScaleMultiUser.fit
--------------------------------------------------------------------------------
/tests/files/WorkoutRepeatSteps.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/WorkoutRepeatSteps.fit
--------------------------------------------------------------------------------
/tests/files/garmin-edge-820-bike.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/garmin-edge-820-bike.fit
--------------------------------------------------------------------------------
/tests/files/garmin-fenix-5-bike.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/garmin-fenix-5-bike.fit
--------------------------------------------------------------------------------
/tests/files/garmin-fenix-5-run.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/garmin-fenix-5-run.fit
--------------------------------------------------------------------------------
/tests/files/garmin-fenix-5-walk.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/garmin-fenix-5-walk.fit
--------------------------------------------------------------------------------
/tests/files/WeightScaleSingleUser.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/WeightScaleSingleUser.fit
--------------------------------------------------------------------------------
/tests/files/WorkoutIndividualSteps.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/WorkoutIndividualSteps.fit
--------------------------------------------------------------------------------
/tests/files/developer-types-sample.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/developer-types-sample.fit
--------------------------------------------------------------------------------
/tests/files/WorkoutCustomTargetValues.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/WorkoutCustomTargetValues.fit
--------------------------------------------------------------------------------
/tests/files/activity-small-fenix2-run.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/activity-small-fenix2-run.fit
--------------------------------------------------------------------------------
/tests/files/compressed-speed-distance.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/compressed-speed-distance.fit
--------------------------------------------------------------------------------
/tests/files/garmin-edge-500-activity.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/garmin-edge-500-activity.fit
--------------------------------------------------------------------------------
/tests/files/invalid/activity-filecrc.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/invalid/activity-filecrc.fit
--------------------------------------------------------------------------------
/tests/files/20170518-191602-1740899583.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/20170518-191602-1740899583.fit
--------------------------------------------------------------------------------
/tests/files/null_compressed_speed_dist.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/null_compressed_speed_dist.fit
--------------------------------------------------------------------------------
/tests/files/WorkoutRepeatGreaterThanStep.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/WorkoutRepeatGreaterThanStep.fit
--------------------------------------------------------------------------------
/tests/files/sample-activity-indoor-trainer.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/sample-activity-indoor-trainer.fit
--------------------------------------------------------------------------------
/tests/files/activity-large-fenxi2-multisport.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/activity-large-fenxi2-multisport.fit
--------------------------------------------------------------------------------
/tests/files/invalid/activity-settings-nodata.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/invalid/activity-settings-nodata.fit
--------------------------------------------------------------------------------
/tests/files/invalid/activity-unexpected-eof.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/invalid/activity-unexpected-eof.fit
--------------------------------------------------------------------------------
/tests/files/2019-02-17-062644-ELEMNT-297E-195-0.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/2019-02-17-062644-ELEMNT-297E-195-0.fit
--------------------------------------------------------------------------------
/tests/files/Edge810-Vector-2013-08-16-15-35-10.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/Edge810-Vector-2013-08-16-15-35-10.fit
--------------------------------------------------------------------------------
/tests/files/invalid/activity-activity-filecrc.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/invalid/activity-activity-filecrc.fit
--------------------------------------------------------------------------------
/docs/reference/types.rst:
--------------------------------------------------------------------------------
1 | =====
2 | types
3 | =====
4 |
5 | .. automodule:: fitdecode.types
6 | :ignore-module-all:
7 | :members:
8 | :undoc-members:
9 |
--------------------------------------------------------------------------------
/docs/reference/utils.rst:
--------------------------------------------------------------------------------
1 | =====
2 | utils
3 | =====
4 |
5 | .. automodule:: fitdecode.utils
6 | :ignore-module-all:
7 | :members:
8 | :undoc-members:
9 |
--------------------------------------------------------------------------------
/tests/files/invalid/activity-settings-corruptheader.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/invalid/activity-settings-corruptheader.fit
--------------------------------------------------------------------------------
/docs/reference/reader.rst:
--------------------------------------------------------------------------------
1 | ======
2 | reader
3 | ======
4 |
5 | .. automodule:: fitdecode.reader
6 | :ignore-module-all:
7 | :members:
8 | :undoc-members:
9 |
--------------------------------------------------------------------------------
/docs/reference/records.rst:
--------------------------------------------------------------------------------
1 | =======
2 | records
3 | =======
4 |
5 | .. automodule:: fitdecode.records
6 | :ignore-module-all:
7 | :members:
8 | :undoc-members:
9 |
--------------------------------------------------------------------------------
/docs/reference/exceptions.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | exceptions
3 | ==========
4 |
5 | .. automodule:: fitdecode.exceptions
6 | :ignore-module-all:
7 | :members:
8 | :undoc-members:
9 |
--------------------------------------------------------------------------------
/docs/reference/processors.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | processors
3 | ==========
4 |
5 | .. automodule:: fitdecode.processors
6 | :ignore-module-all:
7 | :members:
8 | :undoc-members:
9 |
--------------------------------------------------------------------------------
/tests/files/invalid/elemnt-bolt-no-application-id-inside-developer-data-id.fit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/polyvertex/fitdecode/HEAD/tests/files/invalid/elemnt-bolt-no-application-id-inside-developer-data-id.fit
--------------------------------------------------------------------------------
/docs/reference.rst:
--------------------------------------------------------------------------------
1 | =========
2 | Reference
3 | =========
4 |
5 | .. toctree::
6 | :maxdepth: 2
7 |
8 | reference/reader
9 | reference/processors
10 | reference/records
11 | reference/types
12 | reference/exceptions
13 | reference/utils
14 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include HISTORY.rst
2 | include LICENSE.txt
3 | include README.rst
4 | recursive-include fitdecode *
5 | recursive-exclude tests *
6 | global-exclude __pycache__
7 | global-exclude *.py[cod]
8 | global-exclude *~
9 | global-exclude ._*
10 | global-exclude *.so
11 | global-exclude *.dll
12 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |
3 |
4 | Contents
5 | ========
6 |
7 | .. toctree::
8 | :maxdepth: 2
9 | :numbered:
10 |
11 | reference
12 |
13 | .. toctree::
14 | :hidden:
15 |
16 | history
17 |
18 |
19 | Indices and tables
20 | ==================
21 |
22 | * :ref:`genindex`
23 | * :ref:`modindex` (API)
24 |
--------------------------------------------------------------------------------
/clean.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | pushd "%~dp0"
3 |
4 | rmdir /s /q .pytest_cache 2>nul
5 |
6 | rmdir /s /q build 2>nul
7 | rmdir /s /q dist 2>nul
8 | rmdir /s /q fitdecode.egg-info 2>nul
9 |
10 | rmdir /s /q fitdecode\__pycache__ 2>nul
11 | rmdir /s /q fitdecode\cmd\__pycache__ 2>nul
12 |
13 | rmdir /s /q tests\__pycache__ 2>nul
14 |
15 | popd
16 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | python:
4 | install:
5 | - requirements: requirements.in
6 | - requirements: requirements-dev.in
7 | - method: pip
8 | path: .
9 |
10 | build:
11 | os: ubuntu-24.04
12 | tools:
13 | python: "3"
14 |
15 | sphinx:
16 | builder: html
17 | configuration: docs/conf.py
18 | fail_on_warning: false
19 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # editorconfig.org
2 | root = true
3 |
4 | [*]
5 | charset = utf-8
6 | end_of_line = lf
7 | indent_style = space
8 | indent_size = 4
9 | trim_trailing_whitespace = true
10 | insert_final_newline = true
11 |
12 | [*.{bat,cmd}]
13 | end_of_line = crlf
14 |
15 | [*.{md,rst,toml,txt,yaml,yml}]
16 | indent_size = 2
17 |
18 | [{README,TODO}]
19 | indent_size = 2
20 |
--------------------------------------------------------------------------------
/cmd/fittxt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright (c) Jean-Charles Lefebvre
3 | # SPDX-License-Identifier: MIT
4 |
5 | if __name__ == "__main__":
6 | import os.path
7 | import sys
8 |
9 | if not sys.flags.optimize:
10 | sys.dont_write_bytecode = True
11 |
12 | sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
13 |
14 | from fitdecode.cmd import fittxt
15 |
16 | sys.exit(fittxt.main())
17 |
--------------------------------------------------------------------------------
/cmd/fitjson:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright (c) Jean-Charles Lefebvre
3 | # SPDX-License-Identifier: MIT
4 |
5 | if __name__ == "__main__":
6 | import os.path
7 | import sys
8 |
9 | if not sys.flags.optimize:
10 | sys.dont_write_bytecode = True
11 |
12 | sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
13 |
14 | from fitdecode.cmd import fitjson
15 |
16 | sys.exit(fitjson.main())
17 |
--------------------------------------------------------------------------------
/fitdecode/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Jean-Charles Lefebvre
2 | # SPDX-License-Identifier: MIT
3 |
4 | from .__meta__ import (
5 | __version__, version_info,
6 | __title__, __fancy_title__, __description__, __url__,
7 | __license__, __author__, __copyright__)
8 |
9 | from .exceptions import *
10 | from .records import *
11 | from .reader import *
12 | from .processors import *
13 |
14 | from . import types
15 | from . import profile
16 | from . import utils
17 | from . import processors
18 | from . import reader
19 |
--------------------------------------------------------------------------------
/fitdecode/__meta__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Jean-Charles Lefebvre
2 | # SPDX-License-Identifier: MIT
3 |
4 | __version__ = '0.11.0'
5 | version_info = (0, 11, 0)
6 |
7 | __title__ = 'fitdecode'
8 | __fancy_title__ = 'fitdecode'
9 | __description__ = 'FIT file parser and decoder'
10 | __url__ = 'https://github.com/polyvertex/fitdecode'
11 | __license__ = 'MIT'
12 | __author__ = 'Jean-Charles Lefebvre'
13 | __author_email__ = 'polyvertex@gmail.com'
14 | __copyright__ = 'Copyright 2018-present Jean-Charles Lefebvre'
15 |
16 | __keywords__ = ['fit', 'ant', 'file', 'parse', 'parser', 'decode', 'decoder']
17 |
--------------------------------------------------------------------------------
/requirements-dev.in:
--------------------------------------------------------------------------------
1 | # project specifics
2 | openpyxl
3 |
4 | # code quality: ruff
5 | # ruff
6 |
7 | # code quality: flake8 and plugins
8 | # note that flake8 installs pycodestyle, pyflakes, and mccabe by default, but
9 | # explicit is better than implicit
10 | flake8
11 | flake8-pyproject
12 | pycodestyle
13 | pyflakes
14 | mccabe
15 | flake8-bugbear
16 | flake8-import-order
17 | flake8-noqa
18 | flake8-ruff
19 | dlint
20 |
21 | # code quality: pylint
22 | # pylint
23 |
24 | # unit testing
25 | pytest
26 |
27 | # docs
28 | sphinx
29 | sphinx_rtd_theme
30 |
31 | # packaging
32 | setuptools >= 61.0.0
33 | build
34 | check_manifest >= 0.48
35 | readme_renderer >= 33.0
36 |
37 | # releasing
38 | twine
39 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018-present Jean-Charles Lefebvre
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.github/workflows/python-test.yml:
--------------------------------------------------------------------------------
1 | name: python-test
2 |
3 | # on: [push, pull_request]
4 | on:
5 | push:
6 | branches: [ "master" ]
7 | pull_request:
8 | branches: [ "master" ]
9 |
10 | jobs:
11 | build:
12 | strategy:
13 | fail-fast: false
14 | matrix:
15 | os: [ubuntu-latest, windows-latest]
16 | python-version: ["3.10", "3.11", "3.12", "3.13"]
17 |
18 | runs-on: ${{ matrix.os }}
19 |
20 | steps:
21 | - uses: actions/checkout@v4
22 |
23 | - name: Set up Python ${{ matrix.python-version }}
24 | uses: actions/setup-python@v3
25 | with:
26 | python-version: ${{ matrix.python-version }}
27 |
28 | - name: Install dependencies
29 | run: |
30 | python -m pip install -U pip
31 | python -m pip install -r requirements.in
32 | python -m pip install -r requirements-dev.in
33 |
34 | - name: Run linter
35 | run: |
36 | python -m flake8
37 |
38 | - name: Run pytest
39 | run: |
40 | python -m pytest
41 |
42 | - name: Build and check artifacts
43 | run: |
44 | python -m build
45 | python -m twine check dist/*
46 |
--------------------------------------------------------------------------------
/fitdecode/exceptions.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Jean-Charles Lefebvre
2 | # SPDX-License-Identifier: MIT
3 |
4 |
5 | class FitError(Exception):
6 | pass
7 |
8 |
9 | class FitHeaderError(FitError):
10 | pass
11 |
12 |
13 | class FitCRCError(FitError):
14 | pass
15 |
16 |
17 | class FitEOFError(FitError):
18 | def __init__(self, expected, got, offset, message=''):
19 | self.expected = expected #: number of expected bytes
20 | self.got = got #: number of bytes read
21 | self.offset = offset #: the file offset from which reading took place
22 |
23 | desc = f'expected {self.expected} bytes, got {self.got} @ {self.offset}'
24 | if not message:
25 | message = desc
26 | else:
27 | message += ' (' + desc + ')'
28 |
29 | super().__init__(message)
30 |
31 |
32 | class FitParseError(FitError):
33 | def __init__(self, offset, message=''):
34 | self.offset = offset #: the file offset from which reading took place
35 |
36 | desc = 'FIT parsing error @ ' + str(offset)
37 | if message:
38 | desc += ': ' + message
39 |
40 | super().__init__(desc)
41 |
--------------------------------------------------------------------------------
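The exception classes above are what client code is expected to catch around a read loop. The following is a minimal usage sketch (an editor's illustration, not part of the package): it relies only on the classes defined in fitdecode/exceptions.py and on the FitReader iteration pattern shown in README.rst, and the file path argument is a placeholder.

    # Hedged usage sketch: catching fitdecode's exception hierarchy around a
    # read loop. Only FitEOFError and FitParseError carry an `offset` attribute.
    import fitdecode
    from fitdecode.exceptions import (
        FitCRCError, FitEOFError, FitHeaderError, FitParseError)

    def count_frames(path):
        """Count yielded frames, reporting where decoding failed, if it did."""
        frames = 0
        try:
            with fitdecode.FitReader(path) as fit:
                for _frame in fit:
                    frames += 1
        except FitHeaderError as exc:
            print(f'bad FIT header: {exc}')
        except FitCRCError as exc:
            print(f'CRC mismatch: {exc}')
        except (FitEOFError, FitParseError) as exc:
            # both exception types expose the file offset being read
            print(f'decoding failed at offset {exc.offset}: {exc}')
        return frames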
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | # flake8 options
3 | # requires flake8-pyproject
4 |
5 | require-plugins =
6 | pycodestyle,
7 | pyflakes,
8 | mccabe,
9 | flake8-bugbear,
10 | flake8-import-order,
11 | flake8-noqa,
12 | flake8-ruff,
13 | dlint
14 |
15 | # default: E, F, W, C90
16 | # B flake8-bugbear
17 | # C90 C901: mccabe
18 | # D dlint
19 | # E pycodestyle
20 | # F flake8
21 | # I flake8-import-order
22 | # NQA flake8-noqa
23 | # RUF flake8-ruff
24 | # W pycodestyle
25 | select = B,C90,D,E,F,I,NQA,RUF,W
26 |
27 | max-line-length = 88
28 | max-doc-length = 88
29 | statistics = True
30 |
31 | extend-exclude =
32 | docs/,
33 | fitdecode/__init__.py,
34 | fitdecode/profile.py,
35 | tests/test_reader.py,
36 | tmp/,
37 | tools/generate_profile.py
38 |
39 | ignore =
40 | # B028 No explicit stacklevel argument found for warnings.warn()
41 | # W503 line break before binary operator
42 | # W504 line break after binary operator
43 | B028,W503,W504
44 |
45 | per-file-ignores =
46 | # F401 Module imported but unused
47 | # F403 'from module import *' used; unable to detect undefined names
48 | # I201 Missing newline between import groups
49 | fitdecode/__init__.py: I,F401,F403
50 |
51 | # flake8-noqa options
52 | noqa-require-code = True
53 | noqa-include-name = True
54 |
55 | # mccabe options
56 | max-complexity = 25
57 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # development and testing
2 | /tmp/
3 | /t.*
4 | /t[0-9]
5 | /t[0-9].*
6 |
7 | # python auto-generated
8 | /build/
9 | /dist/
10 | __pycache__/
11 | .mypy_cache/
12 | .pytest_cache/
13 | *.egg-info/
14 | *.pyc
15 |
16 | # python sphinx output
17 | /docs/_build/
18 |
19 | # ruff cache dir
20 | .ruff_cache/
21 |
22 | # python environments
23 | .[Ee][Nn][Vv]/
24 | .[Vv][Ee][Nn][Vv]/
25 | .[Ee][Nn][Vv].[Bb][Aa][Kk]/
26 | .[Vv][Ee][Nn][Vv].[Bb][Aa][Kk]/
27 | [Ee][Nn][Vv]/
28 | [Vv][Ee][Nn][Vv]/
29 | [Ee][Nn][Vv].[Bb][Aa][Kk]/
30 | [Vv][Ee][Nn][Vv].[Bb][Aa][Kk]/
31 |
32 | # python unit testing and coverage reports
33 | htmlcov/
34 | .tox/
35 | .nox/
36 | .coverage
37 | .coverage.*
38 | .cache
39 | nosetests.xml
40 | coverage.xml
41 | *.cover
42 | *.py,cover
43 | .hypothesis/
44 | .pytest_cache/
45 | cover/
46 |
47 | # sublimetext
48 | *.sublime-workspace
49 |
50 | # vscode
51 | .vscode/*
52 | !.vscode/settings.json
53 | # !.vscode/tasks.json
54 | # !.vscode/launch.json
55 | # !.vscode/extensions.json
56 | *.code-workspace
57 | .history/
58 |
59 | # vim
60 | *~
61 | [._]*.s[a-v][a-z]
62 | !*.svg
63 | [._]*.sw[a-p]
64 | [._]s[a-rt-v][a-z]
65 | [._]ss[a-gi-z]
66 | [._]sw[a-p]
67 | Session.vim
68 | Sessionx.vim
69 | [._]*.un~
70 |
71 | # linux
72 | .Trash-*
73 | .fuse_hidden*
74 | .directory
75 |
76 | # windows
77 | Thumbs.db
78 | Thumbs.db:encryptable
79 | [Dd]esktop.ini
80 |
81 | # macos
82 | .DS_Store
83 | .AppleDouble
84 | .LSOverride
85 | Icon??
86 | ._*
87 | .AppleDB
88 | .AppleDesktop
89 | Network Trash Folder
90 | Temporary Items
91 | .apdisk
92 |
--------------------------------------------------------------------------------
/tests/files/garmin-edge-820-bike-records.csv:
--------------------------------------------------------------------------------
1 | timestamp,heart_rate,cadence,speed,distance,position_lat,position_long,altitude,temperature
2 | Mon Jun 12 09:10:15 PDT 2017,101,69,6.951,6.95,446375435,-1456338541,-17.600000000000023,13
3 | Mon Jun 12 09:10:20 PDT 2017,104,68,6.867,41.58,446379221,-1456338592,-17.80000000000001,13
4 | Mon Jun 12 09:10:22 PDT 2017,108,68,6.793,55.26,446380702,-1456338600,-17.80000000000001,13
5 | Mon Jun 12 09:10:23 PDT 2017,111,67,6.858,62.11,446381439,-1456338509,-17.600000000000023,13
6 | Mon Jun 12 09:10:24 PDT 2017,114,68,6.839,68.95,446382175,-1456338439,-17.600000000000023,13
7 | Mon Jun 12 09:10:25 PDT 2017,114,67,6.821,75.77,446382913,-1456338404,-17.80000000000001,13
8 | Mon Jun 12 09:10:34 PDT 2017,115,68,6.979,137.8,446389555,-1456338298,-17.399999999999977,13
9 | Mon Jun 12 09:10:43 PDT 2017,118,70,7.101,200.84,446396367,-1456338184,-17.19999999999999,13
10 | Mon Jun 12 09:10:44 PDT 2017,118,70,7.194,208.04,446397136,-1456338169,-17.19999999999999,13
11 | Mon Jun 12 09:10:53 PDT 2017,119,68,6.961,270.99,446403989,-1456338084,-17.19999999999999,13
12 | Mon Jun 12 09:11:02 PDT 2017,119,68,6.895,333.88,446410804,-1456338040,-17.19999999999999,13
13 | Mon Jun 12 09:11:11 PDT 2017,121,69,6.961,395.95,446417518,-1456338079,-17.600000000000023,13
14 | Mon Jun 12 09:11:13 PDT 2017,118,69,6.914,409.82,446419029,-1456338102,-17.80000000000001,13
15 | Mon Jun 12 09:11:16 PDT 2017,115,68,6.849,430.39,446421280,-1456338109,-17.80000000000001,13
16 | Mon Jun 12 09:11:20 PDT 2017,115,66,6.653,457.12,446424211,-1456338151,-17.80000000000001,13
17 |
--------------------------------------------------------------------------------
/tests/files/garmin-fenix-5-walk-records.csv:
--------------------------------------------------------------------------------
1 | timestamp,heart_rate,cadence,speed,distance,position_lat,position_long,altitude,temperature
2 | Sun Jun 11 07:32:51 PDT 2017,71,53,1.204,0.0,456107865,-1463072519,-0.8000000000000114,26
3 | Sun Jun 11 07:32:52 PDT 2017,71,53,1.409,1.6,456107693,-1463072572,-0.6000000000000227,26
4 | Sun Jun 11 07:32:56 PDT 2017,69,56,1.166,5.8,456107243,-1463072697,-0.6000000000000227,26
5 | Sun Jun 11 07:32:58 PDT 2017,66,58,1.054,8.28,456106976,-1463072755,-0.19999999999998863,26
6 | Sun Jun 11 07:33:02 PDT 2017,72,63,1.474,14.19,456106356,-1463072939,-0.19999999999998863,26
7 | Sun Jun 11 07:33:04 PDT 2017,76,62,1.456,16.95,456106074,-1463073060,-0.19999999999998863,26
8 | Sun Jun 11 07:33:06 PDT 2017,80,61,1.456,20.48,456105715,-1463073219,0.0,26
9 | Sun Jun 11 07:33:07 PDT 2017,81,61,1.502,22.29,456105524,-1463073261,0.0,26
10 | Sun Jun 11 07:33:08 PDT 2017,82,68,1.521,23.99,456105345,-1463073308,0.19999999999998863,26
11 | Sun Jun 11 07:33:11 PDT 2017,80,65,1.568,29.04,456104835,-1463073550,0.6000000000000227,26
12 | Sun Jun 11 07:33:13 PDT 2017,77,61,1.605,32.48,456104480,-1463073677,0.6000000000000227,26
13 | Sun Jun 11 07:33:15 PDT 2017,75,60,1.596,35.34,456104192,-1463073814,0.6000000000000227,26
14 | Sun Jun 11 07:33:16 PDT 2017,74,61,1.558,36.99,456104020,-1463073871,0.6000000000000227,26
15 | Sun Jun 11 07:33:19 PDT 2017,77,61,1.53,41.28,456103603,-1463074119,0.8000000000000114,26
16 | Sun Jun 11 07:33:20 PDT 2017,78,61,1.512,42.56,456103478,-1463074191,0.6000000000000227,26
17 | Sun Jun 11 07:33:23 PDT 2017,77,62,1.456,46.19,456103110,-1463074371,0.6000000000000227,26
18 | Sun Jun 11 07:33:37 PDT 2017,75,60,1.381,67.85,456100917,-1463075331,0.8000000000000114,26
19 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = fitdecode
3 | version = attr: fitdecode.__version__
4 | license = MIT
5 | license_files = LICENSE.txt
6 | description = FIT file parser and decoder
7 | url = https://github.com/polyvertex/fitdecode
8 | project_urls =
9 | Source Code = https://github.com/polyvertex/fitdecode
10 | Issue Tracker = https://github.com/polyvertex/fitdecode/issues
11 | keywords = fit, fitparse, ant, file, parse, parser, decode, decoder
12 | author = Jean-Charles Lefebvre
13 | author_email = polyvertex@gmail.com
14 | # long_description = file: README.rst
15 | long_description = file: README.rst, HISTORY.rst
16 | long_description_content_type = text/x-rst
17 | classifiers =
18 | Development Status :: 4 - Beta
19 | Operating System :: OS Independent
20 | Programming Language :: Python
21 | Programming Language :: Python :: 3.6
22 | Programming Language :: Python :: 3.7
23 | Programming Language :: Python :: 3.8
24 | Programming Language :: Python :: 3.9
25 | Programming Language :: Python :: 3.10
26 | Programming Language :: Python :: 3.11
27 | Programming Language :: Python :: 3.12
28 | Programming Language :: Python :: 3.13
29 |
30 | [options]
31 | packages = find:
32 | include_package_data = True
33 | zip_safe = True
34 | python_requires = >= 3.6
35 | install_requires = file: requirements.in
36 | tests_require =
37 |
38 | [options.packages.find]
39 | # where = src # . by default
40 | include = fitdecode*
41 | exclude =
42 |
43 | [options.entry_points]
44 | console_scripts =
45 | fitjson = fitdecode.cmd.fitjson:main
46 | fittxt = fitdecode.cmd.fittxt:main
47 |
48 | [options.extras_require]
49 | dev = file: requirements-dev.in
50 |
51 | # [sdist]
52 | # this causes check_manifest troubles due to having more than one file in dist/
53 | # formats = gztar, zip
54 |
--------------------------------------------------------------------------------
/tests/files/garmin-fenix-5-bike-records.csv:
--------------------------------------------------------------------------------
1 | timestamp,heart_rate,cadence,speed,distance,position_lat,position_long,altitude,temperature
2 | Mon Jun 12 09:09:22 PDT 2017,77,,8.258,0.0,446332520,-1456340701,0.0,19
3 | Mon Jun 12 09:09:23 PDT 2017,83,,8.024,16.15,446334253,-1456340838,-1.8000000000000114,19
4 | Mon Jun 12 09:09:25 PDT 2017,86,,8.108,24.27,446335124,-1456340910,-2.8000000000000114,19
5 | Mon Jun 12 09:09:26 PDT 2017,88,,7.894,32.32,446335985,-1456341014,-2.8000000000000114,19
6 | Mon Jun 12 09:09:28 PDT 2017,90,,7.931,48.37,446337704,-1456341206,-4.199999999999989,19
7 | Mon Jun 12 09:09:30 PDT 2017,94,,8.015,64.28,446339412,-1456341307,-3.8000000000000114,19
8 | Mon Jun 12 09:09:33 PDT 2017,98,,7.847,89.5,446342123,-1456341329,-3.8000000000000114,19
9 | Mon Jun 12 09:09:38 PDT 2017,98,,8.286,131.5,446346636,-1456341447,-4.0,19
10 | Mon Jun 12 09:09:43 PDT 2017,101,,7.707,171.56,446350896,-1456340670,-4.399999999999977,19
11 | Mon Jun 12 09:09:49 PDT 2017,104,,7.586,216.12,446355537,-1456339187,-4.199999999999989,19
12 | Mon Jun 12 09:09:50 PDT 2017,104,,7.819,224.94,446356415,-1456338722,-4.399999999999977,19
13 | Mon Jun 12 09:09:52 PDT 2017,105,,6.989,240.69,446358082,-1456338322,-4.600000000000023,19
14 | Mon Jun 12 09:10:01 PDT 2017,106,,7.651,305.71,446365050,-1456338687,-3.6000000000000227,19
15 | Mon Jun 12 09:10:08 PDT 2017,103,,6.877,355.41,446370388,-1456338400,-4.199999999999989,19
16 | Mon Jun 12 09:10:11 PDT 2017,100,,6.83,375.67,446372566,-1456338386,-4.199999999999989,19
17 | Mon Jun 12 09:10:17 PDT 2017,103,,6.606,418.02,446377116,-1456338248,-3.8000000000000114,19
18 | Mon Jun 12 09:10:21 PDT 2017,108,,6.811,445.73,446380093,-1456338315,-4.199999999999989,19
19 | Mon Jun 12 09:10:22 PDT 2017,111,,6.578,452.59,446380831,-1456338298,-4.600000000000023,19
20 | Mon Jun 12 09:10:23 PDT 2017,114,,6.662,459.52,446381573,-1456338223,-4.600000000000023,19
21 |
--------------------------------------------------------------------------------
/tests/files/garmin-fenix-5-run-records.csv:
--------------------------------------------------------------------------------
1 | timestamp,heart_rate,cadence,speed,distance,position_lat,position_long,altitude,temperature
2 | Sun Jun 11 07:34:09 PDT 2017,61,0,0.0,0.0,456099128,-1463077077,2.1999999999999886,25
3 | Sun Jun 11 07:34:10 PDT 2017,61,0,0.0,0.5,456099059,-1463077109,2.3999999999999773,25
4 | Sun Jun 11 07:34:11 PDT 2017,58,0,0.0,0.88,456099079,-1463077607,2.1999999999999886,25
5 | Sun Jun 11 07:34:13 PDT 2017,56,78,1.166,7.93,456098414,-1463078075,3.3999999999999773,25
6 | Sun Jun 11 07:34:14 PDT 2017,59,90,1.624,10.94,456098127,-1463078262,3.1999999999999886,25
7 | Sun Jun 11 07:34:16 PDT 2017,63,89,3.378,17.97,456097397,-1463078534,3.1999999999999886,25
8 | Sun Jun 11 07:34:23 PDT 2017,71,91,3.144,36.76,456095506,-1463079425,4.0,25
9 | Sun Jun 11 07:34:25 PDT 2017,77,90,2.986,40.84,456095112,-1463079672,3.6000000000000227,25
10 | Sun Jun 11 07:34:26 PDT 2017,77,95,2.911,43.29,456094880,-1463079835,3.1999999999999886,25
11 | Sun Jun 11 07:34:31 PDT 2017,80,90,3.126,60.74,456093205,-1463080896,2.3999999999999773,24
12 | Sun Jun 11 07:34:32 PDT 2017,82,90,3.144,63.67,456092911,-1463081037,2.0,24
13 | Sun Jun 11 07:34:34 PDT 2017,87,89,3.107,69.79,456092311,-1463081382,2.1999999999999886,24
14 | Sun Jun 11 07:34:36 PDT 2017,92,89,3.042,75.8,456091727,-1463081734,2.3999999999999773,24
15 | Sun Jun 11 07:34:38 PDT 2017,100,89,3.023,82.77,456091091,-1463082235,2.8000000000000114,24
16 | Sun Jun 11 07:34:39 PDT 2017,103,90,3.042,86.31,456090774,-1463082503,3.1999999999999886,24
17 | Sun Jun 11 07:34:42 PDT 2017,106,90,3.042,95.43,456089922,-1463083113,3.3999999999999773,24
18 | Sun Jun 11 07:34:45 PDT 2017,107,89,2.986,102.55,456089242,-1463083563,4.199999999999989,24
19 | Sun Jun 11 07:34:54 PDT 2017,110,73,2.725,122.86,456087277,-1463084768,4.399999999999977,24
20 | Sun Jun 11 07:34:56 PDT 2017,111,54,2.351,128.95,456086647,-1463085005,4.399999999999977,24
21 | Sun Jun 11 07:35:01 PDT 2017,111,95,2.379,142.1,456085522,-1463086065,5.399999999999977,24
22 | Sun Jun 11 07:35:06 PDT 2017,112,88,2.865,157.56,456084072,-1463087093,4.199999999999989,24
23 |
--------------------------------------------------------------------------------
/fitdecode.sublime-project:
--------------------------------------------------------------------------------
1 | {
2 | "folders":
3 | [
4 | {
5 | "path": ".",
6 | "folder_exclude_patterns":
7 | [
8 | "//build/",
9 | "//dist/",
10 | "//docs/_build",
11 | "__pycache__/",
12 | ".pytest_cache/",
13 | ".ruff_cache/",
14 | "*.egg-info"
15 | ],
16 | "file_exclude_patterns":
17 | [
18 | "*.bz2",
19 | "*.egg-info",
20 | "*.gz",
21 | "*.tar",
22 | "*.tgz",
23 | "*.xz",
24 | "*.zip",
25 | "*.zst"
26 | ]
27 | }
28 | ],
29 | "settings":
30 | {
31 | "default_line_ending": "unix",
32 | "default_encoding": "UTF-8",
33 | "tab_size": 4,
34 | "translate_tabs_to_spaces": true,
35 | "trim_automatic_white_space": true,
36 | "trim_trailing_white_space_on_save": true
37 | },
38 | "build_systems":
39 | [
40 | {
41 | "name": "flake8 - file",
42 | "selector": "source.python",
43 | "quiet": true,
44 | "word_wrap": false,
45 | "working_dir": "$project_path",
46 | "file_regex": "^\\s*(..[^:]*):(\\d+):(\\d+):\\s*([^\\n]+)",
47 | "cmd": ["python", "-B", "-m", "flake8", "$file"]
48 | },
49 | {
50 | "name": "flake8 - project",
51 | "selector": null,
52 | "quiet": true,
53 | "word_wrap": false,
54 | "working_dir": "$project_path",
55 | "file_regex": "^\\s*(..[^:]*):(\\d+):(\\d+):\\s*([^\\n]+)",
56 | "cmd": ["python", "-B", "-m", "flake8"]
57 | },
58 | {
59 | "name": "unit tests",
60 | "selector": null,
61 | "quiet": true,
62 | "word_wrap": false,
63 | "working_dir": "$project_path",
64 | "cmd": ["python", "-B", "-m", "pytest"]
65 | }
66 | ]
67 | }
68 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | _THIS_DIR = os.path.dirname(os.path.realpath(__file__))
5 | _PROJECT_DIR = os.path.normpath(os.path.join(_THIS_DIR, '..'))
6 |
7 | # ensure "fitdecode" python package is importable
8 | sys.path.insert(0, _PROJECT_DIR)
9 |
10 | # import project's info (name, version, ...)
11 | _ABOUT = {}
12 | with open(os.path.join(_PROJECT_DIR, 'fitdecode', '__meta__.py'),
13 | mode='r', encoding='utf-8') as f:
14 | exec(f.read(), _ABOUT)
15 |
16 |
17 | #-------------------------------------------------------------------------------
18 | #needs_sphinx = '1.0'
19 |
20 | project = _ABOUT['__fancy_title__']
21 | copyright = _ABOUT['__copyright__']
22 | author = _ABOUT['__author__']
23 | version = _ABOUT['__version__']
24 | release = _ABOUT['__version__']
25 |
26 | extensions = [
27 | 'sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.intersphinx',
28 | 'sphinx.ext.extlinks']
29 |
30 | source_suffix = '.rst'
31 | #source_encoding = 'utf-8-sig'
32 |
33 | master_doc = 'index'
34 |
35 | exclude_patterns = ['_build', '**/.git', '**/.svn']
36 |
37 | templates_path = ['_templates']
38 |
39 | rst_epilog = """
40 | .. |project| replace:: {title}
41 |
42 | .. |version| replace:: **v{version}**
43 |
44 | .. |br| raw:: html
45 |
46 |    <br />
47 |
48 | """.format(
49 | title=_ABOUT['__fancy_title__'],
50 | version=_ABOUT['__version__'])
51 |
52 | primary_domain = 'py'
53 | default_role = 'any'
54 |
55 | #highlight_language = 'python3'
56 | pygments_style = 'sphinx'
57 |
58 | # ext.autodoc config
59 | autodoc_member_order = 'bysource'
60 | autodoc_default_flags = ['members', 'undoc-members']
61 |
62 | # ext.extlinks config
63 | extlinks = {
64 | 'ghu': ('https://github.com/%s', '@')}
65 |
66 | # ext.intersphinx config
67 | intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
68 |
69 | # ext.todo config
70 | todo_include_todos = True
71 |
72 |
73 | html_theme = 'sphinx_rtd_theme'
74 | html_title = _ABOUT['__fancy_title__']
75 | html_short_title = _ABOUT['__fancy_title__']
76 | html_logo = None #'images/logo.jpg'
77 | html_favicon = os.path.join(_THIS_DIR, 'favicon.ico')
78 | # html_static_path = ['_static']
79 | html_show_sourcelink = False
80 | html_show_sphinx = False
81 | html_show_copyright = True
82 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | =========
2 | fitdecode
3 | =========
4 |
5 | .. image:: https://readthedocs.org/projects/fitdecode/badge/?version=latest
6 | :target: https://fitdecode.readthedocs.io/
7 | :alt: Latest Docs
8 |
9 | .. image:: https://github.com/polyvertex/fitdecode/actions/workflows/python-test.yml/badge.svg
10 | :target: https://github.com/polyvertex/fitdecode/actions/workflows/python-test.yml
11 | :alt: python-test
12 |
13 |
14 | A FIT file parsing and decoding library written in Python (``>= 3.6``).
15 |
16 |
17 |
18 | Usage Example
19 | =============
20 |
21 | Read a FIT file, frame by frame:
22 |
23 | .. code:: python
24 |
25 | import fitdecode
26 |
27 | with fitdecode.FitReader('file.fit') as fit:
28 | for frame in fit:
29 | # The yielded frame object is of one of the following types:
30 | # * fitdecode.FitHeader (FIT_FRAME_HEADER)
31 | # * fitdecode.FitDefinitionMessage (FIT_FRAME_DEFINITION)
32 | # * fitdecode.FitDataMessage (FIT_FRAME_DATA)
33 | # * fitdecode.FitCRC (FIT_FRAME_CRC)
34 |
35 | if frame.frame_type == fitdecode.FIT_FRAME_DATA:
36 | # Here, frame is a FitDataMessage object.
37 | # A FitDataMessage object contains decoded values that
38 | # are directly usable in your script logic.
39 | print(frame.name)
40 |
41 |
42 | Command line utilities
43 | ----------------------
44 |
45 | The ``fitjson`` command converts a FIT file to JSON:
46 |
47 | ::
48 |
49 | $ fitjson --pretty -o out_file.json in_file.fit
50 |
51 | The ``fittxt`` command converts a FIT file to a human-readable text format,
52 | convenient for inspecting and debugging FIT data::
53 |
54 | $ fittxt -o out_file.txt in_file.fit
55 |
56 | Both commands accept a ``--filter`` option (or ``-f``), which can be specified
57 | multiple times::
58 |
59 | $ # include only RECORD messages:
60 | $ fitjson -f=record -o out_file.json in_file.fit
61 |
62 | $ # exclude FILE_ID and EVENT messages:
63 | $ fitjson -f=-file_id -f=-event -o out_file.json in_file.fit
64 |
65 |
66 | Installation
67 | ============
68 |
69 | fitdecode is available on `PyPI <https://pypi.org/project/fitdecode/>`_::
70 |
71 | $ pip install fitdecode
72 |
73 |
74 | Or, you can clone fitdecode's `source code repository
75 | <https://github.com/polyvertex/fitdecode>`_ before installing it::
76 |
77 | $ git clone git@github.com:polyvertex/fitdecode.git
78 | $ cd fitdecode
79 | $ pip install .
80 |
81 |
82 | Note that for convenience, the ``cmd`` directory located at the root of the
83 | project can safely be added to your ``PATH``, so that the fitdecode commands
84 | can be called without installing the package.
85 |
86 |
87 | Overview
88 | ========
89 |
90 | fitdecode is a non-offensive, API-incompatible rewrite of the fitparse_ library,
91 | with some improvements and additional features, thread-safety, and efforts to
92 | optimize both speed and memory usage.
93 |
94 | Main differences between fitdecode and fitparse:
95 |
96 | * fitdecode API is not compatible with fitparse
97 | * fitdecode is faster
98 | * fitdecode allows concurrent reading of multiple streams by being thread-safe,
99 | in the sense that fitdecode's objects keep their state stored locally
100 | * fitdecode does not discard the FIT header and the CRC footer while reading a
101 | stream so that client code gets a complete 1:1 representation of the stream
102 | that is being read
103 | * This also allows client code to easily deal with so-called chained FIT files,
104 | as per FIT SDK definition (i.e. concatenated FIT files)
105 | * CRC computation and matching are both optional. CRC can be either matched, or
106 | only computed, or just ignored for faster reading.
107 | * fitdecode offers optional access to records, headers and footers in their
108 |   binary form, so as to allow FIT file cutting, stitching and filtering at the
109 |   binary level
110 |
111 |
112 | Why a new library?
113 | ==================
114 |
115 | A new library has been created instead of just offering to patch fitparse_
116 | because many changes and additions in fitdecode break fitparse's backward
117 | compatibility and because it allowed more freedom during the development of
118 | fitdecode.
119 |
120 |
121 | Documentation
122 | =============
123 |
124 | Documentation is available at https://fitdecode.readthedocs.io/
125 |
126 |
127 | License
128 | =======
129 |
130 | This project is distributed under the terms of the MIT license.
131 | See the ``LICENSE.txt`` file for details.
132 |
133 |
134 | Credits
135 | =======
136 |
137 | fitdecode is largely based on the generic approach adopted by fitparse_ to
138 | define FIT types and to decode raw values. That includes the module
139 | ``profile.py`` and all the classes it refers to, as well as the script
140 | ``generate_profile.py``.
141 |
142 |
143 |
144 | .. _fitparse: https://github.com/dtcooper/python-fitparse
145 |
--------------------------------------------------------------------------------
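Building on the README usage example above, the sketch below shows one way to pull plain values out of "record" data messages. It is an editor's illustration under stated assumptions: it uses only what README.rst and HISTORY.rst document (the FIT_FRAME_DATA constant, FitDataMessage's name, has_field and get_value), and 'file.fit', 'heart_rate' and 'speed' are placeholder names.

    # Hedged sketch: collect a couple of fields from "record" data messages.
    import fitdecode

    rows = []
    with fitdecode.FitReader('file.fit') as fit:
        for frame in fit:
            # keep only decoded data messages named "record"
            if frame.frame_type != fitdecode.FIT_FRAME_DATA:
                continue
            if frame.name != 'record':
                continue
            row = {}
            for field_name in ('heart_rate', 'speed'):
                if frame.has_field(field_name):
                    row[field_name] = frame.get_value(field_name)
            rows.append(row)

    print(f'{len(rows)} record messages read')

Guarding each get_value() call with has_field() keeps the loop tolerant of record messages that do not carry every field.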
/HISTORY.rst:
--------------------------------------------------------------------------------
1 | .. :changelog:
2 |
3 | ==========
4 | Change Log
5 | ==========
6 |
7 |
8 | v0.11.0 (2025-08-06)
9 | ====================
10 |
11 | * FIT SDK profile upgraded to v21.171.0
12 | * Fixed: decoding of ``developer_data_index`` field which was not converted to
13 | an ``int`` due to a typo
14 | * Most FIT so-called developer fields are now considered optional (should
15 |   fix #22 and #24)
16 | * Support for ``localtime`` fields set to the out-of-bound value ``86400``
17 |   (thanks to @maethub, see python-fitparse#138)
18 | * ``generate_profile`` utility now supports the latest FIT SDK Zip archive
19 |   layout and ``Profile.xlsx`` file format (thanks to @fundthmcalculus, see
20 |   python-fitparse#134)
21 | * Minor corrections due to stricter flake8 settings (code quality)
22 | * Overhaul of project files and CI
23 |
24 |
25 | v0.10.0 (2021-09-12)
26 | ====================
27 |
28 | * ``fitjson``: added ``--pretty`` option
29 | * ``fitjson``: added ``--nounk`` option to filter out *unknown* messages
30 | * ``fitjson``: ``--filter`` option now also allows filtering out messages
31 | * ``fittxt``: ``--filter`` option now also allows filtering out messages
32 | * ``fittxt``: added ``--nounk`` option to filter out *unknown* messages
33 | * Fixed: `FitReader` does not close a file-like object owned by the user
34 | * Fixed: `FitReader.file_id` gets reset upon FIT footer (CRC frame)
35 | * Fixed: `utils.get_mesg_num()` return value
36 | * Fixed: `utils.get_mesg_field_num()` return value
37 | * Minor corrections, improvements and code cleanup
38 |
39 |
40 | v0.9.0 (2021-09-10)
41 | ===================
42 |
43 | * `FitReader` gets new properties ``fit_file_index`` and ``fit_files_count``
44 | * New ``CrcCheck`` policy: ``WARN``
45 | * **BREAKING CHANGE:** ``CrcCheck`` default policy from ``RAISE`` to ``WARN``
46 | * `FitHeaderError` exception messages a bit more helpful
47 | * Minor corrections and code cleanup
48 |
49 |
50 | v0.8.0 (2021-09-09)
51 | ===================
52 |
53 | * `FitReader` gets the ``error_handling`` argument to be less strict on
54 | malformed files (issues #13, #16, #18)
55 | * FIT SDK profile upgraded to v21.60
56 | * Minor corrections, improvements and cleanup on code and documentation
57 |
58 |
59 | v0.7.0 (2020-10-04)
60 | ===================
61 |
62 | * Compatibility with Apple Watch improved (issue #10)
63 | * FIT SDK profile upgraded to v21.38
64 | * ``generate_profile`` utility now supports recent SDK file structure
65 | * Minor improvements and cleanup on code and documentation
66 |
67 |
68 | v0.6.0 (2019-11-02)
69 | ===================
70 |
71 | * Added `FitReader.last_timestamp` property
72 | * Fixed: `FitReader` was raising `KeyError` instead of `FitParseError` when a
73 | dev_type was not found
74 | * `FitParseError` message contains more details upon malformed file in some
75 | cases
76 | * FIT SDK profile upgraded to v21.16
77 | * README's usage example slightly improved
78 |
79 |
80 | v0.5.0 (2019-04-11)
81 | ===================
82 |
83 | * Added `fitdecode.DataProcessorBase` class
84 | * ``check_crc`` - the parameter to `fitdecode.FitReader`'s constructor - can now
85 | be either "enabled", "read-only" or "disabled" (issue #1)
86 | * Minor speed improvements
87 |
88 |
89 | v0.4.0 (2019-04-10)
90 | ===================
91 |
92 | * Added `fitdecode.FitDataMessage.has_field`
93 | * `fitdecode.FitDataMessage.get_fields` is now a generator
94 | * `fitdecode.FitDataMessage.get_values` is now a generator
95 | * `fitdecode.DefaultDataProcessor` now converts ``hr.event_timestamp`` values
96 | that were populated from ``hr.event_timestamp_12`` components to
97 | `datetime.datetime` objects for convenience
98 | * ``fitjson`` and ``fittxt`` utilities:
99 | * Added support for input files with Unicode characters
100 | * Still write output file even if an error occurred while parsing FIT file
101 | * Fixed handling of some FIT fields that are both scaled and components.
102 | See https://github.com/dtcooper/python-fitparse/issues/84
103 | * Improved support for malformed FIT files.
104 | See https://github.com/dtcooper/python-fitparse/issues/62
105 | * ``generate_profile`` utility slightly improved
106 | * Added some unit tests
107 | * Minor improvements and corrections
108 |
109 |
110 | v0.3.0 (2018-07-27)
111 | ===================
112 |
113 | * Added `fitdecode.utils.get_mesg_field`
114 | * Added `fitdecode.utils.get_mesg_field_num`
115 | * Minor improvements and corrections
116 |
117 |
118 | v0.2.0 (2018-07-16)
119 | ===================
120 |
121 | * Added `FieldData.name_or_num`
122 | * Added `FitDataMessage.get_fields`
123 | * Added `FitDataMessage.get_values`
124 | * Improved `FitDataMessage.get_field` (*idx* arg)
125 | * Improved `FitDataMessage.get_value` (*idx* arg)
126 | * Completed documentation of `FitDataMessage`
127 | * Improved documentation of `FieldData`
128 | * `FitReader`'s internal state is reset as well after a `FitCRC` has been
129 | yielded (i.e. not only when a FIT header is about to be read), in order to
130 | avoid incorrect behavior due to malformed FIT stream
131 |
132 |
133 | v0.1.0 (2018-07-14)
134 | ===================
135 |
136 | * Added class property ``frame_type`` (read-only) to `FitHeader`, `FitCRC`,
137 | `FitDefinitionMessage` and `FitDataMessage` (``records`` module) to ease and
138 | speed up type checking
139 | * Added `FitDataMessage.get_value` method
140 | * ``string`` values with no null byte are still decoded (in full length)
141 | * ``cmd`` directory added to the source code tree for convenience
142 |
143 |
144 | v0.0.1 (2018-07-08)
145 | ===================
146 |
147 | * First release
148 |
149 |
150 | v0.0.0 (2018-05-31)
151 | ===================
152 |
153 | * Birth!
154 |
--------------------------------------------------------------------------------
/fitdecode/utils.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Jean-Charles Lefebvre
2 | # SPDX-License-Identifier: MIT
3 |
4 | import re
5 | import time
6 |
7 | from . import profile
8 |
9 | __all__ = []
10 |
11 |
12 | METHOD_NAME_SCRUBBER = re.compile(r'\W|^(?=\d)')
13 | UNIT_NAME_TO_FUNC_REPLACEMENTS = (
14 | ('/', ' per '),
15 | ('%', 'percent'),
16 | ('*', ' times '))
17 |
18 | CRC_START = 0
19 | CRC_TABLE = (
20 | 0x0000, 0xcc01, 0xd801, 0x1400, 0xf001, 0x3c00, 0x2800, 0xe401,
21 | 0xa001, 0x6c00, 0x7800, 0xb401, 0x5000, 0x9c01, 0x8801, 0x4400)
22 |
23 |
24 | def scrub_method_name(method_name, convert_units=False):
25 | """Create a valid Python name out of *method_name*"""
26 | if convert_units:
27 | for replace_from, replace_to in UNIT_NAME_TO_FUNC_REPLACEMENTS:
28 | method_name = method_name.replace(replace_from, str(replace_to))
29 |
30 | return METHOD_NAME_SCRUBBER.sub('_', method_name)
31 |
32 |
33 | def get_mesg_type(mesg_name_or_num):
34 | """
35 | Get a :class:`fitdecode.MessageType` from ``profile``, by its name (`str`)
36 | or its global number (`int`).
37 |
38 | Raise `ValueError` if type was not found.
39 | """
40 | # assume mesg_num first
41 | try:
42 | return profile.MESSAGE_TYPES[mesg_name_or_num]
43 | except KeyError:
44 | pass
45 |
46 | for mesg_type in profile.MESSAGE_TYPES.values():
47 | if mesg_name_or_num == mesg_type.name:
48 | return mesg_type
49 |
50 | raise ValueError(f'message type "{mesg_name_or_num}" not found')
51 |
52 |
53 | def get_mesg_num(mesg_name):
54 | """
55 | Get the global number of a message as defined in ``profile``, by its name
56 |
57 | Raise `ValueError` if type was not found.
58 | """
59 | for mesg_type in profile.MESSAGE_TYPES.values():
60 | if mesg_name == mesg_type.name:
61 | return mesg_type.mesg_num
62 |
63 | raise ValueError(f'message type "{mesg_name}" not found')
64 |
65 |
66 | def get_mesg_field(mesg_name_or_num, field_name_or_num):
67 | """
68 | Get the :class:`fitdecode.types.Field` object of a particular field from a
69 | particular message.
70 |
71 | Raise `ValueError` if message or field was not found.
72 | """
73 | mesg_type = get_mesg_type(mesg_name_or_num)
74 | for field in mesg_type.fields:
75 | if field_name_or_num in (field.def_num, field.name):
76 | return field
77 |
78 | raise ValueError(
79 | f'field "{field_name_or_num}" not found in '
80 | f'message "{mesg_name_or_num}"')
81 |
82 |
83 | def get_mesg_field_num(mesg_name_or_num, field_name):
84 | """
85 | Get the definition number of a particular field from a particular message.
86 |
87 | Raise `ValueError` if message or field was not found.
88 | """
89 | mesg_type = get_mesg_type(mesg_name_or_num)
90 | for field in mesg_type.fields:
91 | if field.name == field_name:
92 | return field.def_num
93 |
94 | raise ValueError(
95 | f'field "{field_name}" not found in message "{mesg_name_or_num}"')
96 |
97 |
98 | def get_field_type(field_name):
99 | """
100 | Get :class:`fitdecode.FieldType` by name from ``profile``.
101 |
102 | Raise `ValueError` if type was not found.
103 | """
104 | try:
105 | return profile.FIELD_TYPE[field_name]
106 | except KeyError:
107 | raise ValueError(f'field type "{field_name}" not found')
108 |
109 |
110 | def compute_crc(byteslike, *, crc=CRC_START, start=0, end=None):
111 | """
112 |     Compute the CRC of a *byteslike* object, as per the FIT definition, from
113 |     offset *start* (included) to *end* (excluded)
114 | """
115 | if not end:
116 | end = len(byteslike)
117 |
118 | if start >= end:
119 | assert 0
120 | return crc
121 |
122 | # According to some performance tests, A is always (at least slightly)
123 |     # faster than B, whether with a high number of calls to this function or
124 | # with a high number of "for" iterations (CPython 3.6.5 x64 on Windows).
125 | #
126 | # A. for byte in memoryview(byteslike)[start:end]:
127 | # # ...
128 | #
129 | # B. for idx in range(start, end):
130 | # byte = byteslike[idx]
131 | # # ...
132 |
133 | for byte in memoryview(byteslike)[start:end]:
134 | tmp = CRC_TABLE[crc & 0xf]
135 | crc = (crc >> 4) & 0x0fff
136 | crc = crc ^ tmp ^ CRC_TABLE[byte & 0xf]
137 |
138 | tmp = CRC_TABLE[crc & 0xf]
139 | crc = (crc >> 4) & 0x0fff
140 | crc = crc ^ tmp ^ CRC_TABLE[(byte >> 4) & 0xf]
141 |
142 | return crc
143 |
144 |
145 | def blocking_read(istream, size=-1, nonblocking_reads_delay=0.06):
146 | """
147 | Read from *istream* and do not return until *size* `bytes` have been read
148 | unless EOF has been reached.
149 |
150 | Return all the data read so far. The length of the returned data may still
151 | be less than *size* in case EOF has been reached.
152 |
153 | *nonblocking_reads_delay* specifies the number of seconds (float) to wait
154 | before trying to read from *istream* again in case `BlockingIOError` has
155 | been raised during previous call.
156 | """
157 | assert size is None or (isinstance(size, int) and not isinstance(size, bool))
158 |
159 | if not size:
160 | return None
161 |
162 | output = []
163 | len_read = 0
164 |
165 | def _join():
166 | glue = '' if isinstance(output[0], str) else b''
167 | return glue.join(output)
168 |
169 | while True:
170 | try:
171 | chunk = istream.read(-1 if size < 0 else size - len_read)
172 |
173 | if size < 0:
174 | return chunk
175 | elif not chunk:
176 | if not output:
177 | return chunk
178 | else:
179 | return _join()
180 | else:
181 | assert size > 0
182 | output.append(chunk)
183 | len_read += len(chunk)
184 | if len_read >= size:
185 | return _join()
186 | except BlockingIOError:
187 | time.sleep(nonblocking_reads_delay)
188 |
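The CRC helper above is a streaming CRC-16: the value returned for one chunk can be fed back through the *crc* argument to continue over the next chunk. A minimal usage sketch (not part of the module; the payload bytes are made up, and ``record``/``heart_rate`` assume the generated FIT ``profile`` module)::

    import fitdecode.utils as utils

    payload = b'\x0e\x10\xd9\x07\x78\x05\x00\x00'   # made-up byte string

    # CRC over the whole buffer vs. the same CRC computed chunk by chunk
    crc = utils.compute_crc(payload)
    crc2 = utils.compute_crc(payload[4:], crc=utils.compute_crc(payload[:4]))
    assert crc == crc2

    # profile lookups raise ValueError for unknown names/numbers
    mesg_type = utils.get_mesg_type('record')
    hr_def_num = utils.get_mesg_field_num('record', 'heart_rate')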
--------------------------------------------------------------------------------
/fitdecode/cmd/fitjson.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright (c) Jean-Charles Lefebvre
3 | # SPDX-License-Identifier: MIT
4 |
5 | import argparse
6 | import datetime
7 | import json
8 | import re
9 | import sys
10 | import traceback
11 | import types
12 | from collections import OrderedDict
13 |
14 | import fitdecode
15 |
16 |
17 | class RecordJSONEncoder(json.JSONEncoder):
18 | def default(self, obj):
19 | if isinstance(obj, types.GeneratorType):
20 | return list(obj)
21 |
22 | if isinstance(obj, datetime.time):
23 | return obj.isoformat()
24 |
25 | if isinstance(obj, datetime.datetime):
26 | return obj.isoformat()
27 |
28 | if isinstance(obj, fitdecode.FitChunk):
29 | return OrderedDict((
30 | ('index', obj.index),
31 | ('offset', obj.offset),
32 | ('size', len(obj.bytes))))
33 |
34 | if isinstance(obj, fitdecode.types.FieldDefinition):
35 | return OrderedDict((
36 | ('name', obj.name),
37 | ('def_num', obj.def_num),
38 | ('type_name', obj.type.name),
39 | ('base_type_name', obj.base_type.name),
40 | ('size', obj.size)))
41 |
42 | if isinstance(obj, fitdecode.types.DevFieldDefinition):
43 | return OrderedDict((
44 | ('name', obj.name),
45 | ('dev_data_index', obj.dev_data_index),
46 | ('def_num', obj.def_num),
47 | ('type_name', obj.type.name),
48 | ('size', obj.size)))
49 |
50 | if isinstance(obj, fitdecode.types.FieldData):
51 | return OrderedDict((
52 | ('name', obj.name),
53 | ('value', obj.value),
54 | ('units', obj.units if obj.units else ''),
55 | ('def_num', obj.def_num),
56 | ('raw_value', obj.raw_value)))
57 |
58 | if isinstance(obj, fitdecode.FitHeader):
59 | crc = obj.crc if obj.crc else 0
60 | return OrderedDict((
61 | ('frame_type', 'header'),
62 | ('header_size', obj.header_size),
63 | ('proto_ver', obj.proto_ver),
64 | ('profile_ver', obj.profile_ver),
65 | ('body_size', obj.body_size),
66 | ('crc', f'{crc:#06x}'),
67 | ('crc_matched', obj.crc_matched),
68 | ('chunk', obj.chunk)))
69 |
70 | if isinstance(obj, fitdecode.FitCRC):
71 | return OrderedDict((
72 | ('frame_type', 'crc'),
73 | ('crc', f'{obj.crc:#06x}'),
74 | ('matched', obj.matched),
75 | ('chunk', obj.chunk)))
76 |
77 | if isinstance(obj, fitdecode.FitDefinitionMessage):
78 | return OrderedDict((
79 | ('frame_type', 'definition_message'),
80 | ('name', obj.name),
81 | ('header', OrderedDict((
82 | ('local_mesg_num', obj.local_mesg_num),
83 | ('time_offset', obj.time_offset),
84 | ('is_developer_data', obj.is_developer_data)))),
85 | ('global_mesg_num', obj.global_mesg_num),
86 | ('endian', obj.endian),
87 | ('field_defs', obj.field_defs),
88 | ('dev_field_defs', obj.dev_field_defs),
89 | ('chunk', obj.chunk)))
90 |
91 | if isinstance(obj, fitdecode.FitDataMessage):
92 | return OrderedDict((
93 | ('frame_type', 'data_message'),
94 | ('name', obj.name),
95 | ('header', OrderedDict((
96 | ('local_mesg_num', obj.local_mesg_num),
97 | ('time_offset', obj.time_offset),
98 | ('is_developer_data', obj.is_developer_data)))),
99 | ('fields', obj.fields),
100 | ('chunk', obj.chunk)))
101 |
102 | # fall back to original to raise a TypeError
103 | return super().default(obj)
104 |
105 |
106 | def parse_filter_args(arg_parser, filter_opt):
107 | FILTER_DESC = re.compile(r'^\s*([\+\-]?)\s*([^\s]+)\s*$', re.A)
108 |
109 | if not filter_opt:
110 | return filter_opt, None
111 |
112 | filtr = {} # {msg_num: bool_include}
113 | default_include_policy = False
114 |
115 | for desc in filter_opt:
116 | msg = None
117 | rem = FILTER_DESC.fullmatch(desc)
118 | if rem:
119 | include = False if rem[1] and rem[1] == '-' else True
120 | msg = rem[2].lower()
121 |
122 | if not include:
123 | default_include_policy = True
124 |
125 | try:
126 | msg = fitdecode.utils.get_mesg_num(msg)
127 | except ValueError:
128 | try:
129 | msg = int(msg, base=0)
130 | except ValueError:
131 | msg = None
132 |
133 | if msg is None:
134 | arg_parser.error(f'malformed filter: "{desc}"')
135 | sys.exit(1)
136 |
137 | filtr[msg] = include
138 |
139 | return filtr, default_include_policy
140 |
141 |
142 | def parse_args(args=None):
143 | parser = argparse.ArgumentParser(
144 | description='Dump a FIT file to JSON format',
145 | epilog=f'fitdecode version {fitdecode.__version__}',
146 | allow_abbrev=False)
147 |
148 | parser.add_argument(
149 | '--output', '-o', type=argparse.FileType(mode='wt', encoding='utf-8'),
150 | default='-',
151 | help='File to output data into (defaults to stdout)')
152 |
153 | parser.add_argument(
154 | '--pretty', action='store_true',
155 | help='Prettify JSON output.')
156 |
157 | parser.add_argument(
158 | '--nocrc', action='store_const',
159 | const=fitdecode.CrcCheck.DISABLED,
160 | default=fitdecode.CrcCheck.WARN,
161 |         help='Some devices seem to write invalid CRCs; ignore them.')
162 |
163 | parser.add_argument(
164 | '--nodef', action='store_true',
165 | help='Do not output FIT local message definitions.')
166 |
167 | parser.add_argument(
168 | '--nounk', action='store_true',
169 | help='Do not output unknown FIT messages (e.g. "unknown_140")')
170 |
171 | parser.add_argument(
172 | '--filter', '-f', action='append',
173 | help=(
174 | 'Message name(s) (or global numbers) to filter-in or out, '
175 | 'depending on sign prefix. Examples: "-record" to exclude record '
176 | 'messages; "+file_id" or "file_id" to include file_id messages.'))
177 |
178 | parser.add_argument(
179 | 'infile', metavar='FITFILE', type=argparse.FileType(mode='rb'),
180 | help='Input .FIT file (use - for stdin)')
181 |
182 | options = parser.parse_args(args)
183 | options.filter, options.default_filter = \
184 | parse_filter_args(parser, options.filter)
185 |
186 | return options
187 |
188 |
189 | def main(args=None):
190 | options = parse_args(args)
191 |
192 | frames = []
193 |
194 | try:
195 | with fitdecode.FitReader(
196 | options.infile,
197 | processor=fitdecode.StandardUnitsDataProcessor(),
198 | check_crc=options.nocrc,
199 | keep_raw_chunks=True) as fit:
200 | for frame in fit:
201 | if (options.nodef and
202 | frame.frame_type == fitdecode.FIT_FRAME_DEFINITION):
203 | continue
204 |
205 | if (options.nounk and
206 | frame.frame_type in (
207 | fitdecode.FIT_FRAME_DEFINITION,
208 | fitdecode.FIT_FRAME_DATA) and
209 | frame.mesg_type is None):
210 | continue
211 |
212 | if (options.filter and
213 | frame.frame_type in (
214 | fitdecode.FIT_FRAME_DEFINITION,
215 | fitdecode.FIT_FRAME_DATA)):
216 | try:
217 | include = options.filter[frame.global_mesg_num]
218 | except KeyError:
219 | include = options.default_filter
220 |
221 | if not include:
222 | continue
223 |
224 | frames.append(frame)
225 | except Exception:
226 | print(
227 |             'WARNING: the following error occurred while parsing the FIT file. '
228 | 'Output file might be incomplete or corrupted.',
229 | file=sys.stderr)
230 | print('', file=sys.stderr)
231 | traceback.print_exc()
232 |
233 | indent = '\t' if options.pretty else None
234 | json.dump(frames, fp=options.output, cls=RecordJSONEncoder, indent=indent)
235 |
236 | return 0
237 |
238 |
239 | if __name__ == '__main__':
240 | sys.exit(main())
241 |
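The parser above can also be driven programmatically through ``main()``, which mirrors a command line such as ``fitjson --pretty --nodef -f +record -o out.json Activity.fit``. A sketch (file names are placeholders)::

    from fitdecode.cmd import fitjson

    fitjson.main([
        '--pretty',        # indent the JSON output with tabs
        '--nodef',         # skip FIT definition messages
        '-f', '+record',   # keep only "record" data messages
        '-o', 'out.json',  # output file (placeholder name)
        'Activity.fit',    # input file (placeholder name)
    ])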
--------------------------------------------------------------------------------
/fitdecode/processors.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Jean-Charles Lefebvre
2 | # SPDX-License-Identifier: MIT
3 |
4 | import datetime
5 |
6 | from . import profile
7 | from .utils import scrub_method_name
8 |
9 | __all__ = [
10 | 'FIT_UTC_REFERENCE', 'FIT_DATETIME_MIN',
11 | 'DataProcessorBase', 'DefaultDataProcessor', 'StandardUnitsDataProcessor']
12 |
13 |
14 | #: Datetimes (uint32) represent seconds since this ``FIT_UTC_REFERENCE``
15 | #: (unix timestamp for UTC 00:00 Dec 31 1989).
16 | FIT_UTC_REFERENCE = 631065600
17 |
18 | #: ``date_time`` typed fields for which value is below ``FIT_DATETIME_MIN``
19 | #: represent the number of seconds elapsed since device power on.
20 | FIT_DATETIME_MIN = 0x10000000
21 |
22 |
23 | class DataProcessorBase:
24 | """
25 | Data processing base class.
26 |
27 | This class does nothing. It is meant to be derived.
28 |
29 | The following methods are called by :class:`fitdecode.FitReader`:
30 |
31 | * `on_header`, each time a :class:`fitdecode.FitHeader` is reached
32 | * `on_crc`, each time a :class:`fitdecode.FitCRC` (the FIT footer) is
33 | reached
34 |
35 | This is convenient if you wish to reset some context-sensitive state in-
36 | between two chained FIT files for example.
37 |
38 | Bear in mind that a malformed/corrupted file may miss either of these
39 | entities (header and/or CRC footer).
40 |
41 | Also, the following methods are called (still by
42 | :class:`fitdecode.FitReader`) for each field of every data message, in that
43 | order:
44 |
45 | * `on_process_type`
46 | * `on_process_field`
47 | * `on_process_unit`
48 | * `on_process_message`
49 |
50 |     By default, the above processor methods call the following methods if they
51 |     exist (resolved method names are cached internally)::
52 | 
53 |         def process_type_<type_name>(reader, field_data)
54 |         def process_field_<field_name>(reader, field_data)  # could be unknown_XYZ but NOT recommended  # noqa
55 |         def process_units_<unit_name>(reader, field_data)
56 |         def process_message_<mesg_name>(reader, data_message)
57 |
58 | ``process_*`` methods are not expected to return any value and may alter
59 | the content of the passed *field_data* (:class:`fitdecode.FieldData`) and
60 | *data_message* (:class:`fitdecode.FitDataMessage`) arguments if needed.
61 |
62 | .. seealso:: `DefaultDataProcessor`, `StandardUnitsDataProcessor`
63 | """
64 |
65 | def __init__(self):
66 | self._method_cache = {}
67 |
68 | def on_header(self, reader, fit_header):
69 | pass
70 |
71 | def on_crc(self, reader, fit_crc):
72 | pass
73 |
74 | def on_process_type(self, reader, field_data):
75 | self._run_processor(
76 | f'process_type_{field_data.type.name}',
77 | reader, field_data)
78 |
79 | def on_process_field(self, reader, field_data):
80 | if field_data.name:
81 | self._run_processor(
82 | f'process_field_{field_data.name}',
83 | reader, field_data)
84 |
85 | def on_process_unit(self, reader, field_data):
86 | if field_data.units:
87 | self._run_processor(
88 | f'process_units_{field_data.units}',
89 | reader, field_data)
90 |
91 | def on_process_message(self, reader, data_message):
92 | self._run_processor(
93 | f'process_message_{data_message.def_mesg.name}',
94 | reader, data_message)
95 |
96 | def _run_processor(self, method_name, reader, data):
97 | method = self._resolve_method(method_name)
98 | if method is not None:
99 | method(reader, data)
100 |
101 | def _resolve_method(self, method_name):
102 | try:
103 | return self._method_cache[method_name]
104 | except KeyError:
105 | pass
106 |
107 | scrubbed_method_name = scrub_method_name(method_name)
108 | method = getattr(self, scrubbed_method_name, None)
109 |
110 | self._method_cache[method_name] = method
111 |
112 | return method
113 |
114 |
115 | class DefaultDataProcessor(DataProcessorBase):
116 | """
117 | This is the default data processor used by :class:`fitdecode.FitReader`. It
118 | derives from :class:`DataProcessorBase`.
119 |
120 | This data processor converts some raw values to more comfortable ones.
121 |
122 | .. seealso:: `StandardUnitsDataProcessor`, `DataProcessorBase`
123 | """
124 |
125 | def __init__(self):
126 | super().__init__()
127 |
128 | def process_type_bool(self, reader, field_data):
129 | """Just `bool` any ``bool`` typed FIT field unless value is `None`"""
130 | if field_data.value is not None:
131 | field_data.value = bool(field_data.value)
132 |
133 | def process_type_date_time(self, reader, field_data):
134 | """
135 |         Convert ``date_time`` typed field values into a `datetime.datetime`
136 |         object if possible.
137 | 
138 |         That is, if value is not `None` and greater than or equal to
139 |         `FIT_DATETIME_MIN`.
140 |
141 | The resulting `datetime.datetime` object is timezone-aware (UTC).
142 | """
143 | if (field_data.value is not None and
144 | field_data.value >= FIT_DATETIME_MIN):
145 | field_data.value = datetime.datetime.fromtimestamp(
146 | FIT_UTC_REFERENCE + field_data.value,
147 | datetime.timezone.utc)
148 | field_data.units = None # units were 's', set to None
149 |
150 | def process_type_local_date_time(self, reader, field_data):
151 | """
152 |         Convert ``local_date_time`` typed field values into a
153 |         `datetime.datetime` object unless value is `None`.
154 |
155 | The resulting `datetime.datetime` object **IS NOT** timezone-aware, but
156 | this method assumes UTC at object construction to ensure consistency.
157 | """
158 | if field_data.value is not None:
159 | # This value was created on the device using its local timezone.
160 | # Unless we know that timezone, this value won't be correct.
161 | # However, if we assume UTC, at least it'll be consistent.
162 | field_data.value = datetime.datetime.fromtimestamp(
163 | FIT_UTC_REFERENCE + field_data.value,
164 | datetime.timezone.utc)
165 | field_data.units = None
166 |
167 | def process_type_localtime_into_day(self, reader, field_data):
168 | """
169 |         Convert ``localtime_into_day`` typed field values into a
170 |         `datetime.time` object unless value is `None`.
171 | """
172 | if field_data.value is not None:
173 |             # Values greater than or equal to 86400 should not be possible.
174 | # Additionally, if the value is exactly 86400, it will lead to an
175 | # error when trying to create the time with datetime.time(24, 0, 0).
176 | #
177 | # E.g. Garmin does add "sleep_time": 86400 to its fit files, which
178 | # causes an error if not properly handled.
179 | if field_data.value >= 86400:
180 | field_data.value = datetime.time.max
181 | else:
182 | m, s = divmod(field_data.value, 60)
183 | h, m = divmod(m, 60)
184 | field_data.value = datetime.time(h, m, s)
185 | field_data.units = None
186 |
187 | def process_message_hr(self, reader, data_message):
188 | """
189 |         Convert populated ``event_timestamp`` component values of ``hr``
190 |         messages to `datetime.datetime` objects
191 | """
192 | # We want to convert only populated *event_timestamp* fields that were
193 | # originally computed from the *event_timestamp_12* value
194 | if data_message.has_field(profile.FIELD_NUM_HR_EVENT_TIMESTAMP_12):
195 | for field_data in data_message.get_fields(
196 | profile.FIELD_NUM_HR_EVENT_TIMESTAMP):
197 | if field_data is not None:
198 | field_data.value = datetime.datetime.fromtimestamp(
199 | FIT_UTC_REFERENCE + field_data.value,
200 | datetime.timezone.utc)
201 | field_data.units = None # units were 's', set to None
202 |
203 |
204 | class StandardUnitsDataProcessor(DefaultDataProcessor):
205 | """
206 | A `DefaultDataProcessor` that also:
207 |
208 | * Converts ``distance`` and ``total_distance`` fields to ``km``
209 | (standard's default is ``m``)
210 |     * Converts all ``speed`` and ``*_speed`` fields (by name) to ``km/h``
211 | (standard's default is ``m/s``)
212 | * Converts GPS coordinates (i.e. FIT's semicircles type) to ``deg``
213 |
214 | .. seealso:: `DefaultDataProcessor`, `DataProcessorBase`
215 | """
216 |
217 | def __init__(self):
218 | super().__init__()
219 |
220 | def on_process_field(self, reader, field_data):
221 | """
222 | Convert all ``*_speed`` fields using `process_field_speed`.
223 |
224 |         All other fields are handled by the default method.
225 | """
226 | if field_data.name and field_data.name.endswith('_speed'):
227 | self.process_field_speed(reader, field_data)
228 | else:
229 | super().on_process_field(reader, field_data)
230 |
231 | def process_field_distance(self, reader, field_data):
232 | if field_data.value is not None:
233 | field_data.value /= 1000.0
234 | field_data.units = 'km'
235 |
236 | def process_field_total_distance(self, reader, field_data):
237 | self.process_field_distance(reader, field_data)
238 |
239 | def process_field_speed(self, reader, field_data):
240 | if field_data.value is not None:
241 | factor = 60.0 * 60.0 / 1000.0
242 |
243 | # record.enhanced_speed field can be a tuple...
244 | # see https://github.com/dtcooper/python-fitparse/issues/62
245 | if isinstance(field_data.value, (tuple, list)):
246 | field_data.value = tuple(x * factor for x in field_data.value)
247 | else:
248 | field_data.value *= factor
249 |
250 | field_data.units = 'km/h'
251 |
252 | def process_units_semicircles(self, reader, field_data):
253 | if field_data.value is not None:
254 | field_data.value *= 180.0 / (2 ** 31)
255 | field_data.units = 'deg'
256 |
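Since the ``process_*`` methods above are resolved by name, adding a handler is just a matter of defining a method on a subclass. A minimal sketch (``heart_rate`` is assumed to be a field name from the standard FIT profile; the file name is a placeholder)::

    import fitdecode


    class MyProcessor(fitdecode.StandardUnitsDataProcessor):

        def process_field_heart_rate(self, reader, field_data):
            # called for every field named "heart_rate" (assumed profile name)
            if field_data.value == 0:
                field_data.value = None  # treat 0 bpm as "no reading"

        def process_units_semicircles(self, reader, field_data):
            # refine the degree conversion inherited from
            # StandardUnitsDataProcessor
            super().process_units_semicircles(reader, field_data)
            if isinstance(field_data.value, float):
                field_data.value = round(field_data.value, 6)


    with open('Activity.fit', 'rb') as fin:
        with fitdecode.FitReader(fin, processor=MyProcessor()) as fit:
            for frame in fit:
                pass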
--------------------------------------------------------------------------------
/docs/make.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright (c) Jean-Charles Lefebvre
3 | # SPDX-License-Identifier: MIT
4 |
5 | import argparse
6 | import contextlib
7 | import functools
8 | import http.server
9 | import os
10 | import os.path
11 | import pathlib
12 | import shutil
13 | import subprocess
14 | import sys
15 | import threading
16 | import types
17 | import webbrowser
18 |
19 |
20 | # Sphinx's command scripts, as defined by Sphinx's own setup.py
21 | #
22 | # Unfortunately we cannot just go the pythonic way by importing Sphinx's main
23 | # modules and executing them in our process space. This is because Sphinx
24 | # actually imports the target modules in order to process them, which implies
25 | # the usual issues caused by the way Python's import machinery is implemented,
26 | # at least in CPython.
27 | #
28 | # For instance, some code artifacts may remain in memory between two calls to
29 | # the sphinx.cmd.build command, with docstrings referencing a class that no
30 | # longer exists since the last run, due to a renaming or a move to another
31 | # namespace for instance. This invariably leads to Sphinx behaving
32 | # inconsistently and complaining about unknown artifacts.
33 | SPHINX_SCRIPT_BUILD = "sphinx.cmd.build"
34 | # SPHINX_SCRIPT_APIDOC = "sphinx.ext.apidoc"
35 | # SPHINX_SCRIPT_AUTOSUMMARY = "sphinx.ext.autosummary.generate"
36 |
37 |
38 | THIS_DIR = os.path.abspath(os.path.dirname(__file__))
39 |
40 | SPHINX_SOURCE_DIR = THIS_DIR
41 | SPHINX_BUILD_DIR = os.path.join(THIS_DIR, "_build")
42 |
43 | SPHINX_HTML_BUILD_DIR = os.path.join(SPHINX_BUILD_DIR, "html")
44 | SPHINX_HTML_STATIC_DIR = os.path.join(THIS_DIR, "_static")
45 |
46 |
47 | class QuietHTTPHandler(http.server.SimpleHTTPRequestHandler):
48 | def log_message(self, format, *args):
49 | pass
50 |
51 |
52 | def die(*objs, exit_code=1, **kwargs):
53 | kwargs["file"] = sys.stderr
54 | kwargs["flush"] = True
55 | print("ERROR:", *objs, **kwargs)
56 | sys.exit(exit_code)
57 |
58 |
59 | def get_python_cmd():
60 | if sys.executable:
61 | return sys.executable
62 |
63 | return "python" + str(sys.version_info[0])
64 |
65 |
66 | def _invoke_sphinx_build(*args, **kwargs):
67 | kwargs.setdefault("cwd", THIS_DIR)
68 | cmd = [get_python_cmd(), "-B", "-m", SPHINX_SCRIPT_BUILD] + list(args)
69 | res = subprocess.run(cmd, **kwargs)
70 | return res.returncode
71 |
72 |
73 | def _pre_sphinx_html(context):
74 | """Everything that must be done before a call to ``sphinx-build -b html``"""
75 | # sphinx-build fails if these directories do not exist
76 | for path in (SPHINX_BUILD_DIR, SPHINX_HTML_STATIC_DIR):
77 | with contextlib.suppress(FileExistsError):
78 | os.mkdir(path)
79 |
80 |
81 | def _post_sphinx_html(context):
82 | """Everything that must be done after a call to ``sphinx-build -b html``"""
83 | pass
84 |
85 |
86 | def _clean_sphinx_html():
87 | with contextlib.suppress(FileNotFoundError):
88 | shutil.rmtree(SPHINX_HTML_BUILD_DIR)
89 |
90 |
91 | def _clean_sphinx_all():
92 | with contextlib.suppress(FileNotFoundError):
93 | shutil.rmtree(SPHINX_BUILD_DIR)
94 |
95 | with contextlib.suppress(FileNotFoundError, OSError): # OSError: dir not empty
96 | os.rmdir(SPHINX_HTML_STATIC_DIR)
97 |
98 |
99 | def do_clean(args):
100 | _clean_sphinx_all()
101 |
102 |
103 | def do_build_html(args):
104 | http_server = None
105 | http_thread = None
106 | http_url = None
107 | home_html_file = pathlib.Path(SPHINX_HTML_BUILD_DIR, "index.html")
108 |
109 | # cleanup first?
110 | if args.rebuild:
111 | _clean_sphinx_html()
112 |
113 | leave = False
114 | try:
115 | while not leave:
116 | if args.serve:
117 | print("")
118 | print("")
119 | print("*" * 75)
120 | print("*" * 75)
121 |
122 | # generate documentation
123 | context = types.SimpleNamespace()
124 | _pre_sphinx_html(context)
125 | # exit_code = sphinx_build_cmd.main([
126 | # "-b", "html", SPHINX_SOURCE_DIR, SPHINX_HTML_BUILD_DIR] +
127 | # args.extra_args)
128 | exit_code = _invoke_sphinx_build(
129 | "-b", "html", SPHINX_SOURCE_DIR, SPHINX_HTML_BUILD_DIR,
130 | *args.extra_args)
131 | _post_sphinx_html(context)
132 | del context
133 |
134 | print("")
135 | if home_html_file.exists():
136 | print("HTML home:", home_html_file.as_uri())
137 |
138 |             # leave the interactive loop if --serve was not specified
139 | if not args.serve:
140 | # ... but open web browser before if requested
141 | # it feels more consistent to test the existence of the
142 |                 # index.html file than doing an exit_code == 0 test here
143 | if args.browse:
144 | if home_html_file.exists():
145 | webbrowser.open_new(home_html_file.as_uri())
146 |
147 | return exit_code
148 |
149 | # spacer
150 | print("")
151 |
152 | # launch the http service if not up already
153 | if not http_thread:
154 | # prepare http handler class
155 | http_handler_class = functools.partial(
156 | QuietHTTPHandler, directory=SPHINX_HTML_BUILD_DIR)
157 | # http_handler_class.protocol_version = "HTTP/1.0"
158 |
159 | # create the http server object
160 | http_server = http.server.ThreadingHTTPServer(
161 | (args.bind, args.port), http_handler_class)
162 |
163 | # create thread object
164 | http_thread = threading.Thread(target=http_server.serve_forever)
165 | http_thread.daemon = True
166 |
167 | # launch thread
168 | http_thread.start()
169 |
170 | # build http url to docs
171 | if not args.bind or args.bind == "0.0.0.0":
172 | url_host = "localhost"
173 | elif ":" in args.bind:
174 | url_host = "[" + args.bind + "]"
175 | else:
176 | url_host = args.bind
177 | http_url = "http://{}:{}/".format(url_host, args.port)
178 | del url_host
179 |
180 | print("Listening on", http_url)
181 | print("")
182 |
183 | # enter interactive mode
184 | while True:
185 | try:
186 | ans = input(
187 | "(B)uild, (R)ebuild, bro(W)se, (C)lean, (Q)uit? [B] ")
188 | ans = ans.strip().lower()
189 |
190 | if not ans or ans == "b":
191 | print("")
192 | break
193 | elif ans == "r":
194 | _clean_sphinx_html()
195 | print("")
196 | break
197 | elif ans == "w":
198 | webbrowser.open_new(http_url)
199 | continue
200 | elif ans == "c":
201 | _clean_sphinx_all() # _clean_sphinx_html()
202 | continue
203 | elif ans in ("q", "quit", "exit"):
204 | leave = True
205 | break
206 | else:
207 | print("Invalid input. Try again.")
208 | continue
209 | except KeyboardInterrupt:
210 | leave = True
211 | break
212 | except Exception as exc:
213 | print("An exception occurred:", str(exc))
214 | print("")
215 | finally:
216 | if http_server:
217 | print("Shutting down HTTP server...")
218 | http_server.shutdown() # blocking call
219 | print("HTTP server shut down.")
220 |
221 |
222 | def do_unknown(args):
223 | die("unknown action or no action specified")
224 | return 1
225 |
226 |
227 | def main(args=None):
228 | if args is None:
229 | args = sys.argv[1:]
230 |
231 | # split args if a double dash "--" is found
232 | try:
233 | idx = args.index("--")
234 | extra_args = args[idx+1:]
235 | args = args[0:idx]
236 | except ValueError:
237 | extra_args = []
238 |
239 | parser = argparse.ArgumentParser(allow_abbrev=False)
240 | parser.set_defaults(
241 | action_func=do_unknown,
242 | extra_args=extra_args)
243 | subparsers = parser.add_subparsers()
244 |
245 | # action: clean
246 | parser_clean = subparsers.add_parser(
247 | "clean", aliases=[], allow_abbrev=False,
248 | help="Cleanup documentation output directory")
249 | parser_clean.set_defaults(action_func=do_clean)
250 |
251 | # action: html
252 | parser_html = subparsers.add_parser(
253 | "html", aliases=[], allow_abbrev=False,
254 | help="Generate documentation in HTML format")
255 | parser_html.add_argument(
256 | "--browse", "-w", action="store_true",
257 | help=(
258 | "Launch system's default web browser with the homepage of the "
259 | "generated documentation open. Only if --serve is not used."))
260 | parser_html.add_argument(
261 | "--rebuild", "-r", action="store_true",
262 | help=(
263 | "Rebuild documentation instead of just updating it. This fully "
264 | "cleanups the output directory first."))
265 | parser_html.add_argument(
266 | "--serve", "-s", action="store_true",
267 | help=(
268 | "Launch an HTTP service to serve the result and enter in "
269 | "interactive mode"))
270 | parser_html.add_argument(
271 | "--bind", "-b", metavar="HOST", default="localhost",
272 | help="The local HTTP host to bind to (default: %(default)s)")
273 | parser_html.add_argument(
274 | "--port", "-p", metavar="PORT", default=8008, type=int,
275 | help="The HTTP listen port (default: %(default)s)")
276 | parser_html.set_defaults(action_func=do_build_html)
277 |
278 | args = parser.parse_args(args)
279 |
280 | return args.action_func(args)
281 |
282 |
283 | if __name__ == "__main__":
284 | if not sys.flags.optimize:
285 | sys.dont_write_bytecode = True
286 |
287 | sys.exit(main(sys.argv[1:]))
288 |
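The script above is meant to be launched from the repository root; anything after ``--`` is forwarded verbatim to ``sphinx-build``. A sketch of driving it from another Python script (assuming the working directory is the repository root)::

    import subprocess
    import sys

    # rebuild the HTML docs from scratch, forwarding -W ("treat warnings as
    # errors") to sphinx-build
    subprocess.run(
        [sys.executable, 'docs/make.py', 'html', '--rebuild', '--', '-W'],
        check=True)

    # or build and serve them on http://localhost:8008/ (interactive loop):
    # subprocess.run([sys.executable, 'docs/make.py', 'html', '--serve'])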
--------------------------------------------------------------------------------
/fitdecode/records.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Jean-Charles Lefebvre
2 | # SPDX-License-Identifier: MIT
3 |
4 | import itertools
5 |
6 | __all__ = [
7 | 'FitChunk', 'FitHeader', 'FitCRC', 'FitDefinitionMessage', 'FitDataMessage',
8 | 'FIT_FRAME_HEADER', 'FIT_FRAME_CRC',
9 | 'FIT_FRAME_DEFINITION', 'FIT_FRAME_DATA',
10 | 'FIT_FRAME_DEFMESG', 'FIT_FRAME_DATAMESG']
11 |
12 |
13 | _UNSET = object()
14 |
15 | FIT_FRAME_HEADER = 1
16 | FIT_FRAME_CRC = 2
17 | FIT_FRAME_DEFINITION = 3
18 | FIT_FRAME_DATA = 4
19 |
20 | # keep aliases for backward compatibility
21 | FIT_FRAME_DEFMESG = FIT_FRAME_DEFINITION
22 | FIT_FRAME_DATAMESG = FIT_FRAME_DATA
23 |
24 |
25 | class FitChunk:
26 | __slots__ = ('index', 'offset', 'bytes')
27 |
28 | def __init__(self, index, offset, bytes):
29 | self.index = index #: zero-based index of this frame in the file
30 | self.offset = offset #: the offset at which this frame starts in the file
31 | self.bytes = bytes #: the frame itself as a `bytes` object
32 |
33 |
34 | class FitHeader:
35 | frame_type = FIT_FRAME_HEADER
36 |
37 | __slots__ = (
38 | 'header_size', 'proto_ver', 'profile_ver', 'body_size',
39 | 'crc', 'crc_matched', 'chunk')
40 |
41 | def __init__(
42 | self, header_size, proto_ver, profile_ver, body_size, crc,
43 | crc_matched, chunk):
44 | self.header_size = header_size
45 | self.proto_ver = proto_ver
46 | self.profile_ver = profile_ver
47 | self.body_size = body_size
48 | self.crc = crc #: may be null
49 | self.crc_matched = crc_matched
50 | #: `FitChunk` or `None` (depends on ``keep_raw_chunks`` option)
51 | self.chunk = chunk
52 |
53 |
54 | class FitCRC:
55 | frame_type = FIT_FRAME_CRC
56 |
57 | __slots__ = ('crc', 'matched', 'chunk')
58 |
59 | def __init__(self, crc, matched, chunk):
60 | self.crc = crc
61 | self.matched = matched
62 |
63 | #: `FitChunk` or `None` (depends on ``keep_raw_chunks`` option)
64 | self.chunk = chunk
65 |
66 |
67 | class FitDefinitionMessage:
68 | frame_type = FIT_FRAME_DEFMESG
69 |
70 | __slots__ = (
71 | # record header
72 | 'is_developer_data',
73 | 'local_mesg_num',
74 | 'time_offset',
75 |
76 | # payload
77 | 'mesg_type',
78 | 'global_mesg_num',
79 | 'endian',
80 | 'field_defs',
81 | 'dev_field_defs',
82 |
83 | 'chunk')
84 |
85 | def __init__(
86 | self, is_developer_data, local_mesg_num, time_offset, mesg_type,
87 | global_mesg_num, endian, field_defs, dev_field_defs, chunk):
88 | self.is_developer_data = is_developer_data
89 | self.local_mesg_num = local_mesg_num
90 | self.time_offset = time_offset
91 | self.mesg_type = mesg_type
92 | self.global_mesg_num = global_mesg_num
93 | self.endian = endian
94 | self.field_defs = field_defs #: list of `FieldDefinition`
95 | self.dev_field_defs = dev_field_defs #: list of `DevFieldDefinition`
96 | #: `FitChunk` or `None` (depends on ``keep_raw_chunks`` option)
97 | self.chunk = chunk
98 |
99 | @property
100 | def name(self):
101 | if self.mesg_type is not None:
102 | return self.mesg_type.name
103 | else:
104 | return f'unknown_{self.global_mesg_num}'
105 |
106 | @property
107 | def all_field_defs(self):
108 | if not self.dev_field_defs:
109 | return self.field_defs
110 | return itertools.chain(self.field_defs, self.dev_field_defs)
111 |
112 |
113 | class FitDataMessage:
114 | frame_type = FIT_FRAME_DATAMESG
115 |
116 | __slots__ = (
117 | # record header
118 | 'is_developer_data',
119 | 'local_mesg_num',
120 | 'time_offset',
121 |
122 | 'def_mesg',
123 | 'fields',
124 | 'chunk')
125 |
126 | def __init__(
127 | self, is_developer_data, local_mesg_num, time_offset, def_mesg,
128 | fields, chunk):
129 | #: Is this a "developer" message?
130 | self.is_developer_data = is_developer_data
131 |
132 | #: The **local** definition number of this message
133 | self.local_mesg_num = local_mesg_num
134 |
135 | #: Time offset in case header was compressed. `None` otherwise.
136 | self.time_offset = time_offset
137 |
138 | #: `FitDefinitionMessage`
139 | self.def_mesg = def_mesg
140 |
141 | #: list of `FieldData`
142 | self.fields = fields
143 |
144 | #: `FitChunk` or `None` (depends on ``keep_raw_chunks`` option)
145 | self.chunk = chunk
146 |
147 | def __iter__(self):
148 | """Iterate over the `FieldData` object in this mesage"""
149 | return iter(self.fields)
150 |
151 | @property
152 | def name(self):
153 | """Message name"""
154 | return self.def_mesg.name
155 |
156 | @property
157 | def global_mesg_num(self):
158 | """The **global** definition number of this message"""
159 | return self.def_mesg.global_mesg_num
160 |
161 | @property
162 | def mesg_type(self):
163 | """The `MessageType` object this message is associated to"""
164 | return self.def_mesg.mesg_type
165 |
166 | def has_field(self, field_name_or_num):
167 | """
168 | Is the desired field present in this message?
169 |
170 | *field_name_or_num* is the name of the field (`str`), or its definition
171 | number (`int`).
172 |
173 | .. seealso:: `get_field`, `get_fields`, `get_value`, `get_values`
174 | """
175 | for field in self.fields:
176 | if field.is_named(field_name_or_num):
177 | return True
178 |
179 | return False
180 |
181 | def get_field(self, field_name_or_num, idx=0):
182 | """
183 | Get the desired `FieldData` object.
184 |
185 | *field_name_or_num* is the name of the field (`str`), or its definition
186 | number (`int`).
187 |
188 | *idx* is the zero-based index of the specified field **among other
189 | fields with the same name/number**. I.e. not the index of the field in
190 | the list of fields of this message. That is, ``idx=0`` is the first
191 | *field_name_or_num* field found in this message.
192 |
193 | *idx* is useful in case a message contains multiple fields with the same
194 | *field_name_or_num*.
195 |
196 | .. seealso:: `get_fields`, `get_value`, `get_values`, `has_field`
197 | """
198 | current_idx = -1
199 | for field in self.fields:
200 | if field.is_named(field_name_or_num):
201 | current_idx += 1
202 | if current_idx == idx:
203 | return field
204 |
205 | raise KeyError(
206 | f'field "{field_name_or_num}" (idx #{idx}) not found in '
207 | f'message "{self.name}"')
208 |
209 | def get_fields(self, field_name_or_num):
210 | """
211 | Like `get_field` but **yield** every `FieldData` object matching
212 |         *field_name_or_num* in this message (i.e. this is a generator).
213 |
214 | .. seealso:: `get_field`, `get_value`, `get_values`, `has_field`
215 | """
216 | for field in self.fields:
217 | if field.is_named(field_name_or_num):
218 | yield field
219 |
220 | def get_raw_value(
221 | self, field_name_or_num, *, idx=0, fallback=_UNSET, raw_value=True,
222 | fit_type=None, py_type=_UNSET):
223 | return self.get_value(
224 | field_name_or_num, idx=idx, fallback=fallback, raw_value=raw_value,
225 | fit_type=fit_type, py_type=py_type)
226 |
227 | def get_value(
228 | self, field_name_or_num, *, idx=0, fallback=_UNSET, raw_value=False,
229 | fit_type=None, py_type=_UNSET):
230 | """
231 | Get the value (or raw_value) of a field specified by its name or its
232 | definition number (*field_name_or_num*), with optional type checking.
233 |
234 |         *idx* has the same meaning as for `get_field`.
235 |
236 | *fallback* can be specified to avoid `KeyError` being raised in case no
237 | field matched *field_name_or_num*.
238 |
239 | *fit_type* can be a `str` to indicate a given FIT type is expected (as
240 | defined in FIT profile; e.g. ``date_time``, ``manufacturer``, ...), in
241 | which case `TypeError` may be raised in case of a type mismatch.
242 |
243 | *py_type* can be a Python type or a `tuple` of types to expect (as
244 | passed to `isinstance`), in which case `TypeError` may be raised in case
245 | of a type mismatch.
246 |
247 | *raw_value* can be set to a true value so that the returned value is
248 | field's ``raw_value`` property instead of ``value``. This does not
249 | impact the way *fit_type* and *py_type* are interpreted.
250 |
251 | Special case: *field_name_or_num* can be `None`, in which case the field
252 | will be selected using *idx* only. In this case, *idx* is interpreted to
253 | be the zero-based index in the list of fields.
254 |
255 | .. seealso:: `get_values`, `get_field`, `get_fields`, `has_field`
256 | """
257 | assert (
258 | fit_type is _UNSET or
259 | fit_type is None or
260 | isinstance(fit_type, str))
261 |
262 | field_data = None
263 |
264 | if field_name_or_num is None:
265 | try:
266 | field_data = self.fields[idx]
267 | field_name_or_num = field_data.name_or_num
268 |
269 | # change the representation of idx so that its meaning can be
270 | # differentiated in case an exception is raised and it has to be
271 | # printed later on
272 | idx = f'[{idx}]'
273 |             except (IndexError, KeyError):
274 |                 # an out-of-range *idx* is handled below so that the
275 |                 # *fallback* argument can be honored
276 | pass
277 | else:
278 | current_idx = -1
279 | for field in self.fields:
280 | if field.is_named(field_name_or_num):
281 | current_idx += 1
282 | if current_idx == idx:
283 | field_data = field
284 | break
285 |
286 | if field_data is None:
287 | if fallback is _UNSET:
288 | raise KeyError(
289 | f'field "{field_name_or_num}" (idx #{idx}) not found in '
290 | f'message "{self.name}"')
291 | return fallback
292 |
293 | # check FIT type if needed
294 | if fit_type and field_data.type.name != fit_type:
295 | raise TypeError(
296 | 'unexpected type for FIT field '
297 | f'"{self.name}.{field_name_or_num}" (idx #{idx}; '
298 | f'got {field_data.type.name} instead of {fit_type})')
299 |
300 | # pick the right property
301 | value = field_data.value if not raw_value else field_data.raw_value
302 |
303 | # check value's type if needed
304 | if py_type is not _UNSET and not isinstance(value, py_type):
305 | if isinstance(py_type, (tuple, list)):
306 |                 py_type_str = ' or '.join([str(t) for t in py_type])
307 |             else:
308 |                 py_type_str = str(py_type)
309 |
310 | raise TypeError(
311 | 'unexpected type for FIT value '
312 | f'"{self.name}.{field_name_or_num}" (idx #{idx}; '
313 | f'got {type(value)} instead of {py_type_str})')
314 |
315 | return value
316 |
317 | def get_values(
318 | self, field_name_or_num, *, raw_value=False, fit_type=None,
319 | py_type=_UNSET):
320 | """
321 |         Like `get_value` but **yield** the value of every field that matches
322 |         *field_name_or_num* (i.e. this is a generator).
323 |
324 | It is not possible to specify a *fallback* value so `KeyError` will
325 | always be raised in case the specified field was not found.
326 |
327 |         The other arguments have the same meaning as for `get_value`.
328 |
329 | .. seealso:: `get_value`, `get_field`, `get_fields`, `has_field`
330 | """
331 | for idx, field_data in enumerate(self.fields):
332 | if field_data.is_named(field_name_or_num):
333 | value = self.get_value(
334 | None, idx=idx, raw_value=raw_value,
335 | fit_type=fit_type, py_type=py_type)
336 | yield value
337 |
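A short sketch of the accessors documented above (``record``, ``heart_rate``, ``timestamp`` and ``speed`` assume the standard FIT profile; the file name is a placeholder)::

    import fitdecode

    with open('Activity.fit', 'rb') as fin:
        with fitdecode.FitReader(fin) as fit:
            for frame in fit:
                if frame.frame_type != fitdecode.FIT_FRAME_DATA:
                    continue
                if frame.name != 'record':
                    continue

                # *fallback* avoids KeyError when the field is missing;
                # *fit_type* raises TypeError on a FIT type mismatch
                heart_rate = frame.get_value('heart_rate', fallback=None)
                timestamp = frame.get_value(
                    'timestamp', fallback=None, fit_type='date_time')

                # a message may carry several fields with the same name
                if frame.has_field('speed'):
                    speeds = list(frame.get_values('speed'))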
--------------------------------------------------------------------------------
/fitdecode/types.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Jean-Charles Lefebvre
2 | # SPDX-License-Identifier: MIT
3 |
4 | import math
5 | import struct
6 |
7 | __all__ = []
8 |
9 |
10 | class BaseType:
11 | __slots__ = ('name', 'identifier', 'fmt', 'size', 'parse')
12 |
13 | enum = None # in case we're treated as a FieldType
14 |
15 | def __init__(self, name, identifier, fmt, parse):
16 | self.name = name
17 | self.identifier = identifier
18 | self.fmt = fmt
19 | self.size = struct.calcsize(fmt)
20 | self.parse = parse
21 |
22 | @property
23 | def type_num(self):
24 | """"Base Type Number" as per SDK definition"""
25 | return self.identifier & 0x1F
26 |
27 |
28 | class FieldType:
29 | __slots__ = ('name', 'base_type', 'enum')
30 |
31 | def __init__(self, name, base_type, enum=None):
32 | self.name = name
33 | self.base_type = base_type
34 | self.enum = enum
35 |
36 |
37 | class _FieldAndSubFieldBase:
38 | __slots__ = ()
39 |
40 | @property
41 | def base_type(self):
42 | return self.type if self.is_base_type else self.type.base_type
43 |
44 | @property
45 | def is_base_type(self):
46 | return isinstance(self.type, BaseType)
47 |
48 | def render(self, raw_value):
49 | if self.type.enum and (raw_value in self.type.enum):
50 | return self.type.enum[raw_value]
51 | return raw_value
52 |
53 |
54 | class Field(_FieldAndSubFieldBase):
55 | __slots__ = (
56 | 'name', 'type', 'def_num', 'scale', 'offset', 'units', 'components',
57 | 'subfields')
58 |
59 | field_type = 'field'
60 |
61 | def __init__(self, name, type, def_num, scale=None, offset=None, units=None,
62 | components=None, subfields=None):
63 | super().__init__()
64 | self.name = name
65 | self.type = type #: `FieldType`
66 | self.def_num = def_num
67 | self.scale = scale
68 | self.offset = offset
69 | self.units = units
70 | self.components = components
71 | self.subfields = subfields
72 |
73 |
74 | class SubField(_FieldAndSubFieldBase):
75 | __slots__ = ('name', 'def_num', 'type', 'scale', 'offset', 'units',
76 | 'components', 'ref_fields')
77 |
78 | field_type = 'subfield'
79 |
80 | def __init__(self, name, def_num, type, scale=None, offset=None, units=None,
81 | components=None, ref_fields=None):
82 | super().__init__()
83 | self.name = name
84 | self.def_num = def_num
85 | self.type = type
86 | self.scale = scale
87 | self.offset = offset
88 | self.units = units
89 | self.components = components
90 | self.ref_fields = ref_fields
91 |
92 |
93 | class DevField(_FieldAndSubFieldBase):
94 | __slots__ = (
95 | 'dev_data_index', 'name', 'def_num', 'type', 'units',
96 | 'native_field_num',
97 |
98 | # "Inherited" from FitField only to maintain interface compatibility.
99 | # They are always None.
100 | 'scale', 'offset', 'components', 'subfields')
101 |
102 | field_type = 'devfield'
103 |
104 | def __init__(self, dev_data_index, name, def_num, type, units,
105 | native_field_num):
106 | super().__init__()
107 |
108 | self.dev_data_index = dev_data_index
109 | self.name = name
110 | self.def_num = def_num
111 | self.type = type
112 | self.units = units
113 | self.native_field_num = native_field_num
114 |
115 | self.scale = None
116 | self.offset = None
117 | self.components = None
118 | self.subfields = None
119 |
120 |
121 | class ReferenceField:
122 | __slots__ = ('name', 'def_num', 'value', 'raw_value')
123 |
124 | def __init__(self, name, def_num, value, raw_value):
125 | self.name = name
126 | self.def_num = def_num
127 | self.value = value
128 | self.raw_value = raw_value
129 |
130 |
131 | class ComponentField:
132 | __slots__ = (
133 | 'name', 'def_num', 'scale', 'offset', 'units', 'accumulate', 'bits',
134 | 'bit_offset')
135 |
136 | field_type = 'component'
137 |
138 | def __init__(self, name, def_num, scale=None, offset=None, units=None,
139 | accumulate=None, bits=None, bit_offset=None):
140 | self.name = name
141 | self.def_num = def_num
142 | self.scale = scale
143 | self.offset = offset
144 | self.units = units
145 | self.accumulate = accumulate
146 | self.bits = bits
147 | self.bit_offset = bit_offset
148 |
149 | def render(self, raw_value):
150 | if raw_value is None:
151 | return None
152 |
153 |         # if it's a tuple, then it's a byte array, so unpack it as such
154 | # (only type that uses this is compressed speed/distance)
155 | if isinstance(raw_value, tuple):
156 | # Profile.xlsx sometimes contains more components than the read raw
157 | # value is able to hold (typically the *event_timestamp_12* field in
158 | # *hr* messages).
159 |             # This test ensures *unpacked_num* is not right-shifted by more
160 |             # bits than the raw value actually holds.
161 | if self.bit_offset and self.bit_offset >= len(raw_value) << 3:
162 | raise ValueError()
163 |
164 | unpacked_num = 0
165 |
166 | # unpack byte array as little endian
167 | for value in reversed(raw_value):
168 | unpacked_num = (unpacked_num << 8) + value
169 |
170 | raw_value = unpacked_num
171 |
172 | # mask and shift like a normal number
173 | if isinstance(raw_value, int):
174 | raw_value = (raw_value >> self.bit_offset) & ((1 << self.bits) - 1)
175 |
176 | return raw_value
177 |
178 |
179 | class MessageType:
180 | __slots__ = ('name', 'mesg_num', 'fields')
181 |
182 | def __init__(self, name, mesg_num, fields):
183 | self.name = name
184 | self.mesg_num = mesg_num
185 | self.fields = fields
186 |
187 |
188 | class FieldDefinition:
189 | __slots__ = ('field', 'def_num', 'base_type', 'size')
190 |
191 | def __init__(self, field, def_num, base_type, size):
192 | self.field = field #: `Field`
193 | self.def_num = def_num
194 | self.base_type = base_type
195 | self.size = size
196 |
197 | @property
198 | def is_dev(self):
199 | return False
200 |
201 | @property
202 | def name(self):
203 | return self.field.name if self.field else 'unknown_' + str(self.def_num)
204 |
205 | @property
206 | def type(self):
207 | return self.field.type if self.field else self.base_type
208 |
209 |
210 | class DevFieldDefinition:
211 | __slots__ = ('field', 'dev_data_index', 'base_type', 'def_num', 'size')
212 |
213 | def __init__(self, field, dev_data_index, def_num, size):
214 | self.field = field
215 | self.dev_data_index = dev_data_index
216 | self.def_num = def_num
217 | self.size = size
218 |
219 | # for dev fields, the base_type and type are always the same
220 | self.base_type = self.type
221 |
222 | @property
223 | def is_dev(self):
224 | return True
225 |
226 | @property
227 | def name(self):
228 | if self.field:
229 | return self.field.name
230 | else:
231 | return f'unknown_dev_{self.dev_data_index}_{self.def_num}'
232 |
233 | @property
234 | def type(self):
235 | return self.field.type
236 |
237 |
238 | class FieldData:
239 | __slots__ = (
240 | 'field_def', 'field', 'parent_field', 'value', 'raw_value', 'units')
241 |
242 | def __init__(self, field_def, field, parent_field, value, raw_value,
243 | units=None):
244 | self.field_def = field_def #: `FieldDefinition` object
245 | self.field = field
246 | self.parent_field = parent_field
247 | self.value = value
248 | self.raw_value = raw_value
249 | self.units = units
250 |
251 | if not self.units and self.field:
252 | # Default to units on field, otherwise None.
253 | # NOTE: Not a property since you may want to override this in a data
254 | # processor
255 | self.units = self.field.units
256 |
257 | @property
258 | def name(self):
259 | """
260 | Field's name as defined in FIT global profile.
261 |
262 | If name was not found in global profile, a string is created with the
263 | form: ``unknown_{def_num}`` where ``def_num`` is the field's definition
264 | number.
265 |
266 | This value is **NOT** compatible with `is_named`.
267 |
268 | .. seealso:: `name_or_num`
269 | """
270 | return self.field.name if self.field else 'unknown_%d' % self.def_num
271 |
272 | @property
273 | def name_or_num(self):
274 | """
275 | Field's name as defined in FIT global profile.
276 |
277 | If name was not found in global profile, ``self.def_num`` is returned
278 | (`int`).
279 |
280 | This value is compatible with `is_named`.
281 |
282 | .. seealso:: `name`
283 | """
284 | return self.field.name if self.field else self.def_num
285 |
286 | @property
287 | def def_num(self):
288 | """Field's definition number (`int`)"""
289 | # prefer to return the def_num on the field since field_def may be None
290 | # if this field is dynamic
291 | return self.field.def_num if self.field else self.field_def.def_num
292 |
293 | @property
294 | def base_type(self):
295 | """Field's `BaseType`"""
296 |         # try field_def's base type; if it doesn't exist, this is a dynamically
297 |         # added field, so field won't be None
298 | if self.field_def:
299 | return self.field_def.base_type
300 | else:
301 | return self.field.base_type
302 |
303 | @property
304 | def is_base_type(self):
305 | """Field's `BaseType`"""
306 | return self.field.is_base_type if self.field else True
307 |
308 | @property
309 | def type(self):
310 | return self.field.type if self.field else self.base_type
311 |
312 | @property
313 | def field_type(self):
314 | return self.field.field_type if self.field else 'field'
315 |
316 | @property
317 | def is_expanded(self):
318 | """
319 | Flag to indicate whether this field has been generated through expansion
320 | """
321 | return not self.field_def
322 |
323 | def is_named(self, name_or_num):
324 | """
325 | Check if this field has the specified name (`str`) or definition number
326 | (`int`)
327 | """
328 | if self.field:
329 | if name_or_num in (self.field.def_num, self.field.name):
330 | return True
331 |
332 | if self.parent_field:
333 | if name_or_num in (self.parent_field.def_num, self.parent_field.name):
334 | return True
335 |
336 | if self.field_def:
337 | if name_or_num == self.field_def.def_num:
338 | return True
339 |
340 | return False
341 |
342 |
343 | def parse_string(byteslike):
344 | try:
345 | s = byteslike[:byteslike.index(0x00)]
346 | except ValueError:
347 | # FIT specification defines the 'string' type as follows: "Null
348 | # terminated string encoded in UTF-8 format".
349 | #
350 | # However 'string' values are not always null-terminated when encoded,
351 | # according to FIT files created by Garmin devices (e.g. DEVICE.FIT file
352 | # from a fenix3).
353 | #
354 | # So in order to be more flexible, in case index() could not find any
355 | # null byte, we just decode the whole bytes-like object.
356 | s = byteslike
357 |
358 | return s.decode(encoding='utf-8', errors='replace') or None
359 |
360 |
361 | BASE_TYPE_BYTE = BaseType(
362 | name='byte', identifier=0x0d, fmt='B',
363 | parse=lambda x: None if all(b == 0xff for b in x) else x)
364 |
365 |
366 | BASE_TYPES = {
367 | 0x00: BaseType(name='enum', identifier=0x00, fmt='B', parse=lambda x: None if x == 0xff else x), # noqa: E501
368 | 0x01: BaseType(name='sint8', identifier=0x01, fmt='b', parse=lambda x: None if x == 0x7f else x), # noqa: E501
369 | 0x02: BaseType(name='uint8', identifier=0x02, fmt='B', parse=lambda x: None if x == 0xff else x), # noqa: E501
370 | 0x83: BaseType(name='sint16', identifier=0x83, fmt='h', parse=lambda x: None if x == 0x7fff else x), # noqa: E501
371 | 0x84: BaseType(name='uint16', identifier=0x84, fmt='H', parse=lambda x: None if x == 0xffff else x), # noqa: E501
372 | 0x85: BaseType(name='sint32', identifier=0x85, fmt='i', parse=lambda x: None if x == 0x7fffffff else x), # noqa: E501
373 | 0x86: BaseType(name='uint32', identifier=0x86, fmt='I', parse=lambda x: None if x == 0xffffffff else x), # noqa: E501
374 | 0x07: BaseType(name='string', identifier=0x07, fmt='s', parse=parse_string),
375 | 0x88: BaseType(name='float32', identifier=0x88, fmt='f', parse=lambda x: None if math.isnan(x) else x), # noqa: E501
376 | 0x89: BaseType(name='float64', identifier=0x89, fmt='d', parse=lambda x: None if math.isnan(x) else x), # noqa: E501
377 | 0x0a: BaseType(name='uint8z', identifier=0x0a, fmt='B', parse=lambda x: None if x == 0 else x), # noqa: E501
378 | 0x8b: BaseType(name='uint16z', identifier=0x8b, fmt='H', parse=lambda x: None if x == 0 else x), # noqa: E501
379 | 0x8c: BaseType(name='uint32z', identifier=0x8c, fmt='I', parse=lambda x: None if x == 0 else x), # noqa: E501
380 | 0x0d: BASE_TYPE_BYTE,
381 | 0x8e: BaseType(name='sint64', identifier=0x8e, fmt='q', parse=lambda x: None if x == 0x7fffffffffffffff else x), # noqa: E501
382 | 0x8f: BaseType(name='uint64', identifier=0x8f, fmt='Q', parse=lambda x: None if x == 0xffffffffffffffff else x), # noqa: E501
383 | 0x90: BaseType(name='uint64z', identifier=0x90, fmt='Q', parse=lambda x: None if x == 0 else x)} # noqa: E501
384 |
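A small illustration of the parsing rules above: each base type maps its "invalid" sentinel to `None`, and `ComponentField.render` extracts a bit field from a raw integer, unpacking byte tuples as little-endian first (the component below is made up)::

    from fitdecode import types

    # invalid-value sentinels parse to None (0x84 is uint16)
    assert types.BASE_TYPES[0x84].parse(0xffff) is None
    assert types.BASE_TYPES[0x84].parse(1234) == 1234

    # a made-up component occupying 8 bits starting at bit 4 of its raw value
    comp = types.ComponentField('example', def_num=0, bits=8, bit_offset=4)
    assert comp.render(0x0abc) == 0xab

    # byte tuples (e.g. compressed speed/distance) are unpacked as
    # little-endian integers before masking and shifting
    assert comp.render((0xbc, 0x0a)) == 0xab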
--------------------------------------------------------------------------------
/fitdecode/cmd/fittxt.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright (c) Jean-Charles Lefebvre
3 | # SPDX-License-Identifier: MIT
4 |
5 | import argparse
6 | import datetime
7 | import decimal
8 | import os.path
9 | import re
10 | import sys
11 | import traceback
12 | from collections import OrderedDict
13 |
14 | import fitdecode
15 |
16 | echo = None
17 |
18 |
19 | def txt_encode(obj):
20 | """
21 |     Convert a given *obj* to either a *str* or a *PrintableObject*, depending
22 |     on which is most suitable
23 | """
24 |
25 | if isinstance(obj, PrintableObject):
26 | return obj
27 |
28 | if isinstance(obj, str):
29 | return '[' + obj + ']'
30 |
31 | if obj is None:
32 | return ''
33 |
34 | if isinstance(obj, bool):
35 | return 'yes' if obj else 'no'
36 |
37 | if isinstance(obj, (int, float, decimal.Decimal)):
38 | return str(obj)
39 |
40 | if isinstance(obj, datetime.time):
41 | return obj.isoformat()
42 |
43 | if isinstance(obj, datetime.datetime):
44 | return obj.isoformat()
45 |
46 | if isinstance(obj, fitdecode.FitChunk):
47 | return PrintableObject(
48 | index=obj.index,
49 | offset=obj.offset,
50 | size=len(obj.bytes))
51 |
52 | if isinstance(obj, fitdecode.types.FieldDefinition):
53 | return PrintableObject(
54 | name=obj.name,
55 | def_num=obj.def_num,
56 | type_name=obj.type.name,
57 | base_type_name=obj.base_type.name,
58 | size=obj.size)
59 |
60 | if isinstance(obj, fitdecode.types.DevFieldDefinition):
61 | return PrintableObject(
62 | name=obj.name,
63 | dev_data_index=obj.dev_data_index,
64 | def_num=obj.def_num,
65 | type_name=obj.type.name,
66 | size=obj.size)
67 |
68 | if isinstance(obj, fitdecode.types.FieldData):
69 | return PrintableObject(
70 | name=obj.name,
71 | value=obj.value,
72 | units=obj.units if obj.units else '',
73 | def_num=obj.def_num,
74 | raw_value=obj.raw_value)
75 |
76 | if isinstance(obj, fitdecode.FitHeader):
77 | crc = obj.crc if obj.crc else 0
78 | return PrintableObject(
79 | _label=f'chunk#{obj.chunk.index} - fit_header',
80 | header_size=obj.header_size,
81 | proto_ver='(' + ', '.join([str(v) for v in obj.proto_ver]) + ')',
82 | profile_ver='(' + ', '.join([str(v) for v in obj.profile_ver]) + ')',
83 | body_size=obj.body_size,
84 | crc=f'{crc:#06x}',
85 | crc_matched=obj.crc_matched,
86 | chunk=obj.chunk)
87 |
88 | if isinstance(obj, fitdecode.FitCRC):
89 | return PrintableObject(
90 | _label=f'chunk#{obj.chunk.index} - fit_crc',
91 | crc=f'{obj.crc:#06x}',
92 | matched=obj.matched,
93 | chunk=obj.chunk)
94 |
95 | if isinstance(obj, fitdecode.FitDefinitionMessage):
96 | return PrintableObject(
97 | _label=(
98 | f'chunk#{obj.chunk.index} - fit_definition - {obj.name} '
99 | f'(loc#{obj.local_mesg_num} glob#{obj.global_mesg_num})'),
100 | chunk=obj.chunk,
101 | header=PrintableObject(
102 | local_mesg_num=obj.local_mesg_num,
103 | time_offset=obj.time_offset,
104 | is_developer_data=obj.is_developer_data),
105 | global_mesg_num=obj.global_mesg_num,
106 | endian=obj.endian,
107 | field_defs=obj.field_defs,
108 | dev_field_defs=obj.dev_field_defs)
109 |
110 | if isinstance(obj, fitdecode.FitDataMessage):
111 | return PrintableObject(
112 | _label=(
113 | f'chunk#{obj.chunk.index} - fit_data - {obj.name} '
114 | f'(loc#{obj.local_mesg_num} glob#{obj.global_mesg_num})'),
115 | chunk=obj.chunk,
116 | header=PrintableObject(
117 | local_mesg_num=obj.local_mesg_num,
118 | time_offset=obj.time_offset,
119 | is_developer_data=obj.is_developer_data),
120 | global_mesg_num=obj.global_mesg_num,
121 | fields=obj.fields)
122 |
123 | if __debug__:
124 | print(type(obj))
125 | assert 0
126 |
127 | return repr(obj)
128 |
129 |
130 | class PrintableObject:
131 | __slots__ = (
132 | '_label', '_dict', '_max_key_length',
133 | '_pad', '_key_prefix', '_key_suffix')
134 |
135 | def __init__(self, *, _label=None, _pad=' ', _key_prefix='',
136 | _key_suffix=' ', **props):
137 | self._label = _label
138 | self._dict = OrderedDict()
139 | self._max_key_length = 0
140 | self._pad = _pad
141 | self._key_prefix = _key_prefix
142 | self._key_suffix = _key_suffix
143 |
144 | for key, value in props.items():
145 | # to avoid potential collision with PrintableObject members
146 | # (see __setattr__)
147 | assert key[0] != '_'
148 |
149 | self._dict[key] = value
150 | if len(key) > self._max_key_length:
151 | self._max_key_length = len(key)
152 |
153 | def __iter__(self):
154 | for key, value in self._dict.items():
155 | name = self._key_prefix
156 | name += key.ljust(self._max_key_length, self._pad)
157 | name += self._key_suffix
158 |
159 | yield name, value
160 |
161 | def __getattr__(self, name):
162 | try:
163 | return self._dict[name]
164 | except KeyError:
165 | raise AttributeError
166 |
167 | def __setattr__(self, name, value):
168 | if name[0] == '_':
169 | super().__setattr__(name, value)
170 | elif name not in self._dict:
171 | raise AttributeError
172 | else:
173 | self._dict[name] = value
174 |
175 |
176 | def global_stats(frames, options):
177 | if options.filter:
178 | filter_str = []
179 | for msg_num, include in options.filter.items():
180 | include = '-' if not include else '+'
181 | msg_name = fitdecode.utils.get_mesg_type(msg_num).name
182 | filter_str.append(f'{include}{msg_name}#{msg_num}')
183 | filter_str = '[' + ', '.join(filter_str) + ']'
184 | else:
185 | filter_str = '[]'
186 |
187 | stats = PrintableObject(
188 | _label='TXT',
189 | name=os.path.basename(options.infile.name),
190 | filter=filter_str,
191 | frames=len(frames),
192 | size=0,
193 | missing_headers=0,
194 | fit_files=[])
195 |
196 | stats_got_header = False
197 | for frame in frames:
198 | stats.size += len(frame.chunk.bytes)
199 |
200 | if isinstance(frame, fitdecode.FitHeader):
201 | stats_got_header = True
202 | stats.fit_files.append(PrintableObject(
203 | definition_messages=0,
204 | data_messages=0,
205 | has_footer=False,
206 | header_crc_matched=frame.crc_matched,
207 | footer_crc_matched=False))
208 | continue
209 |
210 | if not stats_got_header:
211 | stats.missing_headers += 1
212 | continue
213 |
214 | curr_file = stats.fit_files[-1]
215 |
216 | if isinstance(frame, fitdecode.FitCRC):
217 | stats_got_header = False
218 | curr_file.has_footer = True
219 | curr_file.footer_crc_matched = frame.matched
220 |
221 | elif isinstance(frame, fitdecode.FitDefinitionMessage):
222 | curr_file.definition_messages += 1
223 |
224 | elif isinstance(frame, fitdecode.FitDataMessage):
225 | curr_file.data_messages += 1
226 |
227 | if options.strip:
228 | stats.fit_files = len(stats.fit_files)
229 |
230 | return stats
231 |
232 |
233 | def txt_print(obj, *, indent='\t', level=0):
234 |
235 | def _recurse(subobj, sublevel=level):
236 | txt_print(subobj, indent=indent, level=sublevel)
237 |
238 | def _p(*values, end='\n'):
239 | echo(indent * level, *values, sep='', end=end)
240 |
241 | if isinstance(obj, str):
242 | _p(obj)
243 |
244 | elif isinstance(obj, (tuple, list)):
245 | # first pass to check if we can keep this list on a single line
246 | one_line = True
247 | for value in obj:
248 | if value is not None and not isinstance(
249 | value, (bool, int, float, decimal.Decimal)):
250 | one_line = False
251 | break
252 |
253 | if one_line:
254 | _p('[' + ', '.join([txt_encode(v) for v in obj]) + ']')
255 | else:
256 | first = True
257 | for value in obj:
258 | if first:
259 | first = False
260 | else:
261 | _p('-')
262 | _recurse(value)
263 |
264 | elif isinstance(obj, PrintableObject):
265 | if obj._label:
266 | _p(obj._label)
267 | obj._label = None
268 | _recurse(obj, sublevel=level + 1)
269 | else:
270 | for key, value in obj:
271 | if isinstance(value, str):
272 | _p(key, value)
273 | elif isinstance(value, (tuple, list)):
274 | _p(f'{key.rstrip()} ({len(value)})')
275 | _recurse(value, sublevel=level + 1)
276 | else:
277 | value = txt_encode(value)
278 | if isinstance(value, str):
279 | _p(key, value)
280 | else:
281 | _p(key.rstrip())
282 | _recurse(value, sublevel=level + 1)
283 |
284 | else:
285 | _recurse(txt_encode(obj))
286 |
287 |
288 | def parse_filter_args(arg_parser, filter_opt):
289 | FILTER_DESC = re.compile(r'^\s*([\+\-]?)\s*([^\s]+)\s*$', re.A)
290 |
291 | if not filter_opt:
292 | return filter_opt, None
293 |
294 | filtr = {} # {msg_num: bool_include}
295 | default_include_policy = False
296 |
297 | for desc in filter_opt:
298 | msg = None
299 | rem = FILTER_DESC.fullmatch(desc)
300 | if rem:
301 | include = False if rem[1] and rem[1] == '-' else True
302 | msg = rem[2].lower()
303 |
304 | if not include:
305 | default_include_policy = True
306 |
307 | try:
308 | msg = fitdecode.utils.get_mesg_num(msg)
309 | except ValueError:
310 | try:
311 | msg = int(msg, base=0)
312 | except ValueError:
313 | msg = None
314 |
315 | if msg is None:
316 | arg_parser.error(f'malformed filter: "{desc}"')
317 | sys.exit(1)
318 |
319 | filtr[msg] = include
320 |
321 | return filtr, default_include_policy
322 |
323 |
324 | def parse_args(args=None):
325 | parser = argparse.ArgumentParser(
326 |         description='Dump a FIT file to TXT format that eases debugging',
327 | epilog=f'fitdecode version {fitdecode.__version__}',
328 | allow_abbrev=False)
329 |
330 | parser.add_argument(
331 | '--output', '-o', type=argparse.FileType(mode='wt', encoding='utf-8'),
332 | default='-',
333 | help='File to output data into (defaults to stdout)')
334 |
335 | parser.add_argument(
336 | '--nocrc', action='store_const',
337 | const=fitdecode.CrcCheck.DISABLED,
338 | default=fitdecode.CrcCheck.WARN,
339 |         help='Some devices seem to write invalid CRCs; ignore these')
340 |
341 | parser.add_argument(
342 | '--nodef', action='store_true',
343 | help='Do not output FIT local message definitions')
344 |
345 | parser.add_argument(
346 | '--nounk', action='store_true',
347 | help='Do not output unknown FIT messages (e.g. "unknown_140")')
348 |
349 | parser.add_argument(
350 | '--strip', action='store_true',
351 | help='Do not output the extended global stats in header')
352 |
353 | parser.add_argument(
354 | '--filter', '-f', action='append',
355 | help=(
356 | 'Message name(s) (or global numbers) to filter-in or out, '
357 | 'depending on sign prefix. Examples: "-record" to exclude record '
358 | 'messages; "+file_id" or "file_id" to include file_id messages.'))
359 |
360 | parser.add_argument(
361 | 'infile', metavar='FITFILE', type=argparse.FileType(mode='rb'),
362 | help='Input .FIT file (use - for stdin)')
363 |
364 | options = parser.parse_args(args)
365 | options.filter, options.default_filter = \
366 | parse_filter_args(parser, options.filter)
367 |
368 | return options
369 |
370 |
371 | def main(args=None):
372 | options = parse_args(args)
373 |
374 | def _echo(*objects, sep=' ', end='\n', file=options.output, flush=False):
375 | print(*objects, sep=sep, end=end, file=file, flush=flush)
376 |
377 | def _echo_separator():
378 | _echo('')
379 | _echo('*' * 80)
380 | _echo('')
381 | _echo('')
382 |
383 | global echo
384 | echo = _echo
385 |
386 | # fully parse input file and filter out the unwanted messages
387 | frames = []
388 | exception_msg = None
389 | try:
390 | with fitdecode.FitReader(
391 | options.infile,
392 | processor=fitdecode.StandardUnitsDataProcessor(),
393 | check_crc=options.nocrc,
394 | keep_raw_chunks=True) as fit:
395 | for frame in fit:
396 | if (options.nodef and
397 | frame.frame_type == fitdecode.FIT_FRAME_DEFINITION):
398 | continue
399 |
400 | if (options.nounk and
401 | frame.frame_type in (
402 | fitdecode.FIT_FRAME_DEFINITION,
403 | fitdecode.FIT_FRAME_DATA) and
404 | frame.mesg_type is None):
405 | continue
406 |
407 | if (options.filter and
408 | frame.frame_type in (
409 | fitdecode.FIT_FRAME_DEFINITION,
410 | fitdecode.FIT_FRAME_DATA)):
411 | try:
412 | include = options.filter[frame.global_mesg_num]
413 | except KeyError:
414 | include = options.default_filter
415 |
416 | if not include:
417 | continue
418 |
419 | frames.append(frame)
420 | except Exception:
421 | print(
422 | 'WARNING: error(s) occurred while parsing FIT file. '
423 | 'See output file for more info.',
424 | file=sys.stderr)
425 | exception_msg = traceback.format_exc()
426 |
427 | # print some statistics as a header
428 | if not exception_msg:
429 | txt_print(global_stats(frames, options))
430 | echo('')
431 | else:
432 | echo('ERROR OCCURRED WHILE PARSING', options.infile.name)
433 | echo('')
434 | echo(exception_msg)
435 | echo('')
436 |
437 | # pretty-print the file
438 | had_frames = False
439 | for frame in frames:
440 | if had_frames and isinstance(frame, fitdecode.FitHeader):
441 | _echo_separator()
442 | had_frames = True
443 | txt_print(frame)
444 | echo('')
445 |
446 | return 0
447 |
448 |
449 | if __name__ == '__main__':
450 | sys.exit(main())
451 |
--------------------------------------------------------------------------------
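
The command-line tool above is a thin wrapper around the reader API it demonstrates. A minimal sketch that mirrors the filtering loop in main() without the argparse plumbing (the input path is only an example taken from tests/files):

    import fitdecode

    with fitdecode.FitReader(
            'tests/files/Activity.fit',  # example path; any FIT file works
            processor=fitdecode.StandardUnitsDataProcessor(),
            check_crc=fitdecode.CrcCheck.WARN,
            keep_raw_chunks=True) as fit:
        for frame in fit:
            # equivalent of --nodef: skip local message definitions
            if frame.frame_type == fitdecode.FIT_FRAME_DEFINITION:
                continue
            # print a one-line summary of each data message
            if frame.frame_type == fitdecode.FIT_FRAME_DATA:
                print(frame.name, len(frame.fields), 'fields')
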
/tests/test_reader.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright (c) Jean-Charles Lefebvre
3 | # SPDX-License-Identifier: MIT
4 |
5 | import csv
6 | import datetime
7 | import glob
8 | import hashlib
9 | import os.path
10 | import struct
11 | import unittest
12 |
13 | import fitdecode
14 |
15 | TEST_FILES_DIR = os.path.join(os.path.dirname(__file__), 'files')
16 | HASH_METHOD = 'sha1'
17 |
18 |
19 | def _test_file(name):
20 | return os.path.join(TEST_FILES_DIR, name)
21 |
22 |
23 | def _invalid_test_file(name):
24 | return os.path.join(TEST_FILES_DIR, 'invalid', name)
25 |
26 |
27 | def _secs_to_dt(secs):
28 | return datetime.datetime.fromtimestamp(
29 | secs + fitdecode.FIT_UTC_REFERENCE,
30 | datetime.timezone.utc)
31 |
32 |
33 | def _generate_messages(mesg_num, local_mesg_num, field_defs,
34 | endian='<', data=None):
35 | mesgs = []
36 | base_type_list = []
37 |
38 | # definition message, local message num
39 |     s = struct.pack('<B', 0x40 | local_mesg_num)
40 | 
41 |     # reserved byte, architecture
42 |     s += struct.pack('<2B', 0, int(endian == '>'))
43 |
44 | # global message num, num fields
45 | s += struct.pack('%sHB' % endian, mesg_num, len(field_defs))
46 |
47 | for def_num, base_type in field_defs:
48 | base_type = [
49 | bt for bt in fitdecode.types.BASE_TYPES.values()
50 | if bt.name == base_type][0]
51 | base_type_list.append(base_type)
52 | s += struct.pack('<3B', def_num, base_type.size, base_type.identifier)
53 |
54 | mesgs.append(s)
55 |
56 | if data:
57 | for mesg_data in data:
58 | s = struct.pack('B', local_mesg_num)
59 | for value, base_type in zip(mesg_data, base_type_list):
60 | s += struct.pack("%s%s" % (endian, base_type.fmt), value)
61 | mesgs.append(s)
62 |
63 | return b''.join(mesgs)
64 |
65 |
66 | def _generate_fitfile(data=None, endian='<'):
67 | fit_data = (_generate_messages(
68 | # local mesg 0, global mesg 0 (file_id)
69 | mesg_num=0, local_mesg_num=0, endian=endian, field_defs=[
70 | # serial number, time_created, manufacturer
71 | (3, 'uint32z'), (4, 'uint32'), (1, 'uint16'),
72 | # product/garmin_product, number, type
73 | (2, 'uint16'), (5, 'uint16'), (0, 'enum')],
74 | # random serial number, random time, garmin, edge500, null, activity
75 | data=[[558069241, 723842606, 1, 1036, (2 ** 16) - 1, 4]]))
76 |
77 | if data:
78 | fit_data += data
79 |
80 |     # Protocol version 1.0, profile version 1.52
81 | header = struct.pack('<2BHI4s', 14, 16, 152, len(fit_data), b'.FIT')
82 |
83 | file_data = \
84 | header + \
85 | struct.pack('')
232 |
233 |     def test_fitparse_component_field_accumulators(self):
234 | csv_fp = open(
235 | _test_file('compressed-speed-distance-records.csv'),
236 | mode='rt')
237 | csv_file = csv.reader(csv_fp)
238 | next(csv_file) # consume header
239 |
240 | # parse the whole content
241 | fit = tuple(fitdecode.FitReader(
242 | _test_file('compressed-speed-distance.fit'),
243 | check_crc=fitdecode.CrcCheck.RAISE,
244 | keep_raw_chunks=False))
245 |
246 | # make a generator of 'record' messages
247 | records = (
248 | r for r in fit
249 | if isinstance(r, fitdecode.FitDataMessage)
250 | and r.name == 'record')
251 |
252 | # skip empty record for now (sets timestamp via header)
253 | empty_record = next(records)
254 |
255 | # file's timestamp record is < 0x10000000, so field returns seconds
256 | self.assertEqual(empty_record.get_field('timestamp').value, 17217864)
257 |
258 | # TODO: update using local_timestamp as offset, since we have this value
259 | # as 2012 date
260 |
261 | for count, (record, (timestamp, heartrate, speed, distance, cadence)) in enumerate(zip(records, csv_file)):
262 | # no fancy datetime stuff, since timestamp record is < 0x10000000
263 | fit_ts = record.get_field('timestamp').value
264 |
265 | self.assertIsInstance(fit_ts, int)
266 | self.assertLess(fit_ts, 0x10000000)
267 | self.assertEqual(fit_ts, int(timestamp))
268 |
269 | self.assertEqual(record.get_field('heart_rate').value, int(heartrate))
270 | self.assertEqual(record.get_field('cadence').value, int(cadence) if cadence != 'null' else None)
271 | self.assertAlmostEqual(record.get_field('speed').value, float(speed))
272 | self.assertAlmostEqual(record.get_field('distance').value, float(distance))
273 |
274 | self.assertEqual(count, 753) # TODO: confirm size(records) = size(csv)
275 | csv_fp.close()
276 |
277 | def test_fitparse_component_field_resolves_subfield(self):
278 | fit_data = _generate_fitfile(
279 | _generate_messages(
280 | # event (21), local message 1
281 | mesg_num=21, local_mesg_num=1, field_defs=[
282 | # event, event_type, data16
283 | (0, 'enum'), (1, 'enum'), (2, 'uint16')],
284 | data=[[0, 0, 2]]))
285 |
286 | # parse the whole content
287 | fit = tuple(fitdecode.FitReader(
288 | fit_data,
289 | check_crc=fitdecode.CrcCheck.RAISE,
290 | keep_raw_chunks=False))
291 |
292 | event = fit[4]
293 | self.assertEqual(event.name, 'event')
294 |
295 | for field in ('event', 0):
296 | self.assertEqual(event.get_field(field).value, 'timer')
297 | self.assertEqual(event.get_field(field).raw_value, 0)
298 |
299 | for field in ('event_type', 1):
300 | self.assertEqual(event.get_field(field).value, 'start')
301 | self.assertEqual(event.get_field(field).raw_value, 0)
302 |
303 | # should be able to reference by original field name, component field
304 | # name, subfield name, and then the field def_num of both the original
305 | # field and component field
306 | for field in ('timer_trigger', 'data', 3):
307 | self.assertEqual(event.get_field(field).value, 'fitness_equipment')
308 | self.assertEqual(event.get_field(field).raw_value, 2)
309 |
310 | # component field should be left as is
311 | for field in ('data16', 2):
312 | self.assertEqual(event.get_field(field).value, 2)
313 |
314 | def test_fitparse_subfield_components(self):
315 | # score = 123, opponent_score = 456, total = 29884539
316 | sport_point_value = 123 + (456 << 16)
317 |
318 |         # rear_gear_num = 4, rear_gear = 20, front_gear_num = 2, front_gear = 34
319 |         gear_change_value = 4 + (20 << 8) + (2 << 16) + (34 << 24)
320 |
321 | fit_data = _generate_fitfile(
322 | _generate_messages(
323 | # event (21), local message 1
324 | mesg_num=21, local_mesg_num=1, field_defs=[
325 | # event, data
326 | (0, 'enum'), (3, 'uint32')],
327 | data=[
328 | # sport point
329 | [33, sport_point_value],
330 | # front gear change
331 |                     [42, gear_change_value]]))
332 |
333 | # parse the whole content
334 | fit = tuple(fitdecode.FitReader(
335 | fit_data,
336 | check_crc=fitdecode.CrcCheck.RAISE,
337 | keep_raw_chunks=False))
338 |
339 | sport_point = fit[4]
340 | self.assertEqual(sport_point.name, 'event')
341 |
342 | for field in ('event', 0):
343 | self.assertEqual(sport_point.get_field(field).value, 'sport_point')
344 | self.assertEqual(sport_point.get_field(field).raw_value, 33)
345 |
346 | for field in ('sport_point', 'data', 3):
347 | # verify raw numeric value
348 | self.assertEqual(sport_point.get_field(field).value, sport_point_value)
349 |
350 | for field in ('score', 7):
351 | self.assertEqual(sport_point.get_field(field).value, 123)
352 |
353 | for field in ('opponent_score', 8):
354 | self.assertEqual(sport_point.get_field(field).value, 456)
355 |
356 | gear_change = fit[5]
357 | self.assertEqual(gear_change.name, 'event')
358 |
359 | for field in ('event', 0):
360 | self.assertEqual(gear_change.get_field(field).value, 'front_gear_change')
361 | self.assertEqual(gear_change.get_field(field).raw_value, 42)
362 |
363 | for field in ('gear_change_data', 'data', 3):
364 | # verify raw numeric value
365 |             self.assertEqual(gear_change.get_field(field).value, gear_change_value)
366 |
367 | for field in ('front_gear_num', 9):
368 | self.assertEqual(gear_change.get_field(field).value, 2)
369 |
370 | for field in ('front_gear', 10):
371 | self.assertEqual(gear_change.get_field(field).value, 34)
372 |
373 | for field in ('rear_gear_num', 11):
374 | self.assertEqual(gear_change.get_field(field).value, 4)
375 |
376 | for field in ('rear_gear', 12):
377 | self.assertEqual(gear_change.get_field(field).value, 20)
378 |
379 | def test_fitparse_parsing_edge_500_fit_file(self):
380 | self._fitparse_csv_test_helper(
381 | 'garmin-edge-500-activity.fit',
382 | 'garmin-edge-500-activity-records.csv')
383 |
384 | def test_fitparse_parsing_fenix_5_bike_fit_file(self):
385 | self._fitparse_csv_test_helper(
386 | 'garmin-fenix-5-bike.fit',
387 | 'garmin-fenix-5-bike-records.csv')
388 |
389 | def test_fitparse_parsing_fenix_5_run_fit_file(self):
390 | self._fitparse_csv_test_helper(
391 | 'garmin-fenix-5-run.fit',
392 | 'garmin-fenix-5-run-records.csv')
393 |
394 | def test_fitparse_parsing_fenix_5_walk_fit_file(self):
395 | self._fitparse_csv_test_helper(
396 | 'garmin-fenix-5-walk.fit',
397 | 'garmin-fenix-5-walk-records.csv')
398 |
399 | def test_fitparse_parsing_edge_820_fit_file(self):
400 | self._fitparse_csv_test_helper(
401 | 'garmin-edge-820-bike.fit',
402 | 'garmin-edge-820-bike-records.csv')
403 |
404 | def _fitparse_csv_test_helper(self, fit_file, csv_file):
405 | csv_fp = open(_test_file(csv_file), 'r')
406 | csv_messages = csv.reader(csv_fp)
407 | field_names = next(csv_messages) # consume header
408 |
409 | # parse the whole content
410 | fit = tuple(fitdecode.FitReader(
411 | _test_file(fit_file),
412 | check_crc=fitdecode.CrcCheck.RAISE,
413 | keep_raw_chunks=False))
414 |
415 | # make a generator of 'record' messages
416 | messages = (
417 | r for r in fit
418 | if isinstance(r, fitdecode.FitDataMessage)
419 | and r.name == 'record')
420 |
421 | # for fixups
422 | last_valid_lat, last_valid_long = None, None
423 |
424 | for message, csv_message in zip(messages, csv_messages):
425 | for csv_index, field_name in enumerate(field_names):
426 | try:
427 | fit_value = message.get_field(field_name).value
428 | except KeyError:
429 | fit_value = None
430 |
431 | csv_value = csv_message[csv_index]
432 |
433 | if field_name == 'timestamp':
434 | # adjust GMT to PDT and format
435 | fit_value = (fit_value - datetime.timedelta(hours=7)).strftime("%a %b %d %H:%M:%S PDT %Y")
436 |
437 | # track last valid lat/longs
438 | if field_name == 'position_lat':
439 | if fit_value is not None:
440 | last_valid_lat = fit_value
441 | if field_name == 'position_long':
442 | if fit_value is not None:
443 | last_valid_long = fit_value
444 |
445 | # ANT FIT SDK Dump tool does a bad job of logging invalids, so fix them
446 | if fit_value is None:
447 | # ANT FIT SDK Dump tool cadence reports invalid as 0
448 | if field_name == 'cadence' and csv_value == '0':
449 | csv_value = None
450 | # ANT FIT SDK Dump tool invalid lat/lng reports as last valid
451 | if field_name == 'position_lat':
452 | fit_value = last_valid_lat
453 | if field_name == 'position_long':
454 | fit_value = last_valid_long
455 |
456 | if isinstance(fit_value, int):
457 |                     csv_value = int(csv_value)
458 | if csv_value == '':
459 | csv_value = None
460 |
461 | if isinstance(fit_value, float):
462 | # float comparison
463 | self.assertAlmostEqual(fit_value, float(csv_value))
464 | else:
465 | self.assertEqual(
466 | fit_value, csv_value,
467 | msg="For %s, FIT value '%s' did not match CSV value '%s'" % (field_name, fit_value, csv_value))
468 |
469 | try:
470 | next(messages)
471 |             self.fail(".FIT file had more messages than .CSV file")
472 | except StopIteration:
473 | pass
474 |
475 | try:
476 | next(csv_messages)
477 | self.fail(".CSV file had more messages than .FIT file")
478 | except StopIteration:
479 | pass
480 |
481 | csv_fp.close()
482 |
483 | def test_fitparse_speed(self):
484 | fit = fitdecode.FitReader(
485 | _test_file('2019-02-17-062644-ELEMNT-297E-195-0.fit'),
486 | check_crc=fitdecode.CrcCheck.RAISE)
487 |
488 | # find the first 'session' data message
489 | msg = next(
490 | r for r in fit
491 | if isinstance(r, fitdecode.FitDataMessage)
492 | and r.name == 'session')
493 |
494 | self.assertEqual(msg.get_value('avg_speed', fit_type='uint16'), 5.86)
495 |
496 | def test_fitparse_units_processor(self):
497 | for x in ('2013-02-06-12-11-14.fit', '2015-10-13-08-43-15.fit',
498 | 'Activity.fit', 'Edge810-Vector-2013-08-16-15-35-10.fit',
499 | 'MonitoringFile.fit', 'Settings.fit', 'Settings2.fit',
500 | 'WeightScaleMultiUser.fit', 'WeightScaleSingleUser.fit',
501 | 'WorkoutCustomTargetValues.fit', 'WorkoutIndividualSteps.fit',
502 | 'WorkoutRepeatGreaterThanStep.fit', 'WorkoutRepeatSteps.fit',
503 | 'activity-large-fenxi2-multisport.fit', 'activity-small-fenix2-run.fit',
504 | 'antfs-dump.63.fit', 'sample-activity-indoor-trainer.fit',
505 | 'sample-activity.fit', 'garmin-fenix-5-bike.fit',
506 | 'garmin-fenix-5-run.fit', 'garmin-fenix-5-walk.fit',
507 | 'garmin-edge-820-bike.fit'):
508 | tuple(fitdecode.FitReader(
509 | _test_file(x),
510 | check_crc=fitdecode.CrcCheck.RAISE,
511 | processor=fitdecode.StandardUnitsDataProcessor()))
512 |
513 | def test_fitparse_int_long(self):
514 | """Test that ints are properly shifted and scaled"""
515 | fit = tuple(fitdecode.FitReader(
516 | _test_file('event_timestamp.fit'),
517 | check_crc=fitdecode.CrcCheck.RAISE))
518 | raw_value = fit[-2].get_value('event_timestamp', idx=0, raw_value=True)
519 | self.assertEqual(raw_value, 863.486328125)
520 |
521 |
522 | if __name__ == '__main__':
523 | unittest.main()
524 |
--------------------------------------------------------------------------------
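
For reference, the FIT header that _generate_fitfile packs in the test module above declares a size of 14 bytes and encodes its versions as described in the "Protocol version 1.0, profile version 1.52" comment. A small sketch of that arithmetic (body_size is an arbitrary example value):

    import struct

    body_size = 100  # arbitrary example value
    header = struct.pack('<2BHI4s', 14, 16, 152, body_size, b'.FIT')

    size, proto, profile, body, magic = struct.unpack('<2BHI4s', header)
    assert (size, body, magic) == (14, body_size, b'.FIT')

    # protocol version byte: major in the high nibble, minor in the low nibble
    print(proto >> 4, proto & 0x0f)       # 1 0  -> protocol 1.0

    # profile version: major * 100 + minor
    print(profile // 100, profile % 100)  # 1 52 -> profile 1.52

    # the declared header_size of 14 counts a trailing 2-byte header CRC that
    # is not part of these 12 packed bytes
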
/tests/files/compressed-speed-distance-records.csv:
--------------------------------------------------------------------------------
1 | Timestamp, Heart Rate [bpm],Speed [m/s],Distance [m],Cadence [rpm]
2 | 17217869,93,3.54,0.0,null
3 | 17217874,104,3.55,14.25,88
4 | 17217879,113,0.0,18.875,34
5 | 17217884,111,1.66,25.1875,41
6 | 17217889,119,3.12,42.0,87
7 | 17217894,122,0.0,43.9375,53
8 | 17217899,117,0.0,43.9375,0
9 | 17217904,116,0.55,46.8125,0
10 | 17217909,122,3.31,63.75,74
11 | 17217914,131,2.91,80.25,87
12 | 17217919,137,2.84,89.125,87
13 | 17217924,141,2.59,106.3125,87
14 | 17217929,143,2.52,118.3125,88
15 | 17217934,144,2.49,130.1875,87
16 | 17217939,145,0.0,134.75,69
17 | 17217944,140,0.0,134.75,0
18 | 17217949,134,0.51,134.75,12
19 | 17217954,137,2.77,146.0,76
20 | 17217959,143,2.64,163.6875,86
21 | 17217964,145,2.51,175.125,87
22 | 17217969,146,2.47,187.0625,88
23 | 17217974,149,2.41,198.3125,88
24 | 17217979,150,2.31,208.875,88
25 | 17217984,149,2.24,219.8125,87
26 | 17217989,150,1.39,228.0,43
27 | 17217994,152,2.19,238.5625,75
28 | 17217999,151,2.14,248.75,86
29 | 17218004,149,2.12,259.0625,87
30 | 17218009,150,2.08,268.9375,87
31 | 17218014,150,2.03,278.8125,87
32 | 17218019,149,2.03,289.1875,85
33 | 17218024,151,1.97,299.125,81
34 | 17218029,153,1.96,307.75,82
35 | 17218034,151,2.0,319.6875,83
36 | 17218039,150,1.98,329.3125,83
37 | 17218044,151,1.96,339.25,83
38 | 17218049,151,1.97,347.8125,84
39 | 17218054,148,1.99,357.875,85
40 | 17218059,144,2.02,368.125,85
41 | 17218064,147,2.03,379.3125,86
42 | 17218069,152,2.02,390.625,87
43 | 17218074,153,0.0,396.75,87
44 | 17218079,152,0.0,396.75,0
45 | 17218084,148,0.0,396.75,0
46 | 17218089,145,0.0,396.75,0
47 | 17218094,139,0.0,396.75,0
48 | 17218099,140,2.52,409.25,77
49 | 17218104,143,2.45,421.125,85
50 | 17218109,145,2.41,432.5625,86
51 | 17218114,148,2.4,444.125,87
52 | 17218119,152,2.33,456.375,87
53 | 17218124,154,2.26,467.25,87
54 | 17218129,156,2.21,477.6875,87
55 | 17218134,158,2.14,487.8125,86
56 | 17218139,160,2.09,497.8125,86
57 | 17218144,161,2.07,508.0625,87
58 | 17218149,160,2.05,519.5,86
59 | 17218154,159,1.98,528.9375,85
60 | 17218159,158,1.93,537.0,85
61 | 17218164,158,1.92,546.875,85
62 | 17218169,158,1.95,556.5625,85
63 | 17218174,157,1.97,566.5625,86
64 | 17218179,157,2.04,577.5,86
65 | 17218184,157,2.47,592.375,85
66 | 17218189,157,0.0,596.0625,34
67 | 17218194,154,0.0,596.0625,0
68 | 17218199,150,0.0,599.625,39
69 | 17218204,147,0.0,599.625,0
70 | 17218209,143,0.0,599.625,0
71 | 17218214,137,0.33,600.8125,13
72 | 17218219,137,2.35,612.5,80
73 | 17218224,135,2.43,625.4375,85
74 | 17218229,137,2.62,638.1875,85
75 | 17218234,142,2.54,649.3125,87
76 | 17218239,146,2.45,661.4375,86
77 | 17218244,149,2.33,673.75,86
78 | 17218249,151,2.25,684.375,87
79 | 17218254,152,2.2,694.6875,87
80 | 17218259,153,2.15,706.6875,87
81 | 17218264,154,2.13,717.125,86
82 | 17218269,155,2.12,727.5,85
83 | 17218274,156,2.08,736.3125,85
84 | 17218279,158,2.08,748.25,85
85 | 17218284,159,2.21,760.0,85
86 | 17218289,161,2.22,770.3125,86
87 | 17218294,162,2.14,780.5625,86
88 | 17218299,162,2.11,791.1875,86
89 | 17218304,162,2.16,802.9375,86
90 | 17218309,162,2.47,816.5625,85
91 | 17218314,161,2.8,829.0,85
92 | 17218319,161,2.82,842.75,85
93 | 17218324,161,2.77,856.1875,85
94 | 17218329,160,2.67,870.25,85
95 | 17218334,157,2.45,881.875,86
96 | 17218339,156,2.35,894.4375,85
97 | 17218344,157,2.23,903.3125,85
98 | 17218349,158,2.09,912.6875,85
99 | 17218354,158,2.01,922.5625,85
100 | 17218359,160,1.97,932.125,85
101 | 17218364,164,1.92,942.1875,83
102 | 17218369,165,1.89,951.125,82
103 | 17218374,164,1.84,959.875,83
104 | 17218379,166,1.79,969.375,84
105 | 17218384,168,1.76,976.625,81
106 | 17218389,170,1.67,985.3125,81
107 | 17218394,170,1.38,990.0625,80
108 | 17218399,170,0.93,993.125,81
109 | 17218404,170,1.03,1000.75,82
110 | 17218409,169,1.63,1009.6875,82
111 | 17218414,168,1.72,1019.0,82
112 | 17218419,167,1.12,1022.875,82
113 | 17218424,167,0.85,1029.1875,82
114 | 17218429,168,1.47,1037.75,83
115 | 17218434,169,1.64,1045.0625,83
116 | 17218439,168,1.66,1052.1875,84
117 | 17218444,167,1.66,1061.8125,84
118 | 17218449,167,1.64,1069.0,82
119 | 17218454,168,1.56,1075.75,82
120 | 17218459,169,0.75,1079.5625,82
121 | 17218464,168,0.78,1083.75,77
122 | 17218469,166,0.0,1083.75,24
123 | 17218474,163,0.0,1083.75,0
124 | 17218479,160,0.0,1083.75,0
125 | 17218484,156,0.0,1083.75,0
126 | 17218489,151,0.0,1083.75,0
127 | 17218494,144,0.0,1083.75,0
128 | 17218499,140,0.0,1083.75,0
129 | 17218504,132,0.0,1083.75,0
130 | 17218509,128,0.48,1087.0,21
131 | 17218514,125,0.0,1087.375,17
132 | 17218519,121,0.0,1087.375,0
133 | 17218524,119,0.0,1087.375,0
134 | 17218529,118,0.0,1087.375,0
135 | 17218534,117,0.0,1087.375,0
136 | 17218539,116,0.0,1087.375,0
137 | 17218544,117,0.0,1087.375,0
138 | 17218659,108,1.14,1092.5,24
139 | 17218664,110,2.19,1103.6875,78
140 | 17218669,121,2.12,1112.0625,84
141 | 17218674,133,2.05,1121.9375,85
142 | 17218679,142,2.04,1132.0,86
143 | 17218684,148,1.98,1141.0625,87
144 | 17218689,154,1.88,1152.8125,87
145 | 17218694,158,0.0,1155.9375,51
146 | 17218699,161,1.73,1165.5625,69
147 | 17218704,159,1.81,1173.4375,84
148 | 17218709,158,1.77,1182.1875,84
149 | 17218714,160,1.75,1190.9375,84
150 | 17218719,163,1.77,1199.8125,84
151 | 17218724,166,1.75,1208.6875,83
152 | 17218729,168,1.74,1217.125,84
153 | 17218734,168,1.72,1225.5625,85
154 | 17218739,168,1.71,1234.5,85
155 | 17218744,167,1.24,1239.0,84
156 | 17218749,167,0.93,1242.1875,83
157 | 17218754,168,1.22,1250.5625,83
158 | 17218759,169,1.57,1258.8125,84
159 | 17218764,168,1.62,1268.1875,85
160 | 17218769,168,1.64,1276.75,84
161 | 17218774,167,1.64,1285.25,83
162 | 17218779,167,1.64,1291.625,83
163 | 17218784,167,0.86,1295.4375,83
164 | 17218789,167,0.77,1299.3125,83
165 | 17218794,166,0.75,1303.25,83
166 | 17218799,167,0.78,1306.8125,83
167 | 17218804,168,0.76,1310.5,83
168 | 17218809,164,0.7,1313.25,83
169 | 17218814,165,0.71,1317.0,83
170 | 17218819,166,0.74,1321.125,82
171 | 17218824,166,0.73,1325.0,81
172 | 17218829,166,0.7,1328.1875,82
173 | 17218834,166,0.68,1331.6875,82
174 | 17218839,165,0.73,1336.1875,82
175 | 17218844,165,0.89,1341.125,84
176 | 17218849,163,0.89,1344.875,85
177 | 17218854,164,0.81,1349.125,85
178 | 17218859,165,0.88,1354.6875,84
179 | 17218864,166,1.41,1363.125,84
180 | 17218869,166,1.6,1372.0,83
181 | 17218874,165,1.63,1380.3125,83
182 | 17218879,165,1.64,1388.5,83
183 | 17218884,166,1.64,1395.75,84
184 | 17218889,166,1.68,1404.6875,84
185 | 17218894,168,1.72,1414.75,84
186 | 17218899,171,1.75,1423.9375,84
187 | 17218904,171,1.74,1431.25,84
188 | 17218909,172,1.72,1440.0,83
189 | 17218914,170,1.71,1448.4375,84
190 | 17218919,169,1.69,1456.5625,85
191 | 17218924,171,1.65,1463.5625,85
192 | 17218929,170,1.66,1472.125,84
193 | 17218934,171,1.37,1477.8125,83
194 | 17218939,170,0.79,1482.8125,82
195 | 17218944,171,1.39,1491.0625,83
196 | 17218949,170,1.61,1500.0,84
197 | 17218954,171,1.68,1508.6875,84
198 | 17218959,172,1.71,1517.625,84
199 | 17218964,174,1.75,1526.6875,84
200 | 17218969,175,1.78,1535.625,84
201 | 17218974,176,1.82,1545.0,85
202 | 17218979,176,1.86,1554.3125,86
203 | 17218984,176,1.84,1563.4375,86
204 | 17218989,177,1.88,1574.375,86
205 | 17218994,175,1.92,1583.875,86
206 | 17218999,175,1.95,1593.6875,87
207 | 17219004,175,1.99,1603.8125,87
208 | 17219009,176,2.16,1615.9375,87
209 | 17219014,174,2.62,1630.3125,87
210 | 17219019,174,2.3,1637.5625,87
211 | 17219024,174,0.0,1641.625,55
212 | 17219029,169,0.0,1641.625,0
213 | 17219034,164,0.0,1641.625,0
214 | 17219039,155,0.0,1641.625,0
215 | 17219044,149,0.0,1641.625,0
216 | 17219049,150,1.19,1648.625,36
217 | 17219054,152,2.46,1655.25,74
218 | 17219059,151,3.98,1678.0625,91
219 | 17219064,146,0.0,1678.0625,0
220 | 17219069,140,0.0,1678.0625,0
221 | 17219074,137,0.0,1678.0625,0
222 | 17219079,130,0.0,1678.0625,0
223 | 17219084,125,0.0,1678.0625,0
224 | 17219089,123,0.0,1678.0625,0
225 | 17219094,122,0.0,1678.0625,0
226 | 17219099,123,2.47,1689.75,45
227 | 17219104,136,3.44,1707.375,89
228 | 17219109,146,3.21,1722.125,91
229 | 17219114,155,3.39,1741.25,91
230 | 17219119,161,3.61,1761.0,91
231 | 17219124,165,3.62,1778.5625,90
232 | 17219129,167,3.69,1799.3125,89
233 | 17219134,170,3.78,1818.375,89
234 | 17219139,171,3.81,1836.25,89
235 | 17219144,168,3.7,1852.875,89
236 | 17219149,165,3.52,1871.4375,89
237 | 17219154,164,3.49,1885.8125,90
238 | 17219159,167,3.69,1906.3125,90
239 | 17219164,169,3.72,1923.75,90
240 | 17219169,171,3.7,1941.0625,90
241 | 17219174,172,3.77,1964.375,90
242 | 17219179,172,3.83,1985.3125,90
243 | 17219184,173,3.79,2000.375,90
244 | 17219189,173,3.79,2021.0,90
245 | 17219194,173,3.81,2039.1875,90
246 | 17219199,172,3.95,2061.1875,91
247 | 17219204,172,4.24,2080.5,93
248 | 17219209,174,4.28,2102.75,93
249 | 17219214,174,3.99,2120.9375,90
250 | 17219219,173,3.82,2141.3125,89
251 | 17219224,174,3.73,2158.5625,89
252 | 17219229,176,3.58,2177.4375,89
253 | 17219234,174,3.49,2194.25,89
254 | 17219239,174,3.48,2210.75,89
255 | 17219244,174,3.46,2227.375,89
256 | 17219249,174,3.42,2245.5,89
257 | 17219254,174,3.33,2261.0625,89
258 | 17219259,174,3.29,2279.0625,89
259 | 17219264,172,3.25,2294.1875,89
260 | 17219269,173,3.19,2309.4375,89
261 | 17219274,173,3.29,2326.0625,89
262 | 17219279,175,3.38,2340.4375,89
263 | 17219284,177,3.41,2359.1875,89
264 | 17219289,179,3.52,2377.125,89
265 | 17219294,179,3.74,2398.0625,89
266 | 17219299,179,3.74,2415.25,89
267 | 17219304,178,3.58,2431.5,89
268 | 17219309,180,3.38,2447.375,89
269 | 17219314,180,3.24,2464.4375,88
270 | 17219319,178,3.17,2482.25,88
271 | 17219324,178,3.37,2498.75,88
272 | 17219329,180,3.32,2511.5625,89
273 | 17219334,180,3.14,2528.0625,89
274 | 17219339,180,2.97,2542.0625,89
275 | 17219344,177,2.95,2558.5,89
276 | 17219349,179,2.95,2570.6875,88
277 | 17219354,179,2.96,2588.5,89
278 | 17219359,179,2.97,2602.875,90
279 | 17219364,180,3.02,2619.5625,90
280 | 17219369,182,3.0,2634.125,89
281 | 17219374,181,3.08,2649.0625,89
282 | 17219379,182,3.07,2663.6875,89
283 | 17219384,183,3.0,2681.875,89
284 | 17219389,183,3.04,2696.625,89
285 | 17219394,184,3.0,2710.125,89
286 | 17219399,185,2.87,2723.0625,89
287 | 17219404,184,2.74,2737.3125,89
288 | 17219409,185,2.69,2748.125,89
289 | 17219414,186,2.62,2760.25,89
290 | 17219419,186,2.61,2775.0,89
291 | 17219424,186,2.9,2789.8125,89
292 | 17219429,186,2.96,2803.8125,89
293 | 17219434,186,2.88,2818.9375,89
294 | 17219439,186,2.87,2832.8125,89
295 | 17219444,185,2.85,2847.875,89
296 | 17219449,185,2.78,2860.75,89
297 | 17219454,185,2.74,2873.6875,89
298 | 17219459,185,2.73,2888.6875,89
299 | 17219464,185,2.85,2903.4375,89
300 | 17219469,184,3.09,2918.5,89
301 | 17219474,184,3.06,2934.5,89
302 | 17219479,183,2.9,2949.1875,89
303 | 17219484,182,2.75,2961.375,89
304 | 17219489,181,2.59,2972.625,89
305 | 17219494,181,2.44,2983.9375,89
306 | 17219499,180,2.43,2993.875,89
307 | 17219504,179,2.37,3007.75,89
308 | 17219509,180,2.3,3018.375,89
309 | 17219514,179,2.25,3029.0,89
310 | 17219519,176,2.29,3044.0625,89
311 | 17219524,173,2.51,3056.875,89
312 | 17219529,174,2.65,3070.4375,89
313 | 17219534,175,2.79,3084.0625,89
314 | 17219539,174,2.85,3098.25,89
315 | 17219544,174,3.09,3114.0,89
316 | 17219549,174,3.19,3131.75,89
317 | 17219554,173,3.35,3148.5625,89
318 | 17219559,172,3.4,3164.5625,89
319 | 17219564,172,3.35,3179.875,89
320 | 17219569,171,3.15,3196.1875,89
321 | 17219574,170,3.0,3212.0,89
322 | 17219579,171,2.96,3226.0,89
323 | 17219584,170,2.94,3239.875,89
324 | 17219589,170,2.93,3254.0,89
325 | 17219594,170,2.9,3269.3125,89
326 | 17219599,168,2.78,3279.9375,89
327 | 17219604,167,1.66,3287.875,31
328 | 17219609,166,3.5,3307.875,84
329 | 17219614,165,3.64,3324.9375,90
330 | 17219619,166,3.64,3341.8125,91
331 | 17219624,165,3.53,3360.5625,90
332 | 17219629,166,3.51,3377.0625,90
333 | 17219634,167,3.56,3392.4375,90
334 | 17219639,166,3.72,3413.75,90
335 | 17219644,167,3.82,3431.625,90
336 | 17219649,169,3.69,3448.625,90
337 | 17219654,169,3.66,3468.8125,90
338 | 17219659,168,3.69,3488.625,89
339 | 17219664,166,3.54,3507.375,89
340 | 17219669,165,3.53,3522.0625,89
341 | 17219674,165,3.52,3540.9375,89
342 | 17219679,166,3.47,3557.5625,89
343 | 17219684,166,3.46,3573.875,89
344 | 17219689,165,3.5,3590.625,89
345 | 17219694,165,3.53,3609.75,90
346 | 17219699,166,3.62,3630.0,90
347 | 17219704,166,3.64,3647.1875,90
348 | 17219709,165,3.62,3666.5,90
349 | 17219714,165,3.56,3683.0,90
350 | 17219719,167,3.49,3699.0625,90
351 | 17219724,169,3.39,3717.0,90
352 | 17219729,170,3.35,3732.625,90
353 | 17219734,170,3.33,3750.6875,90
354 | 17219739,169,3.3,3766.0,90
355 | 17219744,169,3.29,3784.6875,90
356 | 17219749,171,3.66,3802.5625,90
357 | 17219754,172,3.75,3822.5,91
358 | 17219759,174,3.68,3839.375,90
359 | 17219764,175,3.58,3853.0625,90
360 | 17219769,176,3.35,3871.125,89
361 | 17219774,177,3.36,3886.9375,89
362 | 17219779,178,3.24,3901.875,89
363 | 17219784,178,3.2,3918.75,89
364 | 17219789,177,3.14,3933.8125,89
365 | 17219794,175,3.26,3947.9375,89
366 | 17219799,176,3.44,3969.125,89
367 | 17219804,176,3.45,3987.8125,89
368 | 17219809,177,3.43,4003.8125,90
369 | 17219814,177,3.33,4019.0,90
370 | 17219819,175,3.22,4033.875,89
371 | 17219824,174,3.14,4050.6875,89
372 | 17219829,172,3.09,4068.125,88
373 | 17219834,173,3.09,4082.1875,88
374 | 17219839,174,2.89,4094.75,89
375 | 17219844,175,2.68,4106.9375,89
376 | 17219849,175,2.55,4118.4375,88
377 | 17219854,173,2.38,4129.75,87
378 | 17219859,173,2.42,4144.3125,87
379 | 17219864,176,2.53,4155.3125,87
380 | 17219869,177,2.54,4170.9375,87
381 | 17219874,180,2.48,4182.9375,87
382 | 17219879,182,2.47,4195.0625,87
383 | 17219884,183,2.51,4207.5625,87
384 | 17219889,183,2.69,4222.125,87
385 | 17219894,182,3.07,4233.1875,87
386 | 17219899,182,3.06,4251.8125,88
387 | 17219904,180,3.11,4267.25,88
388 | 17219909,180,3.12,4281.9375,88
389 | 17219914,178,3.12,4298.9375,89
390 | 17219919,176,3.2,4314.875,90
391 | 17219924,175,3.31,4333.3125,90
392 | 17219929,174,3.43,4352.3125,90
393 | 17219934,174,3.61,4369.8125,91
394 | 17219939,173,3.81,4389.75,93
395 | 17219944,172,3.81,4409.4375,94
396 | 17219949,171,3.62,4425.75,92
397 | 17219954,170,3.54,4444.25,91
398 | 17219959,169,3.44,4462.25,89
399 | 17219964,169,3.35,4477.875,89
400 | 17219969,170,3.37,4493.625,90
401 | 17219974,170,3.3,4510.9375,90
402 | 17219979,171,3.2,4525.75,89
403 | 17219984,168,3.14,4540.9375,88
404 | 17219989,167,3.11,4553.625,88
405 | 17219994,167,3.01,4567.625,88
406 | 17219999,168,2.97,4584.0,88
407 | 17220004,171,2.89,4597.8125,87
408 | 17220009,172,2.92,4614.75,87
409 | 17220014,174,3.12,4630.5,87
410 | 17220019,175,3.2,4648.6875,87
411 | 17220024,174,3.26,4662.4375,88
412 | 17220029,174,3.44,4681.9375,89
413 | 17220034,175,3.63,4699.0,90
414 | 17220039,176,3.81,4719.5,92
415 | 17220044,175,3.85,4736.9375,92
416 | 17220049,174,3.63,4755.75,91
417 | 17220054,174,3.37,4771.9375,91
418 | 17220059,175,3.01,4785.625,91
419 | 17220064,176,2.89,4798.625,90
420 | 17220069,177,2.8,4813.75,89
421 | 17220074,179,2.77,4826.8125,89
422 | 17220079,180,2.68,4838.875,89
423 | 17220084,180,2.61,4851.3125,89
424 | 17220089,180,1.66,4860.25,61
425 | 17220094,178,2.77,4872.875,84
426 | 17220099,179,2.59,4884.6875,88
427 | 17220104,180,2.53,4896.5,88
428 | 17220109,181,2.47,4908.625,87
429 | 17220114,183,2.48,4920.6875,87
430 | 17220119,184,2.52,4933.5625,87
431 | 17220124,184,2.54,4945.75,87
432 | 17220129,185,2.51,4957.875,88
433 | 17220134,185,2.77,4972.5,88
434 | 17220139,185,3.09,4989.875,88
435 | 17220144,184,3.14,5007.25,89
436 | 17220149,184,3.12,5023.5,89
437 | 17220154,184,2.95,5034.6875,89
438 | 17220159,183,2.73,5049.0,89
439 | 17220164,181,2.71,5062.8125,88
440 | 17220169,182,2.89,5077.0,89
441 | 17220174,180,2.87,5090.5,89
442 | 17220179,179,2.85,5104.625,88
443 | 17220184,176,3.13,5123.0625,88
444 | 17220189,175,3.64,5143.5625,89
445 | 17220194,177,3.71,5160.5625,90
446 | 17220199,177,3.56,5178.4375,90
447 | 17220204,178,3.0,5191.0,90
448 | 17220209,175,2.71,5203.5,90
449 | 17220214,175,2.68,5216.9375,89
450 | 17220219,175,2.73,5231.75,88
451 | 17220224,175,2.65,5245.5625,89
452 | 17220229,173,2.56,5257.6875,89
453 | 17220234,172,2.59,5270.6875,89
454 | 17220239,175,2.78,5285.5,89
455 | 17220244,177,3.15,5303.0625,89
456 | 17220249,179,3.26,5316.6875,90
457 | 17220254,178,3.25,5333.9375,90
458 | 17220259,178,3.16,5348.4375,90
459 | 17220264,178,2.97,5361.3125,90
460 | 17220269,180,2.58,5374.875,89
461 | 17220274,179,2.48,5385.9375,89
462 | 17220279,180,2.35,5396.9375,87
463 | 17220284,181,2.3,5409.6875,87
464 | 17220289,181,2.27,5420.375,87
465 | 17220294,183,2.22,5430.875,87
466 | 17220299,184,2.18,5439.75,87
467 | 17220304,184,2.15,5451.5,87
468 | 17220309,184,2.11,5461.5,87
469 | 17220314,184,2.06,5472.9375,87
470 | 17220319,185,2.07,5483.1875,87
471 | 17220324,185,2.08,5493.625,87
472 | 17220329,185,2.09,5503.8125,87
473 | 17220334,186,2.09,5514.125,87
474 | 17220339,186,2.09,5525.6875,87
475 | 17220344,186,2.05,5532.5625,87
476 | 17220349,186,2.05,5544.375,87
477 | 17220354,186,2.11,5557.75,87
478 | 17220359,185,2.1,5567.75,87
479 | 17220364,184,2.1,5579.125,87
480 | 17220369,183,2.22,5588.0625,81
481 | 17220374,181,1.01,5590.5,36
482 | 17220379,180,1.74,5601.0625,64
483 | 17220384,177,1.78,5607.5,74
484 | 17220389,174,0.0,5609.8125,46
485 | 17220394,169,0.68,5613.0,34
486 | 17220399,166,1.84,5624.625,81
487 | 17220404,163,2.71,5639.0,86
488 | 17220409,160,3.04,5656.75,88
489 | 17220414,159,3.28,5675.125,88
490 | 17220419,158,3.27,5690.1875,88
491 | 17220424,155,3.2,5705.0,89
492 | 17220429,153,2.93,5717.5,89
493 | 17220434,153,1.85,5723.875,77
494 | 17220439,155,1.81,5734.0625,80
495 | 17220444,156,2.27,5747.8125,86
496 | 17220449,157,2.42,5760.125,86
497 | 17220454,158,2.44,5773.6875,86
498 | 17220459,159,2.42,5785.4375,87
499 | 17220464,160,2.4,5797.0625,87
500 | 17220469,163,2.55,5811.0,87
501 | 17220474,167,2.86,5823.6875,87
502 | 17220479,170,2.89,5839.1875,87
503 | 17220484,171,2.8,5851.75,88
504 | 17220489,171,2.71,5865.1875,88
505 | 17220494,172,3.38,5887.6875,88
506 | 17220499,173,3.78,5905.25,90
507 | 17220504,175,3.86,5925.75,92
508 | 17220509,176,3.84,5942.375,93
509 | 17220514,176,3.77,5961.0,95
510 | 17220519,176,3.9,5980.6875,97
511 | 17220524,175,4.08,6003.0625,100
512 | 17220529,175,4.12,6023.375,101
513 | 17220534,175,4.16,6043.9375,100
514 | 17220539,174,4.24,6068.3125,99
515 | 17220544,174,4.26,6087.0,96
516 | 17220549,175,4.14,6107.9375,95
517 | 17220554,176,4.05,6128.1875,94
518 | 17220559,177,3.92,6145.4375,94
519 | 17220564,176,3.82,6164.5625,94
520 | 17220569,175,3.78,6184.375,94
521 | 17220574,171,3.81,6203.8125,94
522 | 17220579,168,3.84,6224.25,95
523 | 17220584,169,3.93,6244.375,95
524 | 17220589,172,3.88,6262.0625,94
525 | 17220594,175,3.98,6283.6875,94
526 | 17220599,177,3.98,6298.5625,93
527 | 17220604,180,3.8,6318.375,93
528 | 17220609,181,3.87,6338.9375,93
529 | 17220614,181,3.79,6355.8125,93
530 | 17220619,182,3.85,6376.8125,93
531 | 17220624,182,3.91,6397.4375,93
532 | 17220629,182,3.95,6415.75,93
533 | 17220634,181,3.92,6435.8125,93
534 | 17220639,178,3.9,6456.25,93
535 | 17220644,178,3.99,6471.75,94
536 | 17220649,179,4.02,6498.0625,94
537 | 17220654,178,4.1,6518.0625,95
538 | 17220659,178,3.96,6535.9375,95
539 | 17220664,177,3.89,6554.8125,95
540 | 17220669,176,3.86,6577.3125,95
541 | 17220674,176,3.85,6591.8125,95
542 | 17220679,175,4.22,6619.25,96
543 | 17220684,176,4.1,6636.5625,95
544 | 17220689,176,3.88,6656.125,94
545 | 17220694,177,3.54,6671.9375,93
546 | 17220699,179,3.65,6694.75,93
547 | 17220704,182,3.78,6712.1875,93
548 | 17220709,185,3.75,6731.5,93
549 | 17220714,184,3.66,6747.6875,93
550 | 17220719,183,3.67,6767.5,92
551 | 17220724,184,3.66,6784.25,92
552 | 17220729,186,3.72,6804.875,92
553 | 17220734,185,3.83,6822.6875,92
554 | 17220739,183,3.79,6839.75,92
555 | 17220744,182,3.84,6860.625,92
556 | 17220749,181,3.9,6878.4375,93
557 | 17220754,181,3.84,6897.875,93
558 | 17220759,182,3.87,6918.75,93
559 | 17220764,183,4.07,6940.6875,93
560 | 17220769,184,4.06,6961.375,93
561 | 17220774,184,3.79,6977.75,93
562 | 17220779,182,3.66,6997.1875,93
563 | 17220784,182,3.58,7013.125,92
564 | 17220789,184,3.41,7030.5625,92
565 | 17220794,186,3.16,7044.5,91
566 | 17220799,188,3.01,7060.0,91
567 | 17220804,187,2.91,7073.3125,90
568 | 17220809,188,2.86,7088.625,90
569 | 17220814,188,2.83,7102.5,90
570 | 17220819,189,2.95,7117.75,89
571 | 17220824,189,3.12,7134.625,89
572 | 17220829,189,3.14,7147.9375,89
573 | 17220834,189,3.28,7166.5,90
574 | 17220839,188,3.35,7185.25,90
575 | 17220844,187,3.6,7200.625,90
576 | 17220849,187,3.73,7222.9375,91
577 | 17220854,185,3.77,7235.375,93
578 | 17220859,184,3.91,7263.25,94
579 | 17220864,183,4.01,7281.1875,95
580 | 17220869,182,4.07,7302.375,96
581 | 17220874,182,4.15,7324.1875,95
582 | 17220879,181,4.1,7342.4375,94
583 | 17220884,180,4.05,7363.25,94
584 | 17220889,180,4.02,7381.5625,94
585 | 17220894,180,4.11,7406.0625,94
586 | 17220899,180,4.1,7425.0,93
587 | 17220904,181,4.1,7446.25,93
588 | 17220909,181,4.02,7464.3125,93
589 | 17220914,181,3.96,7484.6875,93
590 | 17220919,181,3.9,7504.625,93
591 | 17220924,181,3.92,7523.375,93
592 | 17220929,181,4.02,7547.375,93
593 | 17220934,183,3.99,7565.3125,93
594 | 17220939,184,3.88,7584.9375,93
595 | 17220944,182,3.65,7601.0,92
596 | 17220949,181,3.56,7618.0625,91
597 | 17220954,183,3.62,7637.9375,91
598 | 17220959,184,3.7,7653.0,91
599 | 17220964,185,3.73,7673.4375,91
600 | 17220969,184,3.82,7691.5625,91
601 | 17220974,184,3.79,7711.5,91
602 | 17220979,185,3.78,7734.75,90
603 | 17220984,184,3.74,7751.625,90
604 | 17220989,184,3.57,7766.25,89
605 | 17220994,184,3.56,7785.4375,89
606 | 17220999,184,3.47,7801.5625,89
607 | 17221004,185,3.46,7817.875,89
608 | 17221009,185,3.43,7831.5625,90
609 | 17221014,184,3.47,7850.5,90
610 | 17221019,185,3.45,7869.0,90
611 | 17221024,185,3.43,7887.375,90
612 | 17221029,185,3.38,7905.125,90
613 | 17221034,185,3.34,7921.125,90
614 | 17221039,183,3.3,7936.4375,90
615 | 17221044,182,3.31,7954.9375,89
616 | 17221049,183,3.34,7971.0,89
617 | 17221054,184,3.31,7988.8125,89
618 | 17221059,185,3.27,8004.625,89
619 | 17221064,186,3.36,8021.1875,89
620 | 17221069,186,3.37,8039.5,89
621 | 17221074,186,3.31,8054.875,89
622 | 17221079,186,3.2,8067.375,89
623 | 17221084,187,3.18,8084.5,89
624 | 17221089,187,2.93,8097.75,89
625 | 17221094,187,3.02,8113.4375,89
626 | 17221099,186,3.27,8129.9375,89
627 | 17221104,186,3.33,8148.6875,89
628 | 17221109,186,3.46,8165.875,89
629 | 17221114,185,3.57,8183.375,89
630 | 17221119,185,3.63,8203.375,90
631 | 17221124,183,3.68,8223.125,90
632 | 17221129,184,3.67,8240.0625,90
633 | 17221134,183,3.57,8258.75,90
634 | 17221139,183,3.51,8274.9375,90
635 | 17221144,183,3.45,8291.0625,90
636 | 17221149,183,3.38,8309.625,90
637 | 17221154,182,3.59,8329.0625,90
638 | 17221159,182,3.61,8346.5625,90
639 | 17221164,183,3.72,8367.0625,90
640 | 17221169,183,3.81,8384.75,91
641 | 17221174,182,3.86,8405.5,92
642 | 17221179,181,3.94,8423.9375,93
643 | 17221184,181,3.98,8442.375,93
644 | 17221189,181,4.0,8465.875,93
645 | 17221194,181,3.89,8486.1875,92
646 | 17221199,180,3.9,8504.0625,92
647 | 17221204,180,3.95,8524.625,93
648 | 17221209,179,3.9,8542.1875,93
649 | 17221214,177,3.78,8561.4375,93
650 | 17221219,178,3.76,8578.625,93
651 | 17221224,177,3.74,8598.0,93
652 | 17221229,177,3.71,8614.5,94
653 | 17221234,178,3.95,8635.0,95
654 | 17221239,178,4.15,8656.8125,96
655 | 17221244,179,4.11,8677.25,94
656 | 17221249,179,4.02,8697.75,94
657 | 17221254,179,3.95,8718.0,93
658 | 17221259,179,3.95,8738.625,93
659 | 17221264,178,3.99,8756.75,95
660 | 17221269,178,3.99,8777.3125,94
661 | 17221274,178,4.0,8798.625,94
662 | 17221279,178,4.16,8817.6875,94
663 | 17221284,179,4.13,8840.5625,94
664 | 17221289,179,3.9,8858.125,93
665 | 17221294,180,3.87,8878.375,93
666 | 17221299,180,3.92,8898.9375,94
667 | 17221304,180,3.87,8915.75,93
668 | 17221309,180,3.72,8934.4375,93
669 | 17221314,179,3.62,8950.875,92
670 | 17221319,179,3.64,8968.4375,92
671 | 17221324,179,3.96,8990.0625,92
672 | 17221329,180,4.01,9013.25,93
673 | 17221334,181,4.02,9031.75,93
674 | 17221339,181,4.03,9052.3125,93
675 | 17221344,181,3.96,9067.75,93
676 | 17221349,181,3.99,9091.125,93
677 | 17221354,180,3.95,9109.0625,93
678 | 17221359,178,3.88,9128.875,93
679 | 17221364,178,3.84,9146.4375,93
680 | 17221369,180,3.77,9165.5625,93
681 | 17221374,181,3.68,9182.3125,92
682 | 17221379,183,3.68,9204.875,92
683 | 17221384,183,3.71,9221.1875,92
684 | 17221389,184,3.34,9238.0625,91
685 | 17221394,185,3.29,9254.125,91
686 | 17221399,186,3.31,9269.625,91
687 | 17221404,186,3.28,9287.5625,91
688 | 17221409,187,3.39,9306.4375,91
689 | 17221414,187,3.49,9323.5625,91
690 | 17221419,187,3.55,9342.4375,91
691 | 17221424,187,3.59,9359.875,91
692 | 17221429,187,3.6,9378.875,91
693 | 17221434,187,3.6,9395.75,91
694 | 17221439,187,3.59,9412.4375,91
695 | 17221444,187,3.57,9426.75,91
696 | 17221449,187,3.56,9445.8125,91
697 | 17221454,186,3.71,9466.4375,91
698 | 17221459,186,3.7,9483.0625,91
699 | 17221464,185,3.61,9501.75,91
700 | 17221469,185,3.51,9521.875,91
701 | 17221474,184,3.35,9535.4375,91
702 | 17221479,184,3.57,9555.4375,91
703 | 17221484,185,3.65,9572.625,91
704 | 17221489,183,3.8,9593.25,91
705 | 17221494,182,3.84,9613.4375,93
706 | 17221499,182,3.84,9630.8125,93
707 | 17221504,181,3.56,9648.4375,92
708 | 17221509,182,3.15,9664.0,92
709 | 17221514,184,2.99,9678.5,90
710 | 17221519,185,3.52,9698.875,90
711 | 17221524,187,3.65,9714.875,91
712 | 17221529,187,3.47,9730.625,91
713 | 17221534,186,3.42,9749.0,91
714 | 17221539,187,3.31,9765.5,91
715 | 17221544,187,2.92,9778.375,91
716 | 17221549,187,2.81,9793.25,91
717 | 17221554,187,2.81,9806.5625,91
718 | 17221559,188,2.82,9821.75,91
719 | 17221564,188,2.82,9835.1875,91
720 | 17221569,189,2.91,9849.3125,91
721 | 17221574,189,2.99,9864.0,91
722 | 17221579,189,3.06,9880.0,91
723 | 17221584,189,3.02,9894.0,91
724 | 17221589,188,2.96,9907.4375,91
725 | 17221594,188,2.88,9922.375,91
726 | 17221599,188,2.87,9933.9375,91
727 | 17221604,188,2.79,9950.375,90
728 | 17221609,188,3.06,9969.0,88
729 | 17221614,187,3.67,9986.625,90
730 | 17221619,187,3.72,10006.25,91
731 | 17221624,185,3.79,10024.0,92
732 | 17221629,184,3.8,10043.875,93
733 | 17221634,183,3.73,10060.8125,93
734 | 17221639,183,3.78,10083.75,93
735 | 17221644,182,3.88,10104.1875,93
736 | 17221649,182,3.96,10122.375,94
737 | 17221654,182,3.99,10142.6875,94
738 | 17221659,181,3.96,10163.0,94
739 | 17221664,181,3.86,10179.6875,94
740 | 17221669,181,3.74,10195.625,94
741 | 17221674,179,3.38,10213.9375,91
742 | 17221679,177,3.66,10234.9375,92
743 | 17221684,177,2.59,10242.0625,84
744 | 17221689,177,0.56,10244.4375,58
745 | 17221694,175,0.0,10246.25,51
746 | 17221699,173,0.0,10246.25,0
747 | 17221704,169,0.0,10246.25,0
748 | 17221709,166,0.0,10246.25,0
749 | 17221714,164,0.0,10246.25,0
750 | 17221719,162,0.0,10248.6875,14
751 | 17221724,157,0.0,10248.6875,0
752 | 17221729,153,0.0,10248.6875,0
753 | 17221734,150,0.0,10248.6875,0
754 | 17221739,130,0.0,10248.6875,0
755 | 17221744,118,0.0,10248.6875,0
756 |
--------------------------------------------------------------------------------
/tools/generate_profile.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # Note:
4 | # Script originally copied from python-fitparse v1.0.1; it has since been
5 | # slightly corrected and improved. Some modifications have been merged upstream.
6 | #
7 | # Original comment:
8 | # Horrible, dirty, ugly, awful, and terrible script to export the Profile.xls
9 | # that comes with the FIT SDK to the Python data structures in profile.py. You
10 | # shouldn't have to use this unless you're developing python-fitparse.
11 | #
12 |
13 | import datetime
14 | import os
15 | import re
16 | import sys
17 | import zipfile
18 | from collections import namedtuple
19 | from itertools import islice
20 |
21 | import openpyxl
22 |
23 |
24 | FIELD_NUM_TIMESTAMP = 253
25 |
26 | XLS_HEADER_MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1'
27 |
28 | SYMBOL_NAME_SCRUBBER = re.compile(r'\W|^(?=\d)')
29 |
30 |
31 | def header(header, indent=0):
32 | return '%s# %s' % (' ' * indent, (' %s ' % header).center(78 - indent, '*'))
33 |
34 |
35 | def scrub_symbol_name(symbol_name):
36 | return SYMBOL_NAME_SCRUBBER.sub('_', symbol_name)
37 |
38 |
39 | PROFILE_HEADER_FIRST_PART = "%s\n%s" % (
40 | header('BEGIN AUTOMATICALLY GENERATED FIT PROFILE'),
41 | header('DO NOT EDIT THIS FILE'),
42 | )
43 |
44 | IMPORT_HEADER = '''from .types import (
45 | ComponentField,
46 | Field,
47 | FieldType,
48 | MessageType,
49 | ReferenceField,
50 | SubField,
51 | BASE_TYPES)'''
52 |
53 | # This allows the declaration of some message numbers to be prepended to the
54 | # generated file.
55 | # E.g. 'hr' -> MESG_NUM_HR = 132
56 | MESSAGE_NUM_DECLARATIONS = (
57 | 'file_id',
58 | 'developer_data_id',
59 | 'field_description',
60 | 'hr')
61 |
62 | # This allows the declaration of some field numbers of specific messages
63 | # to be prepended to the generated file.
64 | # E.g. 'hr.event_timestamp' -> FIELD_NUM_HR_EVENT_TIMESTAMP = 9
65 | FIELD_NUM_DECLARATIONS = (
66 | 'hr.event_timestamp',
67 | 'hr.event_timestamp_12')
68 |
69 | SPECIAL_FIELD_DECLARATIONS = "FIELD_TYPE_TIMESTAMP = Field(name='timestamp', type=FIELD_TYPES['date_time'], def_num=" + str(FIELD_NUM_TIMESTAMP) + ", units='s')"
70 |
71 | IGNORE_TYPE_VALUES = (
72 | # of the form 'type_name:value_name'
73 | 'mesg_num:mfg_range_min',
74 | 'mesg_num:mfg_range_max',
75 | 'date_time:min') # TODO: How to account for this? (see Profile.xls)
76 |
77 | BASE_TYPES = {
78 | 'enum': '0x00',
79 | 'sint8': '0x01',
80 | 'uint8': '0x02',
81 | 'sint16': '0x83',
82 | 'uint16': '0x84',
83 | 'sint32': '0x85',
84 | 'uint32': '0x86',
85 | 'string': '0x07',
86 | 'float32': '0x88',
87 | 'float64': '0x89',
88 | 'uint8z': '0x0a',
89 | 'uint16z': '0x8b',
90 | 'uint32z': '0x8c',
91 | 'byte': '0x0d',
92 | 'sint64': '0x8e',
93 | 'uint64': '0x8f',
94 | 'uint64z': '0x90'}
95 |
96 |
97 | def render_type(name):
98 | if name in BASE_TYPES:
99 | return "BASE_TYPES[%s], # %s" % (BASE_TYPES[name], name)
100 | else:
101 | return "FIELD_TYPES['%s']," % name
102 |
103 |
104 | def indent(s, amount=1):
105 | return ('\n%s' % (' ' * (amount * 4))).join(str(s).splitlines())
106 |
107 |
108 | class TypeList(namedtuple('TypeList', ('types'))):
109 | def get(self, name, raise_exception=True):
110 | for type in self.types:
111 | if type.name == name:
112 | return type
113 | if raise_exception:
114 | raise AssertionError("Couldn't find type by name: %s" % name)
115 |
116 | def num_values(self):
117 | return sum(len(type.enum) for type in self.types)
118 |
119 | def get_mesg_num(self, name):
120 | for mesg in self.get('mesg_num').enum:
121 | if mesg.name == name:
122 | return mesg.value
123 | raise AssertionError("Couldn't find message by name: %s" % name)
124 |
125 | def __str__(self):
126 | s = 'FIELD_TYPES = {\n'
127 | for type in sorted(self.types, key=lambda x: x.name):
128 | s += " '%s': %s,\n" % (type.name, indent(type))
129 | s += '}'
130 | return s
131 |
132 |
133 | class TypeInfo(namedtuple('TypeInfo', ('name', 'base_type', 'enum', 'comment'))):
134 | def get(self, value_name):
135 | for value in self.enum:
136 | if value.name == value_name:
137 | return value
138 | raise AssertionError("Invalid value name %s in type %s" % (value_name, self.name))
139 |
140 | def __str__(self):
141 | s = 'FieldType(%s\n' % render_comment(self.comment)
142 | s += " name='%s',\n" % (self.name)
143 | s += " base_type=BASE_TYPES[%s], # %s\n" % (
144 | BASE_TYPES[self.base_type], self.base_type,
145 | )
146 | if self.enum:
147 | s += " enum={\n"
148 | for value in sorted(self.enum, key=lambda x: x.value if isinstance(x.value, int) else int(x.value, 16)):
149 | s += " %s\n" % (value,)
150 | s += " },\n"
151 | s += ")"
152 | return s
153 |
154 |
155 | class TypeValueInfo(namedtuple('TypeValueInfo', ('name', 'value', 'comment'))):
156 | def __str__(self):
157 | return "%s: '%s',%s" % (self.value, self.name, render_comment(self.comment))
158 |
159 |
160 | class MessageList(namedtuple('MessageList', ('messages'))):
161 | def __str__(self):
162 | s = 'MESSAGE_TYPES = {\n'
163 | last_group_name = None
164 | for message in sorted(
165 | self.messages,
166 | key=lambda mi: (
167 | 0 if mi.group_name.lower().startswith('common') else 1,
168 | mi.group_name.lower(), mi.num,
169 | ),
170 | ):
171 | # Group name comment
172 | if message.group_name != last_group_name:
173 | if last_group_name is not None:
174 | s += '\n\n'
175 | s += "%s\n" % header(message.group_name, 4)
176 | last_group_name = message.group_name
177 | s += " %s: %s,\n" % (message.num, indent(message))
178 | s += '}'
179 | return s
180 |
181 | def get_by_name(self, mesg_name):
182 | for mesg in self.messages:
183 | if mesg.name == mesg_name:
184 | return mesg
185 |
186 | raise ValueError('message "%s" not found' % mesg_name)
187 |
188 | def get_field_by_name(self, mesg_name, field_name):
189 | mesg = self.get_by_name(mesg_name)
190 |
191 | for field in mesg.fields:
192 | if field.name == field_name:
193 | return mesg, field
194 |
195 | raise ValueError('field "%s" not found in message "%s"' % (field_name, mesg_name))
196 |
197 |
198 | class MessageInfo(namedtuple('MessageInfo', ('name', 'num', 'group_name', 'fields', 'comment'))):
199 | def get(self, field_name):
200 | for field in self.fields:
201 | if field.name == field_name:
202 | return field
203 | raise AssertionError("Invalid field name %s in message %s" % (field_name, self.name))
204 |
205 | def __str__(self):
206 | s = "MessageType(%s\n" % render_comment(self.comment)
207 | s += " name='%s',\n" % self.name
208 | s += " mesg_num=%d,\n" % self.num
209 | s += " fields={\n"
210 | for field in sorted(self.fields, key=lambda fi: fi.num):
211 |             # No trailing comma added here; the field's __str__ (or FIELD_TYPE_TIMESTAMP) already supplies it
212 | s += " %d: %s\n" % (field.num, indent(field, 2))
213 | s += " },\n"
214 | s += ")"
215 | return s
216 |
217 |
218 | class FieldInfo(namedtuple('FieldInfo', ('name', 'type', 'num', 'scale', 'offset', 'units', 'components', 'subfields', 'comment'))):
219 | def __str__(self):
220 | if self.num == FIELD_NUM_TIMESTAMP:
221 | # Add trailing comma here because of comment
222 | assert not self.components and not self.subfields
223 | return 'FIELD_TYPE_TIMESTAMP,%s' % render_comment(self.comment)
224 | s = "Field(%s\n" % render_comment(self.comment)
225 | s += " name='%s',\n" % self.name
226 | s += " type=%s\n" % render_type(self.type)
227 | s += " def_num=%d,\n" % self.num
228 | if self.scale:
229 | s += " scale=%s,\n" % self.scale
230 | if self.offset:
231 | s += " offset=%s,\n" % self.offset
232 | if self.units:
233 | s += " units=%s,\n" % repr(self.units)
234 | if self.components:
235 | s += ' components=(\n'
236 |             # Leave components in their original order (ordering matters for the bit layout)
237 | for component in self.components:
238 | s += " %s,\n" % indent(component, 2)
239 | s += " ),\n"
240 | if self.subfields:
241 | s += " subfields=(\n"
242 | for subfield in sorted(self.subfields, key=lambda si: si.name):
243 | s += " %s,\n" % indent(subfield, 2)
244 | s += " ),\n"
245 | s += "),"
246 | return s
247 |
248 |
249 | class ComponentFieldInfo(namedtuple('ComponentFieldInfo', ('name', 'num', 'scale', 'offset', 'units', 'bits', 'bit_offset', 'accumulate'))):
250 | def __str__(self):
251 | s = "ComponentField(\n"
252 | s += " name='%s',\n" % self.name
253 | s += " def_num=%d,\n" % (self.num if self.num is not None else 0)
254 | if self.scale:
255 | s += " scale=%s,\n" % self.scale
256 | if self.offset:
257 | s += " offset=%s,\n" % self.offset
258 | if self.units:
259 | s += " units=%s,\n" % repr(self.units)
260 | s += " accumulate=%s,\n" % self.accumulate
261 | s += " bits=%s,\n" % self.bits
262 | s += " bit_offset=%s,\n" % self.bit_offset
263 | s += ")"
264 | return s
265 |
266 |
267 | class SubFieldInfo(namedtuple('SubFieldInfo', ('name', 'num', 'type', 'scale', 'offset', 'units', 'ref_fields', 'components', 'comment'))):
268 | def __str__(self):
269 | s = "SubField(%s\n" % render_comment(self.comment)
270 | s += " name='%s',\n" % self.name
271 | s += " def_num=%s,\n" % self.num
272 | s += " type=%s\n" % render_type(self.type)
273 | if self.scale:
274 | s += " scale=%s,\n" % self.scale
275 | if self.offset:
276 | s += " offset=%s,\n" % self.offset
277 | if self.units:
278 | s += " units=%s,\n" % repr(self.units)
279 | s += " ref_fields=(\n"
280 | for ref_field in self.ref_fields: # sorted(self.ref_fields, key=lambda rf: (rf.name, rf.value)):
281 | s += " %s,\n" % indent(ref_field, 2)
282 | s += " ),\n"
283 | if self.components:
284 | s += ' components=(\n'
285 |             # Leave components in their original order (ordering matters for the bit layout)
286 | for component in self.components:
287 | s += " %s,\n" % indent(component, 2)
288 | s += " ),\n"
289 | s += ")"
290 | return s
291 |
292 |
293 | class ReferenceFieldInfo(namedtuple('ReferenceFieldInfo', ('name', 'value', 'num', 'raw_value'))):
294 | def __str__(self):
295 | s = 'ReferenceField(\n'
296 | s += " name='%s',\n" % self.name
297 | s += ' def_num=%d,\n' % self.num
298 | s += " value='%s',\n" % self.value
299 | s += ' raw_value=%d,\n' % self.raw_value
300 | s += ')'
301 | return s
302 |
303 |
304 | def render_comment(comment):
305 | if comment:
306 | return ' # %s' % comment
307 | return ''
308 |
309 |
310 | def fix_scale(data):
311 | if data == 1:
312 | return None
313 | return data
314 |
315 |
316 | def fix_units(data):
317 | if isinstance(data, str):
318 | data = data.replace(' / ', '/')
319 | data = data.replace(' * ', '*')
320 | data = data.replace('(steps)', 'or steps')
321 | data = data.strip()
322 | return data
323 |
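# A couple of illustrative inputs/outputs (the unit strings are made up; the results follow
# directly from the replacements above):
#   fix_units('m / s')           -> 'm/s'
#   fix_units('strides (steps)') -> 'strides or steps'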
324 |
325 | def parse_csv_fields(data, num_expected):
326 | if data is None or data == '':
327 | return [None] * num_expected
328 | elif isinstance(data, str):
329 | ret = [(int(x.strip()) if x.strip().isdigit() else x.strip()) for x in data.strip().split(',')]
330 | else:
331 | ret = [data]
332 |
333 |     # A single value was given but more were expected: replicate it for every expected slot
334 | if len(ret) == 1 and num_expected:
335 | return ret * num_expected
336 | return ret
337 |
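# Illustrative behaviour (example values only; results follow from the code above):
#   parse_csv_fields('1,16,256', 3) -> [1, 16, 256]
#   parse_csv_fields('deg', 3)      -> ['deg', 'deg', 'deg']   (a lone value is replicated)
#   parse_csv_fields(None, 2)       -> [None, None]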
338 |
339 | def parse_spreadsheet(xls_file, *sheet_names):
340 | workbook = openpyxl.load_workbook(xls_file)
341 |
342 | for sheet_name in sheet_names:
343 | sheet = workbook[sheet_name]
344 |
345 | parsed_values = []
346 |
347 |         # Skip the sheet header row
348 | for row in islice(sheet.rows, 1, None):
349 | row_values = [cell.value for cell in row]
350 | values = []
351 |
352 | if sheet_name.lower() == 'messages':
353 |                 # Only care about the first 14 columns for Messages
354 | row_values = row_values[:14]
355 |                 # Skip group/section header (metadata) rows
356 | if is_metadata_row(row_values):
357 | continue
358 |
359 | for value in row_values:
360 | if isinstance(value, str):
361 |                     # Normalize to ASCII bytes for now; Unicode handling is wonky
362 | value = value.strip().encode('ascii', 'ignore')
363 | if value == '':
364 | value = None
365 | if isinstance(value, float):
366 | if value.is_integer():
367 | value = int(value)
368 |
369 | values.append(value)
370 |
371 | if all(v is None for v in values):
372 | continue
373 |
374 | parsed_values.append(values)
375 |
376 | yield parsed_values
377 |
378 |
379 | def is_metadata_row(row_values):
380 | label_index = 3
381 | return row_values[label_index] and all(v is None for v in row_values[0:label_index]) and \
382 | all(v is None for v in row_values[label_index + 1:])
383 |
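# e.g. a row whose only populated cell is column 3 is treated as a group/section label rather
# than data (the cell contents below are made up):
#   is_metadata_row([None, None, None, b'ACTIVITY FILE MESSAGES', None, None]) -> True
#   is_metadata_row([b'record', None, b'timestamp', b'uint32', None, None])    -> False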
384 |
385 | def parse_types(types_rows):
386 | type_list = TypeList([])
387 |
388 | for row in types_rows:
389 | if row[0]:
390 |             # A value in the first column starts a new type (the comment column may be empty)
391 | type = TypeInfo(
392 | name=row[0].decode(), base_type=row[1].decode(), enum=[], comment=default_comment(row[4]),
393 | )
394 | type_list.types.append(type)
395 | assert type.name
396 | assert type.base_type
397 |
398 | else:
399 |             # An empty first column means this row defines a value of the current type
400 | value = TypeValueInfo(name=row[2].decode(), value=maybe_decode(row[3]), comment=default_comment(row[4]))
401 |
402 | if value.name and value.value is not None:
403 |                 # Skip values listed in IGNORE_TYPE_VALUES
404 | if "%s:%s" % (type.name, value.name) not in IGNORE_TYPE_VALUES:
405 | type.enum.append(value)
406 |
407 |     # Add the boolean type if the spreadsheet does not define it
408 | if not type_list.get('bool', raise_exception=False):
409 | type_list.types.append(TypeInfo('bool', 'enum', [], None))
410 |
411 | return type_list
412 |
413 |
414 | def maybe_decode(o):
415 | if isinstance(o, bytes):
416 | return o.decode()
417 | return o
418 |
419 |
420 | def default_comment(x):
421 | return (x or b'').decode()
422 |
423 |
424 | def parse_messages(messages_rows, type_list):
425 | message_list = MessageList([])
426 |
427 | group_name = ""
428 | for row in messages_rows:
429 | if (row[3] is not None) and all(r == b'' for n, r in enumerate(row[:14]) if n != 3):
430 |             # Only column 3 set means this row holds a group name
431 | group_name = row[3].decode().title()
432 | elif row[0] is not None and row[0] != b'':
433 |             # A value in the first column starts a new message
434 | name = row[0].decode()
435 | message = MessageInfo(
436 | name=name, num=type_list.get_mesg_num(name),
437 | group_name=group_name, fields=[], comment=default_comment(row[13]),
438 | )
439 | message_list.messages.append(message)
440 | else:
441 | # Get components if they exist
442 | components = []
443 | component_names = parse_csv_fields(default_comment(row[5]), 0)
444 | if component_names and (len(component_names) != 1 or component_names[0] != ''):
445 | num_components = len(component_names)
446 | components = [
447 | ComponentFieldInfo(
448 | name=cmp_name, num=None, scale=fix_scale(cmp_scale),
449 | offset=cmp_offset, units=fix_units(cmp_units),
450 | bits=cmp_bits, bit_offset=None, accumulate=bool(cmp_accumulate),
451 | )
452 | for cmp_name, cmp_scale, cmp_offset, cmp_units, cmp_bits, cmp_accumulate in zip(
453 | component_names, # name
454 | parse_csv_fields(maybe_decode(row[6]), num_components), # scale
455 | parse_csv_fields(maybe_decode(row[7]), num_components), # offset
456 | parse_csv_fields(maybe_decode(row[8]), num_components), # units
457 | parse_csv_fields(maybe_decode(row[9]), num_components), # bits
458 | parse_csv_fields(maybe_decode(row[10]), num_components), # accumulate
459 | )
460 | ]
461 |
462 | assert len(components) == num_components
463 | for component in components:
464 | assert component.name
465 | assert component.bits
466 |
467 |             # Otherwise this row describes a field
468 |             # It is a regular field (not a subfield) when the definition number column is set
469 | if row[1] is not None and row[1] != b'':
470 | field = FieldInfo(
471 | name=row[2].decode(), type=row[3].decode(), num=maybe_decode(row[1]), scale=fix_scale(row[6]),
472 | offset=maybe_decode(row[7]), units=fix_units(default_comment(row[8])), components=[],
473 | subfields=[], comment=default_comment(row[13]),
474 | )
475 |
476 | assert field.name
477 | assert field.type
478 |
479 | # Add components if they exist
480 | if components:
481 | field.components.extend(components)
482 |
483 |                 # Wipe out scale, offset and units on the field itself when the scale cell is empty or is not a single integer (e.g. it holds a per-component CSV)
484 | if row[6] is None or row[6] == b'' or not str(row[6]).isdigit():
485 | field = field._replace(scale=None, offset=None, units=None)
486 |
487 | message.fields.append(field)
488 | elif row[2] != b'':
489 | # Sub fields
490 | subfield = SubFieldInfo(
491 | name=row[2].decode(), num=field.num, type=row[3].decode(), scale=fix_scale(row[6]),
492 | offset=maybe_decode(row[7]), units=fix_units(default_comment(row[8])), ref_fields=[],
493 | components=[], comment=default_comment(row[13]),
494 | )
495 |
496 | ref_field_names = parse_csv_fields(row[11].decode(), 0)
497 | assert ref_field_names
498 |
499 | if components:
500 | subfield.components.extend(components)
501 |                     # Wipe out scale, offset and units from the subfield itself since they describe its components
502 | subfield = subfield._replace(scale=None, offset=None, units=None)
503 |
504 | subfield.ref_fields.extend(
505 | ReferenceFieldInfo(
506 | name=ref_field_name, value=ref_field_value,
507 | num=None, raw_value=None,
508 | )
509 | for ref_field_name, ref_field_value
510 | in zip(ref_field_names, parse_csv_fields(row[12].decode(), 0))
511 | )
512 |
513 | assert len(subfield.ref_fields) == len(ref_field_names)
514 | if "alert_type" not in ref_field_names:
515 | field.subfields.append(subfield)
516 |
517 | # Resolve reference fields for subfields and components
518 | for message in message_list.messages:
519 | for field in message.fields:
520 | for sub_field in field.subfields:
521 | for n, ref_field_info in enumerate(sub_field.ref_fields[:]):
522 | ref_field = message.get(ref_field_info.name)
523 | sub_field.ref_fields[n] = ref_field_info._replace(
524 | num=ref_field.num,
525 | # Get the type of the reference field, then get its numeric value
526 | raw_value=type_list.get(ref_field.type).get(ref_field_info.value).value,
527 | )
528 | bit_offset = 0
529 | for n, component in enumerate(sub_field.components[:]):
530 | sub_field.components[n] = component._replace(
531 | num=message.get(component.name).num, bit_offset=bit_offset
532 | )
533 | bit_offset += component.bits
534 | bit_offset = 0
535 | for n, component in enumerate(field.components[:]):
536 | field.components[n] = component._replace(
537 | num=message.get(component.name).num, bit_offset=bit_offset
538 | )
539 | bit_offset += component.bits
540 |
541 | return message_list
542 |
543 |
544 | def get_xls_and_version_from_zip_or_xlsx(path):
545 | archive = zipfile.ZipFile(path, 'r')
546 | profile_xls = None
547 | profile_version = None
548 |
549 | # check if file is a .xlsx file
550 | try:
551 | archive.open('[Content_Types].xml')
552 | return path, None # yes
553 | except KeyError:
554 | pass
555 |
556 | # old structure
557 | for name in ('Profile.xlsx', 'Profile.xls'):
558 | try:
559 | profile_xls = archive.open(name)
560 | break
561 | except KeyError:
562 | pass
563 | else:
564 | # new structure?
565 | for name in archive.namelist():
566 | rem = re.fullmatch(
567 | r'^FitSDKRelease_(\d+(?:\.\d+)+)[/\\]Profile.xlsx?$',
568 | name, re.I)
569 | if rem:
570 | profile_xls = archive.open(name)
571 | profile_version = rem.group(1)
572 | break
573 | else:
574 | print('Profile.xls(x) not found in', path)
575 | sys.exit(1)
576 |
577 | # read version from a C header file (old structure)
578 | if profile_version is None:
579 | content = archive.open('c/fit.h').read().decode()
580 | rem = re.search(
581 | r'\s+Profile\s+Version\s*\=\s*(\d+(?:\.\d+)+)[^\d]*',
582 | content, re.I)
583 | if not rem:
584 | print('SDK version number not found in', path)
585 | sys.exit(1)
586 |
587 | profile_version = rem.group(1)
588 |
589 | if profile_version.count('.') > 1 and re.fullmatch(r'.+\.0+$', profile_version):
590 | profile_version = profile_version.rstrip("0").rstrip(".")
591 |
592 | return profile_xls, profile_version
593 |
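# In short, the function above copes with three layouts: a bare .xlsx passed directly (returned
# as-is, version unknown), an old-style SDK zip with Profile.xls(x) at the top level plus c/fit.h
# (the version is scraped from that header), and a new-style zip whose profile sits under e.g.
# FitSDKRelease_21.94/Profile.xlsx (the version comes from the directory name; that number is
# only an example).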
594 |
595 | def main(input_xls_or_zip, output_py_path=None):
596 | if output_py_path and os.path.exists(output_py_path):
597 | if not open(output_py_path, 'rb').read().strip().startswith(PROFILE_HEADER_FIRST_PART.encode()):
598 | print('Python file does not begin with appropriate header.')
599 | sys.exit(1)
600 |
601 | if zipfile.is_zipfile(input_xls_or_zip):
602 | xls_file, profile_version = get_xls_and_version_from_zip_or_xlsx(input_xls_or_zip)
603 | elif open(input_xls_or_zip, 'rb').read().startswith(XLS_HEADER_MAGIC):
604 | xls_file, profile_version = input_xls_or_zip, None
605 | else:
606 | print("Not a valid .zip or .xls(x) file.")
607 | sys.exit(1)
608 |
609 | types_rows, messages_rows = parse_spreadsheet(xls_file, 'Types', 'Messages')
610 | type_list = parse_types(types_rows)
611 | message_list = parse_messages(messages_rows, type_list)
612 |
613 | mesg_num_declarations = []
614 | for mesg_name in MESSAGE_NUM_DECLARATIONS:
615 | mesg_info = message_list.get_by_name(mesg_name)
616 |
617 | mesg_num_declarations.append('MESG_NUM_%s = %s # message "%s"' % (
618 | scrub_symbol_name(mesg_name).upper(),
619 | str(mesg_info.num) if mesg_info else 'None',
620 | mesg_name))
621 |
622 | field_num_declarations = [
623 | 'FIELD_NUM_TIMESTAMP = ' + str(FIELD_NUM_TIMESTAMP) + ' # field "timestamp"']
624 | for field_fqn in FIELD_NUM_DECLARATIONS:
625 | mesg_name, field_name = field_fqn.split('.', maxsplit=1)
626 | mesg_info, field_info = message_list.get_field_by_name(mesg_name, field_name)
627 |
628 | field_decl = 'FIELD_NUM_%s_%s = %s # field "%s"' % (
629 | scrub_symbol_name(mesg_name).upper(),
630 | scrub_symbol_name(field_name).upper(),
631 | str(field_info.num),
632 | field_fqn)
633 |
634 | field_num_declarations.append(field_decl)
635 |
636 | output = '\n'.join([
637 | "\n%s" % PROFILE_HEADER_FIRST_PART,
638 | header('EXPORTED PROFILE FROM %s ON %s' % (
639 | ('SDK VERSION %s' % profile_version) if profile_version else 'SPREADSHEET',
640 | datetime.datetime.now().strftime('%Y-%m-%d'),
641 | )),
642 | header('PARSED %d TYPES (%d VALUES), %d MESSAGES (%d FIELDS)' % (
643 | len(type_list.types), sum(len(ti.enum) for ti in type_list.types),
644 | len(message_list.messages), sum(len(mi.fields) for mi in message_list.messages),
645 | )),
646 | '', IMPORT_HEADER
647 | ]) + '\n'
648 |
649 | if mesg_num_declarations:
650 | output += '\n\n' + '\n'.join(mesg_num_declarations) + '\n'
651 | if field_num_declarations:
652 | output += '\n\n' + '\n'.join(field_num_declarations) + '\n'
653 |
654 | output += '\n\n' + '\n'.join([
655 | str(type_list), '\n',
656 | SPECIAL_FIELD_DECLARATIONS, '\n',
657 | str(message_list), ''
658 | ])
659 |
660 | # TODO: Apply an additional layer of monkey patching to match reference/component
661 | # fields to actual field objects? Would clean up accesses to these
662 |
663 | if output_py_path:
664 | open(output_py_path, 'wb').write(output.encode())
665 | print('Profile version %s written to %s' % (
666 | profile_version if profile_version else '',
667 | output_py_path))
668 | else:
669 | print(output.strip())
670 |
671 |
672 | if __name__ == '__main__':
673 | if len(sys.argv) < 2:
674 |         print("Usage: %s <Profile.xlsx or SDK .zip> [profile.py]" % os.path.basename(__file__))
675 | sys.exit(0)
676 |
677 | xls = sys.argv[1]
678 | profile = sys.argv[2] if len(sys.argv) >= 3 else None
679 | main(xls, profile)
680 |
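# Typical invocations (file names below are only examples):
#   python generate_profile.py FitSDK.zip profile_out.py   # write the generated module to profile_out.py
#   python generate_profile.py Profile.xlsx                # print the generated module to stdout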
--------------------------------------------------------------------------------