├── requirements.txt ├── tests ├── fits │ ├── ActivityDevFields.fit │ ├── WithGearChangeData.fit │ └── HrmPluginTestActivity.fit ├── __init__.py ├── test_util.py ├── test_crc_calculator.py ├── test_accumulator.py ├── test_hr_mesg_utils.py ├── test_bitstream.py ├── test_stream.py ├── test_decoder.py └── data.py ├── pyproject.toml ├── setup.cfg ├── .github └── workflows │ ├── run_tests.yml │ └── publish.yml ├── garmin_fit_sdk ├── __init__.py ├── util.py ├── crc_calculator.py ├── accumulator.py ├── bitstream.py ├── fit.py ├── hr_mesg_utils.py ├── stream.py └── decoder.py └── README.md /requirements.txt: -------------------------------------------------------------------------------- 1 | pytest~=7.0.1 2 | pytest-cov~=3.0.0 3 | pytest-mock~=3.6.1 -------------------------------------------------------------------------------- /tests/fits/ActivityDevFields.fit: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/garmin/fit-python-sdk/HEAD/tests/fits/ActivityDevFields.fit -------------------------------------------------------------------------------- /tests/fits/WithGearChangeData.fit: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/garmin/fit-python-sdk/HEAD/tests/fits/WithGearChangeData.fit -------------------------------------------------------------------------------- /tests/fits/HrmPluginTestActivity.fit: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/garmin/fit-python-sdk/HEAD/tests/fits/HrmPluginTestActivity.fit -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools >= 48", 4 | "setuptools_scm[toml] >= 4, <6", 5 | "setuptools_scm_git_archive", 6 | "wheel >= 0.29.0", 7 | ] 8 | build-backend = 'setuptools.build_meta' -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | ########################################################################################### 2 | # Copyright 2025 Garmin International, Inc. 3 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 4 | # may not use this file except in compliance with the Flexible and Interoperable Data 5 | # Transfer (FIT) Protocol License. 6 | ########################################################################################### 7 | 8 | 9 | # __init__.py for the fit sdk tests module 10 | 11 | __version__ = '21.178.0' -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = garmin-fit-sdk 3 | version = 21.178.0 4 | author = Garmin International, Inc. 
5 | url = https://github.com/garmin/fit-python-sdk 6 | description = Garmin FIT Python SDK 7 | long_description = file: README.md 8 | long_description_content_type = text/markdown 9 | keywords = garmin, fit sdk, fit 10 | license_files = LICENSE.txt 11 | 12 | [bdist_wheel] 13 | universal = 1 14 | 15 | [options] 16 | packages=find: 17 | zip_safe = True 18 | include_package_data = True 19 | python_requires = >=3.6 20 | 21 | 22 | [options.packages.find] 23 | exclude = 24 | .gitignore 25 | .vscode* 26 | .pytest_cache* 27 | .test.py -------------------------------------------------------------------------------- /.github/workflows/run_tests.yml: -------------------------------------------------------------------------------- 1 | name: Run Tests 2 | 3 | on: 4 | push: 5 | branches: [ "**" ] 6 | pull_request: 7 | branches: [ "**" ] 8 | 9 | workflow_dispatch: 10 | 11 | jobs: 12 | test: 13 | runs-on: ubuntu-24.04 14 | timeout-minutes: 10 15 | 16 | steps: 17 | - name: Check out repository code 18 | uses: actions/checkout@v2 19 | 20 | - name: Setup Python 21 | uses: actions/setup-python@v2 22 | with: 23 | python-version: "3.12" 24 | 25 | - name: Install requirements 26 | run: | 27 | python3 -m pip install -r requirements.txt 28 | - name: Run test suite 29 | run: | 30 | python3 -m pytest 31 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish to PyPi 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | workflow_dispatch: 8 | 9 | jobs: 10 | build-n-publish: 11 | name: Build and publish Python distributions to PyPI 12 | runs-on: ubuntu-24.04 13 | 14 | steps: 15 | - uses: actions/checkout@master 16 | - name: Set up Python 3.12 17 | uses: actions/setup-python@v3 18 | with: 19 | python-version: "3.12" 20 | 21 | - name: Install pypa/build 22 | run: >- 23 | python -m 24 | pip install 25 | build 26 | --user 27 | - name: Build a binary wheel and a source tarball 28 | run: >- 29 | python -m 30 | build 31 | --sdist 32 | --wheel 33 | --outdir dist/ 34 | . 35 | 36 | - name: Publish distribution 📦 to PyPI 37 | uses: pypa/gh-action-pypi-publish@release/v1 38 | with: 39 | user: __token__ 40 | password: ${{ secrets.PYPI_TOKEN }} 41 | -------------------------------------------------------------------------------- /garmin_fit_sdk/__init__.py: -------------------------------------------------------------------------------- 1 | # __init__garmin_fit_sdk.py 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | # ****WARNING**** This file is auto-generated! Do NOT edit this file. 
10 | # Profile Version = 21.178.0Release 11 | # Tag = production/release/21.178.0-0-g3bea629 12 | ############################################################################################ 13 | 14 | 15 | from garmin_fit_sdk.accumulator import Accumulator 16 | from garmin_fit_sdk.bitstream import BitStream 17 | from garmin_fit_sdk.crc_calculator import CrcCalculator 18 | from garmin_fit_sdk.decoder import Decoder 19 | from garmin_fit_sdk.fit import BASE_TYPE, BASE_TYPE_DEFINITIONS 20 | from garmin_fit_sdk.hr_mesg_utils import expand_heart_rates 21 | from garmin_fit_sdk.profile import Profile 22 | from garmin_fit_sdk.stream import Stream 23 | from garmin_fit_sdk.util import FIT_EPOCH_S, convert_timestamp_to_datetime 24 | 25 | __version__ = '21.178.0' 26 | -------------------------------------------------------------------------------- /tests/test_util.py: -------------------------------------------------------------------------------- 1 | '''test_util.py: Contains the set of tests for the util module in the Python FIT SDK''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | 10 | 11 | from datetime import datetime, timezone 12 | 13 | import pytest 14 | from garmin_fit_sdk import util 15 | 16 | 17 | @pytest.mark.parametrize( 18 | "given_timestamp,expected_datetime", 19 | [ 20 | (1029086357, datetime.fromtimestamp(1029086357 + 631065600, timezone.utc)), 21 | (0, datetime.fromtimestamp(631065600, timezone.utc)), 22 | (None, datetime.fromtimestamp(631065600, timezone.utc)), 23 | ], ids=["Regular timestamp", "0 timestamp defaults to FITEPOCH", "Null timestamp defaults to FITEPOCH"], 24 | ) 25 | def test_convert_datetime(given_timestamp, expected_datetime): 26 | '''Tests converting a FIT timestamp to a python utc datetime''' 27 | expected_datetime = expected_datetime.replace(tzinfo=timezone.utc) 28 | 29 | actual_datetime = util.convert_timestamp_to_datetime(given_timestamp) 30 | assert str(actual_datetime) == str(expected_datetime) 31 | -------------------------------------------------------------------------------- /tests/test_crc_calculator.py: -------------------------------------------------------------------------------- 1 | '''test_crc_calculator.py: Contains the set of tests for the Stream class in the Python FIT SDK''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 
8 | ########################################################################################### 9 | 10 | 11 | import pytest 12 | from garmin_fit_sdk import CrcCalculator 13 | 14 | from tests.data import Data 15 | 16 | 17 | @pytest.mark.parametrize( 18 | "data,crc_expected,is_correct_crc", 19 | [ 20 | (Data.fit_file_invalid, 0x0000, False), 21 | (Data.fit_file_minimum, 0x488D, True), 22 | (Data.fit_file_short, 0xE3B9, True), 23 | ], 24 | ) 25 | def test_file_header_crc(data, crc_expected, is_correct_crc): 26 | '''Tests which validate crc calcualtion on fit file headers''' 27 | if is_correct_crc: 28 | assert (CrcCalculator.calculate_crc(data, 0, 12) == 29 | crc_expected) == is_correct_crc 30 | 31 | 32 | @pytest.mark.parametrize( 33 | "data,file_length,crc_expected,is_correct_crc", 34 | [ 35 | (Data.fit_file_invalid, len(Data.fit_file_invalid) - 2, 0x0000, False), 36 | (Data.fit_file_minimum, len(Data.fit_file_minimum) - 2, 0x0000, True), 37 | (Data.fit_file_short, len(Data.fit_file_short) - 2, 0x4F87, True), 38 | ], 39 | ) 40 | def test_file_crc(data, crc_expected, is_correct_crc, file_length): 41 | '''Tests which validate crc calcualtion on fit file data.''' 42 | if is_correct_crc: 43 | assert ( 44 | CrcCalculator.calculate_crc(data, 0, file_length) == crc_expected 45 | ) == is_correct_crc 46 | -------------------------------------------------------------------------------- /tests/test_accumulator.py: -------------------------------------------------------------------------------- 1 | '''test_accumulator.py: Contains the set of tests for the Accumulator class in the Python FIT SDK''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 
8 | ########################################################################################### 9 | 10 | 11 | from garmin_fit_sdk.accumulator import Accumulator 12 | 13 | def test_accumulator(): 14 | '''Tests functionality of the accumulator class.''' 15 | accumulator = Accumulator() 16 | 17 | accumulator.createAccumulatedField(0,0,0) 18 | 19 | assert accumulator.accumulate(0,0,1,8) == 1 20 | assert accumulator.accumulate(0,0,2,8) == 2 21 | assert accumulator.accumulate(0,0,3,8) == 3 22 | assert accumulator.accumulate(0,0,4,8) == 4 23 | 24 | def test_accumulators_accumulates_multiple_fields_independently(): 25 | '''Tests that the accumulator can hold and accumluate different fields at the same time.''' 26 | accumulator = Accumulator() 27 | 28 | accumulator.createAccumulatedField(0,0,0) 29 | assert accumulator.accumulate(0,0,254,8) == 254 30 | 31 | accumulator.createAccumulatedField(1,1,0) 32 | assert accumulator.accumulate(1,1,2,8) == 2 33 | 34 | assert accumulator.accumulate(0,0,0,8) == 256 35 | 36 | def test_accumulator_accumulates_field_rollover(): 37 | '''Tests that the accumulator handles rollover field values accordingly.''' 38 | accumulator = Accumulator() 39 | 40 | accumulator.createAccumulatedField(0,0,250) 41 | 42 | assert accumulator.accumulate(0,0,254,8) == 254 43 | assert accumulator.accumulate(0,0,255,8) == 255 44 | assert accumulator.accumulate(0,0,0,8) == 256 45 | assert accumulator.accumulate(0,0,3,8) == 259 -------------------------------------------------------------------------------- /tests/test_hr_mesg_utils.py: -------------------------------------------------------------------------------- 1 | '''test_hr_mesg_utils.py: Contains the tests for the heart rate message merging functionality''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | 10 | 11 | from garmin_fit_sdk import hr_mesg_utils 12 | from garmin_fit_sdk.decoder import Decoder 13 | from garmin_fit_sdk.stream import Stream 14 | 15 | from . 
import data_expand_hr_mesgs 16 | 17 | 18 | def test_expand_heart_rates(): 19 | '''Tests expanding heart rates''' 20 | stream = Stream.from_file("tests/fits/HrmPluginTestActivity.fit") 21 | decoder = Decoder(stream) 22 | messages, errors = decoder.read() 23 | 24 | assert len(errors) == 0 25 | 26 | heartrates = hr_mesg_utils.expand_heart_rates(messages['hr_mesgs']) 27 | 28 | assert len(heartrates) == len(data_expand_hr_mesgs.expanded_hr_messages) 29 | 30 | index = 0 31 | for message in heartrates: 32 | expected = data_expand_hr_mesgs.expanded_hr_messages[index] 33 | assert message['timestamp'] == expected['timestamp'] 34 | assert message['heart_rate'] == expected['heart_rate'] 35 | index += 1 36 | 37 | def test_hr_mesgs_to_record_mesgs(): 38 | '''Tests that the heart rate messages are merged into the record messages correctly.''' 39 | stream = Stream.from_file("tests/fits/HrmPluginTestActivity.fit") 40 | decoder = Decoder(stream) 41 | messages, errors = decoder.read(merge_heart_rates=True, convert_datetimes_to_dates=False) 42 | 43 | assert len(errors) == 0 44 | assert len(messages['record_mesgs']) == len(data_expand_hr_mesgs.merged_record_messages) 45 | 46 | index = 0 47 | for message in messages['record_mesgs']: 48 | expected = data_expand_hr_mesgs.merged_record_messages[index] 49 | assert message['timestamp'] == expected['timestamp'] 50 | assert message['heart_rate'] == expected['heart_rate'] 51 | index += 1 52 | -------------------------------------------------------------------------------- /garmin_fit_sdk/util.py: -------------------------------------------------------------------------------- 1 | '''util.py: Contains utility functions used throughout the project.''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | # ****WARNING**** This file is auto-generated! Do NOT edit this file. 
10 | # Profile Version = 21.178.0Release 11 | # Tag = production/release/21.178.0-0-g3bea629 12 | ############################################################################################ 13 | 14 | 15 | from datetime import datetime, timezone 16 | 17 | FIT_EPOCH_S = 631065600 18 | 19 | def convert_timestamp_to_datetime(timestamp): 20 | '''Takes a FIT datetime timestamp and converts it to a python datetime in utc''' 21 | utc_datetime = datetime.fromtimestamp((timestamp if timestamp else 0) + FIT_EPOCH_S, timezone.utc) 22 | return utc_datetime.replace(tzinfo=timezone.utc) 23 | 24 | def _convert_string(string): 25 | '''Takes a string and converts it according to the fit protocol standard.''' 26 | string = string.decode("utf-8", errors="ignore") 27 | strings = string.split(sep='\0') 28 | 29 | while strings[len(strings) - 1] == '': 30 | strings.pop() 31 | if len(strings) == 0: 32 | return None 33 | 34 | if len(strings) == 1: 35 | return strings[0] 36 | else: 37 | return strings 38 | 39 | def _only_invalid_values(raw_field_value, invalid_value): 40 | '''Returns whether the given value(s) consist of only invalid values.''' 41 | if isinstance(raw_field_value, list): 42 | for value in raw_field_value: 43 | if value != invalid_value: 44 | return False 45 | 46 | return True 47 | 48 | return raw_field_value == invalid_value 49 | 50 | def _sanitize_values(values): 51 | '''Reduces values if it is an array of length one.''' 52 | if isinstance(values, list) and len(values) == 1: 53 | return values[0] 54 | 55 | return values -------------------------------------------------------------------------------- /garmin_fit_sdk/crc_calculator.py: -------------------------------------------------------------------------------- 1 | '''crc_calculator.py: Contains the CRC class which is used for calculating the header and file data CRCs.''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | # ****WARNING**** This file is auto-generated! Do NOT edit this file. 
10 | # Profile Version = 21.178.0Release 11 | # Tag = production/release/21.178.0-0-g3bea629 12 | ############################################################################################ 13 | 14 | 15 | _CRC_TABLE = [ 16 | 0x0000, 0xCC01, 0xD801, 0x1400, 0xF001, 0x3C00, 0x2800, 0xE401, 17 | 0xA001, 0x6C00, 0x7800, 0xB401, 0x5000, 0x9C01, 0x8801, 0x4400 18 | ] 19 | 20 | 21 | class CrcCalculator: 22 | '''A class for calculating the CRC of a given .fit file header or file contents.''' 23 | 24 | def __init__(self) -> None: 25 | self._crc = 0 26 | self._bytes_seen = 0 27 | 28 | def get_crc(self): 29 | '''Returns the calculated CRC value.''' 30 | return self._crc 31 | 32 | def __update_crc(self, value): 33 | # compute checksum of lower four bits of byte 34 | temp = _CRC_TABLE[self._crc & 0xF] 35 | self._crc = (self._crc >> 4) & 0x0FFF 36 | self._crc = self._crc ^ temp ^ _CRC_TABLE[value & 0xF] 37 | 38 | # compute checksum of upper four bits of byte 39 | temp = _CRC_TABLE[self._crc & 0xF] 40 | self._crc = (self._crc >> 4) & 0x0FFF 41 | self._crc = self._crc ^ temp ^ _CRC_TABLE[(value >> 4) & 0xF] 42 | 43 | return self._crc 44 | 45 | def add_bytes(self, buffer, start, end): 46 | '''Adds another chunk of bytes for calculating the CRC.''' 47 | for i in range(start, end): 48 | self._crc = self.__update_crc(buffer[i]) 49 | self._bytes_seen += 1 50 | 51 | return self._crc 52 | 53 | @staticmethod 54 | def calculate_crc(buffer, start: int, end: int): 55 | '''Calculates the CRC of a given buffer from the given starting index to the ending index.''' 56 | crc_calculator = CrcCalculator() 57 | return crc_calculator.add_bytes(buffer, start, end) 58 | -------------------------------------------------------------------------------- /garmin_fit_sdk/accumulator.py: -------------------------------------------------------------------------------- 1 | '''accumulator.py: Contains the Accumulator class and sub-component class AccumulatedField''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | # ****WARNING**** This file is auto-generated! Do NOT edit this file. 10 | # Profile Version = 21.178.0Release 11 | # Tag = production/release/21.178.0-0-g3bea629 12 | ############################################################################################ 13 | 14 | 15 | class AccumulatedField: 16 | '''A class that accumulates a value for a particular field. 17 | Attributes: 18 | _accumulated_value: Resulting accumulated value 19 | _last_value: The previous accumulated value thus far. 20 | ''' 21 | def __init__(self, value = 0): 22 | self._accumulated_value = value 23 | self._last_value = value 24 | 25 | def accumulate(self, value, bits): 26 | ''''Accumulates to the previous value and gives the updated accumulated value.''' 27 | mask = (1 << bits) - 1 28 | 29 | self._accumulated_value += (value - self._last_value) & mask 30 | self._last_value = value 31 | 32 | return self._accumulated_value 33 | 34 | class Accumulator: 35 | '''A class that represents the accumulated values for particular fields. 36 | Attributes: 37 | _messages: A list of messages with a field or fields to accumulate. 
38 | ''' 39 | def __init__(self): 40 | self._messages = {} 41 | 42 | def createAccumulatedField(self, mesg_num, field_num, value): 43 | '''Creates an accumulated field and stores its initial value in the accumulator''' 44 | accumulatedField = AccumulatedField(value) 45 | 46 | if mesg_num not in self._messages: 47 | self._messages[mesg_num] = {} 48 | 49 | self._messages[mesg_num][field_num] = accumulatedField 50 | 51 | return accumulatedField 52 | 53 | def accumulate(self, mesg_num, field_num, value, bits): 54 | '''Accumulates the given field value if present in the accumulator. If it is not, the accumulated field is added to the Accumulator.''' 55 | accumulatedField = None 56 | 57 | if mesg_num in self._messages and field_num in self._messages[mesg_num]: 58 | accumulatedField = self._messages[mesg_num][field_num] 59 | else: 60 | accumulatedField = self.createAccumulatedField(mesg_num, field_num, value) 61 | 62 | return accumulatedField.accumulate(value, bits) 63 | 64 | -------------------------------------------------------------------------------- /garmin_fit_sdk/bitstream.py: -------------------------------------------------------------------------------- 1 | '''bitstream.py: Contains BitStream class which handles reading streams of data bit by bit ''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | # ****WARNING**** This file is auto-generated! Do NOT edit this file. 10 | # Profile Version = 21.178.0Release 11 | # Tag = production/release/21.178.0-0-g3bea629 12 | ############################################################################################ 13 | 14 | 15 | from . import fit as FIT 16 | 17 | 18 | class BitStream: 19 | ''' 20 | A class that represents a stream of binary data from a chunk of data. 21 | 22 | Attributes: 23 | _array: The stream of data in an array structure. 24 | _current_array_position: Current position in data array. 25 | _bits_per_position: Number of bits per step through the data. 26 | _current_byte: Position of the current byte being read in the data. 27 | _current_bit: Position of the current bit being read in the data. 28 | _bits_available: Remaining number of bits left unread in the data. 
29 | ''' 30 | def __init__(self, data, base_type): 31 | self._array = None 32 | self._current_array_position = 0 33 | self._bits_per_position = 0 34 | self._current_byte = 0 35 | self._current_bit = 0 36 | self._bits_available = 0 37 | 38 | self._array = data if isinstance(data, list) else [data] 39 | base_type_size = FIT.BASE_TYPE_DEFINITIONS[base_type]['size'] 40 | self._bits_per_position = base_type_size * 8 41 | self.reset() 42 | 43 | def bits_available(self): 44 | '''Returns the number of bits left in the data.''' 45 | return self._bits_available 46 | 47 | def has_bits_available(self): 48 | '''Returns true if the data has bits available.''' 49 | return self._bits_available > 0 50 | 51 | def reset(self): 52 | '''Resets the bitstream to the start of the data and resets the bits available.''' 53 | self._current_array_position = 0 54 | self._bits_available = self._bits_per_position * len(self._array) 55 | self.__next_byte() 56 | 57 | def read_bit(self): 58 | '''Reads the next bit if possible.''' 59 | if self.has_bits_available() is False: 60 | self.__raise_error() 61 | 62 | if self._current_bit >= self._bits_per_position: 63 | self.__next_byte() 64 | 65 | bit = self._current_byte & 0x01 66 | self._current_byte = (self._current_byte >> 1) 67 | self._current_bit += 1 68 | self._bits_available -= 1 69 | 70 | return bit 71 | 72 | def read_bits(self, number_bits_to_read): 73 | '''Reads the specificed number of bits if possible.''' 74 | value = 0 75 | 76 | for i in range(number_bits_to_read): 77 | value |= self.read_bit() << i 78 | 79 | return value 80 | 81 | def __next_byte(self): 82 | if self._current_array_position >= len(self._array): 83 | self.__raise_error() 84 | 85 | self._current_byte = self._array[self._current_array_position] 86 | self._current_array_position += 1 87 | self._current_bit = 0 88 | 89 | def __raise_error(self): 90 | raise IndexError('FIT Runtime Error, no bits available.') 91 | -------------------------------------------------------------------------------- /garmin_fit_sdk/fit.py: -------------------------------------------------------------------------------- 1 | '''fit.py: Contains base type defintions and conversion functions compliant with the FIT Protocol.''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | # ****WARNING**** This file is auto-generated! Do NOT edit this file. 
10 | # Profile Version = 21.178.0Release 11 | # Tag = production/release/21.178.0-0-g3bea629 12 | ############################################################################################ 13 | 14 | 15 | BASE_TYPE = { 16 | "ENUM": 0x00, 17 | "SINT8": 0x01, 18 | "UINT8": 0x02, 19 | "SINT16": 0x83, 20 | "UINT16": 0x84, 21 | "SINT32": 0x85, 22 | "UINT32": 0x86, 23 | "STRING": 0x07, 24 | "FLOAT32": 0x88, 25 | "FLOAT64": 0x89, 26 | "UINT8Z": 0x0A, 27 | "UINT16Z": 0x8B, 28 | "UINT32Z": 0x8C, 29 | "BYTE": 0x0D, 30 | "SINT64": 0x8E, 31 | "UINT64": 0x8F, 32 | "UINT64Z": 0x90 33 | } 34 | 35 | FIELD_TYPE_TO_BASE_TYPE = { 36 | "sint8": BASE_TYPE['SINT8'], 37 | "uint8": BASE_TYPE['UINT8'], 38 | "sint16": BASE_TYPE['SINT16'], 39 | "uint16": BASE_TYPE['UINT16'], 40 | "sint32": BASE_TYPE['SINT32'], 41 | "uint32": BASE_TYPE['UINT32'], 42 | "string": BASE_TYPE['STRING'], 43 | "float32": BASE_TYPE['FLOAT32'], 44 | "float64": BASE_TYPE['FLOAT64'], 45 | "uint8z": BASE_TYPE['UINT8Z'], 46 | "uint16z": BASE_TYPE['UINT16Z'], 47 | "uint32z": BASE_TYPE['UINT32Z'], 48 | "byte": BASE_TYPE['BYTE'], 49 | "sint64": BASE_TYPE['SINT64'], 50 | "uint64": BASE_TYPE['UINT64'], 51 | "uint64z": BASE_TYPE['UINT64Z'] 52 | } 53 | 54 | BASE_TYPE_DEFINITIONS = { 55 | 0x00: {'size': 1, 'type': BASE_TYPE["ENUM"], 'signed': False, 'type_code': 'B', 'invalid': 0xFF}, 56 | 0x01: {'size': 1, 'type': BASE_TYPE["SINT8"], 'signed': True, 'type_code': 'b', 'invalid': 0x7F}, 57 | 0x02: {'size': 1, 'type': BASE_TYPE["UINT8"], 'signed': False, 'type_code': 'B', 'invalid': 0xFF}, 58 | 0x83: {'size': 2, 'type': BASE_TYPE["SINT16"], 'signed': True, 'type_code': 'h', 'invalid': 0x7FFF}, 59 | 0x84: {'size': 2, 'type': BASE_TYPE["UINT16"], 'signed': False, 'type_code': 'H', 'invalid': 0xFFFF}, 60 | 0x85: {'size': 4, 'type': BASE_TYPE["SINT32"], 'signed': True, 'type_code': 'i', 'invalid': 0x7FFFFFFF}, 61 | 0x86: {'size': 4, 'type': BASE_TYPE["UINT32"], 'signed': False, 'type_code': 'I', 'invalid': 0xFFFFFFFF}, 62 | 0x07: {'size': 1, 'type': BASE_TYPE["STRING"], 'signed': False, 'type_code': 's', 'invalid': 0x00}, 63 | 0x88: {'size': 4, 'type': BASE_TYPE["FLOAT32"], 'signed': True, 'type_code': 'f', 'invalid': 0xFFFFFFFF}, 64 | 0x89: {'size': 8, 'type': BASE_TYPE["FLOAT64"], 'signed': True, 'type_code': 'd', 'invalid': 0xFFFFFFFFFFFFFFFF}, 65 | 0x0A: {'size': 1, 'type': BASE_TYPE["UINT8Z"], 'signed': False, 'type_code': 'B', 'invalid': 0x00}, 66 | 0x8B: {'size': 2, 'type': BASE_TYPE["UINT16Z"], 'signed': False, 'type_code': 'H', 'invalid': 0x0000}, 67 | 0x8C: {'size': 4, 'type': BASE_TYPE["UINT32Z"], 'signed': False, 'type_code': 'I', 'invalid': 0x00000000}, 68 | 0x0D: {'size': 1, 'type': BASE_TYPE["BYTE"], 'signed': False, 'type_code': 'B', 'invalid': 0xFF}, 69 | 0x8E: {'size': 8, 'type': BASE_TYPE["SINT64"], 'signed': True, 'type_code': 'q', 'invalid': 0x7FFFFFFFFFFFFFFF}, 70 | 0x8F: {'size': 8, 'type': BASE_TYPE["UINT64"], 'signed': False, 'type_code': 'Q', 'invalid': 0xFFFFFFFFFFFFFFFF}, 71 | 0x90: {'size': 8, 'type': BASE_TYPE["UINT64Z"], 'signed': False, 'type_code': 'L', 'invalid': 0x0000000000000000}, 72 | } 73 | 74 | NUMERIC_FIELD_TYPES = [ 75 | "sint8", 76 | "uint8", 77 | "sint16", 78 | "uint16", 79 | "sint32", 80 | "uint32", 81 | "float32", 82 | "float64", 83 | "uint8z", 84 | "uint16z", 85 | "uint32z", 86 | "byte", 87 | "sint64", 88 | "uint64", 89 | "uint64z" 90 | ] 91 | -------------------------------------------------------------------------------- /tests/test_bitstream.py: 
-------------------------------------------------------------------------------- 1 | '''test_bitstream.py: Contains the set of tests for the Bitstream class in the Python FIT SDK''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | 10 | 11 | import pytest 12 | from garmin_fit_sdk import BitStream 13 | from garmin_fit_sdk import fit as FIT 14 | 15 | 16 | class TestFromByteArray: 17 | def test_next_bit(self): 18 | bit_stream = BitStream([0xAA, 0xAA], FIT.BASE_TYPE['UINT8']) 19 | values = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1] 20 | 21 | index = 0 22 | for expected in values: 23 | assert bit_stream.bits_available() == len(values) - index 24 | assert bit_stream.has_bits_available() is True 25 | 26 | actual = bit_stream.read_bit() 27 | assert actual == expected 28 | 29 | assert bit_stream.bits_available() == len(values) - index - 1 30 | 31 | index += 1 32 | 33 | @pytest.mark.parametrize( 34 | "test_data", 35 | [ 36 | { 37 | 'data': [0xAA], 38 | 'base_type': FIT.BASE_TYPE['UINT8'], 39 | 'bits_to_read': [4, 4], 40 | 'values': [0xA, 0xA] 41 | }, 42 | { 43 | 'data': [0xAA], 44 | 'base_type': FIT.BASE_TYPE['UINT8'], 45 | 'bits_to_read': [8], 46 | 'values': [0xAA] 47 | }, 48 | { 49 | 'data': [0xAA, 0xAA], 50 | 'base_type': FIT.BASE_TYPE['UINT8'], 51 | 'bits_to_read': [16], 52 | 'values': [0xAAAA] 53 | }, 54 | { 55 | 'data': [0xFF, 0xFF], 56 | 'base_type': FIT.BASE_TYPE['UINT8'], 57 | 'bits_to_read': [16], 58 | 'values': [0xFFFF] 59 | }, 60 | { 61 | 'data': [0xAA, 0xAA, 0xAA, 0x2A], 62 | 'base_type': FIT.BASE_TYPE['UINT8'], 63 | 'bits_to_read': [32], 64 | 'values': [0x2AAAAAAA] 65 | }, 66 | { 67 | 'data': [0x10, 0x32, 0x54, 0x76], 68 | 'base_type': FIT.BASE_TYPE['UINT8'], 69 | 'bits_to_read': [32], 70 | 'values': [0x76543210] 71 | }, 72 | ], 73 | ) 74 | def test_from_byte_array(self, test_data): 75 | bit_stream = BitStream(test_data['data'], test_data['base_type']) 76 | index = 0 77 | for expected in test_data['values']: 78 | actual = bit_stream.read_bits(test_data['bits_to_read'][index]) 79 | assert actual == expected 80 | index += 1 81 | 82 | class TestFromInteger: 83 | 84 | def test_next_bit(self): 85 | bit_stream = BitStream(0x0FAA, FIT.BASE_TYPE['UINT16']) 86 | values = [0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0] 87 | 88 | index = 0 89 | for expected in values: 90 | assert bit_stream.bits_available() == len(values) - index 91 | assert bit_stream.has_bits_available() is True 92 | 93 | actual = bit_stream.read_bit() 94 | assert actual == expected 95 | 96 | assert bit_stream.bits_available() == len(values) - index - 1 97 | 98 | index += 1 99 | 100 | @pytest.mark.parametrize( 101 | "test_data", 102 | [ 103 | { 104 | 'data': 0xAA, 105 | 'base_type': FIT.BASE_TYPE['UINT8'], 106 | 'bits_to_read': [4], 107 | 'values': [0xA] 108 | }, 109 | { 110 | 'data': 0xAA, 111 | 'base_type': FIT.BASE_TYPE['UINT8'], 112 | 'bits_to_read': [4, 4], 113 | 'values': [0xA, 0xA] 114 | }, 115 | { 116 | 'data': 0xAA, 117 | 'base_type': FIT.BASE_TYPE['UINT8'], 118 | 'bits_to_read': [4, 1, 1, 1, 1], 119 | 'values': [0xA, 0x0, 0x1, 0x0, 0x1] 120 | }, 121 | { 122 | 'data': 0xAA, 123 | 'base_type': 
FIT.BASE_TYPE['UINT16'], 124 | 'bits_to_read': [4, 1, 1, 1, 1], 125 | 'values': [0xA, 0x0, 0x1, 0x0, 0x1] 126 | }, 127 | { 128 | 'data': [0xAAAA, 0x2AAA], 129 | 'base_type': FIT.BASE_TYPE['UINT16'], 130 | 'bits_to_read': [32], 131 | 'values': [0x2AAAAAAA] 132 | }, 133 | { 134 | 'data': [0xAAAAAAAA], 135 | 'base_type': FIT.BASE_TYPE['UINT32'], 136 | 'bits_to_read': [16, 8, 8], 137 | 'values': [0xAAAA, 0xAA, 0xAA] 138 | }, 139 | ], 140 | ) 141 | def test_from_integer(self, test_data): 142 | bit_stream = BitStream(test_data['data'], test_data['base_type']) 143 | index = 0 144 | for expected in test_data['values']: 145 | actual = bit_stream.read_bits(test_data['bits_to_read'][index]) 146 | assert actual == expected 147 | index += 1 148 | 149 | def test_exception_raised_big_overstep(): 150 | '''Test that makes sure that an index error exception is raised when reading too many bits.''' 151 | try: 152 | bit_stream = BitStream(0x0FAA, FIT.BASE_TYPE['UINT16']) 153 | bit_stream.read_bits(20) 154 | assert False 155 | except IndexError: 156 | assert True 157 | def test_exception_raised_boundary(): 158 | '''Test that makes sure that an index error exception is raised when reading one too many bits.''' 159 | try: 160 | bit_stream = BitStream(0x0FAA, FIT.BASE_TYPE['UINT16']) 161 | bit_stream.read_bits(16) 162 | bit_stream.read_bit() 163 | assert False 164 | except IndexError: 165 | assert True 166 | -------------------------------------------------------------------------------- /garmin_fit_sdk/hr_mesg_utils.py: -------------------------------------------------------------------------------- 1 | '''hr_mesg_utils.py: Contains the functions for merging hr_mesgs to record_mesgs''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | # ****WARNING**** This file is auto-generated! Do NOT edit this file. 10 | # Profile Version = 21.178.0Release 11 | # Tag = production/release/21.178.0-0-g3bea629 12 | ############################################################################################ 13 | 14 | 15 | from datetime import datetime 16 | 17 | from . 
import util 18 | 19 | 20 | def merge_heart_rates(hr_mesgs, record_mesgs): 21 | '''Takes the list of heart rate messages and merges them into the record messages.''' 22 | if hr_mesgs is None or record_mesgs is None or len(hr_mesgs) == 0 or len(record_mesgs) == 0: 23 | return 24 | 25 | heartrates = expand_heart_rates(hr_mesgs) 26 | 27 | heartrate_index = 0 28 | record_range_start_time = None 29 | 30 | for i in range(len(record_mesgs)): 31 | message = record_mesgs[i] 32 | 33 | hr_sum = 0 34 | hr_sum_count = 0 35 | 36 | record_range_end_time = seconds_since_fit_epoch(message['timestamp']) 37 | 38 | if record_range_start_time is None: 39 | record_range_start_time = record_range_end_time 40 | 41 | if record_range_start_time == record_range_end_time: 42 | record_range_start_time -= 1 43 | heartrate_index = heartrate_index - 1 if heartrate_index >= 1 else 0 44 | 45 | finding_in_range_hr_mesgs = True 46 | while(finding_in_range_hr_mesgs and (heartrate_index < len(heartrates))): 47 | heart_rate = heartrates[heartrate_index] 48 | 49 | # Check if the heartrate timestamp is > record start time 50 | # and if the heartrate timestamp is <= to record end time 51 | if heart_rate['timestamp'] > record_range_start_time and heart_rate['timestamp'] <= record_range_end_time: 52 | hr_sum += heart_rate['heart_rate'] 53 | hr_sum_count += 1 54 | # Check if the heartrate timestamp exceeds the record time 55 | elif heart_rate['timestamp'] > record_range_end_time: 56 | finding_in_range_hr_mesgs = False 57 | 58 | if hr_sum_count > 0: 59 | # Update record's heart rate value 60 | #avg_hr = round(hr_sum / hr_sum_count, 0) 61 | avg_hr = int((hr_sum / hr_sum_count) + .5) 62 | message['heart_rate'] = avg_hr 63 | # Reset HR average accumulators 64 | hr_sum = 0 65 | hr_sum_count = 0 66 | record_range_start_time = record_range_end_time 67 | 68 | # Breaks out of finding_in_range_hr_messages while loop without incrementing heartrate_index 69 | break 70 | heartrate_index += 1 71 | 72 | 73 | def expand_heart_rates(hr_mesgs): 74 | '''Takes the heart rate messages and expands them to 250ms increments.''' 75 | GAP_INCREMENT_MILLISECONDS = 250 76 | GAP_INCREMENT_SECONDS = GAP_INCREMENT_MILLISECONDS / 1000.0 77 | GAP_MAX_MILLISECONDS = 5000 78 | GAP_MAX_STEPS = GAP_MAX_MILLISECONDS / GAP_INCREMENT_MILLISECONDS 79 | 80 | if hr_mesgs is None or len(hr_mesgs) == 0: 81 | return [] 82 | 83 | anchor_event_timestamp = 0.0 84 | anchor_timestamp = None 85 | 86 | heartrates = [] 87 | 88 | for message in hr_mesgs: 89 | if message is None: 90 | __raise_error("HR message must not be None.") 91 | 92 | event_timestamps = message['event_timestamp'] if isinstance(message['event_timestamp'], list) else [message['event_timestamp']] 93 | filtered_bpm = message['filtered_bpm'] if isinstance(message['filtered_bpm'], list) else [message['filtered_bpm']] 94 | 95 | # Update HR anchor timestamp if present 96 | if 'timestamp' in message and message['timestamp'] is not None: 97 | anchor_timestamp = seconds_since_fit_epoch(message['timestamp']) 98 | 99 | if message['fractional_timestamp'] is not None: 100 | anchor_timestamp += message['fractional_timestamp'] 101 | 102 | if len(event_timestamps) == 1: 103 | anchor_event_timestamp = event_timestamps[0] 104 | else: 105 | __raise_error("Anchor HR message must have at least one event_timestamp") 106 | 107 | if anchor_timestamp is None or anchor_event_timestamp is None: 108 | __raise_error("No anchor timestamp received in an HR message before delta HR messages") 109 | elif len(event_timestamps) != len(filtered_bpm): 110 | 
__raise_error("HR message with mismatching event timestamp and filtered bpm") 111 | 112 | for i in range(len(event_timestamps)): 113 | event_timestamp = event_timestamps[i] 114 | 115 | if event_timestamp < anchor_event_timestamp: 116 | if anchor_event_timestamp - event_timestamp > (0x400000): 117 | event_timestamp += 0x400000 118 | else: 119 | __raise_error("Anchor event_timestamp is greater than subsequent event_timestamp. This does not allow for correct delta caluclation.") 120 | 121 | 122 | current_hr = { 'timestamp': anchor_timestamp, 'heart_rate': filtered_bpm[i] } 123 | current_hr['timestamp'] += (event_timestamp - anchor_event_timestamp) 124 | 125 | # Carry the previous HR value forward across the gap to the current 126 | # HR value for up to 5 seconds in 250ms increments 127 | if len(heartrates) > 0: 128 | previous_hr = heartrates[len(heartrates) - 1] 129 | gap_in_milliseconds = abs(current_hr['timestamp'] - previous_hr['timestamp']) * 1000 130 | step = 1 131 | 132 | while(gap_in_milliseconds > GAP_INCREMENT_MILLISECONDS and step <= GAP_MAX_STEPS): 133 | gap_hr = { 'timestamp': previous_hr['timestamp'], 'heart_rate': previous_hr['heart_rate'] } 134 | gap_hr['timestamp'] += (GAP_INCREMENT_SECONDS * step) 135 | heartrates.append(gap_hr) 136 | 137 | gap_in_milliseconds -= GAP_INCREMENT_MILLISECONDS 138 | step += 1 139 | 140 | heartrates.append(current_hr) 141 | return heartrates 142 | 143 | def seconds_since_fit_epoch(timestamp): 144 | '''Gives the time in seconds since the fit epoch.''' 145 | if isinstance(timestamp, datetime): 146 | return (timestamp.timestamp() - util.FIT_EPOCH_S) 147 | 148 | return timestamp 149 | 150 | def __raise_error(error = ""): 151 | message = f"FIT Runtime Error {error}" 152 | raise RuntimeError(message) 153 | -------------------------------------------------------------------------------- /garmin_fit_sdk/stream.py: -------------------------------------------------------------------------------- 1 | '''stream.py: Contains stream class which handles reading streams of data in the following ways: 2 | 1. From a binary .fit file 3 | 2. From a Python bytearray 4 | 3. From a Python BytesIO object 5 | 4. From a Python BufferedReader''' 6 | 7 | ########################################################################################### 8 | # Copyright 2025 Garmin International, Inc. 9 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 10 | # may not use this file except in compliance with the Flexible and Interoperable Data 11 | # Transfer (FIT) Protocol License. 12 | ########################################################################################### 13 | # ****WARNING**** This file is auto-generated! Do NOT edit this file. 14 | # Profile Version = 21.178.0Release 15 | # Tag = production/release/21.178.0-0-g3bea629 16 | ############################################################################################ 17 | 18 | 19 | import os 20 | from enum import Enum 21 | from io import BufferedReader, BytesIO 22 | from struct import unpack 23 | 24 | 25 | class Endianness(str, Enum): 26 | '''An enum class for denoting a bytes endinannes (LSB or MSB)''' 27 | LITTLE = "little" 28 | BIG = "big" 29 | 30 | 31 | class Stream: 32 | ''' 33 | A class that represents a stream of data from a .fit file. 34 | 35 | Attributes: 36 | _buffered_reader: The buffered reader that holds the stream data. 37 | _stream_length: The calculated length of the stream. 
38 | _crc_calculator: The CRC calculator which calculates the CRC each time bytes are read. 39 | ''' 40 | @staticmethod 41 | def from_file(filename): 42 | '''Creates a stream object from a given .fit file''' 43 | buffered_reader = open(filename, "rb") 44 | return Stream.from_buffered_reader(buffered_reader, os.path.getsize(filename)) 45 | 46 | @staticmethod 47 | def from_byte_array(byte_array: bytearray, stream_length = None): 48 | '''Creates a stream object from a given byte array''' 49 | bytes_io = BytesIO(byte_array) 50 | if stream_length is None: 51 | stream_length = len(byte_array) 52 | 53 | return Stream.from_bytes_io(bytes_io, stream_length) 54 | 55 | @staticmethod 56 | def from_bytes_io(bytes_io: BytesIO, length = None): 57 | '''Creates a stream object from a given BytesIO object''' 58 | buffered_reader = BufferedReader(bytes_io) 59 | if length is None: 60 | length = bytes_io.getbuffer().nbytes 61 | 62 | return Stream.from_buffered_reader(buffered_reader, length) 63 | 64 | @staticmethod 65 | def from_buffered_reader(buffered_reader: BufferedReader, length = None): 66 | '''Creates a stream boject from a given BufferedReader object''' 67 | if length is None: 68 | length = Stream.__calc_stream_size(buffered_reader) 69 | 70 | stream = Stream(buffered_reader, length) 71 | return stream 72 | 73 | @staticmethod 74 | def __calc_stream_size(buffered_reader: BufferedReader): 75 | starting_position = buffered_reader.tell() 76 | buffered_reader.seek(0, os.SEEK_END) 77 | size = buffered_reader.tell() 78 | buffered_reader.seek(starting_position) 79 | return size 80 | 81 | def __init__(self, buffered_reader: BufferedReader, stream_length): 82 | self._buffered_reader = buffered_reader 83 | self._stream_length = stream_length 84 | 85 | self._crc_calculator = None 86 | 87 | def __del__(self): 88 | self.close() 89 | 90 | def __exit__(self, *_): 91 | self.close() 92 | 93 | def close(self): 94 | '''Closes the buffered reader in the stream.''' 95 | self._buffered_reader.close() 96 | 97 | def get_buffered_reader(self): 98 | '''Returns the buffered reader of the stream.''' 99 | return self._buffered_reader 100 | 101 | def peek_byte(self): 102 | '''Reads one byte from the stream without advancing stream position.''' 103 | return self._buffered_reader.peek(1)[0] 104 | 105 | def peek_bytes(self, num_bytes: int): 106 | '''Reads the given amount of bytes from the stream without advancing stream position ''' 107 | return self._buffered_reader.peek(num_bytes)[0:num_bytes] 108 | 109 | def slice(self, start: int, end: int): 110 | '''Returns all of the bytes from the stream between the given start and end.''' 111 | starting_position = self.position() 112 | self.seek(start) 113 | slice = self.peek_bytes(end - start)[0: end - start] 114 | self.seek(starting_position) 115 | return slice 116 | 117 | def seek(self, position: int): 118 | '''Moves the stream position of stream to the given position.''' 119 | self._buffered_reader.seek(position) 120 | 121 | def read_byte(self): 122 | '''Reads one byte from the stream.''' 123 | if self.position() > self._stream_length - 1: 124 | raise IndexError("FIT Runtime Error, end of file reached at byte pos: " + self.position()) 125 | return self.read_bytes(1)[0] 126 | 127 | def read_bytes(self, num_bytes: int): 128 | '''Reads the given amount of bytes from the stream.''' 129 | if num_bytes > (self._stream_length - self.position()): 130 | raise IndexError("FIT Runtime Error number of bytes provided is longer than the number of bytes remaining") 131 | 132 | read_bytes = 
self._buffered_reader.read(num_bytes)[0:num_bytes] 133 | 134 | if self._crc_calculator is not None: 135 | self._crc_calculator.add_bytes(read_bytes, 0, num_bytes) 136 | 137 | return read_bytes 138 | 139 | def read_unint_16(self, endianness: Endianness = Endianness.LITTLE): 140 | '''Reads a 16-bit unsigned integer from the stream with the given endianness''' 141 | return int.from_bytes(self.read_bytes(2), endianness) 142 | 143 | def read_unint_32(self, endianness: Endianness = Endianness.LITTLE): 144 | '''Reads a 32-bit unsigned integer from the stream with the given endianness''' 145 | return int.from_bytes(self.read_bytes(4), endianness) 146 | 147 | def read_string(self, string_length): 148 | '''Reads a string from the stream with the given string length''' 149 | struct_string = "=" + str(string_length) + "s" 150 | return self.read_and_unpack(string_length, struct_string) 151 | 152 | def reset(self): 153 | '''Resets the stream position to the beginning of the stream.''' 154 | self._buffered_reader.seek(0) 155 | 156 | def position(self): 157 | '''Returns the current position in the stream.''' 158 | return self._buffered_reader.tell() 159 | 160 | def get_length(self): 161 | '''Returns the total length of the stream.''' 162 | return self._stream_length 163 | 164 | def read_and_unpack(self, size: int, struct_format_string): 165 | '''Reads a given number of bytes and unpacks the binary struct given a formatting string template''' 166 | byte_array = self.read_bytes(size) 167 | 168 | values = list(unpack(struct_format_string, byte_array)) 169 | 170 | return values 171 | 172 | def get_crc_caclulator(self): 173 | '''Returns the CRC calculator''' 174 | return self._crc_calculator 175 | 176 | def set_crc_calculator(self, crc_calculator): 177 | '''Sets the CRC calculator''' 178 | self._crc_calculator = crc_calculator 179 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Garmin - FIT Python SDK 2 | 3 | ## FIT SDK Documentation 4 | The FIT SDK documentation is available at [https://developer.garmin.com/fit](https://developer.garmin.com/fit). 5 | ## FIT SDK Developer Forum 6 | Share your knowledge, ask questions, and get the latest FIT SDK news in the [FIT SDK Developer Forum](https://forums.garmin.com/developer/). 7 | 8 | ## FIT Python SDK Requirements 9 | * [Python](https:##www.python.org/downloads/) Version 3.6 or greater is required to run the FIT Python SDK 10 | 11 | ## Install 12 | ```sh 13 | pip install garmin-fit-sdk 14 | ``` 15 | 16 | ## Usage 17 | ```py 18 | from garmin_fit_sdk import Decoder, Stream 19 | 20 | stream = Stream.from_file("Activity.fit") 21 | decoder = Decoder(stream) 22 | messages, errors = decoder.read() 23 | 24 | print(errors) 25 | print(messages) 26 | ``` 27 | 28 | ## Decoder 29 | 30 | ### Constructor 31 | 32 | Creating Decoder objects requires an input Stream representing the binary FIT file data to be decoded. See [Creating Streams](#creatingstreams) for more information on constructing Stream objects. 33 | 34 | Once a Decoder object is created it can be used to check that the Stream is a FIT file, that the FIT file is valid, and to read the contents of the FIT file. 35 | 36 | ### is_fit Method 37 | 38 | All valid FIT files should include a 12 or 14 byte file header. The 14 byte header is the preferred header size and the most common size used. Bytes 8-11 of the header contain the ASCII values ".FIT". 
This string can easily be spotted when opening a binary FIT file in a text or hex editor. 39 | 40 | ``` 41 | Offset: 00 01 02 03 04 05 06 07 08 09 0A 0B 0C 0D 0E 0F 42 | 00000000: 0E 10 43 08 78 06 09 00 2E 46 49 54 96 85 40 00 ..C.x....FIT..@. 43 | 00000010: 00 00 00 07 03 04 8C 04 04 86 07 04 86 01 02 84 ................ 44 | 00000020: 02 02 84 05 02 84 00 01 00 00 19 28 7E C5 95 B0 ...........(~E.0 45 | ``` 46 | 47 | ### check_integrity Method 48 | 49 | The check_integrity method performs three checks on a FIT file: 50 | 51 | 1. Checks that bytes 8-11 of the header contain the ASCII values ".FIT". 52 | 2. Checks that the total file size is equal to Header Size + Data Size + CRC Size. 53 | 3. Reads the contents of the file, computes the CRC, and then checks that the computed CRC matches the file CRC. 54 | 55 | A file must pass all three of these tests to be considered a valid FIT file; a combined usage sketch is shown in the Creating Streams section below. See the [IsFIT(), CheckIntegrity(), and Read() Methods recipe](/fit/cookbook/isfit-checkintegrity-read/) for use cases where the check_integrity method should be used and cases when it might be better to avoid it. 56 | 57 | ### Read Method 58 | The Read method decodes all messages from the input stream and returns a dictionary of decoded messages grouped by message type, along with a list of errors encountered during decoding. Any exceptions encountered during decoding will be caught by the Read method and added to the list of errors. 59 | 60 | The Read method accepts optional keyword arguments that can be used to customize how field data is represented in the decoded messages. All options are enabled by default. Disabling options may speed up file decoding. Options may also be enabled or disabled based on how the decoded data will be used. 61 | 62 | ```py 63 | messages, errors = read( 64 | apply_scale_and_offset = True, 65 | convert_datetimes_to_dates = True, 66 | convert_types_to_strings = True, 67 | enable_crc_check = True, 68 | expand_sub_fields = True, 69 | expand_components = True, 70 | merge_heart_rates = True, 71 | mesg_listener = None) 72 | ``` 73 | #### mesg_listener 74 | Optional callback function that can be used to inspect or manipulate messages after they are fully decoded and all the options have been applied. The message is mutable and will be returned from the Read method in the messages dictionary. 75 | 76 | Example mesg_listener callback that tracks the field names across all Record messages. 77 | 78 | ```py 79 | from garmin_fit_sdk import Decoder, Stream, Profile 80 | 81 | stream = Stream.from_file("Activity.fit") 82 | decoder = Decoder(stream) 83 | 84 | record_fields = set() 85 | def mesg_listener(mesg_num, message): 86 | if mesg_num == Profile['mesg_num']['RECORD']: 87 | for field in message: 88 | record_fields.add(field) 89 | 90 | messages, errors = decoder.read(mesg_listener = mesg_listener) 91 | 92 | if len(errors) > 0: 93 | print(f"Something went wrong decoding the file: {errors}") 94 | else: 95 | print(record_fields) 96 | 97 | ``` 98 | 99 | #### apply_scale_and_offset: true | false 100 | When true the scale and offset values as defined in the FIT Profile are applied to the raw field values. 101 | ```py 102 | { 103 | 'altitude': 1587 ## with a scale of 5 and offset of 500 applied 104 | } 105 | ``` 106 | When false the raw field value is used.
107 | ```py 108 | { 109 | 'altitude': 10435 ## raw value stored in file 110 | } 111 | ``` 112 | #### enable_crc_check: true | false 113 | When true the CRC of the file is calculated when decoding a FIT file and then validated against the CRC found in the file. Disabling the CRC calculation will improve the performance of the read method. 114 | #### expand_sub_fields: true | false 115 | When true subfields are created for fields as defined in the FIT Profile. 116 | ```py 117 | { 118 | 'event': 'rear_gear_change', 119 | 'data': 16717829, 120 | 'gear_change_data': 16717829 ## Sub Field of data when event == 'rear_gear_change' 121 | } 122 | ``` 123 | When false subfields are omitted. 124 | ```py 125 | { 126 | 'event': 'rear_gear_change', 127 | 'data': 16717829 128 | } 129 | ``` 130 | #### expand_components: true | false 131 | When true field components as defined in the FIT Profile are expanded into new fields. expand_sub_fields must be set to true in order for the components of subfields to be expanded. 132 | 133 | ```py 134 | { 135 | 'event': 'rear_gear_change', 136 | 'data': 16717829, 137 | 'gear_change_data': 16717829, ## Sub Field of data when event == 'rear_gear_change' 138 | 'front_gear': 2, ## Expanded field of gear_change_data, bits 0-7 139 | 'front_gear_num': 53, ## Expanded field of gear_change_data, bits 8-15 140 | 'rear_gear': 11, ## Expanded field of gear_change_data, bits 16-23 141 | 'rear_gear_num': 1, ## Expanded field of gear_change_data, bits 24-31 142 | } 143 | ``` 144 | When false field components are not expanded. 145 | ```py 146 | { 147 | 'event': 'rear_gear_change', 148 | 'data': 16717829, 149 | 'gear_change_data': 16717829 ## Sub Field of data when event == 'rear_gear_change' 150 | } 151 | ``` 152 | #### convert_types_to_strings: true | false 153 | When true field values are converted from raw integer values to the corresponding string values as defined in the FIT Profile. 154 | ```py 155 | { 'type': 'activity' } 156 | ``` 157 | When false the raw integer value is used. 158 | ```py 159 | { 'type': 4 } 160 | ``` 161 | #### convert_datetimes_to_dates: true | false 162 | When true FIT Epoch values are converted to Python datetime objects. 163 | ```py 164 | { 'time_created': {Python datetime object} } 165 | ``` 166 | When false the FIT Epoch value is used. 167 | ```py 168 | { 'time_created': 995749880 } 169 | ``` 170 | When false the util.convert_timestamp_to_datetime method may be used to convert FIT Epoch values to Python datetime objects. 171 | #### merge_heart_rates: true | false 172 | When true, heart rate values from HR messages are automatically merged into the Record messages. This option requires the apply_scale_and_offset and expand_components options to be enabled. This option has no effect on the Record messages when no HR messages are present in the decoded messages. 173 | 174 | ## Creating Streams 175 | Stream objects contain the binary FIT data to be decoded. Stream objects can be created from files, bytearrays, BufferedReaders, and BytesIO objects. Internally the Stream class uses a BufferedReader to manage the byte stream.
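Putting the pieces above together, the following is a minimal sketch that creates a stream from a file, validates it, and decodes it. It is a sketch only: it assumes an `Activity.fit` file as in the Usage example, and it assumes check_integrity is called on the Decoder instance with no arguments, which is not shown explicitly elsewhere in this README.

```py
from garmin_fit_sdk import Decoder, Stream

stream = Stream.from_file("Activity.fit")  # hypothetical path, as in the Usage example
decoder = Decoder(stream)

# is_fit only inspects the file header; check_integrity (assumed here to be an
# instance method taking no arguments) also verifies the file size and CRC.
if not Decoder.is_fit(stream):
    print("Input is not a FIT file")
elif not decoder.check_integrity():
    print("FIT file failed the integrity check")
else:
    messages, errors = decoder.read()
    print(errors)
```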
174 | ## Creating Streams
175 | Stream objects contain the binary FIT data to be decoded. Stream objects can be created from bytearrays, BufferedReaders, and BytesIO objects. Internally the Stream class uses a BufferedReader to manage the byte stream.
176 | 
177 | #### From a file
178 | ```py
179 | stream = Stream.from_file("activity.fit")
180 | print(f"is_fit: {Decoder.is_fit(stream)}")
181 | ```
182 | #### From a bytearray
183 | ```py
184 | fit_byte_array = bytearray([0x0E, 0x10, 0xD9, 0x07, 0x00, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x91, 0x33, 0x00, 0x00])
185 | stream = Stream.from_byte_array(fit_byte_array)
186 | print(f"is_fit: {Decoder.is_fit(stream)}")
187 | ```
188 | #### From a BytesIO Object
189 | ```py
190 | fit_byte_bytes_io = io.BytesIO(bytearray([0x0E, 0x10, 0xD9, 0x07, 0x00, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x91, 0x33, 0x00, 0x00]))
191 | stream = Stream.from_bytes_io(fit_byte_bytes_io)
192 | print(f"is_fit: {Decoder.is_fit(stream)}")
193 | ```
194 | #### From a BufferedReader
195 | ```py
196 | fit_buffered_reader = io.BufferedReader(io.BytesIO(bytearray([0x0E, 0x10, 0xD9, 0x07, 0x00, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x91, 0x33, 0x00, 0x00])))
197 | stream = Stream.from_buffered_reader(fit_buffered_reader)
198 | print(f"is_fit: {Decoder.is_fit(stream)}")
199 | ```
200 | 
201 | ## Util
202 | The Util object contains both constants and methods for working with decoded messages and fields.
203 | ### FIT_EPOCH_S Constant
204 | The FIT_EPOCH_S constant represents the number of seconds between the Unix Epoch and the FIT Epoch.
205 | ```py
206 | FIT_EPOCH_S = 631065600
207 | ```
208 | The FIT_EPOCH_S value can be used to convert FIT Epoch values to Python datetime objects.
209 | ```py
210 | python_date = datetime.datetime.fromtimestamp(fit_datetime + FIT_EPOCH_S, datetime.UTC)
211 | ```
212 | ### convert_timestamp_to_datetime Method
213 | A convenience method for converting FIT Epoch values to Python datetime objects.
214 | ```py
215 | python_date = convert_timestamp_to_datetime(fit_datetime)
216 | ```
--------------------------------------------------------------------------------
/tests/test_stream.py:
--------------------------------------------------------------------------------
1 | '''test_stream.py: Contains the set of tests for the Stream class in the Python FIT SDK'''
2 | 
3 | ###########################################################################################
4 | # Copyright 2025 Garmin International, Inc.
5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you
6 | # may not use this file except in compliance with the Flexible and Interoperable Data
7 | # Transfer (FIT) Protocol License. 
8 | ########################################################################################### 9 | 10 | 11 | import io 12 | 13 | import pytest 14 | from garmin_fit_sdk import Stream, util 15 | 16 | 17 | def test_stream_from_buffered_reader(): 18 | '''Tests creating a stream from a buffered reader object''' 19 | buffered_reader = io.BufferedReader( 20 | io.BytesIO(bytearray([0x0E, 0x20, 0x8B]))) 21 | stream = Stream.from_buffered_reader(buffered_reader) 22 | assert stream.get_buffered_reader() is not None 23 | assert stream.peek_byte() == 0x0E 24 | 25 | def test_stream_from_bytes_io(): 26 | '''Tests creating a stream from a BytesIO object''' 27 | bytes_io = io.BytesIO(bytearray([0x0E, 0x20, 0x8B])) 28 | stream = Stream.from_bytes_io(bytes_io) 29 | assert stream.get_buffered_reader() is not None 30 | assert stream.peek_byte() == 0x0E 31 | 32 | def test_stream_from_byte_array(): 33 | '''Tests creating a stream from a bytearray object''' 34 | byte_array = bytearray([0x0E, 0x20, 0x8B]) 35 | stream = Stream.from_byte_array(byte_array) 36 | assert stream.get_buffered_reader() is not None 37 | assert stream.peek_byte() == 0x0E 38 | 39 | def test_stream_from_file(): 40 | '''Tests creating a stream from a binary fit file''' 41 | stream = Stream.from_file("tests/fits/ActivityDevFields.fit") 42 | assert stream.get_buffered_reader() is not None 43 | assert stream.peek_byte() == 0x0E 44 | 45 | 46 | @pytest.mark.parametrize( 47 | "given_bytes,position,expected_value", 48 | [ 49 | (bytearray([0x0E, 0x20, 0x8B]), 0, 0x0E), 50 | (bytearray([0x0E, 0x20, 0x8B]), 1, 0x20), 51 | (bytearray([0x0E, 0x20, 0x8B]), 2, 0x8B), 52 | ], 53 | ) 54 | class TestParametrizedByByte: 55 | '''Group of tests for testing read and peek by byte''' 56 | def test_peek_byte(self, given_bytes, position, expected_value): 57 | '''Tests peeking a single byte from the stream and returning its value''' 58 | stream = Stream.from_byte_array(given_bytes) 59 | stream.seek(position) 60 | assert stream.peek_byte() == expected_value 61 | assert stream.peek_byte() == stream.read_byte() 62 | 63 | def test_read_byte(self, given_bytes, position, expected_value): 64 | '''Tests reading a single byte from the stream and returning its value''' 65 | stream = Stream.from_byte_array(given_bytes) 66 | stream.seek(position) 67 | assert stream.read_byte() == expected_value 68 | stream.seek(position) 69 | assert stream.peek_byte() == stream.read_byte() 70 | 71 | 72 | @pytest.mark.parametrize( 73 | "given_bytes,num_bytes,expected_value", 74 | [ 75 | (bytearray([0x0E, 0x20, 0x8B]), 0, bytearray([])), 76 | (bytearray([0x0E, 0x20, 0x8B]), 1, bytearray([0x0E])), 77 | (bytearray([0x0E, 0x20, 0x8B]), 2, bytearray([0x0E, 0x20])), 78 | (bytearray([0x0E, 0x20, 0x8B]), 3, bytearray([0x0E, 0x20, 0x8B])), 79 | ], 80 | ) 81 | class TestParametrizedByBytes: 82 | '''Set of tests for verifying reads and peeks greater than one byte''' 83 | def test_peek_bytes(self, given_bytes, num_bytes, expected_value): 84 | '''Tests peeking a number of bytes from a stream''' 85 | stream = Stream.from_byte_array(given_bytes) 86 | assert stream.peek_bytes(num_bytes) == expected_value 87 | assert stream.peek_bytes(num_bytes) == stream.read_bytes(num_bytes) 88 | 89 | def test_read_bytes(self, given_bytes, num_bytes, expected_value): 90 | '''Tests peeking a number of bytes from a stream''' 91 | stream = Stream.from_byte_array(given_bytes) 92 | assert stream.read_bytes(num_bytes) == expected_value 93 | stream.reset() 94 | assert stream.peek_bytes(num_bytes) == stream.read_bytes(num_bytes) 95 | 96 
| 97 | @pytest.mark.parametrize( 98 | "given_bytes,start,end,expected_value", 99 | [ 100 | (bytearray([0x0E, 0x20, 0x8B]), 0, 1, bytearray([0x0E])), 101 | (bytearray([0x0E, 0x20, 0x8B]), 1, 2, bytearray([0x20])), 102 | (bytearray([0x0E, 0x20, 0x8B]), 0, 2, bytearray([0x0E, 0x20])), 103 | (bytearray([0x0E, 0x20, 0x8B]), 0, 3, bytearray([0x0E, 0x20, 0x8B])), 104 | ], 105 | ) 106 | def test_slice(given_bytes, start, end, expected_value): 107 | '''Tests taking an array values from the stream from the start index to the end''' 108 | stream = Stream.from_byte_array(given_bytes) 109 | starting_position = stream.position() 110 | assert stream.slice(start, end) == expected_value 111 | assert stream.position() == starting_position 112 | 113 | 114 | class TestReadValues: 115 | '''Set of tests which validate correct reading of numeric values and strings from the stream.''' 116 | @pytest.mark.parametrize( 117 | "given_bytes", 118 | [ 119 | (bytearray([0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF])), 120 | ], 121 | ) 122 | class TestInts: 123 | '''Set of tests which verify decoding of int values from a fit file.''' 124 | def test_read_unint8(self, given_bytes): 125 | stream = Stream.from_byte_array(given_bytes) 126 | values = stream.read_and_unpack(stream.get_length(), '=8B' ) 127 | assert values == [255, 255, 255, 255, 255, 255, 255, 255] 128 | 129 | def test_read_sint8(self, given_bytes): 130 | stream = Stream.from_byte_array(given_bytes) 131 | values = stream.read_and_unpack(stream.get_length(), '=8b' ) 132 | assert values == [-1, -1, -1, -1, -1, -1, -1, -1] 133 | 134 | def test_read_unint16(self, given_bytes): 135 | stream = Stream.from_byte_array(given_bytes) 136 | values = stream.read_and_unpack(stream.get_length(), '=4H' ) 137 | assert values == [65535, 65535, 65535, 65535] 138 | 139 | def test_read_sint16(self, given_bytes): 140 | stream = Stream.from_byte_array(given_bytes) 141 | values = stream.read_and_unpack(stream.get_length(), '=4h' ) 142 | assert values == [-1, -1, -1, -1] 143 | 144 | def test_read_uint32(self, given_bytes): 145 | stream = Stream.from_byte_array(given_bytes) 146 | values = stream.read_and_unpack(stream.get_length(), '=2I' ) 147 | assert values == [4294967295, 4294967295] 148 | 149 | def test_read_sint32(self, given_bytes): 150 | stream = Stream.from_byte_array(given_bytes) 151 | values = stream.read_and_unpack(stream.get_length(), '=2i' ) 152 | assert values == [-1, -1] 153 | 154 | def test_read_uint64(self, given_bytes): 155 | stream = Stream.from_byte_array(given_bytes) 156 | values = stream.read_and_unpack(stream.get_length(), '=Q' ) 157 | assert values == [18446744073709551615] 158 | def test_read_sint64(self, given_bytes): 159 | stream = Stream.from_byte_array(given_bytes) 160 | values = stream.read_and_unpack(stream.get_length(), '=q' ) 161 | assert values == [-1] 162 | 163 | 164 | @pytest.mark.parametrize( 165 | "given_bytes", 166 | [ 167 | (bytearray([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF0, 0x3F])), 168 | ], 169 | ) 170 | class TestFloats: 171 | '''Set of tests which verify decoding of float values from a fit file.''' 172 | def test_read_float_32(self, given_bytes): 173 | stream = Stream.from_byte_array(given_bytes) 174 | values = stream.read_and_unpack(stream.get_length(), '=2f' ) 175 | assert values == [0.0, 1.875] 176 | 177 | def test_read_double_64(self, given_bytes): 178 | stream = Stream.from_byte_array(given_bytes) 179 | values = stream.read_and_unpack(stream.get_length(), '=d' ) 180 | assert values == [1] 181 | 182 | 183 | class TestStrings: 184 | 
'''Set of tests which verify decoding of strings from a fit file.''' 185 | @pytest.mark.parametrize( 186 | "given_bytes,expected_value", 187 | [ 188 | (bytearray([0x2E, 0x46, 0x49, 0x54]), ".FIT"), 189 | (bytearray([0x2E, 0x46, 0x49, 0x54, 0x00, 0x00]), ".FIT"), 190 | (bytearray([0xe8, 0xbf, 0x99, 0xe5, 0xa5, 0x97, 0xe5, 0x8a, 0xa8, 0xe4, 0xbd, 191 | 0x9c, 0xe7, 0x94, 0xb1, 0xe4, 0xb8, 0xa4, 0xe7, 0xbb, 0x84]), "这套动作由两组"), 192 | (bytearray([0xe8, 0xbf, 0x99, 0xe5, 0xa5, 0x97, 0xe5, 0x8a, 0xa8, 0xe4, 0xbd, 193 | 0x9c, 0xe7, 0x94, 0xb1, 0xe4, 0xb8, 0xa4, 0xe7, 0xbb, 0x84, 0x00]), "这套动作由两组"), 194 | ], ids=["Regular String", "String w/ Null Terminator", "Multibyte String w/o Null Terminator", "Multibyte String w/ Null Terminator"], 195 | ) 196 | def test_read_string(self, given_bytes, expected_value): 197 | '''Tests reading any given string from the stream.''' 198 | stream = Stream.from_byte_array(given_bytes) 199 | value = stream.read_string(stream.get_length()) 200 | assert util._convert_string(value[0]) == expected_value 201 | 202 | def test_read_string_array(self): 203 | '''Tests reading an array of strings from the stream.''' 204 | stream = Stream.from_byte_array(bytearray([0x2E, 0x46, 0x49, 0x54, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x00])) 205 | string = stream.read_string(stream.get_length()) 206 | strings = util._convert_string(string[0]) 207 | for string in strings: 208 | assert string == '.FIT' 209 | 210 | def test_read_string_bad_utf8_characters(self): 211 | '''Tests correctly reading bad utf8 characters after decoding from the stream.''' 212 | stream = Stream.from_byte_array(bytearray([ 213 | 0x37, 0x35, 0x25, 0x20, 0x65, 0x66, 0x66, 0x6F, 0x72, 0x74, 0x2E, 0x00, 0x65, 0x66, 0x66, 0x6F, 214 | 0x72, 0x74, 0x2E, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 215 | 0x75, 0x6E, 0x00, 0x01, 0x00, 0x00, 0x00, 0x20, 0x00, 0x03, 0x40, 0x00, 0x01, 0x00, 0x1B, 0x07, 216 | 0xFE, 0x02, 0x84, 0x07, 0x01, 0x00, 0x03, 0x01, 0x00, 0x04, 0x04, 0x86, 0x01, 0x01, 217 | 0x00, 0x02, 0x04, 0x86, 0x08, 0x0C, 0x07, 0x00, 0x00, 0x00, 0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 218 | 0x01, 0x00, 0x01, 0xD7, 0x7C, 0x37, 0x35, 0x25, 0x20, 0x65, 0x66, 0x66, 0x6F, 0x72, 0x74, 0x2E, 219 | 0x00, 0x40, 0x00, 0x01, 0x00, 0x1B, 0x0B, 0xFE, 0x02, 0x84, 0x07, 0x01, 0x00, 0x03, 0x01, 220 | 0x00, 0x05, 0x04, 0x86, 0x06, 0x04, 0x86, 0x04, 0x04, 0x86, 0x01, 0x01, 0x00, 0x02, 221 | 0x04, 0x86, 0x08, 0x10, 0x07, 0x0A, 0x02, 0x84, 0x0B, 0x02, 0x84, 0x00, 0x00, 0x01, 222 | 0x00, 0x00, 0x00, 0x00, 0x08, 0xEB, 0x00, 0x03, 0x7B, 0xD7, 0x00, 0x00, 0x00, 0x00, 0x01, 223 | 0x00, 0x03, 0xAE, 0xF8, 0x52, 0x61, 0x63, 0x65, 0x20, 0x67, 0x6F, 0x61, 0x6C, 0x20, 0x70, 224 | 0x61, 0x63, 0x65, 0x2E, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x02, 0x03, 0x02, 0xFF, 225 | 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 226 | 0x00, 0x00, 0x01, 0x00, 0x01, 0xD7, 0x7C, 0x37, 0x35, 0x25, 0x20, 0x65, 0x66])) 227 | string = stream.read_string(stream.get_length()) 228 | strings = util._convert_string(string[0]) 229 | assert len(strings) == 54 230 | assert strings[0] == "75% effort." 231 | assert strings[6] == "un" # Not '����un' 232 | assert strings[13] == "" # Not '��' 233 | assert strings[42] == "Race goal pace." # Not '��Race goal pace.' 
234 | assert strings[53] == "|75% ef" # Not '�|75% ef' 235 | -------------------------------------------------------------------------------- /tests/test_decoder.py: -------------------------------------------------------------------------------- 1 | '''test_decoder.py: Contains the set of tests for the decoder class in the Python FIT SDK''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | 10 | 11 | from datetime import datetime, timezone 12 | 13 | import pytest 14 | from garmin_fit_sdk import Decoder, Stream, CrcCalculator 15 | from garmin_fit_sdk.decoder import DecodeMode 16 | 17 | from tests.data import Data 18 | 19 | 20 | class TestCheckIntegrity: 21 | '''Set of tests verify that the decoder class correctly tests the integrity of one or more fit files.''' 22 | @pytest.mark.parametrize( 23 | "data,expected_value", 24 | [ 25 | (bytearray(), False), 26 | (Data.fit_file_invalid, False), 27 | (Data.fit_file_minimum, True), 28 | (Data.fit_file_short, True), 29 | 30 | (Data.fit_file_incorrect_data_size, False) 31 | ], ids=["Empty File", "Invalid Fit File", "Minimum Size Fit File", 32 | "Fit File with Messages", "Incorrect Data Size"] 33 | ) 34 | def test_check_integrity(self, data, expected_value): 35 | '''Tests the validity of the decoder when it checks a fit file's integrity.''' 36 | stream = Stream.from_byte_array(data) 37 | decoder = Decoder(stream) 38 | assert decoder.check_integrity() == expected_value 39 | 40 | def test_check_integrity_is_fit_fail(self, mocker): 41 | '''Tests that an invalid fit file will fail when checking integrity.''' 42 | stream = Stream.from_byte_array(Data.fit_file_short) 43 | mocker.patch('garmin_fit_sdk.Decoder.is_fit', return_value=False) 44 | decoder = Decoder(stream) 45 | 46 | assert decoder.check_integrity() is False 47 | 48 | @pytest.mark.parametrize( 49 | "data,expected_value", 50 | [ 51 | (Data.fit_file_invalid, False), 52 | (Data.fit_file_minimum, True), 53 | (Data.fit_file_short, True), 54 | (bytearray(), False), 55 | (bytearray([0xE]), False), 56 | (bytearray([0x0A, 0x10, 0xD9, 0x07, 0x00, 0x00, 0x00, 0x00, 57 | 0x2E, 0x46, 0x49, 0x54, 0x91, 0x33, 0x00, 0x00]), False), 58 | (bytearray([0x0E, 0x10, 0xD9, 0x07, 0x00, 0x00, 0x00, 0x00, 59 | 0x2C, 0x46, 0x49, 0x54, 0x91, 0x33, 0x00, 0x00]), False), 60 | ], ids=["Invalid Fit File", "Minimum Size Fit File", "Fit File with Messages", 61 | "Empty File", "Input Length < 14", "Header Size != 14 || 12", "Data Type != .FIT"] 62 | ) 63 | def test_is_fit(self, data, expected_value): 64 | '''Tests the validity of the decoder function used to determine if a file is a valid fit file.''' 65 | stream = Stream.from_byte_array(data) 66 | decoder = Decoder(stream) 67 | assert decoder.is_fit() == expected_value 68 | 69 | class TestDecoderConstructor: 70 | '''Set of tests that test the functionality of the Decoder constructor''' 71 | def test_fails_if_stream_is_none(self): 72 | '''Tests that the decoder will properly throw an error if a stream that is None is provided.''' 73 | try: 74 | decoder = Decoder(None) 75 | assert False 76 | except RuntimeError: 77 | assert True 78 | 79 | class TestSkipHeaderDecodeMode: 
80 | '''Set of tests that test the fuctionality of the skip header decode mode''' 81 | def test_invalid_header_with_skip_header(self): 82 | '''Tests that file with invalid header should not fail when decode mode is skip header''' 83 | stream = Stream.from_byte_array(Data.fit_file_short_invalid_header) 84 | decoder = Decoder(stream) 85 | messages, errors = decoder.read(decode_mode = DecodeMode.SKIP_HEADER) 86 | 87 | assert len(errors) == 0 88 | assert len(messages['file_id_mesgs']) == 1 89 | 90 | def test_invalid_header_without_skip_header(self): 91 | '''Tests that file with invalid header should fail when decode mode is normal''' 92 | stream = Stream.from_byte_array(Data.fit_file_short_invalid_header) 93 | decoder = Decoder(stream) 94 | messages, errors = decoder.read() 95 | 96 | assert len(errors) == 1 97 | 98 | def test_valid_header_with_skip_header(self): 99 | '''Tests that file with valid header should not fail when decode mode is skip header''' 100 | stream = Stream.from_byte_array(Data.fit_file_short) 101 | decoder = Decoder(stream) 102 | messages, errors = decoder.read(decode_mode = DecodeMode.SKIP_HEADER) 103 | 104 | assert len(errors) == 0 105 | assert len(messages['file_id_mesgs']) == 1 106 | 107 | def test_invalid_crc_with_skip_header(self): 108 | '''Tests that file with invalid CRC should not fail when decode mode is skip header''' 109 | stream = Stream.from_byte_array(Data.fit_file_short_new_invalid_crc) 110 | decoder = Decoder(stream) 111 | messages, errors = decoder.read(decode_mode = DecodeMode.SKIP_HEADER) 112 | 113 | assert len(errors) == 0 114 | assert len(messages['file_id_mesgs']) == 1 115 | 116 | class TestDataOnlyDecodeMode: 117 | '''Set of tests that test the fuctionality of the data only decode mode''' 118 | def test_no_header_with_data_only(self): 119 | '''Tests that file with no header should not fail when decode mode is data only''' 120 | stream = Stream.from_byte_array(Data.fit_file_short_data_only) 121 | decoder = Decoder(stream) 122 | messages, errors = decoder.read(decode_mode = DecodeMode.DATA_ONLY) 123 | 124 | assert len(errors) == 0 125 | assert len(messages['file_id_mesgs']) == 1 126 | 127 | def test_no_header_without_data_only(self): 128 | '''Tests that file with no header fails when decode mode is data only''' 129 | stream = Stream.from_byte_array(Data.fit_file_short_data_only) 130 | decoder = Decoder(stream) 131 | messages, errors = decoder.read() 132 | 133 | assert len(errors) == 1 134 | 135 | def test_invalid_crc_with_data_only(self): 136 | '''Tests that file with invalid CRC should not fail when decode mode is data only''' 137 | stream = Stream.from_byte_array(Data.fit_file_short_new_invalid_crc[14:]) 138 | decoder = Decoder(stream) 139 | messages, errors = decoder.read(decode_mode = DecodeMode.DATA_ONLY) 140 | 141 | assert len(errors) == 0 142 | assert len(messages['file_id_mesgs']) == 1 143 | 144 | class TestReadFileHeader: 145 | '''Set of tests that test the functionality of reading file headers and the File Header class''' 146 | def test_read_file_header(self): 147 | '''Tests reading the file header with the decoder and decoding the profile and protocol versions.''' 148 | stream = Stream.from_byte_array(Data.fit_file_minimum) 149 | decoder = Decoder(stream) 150 | 151 | file_header = decoder.read_file_header(stream) 152 | 153 | assert file_header.header_size == 14 154 | assert file_header.protocol_version == 32 155 | assert file_header.profile_version == 2187 156 | assert file_header.data_size == 0 157 | assert file_header.data_type == [b'.FIT'] 
158 | assert file_header.header_crc == 18573 159 | assert file_header.file_total_size == 14 160 | 161 | def test_read_file_header_dict(self): 162 | '''Tests reading the file header and converting the class to a dictionary.''' 163 | stream = Stream.from_byte_array(Data.fit_file_minimum) 164 | decoder = Decoder(stream) 165 | 166 | file_header = decoder.read_file_header(stream) 167 | file_header_dict = file_header.get_dict() 168 | 169 | protocol_version = (file_header.protocol_version >> 4) + ((file_header.protocol_version & 0x0F) / 10) 170 | profile_version = file_header.profile_version / 1000 if file_header.profile_version > 2199 else 100 171 | 172 | assert file_header.header_size == file_header_dict['header_size'] 173 | assert protocol_version == file_header_dict['protocol_version'] 174 | assert profile_version == file_header_dict['profile_version'] 175 | assert file_header.data_size == file_header_dict['data_size'] 176 | assert file_header.data_type == file_header_dict['data_type'] 177 | assert file_header.header_crc == file_header_dict['header_crc'] 178 | assert file_header.file_total_size == file_header_dict['file_total_size'] 179 | 180 | class TestDecoderRead(): 181 | '''Set of tests that verify the validity and accuracy of the decoder when reading files.''' 182 | @pytest.mark.parametrize( 183 | "data,num_messages", 184 | [ 185 | (Data.fit_file_minimum, 0), 186 | (Data.fit_file_short, 2), 187 | (Data.fit_file_short_new, 1), 188 | (Data.fit_file_chained, 4) 189 | ], ids=["Fit File Minimum", "Fit File Short with Invalids", "Fit File Short", "Chained Fit File"] 190 | ) 191 | def test_successful_read(self, data, num_messages): 192 | '''Tests that the decoder successfully reads fit files and returns the correct number of messages.''' 193 | stream = Stream.from_byte_array(data) 194 | decoder = Decoder(stream) 195 | messages, errors = decoder.read() 196 | assert len(errors) == 0 197 | assert decoder.get_num_messages() == num_messages 198 | 199 | def test_stream_not_reset(self): 200 | '''Tests that the decoder does not reset the stream before decoding.''' 201 | stream = Stream.from_byte_array(Data.fit_file_short) 202 | decoder = Decoder(stream) 203 | decoder.read() 204 | 205 | assert stream.position() == stream.get_length() 206 | messages, errors = decoder.read() 207 | assert len(errors) == 0 and len(messages) == 0 208 | 209 | def test_compressed_timestamp_message_should_throw(self): 210 | '''Tests that the decoder should throw an error when reading a message with a compressed timestamp''' 211 | stream = Stream.from_byte_array(Data.fit_file_short_compressed_timestamp) 212 | decoder = Decoder(stream) 213 | messages, errors = decoder.read() 214 | 215 | assert len(errors) == 1 216 | assert "Compressed timestamp messages are not currently supported" in str(errors[0]) 217 | 218 | def test_read_incorrect_field_def_size(self): 219 | '''Tests that the decoder doesn't break when reading a message with an incorrect field definition size.''' 220 | stream = Stream.from_byte_array(Data.fit_file_short_with_wrong_field_def_size) 221 | decoder = Decoder(stream) 222 | messages, errors = decoder.read(convert_datetimes_to_dates=False) 223 | 224 | assert len(errors) == 0 225 | assert "time_created" in messages["file_id_mesgs"][0] 226 | 227 | def test_invalid_crc_should_fail(self): 228 | '''Test decoder should fail when CRC is invalid''' 229 | stream = Stream.from_byte_array(Data.fit_file_short_invalid_CRC) 230 | decoder = Decoder(stream) 231 | messages, errors = decoder.read() 232 | 233 | assert len(errors) == 1 
234 | assert len(messages['file_id_mesgs']) == 1 235 | 236 | 237 | @pytest.mark.parametrize( 238 | "data,expected_output", 239 | [ 240 | (Data.fit_file_short_new, {'file_id_mesgs' : [{'manufacturer': 'garmin', 'type': 'activity', 'time_created': 1000000000, 'product_name': 'abcdefghi'}]}), 241 | (Data.fit_file_short_none_array, {'file_id_mesgs' : [{'manufacturer': 'garmin', 'type': 'activity', 'time_created': 1000000000}]}) 242 | ], ids=["Fit File Short", "Fit File Short w/ Invalid String"] 243 | ) 244 | def test_read_decoder_output(self, data, expected_output): 245 | '''Tests the validity of the decoder's output after reading a fit file.''' 246 | stream = Stream.from_byte_array(data) 247 | decoder = Decoder(stream) 248 | messages, errors = decoder.read(convert_datetimes_to_dates=False) 249 | assert expected_output == messages 250 | assert len(errors) == 0 251 | 252 | 253 | @pytest.mark.parametrize( 254 | "option_status,expected_value", 255 | [ 256 | (True, -127), 257 | (False, 1865), 258 | (None, -127) 259 | ], ids=["Set to True", "Set to False", "Default Should Apply Scale and Offset"] 260 | ) 261 | def test_apply_scale_and_offset(self, option_status, expected_value): 262 | '''Tests the validity of applying scales and offsets to the decoded fields.''' 263 | stream = Stream.from_file('tests/fits/ActivityDevFields.fit') 264 | decoder = Decoder(stream) 265 | if option_status is not None: 266 | messages, errors = decoder.read(apply_scale_and_offset=option_status, merge_heart_rates=False) 267 | else: 268 | messages, errors = decoder.read() 269 | assert len(errors) == 0 270 | assert messages['record_mesgs'][0]['altitude'] == expected_value 271 | 272 | def test_scale_and_offset_apply_to_arrays(self): 273 | stream = Stream.from_file('tests/fits/WithGearChangeData.fit') 274 | decoder = Decoder(stream) 275 | messages, errors = decoder.read() 276 | assert len(errors) == 0 277 | 278 | left_power_phase = messages['record_mesgs'][28]['left_power_phase'] 279 | left_power_phase_peak = messages['record_mesgs'][28]['left_power_phase_peak'] 280 | 281 | right_power_phase = messages['record_mesgs'][28]['right_power_phase'] 282 | right_power_phase_peak = messages['record_mesgs'][28]['right_power_phase_peak'] 283 | 284 | assert left_power_phase == [337.5000052734376, 199.68750312011724] 285 | assert left_power_phase_peak == [75.93750118652346, 104.0625016259766] 286 | assert right_power_phase == [7.031250109863283, 205.31250320800785] 287 | assert right_power_phase_peak == [70.31250109863284, 106.8750016699219] 288 | 289 | def test_scale_and_offset_correct_type_conversion(self): 290 | '''Tests applying scale and offset producing the correct data type.''' 291 | stream = Stream.from_file('tests/fits/WithGearChangeData.fit') 292 | decoder = Decoder(stream) 293 | messages, errors = decoder.read() 294 | assert len(errors) == 0 295 | 296 | assert messages['record_mesgs'][0]['power'] == 0 297 | assert isinstance(messages['record_mesgs'][0]['power'], int) is True 298 | 299 | assert messages['file_id_mesgs'][0]['product'] == 3843 300 | assert isinstance(messages['file_id_mesgs'][0]['product'], int) is True 301 | 302 | assert isinstance(messages['event_mesgs'][4]['rear_gear_num'], int) is True 303 | 304 | @pytest.mark.parametrize( 305 | "option_status,expected_value", 306 | [ 307 | (True, datetime.fromtimestamp(1000000000 + 631065600, timezone.utc)), 308 | (False, 1000000000), 309 | (None, datetime.fromtimestamp(1000000000 + 631065600, timezone.utc)) 310 | ], ids=["Set to True", "Set to False", "Default Should Convert 
Timestamps"] 311 | ) 312 | def test_convert_datetimes_to_python_datetimes(self, option_status, expected_value): 313 | '''Tests the validity of converting timestamps to python datetimes when decoding files.''' 314 | stream = Stream.from_byte_array(Data.fit_file_short_new) 315 | decoder = Decoder(stream) 316 | if option_status is not None: 317 | messages, errors = decoder.read(convert_datetimes_to_dates=option_status) 318 | else: 319 | messages, errors = decoder.read() 320 | 321 | assert len(errors) == 0 322 | 323 | if option_status is False: 324 | assert messages['file_id_mesgs'][0]['time_created'] == expected_value 325 | else: 326 | assert str(messages['file_id_mesgs'][0]['time_created']) == str(expected_value.replace(tzinfo=timezone.utc)) 327 | 328 | @pytest.mark.parametrize( 329 | "option_status,expected_type_value", 330 | [ 331 | (True, 'activity'), 332 | (False, 4), 333 | (None, 'activity') 334 | ], ids=["Set to True", "Set to False", "Default Should Convert"] 335 | ) 336 | def test_convert_types_to_strings(self, option_status, expected_type_value): 337 | '''Tests the validity of converting types to strings when decoding files.''' 338 | stream = Stream.from_byte_array(Data.fit_file_short_new) 339 | decoder = Decoder(stream) 340 | if option_status is not None: 341 | messages, errors = decoder.read(convert_types_to_strings=option_status) 342 | else: 343 | messages, errors = decoder.read() 344 | assert len(errors) == 0 345 | assert messages['file_id_mesgs'][0]['type'] == expected_type_value 346 | 347 | 348 | @pytest.mark.parametrize( 349 | "field_key,expected_value", 350 | [ 351 | (0, pytest.approx(3.0, 0.1)), 352 | (2, [-10, 12]), 353 | (3, ['Hello!', 'Good Job!']) 354 | ], ids=["Single Value", "Array of Values", "String Value(s)"] 355 | ) 356 | def test_read_developer_data(self, field_key, expected_value): 357 | '''Tests the validity of reading developer data from a fit file''' 358 | stream = Stream.from_file('tests/fits/ActivityDevFields.fit') 359 | decoder = Decoder(stream) 360 | messages, errors = decoder.read() 361 | 362 | assert len(errors) == 0 363 | assert len(messages['record_mesgs']) == 3601 and len(messages['session_mesgs']) == 1 364 | 365 | assert messages['session_mesgs'][0]['developer_fields'][field_key] == expected_value 366 | 367 | def test_read_dev_data_no_field_description(self): 368 | '''Tests reading past dev data with no field description message or dev data ID.''' 369 | 370 | stream = Stream.from_byte_array(Data.fit_file_dev_data_missing_field_description) 371 | decoder = Decoder(stream) 372 | messages, errors = decoder.read() 373 | 374 | assert len(errors) == 0 and len(messages['activity_mesgs']) == 1 375 | 376 | @pytest.mark.parametrize( 377 | "option_status", 378 | [ 379 | (True), 380 | (False), 381 | (None) 382 | ], ids=["Set to True", "Set to False", "Default should have CRC calculations enabled"] 383 | ) 384 | def test_enable_crc_options(self, mocker, option_status): 385 | '''Tests enabling and disabling CRC calculation when decoding a FIT file.''' 386 | spy_add_bytes = mocker.spy(CrcCalculator, "add_bytes") 387 | spy_get_crc = mocker.spy(CrcCalculator, "get_crc") 388 | 389 | stream = Stream.from_byte_array(Data.fit_file_short) 390 | decoder = Decoder(stream) 391 | 392 | if option_status is not None: 393 | messages, errors = decoder.read(enable_crc_check=option_status) 394 | else: 395 | messages, errors = decoder.read() 396 | 397 | assert len(errors) == 0 398 | 399 | assert spy_add_bytes.call_count == 0 if option_status is False else spy_add_bytes.call_count > 
0 400 | assert spy_get_crc.call_count == 0 if option_status is False else spy_get_crc.call_count > 0 401 | 402 | @pytest.mark.parametrize( 403 | "option_status, data, expected_error_status", 404 | [ 405 | (True, Data.fit_file_short_new, False), 406 | (True, Data.fit_file_short_new_invalid_crc, True), 407 | (False, Data.fit_file_short_new, False), 408 | (False, Data.fit_file_short_new_invalid_crc, False), 409 | ], ids=["With CRC | Valid File", "With CRC | Invalid File", "Without CRC | Valid File", "Without CRC | Invalid File"] 410 | ) 411 | def test_enable_crc_options_errors_returned(self, option_status, data, expected_error_status): 412 | '''Tests if errors are returned when decoding a file when CRC calculations are enabled or disabled.''' 413 | stream = Stream.from_byte_array(data) 414 | decoder = Decoder(stream) 415 | messages, errors = decoder.read(enable_crc_check=option_status) 416 | 417 | assert len(errors) == 0 if expected_error_status is False else len(errors) > 0 418 | 419 | @pytest.mark.parametrize( 420 | "option_status", 421 | [ 422 | (True), 423 | (False), 424 | (None) 425 | ], ids=["Set to True", "Set to False", "Default should expand sub fields"] 426 | ) 427 | def test_expand_sub_fields_options(self, option_status): 428 | '''Tests the validity of expanding sub fields of messages decoded from a fit file''' 429 | stream = Stream.from_file('tests/fits/WithGearChangeData.fit') 430 | decoder = Decoder(stream) 431 | if option_status is not None: 432 | messages, errors = decoder.read(expand_sub_fields=option_status, convert_types_to_strings=False) 433 | else: 434 | messages, errors = decoder.read(convert_types_to_strings=False) 435 | 436 | assert len(errors) == 0 437 | assert decoder.get_num_messages() == 2055 438 | 439 | for message in (message for message in messages['event_mesgs'] if message['event'] == 'rider_position_change'): 440 | if option_status is True or option_status is None: 441 | assert message['rider_position'] == message['data'] 442 | 443 | @pytest.mark.parametrize( 444 | "option_status, is_integer", 445 | [ 446 | (True, False), 447 | (False, True), 448 | ], ids=["Convert Types is True, No Ints", "Convert Types is True, All Ints"] 449 | ) 450 | def test_expand_sub_fields_convert_types_to_strings(self, option_status, is_integer): 451 | stream = Stream.from_file('tests/fits/WithGearChangeData.fit') 452 | decoder = Decoder(stream) 453 | messages, errors = decoder.read(convert_types_to_strings=option_status) 454 | 455 | assert len(errors) == 0 456 | 457 | rider_position_event_mesgs = [mesg for mesg in messages['event_mesgs'] if 'rider_position' in mesg] 458 | 459 | for mesg in rider_position_event_mesgs: 460 | assert isinstance(mesg['rider_position'], int) is is_integer 461 | 462 | @pytest.mark.parametrize( 463 | "option_status, data, distances", 464 | [ 465 | (True, Data.fit_file_800m_repeats_little_endian, [4000, 800, 200, 1000]), 466 | (False, Data.fit_file_800m_repeats_little_endian, [400000, 80000, 20000, 100000]), 467 | (True, Data.fit_file_800m_repeats_big_endian, [4000, 800, 200, 1000]), 468 | (False, Data.fit_file_800m_repeats_big_endian, [400000, 80000, 20000, 100000]), 469 | ], ids=["Apply Scale and Offset is True, Little Endian", "Apply Scale and Offset is False, Little Endian", 470 | "Apply Scale and Offset is True, Big Endian", "Apply Scale and Offset is False, Big Endian"] 471 | ) 472 | def test_expand_sub_fields_scale_and_offset(self, option_status, data, distances): 473 | stream = Stream.from_byte_array(data) 474 | decoder = Decoder(stream) 475 | 
messages, errors = decoder.read(apply_scale_and_offset=option_status, merge_heart_rates=False) 476 | 477 | assert len(errors) == 0 478 | 479 | duration_distance_workout_step_mesgs = [mesg for mesg in messages['workout_step_mesgs'] if 'duration_distance' in mesg] 480 | 481 | for mesg, distance in zip(duration_distance_workout_step_mesgs, distances): 482 | assert mesg['duration_distance'] == distance 483 | 484 | def test_messages_with_no_fields(self): 485 | '''Tests reading messages with no fields assigned in their message definition''' 486 | stream = Stream.from_byte_array(Data.fit_file_messages_with_no_fields) 487 | decoder = Decoder(stream) 488 | messages, errors = decoder.read() 489 | assert len(errors) == 0 490 | 491 | serial_number = 3452116910 492 | 493 | assert len(messages['pad_mesgs']) == 1 494 | assert len(messages['file_id_mesgs']) == 2 495 | 496 | assert messages['pad_mesgs'][0] == {} 497 | 498 | assert messages['file_id_mesgs'][0]["serial_number"] == serial_number 499 | assert messages['file_id_mesgs'][1]["serial_number"] == serial_number 500 | 501 | class TestComponentExpansion: 502 | def test_sub_field_and_component_expansion(self): 503 | stream = Stream.from_file('tests/fits/WithGearChangeData.fit') 504 | decoder = Decoder(stream) 505 | messages, errors = decoder.read() 506 | 507 | assert len(errors) == 0 508 | 509 | rider_position_event_mesgs = [mesg for mesg in messages['event_mesgs'] if 'gear_change_data' in mesg] 510 | 511 | index = 0 512 | for mesg in rider_position_event_mesgs: 513 | expected = Data.gear_change_data[index] 514 | assert mesg['front_gear_num'] == expected['front_gear_num'] 515 | assert mesg['front_gear'] == expected['front_gear'] 516 | assert mesg['rear_gear_num'] == expected['rear_gear_num'] 517 | assert mesg['rear_gear'] == expected['rear_gear'] 518 | assert mesg['data'] == expected['data'] 519 | assert mesg['gear_change_data'] == expected['gear_change_data'] 520 | 521 | index += 1 522 | 523 | @pytest.mark.parametrize( 524 | "option_status", 525 | [ 526 | (True), 527 | (False), 528 | (None) 529 | ], ids=["Set to True", "Set to False", "Default should expand components"] 530 | ) 531 | def test_component_expansion_options(self, option_status): 532 | '''Tests the validity of expanding components of messages decoded from a fit file''' 533 | stream = Stream.from_file('tests/fits/WithGearChangeData.fit') 534 | decoder = Decoder(stream) 535 | if option_status is not None: 536 | messages, errors = decoder.read(expand_components=option_status, merge_heart_rates=False) 537 | else: 538 | messages, errors = decoder.read() 539 | 540 | assert len(errors) == 0 541 | 542 | for message in messages['record_mesgs']: 543 | if option_status is True or option_status is None: 544 | assert message['speed'] == message['enhanced_speed'] 545 | assert message['altitude'] == message['enhanced_altitude'] 546 | else: 547 | assert 'enhanced_speed' not in message 548 | assert 'enhanced_altitude' not in message 549 | 550 | def test_hr_message_component_expansion(self): 551 | '''Tests component expansion given heart rate messages.''' 552 | stream = Stream.from_file('tests/fits/HrmPluginTestActivity.fit') 553 | decoder = Decoder(stream) 554 | messages, errors = decoder.read() 555 | assert len(errors) == 0 556 | 557 | assert messages['hr_mesgs'][0]['event_timestamp'] == 1242209 558 | 559 | hr_mesgs = messages['hr_mesgs'] 560 | 561 | index = 0 562 | for message in hr_mesgs: 563 | if isinstance(message['event_timestamp'], float): 564 | assert message['event_timestamp'] == 
pytest.approx(Data.hrm_plugin_test_activity_expected[index]) 565 | index += 1 566 | continue 567 | for timestamp in message['event_timestamp']: 568 | assert timestamp == pytest.approx(Data.hrm_plugin_test_activity_expected[index]) 569 | index += 1 570 | 571 | def test_enum_component_expansion(self): 572 | '''Tests component expansion in a monitoring file which includes expanded components which are enums.''' 573 | stream = Stream.from_byte_array(Data.fit_file_monitoring) 574 | decoder = Decoder(stream) 575 | messages, errors = decoder.read() 576 | assert len(errors) == 0 577 | assert len(messages['monitoring_mesgs']) == 4 578 | assert messages['monitoring_mesgs'][0]['activity_type'] == "running" and messages['monitoring_mesgs'][0]['intensity'] == 3 579 | assert messages['monitoring_mesgs'][0]['cycles'] == 10 580 | 581 | assert messages['monitoring_mesgs'][1]['activity_type'] == "walking" and messages['monitoring_mesgs'][1]['intensity'] == 0 582 | assert messages['monitoring_mesgs'][1]['cycles'] == 30 583 | 584 | assert messages['monitoring_mesgs'][2]['activity_type'] == 30 and messages['monitoring_mesgs'][2]['intensity'] == 0 585 | assert messages['monitoring_mesgs'][2]['cycles'] == 15 586 | 587 | assert 'activity_type' not in messages['monitoring_mesgs'][3] and 'intensity' not in messages['monitoring_mesgs'][3] 588 | assert messages['monitoring_mesgs'][3]['cycles'] == 15 589 | 590 | class TestMergeHeartrates: 591 | '''Set of tests which verify the functionality of merging heartrates to records when decoding.''' 592 | @pytest.mark.parametrize( 593 | "option_status, expected", 594 | [ 595 | (True, False), 596 | (False, True), 597 | (None, False) 598 | ], ids=["Set to True", "Set to False", "Default should merge heart rates"] 599 | ) 600 | def test_merge_heart_rates_options(self, option_status, expected): 601 | '''Tests that all the options settings for merge_heart_rates work as expected when decoding.''' 602 | stream = Stream.from_file('tests/fits/HrmPluginTestActivity.fit') 603 | decoder = Decoder(stream) 604 | 605 | if option_status is not None: 606 | messages, errors = decoder.read(merge_heart_rates=option_status) 607 | else: 608 | messages, errors = decoder.read() 609 | 610 | assert len(errors) == 0 611 | 612 | missing_hr = False 613 | for message in messages['record_mesgs']: 614 | if 'heart_rate' not in message: 615 | missing_hr = True 616 | assert missing_hr == expected 617 | 618 | def test_merge_heart_rate_fails_without_scale_and_offset(self): 619 | '''Tests to ensure that decoding fails when merge_heart_rates == True but apply_scale_and_offset == False''' 620 | stream = Stream.from_file('tests/fits/HrmPluginTestActivity.fit') 621 | decoder = Decoder(stream) 622 | messages, errors = decoder.read(apply_scale_and_offset=False) 623 | assert len(errors) == 1 624 | 625 | def test_merge_heart_rate_fails_without_expand_components(self): 626 | '''Tests to ensure that decoding fails when merge_heart_rates == True but expand_components == False''' 627 | stream = Stream.from_file('tests/fits/HrmPluginTestActivity.fit') 628 | decoder = Decoder(stream) 629 | messages, errors = decoder.read(expand_components=False) 630 | assert len(errors) == 1 631 | 632 | class TestAccumulatedFields: 633 | def test_expanded_components_expand_with_fields_that_accumulate(self): 634 | '''Tests that expanding components which are set to accumulate, accumulate properly.''' 635 | stream = Stream.from_byte_array(Data.fit_file_accumulated_components) 636 | decoder = Decoder(stream) 637 | messages, errors = decoder.read() 
638 | assert len(errors) == 0 639 | 640 | assert messages['record_mesgs'][0]['cycles'] == 254 641 | assert messages['record_mesgs'][0]['total_cycles'] == 254 642 | 643 | assert messages['record_mesgs'][1]['cycles'] == 0 644 | assert messages['record_mesgs'][1]['total_cycles'] == 256 645 | 646 | assert messages['record_mesgs'][2]['cycles'] == 1 647 | assert messages['record_mesgs'][2]['total_cycles'] == 257 648 | 649 | def test_expanded_components_which_accumulate_and_have_initial_value_scale_and_accumulate(self): 650 | '''Tests that when an accumulated, expanded component field that is given an initial value is scaled accordingly in accumulation.''' 651 | stream = Stream.from_byte_array(Data.fit_file_compressed_speed_distance_with_initial_distance) 652 | decoder = Decoder(stream) 653 | messages, errors = decoder.read() 654 | assert len(errors) == 0 655 | 656 | # The first distance field is not expanded from a compressedSpeedDistance field 657 | assert messages['record_mesgs'][0]['distance'] == 2 658 | assert messages['record_mesgs'][1]['distance'] == 264 659 | assert messages['record_mesgs'][2]['distance'] == 276 660 | 661 | class TestDecoderExceptions: 662 | '''Set of tests which verifies behavior of the decoder when various exceptions are raised''' 663 | @pytest.mark.parametrize( 664 | "exception", 665 | [ 666 | KeyboardInterrupt, 667 | SystemExit, 668 | ], ids=["KeyboardInterrupt", "SystemExit"] 669 | ) 670 | def test_keyboard_interrupt_and_system_exit_exceptions_are_rethrown(self, mocker, exception): 671 | '''Tests to ensure that the decoder rethrows KeyboardInterrupt and SystemExit exceptions''' 672 | stream = Stream.from_byte_array(Data.fit_file_short) 673 | decoder = Decoder(stream) 674 | 675 | mocked_is_fit = mocker.patch('garmin_fit_sdk.Decoder.is_fit') 676 | mocked_is_fit.side_effect = exception 677 | 678 | with pytest.raises(exception): 679 | decoder.read() 680 | 681 | @pytest.mark.parametrize( 682 | "exception", 683 | [ 684 | Exception, 685 | RuntimeError, 686 | BufferError, 687 | LookupError, 688 | IndexError 689 | ], ids=["Generic Exception", "RuntimeError", "BufferError", "LookupError", "IndexError"] 690 | ) 691 | def test_other_exceptions_are_not_rethrown(self, mocker, exception): 692 | '''Tests to ensure that the decoder does not rethrow other exceptions''' 693 | stream = Stream.from_byte_array(Data.fit_file_short) 694 | decoder = Decoder(stream) 695 | 696 | mocked_is_fit = mocker.patch('garmin_fit_sdk.Decoder.is_fit') 697 | mocked_is_fit.side_effect = exception 698 | 699 | messages, errors = decoder.read() 700 | 701 | assert len(errors) == 1 702 | 703 | def test_mesg_listener(): 704 | '''Tests that a message listener passed to the decoder is correctly called.''' 705 | stream = Stream.from_byte_array(Data.fit_file_short) 706 | decoder = Decoder(stream) 707 | 708 | def mesg_listener(mesg_num, message): 709 | raise Exception("The message listener was called!") 710 | 711 | messages, errors = decoder.read(mesg_listener=mesg_listener) 712 | 713 | assert len(errors) == 1 714 | assert str(errors[0]) == "The message listener was called!" 715 | -------------------------------------------------------------------------------- /garmin_fit_sdk/decoder.py: -------------------------------------------------------------------------------- 1 | '''decoder.py: Contains the decoder class which is used to decode fit files.''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 
5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | # ****WARNING**** This file is auto-generated! Do NOT edit this file. 10 | # Profile Version = 21.178.0Release 11 | # Tag = production/release/21.178.0-0-g3bea629 12 | ############################################################################################ 13 | 14 | 15 | import copy 16 | 17 | from . import Accumulator, BitStream, CrcCalculator 18 | from . import fit as FIT 19 | from . import hr_mesg_utils, util 20 | from .profile import Profile 21 | from .stream import Endianness, Stream 22 | from enum import Enum 23 | 24 | _CRCSIZE = 2 25 | _COMPRESSED_HEADER_MASK = 0x80 26 | _MESG_DEFINITION_MASK = 0x40 27 | _MESG_HEADER_MASK = 0x00 28 | _LOCAL_MESG_NUM_MASK = 0x0F 29 | _DEV_DATA_MASK = 0x20 30 | 31 | _HEADER_WITH_CRC_SIZE = 14 32 | _HEADER_WITHOUT_CRC_SIZE = 12 33 | 34 | DecodeMode = Enum('DecodeMode', ['NORMAL', 'SKIP_HEADER', 'DATA_ONLY']) 35 | 36 | class Decoder: 37 | ''' 38 | A class for decoding a given stream (fit file). Will return the decoded data 39 | from the stream 40 | 41 | Attributes: 42 | _stream: The given stream of data to be decoded. 43 | _local_mesg_defs: The 16 most recent message definitions read. 44 | _messages: The messages decoded by the Decoder. 45 | ''' 46 | 47 | def __init__(self, stream: Stream): 48 | if stream is None: 49 | raise RuntimeError("FIT Runtine Error stream parameter is None.") 50 | 51 | self._stream = stream 52 | self._local_mesg_defs = {} 53 | self._developer_data_defs = {} 54 | self._messages = {} 55 | self._accumulator = Accumulator() 56 | 57 | self._fields_with_subfields = [] 58 | self._fields_to_expand = [] 59 | 60 | self._decode_mode = DecodeMode.NORMAL 61 | 62 | self._mesg_listener = None 63 | self._apply_scale_and_offset = True 64 | self._convert_timestamps_to_datetimes = True 65 | self._convert_types_to_strings = True 66 | self._enable_crc_check = True 67 | self._expand_sub_fields = True 68 | self._expand_components = True 69 | self._merge_heart_rates = True 70 | 71 | 72 | def is_fit(self): 73 | '''Returns whether the file is a valid fit file.''' 74 | try: 75 | file_header_size = self._stream.peek_byte() 76 | if file_header_size != _HEADER_WITH_CRC_SIZE and file_header_size != _HEADER_WITHOUT_CRC_SIZE: 77 | return False 78 | 79 | if self._stream.get_length() < (file_header_size + _CRCSIZE): 80 | return False 81 | 82 | # TODO make sure this works with chained files (add offset) 83 | file_header = self.read_file_header(True) 84 | if file_header.data_type[0].decode() != ".FIT": 85 | return False 86 | 87 | except Exception: 88 | return False 89 | 90 | return True 91 | 92 | def check_integrity(self): 93 | '''Returns whether the integrity of the file is good or not.''' 94 | try: 95 | if self.is_fit() is False: 96 | return False 97 | 98 | file_header = self.read_file_header(True) 99 | 100 | if file_header.header_size + file_header.data_size + _CRCSIZE > self._stream.get_length(): 101 | return False 102 | 103 | if file_header.header_size is _HEADER_WITH_CRC_SIZE and file_header.header_crc != CrcCalculator.calculate_crc(self._stream.slice(0, 12), 0, 12): 104 | return False 105 | 106 | file_crc = CrcCalculator.calculate_crc(self._stream.read_bytes(file_header.file_total_size),0, file_header.file_total_size) 107 | crc_from_file 
= self._stream.read_byte() + (self._stream.read_byte() << 8) 108 | if crc_from_file != file_crc: 109 | return False 110 | 111 | except Exception: 112 | return False 113 | 114 | return True 115 | 116 | def read(self, apply_scale_and_offset = True, 117 | convert_datetimes_to_dates = True, 118 | convert_types_to_strings = True, 119 | enable_crc_check = True, 120 | expand_sub_fields = True, 121 | expand_components = True, 122 | merge_heart_rates = True, 123 | mesg_listener = None, 124 | decode_mode = DecodeMode.NORMAL): 125 | '''Reads the entire contents of the fit file and returns the decoded messages''' 126 | self._apply_scale_and_offset = apply_scale_and_offset 127 | self._convert_timestamps_to_datetimes = convert_datetimes_to_dates 128 | self._convert_types_to_strings = convert_types_to_strings 129 | self._enable_crc_check = enable_crc_check 130 | self._expand_sub_fields = expand_sub_fields 131 | self._expand_components = expand_components 132 | self._merge_heart_rates = merge_heart_rates 133 | self._mesg_listener = mesg_listener 134 | self._decode_mode = decode_mode 135 | 136 | self._local_mesg_defs = {} 137 | self._developer_data_defs = {} 138 | self._messages = {} 139 | 140 | errors = [] 141 | try: 142 | if self._merge_heart_rates and (not self._apply_scale_and_offset or not self._expand_components): 143 | self.__raise_error("merge_heart_rates requires both apply_scale_and_offset and expand_components to be enabled!") 144 | 145 | while self._stream.position() < self._stream.get_length(): 146 | self.__decode_next_file() 147 | 148 | if self._merge_heart_rates is True and 'hr_mesgs' in self._messages: 149 | hr_mesg_utils.merge_heart_rates(self._messages['hr_mesgs'], self._messages['record_mesgs']) 150 | 151 | except (KeyboardInterrupt, SystemExit): 152 | raise 153 | except Exception as error: 154 | errors.append(error) 155 | 156 | return self._messages, errors 157 | 158 | def __decode_next_file(self): 159 | position = self._stream.position() 160 | 161 | if self._decode_mode == DecodeMode.NORMAL and self.is_fit() is False: 162 | self.__raise_error("The file is not a fit file.") 163 | 164 | crc_calculator = CrcCalculator() if self._enable_crc_check is True else None 165 | self._stream.set_crc_calculator(crc_calculator) 166 | 167 | file_header = self.read_file_header(False, decode_mode=self._decode_mode) 168 | 169 | # Read data definitions and messages 170 | while self._stream.position() < (position + file_header.header_size + file_header.data_size): 171 | self.__decode_next_record() 172 | 173 | 174 | self._stream.set_crc_calculator(None) 175 | crc = self._stream.read_unint_16() 176 | 177 | if crc_calculator is not None: 178 | calculated_crc = crc_calculator.get_crc() 179 | if self._decode_mode == DecodeMode.NORMAL and crc != calculated_crc: 180 | self.__raise_error("CRC Error") 181 | 182 | def __decode_next_record(self): 183 | record_header = self._stream.peek_byte() 184 | 185 | if record_header & _COMPRESSED_HEADER_MASK == _COMPRESSED_HEADER_MASK: 186 | self.__decode_compressed_timestamp_message() 187 | 188 | if record_header & _MESG_DEFINITION_MASK == _MESG_HEADER_MASK: 189 | self.__decode_message() 190 | 191 | if record_header & _MESG_DEFINITION_MASK == _MESG_DEFINITION_MASK: 192 | self.__decode_mesg_def() 193 | 194 | def __decode_mesg_def(self): 195 | record_header = self._stream.read_byte() 196 | 197 | struct_format_string = '' 198 | mesg_def = {} 199 | mesg_def["record_header"] = record_header 200 | mesg_def["local_mesg_num"] = record_header & _LOCAL_MESG_NUM_MASK 201 | 
mesg_def["reserved"] = self._stream.read_byte() 202 | 203 | mesg_def["architecture"] = self._stream.read_byte() 204 | mesg_def["endianness"] = Endianness.LITTLE if mesg_def["architecture"] == 0 else Endianness.BIG 205 | 206 | struct_format_string += '>' if mesg_def["endianness"] == Endianness.BIG else '<' 207 | mesg_def["struct_format_string"] = struct_format_string 208 | 209 | mesg_def["global_mesg_num"] = self._stream.read_unint_16(mesg_def["endianness"]) 210 | mesg_def["num_fields"] = self._stream.read_byte() 211 | mesg_def["field_definitions"] = [] 212 | mesg_def["developer_field_defs"] = [] 213 | mesg_def["message_size"] = 0 214 | mesg_def["developer_data_size"] = 0 215 | 216 | for i in range(mesg_def["num_fields"]): 217 | field_definition = { 218 | "field_id": self._stream.read_byte(), 219 | "size": self._stream.read_byte(), 220 | "base_type": self._stream.read_byte(), 221 | } 222 | 223 | if field_definition["base_type"] not in FIT.BASE_TYPE_DEFINITIONS: 224 | self.__raise_error("Invalid field definition base type") 225 | 226 | if field_definition["size"] % FIT.BASE_TYPE_DEFINITIONS[field_definition["base_type"]]["size"] != 0: 227 | field_definition["base_type"] = FIT.BASE_TYPE['UINT8'] 228 | 229 | num_field_elements = int(field_definition["size"] / FIT.BASE_TYPE_DEFINITIONS[field_definition["base_type"]]["size"]) 230 | field_definition["num_field_elements"] = num_field_elements 231 | 232 | struct_format_string += str(num_field_elements) if num_field_elements > 1 else '' 233 | struct_format_string += FIT.BASE_TYPE_DEFINITIONS[field_definition["base_type"]]["type_code"] 234 | 235 | mesg_def["struct_format_string"] = struct_format_string 236 | mesg_def["field_definitions"].append(field_definition) 237 | mesg_def["message_size"] += field_definition["size"] 238 | 239 | if record_header & _DEV_DATA_MASK == _DEV_DATA_MASK: 240 | num_dev_fields = self._stream.read_byte() 241 | 242 | for i in range(num_dev_fields): 243 | developer_field_definition = { 244 | "field_definition_number": self._stream.read_byte(), 245 | "size": self._stream.read_byte(), 246 | "developer_data_index": self._stream.read_byte(), 247 | "endianness": Endianness.LITTLE if mesg_def["architecture"] == 0 else Endianness.BIG 248 | } 249 | 250 | mesg_def["developer_field_defs"].append(developer_field_definition) 251 | mesg_def["developer_data_size"] += developer_field_definition["size"] 252 | 253 | if mesg_def["global_mesg_num"] in Profile['messages']: 254 | message_profile = Profile['messages'][mesg_def["global_mesg_num"]] 255 | else: 256 | message_profile = { 257 | "name": str(mesg_def["global_mesg_num"]), 258 | "messages_key": str(mesg_def["global_mesg_num"]), 259 | "num": mesg_def["global_mesg_num"], 260 | 'fields': {} 261 | } 262 | 263 | #TODO add option for unknown data 264 | 265 | # Add the profile to the local message definition 266 | self._local_mesg_defs[mesg_def["local_mesg_num"]] = {**mesg_def, **message_profile} 267 | 268 | messages_key = message_profile['messages_key'] if 'messages_key' in message_profile else None 269 | if message_profile is not None and messages_key not in self._messages: 270 | self._messages[messages_key] = [] 271 | 272 | def __decode_message(self): 273 | record_header = self._stream.read_byte() 274 | 275 | local_mesg_num = record_header & _LOCAL_MESG_NUM_MASK 276 | if local_mesg_num in self._local_mesg_defs: 277 | mesg_def = self._local_mesg_defs[local_mesg_num] 278 | else: 279 | self.__raise_error("Invalid local message number") 280 | 281 | messages_key = mesg_def['messages_key'] 282 | 283 
| # Decode regular message 284 | message = {} 285 | self._fields_to_expand = [] 286 | self._fields_with_subfields = [] 287 | 288 | message = self.__read_message(mesg_def) 289 | 290 | developer_fields = {} 291 | 292 | # Decode developer data if it exists 293 | if len(mesg_def["developer_field_defs"]) > 0: 294 | 295 | for developer_field_def in mesg_def['developer_field_defs']: 296 | field_profile = self.__lookup_developer_data_field(developer_field_def) 297 | if field_profile is None: 298 | # If there is not a field definition, then read past the field data. 299 | self._stream.read_bytes(developer_field_def['size']) 300 | continue 301 | 302 | struct_format_string = self.__build_dev_data_struct_string(developer_field_def, field_profile) 303 | field_value = self.__read_raw_value(developer_field_def['size'], struct_format_string) 304 | 305 | if field_profile['fit_base_type_id'] == FIT.BASE_TYPE['STRING']: 306 | field_value = util._convert_string(field_value) 307 | #NOTE possible point to scrub invalids???? 308 | 309 | if field_value is not None: 310 | developer_fields[field_profile['key']] = field_value 311 | 312 | if mesg_def['global_mesg_num'] == Profile['mesg_num']['DEVELOPER_DATA_ID']: 313 | self.__add_developer_data_id_to_profile(message) 314 | 315 | elif mesg_def['global_mesg_num'] == Profile['mesg_num']['FIELD_DESCRIPTION']: 316 | message['key'] = len(self._messages[messages_key]) 317 | self.__add_field_description_to_profile(message) 318 | 319 | else: 320 | message = self.__apply_profile(mesg_def, message) 321 | 322 | self.__clean_message(message) 323 | 324 | if len(developer_fields) != 0: 325 | message['developer_fields'] = developer_fields 326 | 327 | # Append decoded message 328 | self._messages[messages_key].append(message) 329 | 330 | if self._mesg_listener is not None: 331 | self._mesg_listener(mesg_def['global_mesg_num'], message) 332 | 333 | def __decode_compressed_timestamp_message(self): 334 | self.__raise_error("Compressed timestamp messages are not currently supported") 335 | 336 | def __read_message(self, mesg_def): 337 | message = {} 338 | raw_values = self.__read_raw_values(mesg_def["message_size"], mesg_def["struct_format_string"]) 339 | 340 | index = 0 341 | for field in mesg_def['field_definitions']: 342 | base_type_definition = FIT.BASE_TYPE_DEFINITIONS[field["base_type"]] 343 | invalid = base_type_definition["invalid"] 344 | num_elements = field["num_field_elements"] 345 | 346 | field_id = field["field_id"] 347 | field_profile = mesg_def['fields'][field_id] if field_id in mesg_def['fields'] else None 348 | field_name = field_profile['name'] if field_id in mesg_def['fields'] else field_id 349 | 350 | if field_profile is not None and 'has_components' in field_profile: 351 | convert_invalids_to_none = not field_profile['has_components'] 352 | else: 353 | convert_invalids_to_none = True 354 | 355 | field_value = None 356 | 357 | # Fields with strings or string arrays 358 | if base_type_definition['type'] == FIT.BASE_TYPE["STRING"]: 359 | field_value = util._convert_string(raw_values[index]) 360 | 361 | # Fields with an array of values 362 | elif num_elements > 1: 363 | field_value = [] 364 | 365 | if(base_type_definition['type'] == FIT.BASE_TYPE["BYTE"]): 366 | raw_array = raw_values[index : index + num_elements] 367 | field_value = raw_array if util._only_invalid_values(raw_array, invalid) is False else None 368 | else: 369 | for i in range(num_elements): 370 | raw_value = raw_values[index + i] if raw_values[index + i] != invalid or not convert_invalids_to_none else 
None 371 | field_value.append(raw_value) 372 | 373 | if self.__is_array_all_none(field_value) is True: 374 | field_value = None 375 | 376 | # Fields with a single value 377 | else: 378 | if raw_values[index] != invalid or not convert_invalids_to_none: 379 | field_value = raw_values[index] 380 | 381 | if field_value is not None: 382 | message[field_name] = { 383 | 'raw_field_value': field_value, 384 | 'field_definition_number': field_id 385 | } 386 | 387 | if field_profile and len(field_profile['sub_fields']) > 0: 388 | self._fields_with_subfields.append(field_name) 389 | 390 | if field_profile and field_profile['has_components'] is True: 391 | self._fields_to_expand.append(field_name) 392 | 393 | if field_profile and field_profile['is_accumulated'] is True: 394 | self.__set_accumulated_value(mesg_def, message, field_profile, field_value) 395 | 396 | index += num_elements if base_type_definition['type'] != FIT.BASE_TYPE["STRING"] else 1 397 | 398 | return message 399 | 400 | def __apply_profile(self, mesg_def: dict, raw_message: dict): 401 | message = raw_message 402 | 403 | 404 | self.__expand_sub_fields(mesg_def['global_mesg_num'], message) 405 | 406 | self.__expand_components(mesg_def['global_mesg_num'], message, mesg_def['fields'], mesg_def) 407 | 408 | self.__transform_values(message, mesg_def) 409 | 410 | return message 411 | 412 | def __transform_values(self, message, mesg_def): 413 | for field in message: 414 | if 'is_expanded_field'in message[field] and message[field]['is_expanded_field'] is True: 415 | continue 416 | 417 | field_name = field 418 | field_id = message[field]['field_definition_number'] 419 | field_profile = mesg_def['fields'][field_id] if field_id in mesg_def['fields'] else None 420 | field_type = field_profile['type'] if field_id in mesg_def['fields'] else field_id 421 | 422 | is_sub_field = message[field]['is_sub_field'] if 'is_sub_field' in message[field] else False 423 | if is_sub_field: 424 | field_profile = self.__get_subfield_profile(field_profile, field_name) 425 | field_type = field_profile['type'] if field_id in mesg_def['fields'] else field_id 426 | 427 | field_value = message[field_name]['raw_field_value'] 428 | # Optional data operations 429 | if self._convert_types_to_strings is True: 430 | field_value = self.__convert_type_to_string(field_type, message[field_name]['raw_field_value']) 431 | 432 | if self._apply_scale_and_offset is True and field_type in FIT.NUMERIC_FIELD_TYPES: 433 | field_value = self.__apply_scale_and_offset(field_profile, message[field_name]['raw_field_value']) 434 | 435 | if self._convert_timestamps_to_datetimes is True and field_type == 'date_time': 436 | field_value = util.convert_timestamp_to_datetime(message[field_name]['raw_field_value']) 437 | 438 | message[field_name]['field_value'] = field_value 439 | return 440 | 441 | def __expand_components(self, mesg_num, message, fields, mesg_def): 442 | if self._expand_components is False or len(self._fields_to_expand) == 0: 443 | return 444 | 445 | mesg = {} 446 | 447 | while len(self._fields_to_expand) > 0: 448 | field_name = self._fields_to_expand.pop() 449 | 450 | field_to_expand = message.get(field_name) or mesg.get(field_name) 451 | 452 | raw_field_value = field_to_expand['raw_field_value'] 453 | field_definition_number = field_to_expand['field_definition_number'] 454 | field_profile = mesg_def['fields'].get(field_definition_number) 455 | 456 | if field_profile is None: 457 | continue 458 | 459 | is_sub_field = field_to_expand.get('is_sub_field') or False 460 | if is_sub_field is 
True: 461 | field_profile = self.__get_subfield_profile(field_profile, field_name) 462 | 463 | base_type = FIT.FIELD_TYPE_TO_BASE_TYPE[field_profile['type']] if field_profile['type'] in FIT.FIELD_TYPE_TO_BASE_TYPE else None 464 | 465 | if field_profile['has_components'] is False or base_type is None: 466 | continue 467 | 468 | if util._only_invalid_values(raw_field_value, FIT.BASE_TYPE_DEFINITIONS[base_type]['invalid']) is True: 469 | continue 470 | 471 | bitstream = BitStream(raw_field_value, base_type) 472 | 473 | for i in range(len(field_profile['components'])): 474 | if bitstream.bits_available() < field_profile['bits'][i]: 475 | break 476 | 477 | target_field = fields[field_profile['components'][i]] 478 | if target_field['name'] not in mesg: 479 | base_type = FIT.FIELD_TYPE_TO_BASE_TYPE[target_field['type']] if target_field['type'] in FIT.FIELD_TYPE_TO_BASE_TYPE else target_field['type'] 480 | invalid_value = FIT.BASE_TYPE_DEFINITIONS[base_type]['invalid'] if base_type in FIT.BASE_TYPE_DEFINITIONS else 0xFF 481 | 482 | mesg[target_field['name']] = { 483 | 'field_value': [], 484 | 'raw_field_value': [], 485 | 'field_definition_number': target_field['num'], 486 | 'is_expanded_field': True, 487 | 'invalid': invalid_value 488 | } 489 | 490 | value = bitstream.read_bits(field_profile['bits'][i]) 491 | 492 | if target_field['is_accumulated'] is True: 493 | value = self._accumulator.accumulate(mesg_num, target_field['num'], value, field_profile['bits'][i]) 494 | 495 | # Undo component scale and offset before applying the destination field's scale and offset 496 | value = (value / field_profile['scale'][i]) - field_profile['offset'][i] 497 | value = int(value) if value.is_integer() else value 498 | raw_value = (value + target_field['offset'][0]) * target_field['scale'][0] 499 | 500 | mesg[target_field['name']]['raw_field_value'].append(int(raw_value)) 501 | 502 | if raw_value == invalid_value: 503 | mesg[target_field['name']]['field_value'].append(None) 504 | else: 505 | if self._convert_types_to_strings is True: 506 | value = self.__convert_type_to_string(target_field['type'], value) 507 | 508 | mesg[target_field['name']]['field_value'].append(value) 509 | 510 | if target_field['has_components'] is True: 511 | self._fields_to_expand.append(target_field['name']) 512 | 513 | if bitstream.has_bits_available() is False: 514 | break 515 | 516 | for field_name in mesg: 517 | mesg[field_name]['raw_field_value'] = util._sanitize_values(mesg[field_name]['raw_field_value']) 518 | mesg[field_name]['field_value'] = util._sanitize_values(mesg[field_name]['field_value']) 519 | message[field_name] = mesg[field_name] 520 | 521 | def __expand_sub_fields(self, global_mesg_num, message): 522 | if self._expand_sub_fields is False or len(self._fields_with_subfields) == 0: 523 | return 524 | 525 | # Save the original fields for iteration before expanding sub fields. 
526 | for field in self._fields_with_subfields: 527 | if message[field]['field_definition_number'] in Profile['messages'][global_mesg_num]['fields']: 528 | field_profile = Profile['messages'][global_mesg_num]['fields'][message[field]['field_definition_number']] 529 | else: 530 | continue 531 | 532 | if len(field_profile['sub_fields']) > 0: 533 | self.__expand_sub_field(message, field_profile) 534 | 535 | def __expand_sub_field(self, message, field_profile): 536 | for sub_field in field_profile['sub_fields']: 537 | for map_item in sub_field['map']: 538 | reference_field_profile = message[map_item['name']] if map_item['name'] in message else None 539 | 540 | if reference_field_profile is None: 541 | continue 542 | 543 | if reference_field_profile['raw_field_value'] == map_item['raw_value']: 544 | message[sub_field['name']] = copy.deepcopy(message[field_profile['name']]) 545 | message[sub_field['name']]['is_sub_field'] = True 546 | 547 | if sub_field['has_components'] is True: 548 | self._fields_to_expand.append(sub_field['name']) 549 | 550 | break 551 | 552 | def __get_subfield_profile(self, field_profile, name): 553 | return next(sub_field for sub_field in field_profile['sub_fields'] if sub_field['name'] == name) or {} 554 | 555 | def __set_accumulated_value(self, mesg_def, message, field, raw_field_value): 556 | raw_field_values = raw_field_value if type(raw_field_value) == list else [raw_field_value] 557 | 558 | for value in raw_field_values: 559 | for containing_field in message.values(): 560 | components = mesg_def['fields'].get(field['num'])['components'] 561 | for i, component_field_num in enumerate(components): 562 | target_field = mesg_def['fields'][component_field_num] 563 | 564 | if target_field['num'] == field['num'] and target_field['is_accumulated']: 565 | value = (((value / field['scale'][0]) - field['offset'][0]) + containing_field['offset'][i]) * containing_field['scale'][i] 566 | 567 | self._accumulator.createAccumulatedField(mesg_def['global_mesg_num'], field['num'], int(value)) 568 | 569 | def __convert_type_to_string(self, field_type, raw_field_value): 570 | try: 571 | if field_type in Profile['types']: 572 | types = Profile['types'][field_type] 573 | else: 574 | return raw_field_value 575 | 576 | field_value = raw_field_value 577 | 578 | if isinstance(raw_field_value, list): 579 | for i in range(len(raw_field_value)): 580 | field_value[i] = types[str(raw_field_value[i])] if str(raw_field_value[i]) in types else raw_field_value[i] 581 | return field_value 582 | 583 | return types[str(raw_field_value)] if str(raw_field_value) in types else field_value 584 | except Exception: 585 | return raw_field_value 586 | 587 | def __apply_scale_and_offset(self, field_profile, raw_field_value): 588 | 589 | if self._apply_scale_and_offset is False: 590 | return raw_field_value 591 | 592 | if raw_field_value is None: 593 | return raw_field_value 594 | 595 | if len(field_profile['scale']) > 1: 596 | return raw_field_value 597 | 598 | scale = field_profile['scale'][0] if field_profile['scale'] else 1 599 | offset = field_profile['offset'][0] if field_profile['offset'] else 0 600 | 601 | try: 602 | 603 | field_values = raw_field_value 604 | 605 | if isinstance(raw_field_value, list): 606 | for i in range(len(raw_field_value)): 607 | field_value = raw_field_value[i] / scale if (raw_field_value[i] is not None and scale != 1) else raw_field_value[i] 608 | field_values[i] = (field_value - offset) if raw_field_value[i] is not None else None 609 | return field_values 610 | 611 | field_value = 
raw_field_value / scale if scale != 1 else raw_field_value 612 | return field_value - offset 613 | except Exception: 614 | return raw_field_value 615 | 616 | 617 | def __add_developer_data_id_to_profile(self, message): 618 | if message is None or message['developer_data_index'] is None or message['developer_data_index']['raw_field_value'] == 0xFF: 619 | return 620 | 621 | self._developer_data_defs[message['developer_data_index']['raw_field_value']] = { 622 | 'developer_data_index': message['developer_data_index']['raw_field_value'], 623 | 'developer_id': message['developer_id']['raw_field_value'] if 'developer_id' in message else None, 624 | 'application_id': message['application_id']['raw_field_value'] if 'application_id' in message else None, 625 | 'manufacturer_id': message['manufacturer_id']['raw_field_value'] if 'manufacturer_id' in message else None, 626 | 'application_version': message['application_version']['raw_field_value'] if 'application_version' in message else None, 627 | 'fields': [] 628 | } 629 | 630 | def __add_field_description_to_profile(self, message): 631 | 632 | if message is None or message['developer_data_index'] is None or message['developer_data_index']['raw_field_value'] == 0xFF: 633 | return 634 | 635 | if self._developer_data_defs[message['developer_data_index']['raw_field_value']] is None: 636 | return 637 | 638 | if message["fit_base_type_id"] is not None: 639 | base_type_code = FIT.BASE_TYPE_DEFINITIONS[message["fit_base_type_id"]['raw_field_value']]["type_code"] 640 | else: 641 | base_type_code = None 642 | 643 | self._developer_data_defs[message['developer_data_index']['raw_field_value']]['fields'].append({ 644 | 'developer_data_index': message['developer_data_index']['raw_field_value'], 645 | 'field_definition_number': message['field_definition_number']['raw_field_value'], 646 | 'fit_base_type_id': message['fit_base_type_id']['raw_field_value'] if 'fit_base_type_id' in message else None, 647 | 'base_type_code': base_type_code, 648 | 'name': message['name']['raw_field_value'] if 'name' in message else None, 649 | 'array': message['array']['raw_field_value'] if 'array' in message else None, 650 | 'components': message['components']['raw_field_value'] if 'components' in message else None, 651 | 'scale': message['scale']['raw_field_value'] if 'scale' in message else None, 652 | 'offset': message['offset']['raw_field_value'] if 'offset' in message else None, 653 | 'units': message['units']['raw_field_value'] if 'units' in message else None, 654 | 'bits': message['bits']['raw_field_value'] if 'bits' in message else None, 655 | 'accumulate': message['accumulate']['raw_field_value'] if 'accumulate' in message else None, 656 | 'ref_field_name': message['ref_field_name']['raw_field_value'] if 'ref_field_name' in message else None, 657 | 'ref_field_value': message['ref_field_value']['raw_field_value'] if 'ref_field_value' in message else None, 658 | 'fit_base_unit_id': message['fit_base_unit_id']['raw_field_value'] if 'fit_base_unit_id' in message else None, 659 | 'native_mesg_num': message['native_mesg_num']['raw_field_value'] if 'native_mesg_num' in message else None, 660 | 'native_field_num': message['native_field_num']['raw_field_value'] if 'native_field_num' in message else None, 661 | 'key': message['key'] 662 | }) 663 | 664 | def __build_dev_data_struct_string(self, developer_field_def: dict, field_profile: dict): 665 | struct_format_string = "<" if developer_field_def['endianness'] == Endianness.LITTLE else ">" 666 | invalid_value = 
FIT.BASE_TYPE_DEFINITIONS[field_profile['fit_base_type_id']]['invalid'] 667 | base_type_code = field_profile['base_type_code'] 668 | base_type_size = FIT.BASE_TYPE_DEFINITIONS[field_profile['fit_base_type_id']]['size'] 669 | num_elements = int(developer_field_def["size"] / base_type_size) 670 | 671 | field_profile['num_elements'] = num_elements 672 | field_profile['invalid'] = invalid_value 673 | 674 | struct_format_string += str(num_elements) + base_type_code 675 | 676 | return struct_format_string 677 | 678 | def __lookup_developer_data_field(self, developer_field_def): 679 | try: 680 | for field in self._developer_data_defs[developer_field_def['developer_data_index']]['fields']: 681 | if field['field_definition_number'] == developer_field_def['field_definition_number']: 682 | return field 683 | 684 | return None 685 | 686 | except Exception: 687 | return None 688 | 689 | def __clean_message(self, message): 690 | if message is not None: 691 | for field in message: 692 | if isinstance(message[field], dict) and 'raw_field_value' in message[field]: 693 | message[field] = message[field]['field_value'] if 'field_value' in message[field] else message[field]['raw_field_value'] 694 | message[field] = util._sanitize_values(message[field]) 695 | 696 | def __read_raw_values(self, message_size, struct_format_string): 697 | return self._stream.read_and_unpack(message_size, struct_format_string) 698 | 699 | def __read_raw_value(self, message_size, struct_format_string): 700 | field_value = self._stream.read_and_unpack(message_size, struct_format_string) 701 | return field_value if len(field_value) > 1 else field_value[0] 702 | 703 | def __is_array_all_none(self, array): 704 | for i in array: 705 | if i is not None: 706 | return False 707 | return True 708 | 709 | def __raise_error(self, error = ""): 710 | position = self._stream.position() 711 | message = "FIT Runtime Error at byte: " + str(position) + " " + error 712 | raise RuntimeError(message) 713 | 714 | def read_file_header(self, reset, decode_mode = DecodeMode.NORMAL): 715 | '''Reads the file's header and returns its parameters.''' 716 | starting_position = self._stream.position() 717 | 718 | class FileHeader: 719 | '''A class that decodes a FIT file header.''' 720 | def __init__(self, stream, decode_mode): 721 | if decode_mode != DecodeMode.NORMAL: 722 | if decode_mode == DecodeMode.SKIP_HEADER: 723 | stream.seek(_HEADER_WITH_CRC_SIZE) 724 | 725 | header_size = _HEADER_WITH_CRC_SIZE if decode_mode == DecodeMode.SKIP_HEADER else 0 726 | data_size = stream.get_length() - header_size - _CRCSIZE 727 | 728 | self.header_size = header_size 729 | self.data_size = data_size 730 | 731 | return 732 | 733 | self.header_size = stream.read_byte() 734 | self.protocol_version = stream.read_byte() 735 | self.profile_version = stream.read_unint_16("little") 736 | self.data_size = stream.read_unint_32("little") 737 | self.data_type = stream.read_string(4) 738 | self.header_crc = 0 739 | self.file_total_size = self.header_size + self.data_size 740 | 741 | if self.header_size == 14: 742 | self.header_crc = stream.read_unint_16("little") 743 | 744 | def get_dict(self): 745 | dict = {} 746 | dict["header_size"] = self.header_size 747 | dict["protocol_version"] = (self.protocol_version >> 4) + ((self.protocol_version & 0x0F) / 10) 748 | dict["profile_version"] = self.profile_version / 1000 if self.profile_version > 2199 else self.profile_version / 100 749 | dict["data_size"] = self.data_size 750 | dict["data_type"] = self.data_type 751 | dict["header_crc"] = self.header_crc 752 | 
dict["file_total_size"] = self.file_total_size 753 | 754 | return dict 755 | 756 | file_header = FileHeader(self._stream, decode_mode) 757 | 758 | if reset is True: 759 | self._stream.seek(starting_position) 760 | 761 | return file_header 762 | 763 | def get_num_messages(self): 764 | '''Returns the total number of messages successfully decoded from the file(s)''' 765 | num_messages = 0 766 | for message in self._messages: 767 | num_messages += len(self._messages[message]) 768 | return num_messages 769 | -------------------------------------------------------------------------------- /tests/data.py: -------------------------------------------------------------------------------- 1 | '''data.py: Contains various byte arrays which are reused in testing the FIT SDK.''' 2 | 3 | ########################################################################################### 4 | # Copyright 2025 Garmin International, Inc. 5 | # Licensed under the Flexible and Interoperable Data Transfer (FIT) Protocol License; you 6 | # may not use this file except in compliance with the Flexible and Interoperable Data 7 | # Transfer (FIT) Protocol License. 8 | ########################################################################################### 9 | 10 | 11 | class Data: 12 | '''Helper class that holds example fit files and byte arrays to be used for testing.''' 13 | 14 | fit_file_invalid = bytearray([ 15 | 0x0E, 0x20, 0x9F, 0x03, 0x64, 0x00, 0x00, 0x00, 16 | 0x2E, 0x99, 0x49, 0x54, 0xB9, 0xE3, 0x00, 0x00 17 | ]) 18 | 19 | fit_file_minimum = bytearray([ 20 | 0x0E, 0x20, 0x8B, 0x08, 0x00, 0x00, 0x00, 0x00, 21 | 0x2E, 0x46, 0x49, 0x54, 0x8D, 0x48, 0x00, 0x00, 22 | ]) 23 | 24 | fit_file_incorrect_data_size = bytearray([ 25 | 0x0E, 0x20, 0x8B, 0x08, 0xFF, 0x00, 0x00, 0x00, 26 | 0x2E, 0x46, 0x49, 0x54, 0x8D, 0x48, 0x00, 0x00, 27 | ]) 28 | 29 | fit_file_short = bytearray([ 30 | 0x0E, 0x20, 0x9F, 0x03, 0x64, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 31 | 0xB9, 0xE3, 0x40, 0x00, 0x00, 0x00, 0x00, 0x08, 0x03, 0x04, 0x8C, 0x04, 32 | 0x04, 0x86, 0x08, 0x14, 0x07, 0x01, 0x02, 0x84, 0x02, 0x02, 0x84, 0x05, 33 | 0x02, 0x84, 0x06, 0x02, 0x84, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 34 | 0x00, 0x00, 0xCA, 0x9A, 0x3B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 35 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 36 | 0x00, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x02, 0x40, 0x00, 37 | 0x00, 0xD3, 0x00, 0x05, 0xFD, 0x04, 0x86, 0x03, 0x02, 0x84, 0x04, 0x02, 38 | 0x84, 0x00, 0x01, 0x02, 0x01, 0x01, 0x02, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 39 | 0x32, 0x00, 0x37, 0x00, 0x2C, 0x2E, 0x87, 0x4F 40 | ]) 41 | 42 | fit_file_short_new = bytearray([ 43 | 0x0E, 0x20, 0x8B, 0x08, 0x24, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x8E, 0xA3, # File Header 44 | 0x40, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x01, 0x00, 0x01, 0x02, 0x84, 0x04, 0x04, 0x86, 0x08, 0x0A, 0x07, # Message Definition 45 | 0x00, 0x04, 0x01, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x00, # Message 46 | 0x5D, 0xF2 # CRC 47 | ]) 48 | 49 | fit_file_short_compressed_timestamp = bytearray([ 50 | 0x0E, 0x20, 0x8B, 0x08, 0x24, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x8E, 0xA3, # File Header 51 | 0x40, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x01, 0x00, 0x01, 0x02, 0x84, 0x04, 0x04, 0x86, 0x08, 0x0A, 0x07, # Message Definition 52 | 0x80, 0x04, 0x01, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x00, # Message 53 | 0x5D, 0xF2 54 | ]) 55 | 56 | fit_file_short_new_invalid_crc = 
bytearray([ 57 | 0x0E, 0x20, 0x8B, 0x08, 0x24, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x8E, 0xA3, # File Header 58 | 0x40, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x01, 0x00, 0x01, 0x02, 0x84, 0x04, 0x04, 0x86, 0x08, 0x0A, 0x07, # Message Definition 59 | 0x00, 0x04, 0x01, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x00, # Message 60 | 0xFF, 0xFF # CRC 61 | ]) 62 | 63 | fit_file_short_none_array = bytearray([ 64 | 0x0E, 0x20, 0x8B, 0x08, 0x24, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x8E, 0xA3, # File Header 65 | 0x40, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x01, 0x00, 0x01, 0x02, 0x84, 0x04, 0x04, 0x86, 0x08, 0x0A, 0x07, # Message Definition 66 | 0x00, 0x04, 0x01, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, # Message 67 | 0x6C, 0x15 # CRC 68 | ]) 69 | 70 | fit_file_short_with_wrong_field_def_size = bytearray([ 71 | 0x0E, 0x20, 0x8B, 0x08, 0x21, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x8E, 0xA3, # File Header 72 | 0x40, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x01, 0x00, 0x01, 0x02, 0x84, 0x04, 0x01, 0x86, 0x08, 0x0A, 0x07, # Message Definition 73 | 0x00, 0x04, 0x01, 0x00, 0x12, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x00, # Message 74 | 0x65, 0xFE # CRC 75 | ]) 76 | 77 | fit_file_arrays = bytearray([ 78 | 0x0E, 0x20, 0x9F, 0x03, 0x32, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 79 | 0x3C, 0xF5, 0x40, 0x00, 0x01, 0x00, 0x00, 0x03, 0x01, 0x02, 0x84, 0x00, 80 | 0x01, 0x00, 0x03, 0x04, 0x8C, 0x00, 0x00, 0xFF, 0x04, 0x00, 0x00, 0x30, 81 | 0x39, 0x40, 0x00, 0x01, 0x00, 0x0C, 0x03, 0x03, 0x05, 0x07, 0x0A, 0x04, 82 | 0x02, 0x13, 0x02, 0x00, 0x00, 0x54, 0x65, 0x73, 0x74, 0x00, 0xFF, 0xFF, 83 | 0xFF, 0xFF, 0x05, 0x01, 0x5C, 0x21 84 | ]) 85 | 86 | fit_file_chained = bytearray([ 87 | 0x0E, 0x20, 0x9F, 0x03, 0x64, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 88 | 0xB9, 0xE3, 0x40, 0x00, 0x00, 0x00, 0x00, 0x08, 0x03, 0x04, 0x8C, 0x04, 89 | 0x04, 0x86, 0x08, 0x14, 0x07, 0x01, 0x02, 0x84, 0x02, 0x02, 0x84, 0x05, 90 | 0x02, 0x84, 0x06, 0x02, 0x84, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 91 | 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 92 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 93 | 0x00, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x02, 0x40, 0x00, 94 | 0x00, 0xD3, 0x00, 0x05, 0xFD, 0x04, 0x86, 0x03, 0x02, 0x84, 0x04, 0x02, 95 | 0x84, 0x00, 0x01, 0x02, 0x01, 0x01, 0x02, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 96 | 0x32, 0x00, 0x37, 0x00, 0x2C, 0x2E, 0x7C, 0xD5, 97 | 0x0E, 0x20, 0x9F, 0x03, 0x64, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 98 | 0xB9, 0xE3, 0x40, 0x00, 0x00, 0x00, 0x00, 0x08, 0x03, 0x04, 0x8C, 0x04, 99 | 0x04, 0x86, 0x08, 0x14, 0x07, 0x01, 0x02, 0x84, 0x02, 0x02, 0x84, 0x05, 100 | 0x02, 0x84, 0x06, 0x02, 0x84, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 101 | 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 102 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 103 | 0x00, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x02, 0x40, 0x00, 104 | 0x00, 0xD3, 0x00, 0x05, 0xFD, 0x04, 0x86, 0x03, 0x02, 0x84, 0x04, 0x02, 105 | 0x84, 0x00, 0x01, 0x02, 0x01, 0x01, 0x02, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 106 | 0x32, 0x00, 0x37, 0x00, 0x2C, 0x2E, 0x7C, 0xD5 107 | ]) 108 | 109 | fit_file_800m_repeats_little_endian = bytearray([ 110 | 0x0E, 0x10, 0x8D, 0x08, 0xDB, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 111 | 0xDE, 0xB8, 0x40, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x01, 0x00, 0x01, 112 | 0x02, 0x84, 0x02, 0x02, 0x84, 
0x04, 0x04, 0x86, 0x03, 0x04, 0x8C, 0x00, 113 | 0x05, 0xFF, 0x00, 0x00, 0x00, 0x12, 0xAD, 0x66, 0x3D, 0x38, 0xB6, 0xC1, 114 | 0x0A, 0x40, 0x00, 0x00, 0x1A, 0x00, 0x04, 0x08, 0x15, 0x07, 0x04, 0x01, 115 | 0x00, 0x0B, 0x01, 0x00, 0x06, 0x02, 0x84, 0x00, 0x52, 0x75, 0x6E, 0x6E, 116 | 0x69, 0x6E, 0x67, 0x20, 0x38, 0x30, 0x30, 0x6D, 0x20, 0x52, 0x65, 0x70, 117 | 0x65, 0x61, 0x74, 0x73, 0x00, 0x01, 0xFF, 0x05, 0x00, 0x40, 0x00, 0x00, 118 | 0x1B, 0x00, 0x08, 0x02, 0x04, 0x86, 0xFE, 0x02, 0x84, 0x07, 0x01, 0x00, 119 | 0x01, 0x01, 0x00, 0x03, 0x01, 0x00, 0x04, 0x04, 0x86, 0x05, 0x04, 0x86, 120 | 0x06, 0x04, 0x86, 0x00, 0x80, 0x1A, 0x06, 0x00, 0x00, 0x00, 0x02, 0x01, 121 | 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 122 | 0x00, 0x00, 0x80, 0x38, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x04, 123 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 124 | 0x20, 0x4E, 0x00, 0x00, 0x02, 0x00, 0x01, 0x01, 0x01, 0x02, 0x00, 0x00, 125 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 126 | 0x00, 0x00, 0x03, 0x00, 0xFF, 0x06, 0x02, 0x05, 0x00, 0x00, 0x00, 0xFF, 127 | 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xA0, 0x86, 0x01, 0x00, 128 | 0x04, 0x00, 0x03, 0x01, 0x01, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 129 | 0x00, 0x00, 0x00, 0x00, 0x00, 0xAE, 0xC4 130 | ]) 131 | 132 | fit_file_800m_repeats_big_endian = bytearray([ 133 | 0x0E, 0x20, 0x9F, 0x03, 0xDB, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 134 | 0xF2, 0xD7, 0x40, 0x00, 0x01, 0x00, 0x00, 0x05, 0x00, 0x01, 0x00, 0x01, 135 | 0x02, 0x84, 0x02, 0x02, 0x84, 0x04, 0x04, 0x86, 0x03, 0x04, 0x8C, 0x00, 136 | 0x05, 0x00, 0xFF, 0x00, 0x00, 0x3D, 0x66, 0xAD, 0x12, 0x0A, 0xC1, 0xB6, 137 | 0x38, 0x40, 0x00, 0x01, 0x00, 0x1A, 0x04, 0x08, 0x15, 0x07, 0x04, 0x01, 138 | 0x00, 0x0B, 0x01, 0x00, 0x06, 0x02, 0x84, 0x00, 0x52, 0x75, 0x6E, 0x6E, 139 | 0x69, 0x6E, 0x67, 0x20, 0x38, 0x30, 0x30, 0x6D, 0x20, 0x52, 0x65, 0x70, 140 | 0x65, 0x61, 0x74, 0x73, 0x00, 0x01, 0xFF, 0x00, 0x05, 0x40, 0x00, 0x01, 141 | 0x00, 0x1B, 0x08, 0x02, 0x04, 0x86, 0xFE, 0x02, 0x84, 0x07, 0x01, 0x00, 142 | 0x01, 0x01, 0x00, 0x03, 0x01, 0x00, 0x04, 0x04, 0x86, 0x05, 0x04, 0x86, 143 | 0x06, 0x04, 0x86, 0x00, 0x00, 0x06, 0x1A, 0x80, 0x00, 0x00, 0x02, 0x01, 144 | 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 145 | 0x00, 0x00, 0x00, 0x01, 0x38, 0x80, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 146 | 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 147 | 0x00, 0x00, 0x4E, 0x20, 0x00, 0x02, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 148 | 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 149 | 0x00, 0x01, 0x00, 0x03, 0xFF, 0x06, 0x02, 0x00, 0x00, 0x00, 0x05, 0xFF, 150 | 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x01, 0x86, 0xA0, 151 | 0x00, 0x04, 0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 152 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x87, 0x67 153 | ]) 154 | 155 | fit_file_dev_data_missing_field_description = bytearray([ 156 | 0x0E, 0x20, 0x64, 0x00, 0xD1, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 157 | 0x12, 0x7E, 0x40, 0x00, 0x01, 0x00, 0x00, 0x05, 0x00, 0x01, 0x00, 0x01, 158 | 0x02, 0x84, 0x02, 0x02, 0x84, 0x04, 0x04, 0x86, 0x03, 0x04, 0x8C, 0x00, 159 | 0x04, 0x00, 0xFF, 0x00, 0x00, 0x3D, 0x5D, 0x38, 0xBD, 0x1E, 0x29, 0x25, 160 | 0x9B, 0x60, 0x00, 0x01, 0x00, 0x14, 0x09, 0xFD, 0x04, 0x86, 0x05, 0x04, 161 | 0x86, 0x06, 0x02, 0x84, 0x03, 0x01, 0x02, 0x04, 0x01, 0x02, 0x07, 0x02, 162 | 0x84, 0x02, 0x02, 0x84, 0x00, 0x04, 0x85, 0x01, 0x04, 0x85, 0x01, 
0x01, 163 | 0x01, 0x00, 0x00, 0x3D, 0x5D, 0x38, 0xBD, 0x00, 0x00, 0x00, 0x00, 0x03, 164 | 0xE8, 0x7E, 0x00, 0x00, 0x96, 0x07, 0x49, 0x00, 0x00, 0x00, 0x00, 0x00, 165 | 0x00, 0x00, 0x00, 0x7E, 0x00, 0x3D, 0x5D, 0x38, 0xBE, 0x00, 0x00, 0x00, 166 | 0x64, 0x03, 0xE8, 0x86, 0x01, 0x00, 0x96, 0x07, 0x4E, 0x7F, 0xFF, 0xFF, 167 | 0xFF, 0x7F, 0xFF, 0xFF, 0xFF, 0x86, 0x00, 0x3D, 0x5D, 0x38, 0xBF, 0x00, 168 | 0x00, 0x00, 0xC8, 0x03, 0xE8, 0x8E, 0x02, 0x00, 0x96, 0x07, 0x53, 0x7F, 169 | 0xFF, 0xFF, 0xFF, 0x7F, 0xFF, 0xFF, 0xFF, 0x8E, 0x00, 0x3D, 0x5D, 0x38, 170 | 0xC0, 0x00, 0x00, 0x01, 0x2C, 0x03, 0xE8, 0x96, 0x03, 0x00, 0x96, 0x07, 171 | 0x58, 0x7F, 0xFF, 0xFF, 0xFF, 0x7F, 0xFF, 0xFF, 0xFF, 0x96, 0x40, 0x00, 172 | 0x01, 0x00, 0x22, 0x04, 0xFD, 0x04, 0x86, 0x01, 0x02, 0x84, 0x05, 0x04, 173 | 0x86, 0x00, 0x04, 0x86, 0x00, 0x3D, 0x5D, 0x46, 0xCD, 0x00, 0x01, 0x3D, 174 | 0x5C, 0xF2, 0x6D, 0x00, 0x36, 0xEE, 0x80, 0x78, 0x3B 175 | ]) 176 | 177 | fit_file_monitoring = bytearray([ 178 | 0x0E, 0x10, 0x28, 0x23, 0x37, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 179 | 0x2C, 0xC6, 0x41, 0x00, 0x01, 0x00, 0x37, 0x03, 0xFD, 0x04, 0x86, 0x18, 180 | 0x01, 0x0D, 0x03, 0x04, 0x86, 0x01, 0x3F, 0x2A, 0xE2, 0xFF, 0x61, 0x00, 181 | 0x00, 0x00, 0x14, 0x01, 0x3F, 0x2A, 0xE2, 0xFF, 0x06, 0x00, 0x00, 0x00, 182 | 0x3C, 0x01, 0x3F, 0x2A, 0xE2, 0xFF, 0x1E, 0x00, 0x00, 0x00, 0x1E, 0x01, 183 | 0x3F, 0x2A, 0xE2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x1E, 0xED, 0xF9 184 | ]) 185 | 186 | fit_file_messages_with_no_fields = bytearray([ 187 | 0x0E, 0x20, 0x84, 0x52, 0x44, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 188 | 0x3A, 0x18, 0x40, 0x00, 0x01, 0x00, 0x69, 0x00, 0x41, 0x00, 0x01, 0x00, 189 | 0x00, 0x07, 0x03, 0x04, 0x8C, 0x04, 0x04, 0x86, 0x01, 0x02, 0x84, 0x02, 190 | 0x02, 0x84, 0x05, 0x02, 0x84, 0x00, 0x01, 0x00, 0xFB, 0x01, 0x0D, 0x01, 191 | 0xCD, 0xC3, 0x1F, 0xAE, 0x3F, 0x92, 0x50, 0x78, 0x00, 0x01, 0x10, 0x22, 192 | 0x00, 0x00, 0x20, 0xFF, 0x00, 0x01, 0xCD, 0xC3, 0x1F, 0xAE, 0x3F, 0x92, 193 | 0x50, 0x78, 0x00, 0x01, 0x10, 0x22, 0x00, 0x00, 0x20, 0xFF, 0x25, 0xFB 194 | ]) 195 | 196 | fit_file_accumulated_components = bytearray([ 197 | 0x0E, 0x20, 0xE8, 0x03, 0x0F, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 198 | 0x4D, 0x89, 0x40, 0x00, 0x00, 0x14, 0x00, 0x01, 0x12, 0x01, 0x02, 0x00, 199 | 0xFE, 0x00, 0x00, 0x00, 0x01, 0xFF, 0x7D 200 | ]) 201 | 202 | fit_file_compressed_speed_distance = bytearray([ 203 | 0x0E, 0x20, 0xE8, 0x03, 0x11, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 204 | 0xCD, 0x09, 0x40, 0x00, 0x00, 0x14, 0x00, 0x01, 0x08, 0x03, 0x0D, 0x00, 205 | 0x8B, 0x00, 0x08, 0x00, 0xF9, 0x00, 0x14, 0x50, 0x0B 206 | ]) 207 | 208 | fit_file_compressed_speed_distance_with_initial_distance = bytearray([ 209 | 0x0E, 0x20, 0xE8, 0x03, 0x1F, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 210 | 0x4C, 0x85, 0x40, 0x00, 0x00, 0x14, 0x00, 0x01, 0x05, 0x04, 0x86, 0x00, 211 | 0xC8, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x14, 0x00, 0x01, 0x08, 0x03, 212 | 0x0D, 0x00, 0x8B, 0x00, 0x08, 0x00, 0xF9, 0x00, 0x14, 0x65, 0xB1 213 | ]) 214 | 215 | fit_file_short_invalid_header = bytearray([ 216 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, # File Header 217 | 0x40, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x01, 0x00, 0x01, 0x02, 0x84, 0x04, 0x04, 0x86, 0x08, 0x0A, 0x07, # Message Definition 218 | 0x00, 0x04, 0x01, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x00, # Message 219 | 0x5D, 0xF2 220 | ]) # CRC 221 | 222 | fit_file_short_data_only = bytearray([ 223 | 0x40, 0x00, 0x00, 0x00, 0x00, 
0x04, 0x00, 0x01, 0x00, 0x01, 0x02, 0x84, 0x04, 0x04, 0x86, 0x08, 0x0A, 0x07, # Message Definition 224 | 0x00, 0x04, 0x01, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x00, # Message 225 | 0x5D, 0xF2 226 | ]) # CRC 227 | 228 | fit_file_short_invalid_CRC = bytearray([ 229 | 0x0E, 0x20, 0x8B, 0x08, 0x24, 0x00, 0x00, 0x00, 0x2E, 0x46, 0x49, 0x54, 0x8E, 0xA3, # File Header 230 | 0x40, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x01, 0x00, 0x01, 0x02, 0x84, 0x04, 0x04, 0x86, 0x08, 0x0A, 0x07, # Message Definition 231 | 0x00, 0x04, 0x01, 0x00, 0x00, 0xCA, 0x9A, 0x3B, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x00, # Message 232 | 0x00, 0x00 233 | ]) # CRC 234 | 235 | gear_change_data = [ 236 | { 237 | "timestamp": 1024873717, 238 | "rear_gear_num": 5, 239 | "rear_gear": 24, 240 | "front_gear_num": 255, 241 | "front_gear": 22, 242 | "data": 385816581, 243 | "gear_change_data": 385816581 244 | }, 245 | { 246 | "timestamp": 1024873760, 247 | "rear_gear_num": 6, 248 | "rear_gear": 21, 249 | "front_gear_num": 255, 250 | "front_gear": None, 251 | "data": 16717062, 252 | "gear_change_data": 16717062 253 | }, 254 | { 255 | "timestamp": 1024873819, 256 | "rear_gear_num": 7, 257 | "rear_gear": 19, 258 | "front_gear_num": 255, 259 | "front_gear": None, 260 | "data": 16716551, 261 | "gear_change_data": 16716551 262 | }, 263 | { 264 | "timestamp": 1024873850, 265 | "rear_gear_num": 6, 266 | "rear_gear": 21, 267 | "front_gear_num": 255, 268 | "front_gear": None, 269 | "data": 16717062, 270 | "gear_change_data": 16717062 271 | }, 272 | { 273 | "timestamp": 1024874601, 274 | "rear_gear_num": 7, 275 | "rear_gear": 19, 276 | "front_gear_num": 255, 277 | "front_gear": None, 278 | "data": 16716551, 279 | "gear_change_data": 16716551 280 | }, 281 | { 282 | "timestamp": 1024874624, 283 | "rear_gear_num": 8, 284 | "rear_gear": 17, 285 | "front_gear_num": 255, 286 | "front_gear": None, 287 | "data": 16716040, 288 | "gear_change_data": 16716040 289 | }, 290 | { 291 | "timestamp": 1024874694, 292 | "rear_gear_num": 7, 293 | "rear_gear": 19, 294 | "front_gear_num": 255, 295 | "front_gear": None, 296 | "data": 16716551, 297 | "gear_change_data": 16716551 298 | }, 299 | { 300 | "timestamp": 1024874698, 301 | "rear_gear_num": 6, 302 | "rear_gear": 21, 303 | "front_gear_num": 255, 304 | "front_gear": None, 305 | "data": 16717062, 306 | "gear_change_data": 16717062 307 | }, 308 | { 309 | "timestamp": 1024874727, 310 | "rear_gear_num": 7, 311 | "rear_gear": 19, 312 | "front_gear_num": 255, 313 | "front_gear": None, 314 | "data": 16716551, 315 | "gear_change_data": 16716551 316 | }, 317 | { 318 | "timestamp": 1024874755, 319 | "rear_gear_num": 8, 320 | "rear_gear": 17, 321 | "front_gear_num": 255, 322 | "front_gear": None, 323 | "data": 16716040, 324 | "gear_change_data": 16716040 325 | }, 326 | { 327 | "timestamp": 1024874824, 328 | "rear_gear_num": 7, 329 | "rear_gear": 19, 330 | "front_gear_num": 255, 331 | "front_gear": None, 332 | "data": 16716551, 333 | "gear_change_data": 16716551 334 | }, 335 | { 336 | "timestamp": 1024874829, 337 | "rear_gear_num": 6, 338 | "rear_gear": 21, 339 | "front_gear_num": 255, 340 | "front_gear": None, 341 | "data": 16717062, 342 | "gear_change_data": 16717062 343 | }, 344 | { 345 | "timestamp": 1024874864, 346 | "rear_gear_num": 7, 347 | "rear_gear": 19, 348 | "front_gear_num": 255, 349 | "front_gear": None, 350 | "data": 16716551, 351 | "gear_change_data": 16716551 352 | }, 353 | { 354 | "timestamp": 1024874913, 355 | "rear_gear_num": 6, 356 | 
"rear_gear": 21, 357 | "front_gear_num": 255, 358 | "front_gear": None, 359 | "data": 16717062, 360 | "gear_change_data": 16717062 361 | }, 362 | { 363 | "timestamp": 1024874927, 364 | "rear_gear_num": 4, 365 | "rear_gear": 27, 366 | "front_gear_num": 255, 367 | "front_gear": None, 368 | "data": 16718596, 369 | "gear_change_data": 16718596 370 | }, 371 | { 372 | "timestamp": 1024875097, 373 | "rear_gear_num": 5, 374 | "rear_gear": 24, 375 | "front_gear_num": 255, 376 | "front_gear": None, 377 | "data": 16717829, 378 | "gear_change_data": 16717829 379 | }, 380 | { 381 | "timestamp": 1024875097, 382 | "rear_gear_num": 6, 383 | "rear_gear": 21, 384 | "front_gear_num": 255, 385 | "front_gear": None, 386 | "data": 16717062, 387 | "gear_change_data": 16717062 388 | }, 389 | { 390 | "timestamp": 1024875111, 391 | "rear_gear_num": 5, 392 | "rear_gear": 24, 393 | "front_gear_num": 255, 394 | "front_gear": None, 395 | "data": 16717829, 396 | "gear_change_data": 16717829 397 | }, 398 | { 399 | "timestamp": 1024875126, 400 | "rear_gear_num": 4, 401 | "rear_gear": 27, 402 | "front_gear_num": 255, 403 | "front_gear": None, 404 | "data": 16718596, 405 | "gear_change_data": 16718596 406 | }, 407 | { 408 | "timestamp": 1024875251, 409 | "rear_gear_num": 3, 410 | "rear_gear": 31, 411 | "front_gear_num": 255, 412 | "front_gear": None, 413 | "data": 16719619, 414 | "gear_change_data": 16719619 415 | }, 416 | { 417 | "timestamp": 1024875265, 418 | "rear_gear_num": 4, 419 | "rear_gear": 27, 420 | "front_gear_num": 255, 421 | "front_gear": None, 422 | "data": 16718596, 423 | "gear_change_data": 16718596 424 | }, 425 | { 426 | "timestamp": 1024875271, 427 | "rear_gear_num": 5, 428 | "rear_gear": 24, 429 | "front_gear_num": 255, 430 | "front_gear": None, 431 | "data": 16717829, 432 | "gear_change_data": 16717829 433 | }, 434 | { 435 | "timestamp": 1024875291, 436 | "rear_gear_num": 6, 437 | "rear_gear": 21, 438 | "front_gear_num": 255, 439 | "front_gear": None, 440 | "data": 16717062, 441 | "gear_change_data": 16717062 442 | }, 443 | { 444 | "timestamp": 1024875364, 445 | "rear_gear_num": 7, 446 | "rear_gear": 19, 447 | "front_gear_num": 255, 448 | "front_gear": None, 449 | "data": 16716551, 450 | "gear_change_data": 16716551 451 | }, 452 | { 453 | "timestamp": 1024875388, 454 | "rear_gear_num": 8, 455 | "rear_gear": 17, 456 | "front_gear_num": 255, 457 | "front_gear": None, 458 | "data": 16716040, 459 | "gear_change_data": 16716040 460 | }, 461 | { 462 | "timestamp": 1024875423, 463 | "rear_gear_num": 9, 464 | "rear_gear": 15, 465 | "front_gear_num": 255, 466 | "front_gear": None, 467 | "data": 16715529, 468 | "gear_change_data": 16715529 469 | }, 470 | { 471 | "timestamp": 1024875515, 472 | "rear_gear_num": 8, 473 | "rear_gear": 17, 474 | "front_gear_num": 255, 475 | "front_gear": None, 476 | "data": 16716040, 477 | "gear_change_data": 16716040 478 | }, 479 | { 480 | "timestamp": 1024875589, 481 | "rear_gear_num": 7, 482 | "rear_gear": 19, 483 | "front_gear_num": 255, 484 | "front_gear": None, 485 | "data": 16716551, 486 | "gear_change_data": 16716551 487 | }, 488 | { 489 | "timestamp": 1024875615, 490 | "rear_gear_num": 8, 491 | "rear_gear": 17, 492 | "front_gear_num": 255, 493 | "front_gear": None, 494 | "data": 16716040, 495 | "gear_change_data": 16716040 496 | }, 497 | { 498 | "timestamp": 1024875616, 499 | "rear_gear_num": 9, 500 | "rear_gear": 15, 501 | "front_gear_num": 255, 502 | "front_gear": None, 503 | "data": 16715529, 504 | "gear_change_data": 16715529 505 | }, 506 | { 507 | "timestamp": 1024875621, 
508 | "rear_gear_num": 10, 509 | "rear_gear": 13, 510 | "front_gear_num": 255, 511 | "front_gear": None, 512 | "data": 16715018, 513 | "gear_change_data": 16715018 514 | }, 515 | { 516 | "timestamp": 1024875622, 517 | "rear_gear_num": 11, 518 | "rear_gear": 11, 519 | "front_gear_num": 255, 520 | "front_gear": None, 521 | "data": 16714507, 522 | "gear_change_data": 16714507 523 | }, 524 | { 525 | "timestamp": 1024875651, 526 | "rear_gear_num": 9, 527 | "rear_gear": 15, 528 | "front_gear_num": 255, 529 | "front_gear": None, 530 | "data": 16715529, 531 | "gear_change_data": 16715529 532 | }, 533 | { 534 | "timestamp": 1024875658, 535 | "rear_gear_num": 8, 536 | "rear_gear": 17, 537 | "front_gear_num": 255, 538 | "front_gear": None, 539 | "data": 16716040, 540 | "gear_change_data": 16716040 541 | }, 542 | { 543 | "timestamp": 1024875658, 544 | "rear_gear_num": 7, 545 | "rear_gear": 19, 546 | "front_gear_num": 255, 547 | "front_gear": None, 548 | "data": 16716551, 549 | "gear_change_data": 16716551 550 | }, 551 | { 552 | "timestamp": 1024875665, 553 | "rear_gear_num": 6, 554 | "rear_gear": 21, 555 | "front_gear_num": 255, 556 | "front_gear": None, 557 | "data": 16717062, 558 | "gear_change_data": 16717062 559 | }, 560 | { 561 | "timestamp": 1024875695, 562 | "rear_gear_num": 6, 563 | "rear_gear": 21, 564 | "front_gear_num": 255, 565 | "front_gear": 22, 566 | "data": 385815814, 567 | "gear_change_data": 385815814 568 | } 569 | ] 570 | 571 | hrm_plugin_test_activity_expected = [ 572 | 1242209, 573 | 1242212.0, 1242213.7314453125, 1242215.5029296875, 1242215.865234375, 1242216.9541015625, 1242218.3369140625, 1242219.6220703125, 1242219.9853515625, 574 | 1242220.71875, 1242221.2607421875, 1242221.83203125, 1242222.103515625, 1242222.970703125, 1242223.849609375, 1242224.234375, 1242225.193359375, 575 | 1242225.537109375, 1242226.345703125, 1242226.9990234375, 1242227.599609375, 1242227.978515625, 1242228.5849609375, 1242228.962890625, 1242229.291015625, 576 | 1242229.8154296875, 1242230.1708984375, 1242230.630859375, 1242230.9794921875, 1242231.3359375, 1242231.7421875, 1242232.041015625, 1242232.517578125, 577 | 1242232.93359375, 1242233.2890625, 1242233.6767578125, 1242234.0703125, 1242234.4638671875, 1242234.9033203125, 1242235.23828125, 1242235.625, 578 | 1242236.0126953125, 1242236.28125, 1242236.6396484375, 1242237.56640625, 1242239.478515625, 1242240.4189453125, 1242241.388671875, 1242242.35546875, 579 | 1242244.2861328125, 1242245.2841796875, 1242246.279296875, 1242247.2900390625, 1242248.3251953125, 1242249.36328125, 1242250.736328125, 1242251.0703125, 580 | 1242251.1640625, 1242251.921875, 1242252.546875, 1242253.5751953125, 1242254.6064453125, 1242255.6201171875, 1242256.626953125, 1242257.568359375, 581 | 1242258.5009765625, 1242259.509765625, 1242260.5712890625, 1242261.142578125, 1242261.66796875, 1242262.72265625, 1242263.7890625, 1242264.869140625, 582 | 1242265.951171875, 1242266.9794921875, 1242268.0087890625, 1242269.0380859375, 1242270.046875, 1242271.0712890625, 1242272.162109375, 1242273.3310546875, 583 | 1242274.494140625, 1242275.6552734375, 1242276.802734375, 1242277.8896484375, 1242278.9677734375, 1242280.048828125, 1242281.115234375, 1242282.1796875, 584 | 1242283.25, 1242284.296875, 1242285.30859375, 1242286.3388671875, 1242287.40625, 1242288.45703125, 1242289.4921875, 1242290.5458984375, 585 | 1242291.5986328125, 1242292.62109375, 1242293.65625, 1242294.6943359375, 1242295.7236328125, 1242296.7744140625, 1242297.849609375, 1242298.5556640625, 586 | 1242299.51953125, 
1242300.2177734375, 1242301.1015625, 1242302.2080078125, 1242303.3037109375, 1242304.3857421875, 1242305.453125, 1242306.4921875, 587 | 1242307.544921875, 1242308.609375, 1242309.6494140625, 1242310.6845703125, 1242311.732421875, 1242312.775390625, 1242313.8115234375, 1242314.8408203125, 588 | 1242315.859375, 1242316.9033203125, 1242317.9716796875, 1242319.04296875, 1242320.0966796875, 1242321.1630859375, 1242322.1953125, 1242323.2197265625, 589 | 1242324.2373046875, 1242325.2265625, 1242326.1767578125, 1242327.123046875, 1242328.013671875, 1242328.6884765625, 1242329.87109375, 1242330.8564453125, 590 | 1242331.814453125, 1242332.830078125, 1242333.8212890625, 1242334.83203125, 1242335.818359375, 1242336.8076171875, 1242337.78515625, 1242338.7470703125, 591 | 1242339.7216796875, 1242340.6982421875, 1242341.6806640625, 1242342.6708984375, 1242343.666015625, 1242344.6630859375, 1242345.685546875, 1242346.7275390625, 592 | 1242347.77734375, 1242348.791015625, 1242349.7216796875, 1242350.5986328125, 1242351.5146484375, 1242352.513671875, 1242353.6279296875, 1242354.7880859375, 593 | 1242355.931640625, 1242357.0771484375, 1242358.205078125, 1242359.259765625, 1242360.314453125, 1242361.353515625, 1242362.4091796875, 1242363.470703125, 594 | 1242364.474609375, 1242365.4462890625, 1242366.568359375, 1242367.7119140625, 1242368.8740234375, 1242369.982421875, 1242369.982421875, 1242369.982421875, 595 | 1242371, 596 | 1242372.0, 1242373.0419921875, 1242374.0595703125, 1242375.05078125, 1242376.0244140625, 1242376.9765625, 1242378.0068359375, 1242379.0810546875, 597 | 1242380.2138671875, 1242381.3935546875, 1242382.5712890625, 1242383.7431640625, 1242384.8720703125, 1242385.97265625, 1242387.052734375, 1242388.087890625, 598 | 1242389.091796875, 1242390.1025390625, 1242391.1162109375, 1242392.1533203125, 1242393.1689453125, 1242394.20703125, 1242395.2685546875, 1242396.3369140625, 599 | 1242397.40625, 1242398.4853515625, 1242399.5390625, 1242400.5693359375, 1242401.5380859375, 1242402.552734375, 1242403.6220703125, 1242404.7431640625, 600 | 1242405.86328125, 1242407.0126953125, 1242408.162109375, 1242409.2705078125, 1242410.3994140625, 1242411.5, 1242412.537109375, 1242413.5380859375, 601 | 1242414.4072265625, 1242415.48046875, 1242416.5419921875, 1242417.6337890625, 1242418.68359375, 1242419.787109375, 1242420.8564453125, 1242421.9287109375, 602 | 1242423.005859375, 1242424.02734375, 1242425.076171875, 1242426.12109375, 1242427.1767578125, 1242428.25, 1242429.3076171875, 1242430.3662109375, 603 | 1242431.41015625, 1242432.4521484375, 1242433.4482421875, 1242434.455078125, 1242435.5009765625, 1242436.58984375, 1242437.6796875, 1242438.79296875, 604 | 1242439.8974609375, 1242441.0107421875, 1242442.1044921875, 1242443.228515625, 1242444.3642578125, 1242445.474609375, 1242446.5673828125, 1242447.328125, 605 | 1242448.6318359375, 1242449.6572265625, 1242450.7001953125, 1242451.7587890625, 1242452.8447265625, 1242453.94140625, 1242455.126953125, 1242456.3349609375, 606 | 1242457.541015625, 1242458.7265625, 1242459.9248046875, 1242461.1240234375, 1242462.2998046875, 1242463.4736328125, 1242464.650390625, 1242465.81640625, 607 | 1242466.9931640625, 1242468.1953125, 1242469.3505859375, 1242470.5322265625, 1242471.7412109375, 1242472.94140625, 1242474.1318359375, 1242475.3251953125, 608 | 1242476.5107421875, 1242477.6181640625, 1242478.712890625, 1242479.8349609375, 1242480.9765625, 1242482.10546875, 1242483.21875, 1242484.33984375, 609 | 1242485.396484375, 1242486.486328125, 1242487.6201171875, 
1242488.7890625, 1242489.9208984375, 1242491.0703125, 1242492.2373046875, 1242493.4111328125, 610 | 1242494.5732421875, 1242495.7470703125, 1242496.919921875, 1242498.0927734375, 1242499.2978515625, 1242500.4833984375, 1242501.65234375, 1242502.84765625, 611 | 1242504.0380859375, 1242505.185546875, 1242506.326171875, 1242507.306640625, 1242508.5546875, 1242509.6640625, 1242510.7578125, 1242511.8408203125, 612 | 1242512.943359375, 1242514.0673828125, 1242515.19921875, 1242516.3330078125, 1242517.4306640625, 1242518.533203125, 1242519.6357421875, 1242520.7138671875, 613 | 1242521.80859375, 1242522.91015625, 1242523.9892578125, 1242525.08984375, 1242526.20703125, 1242527.3232421875, 1242528.4365234375, 1242529.53515625, 614 | 1242530.6279296875, 1242531.673828125, 1242532.7119140625, 1242533.759765625, 1242534.8017578125, 1242535.83203125, 1242536.8779296875, 1242537.9150390625, 615 | 1242538.9453125, 1242539.982421875, 1242540.98828125, 1242542.01171875, 1242543.0478515625, 1242544.1103515625, 1242545.2109375, 1242546.3203125, 616 | 1242547.4580078125, 1242548.6123046875, 1242549.7822265625, 1242550.9775390625, 1242552.1669921875, 1242553.3349609375, 1242554.5078125, 1242555.6220703125, 617 | 1242556.63671875, 1242557.666015625, 1242558.77734375, 1242559.89453125, 1242561.0458984375, 1242562.2314453125, 1242563.38671875, 1242564.5478515625, 618 | 1242565.7177734375, 1242566.87109375, 1242568.041015625, 1242569.2255859375, 1242570.392578125, 1242571.5810546875, 1242572.79296875, 1242573.93359375, 619 | 1242575.0634765625, 1242576.2080078125, 1242577.33203125, 1242578.447265625, 1242579.5517578125, 1242580.6748046875, 1242581.8388671875, 1242582.9521484375, 620 | 1242584.0771484375, 1242585.220703125, 1242586.34375, 1242587.474609375, 1242588.6162109375, 1242589.7529296875, 1242590.9287109375, 1242592.11328125, 621 | 1242593.3251953125, 1242594.53515625, 1242595.7724609375, 1242596.9892578125, 1242598.1728515625, 1242599.3134765625, 1242600.4345703125, 1242601.4814453125, 622 | 1242602.4345703125, 1242603.390625, 1242604.404296875, 1242605.4287109375, 1242606.44921875, 1242607.4765625, 1242608.5234375, 1242609.35546875, 623 | 1242609.8701171875, 1242610.9150390625, 1242611.9345703125, 1242612.939453125, 1242613.8974609375, 1242614.3642578125, 1242615.4052734375, 1242616.30078125, 624 | 1242617.1884765625, 1242618.3427734375, 1242619.396484375, 1242619.9580078125, 1242621.015625, 1242621.939453125, 1242622.609375, 1242623.1474609375, 625 | 1242623.92578125, 1242624.7041015625, 1242625.482421875, 1242626.5693359375, 1242627.5361328125, 1242628.419921875, 1242629.4208984375, 1242630.4853515625, 626 | 1242631.486328125, 1242632.6103515625, 1242633.177734375, 1242633.8720703125, 1242634.8076171875, 1242635.220703125, 1242636.041015625, 1242636.732421875, 627 | 1242637.4638671875, 1242638.1318359375, 1242638.765625, 1242639.2099609375, 1242639.8291015625, 1242640.5673828125, 1242640.9833984375, 1242641.58203125, 628 | 1242642.1806640625, 1242642.6279296875, 1242643.330078125, 1242643.92578125, 1242644.515625, 1242645.1103515625, 1242645.5888671875, 1242646.17578125, 629 | 1242646.74609375, 1242647.3330078125, 1242647.9072265625, 1242648.5498046875, 1242649.166015625, 1242649.681640625, 1242650.4130859375, 1242651.2177734375, 630 | 1242651.8798828125, 1242652.744140625, 1242653.45703125, 1242654.244140625, 1242654.9755859375, 1242655.544921875, 1242656.203125, 1242656.8857421875, 631 | 1242657.6220703125, 1242658.3173828125, 1242659.0693359375, 1242659.779296875, 1242660.4892578125, 
1242661.1982421875, 1242662.560546875, 1242662.869140625, 632 | 1242663.185546875, 1242663.560546875, 1242663.560546875, 1242663.560546875, 1242663.560546875, 1242663.560546875, 1242663.560546875, 1242663.560546875, 633 | 1242959, 634 | 1242960.0, 1242962.28515625, 1242963.3388671875, 1242964.3212890625, 1242965.3193359375, 1242966.2392578125, 1242967.150390625, 1242968.0263671875, 635 | 1242969.041015625, 1242969.5400390625, 1242970.552734375, 1242971.1083984375, 1242972.1123046875, 1242973.158203125, 1242973.439453125, 1242973.9423828125, 636 | 1242974.7841796875, 1242975.8095703125, 1242976.6220703125, 1242977.3203125, 1242978.3544921875, 1242979.2958984375, 1242980.1875, 1242981.12890625, 637 | 1242982.130859375, 1242983.130859375, 1242984.1640625, 1242985.2080078125, 1242986.33203125, 1242987.5859375, 1242988.6240234375, 1242989.298828125, 638 | 1242990.4794921875, 1242991.49609375, 1242992.0546875, 1242992.6044921875, 1242993.2138671875, 1242993.474609375, 1242994.2900390625, 1242994.9638671875, 639 | 1242995.2333984375, 1242995.810546875, 1242996.0703125, 1242996.724609375, 1242997.26171875, 1242997.7607421875, 1242998.0234375, 1242998.400390625, 640 | 1242998.8408203125, 1242999.455078125, 1242999.943359375, 1243000.59765625, 1243001.2041015625, 1243001.748046875, 1243002.2939453125, 1243002.8583984375, 641 | 1243003.5771484375, 1243004.2158203125, 1243004.8154296875, 1243005.2958984375, 1243005.9736328125, 1243006.2998046875, 1243006.9052734375, 1243007.490234375, 642 | 1243008.0478515625, 1243008.818359375, 1243009.2353515625, 1243009.9482421875, 1243010.71875, 1243011.47265625, 1243012.20703125, 1243012.9541015625, 643 | 1243013.701171875, 1243014.4716796875, 1243015.236328125, 1243016.005859375, 1243016.6005859375, 1243017.5107421875, 1243018.248046875, 1243018.876953125, 644 | 1243019.7783203125, 1243020.5439453125, 1243021.337890625, 1243022.1103515625, 1243022.931640625, 1243023.73046875, 1243024.5263671875, 1243025.3125, 645 | 1243026.12890625, 1243026.6640625, 1243027.3427734375, 1243027.7578125, 1243028.578125, 1243029.4013671875, 1243030.1923828125, 1243031.0107421875, 646 | 1243031.8251953125, 1243032.62890625, 1243033.40625, 1243034.197265625, 1243034.982421875, 1243035.8359375, 1243036.56640625, 1243037.3828125, 647 | 1243038.197265625, 1243039.0830078125, 1243039.9013671875, 1243040.7587890625, 1243041.333984375, 1243042.330078125, 1243043.4169921875, 1243044.494140625, 648 | 1243045.0302734375, 1243045.888671875, 1243046.4521484375, 1243047.0185546875, 1243047.5419921875, 1243048.1904296875, 1243048.619140625, 1243049.40234375, 649 | 1243049.8798828125, 1243050.65625, 1243051.419921875, 1243052.0634765625, 1243052.9853515625, 1243053.783203125, 1243054.595703125, 1243055.4287109375, 650 | 1243056.26171875, 1243057.1005859375, 1243057.92578125, 1243058.7275390625, 1243059.4921875, 1243060.2705078125, 1243061.0703125, 1243061.8583984375, 651 | 1243062.6826171875, 1243063.5068359375, 1243064.3251953125, 1243065.146484375, 1243065.998046875, 1243066.880859375, 1243067.748046875, 1243068.626953125, 652 | 1243069.509765625, 1243070.400390625, 1243071.251953125, 1243072.125, 1243072.984375, 1243073.86328125, 1243074.6982421875, 1243075.576171875, 653 | 1243076.4404296875, 1243077.3369140625, 1243078.224609375, 1243079.2919921875, 1243079.97265625, 1243080.8681640625, 1243081.76953125, 1243082.71484375, 654 | 1243083.6669921875, 1243084.6142578125, 1243085.5546875, 1243086.474609375, 1243087.390625, 1243088.2919921875, 1243089.197265625, 1243090.0693359375, 655 | 
1243090.9384765625, 1243091.8349609375, 1243092.751953125, 1243093.716796875, 1243094.640625, 1243095.576171875, 1243096.50390625, 1243097.400390625, 656 | 1243098.2666015625, 1243099.16796875, 1243100.0771484375, 1243100.9423828125, 1243101.8251953125, 1243102.7099609375, 1243103.6357421875, 1243104.544921875, 657 | 1243105.462890625, 1243106.357421875, 1243107.283203125, 1243108.162109375, 1243109.291015625, 1243109.92578125, 1243110.8173828125, 1243111.69921875, 658 | 1243112.591796875, 1243113.484375, 1243114.3818359375, 1243115.3056640625, 1243116.1572265625, 1243117.0439453125, 1243117.9130859375, 1243118.759765625, 659 | 1243119.619140625, 1243120.49609375, 1243121.3037109375, 1243122.15625, 1243122.9951171875, 1243123.8408203125, 1243124.6572265625, 1243125.5234375, 660 | 1243126.4296875, 1243127.345703125, 1243128.24609375, 1243129.1162109375, 1243129.9873046875, 1243130.8427734375, 1243131.693359375, 1243132.537109375, 661 | 1243133.392578125, 1243134.2099609375, 1243135.0498046875, 1243135.8857421875, 1243136.6962890625, 1243137.5595703125, 1243138.41015625, 1243139.1396484375, 662 | 1243140.056640625, 1243141.23046875, 1243142.0625, 1243142.68359375, 1243143.3212890625, 1243144.1484375, 1243145.0078125, 1243145.8818359375, 663 | 1243146.7666015625, 1243147.6484375, 1243148.5732421875, 1243149.482421875, 1243150.423828125, 1243151.373046875, 1243152.3349609375, 1243153.25, 664 | 1243154.19140625, 1243155.1259765625, 1243156.0615234375, 1243157.0126953125, 1243157.9677734375, 1243158.9013671875, 1243159.8447265625, 1243160.779296875, 665 | 1243161.6953125, 1243162.6357421875, 1243163.60546875, 1243164.5810546875, 1243165.5107421875, 1243166.4619140625, 1243167.3974609375, 1243168.33984375, 666 | 1243169.1240234375, 1243170.2392578125, 1243171.1533203125, 1243172.0419921875, 1243172.9208984375, 1243173.6552734375, 1243174.66015625, 1243175.5439453125, 667 | 1243176.466796875, 1243177.404296875, 1243178.35546875, 1243179.318359375, 1243180.25, 1243181.1796875, 1243182.0869140625, 1243183.0, 668 | 1243183.9267578125, 1243184.849609375, 1243185.75, 1243186.6474609375, 1243187.546875, 1243188.4638671875, 1243189.3857421875, 1243190.2900390625, 669 | 1243191.1982421875, 1243192.091796875, 1243192.978515625, 1243193.818359375, 1243194.638671875, 1243195.4541015625, 1243196.2802734375, 1243197.1318359375, 670 | 1243198.0029296875, 1243198.8828125, 1243199.7529296875, 1243200.6591796875, 1243201.5380859375, 1243202.447265625, 1243203.3681640625, 1243204.236328125, 671 | 1243205.1572265625, 1243206.103515625, 1243207.09765625, 1243208.09765625, 1243209.115234375, 1243210.1513671875, 1243211.1650390625, 1243212.1845703125, 672 | 1243213.208984375, 1243214.201171875, 1243215.1787109375, 1243216.1171875, 1243217.0703125, 1243218.0224609375, 1243218.9775390625, 1243219.9580078125, 673 | 1243220.9599609375, 1243221.953125, 1243222.8662109375, 1243223.7861328125, 1243224.69921875, 1243225.6396484375, 1243226.541015625, 1243227.4970703125, 674 | 1243228.4462890625, 1243229.2880859375, 1243230.248046875, 1243231.1767578125, 1243232.1533203125, 1243233.0966796875, 1243234.033203125, 1243234.9755859375, 675 | 1243235.919921875, 1243236.837890625, 1243237.71875, 1243238.6357421875, 1243239.5068359375, 1243240.43359375, 1243241.3515625, 1243242.26953125, 676 | 1243243.107421875, 1243244.044921875, 1243244.9619140625, 1243245.94921875, 1243246.9560546875, 1243247.96484375, 1243249.080078125, 1243249.9404296875, 677 | 1243250.89453125, 1243251.8291015625, 1243252.744140625, 1243253.6474609375, 
1243254.55078125, 1243255.46484375, 1243256.3544921875, 1243257.2412109375, 678 | 1243258.1591796875, 1243259.27734375, 1243260.03515625, 1243260.9609375, 1243261.9052734375, 1243262.791015625, 1243263.6513671875, 1243264.478515625, 679 | 1243265.25, 1243266.01953125, 1243266.7841796875, 1243267.5302734375, 1243268.26953125, 1243269.0107421875, 1243269.751953125, 1243270.4755859375, 680 | 1243271.1796875, 1243271.876953125, 1243272.5615234375, 1243273.2373046875, 1243273.9189453125, 1243274.591796875, 1243275.2490234375, 1243275.9033203125, 681 | 1243276.595703125, 1243277.2861328125, 1243277.96484375, 1243278.626953125, 1243279.3115234375, 1243279.98046875, 1243280.6591796875, 1243281.3291015625, 682 | 1243281.9873046875, 1243282.6435546875, 1243283.287109375, 1243283.9296875, 1243284.587890625, 1243285.2509765625, 1243285.904296875, 1243286.5712890625, 683 | 1243287.228515625, 1243287.8857421875, 1243288.55859375, 1243289.1240234375, 1243289.76171875, 1243290.3994140625, 1243291.037109375, 1243291.6748046875, 684 | 1243292.0361328125, 1243292.7470703125, 1243293.453125, 1243294.1689453125, 1243294.8798828125, 1243295.58203125, 1243296.2900390625, 1243296.9912109375, 685 | 1243297.6904296875, 1243298.39453125, 1243298.39453125, 1243298.39453125, 1243298.39453125, 1243298.39453125, 1243298.39453125, 1243298.39453125, 686 | ] 687 | --------------------------------------------------------------------------------