├── MANIFEST.in
├── dslogs
│   ├── dslogs
│   │   ├── __init__.py
│   │   └── dslogs.py
│   ├── README.md
│   ├── LICENSE.txt
│   └── setup.py
├── .gitignore
├── dslogparser
│   ├── __init__.py
│   └── dslogparser.py
├── Notes.txt
├── README-dslogparser.md
├── README.md
├── LICENSE.txt
├── setup.py
└── dslog2csv.py
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README-dslogparser.md
2 | 
--------------------------------------------------------------------------------
/dslogs/dslogs/__init__.py:
--------------------------------------------------------------------------------
1 | from .dslogs import DSlogs
2 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | *.egg-info
3 | *~
4 | .mypy_cache
5 | __pycache__
6 | dist
7 | build
8 | version.py
9 | 
--------------------------------------------------------------------------------
/dslogs/README.md:
--------------------------------------------------------------------------------
1 | # dslogs
2 | 
3 | dslogs contains the DSlogs class, which is useful for streaming log entries to a "real time" application. See the dslogparser package.
4 | 
--------------------------------------------------------------------------------
/dslogparser/__init__.py:
--------------------------------------------------------------------------------
1 | from .dslogparser import DSEventParser
2 | from .dslogparser import DSLogParser
3 | from .dslogparser import DSLOG_TIMESTEP
4 | 
5 | __all__ = ['DSLogParser', 'DSEventParser']
6 | 
--------------------------------------------------------------------------------
/Notes.txt:
--------------------------------------------------------------------------------
1 | Upload test version:
2 | twine upload --repository-url https://test.pypi.org/legacy/ dist/XXX
3 | 
4 | Install from test upload:
5 | sudo pip3 install --index-url https://test.pypi.org/simple/ dslogparser
6 | 
7 | Upload prod version:
8 | twine upload dist/XXX
9 | 
--------------------------------------------------------------------------------
/README-dslogparser.md:
--------------------------------------------------------------------------------
1 | # dslogparser
2 | Parse FIRST FRC Driver Station log files.
3 | 
4 | The DSLogParser class can parse FRC .dslog files and extract the entries, which are returned as dictionaries.
5 | 
6 | The DSEventParser class parses FRC .dsevents files, including extracting the match information.
7 | 
8 | The script dslog2csv.py can read one or many .dslog files to produce CSV file(s).
9 | It can combine .dslog and .dsevents files if the filenames match. See ```dslog2csv.py -h``` for usage information.
10 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # dslogparser
2 | Parse FIRST FRC Driver Station log files.
3 | 
4 | This repository hosts two PyPI packages:
5 | * dslogparser includes the basic parsing library, plus a command-line script to output CSV files (a short usage sketch follows below)
6 | * dslogs contains the DSlogs class, which is useful for streaming log entries to a "real time" application.
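As a quick orientation, here is a minimal sketch of reading a .dslog file with the dslogparser package. The filename is a placeholder, and the keys printed are just a few of the fields in each record:

```python
from dslogparser import DSLogParser

# 'example.dslog' is a placeholder; point this at a real Driver Station log file
parser = DSLogParser('example.dslog')
for record in parser.read_records():
    # read_records() yields one dict per log entry (one entry every 20 ms)
    print(record['time'], record['voltage'], record['packet_loss'])
parser.close()
```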
7 | 8 | # Reference Sources: 9 | https://www.chiefdelphi.com/forums/showthread.php?p=1556451 10 | 11 | Particularly: 12 | https://www.chiefdelphi.com/forums/showpost.php?p=1556451&postcount=11 13 | 14 | Program: https://github.com/orangelight/DSLOG-Reader 15 | 16 | However, DSLog-Reader does not seem to be fully correct: 17 | * It unpacks the "packet loss" value as a *signed* integer. Unsigned gives more sensible answers (https://github.com/orangelight/DSLOG-Reader/issues/3) 18 | * There is a bug in FormMain::BoolNameToValue(): wrong string for "Robo Tele" and "Watchdog" returns the Brownout value. (https://github.com/orangelight/DSLOG-Reader/issues/2) 19 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Paul Rensing 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining 6 | a copy of this software and associated documentation files (the 7 | "Software"), to deal in the Software without restriction, including 8 | without limitation the rights to use, copy, modify, merge, publish, 9 | distribute, sublicense, and/or sell copies of the Software, and to 10 | permit persons to whom the Software is furnished to do so, subject to 11 | the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 20 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 22 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /dslogs/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Austin Page 4 | Copyright (c) 2019 Paul Rensing 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining 7 | a copy of this software and associated documentation files (the 8 | "Software"), to deal in the Software without restriction, including 9 | without limitation the rights to use, copy, modify, merge, publish, 10 | distribute, sublicense, and/or sell copies of the Software, and to 11 | permit persons to whom the Software is furnished to do so, subject to 12 | the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be 15 | included in all copies or substantial portions of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 19 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 21 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 22 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 23 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
24 | -------------------------------------------------------------------------------- /dslogs/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | import sys 4 | from os.path import dirname, join 5 | import subprocess 6 | 7 | base_package = "dslogs" 8 | 9 | setup_dir = dirname(__file__) 10 | git_dir = join(setup_dir, ".git") 11 | version_file = join(setup_dir, base_package, "version.py") 12 | 13 | # Automatically generate a version.py based on the git version 14 | p = subprocess.Popen( 15 | ["git", "describe", "--tags", "--long", "--dirty=-dirty"], 16 | stdout=subprocess.PIPE, 17 | stderr=subprocess.PIPE, 18 | ) 19 | out, err = p.communicate() 20 | # Make sure the git version has at least one tag 21 | if err: 22 | print("Error: You need to create a tag for this repo to use the builder") 23 | sys.exit(1) 24 | 25 | # Convert git version to PEP440 compliant version 26 | # - Older versions of pip choke on local identifiers, so we can't include the git commit 27 | version, commits, local = out.decode("utf-8").rstrip().split("-", 2) 28 | if commits != "0" or "-dirty" in local: 29 | version = "%s.post0.dev%s" % (version, commits) 30 | 31 | # Create the version.py file 32 | with open(version_file, "w") as fp: 33 | fp.write("# Autogenerated by setup.py\n__version__ = '{0}'\n".format(version)) 34 | 35 | # read the contents of your README file 36 | with open(join(setup_dir, 'README.md'), encoding='utf-8') as f: 37 | long_description = f.read() 38 | 39 | 40 | setup( 41 | name=base_package, 42 | version=version, 43 | description='FIRST FRC Driver Station log streamer', 44 | author='Paul Rensing', 45 | author_email='prensing@ligerbots.org', 46 | long_description=long_description, 47 | long_description_content_type='text/markdown', 48 | url='http://github.com/ligerbots/dslogparser', 49 | license='MIT', 50 | packages=['dslogs'], 51 | install_requires=['arrow', 'dslogparser'], 52 | keywords=['FRC', 'logfiles'], 53 | classifiers=[ 54 | 'Development Status :: 4 - Beta', 55 | 'Intended Audience :: Developers', 56 | 'Intended Audience :: Education', 57 | 'License :: OSI Approved :: MIT License', 58 | 'Programming Language :: Python :: 3', 59 | ] 60 | ) 61 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | import sys 4 | from os.path import dirname, exists, join 5 | import subprocess 6 | 7 | base_package = "dslogparser" 8 | 9 | setup_dir = dirname(__file__) 10 | git_dir = join(setup_dir, ".git") 11 | version_file = join(setup_dir, base_package, "version.py") 12 | 13 | # Automatically generate a version.py based on the git version 14 | if exists(git_dir): 15 | p = subprocess.Popen( 16 | ["git", "describe", "--tags", "--long", "--dirty=-dirty"], 17 | stdout=subprocess.PIPE, 18 | stderr=subprocess.PIPE, 19 | ) 20 | out, err = p.communicate() 21 | # Make sure the git version has at least one tag 22 | if err: 23 | print("Error: You need to create a tag for this repo to use the builder") 24 | sys.exit(1) 25 | 26 | # Convert git version to PEP440 compliant version 27 | # - Older versions of pip choke on local identifiers, so we can't include the git commit 28 | v, commits, local = out.decode("utf-8").rstrip().split("-", 2) 29 | if commits != "0" or "-dirty" in local: 30 | v = "%s.post0.dev%s" % (v, commits) 31 | 32 | # Create the version.py file 33 | with open(version_file, "w") 
as fp: 34 | fp.write("# Autogenerated by setup.py\n__version__ = '{0}'\n".format(v)) 35 | 36 | if exists(version_file): 37 | with open(join(setup_dir, base_package, "version.py"), "r") as fp: 38 | exec(fp.read(), globals()) 39 | else: 40 | __version__ = "master" 41 | 42 | # read the contents of your README file 43 | with open(join(setup_dir, 'README-dslogparser.md'), encoding='utf-8') as f: 44 | long_description = f.read() 45 | 46 | 47 | setup( 48 | name=base_package, 49 | version=__version__, 50 | description='FIRST FRC Driver Station logs parser', 51 | author='Paul Rensing', 52 | author_email='prensing@ligerbots.org', 53 | long_description=long_description, 54 | long_description_content_type='text/markdown', 55 | url='http://github.com/ligerbots/dslogparser', 56 | license='MIT', 57 | packages=['dslogparser'], 58 | scripts=['dslog2csv.py'], 59 | install_requires=['bitstring'], 60 | keywords=['FRC', 'logfiles'], 61 | classifiers=[ 62 | 'Development Status :: 5 - Production/Stable', 63 | 'Environment :: Console', 64 | 'Intended Audience :: Developers', 65 | 'Intended Audience :: Education', 66 | 'License :: OSI Approved :: MIT License', 67 | 'Programming Language :: Python :: 3', 68 | ] 69 | ) 70 | -------------------------------------------------------------------------------- /dslogs/dslogs/dslogs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | # builtins 5 | import collections 6 | from pathlib import Path 7 | 8 | # not builtins 9 | import arrow 10 | import dslogparser 11 | 12 | 13 | def default_log(time): 14 | return { 15 | 'time': time, 16 | 'round_trip_time': 0, 17 | 'packet_loss': 0, 18 | 'voltage': 255, 19 | 'rio_cpu': 0, 20 | 'can_usage': 0, 21 | 'wifi_db': 0, 22 | 'bandwidth': 0, 23 | 'robot_disabled': False, 24 | 'robot_auto': False, 25 | 'robot_tele': False, 26 | 'ds_disabled': False, 27 | 'ds_auto': False, 28 | 'ds_tele': False, 29 | 'watchdog': False, 30 | 'brownout': False, 31 | 'pdp_id': 0, 32 | 'pdp_currents': 16 * [0, ], 33 | 'pdp_resistance': 0, 34 | 'pdp_voltage': 0, 35 | 'pdp_temp': 0, 36 | 'pdp_total_current': 0, 37 | } 38 | 39 | 40 | class DSlogs(): 41 | def __init__(self, dslog_path, dsevent_path): 42 | self.logpath = Path(dslog_path) 43 | self.eventpath = Path(dsevent_path) 44 | 45 | @property 46 | def _log_parser(self): 47 | return dslogparser.DSLogParser(str(self.logpath)) 48 | 49 | @property 50 | def _event_parser(self): 51 | return dslogparser.DSEventParser(str(self.eventpath)) 52 | 53 | @staticmethod 54 | def _continuous(gen): 55 | last_item = None 56 | for item in gen: 57 | last_item = item 58 | yield item 59 | while True: 60 | last_item['time'] = arrow.get(last_item['time']).shift(seconds=dslogparser.DSLOG_TIMESTEP).datetime 61 | yield last_item 62 | 63 | @staticmethod 64 | def _fix_time(gen): 65 | for item in gen: 66 | item['time'] = arrow.get(item['time']) 67 | yield item 68 | 69 | def _slice(self, gen, start=None, end=None): 70 | if not start: 71 | start = arrow.get(0) 72 | if not end: 73 | end = arrow.get() 74 | for item in gen: 75 | if item['time'].is_between(start, end, '[]'): 76 | yield item 77 | elif end < item['time']: 78 | break 79 | 80 | def _window(self, gen, start, end, items_per_window): 81 | if not items_per_window: 82 | raise ValueError('Must provide a window size') 83 | if not start: 84 | start = arrow.get(0) 85 | if not end: 86 | end = arrow.get() 87 | window = collections.deque(maxlen=items_per_window) 88 | middle_index = items_per_window // 2 
# left of center if even, else absolute center 89 | for item in gen: 90 | window.append(item) 91 | if (len(window) < items_per_window): 92 | continue 93 | if not window[middle_index]['time'].is_between(start, end, '[]'): 94 | continue 95 | yield window 96 | 97 | def _items(self, gen, start=None, end=None, window=None, continuous=False): 98 | if continuous: 99 | gen = self._continuous(gen) 100 | gen = self._fix_time(gen) 101 | if window: 102 | gen = self._window(gen, start=start, end=end, items_per_window=window) 103 | for item in gen: 104 | yield item 105 | 106 | elif not start and not end: 107 | for item in gen: 108 | yield item 109 | else: 110 | gen = self._slice(gen, start, end) 111 | for item in gen: 112 | yield item 113 | 114 | def logs(self, start=None, end=None, window=None, continuous=False): 115 | return self._items(self._log_parser.read_records(), start, end, window, continuous) 116 | 117 | def events(self, start=None, end=None, window=None, continuous=False): 118 | return self._items(self._event_parser.read_records(), start, end, window, continuous) 119 | 120 | def match_info(self): 121 | match_data = self._event_parser.find_match_info(str(self.eventpath)) 122 | field_time = arrow.get(match_data['field_time']) 123 | match = match_data['match_name'] 124 | start_time = None 125 | for log in self.logs(start=field_time): 126 | if log['ds_auto']: 127 | start_time = log['time'] 128 | break 129 | 130 | return start_time, match 131 | -------------------------------------------------------------------------------- /dslog2csv.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Note: should work correctly with either Python 2 or 3 4 | from __future__ import print_function 5 | 6 | # Parse the FRC drive station logs which are packed binary data 7 | 8 | import sys 9 | import os 10 | import os.path 11 | import csv 12 | from dslogparser import DSLogParser, DSEventParser 13 | 14 | # Python 2 CSV writer wants binary output, but Py3 want regular 15 | _USE_BINARY_OUTPUT = sys.version_info[0] == 2 16 | 17 | OUTPUT_COLUMNS = [ 18 | 'time', 'round_trip_time', 'packet_loss', 'voltage', 'rio_cpu', 19 | 'robot_disabled', 'robot_auto', 'robot_tele', 20 | 'ds_disabled', 'ds_auto', 'ds_tele', 21 | 'watchdog', 'brownout', 22 | 'can_usage', 'wifi_db', 'bandwidth', 23 | 'pdp_id', 24 | 'pdp_0', 'pdp_1', 'pdp_2', 'pdp_3', 'pdp_4', 'pdp_5', 'pdp_6', 'pdp_7', 25 | 'pdp_8', 'pdp_9', 'pdp_10', 'pdp_11', 'pdp_12', 'pdp_13', 'pdp_14', 'pdp_15', 26 | 'pdp_total_current', 27 | # don't output these. They are not correct 28 | # 'pdp_resistance', 'pdp_voltage', 'pdp_temp' 29 | ] 30 | 31 | 32 | def find_event_file(filename): 33 | evtname = os.path.splitext(filename)[0] + '.dsevents' 34 | if os.path.exists(evtname): 35 | return evtname 36 | return None 37 | 38 | 39 | if __name__ == '__main__': 40 | import argparse 41 | parser = argparse.ArgumentParser(description='FRC DSLog to CSV file') 42 | parser.add_argument('--one-output-per-file', action='store_true', help='Output one CSV per DSLog file') 43 | parser.add_argument('--output', '-o', help='Output filename (stdout otherwise)') 44 | parser.add_argument('--event', action='store_true', help='Input files are EVENT files') 45 | parser.add_argument('--add-match-info', action='store_true', help='Look for EVENT files matching DSLOG files and pull info') 46 | parser.add_argument('--matches-only', action='store_true', help='Ignore files which have no match info. 
Implies add-match-info') 47 | parser.add_argument('files', nargs='+', help='Input files') 48 | 49 | args = parser.parse_args() 50 | 51 | if args.matches_only: 52 | args.add_match_info = True 53 | 54 | if sys.platform == "win32": 55 | if _USE_BINARY_OUTPUT: 56 | # csv.writer requires binary output file 57 | import msvcrt 58 | msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) 59 | 60 | # do glob expanding on Windows. Linux/Mac does this automatically. 61 | import glob 62 | newfiles = [] 63 | for a in args.files: 64 | newfiles.extend(glob.glob(a)) 65 | args.files = newfiles 66 | 67 | if args.event: 68 | dsparser = DSEventParser(args.files[0]) 69 | for rec in dsparser.read_records(): 70 | print(rec['time'], rec['message']) 71 | 72 | else: 73 | col = ['inputfile', ] 74 | if args.add_match_info: 75 | col.extend(('match_name', 'field_time')) 76 | col.extend(OUTPUT_COLUMNS) 77 | 78 | if not args.one_output_per_file: 79 | if args.output: 80 | outstrm = open(args.output, 'wb' if _USE_BINARY_OUTPUT else 'w') 81 | else: 82 | outstrm = sys.stdout 83 | outcsv = csv.DictWriter(outstrm, fieldnames=col, extrasaction='ignore') 84 | outcsv.writeheader() 85 | else: 86 | outstrm = None 87 | outcsv = None 88 | 89 | for fn in args.files: 90 | match_info = None 91 | if args.add_match_info: 92 | evtfn = find_event_file(fn) 93 | if evtfn: 94 | match_info = DSEventParser.find_match_info(evtfn) 95 | 96 | if args.matches_only and not match_info: 97 | continue 98 | 99 | if args.one_output_per_file: 100 | if outstrm: 101 | outstrm.close() 102 | outname, _ = os.path.splitext(os.path.basename(fn)) 103 | outname += '.csv' 104 | outstrm = open(outname, 'wb' if _USE_BINARY_OUTPUT else 'w') 105 | outcsv = csv.DictWriter(outstrm, fieldnames=col, extrasaction='ignore') 106 | outcsv.writeheader() 107 | 108 | dsparser = DSLogParser(fn) 109 | for rec in dsparser.read_records(): 110 | rec['inputfile'] = fn 111 | if match_info: 112 | rec.update(match_info) 113 | 114 | # unpack the PDP currents to go into columns more easily 115 | for i in range(16): 116 | rec['pdp_{}'.format(i)] = rec['pdp_currents'][i] 117 | 118 | outcsv.writerow(rec) 119 | 120 | dsparser.close() 121 | 122 | if args.output or args.one_output_per_file: 123 | outstrm.close() 124 | -------------------------------------------------------------------------------- /dslogparser/dslogparser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | # Parse the FRC drive station logs which are packed binary data 5 | 6 | # Notes on comparison to DSLog-Parse: 7 | # D-P has packet_loss as a *signed* integer, which makes no sense. Unsigned looks sensible. 
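# Rough sketch of the v3 file layout, as implemented by the parsers below
# (inferred from this code, not from an official FRC specification):
#   .dslog header:  big-endian int32 version (must be 3), then a timestamp
#                   (int64 seconds plus a uint64 sub-second field, counted
#                   from 1904-01-01 UTC; see read_timestamp()).
#   .dslog records: a 10-byte status block followed by a 25-byte PDP block.
#                   Records carry no timestamp of their own; times are
#                   reconstructed by adding DSLOG_TIMESTEP (20 ms) per record.
#   .dsevents:      the same version/timestamp header, then repeated records
#                   of (timestamp, int32 message length, ASCII message).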
8 | 9 | import datetime 10 | import math 11 | import re 12 | import struct 13 | 14 | import bitstring 15 | 16 | 17 | MAX_INT64 = 2**63 - 1 18 | DSLOG_TIMESTEP = 0.020 19 | 20 | 21 | def read_timestamp(strm): 22 | # Time stamp: int64, uint64 23 | b1 = strm.read(8) 24 | b2 = strm.read(8) 25 | if not b1 or not b2: 26 | return None 27 | sec = struct.unpack('>q', b1)[0] 28 | millisec = struct.unpack('>Q', b2)[0] 29 | 30 | # for now, ignore 31 | dt = datetime.datetime(1904, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc) 32 | dt += datetime.timedelta(seconds=(sec + float(millisec) / MAX_INT64)) 33 | return dt 34 | 35 | 36 | class DSLogParser(): 37 | def __init__(self, input_file): 38 | self.strm = open(input_file, 'rb') 39 | 40 | self.record_time_offset = datetime.timedelta(seconds=DSLOG_TIMESTEP) 41 | self.curr_time = None 42 | 43 | self.read_header() 44 | return 45 | 46 | def close(self): 47 | self.strm.close() 48 | return 49 | 50 | def read_records(self): 51 | if self.version != 3: 52 | raise Exception("Unknown file version number {}".format(self.version)) 53 | 54 | while True: 55 | r = self.read_record_v3() 56 | if r is None: 57 | break 58 | yield r 59 | return 60 | 61 | def read_header(self): 62 | self.version = struct.unpack('>i', self.strm.read(4))[0] 63 | if self.version != 3: 64 | raise Exception("Unknown file version number {}".format(self.version)) 65 | 66 | self.curr_time = read_timestamp(self.strm) 67 | return 68 | 69 | def read_record_v3(self): 70 | data_bytes = self.strm.read(10) 71 | if not data_bytes or len(data_bytes) < 10: 72 | return None 73 | pdp_bytes = self.strm.read(25) 74 | if not pdp_bytes or len(pdp_bytes) < 25: 75 | # should not happen!! 76 | raise EOFError("No data for PDP. Unexpected end of file.") 77 | 78 | res = {'time': self.curr_time} 79 | res.update(self.parse_data_v3(data_bytes)) 80 | res.update(self.parse_pdp_v3(pdp_bytes)) 81 | self.curr_time += self.record_time_offset 82 | return res 83 | 84 | @staticmethod 85 | def shifted_float(raw_value, shift_right): 86 | return raw_value / (2.0**shift_right) 87 | 88 | @staticmethod 89 | def unpack_bits(raw_value): 90 | '''Unpack and invert the bits in a byte''' 91 | 92 | status_bits = bitstring.Bits(bytes=raw_value) 93 | # invert them all 94 | return [not b for b in status_bits] 95 | 96 | @staticmethod 97 | def uint_from_bytes(bytes, offset, size_in_bits): 98 | '''Pull out an unsigned int from an array of bytes, with arbitrary bit start and length''' 99 | 100 | first_byte = math.floor(offset / 8) 101 | num_bytes = math.ceil(size_in_bits / 8) 102 | 103 | if num_bytes == 1: 104 | uint = struct.unpack_from('>B', bytes, first_byte)[0] 105 | elif num_bytes == 2: 106 | uint = struct.unpack_from('>H', bytes, first_byte)[0] 107 | else: 108 | # not needed here, and general case is harder 109 | raise Exception('not supported') 110 | 111 | # Need to mask off the incorrect high bits and then shift right to get rid of the incorrect low bits 112 | left_bitshift = offset - first_byte * 8 113 | right_bitshift = num_bytes * 8 - size_in_bits - left_bitshift 114 | 115 | return (uint & (0xFFFF >> left_bitshift)) >> right_bitshift 116 | 117 | def parse_data_v3(self, data_bytes): 118 | raw_values = struct.unpack('>BBHBcBBH', data_bytes) 119 | status_bits = self.unpack_bits(raw_values[4]) 120 | 121 | res = { 122 | 'round_trip_time': self.shifted_float(raw_values[0], 1), 123 | 'packet_loss': 0.04 * raw_values[1], # not shifted 124 | 'voltage': self.shifted_float(raw_values[2], 8), 125 | 'rio_cpu': 0.01 * self.shifted_float(raw_values[3], 1), 126 
| 'can_usage': 0.01 * self.shifted_float(raw_values[5], 1), 127 | 'wifi_db': self.shifted_float(raw_values[6], 1), 128 | 'bandwidth': self.shifted_float(raw_values[7], 8), 129 | 130 | 'robot_disabled': status_bits[7], 131 | 'robot_auto': status_bits[6], 132 | 'robot_tele': status_bits[5], 133 | 'ds_disabled': status_bits[4], 134 | 'ds_auto': status_bits[3], 135 | 'ds_tele': status_bits[2], 136 | 'watchdog': status_bits[1], 137 | 'brownout': status_bits[0], 138 | } 139 | 140 | return res 141 | 142 | def parse_pdp_v3(self, pdp_bytes): 143 | # from CD post https://www.chiefdelphi.com/forums/showpost.php?p=1556451&postcount=11 144 | # pdp_offsets = (8, 18, 28, 38, 52, 62, 72, 82, 92, 102, 116, 126, 136, 146, 156, 166) 145 | 146 | # from DSLog-Reader 147 | # these make more sense in terms of defining a packing scheme, so stick with them 148 | # looks like this is a 64-bit int holding 6 10-bit numbers and they ignore the extra 4 bits 149 | pdp_offsets = (8, 18, 28, 38, 48, 58, 150 | 72, 82, 92, 102, 112, 122, 151 | 136, 146, 156, 166) 152 | 153 | vals = [] 154 | for offset in pdp_offsets: 155 | vals.append(self.shifted_float(self.uint_from_bytes(pdp_bytes, offset, 10), 3)) 156 | 157 | total_i = 0.0 158 | for i in vals: 159 | total_i += i 160 | 161 | # the scaling on R, V and T are almost certainly not correct 162 | # need to find a reference for those values 163 | res = { 164 | 'pdp_id': self.uint_from_bytes(pdp_bytes, 0, 8), 165 | 'pdp_currents': vals, 166 | 'pdp_resistance': self.uint_from_bytes(pdp_bytes, 176, 8), 167 | 'pdp_voltage': self.uint_from_bytes(pdp_bytes, 184, 8), 168 | 'pdp_temp': self.uint_from_bytes(pdp_bytes, 192, 8), 169 | 'pdp_total_current': total_i, 170 | } 171 | 172 | return res 173 | 174 | 175 | class DSEventParser(): 176 | def __init__(self, input_file): 177 | self.strm = open(input_file, 'rb') 178 | self.version = None 179 | self.start_time = None 180 | 181 | self.read_header() 182 | return 183 | 184 | def close(self): 185 | self.strm.close() 186 | return 187 | 188 | def read_records(self): 189 | if self.version != 3: 190 | raise Exception("Unknown file version number {}".format(self.version)) 191 | 192 | while True: 193 | r = self.read_record_v3() 194 | if r is None: 195 | break 196 | yield r 197 | return 198 | 199 | def read_header(self): 200 | self.version = struct.unpack('>i', self.strm.read(4))[0] 201 | if self.version != 3: 202 | raise Exception("Unknown file version number {}".format(self.version)) 203 | self.start_time = read_timestamp(self.strm) # file starttime 204 | return 205 | 206 | def read_record_v3(self): 207 | t = read_timestamp(self.strm) 208 | if t is None: 209 | return None 210 | 211 | msg_len = struct.unpack('>i', self.strm.read(4))[0] 212 | msg = struct.unpack('%ds' % msg_len, self.strm.read(msg_len))[0] 213 | msg = msg.decode('ascii', "backslashreplace") 214 | 215 | return {'time': t, 'message': msg} 216 | 217 | @staticmethod 218 | def find_match_info(filename): 219 | rdr = DSEventParser(filename) 220 | try: 221 | for rec in rdr.read_records(): 222 | m = re.match(r'FMS Connected:\s+(?P.*),\s+Field Time:\s+(?P