├── .gitattributes
├── .github
└── workflows
│ └── test.yml
├── .gitignore
├── .gitmodules
├── LICENSE.md
├── MANIFEST.in
├── README.ko-KR.md
├── README.md
├── gen_expected_output.sh
├── pylintrc
├── pyulog
├── __init__.py
├── _version.py
├── core.py
├── db.py
├── extract_gps_dump.py
├── extract_message.py
├── info.py
├── libevents_parse
├── messages.py
├── migrate_db.py
├── params.py
├── px4.py
├── px4_events.py
├── sql
│ ├── pyulog.1.sql
│ ├── pyulog.2.sql
│ ├── pyulog.3.sql
│ ├── pyulog.4.sql
│ └── pyulog.5.sql
├── ulog2csv.py
├── ulog2kml.py
└── ulog2rosbag.py
├── run_tests.sh
├── setup.cfg
├── setup.py
├── test
├── __init__.py
├── sample.ulg
├── sample_appended.ulg
├── sample_appended_info.txt
├── sample_appended_messages.txt
├── sample_appended_multiple.ulg
├── sample_appended_multiple_info.txt
├── sample_appended_multiple_messages.txt
├── sample_info.txt
├── sample_log_small.ulg
├── sample_log_small_messages.txt
├── sample_logging_tagged_and_default_params.ulg
├── sample_logging_tagged_and_default_params_messages.txt
├── sample_messages.txt
├── sample_px4_events.ulg
├── sample_px4_events_messages.txt
├── test_cli.py
├── test_db.py
├── test_extract_message.py
├── test_migration.py
├── test_px4.py
├── test_px4_events.py
└── test_ulog.py
└── versioneer.py
/.gitattributes:
--------------------------------------------------------------------------------
1 | pyulog/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Run Tests
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'main'
7 | pull_request:
8 | branches:
9 | - '*'
10 |
11 | jobs:
12 | build:
13 | runs-on: ubuntu-latest
14 | strategy:
15 | fail-fast: false # don't cancel if a job from the matrix fails
16 | matrix:
17 | python-version: ["3.8", "3.9", "3.10", "3.11"]
18 |
19 | steps:
20 | - uses: actions/checkout@v2
21 | with:
22 | submodules: 'recursive'
23 | - name: Set up Python ${{ matrix.python-version }}
24 | uses: actions/setup-python@v2
25 | with:
26 | python-version: ${{ matrix.python-version }}
27 | - name: Install Dependencies
28 | run: |
29 | pip install pylint ddt pytest cython
30 | python setup.py build install
31 | - name : Running Tests
32 | run: |
33 | ./run_tests.sh
34 |
35 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.pyc
3 | build/
4 | dist/
5 | *.egg-info
6 | .eggs/
7 | *.sqlite3
8 | venv/
9 |
10 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "3rd_party/libevents"]
2 | path = 3rd_party/libevents
3 | url = https://github.com/mavlink/libevents.git
4 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright (c) 2016, PX4 Pro Drone Autopilot
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | * Neither the name of GpsDrivers nor the names of its
15 | contributors may be used to endorse or promote products derived from
16 | this software without specific prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 |
29 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include pyulog/_version.py
3 | include LICENSE.md
4 | recursive-include pyulog/sql *
5 |
--------------------------------------------------------------------------------
/README.ko-KR.md:
--------------------------------------------------------------------------------
1 | # pyulog
2 |
3 | 이 레포지토리에는 ULog 파일 및 스크립트를 파싱하는 python 패키지가 포함되어 있습니다.
4 | ULog는 self-describing 형식을 따르며, 해당 관련 문서는 다음과 같습니다(https://docs.px4.io/main/en/dev_log/ulog_file_format.html).
5 |
6 | 제공되는 명령어 스크립트는(command line scripts)는 아래와 같습니다:
7 | - `ulog_info`: ULog 파일의 정보를 나타냅니다.
8 | - `ulog_messages`: ULog 파일에 기록된 로그 메시지(logged messages)를 출력합니다.
9 | - `ulog_params`: ULog 파일에 저장된 파라미터들을 추출합니다.
10 | - `ulog2csv`: ULog 파일을 CSV 파일로 변환합니다.
11 | - `ulog2kml`: ULog 파일을 KML 파일로 변환합니다.
12 |
13 |
14 | ## 설치
15 |
16 | 패키지 설치:
17 | ```bash
18 | pip install pyulog
19 | ```
20 |
21 | 소스코드를 통한 설치:
22 | ```bash
23 | python setup.py build install
24 | ```
25 |
26 | ## 추가 개발
27 |
28 | 코드를 쉽게 변경 및 편집할 수 있는 형식으로 설치하려면 다음 명령 사용(해당 명령은 패키지를 Repo에 대한 링크로 설치합니다):
29 |
30 | ```bash
31 | pip install -e .
32 | ```
33 |
34 | ## 테스트
35 |
36 | ```bash
37 | pytest test
38 | ```
39 |
40 | 또는,
41 |
42 | ```bash
43 | python setup.py test
44 | ```
45 |
46 | ## 코드 검사(Code Checking)
47 |
48 | ```bash
49 | pylint pyulog/*.py
50 | ```
51 |
52 |
53 | ## 명령어 스크립트
54 |
55 |
56 | 모든 스크립트는 시스템 전체 어플리케이션단에서 설치되며(Python 또는 시스템 경로를 지정하지 않고
57 | 커맨드 라인에서 호출), `-h` 플래그를 통해 각 스크립트의 사용법을 확인할 수 있습니다.
58 |
59 | 아래 섹션에서는 사용 구문 및 샘플 출력을 나타냅니다. (from [test/sample.ulg](test/sample.ulg)):
60 |
61 | ### ULog 파일로부터 정보 출력 (ulog_info)
62 |
63 | 사용:
64 | ```bash
65 | usage: ulog_info [-h] [-v] file.ulg
66 |
67 | Display information from an ULog file
68 |
69 | positional arguments:
70 | file.ulg ULog input file
71 |
72 | optional arguments:
73 | -h, --help show this help message and exit
74 | -v, --verbose Verbose output
75 | ```
76 |
77 | 결과 예시:
78 | ```bash
79 | $ ulog_info sample.ulg
80 | Logging start time: 0:01:52, duration: 0:01:08
81 | Dropouts: count: 4, total duration: 0.1 s, max: 62 ms, mean: 29 ms
82 | Info Messages:
83 | sys_name: PX4
84 | time_ref_utc: 0
85 | ver_hw: AUAV_X21
86 | ver_sw: fd483321a5cf50ead91164356d15aa474643aa73
87 |
88 | Name (multi id, message size in bytes) number of data points, total bytes
89 | actuator_controls_0 (0, 48) 3269 156912
90 | actuator_outputs (0, 76) 1311 99636
91 | commander_state (0, 9) 678 6102
92 | control_state (0, 122) 3268 398696
93 | cpuload (0, 16) 69 1104
94 | ekf2_innovations (0, 140) 3271 457940
95 | estimator_status (0, 309) 1311 405099
96 | sensor_combined (0, 72) 17070 1229040
97 | sensor_preflight (0, 16) 17072 273152
98 | telemetry_status (0, 36) 70 2520
99 | vehicle_attitude (0, 36) 6461 232596
100 | vehicle_attitude_setpoint (0, 55) 3272 179960
101 | vehicle_local_position (0, 123) 678 83394
102 | vehicle_rates_setpoint (0, 24) 6448 154752
103 | vehicle_status (0, 45) 294 13230
104 | ```
105 |
106 | ### ULog 파일에 기록된 로그 메시지 출력 (ulog_messages)
107 |
108 | 사용:
109 | ```
110 | usage: ulog_messages [-h] file.ulg
111 |
112 | Display logged messages from an ULog file
113 |
114 | positional arguments:
115 | file.ulg ULog input file
116 |
117 | optional arguments:
118 | -h, --help show this help message and exit
119 | ```
120 |
121 | 결과 예시:
122 | ```
123 | ubuntu@ubuntu:~/github/pyulog/test$ ulog_messages sample.ulg
124 | 0:02:38 ERROR: [sensors] no barometer found on /dev/baro0 (2)
125 | 0:02:42 ERROR: [sensors] no barometer found on /dev/baro0 (2)
126 | 0:02:51 ERROR: [sensors] no barometer found on /dev/baro0 (2)
127 | 0:02:56 ERROR: [sensors] no barometer found on /dev/baro0 (2)
128 | ```
129 |
130 | ### ULog 파일에 저장된 파라미터 추출 (ulog_params)
131 |
132 | 사용:
133 | ```
134 | usage: ulog_params [-h] [-d DELIMITER] [-i] [-o] file.ulg [params.txt]
135 |
136 | Extract parameters from an ULog file
137 |
138 | positional arguments:
139 | file.ulg ULog input file
140 | params.txt Output filename (default=stdout)
141 |
142 | optional arguments:
143 | -h, --help show this help message and exit
144 | -d DELIMITER, --delimiter DELIMITER
145 | Use delimiter in CSV (default is ',')
146 | -i, --initial Only extract initial parameters
147 | -o, --octave Use Octave format
148 | ```
149 |
150 | 결과 예시 (콘솔 출력):
151 | ```
152 | ubuntu@ubuntu:~/github/pyulog/test$ ulog_params sample.ulg
153 | ATT_ACC_COMP,1
154 | ATT_BIAS_MAX,0.0500000007451
155 | ATT_EXT_HDG_M,0
156 | ...
157 | VT_OPT_RECOV_EN,0
158 | VT_TYPE,0
159 | VT_WV_LND_EN,0
160 | VT_WV_LTR_EN,0
161 | VT_WV_YAWR_SCL,0.15000000596
162 | ```
163 |
164 | ### ULog 파일을 CSV 파일로 변환 (ulog2csv)
165 |
166 | 사용:
167 | ```
168 | usage: ulog2csv [-h] [-m MESSAGES] [-d DELIMITER] [-o DIR] file.ulg
169 |
170 | Convert ULog to CSV
171 |
172 | positional arguments:
173 | file.ulg ULog input file
174 |
175 | optional arguments:
176 | -h, --help show this help message and exit
177 | -m MESSAGES, --messages MESSAGES
178 | Only consider given messages. Must be a comma-
179 | separated list of names, like
180 | 'sensor_combined,vehicle_gps_position'
181 | -d DELIMITER, --delimiter DELIMITER
182 | Use delimiter in CSV (default is ',')
183 | -o DIR, --output DIR Output directory (default is same as input file)
184 | ```
185 |
186 |
187 | ### ULog 파일을 KML 파일로 변환 (ulog2kml)
188 |
189 | > **Note** 모듈 `simplekml` 이 사용자의 PC에 설치되어 있어야 합니다. 만약 설치되어 있지 않다면, 아래 명령어를 통해 설치하십시오.
190 | ```
191 | pip install simplekml
192 | ```
193 |
194 | 사용:
195 | ```
196 | usage: ulog2kml [-h] [-o OUTPUT_FILENAME] [--topic TOPIC_NAME]
197 | [--camera-trigger CAMERA_TRIGGER]
198 | file.ulg
199 |
200 | Convert ULog to KML
201 |
202 | positional arguments:
203 | file.ulg ULog input file
204 |
205 | optional arguments:
206 | -h, --help show this help message and exit
207 | -o OUTPUT_FILENAME, --output OUTPUT_FILENAME
208 | output filename
209 | --topic TOPIC_NAME topic name with position data
210 | (default=vehicle_gps_position)
211 | --camera-trigger CAMERA_TRIGGER
212 | Camera trigger topic name (e.g. camera_capture)
213 | ```
214 |
215 | ### ULog 파일을 rosbag 파일로 변환 (ulog2rosbag)
216 |
217 | > **Note** `px4_msgs`가 설치된 ROS 환경이 필요합니다.
218 |
219 | 사용:
220 | ```
221 | usage: ulog2rosbag [-h] [-m MESSAGES] file.ulg result.bag
222 |
223 | Convert ULog to rosbag
224 |
225 | positional arguments:
226 | file.ulg ULog input file
227 | result.ulg rosbag output file
228 |
229 | optional arguments:
230 | -h, --help show this help message and exit
231 | -m MESSAGES, --messages MESSAGES
232 | Only consider given messages. Must be a comma-
233 | separated list of names, like
234 | 'sensor_combined,vehicle_gps_position'
235 | ```
236 | ### Migrate/setup the database for use with the DatabaseULog class (ulog_migratedb)
237 |
238 | 사용:
239 | ```
240 | usage: ulog_migratedb [-h] [-d DB_PATH] [-n] [-s SQL_DIR] [-f]
241 |
242 | Setup the database for DatabaseULog
243 |
244 | optional arguments:
245 | -h, --help show this help message and exit
246 | -d DB_PATH, --database DB_PATH
247 | Path to the database file
248 | -n, --noop Only print results, do not execute migration scripts.
249 | -s SQL_DIR, --sql SQL_DIR
250 | Directory with migration SQL files
251 | -f, --force Run the migration script even if the database is not
252 | created with this script.
253 |
254 | ```
255 | 결과 예시 (콘솔 출력):
256 | ```
257 | ubuntu@ubuntu:~/github/pyulog$ ulog_migratedb
258 | Using migration files in /home/ubuntu/github/pyulog/pyulog/sql.
259 | Database file pyulog.sqlite3 not found, creating it from scratch.
260 | Current schema version: 0 (database) and 1 (code).
261 | Executing /home/ubuntu/github/pyulog/pyulog/sql/pyulog.1.sql.
262 | Migration done.
263 | ```
264 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pyulog
2 |
3 | This repository contains a python package to parse ULog files and scripts to
4 | convert and display them. ULog is a self-describing logging format which is
5 | documented [here](https://docs.px4.io/main/en/dev_log/ulog_file_format.html).
6 |
7 | The provided [command line scripts](#scripts) are:
8 | - `ulog_info`: display information from an ULog file.
9 | - `ulog_messages`: display logged messages from an ULog file.
10 | - `ulog_params`: extract parameters from an ULog file.
11 | - `ulog2csv`: convert ULog to CSV files.
12 | - `ulog2kml`: convert ULog to KML files.
13 |
14 |
15 | ## Installation
16 |
17 | Installation with package manager:
18 | ```bash
19 | pip install pyulog
20 | ```
21 |
22 | Installation from source:
23 | ```bash
24 | python setup.py build install
25 | ```
26 |
27 | ## Development
28 |
29 | To install the code in a format so that it can be easily edited use the
30 | following command (this will install the package as a link to the repo):
31 |
32 | ```bash
33 | pip install -e .
34 | ```
35 |
36 | ## Testing
37 |
38 | ```bash
39 | pytest test
40 | ```
41 |
42 | or
43 |
44 | ```bash
45 | python setup.py test
46 | ```
47 |
48 | ## Code Checking
49 |
50 | ```bash
51 | pylint pyulog/*.py
52 | ```
53 |
54 |
55 | ## Command Line Scripts
56 |
57 | All scripts are installed as system-wide applications (i.e. they be called on the command line without specifying Python or a system path), and support the `-h` flag for getting usage instructions.
58 |
59 | The sections below show the usage syntax and sample output (from [test/sample.ulg](test/sample.ulg)):
60 |
61 | ### Display information from an ULog file (ulog_info)
62 |
63 | Usage:
64 | ```bash
65 | usage: ulog_info [-h] [-v] file.ulg
66 |
67 | Display information from an ULog file
68 |
69 | positional arguments:
70 | file.ulg ULog input file
71 |
72 | optional arguments:
73 | -h, --help show this help message and exit
74 | -v, --verbose Verbose output
75 | ```
76 |
77 | Example output:
78 | ```bash
79 | $ ulog_info sample.ulg
80 | Logging start time: 0:01:52, duration: 0:01:08
81 | Dropouts: count: 4, total duration: 0.1 s, max: 62 ms, mean: 29 ms
82 | Info Messages:
83 | sys_name: PX4
84 | time_ref_utc: 0
85 | ver_hw: AUAV_X21
86 | ver_sw: fd483321a5cf50ead91164356d15aa474643aa73
87 |
88 | Name (multi id, message size in bytes) number of data points, total bytes
89 | actuator_controls_0 (0, 48) 3269 156912
90 | actuator_outputs (0, 76) 1311 99636
91 | commander_state (0, 9) 678 6102
92 | control_state (0, 122) 3268 398696
93 | cpuload (0, 16) 69 1104
94 | ekf2_innovations (0, 140) 3271 457940
95 | estimator_status (0, 309) 1311 405099
96 | sensor_combined (0, 72) 17070 1229040
97 | sensor_preflight (0, 16) 17072 273152
98 | telemetry_status (0, 36) 70 2520
99 | vehicle_attitude (0, 36) 6461 232596
100 | vehicle_attitude_setpoint (0, 55) 3272 179960
101 | vehicle_local_position (0, 123) 678 83394
102 | vehicle_rates_setpoint (0, 24) 6448 154752
103 | vehicle_status (0, 45) 294 13230
104 | ```
105 |
106 | ### Display logged messages from an ULog file (ulog_messages)
107 |
108 | Usage:
109 | ```
110 | usage: ulog_messages [-h] file.ulg
111 |
112 | Display logged messages from an ULog file
113 |
114 | positional arguments:
115 | file.ulg ULog input file
116 |
117 | optional arguments:
118 | -h, --help show this help message and exit
119 | ```
120 |
121 | Example output:
122 | ```
123 | ubuntu@ubuntu:~/github/pyulog/test$ ulog_messages sample.ulg
124 | 0:02:38 ERROR: [sensors] no barometer found on /dev/baro0 (2)
125 | 0:02:42 ERROR: [sensors] no barometer found on /dev/baro0 (2)
126 | 0:02:51 ERROR: [sensors] no barometer found on /dev/baro0 (2)
127 | 0:02:56 ERROR: [sensors] no barometer found on /dev/baro0 (2)
128 | ```
129 |
130 | ### Extract parameters from an ULog file (ulog_params)
131 |
132 | Usage:
133 | ```
134 | usage: ulog_params [-h] [-d DELIMITER] [-i] [-o] file.ulg [params.txt]
135 |
136 | Extract parameters from an ULog file
137 |
138 | positional arguments:
139 | file.ulg ULog input file
140 | params.txt Output filename (default=stdout)
141 |
142 | optional arguments:
143 | -h, --help show this help message and exit
144 | -d DELIMITER, --delimiter DELIMITER
145 | Use delimiter in CSV (default is ',')
146 | -i, --initial Only extract initial parameters
147 | -o, --octave Use Octave format
148 | ```
149 |
150 | Example output (to console):
151 | ```
152 | ubuntu@ubuntu:~/github/pyulog/test$ ulog_params sample.ulg
153 | ATT_ACC_COMP,1
154 | ATT_BIAS_MAX,0.0500000007451
155 | ATT_EXT_HDG_M,0
156 | ...
157 | VT_OPT_RECOV_EN,0
158 | VT_TYPE,0
159 | VT_WV_LND_EN,0
160 | VT_WV_LTR_EN,0
161 | VT_WV_YAWR_SCL,0.15000000596
162 | ```
163 |
164 | ### Convert ULog to CSV files (ulog2csv)
165 |
166 | Usage:
167 | ```
168 | usage: ulog2csv [-h] [-m MESSAGES] [-d DELIMITER] [-o DIR] file.ulg
169 |
170 | Convert ULog to CSV
171 |
172 | positional arguments:
173 | file.ulg ULog input file
174 |
175 | optional arguments:
176 | -h, --help show this help message and exit
177 | -m MESSAGES, --messages MESSAGES
178 | Only consider given messages. Must be a comma-
179 | separated list of names, like
180 | 'sensor_combined,vehicle_gps_position'
181 | -d DELIMITER, --delimiter DELIMITER
182 | Use delimiter in CSV (default is ',')
183 | -o DIR, --output DIR Output directory (default is same as input file)
184 | ```
185 |
186 |
187 | ### Convert ULog to KML files (ulog2kml)
188 |
189 | > **Note** The `simplekml` module must be installed on your computer. If not already present, you can install it with:
190 | ```
191 | pip install simplekml
192 | ```
193 |
194 | Usage:
195 | ```
196 | usage: ulog2kml [-h] [-o OUTPUT_FILENAME] [--topic TOPIC_NAME]
197 | [--camera-trigger CAMERA_TRIGGER]
198 | file.ulg
199 |
200 | Convert ULog to KML
201 |
202 | positional arguments:
203 | file.ulg ULog input file
204 |
205 | optional arguments:
206 | -h, --help show this help message and exit
207 | -o OUTPUT_FILENAME, --output OUTPUT_FILENAME
208 | output filename
209 | --topic TOPIC_NAME topic name with position data
210 | (default=vehicle_gps_position)
211 | --camera-trigger CAMERA_TRIGGER
212 | Camera trigger topic name (e.g. camera_capture)
213 | ```
214 |
215 | ### Convert ULog to rosbag files (ulog2rosbag)
216 |
217 | > **Note** You need a ROS environment with `px4_msgs` built and sourced.
218 |
219 | Usage:
220 | ```
221 | usage: ulog2rosbag [-h] [-m MESSAGES] file.ulg result.bag
222 |
223 | Convert ULog to rosbag
224 |
225 | positional arguments:
226 | file.ulg ULog input file
227 | result.ulg rosbag output file
228 |
229 | optional arguments:
230 | -h, --help show this help message and exit
231 | -m MESSAGES, --messages MESSAGES
232 | Only consider given messages. Must be a comma-
233 | separated list of names, like
234 | 'sensor_combined,vehicle_gps_position'
235 | ```
236 | ### Migrate/setup the database for use with the DatabaseULog class (ulog_migratedb)
237 |
238 | > **Warning** This command must be run whenever the schema changes, otherwise DatabaseULog won't function.
239 |
240 | > **Warning** Even if you store logs in the database, you should keep the original .ulg files. Otherwise you may lose your data.
241 |
242 |
243 | Usage:
244 | ```
245 | usage: ulog_migratedb [-h] [-d DB_PATH] [-n] [-s SQL_DIR] [-f]
246 |
247 | Setup the database for DatabaseULog.
248 |
249 | optional arguments:
250 | -h, --help show this help message and exit
251 | -d DB_PATH, --database DB_PATH
252 | Path to the database file
253 | -n, --noop Only print results, do not execute migration scripts.
254 | -s SQL_DIR, --sql SQL_DIR
255 | Directory with migration SQL files
256 | -f, --force Run the migration script even if the database is not
257 | created with this script.
258 |
259 | ```
260 | Example output (to console):
261 | ```
262 | ubuntu@ubuntu:~/github/pyulog$ ulog_migratedb
263 | Using migration files in /home/ubuntu/github/pyulog/pyulog/sql.
264 | Database file pyulog.sqlite3 not found, creating it from scratch.
265 | Current schema version: 0 (database) and 1 (code).
266 | Executing /home/ubuntu/github/pyulog/pyulog/sql/pyulog.1.sql.
267 | Migration done.
268 | ```
269 |
--------------------------------------------------------------------------------
/gen_expected_output.sh:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 |
3 | # generate the expected output for the unittests
4 |
5 | for f in test/*.ulg; do
6 | echo "Processing $f"
7 | ulog_info -v "$f" > "${f%.*}"_info.txt
8 | ulog_messages "$f" > "${f%.*}"_messages.txt
9 | #ulog_params "$f" > "${f%.*}"_params.txt
10 | done
11 |
--------------------------------------------------------------------------------
/pylintrc:
--------------------------------------------------------------------------------
1 | [MAIN]
2 |
3 | # Analyse import fallback blocks. This can be used to support both Python 2 and
4 | # 3 compatible code, which means that the block might have code that exists
5 | # only in one or another interpreter, leading to false positives when analysed.
6 | analyse-fallback-blocks=no
7 |
8 | # Clear in-memory caches upon conclusion of linting. Useful if running pylint
9 | # in a server-like mode.
10 | clear-cache-post-run=no
11 |
12 | # Load and enable all available extensions. Use --list-extensions to see a list
13 | # all available extensions.
14 | #enable-all-extensions=
15 |
16 | # In error mode, messages with a category besides ERROR or FATAL are
17 | # suppressed, and no reports are done by default. Error mode is compatible with
18 | # disabling specific errors.
19 | #errors-only=
20 |
21 | # Always return a 0 (non-error) status code, even if lint errors are found.
22 | # This is primarily useful in continuous integration scripts.
23 | #exit-zero=
24 |
25 | # A comma-separated list of package or module names from where C extensions may
26 | # be loaded. Extensions are loading into the active Python interpreter and may
27 | # run arbitrary code.
28 | extension-pkg-allow-list=
29 |
30 | # A comma-separated list of package or module names from where C extensions may
31 | # be loaded. Extensions are loading into the active Python interpreter and may
32 | # run arbitrary code. (This is an alternative name to extension-pkg-allow-list
33 | # for backward compatibility.)
34 | extension-pkg-whitelist=
35 |
36 | # Return non-zero exit code if any of these messages/categories are detected,
37 | # even if score is above --fail-under value. Syntax same as enable. Messages
38 | # specified are enabled, while categories only check already-enabled messages.
39 | fail-on=
40 |
41 | # Specify a score threshold under which the program will exit with error.
42 | fail-under=10
43 |
44 | # Interpret the stdin as a python script, whose filename needs to be passed as
45 | # the module_or_package argument.
46 | #from-stdin=
47 |
48 | # Files or directories to be skipped. They should be base names, not paths.
49 | ignore=CVS
50 |
51 | # Add files or directories matching the regular expressions patterns to the
52 | # ignore-list. The regex matches against paths and can be in Posix or Windows
53 | # format. Because '\\' represents the directory delimiter on Windows systems,
54 | # it can't be used as an escape character.
55 | ignore-paths=
56 |
57 | # Files or directories matching the regular expression patterns are skipped.
58 | # The regex matches against base names, not paths. The default value ignores
59 | # Emacs file locks
60 | ignore-patterns=
61 |
62 | # List of module names for which member attributes should not be checked and
63 | # will not be imported (useful for modules/projects where namespaces are
64 | # manipulated during runtime and thus existing member attributes cannot be
65 | # deduced by static analysis). It supports qualified module names, as well as
66 | # Unix pattern matching.
67 | ignored-modules=numpy
68 |
69 | # Python code to execute, usually for sys.path manipulation such as
70 | # pygtk.require().
71 | #init-hook=
72 |
73 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
74 | # number of processors available to use, and will cap the count on Windows to
75 | # avoid hangs.
76 | jobs=1
77 |
78 | # Control the amount of potential inferred values when inferring a single
79 | # object. This can help the performance when dealing with large functions or
80 | # complex, nested conditions.
81 | limit-inference-results=100
82 |
83 | # List of plugins (as comma separated values of python module names) to load,
84 | # usually to register additional checkers.
85 | load-plugins=
86 |
87 | # Pickle collected data for later comparisons.
88 | persistent=yes
89 |
90 | # Resolve imports to .pyi stubs if available. May reduce no-member messages and
91 | # increase not-an-iterable messages.
92 | prefer-stubs=no
93 |
94 | # Minimum Python version to use for version dependent checks. Will default to
95 | # the version used to run pylint.
96 | py-version=3.13
97 |
98 | # Discover python modules and packages in the file system subtree.
99 | recursive=no
100 |
101 | # Add paths to the list of the source roots. Supports globbing patterns. The
102 | # source root is an absolute path or a path relative to the current working
103 | # directory used to determine a package namespace for modules located under the
104 | # source root.
105 | source-roots=
106 |
107 | # When enabled, pylint would attempt to guess common misconfiguration and emit
108 | # user-friendly hints instead of false-positive error messages.
109 | suggestion-mode=yes
110 |
111 | # Allow loading of arbitrary C extensions. Extensions are imported into the
112 | # active Python interpreter and may run arbitrary code.
113 | unsafe-load-any-extension=no
114 |
115 | # In verbose mode, extra non-checker-related info will be displayed.
116 | #verbose=
117 |
118 |
119 | [BASIC]
120 |
121 | # Naming style matching correct argument names.
122 | argument-naming-style=snake_case
123 |
124 | # Regular expression matching correct argument names. Overrides argument-
125 | # naming-style. If left empty, argument names will be checked with the set
126 | # naming style.
127 | argument-rgx=[a-z_][a-z0-9_]{2,30}$
128 |
129 | # Naming style matching correct attribute names.
130 | attr-naming-style=snake_case
131 |
132 | # Regular expression matching correct attribute names. Overrides attr-naming-
133 | # style. If left empty, attribute names will be checked with the set naming
134 | # style.
135 | attr-rgx=[a-z_][a-z0-9_]{2,30}$
136 |
137 | # Bad variable names which should always be refused, separated by a comma.
138 | bad-names=foo,
139 | bar,
140 | baz,
141 | toto,
142 | tutu,
143 | tata
144 |
145 | # Bad variable names regexes, separated by a comma. If names match any regex,
146 | # they will always be refused
147 | bad-names-rgxs=
148 |
149 | # Naming style matching correct class attribute names.
150 | class-attribute-naming-style=any
151 |
152 | # Regular expression matching correct class attribute names. Overrides class-
153 | # attribute-naming-style. If left empty, class attribute names will be checked
154 | # with the set naming style.
155 | class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
156 |
157 | # Naming style matching correct class constant names.
158 | class-const-naming-style=UPPER_CASE
159 |
160 | # Regular expression matching correct class constant names. Overrides class-
161 | # const-naming-style. If left empty, class constant names will be checked with
162 | # the set naming style.
163 | #class-const-rgx=
164 |
165 | # Naming style matching correct class names.
166 | class-naming-style=PascalCase
167 |
168 | # Regular expression matching correct class names. Overrides class-naming-
169 | # style. If left empty, class names will be checked with the set naming style.
170 | class-rgx=[A-Z_][a-zA-Z0-9]+$
171 |
172 | # Naming style matching correct constant names.
173 | const-naming-style=UPPER_CASE
174 |
175 | # Regular expression matching correct constant names. Overrides const-naming-
176 | # style. If left empty, constant names will be checked with the set naming
177 | # style.
178 | const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
179 |
180 | # Minimum line length for functions/classes that require docstrings, shorter
181 | # ones are exempt.
182 | docstring-min-length=-1
183 |
184 | # Naming style matching correct function names.
185 | function-naming-style=snake_case
186 |
187 | # Regular expression matching correct function names. Overrides function-
188 | # naming-style. If left empty, function names will be checked with the set
189 | # naming style.
190 | function-rgx=[a-z_][a-z0-9_]{2,30}$
191 |
192 | # Good variable names which should always be accepted, separated by a comma.
193 | good-names=i,
194 | j,
195 | k,
196 | ex,
197 | Run,
198 | _,
199 | t,
200 | x,
201 | q
202 |
203 | # Good variable names regexes, separated by a comma. If names match any regex,
204 | # they will always be accepted
205 | good-names-rgxs=
206 |
207 | # Include a hint for the correct naming format with invalid-name.
208 | include-naming-hint=no
209 |
210 | # Naming style matching correct inline iteration names.
211 | inlinevar-naming-style=any
212 |
213 | # Regular expression matching correct inline iteration names. Overrides
214 | # inlinevar-naming-style. If left empty, inline iteration names will be checked
215 | # with the set naming style.
216 | inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
217 |
218 | # Naming style matching correct method names.
219 | method-naming-style=snake_case
220 |
221 | # Regular expression matching correct method names. Overrides method-naming-
222 | # style. If left empty, method names will be checked with the set naming style.
223 | method-rgx=[a-z_][a-z0-9_]{2,40}$
224 |
225 | # Naming style matching correct module names.
226 | module-naming-style=snake_case
227 |
228 | # Regular expression matching correct module names. Overrides module-naming-
229 | # style. If left empty, module names will be checked with the set naming style.
230 | module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
231 |
232 | # Colon-delimited sets of names that determine each other's naming style when
233 | # the name regexes allow several styles.
234 | name-group=
235 |
236 | # Regular expression which should only match function or class names that do
237 | # not require a docstring.
238 | no-docstring-rgx=^_
239 |
240 | # List of decorators that produce properties, such as abc.abstractproperty. Add
241 | # to this list to register other decorators that produce valid properties.
242 | # These decorators are taken in consideration only for invalid-name.
243 | property-classes=abc.abstractproperty
244 |
245 | # Regular expression matching correct type alias names. If left empty, type
246 | # alias names will be checked with the set naming style.
247 | #typealias-rgx=
248 |
249 | # Regular expression matching correct type variable names. If left empty, type
250 | # variable names will be checked with the set naming style.
251 | #typevar-rgx=
252 |
253 | # Naming style matching correct variable names.
254 | variable-naming-style=snake_case
255 |
256 | # Regular expression matching correct variable names. Overrides variable-
257 | # naming-style. If left empty, variable names will be checked with the set
258 | # naming style.
259 | variable-rgx=[a-z_][a-z0-9_]{2,30}$
260 |
261 |
262 | [CLASSES]
263 |
264 | # Warn about protected attribute access inside special methods
265 | check-protected-access-in-special-methods=no
266 |
267 | # List of method names used to declare (i.e. assign) instance attributes.
268 | defining-attr-methods=__init__,
269 | __new__,
270 | setUp
271 |
272 | # List of member names, which should be excluded from the protected access
273 | # warning.
274 | exclude-protected=_asdict,
275 | _fields,
276 | _replace,
277 | _source,
278 | _make
279 |
280 | # List of valid names for the first argument in a class method.
281 | valid-classmethod-first-arg=cls
282 |
283 | # List of valid names for the first argument in a metaclass class method.
284 | valid-metaclass-classmethod-first-arg=mcs
285 |
286 |
287 | [DESIGN]
288 |
289 | # List of regular expressions of class ancestor names to ignore when counting
290 | # public methods (see R0903)
291 | exclude-too-few-public-methods=
292 |
293 | # List of qualified class names to ignore when counting class parents (see
294 | # R0901)
295 | ignored-parents=
296 |
297 | # Maximum number of arguments for function / method.
298 | max-args=7
299 |
300 | # Maximum number of attributes for a class (see R0902).
301 | max-attributes=7
302 |
303 | # Maximum number of boolean expressions in an if statement (see R0916).
304 | max-bool-expr=5
305 |
306 | # Maximum number of branch for function / method body.
307 | max-branches=12
308 |
309 | # Maximum number of locals for function / method body.
310 | max-locals=20
311 |
312 | # Maximum number of parents for a class (see R0901).
313 | max-parents=7
314 |
315 | # Maximum number of positional arguments for function / method.
316 | max-positional-arguments=10
317 |
318 | # Maximum number of public methods for a class (see R0904).
319 | max-public-methods=25
320 |
321 | # Maximum number of return / yield for function / method body.
322 | max-returns=6
323 |
324 | # Maximum number of statements in function / method body.
325 | max-statements=100
326 |
327 | # Minimum number of public methods for a class (see R0903).
328 | min-public-methods=2
329 |
330 |
331 | [EXCEPTIONS]
332 |
333 | # Exceptions that will emit a warning when caught.
334 | overgeneral-exceptions=builtins.Exception
335 |
336 |
337 | [FORMAT]
338 |
339 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
340 | expected-line-ending-format=
341 |
342 | # Regexp for a line that is allowed to be longer than the limit.
343 | ignore-long-lines=^\s*(# )?<?https?://\S+>?$
344 |
345 | # Number of spaces of indent required inside a hanging or continued line.
346 | indent-after-paren=4
347 |
348 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
349 | # tab).
350 | indent-string=' '
351 |
352 | # Maximum number of characters on a single line.
353 | max-line-length=100
354 |
355 | # Maximum number of lines in a module.
356 | max-module-lines=1200
357 |
358 | # Allow the body of a class to be on the same line as the declaration if body
359 | # contains single statement.
360 | single-line-class-stmt=no
361 |
362 | # Allow the body of an if to be on the same line as the test if there is no
363 | # else.
364 | single-line-if-stmt=no
365 |
366 |
367 | [IMPORTS]
368 |
369 | # List of modules that can be imported at any level, not just the top level
370 | # one.
371 | allow-any-import-level=
372 |
373 | # Allow explicit reexports by alias from a package __init__.
374 | allow-reexport-from-package=no
375 |
376 | # Allow wildcard imports from modules that define __all__.
377 | allow-wildcard-with-all=no
378 |
379 | # Deprecated modules which should not be used, separated by a comma.
380 | deprecated-modules=regsub,
381 | TERMIOS,
382 | Bastion,
383 | rexec
384 |
385 | # Output a graph (.gv or any supported image format) of external dependencies
386 | # to the given file (report RP0402 must not be disabled).
387 | ext-import-graph=
388 |
389 | # Output a graph (.gv or any supported image format) of all (i.e. internal and
390 | # external) dependencies to the given file (report RP0402 must not be
391 | # disabled).
392 | import-graph=
393 |
394 | # Output a graph (.gv or any supported image format) of internal dependencies
395 | # to the given file (report RP0402 must not be disabled).
396 | int-import-graph=
397 |
398 | # Force import order to recognize a module as part of the standard
399 | # compatibility libraries.
400 | known-standard-library=
401 |
402 | # Force import order to recognize a module as part of a third party library.
403 | known-third-party=enchant
404 |
405 | # Couples of modules and preferred modules, separated by a comma.
406 | preferred-modules=
407 |
408 |
409 | [LOGGING]
410 |
411 | # The type of string formatting that logging methods do. `old` means using %
412 | # formatting, `new` is for `{}` formatting.
413 | logging-format-style=old
414 |
415 | # Logging modules to check that the string format arguments are in logging
416 | # function parameter format.
417 | logging-modules=logging
418 |
419 |
420 | [MESSAGES CONTROL]
421 |
422 | # Only show warnings with the listed confidence levels. Leave empty to show
423 | # all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
424 | # UNDEFINED.
425 | confidence=HIGH,
426 | CONTROL_FLOW,
427 | INFERENCE,
428 | INFERENCE_FAILURE,
429 | UNDEFINED
430 |
431 | # Disable the message, report, category or checker with the given id(s). You
432 | # can either give multiple identifiers separated by comma (,) or put this
433 | # option multiple times (only on the command line, not in the configuration
434 | # file where it should appear only once). You can also use "--disable=all" to
435 | # disable everything first and then re-enable specific checks. For example, if
436 | # you want to run only the similarities checker, you can use "--disable=all
437 | # --enable=similarities". If you want to run only the classes checker, but have
438 | # no Warning level messages displayed, use "--disable=all --enable=classes
439 | # --disable=W".
440 | disable=raw-checker-failed,
441 | bad-inline-option,
442 | locally-disabled,
443 | file-ignored,
444 | suppressed-message,
445 | useless-suppression,
446 | deprecated-pragma,
447 | use-symbolic-message-instead,
448 | use-implicit-booleaness-not-comparison-to-string,
449 | use-implicit-booleaness-not-comparison-to-zero,
450 | fixme,
451 | trailing-newlines,
452 | multiple-statements,
453 | too-few-public-methods,
454 | use-implicit-booleaness-not-len,
455 | useless-object-inheritance,
456 | consider-using-f-string,
457 | unused-argument
458 |
459 | # Enable the message, report, category or checker with the given id(s). You can
460 | # either give multiple identifier separated by comma (,) or put this option
461 | # multiple time (only on the command line, not in the configuration file where
462 | # it should appear only once). See also the "--disable" option for examples.
463 | enable=
464 |
465 |
466 | [METHOD_ARGS]
467 |
468 | # List of qualified names (i.e., library.method) which require a timeout
469 | # parameter e.g. 'requests.api.get,requests.api.post'
470 | timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
471 |
472 |
473 | [MISCELLANEOUS]
474 |
475 | # List of note tags to take in consideration, separated by a comma.
476 | notes=FIXME,
477 | XXX,
478 | TODO
479 |
480 | # Regular expression of note tags to take in consideration.
481 | notes-rgx=
482 |
483 |
484 | [REFACTORING]
485 |
486 | # Maximum number of nested blocks for function / method body
487 | max-nested-blocks=6
488 |
489 | # Complete name of functions that never returns. When checking for
490 | # inconsistent-return-statements if a never returning function is called then
491 | # it will be considered as an explicit return statement and no message will be
492 | # printed.
493 | never-returning-functions=sys.exit,argparse.parse_error
494 |
495 | # Let 'consider-using-join' be raised when the separator to join on would be
496 | # non-empty (resulting in expected fixes of the type: ``"- " + " -
497 | # ".join(items)``)
498 | suggest-join-with-non-empty-separator=yes
499 |
500 |
501 | [REPORTS]
502 |
503 | # Python expression which should return a score less than or equal to 10. You
504 | # have access to the variables 'fatal', 'error', 'warning', 'refactor',
505 | # 'convention', and 'info' which contain the number of messages in each
506 | # category, as well as 'statement' which is the total number of statements
507 | # analyzed. This score is used by the global evaluation report (RP0004).
508 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
509 |
510 | # Template used to display messages. This is a python new-style format string
511 | # used to format the message information. See doc for all details.
512 | msg-template=
513 |
514 | # Set the output format. Available formats are: text, parseable, colorized,
515 | # json2 (improved json format), json (old json format) and msvs (visual
516 | # studio). You can also give a reporter class, e.g.
517 | # mypackage.mymodule.MyReporterClass.
518 | #output-format=
519 |
520 | # Tells whether to display a full report or only the messages.
521 | reports=no
522 |
523 | # Activate the evaluation score.
524 | score=yes
525 |
526 |
527 | [SIMILARITIES]
528 |
529 | # Comments are removed from the similarity computation
530 | ignore-comments=yes
531 |
532 | # Docstrings are removed from the similarity computation
533 | ignore-docstrings=yes
534 |
535 | # Imports are removed from the similarity computation
536 | ignore-imports=yes
537 |
538 | # Signatures are removed from the similarity computation
539 | ignore-signatures=yes
540 |
541 | # Minimum lines number of a similarity.
542 | min-similarity-lines=5
543 |
544 |
545 | [SPELLING]
546 |
547 | # Limits count of emitted suggestions for spelling mistakes.
548 | max-spelling-suggestions=4
549 |
550 | # Spelling dictionary name. No available dictionaries : You need to install
551 | # both the python package and the system dependency for enchant to work.
552 | spelling-dict=
553 |
554 | # List of comma separated words that should be considered directives if they
555 | # appear at the beginning of a comment and should not be checked.
556 | spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
557 |
558 | # List of comma separated words that should not be checked.
559 | spelling-ignore-words=
560 |
561 | # A path to a file that contains the private dictionary; one word per line.
562 | spelling-private-dict-file=
563 |
564 | # Tells whether to store unknown words to the private dictionary (see the
565 | # --spelling-private-dict-file option) instead of raising a message.
566 | spelling-store-unknown-words=no
567 |
568 |
569 | [STRING]
570 |
571 | # This flag controls whether inconsistent-quotes generates a warning when the
572 | # character used as a quote delimiter is used inconsistently within a module.
573 | check-quote-consistency=no
574 |
575 | # This flag controls whether the implicit-str-concat should generate a warning
576 | # on implicit string concatenation in sequences defined over several lines.
577 | check-str-concat-over-line-jumps=no
578 |
579 |
580 | [TYPECHECK]
581 |
582 | # List of decorators that produce context managers, such as
583 | # contextlib.contextmanager. Add to this list to register other decorators that
584 | # produce valid context managers.
585 | contextmanager-decorators=contextlib.contextmanager
586 |
587 | # List of members which are set dynamically and missed by pylint inference
588 | # system, and so shouldn't trigger E1101 when accessed. Python regular
589 | # expressions are accepted.
590 | generated-members=
591 |
592 | # Tells whether to warn about missing members when the owner of the attribute
593 | # is inferred to be None.
594 | ignore-none=yes
595 |
596 | # This flag controls whether pylint should warn about no-member and similar
597 | # checks whenever an opaque object is returned when inferring. The inference
598 | # can return multiple potential results while evaluating a Python object, but
599 | # some branches might not be evaluated, which results in partial inference. In
600 | # that case, it might be useful to still emit no-member and other checks for
601 | # the rest of the inferred objects.
602 | ignore-on-opaque-inference=yes
603 |
604 | # List of symbolic message names to ignore for Mixin members.
605 | ignored-checks-for-mixins=no-member,
606 | not-async-context-manager,
607 | not-context-manager,
608 | attribute-defined-outside-init
609 |
610 | # List of class names for which member attributes should not be checked (useful
611 | # for classes with dynamically set attributes). This supports the use of
612 | # qualified names.
613 | ignored-classes=optparse.Values,thread._local,_thread._local
614 |
615 | # Show a hint with possible names when a member name was not found. The aspect
616 | # of finding the hint is based on edit distance.
617 | missing-member-hint=yes
618 |
619 | # The minimum edit distance a name should have in order to be considered a
620 | # similar match for a missing member name.
621 | missing-member-hint-distance=1
622 |
623 | # The total number of similar names that should be taken in consideration when
624 | # showing a hint for a missing member.
625 | missing-member-max-choices=1
626 |
627 | # Regex pattern to define which classes are considered mixins.
628 | mixin-class-rgx=.*[Mm]ixin
629 |
630 | # List of decorators that change the signature of a decorated function.
631 | signature-mutators=
632 |
633 |
634 | [VARIABLES]
635 |
636 | # List of additional names supposed to be defined in builtins. Remember that
637 | # you should avoid defining new builtins when possible.
638 | additional-builtins=
639 |
640 | # Tells whether unused global variables should be treated as a violation.
641 | allow-global-unused-variables=yes
642 |
643 | # List of names allowed to shadow builtins
644 | allowed-redefined-builtins=
645 |
646 | # List of strings which can identify a callback function by name. A callback
647 | # name must start or end with one of those strings.
648 | callbacks=cb_,
649 | _cb
650 |
651 | # A regular expression matching the name of dummy variables (i.e. expected to
652 | # not be used).
653 | dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
654 |
655 | # Argument names that match this expression will be ignored.
656 | ignored-argument-names=_.*
657 |
658 | # Tells whether we should check for unused import in __init__ files.
659 | init-import=no
660 |
661 | # List of qualified module names which can have objects that can redefine
662 | # builtins.
663 | redefining-builtins-modules=six.moves,future.builtins
664 |
--------------------------------------------------------------------------------
/pyulog/__init__.py:
--------------------------------------------------------------------------------
""" Wrapper to include the main library modules """
from .core import ULog
from . import px4
from . import _version

# Derive the package version from versioneer's generated _version module,
# then drop the helper so it is not exposed as pyulog.get_versions.
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
9 |
--------------------------------------------------------------------------------
/pyulog/_version.py:
--------------------------------------------------------------------------------
1 |
2 | # This file helps to compute a version number in source trees obtained from
3 | # git-archive tarball (such as those provided by githubs download-from-tag
4 | # feature). Distribution tarballs (built by setup.py sdist) and build
5 | # directories (produced by setup.py build) will contain a much shorter file
6 | # that just contains the computed version number.
7 |
8 | # This file is released into the public domain. Generated by
9 | # versioneer-0.19 (https://github.com/python-versioneer/python-versioneer)
10 |
11 | # pylint: skip-file
12 | """Git implementation of _version.py."""
13 |
14 | import errno
15 | import os
16 | import re
17 | import subprocess
18 | import sys
19 |
20 |
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # git-archive substitutes these strings; setup.py/versioneer.py grep
    # for the variable names, so each assignment must stay on its own
    # line. _version.py itself only ever calls get_keywords().
    git_refnames = " (HEAD -> main, tag: v1.2.0)"
    git_full = "1de773d064d5dfbbdda55fbf129f57a6c4b70d0c"
    git_date = "2025-02-19 09:56:05 +0100"
    return {"refnames": git_refnames, "full": git_full, "date": git_date}
32 |
33 |
class VersioneerConfig:
    """Plain attribute container for Versioneer configuration values."""


def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # These values are baked in when 'setup.py versioneer' generates
    # _version.py for this project.
    cfg = VersioneerConfig()
    settings = (
        ("VCS", "git"),
        ("style", "pep440"),
        ("tag_prefix", "v"),
        ("parentdir_prefix", "pyulog-"),
        ("versionfile_source", "pyulog/_version.py"),
        ("verbose", False),
    )
    for attr, value in settings:
        setattr(cfg, attr, value)
    return cfg
50 |
51 |
class NotThisMethod(Exception):
    """Raised when one version-discovery strategy does not apply here."""
54 |
55 |
# Module-level registries used by the versioneer machinery.
LONG_VERSION_PY = {}
# HANDLERS[vcs][method] maps to the registered handler callable.
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Return a decorator registering a function as a VCS handler.

    The decorated function is recorded in ``HANDLERS[vcs][method]`` and
    handed back unchanged.
    """
    def decorate(func):
        """Store func in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = func
        return func
    return decorate
69 |
70 |
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s).

    Each candidate executable in ``commands`` is tried in order with the
    shared argument list ``args``.  Returns ``(stdout, returncode)`` on
    success, ``(None, returncode)`` when the process exits non-zero, and
    ``(None, None)`` when no candidate could be launched at all.
    """
    assert isinstance(commands, list)
    process = None
    dispcmd = None
    stderr_target = subprocess.PIPE if hide_stderr else None
    for candidate in commands:
        dispcmd = str([candidate] + args)
        try:
            # shell=False, so on windows the caller supplies git.cmd/git.exe
            process = subprocess.Popen([candidate] + args, cwd=cwd, env=env,
                                       stdout=subprocess.PIPE,
                                       stderr=stderr_target)
        except OSError as exc:  # EnvironmentError is OSError on Python 3
            if exc.errno == errno.ENOENT:
                # not installed under this name; try the next candidate
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(exc)
            return None, None
        break
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
104 |
105 |
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.  Up to two parent levels
    above ``root`` are also searched for a matching directory name.

    Raises NotThisMethod when no directory level matches the prefix.
    """
    tried = []
    current = root
    for _ in range(3):
        leaf = os.path.basename(current)
        if leaf.startswith(parentdir_prefix):
            return {"version": leaf[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(current)
        current = os.path.dirname(current)  # up a level

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
129 |
130 |
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans ``versionfile_abs`` for the ``git_refnames``, ``git_full`` and
    ``git_date`` assignments and returns whichever were found as a dict
    with keys "refnames", "full" and "date".  Used from setup.py, which
    must not import _version.py, so the values are recovered with a
    regexp instead.  A missing or unreadable file yields an empty dict.
    """
    keywords = {}
    try:
        # 'with' guarantees the handle is closed even if an unexpected
        # exception escapes the parsing below; the original open/close
        # pair leaked the file in that case.
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
                if line.strip().startswith("git_date ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["date"] = mo.group(1)
    except EnvironmentError:
        # best-effort: return whatever was collected before the error
        pass
    return keywords
158 |
159 |
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # Keep only the last line; earlier lines may carry a GPG
        # signature.
        date = date.splitlines()[-1]

        # git's "%ci" output is only "ISO-8601-like"; edit it into a
        # compliant datestamp.  ("%cI" would be exact, but it needs
        # git >= 2.2.0 and the git version is hard to discover here.)
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {ref.strip() for ref in refnames.strip("()").split(",")}
    # git >= 1.8.3 lists tags as "tag: foo-1.0" rather than "foo-1.0";
    # prefer entries carrying that marker.
    tag_marker = "tag: "
    tags = {ref[len(tag_marker):] for ref in refs if ref.startswith(tag_marker)}
    if not tags:
        # Either git < 1.8.3, or there really are no tags.  Heuristic:
        # version tags contain a digit, which filters out common branch
        # names such as "release", "stabilization", "HEAD" and "master"
        # that the old %d expansion leaves indistinguishable from tags.
        tags = {ref for ref in refs if re.search(r'\d', ref)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting prefers e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            version = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % version)
            return {"version": version,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
217 |
218 |
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys "long", "short", "error", "dirty",
    "closest-tag", "distance" and "date"; when parsing fails, "error"
    holds a message and the tag/distance keys are absent.  Raises
    NotThisMethod when ``root`` is not under git control or git itself
    cannot be run.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        # run_command uses shell=False, so plain "git" is not spawnable
        # on Windows; try the .cmd and .exe wrappers instead.
        GITS = ["git.cmd", "git.exe"]

    # output is ignored here; only the return code matters
    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    # Use only the last line. Previous lines may contain GPG signature
    # information.
    date = date.splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
312 |
313 |
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    # a "+" already present in the closest tag means the local-version
    # separator was used, so continue with "." instead
    return "." if "+" in pieces.get("closest-tag", "") else "+"
319 |
320 |
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag anywhere in history
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered

    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += plus_or_dot(pieces)
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
344 |
345 |
def render_pep440_pre(pieces):
    """TAG[.post0.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post0.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return tag + ".post0.dev%d" % pieces["distance"]
    return tag
360 |
361 |
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: untagged history
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        return rendered + "+g%s" % pieces["short"]

    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += plus_or_dot(pieces)
        rendered += "g%s" % pieces["short"]
    return rendered
387 |
388 |
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: untagged history
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        return rendered

    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered
409 |
410 |
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    parts = []
    if pieces["closest-tag"]:
        parts.append(pieces["closest-tag"])
        # distance/hash only appear when there are commits past the tag
        if pieces["distance"]:
            parts.append("-%d-g%s" % (pieces["distance"], pieces["short"]))
    else:
        # exception #1: bare hex, no 'g' prefix
        parts.append(pieces["short"])
    if pieces["dirty"]:
        parts.append("-dirty")
    return "".join(parts)
429 |
430 |
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    parts = []
    if pieces["closest-tag"]:
        parts.append(pieces["closest-tag"])
        parts.append("-%d-g%s" % (pieces["distance"], pieces["short"]))
    else:
        # exception #1: bare hex, no 'g' prefix
        parts.append(pieces["short"])
    if pieces["dirty"]:
        parts.append("-dirty")
    return "".join(parts)
449 |
450 |
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # parsing failed earlier; propagate the error untouched
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    renderer = renderers.get(style)
    if renderer is None:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderer(pieces)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}
481 |
482 |
def get_versions():
    """Get version information or return default if unable to do so."""
    # This code lives in _version.py at ROOT/VERSIONFILE_SOURCE. When
    # __file__ is available we can walk back up to the project root; on
    # frozen interpreters (py2exe/bbfreeze/non-CPython) without __file__,
    # only the expanded keywords can be used.

    cfg = get_config()
    verbose = cfg.verbose

    # 1) expanded keywords (works even without a source tree)
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    # 2) locate the source-tree root by stripping one path component per
    #    segment of versionfile_source (the relative path to this file)
    try:
        root = os.path.realpath(__file__)
        for _ in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    # 3) ask git directly
    try:
        return render(git_pieces_from_vcs(cfg.tag_prefix, root, verbose),
                      cfg.style)
    except NotThisMethod:
        pass

    # 4) fall back to a version embedded in the parent directory name
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
527 |
--------------------------------------------------------------------------------
/pyulog/extract_gps_dump.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | Extract the raw gps communication from an ULog file.
4 | """
5 |
6 | import argparse
7 | import os
8 | import sys
9 | import numpy as np
10 |
11 | from .core import ULog
12 |
13 | #pylint: disable=too-many-locals, unused-wildcard-import, wildcard-import
14 |
def main():
    """
    Command line interface: extract the raw GPS communication from a ULog
    file and write it into two binary files, one per transfer direction
    (to the device / from the device).
    """
    parser = argparse.ArgumentParser(
        description='Extract the raw gps communication from an ULog file')
    parser.add_argument('filename', metavar='file.ulg', help='ULog input file')

    def is_valid_directory(parser, arg):
        """Check if arg is an existing directory, else abort with a parser error"""
        if not os.path.isdir(arg):
            parser.error('The directory {} does not exist'.format(arg))
        # Directory exists so return it
        return arg
    parser.add_argument('-o', '--output', dest='output', action='store',
                        help='Output directory (default is CWD)',
                        metavar='DIR', type=lambda x: is_valid_directory(parser, x))
    parser.add_argument('-x', '--ignore', dest='ignore', action='store_true',
                        help='Ignore string parsing exceptions', default=False)
    # NOTE: fixed a missing space between the two concatenated help string
    # literals ("(default)for") and the "reciever" typo
    parser.add_argument('-i', '--instance', dest='required_instance', action='store',
                        help='GPS instance. Use 0 (default) '
                        'for main GPS, 1 for secondary GPS receiver.',
                        default=0)

    args = parser.parse_args()
    ulog_file_name = args.filename
    disable_str_exceptions = args.ignore
    required_instance = int(args.required_instance)

    # only the gps_dump messages are needed
    msg_filter = ['gps_dump']
    ulog = ULog(ulog_file_name, msg_filter, disable_str_exceptions)
    data = ulog.data_list

    output_file_prefix = os.path.basename(ulog_file_name)
    # strip '.ulg'
    if output_file_prefix.lower().endswith('.ulg'):
        output_file_prefix = output_file_prefix[:-4]

    # write to different output path?
    if args.output is not None:
        output_file_prefix = os.path.join(args.output, output_file_prefix)

    to_dev_filename = output_file_prefix + '_' + str(required_instance) + '_to_device.dat'
    from_dev_filename = output_file_prefix + '_' + str(required_instance) + '_from_device.dat'

    if len(data) == 0:
        print("File {0} does not contain gps_dump messages!".format(ulog_file_name))
        sys.exit(0)

    gps_dump_data = data[0]

    # message format check
    field_names = [f.field_name for f in gps_dump_data.field_data]
    if not 'len' in field_names or not 'data[0]' in field_names:
        print('Error: gps_dump message has wrong format')
        sys.exit(-1)

    if len(ulog.dropouts) > 0:
        print("Warning: file contains {0} dropouts".format(len(ulog.dropouts)))

    print("Creating files {0} and {1}".format(to_dev_filename, from_dev_filename))

    with open(to_dev_filename, 'wb') as to_dev_file:
        with open(from_dev_filename, 'wb') as from_dev_file:
            msg_lens = gps_dump_data.data['len']
            # older logs have no 'instance' field: assume instance 0 for all
            instances = gps_dump_data.data.get('instance', [0]*len(msg_lens))
            for i in range(len(gps_dump_data.data['timestamp'])):
                instance = instances[i]
                msg_len = msg_lens[i]
                if instance != required_instance:
                    continue
                # the MSB of 'len' selects the direction (set -> to device)
                if msg_len & (1<<7):
                    msg_len = msg_len & ~(np.uint8(1) << 7)
                    file_handle = to_dev_file
                else:
                    file_handle = from_dev_file
                for k in range(msg_len):
                    file_handle.write(gps_dump_data.data['data['+str(k)+']'][i])
93 |
--------------------------------------------------------------------------------
/pyulog/extract_message.py:
--------------------------------------------------------------------------------
1 | """
2 | Extract values from a ULog file message to use in scripting
3 | """
4 |
5 | from typing import List
6 | import numpy as np
7 | from .core import ULog
8 |
def extract_message(ulog_file_name: str, message: str,
                    time_s: "int | None" = None, time_e: "int | None" = None,
                    disable_str_exceptions: bool = False) -> List[dict]:
    """
    Extract values from a ULog file

    :param ulog_file_name: (str) The ULog filename to open and read
    :param message: (str) A ULog message to return values from
    :param time_s: (int) Offset time for conversion in seconds
    :param time_e: (int) Limit until time for conversion in seconds
    :param disable_str_exceptions: (bool) Forwarded to ULog: ignore string
        parsing exceptions

    :raises AttributeError: if message is not a string or not in the log
    :return: (List[dict]) A list of each record from the ULog as key-value pairs
    """

    if not isinstance(message, str):
        raise AttributeError("Must provide a message to pull from ULog file")

    ulog = ULog(ulog_file_name, message, disable_str_exceptions)

    try:
        data = ulog.get_dataset(message)
    except Exception as exc:
        raise AttributeError("Provided message is not in the ULog file") from exc

    values = []

    # use same field order as in the log, except for the timestamp
    data_keys = [f.field_name for f in data.field_data]
    data_keys.remove('timestamp')
    data_keys.insert(0, 'timestamp')  # we want timestamp at first position

    timestamps = data.data['timestamp']

    def _first_index_at_or_after(limit_s):
        """Index of the first row with timestamp >= limit_s (seconds);
        len(timestamps) when no row qualifies (instead of raising)."""
        matches = np.where(timestamps >= limit_s * 1e6)[0]
        return matches[0] if matches.size > 0 else len(timestamps)

    # Use explicit "is not None" checks so that a limit of 0 seconds is
    # honored instead of being treated as "not set" (0 is falsy). The helper
    # also avoids the IndexError the previous np.where(...)[0][0] raised when
    # the limit was beyond the last timestamp.
    time_s_i = _first_index_at_or_after(time_s) if time_s is not None else 0
    time_e_i = _first_index_at_or_after(time_e) if time_e is not None \
        else len(timestamps)

    # write the data
    for i in range(time_s_i, time_e_i):
        values.append({key: data.data[key][i] for key in data_keys})

    return values
55 |
--------------------------------------------------------------------------------
/pyulog/info.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | Display information from an ULog file
4 | """
5 |
6 | import argparse
7 |
8 | from .core import ULog
9 |
10 | #pylint: disable=too-many-locals, unused-wildcard-import, wildcard-import
11 | #pylint: disable=invalid-name
12 |
def show_info(ulog, verbose):
    """Show general information from an ULog.

    :param ulog: ULog instance to summarize
    :param verbose: (bool) when True, also print 'perf_' info messages and
        the full contents of the info multiple messages
    """

    if ulog.file_corruption:
        print("Warning: file has data corruption(s)")

    # convert the start time and duration from microseconds to h:m:s
    m1, s1 = divmod(int(ulog.start_timestamp/1e6), 60)
    h1, m1 = divmod(m1, 60)
    m2, s2 = divmod(int((ulog.last_timestamp - ulog.start_timestamp)/1e6), 60)
    h2, m2 = divmod(m2, 60)
    print("Logging start time: {:d}:{:02d}:{:02d}, duration: {:d}:{:02d}:{:02d}".format(
        h1, m1, s1, h2, m2, s2))

    # dropout statistics; durations are in ms (divided by 1000 for seconds)
    dropout_durations = [dropout.duration for dropout in ulog.dropouts]
    if len(dropout_durations) == 0:
        print("No Dropouts")
    else:
        print("Dropouts: count: {:}, total duration: {:.1f} s, max: {:} ms, mean: {:} ms"
              .format(len(dropout_durations), sum(dropout_durations)/1000.,
                      max(dropout_durations),
                      int(sum(dropout_durations)/len(dropout_durations))))

    version = ulog.get_version_info_str()
    if not version is None:
        print('SW Version: {}'.format(version))

    print("Info Messages:")
    for k in sorted(ulog.msg_info_dict):
        # 'perf_' counters are only shown in verbose mode
        if not k.startswith('perf_') or verbose:
            print(" {0}: {1}".format(k, ulog.msg_info_dict[k]))


    if len(ulog.msg_info_multiple_dict) > 0:
        if verbose:
            # full contents of each info multiple message
            print("Info Multiple Messages:")
            for k in sorted(ulog.msg_info_multiple_dict):
                print(" {0}: {1}".format(k, ulog.msg_info_multiple_dict[k]))
        else:
            # compact form: only names and the number of entries
            print("Info Multiple Messages: {}".format(
                ", ".join(["[{}: {}]".format(k, len(ulog.msg_info_multiple_dict[k])) for k in
                           sorted(ulog.msg_info_multiple_dict)])))



    print("")
    print("{:<41} {:7}, {:10}".format("Name (multi id, message size in bytes)",
                                      "number of data points", "total bytes"))

    # one line per dataset, sorted by name and multi id
    data_list_sorted = sorted(ulog.data_list, key=lambda d: d.name + str(d.multi_id))
    for d in data_list_sorted:
        message_size = sum(ULog.get_field_size(f.type_str) for f in d.field_data)
        num_data_points = len(d.data['timestamp'])
        name_id = "{:} ({:}, {:})".format(d.name, d.multi_id, message_size)
        print(" {:<40} {:7d} {:10d}".format(name_id, num_data_points,
                                            message_size * num_data_points))
68 |
69 |
def main():
    """Command line interface"""
    parser = argparse.ArgumentParser(description='Display information from an ULog file')
    parser.add_argument('filename', metavar='file.ulg', help='ULog input file')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='Verbose output', default=False)
    parser.add_argument('-m', '--message', dest='message',
                        help='Show a specific Info Multiple Message')
    parser.add_argument('-n', '--newline', dest='newline', action='store_true',
                        help='Add newline separators (only with --message)', default=False)
    parser.add_argument('-i', '--ignore', dest='ignore', action='store_true',
                        help='Ignore string parsing exceptions', default=False)

    args = parser.parse_args()
    ulog = ULog(args.filename, None, args.ignore)

    if not args.message:
        # no specific message requested: print the general summary
        show_info(ulog, args.verbose)
        return

    requested = args.message
    separator = "\n" if args.newline else ""
    if requested not in ulog.msg_info_multiple_dict:
        print("message {} not found".format(requested))
        return

    for i, m in enumerate(ulog.msg_info_multiple_dict[requested]):
        if len(m) > 0 and isinstance(m[0], (bytes, bytearray)):
            # binary payload: show total length and a hex dump
            print("# {} {} (len: {:}):".format(requested, i, sum(len(item) for item in m)))
            print(separator.join(' '.join('{:02x}'.format(x) for x in item) for item in m))
        else:
            print("# {} {}:".format(requested, i))
            print(separator.join(m))
105 |
106 |
--------------------------------------------------------------------------------
/pyulog/libevents_parse:
--------------------------------------------------------------------------------
1 | ../3rd_party/libevents/libs/python/libevents_parse
--------------------------------------------------------------------------------
/pyulog/messages.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | Display logged messages from an ULog file
4 | """
5 |
6 | import argparse
7 |
8 | from .core import ULog
9 | from .px4_events import PX4Events
10 | #pylint: disable=invalid-name
11 |
def main():
    """Command line interface"""

    parser = argparse.ArgumentParser(description='Display logged messages from an ULog file')
    parser.add_argument('filename', metavar='file.ulg', help='ULog input file')
    parser.add_argument('-i', '--ignore', dest='ignore', action='store_true',
                        help='Ignore string parsing exceptions', default=False)

    args = parser.parse_args()
    ulog_file_name = args.filename
    disable_str_exceptions = args.ignore

    # only the 'event' dataset is needed in addition to the logged messages
    msg_filter = ['event']
    ulog = ULog(ulog_file_name, msg_filter, disable_str_exceptions)

    logged_messages = [(m.timestamp, m.log_level_str(), m.message) for m in ulog.logged_messages]

    # If this is a PX4 log, try to get the events too
    if ulog.msg_info_dict.get('sys_name', '') == 'PX4':
        px4_events = PX4Events()
        events = px4_events.get_logged_events(ulog)

        for t, log_level, message in logged_messages:
            # backwards compatibility: a string message with appended tab is output
            # in addition to an event with the same message so we can ignore those.
            # Use endswith() so an empty message string does not raise IndexError
            # (the previous message[-1] check did).
            if message.endswith('\t'):
                continue
            events.append((t, log_level, message))

        logged_messages = sorted(events, key=lambda m: m[0])

    for t, log_level, message in logged_messages:
        # timestamp is in microseconds; display as h:m:s
        m1, s1 = divmod(int(t/1e6), 60)
        h1, m1 = divmod(m1, 60)
        print("{:d}:{:02d}:{:02d} {:}: {:}".format(h1, m1, s1, log_level, message))
47 |
48 |
49 |
50 |
--------------------------------------------------------------------------------
/pyulog/migrate_db.py:
--------------------------------------------------------------------------------
1 | '''
2 | Tool for handling changes in the database schema. This is necessary for
3 | avoiding breaking backwards compatibility whenver bugs are discovered in the
4 | database model, or if the ULog format changes.
5 |
6 | There are some options available, such as "alembic" or "migrations', but these
7 | seem like overkill for us. For instance, we don't really need to migrate both up and
8 | down, just up.
9 | '''
10 |
11 | import os
12 | import argparse
13 | from pyulog.db import DatabaseULog
14 |
def main():
    '''
    Entry point for the console script: parse arguments and run the
    database migration.
    '''
    parser = argparse.ArgumentParser(description='Setup the database for DatabaseULog')
    parser.add_argument('-d', '--database', dest='db_path', action='store',
                        help='Path to the database file',
                        default='pyulog.sqlite3')
    # The noop flag actually has a side effect if it is called on an uncreated
    # database, since the "PRAGMA user_version" command implicitly creates the
    # database. The created database will have user_version = 0, which will
    # later confuse the migration tool. However, this edge case will mostly be
    # relevant for advanced users, and can be handled with the -f flag.
    parser.add_argument('-n', '--noop', dest='noop', action='store_true',
                        help='Only print results, do not execute migration scripts.',
                        default=False)
    parser.add_argument('-s', '--sql', dest='sql_dir', action='store',
                        help='Directory with migration SQL files',
                        default=None)
    # NOTE: added the missing space between the two concatenated help string
    # literals (previously rendered as "...not createdwith this script.")
    parser.add_argument('-f', '--force', dest='force', action='store_true',
                        help=('Run the migration script even if the database is not created '
                              'with this script.'),
                        default=False)
    args = parser.parse_args()
    migrate_db(args.db_path, sql_dir=args.sql_dir, noop=args.noop, force=args.force)
40 |
def _read_db_schema_version(db_path, force):
    '''
    Read and validate the schema version defined by the "PRAGMA user_version"
    field in the database.

    If the database file exists and the schema version is 0, then the database
    was not created with the migration tool, i.e. it is in a state unknown to
    the migration tool, and a migration could cause schema corruption. The
    default behavior in this case is to reject the migration, but it can be
    overridden with force=True.

    :param db_path: path to the sqlite3 database file
    :param force: (bool) allow migrating a database with user_version = 0
    :return: (int) the stored schema version, or 0 if the file does not exist
    :raises ValueError: if the stored schema version is missing or invalid
    :raises FileExistsError: if user_version is 0 and force is False
    '''
    db_handle = DatabaseULog.get_db_handle(db_path)
    if not os.path.isfile(db_path):
        print(f'Database file {db_path} not found, creating it from scratch.')
        return 0
    print(f'Found database file {db_path}.')

    with db_handle() as con:
        cur = con.cursor()
        cur.execute('PRAGMA user_version')
        (db_schema_version,) = cur.fetchone()
        cur.close()

    if db_schema_version is None:
        raise ValueError(f'Could not fetch database schema version for {db_path}.')
    if db_schema_version == 0 and not force:
        # NOTE: added the missing space between the two concatenated string
        # literals (previously rendered as "...rejecting migration.Use the...")
        raise FileExistsError('Database has user_version = 0, rejecting migration. '
                              'Use the "force" flag to migrate anyway.')
    if not isinstance(db_schema_version, int) or db_schema_version < 0:
        raise ValueError(f'Invalid database schema version {db_schema_version}.')
    return db_schema_version
71 |
72 | def _read_migration_file(migration_id, sql_dir):
73 | '''
74 | Read the migration file with id "migration_id" in directory "sql_dir", and
75 | check that it handles transactions strictly.
76 | '''
77 | migration_filename_format = os.path.join(sql_dir, 'pyulog.{migration_id}.sql')
78 | migration_filename = migration_filename_format.format(migration_id=migration_id)
79 | if not os.path.exists(migration_filename):
80 | raise FileNotFoundError(f'Migration file {migration_filename} does not exist. '
81 | f'Stopped after migration {migration_id}.')
82 |
83 | with open(migration_filename, 'r', encoding='utf8') as migration_file:
84 | migration_lines = migration_file.read()
85 | if not migration_lines.strip().startswith('BEGIN;'):
86 | raise ValueError(f'Migration file {migration_filename} must start with "BEGIN;"')
87 | if not migration_lines.strip().endswith('COMMIT;'):
88 | raise ValueError(f'Migration file {migration_filename} must end with "COMMIT;"')
89 |
90 | migration_lines += f'\nPRAGMA user_version = {migration_id};'
91 | return migration_filename, migration_lines
92 |
def migrate_db(db_path, sql_dir=None, noop=False, force=False):
    '''
    Apply database migrations that have not yet been applied.

    Compares "PRAGMA user_version" of the sqlite3 database at "db_path" with
    DatabaseULog.SCHEMA_VERSION. While the database lags behind, the
    migration scripts are read from "sql_dir" and executed one by one, each
    also incrementing the stored user_version, until the database is up to
    date.

    :param db_path: path to the sqlite3 database file
    :param sql_dir: directory containing the migration SQL files (defaults
        to the package's bundled "sql" directory)
    :param noop: (bool) only print the scripts instead of executing them
    :param force: (bool) migrate even a database with user_version = 0
    :return: db_path
    '''
    if sql_dir is None:
        package_dir = os.path.dirname(os.path.realpath(os.path.abspath(__file__)))
        sql_dir = os.path.join(package_dir, 'sql')
    if not os.path.isdir(sql_dir):
        raise NotADirectoryError(f'{sql_dir} is not a directory.')
    print(f'Using migration files in {sql_dir}.')

    current_version = _read_db_schema_version(db_path, force)
    target_version = DatabaseULog.SCHEMA_VERSION
    print('Current schema version: {} (database) and {} (code).'.format(
        current_version,
        target_version,
    ))

    db_handle = DatabaseULog.get_db_handle(db_path)
    with db_handle() as con:
        cur = con.cursor()
        for migration_id in range(current_version + 1, target_version + 1):
            migration_filename, migration_script = _read_migration_file(migration_id, sql_dir)
            print(f'Executing {migration_filename}.')
            if noop:
                print(migration_script)
            else:
                cur.executescript(migration_script)

        cur.close()
    print('Migration done.')
    return db_path
132 |
133 | if __name__ == '__main__':
134 | raise SystemExit(main())
135 |
--------------------------------------------------------------------------------
/pyulog/params.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | Extract parameters from an ULog file
4 | """
5 |
6 | import argparse
7 | import sys
8 |
9 | from .core import ULog
10 | #pylint: disable=unused-variable, too-many-branches
11 |
def get_defaults(ulog, default):
    """ get default params from ulog """
    assert ulog.has_default_parameters, "Log does not contain default parameters"

    # map the --default argument onto the default-parameter group index
    group_indices = {'system': 0, 'current_setup': 1}
    if default not in group_indices:
        raise ValueError('invalid value \'{}\' for --default'.format(default))
    return ulog.get_default_parameters(group_indices[default])
19 |
def main():
    """Command line interface: extract parameters from an ULog file and
    write them to the output file in csv, octave or qgc format."""
    parser = argparse.ArgumentParser(description='Extract parameters from an ULog file')
    parser.add_argument('filename', metavar='file.ulg', help='ULog input file')

    parser.add_argument('-l', '--delimiter', dest='delimiter', action='store',
                        help='Use delimiter in CSV (default is \',\')', default=',')

    parser.add_argument('-i', '--initial', dest='initial', action='store_true',
                        help='Only extract initial parameters. (octave|csv)', default=False)

    parser.add_argument('-t', '--timestamps', dest='timestamps', action='store_true',
                        help='Extract changed parameters with timestamps. (csv)', default=False)

    parser.add_argument('-f', '--format', dest='format', action='store', type=str,
                        help='csv|octave|qgc', default='csv')

    parser.add_argument('output_filename', metavar='params.txt',
                        type=argparse.FileType('w'), nargs='?',
                        help='Output filename (default=stdout)', default=sys.stdout)

    parser.add_argument('--ignore', dest='ignore', action='store_true',
                        help='Ignore string parsing exceptions', default=False)

    parser.add_argument('-d', '--default', dest='default', action='store', type=str,
                        help='Select default param values instead of configured '
                        'values (implies --initial). Valid values: system|current_setup',
                        default=None)

    args = parser.parse_args()
    ulog_file_name = args.filename
    disable_str_exceptions = args.ignore

    # with --initial no data messages are needed, so use an empty message
    # filter; otherwise parse everything (None = no filtering)
    message_filter = []
    if not args.initial: message_filter = None

    ulog = ULog(ulog_file_name, message_filter, disable_str_exceptions)

    params = ulog.initial_parameters
    if args.default is not None:
        # --default implies --initial
        params = get_defaults(ulog, args.default)
        args.initial = True

    param_keys = sorted(params.keys())
    delimiter = args.delimiter
    output_file = args.output_filename

    if args.format == "csv":
        for param_key in param_keys:
            output_file.write(param_key)
            if args.timestamps:
                # one line with the initial value and all later changes,
                # followed by a line with the matching timestamps (0 stands
                # for the initial value)
                output_file.write(delimiter)
                output_file.write(str(params[param_key]))
                for t, name, value in ulog.changed_parameters:
                    if name == param_key:
                        output_file.write(delimiter)
                        output_file.write(str(value))

                output_file.write('\n')
                output_file.write("timestamp")
                output_file.write(delimiter)
                output_file.write('0')
                for t, name, value in ulog.changed_parameters:
                    if name == param_key:
                        output_file.write(delimiter)
                        output_file.write(str(t))

                output_file.write('\n')
            else:
                output_file.write(delimiter)
                output_file.write(str(params[param_key]))
                if not args.initial:
                    # append all later changes of this parameter
                    for t, name, value in ulog.changed_parameters:
                        if name == param_key:
                            output_file.write(delimiter)
                            output_file.write(str(value))
                output_file.write('\n')

    elif args.format == "octave":

        for param_key in param_keys:
            output_file.write('# name ')
            output_file.write(param_key)
            values = [params[param_key]]

            if not args.initial:
                # collect all later changes of this parameter
                for t, name, value in ulog.changed_parameters:
                    if name == param_key:
                        values += [value]

            if len(values) > 1:
                # multiple values: write as a 1 x N matrix
                output_file.write('\n# type: matrix\n')
                output_file.write('# rows: 1\n')
                output_file.write('# columns: ')
                output_file.write(str(len(values)) + '\n')
                for value in values:
                    output_file.write(str(value) + ' ')

            else:
                output_file.write('\n# type: scalar\n')
                output_file.write(str(values[0]))

            output_file.write('\n')

    elif args.format == "qgc":

        # tab-separated lines: sys_id, comp_id, name, value, type
        # (presumably the QGroundControl parameter file layout — verify)
        for param_key in param_keys:
            sys_id = 1
            comp_id = 1
            delimiter = '\t'
            param_value = params[param_key]

            output_file.write(str(sys_id))
            output_file.write(delimiter)
            output_file.write(str(comp_id))
            output_file.write(delimiter)
            output_file.write(param_key)
            output_file.write(delimiter)
            output_file.write(str(param_value))
            output_file.write(delimiter)

            if isinstance(param_value, float):
                # Float
                param_type = 9
            else:
                # Int
                param_type = 6

            output_file.write(str(param_type))
            output_file.write('\n')
150 |
--------------------------------------------------------------------------------
/pyulog/px4.py:
--------------------------------------------------------------------------------
1 | """
2 | PX4-specific ULog helper
3 | """
4 | import numpy as np
5 |
6 | __author__ = "Beat Kueng"
7 |
8 |
class PX4ULog(object):
    """
    This class contains PX4-specific ULog things (field names, etc.)
    """

    def __init__(self, ulog_object):
        """
        @param ulog_object: ULog instance
        """
        self._ulog = ulog_object

    def get_mav_type(self):
        """ return the MAV type as string from initial parameters """

        mav_type_names = {
            0: 'Generic',
            1: 'Fixed Wing',
            2: 'Quadrotor',
            3: 'Coaxial helicopter',
            4: 'Normal helicopter with tail rotor',
            5: 'Ground installation',
            6: 'Ground Control Station',
            7: 'Airship, controlled',
            8: 'Free balloon, uncontrolled',
            9: 'Rocket',
            10: 'Ground Rover',
            11: 'Surface Vessel, Boat, Ship',
            12: 'Submarine',
            13: 'Hexarotor',
            14: 'Octorotor',
            15: 'Tricopter',
            16: 'Flapping wing',
            17: 'Kite',
            18: 'Onboard Companion Controller',
            19: 'Two-rotor VTOL (Tailsitter)',
            20: 'Quad-rotor VTOL (Tailsitter)',
            21: 'Tiltrotor VTOL',
            22: 'VTOL Standard',
            23: 'VTOL Tailsitter',
            24: 'VTOL reserved 4',
            25: 'VTOL reserved 5',
            26: 'Onboard Gimbal',
            27: 'Onboard ADSB Peripheral',
        }
        configured_type = self._ulog.initial_parameters.get('MAV_TYPE', None)
        return mav_type_names.get(configured_type, 'unknown type')

    def get_estimator(self):
        """return the configured estimator as string from initial parameters"""

        if self._ulog.initial_parameters.get('MAV_TYPE', None) == 1:
            return 'EKF2'  # fixed wing always uses EKF2

        estimator_group = self._ulog.initial_parameters.get('SYS_MC_EST_GROUP', 2)
        estimator_names = {0: 'INAV', 1: 'LPE', 2: 'EKF2', 3: 'Q'}
        return estimator_names.get(estimator_group,
                                   'unknown ({})'.format(estimator_group))

    def add_roll_pitch_yaw(self, messages=None):
        """ convenience method to add the fields 'roll', 'pitch', 'yaw' to the
        loaded data using the quaternion fields (does not update field_data).

        By default, messages are: 'vehicle_attitude.q',
        'vehicle_attitude_setpoint.q_d', 'vehicle_attitude_groundtruth.q' and
        'vehicle_vision_attitude.q' """

        if messages is None:
            messages = ['vehicle_attitude',
                        'vehicle_vision_attitude',
                        'vehicle_attitude_groundtruth',
                        'vehicle_attitude_setpoint:_d']
        for message in messages:
            # a ':_d' suffix selects the 'q_d' quaternion field instead of 'q'
            if message.endswith(':_d'):
                self._add_roll_pitch_yaw_to_message(message[:-3], '_d')
            else:
                self._add_roll_pitch_yaw_to_message(message, '')

    def _add_roll_pitch_yaw_to_message(self, message_name, field_name_suffix=''):
        """ compute Euler angles from the quaternion fields of every instance
        of a message and store them as new data fields """
        for dataset in self._ulog.data_list:
            if dataset.name != message_name:
                continue
            q = [dataset.data['q' + field_name_suffix + '[' + str(i) + ']']
                 for i in range(4)]
            # quaternion (w, x, y, z) to Euler angle conversion
            roll = np.arctan2(2.0 * (q[0] * q[1] + q[2] * q[3]),
                              1.0 - 2.0 * (q[1] * q[1] + q[2] * q[2]))
            pitch = np.arcsin(2.0 * (q[0] * q[2] - q[3] * q[1]))
            yaw = np.arctan2(2.0 * (q[0] * q[3] + q[1] * q[2]),
                             1.0 - 2.0 * (q[2] * q[2] + q[3] * q[3]))
            dataset.data['roll' + field_name_suffix] = roll
            dataset.data['pitch' + field_name_suffix] = pitch
            dataset.data['yaw' + field_name_suffix] = yaw

    def get_configured_rc_input_names(self, channel):
        """
        find all RC mappings to a given channel and return their names

        :param channel: input channel (0=first)
        :return: list of strings or None
        """
        names = [key[7:].capitalize()
                 for key, value in self._ulog.initial_parameters.items()
                 if key.startswith('RC_MAP_') and value == channel + 1]
        return names if names else None
120 |
--------------------------------------------------------------------------------
/pyulog/px4_events.py:
--------------------------------------------------------------------------------
1 | """ Event parsing """
2 | import json
3 | import lzma
4 | import urllib.request
5 | from typing import Optional, Callable, Any, List, Tuple
6 |
7 | from .libevents_parse.parser import Parser
8 | from .core import ULog
9 |
10 |
11 | class PX4Events:
12 | """ class to extract events from logs and combine them with metadata to get the messages """
13 |
14 | DEFAULT_EVENTS_URL = \
15 | 'https://px4-travis.s3.amazonaws.com/Firmware/master/_general/all_events.json.xz'
16 |
    def __init__(self):
        # metadata profile used when parsing events
        self._events_profile = 'dev'
        # lazily created parser based on the default (downloaded) definitions
        self._default_parser: Optional[Parser] = None
        # callback returning the default json event definitions; replaceable
        # via set_default_json_definitions_cb (e.g. for caching)
        self._get_default_json_def_cb = self._get_default_json_definitions
21 |
    @staticmethod
    def _get_default_json_definitions(already_has_default_parser: bool) -> Optional[Any]:
        """ Default implementation for retrieving the default json event definitions.

        Downloads the xz-compressed definitions from DEFAULT_EVENTS_URL.

        :param already_has_default_parser: True if a default parser already
            exists; then None is returned to avoid re-downloading
        :return: parsed json definitions, or None
        """

        # If it already exists, return it to avoid re-downloading
        if already_has_default_parser:
            return None

        with urllib.request.urlopen(PX4Events.DEFAULT_EVENTS_URL, timeout=4) as response:
            data = response.read()
        return json.loads(lzma.decompress(data))
33 |
    def set_default_json_definitions_cb(self,
                                        default_json_definitions_cb: Callable[[bool], Optional[Any]]):
        """ Set the callback to retrieve the default event definitions json
        data (can be used for caching).

        The callback receives a bool telling whether a default parser already
        exists, and returns the parsed json definitions or None.
        """
        self._get_default_json_def_cb = default_json_definitions_cb
39 |
    def _get_event_parser(self, ulog: ULog) -> Optional[Parser]:
        """ get event parser instance or None on error """

        # prefer event definitions embedded in the log itself
        if 'metadata_events' in ulog.msg_info_multiple_dict and \
                'metadata_events_sha256' in ulog.msg_info_dict:
            file_hash = ulog.msg_info_dict['metadata_events_sha256']
            # sanity-check the stored hash before trusting the metadata
            if len(file_hash) <= 64 and file_hash.isalnum():
                # the metadata is stored as lzma-compressed json chunks
                events_metadata = ulog.msg_info_multiple_dict['metadata_events'][0]
                event_definitions_json = json.loads(lzma.decompress(b''.join(events_metadata)))
                parser = Parser()
                parser.load_definitions(event_definitions_json)
                parser.set_profile(self._events_profile)
                return parser

        # No json definitions in the log -> use default definitions
        json_definitions = self._get_default_json_def_cb(
            self._default_parser is not None)
        if json_definitions is not None:
            self._default_parser = Parser()
            self._default_parser.load_definitions(json_definitions)
            self._default_parser.set_profile(self._events_profile)

        return self._default_parser
63 |
    def get_logged_events(self, ulog: ULog) -> List[Tuple[int, str, str]]:
        """
        Get the events as list of messages
        :param ulog: parsed ULog instance to read the 'event' dataset from
        :return: list of (timestamp, log level str, message) tuples
        """

        def event_log_level_str(log_level: int) -> str:
            """ Map a numeric event log level to its display name """
            return {0: 'EMERGENCY',
                    1: 'ALERT',
                    2: 'CRITICAL',
                    3: 'ERROR',
                    4: 'WARNING',
                    5: 'NOTICE',
                    6: 'INFO',
                    7: 'DEBUG',
                    8: 'PROTOCOL',
                    9: 'DISABLED'}.get(log_level, 'UNKNOWN')

        # Parse events
        messages = []
        try:
            events = ulog.get_dataset('event')
            all_ids = events.data['id']

            if len(all_ids) == 0:
                return []

            # Get the parser (may hit the network via the default definitions callback)
            try:
                event_parser = self._get_event_parser(ulog)
            except Exception as exception: # pylint: disable=broad-exception-caught
                print('Failed to get event parser: {}'.format(exception))
                return []

            for event_idx, event_id in enumerate(all_ids):
                # the displayed log level is stored in the upper 4 bits of 'log_levels'
                log_level = (events.data['log_levels'][event_idx] >> 4) & 0xf
                if log_level >= 8:
                    # skip levels 8/9 (PROTOCOL/DISABLED): not meant for display
                    continue
                # collect the event's argument bytes from the arguments[0..N] columns
                args = []
                i = 0
                while True:
                    arg_str = 'arguments[{}]'.format(i)
                    if arg_str not in events.data:
                        break
                    arg = events.data[arg_str][event_idx]
                    args.append(arg)
                    i += 1
                log_level_str = event_log_level_str(log_level)
                t = events.data['timestamp'][event_idx]
                event = None
                if event_parser is not None:
                    event = event_parser.parse(event_id, bytes(args))
                if event is None:
                    # no parser available, or the id is not in the definitions
                    messages.append((t, log_level_str,
                                     '[Unknown event with ID {:}]'.format(event_id)))
                else:
                    # only show default group
                    if event.group() == "default":
                        messages.append((t, log_level_str, event.message()))
                # we could expand this a bit for events:
                # - show the description too
                # - handle url's as link (currently it's shown as text, and all tags are escaped)
        except (KeyError, IndexError, ValueError):
            # no events in log
            pass

        return messages
131 |
--------------------------------------------------------------------------------
/pyulog/sql/pyulog.1.sql:
--------------------------------------------------------------------------------
BEGIN;
-- Schema version 1: initial pyulog database layout. Each parsed log file is
-- one row in ULog; all other tables reference it via their ULogId column.
CREATE TABLE IF NOT EXISTS ULog (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        FileVersion INT,
        StartTimestamp REAL,
        LastTimestamp REAL,
        CompatFlags TEXT,
        IncompatFlags TEXT,
        SyncCount INT,
        HasSync BOOLEAN
);

-- Byte offsets of appended data segments within a log file
CREATE TABLE IF NOT EXISTS ULogAppendedOffsets (
        SeriesIndex INTEGER,
        Offset INTEGER,
        ULogId INT REFERENCES ULog (Id)
);

-- One row per (topic, multi-instance) data series in a log
CREATE TABLE IF NOT EXISTS ULogDataset (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        DatasetName TEXT,
        MultiId INT,
        MessageId INT,
        TimestampIndex INT,
        ULogId INT REFERENCES ULog (Id),
        UNIQUE (ULogId, DatasetName, MultiId)
);

-- One row per field of a dataset; ValueArray holds the serialized samples
CREATE TABLE IF NOT EXISTS ULogField (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        TopicName TEXT,
        DataType TEXT,
        ValueArray BLOB,

        DatasetId INTEGER REFERENCES ULogDataset (Id)
);
CREATE INDEX IF NOT EXISTS btree_ulogfield_datasetid ON ULogField(DatasetId);

CREATE TABLE IF NOT EXISTS ULogMessageDropout (
        Timestamp REAL,
        Duration FLOAT,
        ULogId INT REFERENCES ULog (Id)
);

CREATE TABLE IF NOT EXISTS ULogMessageFormat (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        Name TEXT,
        ULogId INT REFERENCES ULog (Id)
);

CREATE TABLE IF NOT EXISTS ULogMessageFormatField (
        FieldType TEXT,
        ArraySize INT,
        Name TEXT,
        MessageId INT REFERENCES ULogMessageFormat (Id)
);

CREATE TABLE IF NOT EXISTS ULogMessageLogging (
        LogLevel INT,
        Timestamp REAL,
        Message TEXT,
        ULogId INT REFERENCES ULog (Id)
);

CREATE TABLE IF NOT EXISTS ULogMessageLoggingTagged (
        LogLevel INT,
        Timestamp REAL,
        Tag INT,
        Message TEXT,
        ULogId INT REFERENCES ULog (Id)
);

CREATE TABLE IF NOT EXISTS ULogMessageInfo (
        Key TEXT,
        Typename TEXT,
        Value BLOB,
        ULogId INT REFERENCES ULog (Id)
);

-- 'info multiple' messages: a keyed message, its list instances, and the
-- elements of each list (three-level hierarchy)
CREATE TABLE IF NOT EXISTS ULogMessageInfoMultiple (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        Key TEXT,
        Typename TEXT,
        ULogId INT REFERENCES ULog (Id)
);
CREATE TABLE IF NOT EXISTS ULogMessageInfoMultipleList (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        SeriesIndex INTEGER,
        MessageId TEXT REFERENCES ULogMessageInfoMultiple (Id)
);
CREATE TABLE IF NOT EXISTS ULogMessageInfoMultipleListElement (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        SeriesIndex INTEGER,
        Value TEXT,
        ListId TEXT REFERENCES ULogMessageInfoMultipleList (Id)
);

CREATE TABLE IF NOT EXISTS ULogInitialParameter (
        Key TEXT,
        Value BLOB,
        ULogId INT REFERENCES ULog (Id)
);

CREATE TABLE IF NOT EXISTS ULogChangedParameter (
        Timestamp REAL,
        Key TEXT,
        Value BLOB,
        ULogId INT REFERENCES ULog (Id)
);

CREATE TABLE IF NOT EXISTS ULogDefaultParameter (
        DefaultType INT,
        Key TEXT,
        Value BLOB,
        ULogId INT REFERENCES ULog (Id)
);
COMMIT;

--------------------------------------------------------------------------------
/pyulog/sql/pyulog.2.sql:
--------------------------------------------------------------------------------
BEGIN;
-- Schema version 2: add a unique SHA256Sum column to ULog and ON DELETE
-- CASCADE to all foreign keys. SQLite cannot add constraints in place, so
-- every table is rebuilt: create a *_tmp copy with the new constraints, copy
-- the rows over, drop the old table, and rename *_tmp back.
PRAGMA foreign_keys=off;

CREATE TABLE IF NOT EXISTS ULog_tmp (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        SHA256Sum TEXT UNIQUE,
        FileVersion INT,
        StartTimestamp REAL,
        LastTimestamp REAL,
        CompatFlags TEXT,
        IncompatFlags TEXT,
        SyncCount INT,
        HasSync BOOLEAN
);
INSERT OR IGNORE INTO ULog_tmp (Id, FileVersion, StartTimestamp, LastTimestamp, CompatFlags, IncompatFlags, SyncCount, HasSync) SELECT Id, FileVersion, StartTimestamp, LastTimestamp, CompatFlags, IncompatFlags, SyncCount, HasSync FROM ULog;


CREATE TABLE IF NOT EXISTS ULogAppendedOffsets_tmp (
        SeriesIndex INTEGER,
        Offset INTEGER,
        ULogId INT REFERENCES ULog_tmp (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogAppendedOffsets_tmp SELECT * FROM ULogAppendedOffsets;

-- NOTE(review): the remaining _tmp tables reference the old table names
-- (e.g. ULog rather than ULog_tmp); after the DROP + RENAME steps below the
-- referenced names resolve to the rebuilt tables -- verify against the
-- SQLite ALTER TABLE RENAME semantics in use.
CREATE TABLE IF NOT EXISTS ULogDataset_tmp (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        DatasetName TEXT,
        MultiId INT,
        MessageId INT,
        TimestampIndex INT,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE,
        UNIQUE (ULogId, DatasetName, MultiId)
);
INSERT OR IGNORE INTO ULogDataset_tmp SELECT * FROM ULogDataset;

CREATE TABLE IF NOT EXISTS ULogField_tmp (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        TopicName TEXT,
        DataType TEXT,
        ValueArray BLOB,
        DatasetId INTEGER REFERENCES ULogDataset (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogField_tmp SELECT * FROM ULogField;
CREATE INDEX IF NOT EXISTS btree_ulogfield_datasetid ON ULogField_tmp(DatasetId);

CREATE TABLE IF NOT EXISTS ULogMessageDropout_tmp (
        Timestamp REAL,
        Duration FLOAT,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogMessageDropout_tmp SELECT * FROM ULogMessageDropout;

CREATE TABLE IF NOT EXISTS ULogMessageFormat_tmp (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        Name TEXT,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogMessageFormat_tmp SELECT * FROM ULogMessageFormat;

CREATE TABLE IF NOT EXISTS ULogMessageFormatField_tmp (
        FieldType TEXT,
        ArraySize INT,
        Name TEXT,
        MessageId INT REFERENCES ULogMessageFormat (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogMessageFormatField_tmp SELECT * FROM ULogMessageFormatField;

CREATE TABLE IF NOT EXISTS ULogMessageLogging_tmp (
        LogLevel INT,
        Timestamp REAL,
        Message TEXT,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogMessageLogging_tmp SELECT * FROM ULogMessageLogging;

CREATE TABLE IF NOT EXISTS ULogMessageLoggingTagged_tmp (
        LogLevel INT,
        Timestamp REAL,
        Tag INT,
        Message TEXT,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogMessageLoggingTagged_tmp SELECT * FROM ULogMessageLoggingTagged;

CREATE TABLE IF NOT EXISTS ULogMessageInfo_tmp (
        Key TEXT,
        Typename TEXT,
        Value BLOB,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogMessageInfo_tmp SELECT * FROM ULogMessageInfo;

CREATE TABLE IF NOT EXISTS ULogMessageInfoMultiple_tmp (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        Key TEXT,
        Typename TEXT,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogMessageInfoMultiple_tmp SELECT * FROM ULogMessageInfoMultiple;

CREATE TABLE IF NOT EXISTS ULogMessageInfoMultipleList_tmp (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        SeriesIndex INTEGER,
        MessageId TEXT REFERENCES ULogMessageInfoMultiple (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogMessageInfoMultipleList_tmp SELECT * FROM ULogMessageInfoMultipleList;

CREATE TABLE IF NOT EXISTS ULogMessageInfoMultipleListElement_tmp (
        Id INTEGER PRIMARY KEY AUTOINCREMENT,
        SeriesIndex INTEGER,
        Value TEXT,
        ListId TEXT REFERENCES ULogMessageInfoMultipleList (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogMessageInfoMultipleListElement_tmp SELECT * FROM ULogMessageInfoMultipleListElement;

CREATE TABLE IF NOT EXISTS ULogInitialParameter_tmp (
        Key TEXT,
        Value BLOB,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogInitialParameter_tmp SELECT * FROM ULogInitialParameter;

CREATE TABLE IF NOT EXISTS ULogChangedParameter_tmp (
        Timestamp REAL,
        Key TEXT,
        Value BLOB,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogChangedParameter_tmp SELECT * FROM ULogChangedParameter;

CREATE TABLE IF NOT EXISTS ULogDefaultParameter_tmp (
        DefaultType INT,
        Key TEXT,
        Value BLOB,
        ULogId INT REFERENCES ULog (Id) ON DELETE CASCADE
);
INSERT OR IGNORE INTO ULogDefaultParameter_tmp SELECT * FROM ULogDefaultParameter;

-- Drop the old tables now that all rows have been copied
DROP TABLE IF EXISTS ULog;
DROP TABLE IF EXISTS ULogAppendedOffsets;
DROP TABLE IF EXISTS ULogDataset;
DROP TABLE IF EXISTS ULogField;
DROP TABLE IF EXISTS ULogMessageDropout;
DROP TABLE IF EXISTS ULogMessageFormat;
DROP TABLE IF EXISTS ULogMessageFormatField;
DROP TABLE IF EXISTS ULogMessageLogging;
DROP TABLE IF EXISTS ULogMessageLoggingTagged;
DROP TABLE IF EXISTS ULogMessageInfo;
DROP TABLE IF EXISTS ULogMessageInfoMultiple;
DROP TABLE IF EXISTS ULogMessageInfoMultipleList;
DROP TABLE IF EXISTS ULogMessageInfoMultipleListElement;
DROP TABLE IF EXISTS ULogInitialParameter;
DROP TABLE IF EXISTS ULogChangedParameter;
DROP TABLE IF EXISTS ULogDefaultParameter;

-- Move the rebuilt tables into place under the original names
ALTER TABLE ULog_tmp RENAME TO ULog;
ALTER TABLE ULogAppendedOffsets_tmp RENAME TO ULogAppendedOffsets;
ALTER TABLE ULogDataset_tmp RENAME TO ULogDataset;
ALTER TABLE ULogField_tmp RENAME TO ULogField;
ALTER TABLE ULogMessageDropout_tmp RENAME TO ULogMessageDropout;
ALTER TABLE ULogMessageFormat_tmp RENAME TO ULogMessageFormat;
ALTER TABLE ULogMessageFormatField_tmp RENAME TO ULogMessageFormatField;
ALTER TABLE ULogMessageLogging_tmp RENAME TO ULogMessageLogging;
ALTER TABLE ULogMessageLoggingTagged_tmp RENAME TO ULogMessageLoggingTagged;
ALTER TABLE ULogMessageInfo_tmp RENAME TO ULogMessageInfo;
ALTER TABLE ULogMessageInfoMultiple_tmp RENAME TO ULogMessageInfoMultiple;
ALTER TABLE ULogMessageInfoMultipleList_tmp RENAME TO ULogMessageInfoMultipleList;
ALTER TABLE ULogMessageInfoMultipleListElement_tmp RENAME TO ULogMessageInfoMultipleListElement;
ALTER TABLE ULogInitialParameter_tmp RENAME TO ULogInitialParameter;
ALTER TABLE ULogChangedParameter_tmp RENAME TO ULogChangedParameter;
ALTER TABLE ULogDefaultParameter_tmp RENAME TO ULogDefaultParameter;

PRAGMA foreign_keys=on;
COMMIT;

--------------------------------------------------------------------------------
/pyulog/sql/pyulog.3.sql:
--------------------------------------------------------------------------------
BEGIN;
-- Schema version 3: add an optional JSON representation of field values.
ALTER TABLE ULogField ADD COLUMN ValueJson JSON;
COMMIT;

4 |
--------------------------------------------------------------------------------
/pyulog/sql/pyulog.4.sql:
--------------------------------------------------------------------------------
BEGIN;
PRAGMA foreign_keys=off;

-- Schema version 4: change REAL timestamp/duration columns to INT. SQLite
-- only supports INT64, but ULog timestamps are UINT64. We accept losing
-- 1 bit at the top end, since 2^63 microseconds = 400,000 years, which
-- should be enough.
--
-- SQLite cannot change a column type in place, so each column is renamed
-- aside, re-added with the new type, and back-filled with a CAST.

ALTER TABLE ULog RENAME COLUMN StartTimestamp TO StartTimestamp_old;
ALTER TABLE ULog ADD COLUMN StartTimestamp INT;
UPDATE ULog SET StartTimestamp = CAST(StartTimestamp_old AS INT);

ALTER TABLE ULog RENAME COLUMN LastTimestamp TO LastTimestamp_old;
ALTER TABLE ULog ADD COLUMN LastTimestamp INT;
UPDATE ULog SET LastTimestamp = CAST(LastTimestamp_old AS INT);

ALTER TABLE ULogMessageDropout RENAME COLUMN Timestamp TO Timestamp_old;
ALTER TABLE ULogMessageDropout ADD COLUMN Timestamp INT;
UPDATE ULogMessageDropout SET Timestamp = CAST(Timestamp_old AS INT);

ALTER TABLE ULogMessageDropout RENAME COLUMN Duration TO Duration_old;
ALTER TABLE ULogMessageDropout ADD COLUMN Duration INT;
UPDATE ULogMessageDropout SET Duration = CAST(Duration_old AS INT);

ALTER TABLE ULogMessageLogging RENAME COLUMN Timestamp TO Timestamp_old;
ALTER TABLE ULogMessageLogging ADD COLUMN Timestamp INT;
UPDATE ULogMessageLogging SET Timestamp = CAST(Timestamp_old AS INT);

ALTER TABLE ULogMessageLoggingTagged RENAME COLUMN Timestamp TO Timestamp_old;
ALTER TABLE ULogMessageLoggingTagged ADD COLUMN Timestamp INT;
UPDATE ULogMessageLoggingTagged SET Timestamp = CAST(Timestamp_old AS INT);

ALTER TABLE ULogChangedParameter RENAME COLUMN Timestamp TO Timestamp_old;
ALTER TABLE ULogChangedParameter ADD COLUMN Timestamp INT;
UPDATE ULogChangedParameter SET Timestamp = CAST(Timestamp_old AS INT);

PRAGMA foreign_keys=on;
COMMIT;

38 |
--------------------------------------------------------------------------------
/pyulog/sql/pyulog.5.sql:
--------------------------------------------------------------------------------
BEGIN;
-- Schema version 5: add B-tree indexes on all foreign-key columns to speed
-- up per-log lookups and cascading deletes.
CREATE INDEX IF NOT EXISTS btree_ULogAppendedOffsets_ULogId ON ULogAppendedOffsets(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogDataset_ULogId ON ULogDataset(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogField_DatasetId ON ULogField(DatasetId);
CREATE INDEX IF NOT EXISTS btree_ULogMessageDropout_ULogId ON ULogMessageDropout(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogMessageFormat_ULogId ON ULogMessageFormat(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogMessageFormatField_MessageId ON ULogMessageFormatField(MessageId);
CREATE INDEX IF NOT EXISTS btree_ULogMessageLogging_ULogId ON ULogMessageLogging(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogMessageLoggingTagged_ULogId ON ULogMessageLoggingTagged(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogMessageInfo_ULogId ON ULogMessageInfo(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogMessageInfoMultiple_ULogId ON ULogMessageInfoMultiple(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogMessageInfoMultipleList_MessageId ON ULogMessageInfoMultipleList(MessageId);
CREATE INDEX IF NOT EXISTS btree_ULogMessageInfoMultipleListElement_ListId ON ULogMessageInfoMultipleListElement(ListId);
CREATE INDEX IF NOT EXISTS btree_ULogInitialParameter_ULogId ON ULogInitialParameter(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogChangedParameter_ULogId ON ULogChangedParameter(ULogId);
CREATE INDEX IF NOT EXISTS btree_ULogDefaultParameter_ULogId ON ULogDefaultParameter(ULogId);
COMMIT;

18 |
--------------------------------------------------------------------------------
/pyulog/ulog2csv.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 |
3 | """
4 | Convert a ULog file into CSV file(s)
5 | """
6 |
7 | import argparse
8 | import os
9 | import numpy as np
10 |
11 | from .core import ULog
12 |
13 | #pylint: disable=too-many-locals, invalid-name, consider-using-enumerate
14 |
def main():
    """Command line interface for ULog -> CSV conversion.

    Parses the CLI arguments, creates the output directory if needed and
    delegates to convert_ulog2csv().
    """

    parser = argparse.ArgumentParser(description='Convert ULog to CSV')
    parser.add_argument('filename', metavar='file.ulg', help='ULog input file')

    parser.add_argument(
        '-m', '--messages', dest='messages',
        help=("Only consider given messages. Must be a comma-separated list of"
              " names, like 'sensor_combined,vehicle_gps_position'"))
    parser.add_argument('-d', '--delimiter', dest='delimiter', action='store',
                        help="Use delimiter in CSV (default is ',')", default=',')


    parser.add_argument('-o', '--output', dest='output', action='store',
                        help='Output directory (default is same as input file)',
                        metavar='DIR')
    parser.add_argument('-i', '--ignore', dest='ignore', action='store_true',
                        help='Ignore string parsing exceptions', default=False)

    parser.add_argument(
        '-ts', '--time_s', dest='time_s', type = int,
        help="Only convert data after this timestamp (in seconds)")

    parser.add_argument(
        '-te', '--time_e', dest='time_e', type=int,
        help="Only convert data upto this timestamp (in seconds)")

    args = parser.parse_args()

    if args.output and not os.path.isdir(args.output):
        print('Creating output directory {:}'.format(args.output))
        # makedirs (instead of mkdir) so that nested output paths work too
        os.makedirs(args.output)

    convert_ulog2csv(args.filename, args.messages, args.output, args.delimiter,
                     args.time_s, args.time_e, args.ignore)
51 |
52 |
def convert_ulog2csv(ulog_file_name, messages, output, delimiter, time_s, time_e,
                     disable_str_exceptions=False):
    """
    Converts a ULog file to one CSV file per dataset.

    :param ulog_file_name: The ULog filename to open and read
    :param messages: A comma-separated string of message names (None: all)
    :param output: Output directory path (None: next to the input file)
    :param delimiter: CSV delimiter
    :param time_s: Offset time for conversion in seconds
    :param time_e: Limit until time for conversion in seconds

    :return: None
    """

    msg_filter = messages.split(',') if messages else None

    ulog = ULog(ulog_file_name, msg_filter, disable_str_exceptions)
    data = ulog.data_list

    output_file_prefix = ulog_file_name
    # strip '.ulg'
    if output_file_prefix.lower().endswith('.ulg'):
        output_file_prefix = output_file_prefix[:-4]

    # write to different output path?
    if output:
        base_name = os.path.basename(output_file_prefix)
        output_file_prefix = os.path.join(output, base_name)

    for d in data:
        fmt = '{0}_{1}_{2}.csv'
        output_file_name = fmt.format(output_file_prefix, d.name.replace('/', '_'), d.multi_id)
        with open(output_file_name, 'w', encoding='utf-8') as csvfile:

            # use same field order as in the log, except for the timestamp
            data_keys = [f.field_name for f in d.field_data]
            data_keys.remove('timestamp')
            data_keys.insert(0, 'timestamp')  # we want timestamp at first position

            # write the header
            csvfile.write(delimiter.join(data_keys) + '\n')

            timestamps = d.data['timestamp']

            # first row with timestamp >= time_s; if time_s lies beyond the
            # data, write no rows instead of raising IndexError
            time_s_i = 0
            if time_s:
                start_matches = np.where(timestamps >= time_s * 1e6)[0]
                time_s_i = start_matches[0] if len(start_matches) > 0 else len(timestamps)

            # first row with timestamp >= time_e (exclusive end); all
            # remaining rows if time_e is not given or lies beyond the data
            time_e_i = len(timestamps)
            if time_e:
                end_matches = np.where(timestamps >= time_e * 1e6)[0]
                if len(end_matches) > 0:
                    time_e_i = end_matches[0]

            # write the data rows; joining each row at once is considerably
            # faster than writing every value separately
            for i in range(time_s_i, time_e_i):
                csvfile.write(delimiter.join(str(d.data[k][i]) for k in data_keys) + '\n')
118 |
--------------------------------------------------------------------------------
/pyulog/ulog2kml.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | Convert a ULog file into a KML file (positioning information)
4 | """
5 |
6 | import argparse
7 | import simplekml # pylint: disable=import-error
8 |
9 | from .core import ULog
10 |
11 |
12 | #pylint: disable=too-many-locals, invalid-name, consider-using-enumerate, too-many-arguments
13 | #pylint: disable=unused-variable
14 |
15 |
def main():
    """Command line interface"""

    arg_parser = argparse.ArgumentParser(description='Convert ULog to KML')
    arg_parser.add_argument('filename', metavar='file.ulg', help='ULog input file')

    arg_parser.add_argument('-o', '--output', dest='output_filename',
                            help="output filename", default='track.kml')
    arg_parser.add_argument('--topic', dest='topic_name',
                            help="topic name with position data (default=vehicle_gps_position)",
                            default='vehicle_gps_position')
    arg_parser.add_argument('--camera-trigger', dest='camera_trigger',
                            help="Camera trigger topic name (e.g. camera_capture)",
                            default=None)
    arg_parser.add_argument('-i', '--ignore', dest='ignore', action='store_true',
                            help='Ignore string parsing exceptions', default=False)

    args = arg_parser.parse_args()

    # hand everything over to the converter
    convert_ulog2kml(args.filename, args.output_filename,
                     position_topic_name=args.topic_name,
                     camera_trigger_topic_name=args.camera_trigger,
                     disable_str_exceptions=args.ignore)
39 |
40 | # alternative example call:
41 | # convert_ulog2kml(args.filename, 'test.kml', ['vehicle_global_position',
42 | # 'vehicle_gps_position'], [_kml_default_colors, lambda x: simplekml.Color.green])
43 |
44 |
def _kml_default_colors(x):
    """ Flight mode to color conversion.

    :param x: flight mode index (int); -1 (or any negative value) is clamped to 0
    :return: a simplekml color constant
    """
    x = max([x, 0])
    colors_arr = [simplekml.Color.red, simplekml.Color.green, simplekml.Color.blue,
                  simplekml.Color.violet, simplekml.Color.yellow, simplekml.Color.orange,
                  simplekml.Color.burlywood, simplekml.Color.azure, simplekml.Color.lightblue,
                  simplekml.Color.lawngreen, simplekml.Color.indianred, simplekml.Color.hotpink,
                  simplekml.Color.bisque, simplekml.Color.cyan, simplekml.Color.darksalmon,
                  simplekml.Color.deepskyblue, simplekml.Color.lime, simplekml.Color.orchid]
    # wrap around instead of raising IndexError for flight modes beyond the palette
    return colors_arr[x % len(colors_arr)]
55 |
56 |
57 |
def convert_ulog2kml(ulog_file_name, output_file_name, position_topic_name=
                     'vehicle_gps_position', colors=_kml_default_colors, altitude_offset=0,
                     minimum_interval_s=0.1, style=None, camera_trigger_topic_name=None,
                     disable_str_exceptions=False):
    """
    Convert a ULog file to a KML file.

    :param ulog_file_name: The ULog filename to open and read
    :param output_file_name: KML Output file name
    :param position_topic_name: either name of a topic (must have 'lon', 'lat' &
           'alt' fields), or a list of topic names
    :param colors: lambda function with flight mode (int) (or -1) as input and
           returns a color (eg 'fffff8f0') (or list of lambda functions if
           multiple position_topic_name's)
    :param altitude_offset: add this offset to the altitude [m]
    :param minimum_interval_s: minimum time difference between two datapoints
           (drop if more points)
    :param style: dictionary with rendering options:
           'extrude': Bool
           'line_width': int
    :param camera_trigger_topic_name: name of the camera trigger topic (must
           have 'lon', 'lat' & 'seq')

    :return: None
    """

    default_style = {
        'extrude': False,
        'line_width': 3
    }

    # overlay the user-provided style options on top of the defaults
    used_style = default_style
    if style is not None:
        for key in style:
            used_style[key] = style[key]


    # normalize to parallel lists of topics and color functions
    if not isinstance(position_topic_name, list):
        position_topic_name = [position_topic_name]
        colors = [colors]

    kml = simplekml.Kml()
    # only load the topics we actually need from the log
    load_topic_names = position_topic_name + ['vehicle_status']
    if camera_trigger_topic_name is not None:
        load_topic_names.append(camera_trigger_topic_name)
    ulog = ULog(ulog_file_name, load_topic_names, disable_str_exceptions)

    # get flight modes; the trailing (last_timestamp, -1) entry closes the
    # final flight-mode interval
    try:
        cur_dataset = ulog.get_dataset('vehicle_status')
        flight_mode_changes = cur_dataset.list_value_changes('nav_state')
        flight_mode_changes.append((ulog.last_timestamp, -1))
    except (KeyError, IndexError) as error:
        # vehicle_status not in the log: render without flight-mode coloring
        flight_mode_changes = []

    # add the graphs
    for topic, cur_colors in zip(position_topic_name, colors):
        _kml_add_position_data(kml, ulog, topic, cur_colors, used_style,
                               altitude_offset, minimum_interval_s, flight_mode_changes)

    # camera triggers
    _kml_add_camera_triggers(kml, ulog, camera_trigger_topic_name, altitude_offset)

    kml.save(output_file_name)
122 |
123 |
def _kml_add_camera_triggers(kml, ulog, camera_trigger_topic_name, altitude_offset):
    """
    Add one KML point per camera trigger to the map.
    Does nothing when the topic is not present in the log.
    """

    topic_instance = 0
    matching_datasets = [d for d in ulog.data_list
                         if d.name == camera_trigger_topic_name and d.multi_id == topic_instance]
    if not matching_datasets:
        return

    trigger_data = matching_datasets[0].data
    pos_lon = trigger_data['lon']
    pos_lat = trigger_data['lat']
    pos_alt = trigger_data['alt']
    sequence = trigger_data['seq']

    for i, lon in enumerate(pos_lon):
        point = kml.newpoint(name='Camera Trigger '+str(sequence[i]))
        point.coords = [(lon, pos_lat[i], pos_alt[i] + altitude_offset)]
        # Balloons instead of text does not work
        #pnt.style.balloonstyle.text = 'Camera Trigger '+str(sequence[i])
147 |
148 |
def _kml_add_position_data(kml, ulog, position_topic_name, colors, style,
                           altitude_offset=0, minimum_interval_s=0.1,
                           flight_mode_changes=None):
    """
    Add the position data of one topic as (flight-mode colored) line strings.

    :param kml: simplekml.Kml object to add the line strings to
    :param ulog: ULog instance
    :param position_topic_name: topic with 'lon', 'lat' & 'alt' fields
    :param colors: callable mapping a flight mode (int) to a KML color
    :param style: dict with 'extrude' (bool) and 'line_width' (int)
    :param altitude_offset: offset added to the altitude [m]
    :param minimum_interval_s: minimum time between two used data points
    :param flight_mode_changes: list of (timestamp, flight mode) tuples
    :raises KeyError: if the topic is not found in the log
    """

    data = ulog.data_list
    topic_instance = 0
    if flight_mode_changes is None:
        flight_mode_changes = []

    cur_dataset = [elem for elem in data
                   if elem.name == position_topic_name and elem.multi_id == topic_instance]
    if len(cur_dataset) == 0:
        raise KeyError(position_topic_name+' not found in data')

    cur_dataset = cur_dataset[0]


    pos_lon = cur_dataset.data['lon']
    pos_lat = cur_dataset.data['lat']
    pos_alt = cur_dataset.data['alt']
    pos_t = cur_dataset.data['timestamp']

    if 'fix_type' in cur_dataset.data:
        indices = cur_dataset.data['fix_type'] > 2 # use only data with a fix
        pos_lon = pos_lon[indices]
        pos_lat = pos_lat[indices]
        pos_alt = pos_alt[indices]
        pos_t = pos_t[indices]

    # scale if it's an integer type
    lon_type = [f.type_str for f in cur_dataset.field_data if f.field_name == 'lon']
    if len(lon_type) > 0 and lon_type[0] == 'int32_t':
        pos_lon = pos_lon / 1e7 # to degrees
        pos_lat = pos_lat / 1e7
        pos_alt = pos_alt / 1e3 # to meters


    current_flight_mode = 0
    current_flight_mode_idx = 0
    if len(flight_mode_changes) > 0:
        current_flight_mode = flight_mode_changes[0][1]


    def create_linestring():
        """ create a new kml linestring and set rendering options """
        name = position_topic_name + ":" + str(current_flight_mode)
        new_linestring = kml.newlinestring(name=name, altitudemode='absolute')

        # set rendering options
        if style['extrude']:
            new_linestring.extrude = 1
        new_linestring.style.linestyle.color = colors(current_flight_mode)

        new_linestring.style.linestyle.width = style['line_width']
        return new_linestring

    current_kml_linestring = create_linestring()

    last_t = 0
    for i in range(len(pos_lon)):
        cur_t = pos_t[i]

        # rate-limit the output: only add a point after minimum_interval_s
        if (cur_t - last_t)/1e6 > minimum_interval_s: # assume timestamp is in [us]
            pos_data = [pos_lon[i], pos_lat[i], pos_alt[i] + altitude_offset]
            current_kml_linestring.coords.addcoordinates([pos_data])
            last_t = cur_t

        # flight mode change?
        while current_flight_mode_idx < len(flight_mode_changes)-1 and \
                flight_mode_changes[current_flight_mode_idx+1][0] <= cur_t:
            current_flight_mode_idx += 1
            current_flight_mode = flight_mode_changes[current_flight_mode_idx][1]
            # start a new linestring with the new flight-mode color, and repeat
            # the last point so the segments connect.
            # NOTE(review): 'pos_data' is unbound here if a flight-mode change
            # occurs before the first point was added above -- confirm this
            # cannot happen in practice (last_t starts at 0).
            current_kml_linestring = create_linestring()

            current_kml_linestring.coords.addcoordinates([pos_data])
224 |
225 |
226 |
--------------------------------------------------------------------------------
/pyulog/ulog2rosbag.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 |
3 | """
4 | Convert a ULog file into rosbag file(s)
5 | """
6 |
7 | from collections import defaultdict
8 | import argparse
9 | import re
10 | import rospy # pylint: disable=import-error
11 | import rosbag # pylint: disable=import-error
12 | from px4_msgs import msg as px4_msgs # pylint: disable=import-error
13 |
14 | from .core import ULog
15 |
16 | #pylint: disable=too-many-locals, invalid-name
17 |
def main():
    """Command line interface"""

    arg_parser = argparse.ArgumentParser(description='Convert ULog to rosbag')
    arg_parser.add_argument('filename', metavar='file.ulg', help='ULog input file')
    arg_parser.add_argument('bag', metavar='file.bag', help='rosbag output file')

    arg_parser.add_argument(
        '-m', '--messages', dest='messages',
        help=("Only consider given messages. Must be a comma-separated list of"
              " names, like 'sensor_combined,vehicle_gps_position'"))
    arg_parser.add_argument('-i', '--ignore', dest='ignore', action='store_true',
                            help='Ignore string parsing exceptions', default=False)

    args = arg_parser.parse_args()

    # hand everything over to the converter
    convert_ulog2rosbag(args.filename, args.bag, args.messages, args.ignore)
36 |
37 | # https://stackoverflow.com/questions/19053707/converting-snake-case-to-lower-camel-case-lowercamelcase
def to_camel_case(snake_str):
    """ Convert a snake_case string to CamelCase.

    str.title() capitalizes the letter after every non-letter character, so
    title-casing the whole string and stripping the underscores is equivalent
    to title-casing each underscore-separated component.
    """
    return snake_str.title().replace('_', '')
42 |
def convert_ulog2rosbag(ulog_file_name, rosbag_file_name, messages, disable_str_exceptions=False):
    """
    Convert a ULog file to a rosbag file.

    :param ulog_file_name: The ULog filename to open and read
    :param rosbag_file_name: The rosbag filename to open and write
    :param messages: A comma-separated string of message names (None: all)

    :return: None
    """

    # matches array fields, e.g. 'accelerometer_m_s2[0]' -> ('accelerometer_m_s2', '0')
    array_pattern = re.compile(r"(.*?)\[(.*?)\]")
    msg_filter = messages.split(',') if messages else None

    ulog = ULog(ulog_file_name, msg_filter, disable_str_exceptions)
    data = ulog.data_list

    # collect which multi-instance ids exist per topic name
    multiids = defaultdict(set)
    for d in data:
        multiids[d.name].add(d.multi_id)

    with rosbag.Bag(rosbag_file_name, 'w') as bag:
        items = []
        for d in data:
            # only append the instance id to the topic name when there is
            # more than the single instance 0
            if multiids[d.name] == {0}:
                topic = "/px4/{}".format(d.name)
            else:
                topic = "/px4/{}_{}".format(d.name, d.multi_id)
            # look up the ROS message class matching the ULog topic name
            msg_type = getattr(px4_msgs, to_camel_case(d.name))

            for i in range(len(d.data['timestamp'])):
                msg = msg_type()
                for f in d.field_data:
                    result = array_pattern.match(f.field_name)
                    value = d.data[f.field_name][i]
                    if result:
                        # array element: assign into the indexed position
                        field, array_index = result.groups()
                        array_index = int(array_index)
                        if isinstance(getattr(msg, field), bytes):
                            # bytes is immutable: rebuild via a bytearray
                            attr = bytearray(getattr(msg, field))
                            attr[array_index] = value
                            setattr(msg, field, bytes(attr))
                        else:
                            getattr(msg, field)[array_index] = value
                    else:
                        setattr(msg, f.field_name, value)
                # ULog timestamps are in microseconds -> convert to nanoseconds
                ts = rospy.Time(nsecs=d.data['timestamp'][i]*1000)
                items.append((topic, msg, ts))
        # sort by timestamp so the bag is written in time order
        items.sort(key=lambda x: x[2])
        for topic, msg, ts in items:
            bag.write(topic, msg, ts)
94 |
95 |
--------------------------------------------------------------------------------
/run_tests.sh:
--------------------------------------------------------------------------------
#! /bin/bash

# Run the unit tests first; lint the package and test sources only if the
# tests pass. 'set -e' aborts with the failing command's exit status, which
# matches the original '&&' chaining behavior.
set -e
pytest test
pylint pyulog/*.py test/*.py
4 |
5 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 |
2 | # See the docstring in versioneer.py for instructions. Note that you must
3 | # re-run 'versioneer.py setup' after changing this section, and commit the
4 | # resulting files.
5 |
6 | [versioneer]
7 | VCS = git
8 | style = pep440
9 | versionfile_source = pyulog/_version.py
10 | versionfile_build = pyulog/_version.py
11 | tag_prefix = v
12 | parentdir_prefix = pyulog-
13 |
14 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Python log parser for ULog.

This module allows you to parse ULog files, which are used within the PX4
autopilot middleware.

The file format is documented on https://docs.px4.io/master/en/dev_log/ulog_file_format.html

"""

import versioneer

from setuptools import setup, find_packages

# First docstring line becomes the short description; the rest (after the
# blank line) becomes the long description passed to setup() below.
DOCLINES = __doc__.split("\n")

# PyPI trove classifiers (https://pypi.org/classifiers/), one per line.
CLASSIFIERS = """\
Development Status :: 1 - Planning
Intended Audience :: Science/Research
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Other
Topic :: Software Development
Topic :: Scientific/Engineering :: Artificial Intelligence
Topic :: Scientific/Engineering :: Mathematics
Topic :: Scientific/Engineering :: Physics
Operating System :: Microsoft :: Windows
Operating System :: POSIX
Operating System :: Unix
Operating System :: MacOS
"""

# pylint: disable=invalid-name

setup(
    name='pyulog',
    maintainer="James Goppert",
    maintainer_email="james.goppert@gmail.com",
    description=DOCLINES[0],
    long_description="\n".join(DOCLINES[2:]),
    url='https://github.com/PX4/pyulog',
    author='Beat Kueng',
    author_email='beat-kueng@gmx.net',
    download_url='https://github.com/PX4/pyulog',
    license='BSD 3-Clause',
    # Drop empty lines from the classifier string above.
    classifiers=[_f for _f in CLASSIFIERS.split('\n') if _f],
    platforms=["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
    install_requires=[
        # numpy 1.25+ dropped Python 3.8 support, hence the version split.
        "numpy < 1.25; python_version < '3.9'",
        "numpy >= 1.25; python_version >= '3.9'",
    ],
    tests_require=['pytest', 'ddt'],
    # Console scripts installed by the package, mapped to module entry points.
    entry_points = {
        'console_scripts': [
            'ulog_extract_gps_dump=pyulog.extract_gps_dump:main',
            'ulog_info=pyulog.info:main',
            'ulog_messages=pyulog.messages:main',
            'ulog_params=pyulog.params:main',
            'ulog2csv=pyulog.ulog2csv:main',
            'ulog2kml=pyulog.ulog2kml:main',
            'ulog2rosbag=pyulog.ulog2rosbag:main',
            'ulog_migratedb=pyulog.migrate_db:main',
        ],
    },
    packages=find_packages(),
    # Version string and build commands are managed by versioneer from git tags.
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    include_package_data=True,
)
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PX4/pyulog/1de773d064d5dfbbdda55fbf129f57a6c4b70d0c/test/__init__.py
--------------------------------------------------------------------------------
/test/sample.ulg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PX4/pyulog/1de773d064d5dfbbdda55fbf129f57a6c4b70d0c/test/sample.ulg
--------------------------------------------------------------------------------
/test/sample_appended.ulg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PX4/pyulog/1de773d064d5dfbbdda55fbf129f57a6c4b70d0c/test/sample_appended.ulg
--------------------------------------------------------------------------------
/test/sample_appended_info.txt:
--------------------------------------------------------------------------------
1 | Logging start time: 0:00:05, duration: 0:01:54
2 | Dropouts: count: 1, total duration: 0.0 s, max: 10 ms, mean: 10 ms
3 | Info Messages:
4 | perf_counter_preflight-00: navigator: 3 events, 80us elapsed, 26us avg, min 25us max 28us 1.528us rms
5 | perf_counter_preflight-01: mc_att_control: 766 events, 38087us elapsed, 49us avg, min 23us max 395us 32.174us rms
6 | perf_counter_preflight-02: logger_sd_fsync: 0 events, 0us elapsed, 0us avg, min 0us max 0us 0.000us rms
7 | perf_counter_preflight-03: logger_sd_write: 3 events, 72442us elapsed, 24147us avg, min 10us max 36356us 20904.014us rms
8 | perf_counter_preflight-04: mavlink_txe: 226 events
9 | perf_counter_preflight-05: mavlink_el: 1016 events, 163693us elapsed, 161us avg, min 84us max 2478us 191.598us rms
10 | perf_counter_preflight-06: mavlink_txe: 0 events
11 | perf_counter_preflight-07: mavlink_el: 286 events, 33394us elapsed, 116us avg, min 46us max 1851us 166.045us rms
12 | perf_counter_preflight-08: mavlink_txe: 0 events
13 | perf_counter_preflight-09: mavlink_el: 318 events, 48587us elapsed, 152us avg, min 66us max 2327us 259.293us rms
14 | perf_counter_preflight-10: mavlink_txe: 0 events
15 | perf_counter_preflight-11: mavlink_el: 1030 events, 214017us elapsed, 207us avg, min 79us max 4163us 310.871us rms
16 | perf_counter_preflight-12: ctl_lat: 321 events, 13187us elapsed, 41us avg, min 38us max 111us 11.183us rms
17 | perf_counter_preflight-13: stack_check: 7 events, 69us elapsed, 9us avg, min 2us max 16us 4.488us rms
18 | perf_counter_preflight-14: sensors: 826 events, 93853us elapsed, 113us avg, min 65us max 5118us 179.764us rms
19 | perf_counter_preflight-15: ctrl_latency: 321 events, 40037us elapsed, 124us avg, min 103us max 3022us 166.815us rms
20 | perf_counter_preflight-16: mpu9250_dupe: 898 events
21 | perf_counter_preflight-17: mpu9250_reset: 0 events
22 | perf_counter_preflight-18: mpu9250_good_trans: 3443 events
23 | perf_counter_preflight-19: mpu9250_bad_reg: 0 events
24 | perf_counter_preflight-20: mpu9250_bad_trans: 0 events
25 | perf_counter_preflight-21: mpu9250_read: 4342 events, 269357us elapsed, 62us avg, min 41us max 91us 13.632us rms
26 | perf_counter_preflight-22: mpu9250_gyro_read: 0 events
27 | perf_counter_preflight-23: mpu9250_acc_read: 2 events
28 | perf_counter_preflight-24: mpu9250_mag_duplicates: 3066 events
29 | perf_counter_preflight-25: mpu9250_mag_overflows: 0 events
30 | perf_counter_preflight-26: mpu9250_mag_overruns: 51 events
31 | perf_counter_preflight-27: mpu9250_mag_errors: 0 events
32 | perf_counter_preflight-28: mpu9250_mag_reads: 0 events
33 | perf_counter_preflight-29: adc_samples: 3024 events, 8046us elapsed, 2us avg, min 2us max 3us 0.474us rms
34 | perf_counter_preflight-30: ms5611_com_err: 0 events
35 | perf_counter_preflight-31: ms5611_measure: 321 events, 5603us elapsed, 17us avg, min 8us max 679us 54.355us rms
36 | perf_counter_preflight-32: ms5611_read: 320 events, 23168us elapsed, 72us avg, min 13us max 543us 49.197us rms
37 | perf_counter_preflight-33: dma_alloc: 4 events
38 | perf_top_preflight-00: PID COMMAND CPU(ms) CPU(%) USED/STACK PRIO(BASE) STATE
39 | perf_top_preflight-01: 0 Idle Task 2922 53.339 596/ 748 0 ( 0) READY
40 | perf_top_preflight-02: 1 hpwork 207 4.486 928/ 1780 192 (192) w:sig
41 | perf_top_preflight-03: 2 lpwork 3 0.099 640/ 1780 50 ( 50) w:sig
42 | perf_top_preflight-04: 3 init 4816 0.000 1720/ 2580 100 (100) w:sem
43 | perf_top_preflight-05: 228 px4flow 1 0.000 728/ 1164 100 (100) w:sig
44 | perf_top_preflight-06: 104 gps 10 0.099 1032/ 1524 220 (220) w:sem
45 | perf_top_preflight-07: 108 dataman 1 0.000 720/ 1180 90 ( 90) w:sem
46 | perf_top_preflight-08: 152 sensors 161 3.389 1440/ 1980 250 (250) READY
47 | perf_top_preflight-09: 154 commander 59 2.093 2776/ 3652 140 (140) w:sig
48 | perf_top_preflight-10: 169 mavlink_if0 261 6.281 1648/ 2380 100 (100) READY
49 | perf_top_preflight-11: 170 mavlink_rcv_if0 14 0.299 1320/ 2140 175 (175) w:sem
50 | perf_top_preflight-12: 175 mavlink_if1 63 1.395 1616/ 2420 100 (100) w:sig
51 | perf_top_preflight-13: 176 mavlink_rcv_if1 16 0.398 1496/ 2140 175 (175) w:sem
52 | perf_top_preflight-14: 187 mavlink_if2 48 1.096 1632/ 2388 100 (100) w:sig
53 | perf_top_preflight-15: 188 mavlink_rcv_if2 15 0.299 1316/ 2140 175 (175) w:sem
54 | perf_top_preflight-16: 217 frsky_telemetry 0 0.000 544/ 1188 200 (200) w:sem
55 | perf_top_preflight-17: 254 log_writer_file 13 0.997 544/ 1060 60 ( 60) w:sem
56 | perf_top_preflight-18: 235 mavlink_if3 249 4.287 1576/ 2388 100 (100) READY
57 | perf_top_preflight-19: 237 mavlink_rcv_if3 62 2.093 1316/ 2140 175 (175) READY
58 | perf_top_preflight-20: 253 logger 115 6.779 3104/ 3532 250 (250) RUN
59 | perf_top_preflight-21: 341 commander_low_prio 0 0.000 592/ 2996 50 ( 50) w:sem
60 | perf_top_preflight-22: 295 ekf2 331 9.870 5056/ 5780 250 (250) w:sem
61 | perf_top_preflight-23: 305 mc_att_control 97 2.991 1136/ 1676 250 (250) READY
62 | perf_top_preflight-24: 307 mc_pos_control 12 0.299 552/ 1876 250 (250) w:sem
63 | perf_top_preflight-25: 310 navigator 4 0.000 904/ 1772 105 (105) w:sem
64 | perf_top_preflight-26:
65 | perf_top_preflight-27: Processes: 25 total, 7 running, 18 sleeping
66 | perf_top_preflight-28: CPU usage: 46.66% tasks, 0.00% sched, 53.34% idle
67 | perf_top_preflight-29: DMA Memory: 5120 total, 1536 used 1536 peak
68 | perf_top_preflight-30: Uptime: 6.236s total, 2.922s idle
69 | sys_mcu: STM32F42x, rev. 3
70 | sys_name: PX4
71 | sys_os_name: NuttX
72 | sys_os_ver: 8b81cf5c7ece0c228eaaea3e9d8e667fc4d21a06
73 | sys_os_ver_release: 192
74 | sys_toolchain: GNU GCC
75 | sys_toolchain_ver: 5.4.1 20160919 (release) [ARM/embedded-5-branch revision 240496]
76 | sys_uuid: 004F00413335510D30383336
77 | time_ref_utc: 0
78 | ver_hw: PX4FMU_V4
79 | ver_sw: f54a6c2999e1e2fcbf56dd89de06b615b4186a6e
80 | ver_sw_branch: ulog_crash_dump
81 | ver_sw_release: 17170432
82 | Info Multiple Messages:
83 | hardfault_plain: [['[hardfault_log] -- 2000-01-01-00:06:01 Begin Fault Log --\nSystem fault Occurred on: 2000-01-01-00:06:01\n Type:Hard Fault in file:armv7-m/up_hardfault.c at line: 171 running task: hardfault_log\n FW git-hash: f54a6c2999e1e2fcbf56dd89de06b615b4186a6e\n Build datetime: Jul 3 2017 17:04:33\n Build url: localhost \n Processor registers: from 0x2002b8e4\n r0:0x00000000 r1:0x00000000 r2:0x00000001 r3:0xe000ed14 r4:0x00000000 r5:0x20002854 r6:0x2002bba6 r7:0x2002bb8c\n r8:0x00000000 r9:0x00000000 r10:0x00000000 r11:0x00000000 r12:0x00000000 sp:0x2002b9b8 lr:0x080353ad pc:0x080353dc\n xpsr:0x21000000 basepri:0x000000f0 control:0x00000004\n exe return:0xffffffe9\n IRQ stack: \n top: 0x200068f0\n sp: 0x200068a0 Valid\n bottom: 0x20006604\n size: 0x000002ec\n used: 000000e0\n User stack: \n top: 0x2002bb88\n sp: 0x2002b9b8 Valid\n bottom: 0x2002b3ac\n size: 0x000007dc\n used: 000007dc\nInterrupt sp memory region, stack pointer lies within stack\n0x20006952 0x00000000\n0x20006951 0x00000000\n0x20006950 0x00000000\n0x2000694f 0x00000000\n0x2000694e 0x00000000\n0x2000694d 0x00000000\n0x2000694c 0x00000000\n0x2000694b 0x00000000\n0x2000694a 0x00000000\n0x20006949 0x00000000\n0x20006948 0x00000000\n0x20006947 0x00000000\n0x20006946 0x00000000\n0x20006945 0x00000000\n0x20006944 0x00000000\n0x20006943 0x00000000\n0x20006942 0x00000000\n0x20006941 0x00000000\n0x20006940 0x00000000\n0x2000693f 0x00000000\n0x2000693e 0x00000000\n0x2000693d 0x00000000\n0x2000693c 0x00000000\n0x2000693b 0x00000000\n0x2000693a 0x00000000\n0x20006939 0x2001ba90\n0x20006938 0x00000000\n0x20006937 0x00000000\n0x20006936 0x2001bac0\n0x20006935 0x00000000\n0x20006934 0x20007758\n0x20006933 0x20007640\n0x20006932 0x20007608\n0x20006931 0x20007590\n0x20006930 0x20007790\n0x2000692f 0x20007790\n0x2000692e 0x200078a8\n0x2000692d 0x20007794\n0x2000692c 0x00000000\n0x2000692b 0x00000603\n0x2000692a 0x100061b0\n0x20006929 0x10006060\n0x20006928 0x20006990\n0x20006927 0x10005460\n0x20006926 
0x00400003\n0x20006925 0x0813b450\n0x20006924 0x00000002\n0x20006923 0x00000000\n0x20006922 0x00000000\n0x20006921 0x20006990\n0x20006920 0x00000000\n0x2000691f 0x00400002\n0x2000691e 0x0813b450\n0x2000691d 0x00000001\n0x2000691c 0x00000000\n0x2000691b 0x00000000\n0x2000691a 0x20006990\n0x20006919 0x00000000\n0x20006918 0x00400001\n0x20006917 0x0813b450\n0x20006916 0x00000000\n0x20006915 0x00000000\n0x20006914 0x00000000\n0x20006913 0x20006990\n0x20006912 0x20006990\n0x20006911 0x00400000\n0x20006910 0x0813b450\n0x2000690f 0x00001003\n0x2000690e 0x00000000\n0x2000690d 0x00000000\n0x2000690c 0x20006990\n0x2000690b 0x00000000\n0x2000690a 0x00400083\n0x20006909 0x0813b450\n0x20006908 0x00001602\n0x20006907 0x10006220\n0x20006906 0x100062b0\n0x20006905 0x20006990\n0x20006904 0x10005460\n0x20006903 0x00400082\n0x20006902 0x0813b450\n0x20006901 0x00001301\n0x20006900 0x00000000\n0x200068ff 0x00000000\n0x200068fe 0x20006990\n0x200068fd 0x10005460\n0x200068fc 0x00400081\n0x200068fb 0x0813b450\n0x200068fa 0x00001000\n0x200068f9 0x00000000\n0x200068f8 0x00000000\n0x200068f7 0x20006990\n0x200068f6 0x10005460\n0x200068f5 0x00400080\n0x200068f4 0x0813b450\n0x200068f3 0x00000000\n0x200068f2 0x00000000\n0x200068f1 0x00000000\n0x200068f0 0x00000000<-- Interrupt sp top\n0x200068ef 0x00000000\n0x200068ee 0x00000000\n0x200068ed 0x00000000\n0x200068ec 0x00000000\n0x200068eb 0x00000000\n0x200068ea 0x00000000\n0x200068e9 0x00000000\n0x200068e8 0x00000000\n0x200068e7 0x00000000\n0x200068e6 0x00000000\n0x200068e5 0x00000063\n0x200068e4 0x08000000\n0x200068e3 0x00258000\n0x200068e2 0x07000000\n0x200068e1 0x0020210e\n0x200068e0 0x0e00020c\n0x200068df 0x10005fa4\n0x200068de 0x00000002\n0x200068dd 0x200069f0\n0x200068dc 0x0813b434\n0x200068db 0x00000000\n0x200068da 0x00000000\n0x200068d9 0x00000000\n0x200068d8 0x00000000\n0x200068d7 0x00000000\n0x200068d6 0x00000000\n0x200068d5 0x00000000\n0x200068d4 0x00000000\n0x200068d3 0x00000000\n0x200068d2 0x00000000\n0x200068d1 0x00000000\n0x200068d0 
0x00000000\n0x200068cf 0x00000000\n0x200068ce 0x00000000\n0x200068cd 0x00000000\n0x200068cc 0x00000000\n0x200068cb 0x00000000\n0x200068ca 0x00000000\n0x200068c9 0x10005130\n0x200068c8 0x00000001\n0x200068c7 0x20007570\n0x200068c6 0x20007500\n0x200068c5 0x200074e0\n0x200068c4 0x200073f0\n0x200068c3 0x200071a8\n0x200068c2 0x20007040\n0x200068c1 0x200073cc\n0x200068c0 0x200072a0\n0x200068bf 0x20007278\n0x200068be 0x200071d0\n0x200068bd 0x00000005\n0x200068bc 0x10004460\n0x200068bb 0x0000ffff\n0x200068ba 0x00000000\n0x200068b9 0x00000000\n0x200068b8 0x00000000\n0x200068b7 0x00000000\n0x200068b6 0x00000001\n0x200068b5 0x10000010\n0x200068b4 0x00000000\n0x200068b3 0x080f32b7\n0x200068b2 0x2002bb8c\n0x200068b1 0x2002bba6\n0x200068b0 0x20002854\n0x200068af 0x2002b8e4\n0x200068ae 0x000000f0\n0x200068ad 0x080fbca5\n0x200068ac 0x080f3331\n0x200068ab 0x080f3353\n0x200068aa 0x00000000\n0x200068a9 0x00000003\n0x200068a8 0x080f334b\n0x200068a7 0x200068c8\n0x200068a6 0x080353dc\n0x200068a5 0x080353ad\n0x200068a4 0x2002b9b8\n0x200068a3 0x00000000\n0x200068a2 0x00000000\n0x200068a1 0x00000000\n0x200068a0 0x000000f0<-- Interrupt sp\n0x2000689f 0x080f309d\n0x2000689e 0x000000ab\n0x2000689d 0x0813ad13\n0x2000689c 0x20003d88\n0x2000689b 0x20004628\n0x2000689a 0x00000000\n0x20006899 0x20004628\n0x20006898 0x000000ab\n0x20006897 0x08109da9\n0x20006896 0x20000010\n0x20006895 0x080f3429\n0x20006894 0x2002b9b8\n0x20006893 0x2001bb00\n0x20006892 0x080353ad\n0x20006891 0x0000015f\n0x20006890 0x200068a0\n0x2000688f 0x2002b8e4\n0x2000688e 0x2001bb00\n0x2000688d 0x20000000\n0x2000688c 0x20000000\n0x2000688b 0x080f0571\n0x2000688a 0x20006860\n0x20006889 0x20006860\n0x20006888 0x00000000\n0x20006887 0x080f0553\n0x20006886 0x0000015f\n0x20006885 0x2001bb00\n0x20006884 0x00000000\n0x20006883 0x0000003b\n0x20006882 0x080ff849\n0x20006881 0x080fe621\n0x20006880 0x080ff849\n0x2000687f 0x080f8ee1\n0x2000687e 0x2001bb00\n0x2000687d 0x080353ad\n0x2000687c 0x000000ab\n0x2000687b 0x20003c84\n0x2000687a 
0x00000000\n0x20006879 0x080f2ec9\n0x20006878 0x20006844\n0x20006877 0x0813ab5a\n0x20006876 0x00000000\n0x20006875 0x00000000\n0x20006874 0x00000000\n0x20006873 0x00000004\n0x20006872 0x080ff849\n0x20006871 0x080f9a59\n0x20006870 0x080fe621\n0x2000686f 0x00000007\n0x2000686e 0x0813b3fe\n0x2000686d 0x080f9603\n0x2000686c 0x00000004\n0x2000686b 0x080f9603\n0x2000686a 0x20006824\n0x20006869 0x00000020\n0x20006868 0x080fe621\n0x20006867 0x080f90f3\n0x20006866 0x20006824\n0x20006865 0x00000002\n0x20006864 0x080fe621\n0x20006863 0x080f90f3\n0x20006862 0x20006824\n0x20006861 0x00000001\n0x20006860 0x080fe621\n0x2000685f 0x080f90f3\n0x2000685e 0x20006824\n0x2000685d 0x00000000\n0x2000685c 0x080fe621\n0x2000685b 0xfbff0f00\n0x2000685a 0xe2ffe425\n0x20006859 0xe4f7d600\n0x20006858 0xbdff01ff\n0x20006857 0xdeadfffb\n0x20006856 0x001e000f\n0x20006855 0x25e4f7e4\n0x20006854 0x004300d6\n0x20006853 0xc0a00000\n0x20006852 0x41700000\n0x20006851 0xc1f00000\n0x20006850 0xdeadbeef\n0x2000684f 0xdeadbeef\n0x2000684e 0x20006848\n0x2000684d 0x200067e0\n0x2000684c 0x200067c8\n0x2000684b 0x0800c071\n0x2000684a 0x00000000\n0x20006849 0x00000000\n0x20006848 0x10007f20\n0x20006847 0x00000000\n0x20006846 0x3a534ee0\n0x20006845 0x3c089f4a\n0x20006844 0x3a3737e0\n0x20006843 0x080accaf\n0x20006842 0x200067f8\n0x20006841 0x1000a730\n0x20006840 0x1000a9a0\n0x2000683f 0x080f3429\n0x2000683e 0x00000000\n0x2000683d 0x00000000\n0x2000683c 0x00000000\n0x2000683b 0x00000000\n0x2000683a 0x200067e0\n0x20006839 0x00000000\n0x20006838 0x00000000\n0x20006837 0x00000002\n0x20006836 0x1000a980\n0x20006835 0xa91ce0dd\n0x20006834 0x2ac590bd\n0x20006833 0x200066d8\n0x20006832 0x00000001\n0x20006831 0x00000003\n0x20006830 0xb34915f7\n0x2000682f 0x351ebdee\n0x2000682e 0x3316cf97\n0x2000682d 0x200066c0\n0x2000682c 0x00000001\n0x2000682b 0x00000003\n0x2000682a 0xb34915f7\n0x20006829 0x351ebdee\n0x20006828 0x3316cf97\n0x20006827 0x200066a8\n0x20006826 0x00000001\n0x20006825 0x00000003\n0x20006824 
0x340f1c97\n0x20006823 0x36c307c3\n0x20006822 0x352ce320\n0x20006821 0x08107ccf\n0x20006820 0x075957d3\n0x2000681f 0x200067c8\n0x2000681e 0x10008178\n0x2000681d 0xdeadbeef\n0x2000681c 0xdeadbeef\n0x2000681b 0xdeadbeef\n0x2000681a 0xdeadbeef\n0x20006819 0xdeadbeef\n0x20006818 0xdeadbeef\n0x20006817 0xdeadbeef\n0x20006816 0xdeadbeef\n0x20006815 0xdeadbeef\n0x20006814 0xdeadbeef\n0x20006813 0xdeadbeef\n0x20006812 0xdeadbeef\n0x20006811 0xdeadbeef\n0x20006810 0xdeadbeef\n0x2000680f 0xdeadbeef\n0x2000680e 0xdeadbeef\n0x2000680d 0xdeadbeef\n0x2000680c 0xdeadbeef\n0x2000680b 0xdeadbeef\n0x2000680a 0xdeadbeef\n0x20006809 0xdeadbeef\n0x20006808 0xdeadbeef\n0x20006807 0xdeadbeef\n0x20006806 0xdeadbeef\n0x20006805 0xdeadbeef\n0x20006804 0xdeadbeef\n0x20006803 0xdeadbeef\n0x20006802 0xdeadbeef\n0x20006801 0xdeadbeef\n0x20006800 0xdeadbeef\n0x200067ff 0xdeadbeef\n0x200067fe 0xdeadbeef\n0x200067fd 0xdeadbeef\n0x200067fc 0xdeadbeef\n0x200067fb 0xdeadbeef\n0x200067fa 0xdeadbeef\n0x200067f9 0xdeadbeef\n0x200067f8 0xdeadbeef\n0x200067f7 0x00000000\n0x200067f6 0x00000000\n0x200067f5 0x00000000\n0x200067f4 0x00000000\n0x200067f3 0x00000000\n0x200067f2 0x00000000\n0x200067f1 0x00000000\n0x200067f0 0x00000000\n0x200067ef 0x00000000\n0x200067ee 0x00000000\n0x200067ed 0xfb98a0bf\n0x200067ec 0xda7d080a\n0x200067eb 0xe6b029e7\n0x200067ea 0x55a55b8b\n0x200067e9 0x26aff8f1\n0x200067e8 0x3a57dc4d\n0x200067e7 0xaa08710c\n0x200067e6 0x48cedaff\n0x200067e5 0xbac645a8\n0x200067e4 0xef1e6244\n0x200067e3 0x2528f8a6\n0x200067e2 0xd4a95a6b\n0x200067e1 0x1f3500b1\n0x200067e0 0xb69bbf87\n0x200067df 0xc97b8e02\n0x200067de 0xdcd13ab8\n0x200067dd 0x1d934129\n0x200067dc 0x1670d1c7\n0x200067db 0x858b8458\n0x200067da 0xb5dc6b2e\n0x200067d9 0x007cb39d\n0x200067d8 0x36910361\n0x200067d7 0x73110452\n0x200067d6 0xf2dccad0\n0x200067d5 0x26161caf\n0x200067d4 0xb8b9cbb7\n0x200067d3 0x25068328\nUser sp memory region, stack pointer lies within stack\n0x2002ba6a 0xfb98a0bf\n0x2002ba69 0xda7d080a\n0x2002ba68 
0xe6b029e7\n0x2002ba67 0x55a55b8b\n0x2002ba66 0x26aff8f1\n0x2002ba65 0x3a57dc4d\n0x2002ba64 0xaa08710c\n0x2002ba63 0x48cedaff\n0x2002ba62 0xbac645a8\n0x2002ba61 0xef1e6244\n0x2002ba60 0x2528f8a6\n0x2002ba5f 0xd4a95a6b\n0x2002ba5e 0x1f3500b1\n0x2002ba5d 0xb69bbf87\n0x2002ba5c 0xc97b8e02\n0x2002ba5b 0xdcd13ab8\n0x2002ba5a 0x1d934129\n0x2002ba59 0x1670d1c7\n0x2002ba58 0x858b8458\n0x2002ba57 0xb5dc6b2e\n0x2002ba56 0x007cb39d\n0x2002ba55 0x36910361\n0x2002ba54 0x73110452\n0x2002ba53 0xf2dccad0\n0x2002ba52 0x26161caf\n0x2002ba51 0xb8b9cbb7\n0x2002ba50 0x25068328\n0x2002ba4f 0x487f6c1a\n0x2002ba4e 0x010301d1\n0x2002ba4d 0xbdab57fc\n0x2002ba4c 0x28f32129\n0x2002ba4b 0xd817af66\n0x2002ba4a 0x0a2be006\n0x2002ba49 0x13965cbd\n0x2002ba48 0x8fea0b38\n0x2002ba47 0x6dc4ad61\n0x2002ba46 0x0ccce009\n0x2002ba45 0x466bb2df\n0x2002ba44 0xeb861cb7\n0x2002ba43 0x35b476ef\n0x2002ba42 0x9fc05ab4\n0x2002ba41 0xeecbf38e\n0x2002ba40 0x721d10e7\n0x2002ba3f 0x79fefa9f\n0x2002ba3e 0x600a345f\n0x2002ba3d 0x41f054ed\n0x2002ba3c 0x213f5fcb\n0x2002ba3b 0x20004508\n0x2002ba3a 0x20004518\n0x2002ba39 0x00000810\n0x2002ba38 0x00004440\n0x2002ba37 0x4a3797f4\n0x2002ba36 0x1ce8cada\n0x2002ba35 0xbe0385f1\n0x2002ba34 0x00746c75\n0x2002ba33 0x61660067\n0x2002ba32 0x6f6c5f74\n0x2002ba31 0x6c756166\n0x2002ba30 0x64726168\n0x2002ba2f 0x00000000\n0x2002ba2e 0x2002bba6\n0x2002ba2d 0x2002bb98\n0x2002ba2c 0xdeadbeef\n0x2002ba2b 0x00000000\n0x2002ba2a 0x00000000\n0x2002ba29 0x080ecea5\n0x2002ba28 0x00000000\n0x2002ba27 0x00000000\n0x2002ba26 0x00000000\n0x2002ba25 0x00000000\n0x2002ba24 0x00000000\n0x2002ba23 0x00000000\n0x2002ba22 0x00000101\n0x2002ba21 0x00000000\n0x2002ba20 0x00000000\n0x2002ba1f 0x00000000\n0x2002ba1e 0x00000000\n0x2002ba1d 0x00000000\n0x2002ba1c 0x00000000\n0x2002ba1b 0x00000000\n0x2002ba1a 0x00000000\n0x2002ba19 0x01000000\n0x2002ba18 0x080ece80\n0x2002ba17 0x00000000\n0x2002ba16 0x00000000\n0x2002ba15 0x00000000\n0x2002ba14 0x00000000\n0x2002ba13 0x00000000\n0x2002ba12 
0x00000000\n0x2002ba11 0xdeadbeef\n0x2002ba10 0xdeadbeef\n0x2002ba0f 0xdeadbeef\n0x2002ba0e 0xdeadbeef\n0x2002ba0d 0xdeadbeef\n0x2002ba0c 0xdeadbeef\n0x2002ba0b 0xdeadbeef\n0x2002ba0a 0xdeadbeef\n0x2002ba09 0xdeadbeef\n0x2002ba08 0xdeadbeef\n0x2002ba07 0xdeadbeef\n0x2002ba06 0xdeadbeef\n0x2002ba05 0xdeadbeef\n0x2002ba04 0xdeadbeef\n0x2002ba03 0xdeadbeef\n0x2002ba02 0xdeadbeef\n0x2002ba01 0xdeadbeef\n0x2002ba00 0xdeadbeef\n0x2002b9ff 0xdeadbeef\n0x2002b9fe 0xdeadbeef\n0x2002b9fd 0xdeadbeef\n0x2002b9fc 0xdeadbeef\n0x2002b9fb 0xdeadbeef\n0x2002b9fa 0xdeadbeef\n0x2002b9f9 0xdeadbeef\n0x2002b9f8 0xdeadbeef\n0x2002b9f7 0xdeadbeef\n0x2002b9f6 0xdeadbeef\n0x2002b9f5 0xdeadbeef\n0x2002b9f4 0xdeadbeef\n0x2002b9f3 0xdeadbeef\n0x2002b9f2 0xdeadbeef\n0x2002b9f1 0xdeadbeef\n0x2002b9f0 0xdeadbeef\n0x2002b9ef 0xdeadbeef\n0x2002b9ee 0xdeadbeef\n0x2002b9ed 0xdeadbeef\n0x2002b9ec 0xdeadbeef\n0x2002b9eb 0xdeadbeef\n0x2002b9ea 0xdeadbeef\n0x2002b9e9 0xdeadbeef\n0x2002b9e8 0xdeadbeef\n0x2002b9e7 0xdeadbeef\n0x2002b9e6 0xdeadbeef\n0x2002b9e5 0xdeadbeef\n0x2002b9e4 0xdeadbeef\n0x2002b9e3 0xdeadbeef\n0x2002b9e2 0xdeadbeef\n0x2002b9e1 0xdeadbeef\n0x2002b9e0 0xdeadbeef\n0x2002b9df 0xdeadbeef\n0x2002b9de 0xdeadbeef\n0x2002b9dd 0xdeadbeef\n0x2002b9dc 0xdeadbeef\n0x2002b9db 0xdeadbeef\n0x2002b9da 0xdeadbeef\n0x2002b9d9 0xdeadbeef\n0x2002b9d8 0xdeadbeef\n0x2002b9d7 0xdeadbeef\n0x2002b9d6 0xdeadbeef\n0x2002b9d5 0xdeadbeef\n0x2002b9d4 0xdeadbeef\n0x2002b9d3 0xdeadbeef\n0x2002b9d2 0xdeadbeef\n0x2002b9d1 0xdeadbeef\n0x2002b9d0 0xdeadbeef\n0x2002b9cf 0xdeadbeef\n0x2002b9ce 0xdeadbeef\n0x2002b9cd 0xdeadbeef\n0x2002b9cc 0xdeadbeef\n0x2002b9cb 0xdeadbeef\n0x2002b9ca 0xdeadbeef\n0x2002b9c9 0xdeadbeef\n0x2002b9c8 0xdeadbeef\n0x2002b9c7 0xdeadbeef\n0x2002b9c6 0xdeadbeef\n0x2002b9c5 0xdeadbeef\n0x2002b9c4 0xdeadbeef\n0x2002b9c3 0xdeadbeef\n0x2002b9c2 0xdeadbeef\n0x2002b9c1 0xdeadbeef\n0x2002b9c0 0xdeadbeef\n0x2002b9bf 0xdeadbeef\n0x2002b9be 0xdeadbeef\n0x2002b9bd 0xdeadbeef\n0x2002b9bc 
0xdeadbeef\n0x2002b9bb 0xdeadbeef\n0x2002b9ba 0xdeadbeef\n0x2002b9b9 0xdeadbeef\n0x2002b9b8 0xdeadbeef<-- User sp\n0x2002b9b7 0xdeadbeef\n0x2002b9b6 0x00000000\n0x2002b9b5 0x00000000\n0x2002b9b4 0x00000000\n0x2002b9b3 0x00000000\n0x2002b9b2 0x00000000\n0x2002b9b1 0x00000000\n0x2002b9b0 0x00000000\n0x2002b9af 0x00000000\n0x2002b9ae 0x00000000\n0x2002b9ad 0x00000000\n0x2002b9ac 0x00000000\n0x2002b9ab 0x00000000\n0x2002b9aa 0x00000000\n0x2002b9a9 0x00000000\n0x2002b9a8 0x00000000\n0x2002b9a7 0x00000000\n0x2002b9a6 0x00000000\n0x2002b9a5 0x21000000\n0x2002b9a4 0x080353dc\n0x2002b9a3 0x080353ad\n0x2002b9a2 0x00000000\n0x2002b9a1 0xe000ed14\n0x2002b9a0 0x00000001\n0x2002b99f 0x00000000\n0x2002b99e 0x00000000\n0x2002b99d 0x00000000\n0x2002b99c 0x00000000\n0x2002b99b 0x00000000\n0x2002b99a 0x00000000\n0x2002b999 0x00000000\n0x2002b998 0x00000000\n0x2002b997 0x00000000\n0x2002b996 0x00000000\n0x2002b995 0x00000000\n0x2002b994 0x00000000\n0x2002b993 0x00000000\n0x2002b992 0x00000000\n0x2002b991 0x00000000\n0x2002b990 0x00000000\n0x2002b98f 0x00000000\n0x2002b98e 0x00000000\n0x2002b98d 0xffffffe9\n0x2002b98c 0x00000000\n0x2002b98b 0x00000000\n0x2002b98a 0x00000000\n0x2002b989 0x00000000\n0x2002b988 0x2002bb8c\n0x2002b987 0x2002bba6\n0x2002b986 0x20002854\n0x2002b985 0x00000000\n0x2002b984 0x000000f0\n0x2002b983 0x2002b9b8\n0x2002b982 0xdeadbeef\n0x2002b981 0xdeadbeef\n0x2002b980 0xdeadbeef\n0x2002b97f 0xdeadbeef\n0x2002b97e 0xdeadbeef\n0x2002b97d 0xdeadbeef\n0x2002b97c 0xdeadbeef\n0x2002b97b 0xdeadbeef\n0x2002b97a 0xdeadbeef\n0x2002b979 0xdeadbeef\n0x2002b978 0xdeadbeef\n0x2002b977 0xdeadbeef\n0x2002b976 0xdeadbeef\n0x2002b975 0xdeadbeef\n0x2002b974 0xdeadbeef\n0x2002b973 0xdeadbeef\n0x2002b972 0xdeadbeef\n0x2002b971 0xdeadbeef\n0x2002b970 0xdeadbeef\n0x2002b96f 0xdeadbeef\n0x2002b96e 0xdeadbeef\n0x2002b96d 0xdeadbeef\n0x2002b96c 0xdeadbeef\n0x2002b96b 0xdeadbeef\n0x2002b96a 0xdeadbeef\n0x2002b969 0xdeadbeef\n0x2002b968 0xdeadbeef\n0x2002b967 0xdeadbeef\n0x2002b966 
0xdeadbeef\n0x2002b965 0xdeadbeef\n0x2002b964 0xdeadbeef\n0x2002b963 0xdeadbeef\n0x2002b962 0xdeadbeef\n0x2002b961 0xdeadbeef\n0x2002b960 0xdeadbeef\n0x2002b95f 0xdeadbeef\n0x2002b95e 0xdeadbeef\n0x2002b95d 0xdeadbeef\n0x2002b95c 0xdeadbeef\n0x2002b95b 0xdeadbeef\n0x2002b95a 0xdeadbeef\n0x2002b959 0xdeadbeef\n0x2002b958 0xdeadbeef\n0x2002b957 0xdeadbeef\n0x2002b956 0xdeadbeef\n0x2002b955 0xdeadbeef\n0x2002b954 0xdeadbeef\n0x2002b953 0xdeadbeef\n0x2002b952 0xdeadbeef\n0x2002b951 0xdeadbeef\n0x2002b950 0xdeadbeef\n0x2002b94f 0xdeadbeef\n0x2002b94e 0xdeadbeef\n0x2002b94d 0xdeadbeef\n0x2002b94c 0xdeadbeef\n0x2002b94b 0xdeadbeef\n0x2002b94a 0xdeadbeef\n0x2002b949 0xdeadbeef\n0x2002b948 0xdeadbeef\n0x2002b947 0xdeadbeef\n0x2002b946 0xdeadbeef\n0x2002b945 0xdeadbeef\n0x2002b944 0xdeadbeef\n0x2002b943 0xdeadbeef\n0x2002b942 0xdeadbeef\n0x2002b941 0xdeadbeef\n0x2002b940 0xdeadbeef\n0x2002b93f 0xdeadbeef\n0x2002b93e 0xdeadbeef\n0x2002b93d 0xdeadbeef\n0x2002b93c 0xdeadbeef\n0x2002b93b 0xdeadbeef\n0x2002b93a 0xdeadbeef\n0x2002b939 0xdeadbeef\n0x2002b938 0xdeadbeef\n0x2002b937 0xdeadbeef\n0x2002b936 0xdeadbeef\n0x2002b935 0xdeadbeef\n0x2002b934 0xdeadbeef\n0x2002b933 0xdeadbeef\n0x2002b932 0xdeadbeef\n0x2002b931 0xdeadbeef\n0x2002b930 0xdeadbeef\n0x2002b92f 0xdeadbeef\n0x2002b92e 0xdeadbeef\n0x2002b92d 0xdeadbeef\n0x2002b92c 0xdeadbeef\n0x2002b92b 0xdeadbeef\n0x2002b92a 0xdeadbeef\n0x2002b929 0xdeadbeef\n0x2002b928 0xdeadbeef\n0x2002b927 0xdeadbeef\n0x2002b926 0xdeadbeef\n0x2002b925 0xdeadbeef\n0x2002b924 0xdeadbeef\n0x2002b923 0xdeadbeef\n0x2002b922 0xdeadbeef\n0x2002b921 0xdeadbeef\n0x2002b920 0xdeadbeef\n0x2002b91f 0xdeadbeef\n0x2002b91e 0xdeadbeef\n0x2002b91d 0xdeadbeef\n0x2002b91c 0xdeadbeef\n0x2002b91b 0xdeadbeef\n0x2002b91a 0xdeadbeef\n0x2002b919 0xdeadbeef\n0x2002b918 0xdeadbeef\n0x2002b917 0xdeadbeef\n0x2002b916 0xdeadbeef\n0x2002b915 0xdeadbeef\n0x2002b914 0xdeadbeef\n0x2002b913 0xdeadbeef\n0x2002b912 0xdeadbeef\n0x2002b911 0xdeadbeef\n0x2002b910 
0xdeadbeef\n0x2002b90f 0xdeadbeef\n0x2002b90e 0xdeadbeef\n0x2002b90d 0xdeadbeef\n0x2002b90c 0xdeadbeef\n0x2002b90b 0xdeadbeef\n0x2002b90a 0xdeadbeef\n0x2002b909 0xdeadbeef\n0x2002b908 0xdeadbeef\n0x2002b907 0xdeadbeef\n0x2002b906 0xdeadbeef\n[hardfault_log] -- 2000-01-01-00:06:01 END Fault Log --\n']]
84 |
85 | Name (multi id, message size in bytes) number of data points, total bytes
86 | actuator_controls_0 (0, 48) 1132 54336
87 | actuator_outputs (0, 76) 1132 86032
88 | battery_status (0, 42) 382 16044
89 | commander_state (0, 9) 1132 10188
90 | control_state (0, 134) 1110 148740
91 | cpuload (0, 16) 115 1840
92 | ekf2_innovations (0, 140) 2148 300720
93 | ekf2_timestamps (0, 20) 28585 571700
94 | estimator_status (0, 267) 556 148452
95 | sensor_combined (0, 72) 28584 2058048
96 | sensor_preflight (0, 16) 2189 35024
97 | system_power (0, 17) 382 6494
98 | task_stack_info (0, 26) 230 5980
99 | vehicle_attitude (0, 36) 3576 128736
100 | vehicle_attitude_setpoint (0, 55) 3645 200475
101 | vehicle_land_detected (0, 15) 1 15
102 | vehicle_local_position (0, 148) 1110 164280
103 | vehicle_rates_setpoint (0, 24) 3645 87480
104 | vehicle_status (0, 45) 493 22185
105 | wind_estimate (0, 24) 1110 26640
106 |
--------------------------------------------------------------------------------
/test/sample_appended_messages.txt:
--------------------------------------------------------------------------------
1 | 0:00:11 WARNING: [commander_tests] Not ready to fly: Sensors not set up correctly
2 |
--------------------------------------------------------------------------------
/test/sample_appended_multiple.ulg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PX4/pyulog/1de773d064d5dfbbdda55fbf129f57a6c4b70d0c/test/sample_appended_multiple.ulg
--------------------------------------------------------------------------------
/test/sample_appended_multiple_messages.txt:
--------------------------------------------------------------------------------
1 | 0:00:11 WARNING: [commander_tests] Not ready to fly: Sensors not set up correctly
2 |
--------------------------------------------------------------------------------
/test/sample_info.txt:
--------------------------------------------------------------------------------
1 | Logging start time: 0:01:52, duration: 0:01:08
2 | Dropouts: count: 4, total duration: 0.1 s, max: 62 ms, mean: 29 ms
3 | Info Messages:
4 | sys_name: PX4
5 | time_ref_utc: 0
6 | ver_hw: AUAV_X21
7 | ver_sw: fd483321a5cf50ead91164356d15aa474643aa73
8 |
9 | Name (multi id, message size in bytes) number of data points, total bytes
10 | actuator_controls_0 (0, 48) 3269 156912
11 | actuator_outputs (0, 76) 1311 99636
12 | commander_state (0, 9) 678 6102
13 | control_state (0, 122) 3268 398696
14 | cpuload (0, 16) 69 1104
15 | ekf2_innovations (0, 140) 3271 457940
16 | estimator_status (0, 309) 1311 405099
17 | sensor_combined (0, 72) 17070 1229040
18 | sensor_preflight (0, 16) 17072 273152
19 | telemetry_status (0, 36) 70 2520
20 | vehicle_attitude (0, 36) 6461 232596
21 | vehicle_attitude_setpoint (0, 55) 3272 179960
22 | vehicle_local_position (0, 123) 678 83394
23 | vehicle_rates_setpoint (0, 24) 6448 154752
24 | vehicle_status (0, 45) 294 13230
25 |
--------------------------------------------------------------------------------
/test/sample_log_small.ulg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PX4/pyulog/1de773d064d5dfbbdda55fbf129f57a6c4b70d0c/test/sample_log_small.ulg
--------------------------------------------------------------------------------
/test/sample_log_small_messages.txt:
--------------------------------------------------------------------------------
1 | 0:00:22 INFO: [commander] Takeoff detected
2 | 0:00:23 INFO: [commander] Landing detected
3 | 0:00:25 INFO: [commander] Disarmed by landing
4 |
--------------------------------------------------------------------------------
/test/sample_logging_tagged_and_default_params.ulg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PX4/pyulog/1de773d064d5dfbbdda55fbf129f57a6c4b70d0c/test/sample_logging_tagged_and_default_params.ulg
--------------------------------------------------------------------------------
/test/sample_logging_tagged_and_default_params_messages.txt:
--------------------------------------------------------------------------------
1 | 0:00:00 INFO: [px4] Startup script returned successfully
2 | 0:00:00 INFO: logging: opening log file 2022-4-29/8_45_27.ulg
3 | 0:00:00 INFO: [logger] Start file log (type: full)
4 | 0:00:00 INFO: [logger] Opened full log file: ./log/2022-04-29/08_45_27.ulg
5 |
--------------------------------------------------------------------------------
/test/sample_messages.txt:
--------------------------------------------------------------------------------
1 | 0:02:38 ERROR: [sensors] no barometer found on /dev/baro0 (2)
2 | 0:02:42 ERROR: [sensors] no barometer found on /dev/baro0 (2)
3 | 0:02:51 ERROR: [sensors] no barometer found on /dev/baro0 (2)
4 | 0:02:56 ERROR: [sensors] no barometer found on /dev/baro0 (2)
5 |
--------------------------------------------------------------------------------
/test/sample_px4_events.ulg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PX4/pyulog/1de773d064d5dfbbdda55fbf129f57a6c4b70d0c/test/sample_px4_events.ulg
--------------------------------------------------------------------------------
/test/sample_px4_events_messages.txt:
--------------------------------------------------------------------------------
1 | 475214:49:10 INFO: logging: opening log file 2024-3-18/14_49_10.ulg
2 | 475214:49:10 INFO: [px4] Startup script returned successfully
3 | 475214:49:10 INFO: [logger] Start file log (type: full)
4 | 475214:49:10 INFO: [logger] Opened full log file: ./log/2024-03-18/14_49_10.ulg
5 | 475214:49:10 INFO: [mavlink] partner IP: 127.0.0.1
6 | 475214:49:11 WARNING: [health_and_arming_checks] Preflight: GPS fix too low
7 | 475214:49:18 INFO: [tone_alarm] home set
8 | 475214:49:18 WARNING: [health_and_arming_checks] Preflight: GPS fix too low
9 | 475214:49:21 INFO: [commander] [32mReady for takeoff![0m
10 | 475214:49:25 INFO: Armed by internal command
11 | 475214:49:25 INFO: Using default takeoff altitude: 2.50 m
12 | 475214:49:25 INFO: [tone_alarm] arming warning
13 | 475214:49:27 INFO: Takeoff detected
14 | 475214:49:32 INFO: RTL: start return at 491 m (3 m above destination)
15 | 475214:49:32 INFO: RTL: land at destination
16 | 475214:49:38 INFO: Landing detected
17 | 475214:49:40 INFO: Disarmed by landing
18 | 475214:49:40 INFO: [tone_alarm] notify neutral
19 |
--------------------------------------------------------------------------------
/test/test_cli.py:
--------------------------------------------------------------------------------
1 | '''
2 | Test command line tools
3 | '''
4 |
5 | import sys
6 | import os
7 | import inspect
8 | import unittest
9 | import tempfile
10 |
11 | from ddt import ddt, data
12 |
13 | from pyulog import ulog2csv, info, params, messages, extract_gps_dump
14 |
15 | try:
16 | from StringIO import StringIO
17 | except ImportError:
18 | from io import StringIO
19 |
20 | TEST_PATH = os.path.dirname(os.path.abspath(
21 | inspect.getfile(inspect.currentframe())))
22 |
@ddt
class TestCommandLineTools(unittest.TestCase):
    """
    Test command line tools.
    """

    def run_against_file(self, expected_output_file, test):
        """
        Run 'test' with stdout captured and compare the captured output
        against the contents of 'expected_output_file'.

        :param expected_output_file: path of the file holding the expected output
        :param test: zero-argument callable that writes its result to stdout
        """
        saved_stdout = sys.stdout
        with open(expected_output_file, 'r', encoding='utf8') as file_handle:
            expected_output = file_handle.read().strip()
        output = None
        try:
            out = StringIO()
            sys.stdout = out
            test()
            output = out.getvalue().strip()
            assert output == expected_output
        finally:
            # Restore stdout unconditionally: previously this only happened
            # when 'output' had been assigned, so if test() raised early,
            # sys.stdout stayed pointed at the StringIO buffer and silently
            # swallowed the output of every subsequent test.
            sys.stdout = saved_stdout
            if output is not None:
                # Print both sides so a mismatch is easy to diagnose.
                print("Got output:")
                print(output)
                print("\nExpected output:")
                print(expected_output)

    @data('sample')
    def test_ulog2csv(self, test_case):
        """
        Test that 'ulog2csv' runs without error.
        """

        tmpdir = tempfile.gettempdir()
        print('writing files to ', tmpdir)
        ulog_file_name = os.path.join(TEST_PATH, test_case+'.ulg')
        included_messages = []  # empty list: convert all messages
        output = tmpdir
        delimiter = ','
        time_s = 0
        time_e = 0
        ulog2csv.convert_ulog2csv(ulog_file_name,
                                  included_messages,
                                  output,
                                  delimiter,
                                  time_s,
                                  time_e)

    @data('sample', 'sample_appended', 'sample_appended_multiple')
    def test_pyulog_info_cli(self, test_case):
        """
        Test that the output of 'ulog_info' on sample logs match previously generated results.
        """
        sys.argv = [
            '',
            os.path.join(TEST_PATH, test_case+'.ulg'),
            '-v'
        ]
        self.run_against_file(
            os.path.join(TEST_PATH, test_case+'_info.txt'), info.main)

    @unittest.skip("no gps data in log file")
    def test_extract_gps_dump_cli(self):
        """
        Test that the output of 'ulog_extract_gps_dump' on sample logs match previously generated
        results.
        """
        sys.argv = [
            '',
            os.path.join(TEST_PATH, 'sample.ulg')
        ]
        extract_gps_dump.main()

    @data('sample', 'sample_appended', 'sample_px4_events')
    def test_messages_cli(self, test_case):
        """
        Test that the output of 'ulog_messages' on sample logs match previously generated results.
        """
        sys.argv = [
            '',
            os.path.join(TEST_PATH, test_case+'.ulg')
        ]
        self.run_against_file(
            os.path.join(TEST_PATH, test_case+'_messages.txt'), messages.main)

    @data('sample', 'sample_appended')
    def test_params_cli(self, test_case):
        """
        Test that 'ulog_params' runs without error.
        """
        sys.argv = [
            '',
            os.path.join(TEST_PATH, test_case+'.ulg')
        ]
        params.main()
119 |
120 |
121 | # vim: set et fenc=utf-8 ft=python ff=unix sts=4 sw=4 ts=4
122 |
--------------------------------------------------------------------------------
/test/test_db.py:
--------------------------------------------------------------------------------
1 | '''
2 | Test the DatabaseULog module.
3 | '''
4 |
5 | import unittest
6 | import os
7 | import tempfile
8 | from unittest.mock import patch
9 | import numpy as np
10 | from ddt import ddt, data
11 |
12 | from pyulog import ULog
13 | from pyulog.db import DatabaseULog
14 | from pyulog.migrate_db import migrate_db
15 |
16 | TEST_PATH = os.path.dirname(os.path.abspath(__file__))
17 |
@ddt
class TestDatabaseULog(unittest.TestCase):
    '''
    Test that the DatabaseULog successfully reads a ULog, writes it to database
    and then is able to read from it without losing any data.
    '''

    def setUp(self):
        '''
        Set up the test database.
        '''
        self.db_path = os.path.join(TEST_PATH, 'pyulog_test.sqlite3')
        self.db_handle = DatabaseULog.get_db_handle(self.db_path)
        migrate_db(self.db_path)


    def tearDown(self):
        '''
        Remove the test database after use.
        '''
        os.remove(self.db_path)

    @data('sample_log_small',
          'sample_appended_multiple',
          'sample_appended',
          'sample',
          'sample_logging_tagged_and_default_params')
    def test_parsing(self, test_case):
        '''
        Verify that log files written and read from the database are
        identical to the original ulog file.
        '''
        # Note: the previous version joined TEST_PATH twice; since the first
        # join already yields an absolute path, a single join is equivalent.
        log_path = os.path.join(TEST_PATH, f'{test_case}.ulg')

        ulog = ULog(log_path)
        dbulog_saved = DatabaseULog(self.db_handle, log_file=log_path)
        dbulog_saved.save()
        primary_key = dbulog_saved.primary_key
        # Load eagerly (lazy=False) so all datasets are read back for comparison.
        dbulog_loaded = DatabaseULog(self.db_handle, primary_key=primary_key, lazy=False)
        self.assertEqual(ulog, dbulog_loaded)

    def test_lazy(self):
        '''
        Verify that when lazy loading is enabled (which is the default
        behaviour), then the datasets are only retrieved when get_dataset is
        explicitly called.
        '''
        log_path = os.path.join(TEST_PATH, 'sample_log_small.ulg')

        ulog = ULog(log_path)
        dbulog_saved = DatabaseULog(self.db_handle, log_file=log_path)
        dbulog_saved.save()
        primary_key = dbulog_saved.primary_key
        dbulog_loaded = DatabaseULog(self.db_handle, primary_key=primary_key)
        for dataset in ulog.data_list:
            db_dataset = next(ds for ds in dbulog_loaded.data_list
                              if ds.name == dataset.name and ds.multi_id == dataset.multi_id)
            # Before get_dataset is called, the lazily loaded dataset must be
            # empty while the directly parsed one is populated.
            self.assertEqual(len(db_dataset.data), 0)
            self.assertNotEqual(len(dataset.data), 0)
            ulog_dataset = ulog.get_dataset(dataset.name,
                                            multi_instance=dataset.multi_id)
            dbulog_dataset = dbulog_loaded.get_dataset(dataset.name,
                                                       multi_instance=dataset.multi_id)
            self.assertEqual(ulog_dataset, dbulog_dataset)


    def test_data_caching(self):
        '''
        Verify that the caching of dataset data works as expected.
        '''
        test_file = os.path.join(TEST_PATH, 'sample_log_small.ulg')

        dbulog_saved = DatabaseULog(self.db_handle, log_file=test_file)
        dbulog_saved.save()
        primary_key = dbulog_saved.primary_key
        dbulog_loaded = DatabaseULog(self.db_handle, primary_key=primary_key, lazy=True)
        for dataset in dbulog_loaded.data_list:
            cache_miss = dbulog_loaded.get_dataset(dataset.name,
                                                   multi_instance=dataset.multi_id,
                                                   caching=True)
            cache_hit = dbulog_loaded.get_dataset(dataset.name,
                                                  multi_instance=dataset.multi_id,
                                                  caching=True)
            uncached = dbulog_loaded.get_dataset(dataset.name,
                                                 multi_instance=dataset.multi_id,
                                                 caching=False)

            # All three reads must contain the same data ...
            self.assertEqual(cache_miss, cache_hit)
            self.assertEqual(cache_miss, uncached)
            # ... but only the cached reads return the identical object.
            self.assertIs(cache_miss, cache_hit)
            self.assertIsNot(cache_miss, uncached)

    def test_save(self):
        '''
        Test that save() twice raises an error, since we currently do not
        support updating the database.
        '''
        log_path = os.path.join(TEST_PATH, 'sample_log_small.ulg')
        dbulog = DatabaseULog(self.db_handle, log_file=log_path)
        dbulog.save()
        with self.assertRaises(KeyError):
            dbulog.save()

    def test_load(self):
        ''' Test that load() on an unknown primary key raises an error.'''
        with self.assertRaises(KeyError):
            _ = DatabaseULog(self.db_handle, primary_key=100)

    def test_unapplied_migrations(self):
        '''
        Test that we get an error when trying to initialize a DatabaseULog
        if there are unapplied migrations, i.e. the SCHEMA_VERSION of
        DatabaseULog is larger than user_version in the database.
        '''
        migrate_db(self.db_path)
        log_file = os.path.join(TEST_PATH, 'sample_log_small.ulg')
        _ = DatabaseULog(self.db_handle, log_file=log_file)
        with self.assertRaises(ValueError):
            # Increment SCHEMA_VERSION so the database is seemingly out of date
            with patch.object(DatabaseULog, 'SCHEMA_VERSION', DatabaseULog.SCHEMA_VERSION+1):
                _ = DatabaseULog(self.db_handle, log_file=log_file)

    @data('sample_log_small')
    def test_sha256sum(self, test_case):
        '''
        Verify that the sha256sum set on save can be used to find the same file
        again, using any of the approved file input methods.
        '''

        test_file = os.path.join(TEST_PATH, f'{test_case}.ulg')
        dbulog = DatabaseULog(self.db_handle, log_file=test_file)
        dbulog.save()
        digest = DatabaseULog.calc_sha256sum(test_file)
        self.assertEqual(digest, dbulog.sha256sum)

        # Deliberately not a 'with' block: the closed handle is checked below.
        test_file_handle = open(test_file, 'rb') # pylint: disable=consider-using-with
        open_digest = DatabaseULog.calc_sha256sum(test_file_handle)
        self.assertEqual(digest, open_digest)

        test_file_handle.close()
        closed_digest = DatabaseULog.calc_sha256sum(test_file_handle)
        self.assertEqual(digest, closed_digest)

        pk_from_digest = DatabaseULog.primary_key_from_sha256sum(self.db_handle, digest)
        self.assertEqual(pk_from_digest, dbulog.primary_key)

        # Saving the same file twice must be rejected as a duplicate.
        dbulog_duplicate = DatabaseULog(self.db_handle, log_file=test_file)
        with self.assertRaises(KeyError):
            dbulog_duplicate.save()

    def test_delete(self):
        '''
        Verify that the delete method completely deletes the relevant ulog from
        the database, and nothing else, by looking at the size of the database
        before and after deleting.
        '''

        def db_size():
            '''
            Get the size in bytes of the database file, after VACUUMING to make
            sure that deleted rows are cleaned up.
            '''
            with self.db_handle() as con:
                con.execute('VACUUM')
            return os.path.getsize(self.db_path)

        # We pre-populate the database with a log to detect if delete() just
        # wipes everything
        test_file1 = os.path.join(TEST_PATH, 'sample.ulg')
        DatabaseULog(self.db_handle, log_file=test_file1).save()
        initial_size = db_size()

        test_file2 = os.path.join(TEST_PATH, 'sample_log_small.ulg')
        dbulog = DatabaseULog(self.db_handle, log_file=test_file2)
        dbulog.save()
        self.assertNotEqual(db_size(), initial_size)

        dbulog.delete()
        self.assertEqual(db_size(), initial_size)

    def test_json(self):
        '''
        Verify that the storage of JSON rows allows for reproduction of the
        datasets.
        '''
        log_path = os.path.join(TEST_PATH, 'sample_log_small.ulg')

        dbulog = DatabaseULog(self.db_handle, log_file=log_path)
        dbulog.save(append_json=True)

        with self.db_handle() as con:
            cur = con.cursor()
            for dataset in dbulog.data_list:
                for field_name, values in dataset.data.items():
                    cur.execute('''
                        SELECT j.key, j.value
                        FROM ULogField uf, json_each(uf.ValueJson) j
                        JOIN ULogDataset uds ON uf.DatasetId = uds.Id
                        WHERE uds.DatasetName = ?
                          AND uds.MultiId = ?
                          AND uf.TopicName = ?
                          AND uds.ULogId = ?
                        ORDER BY j.key ASC
                        ''', (dataset.name, dataset.multi_id, field_name, dbulog.primary_key))
                    results = np.array(cur.fetchall(), dtype=float)
                    db_timestamps = results[:,0].flatten()
                    db_values = results[:,1].flatten()

                    # We must filter out None, nan and inf values since JSON
                    # doesn't support nan and inf.
                    db_values_finite = db_values[np.isfinite(db_values)]
                    values_finite = values[np.isfinite(values)]

                    # We test for approximate equality since we are comparing
                    # string-formatted floats.
                    self.assertEqual(len(db_values_finite), len(values_finite))
                    if len(db_values_finite) > 0:
                        np.testing.assert_allclose(db_values_finite, values_finite)

                    if field_name == 'timestamp':
                        self.assertEqual(len(db_timestamps), len(values))
                        np.testing.assert_allclose(db_timestamps, values)
            cur.close()

    @data('sample',
          'sample_appended',
          'sample_appended_multiple',
          'sample_logging_tagged_and_default_params')
    def test_write_ulog(self, base_name):
        '''
        Test that the write_ulog method successfully replicates all relevant data.
        '''
        with tempfile.TemporaryDirectory() as tmpdirname:
            ulog_file_name = os.path.join(TEST_PATH, base_name + '.ulg')
            written_ulog_file_name = os.path.join(tmpdirname, base_name + '_copy.ulg')

            dbulog = DatabaseULog(self.db_handle, log_file=ulog_file_name)
            dbulog.save()

            # Writing out a lazily loaded log must be rejected, since the
            # datasets have not been read into memory.
            lazy_loaded_dbulog = DatabaseULog(
                self.db_handle,
                primary_key=dbulog.primary_key,
                lazy=True
            )
            with self.assertRaises(ValueError):
                lazy_loaded_dbulog.write_ulog(written_ulog_file_name)

            loaded_dbulog = DatabaseULog(self.db_handle, primary_key=dbulog.primary_key, lazy=False)
            loaded_dbulog.write_ulog(written_ulog_file_name)
            copied = ULog(written_ulog_file_name)

            # Some fields are not copied but dropped, so we cheat by modifying the original
            loaded_dbulog._sync_seq_cnt = 0 # pylint: disable=protected-access
            loaded_dbulog._appended_offsets = [] # pylint: disable=protected-access
            loaded_dbulog._incompat_flags[0] &= 0xFE # pylint: disable=protected-access
            assert copied == loaded_dbulog
277 |
--------------------------------------------------------------------------------
/test/test_extract_message.py:
--------------------------------------------------------------------------------
1 | '''
2 | Test extract_message module
3 | '''
4 |
5 | import os
6 | import inspect
7 | import unittest
8 |
9 | from ddt import ddt, data
10 |
11 | from pyulog.extract_message import extract_message
12 |
13 | TEST_PATH = os.path.dirname(os.path.abspath(
14 | inspect.getfile(inspect.currentframe())))
15 |
@ddt
class TestExtractMessage(unittest.TestCase):
    """
    Test extract_message module.
    """

    @data('sample')
    def test_extract_message(self, test_case):
        """
        Smoke test: extract_message must run to completion without raising.
        """
        log_path = os.path.join(TEST_PATH, f'{test_case}.ulg')
        # No time window: pass None for both start and end times.
        extract_message(log_path, "actuator_controls_0", None, None)

# vim: set et fenc=utf-8 ft=python ff=unix sts=4 sw=4 ts=4
36 |
37 | # vim: set et fenc=utf-8 ft=python ff=unix sts=4 sw=4 ts=4
38 |
--------------------------------------------------------------------------------
/test/test_migration.py:
--------------------------------------------------------------------------------
1 | '''
2 | Test that the migration module works correctly.
3 | '''
4 |
5 | import unittest
6 | import os
7 | import re
8 | import sqlite3
9 | import subprocess
10 |
11 | from unittest.mock import patch
12 | from ddt import ddt, data
13 | from pyulog.db import DatabaseULog
14 | from pyulog.migrate_db import migrate_db
15 |
16 | TEST_PATH = os.path.dirname(os.path.abspath(__file__))
17 |
@ddt
class TestMigration(unittest.TestCase):
    '''
    Using both fake and real migration files, try various migration sequences
    and check that the state of the database is as expected from the migrations
    that were run.

    '''

    def setUp(self):
        '''
        Set up the test database and fake migration script directory for each test.
        '''
        self.db_path = os.path.join(TEST_PATH, 'test_pyulog.sqlite3')
        self.db_handle = DatabaseULog.get_db_handle(self.db_path)
        # Fake migration scripts are written here by _make_migration_file.
        self.sql_dir = os.path.join(TEST_PATH, 'test_sql')
        os.mkdir(self.sql_dir)

    def tearDown(self):
        '''
        Remove test database and fake migration script directory after each test.
        '''
        for filename in os.listdir(self.sql_dir):
            # Safety check so we never delete anything but generated migration files.
            assert re.match(r'pyulog\.\d\.sql', filename), 'Only removing migration files.'
            filepath = os.path.join(self.sql_dir, filename)
            os.remove(filepath)
        os.rmdir(self.sql_dir)
        if os.path.exists(self.db_path):
            os.remove(self.db_path)

    def _make_migration_file(self, sql):
        '''
        Utility function for creating fake migration files. This is necessary
        because the migration tool reads from disk, so any fake migration
        scripts we want to test must be written to disk too, with correct file
        names.

        The files are cleaned up in tearDown.

        :param sql: the SQL text to write into the next migration file.
        '''
        current_migration_files = os.listdir(self.sql_dir)
        # Migration files are 1-indexed: pyulog.1.sql, pyulog.2.sql, ...
        migration_index = len(current_migration_files) + 1
        sql_filename = f'pyulog.{migration_index}.sql'
        sql_path = os.path.join(self.sql_dir, sql_filename)
        with open(sql_path, 'w', encoding='utf8') as migration_file:
            migration_file.write(sql)

    def _get_db_info(self):
        '''
        Utility function for getting the current database version and column
        names. This is used to verify the state of the database after running
        various migration scripts.

        :return: tuple of (user_version pragma value, list of TestTable column names)
        '''
        with self.db_handle() as con:
            cur = con.cursor()
            cur.execute('PRAGMA table_info(TestTable)')
            table_info = cur.fetchall()
            cur.execute('PRAGMA user_version')
            (db_version,) = cur.fetchone()
            cur.close()
        # The column name is the second field of each PRAGMA table_info row.
        return db_version, [column_info[1] for column_info in table_info]

    def test_good_migrations(self):
        '''
        Test that two sequential migrations run successfully and sequentially.
        '''
        self._make_migration_file('BEGIN; CREATE TABLE TestTable ( Id INTEGER ); COMMIT;')
        with patch.object(DatabaseULog, 'SCHEMA_VERSION', 1):
            migrate_db(self.db_path, sql_dir=self.sql_dir)
        db_version, col_names = self._get_db_info()
        self.assertEqual(col_names[0], 'Id')
        self.assertEqual(db_version, 1)

        self._make_migration_file('BEGIN; ALTER TABLE TestTable RENAME Id to IdRenamed; COMMIT;')
        with patch.object(DatabaseULog, 'SCHEMA_VERSION', 2):
            migrate_db(self.db_path, sql_dir=self.sql_dir)
        db_version, col_names = self._get_db_info()
        self.assertEqual(col_names[0], 'IdRenamed')
        self.assertEqual(db_version, 2)

    @data('CREATE TABLE TestTable;',
          'BEGIN; CREATE TABLE TestTable;',
          'CREATE TABLE TestTable; END;')
    def test_transactions(self, sql_line):
        '''
        Verify that migration files are rejected if they don't enforce
        transactions correctly.
        '''
        # Each data case is missing BEGIN, COMMIT, or both.
        self._make_migration_file(sql_line)
        with self.assertRaises(ValueError), \
             patch.object(DatabaseULog, 'SCHEMA_VERSION', 1):
            migrate_db(self.db_path, sql_dir=self.sql_dir)

    def test_bad_migrations(self):
        '''
        Insert a bug into a line of a migration script, and verify that the
        script is rolled back.
        '''
        self._make_migration_file('''
            BEGIN;
            CREATE TABLE TestTable ( Id INTEGER, Value TEXT );
            COMMIT;
        ''')
        self._make_migration_file('''
            BEGIN;
            ALTER TABLE TestTable RENAME COLUMN Id TO IdRenamed;
            ALTER TABLE TestTable RENAME COLUMN IdRenamed TO IdRenamed2;
            ALTER TABLE TestTable RENAME COLUMN Value TO ValueRenamed;
            COMMIT;
        ''')
        with self.assertRaises(sqlite3.OperationalError), \
             patch.object(DatabaseULog, 'SCHEMA_VERSION', 2):
            migrate_db(self.db_path, sql_dir=self.sql_dir)
        # The second migration must have been rolled back completely,
        # leaving the state produced by the first migration.
        db_version, col_names = self._get_db_info()
        self.assertEqual(col_names[0], 'Id')
        # Also check the 'Value' column, since the renaming of that field would
        # not have been impacted by the buggy line.
        self.assertEqual(col_names[1], 'Value')
        self.assertEqual(db_version, 1)

    def test_existing_db(self):
        '''
        Verify that the migration tool will not modify databases that were
        created before the migration tool. Then verify that the -f flag
        correctly forces the execution anyway.
        '''
        db_version, _ = self._get_db_info() # This function implicitly creates the database
        self.assertEqual(db_version, 0)
        with self.assertRaises(FileExistsError):
            migrate_db(self.db_path)

        migrate_db(self.db_path, force=True)
        db_version, _ = self._get_db_info()
        self.assertEqual(db_version, DatabaseULog.SCHEMA_VERSION)

    def test_missing_migration_file(self):
        '''
        Verify that the migration tool stops after it encounters a non-existent
        migration file.
        '''
        # Only migration 1 exists on disk, but SCHEMA_VERSION demands 2.
        self._make_migration_file('BEGIN; CREATE TABLE TestTable ( Id INTEGER ); COMMIT;')
        with self.assertRaises(FileNotFoundError), \
             patch.object(DatabaseULog, 'SCHEMA_VERSION', 2):
            migrate_db(self.db_path, sql_dir=self.sql_dir)
        # Migration 1 must still have been applied before the failure.
        db_version, col_names = self._get_db_info()
        self.assertEqual(col_names[0], 'Id')
        self.assertEqual(db_version, 1)

    def test_noop(self):
        '''
        Verify that the noop flag removes any effect on the database.
        '''
        migrate_db(self.db_path, noop=True)
        db_version, _ = self._get_db_info()
        self.assertEqual(db_version, 0)

    def test_real_migrations(self):
        '''
        Verify that the migration scripts in the pyulog/sql directory execute
        successfully.
        '''
        migrate_db(self.db_path)
        db_version, _ = self._get_db_info()
        self.assertEqual(db_version, DatabaseULog.SCHEMA_VERSION)

    def test_cli(self):
        '''
        Verify that the command line tool ulog_migratedb completes the
        migrations successfully.
        '''
        result = subprocess.run(['ulog_migratedb', '-d', self.db_path], check=True)
        self.assertEqual(result.returncode, 0)
        db_version, _ = self._get_db_info()
        self.assertEqual(db_version, DatabaseULog.SCHEMA_VERSION)
191 |
--------------------------------------------------------------------------------
/test/test_px4.py:
--------------------------------------------------------------------------------
1 | '''
2 | Tests the PX4ULog class
3 | '''
4 |
5 | import os
6 | import inspect
7 | import unittest
8 |
9 | from ddt import ddt, data
10 |
11 | from pyulog import ULog
12 | from pyulog.px4 import PX4ULog
13 | from pyulog.db import DatabaseULog
14 | from pyulog.migrate_db import migrate_db
15 |
16 | TEST_PATH = os.path.dirname(os.path.abspath(
17 | inspect.getfile(inspect.currentframe())))
18 |
@ddt
class TestPX4ULog(unittest.TestCase):
    '''
    Tests the PX4ULog class
    '''

    def setUp(self):
        '''
        Create and migrate the test database used by the database-backed cases.
        '''
        self.db_path = os.path.join(TEST_PATH, 'pyulog_test.sqlite3')
        self.db_handle = DatabaseULog.get_db_handle(self.db_path)
        migrate_db(self.db_path)


    def tearDown(self):
        '''
        Delete the test database created in setUp.
        '''
        os.remove(self.db_path)

    @data('sample',
          'sample_appended',
          'sample_appended_multiple',
          'sample_logging_tagged_and_default_params')
    def test_add_roll_pitch_yaw(self, base_name):
        '''
        add_roll_pitch_yaw must add roll/pitch/yaw values to 'vehicle_attitude'.
        '''
        log_path = os.path.join(TEST_PATH, f'{base_name}.ulg')
        log = ULog(log_path)
        PX4ULog(log).add_roll_pitch_yaw()

        attitude = log.get_dataset('vehicle_attitude')
        for angle in ('roll', 'pitch', 'yaw'):
            assert angle in attitude.data

    @data('sample',
          'sample_appended',
          'sample_appended_multiple',
          'sample_logging_tagged_and_default_params')
    def test_add_roll_pitch_yaw_db(self, base_name):
        '''
        add_roll_pitch_yaw must add roll/pitch/yaw values to 'vehicle_attitude'
        when operating on a DatabaseULog object.
        '''
        log_path = os.path.join(TEST_PATH, f'{base_name}.ulg')
        DatabaseULog(self.db_handle, log_file=log_path).save()
        # Recover the stored log via its content hash instead of keeping the
        # saved object around, exercising the lookup-by-digest path.
        digest = DatabaseULog.calc_sha256sum(log_path)
        primary_key = DatabaseULog.primary_key_from_sha256sum(self.db_handle, digest)
        stored = DatabaseULog(self.db_handle, primary_key=primary_key, lazy=False)
        PX4ULog(stored).add_roll_pitch_yaw()

        attitude = stored.get_dataset('vehicle_attitude')
        for angle in ('roll', 'pitch', 'yaw'):
            assert angle in attitude.data


# vim: set et fenc=utf-8 ft=python ff=unix sts=4 sw=4 ts=4
81 |
82 |
83 | # vim: set et fenc=utf-8 ft=python ff=unix sts=4 sw=4 ts=4
84 |
--------------------------------------------------------------------------------
/test/test_px4_events.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests the PX4Events class
3 | """
4 |
5 | import os
6 | import inspect
7 | import unittest
8 |
9 | from ddt import ddt, data
10 |
11 | import pyulog
12 | from pyulog.px4_events import PX4Events
13 |
14 | TEST_PATH = os.path.dirname(os.path.abspath(
15 | inspect.getfile(inspect.currentframe())))
16 |
@ddt
class TestPX4Events(unittest.TestCase):
    """
    Tests the PX4Events class
    """

    @data('sample_px4_events')
    def test_px4_events(self, base_name):
        """
        Check that PX4 events are extracted from a sample log using only the
        event definitions embedded in the log itself.
        """
        log_path = os.path.join(TEST_PATH, base_name + '.ulg')
        log = pyulog.ULog(log_path)
        events = PX4Events()

        def reject_default_definitions(already_has_default_parser: bool):
            # The sample log carries its own definitions; falling back to the
            # default JSON definitions would be a regression.
            raise AssertionError('Must use definitions from logs')
        events.set_default_json_definitions_cb(reject_default_definitions)

        expected_messages = [
            (1710773350346000, 'INFO', 'logging: opening log file 2024-3-18/14_49_10.ulg'),
            (1710773365282000, 'INFO', 'Armed by internal command'),
            (1710773365282000, 'INFO', 'Using default takeoff altitude: 2.50 m'),
            (1710773367094000, 'INFO', 'Takeoff detected'),
            (1710773372482000, 'INFO', 'RTL: start return at 491 m (3 m above destination)'),
            (1710773372694000, 'INFO', 'RTL: land at destination'),
            (1710773378482000, 'INFO', 'Landing detected'),
            (1710773380486000, 'INFO', 'Disarmed by landing')
        ]
        assert events.get_logged_events(log) == expected_messages
48 |
--------------------------------------------------------------------------------
/test/test_ulog.py:
--------------------------------------------------------------------------------
1 | '''
2 | Tests the ULog class
3 | '''
4 |
5 | import os
6 | import inspect
7 | import unittest
8 | import tempfile
9 | from io import BytesIO
10 |
11 | from ddt import ddt, data
12 |
13 | import pyulog
14 |
15 | TEST_PATH = os.path.dirname(os.path.abspath(
16 | inspect.getfile(inspect.currentframe())))
17 |
@ddt
class TestULog(unittest.TestCase):
    '''
    Tests the ULog class
    '''

    @data('sample')
    def test_comparison(self, base_name):
        '''
        Check that ULog equality compares content rather than identity.
        '''
        log_path = os.path.join(TEST_PATH, base_name + '.ulg')
        first = pyulog.ULog(log_path)
        second = pyulog.ULog(log_path)
        # Two parses of the same file are equal but distinct objects.
        assert first == second
        assert first is not second

        # Perturbing a single arbitrary value must break equality.
        first.data_list[0].data['timestamp'][0] += 1
        assert first != second


    @data('sample',
          'sample_appended',
          'sample_appended_multiple',
          'sample_logging_tagged_and_default_params')
    def test_write_ulog(self, base_name):
        '''
        Check that write_ulog replicates all relevant data to a new file.
        '''
        source_file = os.path.join(TEST_PATH, base_name + '.ulg')
        original = pyulog.ULog(source_file)
        with tempfile.TemporaryDirectory() as tmpdirname:
            copy_file = os.path.join(tmpdirname, base_name + '_copy.ulg')
            original.write_ulog(copy_file)
            copied = pyulog.ULog(copy_file)

        # Some fields are not copied but dropped, so we cheat by modifying the original
        original._sync_seq_cnt = 0  # pylint: disable=protected-access
        original._appended_offsets = []  # pylint: disable=protected-access
        original._incompat_flags[0] &= 0xFE  # pylint: disable=protected-access

        assert copied == original

    @data('sample')
    def test_write_ulog_memory(self, base_name):
        '''
        Check that write_ulog can write bytes to an in-memory handle.
        '''
        source_file = os.path.join(TEST_PATH, base_name + '.ulg')
        original = pyulog.ULog(source_file)
        with BytesIO() as bytes_handle:
            original.write_ulog(bytes_handle)
            bytes_handle.seek(0)
            copied = pyulog.ULog(bytes_handle)

        for attr_name, original_value in original.__dict__.items():
            copied_value = getattr(copied, attr_name)
            if attr_name == '_sync_seq_cnt':
                # Sync messages are counted on parse, but otherwise dropped, so
                # we don't rewrite them
                assert copied_value == 0
            elif attr_name == '_appended_offsets':
                # Abruptly ended messages just before offsets are dropped, so
                # we don't rewrite appended offsets
                assert copied_value == []
            elif attr_name == '_incompat_flags':
                # Same reasoning on incompat_flags[0] as for '_appended_offsets'
                assert copied_value[0] == original_value[0] & 0xFE  # pylint: disable=unsubscriptable-object
                assert copied_value[1:] == original_value[1:]  # pylint: disable=unsubscriptable-object
            else:
                assert copied_value == original_value
91 |
92 | # vim: set et fenc=utf-8 ft=python ff=unix sts=4 sw=4 ts=4
93 |
--------------------------------------------------------------------------------