├── ChangeLog.md ├── LICENSE ├── README.md └── baggie ├── CMakeLists.txt ├── baggie ├── _Baggie.py ├── __init__.py ├── cmd │ ├── __init__.py │ ├── filter.py │ ├── join.py │ ├── split.py │ └── timeline.py └── util.py ├── doc └── figures │ └── box-logo.png ├── package.xml ├── scripts ├── filter ├── join ├── split └── timeline ├── src └── py │ └── _baggie.cpp └── test ├── test_baggie_reader.py ├── test_baggie_writer.py ├── test_context_mgr.py ├── test_meta.py ├── test_sequential.py ├── test_sequential_compressed.py └── test_stamping.py /ChangeLog.md: -------------------------------------------------------------------------------- 1 | ## 0.1.0 (YYYY.MM.DD) - not yet released 2 | 3 | * Initial alpha-version 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ros2-bagutils 2 | ============= 3 | This repository contains packages for working with bag files in ROS 2. 4 | 5 | baggie 6 | ------ 7 | The `baggie` package provides a Python wrapper around the [C++ rosbag2 8 | API](https://github.com/ros2/rosbag2/tree/master/rosbag2_cpp) as well as some 9 | pure Python convenience interfaces for making working with bags in ROS 2 10 | easier. 
Inspired, in part, by the (forthcoming?) 11 | [rosbag2_py](https://github.com/ros2/rosbag2/pull/308/files) Python API, this 12 | package exposes the necessary C++ API via Pybind11 but puts a Python interface 13 | in front of it to make the easy things easy and the hard things possible. 14 | 15 | In the simplest of use cases, you'll want to read from an existing bag. You can 16 | do this with a context manager like: 17 | 18 | ```python 19 | import baggie 20 | 21 | [ ... ] 22 | 23 | with baggie.BagReader("/path/to/file.bag") as bag: 24 | for topic, msg, stamp in bag.read_messages(): 25 | # do something with the data 26 | ``` 27 | 28 | Writing a bag is similarly easy: 29 | 30 | ```python 31 | import baggie 32 | from example_interfaces.msg import Int32 33 | from datetime import datetime as DT 34 | 35 | [ ... ] 36 | 37 | with baggie.BagWriter("/path/to/file.bag") as bag: 38 | msg = Int32() 39 | msg.data = 1 40 | bag.write("/int_topic", msg, DT.now()) 41 | ``` 42 | **NOTE:** In the above example we pass a `datetime.datetime` as the timestamp 43 | when we write the message. We do this for simplicity and convenience in this 44 | example. However, we recommend passing an `rclpy.time.Time` instance instead 45 | (the method accepts either). The reason for this recommendation is that 46 | `datetime.datetime` in Python only supports microsecond precision whereas the ROS 2 47 | time libraries support nanosecond precision. 48 | 49 | The above examples are intentionally simplistic and accept many default 50 | arguments. However, since `baggie` [exposes](./baggie/src/py/_baggie.cpp) a 51 | significant portion of the C++ API, much more complex use cases are 52 | supported. The context manager examples above are a front-end to the central 53 | fixture of this library, the `baggie.Baggie` class. [The Baggie 54 | class](./baggie/baggie/_Baggie.py) provides an interface for reading or writing 55 | ROS 2 bag files directly. A given instance of a Baggie can be instantiated as 56 | either a reader or a writer. One `Baggie` maps to exactly one on-disk ROS 2 bag 57 | file (which may be made up of several files). Extensive example code is 58 | available in the [test directory](./baggie/test/), including manually driving 59 | the lower-level C++ interface via the Python projections. Additionally, 60 | real-world examples of using the `baggie` API can be seen in the code for the 61 | [utility scripts](./baggie/baggie/cmd/) provided with the `baggie` package. 62 | 63 | ### Utility scripts 64 | Beyond the Python library code, the `baggie` package provides several 65 | command-line utilities for performing common operations on bagfiles. They 66 | include: 67 | 68 | - [filter](#filter): Copy bag files with optional transformation filters 69 | - [split](#split): Split a single input bag into multiple, smaller, output bags 70 | - [join](#join): Join multiple input bags into a single output bag 71 | - [timeline](#timeline): Print a JSON timeline of messages in a set of bags 72 | 73 | #### filter 74 | The `filter` script is used to copy bag files while also applying some 75 | filtering during the copying process.
Here is its help string: 76 | 77 | ``` 78 | $ ros2 run baggie filter --help 79 | usage: filter [-h] -i INFILE [-o OUTFILE] [--start_time START_TIME] [--end_time END_TIME] [--topics T [T ...]] [--map M [M ...]] 80 | [--compress | --uncompress] 81 | 82 | Copy a bag file, optionally filtered by topic and time 83 | 84 | optional arguments: 85 | -h, --help show this help message and exit 86 | -i INFILE, --infile INFILE 87 | Path to input bag file (default: None) 88 | -o OUTFILE, --outfile OUTFILE 89 | Output bag file, default: -filtered.bag (default: None) 90 | --start_time START_TIME 91 | Earliest message stamp in output bag (nanos) (default: None) 92 | --end_time END_TIME Latest message stamp in output bag (nanos) (default: None) 93 | --topics T [T ...] List of topics to include in output bag (default: None) 94 | --map M [M ...] Topic name remappings: --map from:to (default: None) 95 | --compress Compress output file (default: False) 96 | --uncompress Do not compress output file (default: False) 97 | ``` 98 | 99 | The only required option is `-i` (or `--infile`) to specify which input bag 100 | file to operate on. When run in this way, `filter` acts like the Unix `cp` 101 | command (albeit, inefficient). The output file will be named the same as the 102 | input file except the suffix `-filtered.bag` will be appended to the end. Here 103 | is an example: 104 | 105 | ``` 106 | $ ls 107 | lidar.bag 108 | 109 | $ ros2 bag info lidar.bag 110 | 111 | Files: lidar.bag_0.db3 112 | Bag size: 403.6 MiB 113 | Storage id: sqlite3 114 | Duration: 474.726s 115 | Start: Jun 18 2020 15:26:17.900 (1592508377.900) 116 | End: Jun 18 2020 15:34:12.626 (1592508852.626) 117 | Messages: 2829 118 | Topic information: Topic: /tf_static | Type: tf2_msgs/msg/TFMessage | Count: 1 | Serialization Format: cdr 119 | Topic: /tf | Type: tf2_msgs/msg/TFMessage | Count: 1414 | Serialization Format: cdr 120 | Topic: /os1_cloud_node/points_tf | Type: sensor_msgs/msg/PointCloud2 | Count: 1414 | Serialization Format: cdr 121 | 122 | 123 | $ ros2 run baggie filter -i lidar.bag 124 | [INFO] [1594828793.442763739] [rosbag2_storage]: Opened database 'lidar.bag/lidar.bag_0.db3' for READ_ONLY. 125 | [INFO] [1594828793.464352982] [rosbag2_storage]: Opened database 'lidar-filtered.bag/lidar-filtered.bag_0.db3' for READ_WRITE. 126 | 127 | $ ros2 bag info lidar-filtered.bag 128 | 129 | Files: lidar-filtered.bag_0.db3 130 | Bag size: 403.6 MiB 131 | Storage id: sqlite3 132 | Duration: 474.726s 133 | Start: Jun 18 2020 15:26:17.900 (1592508377.900) 134 | End: Jun 18 2020 15:34:12.626 (1592508852.626) 135 | Messages: 2829 136 | Topic information: Topic: /tf_static | Type: tf2_msgs/msg/TFMessage | Count: 1 | Serialization Format: cdr 137 | Topic: /tf | Type: tf2_msgs/msg/TFMessage | Count: 1414 | Serialization Format: cdr 138 | Topic: /os1_cloud_node/points_tf | Type: sensor_msgs/msg/PointCloud2 | Count: 1414 | Serialization Format: cdr 139 | ``` 140 | 141 | For exemplary purposes, suppose we wanted a new bag that only contained the 142 | data from the topic `/os1_cloud_node/points_tf` but in the output bag, we 143 | wanted the topic to be called `/points`. Also, suppose we want to compress the 144 | output file. This looks like: 145 | 146 | ``` 147 | $ ros2 run baggie filter -i lidar.bag --compress --topics /os1_cloud_node/points_tf --map /os1_cloud_node/points_tf:/points 148 | [INFO] [1594829379.985118419] [rosbag2_storage]: Opened database 'lidar.bag/lidar.bag_0.db3' for READ_ONLY. 
149 | [INFO] [1594829380.007895566] [rosbag2_storage]: Opened database 'lidar-filtered.bag/lidar-filtered.bag_0.db3' for READ_WRITE. 150 | 151 | $ ros2 bag info lidar-filtered.bag 152 | 153 | Files: lidar-filtered.bag/lidar-filtered.bag_0.db3.zstd 154 | Bag size: 372.3 MiB 155 | Storage id: sqlite3 156 | Duration: 143.497s 157 | Start: Jun 18 2020 15:26:17.900 (1592508377.900) 158 | End: Jun 18 2020 15:28:41.397 (1592508521.397) 159 | Messages: 1414 160 | Topic information: Topic: /points | Type: sensor_msgs/msg/PointCloud2 | Count: 1414 | Serialization Format: cdr 161 | 162 | $ cat lidar-filtered.bag/metadata.yaml 163 | rosbag2_bagfile_information: 164 | version: 4 165 | storage_identifier: sqlite3 166 | relative_file_paths: 167 | - lidar-filtered.bag/lidar-filtered.bag_0.db3.zstd 168 | duration: 169 | nanoseconds: 143497058611 170 | starting_time: 171 | nanoseconds_since_epoch: 1592508377900096491 172 | message_count: 1414 173 | topics_with_message_count: 174 | - topic_metadata: 175 | name: /points 176 | type: sensor_msgs/msg/PointCloud2 177 | serialization_format: cdr 178 | offered_qos_profiles: "" 179 | message_count: 1414 180 | compression_format: zstd 181 | compression_mode: FILE 182 | ``` 183 | 184 | **TODO:** Add a filter, similar to `--topics`, based on message type. 185 | 186 | #### split 187 | The `split` script is used to split a single bag into multiple (smaller) bags 188 | on a **time basis**. Here is its help string: 189 | 190 | ``` 191 | $ ros2 run baggie split --help 192 | usage: split [-h] -i INFILE [-o OUTDIR] [-f FRACTION] 193 | 194 | Splits a bag file into time-based partitions 195 | 196 | optional arguments: 197 | -h, --help show this help message and exit 198 | -i INFILE, --infile INFILE 199 | Path to input bag file (default: None) 200 | -o OUTDIR, --outdir OUTDIR 201 | Path to output directory for split bag files (default: None) 202 | -f FRACTION, --fraction FRACTION 203 | Time fraction (0., 1.]; .5 = split in half (default: 0.5) 204 | ``` 205 | 206 | The critical argument to this script is `-f` (or `--fraction`). It controls how 207 | the bag is split based on the message time stamps. To split a bag into two 208 | (smaller) bags on a time basis, you would pass `-f .5` denoting that each bag 209 | should contain half (`.5`) of the wall clock time associated with this input 210 | bag. This does **not** mean that the bags will each contain half of the 211 | messages. The splitting is based on time. Let's look at a concrete example. 212 | 213 | Suppose we have a bag full of pointclouds captured from a LiDAR that looks like 214 | this: 215 | 216 | ``` 217 | $ ros2 bag info pointcloud.bag 218 | 219 | Files: pointcloud.bag_0.db3 220 | Bag size: 403.5 MiB 221 | Storage id: sqlite3 222 | Duration: 143.497s 223 | Start: Jun 18 2020 15:26:17.900 (1592508377.900) 224 | End: Jun 18 2020 15:28:41.397 (1592508521.397) 225 | Messages: 1414 226 | Topic information: Topic: /points | Type: sensor_msgs/msg/PointCloud2 | Count: 1414 | Serialization Format: cdr 227 | ``` 228 | 229 | Let's split it into 4 smaller bags on a time basis: 230 | 231 | ``` 232 | $ ros2 run baggie split -i pointcloud.bag -f .25 233 | [INFO] [1594912449.657473501] [rosbag2_storage]: Opened database 'pointcloud.bag/pointcloud.bag_0.db3' for READ_ONLY. 234 | [INFO] [1594912449.680794886] [rosbag2_storage]: Opened database './00_pointcloud.bag/00_pointcloud.bag_0.db3' for READ_WRITE. 235 | [INFO] [1594912451.548708732] [rosbag2_storage]: Opened database './01_pointcloud.bag/01_pointcloud.bag_0.db3' for READ_WRITE. 
236 | [INFO] [1594912453.278216856] [rosbag2_storage]: Opened database './02_pointcloud.bag/02_pointcloud.bag_0.db3' for READ_WRITE. 237 | [INFO] [1594912455.048369887] [rosbag2_storage]: Opened database './03_pointcloud.bag/03_pointcloud.bag_0.db3' for READ_WRITE. 238 | ``` 239 | 240 | We note that, by default, the output bags are created in the same directory as 241 | the input source bag. Additionally, the output bags will inherit their storage, 242 | converter, and compression options from the input bag from which they were 243 | derived. Here is what `info` reports for the 4 generated output bags: 244 | 245 | ``` 246 | $ for f in $(ls -1 | grep -i "^0.*\.bag$"); do ros2 bag info ${f}; done 247 | 248 | Files: 00_pointcloud.bag_0.db3 249 | Bag size: 99.6 MiB 250 | Storage id: sqlite3 251 | Duration: 35.802s 252 | Start: Jun 18 2020 15:26:17.900 (1592508377.900) 253 | End: Jun 18 2020 15:26:53.702 (1592508413.702) 254 | Messages: 353 255 | Topic information: Topic: /points | Type: sensor_msgs/msg/PointCloud2 | Count: 353 | Serialization Format: cdr 256 | 257 | 258 | Files: 01_pointcloud.bag_0.db3 259 | Bag size: 103.0 MiB 260 | Storage id: sqlite3 261 | Duration: 35.827s 262 | Start: Jun 18 2020 15:26:53.796 (1592508413.796) 263 | End: Jun 18 2020 15:27:29.623 (1592508449.623) 264 | Messages: 356 265 | Topic information: Topic: /points | Type: sensor_msgs/msg/PointCloud2 | Count: 356 | Serialization Format: cdr 266 | 267 | 268 | Files: 02_pointcloud.bag_0.db3 269 | Bag size: 102.2 MiB 270 | Storage id: sqlite3 271 | Duration: 35.805s 272 | Start: Jun 18 2020 15:27:29.695 (1592508449.695) 273 | End: Jun 18 2020 15:28:05.501 (1592508485.501) 274 | Messages: 352 275 | Topic information: Topic: /points | Type: sensor_msgs/msg/PointCloud2 | Count: 352 | Serialization Format: cdr 276 | 277 | 278 | Files: 03_pointcloud.bag_0.db3 279 | Bag size: 98.6 MiB 280 | Storage id: sqlite3 281 | Duration: 35.797s 282 | Start: Jun 18 2020 15:28:05.599 (1592508485.599) 283 | End: Jun 18 2020 15:28:41.397 (1592508521.397) 284 | Messages: 353 285 | Topic information: Topic: /points | Type: sensor_msgs/msg/PointCloud2 | Count: 353 | Serialization Format: cdr 286 | ``` 287 | 288 | #### join 289 | The inverse operation of `split` is to `join`. In the context of `baggie`, the 290 | `join` script takes a set of input bag files and joins them into a single 291 | output bag file. The help for `join` is shown below: 292 | 293 | ``` 294 | $ ros2 run baggie join --help 295 | usage: join [-h] -o OUTFILE [--compress | --uncompress] INFILE [INFILE ...] 296 | 297 | Joins several ROS 2 bag files into a single combined bag 298 | 299 | positional arguments: 300 | INFILE The input bag files to join 301 | 302 | optional arguments: 303 | -h, --help show this help message and exit 304 | -o OUTFILE, --outfile OUTFILE 305 | The output bag file name to create (default: None) 306 | --compress Compress the output file (default: False) 307 | --uncompress Do not compress the output file (default: False) 308 | ``` 309 | 310 | Building on the split example from above, we took the input `pointcloud.bag` 311 | and spit it into 4 smaller bags. Let's join them back into a single bag called 312 | `lidar.bag`. 313 | 314 | ``` 315 | $ ls 316 | 00_pointcloud.bag 01_pointcloud.bag 02_pointcloud.bag 03_pointcloud.bag pointcloud.bag 317 | 318 | $ ros2 run baggie join -o lidar.bag 0*.bag 319 | [INFO] [1594918662.208676635] [rosbag2_storage]: Opened database 'lidar.bag/lidar.bag_0.db3' for READ_WRITE. 
320 | [INFO] [1594918662.214462213] [rosbag2_storage]: Opened database '00_pointcloud.bag/00_pointcloud.bag_0.db3' for READ_ONLY. 321 | [INFO] [1594918664.026249523] [rosbag2_storage]: Opened database '01_pointcloud.bag/01_pointcloud.bag_0.db3' for READ_ONLY. 322 | [INFO] [1594918665.776982632] [rosbag2_storage]: Opened database '02_pointcloud.bag/02_pointcloud.bag_0.db3' for READ_ONLY. 323 | [INFO] [1594918667.354551863] [rosbag2_storage]: Opened database '03_pointcloud.bag/03_pointcloud.bag_0.db3' for READ_ONLY. 324 | ``` 325 | 326 | Comparing the joined `lidar.bag` with the original `pointcloud.bag`: 327 | 328 | ``` 329 | $ ros2 bag info pointcloud.bag 330 | 331 | Files: pointcloud.bag_0.db3 332 | Bag size: 403.5 MiB 333 | Storage id: sqlite3 334 | Duration: 143.497s 335 | Start: Jun 18 2020 15:26:17.900 (1592508377.900) 336 | End: Jun 18 2020 15:28:41.397 (1592508521.397) 337 | Messages: 1414 338 | Topic information: Topic: /points | Type: sensor_msgs/msg/PointCloud2 | Count: 1414 | Serialization Format: cdr 339 | 340 | 341 | $ ros2 bag info lidar.bag 342 | 343 | Files: lidar.bag_0.db3 344 | Bag size: 403.5 MiB 345 | Storage id: sqlite3 346 | Duration: 143.497s 347 | Start: Jun 18 2020 15:26:17.900 (1592508377.900) 348 | End: Jun 18 2020 15:28:41.397 (1592508521.397) 349 | Messages: 1414 350 | Topic information: Topic: /points | Type: sensor_msgs/msg/PointCloud2 | Count: 1414 | Serialization Format: cdr 351 | ``` 352 | 353 | #### timeline 354 | Print a timeline of high-level message data as JSON to the screen. This allows 355 | for processing with analysis tools like [pandas](https://pandas.pydata.org/) to 356 | get a view into your ROS message data as a timeseries. The help message for the 357 | `timeline` script looks like: 358 | 359 | ``` 360 | $ ros2 run baggie timeline --help 361 | usage: timeline [-h] [--pretty] [--aos | --soa] INFILE [INFILE ...] 362 | 363 | A JSON timeline of messages contained in a set of bags 364 | 365 | positional arguments: 366 | INFILE The input bag files to add to the timeline 367 | 368 | optional arguments: 369 | -h, --help show this help message and exit 370 | --pretty Pretty print the output JSON (default: False) 371 | --aos Output the data as an array-of-structs (default: False) 372 | --soa Output the data as a struct-of-arrays (default: False) 373 | ``` 374 | It supports file globbing to generate one big JSON timeline from a set of bags 375 | if that is desireable. 376 | 377 | Here is an example. I'll work with a very small bag file that has been filtered 378 | to contain less than a half-second of LiDAR data. Here is the bag info: 379 | 380 | ``` 381 | $ ros2 bag info lidar-filtered.bag 382 | 383 | Files: lidar-filtered.bag_0.db3 384 | Bag size: 1.4 MiB 385 | Storage id: sqlite3 386 | Duration: 0.405s 387 | Start: Jun 18 2020 15:26:17.900 (1592508377.900) 388 | End: Jun 18 2020 15:26:18.305 (1592508378.305) 389 | Messages: 10 390 | Topic information: Topic: /tf | Type: tf2_msgs/msg/TFMessage | Count: 5 | Serialization Format: cdr 391 | Topic: /points | Type: sensor_msgs/msg/PointCloud2 | Count: 5 | Serialization Format: cdr 392 | ``` 393 | 394 | The `timeline` output supports two different data structures depending upon how 395 | you want to process your data. 
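Either structure can be loaded into [pandas](https://pandas.pydata.org/) with just a few lines. Below is a minimal sketch (not part of the package) that assumes the `--soa` output has been redirected to a file named `timeline.json`; the file name and the analysis shown are only examples:

```python
import json
import pandas as pd

# e.g.: ros2 run baggie timeline --soa lidar-filtered.bag 2>/dev/null > timeline.json
with open("timeline.json") as f:
    data = json.load(f)

# The --soa (struct-of-arrays) form maps directly onto DataFrame columns. The
# --aos form (a list of records) loads the same way, though its keys differ
# slightly (e.g. `topic_name` instead of `topic`).
df = pd.DataFrame(data)

# Convert the nanosecond stamps into a proper datetime index for
# timeseries-style analysis.
df["time"] = pd.to_datetime(df["stamp"], unit="ns")
df = df.set_index("time").sort_index()

print(df.groupby("topic").size())  # message counts per topic
```

The two structures are described below.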
The default is the *array-of-structs*: 396 | 397 | ``` 398 | $ ros2 run baggie timeline --pretty --aos lidar-filtered.bag 2>/dev/null 399 | [ 400 | { 401 | "stamp": 1592508377900096491, 402 | "time": "2020-06-18 15:26:17.900096512", 403 | "topic_name": "/points", 404 | "type": "sensor_msgs/msg/PointCloud2" 405 | }, 406 | { 407 | "stamp": 1592508377900705889, 408 | "time": "2020-06-18 15:26:17.900705792", 409 | "topic_name": "/tf", 410 | "type": "tf2_msgs/msg/TFMessage" 411 | }, 412 | { 413 | "stamp": 1592508378001884212, 414 | "time": "2020-06-18 15:26:18.001884160", 415 | "topic_name": "/tf", 416 | "type": "tf2_msgs/msg/TFMessage" 417 | }, 418 | { 419 | "stamp": 1592508378003208996, 420 | "time": "2020-06-18 15:26:18.003208960", 421 | "topic_name": "/points", 422 | "type": "sensor_msgs/msg/PointCloud2" 423 | }, 424 | { 425 | "stamp": 1592508378104989363, 426 | "time": "2020-06-18 15:26:18.104989440", 427 | "topic_name": "/points", 428 | "type": "sensor_msgs/msg/PointCloud2" 429 | }, 430 | { 431 | "stamp": 1592508378107458144, 432 | "time": "2020-06-18 15:26:18.107458048", 433 | "topic_name": "/tf", 434 | "type": "tf2_msgs/msg/TFMessage" 435 | }, 436 | { 437 | "stamp": 1592508378201481910, 438 | "time": "2020-06-18 15:26:18.201481984", 439 | "topic_name": "/tf", 440 | "type": "tf2_msgs/msg/TFMessage" 441 | }, 442 | { 443 | "stamp": 1592508378202927657, 444 | "time": "2020-06-18 15:26:18.202927616", 445 | "topic_name": "/points", 446 | "type": "sensor_msgs/msg/PointCloud2" 447 | }, 448 | { 449 | "stamp": 1592508378303028090, 450 | "time": "2020-06-18 15:26:18.303027968", 451 | "topic_name": "/points", 452 | "type": "sensor_msgs/msg/PointCloud2" 453 | }, 454 | { 455 | "stamp": 1592508378305343350, 456 | "time": "2020-06-18 15:26:18.305343232", 457 | "topic_name": "/tf", 458 | "type": "tf2_msgs/msg/TFMessage" 459 | } 460 | ] 461 | ``` 462 | 463 | Alternatively, you can output the data as a *struct-of-arrays*. This format is 464 | somewhat easier to directly import into analysis tools, YMMV: 465 | 466 | ``` 467 | $ ros2 run baggie timeline --pretty --soa lidar-filtered.bag 2>/dev/null 468 | { 469 | "stamp": [ 470 | 1592508377900096491, 471 | 1592508377900705889, 472 | 1592508378001884212, 473 | 1592508378003208996, 474 | 1592508378104989363, 475 | 1592508378107458144, 476 | 1592508378201481910, 477 | 1592508378202927657, 478 | 1592508378303028090, 479 | 1592508378305343350 480 | ], 481 | "topic": [ 482 | "/points", 483 | "/tf", 484 | "/tf", 485 | "/points", 486 | "/points", 487 | "/tf", 488 | "/tf", 489 | "/points", 490 | "/points", 491 | "/tf" 492 | ], 493 | "type": [ 494 | "sensor_msgs/msg/PointCloud2", 495 | "tf2_msgs/msg/TFMessage", 496 | "tf2_msgs/msg/TFMessage", 497 | "sensor_msgs/msg/PointCloud2", 498 | "sensor_msgs/msg/PointCloud2", 499 | "tf2_msgs/msg/TFMessage", 500 | "tf2_msgs/msg/TFMessage", 501 | "sensor_msgs/msg/PointCloud2", 502 | "sensor_msgs/msg/PointCloud2", 503 | "tf2_msgs/msg/TFMessage" 504 | ] 505 | } 506 | ``` 507 | 508 | **NOTE:** The `2>/dev/null` is to quiet the logging messages from the 509 | `rosbag2_cpp` code to allow for doing things like piping the output JSON from 510 | this program into a JSON *grep* tool like `jq` for post-processing on the 511 | command line. 512 | 513 | 514 | LICENSE 515 | ======= 516 | Please see the file called [LICENSE](LICENSE) 517 | 518 |

519 | 520 | 521 | 522 | Copyright © 2020 Box Robotics, Inc. 523 |

524 | -------------------------------------------------------------------------------- /baggie/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.5) 2 | project(baggie) 3 | 4 | if (NOT CMAKE_C_STANDARD) 5 | set(CMAKE_C_STANDARD 99) 6 | endif() 7 | 8 | if (NOT CMAKE_CXX_STANDARD) 9 | set(CMAKE_CXX_STANDARD 17) 10 | endif() 11 | 12 | if (CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") 13 | add_compile_options(-Wall -Wextra -Wpedantic -Werror) 14 | endif() 15 | 16 | find_package(ament_cmake REQUIRED) 17 | find_package(ament_cmake_python REQUIRED) 18 | find_package(pybind11 REQUIRED) 19 | find_package(rcutils REQUIRED) 20 | find_package(rosbag2_cpp REQUIRED) 21 | find_package(rosbag2_compression REQUIRED) 22 | find_package(rosbag2_storage REQUIRED) 23 | 24 | ############ 25 | ## Build ## 26 | ############ 27 | pybind11_add_module(_baggie SHARED 28 | src/py/_baggie.cpp 29 | ) 30 | ament_target_dependencies(_baggie PUBLIC 31 | rcutils 32 | rosbag2_cpp 33 | rosbag2_compression 34 | rosbag2_storage 35 | ) 36 | 37 | ############## 38 | ## Install ## 39 | ############## 40 | ament_python_install_package(${PROJECT_NAME}) 41 | 42 | install( 43 | TARGETS _baggie 44 | DESTINATION "${PYTHON_INSTALL_DIR}/${PROJECT_NAME}" 45 | ) 46 | 47 | install( 48 | PROGRAMS 49 | scripts/filter 50 | scripts/join 51 | scripts/split 52 | scripts/timeline 53 | DESTINATION lib/${PROJECT_NAME}/ 54 | ) 55 | 56 | ########## 57 | ## Test ## 58 | ########## 59 | if (BUILD_TESTING) 60 | find_package(ament_cmake_nose REQUIRED) 61 | find_package(example_interfaces REQUIRED) 62 | find_package(rclpy REQUIRED) 63 | 64 | ament_add_nose_test(Sequential test/test_sequential.py) 65 | ament_add_nose_test(SequentialCompressed test/test_sequential_compressed.py) 66 | ament_add_nose_test(BaggieWriter test/test_baggie_writer.py) 67 | ament_add_nose_test(BaggieReader test/test_baggie_reader.py) 68 | ament_add_nose_test(ContextManager test/test_context_mgr.py) 69 | ament_add_nose_test(Meta test/test_meta.py) 70 | ament_add_nose_test(Stamping test/test_stamping.py) 71 | endif() 72 | 73 | ############## 74 | ament_export_dependencies(ament_cmake) 75 | ament_export_dependencies(ament_cmake_python) 76 | ament_package() 77 | -------------------------------------------------------------------------------- /baggie/baggie/_Baggie.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Box Robotics, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # 15 | import os 16 | import sys 17 | from contextlib import contextmanager 18 | 19 | import baggie.util 20 | from baggie._baggie import BagMetadata as _BagMetadata 21 | from baggie._baggie import CompressionMode as _CompressionMode 22 | from baggie._baggie import CompressionOptions as _CompressionOptions 23 | from baggie._baggie import ConverterOptions as _ConverterOptions 24 | from baggie._baggie import BagInfo as _BagInfo 25 | from baggie._baggie import Reader as _Reader 26 | from baggie._baggie import StorageFilter as _StorageFilter 27 | from baggie._baggie import StorageOptions as _StorageOptions 28 | from baggie._baggie import TopicMetadata as _TopicMetadata 29 | from baggie._baggie import TopicInformation as _TopicInformation 30 | from baggie._baggie import Writer as _Writer 31 | 32 | from rclpy.serialization import deserialize_message, serialize_message 33 | from rclpy.time import Time 34 | 35 | class BaggieException(Exception): 36 | """ 37 | Exception wrapper around trapped errors from the rosbag2 C++ API 38 | """ 39 | pass 40 | 41 | class Baggie(object): 42 | """ 43 | The Baggie class provides an interface for reading or writing ROS 2 44 | bag files. 45 | 46 | A given instance of a Baggie can be instantiated as either a reader or a 47 | writer. One Baggie maps to exactly one on-disk ROS 2 bag file. 48 | 49 | Attributes 50 | ---------- 51 | DEFAULT_SERIALIZATION_FORMAT : str 52 | Class variable indicating the default serialization format used and 53 | assumed by the library. The default is `cdr`. 54 | 55 | DEFAULT_COMPRESSION_MODE : baggie._baggie._CompressionMode 56 | Class variable indicating the default compression mode applied when 57 | writing a compressed bag. The default is `FILE`. 58 | 59 | DEFAULT_COMPRESSION_FORMAT : str 60 | Class variable indicating the default compression format applied when 61 | writing a compressed bag. The default is `zstd`. 62 | 63 | DEFAULT_STORAGE_ID : str 64 | Class variable indicating the underlying storage format of the bag 65 | file. The default is `sqlite3`. 66 | 67 | All other attributes are "private" to the implementation. 68 | 69 | """ 70 | DEFAULT_SERIALIZATION_FORMAT = "cdr" 71 | DEFAULT_COMPRESSION_MODE = _CompressionMode.FILE 72 | DEFAULT_COMPRESSION_FORMAT = "zstd" 73 | DEFAULT_STORAGE_ID = "sqlite3" 74 | 75 | def __init__(self, filename, mode="r", 76 | storage_id=DEFAULT_STORAGE_ID, 77 | compress=False, 78 | storage_opts=None, 79 | converter_opts=None, 80 | compression_opts=None): 81 | """ 82 | Instantiates a new Baggie instance in either reader ("r") or writer 83 | ("w") mode. 84 | 85 | Parameters 86 | ---------- 87 | filename : str 88 | Path to the bag file to open. In "r" mode, this file must already 89 | exist. In "w" mode, this file must not exist. For clarity, bag 90 | files in ROS 2 are actually directories. The expected `filename` is 91 | the path to the bag directory. 92 | 93 | mode : str 94 | One of "r" or "w". A mode of "r" instantiates the Baggie as a 95 | reader. A mode of "w" instantiates the Baggie as a writer. 96 | 97 | compress : bool 98 | This parameter only applies when opened in "w" mode. As a reader, 99 | the compression options are introspected by the bag file 100 | metadata. If set to `True` as a writer, default compression options 101 | will be applied to the bag file as it is written out. When set to 102 | `False` as a writer, no compression options are applied. This 103 | parameter is overridden by the passed in `compression_opts` 104 | (optional). 
105 | 106 | storage_opts : baggie._baggie.StorageOptions 107 | An instance of `StorageOptions` giving fine grained control over 108 | how the underlying bag file is opened. The `uri` field of the 109 | passed in `storage_opts` will always be overridden by the required 110 | `filename` parameter passed into this ctor. 111 | 112 | converter_opts: baggie._baggie.ConverterOptions 113 | An instance of `ConverterOptions` giving fine grained control over 114 | how the underlying messages are serialized. Only applies when run 115 | in "w" mode. 116 | 117 | compression_opts: baggie._baggie.CompressionOptions 118 | An instance of `CompressionOptions` giving fine grained control 119 | over how the underlying bag file compression is treated. Only 120 | applies in "w" mode. 121 | 122 | """ 123 | self.reader_ = None 124 | self.writer_ = None 125 | 126 | self.s_opt_ = storage_opts 127 | self.c_opt_ = converter_opts 128 | self.comp_opt_ = compression_opts 129 | 130 | self.meta_ = None 131 | 132 | if mode == "r": 133 | info = _BagInfo() 134 | 135 | try: 136 | if ((self.s_opt_ is None) or 137 | (not isinstance(self.s_opt_, _StorageOptions))): 138 | self.meta_ = info.read_metadata(filename, storage_id) 139 | else: 140 | self.meta_ = \ 141 | info.read_metadata(filename, self.s_opt_.storage_id) 142 | 143 | except RuntimeError as rte: 144 | raise(BaggieException(rte)) 145 | 146 | self.s_opt_ = _StorageOptions() 147 | self.s_opt_.uri = filename 148 | self.s_opt_.storage_id = self.meta_.storage_identifier 149 | 150 | self.c_opt_ = _ConverterOptions() 151 | self.c_opt_.input_serialization_format = \ 152 | self.DEFAULT_SERIALIZATION_FORMAT 153 | self.c_opt_.output_serialization_format = \ 154 | self.DEFAULT_SERIALIZATION_FORMAT 155 | 156 | self.comp_opt_ = _CompressionOptions() 157 | self.comp_opt_.compression_format = self.meta_.compression_format 158 | if self.meta_.compression_mode == "": 159 | self.comp_opt_.compression_mode = _CompressionMode.NONE 160 | else: 161 | # NOTE: as of now, only FILE compression is supported. We will 162 | # have to do some better introspection once other schemes are 163 | # available.
164 | self.comp_opt_.compression_mode = _CompressionMode.FILE 165 | 166 | try: 167 | self.reader_ = _Reader(self.comp_opt_) 168 | self.reader_.open(self.s_opt_, self.c_opt_) 169 | 170 | except Exception as ex: 171 | raise(BaggieException(ex)) 172 | 173 | elif mode == "w": 174 | if self.s_opt_ is None: 175 | self.s_opt_ = _StorageOptions() 176 | self.s_opt_.storage_id = storage_id 177 | elif not isinstance(self.s_opt_, _StorageOptions): 178 | raise(TypeError( 179 | "'storage_opts' must be an instance of " + 180 | "'baggie._baggie.StorageOptions' or 'None', " + 181 | " not '%s'" % type(self.s_opt_))) 182 | self.s_opt_.uri = filename 183 | 184 | if self.c_opt_ is None: 185 | self.c_opt_ = _ConverterOptions() 186 | self.c_opt_.input_serialization_format = \ 187 | self.DEFAULT_SERIALIZATION_FORMAT 188 | self.c_opt_.output_serialization_format = \ 189 | self.DEFAULT_SERIALIZATION_FORMAT 190 | elif not isinstance(self.c_opt_, _ConverterOptions): 191 | raise(TypeError( 192 | "'converter_opts' must be an instance of " + 193 | "'baggie._baggie.ConverterOptions' or 'None', " + 194 | " not '%s'" % type(self.c_opt_))) 195 | 196 | if self.comp_opt_ is None: 197 | self.comp_opt_ = _CompressionOptions() 198 | self.comp_opt_.compression_format = \ 199 | self.DEFAULT_COMPRESSION_FORMAT 200 | if compress: 201 | self.comp_opt_.compression_mode = \ 202 | self.DEFAULT_COMPRESSION_MODE 203 | else: 204 | self.comp_opt_.compression_mode = _CompressionMode.NONE 205 | elif not isinstance(self.comp_opt_, _CompressionOptions): 206 | raise(TypeError( 207 | "'compression_opts' must be an instance of " + 208 | "'baggie._baggie.CompressionOptions' or 'None', " + 209 | " not '%s'" % type(self.comp_opt_))) 210 | 211 | try: 212 | os.makedirs(self.s_opt_.uri, exist_ok=True) 213 | self.writer_ = _Writer(self.comp_opt_) 214 | self.writer_.open(self.s_opt_, self.c_opt_) 215 | 216 | except Exception as ex: 217 | raise(BaggieException(ex)) 218 | 219 | else: 220 | raise ValueError("Unsupported mode: %s" % mode) 221 | 222 | def meta(self): 223 | """ 224 | If the Baggie has been instantiated as a reader ("r" mode), it caches 225 | the bag metadata. This method provides an accessor to this cached 226 | information. If the Baggie has been instantiated as a writer ("w" mode), 227 | this method returns `None`. 228 | 229 | Returns 230 | ------- 231 | baggie._baggie.BagMetadata or None 232 | 233 | """ 234 | return self.meta_ 235 | 236 | def write(self, topic, msg, t=None): 237 | """ 238 | Write a message to a Baggie opened in "w" mode. 239 | 240 | Parameters 241 | ---------- 242 | topic : str 243 | The topic name to write to 244 | 245 | msg : Message 246 | The unserialized message instance (of any message type) to be written 247 | on the topic. 248 | 249 | t : rclpy.time.Time or datetime.datetime or None 250 | The timestamp for the message. If its value is `None` the current 251 | system time is used. 252 | 253 | Exceptions 254 | ---------- 255 | baggie.BaggieException if an attempt to write is made on a Baggie 256 | opened as a reader ("r" mode). Exceptions encountered by the underlying 257 | C++ library are propagated up should they be encountered while writing.
258 | 259 | """ 260 | try: 261 | self.writer_.write( 262 | topic, serialize_message(msg), baggie.util.stamp(t)) 263 | 264 | except IndexError as idx_err: 265 | t_meta = _TopicMetadata() 266 | t_meta.name = topic 267 | t_meta.serialization_format = self.c_opt_.input_serialization_format 268 | t_meta.type = baggie.util.msg2typestr(msg) 269 | 270 | self.writer_.create_topic(t_meta) 271 | self.writer_.write( 272 | topic, serialize_message(msg), baggie.util.stamp(t)) 273 | 274 | except Exception as ex: 275 | if self.writer_ is None: 276 | raise(BaggieException("Cannot write in read-only mode")) 277 | 278 | raise(ex) 279 | 280 | def read_messages(self, topics=None, start_time=None, end_time=None): 281 | """ 282 | Generator function used to produce each message in sequence from a 283 | Baggie opened as a reader ("r" mode). 284 | 285 | Parameters 286 | ---------- 287 | topics : list of str 288 | A list of fully qualified topic names to include in the generated 289 | results. If this parameter is specified, topics not listed will be 290 | omitted from the generated output. Works together with `start_time` 291 | and `end_time`. 292 | 293 | start_time : rclpy.time.Time 294 | Only messages stamped with this start time or later will be included 295 | in the generated output. Works together with `topics` and 296 | `end_time`. 297 | 298 | end_time : rclpy.time.Time 299 | Only messages stamped with this end time or earlier will be included 300 | in the generated output. Works together with the `topics` and 301 | `start_time`. 302 | 303 | Returns 304 | ------- 305 | 3-tuple : str, msg, rclpy.time.Time 306 | [0] is the topic name 307 | [1] is the deserialized message 308 | [2] is the timestamp on the message 309 | 310 | If the Baggie is opened as a writer ("w" mode), a 311 | baggie.BaggieException is raised. 312 | 313 | """ 314 | if self.reader_ is None: 315 | raise(BaggieException("Cannot read in write-only mode")) 316 | 317 | start_time_filter = Time(nanoseconds=0) 318 | if ((start_time is not None) and (isinstance(start_time, Time))): 319 | start_time_filter = start_time 320 | 321 | end_time_filter = Time(nanoseconds=sys.maxsize) 322 | if ((end_time is not None) and (isinstance(end_time, Time))): 323 | end_time_filter = end_time 324 | 325 | self.reader_.reset_filter() 326 | if topics is not None: 327 | filt = _StorageFilter() 328 | filt.topics = topics 329 | self.reader_.set_filter(filt) 330 | 331 | type_lut = {} 332 | t_meta_list = self.reader_.get_all_topics_and_types() 333 | for t_meta in t_meta_list: 334 | if ((topics is not None) and (t_meta.name not in topics)): 335 | continue 336 | type_lut[t_meta.name] = baggie.util.typestr2msgtype(t_meta.type) 337 | 338 | while self.reader_.has_next(): 339 | topic, ser_msg, ts = self.reader_.read_next() 340 | msg_time = Time(nanoseconds=ts) 341 | if ((msg_time < start_time_filter) or 342 | (msg_time > end_time_filter)): 343 | continue 344 | 345 | msg = deserialize_message(ser_msg, type_lut[topic]) 346 | yield topic, msg, msg_time 347 | 348 | @contextmanager 349 | def BagReader(*args, **kwargs): 350 | """ 351 | Context Manager wrapper around a baggie.Baggie in `read` mode. 352 | """ 353 | kwargs["mode"] = "r" 354 | bag = Baggie(*args, **kwargs) 355 | try: 356 | yield bag 357 | finally: 358 | pass 359 | 360 | @contextmanager 361 | def BagWriter(*args, **kwargs): 362 | """ 363 | Context Manager wrapper around a baggie.Baggie in `write` mode. 
364 | """ 365 | kwargs["mode"] = "w" 366 | bag = Baggie(*args, **kwargs) 367 | try: 368 | yield bag 369 | finally: 370 | pass 371 | -------------------------------------------------------------------------------- /baggie/baggie/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Box Robotics, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # 15 | from baggie._baggie import BagMetadata as _BagMetadata 16 | from baggie._baggie import CompressionMode as _CompressionMode 17 | from baggie._baggie import CompressionOptions as _CompressionOptions 18 | from baggie._baggie import ConverterOptions as _ConverterOptions 19 | from baggie._baggie import BagInfo as _BagInfo 20 | from baggie._baggie import Reader as _Reader 21 | from baggie._baggie import StorageFilter as _StorageFilter 22 | from baggie._baggie import StorageOptions as _StorageOptions 23 | from baggie._baggie import TopicMetadata as _TopicMetadata 24 | from baggie._baggie import TopicInformation as _TopicInformation 25 | from baggie._baggie import Writer as _Writer 26 | 27 | from baggie.util import msg2typestr as _msg2typestr 28 | from baggie.util import stamp as _stamp 29 | 30 | from baggie._Baggie import Baggie 31 | from baggie._Baggie import BaggieException 32 | from baggie._Baggie import BagReader 33 | from baggie._Baggie import BagWriter 34 | 35 | 36 | __all__ = ['_BagMetadata', '_CompressionMode', '_CompressionOptions', 37 | '_ConverterOptions', '_BagInfo', '_Reader', 38 | '_StorageFilter', '_StorageOptions', '_TopicMetadata', 39 | '_TopicInformation', '_Writer', 40 | '_msg2typestr', '_stamp', 41 | 'Baggie', 'BaggieException', 'BagReader', 'BagWriter'] 42 | -------------------------------------------------------------------------------- /baggie/baggie/cmd/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Box-Robotics/ros2-bagutils/4542316e0831f727f8d97f4ec271f693a732482b/baggie/baggie/cmd/__init__.py -------------------------------------------------------------------------------- /baggie/baggie/cmd/filter.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Box Robotics, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # 15 | 16 | """Script to copy a bag file, optionally filtered by topic and a time window""" 17 | 18 | import argparse 19 | import os 20 | import sys 21 | from rclpy.time import Time 22 | from baggie import BagReader, BagWriter 23 | 24 | def get_args(): 25 | parser = argparse.ArgumentParser( 26 | description="Copy a bag file, optionally filtered by topic and time", 27 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 28 | 29 | parser.add_argument("-i", "--infile", required=True, type=str, 30 | help="Path to input bag file") 31 | parser.add_argument("-o", "--outfile", required=False, type=str, 32 | help="Output bag file, default: -filtered.bag") 33 | parser.add_argument("--start_time", required=False, type=int, 34 | help="Earliest message stamp in output bag (nanos)") 35 | parser.add_argument("--end_time", required=False, type=int, 36 | help="Latest message stamp in output bag (nanos)") 37 | parser.add_argument("--topics", required=False, type=str, 38 | metavar="T", nargs="+", 39 | help="List of topics to include in output bag") 40 | parser.add_argument("--map", required=False, type=str, 41 | metavar="M", nargs="+", 42 | help="Topic name remappings: --map from:to") 43 | compression_group = parser.add_mutually_exclusive_group() 44 | compression_group.add_argument("--compress", required=False, 45 | action="store_true", default=False, 46 | help="Compress output file") 47 | compression_group.add_argument("--uncompress", required=False, 48 | action="store_true", default=False, 49 | help="Do not compress output file") 50 | 51 | args = parser.parse_args(sys.argv[1:]) 52 | return args 53 | 54 | def main() -> int: 55 | args = get_args() 56 | 57 | infile = args.infile 58 | if not os.path.exists(infile): 59 | raise OSError("The input file does not exist: {}".format(infile)) 60 | 61 | outfile = args.outfile 62 | if outfile is None: 63 | outfile = infile.rstrip(".bag") 64 | outfile += "-filtered.bag" 65 | 66 | t0 = None 67 | t1 = None 68 | 69 | if args.start_time is not None: 70 | t0 = Time(nanoseconds=args.start_time) 71 | 72 | if args.end_time is not None: 73 | t1 = Time(nanoseconds=args.end_time) 74 | 75 | topic_list = None 76 | if args.topics is not None: 77 | topic_list = args.topics 78 | 79 | topic_lut = {} 80 | if args.map is not None: 81 | for remapping in args.map: 82 | from_to = remapping.split(":") 83 | if len(from_to) == 2: 84 | topic_lut[str(from_to[0])] = str(from_to[1]) 85 | 86 | with BagReader(infile) as inbag: 87 | compress_opts = None 88 | if ((not args.compress) and (not args.uncompress)): 89 | compress_opts = inbag.comp_opt_ 90 | 91 | with BagWriter(outfile, 92 | compress=args.compress, 93 | storage_opts=inbag.s_opt_, 94 | converter_opts=inbag.c_opt_, 95 | compression_opts=compress_opts) as outbag: 96 | 97 | for topic, msg, t, in inbag.read_messages( 98 | topics=topic_list, start_time=t0, end_time=t1): 99 | out_topic = topic 100 | if ((args.map is not None) and (topic in topic_lut)): 101 | out_topic = topic_lut[topic] 102 | outbag.write(out_topic, msg, t) 103 | 104 | return 0 105 | 106 | if __name__ == '__main__': 107 | sys.exit(main()) 108 | -------------------------------------------------------------------------------- /baggie/baggie/cmd/join.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Box Robotics, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # 15 | 16 | """Joins multiple bags into a single bag file""" 17 | 18 | import argparse 19 | import glob 20 | import os 21 | import sys 22 | from baggie import BagReader, BagWriter 23 | 24 | def get_args(): 25 | parser = argparse.ArgumentParser( 26 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, 27 | description="Joins several ROS 2 bag files into a single combined bag") 28 | 29 | parser.add_argument("-o", "--outfile", type=str, required=True, 30 | help="The output bag file name to create") 31 | parser.add_argument("infiles", metavar="INFILE", type=str, nargs="+", 32 | help="The input bag files to join") 33 | compression_group = parser.add_mutually_exclusive_group() 34 | compression_group.add_argument("--compress", required=False, 35 | action="store_true", default=False, 36 | help="Compress the output file") 37 | compression_group.add_argument("--uncompress", required=False, 38 | action="store_true", default=False, 39 | help="Do not compress the output file") 40 | 41 | args = parser.parse_args(sys.argv[1:]) 42 | return args 43 | 44 | def main(): 45 | args = get_args() 46 | 47 | infiles_ = [] 48 | for infile in args.infiles: 49 | paths = glob.glob(infile) 50 | for path in paths: 51 | infiles_.append(path) 52 | 53 | infiles = sorted(set(infiles_)) 54 | 55 | with BagWriter(args.outfile, compress=args.compress) as outbag: 56 | for infile in infiles: 57 | with BagReader(infile) as inbag: 58 | for topic, msg, t in inbag.read_messages(): 59 | outbag.write(topic, msg, t) 60 | 61 | return 0 62 | 63 | if __name__ == '__main__': 64 | sys.exit(main()) 65 | -------------------------------------------------------------------------------- /baggie/baggie/cmd/split.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Box Robotics, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # 15 | 16 | """Script to split a bag file into time-based partitions""" 17 | 18 | import argparse 19 | import os 20 | import sys 21 | from rclpy.time import Time, Duration 22 | from baggie import BagReader, Baggie, _stamp 23 | 24 | def get_args(): 25 | parser = argparse.ArgumentParser( 26 | description="Splits a bag file into time-based partitions", 27 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 28 | 29 | def fraction_type(x): 30 | x = float(x) 31 | if ((x <= 0.) 
or (x > 1.)): 32 | raise argparse.ArgumentTypeError("Fraction must be in: (0., 1.]") 33 | return x 34 | 35 | parser.add_argument("-i", "--infile", required=True, type=str, 36 | help="Path to input bag file") 37 | parser.add_argument("-o", "--outdir", required=False, 38 | help="Path to output directory for split bag files") 39 | parser.add_argument("-f", "--fraction", type=fraction_type, default=.5, 40 | help="Time fraction (0., 1.]; .5 = split in half") 41 | 42 | args = parser.parse_args(sys.argv[1:]) 43 | return args 44 | 45 | def main() -> int: 46 | args = get_args() 47 | 48 | if args.outdir is None: 49 | args.outdir = os.path.dirname(args.infile) 50 | if args.outdir == "": 51 | args.outdir = "." 52 | 53 | with BagReader(args.infile) as inbag: 54 | meta = inbag.meta() 55 | t_start = Time(nanoseconds=_stamp(meta.starting_time_as_nanos())) 56 | t_end = Time(nanoseconds=_stamp( 57 | t_start + Duration(nanoseconds=meta.duration_as_nanos()))) 58 | n_partitions = int(1./args.fraction) 59 | partitions = [] 60 | for i in range(n_partitions): 61 | part_no = i + 1 62 | part_len = Duration( 63 | nanoseconds=int((t_end - t_start).nanoseconds * args.fraction)) 64 | partitions.append( 65 | t_start + Duration( 66 | nanoseconds=int(part_no * part_len.nanoseconds))) 67 | 68 | part_no = 0 69 | outfile = "{}{}{:02d}_{}".format( 70 | args.outdir, os.sep, part_no, os.path.basename(args.infile)) 71 | outbag = Baggie(outfile, mode="w", 72 | storage_opts=inbag.s_opt_, 73 | converter_opts=inbag.c_opt_, 74 | compression_opts=inbag.comp_opt_) 75 | 76 | for topic, msg, t in inbag.read_messages(): 77 | if part_no == 0: 78 | if t <= partitions[part_no]: 79 | outbag.write(topic, msg, t) 80 | else: 81 | part_no += 1 82 | outfile = "{}{}{:02d}_{}".format( 83 | args.outdir, os.sep, 84 | part_no, os.path.basename(args.infile)) 85 | outbag = Baggie(outfile, mode="w", 86 | storage_opts=inbag.s_opt_, 87 | converter_opts=inbag.c_opt_, 88 | compression_opts=inbag.comp_opt_) 89 | outbag.write(topic, msg, t) 90 | 91 | elif (part_no == (len(partitions) - 1)): 92 | outbag.write(topic, msg, t) 93 | 94 | else: 95 | if ((t > partitions[part_no - 1]) and 96 | (t <= partitions[part_no])): 97 | outbag.write(topic, msg, t) 98 | else: 99 | part_no += 1 100 | outfile = "{}{}{:02d}_{}".format( 101 | args.outdir, os.sep, 102 | part_no, os.path.basename(args.infile)) 103 | outbag = Baggie(outfile, mode="w", 104 | storage_opts=inbag.s_opt_, 105 | converter_opts=inbag.c_opt_, 106 | compression_opts=inbag.comp_opt_) 107 | outbag.write(topic, msg, t) 108 | 109 | return 0 110 | 111 | if __name__ == '__main__': 112 | sys.exit(main()) 113 | -------------------------------------------------------------------------------- /baggie/baggie/cmd/timeline.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Box Robotics, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | #
15 | 
16 | """Output a JSON timeline of messages contained in a set of bags"""
17 | 
18 | import argparse
19 | import glob
20 | import json
21 | import os
22 | import sys
23 | from datetime import datetime
24 | from baggie import BagReader
25 | 
26 | def get_args():
27 |     parser = argparse.ArgumentParser(
28 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
29 |         description="A JSON timeline of messages contained in a set of bags")
30 | 
31 |     parser.add_argument("infiles", metavar="INFILE", type=str, nargs="+",
32 |                         help="The input bag files to add to the timeline")
33 |     parser.add_argument("--pretty", dest="pretty", action="store_true",
34 |                         default=False,
35 |                         help="Pretty print the output JSON")
36 |     ds_group = parser.add_mutually_exclusive_group()
37 |     ds_group.add_argument("--aos", required=False,
38 |                           action="store_true", default=False,
39 |                           help="Output the data as an array-of-structs")
40 |     ds_group.add_argument("--soa", required=False,
41 |                           action="store_true", default=False,
42 |                           help="Output the data as a struct-of-arrays")
43 | 
44 |     args = parser.parse_args(sys.argv[1:])
45 |     return args
46 | 
47 | def get_time_str(ts):
48 |     secs, nanos = divmod(int(ts), 1_000_000_000)  # ints avoid float rounding
49 |     s = datetime.fromtimestamp(secs).strftime('%Y-%m-%d %H:%M:%S')
50 |     s += '.' + str(nanos).zfill(9)
51 |     return s
52 | 
53 | def main():
54 |     args = get_args()
55 | 
56 |     infiles_ = []
57 |     for infile in args.infiles:
58 |         paths = glob.glob(infile)
59 |         for path in paths:
60 |             infiles_.append(path)
61 | 
62 |     aos = []
63 |     soa = {"topic": [], "type": [], "stamp": []}
64 | 
65 |     infiles = sorted(set(infiles_))
66 |     for infile in infiles:
67 |         with BagReader(infile) as inbag:
68 |             meta = inbag.meta()
69 |             t_meta = {}
70 |             for t_info in meta.topics_with_message_count:
71 |                 t_name = t_info.topic_metadata.name
72 |                 t_type = t_info.topic_metadata.type
73 |                 t_meta[t_name] = t_type
74 | 
75 |             for topic, msg, t in inbag.read_messages():
76 |                 nanos = t.nanoseconds
77 |                 d = {'topic_name': topic,
78 |                      'type': t_meta[topic],
79 |                      'stamp': nanos,
80 |                      'time': get_time_str(nanos)}
81 |                 aos.append(d)
82 | 
83 |                 soa["topic"].append(topic)
84 |                 soa["type"].append(t_meta[topic])
85 |                 soa["stamp"].append(nanos)
86 | 
87 |     if args.pretty:
88 |         if (((not args.aos) and (not args.soa)) or args.aos):
89 |             print(json.dumps(aos, sort_keys=True, indent=4))
90 |         else:
91 |             print(json.dumps(soa, sort_keys=True, indent=4))
92 |     else:
93 |         if (((not args.aos) and (not args.soa)) or args.aos):
94 |             print(json.dumps(aos, sort_keys=True))
95 |         else:
96 |             print(json.dumps(soa, sort_keys=True))
97 | 
98 |     return 0
99 | 
100 | if __name__ == '__main__':
101 |     sys.exit(main())
102 | 
--------------------------------------------------------------------------------
/baggie/baggie/util.py:
--------------------------------------------------------------------------------
1 | # Copyright 2020 Box Robotics, Inc.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | #
15 | import importlib
16 | 
17 | from datetime import datetime as DT
18 | from rclpy.time import Time
19 | 
20 | def stamp(dt=None):
21 |     """
22 |     Generates a timestamp suitable for writing to rosbag2
23 | 
24 |     Parameters:
25 |         dt (datetime.datetime or rclpy.time.Time):
26 |             A `datetime` or `Time` object representing the timestamp. If this
27 |             parameter is omitted, the current system time is used.
28 | 
29 |     Returns:
30 |         An int encoding of the timestamp as nanoseconds past the epoch (of a
31 |         particular clock; usually the system clock)
32 | 
33 |     """
34 |     if dt is None:
35 |         dt = DT.now()
36 | 
37 |     if isinstance(dt, DT):
38 |         return int(dt.timestamp() * 1e9)
39 |     elif isinstance(dt, Time):
40 |         return dt.nanoseconds
41 |     elif isinstance(dt, int):
42 |         return dt
43 |     else:
44 |         # NOTE: let's not encourage passing in an `int` (see docs above -- we
45 |         # don't document it -- and the error message below -- also,
46 |         # undocumented).
47 |         raise TypeError(
48 |             "stamp: 'dt' must be an instance of " +
49 |             "'datetime.datetime' or 'rclpy.time.Time', " +
50 |             "not '%s'" % type(dt))
51 | 
52 | def msg2typestr(msg):
53 |     """
54 |     Introspects the message type from the passed-in `msg` and encodes it as a
55 |     string in the format required by rosbag2.
56 | 
57 |     Parameters
58 |     ----------
59 |     msg : Message
60 |         A ROS 2 message (deserialized) whose type we need to introspect
61 | 
62 |     Returns
63 |     -------
64 |     A string encoding of the message type suitable for serialization to a ROS 2
65 |     bag.
66 | 
67 |     """
68 |     mod_components = msg.__module__.split(".")
69 |     mod_components[-1] = msg.__class__.__name__
70 |     return "/".join(mod_components)
71 | 
72 | def typestr2msgtype(type_str):
73 |     """
74 |     Given a ROS 2 bag message type encoding string, this function will return a
75 |     Type instance of the actual Python message type that can then be used for
76 |     creating instances of the particular message class. If the loader for the
77 |     class type is not on your `sys.path`, an exception will be thrown.
78 | 
79 |     Parameters
80 |     ----------
81 |     type_str : str
82 |         A string encoding of a message type compatible with rosbag2
83 | 
84 |     Returns
85 |     -------
86 |     The `Type` of the Python message.
87 | 88 | """ 89 | module_components = type_str.split("/") 90 | type_name = module_components[-1] 91 | 92 | module_str = ".".join(module_components[0:-1]) 93 | module = importlib.import_module(module_str) 94 | 95 | return type(getattr(module, type_name)()) 96 | -------------------------------------------------------------------------------- /baggie/doc/figures/box-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Box-Robotics/ros2-bagutils/4542316e0831f727f8d97f4ec271f693a732482b/baggie/doc/figures/box-logo.png -------------------------------------------------------------------------------- /baggie/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | baggie 6 | 0.1.0 7 | An unofficial Python wrapper around the ROS2 C++ Bag API 8 | Tom Panzarella 9 | Tom Panzarella 10 | Apache License 2.0 11 | 12 | ament 13 | ament_cmake_python 14 | 15 | pybind11-dev 16 | 17 | python3 18 | rclpy 19 | rcutils 20 | rosbag2_cpp 21 | rosbag2_compression 22 | rosbag2_storage 23 | 24 | ament_cmake_nose 25 | example_interfaces 26 | python3-nose 27 | 28 | 29 | ament_cmake 30 | 31 | 32 | -------------------------------------------------------------------------------- /baggie/scripts/filter: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- python -*- 3 | 4 | import sys 5 | from baggie.cmd import filter 6 | 7 | if __name__ == '__main__': 8 | sys.exit(filter.main()) 9 | -------------------------------------------------------------------------------- /baggie/scripts/join: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- python -*- 3 | 4 | import sys 5 | from baggie.cmd import join as Join 6 | 7 | if __name__ == '__main__': 8 | sys.exit(Join.main()) 9 | -------------------------------------------------------------------------------- /baggie/scripts/split: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- python -*- 3 | 4 | import sys 5 | from baggie.cmd import split as Split 6 | 7 | if __name__ == '__main__': 8 | sys.exit(Split.main()) 9 | -------------------------------------------------------------------------------- /baggie/scripts/timeline: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- python -*- 3 | 4 | import sys 5 | from baggie.cmd import timeline 6 | 7 | if __name__ == '__main__': 8 | sys.exit(timeline.main()) 9 | -------------------------------------------------------------------------------- /baggie/src/py/_baggie.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2020 Box Robotics, Inc 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | #include 18 | #include 19 | #include 20 | #include 21 | 22 | #include 23 | #include 24 | #include 25 | 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | #include 33 | #include 34 | #include 35 | #include 36 | #include 37 | #include 38 | #include 39 | #include 40 | #include 41 | #include 42 | 43 | namespace bag = rosbag2_cpp; 44 | namespace bagcomp = rosbag2_compression; 45 | namespace py = pybind11; 46 | 47 | namespace baggie 48 | { 49 | class Reader 50 | { 51 | public: 52 | explicit Reader(const bagcomp::CompressionOptions & c_opt) 53 | { 54 | if (c_opt.compression_mode == bagcomp::CompressionMode::NONE) 55 | { 56 | this->reader_ = 57 | std::make_unique( 58 | std::make_unique()); 59 | } 60 | else 61 | { 62 | this->reader_ = 63 | std::make_unique( 64 | std::make_unique()); 65 | } 66 | } 67 | 68 | void open(bag::StorageOptions & storage_opts, 69 | bag::ConverterOptions & converter_opts) 70 | { 71 | this->reader_->open(storage_opts, converter_opts); 72 | } 73 | 74 | bool has_next() 75 | { 76 | return this->reader_->has_next(); 77 | } 78 | 79 | py::tuple read_next() 80 | { 81 | const auto next = this->reader_->read_next(); 82 | rcutils_uint8_array_t rc_data = *next->serialized_data.get(); 83 | std::string ser_data( 84 | rc_data.buffer, rc_data.buffer + rc_data.buffer_length); 85 | return py::make_tuple( 86 | next->topic_name, py::bytes(ser_data), next->time_stamp); 87 | } 88 | 89 | std::vector 90 | get_all_topics_and_types() 91 | { 92 | return this->reader_->get_all_topics_and_types(); 93 | } 94 | 95 | void set_filter(const rosbag2_storage::StorageFilter & filt) 96 | { 97 | this->reader_->set_filter(filt); 98 | } 99 | 100 | void reset_filter() 101 | { 102 | this->reader_->reset_filter(); 103 | } 104 | 105 | private: 106 | std::unique_ptr reader_; 107 | 108 | }; // end: class Reader 109 | 110 | 111 | class Writer 112 | { 113 | public: 114 | explicit Writer(const bagcomp::CompressionOptions & c_opt) 115 | { 116 | if (c_opt.compression_mode == bagcomp::CompressionMode::NONE) 117 | { 118 | this->writer_ = 119 | std::make_unique( 120 | std::make_unique()); 121 | } 122 | else 123 | { 124 | this->writer_ = 125 | std::make_unique( 126 | std::make_unique(c_opt)); 127 | } 128 | } 129 | 130 | void open(bag::StorageOptions & storage_opts, 131 | bag::ConverterOptions & converter_opts) 132 | { 133 | this->writer_->open(storage_opts, converter_opts); 134 | } 135 | 136 | void create_topic(const rosbag2_storage::TopicMetadata & meta) 137 | { 138 | this->writer_->create_topic(meta); 139 | } 140 | 141 | void remove_topic(const rosbag2_storage::TopicMetadata & meta) 142 | { 143 | this->writer_->remove_topic(meta); 144 | } 145 | 146 | void write(const std::string & topic_name, 147 | const std::string & message, 148 | const rcutils_time_point_value_t & time_stamp) 149 | { 150 | auto bag_message = 151 | std::make_shared(); 152 | 153 | bag_message->topic_name = topic_name; 154 | bag_message->serialized_data = 155 | rosbag2_storage::make_serialized_message( 156 | message.c_str(), message.length()); 157 | bag_message->time_stamp = time_stamp; 158 | 159 | this->writer_->write(bag_message); 160 | } 161 | 162 | private: 163 | std::unique_ptr writer_; 164 | 165 | }; // end: class Writer 166 | 167 | } // end: namespace baggie 168 | 169 | PYBIND11_MODULE(_baggie, m) 170 | { 171 | m.doc() = "Python wrapper around the ROS2 C++ bag API"; 172 | 173 | py::enum_(m, "CompressionMode") 174 | .value("NONE", bagcomp::CompressionMode::NONE) 175 | .value("FILE", 
bagcomp::CompressionMode::FILE) 176 | // @todo As of this writing, only "FILE" compression is supported in C++ 177 | //.value("MESSAGE", bagcomp::CompressionMode::MESSAGE) 178 | //.value("LAST_MODE", bagcomp::CompressionMode::LAST_MODE) 179 | .export_values(); 180 | 181 | py::class_(m, "CompressionOptions") 182 | .def(py::init()) 183 | .def("mode_to_string", 184 | [](const bagcomp::CompressionOptions & c) -> std::string 185 | { 186 | return bagcomp::compression_mode_to_string(c.compression_mode); 187 | }) 188 | .def_readwrite( 189 | "compression_format", &bagcomp::CompressionOptions::compression_format) 190 | .def_readwrite( 191 | "compression_mode", &bagcomp::CompressionOptions::compression_mode); 192 | 193 | py::class_(m, "Reader") 194 | .def(py::init()) 195 | .def("open", &baggie::Reader::open) 196 | .def("has_next", &baggie::Reader::has_next) 197 | .def("read_next", &baggie::Reader::read_next) 198 | .def("get_all_topics_and_types", &baggie::Reader::get_all_topics_and_types) 199 | .def("set_filter", &baggie::Reader::set_filter) 200 | .def("reset_filter", &baggie::Reader::reset_filter); 201 | 202 | py::class_(m, "Writer") 203 | .def(py::init()) 204 | .def("open", &baggie::Writer::open) 205 | .def("create_topic", &baggie::Writer::create_topic) 206 | .def("remove_topic", &baggie::Writer::remove_topic) 207 | .def("write", &baggie::Writer::write); 208 | 209 | py::class_(m, "ConverterOptions") 210 | .def(py::init()) 211 | .def_readwrite( 212 | "input_serialization_format", 213 | &bag::ConverterOptions::input_serialization_format) 214 | .def_readwrite( 215 | "output_serialization_format", 216 | &bag::ConverterOptions::output_serialization_format); 217 | 218 | py::class_(m, "StorageOptions") 219 | .def(py::init()) 220 | .def_readwrite("uri", &bag::StorageOptions::uri) 221 | .def_readwrite("storage_id", &bag::StorageOptions::storage_id) 222 | .def_readwrite("max_bagfile_size", &bag::StorageOptions::max_bagfile_size); 223 | 224 | py::class_(m, "StorageFilter") 225 | .def(py::init()) 226 | .def_readwrite("topics", &rosbag2_storage::StorageFilter::topics); 227 | 228 | py::class_(m, "TopicMetadata") 229 | .def(py::init()) 230 | .def_readwrite("name", &rosbag2_storage::TopicMetadata::name) 231 | .def_readwrite("type", &rosbag2_storage::TopicMetadata::type) 232 | .def_readwrite( 233 | "serialization_format", 234 | &rosbag2_storage::TopicMetadata::serialization_format) 235 | .def_readwrite( 236 | "offered_qos_profiles", 237 | &rosbag2_storage::TopicMetadata::offered_qos_profiles) 238 | .def("equals", &rosbag2_storage::TopicMetadata::operator==); 239 | 240 | py::class_(m, "TopicInformation") 241 | .def(py::init()) 242 | .def_readwrite( 243 | "topic_metadata", 244 | &rosbag2_storage::TopicInformation::topic_metadata) 245 | .def_readwrite( 246 | "message_count", 247 | &rosbag2_storage::TopicInformation::message_count); 248 | 249 | // 250 | // NOTE: Pybind11 converts the chrono types to datetime.* 251 | // which only have usec resolution. So, we bind the lambdas below 252 | // so we get full nano resolution and convert to rclpy.time.* types in the 253 | // Python layer of baggie. 
254 | // 255 | py::class_(m, "BagMetadata") 256 | .def(py::init()) 257 | .def_readwrite("version", &rosbag2_storage::BagMetadata::version) 258 | .def_readwrite( 259 | "storage_identifier", &rosbag2_storage::BagMetadata::storage_identifier) 260 | .def_readwrite( 261 | "relative_file_paths", &rosbag2_storage::BagMetadata::relative_file_paths) 262 | .def_readwrite("duration", &rosbag2_storage::BagMetadata::duration) 263 | .def("duration_as_nanos", [](const rosbag2_storage::BagMetadata & meta) 264 | { 265 | return meta.duration.count(); 266 | }) 267 | .def_readwrite( 268 | "starting_time", &rosbag2_storage::BagMetadata::starting_time) 269 | .def("starting_time_as_nanos", 270 | [](const rosbag2_storage::BagMetadata & meta) 271 | { 272 | return meta.starting_time.time_since_epoch().count(); 273 | }) 274 | .def_readwrite( 275 | "message_count", &rosbag2_storage::BagMetadata::message_count) 276 | .def_readwrite( 277 | "topics_with_message_count", 278 | &rosbag2_storage::BagMetadata::topics_with_message_count) 279 | .def_readwrite( 280 | "compression_format", &rosbag2_storage::BagMetadata::compression_format) 281 | .def_readwrite( 282 | "compression_mode", &rosbag2_storage::BagMetadata::compression_mode); 283 | 284 | py::class_(m, "BagInfo") 285 | .def(py::init()) 286 | .def("read_metadata", &bag::Info::read_metadata); 287 | } 288 | -------------------------------------------------------------------------------- /baggie/test/test_baggie_reader.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import os 4 | import tempfile 5 | import time 6 | 7 | import baggie 8 | from example_interfaces.msg import Int32 9 | from example_interfaces.msg import String 10 | from rclpy.time import Time 11 | 12 | N_MSGS = 100 13 | TOPIC_INT = "/counter" 14 | TOPIC_STR = "/chatter" 15 | BAGNAME = "ziplock.bag" 16 | BAGNAME_COMPRESSED = "ziplock-c.bag" 17 | 18 | class TestBaggieReader(unittest.TestCase): 19 | """ 20 | Test fixture for the baggie.Baggie Python interface for reading bag files 21 | """ 22 | 23 | def setUp(self): 24 | self.tmp_dir = tempfile.TemporaryDirectory() 25 | 26 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 27 | bag = baggie.Baggie(bag_file_name, mode="w", compress=False) 28 | 29 | bag_comp_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, 30 | BAGNAME_COMPRESSED) 31 | bag_comp = baggie.Baggie(bag_comp_file_name, mode="w", compress=True) 32 | 33 | for i in range(N_MSGS): 34 | int_msg = Int32() 35 | int_msg.data = i 36 | 37 | str_msg = String() 38 | str_msg.data = "The count is: %s" % i 39 | 40 | bag.write(TOPIC_INT, int_msg) 41 | bag.write(TOPIC_STR, str_msg) 42 | 43 | bag_comp.write(TOPIC_INT, int_msg) 44 | bag_comp.write(TOPIC_STR, str_msg) 45 | 46 | time.sleep(1./N_MSGS) 47 | 48 | def tearDown(self): 49 | self.tmp_dir.cleanup() 50 | 51 | def test_file_does_not_exist(self): 52 | bag_file_name="{}{}{}".format( 53 | self.tmp_dir.name, os.sep, BAGNAME + "_foo") 54 | 55 | with self.assertRaises(baggie.BaggieException): 56 | bag = baggie.Baggie(bag_file_name, mode="r") 57 | 58 | def test_reader(self): 59 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 60 | 61 | bag = baggie.Baggie(bag_file_name, mode="r") 62 | i = 0 63 | last_time = Time(nanoseconds=0) 64 | for topic, msg, ts in bag.read_messages(): 65 | if topic == TOPIC_INT: 66 | self.assertIsInstance(msg, Int32) 67 | elif topic == TOPIC_STR: 68 | self.assertIsInstance(msg, String) 69 | 70 | self.assertIsInstance(ts, Time) 71 | 
self.assertTrue(last_time <= ts) 72 | last_time = ts 73 | i += 1 74 | 75 | self.assertEqual(i, N_MSGS*2) 76 | 77 | def test_reader_topic_filtered(self): 78 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 79 | 80 | bag = baggie.Baggie(bag_file_name, mode="r") 81 | i = 0 82 | last_time = Time(nanoseconds=0) 83 | for topic, msg, ts in bag.read_messages(topics=[TOPIC_INT]): 84 | self.assertIsInstance(msg, Int32) 85 | self.assertIsInstance(ts, Time) 86 | self.assertTrue(last_time <= ts) 87 | last_time = ts 88 | i += 1 89 | 90 | self.assertEqual(i, N_MSGS) 91 | 92 | def test_compressed_reader(self): 93 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, 94 | BAGNAME_COMPRESSED) 95 | 96 | bag = baggie.Baggie(bag_file_name, mode="r") 97 | 98 | bag = baggie.Baggie(bag_file_name, mode="r") 99 | i = 0 100 | last_time = Time(nanoseconds=0) 101 | for topic, msg, ts in bag.read_messages(): 102 | if topic == TOPIC_INT: 103 | self.assertIsInstance(msg, Int32) 104 | elif topic == TOPIC_STR: 105 | self.assertIsInstance(msg, String) 106 | 107 | self.assertIsInstance(ts, Time) 108 | self.assertTrue(last_time <= ts) 109 | last_time = ts 110 | i += 1 111 | 112 | self.assertEqual(i, N_MSGS*2) 113 | 114 | def test_readonly_write(self): 115 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, 116 | BAGNAME_COMPRESSED) 117 | 118 | bag = baggie.Baggie(bag_file_name, mode="r") 119 | int_msg = Int32() 120 | int_msg.data = 1000 121 | 122 | with self.assertRaises(baggie.BaggieException): 123 | bag.write(TOPIC_INT, int_msg) 124 | 125 | def test_reader_time_filtered(self): 126 | """ @todo """ 127 | pass 128 | -------------------------------------------------------------------------------- /baggie/test/test_baggie_writer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import os 4 | import tempfile 5 | import time 6 | 7 | import baggie 8 | from example_interfaces.msg import Int32 9 | 10 | N_MSGS = 100 11 | TOPIC_NAME = "/counter" 12 | BAGNAME = "ziplock.bag" 13 | 14 | class TestBaggieWriter(unittest.TestCase): 15 | """ 16 | Test fixture for the baggie.Baggie Python interface to writing bag files 17 | """ 18 | 19 | def setUp(self): 20 | self.tmp_dir = tempfile.TemporaryDirectory() 21 | 22 | def tearDown(self): 23 | self.tmp_dir.cleanup() 24 | 25 | def test_defaults(self): 26 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 27 | bag = baggie.Baggie(bag_file_name, mode="w") 28 | 29 | for i in range(N_MSGS): 30 | msg = Int32() 31 | msg.data = i 32 | bag.write(TOPIC_NAME, msg) 33 | time.sleep(1./N_MSGS) 34 | 35 | def test_compressed(self): 36 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 37 | bag = baggie.Baggie(bag_file_name, mode="w", compress=True) 38 | 39 | for i in range(N_MSGS): 40 | msg = Int32() 41 | msg.data = i 42 | bag.write(TOPIC_NAME, msg) 43 | time.sleep(1./N_MSGS) 44 | 45 | def test_legal_override_types(self): 46 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 47 | 48 | s_opt = baggie._StorageOptions() 49 | s_opt.storage_id = baggie.Baggie.DEFAULT_STORAGE_ID 50 | 51 | c_opt = baggie._ConverterOptions() 52 | c_opt.input_serialization_format = \ 53 | baggie.Baggie.DEFAULT_SERIALIZATION_FORMAT 54 | c_opt.output_serialization_format = \ 55 | baggie.Baggie.DEFAULT_SERIALIZATION_FORMAT 56 | 57 | comp_opt = baggie._CompressionOptions() 58 | comp_opt.compression_format = baggie.Baggie.DEFAULT_COMPRESSION_FORMAT 59 | comp_opt.compression_mode = 
baggie.Baggie.DEFAULT_COMPRESSION_MODE 60 | 61 | bag = baggie.Baggie(bag_file_name, mode="w", 62 | storage_opts=s_opt, 63 | converter_opts=c_opt, 64 | compression_opts=comp_opt) 65 | 66 | def test_illegal_override_types(self): 67 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 68 | 69 | with self.assertRaises(TypeError): 70 | bag = baggie.Baggie(bag_file_name, mode="w", storage_opts="foo") 71 | 72 | with self.assertRaises(TypeError): 73 | bag = baggie.Baggie(bag_file_name, mode="w", converter_opts=100) 74 | 75 | with self.assertRaises(TypeError): 76 | bag = baggie.Baggie(bag_file_name, mode="w", compression_opts=1.0) 77 | 78 | 79 | def test_file_already_exits(self): 80 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 81 | 82 | bag1 = baggie.Baggie(bag_file_name, mode="w") 83 | 84 | with self.assertRaises(baggie.BaggieException): 85 | bag2 = baggie.Baggie(bag_file_name, mode="w") 86 | -------------------------------------------------------------------------------- /baggie/test/test_context_mgr.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import os 4 | import tempfile 5 | import time 6 | 7 | import baggie 8 | from example_interfaces.msg import Int32 9 | from example_interfaces.msg import String 10 | from rclpy.time import Time 11 | 12 | N_MSGS = 100 13 | TOPIC_INT = "/counter" 14 | TOPIC_STR = "/chatter" 15 | BAGNAME = "ziplock.bag" 16 | 17 | class TestBaggieWriter(unittest.TestCase): 18 | """ 19 | Test fixture for the baggie context manager interfaces 20 | """ 21 | 22 | def setUp(self): 23 | self.tmp_dir = tempfile.TemporaryDirectory() 24 | 25 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 26 | with baggie.BagWriter(bag_file_name) as bag: 27 | for i in range(N_MSGS): 28 | int_msg = Int32() 29 | int_msg.data = i 30 | 31 | str_msg = String() 32 | str_msg.data = "The count is: %s" % i 33 | 34 | bag.write(TOPIC_INT, int_msg) 35 | bag.write(TOPIC_STR, str_msg) 36 | 37 | time.sleep(1./N_MSGS) 38 | 39 | def tearDown(self): 40 | self.tmp_dir.cleanup() 41 | 42 | def test_reader(self): 43 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 44 | 45 | with baggie.BagReader(bag_file_name) as bag: 46 | i = 0 47 | last_time = Time(nanoseconds=0) 48 | for topic, msg, ts in bag.read_messages(): 49 | if topic == TOPIC_INT: 50 | self.assertIsInstance(msg, Int32) 51 | elif topic == TOPIC_STR: 52 | self.assertIsInstance(msg, String) 53 | 54 | self.assertIsInstance(ts, Time) 55 | self.assertTrue(last_time <= ts) 56 | last_time = ts 57 | i += 1 58 | 59 | self.assertEqual(i, N_MSGS*2) 60 | -------------------------------------------------------------------------------- /baggie/test/test_meta.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import datetime 4 | import os 5 | import tempfile 6 | import time 7 | 8 | import baggie 9 | from example_interfaces.msg import Int32 10 | from example_interfaces.msg import String 11 | from rclpy.time import Time 12 | 13 | N_MSGS = 100 14 | TOPIC_INT = "/counter" 15 | TOPIC_STR = "/chatter" 16 | BAGNAME = "ziplock.bag" 17 | 18 | class TestBaggieWriter(unittest.TestCase): 19 | """ 20 | """ 21 | 22 | def setUp(self): 23 | self.tmp_dir = tempfile.TemporaryDirectory() 24 | 25 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 26 | bag = baggie.Baggie(bag_file_name, mode="w", compress=False) 27 | 28 | for i in range(N_MSGS): 29 | int_msg = Int32() 30 | 
int_msg.data = i 31 | 32 | str_msg = String() 33 | str_msg.data = "The count is: %s" % i 34 | 35 | bag.write(TOPIC_INT, int_msg) 36 | bag.write(TOPIC_STR, str_msg) 37 | 38 | time.sleep(1./N_MSGS) 39 | 40 | def tearDown(self): 41 | self.tmp_dir.cleanup() 42 | 43 | def test_meta(self): 44 | bag_file_name="{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 45 | 46 | bag = baggie.Baggie(bag_file_name, mode="r") 47 | meta = bag.meta() 48 | 49 | self.assertIsInstance(meta, baggie._BagMetadata) 50 | self.assertIsInstance(meta.duration, datetime.timedelta) 51 | self.assertIsInstance(meta.starting_time, datetime.datetime) 52 | self.assertEqual(meta.message_count, N_MSGS*2) 53 | -------------------------------------------------------------------------------- /baggie/test/test_sequential.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import os 4 | import tempfile 5 | import time 6 | 7 | import baggie 8 | from example_interfaces.msg import Int32 9 | from rclpy.serialization import deserialize_message, serialize_message 10 | 11 | N_MSGS = 100 12 | TOPIC_NAME = "/counter" 13 | BAGNAME = "ziplock.bag" 14 | MSGTYPE = "example_interfaces/msg/Int32" 15 | SERFMT = "cdr" 16 | 17 | class TestSequential(unittest.TestCase): 18 | """ 19 | Test fixture for sequential (uncompressed) read/write access to a ROS2 bag 20 | using the lower-level C++ interface. 21 | """ 22 | 23 | def setUp(self): 24 | self.tmp_dir = tempfile.TemporaryDirectory() 25 | self.s_opt = baggie._StorageOptions() 26 | self.s_opt.uri = "{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 27 | self.s_opt.storage_id = "sqlite3" 28 | os.makedirs(self.s_opt.uri) 29 | 30 | self.c_opt = baggie._ConverterOptions() 31 | self.c_opt.input_serialization_format = SERFMT 32 | self.c_opt.output_serialization_format = SERFMT 33 | 34 | self.comp_opt = baggie._CompressionOptions() 35 | self.comp_opt.compression_format = "" 36 | self.comp_opt.compression_mode = baggie._CompressionMode.NONE 37 | 38 | writer = baggie._Writer(self.comp_opt) 39 | writer.open(self.s_opt, self.c_opt) 40 | 41 | t_meta = baggie._TopicMetadata() 42 | t_meta.name = TOPIC_NAME 43 | t_meta.serialization_format = SERFMT 44 | t_meta.type = MSGTYPE 45 | writer.create_topic(t_meta) 46 | 47 | self.stamps = [] 48 | for i in range(N_MSGS): 49 | msg = Int32() 50 | msg.data = i 51 | ts = baggie._stamp() 52 | self.stamps.append(ts) 53 | writer.write(t_meta.name, serialize_message(msg), ts) 54 | time.sleep(1./N_MSGS) 55 | 56 | def tearDown(self): 57 | self.tmp_dir.cleanup() 58 | 59 | def test_sequential_reader(self): 60 | info = baggie._BagInfo() 61 | meta = info.read_metadata(self.s_opt.uri, self.s_opt.storage_id) 62 | self.assertEquals(meta.storage_identifier, self.s_opt.storage_id) 63 | self.assertEquals(meta.message_count, N_MSGS) 64 | self.assertEquals(meta.compression_format, "") 65 | self.assertEquals(meta.compression_mode, "") 66 | 67 | reader = baggie._Reader(self.comp_opt) 68 | reader.open(self.s_opt, self.c_opt) 69 | 70 | i = 0 71 | while reader.has_next(): 72 | topic, ser_msg, ts = reader.read_next() 73 | msg = deserialize_message(ser_msg, Int32) 74 | 75 | self.assertIsInstance(msg, Int32) 76 | self.assertEquals(i, msg.data) 77 | self.assertEquals(self.stamps[i], ts) 78 | 79 | i += 1 80 | 81 | self.assertEquals(i, N_MSGS) 82 | 83 | if __name__ == '__main__': 84 | unittest.main() 85 | -------------------------------------------------------------------------------- /baggie/test/test_sequential_compressed.py: 
-------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import os 4 | import tempfile 5 | import time 6 | 7 | import baggie 8 | from example_interfaces.msg import Int32 9 | from rclpy.serialization import deserialize_message, serialize_message 10 | 11 | N_MSGS = 100 12 | TOPIC_NAME = "/counter" 13 | BAGNAME = "ziplock.bag" 14 | MSGTYPE = "example_interfaces/msg/Int32" 15 | SERFMT = "cdr" 16 | COMPFMT = "zstd" 17 | 18 | class TestSequentialCompressed(unittest.TestCase): 19 | """ 20 | Test fixture for sequential read/write access, using compression, to a ROS2 21 | bag using the lower-level C++ interface. 22 | """ 23 | 24 | def setUp(self): 25 | self.tmp_dir = tempfile.TemporaryDirectory() 26 | self.s_opt = baggie._StorageOptions() 27 | self.s_opt.uri = "{}{}{}".format(self.tmp_dir.name, os.sep, BAGNAME) 28 | self.s_opt.storage_id = "sqlite3" 29 | os.makedirs(self.s_opt.uri) 30 | 31 | self.c_opt = baggie._ConverterOptions() 32 | self.c_opt.input_serialization_format = SERFMT 33 | self.c_opt.output_serialization_format = SERFMT 34 | 35 | self.comp_opt = baggie._CompressionOptions() 36 | self.comp_opt.compression_format = COMPFMT 37 | self.comp_opt.compression_mode = baggie._CompressionMode.FILE 38 | 39 | writer = baggie._Writer(self.comp_opt) 40 | writer.open(self.s_opt, self.c_opt) 41 | 42 | t_meta = baggie._TopicMetadata() 43 | t_meta.name = TOPIC_NAME 44 | t_meta.serialization_format = SERFMT 45 | t_meta.type = MSGTYPE 46 | writer.create_topic(t_meta) 47 | 48 | self.stamps = [] 49 | for i in range(N_MSGS): 50 | msg = Int32() 51 | msg.data = i 52 | ts = baggie._stamp() 53 | self.stamps.append(ts) 54 | writer.write(t_meta.name, serialize_message(msg), ts) 55 | time.sleep(1./N_MSGS) 56 | 57 | def tearDown(self): 58 | self.tmp_dir.cleanup() 59 | 60 | def test_sequential_reader(self): 61 | info = baggie._BagInfo() 62 | meta = info.read_metadata(self.s_opt.uri, self.s_opt.storage_id) 63 | self.assertEquals(meta.storage_identifier, self.s_opt.storage_id) 64 | self.assertEquals(meta.message_count, N_MSGS) 65 | self.assertEquals( 66 | meta.compression_format, self.comp_opt.compression_format) 67 | self.assertEquals(meta.compression_mode, self.comp_opt.mode_to_string()) 68 | 69 | reader = baggie._Reader(self.comp_opt) 70 | reader.open(self.s_opt, self.c_opt) 71 | 72 | i = 0 73 | while reader.has_next(): 74 | topic, ser_msg, ts = reader.read_next() 75 | msg = deserialize_message(ser_msg, Int32) 76 | 77 | self.assertIsInstance(msg, Int32) 78 | self.assertEquals(i, msg.data) 79 | self.assertEquals(self.stamps[i], ts) 80 | 81 | i += 1 82 | 83 | self.assertEquals(i, N_MSGS) 84 | 85 | if __name__ == '__main__': 86 | unittest.main() 87 | -------------------------------------------------------------------------------- /baggie/test/test_stamping.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import baggie 4 | from rclpy.time import Time 5 | from datetime import datetime as DT 6 | 7 | class TestStamps(unittest.TestCase): 8 | 9 | def test_stamps(self): 10 | now = DT.now() 11 | nanos_in = baggie.util.stamp(now) 12 | nanos_passthru = baggie.util.stamp(nanos_in) 13 | nanos_out = baggie.util.stamp(Time(nanoseconds=nanos_in)) 14 | 15 | self.assertEqual(nanos_in, nanos_passthru) 16 | self.assertEqual(nanos_in, nanos_out) 17 | --------------------------------------------------------------------------------