├── .gitignore ├── LICENSE ├── README.md └── examples ├── data-processing ├── LOG │ ├── 17BD1DB7 │ │ └── 00000006 │ │ │ └── 00000170.MF4 │ ├── 2F6913DB │ │ └── 00000004 │ │ │ └── 00000001.MF4 │ ├── 94C49784 │ │ └── 00000005 │ │ │ ├── 00000002.MF4 │ │ │ └── 00000017-6099D010.MF4 │ ├── 958D2219 │ │ └── 00002501 │ │ │ ├── 00002081.MF4 │ │ │ └── 00002082.MF4 │ └── FCBF0606 │ │ └── 00003482 │ │ └── 00000001-60B621D9.MF4 ├── README.md ├── dbc_files │ ├── CSS-Electronics-SAE-J1939-DEMO.dbc │ ├── tp_j1939.dbc │ ├── tp_nmea.dbc │ ├── tp_uds.dbc │ └── tp_uds_nissan.dbc ├── process_data.py ├── process_tp_data.py ├── requirements.txt └── utils.py └── other ├── asammdf-basics ├── asammdf_basics.py ├── input │ ├── 00000001.MF4 │ └── CSS-Electronics-SAE-J1939-DEMO.dbc └── requirements.txt ├── concatenate-mf4-by-period ├── LOG │ ├── 2F6913DB │ │ └── 00001089 │ │ │ ├── 00000001-63BEF5CD.MF4 │ │ │ └── 00000002-63BEFDF9.MF4 │ └── 5BC57FEC │ │ └── 00000001 │ │ ├── 00000005-643E503A.MFC │ │ ├── 00000006-643E5292.MFC │ │ └── 00000007-643E54EA.MFC ├── README.md ├── concatenate_mf4_by_period.py ├── concatenate_utils.py ├── dbc_files │ └── gnss.dbc ├── install.bat ├── mdf2finalized.exe ├── requirements.txt └── run.bat ├── matlab-basics ├── LOG │ └── 11111111 │ │ └── 00000012 │ │ └── 00000001.MF4 ├── LOG_datastore │ └── 3BA199E2 │ │ ├── 00000164 │ │ ├── 00000001-60FE3F04.MF4 │ │ ├── 00000002-60FE415D.MF4 │ │ ├── 00000003-60FE43B2.MF4 │ │ ├── 00000004-60FEB585.MF4 │ │ └── 00000005-60FEB5A8.MF4 │ │ └── 00000165 │ │ ├── 00000001-60FEB68B.MF4 │ │ ├── 00000002-60FEB8E3.MF4 │ │ ├── 00000003-60FEBB3B.MF4 │ │ ├── 00000004-60FEBD93.MF4 │ │ ├── 00000005-60FEBFEB.MF4 │ │ └── 00000006-60FFE42F.MF4 ├── README.md ├── dbc_files │ ├── CSS-Electronics-SAE-J1939-DEMO.dbc │ └── canmod-gps.dbc ├── matlab_basics.m ├── matlab_datastore.m ├── matlab_mat.m ├── matlab_tall.m ├── mf4_to_mat.py └── requirements.txt ├── misc ├── create_log_files.py ├── mdf2csv.exe ├── mdf_converter.py ├── send_mail.py └── upload_sd_to_s3.py ├── s3-basics ├── requirements.txt ├── s3_basics.py └── s3_get_keys.py └── s3-events ├── README.md ├── aws_lambda_mdf_convert.py ├── mdf2csv.exe ├── minio_listen_mdf_convert.py └── requirements.txt /.gitignore: -------------------------------------------------------------------------------- 1 | *.csv 2 | *.json 3 | *.pyc 4 | *.png 5 | *.zip 6 | output/ 7 | *j1939-speed.dbc 8 | *j1939-engine.dbc 9 | *FE34E37D* 10 | LOG2/ 11 | *LIN.dbc 12 | *NMEA-2000-CSS-Electronics-v* 13 | *find_data_event.py 14 | *CSS-Electronics-J1939-2021-08_v1.0.dbc 15 | logging.py 16 | *matlab_raw_to_tallarray.m 17 | *env/* 18 | env/ 19 | *requirements_updated_experimental.txt 20 | *_fin.MF4 21 | *.idea 22 | *tp-uds-kia-ev6.dbc 23 | *CSS-Electronics-J1939-* 24 | *ev6-gps.dbc* -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 CSS-Electronics 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | 
copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CAN bus API examples (Python/MATLAB) | CANedge [LEGACY] 2 | 3 | ## Update: Legacy notice + new Python/MATLAB integration methods 4 | If you need to work with CANedge data in Python/MATLAB, we now recommend using the methods described in the documentation below: 5 | - CANedge + Python: [About](https://www.csselectronics.com/pages/python-can-bus-api) | [Intro](https://canlogger.csselectronics.com/canedge-getting-started/ce3/log-file-tools/api-tools) 6 | - CANedge + MATLAB: [About](https://www.csselectronics.com/pages/matlab-mdf-mf4-can-bus) | [Intro](https://canlogger.csselectronics.com/canedge-getting-started/ce3/log-file-tools/matlab) 7 | 8 | The Python methods/modules shown in this repository under `examples/data-processing/` can still be used, but will not be updated going forward (this refers to the submodules of our Python API: `mdf-iter`, `canedge-browser` and `can-decoder`). Instead, we refer to our new integration with python-can - and our examples of how to work with DBC decoded Parquet data lakes in Python using our MF4 decoders. The script examples found in `examples/other/` remain relevant going forward. 9 | 10 | -------------------- 11 | 12 | ## Overview 13 | 14 | This project includes Python and MATLAB examples of how to process MF4 log files with CAN/LIN data from your [CANedge](https://www.csselectronics.com/) data loggers. Most examples focus on the use of our Python API modules (`canedge_browser`, `mdf_iter`, `can_decoder`) for use with the CANedge log file formats (`MF4`, `MFC`, `MFE`, `MFM`). However, you'll also find other script examples, incl. for the asammdf Python API, MATLAB, S3 and more. 
15 | 16 | --- 17 | ## Features 18 | ``` 19 | For most use cases we recommend starting with the below examples: 20 | - data-processing: List log files, load them and DBC decode the data (local, S3) 21 | 22 | For some use cases the below examples may be useful: 23 | - other/asammdf-basics: Load and concatenate MF4 logs, DBC decode them - and save as new MF4 files 24 | - other/matlab-basics: Examples of how to load and use MF4/MAT CAN bus data 25 | - other/s3-basics: Examples of how to download, upload or list specific objects on your server 26 | - other/s3-events: Using AWS Lambda or MinIO notifications (for event-based data processing) 27 | - other/misc: Examples of automating the use of the MDF4 converters and misc tools 28 | 29 | ``` 30 | 31 | --- 32 | 33 | ## Installation 34 | 35 | - Install Python 3.9.13 for Windows ([32 bit](https://www.python.org/ftp/python/3.9.13/python-3.9.13.exe)/[64 bit](https://www.python.org/ftp/python/3.9.13/python-3.9.13-amd64.exe)) or [Linux](https://www.python.org/downloads/release/python-3913/) (_enable 'Add to PATH'_) 36 | - Download this project as a zip via the green button and unzip it 37 | - Open the folder containing the `requirements.txt` file and enter the below in your [command prompt](https://www.youtube.com/watch?v=bgSSJQolR0E&t=47s): 38 | 39 | ##### Windows 40 | ``` 41 | python -m venv env & env\Scripts\activate & pip install -r requirements.txt 42 | python script_to_run.py 43 | ``` 44 | 45 | ##### Linux 46 | ``` 47 | python -m venv env && source env/bin/activate && pip install -r requirements.txt 48 | python script_to_run.py 49 | ``` 50 | 51 | If you later need to re-activate the virtual environment, use `env\Scripts\activate` (Windows) or `source env/bin/activate` (Linux). 52 | 53 | --- 54 | 55 | ## Sample data (MDF4 & DBC) 56 | The various folders include sample log files and DBC files. Once you've tested a script with the sample data, you can replace it with your own. 57 | 58 | --- 59 | 60 | ## Usage info 61 | - Some example folders contain their own `README.md` files with extra information 62 | - These example scripts are designed to be minimal and to help you get started - not for production 63 | - Some S3 scripts use hardcoded credentials to ease testing - for production, see e.g. [this guide](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html) 64 | 65 | --- 66 | 67 | ## Which API modules to use? 68 | There are many ways to work with the data from your CANedge devices. Most automation use cases involve fetching data from a specific device and time period - and DBC decoding this into a dataframe for further processing. Here, we recommend looking at the examples in the `data-processing/` folder. These examples use our custom modules designed for use with the CANedge: [mdf_iter](https://pypi.org/project/mdf-iter/) (for loading MDF4 data), [canedge_browser](https://github.com/CSS-Electronics/canedge_browser) (for fetching specific data locally or from S3) and [can_decoder](https://github.com/CSS-Electronics/can_decoder) (for DBC decoding the data). In combination, these modules support most use cases. 69 | 70 | If you have needs that are not covered by these modules, you can check out the other examples using the asammdf API, the AWS/MinIO S3 API and our MDF4 converters. 71 | 72 | If in doubt, [contact us](https://www.csselectronics.com/pages/contact-us) for guidance. 
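To illustrate how the three modules fit together, below is a minimal sketch that lists log files for one of the included sample devices, loads each file and DBC decodes it. The paths/device are placeholders based on the sample data in `examples/data-processing/` - and for simplicity the sketch skips the grouping by bus channel/data length done by the `ProcessData` class in `examples/data-processing/utils.py`:

```
import mdf_iter, canedge_browser, can_decoder

# placeholders: sample device and DBC file from examples/data-processing/
fs = canedge_browser.LocalFileSystem(base_path="examples/data-processing")
log_files = canedge_browser.get_log_files(fs, ["LOG/958D2219"])

db = can_decoder.load_dbc("examples/data-processing/dbc_files/CSS-Electronics-SAE-J1939-DEMO.dbc")
df_decoder = can_decoder.DataFrameDecoder(db)

for log_file in log_files:
    # load the raw CAN frames into a pandas dataframe
    with fs.open(log_file, "rb") as handle:
        df_raw = mdf_iter.MdfFile(handle).get_data_frame()

    # DBC decode the raw frames into physical values
    df_phys = df_decoder.decode_frame(df_raw)
    print(df_phys.head())
```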
73 | 74 | --- 75 | ## About the CANedge 76 | 77 | For details on installation and how to get started, see the documentation: 78 | - [CANedge Docs](https://www.csselectronics.com/pages/can-bus-hardware-software-docs) 79 | - [CANedge1 Product Page](https://www.csselectronics.com/products/can-logger-sd-canedge1) 80 | - [CANedge2 Product Page](https://www.csselectronics.com/products/can-bus-data-logger-wifi-canedge2) 81 | 82 | --- 83 | ## Contribution & support 84 | Feature suggestions, pull requests or questions are welcome! 85 | 86 | You can contact us at CSS Electronics below: 87 | - [www.csselectronics.com](https://www.csselectronics.com) 88 | - [Contact form](https://www.csselectronics.com/pages/contact-us) 89 | -------------------------------------------------------------------------------- /examples/data-processing/LOG/17BD1DB7/00000006/00000170.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/data-processing/LOG/17BD1DB7/00000006/00000170.MF4 -------------------------------------------------------------------------------- /examples/data-processing/LOG/2F6913DB/00000004/00000001.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/data-processing/LOG/2F6913DB/00000004/00000001.MF4 -------------------------------------------------------------------------------- /examples/data-processing/LOG/94C49784/00000005/00000002.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/data-processing/LOG/94C49784/00000005/00000002.MF4 -------------------------------------------------------------------------------- /examples/data-processing/LOG/94C49784/00000005/00000017-6099D010.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/data-processing/LOG/94C49784/00000005/00000017-6099D010.MF4 -------------------------------------------------------------------------------- /examples/data-processing/LOG/958D2219/00002501/00002081.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/data-processing/LOG/958D2219/00002501/00002081.MF4 -------------------------------------------------------------------------------- /examples/data-processing/LOG/958D2219/00002501/00002082.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/data-processing/LOG/958D2219/00002501/00002082.MF4 -------------------------------------------------------------------------------- /examples/data-processing/LOG/FCBF0606/00003482/00000001-60B621D9.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/data-processing/LOG/FCBF0606/00003482/00000001-60B621D9.MF4 
-------------------------------------------------------------------------------- /examples/data-processing/README.md: -------------------------------------------------------------------------------- 1 | # CANedge data processing examples 2 | 3 | Here you'll find examples of processing raw CANedge data into physical values. 4 | 5 | --- 6 | 7 | ## File overview 8 | 9 | - `LOG/`: Contains raw data samples (J1939, NMEA 2000, UDS) 10 | - `dbc_files/`: Contains demo/test DBC files for use in the examples 11 | - `process_data.py`: List log files between dates, DBC decode them and perform various processing 12 | - `process_tp_data.py`: Example of how multiframe data can be handled incl. DBC decoding (Transport Protocol) 13 | - `utils.py`: Functions/classes used in the above scripts (note: Identical to utils.py from the dashboard-writer repo) 14 | 15 | --- 16 | 17 | ## Installation 18 | 19 | See the README in the folder above (the repository root). 20 | 21 | --- 22 | 23 | ### Regarding local disk vs S3 24 | The examples load data from local disk by default. If you want to load data from your S3 server, modify `devices` to include a list of S3 device paths (e.g. `"my_bucket/device_id"`). In addition, you'll need to modify the `fs` initialization to include your S3 details as below: 25 | 26 | ``` 27 | fs = setup_fs(s3=True, key="access_key", secret="secret_key", endpoint="endpoint") 28 | ``` 29 | 30 | If you're using AWS S3, your endpoint would e.g. be `https://s3.us-east-2.amazonaws.com` (if your region is `us-east-2`). A MinIO S3 endpoint would e.g. be `http://192.168.0.1:9000`. 31 | 32 | --- 33 | ### Regarding encrypted log files 34 | If you need to handle encrypted log files, you can provide a passwords dictionary object with a structure similar to the `passwords.json` file used in the CANedge MF4 converters. The object can be provided e.g. as below (or via environment variables): 35 | 36 | ``` 37 | pw = {"default": "password"} # hardcoded 38 | pw = json.load(open("passwords.json")) # from local JSON file 39 | ``` 40 | 41 | --- 42 | 43 | ### Regarding Transport Protocol example 44 | The example in `process_tp_data.py` should be seen as a simplistic TP implementation. It can be used as a starting point and will most likely need to be modified for individual use cases. We of course welcome any questions/feedback on this functionality. 45 | 46 | The basic concept works as follows: 47 | 48 | 1. You specify the "type" of transport protocol: UDS (`uds`), J1939 (`j1939`) or NMEA 2000 Fast Packets (`nmea`) 49 | 2. The raw data is filtered by the protocol-specific 'TP response IDs' and the payloads of these frames are combined 50 | 3. The original response frames are then replaced by these reconstructed frames with payloads >8 bytes 51 | 4. The reconstructed data can be decoded using DBC files, optionally using multiplexing as in the sample UDS DBC files 52 | 53 | #### Implementing TP processing in other scripts 54 | To use the Transport Protocol functionality in other scripts, you need to make minor modifications: 55 | 56 | 1. Ensure that you import the `MultiFrameDecoder` class from `utils.py` 57 | 2. Specify the type via the `tp_type` variable, e.g. `j1939` 58 | 3. After you've extracted the normal raw dataframe, pass it to the `tp.combine_tp_frames` function as below 59 | 60 | See the below example: 61 | 62 | ``` 63 | tp_type = "j1939" 64 | df_raw, device_id = proc.get_raw_data(log_file) 65 | tp = MultiFrameDecoder(tp_type) 66 | df_raw = tp.combine_tp_frames(df_raw) 67 | ``` 68 | 69 | 70 | #### UDS example 71 | For UDS basics, see our [UDS tutorial](https://www.csselectronics.com/pages/uds-protocol-tutorial-unified-diagnostic-services). The UDS example for device `17BD1DB7` shows UDS response data from a Hyundai Kona EV. 72 | 73 | Below is a snippet of the raw CAN data output before TP processing: 74 | 75 | ``` 76 | TimeStamp,BusChannel,ID,IDE,DLC,DataLength,Dir,EDL,BRS,DataBytes 77 | 2020-12-15 14:15:00.316550+00:00,1,2028,False,8,8,False,False,False,"[16, 62, 98, 1, 1, 255, 247, 231]" 78 | 2020-12-15 14:15:00.326550+00:00,1,2028,False,8,8,False,False,False,"[33, 255, 100, 0, 0, 0, 0, 131]" 79 | 2020-12-15 14:15:00.336600+00:00,1,2028,False,8,8,False,False,False,"[34, 0, 3, 13, 244, 10, 9, 9]" 80 | 2020-12-15 14:15:00.346600+00:00,1,2028,False,8,8,False,False,False,"[35, 9, 9, 10, 0, 0, 10, 182]" 81 | 2020-12-15 14:15:00.356550+00:00,1,2028,False,8,8,False,False,False,"[36, 35, 182, 50, 0, 0, 146, 0]" 82 | 2020-12-15 14:15:00.370950+00:00,1,2028,False,8,8,False,False,False,"[37, 0, 1, 197, 0, 0, 4, 112]" 83 | 2020-12-15 14:15:00.376600+00:00,1,2028,False,8,8,False,False,False,"[38, 0, 0, 0, 155, 0, 0, 1]" 84 | 2020-12-15 14:15:00.388250+00:00,1,2028,False,8,8,False,False,False,"[39, 143, 0, 2, 157, 31, 9, 1]" 85 | 2020-12-15 14:15:00.397200+00:00,1,2028,False,8,8,False,False,False,"[40, 101, 0, 0, 0, 0, 11, 184]" 86 | 2020-12-15 14:15:01.326600+00:00,1,1979,False,8,8,False,False,False,"[16, 38, 98, 1, 0, 126, 80, 7]" 87 | ``` 88 | 89 | After the above sequence is processed via the UDS TP script, it results in the below single frame: 90 | 91 | ``` 92 | 2020-12-15 14:15:00.316550+00:00,1,2028,False,0,62,False,False,False,"[98, 1, 1, 255, 247, 231, 255, 100, 0, 0, 0, 0, 131, 0, 3, 13, 244, 10, 9, 9, 9, 9, 10, 0, 0, 10, 182, 35, 182, 50, 0, 0, 146, 0, 0, 1, 197, 0, 0, 4, 112, 0, 0, 0, 155, 0, 0, 1, 143, 0, 2, 157, 31, 9, 1, 101, 0, 0, 0, 0, 11, 184]" 93 | ``` 94 | 95 | Let's look at how this works in the script: 96 | 97 | First, the script filters the data to include only the UDS response IDs, in this case `2028` (`0x7EC`). The script then iterates through the data line-by-line until it encounters the 'First Frame' of a UDS sequence (identified based on the 1st byte value, `16`). Next, the script extracts bytes 2-7 from the First Frame and concatenates these with bytes 1-7 of the 'Consecutive Frames'. The script 'finalizes' the constructed frame once a new 'First Frame' is encountered. 
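To make this byte-level logic concrete, below is a minimal illustrative sketch of the reassembly, using the first frames from the snippet above (the actual implementation is the `MultiFrameDecoder` class in `utils.py`):

```
# illustrative sketch only - reassemble the First Frame + Consecutive Frames shown above
frames = [
    [16, 62, 98, 1, 1, 255, 247, 231],  # First Frame (1st byte 0x10)
    [33, 255, 100, 0, 0, 0, 0, 131],    # Consecutive Frame (1st byte 0x21)
    [34, 0, 3, 13, 244, 10, 9, 9],      # Consecutive Frame (1st byte 0x22)
]

length = ((frames[0][0] & 0x0F) << 8) | frames[0][1]  # 12 bit payload length: 62
payload = frames[0][2:]                               # bytes 2-7 of the First Frame
for frame in frames[1:]:
    payload += frame[1:]                              # bytes 1-7 of each Consecutive Frame

print(length, payload)  # 62 [98, 1, 1, 255, 247, 231, 255, 100, 0, 0, 0, 0, 131, 0, 3, 13, 244, 10, 9, 9]
```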
98 | 99 | The first 3 bytes of this new frame should be interpreted as follows: 100 | - Byte 0: This is the UDS Response Service ID (SID) 101 | - Bytes 1-2: This is the UDS Data Identifier (DID) 102 | 103 | Often you'll see references to UDS extended PIDs, e.g. `0x220101`. Here, `0x22` is the request service (with `0x62` being the corresponding response service), while `0x0101` is the DID. 104 | 105 | A UDS DBC file can use extended multiplexing to decode UDS signals, utilizing the SID and DID as sequential multiplexors to distinguish between different UDS service modes and DIDs. See the UDS DBC file examples for a starting point on how this can be constructed. 
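As a small illustrative sketch, the SID and DID can be read directly from the reassembled payload above:

```
# illustrative sketch only - interpret the first bytes of the reassembled frame above
payload = [98, 1, 1, 255, 247, 231]  # start of the 62 byte reassembled payload

sid = payload[0]                      # 0x62 = response to the 0x22 request service
did = (payload[1] << 8) | payload[2]  # 0x0101 = the Data Identifier

print(hex(sid), hex(did))  # 0x62 0x101 - i.e. a response to extended PID 0x220101
```

These are the same values used as multiplexors in the sample `tp_uds.dbc`: `service` matches `98` (`0x62`) and `response` matches `257` (`0x0101`).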
106 | 107 | The script merges the reconstructed UDS frames into the original data (removing the original entries of the response ID). The result is a new raw dataframe that can be processed as you would normally do (using a suitable DBC file). The above example has an associated DBC file, `tp_uds.dbc`, which lets you extract e.g. State of Charge. 108 | 109 | The script also contains an example of a proprietary UDS-style request/response from a Nissan Leaf 2019 for State of Charge (SoC) and battery pack temperatures. 110 | -------------------------------------------------------------------------------- /examples/data-processing/dbc_files/CSS-Electronics-SAE-J1939-DEMO.dbc: -------------------------------------------------------------------------------- 1 | VERSION "" 2 | 3 | 4 | NS_ : 5 | NS_DESC_ 6 | CM_ 7 | BA_DEF_ 8 | BA_ 9 | VAL_ 10 | CAT_DEF_ 11 | CAT_ 12 | FILTER 13 | BA_DEF_DEF_ 14 | EV_DATA_ 15 | ENVVAR_DATA_ 16 | SGTYPE_ 17 | SGTYPE_VAL_ 18 | BA_DEF_SGTYPE_ 19 | BA_SGTYPE_ 20 | SIG_TYPE_REF_ 21 | VAL_TABLE_ 22 | SIG_GROUP_ 23 | SIG_VALTYPE_ 24 | SIGTYPE_VALTYPE_ 25 | BO_TX_BU_ 26 | BA_DEF_REL_ 27 | BA_REL_ 28 | BA_DEF_DEF_REL_ 29 | BU_SG_REL_ 30 | BU_EV_REL_ 31 | BU_BO_REL_ 32 | SG_MUL_VAL_ 33 | 34 | BS_: 35 | 36 | BU_: 37 | 38 | 39 | BO_ 2364540158 EEC1: 8 Vector__XXX 40 | SG_ EngineSpeed : 24|16@1+ (0.125,0) [0|8031.875] "rpm" Vector__XXX 41 | 42 | BO_ 2566844926 CCVS1: 8 Vector__XXX 43 | SG_ WheelBasedVehicleSpeed : 8|16@1+ (0.00390625,0) [0|250.996] "km/h" Vector__XXX 44 | 45 | 46 | CM_ BO_ 2364540158 "Electronic Engine Controller 1"; 47 | CM_ SG_ 2364540158 EngineSpeed "Actual engine speed which is calculated over a minimum crankshaft angle of 720 degrees divided by the number of cylinders.…"; 48 | CM_ BO_ 2566844926 "Cruise Control/Vehicle Speed 1"; 49 | CM_ SG_ 2566844926 WheelBasedVehicleSpeed "Wheel-Based Vehicle Speed: Speed of the vehicle as calculated from wheel or tailshaft speed."; 50 | BA_DEF_ SG_ "SPN" INT 0 524287; 51 | BA_DEF_ BO_ "VFrameFormat" ENUM "StandardCAN","ExtendedCAN","reserved","J1939PG"; 52 | BA_DEF_ "DatabaseVersion" STRING ; 53 | BA_DEF_ "BusType" STRING ; 54 | BA_DEF_ "ProtocolType" STRING ; 55 | BA_DEF_ "DatabaseCompiler" STRING ; 56 | BA_DEF_DEF_ "SPN" 0; 57 | BA_DEF_DEF_ "VFrameFormat" "J1939PG"; 58 | BA_DEF_DEF_ "DatabaseVersion" "DEMO PLUS"; 59 | BA_DEF_DEF_ "BusType" ""; 60 | BA_DEF_DEF_ "ProtocolType" ""; 61 | BA_DEF_DEF_ "DatabaseCompiler" ""; 62 | BA_ "ProtocolType" "J1939"; 63 | BA_ "BusType" "CAN"; 64 | BA_ "DatabaseCompiler" "CSS ELECTRONICS (WWW.CSSELECTRONICS.COM)"; 65 | BA_ "DatabaseVersion" "1.0.0"; 66 | BA_ "VFrameFormat" BO_ 2364540158 3; 67 | BA_ "VFrameFormat" BO_ 2566844926 3; 68 | BA_ "SPN" SG_ 2364540158 EngineSpeed 190; 69 | BA_ "SPN" SG_ 2566844926 WheelBasedVehicleSpeed 84; 70 | -------------------------------------------------------------------------------- /examples/data-processing/dbc_files/tp_j1939.dbc: -------------------------------------------------------------------------------- 1 | VERSION "" 2 | 3 | 4 | NS_ : 5 | NS_DESC_ 6 | CM_ 7 | BA_DEF_ 8 | BA_ 9 | VAL_ 10 | CAT_DEF_ 11 | CAT_ 12 | FILTER 13 | BA_DEF_DEF_ 14 | EV_DATA_ 15 | ENVVAR_DATA_ 16 | SGTYPE_ 17 | SGTYPE_VAL_ 18 | BA_DEF_SGTYPE_ 19 | BA_SGTYPE_ 20 | SIG_TYPE_REF_ 21 | VAL_TABLE_ 22 | SIG_GROUP_ 23 | SIG_VALTYPE_ 24 | SIGTYPE_VALTYPE_ 25 | BO_TX_BU_ 26 | BA_DEF_REL_ 27 | BA_REL_ 28 | BA_DEF_DEF_REL_ 29 | BU_SG_REL_ 30 | BU_EV_REL_ 31 | BU_BO_REL_ 32 | SG_MUL_VAL_ 33 | 34 | BS_: 35 | 36 | BU_: 37 | 38 | 39 | BO_ 2566841342 EC1: 40 Vector__XXX 40 | SG_ 
EngineSpeedAtPoint5 : 96|16@1+ (0.125,0) [0|8031.875] "rpm" Vector__XXX 41 | 42 | 43 | 44 | BA_DEF_ BO_ "VFrameFormat" ENUM "StandardCAN","ExtendedCAN","StandardCAN_FD","ExtendedCAN_FD","J1939PG"; 45 | BA_DEF_ "ProtocolType" STRING ; 46 | BA_DEF_DEF_ "VFrameFormat" ""; 47 | BA_DEF_DEF_ "ProtocolType" "J1939PG"; 48 | BA_ "ProtocolType" ""; 49 | BA_ "VFrameFormat" BO_ 2566841342 1; 50 | 51 | -------------------------------------------------------------------------------- /examples/data-processing/dbc_files/tp_nmea.dbc: -------------------------------------------------------------------------------- 1 | VERSION "" 2 | 3 | 4 | NS_ : 5 | NS_DESC_ 6 | CM_ 7 | BA_DEF_ 8 | BA_ 9 | VAL_ 10 | CAT_DEF_ 11 | CAT_ 12 | FILTER 13 | BA_DEF_DEF_ 14 | EV_DATA_ 15 | ENVVAR_DATA_ 16 | SGTYPE_ 17 | SGTYPE_VAL_ 18 | BA_DEF_SGTYPE_ 19 | BA_SGTYPE_ 20 | SIG_TYPE_REF_ 21 | VAL_TABLE_ 22 | SIG_GROUP_ 23 | SIG_VALTYPE_ 24 | SIGTYPE_VALTYPE_ 25 | BO_TX_BU_ 26 | BA_DEF_REL_ 27 | BA_REL_ 28 | BA_DEF_DEF_REL_ 29 | BU_SG_REL_ 30 | BU_EV_REL_ 31 | BU_BO_REL_ 32 | SG_MUL_VAL_ 33 | 34 | BS_: 35 | 36 | BU_: 37 | 38 | 39 | BO_ 2583168510 gnssPositionData: 51 Vector__XXX 40 | SG_ sid : 0|8@1+ (1,0) [0|0] "" Vector__XXX 41 | SG_ date : 8|16@1+ (1,0) [0|0] "days" Vector__XXX 42 | SG_ time : 24|32@1+ (0.0001,0) [0|0] "s" Vector__XXX 43 | SG_ latitude : 56|64@1- (1e-16,0) [0|0] "deg" Vector__XXX 44 | SG_ longitude : 120|64@1- (1e-16,0) [0|0] "deg" Vector__XXX 45 | SG_ altitude : 184|64@1- (1e-06,0) [0|0] "m" Vector__XXX 46 | SG_ gnssType : 248|4@1+ (1,0) [0|0] "" Vector__XXX 47 | SG_ method : 252|4@1+ (1,0) [0|0] "" Vector__XXX 48 | SG_ integrity : 256|2@1+ (1,0) [0|0] "" Vector__XXX 49 | SG_ reserved : 258|6@1+ (1,0) [0|0] "" Vector__XXX 50 | SG_ numberOfSvs : 264|8@1+ (1,0) [0|0] "" Vector__XXX 51 | SG_ hdop : 272|16@1- (0.01,0) [0|0] "" Vector__XXX 52 | SG_ pdop : 288|16@1- (0.01,0) [0|0] "" Vector__XXX 53 | SG_ geoidalSeparation : 304|32@1- (0.01,0) [0|0] "m" Vector__XXX 54 | SG_ referenceStations : 336|8@1+ (1,0) [0|0] "" Vector__XXX 55 | SG_ referenceStationType : 344|4@1+ (1,0) [0|0] "" Vector__XXX 56 | SG_ referenceStationId : 348|12@1+ (1,0) [0|0] "None" Vector__XXX 57 | SG_ ageOfDgnssCorrections : 360|16@1+ (0.01,0) [0|0] "s" Vector__XXX 58 | 59 | 60 | 61 | CM_ BO_ 2583168510 "GNSS Position Data"; 62 | BA_DEF_ BO_ "VFrameFormat" ENUM "StandardCAN","ExtendedCAN","reserved","J1939PG"; 63 | BA_DEF_ "DatabaseVersion" STRING ; 64 | BA_DEF_ "BusType" STRING ; 65 | BA_DEF_ "ProtocolType" STRING ; 66 | BA_DEF_ "DatabaseCompiler" STRING ; 67 | BA_DEF_DEF_ "VFrameFormat" "J1939PG"; 68 | BA_DEF_DEF_ "DatabaseVersion" "1.2"; 69 | BA_DEF_DEF_ "BusType" ""; 70 | BA_DEF_DEF_ "ProtocolType" "J1939"; 71 | BA_DEF_DEF_ "DatabaseCompiler" ""; 72 | BA_ "VFrameFormat" BO_ 2583168510 3; 73 | VAL_ 2583168510 gnssType 8 "Galileo" 7 "surveyed" 6 "integrated" 5 "Chayka" 4 "GPS+SBAS/WAAS+GLONASS" 3 "GPS+SBAS/WAAS" 2 "GPS+GLONASS" 1 "GLONASS" 0 "GPS" ; 74 | VAL_ 2583168510 method 8 "Simulate mode" 7 "Manual Input" 6 "Estimated (DR) mode" 5 "RTK float" 4 "RTK Fixed Integer" 3 "Precise GNSS" 2 "DGNSS fix" 1 "GNSS fix" 0 "no GNSS" ; 75 | VAL_ 2583168510 integrity 2 "Caution" 1 "Safe" 0 "No integrity checking" ; 76 | VAL_ 2583168510 referenceStationType 8 "Galileo" 7 "surveyed" 6 "integrated" 5 "Chayka" 4 "GPS+SBAS/WAAS+GLONASS" 3 "GPS+SBAS/WAAS" 2 "GPS+GLONASS" 1 "GLONASS" 0 "GPS" ; 77 | 78 | -------------------------------------------------------------------------------- /examples/data-processing/dbc_files/tp_uds.dbc: 
-------------------------------------------------------------------------------- 1 | VERSION "" 2 | 3 | 4 | NS_ : 5 | NS_DESC_ 6 | CM_ 7 | BA_DEF_ 8 | BA_ 9 | VAL_ 10 | CAT_DEF_ 11 | CAT_ 12 | FILTER 13 | BA_DEF_DEF_ 14 | EV_DATA_ 15 | ENVVAR_DATA_ 16 | SGTYPE_ 17 | SGTYPE_VAL_ 18 | BA_DEF_SGTYPE_ 19 | BA_SGTYPE_ 20 | SIG_TYPE_REF_ 21 | VAL_TABLE_ 22 | SIG_GROUP_ 23 | SIG_VALTYPE_ 24 | SIGTYPE_VALTYPE_ 25 | BO_TX_BU_ 26 | BA_DEF_REL_ 27 | BA_REL_ 28 | BA_DEF_DEF_REL_ 29 | BU_SG_REL_ 30 | BU_EV_REL_ 31 | BU_BO_REL_ 32 | SG_MUL_VAL_ 33 | 34 | BS_: 35 | 36 | BU_: 37 | 38 | 39 | BO_ 2028 Battery: 62 Vector__XXX 40 | SG_ M_SID_0x220101_StateOfChargeBMS m257 : 64|8@1+ (0.5,0) [0|0] "%" Vector__XXX 41 | SG_ response m98M : 23|16@0+ (1,0) [0|0] "unit" Vector__XXX 42 | SG_ service M : 15|8@0+ (1,0) [0|0] "" Vector__XXX 43 | 44 | BO_ 1979 Temperature: 54 Vector__XXX 45 | SG_ M_SID_0x220100_IndoorTemp m256 : 72|8@1+ (0.5,-40) [0|0] "degC" Vector__XXX 46 | SG_ response m98 : 23|16@0+ (1,0) [0|0] "unit" Vector__XXX 47 | SG_ service M : 15|8@0+ (1,0) [0|0] "" Vector__XXX 48 | SG_ M_SID_0x220100_OutdoorTemp : 87|8@0+ (0.5,-40) [0|0] "" Vector__XXX 49 | 50 | 51 | 52 | BA_DEF_ BO_ "VFrameFormat" ENUM "StandardCAN","ExtendedCAN","StandardCAN_FD","ExtendedCAN_FD","J1939PG"; 53 | BA_DEF_ "ProtocolType" STRING ; 54 | BA_DEF_DEF_ "VFrameFormat" ""; 55 | BA_DEF_DEF_ "ProtocolType" ""; 56 | BA_ "ProtocolType" ""; 57 | BA_ "VFrameFormat" BO_ 2028 0; 58 | 59 | SG_MUL_VAL_ 2028 M_SID_0x220101_StateOfChargeBMS response 257-257; 60 | SG_MUL_VAL_ 2028 response service 98-98; 61 | 62 | -------------------------------------------------------------------------------- /examples/data-processing/dbc_files/tp_uds_nissan.dbc: -------------------------------------------------------------------------------- 1 | VERSION "" 2 | 3 | 4 | NS_ : 5 | NS_DESC_ 6 | CM_ 7 | BA_DEF_ 8 | BA_ 9 | VAL_ 10 | CAT_DEF_ 11 | CAT_ 12 | FILTER 13 | BA_DEF_DEF_ 14 | EV_DATA_ 15 | ENVVAR_DATA_ 16 | SGTYPE_ 17 | SGTYPE_VAL_ 18 | BA_DEF_SGTYPE_ 19 | BA_SGTYPE_ 20 | SIG_TYPE_REF_ 21 | VAL_TABLE_ 22 | SIG_GROUP_ 23 | SIG_VALTYPE_ 24 | SIGTYPE_VALTYPE_ 25 | BO_TX_BU_ 26 | BA_DEF_REL_ 27 | BA_REL_ 28 | BA_DEF_DEF_REL_ 29 | BU_SG_REL_ 30 | BU_EV_REL_ 31 | BU_BO_REL_ 32 | SG_MUL_VAL_ 33 | 34 | BS_: 35 | 36 | BU_: 37 | 38 | 39 | BO_ 1979 SoC_Temp: 55 Vector__XXX 40 | SG_ SoC m1 : 279|24@0+ (0.0001,0) [0|0] "%" Vector__XXX 41 | SG_ BatPackTemp1 m4 : 40|8@1+ (1,0) [0|0] "%" Vector__XXX 42 | SG_ BatPackTemp2 m4 : 64|8@1+ (1,0) [0|0] "%" Vector__XXX 43 | SG_ BatPackTemp4 m4 : 112|8@1+ (1,0) [0|0] "%" Vector__XXX 44 | SG_ response m97M : 16|8@1+ (1,0) [0|0] "" Vector__XXX 45 | SG_ service M : 8|8@1+ (1,0) [0|0] "" Vector__XXX 46 | 47 | 48 | 49 | BA_DEF_ BO_ "VFrameFormat" ENUM "StandardCAN","ExtendedCAN","StandardCAN_FD","ExtendedCAN_FD","J1939PG"; 50 | BA_DEF_ "ProtocolType" STRING ; 51 | BA_DEF_DEF_ "VFrameFormat" ""; 52 | BA_DEF_DEF_ "ProtocolType" ""; 53 | BA_ "ProtocolType" ""; 54 | 55 | SG_MUL_VAL_ 1979 SoC response 1-1; 56 | SG_MUL_VAL_ 1979 BatPackTemp1 response 4-4; 57 | SG_MUL_VAL_ 1979 BatPackTemp2 response 4-4; 58 | SG_MUL_VAL_ 1979 BatPackTemp4 response 4-4; 59 | SG_MUL_VAL_ 1979 response service 97-97; 60 | 61 | -------------------------------------------------------------------------------- /examples/data-processing/process_data.py: -------------------------------------------------------------------------------- 1 | import mdf_iter 2 | import canedge_browser 3 | 4 | import pandas as pd 5 | from datetime import datetime, timezone 6 | from utils 
import setup_fs, load_dbc_files, restructure_data, add_custom_sig, ProcessData, test_signal_threshold 7 | 8 | # specify devices to process (from local/S3), DBC files, start time and optionally passwords 9 | devices = ["LOG/958D2219"] 10 | 11 | dbc_paths = ["dbc_files/CSS-Electronics-SAE-J1939-DEMO.dbc"] 12 | 13 | start = datetime(year=2020, month=1, day=1, hour=0, tzinfo=timezone.utc) 14 | stop = datetime(year=2030, month=1, day=1, hour=0, tzinfo=timezone.utc) 15 | 16 | pw = {"default": "password"} 17 | 18 | # setup filesystem (local/S3), load DBC files and list log files for processing 19 | fs = setup_fs(s3=False, key="", secret="", endpoint="", region="", passwords=pw) 20 | db_list = load_dbc_files(dbc_paths) 21 | log_files = canedge_browser.get_log_files(fs, devices, start_date=start, stop_date=stop, passwords=pw) 22 | print(f"Found a total of {len(log_files)} log files") 23 | 24 | # -------------------------------------------- 25 | # perform data processing of each log file (e.g. evaluation of signal stats vs. thresholds) 26 | proc = ProcessData(fs, db_list, signals=[]) 27 | df_phys_all = [] 28 | 29 | for log_file in log_files: 30 |     df_raw, device_id = proc.get_raw_data(log_file, passwords=pw) 31 |     df_phys = proc.extract_phys(df_raw) 32 |     proc.print_log_summary(device_id, log_file, df_phys) 33 | 34 |     # test_signal_threshold(df_phys=df_phys, signal="EngineSpeed", threshold=800) 35 | 36 |     df_phys_all.append(df_phys) 37 | 38 | df_phys_all = pd.concat(df_phys_all, ignore_index=False).sort_index() 39 | 40 | # -------------------------------------------- 41 | # example: Add a custom signal 42 | # def ratio(s1, s2): 43 | #     return s2 / s1 if s1 else np.nan 44 | 45 | # df_phys_all = add_custom_sig(df_phys_all, "WheelBasedVehicleSpeed", "EngineSpeed", ratio, "RatioRpmSpeed") 46 | 47 | # -------------------------------------------- 48 | # example: resample and restructure data (parameters in columns) 49 | df_phys_join = restructure_data(df_phys=df_phys_all, res="1S") 50 | df_phys_join.to_csv("output_joined.csv") 51 | print("\nConcatenated DBC decoded data:\n", df_phys_join) 52 | -------------------------------------------------------------------------------- /examples/data-processing/process_tp_data.py: -------------------------------------------------------------------------------- 1 | import canedge_browser, os 2 | from utils import setup_fs, load_dbc_files, ProcessData, MultiFrameDecoder 3 | 4 | 5 | def process_tp_example(devices, dbc_paths, tp_type): 6 |     fs = setup_fs(s3=False) 7 |     db_list = load_dbc_files(dbc_paths) 8 |     log_files = canedge_browser.get_log_files(fs, devices) 9 | 10 |     proc = ProcessData(fs, db_list) 11 | 12 |     for log_file in log_files: 13 |         output_folder = "output" + log_file.replace(".MF4", "") 14 |         if not os.path.exists(output_folder): 15 |             os.makedirs(f"{output_folder}") 16 | 17 |         df_raw, device_id = proc.get_raw_data(log_file) 18 |         df_raw.to_csv(f"{output_folder}/tp_raw_data.csv") 19 | 20 |         # replace transport protocol sequences with single frames 21 |         tp = MultiFrameDecoder(tp_type) 22 |         df_raw = tp.combine_tp_frames(df_raw) 23 |         df_raw.to_csv(f"{output_folder}/tp_raw_data_combined.csv") 24 | 25 |         # extract physical values as normal (the dataframe now contains the reassembled frames) 26 |         df_phys = proc.extract_phys(df_raw) 27 |         df_phys.to_csv(f"{output_folder}/tp_physical_values.csv") 28 | 29 |     print("Finished saving CSV output for devices:", devices) 30 | 31 | 32 | # ---------------------------------------- 33 | # run different TP examples 34 | 35 | # UDS data from Hyundai Kona EV (SoC%) 36 | devices = 
["LOG/17BD1DB7"] 37 | dbc_paths = [r"dbc_files/tp_uds.dbc"] 38 | process_tp_example(devices, dbc_paths, "uds") 39 | 40 | # UDS data from Nissan Leaf 2019 (SoC%) 41 | devices = ["LOG/2F6913DB"] 42 | dbc_paths = [r"dbc_files/tp_uds_nissan.dbc"] 43 | process_tp_example(devices, dbc_paths, "uds") 44 | 45 | # J1939 TP data 46 | devices = ["LOG/FCBF0606"] 47 | dbc_paths = [r"dbc_files/tp_j1939.dbc"] 48 | process_tp_example(devices, dbc_paths, "j1939") 49 | 50 | # NMEA 2000 fast packet data (with GNSS position) 51 | devices = ["LOG/94C49784"] 52 | dbc_paths = [r"dbc_files/tp_nmea.dbc"] 53 | process_tp_example(devices, dbc_paths, "nmea") 54 | -------------------------------------------------------------------------------- /examples/data-processing/requirements.txt: -------------------------------------------------------------------------------- 1 | aiobotocore==2.5.0 2 | aiohttp==3.8.1 3 | aioitertools==0.10.0 4 | aiosignal==1.2.0 5 | async-timeout==4.0.2 6 | attrs==21.4.0 7 | botocore==1.29.76 8 | can_decoder>=0.1.9 9 | canedge-browser>=0.0.8 10 | canmatrix==0.9.5 11 | certifi==2021.10.8 12 | charset-normalizer==2.0.12 13 | click==8.1.2 14 | colorama==0.4.4 15 | frozenlist==1.3.0 16 | fsspec==2023.4.0 17 | future==0.18.3 18 | idna==3.4 19 | influxdb-client==1.35.0 20 | J1939-PGN==0.4 21 | jmespath==0.10.0 22 | mdf-iter>=2.1.1 23 | multidict==6.0.2 24 | numpy==1.24.1 25 | pandas==1.5.3 26 | python-dateutil==2.8.2 27 | pytz==2022.1 28 | reactivex==4.0.4 29 | s3fs==2023.4.0 30 | six==1.16.0 31 | typing_extensions==4.1.1 32 | urllib3==1.26.9 33 | wrapt==1.14.0 34 | yarl==1.7.2 35 | -------------------------------------------------------------------------------- /examples/data-processing/utils.py: -------------------------------------------------------------------------------- 1 | def setup_fs(s3, key="", secret="", endpoint="", region="",cert="", passwords={}): 2 | """Given a boolean specifying whether to use local disk or S3, setup filesystem 3 | Syntax examples: AWS (http://s3.us-east-2.amazonaws.com), MinIO (http://192.168.0.1:9000) 4 | The cert input is relevant if you're using MinIO with TLS enabled, for specifying the path to the certficiate. 5 | For MinIO you should also parse the region_name 6 | 7 | The block_size is set to accomodate files up to 55 MB in size. 
If your log files are larger, adjust this value accordingly 8 | """ 9 | 10 | if s3: 11 | import s3fs 12 | 13 | block_size = 55 * 1024 * 1024 14 | 15 | if "amazonaws" in endpoint: 16 | fs = s3fs.S3FileSystem(key=key, secret=secret, default_block_size=block_size) 17 | elif cert != "": 18 | fs = s3fs.S3FileSystem( 19 | key=key, 20 | secret=secret, 21 | client_kwargs={"endpoint_url": endpoint, "verify": cert, "region_name": region}, 22 | default_block_size=block_size, 23 | ) 24 | else: 25 | fs = s3fs.S3FileSystem( 26 | key=key, 27 | secret=secret, 28 | client_kwargs={"endpoint_url": endpoint, "region_name": region}, 29 | default_block_size=block_size, 30 | ) 31 | 32 | else: 33 | from pathlib import Path 34 | import canedge_browser 35 | 36 | base_path = Path(__file__).parent 37 | fs = canedge_browser.LocalFileSystem(base_path=base_path, passwords=passwords) 38 | 39 | return fs 40 | 41 | 42 | # ----------------------------------------------- 43 | def load_dbc_files(dbc_paths): 44 | """Given a list of DBC file paths, create a list of conversion rule databases""" 45 | import can_decoder 46 | from pathlib import Path 47 | 48 | db_list = [] 49 | for dbc in dbc_paths: 50 | db = can_decoder.load_dbc(Path(__file__).parent / dbc) 51 | db_list.append(db) 52 | 53 | return db_list 54 | 55 | 56 | # ----------------------------------------------- 57 | def list_log_files(fs, devices, start_times, verbose=True, passwords={}): 58 | """Given a list of device paths, list log files from specified filesystem. 59 | Data is loaded based on the list of start datetimes 60 | """ 61 | import canedge_browser 62 | 63 | log_files = [] 64 | 65 | if len(start_times): 66 | for idx, device in enumerate(devices): 67 | start = start_times[idx] 68 | log_files_device = canedge_browser.get_log_files(fs, [device], start_date=start, passwords=passwords) 69 | log_files.extend(log_files_device) 70 | 71 | if verbose: 72 | print(f"Found {len(log_files)} log files\n") 73 | 74 | return log_files 75 | 76 | def add_signal_prefix(df_phys, can_id_prefix=False, pgn_prefix=False, bus_prefix=False): 77 | """Rename Signal names by prefixing the full 78 | CAN ID (in hex) and/or J1939 PGN 79 | """ 80 | from J1939_PGN import J1939_PGN 81 | 82 | if df_phys.empty: 83 | return df_phys 84 | else: 85 | prefix = "" 86 | if bus_prefix: 87 | prefix += df_phys["BusChannel"].apply(lambda x: f"{x}.") 88 | if can_id_prefix: 89 | prefix += df_phys["CAN ID"].apply(lambda x: f"{hex(int(x))[2:].upper()}." ) 90 | if pgn_prefix: 91 | prefix += df_phys["CAN ID"].apply(lambda x: f"{J1939_PGN(int(x)).pgn}.") 92 | 93 | df_phys["Signal"] = prefix + df_phys["Signal"] 94 | 95 | return df_phys 96 | 97 | def restructure_data(df_phys, res, ffill=False): 98 | """Restructure the decoded data to a resampled 99 | format where each column reflects a Signal 100 | """ 101 | import pandas as pd 102 | 103 | if not df_phys.empty and res != "": 104 | df_phys = df_phys.pivot_table(values="Physical Value", index=pd.Grouper(freq=res), columns="Signal") 105 | 106 | if ffill: 107 | df_phys = df_phys.ffill() 108 | 109 | return df_phys 110 | 111 | 112 | def test_signal_threshold(df_phys, signal, threshold): 113 | """Illustrative example for how to extract a signal and evaluate statistical values 114 | vs. defined thresholds. The function can be easily modified for your needs. 
115 | """ 116 | df_signal = df_phys[df_phys["Signal"] == signal]["Physical Value"] 117 | 118 | stats = df_signal.agg(["count", "min", "max", "mean", "std"]) 119 | delta = stats["max"] - stats["min"] 120 | 121 | if delta > threshold: 122 | print(f"{signal} exhibits a 'max - min' delta of {delta} exceeding threshold of {threshold}") 123 | 124 | 125 | def add_custom_sig(df_phys, signal1, signal2, function, new_signal): 126 | """Helper function for calculating a new signal based on two signals and a function. 127 | Returns a dataframe with the new signal name and physical values 128 | """ 129 | import pandas as pd 130 | 131 | try: 132 | s1 = df_phys[df_phys["Signal"] == signal1]["Physical Value"].rename(signal1) 133 | s2 = df_phys[df_phys["Signal"] == signal2]["Physical Value"].rename(signal2) 134 | 135 | df_new_sig = pd.merge_ordered( 136 | s1, 137 | s2, 138 | on="TimeStamp", 139 | fill_method="ffill", 140 | ).set_index("TimeStamp") 141 | df_new_sig = df_new_sig.apply(lambda x: function(x[0], x[1]), axis=1).dropna().rename("Physical Value").to_frame() 142 | df_new_sig["Signal"] = new_signal 143 | df_phys = df_phys.append(df_new_sig) 144 | 145 | except: 146 | print(f"Warning: Custom signal {new_signal} not created\n") 147 | 148 | return df_phys 149 | 150 | 151 | # ----------------------------------------------- 152 | class ProcessData: 153 | def __init__(self, fs, db_list, signals=[], days_offset=None, verbose=True): 154 | from datetime import datetime, timedelta 155 | 156 | self.db_list = db_list 157 | self.signals = signals 158 | self.fs = fs 159 | self.days_offset = days_offset 160 | self.verbose = verbose 161 | 162 | if self.verbose == True and self.days_offset != None: 163 | date_offset = (datetime.today() - timedelta(days=self.days_offset)).strftime("%Y-%m-%d") 164 | print( 165 | f"Warning: days_offset = {self.days_offset}, meaning data is offset to start at {date_offset}.\nThis is intended for sample data testing only. Set days_offset = None when processing your own data." 166 | ) 167 | 168 | return 169 | 170 | def extract_phys(self, df_raw): 171 | """Given df of raw data and list of decoding databases, create new def with 172 | physical values (no duplicate signals and optionally filtered/rebaselined) 173 | """ 174 | import can_decoder 175 | import pandas as pd 176 | 177 | df_phys = pd.DataFrame() 178 | df_phys_temp = [] 179 | for db in self.db_list: 180 | df_decoder = can_decoder.DataFrameDecoder(db) 181 | 182 | for bus, bus_group in df_raw.groupby("BusChannel"): 183 | for length, group in bus_group.groupby("DataLength"): 184 | df_phys_group = df_decoder.decode_frame(group) 185 | if not df_phys_group.empty: 186 | df_phys_group["BusChannel"] = bus 187 | df_phys_temp.append(df_phys_group) 188 | 189 | df_phys = pd.concat(df_phys_temp, ignore_index=False).sort_index() 190 | 191 | # remove duplicates in case multiple DBC files contain identical signals 192 | df_phys["datetime"] = df_phys.index 193 | df_phys = df_phys.drop_duplicates(keep="first") 194 | df_phys = df_phys.drop(labels="datetime", axis=1) 195 | 196 | # optionally filter and rebaseline the data 197 | df_phys = self.filter_signals(df_phys) 198 | 199 | if not df_phys.empty and type(self.days_offset) == int: 200 | df_phys = self.rebaseline_data(df_phys) 201 | 202 | return df_phys 203 | 204 | def rebaseline_data(self, df_phys): 205 | """Given a df of physical values, this offsets the timestamp 206 | to be equal to today, minus a given number of days. 
207 | """ 208 | from datetime import datetime, timezone 209 | import pandas as pd 210 | 211 | delta_days = (datetime.now(timezone.utc) - df_phys.index.min()).days - self.days_offset 212 | df_phys.index = df_phys.index + pd.Timedelta(delta_days, "day") 213 | 214 | return df_phys 215 | 216 | def filter_signals(self, df_phys): 217 | """Given a df of physical values, return only signals matched by filter""" 218 | if not df_phys.empty and len(self.signals): 219 | df_phys = df_phys[df_phys["Signal"].isin(self.signals)] 220 | 221 | return df_phys 222 | 223 | def get_raw_data(self, log_file, passwords={},lin=False): 224 | """Extract a df of raw data and device ID from log file. 225 | Optionally include LIN bus data by setting lin=True 226 | """ 227 | import mdf_iter 228 | 229 | with self.fs.open(log_file, "rb") as handle: 230 | mdf_file = mdf_iter.MdfFile(handle, passwords=passwords) 231 | device_id = self.get_device_id(mdf_file) 232 | 233 | if lin: 234 | df_raw_lin = mdf_file.get_data_frame_lin() 235 | df_raw_lin["IDE"] = 0 236 | df_raw_can = mdf_file.get_data_frame() 237 | df_raw = df_raw_can.append(df_raw_lin) 238 | else: 239 | df_raw = mdf_file.get_data_frame() 240 | 241 | return df_raw, device_id 242 | 243 | def get_device_id(self, mdf_file): 244 | return mdf_file.get_metadata()["HDcomment.Device Information.serial number"]["value_raw"] 245 | 246 | def print_log_summary(self, device_id, log_file, df_phys): 247 | """Print summary information for each log file""" 248 | if self.verbose: 249 | print( 250 | "\n---------------", 251 | f"\nDevice: {device_id} | Log file: {log_file.split(device_id)[-1]} [Extracted {len(df_phys)} decoded frames]\nPeriod: {df_phys.index.min()} - {df_phys.index.max()}\n", 252 | ) 253 | 254 | 255 | # ----------------------------------------------- 256 | class MultiFrameDecoder: 257 | 258 | """Class for handling transport protocol data. For each response ID, identify 259 | sequences of subsequent frames and combine the relevant parts of the data payloads 260 | into a single payload with the relevant CAN ID. The original raw dataframe is 261 | then cleansed of the original response ID sequence frames. Instead, the new reassembled 262 | frames are inserted. 
263 | 264 | :param tp_type: the class supports UDS ("uds"), NMEA 2000 Fast Packets ("nmea") and J1939 ("j1939") 265 | :param df_raw: dataframe of raw CAN data from the mdf_iter module 266 | 267 | SINGLE_FRAME_MASK: mask used in matching single frames 268 | FIRST_FRAME_MASK: mask used in matching first frames 269 | CONSEQ_FRAME_MASK: mask used in matching consequtive frames 270 | SINGLE_FRAME: frame type reflecting a single frame response 271 | FIRST_FRAME: frame type reflecting the first frame in a multi frame response 272 | CONSEQ_FRAME: frame type reflecting a consequtive frame in a multi frame response 273 | ff_payload_start: the combined payload will start at this byte in the FIRST_FRAME 274 | bam_pgn: this is used in J1939 and marks the initial BAM message ID in DEC 275 | res_id_list: TP 'response CAN IDs' to process 276 | 277 | """ 278 | FRAME_STRUCT = { 279 | "": {}, 280 | "uds": { 281 | "SINGLE_FRAME_MASK": 0xF0, 282 | "FIRST_FRAME_MASK": 0xF0, 283 | "CONSEQ_FRAME_MASK": 0xF0, 284 | "SINGLE_FRAME": 0x00, 285 | "FIRST_FRAME": 0x10, 286 | "CONSEQ_FRAME": 0x20, 287 | "ff_payload_start": 1, 288 | "bam_pgn": -1, 289 | "res_id_list": [1960, 2016, 2025, 2026, 2027, 2028, 2029, 2030, 2031, 2026, 1979, 1992, 1998, 2001, 402522235], 290 | "group": "ID" 291 | }, 292 | "j1939": { 293 | "SINGLE_FRAME_MASK": 0xFF, 294 | "FIRST_FRAME_MASK": 0xFF, 295 | "CONSEQ_FRAME_MASK": 0x00, 296 | "SINGLE_FRAME": 0xFF, 297 | "FIRST_FRAME": 0x20, 298 | "CONSEQ_FRAME": 0x00, 299 | "ff_payload_start": 8, 300 | "bam_pgn": 60416, 301 | "res_id_list": [60416, 60160], 302 | "group": "SA" 303 | }, 304 | "nmea": { 305 | "SINGLE_FRAME_MASK": 0xFF, 306 | "FIRST_FRAME_MASK": 0x1F, 307 | "CONSEQ_FRAME_MASK": 0x00, 308 | "SINGLE_FRAME": 0xFF, 309 | "FIRST_FRAME": 0x00, 310 | "CONSEQ_FRAME": 0x00, 311 | "ff_payload_start": 2, 312 | "bam_pgn": -1, 313 | "res_id_list":[126983, 126984, 126985, 126986, 126987, 126988, 126996, 127233, 127237, 127489, 127496, 127497, 127503, 127504, 127506, 127751, 128275, 128520, 128538, 129029, 129038, 129039, 129040, 129041, 129044, 129284, 129285, 129301, 129302, 129538, 129540, 129541, 129542, 129545, 129547, 129549, 129551, 129556, 129792, 129793, 129794, 129795, 129796, 129798, 129799, 129800, 129801, 129803, 129804, 129805, 129806, 129807, 129808, 129809, 129810, 129811, 129812, 129813, 129814, 129815, 129816, 130052, 130053, 130054, 130060, 130061, 130064, 130065, 130067, 130068, 130069, 130070, 130071, 130072, 130073, 130074, 130320, 130321, 130322, 130323, 130324, 130564, 130565, 130567, 130569, 130571, 130575, 130577, 130578, 130581, 130584, 130586], 314 | "group": "ID" 315 | }} 316 | 317 | def __init__(self, tp_type=""): 318 | self.tp_type = tp_type 319 | return 320 | 321 | def calculate_pgn(self, frame_id): 322 | pgn = (frame_id & 0x03FFFF00) >> 8 323 | pgn_f = pgn & 0xFF00 324 | if pgn_f < 0xF000: 325 | pgn &= 0xFFFFFF00 326 | return pgn 327 | 328 | def calculate_sa(self, frame_id): 329 | sa = frame_id & 0x000000FF 330 | return sa 331 | 332 | def construct_new_tp_frame(self, base_frame, payload_concatenated, can_id): 333 | new_frame = base_frame.copy() 334 | new_frame["DataBytes"] = payload_concatenated 335 | new_frame["DLC"] = 0 336 | new_frame["DataLength"] = len(payload_concatenated) 337 | if can_id: 338 | new_frame["ID"] = can_id 339 | return new_frame 340 | 341 | def identify_matching_ids(self,df_raw,res_id_list_full, bam_pgn): 342 | # identify which CAN IDs (or PGNs) match the TP IDs and create a filtered df_raw_match 343 | # which is used to separate the df_raw into two 
parts: Incl/excl TP frames. 344 | # Also produces a reduced res_id_list that only contains relevant ID entries 345 | if self.tp_type == "nmea": 346 | df_raw_pgns = df_raw["ID"].apply(self.calculate_pgn) 347 | df_raw_match = df_raw_pgns.isin(res_id_list_full) 348 | res_id_list = df_raw_pgns[df_raw_match].drop_duplicates().values.tolist() 349 | if self.tp_type == "j1939": 350 | df_raw_pgns = df_raw["ID"].apply(self.calculate_pgn) 351 | df_raw_match = df_raw_pgns.isin(res_id_list_full) 352 | res_id_list = res_id_list_full.copy() 353 | res_id_list.remove(bam_pgn) 354 | if type(res_id_list) is not list: 355 | res_id_list = [res_id_list] 356 | elif self.tp_type == "uds": 357 | df_raw_pgns = None 358 | df_raw_match = df_raw["ID"].isin(res_id_list_full) 359 | res_id_list = df_raw["ID"][df_raw_match].drop_duplicates().values.tolist() 360 | 361 | df_raw_tp = df_raw[df_raw_match] 362 | df_raw_excl_tp = df_raw[~df_raw_match] 363 | 364 | if len(df_raw) - len(df_raw_tp) - len(df_raw_excl_tp): 365 | print("Warning - total rows does not equal sum of rows incl/excl transport protocol frames") 366 | 367 | return df_raw_tp, df_raw_excl_tp, res_id_list, df_raw_pgns 368 | 369 | def filter_df_raw_tp(self, df_raw_tp, df_raw_tp_pgns,res_id): 370 | # filter df_raw_tp to include only frames for the specific response ID res_id 371 | if self.tp_type == "nmea": 372 | df_raw_tp_res_id = df_raw_tp[df_raw_tp_pgns.isin([res_id])] 373 | elif self.tp_type == "j1939": 374 | df_raw_tp_res_id = df_raw_tp 375 | df_raw_tp_res_id = df_raw_tp_res_id.copy() 376 | df_raw_tp_res_id["SA"] = df_raw_tp_res_id["ID"].apply(self.calculate_sa) 377 | else: 378 | df_raw_tp_res_id = df_raw_tp[df_raw_tp["ID"].isin([res_id])] 379 | return df_raw_tp_res_id 380 | 381 | def check_if_first_frame(self,row, bam_pgn, first_frame_mask,first_frame): 382 | # check if row reflects the first frame of a TP sequence 383 | if self.tp_type == "j1939" and bam_pgn == self.calculate_pgn(row.ID): 384 | first_frame_test = True 385 | elif (row.DataBytes[0] & first_frame_mask) == first_frame: 386 | first_frame_test = True 387 | else: 388 | first_frame_test = False 389 | 390 | return first_frame_test 391 | 392 | def pgn_to_can_id(self,row): 393 | # for J1939, extract PGN and convert to 29 bit CAN ID for use in baseframe 394 | pgn_hex = "".join("{:02x}".format(x) for x in reversed(row.DataBytes[5:8])) 395 | pgn = int(pgn_hex, 16) 396 | can_id = (6 << 26) | (pgn << 8) | row.SA 397 | return can_id 398 | 399 | def get_payload_length(self,row): 400 | if self.tp_type == "uds": 401 | ff_length = (row.DataBytes[0] & 0x0F) << 8 | row.DataBytes[1] 402 | if self.tp_type == "nmea": 403 | ff_length = row.DataBytes[1] 404 | if self.tp_type == "j1939": 405 | ff_length = int("".join("{:02x}".format(x) for x in reversed(row.DataBytes[1:2])),16) 406 | return ff_length 407 | 408 | def combine_tp_frames(self, df_raw): 409 | # main function that reassembles TP frames in df_raw 410 | import pandas as pd 411 | 412 | # if tp_type = "" return original df_raw 413 | if self.tp_type not in ["uds","nmea", "j1939"]: 414 | return df_raw 415 | 416 | # extract protocol specific TP frame info 417 | frame_struct = MultiFrameDecoder.FRAME_STRUCT[self.tp_type] 418 | res_id_list_full = frame_struct["res_id_list"] 419 | bam_pgn = frame_struct["bam_pgn"] 420 | ff_payload_start = frame_struct["ff_payload_start"] 421 | first_frame_mask = frame_struct["FIRST_FRAME_MASK"] 422 | first_frame = frame_struct["FIRST_FRAME"] 423 | single_frame_mask = frame_struct["SINGLE_FRAME_MASK"] 424 | single_frame = 
frame_struct["SINGLE_FRAME"] 425 | conseq_frame_mask = frame_struct["CONSEQ_FRAME_MASK"] 426 | conseq_frame = frame_struct["CONSEQ_FRAME"] 427 | 428 | # split df_raw in two (incl/excl TP frames) 429 | df_raw_tp, df_raw_excl_tp, res_id_list, df_raw_pgns = self.identify_matching_ids(df_raw,res_id_list_full, bam_pgn) 430 | 431 | # initiate new df_raw that will contain both the df_raw excl. TP frames and subsequently all combined TP frames 432 | df_raw = [df_raw_excl_tp] 433 | 434 | # for NMEA, apply PGN decoding outside loop 435 | if self.tp_type == "nmea": 436 | df_raw_tp_pgns = df_raw_tp["ID"].apply(self.calculate_pgn) 437 | else: 438 | df_raw_tp_pgns = None 439 | 440 | # loop through each relevant TP response ID 441 | for res_id in res_id_list: 442 | 443 | # get subset of df_raw_tp containing res_id 444 | df_raw_tp_res_id = self.filter_df_raw_tp(df_raw_tp,df_raw_tp_pgns, res_id) 445 | 446 | # distinguish channels 447 | for channel, df_channel in df_raw_tp_res_id.groupby("BusChannel"): 448 | 449 | # distinguish IDs from PGNs by grouping on ID (or SA for J1939) 450 | for identifier, df_raw_filter in df_channel.groupby(frame_struct["group"]): 451 | base_frame = df_raw_filter.iloc[0] 452 | frame_list = [] 453 | frame_timestamp_list = [] 454 | payload_concatenated = [] 455 | 456 | ff_length = 0xFFF 457 | first_first_frame_test = True 458 | can_id = None 459 | conseq_frame_prev = None 460 | 461 | # iterate through rows in filtered dataframe 462 | for row in df_raw_filter.itertuples(index=True,name='Pandas'): 463 | index = row.Index 464 | first_frame_test = self.check_if_first_frame(row, bam_pgn, first_frame_mask,first_frame) 465 | first_byte = row.DataBytes[0] 466 | 467 | # if single frame, save frame directly (excl. 1st byte) 468 | if self.tp_type != "nmea" and (first_byte & single_frame_mask == single_frame): 469 | new_frame = self.construct_new_tp_frame(base_frame, row.DataBytes, row.ID) 470 | frame_list.append(new_frame.values.tolist()) 471 | frame_timestamp_list.append(index) 472 | 473 | # if first frame, save info from prior multi frame response sequence, 474 | # then initialize a new sequence incl. the first frame payload 475 | elif first_frame_test: 476 | # create a new frame using information from previous iterations 477 | if len(payload_concatenated) >= ff_length: 478 | new_frame = self.construct_new_tp_frame(base_frame, payload_concatenated, can_id) 479 | frame_list.append(new_frame.values.tolist()) 480 | frame_timestamp_list.append(frame_timestamp) 481 | 482 | # reset and start next frame with timestamp & CAN ID from this first frame plus initial payload 483 | conseq_frame_prev = None 484 | frame_timestamp = index 485 | 486 | if self.tp_type == "j1939": 487 | can_id = self.pgn_to_can_id(row) 488 | 489 | ff_length = self.get_payload_length(row) 490 | payload_concatenated = row.DataBytes[ff_payload_start:] 491 | 492 | # if consequtive frame, extend payload with payload excl. 
1st byte 493 | elif (conseq_frame_prev == None) or ((first_byte - conseq_frame_prev) == 1): 494 | conseq_frame_prev = first_byte 495 | payload_concatenated += row.DataBytes[1:] 496 | 497 | 498 | df_raw_res_id_new = pd.DataFrame(frame_list, columns=base_frame.index, index=frame_timestamp_list) 499 | df_raw.append(df_raw_res_id_new) 500 | 501 | df_raw = pd.concat(df_raw,join='outer') 502 | df_raw.index.name = "TimeStamp" 503 | df_raw = df_raw.sort_index() 504 | return df_raw 505 | -------------------------------------------------------------------------------- /examples/other/asammdf-basics/asammdf_basics.py: -------------------------------------------------------------------------------- 1 | """ 2 | About: Load MDF log files & DBCs from an input folder and showcase various operations. 3 | Note: Example assumes v7.5.0dev2 of asammdf 4 | """ 5 | from asammdf import MDF 6 | import matplotlib.pyplot as plt 7 | from datetime import timedelta 8 | import glob, sys, os 9 | from pathlib import Path 10 | 11 | 12 | # set variables 13 | mdf_extension = ".MF4" 14 | input_folder = "input" 15 | output_folder = "output" 16 | 17 | # load MDF/DBC files from input folder 18 | path = Path(__file__).parent.absolute() 19 | path_in = Path(path, input_folder) 20 | path_out = Path(path, output_folder) 21 | 22 | dbc_files = {"CAN": [(dbc, 0) for dbc in list(path_in.glob("*" + ".DBC"))]} 23 | logfiles = list(path_in.glob("*" + mdf_extension)) 24 | 25 | signals = ["EngineSpeed", "WheelBasedVehicleSpeed"] 26 | print("Log file(s): ", logfiles, "\nDBC(s): ", dbc_files) 27 | 28 | # concatenate MDF files from input folder and export as CSV incl. timestamps (localized time) 29 | mdf = MDF.concatenate(logfiles) 30 | mdf.save(Path(path_out, "conc"), overwrite=True) 31 | mdf.export("csv", filename=Path(path_out, "conc"), time_as_date=True, time_from_zero=False, single_time_base=True) 32 | 33 | # DBC convert the unfiltered MDF + save & export resampled data 34 | mdf_scaled = mdf.extract_bus_logging(dbc_files) 35 | 36 | mdf_scaled.save("scaled", overwrite=True) 37 | mdf_scaled.export( 38 | "csv", filename=Path(path_out, "scaled"), time_as_date=True, time_from_zero=False, single_time_base=True, raster=0.5, 39 | ) 40 | 41 | # extract a list of signals from a scaled MDF 42 | mdf_scaled_signal_list = mdf_scaled.select(signals) 43 | 44 | # extract a filtered MDF based on a signal list 45 | mdf_scaled_signals = mdf_scaled.filter(signals) 46 | 47 | # create pandas dataframe from the scaled MDF and e.g. add new signals 48 | pd = mdf_scaled.to_dataframe(time_as_date=True) 49 | pd["ratio"] = pd.loc[:, signals[0]] / pd.loc[:, signals[1]] 50 | # pd_f = pd.loc["2020-01-13 13:00:35":"2020-01-13 13:59:56"] 51 | # pd_f = pd_f[(pd_f[signals[0]] > 640)] 52 | # print("\nFiltered pandas dataframe:\n", pd_f) 53 | 54 | # trigger an action if a condition is satisfied 55 | signal_stats = pd[signals[0]].agg(["count", "min", "max", "mean", "std"]) 56 | signal_diff = signal_stats["max"] - signal_stats["min"] 57 | max_diff = 300 58 | 59 | if signal_diff > max_diff: 60 | print(f"Filtered {signals[0]} max difference of {signal_diff} is above {max_diff}") 61 | pd.plot(y=signals[0]) 62 | plt.savefig(Path(path_out, f"signal_{signals[0]}.png")) 63 | # do something, e.g. 
send a warning mail with a plot 64 | -------------------------------------------------------------------------------- /examples/other/asammdf-basics/input/00000001.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/asammdf-basics/input/00000001.MF4 -------------------------------------------------------------------------------- /examples/other/asammdf-basics/input/CSS-Electronics-SAE-J1939-DEMO.dbc: -------------------------------------------------------------------------------- 1 | VERSION "" 2 | 3 | 4 | NS_ : 5 | NS_DESC_ 6 | CM_ 7 | BA_DEF_ 8 | BA_ 9 | VAL_ 10 | CAT_DEF_ 11 | CAT_ 12 | FILTER 13 | BA_DEF_DEF_ 14 | EV_DATA_ 15 | ENVVAR_DATA_ 16 | SGTYPE_ 17 | SGTYPE_VAL_ 18 | BA_DEF_SGTYPE_ 19 | BA_SGTYPE_ 20 | SIG_TYPE_REF_ 21 | VAL_TABLE_ 22 | SIG_GROUP_ 23 | SIG_VALTYPE_ 24 | SIGTYPE_VALTYPE_ 25 | BO_TX_BU_ 26 | BA_DEF_REL_ 27 | BA_REL_ 28 | BA_DEF_DEF_REL_ 29 | BU_SG_REL_ 30 | BU_EV_REL_ 31 | BU_BO_REL_ 32 | SG_MUL_VAL_ 33 | 34 | BS_: 35 | 36 | BU_: 37 | 38 | 39 | BO_ 2364540158 EEC1: 8 Vector__XXX 40 | SG_ EngineSpeed : 24|16@1+ (0.125,0) [0|8031.875] "rpm" Vector__XXX 41 | 42 | BO_ 2566844926 CCVS1: 8 Vector__XXX 43 | SG_ WheelBasedVehicleSpeed : 8|16@1+ (0.00390625,0) [0|250.996] "km/h" Vector__XXX 44 | 45 | 46 | CM_ BO_ 2364540158 "Electronic Engine Controller 1"; 47 | CM_ SG_ 2364540158 EngineSpeed "Actual engine speed which is calculated over a minimum crankshaft angle of 720 degrees divided by the number of cylinders.…"; 48 | CM_ BO_ 2566844926 "Cruise Control/Vehicle Speed 1"; 49 | CM_ SG_ 2566844926 WheelBasedVehicleSpeed "Wheel-Based Vehicle Speed: Speed of the vehicle as calculated from wheel or tailshaft speed."; 50 | BA_DEF_ SG_ "SPN" INT 0 524287; 51 | BA_DEF_ BO_ "VFrameFormat" ENUM "StandardCAN","ExtendedCAN","reserved","J1939PG"; 52 | BA_DEF_ "DatabaseVersion" STRING ; 53 | BA_DEF_ "BusType" STRING ; 54 | BA_DEF_ "ProtocolType" STRING ; 55 | BA_DEF_ "DatabaseCompiler" STRING ; 56 | BA_DEF_DEF_ "SPN" 0; 57 | BA_DEF_DEF_ "VFrameFormat" "J1939PG"; 58 | BA_DEF_DEF_ "DatabaseVersion" ""; 59 | BA_DEF_DEF_ "BusType" ""; 60 | BA_DEF_DEF_ "ProtocolType" ""; 61 | BA_DEF_DEF_ "DatabaseCompiler" ""; 62 | BA_ "ProtocolType" "J1939"; 63 | BA_ "BusType" "CAN"; 64 | BA_ "DatabaseCompiler" "CSS ELECTRONICS (WWW.CSSELECTRONICS.COM)"; 65 | BA_ "DatabaseVersion" "1.0.0"; 66 | BA_ "VFrameFormat" BO_ 2364540158 3; 67 | BA_ "SPN" SG_ 2364540158 EngineSpeed 190; 68 | BA_ "SPN" SG_ 2566844926 WheelBasedVehicleSpeed 84; 69 | -------------------------------------------------------------------------------- /examples/other/asammdf-basics/requirements.txt: -------------------------------------------------------------------------------- 1 | asammdf==7.0.2 2 | attrs==21.4.0 3 | canmatrix==0.9.5 4 | click==8.1.2 5 | colorama==0.4.4 6 | cycler==0.11.0 7 | fonttools==4.32.0 8 | future==0.18.2 9 | importlib-metadata==4.11.3 10 | isal==0.11.1 11 | kiwisolver==1.4.2 12 | lxml==4.8.0 13 | lz4==4.0.0 14 | matplotlib==3.5.1 15 | numexpr==2.8.1 16 | numpy==1.21.6 17 | packaging==21.3 18 | pandas==1.3.5 19 | Pillow==9.1.0 20 | pyparsing==3.0.8 21 | python-dateutil==2.8.2 22 | pytz==2022.1 23 | six==1.16.0 24 | typing-extensions==4.1.1 25 | zipp==3.8.0 -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/LOG/2F6913DB/00001089/00000001-63BEF5CD.MF4: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/concatenate-mf4-by-period/LOG/2F6913DB/00001089/00000001-63BEF5CD.MF4 -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/LOG/2F6913DB/00001089/00000002-63BEFDF9.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/concatenate-mf4-by-period/LOG/2F6913DB/00001089/00000002-63BEFDF9.MF4 -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/LOG/5BC57FEC/00000001/00000005-643E503A.MFC: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/concatenate-mf4-by-period/LOG/5BC57FEC/00000001/00000005-643E503A.MFC -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/LOG/5BC57FEC/00000001/00000006-643E5292.MFC: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/concatenate-mf4-by-period/LOG/5BC57FEC/00000001/00000006-643E5292.MFC -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/LOG/5BC57FEC/00000001/00000007-643E54EA.MFC: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/concatenate-mf4-by-period/LOG/5BC57FEC/00000001/00000007-643E54EA.MFC -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/README.md: -------------------------------------------------------------------------------- 1 | # Concatenate MF4 files by period (+ optional DBC decoding) 2 | 3 | This script lets you process MF4 log files across multiple CANedge devices. The script does the following: 4 | 5 | 1. List all log files for a list of devices within a specific 'total period' 6 | 2. Specify a sub period length (e.g. 24 hours) 7 | 3. Identify log files pertaining to each sub period, concatenate them and save the result 8 | 4. Optionally, the output file can be DBC decoded before it is saved 9 | 5. Saved files are named based on the 1st and last timestamp, e.g. `221213-0612-to-221213-1506.mf4` 10 | 11 | The data can be fetched from an absolute input path on local disk (e.g. the `LOG/` folder on an SD card) and saved locally. 12 | 13 | Alternatively, the files can be loaded/saved directly from/to S3 buckets. This requires that you map your S3 input/output bucket(s) using [TntDrive](https://canlogger.csselectronics.com/canedge-getting-started/ce2/transfer-data/server-tools/other-s3-tools/). 14 | 15 | 16 | ## Installation 17 | 18 | See the README in the root of the api-examples repository. 19 | 20 | ## Regarding file structure 21 | 22 | Note that the script relies on the `canedge_browser` module to list log files. In order for this to work, your log files must be structured correctly, i.e. `<device_id>/<session>/<split>.MF4` (matching the `LOG/` folder included with this example). 
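For reference, below is a minimal sketch for verifying that your structure is listed correctly. It uses the same `canedge_browser` calls as `concatenate_mf4_by_period.py`; the device ID and period are placeholders taken from this example's `LOG/` folder:

```
import canedge_browser
from datetime import datetime, timezone
from pathlib import Path

# point the canedge_browser filesystem at the folder containing <device_id>/<session>/<split>.MF4
path_input = Path(__file__).parent / "LOG"
fs = canedge_browser.LocalFileSystem(base_path=path_input)

# list log files for one device within a period (paths are returned with a leading slash,
# which is why the main script joins them via log_file[1:])
log_files = canedge_browser.get_log_files(
    fs,
    "2F6913DB",
    start_date=datetime(year=2023, month=1, day=1, tzinfo=timezone.utc),
    stop_date=datetime(year=2023, month=12, day=31, tzinfo=timezone.utc),
)
print(log_files)
```

If this prints an empty list, the folder structure (or the chosen period) is the likely culprit.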
23 | 24 | ## Processing compressed (*.MFC) files 25 | 26 | The script can also be used to process compressed files. To do so, set `finalize_log_files = True`. In this case, the script uses the `mdf2finalized.exe` MF4 converter to convert the MFC files to MF4 and output them to a temporary folder. The script then loads the MF4 files, processes them and deletes the temporary folder in each iteration. 27 | 28 | ## Dynamic script automation 29 | 30 | The script can be easily modified to run in a dynamic/automated way. For example, you can update the `period_start` and `period_stop` as below and set up a daily task (e.g. via Windows Task Scheduler) to execute the script via a `.bat` file. 31 | 32 | ``` 33 | period_start = datetime.now(timezone.utc) - timedelta(days=1) 34 | period_stop = datetime(year=2030, month=1, day=1, hour=1, tzinfo=timezone.utc) 35 | ``` -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/concatenate_mf4_by_period.py: -------------------------------------------------------------------------------- 1 | """ 2 | About: List MF4 log files by period using the CANedge Python API 3 | and concatenate them into 'combined' MF4 files using the asammdf Python API. 4 | Optionally use TntDrive to map an S3 server as a local drive to work with S3 directly: 5 | https://canlogger.csselectronics.com/canedge-getting-started/transfer-data/server-tools/other-s3-tools/ 6 | """ 7 | import canedge_browser 8 | from asammdf import MDF 9 | from datetime import datetime, timezone, timedelta 10 | from pathlib import Path 11 | from concatenate_utils import extract_mdf_start_stop_time, hour_rounder, finalize_log_files 12 | import sys, os, shutil 13 | import gc 14 | 15 | path_script = Path(__file__).parent.absolute() 16 | 17 | # specify input paths for MF4 files (e.g. on Windows Path("D:\\LOG") for SD, Path("Z:\\") for mapped S3 bucket, 18 | # path_script / "LOG" for relative folder, C:\\Users\\myuser\\folder\\subfolder\\LOG for absolute path, ...) 19 | path_input_orig = path_script / "LOG" 20 | 21 | # specify devices to process from path_input 22 | devices = ["2F6913DB", "5BC57FEC"] 23 | 24 | # specify output path (e.g. another mapped S3 bucket, local disk, ...) 
25 | path_output = path_script / "mf4-output/concatenated" 26 | path_output_temp = path_script / "mf4-output/temp" 27 | 28 | # optionally finalize files (if *.MFC) and DBC decode them 29 | enable_dbc_decoding = False 30 | enable_mf4_compression = True 31 | path_dbc_files = path_script / "dbc_files" 32 | path_mdf2finalized = path_script / "mdf2finalized.exe" 33 | 34 | # specify which period you wish to process and the max period length of each concatenated log file 35 | period_start = datetime(year=2023, month=1, day=1, hour=2, tzinfo=timezone.utc) 36 | period_stop = datetime(year=2023, month=12, day=31, hour=2, tzinfo=timezone.utc) 37 | file_length_hours = 24 38 | 39 | # ---------------------------------------- 40 | 41 | dbc_files = {"CAN": [(dbc, 0) for dbc in list(path_dbc_files.glob("*" + ".DBC"))]} 42 | 43 | for device in devices: 44 | path_input = path_input_orig 45 | fs = canedge_browser.LocalFileSystem(base_path=path_input) 46 | 47 | cnt_sub_period = 0 48 | sub_period_start = period_start 49 | sub_period_stop = period_start 50 | files_to_skip = [] 51 | 52 | log_files_total = canedge_browser.get_log_files(fs, device, start_date=period_start,stop_date=period_stop) 53 | log_files_total = [path_input.joinpath(log_file[1:]) for log_file in log_files_total] 54 | 55 | print(f"\n-----------\nProcessing device {device} | sub period length: {file_length_hours} hours | start: {period_start} | stop: {period_stop} \n{len(log_files_total)} log file(s): ",log_files_total) 56 | 57 | # check whether to update sub_period_start to equal 2nd log file start for efficiency 58 | if len(log_files_total) == 0: 59 | print("Skipping device") 60 | continue 61 | 62 | # finalize all files, then update the filesystem fs and path_input to the temp_finalized sub folder 63 | log_files_total = finalize_log_files(log_files_total, path_output_temp, path_mdf2finalized) 64 | fs = canedge_browser.LocalFileSystem(base_path=path_input / path_output_temp.parent / "temp_finalized") 65 | path_input = path_input / path_output_temp.parent / "temp_finalized" 66 | 67 | # extract first_log_file 68 | first_log_file = log_files_total[0] 69 | 70 | mdf = MDF(first_log_file) 71 | 72 | mdf_start, mdf_stop = extract_mdf_start_stop_time(mdf) 73 | 74 | if mdf_stop < sub_period_start: 75 | print("First log file is before period start (skip): ", log_files_total[0]) 76 | files_to_skip.append(log_files_total[0]) 77 | if len(log_files_total) == 1: 78 | continue 79 | elif len(log_files_total) > 1: 80 | mdf = MDF(log_files_total[1]) 81 | mdf_start, mdf_stop = extract_mdf_start_stop_time(mdf) 82 | sub_period_start = hour_rounder(mdf_start) 83 | print(f"Period start updated to {sub_period_start}") 84 | 85 | # process each sub period for the device 86 | while sub_period_stop <= period_stop: 87 | cnt_sub_period += 1 88 | sub_period_stop = sub_period_start + timedelta(hours=file_length_hours) 89 | 90 | # list log files for the sub period 91 | log_files_orig_path = canedge_browser.get_log_files(fs, device, start_date=sub_period_start,stop_date=sub_period_stop) 92 | log_files_orig_path = [path_input.joinpath(log_file[1:]) for log_file in log_files_orig_path] 93 | log_files = [log_file for log_file in log_files_orig_path if log_file not in files_to_skip] 94 | 95 | if len(log_files) > 0: 96 | print(f"\n- Sub period #{cnt_sub_period} \t\t\t| start: {sub_period_start} | stop: {sub_period_stop} \n- {len(log_files)} log file(s): ", log_files) 97 | 98 | if len(log_files) == 0: 99 | sub_period_start = sub_period_stop 100 | continue 101 | 102 | # concatenate all 
sub period files and compute the start/stop offsets in seconds 103 | mdf = MDF.concatenate(log_files) 104 | mdf_start, mdf_stop = extract_mdf_start_stop_time(mdf) 105 | mdf_header_start = mdf.header.start_time 106 | start_delta = (sub_period_start - mdf_header_start).total_seconds() 107 | stop_delta = (sub_period_stop - mdf_header_start).total_seconds() 108 | print(f"- Concatenated MF4 created (pre cut)\t| start: {mdf_start} | stop: {mdf_stop}") 109 | 110 | # cut the log file to only include the intended period 111 | mdf = mdf.cut(start=start_delta, stop=stop_delta, whence=0,include_ends=False, time_from_zero=False) 112 | mdf_start, mdf_stop = extract_mdf_start_stop_time(mdf) 113 | 114 | # convert the start/stop time to string format for file-saving 115 | mdf_start_str = mdf_start.strftime(f"%y%m%d-%H%M") 116 | mdf_stop_str = mdf_stop.strftime(f"%y%m%d-%H%M") 117 | output_file_name = f"{device}/{mdf_start_str}-to-{mdf_stop_str}.MF4" 118 | path_output_file = path_output / output_file_name 119 | 120 | # DBC decode the data before saving 121 | if enable_dbc_decoding: 122 | mdf = mdf.extract_bus_logging(dbc_files) 123 | 124 | # save the cut MF4 to local disk 125 | mdf.save(path_output_file, overwrite=True, compression=enable_mf4_compression) 126 | print(f"- Concatenated MF4 saved (cut)\t\t| start: {mdf_start} | stop: {mdf_stop} \n- Output path: {path_output_file}") 127 | 128 | # clear MDF 129 | mdf.close() 130 | del mdf 131 | gc.collect() 132 | 133 | # if temp folder is used, clear it 134 | if os.path.exists(path_output_temp): 135 | print("- Deleting temporary folder") 136 | shutil.rmtree(path_output_temp) 137 | 138 | # check if the last log file is fully within sub period (i.e. skip it during next cycle) 139 | if mdf_stop < sub_period_stop: 140 | files_to_skip.append(log_files_orig_path[-1]) 141 | 142 | if log_files_orig_path[-1] == log_files_total[-1]: 143 | print(f"- Completed processing device {device}") 144 | break 145 | 146 | # update sub period start 147 | sub_period_start = sub_period_stop -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/concatenate_utils.py: -------------------------------------------------------------------------------- 1 | def extract_mdf_start_stop_time(mdf): 2 | from datetime import timedelta 3 | 4 | # function to identify start/stop timestamp of concatenated log file 5 | df_raw_asam = mdf.to_dataframe(time_as_date=True) 6 | mdf_start = df_raw_asam.index[0] 7 | mdf_stop = df_raw_asam.index[-1] 8 | 9 | return mdf_start, mdf_stop 10 | 11 | def hour_rounder(t): 12 | from datetime import timedelta 13 | 14 | # Rounds to nearest hour by adding a timedelta hour if minute >= 30 15 | return (t.replace(second=0, microsecond=0, minute=0, hour=t.hour) 16 | +timedelta(hours=t.minute//30)) 17 | 18 | 19 | def finalize_log_files(log_files, path_output_temp, path_mdf2finalized): 20 | import subprocess 21 | from pathlib import Path 22 | import glob 23 | import shutil 24 | 25 | path_output_temp_finalized = path_output_temp.parent / "temp_finalized" 26 | 27 | for log_file in log_files: 28 | path_output_file_temp_name = Path(*log_file.parts[-3:][0:2]) 29 | 30 | # create directory for finalized files 31 | try: 32 | Path(path_output_temp_finalized / path_output_file_temp_name).mkdir(parents=True, exist_ok=True) 33 | except Exception as e: 34 | print(e) 35 | 36 | # create directory for unfinalized files 37 | try: 38 | Path(path_output_temp / path_output_file_temp_name).mkdir(parents=True, exist_ok=True) 39 | except 
Exception as e: 40 | print(e) 41 | 42 | # copy log file to local disk first 43 | shutil.copy(log_file, path_output_temp / path_output_file_temp_name) 44 | 45 | # finalize the copied file 46 | subprocess.run( 47 | [ 48 | path_mdf2finalized, 49 | "-i", 50 | path_output_temp / path_output_file_temp_name / log_file.name, 51 | "-O", 52 | path_output_temp_finalized / path_output_file_temp_name, 53 | ] 54 | ) 55 | 56 | log_files = list(path_output_temp_finalized.glob("**/*.MF4")) 57 | 58 | return log_files 59 | -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/dbc_files/gnss.dbc: -------------------------------------------------------------------------------- 1 | VERSION "" 2 | 3 | 4 | NS_ : 5 | NS_DESC_ 6 | CM_ 7 | BA_DEF_ 8 | BA_ 9 | VAL_ 10 | CAT_DEF_ 11 | CAT_ 12 | FILTER 13 | BA_DEF_DEF_ 14 | EV_DATA_ 15 | ENVVAR_DATA_ 16 | SGTYPE_ 17 | SGTYPE_VAL_ 18 | BA_DEF_SGTYPE_ 19 | BA_SGTYPE_ 20 | SIG_TYPE_REF_ 21 | VAL_TABLE_ 22 | SIG_GROUP_ 23 | SIG_VALTYPE_ 24 | SIGTYPE_VALTYPE_ 25 | BO_TX_BU_ 26 | BA_DEF_REL_ 27 | BA_REL_ 28 | BA_DEF_DEF_REL_ 29 | BU_SG_REL_ 30 | BU_EV_REL_ 31 | BU_BO_REL_ 32 | SG_MUL_VAL_ 33 | 34 | BS_: 35 | 36 | BU_: 37 | 38 | 39 | BO_ 3221225472 VECTOR__INDEPENDENT_SIG_MSG: 0 Vector__XXX 40 | SG_ StateOfChargeBMS : 0|8@0+ (0.5,3) [0|100] "%" Vector__XXX 41 | SG_ response : 0|16@0+ (1,0) [0|0] "unit" Vector__XXX 42 | SG_ service : 0|8@0+ (1,0) [0|0] "" Vector__XXX 43 | SG_ MinCellVoltage : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 44 | SG_ MaxCellVoltage : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 45 | SG_ StateOfChargeDisplay : 0|8@0+ (0.5,3) [0|0] "%" Vector__XXX 46 | SG_ BatteryTemperature1 : 0|8@0- (1,0) [0|0] "degC" Vector__XXX 47 | SG_ BatteryTemperature2 : 0|8@0- (1,0) [0|0] "degC" Vector__XXX 48 | SG_ BatteryTemperature3 : 0|8@0- (1,0) [0|0] "degC" Vector__XXX 49 | SG_ BatteryTemperature4 : 0|8@0- (1,0) [0|0] "degC" Vector__XXX 50 | SG_ BatteryMinTemperature : 0|8@0- (1,0) [0|0] "degC" Vector__XXX 51 | SG_ BatteryMaxTemperature : 0|8@0- (1,0) [0|0] "degC" Vector__XXX 52 | SG_ BatteryHeaterTemperature1 : 0|8@0- (1,0) [0|0] "degC" Vector__XXX 53 | SG_ BatteryVoltageAuxillary : 0|8@0+ (0.1,0) [11|14.6] "V" Vector__XXX 54 | SG_ BatteryTemperature5 : 0|8@0- (1,0) [0|0.5] "V" Vector__XXX 55 | SG_ BatteryFanFeedback : 0|8@0+ (1,0) [0|120] "Hz" Vector__XXX 56 | SG_ BatteryFanStatus : 0|8@0+ (1,0) [0|9] "" Vector__XXX 57 | SG_ NormalChargePort : 0|1@0+ (1,0) [0|0] "" Vector__XXX 58 | SG_ RapidChargePort : 0|1@0+ (1,0) [0|0] "" Vector__XXX 59 | SG_ OperatingTime : 0|32@0+ (0.00027777777,0) [0|1000000] "hours" Vector__XXX 60 | SG_ MinDeterioration : 0|16@0+ (0.1,0) [0|100] "%" Vector__XXX 61 | SG_ MinDeteriorationCellNo : 0|8@0+ (1,0) [0|98] "" Vector__XXX 62 | SG_ MinCellVoltageCellNo : 0|8@0+ (1,0) [0|0] "" Vector__XXX 63 | SG_ MaxCellVoltageCellNo : 0|8@0+ (1,0) [0|0] "" Vector__XXX 64 | SG_ Charging : 0|1@0+ (1,0) [0|0] "" Vector__XXX 65 | SG_ CCC_CumulativeChargeCurrent : 0|32@0+ (0.1,0) [0|1000000] "Ah" Vector__XXX 66 | SG_ CDC_CumulativeDischargeCurrent : 0|32@0+ (0.1,0) [0|1000000] "Ah" Vector__XXX 67 | SG_ CEC_CumulativeEnergyCharged : 0|32@0+ (0.1,0) [0|1000000] "kWh" Vector__XXX 68 | SG_ CED_CumulativeEnergyDischarged : 0|32@0+ (0.1,0) [0|1000000] "kWh" Vector__XXX 69 | SG_ BMSMainRelay : 0|1@0+ (1,0) [0|0] "" Vector__XXX 70 | SG_ BMSIgnition : 0|1@0+ (1,0) [0|0] "" Vector__XXX 71 | SG_ BatteryDCVoltage : 0|16@0+ (0.1,0) [0|0] "V" Vector__XXX 72 | SG_ BatteryCurrent : 0|16@0- (0.1,0) [-230|230] "A" 
Vector__XXX 73 | SG_ BatteryAvailableChargePower : 0|16@0+ (0.01,0) [0|270] "kW" Vector__XXX 74 | SG_ BatteryAvailableDischargePower : 0|16@0+ (0.01,0) [0|270] "kW" Vector__XXX 75 | SG_ CellVoltage01 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 76 | SG_ CellVoltage02 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 77 | SG_ CellVoltage03 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 78 | SG_ CellVoltage04 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 79 | SG_ CellVoltage05 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 80 | SG_ CellVoltage06 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 81 | SG_ CellVoltage07 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 82 | SG_ CellVoltage08 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 83 | SG_ CellVoltage09 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 84 | SG_ CellVoltage10 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 85 | SG_ CellVoltage11 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 86 | SG_ CellVoltage12 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 87 | SG_ CellVoltage13 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 88 | SG_ CellVoltage14 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 89 | SG_ CellVoltage15 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 90 | SG_ CellVoltage16 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 91 | SG_ CellVoltage17 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 92 | SG_ CellVoltage18 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 93 | SG_ CellVoltage19 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 94 | SG_ CellVoltage20 : 0|8@0+ (0.02,0) [2.8|4.2] "V" Vector__XXX 95 | SG_ StateOfHealth : 0|16@0+ (0.1,0) [0|100] "%" Vector__XXX 96 | SG_ IndoorTemperature : 0|8@0+ (0.5,-40) [-50|50] "degC" Vector__XXX 97 | SG_ response : 0|16@0+ (1,0) [0|0] "unit" Vector__XXX 98 | SG_ service : 0|8@0+ (1,0) [0|0] "" Vector__XXX 99 | SG_ OutdoorTemperature : 0|8@0+ (0.5,-40) [-50|50] "degC" Vector__XXX 100 | SG_ VehicleSpeed : 0|8@0+ (1,0) [0|200] "kmh" Vector__XXX 101 | SG_ response : 0|16@0+ (1,0) [0|0] "" Vector__XXX 102 | SG_ service : 0|8@0+ (1,0) [0|0] "" Vector__XXX 103 | SG_ TirePressureFrontLeft : 0|8@0+ (0.2,0) [0|120] "psi" Vector__XXX 104 | SG_ TirePressureFrontRight : 0|8@0+ (0.2,0) [0|120] "psi" Vector__XXX 105 | SG_ TirePressureBackLeft : 0|8@0+ (0.2,0) [0|120] "psi" Vector__XXX 106 | SG_ TirePressureBackRight : 0|8@0+ (0.2,0) [0|120] "psi" Vector__XXX 107 | SG_ TireTemperatureFrontLeft : 0|8@0+ (1,-50) [-40|65] "degC" Vector__XXX 108 | SG_ TireTemperatureFrontRight : 0|8@0+ (1,-50) [-40|65] "degC" Vector__XXX 109 | SG_ TireTemperatureBackLeft : 0|8@0+ (1,-50) [-40|65] "degC" Vector__XXX 110 | SG_ TireTemperatureBackRight : 0|8@0+ (1,-50) [-40|65] "degC" Vector__XXX 111 | 112 | BO_ 3 gnss_pos: 8 Vector__XXX 113 | SG_ PositionAccuracy : 58|6@1+ (1,0) [0|63] "m" Vector__XXX 114 | SG_ Latitude : 1|28@1+ (1E-006,-90) [-90|90] "deg" Vector__XXX 115 | SG_ Longitude : 29|29@1+ (1E-006,-180) [-180|180] "deg" Vector__XXX 116 | SG_ PositionValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 117 | 118 | BO_ 2 gnss_time: 6 Vector__XXX 119 | SG_ TimeValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 120 | SG_ TimeConfirmed : 1|1@1+ (1,0) [0|1] "" Vector__XXX 121 | SG_ Epoch : 8|40@1+ (0.001,1577840400) [1577840400|2677352027] "sec" Vector__XXX 122 | 123 | BO_ 5 gnss_attitude: 8 Vector__XXX 124 | SG_ AttitudeValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 125 | SG_ Roll : 1|12@1+ (0.1,-180) [-180|180] "deg" Vector__XXX 126 | SG_ Pitch : 22|12@1+ (0.1,-90) [-90|90] "deg" Vector__XXX 127 | SG_ Heading : 43|12@1+ (0.1,0) [0|360] "deg" Vector__XXX 128 | SG_ RollAccuracy : 13|9@1+ (0.1,0) [0|50] "deg" 
Vector__XXX 129 | SG_ PitchAccuracy : 34|9@1+ (0.1,0) [0|50] "deg" Vector__XXX 130 | SG_ HeadingAccuracy : 55|9@1+ (0.1,0) [0|50] "deg" Vector__XXX 131 | 132 | BO_ 6 gnss_odo: 8 Vector__XXX 133 | SG_ DistanceTrip : 1|22@1+ (1,0) [0|4194303] "m" Vector__XXX 134 | SG_ DistanceAccuracy : 23|19@1+ (1,0) [0|524287] "m" Vector__XXX 135 | SG_ DistanceValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 136 | SG_ DistanceTotal : 42|22@1+ (1,0) [0|4194303] "km" Vector__XXX 137 | 138 | BO_ 1 gnss_status: 1 Vector__XXX 139 | SG_ FixType : 0|3@1+ (1,0) [0|5] "" Vector__XXX 140 | SG_ Satellites : 3|5@1+ (1,0) [0|31] "" Vector__XXX 141 | 142 | BO_ 4 gnss_altitude: 4 Vector__XXX 143 | SG_ AltitudeValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 144 | SG_ Altitude : 1|18@1+ (0.1,-6000) [-6000|20000] "m" Vector__XXX 145 | SG_ AltitudeAccuracy : 19|13@1+ (1,0) [0|8000] "m" Vector__XXX 146 | 147 | BO_ 8 gnss_geofence: 2 Vector__XXX 148 | SG_ FenceValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 149 | SG_ FenceCombined : 1|2@1+ (1,0) [0|1] "" Vector__XXX 150 | SG_ Fence1 : 8|2@1+ (1,0) [0|1] "" Vector__XXX 151 | SG_ Fence2 : 10|2@1+ (1,0) [0|1] "" Vector__XXX 152 | SG_ Fence3 : 12|2@1+ (1,0) [0|1] "" Vector__XXX 153 | SG_ Fence4 : 14|2@1+ (1,0) [0|1] "" Vector__XXX 154 | 155 | BO_ 7 gnss_speed: 5 Vector__XXX 156 | SG_ Speed : 1|20@1+ (0.001,0) [0|1048.575] "m/s" Vector__XXX 157 | SG_ SpeedAccuracy : 21|19@1+ (0.001,0) [0|524.287] "m/s" Vector__XXX 158 | SG_ SpeedValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 159 | 160 | BO_ 9 gnss_imu: 8 Vector__XXX 161 | SG_ AccelerationX : 1|10@1+ (0.125,-64) [-64|63.875] "m/s^2" Vector__XXX 162 | SG_ AccelerationY : 11|10@1+ (0.125,-64) [-64|63.875] "m/s^2" Vector__XXX 163 | SG_ AccelerationZ : 21|10@1+ (0.125,-64) [-64|63.875] "m/s^2" Vector__XXX 164 | SG_ AngularRateX : 31|11@1+ (0.25,-256) [-256|255.75] "deg/s" Vector__XXX 165 | SG_ AngularRateY : 42|11@1+ (0.25,-256) [-256|255.75] "deg/s" Vector__XXX 166 | SG_ AngularRateZ : 53|11@1+ (0.25,-256) [-256|255.75] "deg/s" Vector__XXX 167 | SG_ ImuValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 168 | 169 | BO_ 19 gnss_pos: 8 Vector__XXX 170 | SG_ PositionAccuracy : 58|6@1+ (1,0) [0|63] "m" Vector__XXX 171 | SG_ Latitude : 1|28@1+ (1E-006,-90) [-90|90] "deg" Vector__XXX 172 | SG_ Longitude : 29|29@1+ (1E-006,-180) [-180|180] "deg" Vector__XXX 173 | SG_ PositionValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 174 | 175 | BO_ 18 gnss_time: 6 Vector__XXX 176 | SG_ TimeValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 177 | SG_ TimeConfirmed : 1|1@1+ (1,0) [0|1] "" Vector__XXX 178 | SG_ Epoch : 8|40@1+ (0.001,1577840400) [1577840400|2677352027] "sec" Vector__XXX 179 | 180 | BO_ 21 gnss_attitude: 8 Vector__XXX 181 | SG_ AttitudeValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 182 | SG_ Roll : 1|12@1+ (0.1,-180) [-180|180] "deg" Vector__XXX 183 | SG_ Pitch : 22|12@1+ (0.1,-90) [-90|90] "deg" Vector__XXX 184 | SG_ Heading : 43|12@1+ (0.1,0) [0|360] "deg" Vector__XXX 185 | SG_ RollAccuracy : 13|9@1+ (0.1,0) [0|50] "deg" Vector__XXX 186 | SG_ PitchAccuracy : 34|9@1+ (0.1,0) [0|50] "deg" Vector__XXX 187 | SG_ HeadingAccuracy : 55|9@1+ (0.1,0) [0|50] "deg" Vector__XXX 188 | 189 | BO_ 22 gnss_odo: 8 Vector__XXX 190 | SG_ DistanceTrip : 1|22@1+ (1,0) [0|4194303] "m" Vector__XXX 191 | SG_ DistanceAccuracy : 23|19@1+ (1,0) [0|524287] "m" Vector__XXX 192 | SG_ DistanceValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 193 | SG_ DistanceTotal : 42|22@1+ (1,0) [0|4194303] "km" Vector__XXX 194 | 195 | BO_ 17 gnss_status: 1 Vector__XXX 196 | SG_ FixType : 0|3@1+ (1,0) [0|5] "" Vector__XXX 197 | SG_ Satellites : 
3|5@1+ (1,0) [0|31] "" Vector__XXX 198 | 199 | BO_ 20 gnss_altitude: 4 Vector__XXX 200 | SG_ AltitudeValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 201 | SG_ Altitude : 1|18@1+ (0.1,-6000) [-6000|20000] "m" Vector__XXX 202 | SG_ AltitudeAccuracy : 19|13@1+ (1,0) [0|8000] "m" Vector__XXX 203 | 204 | BO_ 24 gnss_geofence: 2 Vector__XXX 205 | SG_ FenceValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 206 | SG_ FenceCombined : 1|2@1+ (1,0) [0|1] "" Vector__XXX 207 | SG_ Fence1 : 8|2@1+ (1,0) [0|1] "" Vector__XXX 208 | SG_ Fence2 : 10|2@1+ (1,0) [0|1] "" Vector__XXX 209 | SG_ Fence3 : 12|2@1+ (1,0) [0|1] "" Vector__XXX 210 | SG_ Fence4 : 14|2@1+ (1,0) [0|1] "" Vector__XXX 211 | 212 | BO_ 23 gnss_speed: 5 Vector__XXX 213 | SG_ Speed : 1|20@1+ (0.001,0) [0|1048.575] "m/s" Vector__XXX 214 | SG_ SpeedAccuracy : 21|19@1+ (0.001,0) [0|524.287] "m/s" Vector__XXX 215 | SG_ SpeedValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 216 | 217 | BO_ 25 gnss_imu: 8 Vector__XXX 218 | SG_ AccelerationX : 1|10@1+ (0.125,-64) [-64|63.875] "m/s^2" Vector__XXX 219 | SG_ AccelerationY : 11|10@1+ (0.125,-64) [-64|63.875] "m/s^2" Vector__XXX 220 | SG_ AccelerationZ : 21|10@1+ (0.125,-64) [-64|63.875] "m/s^2" Vector__XXX 221 | SG_ AngularRateX : 31|11@1+ (0.25,-256) [-256|255.75] "deg/s" Vector__XXX 222 | SG_ AngularRateY : 42|11@1+ (0.25,-256) [-256|255.75] "deg/s" Vector__XXX 223 | SG_ AngularRateZ : 53|11@1+ (0.25,-256) [-256|255.75] "deg/s" Vector__XXX 224 | SG_ ImuValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 225 | 226 | 227 | 228 | CM_ BO_ 3221225472 "This is a message for not used signals, created by Vector CANdb++ DBC OLE DB Provider."; 229 | CM_ BO_ 3 "GNSS position"; 230 | CM_ SG_ 3 PositionAccuracy "Accuracy of position"; 231 | CM_ SG_ 3 Latitude "Latitude"; 232 | CM_ SG_ 3 Longitude "Longitude"; 233 | CM_ SG_ 3 PositionValid "Position validity"; 234 | CM_ BO_ 2 "GNSS time"; 235 | CM_ SG_ 2 TimeValid "Time validity"; 236 | CM_ SG_ 2 TimeConfirmed "Time confirmed"; 237 | CM_ SG_ 2 Epoch "Epoch time"; 238 | CM_ BO_ 5 "GNSS attitude"; 239 | CM_ SG_ 5 AttitudeValid "Attitude validity"; 240 | CM_ SG_ 5 Roll "Vehicle roll"; 241 | CM_ SG_ 5 Pitch "Vehicle pitch"; 242 | CM_ SG_ 5 Heading "Vehicle heading"; 243 | CM_ SG_ 5 RollAccuracy "Vehicle roll accuracy"; 244 | CM_ SG_ 5 PitchAccuracy "Vehicle pitch accuracy"; 245 | CM_ SG_ 5 HeadingAccuracy "Vehicle heading accuracy"; 246 | CM_ BO_ 6 "GNSS odometer"; 247 | CM_ SG_ 6 DistanceTrip "Distance traveled since last reset"; 248 | CM_ SG_ 6 DistanceAccuracy "Distance accuracy (1-sigma)"; 249 | CM_ SG_ 6 DistanceTotal "Distance traveled in total"; 250 | CM_ BO_ 1 "GNSS information"; 251 | CM_ SG_ 1 FixType "Fix type"; 252 | CM_ SG_ 1 Satellites "Number of satellites used"; 253 | CM_ BO_ 4 "GNSS altitude"; 254 | CM_ SG_ 4 AltitudeValid "Altitude validity"; 255 | CM_ SG_ 4 Altitude "Altitude"; 256 | CM_ SG_ 4 AltitudeAccuracy "Accuracy of altitude"; 257 | CM_ BO_ 8 "GNSS geofence(s)"; 258 | CM_ SG_ 8 FenceValid "Geofencing status"; 259 | CM_ SG_ 8 FenceCombined "Combined (logical OR) state of all geofences"; 260 | CM_ SG_ 8 Fence1 "Geofence 1 state"; 261 | CM_ SG_ 8 Fence2 "Geofence 2 state"; 262 | CM_ SG_ 8 Fence3 "Geofence 3 state"; 263 | CM_ SG_ 8 Fence4 "Geofence 4 state"; 264 | CM_ BO_ 7 "GNSS speed"; 265 | CM_ SG_ 7 Speed "Speed"; 266 | CM_ SG_ 7 SpeedAccuracy "Speed accuracy"; 267 | CM_ BO_ 9 "GNSS IMU"; 268 | CM_ SG_ 9 AccelerationX "X acceleration with a resolution of 0.125 m/s^2"; 269 | CM_ SG_ 9 AccelerationY "Y acceleration with a resolution of 0.125 m/s^2"; 270 | CM_ SG_ 9 
AccelerationZ "Z acceleration with a resolution of 0.125 m/s^2"; 271 | CM_ SG_ 9 AngularRateX "X angular rate with a resolution of 0.25 deg/s"; 272 | CM_ SG_ 9 AngularRateY "Y angular rate with a resolution of 0.25 deg/s"; 273 | CM_ SG_ 9 AngularRateZ "Z angular rate with a resolution of 0.25 deg/s"; 274 | CM_ BO_ 19 "GNSS position"; 275 | CM_ SG_ 19 PositionAccuracy "Accuracy of position"; 276 | CM_ SG_ 19 Latitude "Latitude"; 277 | CM_ SG_ 19 Longitude "Longitude"; 278 | CM_ SG_ 19 PositionValid "Position validity"; 279 | CM_ BO_ 18 "GNSS time"; 280 | CM_ SG_ 18 TimeValid "Time validity"; 281 | CM_ SG_ 18 TimeConfirmed "Time confirmed"; 282 | CM_ SG_ 18 Epoch "Epoch time"; 283 | CM_ BO_ 21 "GNSS attitude"; 284 | CM_ SG_ 21 AttitudeValid "Attitude validity"; 285 | CM_ SG_ 21 Roll "Vehicle roll"; 286 | CM_ SG_ 21 Pitch "Vehicle pitch"; 287 | CM_ SG_ 21 Heading "Vehicle heading"; 288 | CM_ SG_ 21 RollAccuracy "Vehicle roll accuracy"; 289 | CM_ SG_ 21 PitchAccuracy "Vehicle pitch accuracy"; 290 | CM_ SG_ 21 HeadingAccuracy "Vehicle heading accuracy"; 291 | CM_ BO_ 22 "GNSS odometer"; 292 | CM_ SG_ 22 DistanceTrip "Distance traveled since last reset"; 293 | CM_ SG_ 22 DistanceAccuracy "Distance accuracy (1-sigma)"; 294 | CM_ SG_ 22 DistanceTotal "Distance traveled in total"; 295 | CM_ BO_ 17 "GNSS information"; 296 | CM_ SG_ 17 FixType "Fix type"; 297 | CM_ SG_ 17 Satellites "Number of satellites used"; 298 | CM_ BO_ 20 "GNSS altitude"; 299 | CM_ SG_ 20 AltitudeValid "Altitude validity"; 300 | CM_ SG_ 20 Altitude "Altitude"; 301 | CM_ SG_ 20 AltitudeAccuracy "Accuracy of altitude"; 302 | CM_ BO_ 24 "GNSS geofence(s)"; 303 | CM_ SG_ 24 FenceValid "Geofencing status"; 304 | CM_ SG_ 24 FenceCombined "Combined (logical OR) state of all geofences"; 305 | CM_ SG_ 24 Fence1 "Geofence 1 state"; 306 | CM_ SG_ 24 Fence2 "Geofence 2 state"; 307 | CM_ SG_ 24 Fence3 "Geofence 3 state"; 308 | CM_ SG_ 24 Fence4 "Geofence 4 state"; 309 | CM_ BO_ 23 "GNSS speed"; 310 | CM_ SG_ 23 Speed "Speed"; 311 | CM_ SG_ 23 SpeedAccuracy "Speed accuracy"; 312 | CM_ BO_ 25 "GNSS IMU"; 313 | CM_ SG_ 25 AccelerationX "X acceleration with a resolution of 0.125 m/s^2"; 314 | CM_ SG_ 25 AccelerationY "Y acceleration with a resolution of 0.125 m/s^2"; 315 | CM_ SG_ 25 AccelerationZ "Z acceleration with a resolution of 0.125 m/s^2"; 316 | CM_ SG_ 25 AngularRateX "X angular rate with a resolution of 0.25 deg/s"; 317 | CM_ SG_ 25 AngularRateY "Y angular rate with a resolution of 0.25 deg/s"; 318 | CM_ SG_ 25 AngularRateZ "Z angular rate with a resolution of 0.25 deg/s"; 319 | BA_DEF_ BO_ "VFrameFormat" ENUM "StandardCAN","ExtendedCAN","StandardCAN_FD","ExtendedCAN_FD","J1939PG"; 320 | BA_DEF_ "ProtocolType" STRING ; 321 | BA_DEF_DEF_ "VFrameFormat" ""; 322 | BA_DEF_DEF_ "ProtocolType" ""; 323 | BA_ "ProtocolType" ""; 324 | VAL_ 3 PositionValid 0 "Invalid" 1 "Valid" ; 325 | VAL_ 2 TimeValid 0 "Invalid" 1 "Valid" ; 326 | VAL_ 2 TimeConfirmed 0 "Unconfirmed" 1 "Confirmed" ; 327 | VAL_ 5 AttitudeValid 0 "Invalid" 1 "Valid" ; 328 | VAL_ 6 DistanceValid 0 "Invalid" 1 "Valid" ; 329 | VAL_ 1 FixType 0 "No fix" 1 "Dead reckoning only" 2 "2D-fix" 3 "3D-fix" 4 "GNSS + dead reckoning combined" 5 "Time only fix" ; 330 | VAL_ 4 AltitudeValid 0 "Invalid" 1 "Valid" ; 331 | VAL_ 8 FenceValid 0 "Invalid" 1 "Valid" ; 332 | VAL_ 8 FenceCombined 0 "Unknown" 1 "Inside" 2 "Outside" ; 333 | VAL_ 8 Fence1 0 "Unknown" 1 "Inside" 2 "Outside" ; 334 | VAL_ 8 Fence2 0 "Unknown" 1 "Inside" 2 "Outside" ; 335 | VAL_ 8 Fence3 0 "Unknown" 1 "Inside" 2 "Outside" ; 
336 | VAL_ 8 Fence4 0 "Unknown" 1 "Inside" 2 "Outside" ; 337 | VAL_ 7 SpeedValid 0 "Invalid" 1 "Valid" ; 338 | VAL_ 9 ImuValid 0 "Invalid" 1 "Valid" ; 339 | VAL_ 19 PositionValid 0 "Invalid" 1 "Valid" ; 340 | VAL_ 18 TimeValid 0 "Invalid" 1 "Valid" ; 341 | VAL_ 18 TimeConfirmed 0 "Unconfirmed" 1 "Confirmed" ; 342 | VAL_ 21 AttitudeValid 0 "Invalid" 1 "Valid" ; 343 | VAL_ 22 DistanceValid 0 "Invalid" 1 "Valid" ; 344 | VAL_ 17 FixType 0 "No fix" 1 "Dead reckoning only" 2 "2D-fix" 3 "3D-fix" 4 "GNSS + dead reckoning combined" 5 "Time only fix" ; 345 | VAL_ 20 AltitudeValid 0 "Invalid" 1 "Valid" ; 346 | VAL_ 24 FenceValid 0 "Invalid" 1 "Valid" ; 347 | VAL_ 24 FenceCombined 0 "Unknown" 1 "Inside" 2 "Outside" ; 348 | VAL_ 24 Fence1 0 "Unknown" 1 "Inside" 2 "Outside" ; 349 | VAL_ 24 Fence2 0 "Unknown" 1 "Inside" 2 "Outside" ; 350 | VAL_ 24 Fence3 0 "Unknown" 1 "Inside" 2 "Outside" ; 351 | VAL_ 24 Fence4 0 "Unknown" 1 "Inside" 2 "Outside" ; 352 | VAL_ 23 SpeedValid 0 "Invalid" 1 "Valid" ; 353 | VAL_ 25 ImuValid 0 "Invalid" 1 "Valid" ; 354 | 355 | -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/install.bat: -------------------------------------------------------------------------------- 1 | python -m venv env & env\Scripts\activate & pip install -r requirements.txt -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/mdf2finalized.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/concatenate-mf4-by-period/mdf2finalized.exe -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/requirements.txt: -------------------------------------------------------------------------------- 1 | asammdf==7.2.0 2 | attrs==22.1.0 3 | canedge-browser==0.0.8 4 | canmatrix==0.9.5 5 | click==8.1.3 6 | colorama==0.4.6 7 | fsspec==2022.11.0 8 | future==0.18.2 9 | isal==1.1.0 10 | lxml==4.9.2 11 | lz4==4.0.2 12 | mdf-iter>=2.0.4 13 | numexpr==2.8.4 14 | numpy==1.23.5 15 | pandas==1.5.2 16 | python-dateutil==2.8.2 17 | pytz==2022.7 18 | six==1.16.0 19 | typing-extensions==4.4.0 20 | -------------------------------------------------------------------------------- /examples/other/concatenate-mf4-by-period/run.bat: -------------------------------------------------------------------------------- 1 | env\Scripts\activate & python concatenate_mf4_by_period.py -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG/11111111/00000012/00000001.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG/11111111/00000012/00000001.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000001-60FE3F04.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000001-60FE3F04.MF4 -------------------------------------------------------------------------------- 
/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000002-60FE415D.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000002-60FE415D.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000003-60FE43B2.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000003-60FE43B2.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000004-60FEB585.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000004-60FEB585.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000005-60FEB5A8.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000164/00000005-60FEB5A8.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000001-60FEB68B.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000001-60FEB68B.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000002-60FEB8E3.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000002-60FEB8E3.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000003-60FEBB3B.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000003-60FEBB3B.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000004-60FEBD93.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000004-60FEBD93.MF4 -------------------------------------------------------------------------------- 
/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000005-60FEBFEB.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000005-60FEBFEB.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000006-60FFE42F.MF4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/matlab-basics/LOG_datastore/3BA199E2/00000165/00000006-60FFE42F.MF4 -------------------------------------------------------------------------------- /examples/other/matlab-basics/README.md: -------------------------------------------------------------------------------- 1 | # Process MF4/MAT CAN bus data from the CANedge via MATLAB 2 | 3 | Here you'll find examples for loading CANedge data in MATLAB in different ways. We recommend that you check out our intro to using [MATLAB with CAN bus data](https://www.csselectronics.com/pages/matlab-mdf-mf4-can-bus) from the CANedge. 4 | 5 | The scripts primarily focus on showcasing how you can load the log files from the CANedge in different ways. 6 | 7 | Further, we also provide a script example letting you load `.mat` files instead of MF4 files, in case you prefer not to use the Vehicle Network Toolbox. To help automate the export of your MF4 to `.mat` we provide a plug & play script example. 8 | 9 | **NOTE:** You can use our new MF4 decoders to create a Parquet data lake. This is now our recommended solution for working with your CANedge data in MATLAB. For details and script examples, see the [MF4 decoder docs](https://canlogger.csselectronics.com/canedge-getting-started/ce3/log-file-tools/mdf4-decoders/). 10 | 11 | --- 12 | 13 | ## File overview 14 | 15 | - The LOG folders contain data for the various script examples 16 | - `matlab_basics.m`: Load unfinalized and finalized MF4 log files via the VNT 17 | - `matlab_datastore.m`: Load several finalized MF4 files via datastores 18 | - `matlab_tall.m`: Load DBC decoded MF4 files into tall array (for big data) 19 | - `matlab_mat.m`: Load DBC decoded `.mat` files into datastores and tall arrays 20 | - `mf4_to_mat.py`: DBC decodes MF4 log files and exports to `.mat` with suitable settings 21 | 22 | --- 23 | 24 | ## Installation/requirements 25 | 26 | ### Regarding MATLAB version 27 | The MATLAB scripts are tested for release 2021b. The `matlab_basics.m`, `matlab_datastore.m` and `matlab_tall.m` assume you have the [Vehicle Network Toolbox](https://www.csselectronics.com/pages/matlab-mdf-mf4-can-bus) installed. Further, to load CANedge MF4 log files directly in MATLAB, the log files need to have been recorded with Firmware `01.04.01+`. 28 | 29 | ### Using the asammdf GUI to export MAT files 30 | The `matlab_mat.m` example can be used with older versions of MATLAB and without using the Vehicle Network Toolbox. You can use the asammdf GUI to DBC decode and export your CANedge MF4 log files to the `.mat` format with settings as in the below picture. 31 | 32 | *(Image: asammdf GUI settings for MATLAB export of MF4)* 33 | 34 | ### Using the asammdf Python API to export MAT files 35 | Alternatively, you can use the asammdf Python API to automate this process via the `mf4_to_mat.py` script. 
See the general instructions for installing Python and the relevant `requirements.txt` in the `asammdf-basics/`. We generally recommend using the API to enable full automation of your workflow. 36 | 37 | --- 38 | 39 | ## Documentation on using MF4 (MDF) in MATLAB's VNT 40 | 41 | MATLAB provides a number of examples for how you can use MF4 data in MATLAB's Vehicle Network Toolbox in their [MDF overview](https://www.mathworks.com/help/vnt/mdf-files.html). 42 | 43 | For details on working with tall arrays, see also MATLAB's [visualization of tall arrays](https://www.mathworks.com/help/matlab/import_export/tall-data-visualization.html) guide. 44 | -------------------------------------------------------------------------------- /examples/other/matlab-basics/dbc_files/CSS-Electronics-SAE-J1939-DEMO.dbc: -------------------------------------------------------------------------------- 1 | VERSION "" 2 | 3 | 4 | NS_ : 5 | NS_DESC_ 6 | CM_ 7 | BA_DEF_ 8 | BA_ 9 | VAL_ 10 | CAT_DEF_ 11 | CAT_ 12 | FILTER 13 | BA_DEF_DEF_ 14 | EV_DATA_ 15 | ENVVAR_DATA_ 16 | SGTYPE_ 17 | SGTYPE_VAL_ 18 | BA_DEF_SGTYPE_ 19 | BA_SGTYPE_ 20 | SIG_TYPE_REF_ 21 | VAL_TABLE_ 22 | SIG_GROUP_ 23 | SIG_VALTYPE_ 24 | SIGTYPE_VALTYPE_ 25 | BO_TX_BU_ 26 | BA_DEF_REL_ 27 | BA_REL_ 28 | BA_DEF_DEF_REL_ 29 | BU_SG_REL_ 30 | BU_EV_REL_ 31 | BU_BO_REL_ 32 | SG_MUL_VAL_ 33 | 34 | BS_: 35 | 36 | BU_: 37 | 38 | 39 | BO_ 2364540158 EEC1: 8 Vector__XXX 40 | SG_ EngineSpeed : 24|16@1+ (0.125,0) [0|8031.875] "rpm" Vector__XXX 41 | 42 | BO_ 2566844926 CCVS1: 8 Vector__XXX 43 | SG_ WheelBasedVehicleSpeed : 8|16@1+ (0.00390625,0) [0|250.996] "km/h" Vector__XXX 44 | 45 | 46 | CM_ BO_ 2364540158 "Electronic Engine Controller 1"; 47 | CM_ SG_ 2364540158 EngineSpeed "Actual engine speed which is calculated over a minimum crankshaft angle of 720 degrees divided by the number of cylinders.…"; 48 | CM_ BO_ 2566844926 "Cruise Control/Vehicle Speed 1"; 49 | CM_ SG_ 2566844926 WheelBasedVehicleSpeed "Wheel-Based Vehicle Speed: Speed of the vehicle as calculated from wheel or tailshaft speed."; 50 | BA_DEF_ SG_ "SPN" INT 0 524287; 51 | BA_DEF_ BO_ "VFrameFormat" ENUM "StandardCAN","ExtendedCAN","reserved","J1939PG"; 52 | BA_DEF_ "DatabaseVersion" STRING ; 53 | BA_DEF_ "BusType" STRING ; 54 | BA_DEF_ "ProtocolType" STRING ; 55 | BA_DEF_ "DatabaseCompiler" STRING ; 56 | BA_DEF_DEF_ "SPN" 0; 57 | BA_DEF_DEF_ "VFrameFormat" "J1939PG"; 58 | BA_DEF_DEF_ "DatabaseVersion" ""; 59 | BA_DEF_DEF_ "BusType" ""; 60 | BA_DEF_DEF_ "ProtocolType" ""; 61 | BA_DEF_DEF_ "DatabaseCompiler" ""; 62 | BA_ "ProtocolType" "J1939"; 63 | BA_ "BusType" "CAN"; 64 | BA_ "DatabaseCompiler" "CSS ELECTRONICS (WWW.CSSELECTRONICS.COM)"; 65 | BA_ "DatabaseVersion" "1.0.0"; 66 | BA_ "VFrameFormat" BO_ 2364540158 3; 67 | BA_ "SPN" SG_ 2364540158 EngineSpeed 190; 68 | BA_ "SPN" SG_ 2566844926 WheelBasedVehicleSpeed 84; 69 | -------------------------------------------------------------------------------- /examples/other/matlab-basics/dbc_files/canmod-gps.dbc: -------------------------------------------------------------------------------- 1 | VERSION "" 2 | 3 | 4 | NS_ : 5 | NS_DESC_ 6 | CM_ 7 | BA_DEF_ 8 | BA_ 9 | VAL_ 10 | CAT_DEF_ 11 | CAT_ 12 | FILTER 13 | BA_DEF_DEF_ 14 | EV_DATA_ 15 | ENVVAR_DATA_ 16 | SGTYPE_ 17 | SGTYPE_VAL_ 18 | BA_DEF_SGTYPE_ 19 | BA_SGTYPE_ 20 | SIG_TYPE_REF_ 21 | VAL_TABLE_ 22 | SIG_GROUP_ 23 | SIG_VALTYPE_ 24 | SIGTYPE_VALTYPE_ 25 | BO_TX_BU_ 26 | BA_DEF_REL_ 27 | BA_REL_ 28 | BA_DEF_DEF_REL_ 29 | BU_SG_REL_ 30 | BU_EV_REL_ 31 | BU_BO_REL_ 32 | SG_MUL_VAL_ 33 | 34 
| BS_: 35 | 36 | BU_: 37 | 38 | 39 | BO_ 3 gnss_pos: 8 Vector__XXX 40 | SG_ PositionAccuracy : 58|6@1+ (1,0) [0|63] "m" Vector__XXX 41 | SG_ Latitude : 1|28@1+ (1e-06,-90) [-90|90] "deg" Vector__XXX 42 | SG_ Longitude : 29|29@1+ (1e-06,-180) [-180|180] "deg" Vector__XXX 43 | SG_ PositionValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 44 | 45 | BO_ 2 gnss_time: 6 Vector__XXX 46 | SG_ TimeValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 47 | SG_ TimeConfirmed : 1|1@1+ (1,0) [0|1] "" Vector__XXX 48 | SG_ Epoch : 8|40@1+ (0.001,1577840400) [1577840400|2677352027] "sec" Vector__XXX 49 | 50 | BO_ 5 gnss_attitude: 8 Vector__XXX 51 | SG_ AttitudeValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 52 | SG_ Roll : 1|12@1+ (0.1,-180) [-180|180] "deg" Vector__XXX 53 | SG_ Pitch : 22|12@1+ (0.1,-90) [-90|90] "deg" Vector__XXX 54 | SG_ Heading : 43|12@1+ (0.1,0) [0|360] "deg" Vector__XXX 55 | SG_ RollAccuracy : 13|9@1+ (0.1,0) [0|50] "deg" Vector__XXX 56 | SG_ PitchAccuracy : 34|9@1+ (0.1,0) [0|50] "deg" Vector__XXX 57 | SG_ HeadingAccuracy : 55|9@1+ (0.1,0) [0|50] "deg" Vector__XXX 58 | 59 | BO_ 6 gnss_odo: 8 Vector__XXX 60 | SG_ DistanceTrip : 1|22@1+ (1,0) [0|4194303] "m" Vector__XXX 61 | SG_ DistanceAccuracy : 23|19@1+ (1,0) [0|524287] "m" Vector__XXX 62 | SG_ DistanceValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 63 | SG_ DistanceTotal : 42|22@1+ (1,0) [0|4194303] "km" Vector__XXX 64 | 65 | BO_ 1 gnss_status: 1 Vector__XXX 66 | SG_ FixType : 0|3@1+ (1,0) [0|5] "" Vector__XXX 67 | SG_ Satellites : 3|5@1+ (1,0) [0|31] "" Vector__XXX 68 | 69 | BO_ 4 gnss_altitude: 4 Vector__XXX 70 | SG_ AltitudeValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 71 | SG_ Altitude : 1|18@1+ (0.1,-6000) [-6000|20000] "m" Vector__XXX 72 | SG_ AltitudeAccuracy : 19|13@1+ (1,0) [0|8000] "m" Vector__XXX 73 | 74 | BO_ 8 gnss_geofence: 2 Vector__XXX 75 | SG_ FenceValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 76 | SG_ FenceCombined : 1|2@1+ (1,0) [0|1] "" Vector__XXX 77 | SG_ Fence1 : 8|2@1+ (1,0) [0|1] "" Vector__XXX 78 | SG_ Fence2 : 10|2@1+ (1,0) [0|1] "" Vector__XXX 79 | SG_ Fence3 : 12|2@1+ (1,0) [0|1] "" Vector__XXX 80 | SG_ Fence4 : 14|2@1+ (1,0) [0|1] "" Vector__XXX 81 | 82 | BO_ 7 gnss_speed: 5 Vector__XXX 83 | SG_ Speed : 1|20@1+ (0.001,0) [0|1048.575] "m/s" Vector__XXX 84 | SG_ SpeedAccuracy : 21|19@1+ (0.001,0) [0|524.287] "m/s" Vector__XXX 85 | SG_ SpeedValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 86 | 87 | BO_ 9 gnss_imu: 8 Vector__XXX 88 | SG_ AccelerationX : 1|10@1+ (0.125,-64) [-64|63.875] "m/s^2" Vector__XXX 89 | SG_ AccelerationY : 11|10@1+ (0.125,-64) [-64|63.875] "m/s^2" Vector__XXX 90 | SG_ AccelerationZ : 21|10@1+ (0.125,-64) [-64|63.875] "m/s^2" Vector__XXX 91 | SG_ AngularRateX : 31|11@1+ (0.25,-256) [-256|255.75] "deg/s" Vector__XXX 92 | SG_ AngularRateY : 42|11@1+ (0.25,-256) [-256|255.75] "deg/s" Vector__XXX 93 | SG_ AngularRateZ : 53|11@1+ (0.25,-256) [-256|255.75] "deg/s" Vector__XXX 94 | SG_ ImuValid : 0|1@1+ (1,0) [0|1] "" Vector__XXX 95 | 96 | 97 | 98 | CM_ BO_ 3 "GNSS position"; 99 | CM_ SG_ 3 PositionAccuracy "Accuracy of position"; 100 | CM_ SG_ 3 Latitude "Latitude"; 101 | CM_ SG_ 3 Longitude "Longitude"; 102 | CM_ SG_ 3 PositionValid "Position validity"; 103 | CM_ BO_ 2 "GNSS time"; 104 | CM_ SG_ 2 TimeValid "Time validity"; 105 | CM_ SG_ 2 TimeConfirmed "Time confirmed"; 106 | CM_ SG_ 2 Epoch "Epoch time"; 107 | CM_ BO_ 5 "GNSS attitude"; 108 | CM_ SG_ 5 AttitudeValid "Attitude validity"; 109 | CM_ SG_ 5 Roll "Vehicle roll"; 110 | CM_ SG_ 5 Pitch "Vehicle pitch"; 111 | CM_ SG_ 5 Heading "Vehicle heading"; 112 | CM_ SG_ 5 
RollAccuracy "Vehicle roll accuracy"; 113 | CM_ SG_ 5 PitchAccuracy "Vehicle pitch accuracy"; 114 | CM_ SG_ 5 HeadingAccuracy "Vehicle heading accuracy"; 115 | CM_ BO_ 6 "GNSS odometer"; 116 | CM_ SG_ 6 DistanceTrip "Distance traveled since last reset"; 117 | CM_ SG_ 6 DistanceAccuracy "Distance accuracy (1-sigma)"; 118 | CM_ SG_ 6 DistanceTotal "Distance traveled in total"; 119 | CM_ BO_ 1 "GNSS information"; 120 | CM_ SG_ 1 FixType "Fix type"; 121 | CM_ SG_ 1 Satellites "Number of satellites used"; 122 | CM_ BO_ 4 "GNSS altitude"; 123 | CM_ SG_ 4 AltitudeValid "Altitude validity"; 124 | CM_ SG_ 4 Altitude "Altitude"; 125 | CM_ SG_ 4 AltitudeAccuracy "Accuracy of altitude"; 126 | CM_ BO_ 8 "GNSS geofence(s)"; 127 | CM_ SG_ 8 FenceValid "Geofencing status"; 128 | CM_ SG_ 8 FenceCombined "Combined (logical OR) state of all geofences"; 129 | CM_ SG_ 8 Fence1 "Geofence 1 state"; 130 | CM_ SG_ 8 Fence2 "Geofence 2 state"; 131 | CM_ SG_ 8 Fence3 "Geofence 3 state"; 132 | CM_ SG_ 8 Fence4 "Geofence 4 state"; 133 | CM_ BO_ 7 "GNSS speed"; 134 | CM_ SG_ 7 Speed "Speed"; 135 | CM_ SG_ 7 SpeedAccuracy "Speed accuracy"; 136 | CM_ BO_ 9 "GNSS IMU"; 137 | CM_ SG_ 9 AccelerationX "X acceleration with a resolution of 0.125 m/s^2"; 138 | CM_ SG_ 9 AccelerationY "Y acceleration with a resolution of 0.125 m/s^2"; 139 | CM_ SG_ 9 AccelerationZ "Z acceleration with a resolution of 0.125 m/s^2"; 140 | CM_ SG_ 9 AngularRateX "X angular rate with a resolution of 0.25 deg/s"; 141 | CM_ SG_ 9 AngularRateY "Y angular rate with a resolution of 0.25 deg/s"; 142 | CM_ SG_ 9 AngularRateZ "Z angular rate with a resolution of 0.25 deg/s"; 143 | VAL_ 3 PositionValid 0 "Invalid" 1 "Valid" ; 144 | VAL_ 2 TimeValid 0 "Invalid" 1 "Valid" ; 145 | VAL_ 2 TimeConfirmed 0 "Unconfirmed" 1 "Confirmed" ; 146 | VAL_ 5 AttitudeValid 0 "Invalid" 1 "Valid" ; 147 | VAL_ 6 DistanceValid 0 "Invalid" 1 "Valid" ; 148 | VAL_ 1 FixType 0 "No fix" 1 "Dead reckoning only" 2 "2D-fix" 3 "3D-fix" 4 "GNSS + dead reckoning combined" 5 "Time only fix" ; 149 | VAL_ 4 AltitudeValid 0 "Invalid" 1 "Valid" ; 150 | VAL_ 8 FenceValid 0 "Invalid" 1 "Valid" ; 151 | VAL_ 8 FenceCombined 0 "Unknown" 1 "Inside" 2 "Outside" ; 152 | VAL_ 8 Fence1 0 "Unknown" 1 "Inside" 2 "Outside" ; 153 | VAL_ 8 Fence2 0 "Unknown" 1 "Inside" 2 "Outside" ; 154 | VAL_ 8 Fence3 0 "Unknown" 1 "Inside" 2 "Outside" ; 155 | VAL_ 8 Fence4 0 "Unknown" 1 "Inside" 2 "Outside" ; 156 | VAL_ 7 SpeedValid 0 "Invalid" 1 "Valid" ; 157 | VAL_ 9 ImuValid 0 "Invalid" 1 "Valid" ; 158 | 159 | -------------------------------------------------------------------------------- /examples/other/matlab-basics/matlab_basics.m: -------------------------------------------------------------------------------- 1 | clear, clc, close all 2 | 3 | % set index of CAN channel (MATLAB finalization: 8 | mdf2finalized: 1) 4 | can_idx = 8; 5 | 6 | % ------------------------------------------------------------------------ 7 | % finalize & load MF4 "in place" (overwrites original file) 8 | % m = mdf(mdfFinalize("LOG/11111111/00000012/00000001.MF4")); 9 | 10 | % finalize & load MF4 "out of place" (makes a copy of original file) 11 | try 12 | finalizedPath2 = mdfFinalize("LOG/11111111/00000012/00000001.MF4", "LOG/11111111/00000012/00000001_fin.MF4"); 13 | m = mdf(finalizedPath2); 14 | catch ME 15 | disp(ME.message) 16 | end 17 | 18 | % load an MF4 which has already been 'finalized' via MATLAB or mdf2finalized 19 | m = mdf("LOG/11111111/00000012/00000001_fin.MF4"); 20 | 21 | % extract CAN data into timetable 22 | rawTimeTable = 
read(m,can_idx,m.ChannelNames{can_idx}); 23 | 24 | 25 | 26 | % ------------------------------------------------------------------------ 27 | % decode CAN data using DBC, use absolute date & time and extract a specific message 28 | canDB = canDatabase('dbc_files/canmod-gps.dbc'); 29 | msgTimetableGPS = canFDMessageTimetable(rawTimeTable, canDB); 30 | msgTimetableGPS.Time = msgTimetableGPS.Time + m.InitialTimestamp; 31 | msgSpeed = canSignalTimetable(msgTimetableGPS, "gnss_speed"); 32 | 33 | % decode J1939 data (first convert data to 'Classical' CAN by removing EDL) 34 | rawTimeTable = removevars(rawTimeTable, "CAN_DataFrame_EDL"); 35 | 36 | canDB = canDatabase('dbc_files/CSS-Electronics-SAE-J1939-DEMO.dbc'); 37 | msgTimetableJ1939 = j1939ParameterGroupTimetable(rawTimeTable, canDB); 38 | msgTimetableJ1939.Time = msgTimetableJ1939.Time + m.InitialTimestamp; 39 | msgEEC1 = j1939SignalTimetable(msgTimetableJ1939, "ParameterGroups","EEC1"); 40 | 41 | % plot select decoded signals 42 | ax1 = subplot(2, 1, 1); 43 | plot(msgSpeed.Time, msgSpeed.Speed) 44 | ylabel("Speed (m/s)") 45 | ax2 = subplot(2, 1, 2); 46 | plot(msgEEC1.Time, msgEEC1.EngineSpeed) 47 | ylabel("Engine Speed (rpm)") 48 | linkaxes([ax1,ax2],'x'); 49 | -------------------------------------------------------------------------------- /examples/other/matlab-basics/matlab_datastore.m: -------------------------------------------------------------------------------- 1 | clear, clc, close all 2 | 3 | % load database 4 | canDB = canDatabase("dbc_files/canmod-gps.dbc"); 5 | 6 | % create datastore of finalized MF4 files from local disk 7 | mds = mdfDatastore("LOG_datastore/3BA199E2/00000164", "IncludeSubfolders", true); 8 | 9 | % same principle can be used to load MF4 files from mounted S3 drive 10 | % mds = mdfDatastore("Z:\3BA199E2","IncludeSubfolders", true); 11 | 12 | % preview datastore 13 | preview(mds); 14 | 15 | % ------------------------------------------------------------------------ 16 | % if datastore fits into memory you can simply read all data into timetable 17 | rawTimeTable = readall(mds); 18 | msgTimetable = canFDMessageTimetable(rawTimeTable, canDB); 19 | msgSpeed = canSignalTimetable(msgTimetable, "gnss_speed"); 20 | subplot(2, 1, 1); 21 | plot(msgSpeed.Time, msgSpeed.Speed) 22 | ylabel("Speed (m/s) via readall") 23 | 24 | % ------------------------------------------------------------------------ 25 | % if datastore is larger than memory, use in-memory chunks 26 | mds.ReadSize = seconds(300); 27 | msgSpeed = []; 28 | i = 1; 29 | 30 | while hasdata(mds) 31 | % read a chunk, decode it and extract data to separate table 32 | rawTimeTable = read(mds); 33 | msgTimetable = canFDMessageTimetable(rawTimeTable, canDB); 34 | msgSpeedChunk = canSignalTimetable(msgTimetable, "gnss_speed"); 35 | msgSpeed = vertcat(msgSpeed,msgSpeedChunk); 36 | fprintf("\nreading chunk %i",i) 37 | i = i + 1; 38 | end 39 | 40 | subplot(2, 1, 2); 41 | plot(msgSpeed.Time, msgSpeed.Speed) 42 | ylabel("Speed (m/s) via chunks") -------------------------------------------------------------------------------- /examples/other/matlab-basics/matlab_mat.m: -------------------------------------------------------------------------------- 1 | clear, clc, close all 2 | 3 | % load decoded MAT 7.3 files from folder into datastore and tall array 4 | ds = fileDatastore("LOG_mat_decoded/3BA199E2/00000164",'ReadFcn', @(x)struct2table(load(x)), 'UniformRead', true, 'IncludeSubfolders', true); 5 | tt = tall(ds); 6 | 7 | % create deferred calculations 8 | meanSpeed = 
mean(tt.Speed); 9 | 10 | % use gather to force computation of deferred calculations 11 | [meanSpeed] = gather(meanSpeed); 12 | 13 | 14 | plot(tt.timestamps,tt.Speed) 15 | fprintf("\nThe average speed is %s m/s\n",num2str(meanSpeed)) 16 |
-------------------------------------------------------------------------------- /examples/other/matlab-basics/matlab_tall.m: -------------------------------------------------------------------------------- 1 | clear, clc, close all 2 | 3 | % create datastore of finalized MF4 files from local disk or mounted S3 drive 4 | mds = mdfDatastore("LOG_mf4_decoded/3BA199E2/00000164", "IncludeSubfolders", true); 5 | 6 | % preview datastore 7 | preview(mds); 8 | 9 | % view datastore CAN message groups and turn relevant group into tall array 10 | mds.ChannelGroups 11 | 12 | mds.SelectedChannelGroupNumber = 3; 13 | tt1 = tall(mds); 14 | 15 | mds.SelectedChannelGroupNumber = 9; 16 | tt2 = tall(mds); 17 | 18 | % create deferred calculations 19 | meanPositionAccuracy = mean(tt1.PositionAccuracy); 20 | medianPositionAccuracy = median(tt1.PositionAccuracy); 21 | maxTime = max(tt1.Time); 22 | 23 | % use gather to force computation of deferred calculations 24 | [Latitude, Longitude, meanPositionAccuracy, medianPositionAccuracy, maxTime] = gather(tt1.Latitude, tt1.Longitude, meanPositionAccuracy, medianPositionAccuracy, maxTime); 25 | 26 | 27 | fprintf("\nThe mean [median] position accuracy is %s [%s] meters\n", num2str(meanPositionAccuracy), num2str(medianPositionAccuracy)); 28 | 29 | % plot variables and note how plot() supports tall arrays directly 30 | subplot(4,1,1) 31 | geoplot(Latitude,Longitude,'-') 32 | title("GNSS position"); 33 | 34 | ax1 = subplot(4,1,2); 35 | plot(tt1.Time,tt1.PositionAccuracy) 36 | title("Position accuracy (m)"); 37 | 38 | ax2 = subplot(4,1,3); 39 | plot(tt2.Time,tt2.AccelerationX) 40 | title("Acceleration X (incl. invalid)"); 41 | 42 | ax3 = subplot(4,1,4); 43 | plot(tt2.Time,tt2.AccelerationX ./ logical(tt2.ImuValid)) 44 | title("Acceleration X (excl. invalid)"); 45 | 46 | linkaxes([ax1,ax2,ax3],'x'); 47 | xlim([0 maxTime]); 48 | 49 |
-------------------------------------------------------------------------------- /examples/other/matlab-basics/mf4_to_mat.py: -------------------------------------------------------------------------------- 1 | from asammdf import MDF 2 | 3 | from pathlib import Path 4 | from datetime import timedelta 5 | 6 | # set variables 7 | suffix_start = True # include session start time in mat file names 8 | raster = 1 # resamples output data to 1 second 9 | mdf_extension = ".MF4" 10 | input_folder = "LOG_datastore" 11 | output_folder_mf4 = "LOG_mf4_decoded" 12 | output_folder_mat = "LOG_mat_decoded" 13 | 14 | # load MDF/DBC files from input folder 15 | path = Path(__file__).parent.absolute() 16 | path_in = Path(path, input_folder) 17 | path_out_mf4 = Path(path, output_folder_mf4) 18 | path_out_mat = Path(path, output_folder_mat) 19 | 20 | dbc_files = {"CAN": [(dbc, 0) for dbc in list(path.rglob("dbc_files/*.dbc"))]} 21 | logfiles = list(path_in.rglob("*" + mdf_extension)) 22 | 23 | print("Log file(s): ", logfiles, "\nDBC(s): ", dbc_files, "\n") 24 | 25 | # export each logfile individually for use in e.g.
a datastore/tall array 26 | for logfile in logfiles: 27 | # load MF4 log file and get the session start 28 | mdf = MDF(logfile) 29 | session_start = mdf.header.start_time 30 | df_raw_asam = mdf.to_dataframe() 31 | delta_seconds_start = df_raw_asam.index[0] 32 | delta_seconds_stop = df_raw_asam.index[-1] 33 | 34 | mdf_start = (session_start + timedelta(seconds=delta_seconds_start)) 35 | mdf_start_str = mdf_start.strftime("%y%m%d-%H%M") 36 | 37 | # optionally use the first timestamp in the output filename 38 | if suffix_start: 39 | mat_extension = f"-{mdf_start_str}.mat" 40 | else: 41 | mat_extension = ".mat" 42 | 43 | # specify output filenames 44 | filename = logfile.name 45 | filename_mat = str(filename).replace(".MF4", mat_extension) 46 | 47 | # re-use input path hierarchy for output 48 | rel_path = str(logfile).split(input_folder)[-1][1:].replace(filename, "") 49 | 50 | # set output paths using output hierarchy and filenames 51 | output_path_mf4 = Path(path_out_mf4, rel_path, filename) 52 | output_path_mat = Path(path_out_mat, rel_path, filename_mat) 53 | 54 | # dbc decode data 55 | mdf_scaled = mdf.extract_bus_logging(dbc_files) 56 | 57 | # EXPORT TO DBC DECODED MF4 58 | mdf_scaled.save(output_path_mf4, overwrite=True) 59 | 60 | # EXPORT TO DBC DECODED MAT 61 | Path(output_path_mat).parent.mkdir(parents=True, exist_ok=True) 62 | 63 | mdf_scaled.export( 64 | "mat", 65 | filename=output_path_mat, 66 | time_from_zero=False, 67 | single_time_base=True, 68 | raster=raster, 69 | use_display_names=True, 70 | oned_as="column", 71 | keep_arrays=True, 72 | ) 73 | 74 | print(f"Saving MAT file to {output_path_mat}") 75 |
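76 | # ------------------------------------------------------------------------ 77 | # Optional sanity check (sketch, assumes the extra dependency h5py is installed): 78 | # MAT 7.3 files are HDF5 containers, so an exported file can be opened with 79 | # h5py to verify which signals were written: 80 | # 81 | # import h5py 82 | # with h5py.File(output_path_mat, "r") as f: 83 | #     print(list(f.keys()))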
-------------------------------------------------------------------------------- /examples/other/matlab-basics/requirements.txt: -------------------------------------------------------------------------------- 1 | asammdf==7.2.0 2 | attrs==22.2.0 3 | canmatrix==0.9.5 4 | click==8.1.3 5 | colorama==0.4.6 6 | future==0.18.2 7 | isal==1.1.0 8 | lxml==4.9.2 9 | lz4==4.0.2 10 | numexpr==2.8.4 11 | numpy==1.24.0 12 | pandas==1.5.2 13 | python-dateutil==2.8.2 14 | pytz==2022.7 15 | scipy==1.9.3 16 | six==1.16.0 17 | typing-extensions==4.4.0 18 |
-------------------------------------------------------------------------------- /examples/other/misc/create_log_files.py: -------------------------------------------------------------------------------- 1 | import os 2 | import hashlib 3 | 4 | 5 | def sha256_file(path): 6 | digest = hashlib.sha256() 7 | with open(path, "rb") as f: 8 | while True: 9 | buf = f.read(65536) 10 | if len(buf) == 0: 11 | break 12 | digest.update(buf) 13 | return digest.hexdigest().upper() 14 | 15 | 16 | device_id = "3851A144" #"1973B1D6" 17 | file_type = "MF4" 18 | sessions = 30 19 | splits = 50 20 | session_offset = 20000 21 | size_bytes = 2 * 1024 * 1024 22 | 23 | dir_path = os.path.dirname(os.path.abspath(__file__)) 24 | 25 | for session_no in range(session_offset, sessions + session_offset): 26 | for split_no in range(1, splits + 1): 27 | tmp_file_name = "{}.mf4".format(split_no) 28 | tmp_file_path = os.path.join(dir_path, tmp_file_name) 29 | 30 | with open(tmp_file_path, "w+") as f: 31 | f.seek(size_bytes - 1) 32 | f.write("\0") 33 | 34 | # Calculate digest of file 35 | digest = sha256_file(tmp_file_path) 36 | 37 | folder = f'{session_no}'.zfill(8) 38 | # Check whether the specified path exists or not 39 | isExist = os.path.exists(folder) 40 | 41 | if not isExist: 42 | # Create a new directory because it does not exist 43 | os.makedirs(folder) 44 | 45 | # Create file name 46 | new_file_name = folder + "/" + f"{split_no}".zfill(8) + ".MF4" 47 | # new_file_name = "{}_{:08}_{:08}-{}.mf4".format(device_id, session_no, split_no, digest) 48 | new_file_path = os.path.join(dir_path, new_file_name) 49 | 50 | os.rename(tmp_file_path, new_file_path) 51 |
-------------------------------------------------------------------------------- /examples/other/misc/mdf2csv.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/misc/mdf2csv.exe
-------------------------------------------------------------------------------- /examples/other/misc/mdf_converter.py: -------------------------------------------------------------------------------- 1 | """ 2 | About: This basic example shows how you can use the simple MDF4 converters in scripts 3 | """ 4 | import subprocess 5 | from pathlib import Path 6 | 7 | # get path of input/output folders relative to script location 8 | path = Path(__file__).parent.absolute() 9 | 10 | converter = str(Path(path, "mdf2csv.exe")) 11 | path_in = str(Path(path, "input")) 12 | path_out = str(Path(path, "output")) 13 | 14 | # run converter 15 | subprocess.run([converter, "-i", path_in, "-O", path_out]) 16 |
-------------------------------------------------------------------------------- /examples/other/misc/send_mail.py: -------------------------------------------------------------------------------- 1 | """ 2 | About: helper function to send a basic email incl. optional image attachment (modify with your own SMTP server & details) 3 | Test: Create a new Gmail account and enable less secure apps: https://myaccount.google.com/lesssecureapps 4 | """ 5 | import os 6 | import smtplib, ssl 7 | from email.mime.text import MIMEText 8 | from email.mime.image import MIMEImage 9 | from email.mime.multipart import MIMEMultipart 10 | 11 | 12 | def send_mail(sender, receiver, subject, content, password, smtp_server, port=465, image_path=""): 13 | msg = MIMEMultipart() 14 | msg["Subject"] = subject 15 | msg["From"] = sender 16 | msg["To"] = receiver 17 | 18 | text = MIMEText(content) 19 | msg.attach(text) 20 | 21 | if image_path != "": 22 | img_data = open(image_path, "rb").read() 23 | image = MIMEImage(img_data, name=os.path.basename(image_path)) 24 | msg.attach(image) 25 | 26 | context = ssl.create_default_context() 27 | s = smtplib.SMTP_SSL(smtp_server, port, context=context) 28 | s.login(sender, password) 29 | s.sendmail(sender, receiver, msg.as_string()) 30 | s.quit() 31 | 32 | 33 | # test usage 34 | sender = "xyz@gmail.com" 35 | receiver = "xyz@hotmail.com" 36 | password = "xyz" 37 | smtp_server = "smtp.gmail.com" 38 | port = 465 39 | image_path = "signal_EngineSpeed.png" 40 | subject = "[Subject line]" 41 | content = "[Mail content text]" 42 | 43 | send_mail(sender, receiver, subject, content, password, smtp_server, port, image_path) 44 |
-------------------------------------------------------------------------------- /examples/other/misc/upload_sd_to_s3.py: -------------------------------------------------------------------------------- 1 | # This script can be used to 'manually' upload CANedge log files from an SD card to S3. 2 | # The script adds S3 meta data (firmware version and SD timestamp) and derives the correct S3 key.
3 | 4 | import mdf_iter 5 | import canedge_browser 6 | from pathlib import Path 7 | import boto3 8 | from botocore.client import Config 9 | from s3transfer import TransferConfig, S3Transfer 10 | 11 | 12 | # specify devices to process from local disk 13 | devices = ["LOG/958D2219"] 14 | session_offset = 0 # optionally offset the session counter for the uploaded files 15 | 16 | # specify target S3 bucket details 17 | key = "s3_key" 18 | secret = "s3_secret" 19 | endpoint = "s3_endpoint" # e.g. https://s3.eu-central-1.amazonaws.com 20 | bucket = "s3_bucket" 21 | 22 | 23 | # ---------------------------------- 24 | # load all log files from local folder 25 | base_path = Path(__file__).parent 26 | fs = canedge_browser.LocalFileSystem(base_path=base_path) 27 | log_files = canedge_browser.get_log_files(fs, devices) 28 | print(f"Found a total of {len(log_files)} log files") 29 | 30 | s3 = boto3.client( 31 | "s3", endpoint_url=endpoint, aws_access_key_id=key, aws_secret_access_key=secret, config=Config(signature_version="s3v4"), 32 | ) 33 | 34 | transfer = S3Transfer(s3, TransferConfig(multipart_threshold=9999999999999999, max_concurrency=10, num_download_attempts=10,))  # the very high threshold effectively disables multipart uploads 35 | 36 | # for each log file, extract header information, create S3 key and upload 37 | for log_file in log_files: 38 | 39 | with fs.open(log_file, "rb") as handle: 40 | mdf_file = mdf_iter.MdfFile(handle) 41 | header = "HDcomment.Device Information" 42 | 43 | device_id = mdf_file.get_metadata()[f"{header}.serial number"]["value_raw"] 44 | session = mdf_file.get_metadata()["HDcomment.File Information.session"]["value_raw"] 45 | session = f"{(int(session) + session_offset):08}" 46 | split = int(mdf_file.get_metadata()["HDcomment.File Information.split"]["value_raw"]) 47 | split = f"{split:08}" 48 | ext = log_file.split(".")[-1] 49 | 50 | s3_meta_fw = mdf_file.get_metadata()[f"{header}.firmware version"]["value_raw"] 51 | s3_meta_timestamp = mdf_file.get_data_frame().index.min().strftime("%Y%m%dT%H%M%S") 52 | 53 | s3_key = f"{device_id}/{session}/{split}.{ext}" 54 | s3_meta = {"Metadata": {"Fw": s3_meta_fw, "Timestamp": s3_meta_timestamp}} 55 | 56 | # upload local file to S3 (log_file[1:] strips the leading "/") 57 | transfer.upload_file(log_file[1:], key=s3_key, bucket=bucket, extra_args=s3_meta) 58 | print(f"Uploaded {log_file} as {s3_key}") 59 |
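60 | # ------------------------------------------------------------------------ 61 | # Optional verification sketch (assumption - not part of the original flow): 62 | # the uploaded S3 meta data can be read back via head_object to confirm that 63 | # the "Fw" and "Timestamp" fields were stored: 64 | # 65 | # meta = s3.head_object(Bucket=bucket, Key=s3_key)["Metadata"] 66 | # print(meta)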
"https://s3.amazonaws.com" for us-east-1 AWS S3 server 15 | access_key = "CANedgeTestServerAccessKey" 16 | secret_key = "MySecretPassword" 17 | bucket_name = "ce2-source" 18 | region_name = "us-east-1" 19 | 20 | s3 = boto3.resource( 21 | "s3", 22 | endpoint_url=endpoint, 23 | aws_access_key_id=access_key, 24 | aws_secret_access_key=secret_key, 25 | config=Config(signature_version="s3v4"), 26 | region_name=region_name, 27 | ) 28 | bucket = s3.Bucket(bucket_name) 29 | 30 | 31 | # create a selective list of S3 object keys using get_keys (see s3_get_keys.py) 32 | keys = [] 33 | for key in get_keys( 34 | s3=s3, 35 | bucket_name=bucket_name, 36 | prefix="", 37 | suffix=".mf4", 38 | date_start=datetime(2020, 1, 1, 19, 54, 0), 39 | date_end=datetime(2020, 10, 10, 19, 56, 0), 40 | ): 41 | keys.append(key) 42 | 43 | print("\nObject keys: ", keys) 44 | 45 | 46 | # list all device serial numbers in a bucket 47 | devices = [] 48 | result = bucket.meta.client.list_objects_v2(Bucket=bucket_name, Delimiter="/") 49 | for obj in result.get("CommonPrefixes"): 50 | if re.compile("^[0-9a-fA-F]{8}/").match(obj["Prefix"]): 51 | devices.append(obj["Prefix"].split("/")[0]) 52 | 53 | print(f"\nDevices in bucket {bucket_name}: ", devices) 54 | 55 | 56 | # download object from S3 (specify a device connected to your S3 server) 57 | device = "31CB1F25" 58 | s3_key = device + "/device.json" 59 | local_path = s3_key.replace("/", "_") 60 | 61 | try: 62 | bucket.download_file(s3_key, local_path) 63 | print(f"Downloaded S3 object {s3_key} to local path {local_path}") 64 | except: 65 | print(f"Warning: Unable to download {s3_key}") 66 | 67 | # get device S3 meta data object 68 | try: 69 | meta = s3.meta.client.head_object(Bucket=bucket_name, Key=s3_key,)[ 70 | "ResponseMetadata" 71 | ]["HTTPHeaders"] 72 | print(f"S3 meta data of object {s3_key}:", meta) 73 | except: 74 | print(f"Warning: Unable to get meta data of {s3_key}") 75 | 76 | 77 | # upload file to S3 78 | try: 79 | s3_key_upload = s3_key.replace(".json", "-upload.json") 80 | bucket.upload_file(local_path, Key=s3_key_upload) 81 | print(f"Uploaded {local_path} as {s3_key_upload}") 82 | except: 83 | print(f"Warning: Unable to upload {local_path} as {s3_key_upload}") 84 | 85 | # delete object from S3 86 | try: 87 | bucket.Object(s3_key_upload).delete() 88 | print(f"Deleted {s3_key_upload} from S3") 89 | except: 90 | print(f"Warning: Unable to delete {s3_key_upload} from S3") 91 | -------------------------------------------------------------------------------- /examples/other/s3-basics/s3_get_keys.py: -------------------------------------------------------------------------------- 1 | """ 2 | About: This function can be used to extract specific S3 keys, utilzing the CANedge S3 meta data timestamp as 3 | optional segmentation. Note that this timestamp reflects the time a file was created on the device SD card - 4 | not the time it was uploaded. You can ignore the timestamp by leaving the start/end blank. 5 | You can fetch data from a specific device by adding a prefix - or specific file types by adding a suffix. 
6 | """ 7 | from datetime import datetime 8 | 9 | 10 | def get_keys( 11 | s3, bucket_name, prefix="", suffix="", date_start=datetime(1900, 1, 1, 0, 0, 0), date_end=datetime(2100, 1, 1, 0, 0, 0), 12 | ): 13 | kwargs = {"Bucket": bucket_name, "Prefix": prefix} 14 | while True: 15 | resp = s3.meta.client.list_objects_v2(**kwargs) 16 | for obj in resp["Contents"]: 17 | key = obj["Key"] 18 | meta = s3.meta.client.head_object(Bucket=bucket_name, Key=key) 19 | date_time = datetime(1900, 1, 1, 0, 0, 0) 20 | if key.endswith(suffix) and datetime(1900, 1, 1, 0, 0, 0) < date_start and date_end < datetime(2100, 1, 1, 0, 0, 0): 21 | try: 22 | date_time = datetime.strptime( 23 | str(meta["ResponseMetadata"]["HTTPHeaders"]["x-amz-meta-timestamp"]), "%Y%m%dT%H%M%S", 24 | ) 25 | except: 26 | print("Object " + key + " was excluded (no valid meta timestamp)") 27 | date_time = datetime(1800, 1, 1, 0, 0, 0) 28 | if key.endswith(suffix) and date_start <= date_time and date_time <= date_end: 29 | yield key 30 | try: 31 | kwargs["ContinuationToken"] = resp["NextContinuationToken"] 32 | except KeyError: 33 | break 34 | -------------------------------------------------------------------------------- /examples/other/s3-events/README.md: -------------------------------------------------------------------------------- 1 | # S3 events 2 | Often it can be useful to process log files immediately when they're uploaded - e.g. for predictive maintenance or workflow. 3 | 4 | Below, we describe one (of many) ways this can be setup on AWS S3 and MinIO S3, respectively. The examples take can be used to automatically run an MDF4 converter on each uploaded log file, transferring the output into a new bucket. 5 | 6 | ## AWS Lambda 7 | AWS Lambda lets you run code without provisioning or managing servers - learn more [here](https://docs.aws.amazon.com/lambda/latest/dg/welcome.html). 8 | 9 | To test this, you can try the `aws_lambda_mdf_convert.py` code: 10 | 1. Create an [IAM execution role](https://docs.aws.amazon.com/lambda/latest/dg/with-s3-example.html#with-s3-create-execution-role) incl. permissions: `AWSLambdaBasicExecutionRole` + `AmazonS3FullAccess` 11 | 1. Create a target bucket for the converted files 12 | 1. In Services/Lambda add a new function incl. a name, Python 3.7 and your execution role 13 | 1. Add S3 as trigger with your source bucket and 'All object create events' as event type 14 | 1. Set the suffix to match your log file extension, e.g. `.MF4`, `.MFC`, `.MFE`, ... 15 | 1. Download the `aws_lambda_mdf_convert.py` to a folder and update the `target_bucket` 16 | 1. Add a Linux MDF4 converter (update the `converter_name`) and the `passwords.json` file 17 | 1. Zip the folder and upload the content via the AWS Lambda dropdown under Code entry type 18 | 1. Change the Handler field to `aws_lambda_mdf_convert.lambda_handler` and hit Save 19 | 1. Under Basic settings, set the timeout to e.g. 2 minutes (test based on your file size) 20 | 1. Test by uploading a log file from the Home tab in CANcloud (monitor the CloudWatch logs) 21 | 22 | Note: If your deployment package requires additional dependencies, you need to include these in the zip. To do this, you can use `pip install [module] --target .` in the folder. 23 | 24 | ## MinIO Client (Listen Bucket Notifications) 25 | The MinIO Client provides a simple interface to listen to bucket events and react. 26 | 27 | To test this, you can try the `minio_listen_mdf_convert.py` code: 28 | 1. Update the code with relevant suffix, converter path and MinIO server details 29 | 1. 
-------------------------------------------------------------------------------- /examples/other/s3-events/README.md: -------------------------------------------------------------------------------- 1 | # S3 events 2 | Often it can be useful to process log files immediately when they're uploaded - e.g. for predictive maintenance or workflow automation. 3 | 4 | Below, we describe one (of many) ways this can be set up on AWS S3 and MinIO S3, respectively. The examples can be used to automatically run an MDF4 converter on each uploaded log file, transferring the output into a new bucket. 5 | 6 | ## AWS Lambda 7 | AWS Lambda lets you run code without provisioning or managing servers - learn more [here](https://docs.aws.amazon.com/lambda/latest/dg/welcome.html). 8 | 9 | To test this, you can try the `aws_lambda_mdf_convert.py` code: 10 | 1. Create an [IAM execution role](https://docs.aws.amazon.com/lambda/latest/dg/with-s3-example.html#with-s3-create-execution-role) incl. permissions: `AWSLambdaBasicExecutionRole` + `AmazonS3FullAccess` 11 | 1. Create a target bucket for the converted files 12 | 1. In Services/Lambda add a new function incl. a name, Python 3.7 and your execution role 13 | 1. Add S3 as trigger with your source bucket and 'All object create events' as event type 14 | 1. Set the suffix to match your log file extension, e.g. `.MF4`, `.MFC`, `.MFE`, ... 15 | 1. Download `aws_lambda_mdf_convert.py` to a folder and update the `target_bucket` 16 | 1. Add a Linux MDF4 converter (update the `converter_name`) and the `passwords.json` file 17 | 1. Zip the folder and upload the content via the AWS Lambda dropdown under Code entry type 18 | 1. Change the Handler field to `aws_lambda_mdf_convert.lambda_handler` and hit Save 19 | 1. Under Basic settings, set the timeout to e.g. 2 minutes (test based on your file size) 20 | 1. Test by uploading a log file from the Home tab in CANcloud (monitor the CloudWatch logs) 21 | 22 | Note: If your deployment package requires additional dependencies, you need to include these in the zip. To do this, you can use `pip install [module] --target .` in the folder. 23 | 24 | ## MinIO Client (Listen Bucket Notifications) 25 | The MinIO Client provides a simple interface to listen to bucket events and react. 26 | 27 | To test this, you can try the `minio_listen_mdf_convert.py` code: 28 | 1. Update the code with the relevant suffix, converter path and MinIO server details 29 | 1. Run the code alongside your MinIO server, e.g. by adding it to your server startup `*.bat` script 30 |
-------------------------------------------------------------------------------- /examples/other/s3-events/aws_lambda_mdf_convert.py: -------------------------------------------------------------------------------- 1 | """ 2 | About: This is a basic AWS Lambda handler function for event-based processing of uploaded log files - see README for details 3 | Test: Last tested on April 4, 2020 with MDF4 sample data 4 | """ 5 | from __future__ import print_function 6 | import boto3 7 | import subprocess 8 | import glob 9 | import re 10 | 11 | s3 = boto3.client("s3") 12 | 13 | 14 | def lambda_handler(event, context): 15 | # specify the target bucket for the output and the converter name 16 | target_bucket = "ce2-lambda-target" 17 | converter_name = "mdf2asc" 18 | 19 | # load converter and list relevant support_files 20 | converter = glob.glob(converter_name)[0] 21 | support_files = ["passwords.json"] 22 | 23 | # extract source_bucket and key from event 24 | source_bucket = event["Records"][0]["s3"]["bucket"]["name"] 25 | key = event["Records"][0]["s3"]["object"]["key"] 26 | print(f"Event: {key} uploaded to {source_bucket}") 27 | 28 | # download the object to tmp folder 29 | local_key = "/tmp/" + key.replace("/", "%2F") 30 | print(f"Set local_key: {local_key}") 31 | 32 | s3.download_file(source_bucket, key, local_key) 33 | print(f"Downloaded object as {local_key}") 34 | 35 | # move support files and the MDF4 converter to tmp and process the object 36 | for file in support_files: 37 | subprocess.run(["cp", "./" + file, "/tmp"]) 38 | 39 | subprocess.run(["cp", "./" + converter, "/tmp"]) 40 | subprocess.run(["chmod", "+x", "/tmp/" + converter]) 41 | subprocess.run(["/tmp/" + converter, "-i", local_key]) 42 | 43 | # select the converted objects 44 | objects_all = glob.glob("/tmp/*") 45 | objects_conv = [ 46 | obj 47 | for obj in objects_all 48 | if not re.search(f"(passwords.json$|{converter}$|{local_key}$)", obj) 49 | ] 50 | 51 | print("All objects in tmp/:", objects_all) 52 | print("Converted objects:", objects_conv) 53 | 54 | # upload the converted objects to target destination 55 | for obj in objects_conv: 56 | # (optionally add e.g.
analytics-based conditioning here) 57 | target_key = obj.replace("/tmp/", "").replace("%2F", "/") 58 | s3.upload_file(obj, target_bucket, target_key) 59 | print(f"Uploaded {obj} to {target_bucket} as {target_key}") 60 |
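61 | # ------------------------------------------------------------------------ 62 | # Local smoke-test sketch (assumption - not part of the deployed function): 63 | # the handler can be invoked with a fake S3 event dict that mirrors the 64 | # structure parsed above (the bucket and key below are placeholders): 65 | # 66 | # fake_event = {"Records": [{"s3": { 67 | #     "bucket": {"name": "ce2-source"}, 68 | #     "object": {"key": "31CB1F25/00000001/00000001.MF4"}}}]} 69 | # lambda_handler(fake_event, None)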
-------------------------------------------------------------------------------- /examples/other/s3-events/mdf2csv.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSS-Electronics/api-examples/d1e2ebe35ba4321deae58105ab0a0c0562c9171f/examples/other/s3-events/mdf2csv.exe
-------------------------------------------------------------------------------- /examples/other/s3-events/minio_listen_mdf_convert.py: -------------------------------------------------------------------------------- 1 | """ 2 | About: This is a basic MinIO 'listen bucket notification' function for event-based processing of uploaded log files - see README for details 3 | Note that this example uses the MinIO S3 SDK as listen_bucket_notification is specific to MinIO - but the AWS S3 SDK could have been used for the rest 4 | Test: Last tested on April 4, 2020 with MDF4 sample data 5 | """ 6 | 7 | from minio import Minio 8 | import glob, subprocess, re, tempfile, os 9 | 10 | # variables 11 | prefix = "" # use to optionally specify a specific device 12 | suffix = ".MF4" # set to match the file extension your devices are uploading with (.MF4, .MFE, .MFM) 13 | 14 | endpoint = "127.0.0.1:9000" 15 | access_key = "CANedgeTestServerAccessKey" 16 | secret_key = "MySecretPassword" 17 | source_bucket = "ce2-source" 18 | secure = False 19 | 20 | # if using TLS (HTTPS), set secure = True and set the path below to your public.crt certificate: 21 | # os.environ["SSL_CERT_FILE"] = "C:\\Users\\marti\\.minio\\certs\\public.crt" 22 | 23 | target_bucket = "ce2-target" 24 | converter = "mdf2csv.exe" 25 | 26 | client = Minio(endpoint, access_key=access_key, secret_key=secret_key, secure=secure) 27 | 28 | # listen to events 29 | events = client.listen_bucket_notification( 30 | source_bucket, prefix, suffix, ["s3:ObjectCreated:*"] 31 | ) 32 | 33 | print("Initialized - awaiting events ... [CTRL + C to exit]\n") 34 | 35 | for event in events: 36 | # extract source_bucket and key from event 37 | source_bucket = event["Records"][0]["s3"]["bucket"]["name"] 38 | key = event["Records"][0]["s3"]["object"]["key"].replace("%2F", "/") 39 | print(f"\n\nEvent: {key} uploaded to {source_bucket}") 40 | 41 | # download the object to tmp folder 42 | f = tempfile.TemporaryDirectory() 43 | tmp = f.name + "\\" 44 | local_key = tmp + key.replace("/", "%2F") 45 | print(f"Set local_key: {local_key}") 46 | 47 | client.fget_object(source_bucket, key, local_key) 48 | print(f"Downloaded object as {local_key}") 49 | 50 | # convert the object 51 | subprocess.run([converter, "-i", local_key]) 52 | 53 | # select the converted objects 54 | objects_all = glob.glob(tmp + "*") 55 | objects_conv = [ 56 | obj 57 | for obj in objects_all 58 | if not re.search(f"(passwords.json$|.exe$|{suffix}$)", obj) 59 | ] 60 | print("Objects in temporary directory:\n", objects_all) 61 | print("Successfully converted objects:\n", objects_conv) 62 | 63 | # upload the converted objects to target destination 64 | for obj in objects_conv: 65 | # (optionally add e.g. analytics-based conditioning here) 66 | target_key = obj.replace(tmp, "").replace("%2F", "/") 67 | client.fput_object(target_bucket, target_key, obj) 68 | print(f"Uploaded {obj} to {target_bucket} as {target_key}") 69 | 70 | f.cleanup() 71 |
-------------------------------------------------------------------------------- /examples/other/s3-events/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3==1.14.41 2 | botocore==1.17.41 3 | certifi==2020.6.20 4 | configparser==5.0.0 5 | docutils==0.15.2 6 | jmespath==0.10.0 7 | minio==6.0.0 8 | python-dateutil==2.8.1 9 | pytz==2020.1 10 | s3transfer==0.3.3 11 | six==1.15.0 12 | urllib3==1.26.5 13 |
--------------------------------------------------------------------------------