├── .gitattributes ├── .gitignore ├── LICENSE ├── README.md ├── main.py └── sample_data ├── sample data.txt └── sample with 10 records.txt /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 105 | __pypackages__/ 106 | 107 | # Celery stuff 108 | celerybeat-schedule 109 | celerybeat.pid 110 | 111 | # SageMath parsed files 112 | *.sage.py 113 | 114 | # Environments 115 | .env 116 | .venv 117 | env/ 118 | venv/ 119 | ENV/ 120 | env.bak/ 121 | venv.bak/ 122 | 123 | # Spyder project settings 124 | .spyderproject 125 | .spyproject 126 | 127 | # Rope project settings 128 | .ropeproject 129 | 130 | # mkdocs documentation 131 | /site 132 | 133 | # mypy 134 | .mypy_cache/ 135 | .dmypy.json 136 | dmypy.json 137 | 138 | # Pyre type checker 139 | .pyre/ 140 | 141 | # pytype static type analyzer 142 | .pytype/ 143 | 144 | # Cython debug symbols 145 | cython_debug/ 146 | 147 | # PyCharm 148 | # JetBrains specific template is maintainted in a separate JetBrains.gitignore that can 149 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 150 | # and can be added to the global gitignore or merged into this file. For a more nuclear 151 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
152 | #.idea/ 153 | data/data.json 154 | data/default_IMEI/default_IMEI_data.json 155 | data/default_IMEI/default_IMEI_RAWdata.json 156 | data/file_Write_Test/file_Write_Test_data.json 157 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Justas Belevičius 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg?style=flat-square) 2 | ![Python](https://img.shields.io/badge/Python-3.11.2-blue) 3 | ![Gluten Status](https://img.shields.io/badge/Gluten-Free-green.svg) 4 | ![Eco Status](https://img.shields.io/badge/ECO-Friendly-green.svg) 5 | 6 | # Teltonika Codec 8 / Codec 8 Extended TCP Server and Parser 7 | 8 | Personal Hobby project with Python. 9 | 10 | Simple TCP server which listens for Teltonika trackers communicating via Codec 8 Extended using TCP 11 | 12 | Server waits for device to send IMEI then responds with "01", device then must send Codec 8 Extended packet, server responds with records number. 13 | 14 | Codec 8 documentation can be found via the URL below: 15 | https://wiki.teltonika-gps.com/view/Codec 16 | 17 | ~~Server - Device communication must work but not yet tested~~ - works 18 | 19 | AVL IDs are parsed, ~~AVL Data - not yet parsed~~ - partially parsed 20 | 21 | AVL IDs and Raw values are saved in ./data/data.json 22 | 23 | ## Project Features: 24 | 25 | - TCP communication - done! 26 | - Codec 8 Extended structure parsing - done! 27 | - Codec 8 structure parsing - done! 28 | - Saving AVL IDs with RAW/Parsed values to JSON - done! 29 | - TCP Server and Parser functionality - done! 30 | - CRC16 checking to detect corrupted packets - done! 31 | - RAW DATA saving to separate .JSON - done! 32 | - Separate .JSON files for different IMEIs - done! 33 | 34 | ## Planned Features - not done yet: 35 | 36 | - Codec 8 (not extended support) - ~~not done yet~~ - done! 37 | - AVL IDs value parsing - ~~not done yet~~ - progress ongoing 38 | - Make server multithreaded (code rewrite may be required) - not done yet 39 | - Separate .JSON files for different IMEIs - ~~not done yet~~ - done! 40 | - More... 
- not done yet 41 | 42 | ### How to test?: 43 | 44 | - Install latest Python version from: https://www.python.org/ 45 | - Download "main.py" ~~and Data folder: ./data/data.json~~ - now created automatically 46 | - Open "main.py" with any text editor, change 'port' to YOUR open Port 47 | - Run "main.py" via terminal 48 | - Follow on screen instructions 49 | 50 | ## Authors: 51 | [Justas](https://github.com/Justas1988)
# [LinkedIn](https://www.linkedin.com/in/justas-belevi%C4%8Dius-4a5485219/)
# -------------------------------------------------------------------------------- /main.py: --------------------------------------------------------------------------------
import socket
import json
import os
import datetime
import struct
import decimal

# Resolving the local hostname can fail on some Linux setups (no matching
# /etc/hosts entry); fall back to loopback instead of crashing at import time.
# Change HOST to a string with your IP address if needed, example: "192.168.0.1"
try:
    HOST = socket.gethostbyname(socket.gethostname())
except socket.error:
    HOST = "127.0.0.1"
PORT = 7494  # change this to your port


def input_trigger():
    """Interactive entry point: parse a pasted Codec 8 packet, start the server, or exit."""
    print("Paste full 'Codec 8' packet to parse it or:")
    print("Type SERVER to start the server or:")
    print("Type EXIT to stop the program")
    device_imei = "default_IMEI"
    user_input = input("waiting for input: ")
    if user_input.upper() == "EXIT":
        print(f"exiting program............")
        exit()
    elif user_input.upper() == "SERVER":
        start_server_trigger()
    else:
        try:
            if codec_8e_checker(user_input.replace(" ", "")) == False:
                print("Wrong input or invalid Codec8 packet")
                print()
                input_trigger()
            else:
                codec_parser_trigger(user_input, device_imei, "USER")
        except Exception as e:
            print(f"error occured: {e} enter proper Codec8 packet or EXIT!!!")
            input_trigger()

####################################################
###############__CRC16/ARC Checker__################
####################################################

def crc16_arc(data):
    """Validate the CRC-16/ARC trailer of a Codec 8 packet.

    `data` is the whole packet as a hex string.  The CRC is computed over the
    AVL data field (whose byte length is stored at data[8:16]) and compared
    with the 4-byte CRC field that follows it.  Returns True on match.
    """
    data_part_length_crc = int(data[8:16], 16)
    data_part_for_crc = bytes.fromhex(data[16:16 + 2 * data_part_length_crc])
    crc_field_start = 16 + 2 * data_part_length_crc
    crc16_arc_from_record = data[crc_field_start:crc_field_start + 8]

    crc = 0
    for byte in data_part_for_crc:
        crc ^= byte
        for _ in range(8):
            if crc & 1:
                crc = (crc >> 1) ^ 0xA001  # CRC-16/ARC reflected polynomial
            else:
                crc >>= 1

    if crc16_arc_from_record.upper() == crc.to_bytes(4, byteorder='big').hex().upper():
        print("CRC check passed!")
        print(f"Record length: {len(data)} characters // {int(len(data)/2)} bytes")
        return True
    else:
        print("CRC check Failed!")
        return False

####################################################

def codec_8e_checker(codec8_packet):
    """Return True when the packet's codec id is 08/8E and its CRC is valid."""
    if str(codec8_packet[16:16 + 2]).upper() != "8E" and str(codec8_packet[16:16 + 2]).upper() != "08":
        print()
        print(f"Invalid packet!!!!!!!!!!!!!!!!!!!")
        return False
    else:
        return crc16_arc(codec8_packet)

def codec_parser_trigger(codec8_packet, device_imei, props):
    """Strip spaces and forward the packet to the parser; fall back to the prompt on error."""
    try:
        return codec_8e_parser(codec8_packet.replace(" ", ""), device_imei, props)
    except Exception as e:
        print(f"Error occured: {e} enter proper Codec8 packet or EXIT!!!")
        input_trigger()

def imei_checker(hex_imei):
    """Check that hex_imei is a length-prefixed, 15-digit numeric IMEI message."""
    imei_length = int(hex_imei[:4], 16)
    # the 2-byte prefix must match the number of ASCII bytes that follow
    if imei_length != len(hex_imei[4:]) / 2:
        return False
    else:
        pass

    ascii_imei = ascii_imei_converter(hex_imei)
    print(f"IMEI received = {ascii_imei}")
    if not ascii_imei.isnumeric() or len(ascii_imei) != 15:
        print(f"Not an IMEI - is not numeric or wrong length!")
        return False
    else:
        return True

def ascii_imei_converter(hex_imei):
    """Decode the ASCII IMEI that follows the 2-byte length prefix."""
    return bytes.fromhex(hex_imei[4:]).decode()

def start_server_trigger():
    """Run the blocking, single-threaded TCP server loop (IMEI handshake + packet parsing)."""
    print("Starting server!")
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind((HOST, PORT))
        s.listen()  # listen once; re-calling it on every accept iteration was redundant
        while True:
            print(f"// {time_stamper()} // listening port: {PORT} // IP: {HOST}")
            conn, addr = s.accept()
            conn.settimeout(20)  # close the socket if no DATA is received for 20 seconds
            with conn:
                print(f"// {time_stamper()} // Connected by {addr}")
                device_imei = "default_IMEI"
                while True:
                    try:
                        data = conn.recv(1280)
                        print(f"// {time_stamper()} // data received = {data.hex()}")
                        if not data:
                            break
                        elif imei_checker(data.hex()) != False:
                            device_imei = ascii_imei_converter(data.hex())
                            imei_reply = (1).to_bytes(1, byteorder="big")
                            conn.sendall(imei_reply)
                            print(f"-- {time_stamper()} sending reply = {imei_reply}")
                        elif codec_8e_checker(data.hex().replace(" ", "")) != False:
                            record_number = codec_parser_trigger(data.hex(), device_imei, "SERVER")
                            print(f"received records {record_number}")
                            print(f"from device IMEI = {device_imei}")
                            print()
                            record_response = (record_number).to_bytes(4, byteorder="big")
                            conn.sendall(record_response)
                            print(f"// {time_stamper()} // response sent = {record_response.hex()}")
                        else:
                            print(f"// {time_stamper()} // no expected DATA received - dropping connection")
                            break
                    except socket.timeout:
                        print(f"// {time_stamper()} // Socket timed out. Closing connection with {addr}")
                        break

####################################################
###############_Codec8E_parser_code_################
####################################################

def _parse_fixed_io_group(avl_data, position, data_step, value_hex_len, group_label, io_dict):
    """Parse one fixed-width IO group (element count, then id/value pairs).

    Stores parsed values in io_dict keyed by the integer AVL id and returns
    the updated hex-string position.  Replaces four copy-pasted loops.
    """
    count_hex = avl_data[position:position + data_step]
    count = int(count_hex, 16)
    print(f"{group_label} byte io count = {count}")
    position += len(count_hex)
    for _ in range(count):
        key = avl_data[position:position + data_step]
        position += len(key)
        value = avl_data[position:position + value_hex_len]
        io_dict[int(key, 16)] = sorting_hat(int(key, 16), value)
        position += len(value)
        print(f"avl_ID: {int(key, 16)} : {io_dict[int(key, 16)]}")
    return position

def codec_8e_parser(codec_8E_packet, device_imei, props):
    """Parse a Codec 8 / Codec 8 Extended packet (hex string) and persist records to JSON.

    Returns the AVL record count when props == "SERVER"; otherwise returns to
    the interactive prompt when done.
    """
    print()

    io_dict_raw = {}
    io_dict_raw["device_IMEI"] = device_imei
    io_dict_raw["server_time"] = time_stamper_for_json()
    io_dict_raw["data_length"] = "Record length: " + str(int(len(codec_8E_packet))) + " characters" + " // " + str(int(len(codec_8E_packet) // 2)) + " bytes"
    io_dict_raw["_raw_data__"] = codec_8E_packet

    try:  # writing raw DATA dictionary to ./data/<imei>/<imei>_RAWdata.json
        json_printer_rawDATA(io_dict_raw, device_imei)
    except Exception as e:
        print(f"JSON raw data writing error occured = {e}")

    zero_bytes = codec_8E_packet[:8]
    print()
    print(str("zero bytes = " + zero_bytes))

    data_field_length = int(codec_8E_packet[8:8 + 8], 16)
    print(f"data field length = {data_field_length} bytes")
    codec_type = str(codec_8E_packet[16:16 + 2])
    print(f"codec type = {codec_type}")

    # Codec 8 uses 1-byte IO ids/counts (2 hex chars); Codec 8E uses 2 bytes (4 chars)
    data_step = 2 if codec_type == "08" else 4

    number_of_records = int(codec_8E_packet[18:18 + 2], 16)
    print(f"number of records = {number_of_records}")

    record_number = 1
    avl_data_start = codec_8E_packet[20:]
    data_field_position = 0
    # data_field_length counts bytes from the codec id up to and including the
    # trailing record count; stop before that 1-byte trailer (6 hex chars of
    # codec id + two record-count bytes are excluded from avl_data_start math).
    while data_field_position < (2 * data_field_length - 6):
        io_dict = {}
        io_dict["device_IMEI"] = device_imei
        io_dict["server_time"] = time_stamper_for_json()
        print()
        print(f"data from record {record_number}")
        print(f"########################################")

        timestamp = avl_data_start[data_field_position:data_field_position + 16]
        io_dict["_timestamp_"] = device_time_stamper(timestamp)
        print(f"timestamp = {device_time_stamper(timestamp)}")
        io_dict["_rec_delay_"] = record_delay_counter(timestamp)
        data_field_position += len(timestamp)

        priority = avl_data_start[data_field_position:data_field_position + 2]
        io_dict["priority"] = int(priority, 16)
        print(f"record priority = {int(priority, 16)}")
        data_field_position += len(priority)

        longitude = avl_data_start[data_field_position:data_field_position + 8]
        io_dict["longitude"] = coordinate_formater(longitude)
        print(f"longitude = {coordinate_formater(longitude)}")
        data_field_position += len(longitude)

        latitude = avl_data_start[data_field_position:data_field_position + 8]
        io_dict["latitude"] = coordinate_formater(latitude)
        print(f"latitude = {coordinate_formater(latitude)}")
        data_field_position += len(latitude)

        altitude = avl_data_start[data_field_position:data_field_position + 4]
        print(f"altitude = {int(altitude, 16)}")
        io_dict["altitude"] = int(altitude, 16)
        data_field_position += len(altitude)

        angle = avl_data_start[data_field_position:data_field_position + 4]
        print(f"angle = {int(angle, 16)}")
        io_dict["angle"] = int(angle, 16)
        data_field_position += len(angle)

        satelites = avl_data_start[data_field_position:data_field_position + 2]
        print(f"satelites = {int(satelites, 16)}")
        io_dict["satelites"] = int(satelites, 16)
        data_field_position += len(satelites)

        speed = avl_data_start[data_field_position:data_field_position + 4]
        io_dict["speed"] = int(speed, 16)
        print(f"speed = {int(speed, 16)}")
        data_field_position += len(speed)

        event_io_id = avl_data_start[data_field_position:data_field_position + data_step]
        io_dict["eventID"] = int(event_io_id, 16)
        print(f"event ID = {int(event_io_id, 16)}")
        data_field_position += len(event_io_id)

        total_io_elements = avl_data_start[data_field_position:data_field_position + data_step]
        total_io_elements_parsed = int(total_io_elements, 16)
        print(f"total I/O elements in record {record_number} = {total_io_elements_parsed}")
        data_field_position += len(total_io_elements)

        # fixed-width IO groups: 1-, 2-, 4- and 8-byte values
        for group_label, value_hex_len in (("1", 2), ("2", 4), ("4", 8), ("8", 16)):
            data_field_position = _parse_fixed_io_group(
                avl_data_start, data_field_position, data_step, value_hex_len, group_label, io_dict)

        if codec_type.upper() == "8E":
            # Codec 8E adds a variable-length IO group:
            # count (2B), then per element: id (2B), value length (2B), value
            byteX_io_number = avl_data_start[data_field_position:data_field_position + 4]
            byteX_io_number_parsed = int(byteX_io_number, 16)
            print(f"X byte io count = {byteX_io_number_parsed}")
            data_field_position += len(byteX_io_number)
            for _ in range(byteX_io_number_parsed):
                key = avl_data_start[data_field_position:data_field_position + 4]
                data_field_position += len(key)
                value_length = avl_data_start[data_field_position:data_field_position + 4]
                data_field_position += 4
                value = avl_data_start[data_field_position:data_field_position + (2 * (int(value_length, 16)))]
                io_dict[int(key, 16)] = sorting_hat(int(key, 16), value)
                data_field_position += len(value)
                print(f"avl_ID: {int(key, 16)} : {io_dict[int(key, 16)]}")

        record_number += 1

        try:  # writing dictionary to ./data/<imei>/<imei>_data.json
            json_printer(io_dict, device_imei)
        except Exception as e:
            print(f"JSON writing error occured = {e}")

    total_records_parsed = int(avl_data_start[data_field_position:data_field_position + 2], 16)
    print()
    print(f"total parsed records = {total_records_parsed}")
    print()
    if props == "SERVER":
        return int(number_of_records)
    else:
        input_trigger()

####################################################
###############_End_of_MAIN_Parser_Code#############
####################################################

####################################################
###############_Coordinates_Function_###############
| #################################################### 377 | 378 | def coordinate_formater(hex_coordinate): # Fixed :), hopefuly this works for you too - https://stackoverflow.com/questions/36506910/convert-integer-to-lat-long-geo-position 379 | coordinate = int(hex_coordinate, 16) 380 | if coordinate & (1 << 31): 381 | new_int = coordinate - 2**32 382 | dec_coordinate = new_int/1e7 383 | else: 384 | dec_coordinate = coordinate / 10000000 385 | return dec_coordinate 386 | 387 | 388 | 389 | #################################################### 390 | ###############____JSON_Functions____############### 391 | #################################################### 392 | 393 | def json_printer(io_dict, device_imei): #function to write JSON file with data 394 | json_data = json.dumps(io_dict, indent=4) 395 | data_path = "./data/" + str(device_imei) 396 | json_file = str(device_imei) + "_data.json" 397 | 398 | if not os.path.exists(data_path): 399 | os.makedirs(data_path) 400 | else: 401 | pass 402 | 403 | if not os.path.exists(os.path.join(data_path, json_file)): 404 | with open(os.path.join(data_path, json_file), "w") as file: 405 | file.write(json_data) 406 | else: 407 | with open(os.path.join(data_path, json_file), "a") as file: 408 | file.write(json_data) 409 | return 410 | 411 | def json_printer_rawDATA(io_dict_raw, device_imei): #function to write JSON file with data 412 | # print (io_dict_raw) 413 | json_data = json.dumps(io_dict_raw, indent=4) 414 | data_path = "./data/" + str(device_imei) 415 | json_file = str(device_imei) + "_RAWdata.json" 416 | 417 | if not os.path.exists(data_path): 418 | os.makedirs(data_path) 419 | else: 420 | pass 421 | 422 | if not os.path.exists(os.path.join(data_path, json_file)): 423 | with open(os.path.join(data_path, json_file), "w") as file: 424 | file.write(json_data) 425 | else: 426 | with open(os.path.join(data_path, json_file), "a") as file: 427 | file.write(json_data) 428 | return 429 | 
####################################################
###############____TIME_FUNCTIONS____###############
####################################################

def time_stamper():
    """Return the current local time formatted for console log lines."""
    current_server_time = datetime.datetime.now()
    server_time_stamp = current_server_time.strftime('%H:%M:%S %d-%m-%Y')
    return server_time_stamp

def time_stamper_for_json():
    """Return combined 'local / utc' wall-clock strings for JSON records."""
    current_server_time = datetime.datetime.now()
    timestamp_utc = datetime.datetime.utcnow()
    server_time_stamp = f"{current_server_time.strftime('%H:%M:%S %d-%m-%Y')} (local) / {timestamp_utc.strftime('%H:%M:%S %d-%m-%Y')} (utc)"
    return server_time_stamp

def device_time_stamper(timestamp):
    """Convert a device timestamp (hex, milliseconds since epoch) to 'local / utc' strings."""
    timestamp_ms = int(timestamp, 16) / 1000
    timestamp_utc = datetime.datetime.utcfromtimestamp(timestamp_ms)
    # derive the server's UTC offset and apply it to present device-local time
    utc_offset = datetime.datetime.fromtimestamp(timestamp_ms) - datetime.datetime.utcfromtimestamp(timestamp_ms)
    timestamp_local = timestamp_utc + utc_offset
    formatted_timestamp_local = timestamp_local.strftime("%H:%M:%S %d-%m-%Y")
    formatted_timestamp_utc = timestamp_utc.strftime("%H:%M:%S %d-%m-%Y")
    formatted_timestamp = f"{formatted_timestamp_local} (local) / {formatted_timestamp_utc} (utc)"

    return formatted_timestamp

def record_delay_counter(timestamp):
    """Return how many seconds ago the device timestamp (hex ms) was, as a string."""
    timestamp_ms = int(timestamp, 16) / 1000
    current_server_time = datetime.datetime.now().timestamp()
    return f"{int(current_server_time - timestamp_ms)} seconds"

####################################################
###############_PARSE_FUNCTIONS_CODE_###############
####################################################

def parse_data_integer(data):
    """Parse a hex string as an unsigned integer."""
    return int(data, 16)

def int_multiply_01(data):
    """Parse a hex string as an integer scaled by 0.1 (Decimal avoids float drift)."""
    return float(decimal.Decimal(int(data, 16)) * decimal.Decimal('0.1'))

def int_multiply_001(data):
    """Parse a hex string as an integer scaled by 0.01."""
    return float(decimal.Decimal(int(data, 16)) * decimal.Decimal('0.01'))

def int_multiply_0001(data):
    """Parse a hex string as an integer scaled by 0.001."""
    return float(decimal.Decimal(int(data, 16)) * decimal.Decimal('0.001'))

def signed_no_multiply(data):
    """Parse a hex string as a big-endian two's-complement signed integer.

    Bug fix: the previous implementation zero-padded every value to 4 bytes
    and unpacked '>i', so 1- and 2-byte negative values lost their sign
    (e.g. axis IDs 17/18/19 sending 'FF38' parsed as 65336 instead of -200).
    Sign-extending from the value's real width parses all widths correctly;
    4-byte values behave exactly as before.
    """
    try:
        raw = bytes.fromhex(data if len(data) % 2 == 0 else "0" + data)
        return int.from_bytes(raw, byteorder="big", signed=True)
    except Exception as e:
        print(f"unexpected value received in function '{data}' error: '{e}' will leave unparsed value!")
        return f"0x{data}"

# Maps AVL ID -> parse function; simply add new AVL IDs and their functions here.
parse_functions_dictionary = {

    240: parse_data_integer,
    239: parse_data_integer,
    80: parse_data_integer,
    21: parse_data_integer,
    200: parse_data_integer,
    69: parse_data_integer,
    181: int_multiply_01,
    182: int_multiply_01,
    66: int_multiply_0001,
    24: parse_data_integer,
    205: parse_data_integer,
    206: parse_data_integer,
    67: int_multiply_0001,
    68: int_multiply_0001,
    241: parse_data_integer,
    299: parse_data_integer,
    16: parse_data_integer,
    1: parse_data_integer,
    9: parse_data_integer,
    179: parse_data_integer,
    12: int_multiply_0001,
    13: int_multiply_001,
    17: signed_no_multiply,
    18: signed_no_multiply,
    19: signed_no_multiply,
    11: parse_data_integer,
    10: parse_data_integer,
    2: parse_data_integer,
    3: parse_data_integer,
    6: int_multiply_0001,
    180: parse_data_integer

}

def sorting_hat(key, value):
    """Dispatch an AVL ID's raw hex value to its parser; unknown IDs stay as '0x…' strings."""
    if key in parse_functions_dictionary:
        parse_function = parse_functions_dictionary[key]
        return parse_function(value)
    else:
        return f"0x{value}"

####################################################

def fileAccessTest():
    """Check that the script can create ./data folders and files, then enter the input loop."""
    try:
        testDict = {}
        testDict["_Writing_Test_"] = "Writing_Test"
        testDict["Script_Started"] = time_stamper_for_json()

        json_printer(testDict, "file_Write_Test")

        print(f"---### File access test passed! ###---")
        input_trigger()

    except Exception as e:
        print()
        print(f"---### File access error occured ###---")
        print(f"'{e}'")
        print(f"---### Try running terminal with Administrator rights! ###---")
        print(f"---### Nothing will be saved if you decide to continue! ###---")
        print()
        input_trigger()


def main():
    fileAccessTest()

if __name__ == "__main__":
    main()
testDict["Script_Started"] = time_stamper_for_json() 535 | 536 | json_printer(testDict, "file_Write_Test") 537 | 538 | print (f"---### File access test passed! ###---") 539 | input_trigger() 540 | 541 | except Exception as e: 542 | print () 543 | print (f"---### File access error occured ###---") 544 | print (f"'{e}'") 545 | print (f"---### Try running terminal with Administrator rights! ###---") 546 | print (f"---### Nothing will be saved if you decide to continue! ###---") 547 | print () 548 | input_trigger() 549 | 550 | 551 | def main(): 552 | fileAccessTest() 553 | 554 | if __name__ == "__main__": 555 | main() 556 | -------------------------------------------------------------------------------- /sample_data/sample data.txt: -------------------------------------------------------------------------------- 1 | Data received(29/03/2023 09:28:55 size: 984B tcp) 2 | 00000000000003CC8E03000001872BC9BC7000FFB833351EADCC01001F001B13001000000037000F00EF0100F00100500100150500C80000450100010100B300005119005206005900006F0000720000980000A000001100B5000900B60005004238850018001C00CE182E00430FFA00440000001100100012FEE70013001700540000005504DA005A0000006E0000007000000073000000970000000D00C70A021CCD0010F7A39A1A027C00273A5A00530000000000570504169000640000325700660000000000670000F395006900032618006B00000000007B90300000008500000000008600000000000900653030200F92004D000084510000000310000700930000000000000000009400000000000000000095000000000000000000960000000000000000020500000000000000000206000000000000000002070000000000000000000101840000000001872BC9C05800FFB833991EADCE380020000F13001400000037000F00EF0100F00100500100150500C80000450100010100B300005119005206005900006F0000720000980000A000001100B5000900B60005004238850018001C00CE182E00430FFA00440000001100100012FEE70013001700540000005504DA005A0000006E0000007000000073000000970000000D00C70A021CCD0010F7A39A1A027C00273A5A00530000000000570504169000640000325700660000000000670000F395006900032618006B00000000007B90300000008500000000008600000000000900
653030200F92004D000084510000000310000700930000000000000000009400000000000000000095000000000000000000960000000000000000020500000000000000000206000000000000000002070000000000000000000101840000000001872BC9C44000FFB833031EADD1580020000113001900000037000F00EF0100F00100500100150500C80000450100010100B300005119005206005900006F0000720000980000A000001100B5000900B60005004238850018001C00CE182E00430FFA00440000001100100012FEE70013001700540000005504DA005A0000006E0000007000000073000000970000000D00C70A021CCD0010F7A39A1A027C00273A5A00530000000000570504169000640000325700660000000000670000F395006900032618006B00000000007B90300000008500000000008600000000000900653030200F92004D000084510000000310000700930000000000000000009400000000000000000095000000000000000000960000000000000000020500000000000000000206000000000000000002070000000000000000000101840000030000CD45 3 | -------------------------------------------------------------------------------- /sample_data/sample with 10 records.txt: -------------------------------------------------------------------------------- 1 | 
00000000000003558e0a000001876f6d9000000f0db045209591780075011f0600080000000c000500ef0100f00100150500c800004501000500b5001b00b6001400423a3700430fe100440000000200f10000601a00100000248f00000000000001876f6dbee0000f0db22820959178007300770600070000000c000500ef0100f00100150500c800004501000500b5001d00b6001500423a3800430fe200440000000200f10000601a00100000249800000000000001876f6dda38000f0db08720959329007301260800080000000c000500ef0100f00100150500c800004501000500b5001100b6000a00423a3500430fe200440000000200f10000601a00100000249c00000000000001876f6e2470000f0dac2b2095964a007701040a00060000000c000500ef0100f00100150500c800004501000500b5000b00b6000800423a2c00430fe200440000000200f10000601a0010000024a900000000000001876f6e3410000f0da77b209596ad007901150a000f0000000c000500ef0100f00100150500c800004501000500b5001100b6000800423a2d00430fe100440000000200f10000601a0010000024b000000000000001876f6e3be0000f0da267209597fb007b011f0a00130000000c000500ef0100f00100150500c800004501000500b5001100b6000800423a3200430fe200440000000200f10000601a0010000024b900000000000001876f6e5350000f0d9907209599de008101060800080000000c000500ef0100f00100150500c800004501000500b5001300b6000b00423a3400430fe200440000000200f10000601a0010000024ca00000000000001876f6e7a60000f0d9e4d209598b2008100800900060000000c000500ef0100f00100150500c800004501000500b5000c00b6000900423a3200430fe200440000000200f10000601a0010000024d400000000000001876f6e7e48000f0da2672095986f0081007409000b0000000c000500ef0100f00100150500c800004501000500b5001100b6000a00423a2e00430fe200440000000200f10000601a0010000024db00000000000001876f6e8618000f0da8752095984e0080006a0900070000000c000500ef0100f00100150500c800004501000500b5001100b6000a00423a1900430fdf00440000000200f10000601a0010000024e5000000000a00005243 --------------------------------------------------------------------------------