├── .github ├── FUNDING.yml └── workflows │ ├── acd-tools.yml │ └── release.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── acd ├── __init__.py ├── api.py ├── database │ ├── __init__.py │ ├── acd_database.py │ ├── dbextract.py │ └── sql_database_provider.py ├── exceptions │ ├── CompsRecordException.py │ └── __init__.py ├── generated │ ├── __init__.py │ ├── comments │ │ ├── __init__.py │ │ └── fafa_coments.py │ ├── comps │ │ ├── __init__.py │ │ ├── fafa_comps.py │ │ ├── fdfd_comps.py │ │ ├── rx_generic.py │ │ └── rx_tag.py │ ├── controller │ │ ├── __init__.py │ │ └── rx_controller.py │ ├── dat.py │ ├── map_device │ │ ├── __init__.py │ │ └── rx_map_device.py │ └── sbregion │ │ ├── __init__.py │ │ └── fafa_sbregions.py ├── l5x │ ├── __init__.py │ ├── elements.py │ └── export_l5x.py ├── record │ ├── __init__.py │ ├── comments.py │ ├── comps.py │ ├── nameless.py │ └── sbregion.py └── zip │ ├── __init__.py │ └── unzip.py ├── resources ├── ACDTestsEmptyRedundant.ACD ├── ACDTestsEmptyRedundant.L5X ├── ACDTestsFilledRedundant.ACD ├── ACDTestsNonRedundant.ACD ├── ACDTestsWithAOI.ACD ├── ACDTestsWithAOI.L5X ├── CuteLogix.ACD ├── Kaitai Struct │ ├── comps.ksy │ └── sbregion.ksy ├── ProjectTemplate.ACD ├── Test_IO.ACD └── templates │ ├── Comments │ └── FAFA_Comments.ksy │ ├── Comps │ ├── FAFA_Comps.ksy │ ├── FDFD_Comps.ksy │ └── RxGeneric.ksy │ ├── Dat │ └── Dat.ksy │ └── SbRegion │ └── FAFA_SbRegion.ksy ├── setup.cfg ├── setup.py └── test ├── __init__.py ├── test_api.py ├── test_database.py ├── test_extract_database.py └── test_unzip.py /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | [hutcheb] 4 | -------------------------------------------------------------------------------- /.github/workflows/acd-tools.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python 
dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: build 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | build: 17 | strategy: 18 | matrix: 19 | os: [ ubuntu-latest ] 20 | python-version: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ] 21 | 22 | runs-on: ${{ matrix.os }} 23 | 24 | steps: 25 | - uses: actions/checkout@v3 26 | - name: Set up Python 27 | uses: actions/setup-python@v3 28 | with: 29 | python-version: '${{ matrix.python-version }}' 30 | - name: Install dependencies 31 | run: | 32 | curl -LO https://github.com/kaitai-io/kaitai_struct_compiler/releases/download/0.10/kaitai-struct-compiler_0.10_all.deb 33 | sudo apt-get install ./kaitai-struct-compiler_0.10_all.deb 34 | python -m pip install --upgrade pip 35 | pip install flake8 pytest 36 | pip install . ./[dev] 37 | - name: Lint with flake8 38 | run: | 39 | # stop the build if there are Python syntax errors or undefined names 40 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 41 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 42 | flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 43 | - name: Test with pytest 44 | run: | 45 | cd test 46 | pytest 47 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | 2 | on: push 3 | 4 | jobs: 5 | build: 6 | name: Build distribution 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - uses: actions/checkout@v4 11 | - name: Download Kaitai 12 | run: curl -LO https://github.com/kaitai-io/kaitai_struct_compiler/releases/download/0.10/kaitai-struct-compiler_0.10_all.deb 13 | - name: Install Kaitai 14 | run: sudo apt-get install ./kaitai-struct-compiler_0.10_all.deb 15 | - name: Set up Python 16 | uses: actions/setup-python@v4 17 | with: 18 | python-version: "3.8" 19 | - name: Install pypa/build 20 | run: >- 21 | python3 -m 22 | pip install 23 | build 24 | --user 25 | - name: Build a binary wheel and a source tarball 26 | run: python3 -m build 27 | - name: Store the distribution packages 28 | uses: actions/upload-artifact@v3 29 | with: 30 | name: python-package-distributions 31 | path: dist/ 32 | 33 | publish-to-pypi: 34 | name: >- 35 | Publish Python 🐍 distribution 📦 to PyPI 36 | if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes 37 | needs: 38 | - build 39 | runs-on: ubuntu-latest 40 | environment: 41 | name: pypi 42 | url: https://pypi.org/p/acd-tools # Replace with your PyPI project name 43 | permissions: 44 | id-token: write # IMPORTANT: mandatory for trusted publishing 45 | 46 | steps: 47 | - name: Download all the dists 48 | uses: actions/download-artifact@v3 49 | with: 50 | name: python-package-distributions 51 | path: dist/ 52 | - name: Publish distribution 📦 to PyPI 53 | uses: pypa/gh-action-pypi-publish@release/v1 54 | 55 | github-release: 56 | name: >- 57 | Sign the Python 🐍 distribution 📦 with Sigstore 58 | and upload them to GitHub Release 59 | needs: 60 | - 
publish-to-pypi 61 | runs-on: ubuntu-latest 62 | 63 | permissions: 64 | contents: write # IMPORTANT: mandatory for making GitHub Releases 65 | id-token: write # IMPORTANT: mandatory for sigstore 66 | 67 | steps: 68 | - name: Download all the dists 69 | uses: actions/download-artifact@v3 70 | with: 71 | name: python-package-distributions 72 | path: dist/ 73 | - name: Sign the dists with Sigstore 74 | uses: sigstore/gh-action-sigstore-python@v2.1.1 75 | with: 76 | inputs: >- 77 | ./dist/*.tar.gz 78 | ./dist/*.whl 79 | - name: Create GitHub Release 80 | env: 81 | GITHUB_TOKEN: ${{ github.token }} 82 | run: >- 83 | gh release create 84 | '${{ github.ref_name }}' 85 | --repo '${{ github.repository }}' 86 | --notes "" 87 | - name: Upload artifact signatures to GitHub Release 88 | env: 89 | GITHUB_TOKEN: ${{ github.token }} 90 | # Upload to GitHub Release using the `gh` CLI. 91 | # `dist/` contains the built packages, and the 92 | # sigstore-produced signatures and certificates. 93 | run: >- 94 | gh release upload 95 | '${{ github.ref_name }}' dist/** 96 | --repo '${{ github.repository }}' 97 | 98 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | *.py[cod] 3 | *$py.class 4 | test/build/* 5 | acd.egg-info 6 | .pytest_cache 7 | venv 8 | build 9 | .idea 10 | dist 11 | /test/dump/ 12 | /.mypy_cache/ 13 | /acd_tools.egg-info/ 14 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. 
The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, 13 | # software distributed under the License is distributed on an 14 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | # KIND, either express or implied. See the License for the 16 | # specific language governing permissions and limitations 17 | # under the License. 18 | # 19 | 20 | # See https://pre-commit.com for more information 21 | # See https://pre-commit.com/hooks.html for more hooks 22 | # 23 | # NOTE: This configuration runs on the entire repository 24 | # So, the types: should be set accordingly 25 | # Right now we only run this on python, and this configuration 26 | # is in the plc4py directory.default_language_version: 27 | # If we were to use this for hooks across the whole project we 28 | # should configure this at the top level 29 | repos: 30 | - repo: https://github.com/pre-commit/pre-commit-hooks 31 | rev: v4.2.0 32 | hooks: 33 | - id: trailing-whitespace 34 | types: [python] 35 | - id: end-of-file-fixer 36 | types: [python] 37 | - repo: https://github.com/psf/black 38 | rev: 22.3.0 # Replace by any tag/version: https://github.com/psf/black/tags 39 | hooks: 40 | - id: black 41 | language_version: python3 # Should be a command that runs python3.6 42 | types: [python] 43 | - repo: https://github.com/pre-commit/mirrors-mypy 44 | rev: v1.10.1 45 | hooks: 46 | - id: mypy 47 | types: [python] 48 | pass_filenames: false 49 | follow-imports: skip 50 | args: [acd/database, acd/exceptions, acd/l5x, acd/record, acd/zip, --check-untyped-defs, --ignore-missing-imports, --exclude, acd/generated/, --follow-imports, skip] 51 | -------------------------------------------------------------------------------- 
/LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ![PyPI](https://img.shields.io/pypi/v/acd-tools?label=acd-tools) 4 | ![PyPI - Downloads](https://img.shields.io/pypi/dm/acd-tools) 5 | ![ACD Tools](https://github.com/hutcheb/acd/actions/workflows/acd-tools.yml/badge.svg) 6 | 7 | ## Rockwell ACD Project File Tools 8 | 9 | The Rockwell ACD file is an archive file that contains all the files 10 | that are used by RSLogix/Studio 5000. 11 | 12 | It consists of a number of text files containing version information, compressed XML 13 | files containing project and tag information as well as a number of database files. 14 | 15 | This library allows you to unzip all the files and extract information from these files. 
16 | 17 | ### Installing 18 | 19 | To install acd tools from pypi run 20 | 21 | ```bash 22 | pip install acd-tools 23 | ``` 24 | 25 | ### Parsing the ACD file 26 | 27 | To get the Controller object and get the program/routines/rungs/tags/datatypes, use something like this 28 | ```python 29 | from acd.api import ImportProjectFromFile 30 | 31 | controller = ImportProjectFromFile("../resources/CuteLogix.ACD").import_project().controller 32 | rung = controller.programs[0].routines[0].rungs[0] 33 | data_type = controller.data_types[-1] 34 | tag_name = controller.tags[75].text 35 | tag_data_type = controller.tags[75].data_type 36 | ``` 37 | 38 | ### Unzip 39 | 40 | To extract the file use the acd.api.ExtractAcdDatabase class. This extracts the database files to a directory. 41 | 42 | ```python 43 | from acd.api import ExtractAcdDatabase 44 | 45 | ExtractAcdDatabase('CuteLogix.ACD', 'output_directory').extract() 46 | 47 | ``` 48 | 49 | ### Extract Raw Records From ACD Files 50 | 51 | A select number of database files contain interesting information. This will save each database record to a file 52 | to make it easier to see whats in them. 53 | 54 | ```python 55 | from acd.api import ExtractAcdDatabaseRecordsToFiles 56 | 57 | ExtractAcdDatabaseRecordsToFiles('CuteLogix.ACD', 'output_directory').extract() 58 | 59 | ``` 60 | 61 | ### Dump Comps Database Records 62 | 63 | The Comps database contains a lot of information and can be export as a directory structure to make it easier to look at. 64 | It will also extract the CIP class and instance and write it to the log file. 65 | 66 | ```python 67 | from acd.api import DumpCompsRecordsToFile 68 | 69 | DumpCompsRecordsToFile('CuteLogix.ACD', 'output_directory').extract() 70 | 71 | ``` 72 | 73 | ### Converting from ACD to L5X 74 | 75 | This hasn't been started but could be feasible eventually. 
76 | -------------------------------------------------------------------------------- /acd/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hutcheb/acd/75b95955026ec58495b35b86cb091547ed2e2907/acd/__init__.py -------------------------------------------------------------------------------- /acd/api.py: -------------------------------------------------------------------------------- 1 | import os 2 | from abc import abstractmethod 3 | from dataclasses import dataclass 4 | from os import PathLike 5 | 6 | from acd.l5x.export_l5x import ExportL5x 7 | from acd.zip.unzip import Unzip 8 | 9 | from acd.database.acd_database import AcdDatabase 10 | from acd.l5x.elements import DumpCompsRecords, RSLogix5000Content 11 | 12 | 13 | # Returned Project Structures 14 | 15 | 16 | # Import Export Interfaces 17 | class ImportProject: 18 | """ "Interface to import an PLC project""" 19 | 20 | @abstractmethod 21 | def import_project(self) -> RSLogix5000Content: 22 | # Import Project Interface 23 | pass 24 | 25 | 26 | class ExportProject: 27 | """ "Interface to export an PLC project""" 28 | 29 | @abstractmethod 30 | def export_project(self, project: RSLogix5000Content): 31 | # Export Project Interface 32 | pass 33 | 34 | 35 | # Concreate examples of importing and exporting projects 36 | @dataclass 37 | class ImportProjectFromFile(ImportProject): 38 | """Import a Controller from an ACD stored on file""" 39 | 40 | filename: PathLike 41 | 42 | def import_project(self) -> RSLogix5000Content: 43 | # Import Project Interface 44 | export = ExportL5x(self.filename) 45 | return export.project 46 | 47 | 48 | @dataclass 49 | class ExportProjectToFile(ExportProject): 50 | """Export a Controller to an ACD file""" 51 | 52 | filename: PathLike 53 | 54 | def export_project(self, project: RSLogix5000Content): 55 | # Concreate example of exporting a Project Object to an ACD file 56 | raise NotImplementedError 57 | 58 | 59 | # 
Extracting/Compressing files from an ACD file Interfaces 60 | class Extract: 61 | """Base class for all extract functions""" 62 | 63 | @abstractmethod 64 | def extract(self): 65 | # Interface for extracting database files 66 | pass 67 | 68 | 69 | class Compress: 70 | """Base class for all compress functions""" 71 | 72 | @abstractmethod 73 | def compress(self): 74 | # Interface for extracting database files 75 | pass 76 | 77 | 78 | # Concreate examples of extracting and compressing ACD files 79 | @dataclass 80 | class ExtractAcdDatabase(Extract): 81 | """Extract database files from a Logix ACD file""" 82 | 83 | filename: PathLike 84 | output_directory: PathLike 85 | 86 | def extract(self): 87 | # Implement the extraction of an ACD file 88 | unzip = Unzip(self.filename) 89 | unzip.write_files(self.output_directory) 90 | 91 | 92 | @dataclass 93 | class CompressAcdDatabase(Extract): 94 | """Compress database files to a Logix ACD file""" 95 | 96 | filename: PathLike 97 | output_directory: PathLike 98 | 99 | def compress(self): 100 | # Implement the compressing of an ACD file 101 | raise NotImplementedError 102 | 103 | 104 | @dataclass 105 | class ExtractAcdDatabaseRecordsToFiles(ExportProject): 106 | """Export all ACD databases to a raw database record tree""" 107 | 108 | filename: PathLike 109 | output_directory: PathLike 110 | 111 | def extract(self): 112 | # Implement the extraction of an ACD file 113 | database = AcdDatabase(self.filename, self.output_directory) 114 | database.extract_to_file() 115 | 116 | 117 | @dataclass 118 | class DumpCompsRecordsToFile(ExportProject): 119 | """ 120 | Dump the Comps database to a folder. Each individual record can then be navigated and viewed. 
121 | 122 | :param str filename: Filename of ACD file 123 | :param str output_directory: Location to store the records 124 | """ 125 | 126 | filename: PathLike 127 | output_directory: PathLike 128 | 129 | def extract(self): 130 | export = ExportL5x(self.filename) 131 | with open( 132 | os.path.join(self.output_directory, "output.log"), 133 | "w", 134 | ) as log_file: 135 | DumpCompsRecords(export._cur, 0).dump(log_file=log_file) 136 | -------------------------------------------------------------------------------- /acd/database/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hutcheb/acd/75b95955026ec58495b35b86cb091547ed2e2907/acd/database/__init__.py -------------------------------------------------------------------------------- /acd/database/acd_database.py: -------------------------------------------------------------------------------- 1 | import os 2 | from dataclasses import dataclass 3 | 4 | from loguru import logger as log 5 | 6 | from acd.database.dbextract import DbExtract 7 | from acd.zip.unzip import Unzip 8 | 9 | 10 | @dataclass 11 | class AcdDatabase: 12 | input_filename: os.PathLike 13 | output_filename: str 14 | _temp_dir: str = "build" # tempfile.mkdtemp() 15 | 16 | def __post_init__(self): 17 | if not os.path.exists(os.path.join(self._temp_dir)): 18 | os.makedirs(self._temp_dir) 19 | 20 | log.info("Extracting ACD database file") 21 | unzip = Unzip(self.input_filename) 22 | unzip.write_files(self._temp_dir) 23 | 24 | log.info("Getting records from ACD Comps file and storing in sqllite database") 25 | self.comps_db = DbExtract(os.path.join(self._temp_dir, "Comps.Dat")).read() 26 | 27 | log.info( 28 | "Getting records from ACD SbRegion file and storing in sqllite database" 29 | ) 30 | self.sb_region_db = DbExtract( 31 | os.path.join(self._temp_dir, "SbRegion.Dat") 32 | ).read() 33 | 34 | log.info( 35 | "Getting records from ACD Comments file and storing in sqllite 
database" 36 | ) 37 | self.comments_db = DbExtract( 38 | os.path.join(self._temp_dir, "Comments.Dat") 39 | ).read() 40 | 41 | log.info( 42 | "Getting records from ACD Nameless file and storing in sqllite database" 43 | ) 44 | self.nameless_db = DbExtract( 45 | os.path.join(self._temp_dir, "Nameless.Dat") 46 | ).read() 47 | 48 | def extract_to_file(self): 49 | directory = os.path.join(self._temp_dir, "comps_db") 50 | if not os.path.exists(os.path.join(directory)): 51 | os.makedirs(directory) 52 | for count, record in enumerate(self.comps_db.records.record): 53 | with open(os.path.join(directory, str(count)), "wb") as out_file: 54 | out_file.write(record.record.record_buffer) 55 | 56 | directory = os.path.join(self._temp_dir, "sb_region_db") 57 | if not os.path.exists(os.path.join(directory)): 58 | os.makedirs(directory) 59 | for count, record in enumerate(self.sb_region_db.records.record): 60 | with open(os.path.join(directory, str(count)), "wb") as out_file: 61 | out_file.write(record.record.record_buffer) 62 | 63 | directory = os.path.join(self._temp_dir, "comments_db") 64 | if not os.path.exists(os.path.join(directory)): 65 | os.makedirs(directory) 66 | for count, record in enumerate(self.comments_db.records.record): 67 | with open(os.path.join(directory, str(count)), "wb") as out_file: 68 | out_file.write(record.record.record_buffer) 69 | 70 | directory = os.path.join(self._temp_dir, "nameless_db") 71 | if not os.path.exists(os.path.join(directory)): 72 | os.makedirs(directory) 73 | for count, record in enumerate(self.nameless_db.records.record): 74 | with open(os.path.join(directory, str(count)), "wb") as out_file: 75 | out_file.write(record.record.record_buffer) 76 | -------------------------------------------------------------------------------- /acd/database/dbextract.py: -------------------------------------------------------------------------------- 1 | import struct 2 | from dataclasses import dataclass 3 | from io import BufferedReader 4 | 5 | from 
acd.generated.dat import Dat 6 | 7 | 8 | @dataclass 9 | class DatHeader: 10 | f: BufferedReader 11 | 12 | def __post_init__(self): 13 | self.start_position = self.f.seek(8) 14 | ( 15 | self.total_length, 16 | self.region_pointer_offset, 17 | self._unknown1, 18 | self.no_records, 19 | self.no_records_table2, 20 | ) = struct.unpack("IIIII", self.f.read(20)) 21 | self.f.seek(self.region_pointer_offset) 22 | if self.f.read(2) != b"\xfe\xfe": 23 | raise RuntimeError("Pointer Region Incorrect") 24 | ( 25 | self.region_pointer_length, 26 | self.unknown2, 27 | self._unknown3, 28 | self.pointer_metadata_region, 29 | self.pointer_records_region, 30 | ) = struct.unpack("IIIII", self.f.read(20)) 31 | self.f.seek(self.pointer_records_region) 32 | if self.f.read(2) != b"\xfe\xfe": 33 | raise RuntimeError("Record Region Incorrect") 34 | ( 35 | self.record_header_length, 36 | self._unknown4, 37 | self.unknown5, 38 | self.record_format, 39 | ) = struct.unpack("IIII", self.f.read(16)) 40 | if self.record_format == 132: 41 | raise RuntimeError("Cross Reference Databases Not Supported") 42 | elif self.record_format != 512: 43 | raise RuntimeError("Unknown record format") 44 | self.start_records_position = ( 45 | self.pointer_records_region + self.record_header_length 46 | ) 47 | self.f.seek(self.start_position) 48 | 49 | 50 | @dataclass 51 | class DatRecord: 52 | f: BufferedReader 53 | 54 | def __post_init__(self): 55 | self.identifier: bytes = self.f.read(2) 56 | 57 | self.record_length = struct.unpack("I", self.f.read(4))[0] 58 | self.record: bytes = self.f.read(self.record_length - 6) 59 | 60 | 61 | @dataclass 62 | class DbExtract: 63 | filename: str 64 | 65 | def read(self) -> Dat: 66 | return Dat.from_file(self.filename) 67 | -------------------------------------------------------------------------------- /acd/database/sql_database_provider.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sqlite3 3 | import tempfile 4 | 
from dataclasses import dataclass, field
from sqlite3 import Cursor

from loguru import logger as log


class DatabaseProvider:
    """Marker base class for pluggable ACD database storage back-ends."""

    pass


@dataclass
class SqlDatabaseProvider(DatabaseProvider):
    """Stores extracted ACD database records in a throwaway SQLite file.

    Creates (or recreates) ``<directory>/<filename>`` and the comps,
    pointers, rungs, region_map, comments and nameless tables.
    """

    # default_factory so each instance gets its own fresh temp directory.
    # The previous `directory: str = tempfile.mkdtemp()` default ran once at
    # import time and was shared by every instance of the class.
    directory: str = field(default_factory=tempfile.mkdtemp)
    filename: str = "acd.db"

    def __post_init__(self):
        log.info(
            "Creating temporary directory (if it doesn't exist) to store ACD database files - "
            + self.directory
        )

        # Start from a clean database file on every run.
        if os.path.exists(os.path.join(self.directory, self.filename)):
            os.remove(os.path.join(self.directory, self.filename))
        if not os.path.exists(os.path.join(self.directory)):
            os.makedirs(self.directory)
        log.info("Creating sqlite database to store ACD database records")
        self._db = sqlite3.connect(os.path.join(self.directory, self.filename))
        self._cur: Cursor = self._db.cursor()

        log.debug("Create Comps table in sqlite db")
        self._cur.execute(
            "CREATE TABLE comps(object_id int, parent_id int, comp_name text, seq_number int, "
            "record_type int, record BLOB NOT NULL)"
        )

        log.debug("Create pointers table in sqlite db")
        self._cur.execute(
            "CREATE TABLE pointers(object_id int, parent_id int, comp_name text, seq_number int, "
            "record_type int, record BLOB NOT NULL)"
        )

        log.debug("Create Rungs table in sqlite db")
        self._cur.execute(
            "CREATE TABLE rungs(object_id int, rung text, seq_number int)"
        )

        log.debug("Create Region_map table in sqlite db")
        self._cur.execute(
            "CREATE TABLE region_map(object_id int, parent_id int, unknown int, seq_no int, record BLOB "
            "NOT NULL)"
        )
        log.debug("Create Comments table in sqlite db")
        self._cur.execute(
            "CREATE TABLE comments(seq_number int, string_length int, lookup_id int, comment text, record_type int, "
            "sub_record_type int)"
        )

        log.debug("Create Nameless table in sqlite db")
        self._cur.execute(
            "CREATE TABLE nameless(object_id int, parent_id int, record BLOB NOT NULL)"
        )


# -------- acd/exceptions/CompsRecordException.py --------


class CompsRecordException(Exception):
    """Raised when a comps record cannot be parsed.

    ``message`` names the record type (e.g. "RxTag"); ``record_number`` is
    the unsupported version/format value that was encountered.
    """

    def __init__(self, record_number, message):
        # Previously the record-type name was concatenated twice
        # ("RxTagUnknown RxTag version found- 1"); build the message once.
        super().__init__(f"Unknown {message} version found - {record_number}")


class UnknownRxTagVersion(CompsRecordException):
    """Raised for an RxTag record with an unsupported format version."""

    def __init__(self, record_number):
        super().__init__(record_number, "RxTag")


# -- acd/exceptions/__init__.py, acd/generated/__init__.py,
# -- acd/generated/comments/__init__.py: empty package markers --

# -------- acd/generated/comments/fafa_coments.py --------
# This is a generated file!
# Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# (generated from resources/templates/Comments/FAFA_Comments.ksy — do not
# hand-edit; change the .ksy template and regenerate instead)

import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO


if getattr(kaitaistruct, "API_VERSION", (0, 9)) < (0, 9):
    raise Exception(
        "Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s"
        % (kaitaistruct.__version__)
    )


class FafaComents(KaitaiStruct):
    # Parser for one FAFA comments record: u4le record length, a 10-byte
    # header, then a body whose layout is selected by header.record_type.
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.record_length = self._io.read_u4le()
        self._raw_header = self._io.read_bytes(10)
        _io__raw_header = KaitaiStream(BytesIO(self._raw_header))
        self.header = FafaComents.Header(_io__raw_header, self, self._root)
        _on = self.header.record_type
        # Types 3/4/13/14 -> Utf16Record, 1/2 -> AsciiRecord,
        # 23/25 -> ControllerRecord; anything else stays raw bytes.
        if _on == 14:
            self._raw_body = self._io.read_bytes((self.record_length - 10))
            _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
            self.body = FafaComents.Utf16Record(12, _io__raw_body, self, self._root)
        elif _on == 4:
            self._raw_body = self._io.read_bytes((self.record_length - 10))
            _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
            self.body = FafaComents.Utf16Record(12, _io__raw_body, self, self._root)
        elif _on == 1:
            self._raw_body = self._io.read_bytes((self.record_length - 10))
            _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
            self.body = FafaComents.AsciiRecord(_io__raw_body, self, self._root)
        elif _on == 13:
            self._raw_body = self._io.read_bytes((self.record_length - 10))
            _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
            self.body = FafaComents.Utf16Record(12, _io__raw_body, self, self._root)
        elif _on == 3:
            self._raw_body = self._io.read_bytes((self.record_length - 10))
            _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
            self.body = FafaComents.Utf16Record(12, _io__raw_body, self, self._root)
        elif _on == 23:
            self._raw_body = self._io.read_bytes((self.record_length - 10))
            _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
            self.body = FafaComents.ControllerRecord(_io__raw_body, self, self._root)
        elif _on == 2:
            self._raw_body = self._io.read_bytes((self.record_length - 10))
            _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
            self.body = FafaComents.AsciiRecord(_io__raw_body, self, self._root)
        elif _on == 25:
            self._raw_body = self._io.read_bytes((self.record_length - 10))
            _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
            self.body = FafaComents.ControllerRecord(_io__raw_body, self, self._root)
        else:
            self.body = self._io.read_bytes((self.record_length - 10))

    class ControllerRecord(KaitaiStruct):
        # Body for record types 23/25: object id, UTF-16 tag reference,
        # then a NUL-terminated comment string.
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.unknown_1 = self._io.read_bytes(8)
            self.object_id = self._io.read_u4le()
            self.unknown_2 = self._io.read_bytes(4)
            self.tag_reference = FafaComents.StrzUtf16(self._io, self, self._root)
            self.unknown_3 = self._io.read_bytes(12)
            self.record_string = (
                self._io.read_bytes_term(0, False, True, True)
            ).decode("UTF-8")

    class AsciiRecord(KaitaiStruct):
        # Body for record types 1/2: object id between undeciphered byte
        # runs, then a NUL-terminated string.
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.unknown_1 = self._io.read_bytes(13)
            self.object_id = self._io.read_u4le()
            self.unknown_2 = self._io.read_bytes(13)
            self.record_string = (
                self._io.read_bytes_term(0, False, True, True)
            ).decode("UTF-8")

    class AsciiRecord4(KaitaiStruct):
        # Variant ASCII layout; not currently selected by _read's switch.
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.unknown_1 = self._io.read_bytes(8)
            self.object_id = self._io.read_u4le()
            self.unknown_2 = self._io.read_bytes(24)
            self.record_string = (
                self._io.read_bytes_term(0, False, True, True)
            ).decode("UTF-8")

    class Header(KaitaiStruct):
        # 10-byte record header; fields are lazy reads at fixed offsets:
        # seq_number@0 (u2), record_type@2 (u2), sub_record_length@4 (u2),
        # parent@6 (u4).
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            pass

        @property
        def seq_number(self):
            if hasattr(self, "_m_seq_number"):
                return self._m_seq_number

            _pos = self._io.pos()
            self._io.seek(0)
            self._m_seq_number = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_seq_number", None)

        @property
        def record_type(self):
            if hasattr(self, "_m_record_type"):
                return self._m_record_type

            _pos = self._io.pos()
            self._io.seek(2)
            self._m_record_type = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_record_type", None)

        @property
        def sub_record_length(self):
            if hasattr(self, "_m_sub_record_length"):
                return self._m_sub_record_length

            _pos = self._io.pos()
            self._io.seek(4)
            self._m_sub_record_length = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_sub_record_length", None)

        @property
        def parent(self):
            if hasattr(self, "_m_parent"):
                return self._m_parent

            _pos = self._io.pos()
            self._io.seek(6)
            self._m_parent = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_parent", None)

    class StrzUtf16(KaitaiStruct):
        # NUL-terminated UTF-16LE string: code_units scans ahead for the
        # terminator; value then decodes everything before it.
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.value = (self._io.read_bytes((2 * (len(self.code_units) - 1)))).decode(
                "utf-16le"
            )
            self.term = self._io.read_u2le()
            if not self.term == 0:
                raise kaitaistruct.ValidationNotEqualError(
                    0, self.term, self._io, "/types/strz_utf_16/seq/1"
                )

        @property
        def code_units(self):
            if hasattr(self, "_m_code_units"):
                return self._m_code_units

            _pos = self._io.pos()
            self._io.seek(self._io.pos())
            self._m_code_units = []
            i = 0
            while True:
                _ = self._io.read_u2le()
                self._m_code_units.append(_)
                if _ == 0:
                    break
                i += 1
            self._io.seek(_pos)
            return getattr(self, "_m_code_units", None)

    class Utf16Record(KaitaiStruct):
        # Body for record types 3/4/13/14; len_unknown_3 is the size of the
        # undeciphered gap between the tag reference and the final string.
        def __init__(self, len_unknown_3, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.len_unknown_3 = len_unknown_3
            self._read()

        def _read(self):
            self.unknown_1 = self._io.read_bytes(8)
            self.object_id = self._io.read_u4le()
            self.unknown_2 = self._io.read_bytes(4)
            self.len_record = self._io.read_u2le()
            self.tag_reference = FafaComents.StrzUtf16(self._io, self, self._root)
            self.unknown_3 = self._io.read_bytes(self.len_unknown_3)
            self.record_string = (
                self._io.read_bytes_term(0, False, True, True)
            ).decode("UTF-8")

        @property
        def lookup_id(self):
            if hasattr(self, "_m_lookup_id"):
                return self._m_lookup_id

            _pos = self._io.pos()
            self._io.seek(27)
            self._m_lookup_id = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_lookup_id", None)

        @property
        def sub_record_type(self):
            if hasattr(self, "_m_sub_record_type"):
                return self._m_sub_record_type

            _pos = self._io.pos()
            self._io.seek(41)
            self._m_sub_record_type = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_sub_record_type", None)


# -- acd/generated/comps/__init__.py: empty package marker --
# -------- acd/generated/comps/fafa_comps.py --------
# This is a generated file! Please edit source .ksy file and use
# kaitai-struct-compiler to rebuild

import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO


if getattr(kaitaistruct, "API_VERSION", (0, 9)) < (0, 9):
    raise Exception(
        "Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s"
        % (kaitaistruct.__version__)
    )


class FafaComps(KaitaiStruct):
    # Parser for a FAFA comps record: u4le record length, 144-byte header,
    # with the remainder kept as an opaque record buffer.
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.record_length = self._io.read_u4le()
        self._raw_header = self._io.read_bytes(144)
        _io__raw_header = KaitaiStream(BytesIO(self._raw_header))
        self.header = FafaComps.Header(_io__raw_header, self, self._root)
        self.record_buffer = self._io.read_bytes(((self.record_length - 144) - 4))

    class Header(KaitaiStruct):
        # 144-byte header; lazy fields at fixed offsets: seq_number@4 (u2),
        # record_type@6 (u2), object_id@12 (u4), parent_id@16 (u4),
        # record_name@20 (NUL-terminated UTF-16 in a 124-byte slot).
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            pass

        @property
        def record_type(self):
            if hasattr(self, "_m_record_type"):
                return self._m_record_type

            _pos = self._io.pos()
            self._io.seek(6)
            self._m_record_type = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_record_type", None)

        @property
        def object_id(self):
            if hasattr(self, "_m_object_id"):
                return self._m_object_id

            _pos = self._io.pos()
            self._io.seek(12)
            self._m_object_id = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_object_id", None)

        @property
        def record_name(self):
            if hasattr(self, "_m_record_name"):
                return self._m_record_name

            _pos = self._io.pos()
            self._io.seek(20)
            self._raw__m_record_name = self._io.read_bytes(124)
            _io__raw__m_record_name = KaitaiStream(BytesIO(self._raw__m_record_name))
            self._m_record_name = FafaComps.StrzUtf16(
                _io__raw__m_record_name, self, self._root
            )
            self._io.seek(_pos)
            return getattr(self, "_m_record_name", None)

        @property
        def seq_number(self):
            if hasattr(self, "_m_seq_number"):
                return self._m_seq_number

            _pos = self._io.pos()
            self._io.seek(4)
            self._m_seq_number = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_seq_number", None)

        @property
        def parent_id(self):
            if hasattr(self, "_m_parent_id"):
                return self._m_parent_id

            _pos = self._io.pos()
            self._io.seek(16)
            self._m_parent_id = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_parent_id", None)

    class StrzUtf16(KaitaiStruct):
        # NUL-terminated UTF-16LE string (same shape as in fafa_coments.py).
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.value = (self._io.read_bytes((2 * (len(self.code_units) - 1)))).decode(
                "utf-16le"
            )
            self.term = self._io.read_u2le()
            if not self.term == 0:
                raise kaitaistruct.ValidationNotEqualError(
                    0, self.term, self._io, "/types/strz_utf_16/seq/1"
                )

        @property
        def code_units(self):
            if hasattr(self, "_m_code_units"):
                return self._m_code_units

            _pos = self._io.pos()
            self._io.seek(self._io.pos())
            self._m_code_units = []
            i = 0
            while True:
                _ = self._io.read_u2le()
                self._m_code_units.append(_)
                if _ == 0:
                    break
                i += 1
            self._io.seek(_pos)
            return getattr(self, "_m_code_units", None)


# -------- acd/generated/comps/fdfd_comps.py --------
# This is a generated file! Please edit source .ksy file and use
# kaitai-struct-compiler to rebuild

import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO


if getattr(kaitaistruct, "API_VERSION", (0, 9)) < (0, 9):
    raise Exception(
        "Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s"
        % (kaitaistruct.__version__)
    )


class FdfdComps(KaitaiStruct):
    # Parser for an FDFD comps record; unlike FafaComps the record length
    # is supplied by the caller instead of being read from the stream.
    def __init__(self, record_length, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.record_length = record_length
        self._read()

    def _read(self):
        self._raw_header = self._io.read_bytes(155)
        _io__raw_header = KaitaiStream(BytesIO(self._raw_header))
        self.header = FdfdComps.Header(_io__raw_header, self, self._root)
        self.record_buffer = self._io.read_bytes(((self.record_length - 155) - 8))

    class Header(KaitaiStruct):
        # 155-byte header; lazy fields at fixed offsets: seq_number@4 (u2),
        # record_type@10 (u2), object_id@16 (u4), parent_id@20 (u4),
        # record_name@24 (NUL-terminated UTF-16 in a 124-byte slot).
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            pass

        @property
        def record_type(self):
            if hasattr(self,
"_m_record_type"): 41 | return self._m_record_type 42 | 43 | _pos = self._io.pos() 44 | self._io.seek(10) 45 | self._m_record_type = self._io.read_u2le() 46 | self._io.seek(_pos) 47 | return getattr(self, "_m_record_type", None) 48 | 49 | @property 50 | def object_id(self): 51 | if hasattr(self, "_m_object_id"): 52 | return self._m_object_id 53 | 54 | _pos = self._io.pos() 55 | self._io.seek(16) 56 | self._m_object_id = self._io.read_u4le() 57 | self._io.seek(_pos) 58 | return getattr(self, "_m_object_id", None) 59 | 60 | @property 61 | def record_name(self): 62 | if hasattr(self, "_m_record_name"): 63 | return self._m_record_name 64 | 65 | _pos = self._io.pos() 66 | self._io.seek(24) 67 | self._raw__m_record_name = self._io.read_bytes(124) 68 | _io__raw__m_record_name = KaitaiStream(BytesIO(self._raw__m_record_name)) 69 | self._m_record_name = FdfdComps.StrzUtf16( 70 | _io__raw__m_record_name, self, self._root 71 | ) 72 | self._io.seek(_pos) 73 | return getattr(self, "_m_record_name", None) 74 | 75 | @property 76 | def seq_number(self): 77 | if hasattr(self, "_m_seq_number"): 78 | return self._m_seq_number 79 | 80 | _pos = self._io.pos() 81 | self._io.seek(4) 82 | self._m_seq_number = self._io.read_u2le() 83 | self._io.seek(_pos) 84 | return getattr(self, "_m_seq_number", None) 85 | 86 | @property 87 | def parent_id(self): 88 | if hasattr(self, "_m_parent_id"): 89 | return self._m_parent_id 90 | 91 | _pos = self._io.pos() 92 | self._io.seek(20) 93 | self._m_parent_id = self._io.read_u4le() 94 | self._io.seek(_pos) 95 | return getattr(self, "_m_parent_id", None) 96 | 97 | class StrzUtf16(KaitaiStruct): 98 | def __init__(self, _io, _parent=None, _root=None): 99 | self._io = _io 100 | self._parent = _parent 101 | self._root = _root if _root else self 102 | self._read() 103 | 104 | def _read(self): 105 | self.value = (self._io.read_bytes((2 * (len(self.code_units) - 1)))).decode( 106 | "utf-16le" 107 | ) 108 | self.term = self._io.read_u2le() 109 | if not self.term == 
0: 110 | raise kaitaistruct.ValidationNotEqualError( 111 | 0, self.term, self._io, "/types/strz_utf_16/seq/1" 112 | ) 113 | 114 | @property 115 | def code_units(self): 116 | if hasattr(self, "_m_code_units"): 117 | return self._m_code_units 118 | 119 | _pos = self._io.pos() 120 | self._io.seek(self._io.pos()) 121 | self._m_code_units = [] 122 | i = 0 123 | while True: 124 | _ = self._io.read_u2le() 125 | self._m_code_units.append(_) 126 | if _ == 0: 127 | break 128 | i += 1 129 | self._io.seek(_pos) 130 | return getattr(self, "_m_code_units", None) 131 | -------------------------------------------------------------------------------- /acd/generated/comps/rx_generic.py: -------------------------------------------------------------------------------- 1 | # This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild 2 | 3 | import kaitaistruct 4 | from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO 5 | 6 | 7 | if getattr(kaitaistruct, "API_VERSION", (0, 9)) < (0, 9): 8 | raise Exception( 9 | "Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" 10 | % (kaitaistruct.__version__) 11 | ) 12 | 13 | 14 | class RxGeneric(KaitaiStruct): 15 | def __init__(self, _io, _parent=None, _root=None): 16 | self._io = _io 17 | self._parent = _parent 18 | self._root = _root if _root else self 19 | self._read() 20 | 21 | def _read(self): 22 | self.parent_id = self._io.read_u4le() 23 | self.unique_tag_identifier = self._io.read_u4le() 24 | self.record_format_version = self._io.read_u2le() 25 | self.cip_type = self._io.read_u2le() 26 | self.comment_id = self._io.read_u2le() 27 | _on = self.cip_type 28 | if _on == 104: 29 | self._raw_main_record = self._io.read_bytes(60) 30 | _io__raw_main_record = KaitaiStream(BytesIO(self._raw_main_record)) 31 | self.main_record = RxGeneric.RxTag(_io__raw_main_record, self, self._root) 32 | elif _on == 107: 33 | self._raw_main_record = self._io.read_bytes(60) 34 | 
            _io__raw_main_record = KaitaiStream(BytesIO(self._raw_main_record))
            self.main_record = RxGeneric.RxTag(_io__raw_main_record, self, self._root)
        else:
            self._raw_main_record = self._io.read_bytes(60)
            _io__raw_main_record = KaitaiStream(BytesIO(self._raw_main_record))
            self.main_record = RxGeneric.Unknown(_io__raw_main_record, self, self._root)
        self.len_record = self._io.read_u4le()
        self.count_record = self._io.read_u4le()
        self.extended_records = []
        # count_record counts the main record too, hence count_record - 1
        # extended attribute records follow.
        for i in range((self.count_record - 1)):
            self.extended_records.append(
                RxGeneric.AttributeRecord(self._io, self, self._root)
            )

    class Unknown(KaitaiStruct):
        # Fallback for unrecognised cip_type values: raw 60-byte body.
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.body = self._io.read_bytes(60)

    class LastAttributeRecord(KaitaiStruct):
        # Attribute record whose len_value includes its own 4-byte length
        # field (hence the `- 4`); not referenced by _read above.
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.attribute_id = self._io.read_u4le()
            self.len_value = self._io.read_u4le()
            self.value = self._io.read_bytes((self.len_value - 4))

    class RxMapDevice(KaitaiStruct):
        # Lazy views over a map-device main record: vendor_id@2,
        # product_type@4, product_code@6 (u2); parent_module@22,
        # slot_no@32, module_id@36 (u4).
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            pass

        @property
        def module_id(self):
            if hasattr(self, "_m_module_id"):
                return self._m_module_id

            _pos = self._io.pos()
            self._io.seek(36)
            self._m_module_id = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_module_id", None)

        @property
        def product_type(self):
            if hasattr(self, "_m_product_type"):
                return self._m_product_type

            _pos = self._io.pos()
            self._io.seek(4)
            self._m_product_type = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_product_type", None)

        @property
        def vendor_id(self):
            if hasattr(self, "_m_vendor_id"):
                return self._m_vendor_id

            _pos = self._io.pos()
            self._io.seek(2)
            self._m_vendor_id = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_vendor_id", None)

        @property
        def slot_no(self):
            if hasattr(self, "_m_slot_no"):
                return self._m_slot_no

            _pos = self._io.pos()
            self._io.seek(32)
            self._m_slot_no = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_slot_no", None)

        @property
        def product_code(self):
            if hasattr(self, "_m_product_code"):
                return self._m_product_code

            _pos = self._io.pos()
            self._io.seek(6)
            self._m_product_code = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_product_code", None)

        @property
        def parent_module(self):
            if hasattr(self, "_m_parent_module"):
                return self._m_parent_module

            _pos = self._io.pos()
            self._io.seek(22)
            self._m_parent_module = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_parent_module", None)

    class RxTag(KaitaiStruct):
        # Lazy views over a 60-byte tag main record: dimension_1/2/3@12/16/20
        # (u4), data_type@28 (u4), radix@32, external_access@34 (u2),
        # data_table_instance@36 (u4), cip_data_type@52 (u2).
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            pass

        @property
        def cip_data_type(self):
            if hasattr(self, "_m_cip_data_type"):
                return self._m_cip_data_type

            _pos = self._io.pos()
            self._io.seek(52)
            self._m_cip_data_type = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_cip_data_type", None)

        @property
        def radix(self):
            if hasattr(self, "_m_radix"):
                return self._m_radix

            _pos = self._io.pos()
            self._io.seek(32)
            self._m_radix = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_radix", None)

        @property
        def data_type(self):
            if hasattr(self, "_m_data_type"):
                return self._m_data_type

            _pos = self._io.pos()
            self._io.seek(28)
            self._m_data_type = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_data_type", None)

        @property
        def dimension_2(self):
            if hasattr(self, "_m_dimension_2"):
                return self._m_dimension_2

            _pos = self._io.pos()
            self._io.seek(16)
            self._m_dimension_2 = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_dimension_2", None)

        @property
        def dimension_3(self):
            if hasattr(self, "_m_dimension_3"):
                return self._m_dimension_3

            _pos = self._io.pos()
            self._io.seek(20)
            self._m_dimension_3 = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_dimension_3", None)

        @property
        def valid(self):
            # Always True for this record-format version.
            if hasattr(self, "_m_valid"):
                return self._m_valid

            self._m_valid = True
            return getattr(self, "_m_valid", None)

        @property
        def external_access(self):
            if hasattr(self, "_m_external_access"):
                return self._m_external_access

            _pos = self._io.pos()
            self._io.seek(34)
            self._m_external_access = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_external_access", None)

        @property
        def dimension_1(self):
            if hasattr(self, "_m_dimension_1"):
                return self._m_dimension_1

            _pos = self._io.pos()
            self._io.seek(12)
            self._m_dimension_1 = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_dimension_1", None)

        @property
        def data_table_instance(self):
            if hasattr(self, "_m_data_table_instance"):
                return self._m_data_table_instance

            _pos = self._io.pos()
            self._io.seek(36)
            self._m_data_table_instance = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_data_table_instance", None)

    class AttributeRecord(KaitaiStruct):
        # Extended attribute: u4 id, u4 payload length, raw payload; the
        # record_buffer view re-reads 60 bytes at offset 14 of the stream.
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.attribute_id = self._io.read_u4le()
            self.len_value = self._io.read_u4le()
            self.value = self._io.read_bytes(self.len_value)

        @property
        def record_buffer(self):
            if hasattr(self, "_m_record_buffer"):
                return self._m_record_buffer

            _pos = self._io.pos()
            self._io.seek(14)
            self._m_record_buffer = self._io.read_bytes(60)
            self._io.seek(_pos)
            return getattr(self, "_m_record_buffer", None)


# -------- acd/generated/comps/rx_tag.py --------
# This is a generated file!
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild

import kaitaistruct
from kaitaistruct import KaitaiStruct

if getattr(kaitaistruct, "API_VERSION", (0, 9)) < (0, 9):
    raise Exception(
        "Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s"
        % (kaitaistruct.__version__)
    )


class RxTag(KaitaiStruct):
    """Parser for a tag record from an ACD Comps database.

    Reads a fixed header (parent id, unique tag identifier, record format
    version, comment id) and then dispatches on ``record_format_version`` to a
    version-specific body parser: ``V60``, ``V63``, ``V0`` (invalid), or
    ``VUnknown`` for unrecognised versions.

    Most fields are lazy "instances": the property seeks to an absolute
    offset inside the record stream, reads the value, restores the stream
    position, and caches the result in an ``_m_*`` attribute.
    """

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Fixed header common to every record format version.
        self.parent_id = self._io.read_u4le()
        self.unique_tag_identifier = self._io.read_u4le()
        self.record_format_version = self._io.read_u2le()
        self.comment_id = self._io.read_u4le()
        # Dispatch to the body parser matching the record format version.
        _on = self.record_format_version
        if _on == 0:
            self.body = RxTag.V0(self._io, self, self._root)
        elif _on == 60:
            self.body = RxTag.V60(self._io, self, self._root)
        elif _on == 63:
            self.body = RxTag.V63(self._io, self, self._root)
        else:
            self.body = RxTag.VUnknown(self._io, self, self._root)

    class V63(KaitaiStruct):
        """Body layout for record format version 63.

        All fields are lazy reads at absolute offsets; the variable-length
        ``records`` list shifts the position of the tag name fields.
        """

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # No sequential fields; everything is read lazily via properties.
            pass

        @property
        def cip_data_type(self):
            # u2le at absolute offset 66.
            if hasattr(self, "_m_cip_data_type"):
                return self._m_cip_data_type

            _pos = self._io.pos()
            self._io.seek(66)
            self._m_cip_data_type = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_cip_data_type", None)

        @property
        def tag_name_length(self):
            # u2le just past the variable-length records list.
            # NOTE(review): same offset as sub_record_length but read as u2 —
            # mirrors the .ksy; confirm against the format spec.
            if hasattr(self, "_m_tag_name_length"):
                return self._m_tag_name_length

            _pos = self._io.pos()
            self._io.seek((78 + (len(self.records) * 4)))
            self._m_tag_name_length = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_tag_name_length", None)

        @property
        def device_map_instance(self):
            # u4le at absolute offset 372.
            if hasattr(self, "_m_device_map_instance"):
                return self._m_device_map_instance

            _pos = self._io.pos()
            self._io.seek(372)
            self._m_device_map_instance = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_device_map_instance", None)

        @property
        def data_type(self):
            # u4le at absolute offset 42.
            if hasattr(self, "_m_data_type"):
                return self._m_data_type

            _pos = self._io.pos()
            self._io.seek(42)
            self._m_data_type = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_data_type", None)

        @property
        def data_instance(self):
            # u4le at absolute offset 358.
            if hasattr(self, "_m_data_instance"):
                return self._m_data_instance

            _pos = self._io.pos()
            self._io.seek(358)
            self._m_data_instance = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_data_instance", None)

        @property
        def dimension_2(self):
            # u4le at absolute offset 30 (second array dimension).
            if hasattr(self, "_m_dimension_2"):
                return self._m_dimension_2

            _pos = self._io.pos()
            self._io.seek(30)
            self._m_dimension_2 = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_dimension_2", None)

        @property
        def dimension_3(self):
            # u4le at absolute offset 34 (third array dimension).
            if hasattr(self, "_m_dimension_3"):
                return self._m_dimension_3

            _pos = self._io.pos()
            self._io.seek(34)
            self._m_dimension_3 = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_dimension_3", None)

        @property
        def valid(self):
            # Version 63 is a recognised layout, so records parsed with it
            # are always flagged valid.
            if hasattr(self, "_m_valid"):
                return self._m_valid

            self._m_valid = True
            return getattr(self, "_m_valid", None)

        @property
        def sub_record_length(self):
            # u4le just past the variable-length records list.
            if hasattr(self, "_m_sub_record_length"):
                return self._m_sub_record_length

            _pos = self._io.pos()
            self._io.seek((78 + (len(self.records) * 4)))
            self._m_sub_record_length = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_sub_record_length", None)

        @property
        def logical_path(self):
            # Five-u4 logical path structure at absolute offset 666.
            if hasattr(self, "_m_logical_path"):
                return self._m_logical_path

            _pos = self._io.pos()
            self._io.seek(666)
            self._m_logical_path = RxTag.LogicalPath(self._io, self, self._root)
            self._io.seek(_pos)
            return getattr(self, "_m_logical_path", None)

        @property
        def name(self):
            # UTF-8 tag name, located 2 bytes past the length field that
            # follows the records list.
            if hasattr(self, "_m_name"):
                return self._m_name

            _pos = self._io.pos()
            self._io.seek(((78 + (len(self.records) * 4)) + 2))
            self._m_name = (self._io.read_bytes(self.tag_name_length)).decode("UTF-8")
            self._io.seek(_pos)
            return getattr(self, "_m_name", None)

        @property
        def dimension_1(self):
            # u4le at absolute offset 26 (first array dimension).
            if hasattr(self, "_m_dimension_1"):
                return self._m_dimension_1

            _pos = self._io.pos()
            self._io.seek(26)
            self._m_dimension_1 = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_dimension_1", None)

        @property
        def records(self):
            # u4le values starting at offset 78, read until the sentinel
            # value 590 is encountered (the sentinel is included in the list).
            # NOTE(review): sentinel meaning is taken from the .ksy — confirm.
            if hasattr(self, "_m_records"):
                return self._m_records

            _pos = self._io.pos()
            self._io.seek(78)
            self._m_records = []
            i = 0
            while True:
                _ = self._io.read_u4le()
                self._m_records.append(_)
                if _ == 590:
                    break
                i += 1
            self._io.seek(_pos)
            return getattr(self, "_m_records", None)

        @property
        def data_table_instance(self):
            # u4le at absolute offset 50.
            if hasattr(self, "_m_data_table_instance"):
                return self._m_data_table_instance

            _pos = self._io.pos()
            self._io.seek(50)
            self._m_data_table_instance = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_data_table_instance", None)

    class V60(KaitaiStruct):
        """Body layout for record format version 60 (fixed offsets; the tag
        name sits at a constant position rather than after a records list).
        """

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # No sequential fields; everything is read lazily via properties.
            pass

        @property
        def cip_data_type(self):
            # u2le at absolute offset 66.
            if hasattr(self, "_m_cip_data_type"):
                return self._m_cip_data_type

            _pos = self._io.pos()
            self._io.seek(66)
            self._m_cip_data_type = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_cip_data_type", None)

        @property
        def tag_name_length(self):
            # u2le at absolute offset 90.
            if hasattr(self, "_m_tag_name_length"):
                return self._m_tag_name_length

            _pos = self._io.pos()
            self._io.seek(90)
            self._m_tag_name_length = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_tag_name_length", None)

        @property
        def data_type(self):
            # u4le at absolute offset 42.
            if hasattr(self, "_m_data_type"):
                return self._m_data_type

            _pos = self._io.pos()
            self._io.seek(42)
            self._m_data_type = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_data_type", None)

        @property
        def dimension_2(self):
            # u4le at absolute offset 30 (second array dimension).
            if hasattr(self, "_m_dimension_2"):
                return self._m_dimension_2

            _pos = self._io.pos()
            self._io.seek(30)
            self._m_dimension_2 = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_dimension_2", None)

        @property
        def dimension_3(self):
            # u4le at absolute offset 34 (third array dimension).
            if hasattr(self, "_m_dimension_3"):
                return self._m_dimension_3

            _pos = self._io.pos()
            self._io.seek(34)
            self._m_dimension_3 = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_dimension_3", None)

        @property
        def valid(self):
            # Version 60 is a recognised layout, so records parsed with it
            # are always flagged valid.
            if hasattr(self, "_m_valid"):
                return self._m_valid

            self._m_valid = True
            return getattr(self, "_m_valid", None)

        @property
        def logical_path(self):
            # Five-u4 logical path structure at absolute offset 666.
            if hasattr(self, "_m_logical_path"):
                return self._m_logical_path

            _pos = self._io.pos()
            self._io.seek(666)
            self._m_logical_path = RxTag.LogicalPath(self._io, self, self._root)
            self._io.seek(_pos)
            return getattr(self, "_m_logical_path", None)

        @property
        def name(self):
            # UTF-8 tag name at absolute offset 92 (2 bytes past its length).
            if hasattr(self, "_m_name"):
                return self._m_name

            _pos = self._io.pos()
            self._io.seek(92)
            self._m_name = (self._io.read_bytes(self.tag_name_length)).decode("UTF-8")
            self._io.seek(_pos)
            return getattr(self, "_m_name", None)

        @property
        def dimension_1(self):
            # u4le at absolute offset 26 (first array dimension).
            if hasattr(self, "_m_dimension_1"):
                return self._m_dimension_1

            _pos = self._io.pos()
            self._io.seek(26)
            self._m_dimension_1 = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_dimension_1", None)

        @property
        def data_table_instance(self):
            # u4le at absolute offset 50.
            if hasattr(self, "_m_data_table_instance"):
                return self._m_data_table_instance

            _pos = self._io.pos()
            self._io.seek(50)
            self._m_data_table_instance = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_data_table_instance", None)

    class LogicalPath(KaitaiStruct):
        """Five consecutive u4le position values forming a logical path."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.position_0 = self._io.read_u4le()
            self.position_1 = self._io.read_u4le()
            self.position_2 = self._io.read_u4le()
            self.position_3 = self._io.read_u4le()
            self.position_4 = self._io.read_u4le()

    class V0(KaitaiStruct):
        """Body for record format version 0 — carries no data and is
        flagged invalid."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            pass

        @property
        def valid(self):
            # Version 0 records carry no parseable body.
            if hasattr(self, "_m_valid"):
                return self._m_valid

            self._m_valid = False
            return getattr(self, "_m_valid", None)

    class VUnknown(KaitaiStruct):
        """Fallback body for unrecognised record format versions; always
        flagged invalid."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            pass

        @property
        def valid(self):
            # Unknown format version: nothing can be parsed safely.
            if hasattr(self, "_m_valid"):
                return self._m_valid

            self._m_valid = False
            return getattr(self, "_m_valid", None)

    class V63Records(KaitaiStruct):
        """A single 12-byte sub-record of a V63 body (three u4le values of
        unknown meaning)."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.unknown_0 = self._io.read_u4le()
            self.unknown_1 = self._io.read_u4le()
            self.unknown_2 = self._io.read_u4le()
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# NOTE(review): the correctly spelled ``current_active`` alias added below
# belongs in the source .ksy as well, so regeneration does not lose it.

import kaitaistruct
from kaitaistruct import KaitaiStruct

if getattr(kaitaistruct, "API_VERSION", (0, 9)) < (0, 9):
    raise Exception(
        "Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s"
        % (kaitaistruct.__version__)
    )


class RxController(KaitaiStruct):
    """Parser for a controller record from an ACD Comps database.

    Reads a fixed header (parent id, unique tag identifier, record format
    version, comment id) and dispatches on ``record_format_version`` to a
    version-specific body parser (``V95``, ``V103``, or ``VUnknown``).

    Body fields are lazy "instances": each property seeks to an absolute
    offset, reads the value, restores the stream position, and caches the
    result in an ``_m_*`` attribute.
    """

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Fixed header common to every record format version.
        self.parent_id = self._io.read_u4le()
        self.unique_tag_identifier = self._io.read_u4le()
        self.record_format_version = self._io.read_u2le()
        self.comment_id = self._io.read_u4le()
        # Dispatch to the body parser matching the record format version.
        _on = self.record_format_version
        if _on == 95:
            self.body = RxController.V95(self._io, self, self._root)
        elif _on == 103:
            self.body = RxController.V103(self._io, self, self._root)
        else:
            self.body = RxController.VUnknown(self._io, self, self._root)

    class VUnknown(KaitaiStruct):
        """Fallback body for unrecognised record format versions; always
        flagged invalid."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            pass

        @property
        def valid(self):
            # Unknown format version: nothing can be parsed safely.
            if hasattr(self, "_m_valid"):
                return self._m_valid

            self._m_valid = False
            return getattr(self, "_m_valid", None)

    class V95(KaitaiStruct):
        """Body layout for record format version 95 (fixed offsets)."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # No sequential fields; everything is read lazily via properties.
            pass

        @property
        def len_most_recent(self):
            # u4le at absolute offset 363: byte length of most_recent.
            if hasattr(self, "_m_len_most_recent"):
                return self._m_len_most_recent

            _pos = self._io.pos()
            self._io.seek(363)
            self._m_len_most_recent = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_len_most_recent", None)

        @property
        def record(self):
            # Raw record bytes starting at offset 74.
            # NOTE(review): starts at the same offset as len_record, so the
            # buffer includes its own length field — mirrors the .ksy; confirm.
            if hasattr(self, "_m_record"):
                return self._m_record

            _pos = self._io.pos()
            self._io.seek(74)
            self._m_record = self._io.read_bytes(self.len_record)
            self._io.seek(_pos)
            return getattr(self, "_m_record", None)

        @property
        def len_current_active(self):
            # u4le at absolute offset 327: byte length of current_active.
            if hasattr(self, "_m_len_current_active"):
                return self._m_len_current_active

            _pos = self._io.pos()
            self._io.seek(327)
            self._m_len_current_active = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_len_current_active", None)

        @property
        def most_recent(self):
            # UTF-16 string at absolute offset 367.
            if hasattr(self, "_m_most_recent"):
                return self._m_most_recent

            _pos = self._io.pos()
            self._io.seek(367)
            self._m_most_recent = (self._io.read_bytes(self.len_most_recent)).decode(
                "utf-16"
            )
            self._io.seek(_pos)
            return getattr(self, "_m_most_recent", None)

        @property
        def serial_number(self):
            # u4le at absolute offset 459.
            if hasattr(self, "_m_serial_number"):
                return self._m_serial_number

            _pos = self._io.pos()
            self._io.seek(459)
            self._m_serial_number = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_serial_number", None)

        @property
        def valid(self):
            # Version 95 is a recognised layout, so records parsed with it
            # are always flagged valid.
            if hasattr(self, "_m_valid"):
                return self._m_valid

            self._m_valid = True
            return getattr(self, "_m_valid", None)

        @property
        def len_record(self):
            # u4le at absolute offset 74: total record length.
            if hasattr(self, "_m_len_record"):
                return self._m_len_record

            _pos = self._io.pos()
            self._io.seek(74)
            self._m_len_record = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_len_record", None)

        @property
        def current_acive(self):
            # UTF-16 string at absolute offset 331.
            # Misspelled name kept for backward compatibility; prefer
            # ``current_active``.
            if hasattr(self, "_m_current_acive"):
                return self._m_current_acive

            _pos = self._io.pos()
            self._io.seek(331)
            self._m_current_acive = (
                self._io.read_bytes(self.len_current_active)
            ).decode("utf-16")
            self._io.seek(_pos)
            return getattr(self, "_m_current_acive", None)

        @property
        def current_active(self):
            """Correctly spelled alias for :attr:`current_acive`."""
            return self.current_acive

    class V103(KaitaiStruct):
        """Body layout for record format version 103 (fixed offsets; also
        exposes a communication ``path``)."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # No sequential fields; everything is read lazily via properties.
            pass

        @property
        def len_most_recent(self):
            # u4le at absolute offset 232: byte length of most_recent.
            if hasattr(self, "_m_len_most_recent"):
                return self._m_len_most_recent

            _pos = self._io.pos()
            self._io.seek(232)
            self._m_len_most_recent = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_len_most_recent", None)

        @property
        def record(self):
            # Raw record bytes starting at offset 74.
            # NOTE(review): starts at the same offset as len_record, so the
            # buffer includes its own length field — mirrors the .ksy; confirm.
            if hasattr(self, "_m_record"):
                return self._m_record

            _pos = self._io.pos()
            self._io.seek(74)
            self._m_record = self._io.read_bytes(self.len_record)
            self._io.seek(_pos)
            return getattr(self, "_m_record", None)

        @property
        def len_current_active(self):
            # u4le at absolute offset 196: byte length of current_active.
            if hasattr(self, "_m_len_current_active"):
                return self._m_len_current_active

            _pos = self._io.pos()
            self._io.seek(196)
            self._m_len_current_active = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_len_current_active", None)

        @property
        def most_recent(self):
            # UTF-16 string at absolute offset 236.
            if hasattr(self, "_m_most_recent"):
                return self._m_most_recent

            _pos = self._io.pos()
            self._io.seek(236)
            self._m_most_recent = (self._io.read_bytes(self.len_most_recent)).decode(
                "utf-16"
            )
            self._io.seek(_pos)
            return getattr(self, "_m_most_recent", None)

        @property
        def serial_number(self):
            # u4le at absolute offset 328.
            if hasattr(self, "_m_serial_number"):
                return self._m_serial_number

            _pos = self._io.pos()
            self._io.seek(328)
            self._m_serial_number = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_serial_number", None)

        @property
        def valid(self):
            # Version 103 is a recognised layout, so records parsed with it
            # are always flagged valid.
            if hasattr(self, "_m_valid"):
                return self._m_valid

            self._m_valid = True
            return getattr(self, "_m_valid", None)

        @property
        def len_record(self):
            # u4le at absolute offset 74: total record length.
            if hasattr(self, "_m_len_record"):
                return self._m_len_record

            _pos = self._io.pos()
            self._io.seek(74)
            self._m_len_record = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_len_record", None)

        @property
        def path(self):
            # UTF-16 string at absolute offset 388.
            if hasattr(self, "_m_path"):
                return self._m_path

            _pos = self._io.pos()
            self._io.seek(388)
            self._m_path = (self._io.read_bytes(self.len_path)).decode("utf-16")
            self._io.seek(_pos)
            return getattr(self, "_m_path", None)

        @property
        def len_path(self):
            # u4le at absolute offset 384: byte length of path.
            if hasattr(self, "_m_len_path"):
                return self._m_len_path

            _pos = self._io.pos()
            self._io.seek(384)
            self._m_len_path = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_len_path", None)

        @property
        def current_acive(self):
            # UTF-16 string at absolute offset 200.
            # Misspelled name kept for backward compatibility; prefer
            # ``current_active``.
            if hasattr(self, "_m_current_acive"):
                return self._m_current_acive

            _pos = self._io.pos()
            self._io.seek(200)
            self._m_current_acive = (
                self._io.read_bytes(self.len_current_active)
            ).decode("utf-16")
            self._io.seek(_pos)
            return getattr(self, "_m_current_acive", None)

        @property
        def current_active(self):
            """Correctly spelled alias for :attr:`current_acive`."""
            return self.current_acive
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild

import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO


if getattr(kaitaistruct, "API_VERSION", (0, 9)) < (0, 9):
    raise Exception(
        "Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s"
        % (kaitaistruct.__version__)
    )


class Dat(KaitaiStruct):
    """Parser for an ACD ``.dat`` database file.

    Reads a fixed header, then slices the remainder of the file into a
    sub-stream and parses it as a sequence of typed records (FAFA, FDFD,
    FEFE, BFFB), each introduced by a u2 identifier and a u4 total length.
    """

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.header = Dat.Header(self._io, self, self._root)
        # Slice everything from the first record to the declared end of file
        # into an isolated sub-stream for record parsing.
        self._raw_records = self._io.read_bytes(
            ((self.header.file_length - self.header.first_record_position) + 1)
        )
        _io__raw_records = KaitaiStream(BytesIO(self._raw_records))
        self.records = Dat.Records(_io__raw_records, self, self._root)

    class FefeRecord(KaitaiStruct):
        """Payload of a FEFE (0xFEFE) record: its own length prefix, two
        unknown words, then the raw record buffer."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.len_record_buffer = self._io.read_u4le()
            self.blank_1 = self._io.read_u4le()
            self.unknown_1 = self._io.read_u4le()
            self.unknown_2 = self._io.read_u4le()
            self.record_buffer = self._io.read_bytes(self.len_record_buffer)

    class FdfdRecord(KaitaiStruct):
        """Payload of a FDFD (0xFDFD) record: the entire remaining
        sub-stream as a raw buffer.

        NOTE(review): ``len_record_buffer`` is stored but not used by the
        read — the sub-stream is already sized by the caller.
        """

        def __init__(self, len_record_buffer, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.len_record_buffer = len_record_buffer
            self._read()

        def _read(self):
            self.record_buffer = self._io.read_bytes_full()

    class BffbRecord(KaitaiStruct):
        """Payload of a BFFB record: ``len_record_buffer`` raw bytes."""

        def __init__(self, len_record_buffer, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.len_record_buffer = len_record_buffer
            self._read()

        def _read(self):
            self.record_buffer = self._io.read_bytes(self.len_record_buffer)

    class Header(KaitaiStruct):
        """Fixed file header: format type, file length, position of the
        first record, FAFA record count, and the remaining header bytes."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.format_type = self._io.read_u4le()
            self.blank_2 = self._io.read_u4le()
            self.file_length = self._io.read_u4le()
            self.first_record_position = self._io.read_u4le()
            self.blank_3 = self._io.read_u4le()
            self.number_records_fafa = self._io.read_u4le()
            # Consume the rest of the header (24 bytes already read above).
            self.header_buffer = []
            for i in range((self.first_record_position - 24)):
                self.header_buffer.append(self._io.read_u1())

    class Records(KaitaiStruct):
        """Sequence of records parsed until the end of the sub-stream."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.record = []
            i = 0
            while not self._io.is_eof():
                self.record.append(Dat.Record(self._io, self, self._root))
                i += 1

    class Record(KaitaiStruct):
        """One typed record: u2 identifier, u4 total length, then a payload
        of ``len_record - 6`` bytes (6 = identifier + length fields)
        dispatched on the identifier."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.identifier = self._io.read_u2le()
            # Only 0xFEFE (65278), 0xFDFD (65021), 0xFAFA (64250) and
            # 0xFBBF (64447) are accepted record identifiers.
            if not (
                (self.identifier == 65278)
                or (self.identifier == 65021)
                or (self.identifier == 64250)
                or (self.identifier == 64447)
            ):
                raise kaitaistruct.ValidationNotAnyOfError(
                    self.identifier, self._io, "/types/record/seq/0"
                )
            self.len_record = self._io.read_u4le()
            _on = self.identifier
            if _on == 65278:
                self._raw_record = self._io.read_bytes((self.len_record - 6))
                _io__raw_record = KaitaiStream(BytesIO(self._raw_record))
                self.record = Dat.FefeRecord(_io__raw_record, self, self._root)
            elif _on == 64447:
                self._raw_record = self._io.read_bytes((self.len_record - 6))
                _io__raw_record = KaitaiStream(BytesIO(self._raw_record))
                self.record = Dat.BffbRecord(
                    (self.len_record - 6), _io__raw_record, self, self._root
                )
            elif _on == 64250:
                self._raw_record = self._io.read_bytes((self.len_record - 6))
                _io__raw_record = KaitaiStream(BytesIO(self._raw_record))
                self.record = Dat.FafaRecord(
                    (self.len_record - 6), _io__raw_record, self, self._root
                )
            elif _on == 65021:
                self._raw_record = self._io.read_bytes((self.len_record - 6))
                _io__raw_record = KaitaiStream(BytesIO(self._raw_record))
                self.record = Dat.FdfdRecord(
                    (self.len_record - 6), _io__raw_record, self, self._root
                )
            else:
                # NOTE(review): unreachable — the validation above already
                # restricts the identifier to the four handled cases.
                self.record = self._io.read_bytes((self.len_record - 6))

    class FafaRecord(KaitaiStruct):
        """Payload of a FAFA (0xFAFA) record: a raw buffer plus lazy
        tag-related fields read at fixed offsets within that buffer."""

        def __init__(self, len_record_buffer, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.len_record_buffer = len_record_buffer
            self._read()

        def _read(self):
            self.record_buffer = self._io.read_bytes(self.len_record_buffer)

        @property
        def third_array_dimension(self):
            # u4le at absolute offset 182.
            if hasattr(self, "_m_third_array_dimension"):
                return self._m_third_array_dimension

            _pos = self._io.pos()
            self._io.seek(182)
            self._m_third_array_dimension = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_third_array_dimension", None)

        @property
        def data_type_id(self):
            # u4le at absolute offset 190.
            if hasattr(self, "_m_data_type_id"):
                return self._m_data_type_id

            _pos = self._io.pos()
            self._io.seek(190)
            self._m_data_type_id = self._io.read_u4le()
            self._io.seek(_pos)
            return getattr(self, "_m_data_type_id", None)

        @property
        def tag_name_length(self):
            # u2le at absolute offset 238: byte length of tag_name.
            if hasattr(self, "_m_tag_name_length"):
                return self._m_tag_name_length

            _pos = self._io.pos()
            self._io.seek(238)
            self._m_tag_name_length = self._io.read_u2le()
            self._io.seek(_pos)
            return getattr(self, "_m_tag_name_length", None)

        @property
        def tag_name(self):
            # UTF-8 tag name at absolute offset 240.
            if hasattr(self, "_m_tag_name"):
                return self._m_tag_name

            _pos = self._io.pos()
            self._io.seek(240)
            self._m_tag_name = (self._io.read_bytes(self.tag_name_length)).decode("UTF-8")
            self._io.seek(_pos)
            return getattr(self, "_m_tag_name", None)
Please edit source .ksy file and use kaitai-struct-compiler to rebuild 2 | 3 | import kaitaistruct 4 | from kaitaistruct import KaitaiStruct 5 | 6 | if getattr(kaitaistruct, "API_VERSION", (0, 9)) < (0, 9): 7 | raise Exception( 8 | "Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" 9 | % (kaitaistruct.__version__) 10 | ) 11 | 12 | 13 | class RxMapDevice(KaitaiStruct): 14 | def __init__(self, _io, _parent=None, _root=None): 15 | self._io = _io 16 | self._parent = _parent 17 | self._root = _root if _root else self 18 | self._read() 19 | 20 | def _read(self): 21 | self.parent_id = self._io.read_u4le() 22 | self.unique_tag_identifier = self._io.read_u4le() 23 | self.record_format_version = self._io.read_u2le() 24 | self.comment_id = self._io.read_u4le() 25 | _on = self.record_format_version 26 | if _on == 0: 27 | self.body = RxMapDevice.V0(self._io, self, self._root) 28 | elif _on == 162: 29 | self.body = RxMapDevice.V162(self._io, self, self._root) 30 | elif _on == 173: 31 | self.body = RxMapDevice.V173(self._io, self, self._root) 32 | else: 33 | self.body = RxMapDevice.VUnknown(self._io, self, self._root) 34 | 35 | class VUnknown(KaitaiStruct): 36 | def __init__(self, _io, _parent=None, _root=None): 37 | self._io = _io 38 | self._parent = _parent 39 | self._root = _root if _root else self 40 | self._read() 41 | 42 | def _read(self): 43 | pass 44 | 45 | @property 46 | def valid(self): 47 | if hasattr(self, "_m_valid"): 48 | return self._m_valid 49 | 50 | self._m_valid = False 51 | return getattr(self, "_m_valid", None) 52 | 53 | class V0(KaitaiStruct): 54 | def __init__(self, _io, _parent=None, _root=None): 55 | self._io = _io 56 | self._parent = _parent 57 | self._root = _root if _root else self 58 | self._read() 59 | 60 | def _read(self): 61 | pass 62 | 63 | @property 64 | def valid(self): 65 | if hasattr(self, "_m_valid"): 66 | return self._m_valid 67 | 68 | self._m_valid = False 69 | return getattr(self, "_m_valid", None) 70 | 71 | 
class V162(KaitaiStruct): 72 | def __init__(self, _io, _parent=None, _root=None): 73 | self._io = _io 74 | self._parent = _parent 75 | self._root = _root if _root else self 76 | self._read() 77 | 78 | def _read(self): 79 | pass 80 | 81 | @property 82 | def module_id(self): 83 | if hasattr(self, "_m_module_id"): 84 | return self._m_module_id 85 | 86 | _pos = self._io.pos() 87 | self._io.seek(126) 88 | self._m_module_id = self._io.read_u4le() 89 | self._io.seek(_pos) 90 | return getattr(self, "_m_module_id", None) 91 | 92 | @property 93 | def valid(self): 94 | if hasattr(self, "_m_valid"): 95 | return self._m_valid 96 | 97 | self._m_valid = True 98 | return getattr(self, "_m_valid", None) 99 | 100 | @property 101 | def record_length(self): 102 | if hasattr(self, "_m_record_length"): 103 | return self._m_record_length 104 | 105 | _pos = self._io.pos() 106 | self._io.seek(74) 107 | self._m_record_length = self._io.read_u4le() 108 | self._io.seek(_pos) 109 | return getattr(self, "_m_record_length", None) 110 | 111 | @property 112 | def product_type(self): 113 | if hasattr(self, "_m_product_type"): 114 | return self._m_product_type 115 | 116 | _pos = self._io.pos() 117 | self._io.seek(94) 118 | self._m_product_type = self._io.read_u2le() 119 | self._io.seek(_pos) 120 | return getattr(self, "_m_product_type", None) 121 | 122 | @property 123 | def vendor_id(self): 124 | if hasattr(self, "_m_vendor_id"): 125 | return self._m_vendor_id 126 | 127 | _pos = self._io.pos() 128 | self._io.seek(92) 129 | self._m_vendor_id = self._io.read_u2le() 130 | self._io.seek(_pos) 131 | return getattr(self, "_m_vendor_id", None) 132 | 133 | @property 134 | def slot_no(self): 135 | if hasattr(self, "_m_slot_no"): 136 | return self._m_slot_no 137 | 138 | _pos = self._io.pos() 139 | self._io.seek(122) 140 | self._m_slot_no = self._io.read_u4le() 141 | self._io.seek(_pos) 142 | return getattr(self, "_m_slot_no", None) 143 | 144 | @property 145 | def product_code(self): 146 | if hasattr(self, 
"_m_product_code"): 147 | return self._m_product_code 148 | 149 | _pos = self._io.pos() 150 | self._io.seek(96) 151 | self._m_product_code = self._io.read_u2le() 152 | self._io.seek(_pos) 153 | return getattr(self, "_m_product_code", None) 154 | 155 | @property 156 | def parent_module(self): 157 | if hasattr(self, "_m_parent_module"): 158 | return self._m_parent_module 159 | 160 | _pos = self._io.pos() 161 | self._io.seek(112) 162 | self._m_parent_module = self._io.read_u4le() 163 | self._io.seek(_pos) 164 | return getattr(self, "_m_parent_module", None) 165 | 166 | class V173(KaitaiStruct): 167 | def __init__(self, _io, _parent=None, _root=None): 168 | self._io = _io 169 | self._parent = _parent 170 | self._root = _root if _root else self 171 | self._read() 172 | 173 | def _read(self): 174 | pass 175 | 176 | @property 177 | def module_id(self): 178 | if hasattr(self, "_m_module_id"): 179 | return self._m_module_id 180 | 181 | _pos = self._io.pos() 182 | self._io.seek((((80 + 4) + (self.record_count * 12)) + 38)) 183 | self._m_module_id = self._io.read_u4le() 184 | self._io.seek(_pos) 185 | return getattr(self, "_m_module_id", None) 186 | 187 | @property 188 | def valid(self): 189 | if hasattr(self, "_m_valid"): 190 | return self._m_valid 191 | 192 | self._m_valid = True 193 | return getattr(self, "_m_valid", None) 194 | 195 | @property 196 | def record_count(self): 197 | if hasattr(self, "_m_record_count"): 198 | return self._m_record_count 199 | 200 | _pos = self._io.pos() 201 | self._io.seek(78) 202 | self._m_record_count = self._io.read_u2le() 203 | self._io.seek(_pos) 204 | return getattr(self, "_m_record_count", None) 205 | 206 | @property 207 | def record_length(self): 208 | if hasattr(self, "_m_record_length"): 209 | return self._m_record_length 210 | 211 | _pos = self._io.pos() 212 | self._io.seek(74) 213 | self._m_record_length = self._io.read_u4le() 214 | self._io.seek(_pos) 215 | return getattr(self, "_m_record_length", None) 216 | 217 | @property 218 | 
def product_type(self): 219 | if hasattr(self, "_m_product_type"): 220 | return self._m_product_type 221 | 222 | _pos = self._io.pos() 223 | self._io.seek(((80 + 2) + (self.record_count * 12))) 224 | self._m_product_type = self._io.read_u2le() 225 | self._io.seek(_pos) 226 | return getattr(self, "_m_product_type", None) 227 | 228 | @property 229 | def vendor_id(self): 230 | if hasattr(self, "_m_vendor_id"): 231 | return self._m_vendor_id 232 | 233 | _pos = self._io.pos() 234 | self._io.seek((80 + (self.record_count * 12))) 235 | self._m_vendor_id = self._io.read_u2le() 236 | self._io.seek(_pos) 237 | return getattr(self, "_m_vendor_id", None) 238 | 239 | @property 240 | def slot_no(self): 241 | if hasattr(self, "_m_slot_no"): 242 | return self._m_slot_no 243 | 244 | _pos = self._io.pos() 245 | self._io.seek((((80 + 4) + (self.record_count * 12)) + 22)) 246 | self._m_slot_no = self._io.read_u4le() 247 | self._io.seek(_pos) 248 | return getattr(self, "_m_slot_no", None) 249 | 250 | @property 251 | def product_code(self): 252 | if hasattr(self, "_m_product_code"): 253 | return self._m_product_code 254 | 255 | _pos = self._io.pos() 256 | self._io.seek(((80 + 4) + (self.record_count * 12))) 257 | self._m_product_code = self._io.read_u2le() 258 | self._io.seek(_pos) 259 | return getattr(self, "_m_product_code", None) 260 | 261 | @property 262 | def records(self): 263 | if hasattr(self, "_m_records"): 264 | return self._m_records 265 | 266 | _pos = self._io.pos() 267 | self._io.seek(80) 268 | self._m_records = [] 269 | for i in range(self.record_count): 270 | self._m_records.append(self._io.read_bytes(12)) 271 | 272 | self._io.seek(_pos) 273 | return getattr(self, "_m_records", None) 274 | 275 | @property 276 | def parent_module(self): 277 | if hasattr(self, "_m_parent_module"): 278 | return self._m_parent_module 279 | 280 | _pos = self._io.pos() 281 | self._io.seek(((80 + (self.record_count * 12)) + 20)) 282 | self._m_parent_module = self._io.read_u4le() 283 | 
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild

import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO


# Require the 0.9+ Kaitai Struct Python runtime; older runtimes lack the
# API surface this generated code depends on.
if getattr(kaitaistruct, "API_VERSION", (0, 9)) < (0, 9):
    raise Exception(
        "Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s"
        % (kaitaistruct.__version__)
    )


class FafaSbregions(KaitaiStruct):
    """Parser for one FAFA SbRegion record extracted from an ACD database.

    Layout: a u4le total record length, a fixed :class:`Header`, then a
    u4le-length-prefixed opaque record buffer.
    """

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.record_length = self._io.read_u4le()
        self.header = FafaSbregions.Header(self._io, self, self._root)
        self.len_record_buffer = self._io.read_u4le()
        self.record_buffer = self._io.read_bytes(self.len_record_buffer)

    class Header(KaitaiStruct):
        """Fixed-size SbRegion header that precedes the record buffer."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.sb_regions = self._io.read_u2le()
            self.identifier = self._io.read_u4le()
            # Fixed 41-byte field holding a NUL-terminated UTF-8 string;
            # bytes after the terminator are discarded (terminator excluded).
            self.language_type = (
                KaitaiStream.bytes_terminate(self._io.read_bytes(41), 0, False)
            ).decode("UTF-8")
import os
import shutil
import struct
import xml.etree.ElementTree as ET
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from os import PathLike
from pathlib import Path
from sqlite3 import Cursor
from typing import List, Tuple, Dict, Union

from acd.generated.comps.rx_generic import RxGeneric


@dataclass
class L5xElementBuilder:
    """Base for builders that assemble L5X elements from the comps DB.

    _cur:       open sqlite3 cursor over the extracted ACD database
    _object_id: comps object_id of the record to build (-1 = unset)
    """

    _cur: Cursor
    _object_id: int = -1


@dataclass
class L5xElement:
    """Base class for every element serialized into the L5X export.

    Public (non-underscore) dataclass fields become XML attributes or
    child elements via :meth:`to_xml`; underscore-prefixed fields are
    internal and excluded from the export.
    """

    _name: str

    def __post_init__(self):
        self._export_name = ""

    def to_xml(self):
        """Serialize this element (and its children) to an XML string.

        Field names are converted from snake_case to PascalCase for the
        tag/attribute names; the field ``cls`` is emitted as ``class``
        because ``class`` is a Python keyword.

        NOTE(review): neither the child-list wrappers nor the element
        itself are closed with a matching ``</...>`` tag here -- presumably
        the closing tags are appended elsewhere (e.g. export_l5x);
        confirm before relying on this output being well-formed XML.
        """
        attribute_list: List[str] = []
        child_list: List[str] = []
        for attribute in self.__dict__:
            if attribute[0] != "_":  # underscore fields are internal-only
                attribute_value = self.__getattribute__(attribute)
                if isinstance(attribute_value, L5xElement):
                    # Nested element -> recurse and emit as a child.
                    child_list.append(attribute_value.to_xml())
                elif isinstance(attribute_value, list):
                    # Only these whitelisted collections are exported as
                    # wrapped child lists; other list fields are skipped.
                    if (
                        attribute == "tags"
                        or attribute == "data_types"
                        or attribute == "members"
                        or attribute == "programs"
                        or attribute == "routines"
                    ):
                        new_child_list: List[str] = []
                        for element in attribute_value:
                            if isinstance(element, L5xElement):
                                new_child_list.append(element.to_xml())
                            else:
                                # Non-element entries become empty tags.
                                new_child_list.append(f"<{element}/>")
                        child_list.append(
                            f'<{attribute.title().replace("_", "")}>{"".join(new_child_list)}'
                        )

                else:
                    # Scalar field -> XML attribute.
                    if attribute == "cls":
                        attribute = "class"
                    attribute_list.append(
                        f'{attribute.title().replace("_", "")}="{attribute_value}"'
                    )

        # Tag name is derived from the concrete class name, not _export_name.
        _export_name = self.__class__.__name__.title().replace("_", "")
        return f'<{_export_name} {" ".join(attribute_list)}>{"".join(child_list)}'


@dataclass
class Member(L5xElement):
    """One member of a user-defined data type."""

    name: str
    data_type: str
    dimension: int
    radix: str
    hidden: bool
    external_access: str


@dataclass
class DataType(L5xElement):
    """A user-defined data type; ``cls`` is exported as ``class``."""

    name: str
    family: str
    cls: str
    members: List[Member]


@dataclass
class Tag(L5xElement):
    """A controller or program tag.

    _data_table_instance / _comments are internal (not exported to XML).
    """

    name: str
    tag_type: str
    data_type: str
    radix: str
    external_access: str
    _data_table_instance: int
    _comments: List[Tuple[str, str]]


@dataclass
class MapDevice(L5xElement):
    """An I/O module entry from the controller's module map."""

    module_id: int
    parent_module: int
    slot_no: int
    vendor_id: int
    product_type: int
    product_code: int
    comments: List[Tuple[str, str]]


@dataclass
class Routine(L5xElement):
    """A program routine and its rung (logic) text."""

    name: str
    type: str
    rungs: List[str]


@dataclass
class AOI(L5xElement):
    """An Add-On Instruction definition."""

    routines: List[Routine]
    tags: List[Tag]


@dataclass
class Program(L5xElement):
    """A program: its routines plus program-scoped tags."""

    routines: List[Routine]
    tags: List[Tag]


@dataclass
class Controller(L5xElement):
    """The controller element aggregating the whole project contents."""

    serial_number: str
    comm_path: str
    sfc_execution_control: str
    sfc_restart_position: str
    sfc_last_scan: str
    created_date: str
    modified_date: str
    data_types: List[DataType]
    tags: List[Tag]
    programs: List[Program]
    aois: List[AOI]
    map_devices: List[MapDevice]


@dataclass
class RSLogix5000Content(L5xElement):
    """Controller Project"""

    controller: Union[Controller, None]
    schema_revision: str
    software_revision: str
    target_name: str
    target_type: str
    contains_context: str
    export_date: str
    export_options: str

    def __post_init__(self):
        # Force the fixed root-element name.  NOTE(review): this override
        # does not call super().__post_init__(), so _export_name is never
        # set on this class -- harmless as long as to_xml() keeps using the
        # class name, but confirm nothing else reads _export_name.
        self._name = "RSLogix5000Content"
# Radix (display style) names indexed by the numeric code stored in the
# ACD database.  Built once at import time instead of a 13-branch if/elif
# chain; unknown codes fall back to "General".
_RADIX_NAMES: Dict[int, str] = {
    0: "NullType",
    1: "General",
    2: "Binary",
    3: "Octal",
    4: "Decimal",
    5: "Hex",
    6: "Exponential",
    7: "Float",
    8: "ASCII",
    9: "Unicode",
    10: "Date/Time",
    11: "Date/Time (ns)",
    12: "UseTypeStyle",
}


def radix_enum(i: int) -> str:
    """Return the radix (display style) name for numeric code *i*.

    Codes outside 0-12 map to "General", matching the previous
    if/elif chain's fall-through behavior.
    """
    return _RADIX_NAMES.get(i, "General")


# ExternalAccess names indexed by numeric code.  Code 1 is deliberately
# absent: it (like any unknown code) maps to the "Read/Write" default,
# exactly as the original if-chain behaved.
_EXTERNAL_ACCESS_NAMES: Dict[int, str] = {
    0: "Read/Write",
    2: "Read Only",
    3: "None",
}


def external_access_enum(i: int) -> str:
    """Return the ExternalAccess name for numeric code *i*.

    Codes other than 0, 2 and 3 (including 1) fall back to "Read/Write".
    """
    return _EXTERNAL_ACCESS_NAMES.get(i, "Read/Write")