├── test
│   ├── features
│   │   ├── dictionary_truncate.feature
│   │   ├── dictionary_oem.feature
│   │   ├── dictionary.feature
│   │   ├── environment.py
│   │   ├── steps
│   │   │   └── dictionary_format.py
│   │   └── encode_decode.feature
│   ├── test.sh
│   ├── dummysimple.json
│   ├── dummysimple2.json
│   ├── error.json
│   ├── storage_profile_conformant.json
│   ├── outlet.json
│   ├── test_cli.sh
│   ├── utils.py
│   ├── circuit.json
│   ├── storage.json
│   ├── schema
│   │   ├── oem-csdl
│   │   │   ├── OEM2DriveExt_v1.xml
│   │   │   └── OEM1DriveExt_v1.xml
│   │   └── dummysimple
│   │       ├── json-schema
│   │       │   └── DummySimple.v1_0_0.json
│   │       └── csdl
│   │           └── DummySimple_v1.xml
│   ├── drive.json
│   ├── example_profile_for_truncation.json
│   ├── test.py
│   └── storage_large.json
├── requirements.txt
├── .gitignore
├── dmtf-config.json
├── Pipfile
├── rdebej
│   ├── __init__.py
│   ├── __version__.py
│   ├── _internal_utils.py
│   ├── decode.py
│   └── encode.py
├── .travis.yml
├── LICENSE.md
├── pldm_bej_encoder_decoder.py
├── rde_schema_dictionary_gen.py
└── generate_dictionaries.py
/test/features/dictionary_truncate.feature: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | lxml 2 | tabulate 3 | gitpython 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .idea 3 | .vscode 4 | Pipfile.lock 5 | -------------------------------------------------------------------------------- /dmtf-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copyright": "Copyright 2014-2020 DMTF", 3 | "DoNotWrite": [], 4 | "ExplicitEntities": {} 5 | } 6 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | behave = "*" 8 | 9 | [packages] 10 | lxml = "*" 11 | tabulate = "*" 12 | GitPython = "*" 13 | requests = "*" 14 | 15 | -------------------------------------------------------------------------------- /rdebej/__init__.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | from .__version__ import __title__, __description__, __version__ 7 | -------------------------------------------------------------------------------- /test/test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | python -u test/test.py --schema_source=https://github.com/DMTF/Redfish.git --git_tag=2018.3 7 | -------------------------------------------------------------------------------- /rdebej/__version__.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | __title__ = 'rdebej' 7 | __description__ = 'RDE BEJ Library' 8 | __url__ = '' 9 | __version__ = '0.0.1' 10 | -------------------------------------------------------------------------------- /test/features/dictionary_oem.feature: -------------------------------------------------------------------------------- 1 | # @fixture.schema_source 2 | # Feature: A dictionary with OEM sections can be generated 3 | # 4 | # Scenario: Encoding JSON into BEJ using dictionaries 5 | # Given a CSDL schema file Storage_v1.xml and entity Storage.Storage 6 | # When the dictionary is generated 7 | # Then the following JSON is encoded using the dictionary successfully 8 | -------------------------------------------------------------------------------- /test/dummysimple.json: -------------------------------------------------------------------------------- 1 | { 2 | "Id": "Dummy ID", 3 | "SampleIntegerProperty": 12, 4 | "SampleRealProperty": 12345.000001, 5 | "SampleEnabledProperty": false, 6 | "ChildArrayProperty": [ 7 | { 8 | "AnotherBoolean": true, 9 | "LinkStatus": "NoLink" 10 | }, 11 | { 12 | "LinkStatus": "LinkDown" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /test/dummysimple2.json: -------------------------------------------------------------------------------- 1 | { 2 | "Id": "Dummy ID", 3 | "SampleIntegerProperty": -5, 4 | "SampleRealProperty": -5576.90001, 5 | "SampleEnabledProperty": false, 6 | "ChildArrayProperty": [ 7 | { 8 | "AnotherBoolean": true, 9 | "LinkStatus": "NoLink" 10 | }, 11 | { 12 | "LinkStatus": "LinkDown" 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /test/error.json: -------------------------------------------------------------------------------- 1 | { 2 | "error": { 3 | "code": "PredictiveFailure", 4 | "message": "Predictive failure detected", 5 | "@Message.ExtendedInfo": [ 6 | { 7 | "MessageId": "PredictiveFailure", 8 | "Severity": "Warning", 9 | "RelatedProperties": [ "FailurePredicted", "MediaType" ] 10 | }, 11 | { 12 | "MessageId": "LinkFailure", 13 | "Severity": "Warning", 14 | "MessageArgs": [ "Port", "1" ] 15 | } 16 | ] 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | dist: jammy 2 | language: python 3 | cache: 4 | - pip 5 | python: 6 | - "3.8" 7 | - "3.9" 8 | - "3.10" 9 | - "3.11" 10 | 11 | env: 12 | - PIPENV_IGNORE_VIRTUALENVS=1 13 | 14 | install: 15 | - pip install pipenv 16 | - pipenv install --dev 17 | 18 | script: 19 | - pipenv run python -u test/test.py --schema_source=https://www.dmtf.org/sites/default/files/standards/documents/DSP8010_2020.2.zip 20 | - pipenv run behave -D schema_sources="[{'source':'http', 'url':'https://www.dmtf.org/sites/default/files/standards/documents/DSP8010_2020.2.zip', 'schema_dir':'tmp_schema', 'csdl_dir':'csdl', 'json_schema_dir':'json-schema'}]" test/features 21 | - test/test_cli.sh 22 | -------------------------------------------------------------------------------- /test/features/dictionary.feature: -------------------------------------------------------------------------------- 1 | @fixture.schema_source 2 | Feature: The dictionary is generated with the correct format 3 | 4 | Scenario Outline: Dictionary headers are encoded correctly 5 | Given a CSDL schema file <Schema> and entity <Entity> 6 | When 
the dictionary is generated with Copyright set to Copyright (c) 2018 DMTF 7 | Then the dictionary header shall have the VersionTag equal to 0x00 8 | And the dictionary header shall have the DictionaryFlags equal to 0x00 9 | And the dictionary header shall have the EntryCount greater than 0x00 10 | And the dictionary header shall have the SchemaVersion greater than 0x00 11 | And the dictionary header shall have the SchemaVersion not equal to 0xFFFFFFFF 12 | And the dictionary header shall have the DictionarySize greater than 0x00 13 | And the dictionary size is correct 14 | And the dictionary shall have the Copyright set to Copyright (c) 2018 DMTF 15 | 16 | Examples: 17 | | Schema | Entity | 18 | | Storage_v1.xml | Storage.Storage | 19 | | Drive_v1.xml | Drive.Drive | 20 | | ComputerSystem_v1.xml | ComputerSystem.ComputerSystem | 21 | | Port_v1.xml | Port.Port | 22 | 23 | 24 | Scenario: Generate dictionaries for all schema files 25 | Given a list of schema files 26 | Then the resulting dictionaries have valid header information 27 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2018-2024, Contributing Member(s) of Distributed Management Task 4 | Force, Inc.. All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without modification, 7 | are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation and/or 14 | other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its contributors 17 | may be used to endorse or promote products derived from this software without 18 | specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 21 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 22 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 24 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 25 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 26 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 27 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 29 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /test/storage_profile_conformant.json: -------------------------------------------------------------------------------- 1 | { 2 | "@odata.type": "#Storage.v1_3_0.Storage", 3 | "@odata.context": "/redfish/v1/$metadata#Storage.Storage", 4 | "@odata.id": "/redfish/v1/Systems/1/Storage/1", 5 | "Id": "RAID Controller 1", 6 | "Name": "RAID Controller", 7 | "Status": { 8 | "State": "Enabled", 9 | "Health": "OK", 10 | "HealthRollup": "OK" 11 | }, 12 | "StorageControllers": [ 13 | { 14 | "@odata.id": "/redfish/v1/Systems/1/Storage/1#/StorageControllers/0", 15 | "@odata.type": "#Storage.v1_3_0.StorageController", 16 | "MemberId": "0", 17 | "Status": { 18 | "State": "Enabled", 19 | "Health": "OK" 20 | }, 21 | "Identifiers": [ 22 | { 23 | "DurableNameFormat": "NAA", 24 | "DurableName": "5045594843305852483430304E452000" 25 | } 26 | ], 27 | "Manufacturer": "Consorto", 28 | "Model": "Consorty RAID Controller XYZ", 29 | "SerialNumber": "PEYHC0XRH400NE", 30 | "PartNumber": "7334534", 31 | "SpeedGbps": 12, 32 | "FirmwareVersion": "1.00", 33 | "SupportedControllerProtocols": [ 34 | "PCIe" 35 | ], 36 | "SupportedDeviceProtocols": [ 37 | "SAS", 38 | "SATA" 39 | ] 40 | } 41 | ], 42 | "Drives": [ 43 | { 44 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.1" 45 | }, 46 | { 47 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.2" 48 | }, 49 | { 50 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.3" 51 | }, 52 | { 53 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.4" 54 | }, 55 | { 56 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.5" 57 | }, 58 | { 59 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.6" 60 | } 61 | ], 62 | "Volumes": { 63 | "@odata.id": "/redfish/v1/volcollection" 64 | } 65 | } -------------------------------------------------------------------------------- /test/outlet.json: -------------------------------------------------------------------------------- 1 | { 2 | "@odata.type": "#Outlet.v1_0_0.Outlet", 3 | "Id": "A1", 4 | "Name": "Outlet A1, Branch Circuit A", 5 | "Status": { 6 | "Health": "OK", 7 | "State": "Enabled" 8 | }, 9 | "PhaseWiringType": "OnePhase3Wire", 10 | "VoltageType": "AC", 11 | "OutletType": "NEMA_5_20R", 12 | "RatedCurrentAmps": 20, 13 | "NominalVoltage": "AC120V", 14 | "IndicatorLED": "Lit", 15 | "PowerOnDelaySeconds": 4, 16 | "PowerOffDelaySeconds": 0, 17 | "PowerState": "On", 18 | "PowerEnabled": true, 19 | "Voltage": { 20 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/VoltageA1", 21 | "Reading": 117.5 22 | }, 23 | "PolyPhaseVoltage": { 24 | "Line1ToNeutral": { 25 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/VoltageA1", 26 | "Reading": 117.5 27 | } 28 | }, 29 | "CurrentAmps": { 30 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/CurrentA1", 31 | "Reading": 1.68 32 | }, 33 | "PolyPhaseCurrentAmps": { 34 | "Line1": { 35 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/CurrentA1", 36 | "Reading": 1.68 37 | } 38 | }, 39 | "PowerWatts": { 40 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/PowerA1", 41 | "Reading": 197.4, 42 | "ApparentVA": 197.4, 43 | "ReactiveVAR": 0, 44 | "PowerFactor": 1 45 | }, 46 | "FrequencyHz": { 47 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/FrequencyA1", 48 | "Reading": 60 49 | }, 50 | "EnergykWh": { 51 | "DataSourceUri": 
"/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/EnergyA1", 52 | "Reading": 36166 53 | }, 54 | "Links": { 55 | "BranchCircuit": { 56 | "@odata.id": "/redfish/v1/PowerEquipment/RackPDUs/1/Branches/A" 57 | } 58 | }, 59 | "Actions": { 60 | "#Outlet.PowerControl": { 61 | "target": "/redfish/v1/PowerEquipment/RackPDUs/1/Outlets/A1/Outlet.PowerControl" 62 | }, 63 | "#Outlet.ResetMetrics": { 64 | "target": "/redfish/v1/PowerEquipment/RackPDUs/1/Outlets/A1/Outlet.ResetMetrics" 65 | } 66 | }, 67 | "@odata.id": "/redfish/v1/PowerEquipment/RackPDUs/1/Outlets/A1" 68 | } -------------------------------------------------------------------------------- /test/test_cli.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | pipenv run python rde_schema_dictionary_gen.py local --csdlSchemaDirectories tmp-schema/DSP8010_2020.2/csdl test/schema/oem-csdl --jsonSchemaDirectories tmp-schema/DSP8010_2020.2/json-schema --schemaFilename Drive_v1.xml --entity Drive.Drive --outputFile drive.bin 7 | pipenv run python rde_schema_dictionary_gen.py local --csdlSchemaDirectories tmp-schema/DSP8010_2020.2/csdl test/schema/oem-csdl --jsonSchemaDirectories tmp-schema/DSP8010_2020.2/json-schema --schemaFilename Drive_v1.xml --entity Drive.Drive --outputFile drive.bin -f drive.json 8 | pipenv run python rde_schema_dictionary_gen.py local --csdlSchemaDirectories tmp-schema/DSP8010_2020.2/csdl test/schema/oem-csdl --jsonSchemaDirectories tmp-schema/DSP8010_2020.2/json-schema --schemaFilename Drive_v1.xml --entity Drive.Drive --oemSchemaFilenames OEM1DriveExt_v1.xml OEM2DriveExt_v1.xml --oemEntities OEM1=OEM1DriveExt.OEM1DriveExt OEM2=OEM2DriveExt.OEM2DriveExt --outputFile drive.bin 9 | pipenv run python rde_schema_dictionary_gen.py local --csdlSchemaDirectories tmp-schema/DSP8010_2020.2/csdl --jsonSchemaDirectories tmp-schema/DSP8010_2020.2/json-schema --schemaFilename Drive_v1.xml --entity Drive.Drive --profile test/example_profile_for_truncation.json 10 | pipenv run python rde_schema_dictionary_gen.py annotation --csdlSchemaDirectories tmp-schema/DSP8010_2020.2/csdl --jsonSchemaDirectories tmp-schema/DSP8010_2020.2/json-schema -v v1_0_0 --outputFile annotation.bin 11 | pipenv run python rde_schema_dictionary_gen.py annotation --csdlSchemaDirectories tmp-schema/DSP8010_2020.2/csdl --jsonSchemaDirectories tmp-schema/DSP8010_2020.2/json-schema -v v1_0_0 --outputFile annotation.bin -f annotation.json 12 | pipenv run python rde_schema_dictionary_gen.py error -c tmp-schema/DSP8010_2020.2/csdl -j tmp-schema/DSP8010_2020.2/json-schema 13 | pipenv run python pldm_bej_encoder_decoder.py encode --schemaDictionary drive.bin --annotationDictionary annotation.bin --jsonFile test/drive.json --bejOutputFile drive_bej.bin --pdrMapFile pdr.txt 14 | pipenv run python pldm_bej_encoder_decoder.py decode --schemaDictionary drive.bin --annotationDictionary annotation.bin --bejEncodedFile drive_bej.bin --pdrMapFile pdr.txt 15 | -------------------------------------------------------------------------------- /test/utils.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | import os 7 | import stat 8 | from git import Repo 9 | 10 | 11 | def cloneFrom(repo_url, repo_path, checkout=None, paths=None): 12 | """Helper function to clone a git repository. 13 | 14 | Args: 15 | repo_url (str): URL of the git repository. 16 | repo_path (str): Path where the repository should be cloned. 17 | checkout (str): Branch or Tag to checkout (default None). 18 | paths (array): List of directories to check out (default None). 19 | 20 | Returns: 21 | git.Repo: Instance of git.Repo on success else None. 22 | """ 23 | repo = None 24 | 25 | if repo_path and repo_url is not None: 26 | try: 27 | repo = Repo.init(repo_path, bare=False) 28 | config = repo.config_writer() 29 | config.set_value('core', 'sparsecheckout', True) 30 | config.release() 31 | origin = repo.create_remote('origin', repo_url) 32 | 33 | if paths is not None: 34 | with open(os.path.join(repo_path, ".git/info/sparse-checkout"), "w+") as sparse_checkout: 35 | # Add required paths to checkout. 36 | for path in paths: 37 | sparse_checkout.write(path + "\n") 38 | 39 | origin.fetch() 40 | 41 | if checkout is not None: 42 | repo.git.checkout(checkout) 43 | 44 | except Exception as ex: 45 | repo = None 46 | print("Error: Exception in cloneFrom()") 47 | print("Error: repo_path: {0}, url: {1}".format(repo_path, repo_url)) 48 | print("Error: Exception type: {0}, message: {1}".format(ex.__class__.__name__, str(ex))) 49 | 50 | return repo 51 | 52 | 
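# Illustrative usage sketch: the repository URL and the 2018.3 tag come from
# test/test.sh and the sparse-checkout paths from the CI configuration; the
# destination directory name here is hypothetical.
#
#   repo = cloneFrom('https://github.com/DMTF/Redfish.git', 'tmp-redfish-schema',
#                    checkout='2018.3', paths=['csdl', 'json-schema'])
#   assert repo is not None, 'clone failed'
#
53 | def onerror(func, path, exc_info): 54 | """ 55 | Error handler for ``shutil.rmtree``. 56 | 57 | If the error is due to an access error (read only file) 58 | it attempts to add write permission and then retries. 59 | 60 | If the error is for another reason it re-raises the error. 61 | 62 | Usage : ``shutil.rmtree(path, onerror=onerror)`` 63 | """ 64 | if not os.access(path, os.W_OK): 65 | # Is the error an access error? 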
66 | os.chmod(path, stat.S_IWUSR) 67 | func(path) 68 | -------------------------------------------------------------------------------- /test/circuit.json: -------------------------------------------------------------------------------- 1 | { 2 | "@odata.type": "#Circuit.v1_0_0.Circuit", 3 | "Id": "A", 4 | "Name": "Branch Circuit A", 5 | "Status": { 6 | "State": "Enabled", 7 | "Health": "OK" 8 | }, 9 | "CircuitType": "Branch", 10 | "PhaseWiringType": "TwoPhase3Wire", 11 | "NominalVoltage": "AC200To240V", 12 | "RatedCurrentAmps": 16, 13 | "BreakerState": "Normal", 14 | "PolyPhaseVoltage": { 15 | "Line1ToNeutral": { 16 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/VoltageAL1N", 17 | "Reading": 118.2 18 | }, 19 | "Line1ToLine2": { 20 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/VoltageAL1L2", 21 | "Reading": 203.5 22 | } 23 | }, 24 | "CurrentAmps": { 25 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/CurrentA", 26 | "Reading": 5.19 27 | }, 28 | "PolyPhaseCurrentAmps": { 29 | "Line1": { 30 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/CurrentA", 31 | "Reading": 5.19 32 | } 33 | }, 34 | "PowerWatts": { 35 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/PowerA", 36 | "Reading": 937.4, 37 | "ApparentVA": 937.4, 38 | "ReactiveVAR": 0, 39 | "PowerFactor": 1 40 | }, 41 | "PolyPhasePowerWatts": { 42 | "Line1ToNeutral": { 43 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/PowerA1", 44 | "Reading": 937.4, 45 | "ApparentVA": 937.4, 46 | "ReactiveVAR": 0, 47 | "PowerFactor": 1 48 | } 49 | }, 50 | "FrequencyHz": { 51 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/FrequencyA", 52 | "Reading": 60 53 | }, 54 | "EnergykWh": { 55 | "DataSourceUri": "/redfish/v1/PowerEquipment/RackPDUs/1/Sensors/EnergyA", 56 | "Reading": 325675 57 | }, 58 | "Links": { 59 | "Outlets": [ 60 | { 61 | "@odata.id": "/redfish/v1/PowerEquipment/RackPDUs/1/Outlets/A1" 62 | }, 63 | { 64 | "@odata.id": "/redfish/v1/PowerEquipment/RackPDUs/1/Outlets/A2" 65 | }, 66 | { 67 | "@odata.id": "/redfish/v1/PowerEquipment/RackPDUs/1/Outlets/A3" 68 | } 69 | ] 70 | }, 71 | "@odata.id": "/redfish/v1/PowerEquipment/RackPDUs/1/Branches/A" 72 | } 73 | -------------------------------------------------------------------------------- /test/storage.json: -------------------------------------------------------------------------------- 1 | { 2 | "@odata.type": "#Storage.v1_3_0.Storage", 3 | "@odata.context": "/redfish/v1/$metadata#Storage.Storage", 4 | "@odata.id": "/redfish/v1/Systems/1/Storage/1", 5 | "Id": "RAID Controller 1", 6 | "Name": "RAID Controller", 7 | "Description": "RAID Controller", 8 | "Status": { 9 | "State": "Enabled", 10 | "Health": "OK", 11 | "HealthRollup": "OK" 12 | }, 13 | "StorageControllers": [ 14 | { 15 | "@odata.id": "/redfish/v1/Systems/1/Storage/1#/StorageControllers/0", 16 | "@odata.type": "#Storage.v1_3_0.StorageController", 17 | "MemberId": "0", 18 | "Name": "SAS RAID Controller", 19 | "Status": { 20 | "State": "Enabled", 21 | "Health": "OK" 22 | }, 23 | "Identifiers": [ 24 | { 25 | "DurableNameFormat": "NAA", 26 | "DurableName": "5045594843305852483430304E452000" 27 | } 28 | ], 29 | "Manufacturer": "Consorto", 30 | "Model": "Consorty RAID Controller XYZ", 31 | "SerialNumber": "PEYHC0XRH400NE", 32 | "PartNumber": "7334534", 33 | "SpeedGbps": 12, 34 | "FirmwareVersion": "1.00", 35 | "SupportedControllerProtocols": [ 36 | "PCIe" 37 | ], 38 | "SupportedDeviceProtocols": [ 39 | "SAS", 40 | "SATA" 41 
| ] 42 | } 43 | ], 44 | "Drives": [ 45 | { 46 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.1" 47 | }, 48 | { 49 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.2" 50 | }, 51 | { 52 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.3" 53 | }, 54 | { 55 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.4" 56 | }, 57 | { 58 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.5" 59 | }, 60 | { 61 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.6" 62 | } 63 | ], 64 | "Volumes": { 65 | "@odata.id": "/redfish/v1/volcollection" 66 | }, 67 | "Links": { 68 | "Enclosures": [ 69 | { 70 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1" 71 | } 72 | ] 73 | }, 74 | "@Redfish.OperationApplyTime": "Immediate", 75 | "@Redfish.CollectionCapabilities": { 76 | "MaxMembers": 0 77 | }, 78 | "@Message.ExtendedInfo": [] 79 | } -------------------------------------------------------------------------------- /test/schema/oem-csdl/OEM2DriveExt_v1.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /test/drive.json: -------------------------------------------------------------------------------- 1 | { 2 | "@odata.id": "/redfish/v1/drives/1", 3 | "@odata.type": "#Drive.v1_5_0.Drive", 4 | "@odata.etag": "FBS4553345", 5 | "Id": "Drive1", 6 | "Name": "Disk Bay 1", 7 | "IndicatorLED": "Lit", 8 | "Model": "Consorto MM0500FBFVQ", 9 | "Revision": "C1.1", 10 | "Status": { 11 | "State": "Enabled", 12 | "Health": "Warning" 13 | }, 14 | "Actions": { 15 | "#Drive.SecureErase": { 16 | "target": "/redfish/v1/drives/1/Actions/Drive.SecureErase", 17 | "title": "Secure Erase a Drive" 18 | }, 19 | "#Drive.Reset": { 20 | "target": "/redfish/v1/drives/1/Actions/Drive.Reset", 21 | "title": "Reset a Drive", 22 | "ResetType@Redfish.AllowableValues": [ 23 | "On", 24 | "ForceOff", 25 | "ForceRestart", 26 | "Nmi", 27 | "ForceOn", 28 | "PushPowerButton" 29 | ] 30 | } 31 | }, 32 | "Status@Message.ExtendedInfo": [ 33 | { 34 | "MessageId": "PredictiveFailure", 35 | "Severity": "Warning", 36 | "RelatedProperties": ["FailurePredicted", "MediaType"] 37 | }, 38 | { 39 | "MessageId": "LinkFailure", 40 | "Severity": "Warning", 41 | "MessageArgs": ["Port", "1"] 42 | } 43 | ], 44 | "CapacityBytes": 500105991946, 45 | "BlockSizeBytes": 512, 46 | "Identifiers": [ 47 | { 48 | "DurableNameFormat": "NAA", 49 | "DurableName": "5000C5004183A941" 50 | } 51 | ], 52 | "FailurePredicted": true, 53 | "Protocol": "SAS", 54 | "MediaType": "HDD", 55 | "Manufacturer": "CONSORTO", 56 | "SerialNumber": "9XF11DLF00009238W7LN", 57 | "PhysicalLocation": { 58 | "PartLocation": { 59 | "LocationOrdinalValue": 1, 60 | "LocationType": "Bay", 61 | "ServiceLabel": "Port=A:Bay=1" 62 | } 63 | }, 64 | "RotationSpeedRPM": 15000, 65 | "CapableSpeedGbs": 12, 66 | "NegotiatedSpeedGbs": 12, 67 | "Operations": [ 68 | { 69 | "OperationName": "Erasing", 70 | "PercentageComplete": 20, 71 | "AssociatedTask": { 72 | "@odata.id": "/redfish/v1/Tasks/1" 73 | } 74 | }, 75 | { 76 | "OperationName": "Rebuilding", 77 | "PercentageComplete": 70, 78 | "AssociatedTask": { 79 | "@odata.id": 
"/redfish/v1/Tasks/2" 80 | } 81 | } 82 | ], 83 | "Links": { 84 | "Volumes": [ 85 | { 86 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Volumes/1" 87 | }, 88 | { 89 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Volumes/2" 90 | }, 91 | { 92 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Volumes/3" 93 | } 94 | ] 95 | }, 96 | "Oem": { 97 | "OEM1": { 98 | "@odata.type": "#OEMDriveExt.v1_0_0.OEM1DriveExt", 99 | "ArrayOfStrings": [ 100 | "str1", 101 | "str2", 102 | "str3", 103 | "str4" 104 | ], 105 | "ArrayOfInts": [ 106 | 10, 107 | 20, 108 | 30, 109 | 40, 110 | 50 111 | ] 112 | } 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /rdebej/_internal_utils.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | """ 7 | rdebej._internal_utils 8 | ~~~~~~~~~~~~~~ 9 | Provides utility functions that are consumed internally by rdebej 10 | """ 11 | 12 | # BEJ FORMAT definitions 13 | BEJ_FORMAT_SET = 0x00 14 | BEJ_FORMAT_ARRAY = 0x01 15 | BEJ_FORMAT_NULL = 0x02 16 | BEJ_FORMAT_INTEGER = 0x03 17 | BEJ_FORMAT_ENUM = 0x04 18 | BEJ_FORMAT_STRING = 0x05 19 | BEJ_FORMAT_REAL = 0x06 20 | BEJ_FORMAT_BOOLEAN = 0x07 21 | BEJ_FORMAT_BYTE_STRING = 0x08 22 | BEJ_FORMAT_CHOICE = 0x09 23 | BEJ_FORMAT_PROPERTY_ANNOTATION = 0x0A 24 | BEJ_FORMAT_RESOURCE_LINK = 0x0E 25 | BEJ_FORMAT_RESOURCE_LINK_EXPANSION = 0x0F 26 | BEJ_FORMAT_UNKNOWN = 0xFF 27 | 28 | BEJ_FLAG_DEFERRED = 1 << 0 29 | BEJ_FLAG_READONLY = 1 << 1 30 | BEJ_FLAG_NULLABLE = 1 << 2 31 | BEJ_FLAG_NESTED_TOP_LEVEL_ANNOTATION = 1 << 1 32 | 33 | # Internal dictionary index 34 | DICTIONARY_ENTRY_FORMAT = 0 35 | DICTIONARY_ENTRY_FLAGS = 1 36 | DICTIONARY_ENTRY_SEQUENCE_NUMBER = 2 37 | DICTIONARY_ENTRY_OFFSET = 3 38 | DICTIONARY_ENTRY_CHILD_COUNT = 4 39 | DICTIONARY_ENTRY_NAME = 5 40 | 41 | BEJ_DICTIONARY_SELECTOR_MAJOR_SCHEMA = 0x00 42 | BEJ_DICTIONARY_SELECTOR_ANNOTATION = 0x01 43 | 44 | 45 | class DictionaryByteArrayStream: 46 | def __init__(self, byte_array, offset=0, child_count=-1): 47 | self._byte_array = byte_array 48 | self._current_index = offset 49 | self._child_count = child_count 50 | self._current_entry = 0 51 | 52 | if self._current_index == 0: 53 | # skip thru the header 54 | self.get_int(1) # VersionTag 55 | self.get_int(1) # DictionaryFlags 56 | self.get_int(4) # SchemaVersion 57 | self._total_entries = self.get_int(2) # EntryCount 58 | self.get_int(4) # DictionarySize 59 | 60 | self._child_count = 1 61 | 62 | def get_offset(self): 63 | return self._current_index 64 | 65 | def get_child_count(self): 66 | return self._child_count 67 | 68 | def get_int(self, size): 69 | value = int.from_bytes(self._byte_array[self._current_index:self._current_index+size], 'little') 70 | self._current_index += size 71 | return value 72 | 73 | def has_entry(self): 74 | return self._current_entry < self._child_count 75 | 76 | def get_next_entry(self): 77 | entry = [] 78 | current_entry = 0 79 | if self._current_entry < self._child_count or self._child_count == -1: 80 | 81 | format_flags = self.get_int(1) 82 | entry.append(format_flags >> 4) # format 83 | entry.append(format_flags & 0xF) # flags 84 | entry.append(self.get_int(2)) # sequence 85 | entry.append(self.get_int(2)) # offset 86 | entry.append(self.get_int(2)) # child_count 87 | 88 | name_length = 
self.get_int(1) 89 | name_offset = self.get_int(2) 90 | 91 | # fetch the name 92 | name = '' 93 | if name_length > 0: 94 | name = "".join(map(chr, self._byte_array[name_offset:name_offset+name_length-1])) # -1 to skip null terminator 95 | 96 | entry.append(name) 97 | 98 | if self._child_count != -1: 99 | self._current_entry += 1 100 | 101 | return entry 102 | -------------------------------------------------------------------------------- /test/schema/dummysimple/json-schema/DummySimple.v1_0_0.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/DummySimple", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 | "copyright": "Copyright 2018 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright", 5 | "definitions": { 6 | "LinkStatus": { 7 | "enum": [ 8 | "NoLink", 9 | "LinkDown", 10 | "LinkUp" 11 | ], 12 | "type": "string" 13 | }, 14 | "DummySimple" : { 15 | "additionalProperties": false, 16 | "description": "The DummySimple schema represents a very simple schema used to demonstrate the BEJ dictionary format.", 17 | "longDescription": "This resource shall not be used except for illustrative purposes. It does not correspond to any real hardware or software.", 18 | "patternProperties": { 19 | "^([a-zA-Z_][a-zA-Z0-9_]*)?@(odata|Redfish|Message|Privileges)\\.[a-zA-Z_][a-zA-Z0-9_.]+$": { 20 | "description": "This property shall specify a valid odata or Redfish property.", 21 | "type": [ 22 | "array", 23 | "boolean", 24 | "number", 25 | "null", 26 | "object", 27 | "string" 28 | ] 29 | } 30 | }, 31 | "properties": { 32 | "@odata.context": { 33 | "$ref": "http://redfish.dmtf.org/schemas/v1/odata.v4_0_1.json#/definitions/context" 34 | }, 35 | "@odata.id": { 36 | "$ref": "http://redfish.dmtf.org/schemas/v1/odata.v4_0_1.json#/definitions/id" 37 | }, 38 | "@odata.type": { 39 | "$ref": "http://redfish.dmtf.org/schemas/v1/odata.v4_0_1.json#/definitions/type" 40 | }, 41 | "ChildArrayProperty": { 42 | "items": { 43 | "additionalProperties": false, 44 | "type": "object", 45 | "properties": { 46 | "LinkStatus": { 47 | "anyOf": [ 48 | { 49 | "$ref": "#/definitions/LinkStatus" 50 | }, 51 | { 52 | "type": "null" 53 | } 54 | ], 55 | "readOnly": true 56 | }, 57 | "AnotherBoolean": { 58 | "type": "boolean", 59 | "readOnly": true 60 | } 61 | } 62 | }, 63 | "type": "array" 64 | } 65 | }, 66 | "SampleIntegerProperty": { 67 | "type": "integer", 68 | "readOnly": true 69 | }, 70 | "Id": { 71 | "type": "string", 72 | "readOnly": true 73 | }, 74 | "SampleEnabledProperty": { 75 | "type": "boolean", 76 | "readOnly": true 77 | } 78 | } 79 | }, 80 | "title": "#DummySimple.v1_0_0.DummySimple" 81 | } 82 | -------------------------------------------------------------------------------- /test/schema/oem-csdl/OEM1DriveExt_v1.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | -------------------------------------------------------------------------------- /test/schema/dummysimple/csdl/DummySimple_v1.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | -------------------------------------------------------------------------------- /test/features/environment.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | from behave import * 7 | from behave import fixture, use_fixture 8 | import parse 9 | from parse_type import TypeBuilder 10 | from behave import register_type 11 | import re 12 | import sys 13 | import shutil 14 | import os 15 | from behave import register_type 16 | import requests 17 | import zipfile 18 | 19 | sys.path.append('./test') 20 | from utils import * 21 | 22 | 23 | # Cardinality support for numbers in parse 24 | # d+ allows a list of integers e.g. 1, 2, 3 25 | # w+ allows a list of words e.g. hello, world 26 | # h allows hex numbers e.g. 0x123F 27 | # h+ allows a list of hex numbers e.g. 0x123, 0x345, 0xDEF 28 | # cv allows a context variable name that needs to be accessed 29 | @parse.with_pattern(r"\d+") 30 | def parse_number(text): 31 | return int(text) 32 | 33 | 34 | @parse.with_pattern(r"[0-9a-zA-Z\$]+") 35 | def parse_word(text): 36 | return text 37 | 38 | 39 | @parse.with_pattern(r"\w+") 40 | def parse_hex(text): 41 | return int(text, 16) 42 | 43 | 44 | @parse.with_pattern(r"{\w+}") 45 | def parse_context_variable(text): 46 | return text[1:-1] 47 | 48 | 49 | # Allows using context variables and expression evaluation: 50 | # {SomeVariable} 51 | # will be evaluated to context.SomeVariable 52 | # `{SomeVariable} + 4 + {SomeOtherVariable}` 53 | # will be evaluated to context.SomeVariable + 4 + context.SomeOtherVariable 54 | def parse_with_context_variables(context, text): 55 | isEval = re.match(r'^[`{].*?[`}]$', text) is not None 56 | 57 | # strip off the trailing ` 58 | if re.match(r'^`.*?`$', text): 59 | text = text[1:-1] 60 | 61 | # first substitute any {Variable} with context.Variable 62 | result = re.sub(r'{(?P<var>\w+)}', r'context.\g<var>', text) 63 | 64 | if isEval: 65 | result = eval(result) 66 | 67 | return result 68 | 69 | 70 | parse_numbers = TypeBuilder.with_many(parse_number, listsep=",") 71 | parse_words = TypeBuilder.with_many(parse_word, listsep=",") 72 | parse_hexs = TypeBuilder.with_many(parse_hex, listsep=",") 73 | parse_one_hex = TypeBuilder.with_zero_or_one(parse_hex) 74 | parse_is_is_not = TypeBuilder.make_choice(["is", "is NOT"]) 75 | parse_have_not_have = TypeBuilder.make_choice(["have", "NOT have"]) 76 | parse_has_does_not_have = TypeBuilder.make_choice(["has", "does NOT have"]) 77 | parse_one_context_variables = TypeBuilder.with_zero_or_one(parse_context_variable) 78 | type_dict = { 79 | 'd+': parse_numbers, 80 | 'w+': parse_words, 81 | 'h': parse_one_hex, 82 | 'h+': parse_hexs, 83 | 'is': parse_is_is_not, 84 | 'have': parse_have_not_have, 85 | 'has': parse_has_does_not_have, 86 | 'cv': parse_one_context_variables 87 | } 88 | register_type(**type_dict) 89 | 90 | 
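# Illustrative example of the registered types in use (the step pattern shown
# exists in steps/dictionary_format.py): the ':h' placeholder converts hex
# text such as 0x00 into an int before the step body runs.
#
#   @then('the dictionary header shall have the {Property} {Comparison} {Value:h}')
#   def step_impl(context, Property, Comparison, Value):
#       assert isinstance(Value, int)
#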
# schema_sources: a list of dicts describing where schema files come from: 91 | #   source ('git' or 'http') 92 | #   schema_dir (checkout/extraction directory) 93 | #   csdl_dir and json_schema_dir (sub directories inside schema_dir) 94 | #   repo and branch (for git) or url (for http) 95 | # -D schema_sources="[ 96 | #   {'source':'git', 'repo':'http://ip/repo.git', 'branch':'main', 'schema_dir':'schema_dir', 'csdl_dir':'metadata', 'json_schema_dir':'json-schema'}, 97 | #   {'source':'http', 'url':'http://ip/schema.zip', 'schema_dir':'schema_dir', 'csdl_dir':'metadata', 'json_schema_dir':'json-schema'} 98 | # ]" 99 | 100 | @fixture 101 | def schema_source(context, **kwargs): 102 | # -- SETUP-FIXTURE PART: 103 | schema_sources = eval(context.config.userdata['schema_sources']) 104 | context.csdl_dirs = [] 105 | context.json_schema_dirs = [] 106 | context.dirs_to_cleanup = [] 107 | for schema_source in schema_sources: 108 | if schema_source['source'] == 'git': 109 | # fill the schema dir with schema from the source 110 | if re.search(r'.*\.git$', schema_source['repo']): 111 | repo = cloneFrom(schema_source['repo'], schema_source['schema_dir'], schema_source['branch'], 112 | [schema_source['csdl_dir'], schema_source['json_schema_dir']]) 113 | assert repo, "Could not fetch repo" 114 | context.dirs_to_cleanup.append(schema_source['schema_dir']) 115 | schema_test_dir = schema_source['schema_dir'] 116 | elif schema_source['source'] == 'http': 117 | r = requests.get(schema_source['url'], allow_redirects=True) 118 | open('tmp_schema.zip', 'wb').write(r.content) 119 | with zipfile.ZipFile('tmp_schema.zip', 'r') as zip_ref: 120 | zip_ref.extractall(schema_source['schema_dir']) 121 | schema_test_dir = schema_source['schema_dir'] + '//' + os.listdir(schema_source['schema_dir'])[0] 122 | 123 | context.csdl_dirs.append(schema_test_dir + '//' + schema_source['csdl_dir']) 124 | context.json_schema_dirs.append(schema_test_dir + '//' + schema_source['json_schema_dir']) 125 | 126 | 127 | 128 | def before_tag(context, tag): 129 | if tag == 'fixture.schema_source': 130 | use_fixture(schema_source, context) 131 | 132 | 133 | def after_tag(context, tag): 134 | if tag == 'fixture.schema_source': 135 | for dir_to_cleanup in context.dirs_to_cleanup: 136 | shutil.rmtree(dir_to_cleanup, onerror=onerror) -------------------------------------------------------------------------------- /pldm_bej_encoder_decoder.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | """ 7 | PLDM BEJ Encoder/Decoder 8 | 9 | File : pldm_bej_encoder_decoder.py 10 | 11 | Brief : This file allows encoding a JSON file to PLDM Binary encoded JSON (BEJ) and 12 | decoding a PLDM BEJ file back into JSON. 
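Example (invocations mirror test/test_cli.sh; drive.bin and annotation.bin are
dictionaries generated beforehand with rde_schema_dictionary_gen.py):

    python pldm_bej_encoder_decoder.py encode --schemaDictionary drive.bin --annotationDictionary annotation.bin --jsonFile test/drive.json --bejOutputFile drive_bej.bin --pdrMapFile pdr.txt
    python pldm_bej_encoder_decoder.py decode --schemaDictionary drive.bin --annotationDictionary annotation.bin --bejEncodedFile drive_bej.bin --pdrMapFile pdr.txt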
13 | """ 14 | 15 | import argparse 16 | import json 17 | import io 18 | import sys 19 | from rdebej import encode, decode 20 | 21 | 22 | if __name__ == '__main__': 23 | parser = argparse.ArgumentParser() 24 | parser.add_argument("--verbose", help="increase output verbosity", action="store_true") 25 | parser.add_argument("--silent", help="no output prints unless errors", action="store_true") 26 | subparsers = parser.add_subparsers(dest='operation') 27 | 28 | encode_parser = subparsers.add_parser('encode') 29 | encode_parser.add_argument('-s', '--schemaDictionary', type=argparse.FileType('rb'), required=True) 30 | encode_parser.add_argument('-a', '--annotationDictionary', type=argparse.FileType('rb'), required=True) 31 | encode_parser.add_argument('-j', '--jsonFile', type=argparse.FileType('r'), required=False) 32 | encode_parser.add_argument('-o', '--bejOutputFile', type=argparse.FileType('wb'), required=False) 33 | encode_parser.add_argument('-op', '--pdrMapFile', type=argparse.FileType('w'), required=False) 34 | encode_parser.add_argument('-fi', '--fixedIntegerLength', required=False, default=0, 35 | help="Pack the integer with the fixed length." 36 | "Fixed length integer doesn't have the padding zero byte for the MSB of value set to 1." 37 | "That is the consumer's prerequisite knowdlege of the value characteristic." 38 | "For example: -fi=4 gives all of the integer in 4-byte. Throw an error message if there" 39 | "is an integer which length in byte is greater than 4-byte." 40 | ) 41 | 42 | decode_parser = subparsers.add_parser('decode') 43 | decode_parser.add_argument('-s', '--schemaDictionary', type=argparse.FileType('rb'), required=True) 44 | decode_parser.add_argument('-a', '--annotationDictionary', type=argparse.FileType('rb'), required=True) 45 | decode_parser.add_argument('-b', '--bejEncodedFile', type=argparse.FileType('rb'), required=True) 46 | decode_parser.add_argument('-p', '--pdrMapFile', type=argparse.FileType('r'), required=False) 47 | 48 | args = parser.parse_args() 49 | 50 | if len(sys.argv) == 1: 51 | parser.print_help(sys.stderr) 52 | sys.exit(1) 53 | 54 | # Set the verbose flag. 
55 | verbose = args.verbose 56 | silent = args.silent 57 | if verbose and silent: # override silent if verbose is set 58 | verbose = True 59 | silent = False 60 | 61 | # Read the binary schema dictionary into a byte array 62 | schema_dictionary = list(args.schemaDictionary.read()) 63 | 64 | # Read the binary annotation dictionary into a byte array 65 | annotation_dictionary = list(args.annotationDictionary.read()) 66 | 67 | if args.operation == 'encode': 68 | json_str = {} 69 | 70 | # Read the json file 71 | if args.jsonFile: 72 | json_str = args.jsonFile.read() 73 | else: # read from stdin 74 | json_str = sys.stdin.read() 75 | 76 | json_to_encode = json.loads(json_str) 77 | 78 | # create a byte stream 79 | output_stream = io.BytesIO() 80 | success, pdr_map = encode.bej_encode(output_stream, json_to_encode, schema_dictionary, annotation_dictionary, fixed_int_len=int(args.fixedIntegerLength)) 81 | if success: 82 | encoded_bytes = output_stream.getvalue() 83 | if not silent: 84 | encode.print_encode_summary(json_to_encode, encoded_bytes) 85 | 86 | if args.bejOutputFile: 87 | args.bejOutputFile.write(encoded_bytes) 88 | 89 | if args.pdrMapFile: 90 | args.pdrMapFile.write(json.dumps(pdr_map)) 91 | else: 92 | if not silent: 93 | print('Failed to encode JSON') 94 | 95 | elif args.operation == 'decode': 96 | # Read the encoded bytes 97 | bej_encoded_bytes = list(args.bejEncodedFile.read()) 98 | 99 | pdr_map = {} 100 | if args.pdrMapFile: 101 | pdr_map = json.loads(args.pdrMapFile.read()) 102 | 103 | input_stream = io.BytesIO(bytes(bej_encoded_bytes)) 104 | output_stream = io.StringIO() 105 | success = decode.bej_decode(output_stream, input_stream, schema_dictionary, annotation_dictionary, {}, pdr_map, 106 | {}) 107 | if success: 108 | if not silent: 109 | print(json.dumps(json.loads(output_stream.getvalue()), indent=3)) 110 | else: 111 | if not silent: 112 | print('Failed to decode JSON') 113 | -------------------------------------------------------------------------------- /rde_schema_dictionary_gen.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | """ 7 | RDE Dictionary Builder 8 | 9 | File : rde_schema_dictionary_gen.py 10 | 11 | Brief : This file contains the definitions and functionalities for generating 12 | an RDE schema dictionary from a set of standard Redfish CSDL and JSON Schema 13 | files 14 | """ 15 | 16 | import argparse 17 | import sys 18 | from rdebej.dictionary import * 19 | 20 | 21 | if __name__ == '__main__': 22 | # rde_schema_dictionary parse --schemaDir=directory --schemaFilename=filename 23 | parser = argparse.ArgumentParser() 24 | parser.add_argument("--verbose", help="increase output verbosity", action="store_true") 25 | parser.add_argument("--silent", help="no output prints unless errors", action="store_true") 26 | subparsers = parser.add_subparsers(dest='source') 27 | 28 | # TODO: Fix remote for json fixups 29 | # remote_parser = subparsers.add_parser('remote') 30 | # remote_parser.add_argument('--schemaURL', type=str, required=True) 31 | # remote_parser.add_argument('--entity', type=str, required=True) 32 | # remote_parser.add_argument('--outputFile', type=str, required=False) 33 | 34 | local_parser = subparsers.add_parser('local') 35 | local_parser.add_argument('-c', '--csdlSchemaDirectories', nargs='*', type=str, required=True) 36 | local_parser.add_argument('-j', '--jsonSchemaDirectories', nargs='*', type=str, required=True) 37 | local_parser.add_argument('-s', '--schemaFilename', type=str, required=True) 38 | local_parser.add_argument('-e', '--entity', type=str, required=True) 39 | local_parser.add_argument('-o', '--oemSchemaFilenames', nargs='*', type=str, required=False) 40 | local_parser.add_argument('-t', '--oemEntities', nargs='*', type=str, required=False) 41 | local_parser.add_argument('-r', '--copyright', type=str, required=False) 42 | local_parser.add_argument('-p', '--profile', type=str, required=False) 43 | local_parser.add_argument('-d', '--outputFile', type=argparse.FileType('wb'), required=False) 44 | local_parser.add_argument('-f', '--outputJsonDictionaryFile', type=argparse.FileType('w'), required=False) 45 | 46 | annotation_v2_parser = subparsers.add_parser('annotation') 47 | annotation_v2_parser.add_argument('-c', '--csdlSchemaDirectories', nargs='*', type=str, required=True) 48 | annotation_v2_parser.add_argument('-j', '--jsonSchemaDirectories', nargs='*', type=str, required=True) 49 | annotation_v2_parser.add_argument('-v', '--version', type=str, required=True) 50 | annotation_v2_parser.add_argument('-r', '--copyright', type=str, required=False) 51 | annotation_v2_parser.add_argument('-d', '--outputFile', type=argparse.FileType('wb'), required=False) 52 | annotation_v2_parser.add_argument('-f', '--outputJsonDictionaryFile', type=argparse.FileType('w'), required=False) 53 | 54 | error_parser = subparsers.add_parser('error') 55 | error_parser.add_argument('-c', '--csdlSchemaDirectories', nargs='*', type=str, required=True) 56 | error_parser.add_argument('-j', '--jsonSchemaDirectories', nargs='*', type=str, required=True) 57 | error_parser.add_argument('-r', '--copyright', type=str, required=False) 58 | error_parser.add_argument('-d', '--outputFile', type=argparse.FileType('wb'), required=False) 59 | error_parser.add_argument('-f', '--outputJsonDictionaryFile', type=argparse.FileType('w'), required=False) 60 | 61 | dictionary_dump = subparsers.add_parser('view') 62 | dictionary_dump.add_argument('-f', '--file', type=str, required=True) 63 | 64 | args = parser.parse_args() 65 | 
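# Illustrative invocations (the 'local' form mirrors test/test_cli.sh; 'view'
# pretty-prints an existing binary dictionary; directory placeholders are
# hypothetical):
#
#   python rde_schema_dictionary_gen.py local -c <csdl-dir> -j <json-schema-dir> -s Drive_v1.xml -e Drive.Drive -d drive.bin
#   python rde_schema_dictionary_gen.py view -f drive.bin
#
66 | if len(sys.argv) 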
== 1 or args.source is None: 67 | parser.print_help(sys.stderr) 68 | sys.exit(1) 69 | 70 | # Set the verbose flag. 71 | verbose = args.verbose 72 | silent = args.silent 73 | if verbose and silent: # override silent if verbose is set 74 | verbose = True 75 | silent = False 76 | 77 | # view an existing binary dictionary 78 | if args.source == 'view': 79 | # load the binary dictionary file 80 | file = open(args.file, 'rb') 81 | contents = file.read() 82 | print_binary_dictionary(list(contents)) 83 | sys.exit() 84 | 85 | # Generate the schema dictionary. 86 | schema_dictionary = None 87 | if args.source == 'local': 88 | schema_dictionary = generate_schema_dictionary(args.source, args.csdlSchemaDirectories, 89 | args.jsonSchemaDirectories, args.entity, 90 | args.schemaFilename, args.oemEntities, 91 | args.oemSchemaFilenames, args.profile, 92 | None, 93 | args.copyright) 94 | elif args.source == 'remote': 95 | schema_dictionary = generate_schema_dictionary(args.source, None, None, args.entity, None, 96 | None, None, None, args.schemaURL) 97 | elif args.source == 'annotation': 98 | # Just choose a dummy complex entity type to start the annotation dictionary generation process. 99 | schema_dictionary = generate_annotation_schema_dictionary(args.csdlSchemaDirectories, 100 | args.jsonSchemaDirectories, args.version, 101 | args.copyright) 102 | elif args.source == 'error': 103 | schema_dictionary = generate_error_schema_dictionary(args.csdlSchemaDirectories, 104 | args.jsonSchemaDirectories, args.copyright) 105 | 106 | # Print table data. 107 | if schema_dictionary is not None and schema_dictionary.dictionary: 108 | if not silent: 109 | print_table_data( 110 | [["Row", "Sequence#", "Format", "Flags", "Field String", "Child Count", "Offset"]] 111 | + 112 | schema_dictionary.dictionary) 113 | 114 | # Print dictionary summary. 115 | if not silent: 116 | print_dictionary_summary(schema_dictionary.dictionary, schema_dictionary.dictionary_byte_array) 117 | 118 | # Generate binary dictionary file 119 | if args.outputFile: 120 | args.outputFile.write(bytes(schema_dictionary.dictionary_byte_array)) 121 | 122 | if args.outputJsonDictionaryFile: 123 | args.outputJsonDictionaryFile.write(schema_dictionary.json_dictionary) 124 | else: 125 | print('Error, dictionary could not be generated') 126 | -------------------------------------------------------------------------------- /test/features/steps/dictionary_format.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | from behave import * 7 | import re 8 | import os 9 | import sys 10 | from ctypes import * 11 | import io 12 | import json 13 | #sys.path.append('../..') 14 | import rdebej.dictionary 15 | import rdebej.encode 16 | import rdebej.decode 17 | 18 | 19 | class DictionaryHeader(LittleEndianStructure): 20 | _pack_ = 1 21 | 22 | _fields_ = [ 23 | ('VersionTag', c_uint8), 24 | ('DictionaryFlags', c_uint8), 25 | ('EntryCount', c_uint16), 26 | ('SchemaVersion', c_uint32), 27 | ('DictionarySize', c_uint32) 28 | ] 29 | 30 | 31 | @given('a CSDL schema file {Schema} and entity {Entity}') 32 | def step_impl(context, Schema, Entity): 33 | context.Schema = Schema 34 | context.Entity = Entity 35 | is_found = False 36 | for dir in context.csdl_dirs: 37 | if os.path.isfile(dir + '//' + Schema): 38 | is_found = True 39 | 40 | assert is_found, "Could not find %s" % (Schema) 41 | 42 | 43 | @given('a list of schema files') 44 | def step_impl(context): 45 | context.schemas = [] 46 | for dir in context.csdl_dirs: 47 | context.schemas += os.listdir(dir) 48 | assert context.schemas and len(context.schemas) > 0 49 | 50 | 51 | @when('the dictionary is generated with Copyright set to {Copyright}') 52 | def step_impl(context, Copyright): 53 | dict = rdebej.dictionary.generate_schema_dictionary( 54 | 'local', 55 | context.csdl_dirs, 56 | context.json_schema_dirs, 57 | context.Entity, context.Schema, 58 | oem_entities=None, 59 | oem_schema_file_names=None, 60 | profile=None, 61 | schema_url=None, 62 | copyright=Copyright) 63 | 64 | assert dict.dictionary, "Could not generate dictionary for %s:%s" % (context.Schema, context.Entity) 65 | assert dict.dictionary_byte_array, "Could not generate byte array dictionary for %s:%s" % (context.Schema, context.Entity) 66 | assert dict.json_dictionary, "Could not generate json dictionary for %s:%s" % (context.Schema, context.Entity) 67 | 68 | context.dictionary = dict 69 | 70 | 71 | @then('the dictionary header shall have the {Property} {Comparison} {Value:h}') 72 | def step_impl(context, Property, Comparison, Value): 73 | header = DictionaryHeader.from_buffer_copy(bytearray(context.dictionary.dictionary_byte_array)) 74 | 75 | if Comparison == 'not equal to': 76 | assert getattr(header, Property) != Value, "Expected %s, Actual %s" % (Value, getattr(header, Property)) 77 | if Comparison == 'equal to': 78 | assert getattr(header, Property) == Value, "Expected %s, Actual %s" % (Value, getattr(header, Property)) 79 | elif Comparison == 'greater than': 80 | assert getattr(header, Property) > Value, "Expected %s, Actual %s" % (Value, getattr(header, Property)) 81 | 82 | 83 | @then('the resulting dictionaries have valid header information') 84 | def step_impl(context): 85 | skip_list = ['IPAddresses_v1.xml', 'Privileges_v1.xml', 'RedfishExtensions_v1.xml', 'Resource_v1.xml'] 86 | for filename in context.schemas: 87 | if filename not in skip_list: 88 | # strip out the _v1.xml 89 | m = re.compile('(.*)_v1.xml').match(filename) 90 | entity = '' 91 | if m: 92 | entity = m.group(1) + '.' 
+ m.group(1) 93 | 94 | context.execute_steps(u''' 95 | Given a CSDL schema file %s and entity %s 96 | When the dictionary is generated with Copyright set to Copyright (c) 2018 DMTF 97 | Then the dictionary header shall have the VersionTag equal to 0x00 98 | And the dictionary header shall have the DictionaryFlags equal to 0x00 99 | And the dictionary header shall have the EntryCount greater than 0x00 100 | And the dictionary header shall have the SchemaVersion greater than 0x00 101 | And the dictionary header shall have the DictionarySize greater than 0x00 102 | And the dictionary size is correct 103 | And the dictionary shall have the Copyright set to Copyright (c) 2018 DMTF 104 | ''' % (filename, entity)) 105 | 106 | 107 | @then('the following JSON is encoded using the dictionary successfully') 108 | def step_impl(context): 109 | bej_stream = io.BytesIO() 110 | context.json_to_encode = json.loads(context.text) 111 | 112 | context.annotation_dictionary = rdebej.dictionary.generate_annotation_schema_dictionary( 113 | context.csdl_dirs, 114 | context.json_schema_dirs, 115 | 'v1_0_0' 116 | ) 117 | 118 | encode_success, pdr_map = rdebej.encode.bej_encode(bej_stream, context.json_to_encode, 119 | context.dictionary.dictionary_byte_array, 120 | context.annotation_dictionary.dictionary_byte_array, 121 | verbose=True) 122 | 123 | assert encode_success, 'Encode failure' 124 | 125 | context.bej_encoded_bytes = bej_stream.getvalue() 126 | context.pdr_map = pdr_map 127 | 128 | 129 | @then('the BEJ can be successfully decoded back to JSON') 130 | def step_impl(context): 131 | # build the deferred binding strings from the pdr_map 132 | deferred_binding_strings = {} 133 | for url, pdr_num in context.pdr_map.items(): 134 | deferred_binding_strings['%L' + str(pdr_num)] = url 135 | 136 | decode_stream = io.StringIO() 137 | decode_success = rdebej.decode.bej_decode( 138 | decode_stream, 139 | io.BytesIO(bytes(context.bej_encoded_bytes)), 140 | context.dictionary.dictionary_byte_array, 141 | context.annotation_dictionary.dictionary_byte_array, 142 | [], context.pdr_map, deferred_binding_strings 143 | ) 144 | 145 | assert decode_success, 'Decode failure' 146 | decode_file = decode_stream.getvalue() 147 | assert json.loads(decode_file) == context.json_to_encode, 'Mismatch in original JSON and decoded JSON' 148 | 149 | 150 | @then('the dictionary shall have the Copyright set to {Copyright}') 151 | def step_impl(context, Copyright): 152 | copyright_bytes = bytearray(context.dictionary.dictionary_byte_array[ 153 | len(context.dictionary.dictionary_byte_array) - len(Copyright) - 1 : -1]) 154 | assert copyright_bytes.decode('utf-8') == Copyright, \ 155 | "Actual %s, Expected %s" % (copyright_bytes.decode('utf-8'), Copyright) 156 | 157 | 158 | @then('the dictionary size is correct') 159 | def step_imp(context): 160 | header = DictionaryHeader.from_buffer_copy(bytearray(context.dictionary.dictionary_byte_array)) 161 | assert getattr(header, 'DictionarySize') == len(context.dictionary.dictionary_byte_array), \ 162 | "Actual %s, Expected %s" % (getattr(header, 'DictionarySize'), len(context.dictionary.dictionary_byte_array)) -------------------------------------------------------------------------------- /generate_dictionaries.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | import sys 7 | import os 8 | import json 9 | import re 10 | import argparse 11 | import traceback 12 | 13 | #sys.path.append('./') 14 | 15 | from rdebej import dictionary 16 | 17 | 18 | def write_map_file(filename, schema_dictionary): 19 | with open(filename, 'w') as file: 20 | sys.stdout = file 21 | dictionary.print_table_data( 22 | [["Row", "Sequence#", "Format", "Flags", "Field String", "Child Count", "Offset"]] 23 | + 24 | schema_dictionary.dictionary) 25 | 26 | dictionary.print_dictionary_summary(schema_dictionary.dictionary, 27 | schema_dictionary.dictionary_byte_array) 28 | sys.stdout = sys.__stdout__ 29 | 30 | 31 | if __name__ == '__main__': 32 | parser = argparse.ArgumentParser( 33 | description='Generate dictionaries by scanning and parsing xml schema directories', 34 | formatter_class=argparse.RawTextHelpFormatter, 35 | epilog='Example config file: \n' + 36 | '{ \n' + 37 | ' "Copyright": "Copyright 2014-2020 MyCompany",\n' + 38 | ' "DoNotWrite": ["ExcludeFile1_v1.xml", "ExcludeFile2_v1.xml"],\n' + 39 | ' "ExplicitEntities": {\n' + 40 | ' "AnXMLSchemaFile_v1.xml": {\n' + 41 | ' "Namespace.SomeEntity1": "DictionaryForSomeEntity1.bin",\n' + 42 | ' "Namespace.SomeEntity2": "DictionaryForSomeEntity2.bin"\n' + 43 | ' }\n' + 44 | ' }\n' + 45 | '}' 46 | ) 47 | parser.add_argument('-x', '--input-csdl', help="source directory(s) for local XML CSDL schema files", nargs='+', 48 | required=True) 49 | parser.add_argument('-j', '--input-json-schema', help="source directory(s) for local JSON schema files", nargs='+', 50 | required=False) 51 | parser.add_argument('-c', '--config', help="config file for specific user options", required=False) 52 | 53 | parser.add_argument('-o', '--output', help="The folder(s) to write the RDE dictionary files", nargs='+', required=True) 54 | 55 | args = parser.parse_args() 56 | 57 | schema_dir_csdl = [] 58 | schema_dir_json = [] 59 | 60 | for source in args.input_csdl: 61 | schema_dir_csdl.append(source) 62 | 63 | if args.input_json_schema: 64 | for source in args.input_json_schema: 65 | schema_dir_json.append(source) 66 | 67 | copyright = '' 68 | do_not_write = [] 69 | explicit_entities = {} 70 | # Read the configuration file 71 | config_data = {} 72 | if args.config is not None: 73 | try: 74 | with open(args.config) as config_file: 75 | config_data = json.load(config_file) 76 | if 'Copyright' in config_data: 77 | copyright = config_data['Copyright'] 78 | if 'DoNotWrite' in config_data: 79 | do_not_write = config_data['DoNotWrite'] 80 | if 'ExplicitEntities' in config_data: 81 | explicit_entities = config_data['ExplicitEntities'] 82 | except json.JSONDecodeError: 83 | print("ERROR: {} contains a malformed JSON object".format(args.config)) 84 | sys.exit(1) 85 | except: 86 | print("ERROR: Could not open {}".format(args.config)) 87 | sys.exit(1) 88 | 89 | for i in range(0, len(schema_dir_csdl)): 90 | for filename in os.listdir(schema_dir_csdl[i]): 91 | if filename not in do_not_write: 92 | entities = [] 93 | output_filenames = [] 94 | 95 | if filename in explicit_entities: 96 | for (k, v) in explicit_entities[filename].items(): 97 | entities.append(k) 98 | output_filenames.append(v) 99 | else: 100 | # strip out the _v1.xml 101 | m = re.compile('(.*)_v1.xml').match(filename) 102 | if m: 103 | entities.append(m.group(1) + '.' 
+ m.group(1)) 104 | output_filenames.append(filename.replace('.xml', '')) 105 | 106 | for j in range(0, len(entities)): 107 | try: 108 | schema_dictionary = dictionary.generate_schema_dictionary( 109 | 'local', 110 | schema_dir_csdl, 111 | schema_dir_json, 112 | entities[j], 113 | filename, 114 | None, 115 | None, 116 | None, 117 | None, 118 | copyright 119 | ) 120 | 121 | if schema_dictionary and schema_dictionary.dictionary and schema_dictionary.json_dictionary: 122 | print(filename, entities[j], 'Entries:', len(schema_dictionary.dictionary), 123 | 'Size:', len(schema_dictionary.dictionary_byte_array), 124 | 'Url:', json.loads(schema_dictionary.json_dictionary)['schema_url']) 125 | 126 | dir_to_save = args.output[i] 127 | 128 | if not os.path.exists(dir_to_save): 129 | os.makedirs(dir_to_save) 130 | 131 | # save the binary and also dump the ascii version 132 | with open(dir_to_save + '//' + output_filenames[j] + '.bin', 'wb') as file: 133 | file.write(bytes(schema_dictionary.dictionary_byte_array)) 134 | 135 | write_map_file(dir_to_save + '//' + output_filenames[j] + '.map', schema_dictionary) 136 | else: 137 | print(filename, "Missing entities, skipping...") 138 | 139 | except Exception as ex: 140 | print("Error: Could not generate RDE dictionary for schema:", filename) 141 | print("Error: Exception type: {0}, message: {1}".format(ex.__class__.__name__, str(ex))) 142 | traceback.print_exception(type(ex), ex, ex.__traceback__) 143 | sys.exit(1) 144 | 145 | # Generate the annotation dictionary 146 | print('Generating annotation dictionary...') 147 | annotation_dictionary = None 148 | try: 149 | annotation_dictionary = dictionary.generate_annotation_schema_dictionary( 150 | schema_dir_csdl, 151 | schema_dir_json, 152 | 'v1' 153 | ) 154 | 155 | if annotation_dictionary and annotation_dictionary.dictionary \ 156 | and annotation_dictionary.dictionary_byte_array and annotation_dictionary.json_dictionary: 157 | print('Entries:', len(annotation_dictionary.dictionary), 'Size:', 158 | len(annotation_dictionary.dictionary_byte_array)) 159 | 160 | dir_to_save = args.output[i] 161 | 162 | with open(dir_to_save + '//' + 'annotation.bin', 'wb') as annotaton_bin: 163 | annotaton_bin.write(bytearray(annotation_dictionary.dictionary_byte_array)) 164 | 165 | write_map_file(dir_to_save + '//' + 'annotation.map', annotation_dictionary) 166 | 167 | except Exception as ex: 168 | print("Error: Could not generate Annotation RDE dictionary for schema: annotation.bin") 169 | print("Error: Exception type: {0}, message: {1}".format(ex.__class__.__name__, str(ex))) 170 | sys.exit(1) 171 | 172 | sys.exit(0) 173 | -------------------------------------------------------------------------------- /test/example_profile_for_truncation.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaDefinition": "RedfishInteroperabilityProfile.v1_0_1", 3 | "ProfileName": "Example Profile", 4 | "ProfileVersion": "0.0.1", 5 | "Purpose": "Example Profile for truncating dictionaries", 6 | "OwningEntity": "", 7 | "ContactInfo": "", 8 | "RequiredProfiles": { 9 | "DMTFBasic": { 10 | "MinVersion": "1.0.0" 11 | } 12 | }, 13 | "Protocol": { 14 | "MinVersion": "1.6", 15 | "Discovery": "None", 16 | "HostInterface": "None", 17 | "ExpandQuery": "None", 18 | "SelectQuery": "None", 19 | "FilterQuery": "None" 20 | }, 21 | "Resources": { 22 | "Storage": { 23 | "MinVersion": "1.5.0", 24 | "Purpose": "Every implementation must have a storage resource from which the other storage resources are available", 
25 | "ReadRequirement": "Mandatory", 26 | "PropertyRequirements": { 27 | "@odata.context": { 28 | "ReadRequirement": "Mandatory" 29 | }, 30 | "@odata.id": { 31 | "ReadRequirement": "Mandatory" 32 | }, 33 | "@odata.etag": { 34 | "ReadRequirement": "Mandatory" 35 | }, 36 | "@odata.type": { 37 | "ReadRequirement": "Mandatory" 38 | }, 39 | "Id":{ 40 | "ReadRequirement": "Mandatory" 41 | }, 42 | "Name": { 43 | "ReadRequirement": "Mandatory" 44 | }, 45 | "Drives@odata.count": { 46 | "ReadRequirement": "Mandatory" 47 | }, 48 | "Drives": { 49 | "ReadRequirement": "Mandatory", 50 | "MinCount": 0 51 | }, 52 | "Volumes": { 53 | "ReadRequirement": "Mandatory" 54 | }, 55 | "Status": { 56 | "PropertyRequirements": { 57 | "HealthRollup": { 58 | "ReadRequirement": "Mandatory" 59 | } 60 | } 61 | }, 62 | "StorageControllers": { 63 | "ReadRequirement": "Mandatory", 64 | "MinCount": 1, 65 | "PropertyRequirements": { 66 | "FirmwareVersion": { 67 | "ReadRequirement": "Mandatory" 68 | }, 69 | "Identifiers": { 70 | "ReadRequirement": "Mandatory", 71 | "MinCount": 1, 72 | "PropertyRequirements": { 73 | "DurableName": { 74 | "ReadRequirement": "Mandatory" 75 | }, 76 | "DurableNameFormat": { 77 | "ReadRequirement": "Mandatory", 78 | "Comparison": "Equal", 79 | "Values": ["NAA"] 80 | } 81 | } 82 | }, 83 | "Location": { 84 | "ReadRequirement": "Mandatory", 85 | "PropertyRequirements": { 86 | "PartLocation": { 87 | "ReadRequirement": "Mandatory", 88 | "PropertyRequirements": { 89 | "LocationOrdinalValue": { 90 | "ReadRequirement": "Mandatory" 91 | }, 92 | "LocationType": { 93 | "ReadRequirement": "Mandatory", 94 | "Comparison": "Equal", 95 | "Values": ["Slot"] 96 | }, 97 | "ServiceLabel": { 98 | "ReadRequirement": "Mandatory", 99 | "Purpose": "Human readable slot number e.g. Slot 2" 100 | } 101 | } 102 | } 103 | } 104 | }, 105 | "Manufacturer": { 106 | "ReadRequirement": "Mandatory" 107 | }, 108 | "MemberId": { 109 | "ReadRequirement": "Mandatory" 110 | }, 111 | "Model": { 112 | "ReadRequirement": "Mandatory" 113 | }, 114 | "PartNumber": { 115 | "ReadRequirement": "Mandatory" 116 | }, 117 | "SerialNumber": { 118 | "ReadRequirement": "Mandatory" 119 | }, 120 | "SpeedGbps": { 121 | "ReadRequirement": "Recommended" 122 | }, 123 | "SKU": { 124 | "ReadRequirement": "Mandatory" 125 | }, 126 | "Status": { 127 | "PropertyRequirements": { 128 | "State": { 129 | "ReadRequirement": "Mandatory", 130 | "Comparison": "AnyOf", 131 | "Values": ["Starting", "Enabled", "Updating"] 132 | }, 133 | "Health": { 134 | "ReadRequirement": "Mandatory" 135 | } 136 | } 137 | }, 138 | "SupportedDeviceProtocols": { 139 | "ReadRequirement": "Mandatory", 140 | "MinCount": 2, 141 | "Comparison": "Equal", 142 | "Values": ["SAS", "SATA"] 143 | }, 144 | "SupportedControllerProtocols": { 145 | "ReadRequirement": "Mandatory", 146 | "MinCount": 1, 147 | "Comparison": "Equal", 148 | "Values": ["PCIe"] 149 | }, 150 | "CacheSummary": { 151 | "ReadRequirement": "Mandatory", 152 | "PropertyRequirements": { 153 | "TotalCacheSizeMiB": { 154 | "ReadRequirement": "Mandatory" 155 | }, 156 | "PersistentCacheSizeMiB": { 157 | "ReadRequirement": "Mandatory" 158 | }, 159 | "Status": { 160 | "PropertyRequirements": { 161 | "State": { 162 | "ReadRequirement": "Mandatory", 163 | "Comparison": "AnyOf", 164 | "Values": ["Enabled", "Disabled", "StandbyOffline"] 165 | }, 166 | "Health": { 167 | "ReadRequirement": "Mandatory" 168 | } 169 | } 170 | } 171 | } 172 | }, 173 | "PCIeInterface": { 174 | "ReadRequirement": "Conditional", 175 | "ConditionalRequirements": [{ 176 | 
"Purpose": "Applicable only if this has a host PCIe interface", 177 | "CompareProperty": "SupportedControllerProtocols", 178 | "CompareType": "AnyOf", 179 | "CompareValues": ["PCIe"], 180 | "ReadRequirement": "Mandatory" 181 | }], 182 | "PropertyRequirements": { 183 | "MaxPCIeType": { 184 | "ReadRequirement": "Mandatory" 185 | }, 186 | "PCIeType": { 187 | "ReadRequirement": "Mandatory" 188 | }, 189 | "MaxLanes": { 190 | "ReadRequirement": "Mandatory" 191 | }, 192 | "LanesInUse": { 193 | "ReadRequirement": "Mandatory" 194 | } 195 | } 196 | } 197 | } 198 | } 199 | } 200 | }, 201 | "Drive": { 202 | "MinVersion": "1.5.0", 203 | "Purpose": "Every implementation must have one or more drive resources", 204 | "PropertyRequirements": { 205 | "ReadRequirement": "Mandatory", 206 | "@odata.context": { 207 | "ReadRequirement": "Mandatory" 208 | }, 209 | "@odata.id": { 210 | "ReadRequirement": "Mandatory" 211 | }, 212 | "@odata.etag": { 213 | "ReadRequirement": "Mandatory" 214 | }, 215 | "@odata.type": { 216 | "ReadRequirement": "Mandatory" 217 | }, 218 | "Id":{ 219 | "ReadRequirement": "Mandatory" 220 | }, 221 | "Name": { 222 | "ReadRequirement": "Mandatory" 223 | }, 224 | "Status": { 225 | "PropertyRequirements": { 226 | "State": { 227 | "ReadRequirement": "Mandatory", 228 | "Comparison": "AnyOf", 229 | "Values": ["Enabled", "Disabled", "StandbyOffline", "StandbySpare", "UnavailableOffline", "Updating"] 230 | }, 231 | "Health": { 232 | "ReadRequirement": "Mandatory", 233 | "Purpose": "Health of the drive" 234 | } 235 | } 236 | }, 237 | "IndicatorLED": { 238 | "ReadRequirement": "Mandatory", 239 | "Comparison": "AnyOf", 240 | "Values": ["Lit", "Off"] 241 | }, 242 | "Model": { 243 | "ReadRequirement": "Mandatory" 244 | }, 245 | "Revision": { 246 | "ReadRequirement": "Mandatory" 247 | }, 248 | "CapacityBytes": { 249 | "ReadRequirement": "Mandatory", 250 | "Purpose": "Actual drive capacity" 251 | }, 252 | "BlockSizeBytes": { 253 | "ReadRequirement": "Mandatory" 254 | }, 255 | "FailurePredicted": { 256 | "ReadRequirement": "Mandatory" 257 | }, 258 | "Protocol": { 259 | "ReadRequirement": "Mandatory", 260 | "Comparison": "AnyOf", 261 | "Values": ["SAS", "SATA"] 262 | }, 263 | "MediaType": { 264 | "ReadRequirement": "Mandatory", 265 | "Comparison": "AnyOf", 266 | "Values": ["HDD", "SSD", "SMR"] 267 | }, 268 | "Manufacturer": { 269 | "ReadRequirement": "Mandatory" 270 | }, 271 | "SerialNumber": { 272 | "ReadRequirement": "Mandatory" 273 | }, 274 | "StatusIndicator": { 275 | "ReadRequirement": "Mandatory", 276 | "Purpose": "SES status of drive", 277 | "Comparison": "AnyOf", 278 | "Values": ["OK", "Fail", "Rebuild", "PredictiveFailureAnalysis", "Hotspare"] 279 | }, 280 | "Identifiers": { 281 | "ReadRequirement": "Mandatory", 282 | "MinCount": 1, 283 | "PropertyRequirements": { 284 | "DurableName": { 285 | "ReadRequirement": "Mandatory" 286 | }, 287 | "DurableNameFormat": { 288 | "ReadRequirement": "Mandatory", 289 | "Comparison": "Equal", 290 | "Values": ["NAA"] 291 | } 292 | } 293 | }, 294 | "PhysicalLocation": { 295 | "ReadRequirement": "Mandatory", 296 | "PropertyRequirements": { 297 | "PartLocation": { 298 | "ReadRequirement": "Mandatory", 299 | "PropertyRequirements": { 300 | "LocationOrdinalValue": { 301 | "ReadRequirement": "Mandatory" 302 | }, 303 | "LocationType": { 304 | "ReadRequirement": "Mandatory", 305 | "Comparison": "Equal", 306 | "Values": ["Bay"] 307 | }, 308 | "ServiceLabel": { 309 | "ReadRequirement": "Mandatory" 310 | } 311 | } 312 | } 313 | } 314 | }, 315 | "HotspareType": { 316 | 
"ReadRequirement": "Mandatory", 317 | "Comparison": "AnyOf", 318 | "Values": ["None", "Dedicated"] 319 | }, 320 | "RotationSpeedRPM": { 321 | "ReadRequirement": "Conditional", 322 | "ConditionalRequirements": [{ 323 | "Purpose": "Applicable only if MediaType is HDD or SMR", 324 | "CompareProperty": "MediaType", 325 | "CompareType": "AnyOf", 326 | "CompareValues": ["HDD", "SMR"], 327 | "ReadRequirement": "Mandatory" 328 | }] 329 | }, 330 | "CapableSpeedGbs": { 331 | "ReadRequirement": "Mandatory" 332 | }, 333 | "NegotiatedSpeedGbs": { 334 | "ReadRequirement": "Mandatory" 335 | }, 336 | "PredictedMediaLifeLeftPercent": { 337 | "ReadRequirement": "Conditional", 338 | "ConditionalRequirements": [{ 339 | "Purpose": "Applicable only if MediaType is SSD", 340 | "CompareProperty": "MediaType", 341 | "CompareType": "AnyOf", 342 | "CompareValues": ["SSD"], 343 | "ReadRequirement": "Mandatory" 344 | }] 345 | }, 346 | "Operations": { 347 | "ReadRequirement": "Mandatory", 348 | "MinCount": 0, 349 | "PropertyRequirements": { 350 | "OperationName": { 351 | "ReadRequirement": "Mandatory", 352 | "Comparison": "AnyOf", 353 | "Values": ["Rebuilding", "Erasing", "Sanitizing"] 354 | } 355 | } 356 | }, 357 | "Links": { 358 | "ReadRequirement": "Mandatory", 359 | "PropertyRequirements": { 360 | "Volumes@odata.count": { 361 | "ReadRequirement": "Mandatory" 362 | }, 363 | "Volumes": { 364 | "ReadRequirement": "Mandatory" 365 | } 366 | } 367 | }, 368 | "HotspareReplacementMode": { 369 | "ReadRequirement": "Conditional", 370 | "Comparison": "AnyOf", 371 | "Values": ["Revertible", "NonRevertible"], 372 | "ConditionalRequirements": [{ 373 | "Purpose": "Applicable only if the drive is configured as a hot spare", 374 | "CompareProperty": "HotspareType", 375 | "CompareType": "AnyOf", 376 | "CompareValues": ["Dedicated"], 377 | "ReadRequirement": "Mandatory" 378 | }] 379 | } 380 | } 381 | } 382 | } 383 | } -------------------------------------------------------------------------------- /test/test.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | import json 7 | import os 8 | import re 9 | import io 10 | from collections import namedtuple 11 | import importlib 12 | import sys 13 | import argparse 14 | from utils import * 15 | import shutil 16 | import stat 17 | import traceback 18 | import requests 19 | import zipfile 20 | 21 | sys.path.append('./') 22 | 23 | from rdebej import dictionary 24 | from rdebej import encode, decode 25 | 26 | 27 | COPYRIGHT = 'Copyright (c) 2018 DMTF' 28 | 29 | TestSpecification = namedtuple('TestSpecification', 'csdl_directories ' 30 | 'json_schema_directories ' 31 | 'schema_filename ' 32 | 'entity ' 33 | 'oem_schema_filenames ' 34 | 'oem_entities ' 35 | 'profile ' 36 | 'dictionary_filename ' 37 | 'input_encode_filename ' 38 | 'copyright') 39 | MAJOR_SCHEMA_DICTIONARY_LIST = [ 40 | TestSpecification( 41 | 'test/schema/dummysimple/csdl', 42 | 'test/schema/dummysimple/json-schema', 43 | 'DummySimple_v1.xml', 44 | 'DummySimple.DummySimple', 45 | '', 46 | '', 47 | '', 48 | 'DummySimple.bin', 49 | 'test/dummysimple.json', 50 | 'Copyright (c) 2018 Acme Corp'), 51 | 52 | TestSpecification( 53 | 'test/schema/dummysimple/csdl', 54 | 'test/schema/dummysimple/json-schema', 55 | 'DummySimple_v1.xml', 56 | 'DummySimple.DummySimple', 57 | '', 58 | '', 59 | '', 60 | 'DummySimple.bin', 61 | 'test/dummysimple2.json', 62 | 'Copyright (c) 2018 Acme Corp'), 63 | 64 | TestSpecification( 65 | '$csdl_dir test/schema/oem-csdl', 66 | '$json_schema_dir', 67 | 'Drive_v1.xml', 68 | 'Drive.Drive', 69 | 'OEM1DriveExt_v1.xml OEM2DriveExt_v1.xml', 70 | 'OEM1=OEM1DriveExt.OEM1DriveExt OEM2=OEM2DriveExt.OEM2DriveExt', 71 | '', # profile 72 | 'drive.bin', 73 | 'test/drive.json', # file to encode 74 | 'Copyright (c) 2018 Acme Corp'), # encoded bej file 75 | 76 | TestSpecification( 77 | '$csdl_dir', 78 | '$json_schema_dir', 79 | 'Storage_v1.xml', 80 | 'Storage.Storage', 81 | '', 82 | '', 83 | '', 84 | 'storage.bin', 85 | 'test/storage.json', 86 | 'Copyright (c) 2018 Acme Corp'), 87 | 88 | TestSpecification( 89 | '$csdl_dir', 90 | '$json_schema_dir', 91 | 'Storage_v1.xml', 92 | 'Storage.Storage', 93 | '', 94 | '', 95 | '', 96 | 'storage.bin', 97 | 'test/storage_large.json', 98 | 'Copyright (c) 2018 Acme Corp'), 99 | 100 | TestSpecification( 101 | '$csdl_dir', 102 | '$json_schema_dir', 103 | 'Outlet_v1.xml', 104 | 'Outlet.Outlet', 105 | '', 106 | '', 107 | '', 108 | 'outlet.bin', 109 | 'test/outlet.json', 110 | 'Copyright (c) 2018 Acme Corp'), 111 | 112 | TestSpecification( 113 | '$csdl_dir', 114 | '$json_schema_dir', 115 | 'Circuit_v1.xml', 116 | 'Circuit.Circuit', 117 | '', 118 | '', 119 | '', 120 | 'circuit.bin', 121 | 'test/circuit.json', 122 | 'Copyright (c) 2018 Acme Corp'), 123 | 124 | TestSpecification( 125 | '$csdl_dir', 126 | '$json_schema_dir', 127 | 'Storage_v1.xml', 128 | 'Storage.Storage', 129 | '', 130 | '', 131 | 'test/example_profile_for_truncation.json', 132 | 'storage.bin', 133 | 'test/storage_profile_conformant.json', 134 | 'Copyright (c) 2018 Acme Corp') 135 | ] 136 | 137 | 138 | if __name__ == '__main__': 139 | parser = argparse.ArgumentParser() 140 | parser.add_argument("--test_bej", help="test only BEJ", action="store_true") 141 | parser.add_argument("--schema_source", help="source for schema files", type=str, required=True) 142 | parser.add_argument("--git_tag", help="git repo tag", type=str, required=False) 143 | parser.add_argument("--delete_schema_dir", help="cleanup the schema directories", action="store_true") 144 | 
parser.add_argument("--save_dictionaries", help="location to store dictionaries", type=str, required=False) 145 | 146 | args = parser.parse_args() 147 | 148 | # default location of schema files 149 | schema_test_dir = 'test/schema' 150 | delete_schema_test_dir = False 151 | 152 | if args.schema_source: 153 | # we support only git repos from the master branch 154 | if re.search('.*\.git$', args.schema_source): 155 | schema_test_dir = 'tmp-schema' 156 | branch = 'master' 157 | if args.git_tag: 158 | branch = args.git_tag 159 | repo = cloneFrom(args.schema_source, schema_test_dir, branch, ['metadata', 'json-schema']) 160 | if not repo: 161 | exit(1) 162 | elif re.search('https://.*\.zip$', args.schema_source): 163 | schema_test_dir = 'tmp-schema' 164 | r = requests.get(args.schema_source, allow_redirects=True) 165 | open('tmp_schema.zip', 'wb').write(r.content) 166 | with zipfile.ZipFile('tmp_schema.zip', 'r') as zip_ref: 167 | zip_ref.extractall('tmp-schema') 168 | schema_test_dir = 'tmp-schema/'+os.listdir('tmp-schema')[0] 169 | else: # standard directory 170 | schema_test_dir = args.schema_source 171 | 172 | if args.delete_schema_dir: 173 | delete_schema_test_dir = True 174 | 175 | csdl_dir = schema_test_dir + '/metadata' 176 | json_schema_dir = schema_test_dir + '/json-schema' 177 | if os.path.isdir(schema_test_dir + '/metadata'): 178 | csdl_dir = schema_test_dir + '/metadata' 179 | elif os.path.isdir(schema_test_dir + '/csdl'): 180 | csdl_dir = schema_test_dir + '/csdl' 181 | 182 | if not args.test_bej: 183 | # go thru every csdl and attempt creating a dictionary 184 | skip_list = [] 185 | 186 | for filename in os.listdir(csdl_dir): 187 | if filename not in skip_list: 188 | # strip out the _v1.xml 189 | m = re.compile('(.*)_v1.xml').match(filename) 190 | entity = '' 191 | if m: 192 | entity = m.group(1) + '.' 
+ m.group(1) 193 | 194 | try: 195 | schema_dictionary = dictionary.generate_schema_dictionary( 196 | 'local', 197 | [csdl_dir], 198 | [json_schema_dir], 199 | entity, 200 | filename, 201 | None, 202 | None, 203 | None, 204 | None, 205 | COPYRIGHT 206 | ) 207 | 208 | if schema_dictionary and schema_dictionary.dictionary and schema_dictionary.json_dictionary: 209 | print(filename, 'Entries:', len(schema_dictionary.dictionary), 210 | 'Size:', len(schema_dictionary.dictionary_byte_array), 211 | 'Url:', json.loads(schema_dictionary.json_dictionary)['schema_url']) 212 | # verify copyright 213 | assert(bytearray(schema_dictionary.dictionary_byte_array[ 214 | len(schema_dictionary.dictionary_byte_array) - len(COPYRIGHT) - 1: 215 | len(schema_dictionary.dictionary_byte_array)-1]).decode('utf-8') == COPYRIGHT) 216 | if args.save_dictionaries: 217 | dir_to_save = args.save_dictionaries 218 | if not os.path.exists(dir_to_save): 219 | os.makedirs(dir_to_save) 220 | 221 | # save the binary and also dump the ascii version 222 | with open(dir_to_save + '//' + filename.replace('.xml', '.dict'), 'wb') as file: 223 | file.write(bytes(schema_dictionary.dictionary_byte_array)) 224 | 225 | else: 226 | print(filename, "Missing entities, skipping...") 227 | 228 | except Exception as ex: 229 | print("Error: Could not generate JSON schema dictionary for schema:", filename) 230 | print("Error: Exception type: {0}, message: {1}".format(ex.__class__.__name__, str(ex))) 231 | exit(1) 232 | 233 | # Generate the annotation dictionary 234 | print('Generating annotation dictionary...') 235 | annotation_dictionary = None 236 | try: 237 | annotation_dictionary = dictionary.generate_annotation_schema_dictionary( 238 | [csdl_dir], 239 | [json_schema_dir], 240 | 'v1_0_0' 241 | ) 242 | 243 | if annotation_dictionary and annotation_dictionary.dictionary \ 244 | and annotation_dictionary.dictionary_byte_array and annotation_dictionary.json_dictionary: 245 | print('Entries:', len(annotation_dictionary.dictionary), 'Size:', 246 | len(annotation_dictionary.dictionary_byte_array)) 247 | 248 | dir_to_save = './' 249 | if args.save_dictionaries: 250 | dir_to_save = args.save_dictionaries 251 | 252 | with open(dir_to_save + '//' + 'annotation.dict', 'wb') as annotaton_bin: 253 | annotaton_bin.write(bytearray(annotation_dictionary.dictionary_byte_array)) 254 | dictionary.print_binary_dictionary(annotation_dictionary.dictionary_byte_array) 255 | 256 | except Exception as ex: 257 | print("Error: Could not generate JSON schema dictionary for schema annotation") 258 | print("Error: Exception type: {0}, message: {1}".format(ex.__class__.__name__, str(ex))) 259 | exit(1) 260 | 261 | # Generate the error schema dictionary 262 | print('Generating error schema dictionary...') 263 | error_schema_dictionary = None 264 | try: 265 | error_schema_dictionary = dictionary.generate_error_schema_dictionary( 266 | [csdl_dir], 267 | [json_schema_dir] 268 | ) 269 | 270 | if error_schema_dictionary and error_schema_dictionary.dictionary \ 271 | and error_schema_dictionary.dictionary_byte_array and error_schema_dictionary.json_dictionary: 272 | print('Entries:', len(error_schema_dictionary.dictionary), 'Size:', 273 | len(error_schema_dictionary.dictionary_byte_array)) 274 | 275 | dir_to_save = './' 276 | if args.save_dictionaries: 277 | dir_to_save = args.save_dictionaries 278 | 279 | with open(dir_to_save + '//' + 'error.dict', 'wb') as error_bin: 280 | error_bin.write(bytearray(error_schema_dictionary.dictionary_byte_array)) 281 | 
dictionary.print_binary_dictionary(error_schema_dictionary.dictionary_byte_array) 282 | 283 | # Run the encode/decode 284 | bej_stream = io.BytesIO() 285 | 286 | json_to_encode = json.load(open('test/error.json')) 287 | encode_success, pdr_map = encode.bej_encode( 288 | bej_stream, 289 | json_to_encode, 290 | error_schema_dictionary.dictionary_byte_array, 291 | annotation_dictionary.dictionary_byte_array, 292 | verbose=True 293 | ) 294 | assert encode_success, 'Encode failure' 295 | encoded_bytes = bej_stream.getvalue() 296 | encode.print_encode_summary(json_to_encode, encoded_bytes) 297 | 298 | decode_stream = io.StringIO() 299 | decode_success = decode.bej_decode( 300 | decode_stream, 301 | io.BytesIO(bytes(encoded_bytes)), 302 | error_schema_dictionary.dictionary_byte_array, 303 | annotation_dictionary.dictionary_byte_array, 304 | error_schema_dictionary, pdr_map, {} 305 | ) 306 | assert decode_success, 'Decode failure' 307 | 308 | decode_file = decode_stream.getvalue() 309 | 310 | # compare the decode with the original 311 | print('Decoded JSON:') 312 | print(json.dumps(json.loads(decode_file), indent=3)) 313 | assert(json.loads(decode_file) == json.load(open('test/error.json'))), \ 314 | 'Mismatch in original JSON and decoded JSON' 315 | 316 | except Exception as ex: 317 | print("Error: Could not validate error schema dictionary") 318 | print("Error: Exception type: {0}, message: {1}".format(ex.__class__.__name__, str(ex))) 319 | traceback.print_exc() 320 | exit(1) 321 | 322 | # Generate the major schema dictionaries 323 | for major_schema in MAJOR_SCHEMA_DICTIONARY_LIST: 324 | schema_dictionary = None 325 | try: 326 | csdl_dirs = major_schema.csdl_directories.replace('$csdl_dir', csdl_dir) 327 | json_schema_dirs = major_schema.json_schema_directories.replace('$json_schema_dir', json_schema_dir) 328 | 329 | print(csdl_dirs) 330 | schema_dictionary = dictionary.generate_schema_dictionary( 331 | 'local', 332 | csdl_dirs.split(), 333 | json_schema_dirs.split(), 334 | major_schema.entity, 335 | major_schema.schema_filename, 336 | major_schema.oem_entities.split(), 337 | major_schema.oem_schema_filenames.split(), 338 | major_schema.profile 339 | ) 340 | 341 | if schema_dictionary and schema_dictionary.dictionary and schema_dictionary.json_dictionary: 342 | print('Entries:', len(schema_dictionary.dictionary), 'Size:', 343 | len(schema_dictionary.dictionary_byte_array)) 344 | with open(major_schema.dictionary_filename, 'wb') as dictionary_bin: 345 | dictionary_bin.write(bytearray(schema_dictionary.dictionary_byte_array)) 346 | dictionary.print_binary_dictionary(schema_dictionary.dictionary_byte_array) 347 | 348 | except Exception as ex: 349 | print("Error: Could not generate JSON schema dictionary for schema:", major_schema.schema_filename) 350 | print("Error: Exception type: {0}, message: {1}".format(ex.__class__.__name__, str(ex))) 351 | exit(1) 352 | 353 | # Run the encode/decode 354 | bej_stream = io.BytesIO() 355 | 356 | json_to_encode = json.load(open(major_schema.input_encode_filename)) 357 | encode_success, pdr_map = encode.bej_encode( 358 | bej_stream, 359 | json_to_encode, 360 | schema_dictionary.dictionary_byte_array, 361 | annotation_dictionary.dictionary_byte_array, True 362 | ) 363 | 364 | # build the deferred binding strings from the pdr_map 365 | deferred_binding_strings = {} 366 | for url, pdr_num in pdr_map.items(): 367 | deferred_binding_strings['%L' + str(pdr_num)] = url 368 | 369 | assert encode_success, 'Encode failure' 370 | encoded_bytes = bej_stream.getvalue() 371 | 
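# For example, if bej_encode assigned PDR number 0 to the resource link
# '/redfish/v1/Systems/1/Storage/1', the map built above is
# {'%L0': '/redfish/v1/Systems/1/Storage/1'}; bej_decode later substitutes
# these values back into any string flagged as a deferred binding.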
encode.print_encode_summary(json_to_encode, encoded_bytes) 372 | 373 | decode_stream = io.StringIO() 374 | decode_success = decode.bej_decode( 375 | decode_stream, 376 | io.BytesIO(bytes(encoded_bytes)), 377 | schema_dictionary.dictionary_byte_array, 378 | annotation_dictionary.dictionary_byte_array, 379 | error_schema_dictionary, pdr_map, deferred_binding_strings 380 | ) 381 | assert decode_success, 'Decode failure' 382 | 383 | decode_file = decode_stream.getvalue() 384 | 385 | # compare the decode with the original 386 | print('Decoded JSON:') 387 | print(json.dumps(json.loads(decode_file), indent=3)) 388 | assert(json.loads(decode_file) == json.load(open(major_schema.input_encode_filename))) 389 | 390 | # cleanup 391 | os.remove(major_schema.dictionary_filename) 392 | 393 | # cleanup 394 | if delete_schema_test_dir: 395 | shutil.rmtree(schema_test_dir, onerror=onerror) 396 | 397 | if not args.save_dictionaries: 398 | os.remove('annotation.dict') 399 | os.remove('error.dict') 400 | 401 | exit(code=0) 402 | -------------------------------------------------------------------------------- /rdebej/decode.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | """ 7 | PLDM BEJ Decoder 8 | 9 | File : decode.py 10 | 11 | Brief : This file defines APIs to decode a PLDM Binary encoded JSON (BEJ) to JSON 12 | """ 13 | 14 | import os 15 | import re 16 | from ._internal_utils import * 17 | 18 | 19 | def bej_unpack_nnint(stream): 20 | # read num bytes 21 | num_bytes = int.from_bytes(stream.read(1), 'little') 22 | return int.from_bytes(stream.read(num_bytes), 'little') 23 | 24 | 25 | def bej_unpack_sfl(stream): 26 | # unpack seq 27 | seq = bej_unpack_nnint(stream) 28 | 29 | # unpack format 30 | format = int.from_bytes(stream.read(1), 'little') >> 4 31 | 32 | # unpack length 33 | length = bej_unpack_nnint(stream) 34 | 35 | return seq, format, length 36 | 37 | 38 | def bej_decode_sequence_number(seq): 39 | """ 40 | Returns the sequence number and the dictionary selector 41 | """ 42 | return seq >> 1, seq & 0x01 43 | 44 | 45 | def bej_unpack_sflv_string(stream): 46 | seq, format, length = bej_unpack_sfl(stream) 47 | val = stream.read(length).decode() 48 | 49 | # the last byte in a string decode is the null terminator, remove that and return 50 | return bej_decode_sequence_number(seq), val[:length-1] 51 | 52 | 53 | def bej_unpack_sflv_boolean(stream): 54 | seq, format, length = bej_unpack_sfl(stream) 55 | val = stream.read(length) 56 | 57 | bool_val = 'false' 58 | if val[0] == 0x01: 59 | bool_val = 'true' 60 | 61 | # the last byte in a string decode is the null terminator, remove that and return 62 | return bej_decode_sequence_number(seq), bool_val 63 | 64 | 65 | def bej_unpack_sflv_integer(stream): 66 | seq, format, length = bej_unpack_sfl(stream) 67 | int_array = stream.read(length) 68 | return bej_decode_sequence_number(seq), int.from_bytes(int_array, 'little', signed=True) 69 | 70 | 71 | def bej_unpack_sflv_real(stream): 72 | seq, format, length = bej_unpack_sfl(stream) 73 | length_of_whole = bej_unpack_nnint(stream) 74 | whole_array = stream.read(length_of_whole) 75 | whole = int.from_bytes(whole_array, 'little', signed=True) 76 | leading_zero_count = bej_unpack_nnint(stream) 77 | fract = bej_unpack_nnint(stream) 78 | length_of_exponent = 
bej_unpack_nnint(stream) 79 | exponent = 0 80 | if length_of_exponent > 0: 81 | exponent_array = stream.read(length_of_exponent) 82 | exponent = int.from_bytes(exponent_array, 'little', signed=True) 83 | 84 | real_str = str(whole) + '.' 85 | for i in range(0, leading_zero_count): 86 | real_str += '0' 87 | real_str += str(fract) 88 | real_str += 'e' + str(exponent) 89 | return bej_decode_sequence_number(seq), float(real_str) 90 | 91 | 92 | def bej_unpack_sflv_enum(stream): 93 | seq, format, length = bej_unpack_sfl(stream) 94 | value = bej_unpack_nnint(stream) 95 | 96 | return bej_decode_sequence_number(seq), value 97 | 98 | 99 | def bej_unpack_sflv_resource_link(stream): 100 | seq, format, length = bej_unpack_sfl(stream) 101 | value = bej_unpack_nnint(stream) 102 | 103 | return bej_decode_sequence_number(seq), value 104 | 105 | 106 | def bej_unpack_sflv_null(stream): 107 | seq, format, length = bej_unpack_sfl(stream) 108 | return bej_decode_sequence_number(seq) 109 | 110 | 111 | def bej_unpack_set_start(stream): 112 | ''' 113 | :param stream: 114 | :return: [sequence_num, selector], length, count 115 | ''' 116 | 117 | # move the stream to point to the first element in the set 118 | seq, format, length = bej_unpack_sfl(stream) 119 | 120 | # unpack the count 121 | count = bej_unpack_nnint(stream) 122 | 123 | return bej_decode_sequence_number(seq), length, count 124 | 125 | 126 | def bej_unpack_array_start(stream): 127 | ''' 128 | :param stream: 129 | :return: [sequence_num, selector], length, count 130 | ''' 131 | 132 | # move the stream to point to the first element in the array 133 | seq, format, length = bej_unpack_sfl(stream) 134 | 135 | # unpack the count 136 | count = bej_unpack_nnint(stream) 137 | 138 | return bej_decode_sequence_number(seq), length, count 139 | 140 | 141 | def bej_unpack_property_annotation_start(stream): 142 | ''' 143 | :param stream: 144 | :return: 145 | ''' 146 | 147 | # move the stream to point to the first element in the set 148 | seq, format, length = bej_unpack_sfl(stream) 149 | prop_seq, selector = bej_decode_sequence_number(seq) 150 | annot_seq, selector = bej_decode_sequence_number(bej_sequenceof(stream)) 151 | return annot_seq, prop_seq 152 | 153 | 154 | pass 155 | 156 | 157 | def bej_unpack_array_done(): 158 | pass 159 | 160 | 161 | def bej_unpack_property_annotation_done(): 162 | pass 163 | 164 | 165 | def bej_typeof(stream): 166 | current_pos = stream.tell() 167 | 168 | # skip seq 169 | bej_unpack_nnint(stream) 170 | 171 | format_and_flags = int.from_bytes(stream.read(1), 'little') 172 | stream.seek(current_pos, os.SEEK_SET) 173 | 174 | format = format_and_flags >> 4 175 | flags = format_and_flags & 0x0F 176 | return format, flags 177 | 178 | 179 | def bej_is_deferred_binding(stream): 180 | current_pos = stream.tell() 181 | 182 | # skip seq 183 | bej_unpack_nnint(stream) 184 | 185 | is_deferred_binding = int.from_bytes(stream.read(1), 'little') & 0x01 == 0x01 186 | stream.seek(current_pos, os.SEEK_SET) 187 | 188 | return is_deferred_binding 189 | 190 | 191 | def bej_sequenceof(stream): 192 | current_pos = stream.tell() 193 | 194 | # get seq 195 | seq = bej_unpack_nnint(stream) 196 | 197 | stream.seek(current_pos, os.SEEK_SET) 198 | 199 | return seq 200 | 201 | 202 | def get_stream_size(stream): 203 | current_pos = stream.tell() 204 | stream.seek(0, os.SEEK_END) 205 | final_pos = stream.tell() 206 | stream.seek(current_pos, os.SEEK_SET) 207 | return final_pos 208 | 209 | 210 | current_available_pdr = 0 211 | 212 | 213 | def get_link_from_pdr_map(pdr, 
pdr_map): 214 | for key, value in pdr_map.items(): 215 | if value == pdr: 216 | return key 217 | return '' 218 | 219 | 220 | def load_dictionary_subset_by_key_sequence(schema_dict, offset, child_count): 221 | schema_dict_stream = DictionaryByteArrayStream(schema_dict, offset, child_count) 222 | 223 | entry_dict = {} 224 | while schema_dict_stream.has_entry(): 225 | entry = schema_dict_stream.get_next_entry() 226 | entry_dict[entry[DICTIONARY_ENTRY_SEQUENCE_NUMBER]] = entry 227 | 228 | return entry_dict 229 | 230 | 231 | def get_full_annotation_name_from_sequence_number(seq, annot_dict): 232 | # TODO: cache the main annotations 233 | base_entry = DictionaryByteArrayStream(annot_dict, 0, -1).get_next_entry() 234 | annotation_entries = load_dictionary_subset_by_key_sequence(annot_dict, 235 | base_entry[DICTIONARY_ENTRY_OFFSET], 236 | base_entry[DICTIONARY_ENTRY_CHILD_COUNT]) 237 | 238 | return annotation_entries[seq][DICTIONARY_ENTRY_NAME] 239 | 240 | 241 | def bej_decode_enum_value(dict_to_use, dict_entry, value): 242 | # get the value for the enum sequence number from the dictionary 243 | enum_dict_stream = DictionaryByteArrayStream(dict_to_use, dict_entry[DICTIONARY_ENTRY_OFFSET], 244 | dict_entry[DICTIONARY_ENTRY_CHILD_COUNT]) 245 | enum_value = '' 246 | while enum_dict_stream.has_entry(): 247 | enum_entry = enum_dict_stream.get_next_entry() 248 | 249 | if enum_entry[DICTIONARY_ENTRY_SEQUENCE_NUMBER] == value: 250 | enum_value = enum_entry[DICTIONARY_ENTRY_NAME] 251 | break 252 | return enum_value 253 | 254 | 255 | def bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream): 256 | if (selector == entries_by_seq_selector) and ((flags & BEJ_FLAG_NESTED_TOP_LEVEL_ANNOTATION) == 0): 257 | name = entries_by_seq[seq][DICTIONARY_ENTRY_NAME] 258 | elif selector == BEJ_DICTIONARY_SELECTOR_ANNOTATION: 259 | name = get_full_annotation_name_from_sequence_number(seq, annot_dict) 260 | else: 261 | name = entries_by_seq[seq][DICTIONARY_ENTRY_NAME] 262 | 263 | if name != '': 264 | output_stream.write('"' + name + '":') 265 | 266 | 267 | def bej_decode_property_annotation_name(annot_dict, annot_seq, prop_seq, entries_by_seq, output_stream): 268 | prop_name = entries_by_seq[prop_seq][DICTIONARY_ENTRY_NAME] 269 | annot_name = get_full_annotation_name_from_sequence_number(annot_seq, annot_dict) 270 | 271 | output_stream.write('"' + prop_name + annot_name + '":') 272 | 273 | 274 | def get_annotation_dictionary_entries_by_seq(annotation_dictionary): 275 | base_entry = DictionaryByteArrayStream(annotation_dictionary, 0, -1).get_next_entry() 276 | return load_dictionary_subset_by_key_sequence(annotation_dictionary, base_entry[DICTIONARY_ENTRY_OFFSET], 277 | base_entry[DICTIONARY_ENTRY_CHILD_COUNT]) 278 | 279 | 280 | def validate_complex_type_length(input_stream, complex_type_start_pos, length): 281 | current_pos = input_stream.tell() 282 | input_stream.seek(complex_type_start_pos, os.SEEK_SET) 283 | bej_unpack_sfl(input_stream) 284 | set_value_start_pos = input_stream.tell() 285 | input_stream.seek(current_pos, os.SEEK_SET) 286 | return current_pos - set_value_start_pos == length 287 | 288 | 289 | def get_entry_by_seq(schema_dict, annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector): 290 | dict_to_use = schema_dict if selector is BEJ_DICTIONARY_SELECTOR_MAJOR_SCHEMA else annot_dict 291 | 292 | # if we are changing dictionary context, we need to load entries for the new dictionary 293 | if entries_by_seq_selector != selector or (flags & 
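# Recap of the selector convention used throughout this module: bit 0 of every
# on-the-wire sequence number selects the dictionary (0 = major schema,
# 1 = annotation, matching the BEJ_DICTIONARY_SELECTOR_* constants), and the
# remaining bits are the dictionary row sequence, as bej_decode_sequence_number()
# above implements:
#
#     bej_decode_sequence_number(0b1010) == (5, 0)   # row 5, major schema dict
#     bej_decode_sequence_number(0b1011) == (5, 1)   # row 5, annotation dict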
BEJ_FLAG_NESTED_TOP_LEVEL_ANNOTATION) != 0: 294 | base_entry = DictionaryByteArrayStream(dict_to_use, 0, -1).get_next_entry() 295 | entries_by_seq = load_dictionary_subset_by_key_sequence(dict_to_use, 296 | base_entry[DICTIONARY_ENTRY_OFFSET], 297 | base_entry[DICTIONARY_ENTRY_CHILD_COUNT]) 298 | return entries_by_seq[seq] 299 | 300 | 301 | def bej_decode_stream(output_stream, input_stream, schema_dict, annot_dict, entries_by_seq, entries_by_seq_selector, 302 | prop_count, is_seq_array_index, add_name, deferred_binding_strings): 303 | index = 0 304 | success = True 305 | while success and input_stream.tell() < get_stream_size(input_stream) and index < prop_count: 306 | format, flags = bej_typeof(input_stream) 307 | 308 | if format == BEJ_FORMAT_SET: 309 | # record the stream pos so we can validate the length later 310 | set_start_pos = input_stream.tell() 311 | [seq, selector], length, count = bej_unpack_set_start(input_stream) 312 | if is_seq_array_index: 313 | seq = 0 314 | entry = get_entry_by_seq(schema_dict, annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector) 315 | 316 | if add_name: 317 | bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream) 318 | 319 | dict_to_use = schema_dict if selector is BEJ_DICTIONARY_SELECTOR_MAJOR_SCHEMA else annot_dict 320 | output_stream.write('{') 321 | 322 | success = bej_decode_stream(output_stream, input_stream, schema_dict, annot_dict, 323 | load_dictionary_subset_by_key_sequence( 324 | dict_to_use, entry[DICTIONARY_ENTRY_OFFSET], entry[DICTIONARY_ENTRY_CHILD_COUNT]), 325 | selector, 326 | count, is_seq_array_index=False, add_name=True, deferred_binding_strings=deferred_binding_strings) 327 | output_stream.write('}') 328 | 329 | # validate the length 330 | if not validate_complex_type_length(input_stream, set_start_pos, length): 331 | print('BEJ decoding error: Invalid length/count for set. 
Current stream contents:', 332 | output_stream.getvalue()) 333 | return False 334 | 335 | elif format == BEJ_FORMAT_STRING: 336 | is_deferred_binding = bej_is_deferred_binding(input_stream) 337 | [seq, selector], value = bej_unpack_sflv_string(input_stream) 338 | if add_name: 339 | bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream) 340 | 341 | if is_deferred_binding: 342 | bindings_to_resolve = re.findall('(%[BCMSU]|%[LTPI][0-9]+|%PF[0-9a-f]+)\.?[0-9]*.*?', value) 343 | for binding in bindings_to_resolve: 344 | if binding in deferred_binding_strings: 345 | value = value.replace(binding, deferred_binding_strings[binding]) 346 | 347 | output_stream.write('"' + value + '"') 348 | 349 | elif format == BEJ_FORMAT_INTEGER: 350 | [seq, selector], value = bej_unpack_sflv_integer(input_stream) 351 | if add_name: 352 | bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream) 353 | 354 | output_stream.write(str(value)) 355 | 356 | elif format == BEJ_FORMAT_REAL: 357 | [seq, selector], value = bej_unpack_sflv_real(input_stream) 358 | if add_name: 359 | bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream) 360 | 361 | output_stream.write(str(value)) 362 | 363 | elif format == BEJ_FORMAT_BOOLEAN: 364 | [seq, selector], value = bej_unpack_sflv_boolean(input_stream) 365 | if add_name: 366 | bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream) 367 | 368 | output_stream.write(value) 369 | 370 | elif format == BEJ_FORMAT_RESOURCE_LINK: 371 | [seq, selector], pdr = bej_unpack_sflv_resource_link(input_stream) 372 | if add_name: 373 | bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream) 374 | 375 | output_stream.write('"' + get_link_from_pdr_map(pdr) + '"') 376 | 377 | elif format == BEJ_FORMAT_ENUM: 378 | [seq, selector], value = bej_unpack_sflv_enum(input_stream) 379 | if is_seq_array_index: 380 | seq = 0 381 | 382 | if add_name: 383 | bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream) 384 | 385 | dict_to_use = schema_dict if selector is BEJ_DICTIONARY_SELECTOR_MAJOR_SCHEMA else annot_dict 386 | enum_value = bej_decode_enum_value(dict_to_use, get_entry_by_seq(schema_dict, annot_dict, seq, selector, 387 | flags, entries_by_seq, entries_by_seq_selector), value) 388 | output_stream.write('"' + enum_value + '"') 389 | 390 | elif format == BEJ_FORMAT_NULL: 391 | [seq, selector] = bej_unpack_sflv_null(input_stream) 392 | if add_name: 393 | bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream) 394 | 395 | output_stream.write('null') 396 | 397 | elif format == BEJ_FORMAT_ARRAY: 398 | array_start_pos = input_stream.tell() 399 | [seq, selector], length, array_member_count = bej_unpack_array_start(input_stream) 400 | if is_seq_array_index: 401 | seq = 0 402 | 403 | dict_to_use = schema_dict if selector is BEJ_DICTIONARY_SELECTOR_MAJOR_SCHEMA else annot_dict 404 | entry = get_entry_by_seq(schema_dict, annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector) 405 | 406 | if add_name: 407 | bej_decode_name(annot_dict, seq, selector, flags, entries_by_seq, entries_by_seq_selector, output_stream) 408 | 409 | output_stream.write('[') 410 | for i in range(0, array_member_count): 411 | success = bej_decode_stream(output_stream, input_stream, 
schema_dict, annot_dict, 412 | load_dictionary_subset_by_key_sequence(dict_to_use, entry[DICTIONARY_ENTRY_OFFSET], 413 | entry[DICTIONARY_ENTRY_CHILD_COUNT]), 414 | selector, 415 | prop_count=1, is_seq_array_index=True, add_name=False, 416 | deferred_binding_strings=deferred_binding_strings) 417 | if i < array_member_count-1: 418 | output_stream.write(',') 419 | 420 | output_stream.write(']') 421 | 422 | # validate the length 423 | if not validate_complex_type_length(input_stream, array_start_pos, length): 424 | print('BEJ decoding error: Invalid length/count for array. Current stream contents:', 425 | output_stream.getvalue()) 426 | return False 427 | 428 | 429 | elif format == BEJ_FORMAT_PROPERTY_ANNOTATION: 430 | # Seq(property sequence #) 431 | # Format(bejPropertyAnnotation) 432 | # Length 433 | # Seq(Annotation_name) 434 | # Format(format of annotation value) 435 | # Length 436 | # Value(value: can be a complex type) 437 | # e.g Status@Message.ExtendedInfo 438 | 439 | annot_seq, prop_seq = bej_unpack_property_annotation_start(input_stream) 440 | bej_decode_property_annotation_name(annot_dict, annot_seq, prop_seq, entries_by_seq, 441 | output_stream) 442 | 443 | success = bej_decode_stream(output_stream, input_stream, schema_dict, annot_dict, 444 | get_annotation_dictionary_entries_by_seq(annot_dict), 445 | BEJ_DICTIONARY_SELECTOR_ANNOTATION, 446 | prop_count=1, is_seq_array_index=False, add_name=False, 447 | deferred_binding_strings=deferred_binding_strings) 448 | else: 449 | success = False 450 | 451 | if index < prop_count-1: 452 | output_stream.write(',') 453 | index += 1 454 | 455 | return success 456 | 457 | 458 | def bej_decode(output_stream, input_stream, schema_dictionary, annotation_dictionary, 459 | error_dictionary, pdr_map, def_binding_strings): 460 | """ 461 | Decode a BEJ stream into JSON 462 | 463 | Args: 464 | output_stream: 465 | input_stream: 466 | schema_dictionary: 467 | annotation_dictionary: 468 | error_dictionary: 469 | pdr_map: 470 | def_binding_strings: 471 | 472 | Returns: 473 | """ 474 | resource_link_to_pdr_map = pdr_map 475 | # strip off the headers 476 | version = input_stream.read(4) 477 | assert((version == bytes([0x00, 0xF0, 0xF0, 0xF1])) or (version == bytes([0x00, 0xF0, 0xF1, 0xF1]))) 478 | flags = input_stream.read(2) 479 | assert (flags == bytes([0x00, 0x00])) 480 | schemaClass = input_stream.read(1) 481 | assert(schemaClass in [bytes([0x00]), bytes([0x01]), bytes([0x04])]) 482 | 483 | if schemaClass == bytes([0x00]) or schemaClass == bytes([0x01]): # Major schema class or Event 484 | return bej_decode_stream(output_stream, input_stream, schema_dictionary, annotation_dictionary, 485 | load_dictionary_subset_by_key_sequence(schema_dictionary, 0, -1), 486 | BEJ_DICTIONARY_SELECTOR_MAJOR_SCHEMA, 487 | 1, is_seq_array_index=False, add_name=False, 488 | deferred_binding_strings=def_binding_strings) 489 | else: # Error schema class 490 | return bej_decode_stream(output_stream, input_stream, error_dictionary, annotation_dictionary, 491 | load_dictionary_subset_by_key_sequence(error_dictionary, 0, -1), 492 | BEJ_DICTIONARY_SELECTOR_MAJOR_SCHEMA, 493 | 1, is_seq_array_index=False, add_name=False, 494 | deferred_binding_strings=def_binding_strings) 495 | 496 | -------------------------------------------------------------------------------- /test/storage_large.json: -------------------------------------------------------------------------------- 1 | { 2 | "@odata.type": "#Storage.v1_3_0.Storage", 3 | "@odata.context": "/redfish/v1/$metadata#Storage.Storage", 
4 | "@odata.id": "/redfish/v1/Systems/1/Storage/1", 5 | "Id": "RAID Controller 1", 6 | "Name": "RAID Controller", 7 | "Description": "RAID Controller", 8 | "Status": { 9 | "State": "Enabled", 10 | "Health": "OK", 11 | "HealthRollup": "OK" 12 | }, 13 | "StorageControllers": [ 14 | { 15 | "@odata.id": "/redfish/v1/Systems/1/Storage/1#/StorageControllers/0", 16 | "@odata.type": "#Storage.v1_3_0.StorageController", 17 | "MemberId": "0", 18 | "Name": "SAS RAID Controller", 19 | "Status": { 20 | "State": "Enabled", 21 | "Health": "OK" 22 | }, 23 | "Identifiers": [ 24 | { 25 | "DurableNameFormat": "NAA", 26 | "DurableName": "5045594843305852483430304E452000" 27 | } 28 | ], 29 | "Manufacturer": "Consorto", 30 | "Model": "Consorty RAID Controller XYZ", 31 | "SerialNumber": "PEYHC0XRH400NE", 32 | "PartNumber": "7334534", 33 | "SpeedGbps": 12, 34 | "FirmwareVersion": "1.00", 35 | "SupportedControllerProtocols": [ 36 | "PCIe" 37 | ], 38 | "SupportedDeviceProtocols": [ 39 | "SAS", 40 | "SATA" 41 | ] 42 | } 43 | ], 44 | "Drives": [ 45 | { 46 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 47 | }, 48 | { 49 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/2" 50 | }, 51 | { 52 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/3" 53 | }, 54 | { 55 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/4" 56 | }, 57 | { 58 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/5" 59 | }, 60 | { 61 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/6" 62 | }, 63 | { 64 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/7" 65 | }, 66 | { 67 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/8" 68 | }, 69 | { 70 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/9" 71 | }, 72 | { 73 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/10" 74 | }, 75 | { 76 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/11" 77 | }, 78 | { 79 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/12" 80 | }, 81 | { 82 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/13" 83 | }, 84 | { 85 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/14" 86 | }, 87 | { 88 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/15" 89 | }, 90 | { 91 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/16" 92 | }, 93 | { 94 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/17" 95 | }, 96 | { 97 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/18" 98 | }, 99 | { 100 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/19" 101 | }, 102 | { 103 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/20" 104 | }, 105 | { 106 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/21" 107 | }, 108 | { 109 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/22" 110 | }, 111 | { 112 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/23" 113 | }, 114 | { 115 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/24" 116 | }, 117 | { 118 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/25" 119 | }, 120 | { 121 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/26" 122 | }, 123 | { 124 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/27" 125 | }, 126 | { 127 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/28" 128 | }, 129 | { 130 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/29" 131 | }, 132 | { 133 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/30" 134 | }, 135 | { 136 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/31" 137 | }, 138 | { 139 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/32" 140 | }, 141 | { 142 | 
"@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/33" 143 | }, 144 | { 145 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/34" 146 | }, 147 | { 148 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/35" 149 | }, 150 | { 151 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/36" 152 | }, 153 | { 154 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/37" 155 | }, 156 | { 157 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/38" 158 | }, 159 | { 160 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/39" 161 | }, 162 | { 163 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/40" 164 | }, 165 | { 166 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/41" 167 | }, 168 | { 169 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/42" 170 | }, 171 | { 172 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/43" 173 | }, 174 | { 175 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/44" 176 | }, 177 | { 178 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/45" 179 | }, 180 | { 181 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/46" 182 | }, 183 | { 184 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/47" 185 | }, 186 | { 187 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/48" 188 | }, 189 | { 190 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/49" 191 | }, 192 | { 193 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/50" 194 | }, 195 | { 196 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/51" 197 | }, 198 | { 199 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/52" 200 | }, 201 | { 202 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/53" 203 | }, 204 | { 205 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/54" 206 | }, 207 | { 208 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/55" 209 | }, 210 | { 211 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/56" 212 | }, 213 | { 214 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/57" 215 | }, 216 | { 217 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/58" 218 | }, 219 | { 220 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/59" 221 | }, 222 | { 223 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/60" 224 | }, 225 | { 226 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/61" 227 | }, 228 | { 229 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/62" 230 | }, 231 | { 232 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/63" 233 | }, 234 | { 235 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/64" 236 | }, 237 | { 238 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/65" 239 | }, 240 | { 241 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/66" 242 | }, 243 | { 244 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/67" 245 | }, 246 | { 247 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/68" 248 | }, 249 | { 250 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/69" 251 | }, 252 | { 253 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/70" 254 | }, 255 | { 256 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/71" 257 | }, 258 | { 259 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/72" 260 | }, 261 | { 262 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/73" 263 | }, 264 | { 265 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/74" 266 | }, 267 | { 268 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/75" 269 | }, 270 | { 271 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/76" 272 | }, 273 | { 274 | "@odata.id": 
"/redfish/v1/Systems/1/Storage/1/Drives/1" 275 | }, 276 | { 277 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 278 | }, 279 | { 280 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 281 | }, 282 | { 283 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 284 | }, 285 | { 286 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 287 | }, 288 | { 289 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 290 | }, 291 | { 292 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 293 | }, 294 | { 295 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 296 | }, 297 | { 298 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 299 | }, 300 | { 301 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 302 | }, 303 | { 304 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 305 | }, 306 | { 307 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 308 | }, 309 | { 310 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 311 | }, 312 | { 313 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 314 | }, 315 | { 316 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 317 | }, 318 | { 319 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 320 | }, 321 | { 322 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 323 | }, 324 | { 325 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 326 | }, 327 | { 328 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 329 | }, 330 | { 331 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 332 | }, 333 | { 334 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 335 | }, 336 | { 337 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 338 | }, 339 | { 340 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 341 | }, 342 | { 343 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 344 | }, 345 | { 346 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 347 | }, 348 | { 349 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/2" 350 | }, 351 | { 352 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/3" 353 | }, 354 | { 355 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/4" 356 | }, 357 | { 358 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/5" 359 | }, 360 | { 361 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/6" 362 | }, 363 | { 364 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/7" 365 | }, 366 | { 367 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/8" 368 | }, 369 | { 370 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/9" 371 | }, 372 | { 373 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/10" 374 | }, 375 | { 376 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/11" 377 | }, 378 | { 379 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/12" 380 | }, 381 | { 382 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/13" 383 | }, 384 | { 385 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/14" 386 | }, 387 | { 388 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/15" 389 | }, 390 | { 391 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/16" 392 | }, 393 | { 394 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/17" 395 | }, 396 | { 397 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/18" 398 | }, 399 | { 400 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/19" 401 | }, 402 | { 403 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/20" 404 | }, 405 | { 406 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/21" 407 | }, 408 | { 409 | 
"@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/22" 410 | }, 411 | { 412 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/23" 413 | }, 414 | { 415 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/24" 416 | }, 417 | { 418 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/25" 419 | }, 420 | { 421 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/26" 422 | }, 423 | { 424 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/27" 425 | }, 426 | { 427 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/28" 428 | }, 429 | { 430 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/29" 431 | }, 432 | { 433 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/30" 434 | }, 435 | { 436 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/31" 437 | }, 438 | { 439 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/32" 440 | }, 441 | { 442 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/33" 443 | }, 444 | { 445 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/34" 446 | }, 447 | { 448 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/35" 449 | }, 450 | { 451 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/36" 452 | }, 453 | { 454 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/37" 455 | }, 456 | { 457 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/38" 458 | }, 459 | { 460 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/39" 461 | }, 462 | { 463 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/40" 464 | }, 465 | { 466 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/41" 467 | }, 468 | { 469 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/42" 470 | }, 471 | { 472 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/43" 473 | }, 474 | { 475 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/44" 476 | }, 477 | { 478 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/45" 479 | }, 480 | { 481 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/46" 482 | }, 483 | { 484 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/47" 485 | }, 486 | { 487 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/48" 488 | }, 489 | { 490 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/49" 491 | }, 492 | { 493 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/50" 494 | }, 495 | { 496 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/51" 497 | }, 498 | { 499 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/52" 500 | }, 501 | { 502 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/53" 503 | }, 504 | { 505 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/54" 506 | }, 507 | { 508 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/55" 509 | }, 510 | { 511 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/56" 512 | }, 513 | { 514 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/57" 515 | }, 516 | { 517 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/58" 518 | }, 519 | { 520 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/59" 521 | }, 522 | { 523 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/60" 524 | }, 525 | { 526 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/61" 527 | }, 528 | { 529 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/62" 530 | }, 531 | { 532 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/63" 533 | }, 534 | { 535 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/64" 536 | }, 537 | { 538 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/65" 539 | }, 540 | { 541 | "@odata.id": 
"/redfish/v1/Systems/1/Storage/1/Drives/66" 542 | }, 543 | { 544 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/67" 545 | }, 546 | { 547 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/68" 548 | }, 549 | { 550 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/69" 551 | }, 552 | { 553 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/70" 554 | }, 555 | { 556 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/71" 557 | }, 558 | { 559 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/72" 560 | }, 561 | { 562 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/73" 563 | }, 564 | { 565 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/74" 566 | }, 567 | { 568 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/75" 569 | }, 570 | { 571 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/76" 572 | }, 573 | { 574 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 575 | }, 576 | { 577 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 578 | }, 579 | { 580 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 581 | }, 582 | { 583 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 584 | }, 585 | { 586 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 587 | }, 588 | { 589 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 590 | }, 591 | { 592 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 593 | }, 594 | { 595 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 596 | }, 597 | { 598 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 599 | }, 600 | { 601 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 602 | }, 603 | { 604 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 605 | }, 606 | { 607 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 608 | }, 609 | { 610 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 611 | }, 612 | { 613 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 614 | }, 615 | { 616 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 617 | }, 618 | { 619 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 620 | }, 621 | { 622 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 623 | }, 624 | { 625 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 626 | }, 627 | { 628 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 629 | }, 630 | { 631 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 632 | }, 633 | { 634 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 635 | }, 636 | { 637 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 638 | }, 639 | { 640 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 641 | }, 642 | { 643 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 644 | } 645 | ], 646 | "Volumes": { 647 | "@odata.id": "/redfish/v1/volcollection" 648 | }, 649 | "Links": { 650 | "Enclosures": [ 651 | { 652 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1" 653 | } 654 | ] 655 | } 656 | } -------------------------------------------------------------------------------- /rdebej/encode.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright Notice: 3 | # Copyright 2018-2019 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/RDE-Dictionary/blob/master/LICENSE.md 5 | 6 | """ 7 | PLDM BEJ Encoder 8 | 9 | File : encode.py 10 | 11 | Brief : This file defines the API to encode a JSON file to PLDM Binary Encoded JSON (BEJ) 12 | """ 13 | 14 | import json 15 | import io 16 | import os 17 | import re 18 | import string 19 | from ._internal_utils import * 20 | from math import * 21 | 22 | 23 | NUM_BYTES_FOR_INTEGER = 8 24 | 25 | VALID_ASCII_PRINT_CHARS = string.ascii_letters + string.hexdigits + string.punctuation 26 | 27 | 28 | def print_hex(byte_buf, max_size=None, add_line_number=True, show_ascii=True): 29 | """ 30 | Prints a byte array as a hex dump 31 | 32 | Args: 33 | byte_buf: byte array to be printed as a hex dump 34 | max_size: Number of bytes to print, None indicates to print all bytes 35 | add_line_number: Set to True to show line numbers 36 | show_ascii: Set to True to print ASCII 37 | """ 38 | 39 | ascii_print = '' 40 | limit_size = True if max_size else False 41 | for ii, byte in enumerate(byte_buf): 42 | if limit_size and ii >= max_size: 43 | break 44 | mod = ii % 16 45 | next_mod = (ii + 1) % 16 46 | if add_line_number and mod == 0: 47 | print(format(ii, '#08X')+': ', end="") 48 | print(format(byte, '02X'), end=" ") 49 | byte_char = format(byte, 'c') 50 | if show_ascii: 51 | ascii_print += (byte_char if byte_char in VALID_ASCII_PRINT_CHARS else '.') 52 | 53 | if next_mod == 0: 54 | # Print the ascii line 55 | if show_ascii: 56 | print(ascii_print, end="") 57 | ascii_print = '' 58 | print('') 59 | 60 | # Add a newline to separate 61 | print('') 62 | 63 | 64 | def twos_complement(value, nbits): 65 | """ 66 | Computes 2's complement 67 | """ 68 | return (value + (1 << nbits)) % (1 << nbits) 69 | 70 | 71 | def find_num_bytes_and_msb(value): 72 | if value == 0: 73 | return 1, 0x00 74 | if value == -1: 75 | return 1, 0xff 76 | 77 | # use a big endian byte array (MSB is at index 0) as it is easier to eliminate the padding 78 | value_byte_array = twos_complement(value, 64).to_bytes(NUM_BYTES_FOR_INTEGER, 'big') 79 | for index, val in enumerate(value_byte_array): 80 | if (value > 0 and val != 0x00) or (value < 0 and val != 0xff): 81 | return NUM_BYTES_FOR_INTEGER - index, val 82 | 83 | 84 | def num_bytes_for_unsigned_integer(value): 85 | num_bytes = 1 if value == 0 else 0 86 | while value != 0: 87 | value >>= 8 88 | num_bytes = num_bytes + 1 89 | 90 | return num_bytes 91 | 92 |
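# Illustrative sanity checks for the integer helpers above (a sketch added for
# clarity; not part of the original module). The values follow directly from
# the definitions:
#
#   twos_complement(-1, 8)               -> 0xFF
#   twos_complement(-2, 16)              -> 0xFFFE
#   find_num_bytes_and_msb(255)          -> (1, 0xFF)  # one byte, MSB 0xFF
#   num_bytes_for_unsigned_integer(0)    -> 1
#   num_bytes_for_unsigned_integer(256)  -> 2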
93 | def bej_pack_nnint(stream, value, num_bytes): 94 | """ 95 | The nnint type captures the BEJ encoding of Non-Negative Integers via the following encoding: 96 | The first byte shall consist of metadata for the number of bytes needed to encode the numeric 97 | value in the remaining bytes. Subsequent bytes shall contain the encoded value in 98 | little-endian format. As examples, the value 65 shall be encoded as 0x01 0x41; the value 130 99 | shall be encoded as 0x01 0x82; and the value 1337 shall be encoded as 0x02 0x39 0x05. 100 | 101 | Args: 102 | stream: output stream to pack into 103 | value: non-negative integer to encode 104 | num_bytes: indicates number of bytes (length) to use to represent the value; if 0 is specified, the 105 | smallest size is used 106 | Return: -1 if error or no bytes written, >= 0 indicates number of bytes packed 107 | """ 108 | num_bytes_for_value = num_bytes_for_unsigned_integer(value) 109 | if num_bytes and (num_bytes < num_bytes_for_value): 110 | return -1 111 | 112 | if num_bytes: 113 | num_bytes_for_value = num_bytes 114 | 115 | num_bytes_packed = stream.write(num_bytes_for_value.to_bytes(1, 'little')) 116 | num_bytes_packed += stream.write(value.to_bytes(num_bytes_for_value, 'little')) 117 | 118 | return num_bytes_packed 119 | 120 | 121 | def bej_pack_sfl(stream, seq_num, format, length, format_flags): 122 | # pack seq num as nnint 123 | num_bytes = bej_pack_nnint(stream, seq_num, 0) 124 | 125 | # pack format 126 | format = (format << 4) | format_flags 127 | num_bytes += stream.write(format.to_bytes(1, 'little')) 128 | 129 | # pack length as nnint 130 | num_bytes += bej_pack_nnint(stream, length, 0) 131 | 132 | return num_bytes 133 | 134 | 135 | def bej_pack_sflv_string(stream, seq_num, str, format_flags): 136 | escape_sequences = [ 137 | ('\\', '\\\\'), 138 | ('"', '\\"'), 139 | ('/', '\\/'), 140 | ('\b', '\\b'), 141 | ('\f', '\\f'), 142 | ('\n', '\\n'), 143 | ('\r', '\\r') 144 | ] 145 | for old, new in escape_sequences: 146 | str = str.replace(old, new) 147 | 148 | num_bytes_packed = bej_pack_sfl(stream, seq_num, BEJ_FORMAT_STRING, len(str.encode()) + 1, format_flags) # length in bytes (not characters) plus null terminator 149 | 150 | # pack str 151 | null = 0 152 | num_bytes_packed += stream.write(str.encode()) 153 | num_bytes_packed += stream.write(null.to_bytes(1, 'little')) # null termination 154 | 155 | return num_bytes_packed 156 | 157 | 158 | def bej_decode_sequence_number(seq): 159 | """ 160 | Returns the sequence number and the dictionary selector 161 | """ 162 | return seq >> 1, seq & 0x01 163 | 164 | 165 | def bej_pack_sflv_boolean(stream, seq_num, val, format_flags): 166 | num_bytes_packed = bej_pack_sfl(stream, seq_num, BEJ_FORMAT_BOOLEAN, 1, format_flags) 167 | 168 | # pack val 169 | if val: 170 | num_bytes_packed += stream.write(0x01.to_bytes(1, 'little')) 171 | else: 172 | num_bytes_packed += stream.write(0x00.to_bytes(1, 'little')) 173 | 174 | return num_bytes_packed 175 | 176 |
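# A minimal usage sketch for the packers above (illustrative; not part of the
# original module). The nnint bytes match the examples in the bej_pack_nnint
# docstring:
#
#   s = io.BytesIO()
#   bej_pack_nnint(s, 65, 0)    # writes b'\x01\x41'
#   bej_pack_nnint(s, 1337, 0)  # writes b'\x02\x39\x05'
#   assert s.getvalue() == b'\x01\x41\x02\x39\x05'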
177 | def get_num_bytes_and_padding(value): 178 | num_bytes_for_value, msb = find_num_bytes_and_msb(value) 179 | # determine if padding is required to guarantee 2's complement 180 | is_padding_required = False 181 | 182 | # determine if we are using a fixed length of integer 183 | if fixed_integer_length != 0: 184 | # pad the value out to the fixed length when it is shorter than the fixed_integer_length specified via the -fi option 185 | if num_bytes_for_value <= fixed_integer_length: 186 | is_padding_required = num_bytes_for_value < fixed_integer_length 187 | else: 188 | assert False, 'Value length ' + str(num_bytes_for_value) + ' byte(s) is greater than the fixed integer length specified via -fi (' + str(fixed_integer_length) + ' byte(s))' 189 | 190 | else: 191 | if (value > 0 and (msb & 0x80)) or (value < 0 and not (msb & 0x80)): 192 | # add one more byte to the msb to guarantee highest MSb is zero (or 0xff for negative ints) 193 | is_padding_required = True 194 | 195 | return num_bytes_for_value, is_padding_required 196 | 197 | 198 | def bej_pack_v_integer(stream, value, num_bytes_for_value, is_padding_required): 199 | # pack the value 200 | num_bytes_packed = stream.write(twos_complement(value, 64).to_bytes(8, 'little')[:num_bytes_for_value]) 201 | # add padding if needed 202 | if is_padding_required: 203 | pad = 0 if value >= 0 else 0xff 204 | pad_length = 1 if fixed_integer_length == 0 else fixed_integer_length - num_bytes_for_value 205 | for _ in range(pad_length): 206 | num_bytes_packed += stream.write(pad.to_bytes(1, 'little')) 207 | 208 | return num_bytes_packed 209 | 210 | 211 | def bej_pack_sflv_integer(stream, seq_num, value, format_flags): 212 | num_bytes_for_value, is_padding_required = get_num_bytes_and_padding(value) 213 | 214 | num_bytes_packed = bej_pack_sfl(stream, seq_num, BEJ_FORMAT_INTEGER, 215 | fixed_integer_length if fixed_integer_length else 216 | num_bytes_for_value+1 if is_padding_required else num_bytes_for_value, 217 | format_flags) 218 | 219 | # pack the value 220 | num_bytes_packed += bej_pack_v_integer(stream, value, num_bytes_for_value, is_padding_required) 221 | 222 | return num_bytes_packed 223 | 224 | 225 | def split_whole_frac_leading_zeros(value, precision): 226 | # split into whole, frac (exponent not supported for now) 227 | value_parts = str(value).split('.') 228 | whole = int(value_parts[0]) 229 | frac = '' 230 | if len(value_parts) > 1: 231 | frac = value_parts[1] 232 | 233 | num_leading_zeros = 0 234 | while frac and frac[0] == '0': 235 | num_leading_zeros += 1 236 | frac = frac[1:] 237 | 238 | frac_val = 0 239 | if frac != '': 240 | frac_val = int(frac[0:precision]) 241 | 242 | return whole, frac_val, num_leading_zeros 243 | 244 |
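# How split_whole_frac_leading_zeros decomposes a value for bejReal packing
# (illustrative; not part of the original module):
#
#   split_whole_frac_leading_zeros(10.05, 16)  -> (10, 5, 1)   # whole, frac, leading zeros
#   split_whole_frac_leading_zeros(-2.5, 16)   -> (-2, 5, 0)
#
# Note that the fraction's sign rides on the whole part, so values in (-1, 0)
# lose their sign, a known limitation alongside the unsupported exponent.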
245 | # Packs a float as a SFLV 246 | # TODO: Does not support exponent 247 | def bej_pack_sflv_real(stream, seq_num, value, format_flags, precision=16): 248 | whole, frac, num_leading_zeros = split_whole_frac_leading_zeros(value, precision) 249 | 250 | num_bytes_for_whole, is_padding_required = get_num_bytes_and_padding(whole) 251 | num_bytes_to_pack_for_whole = num_bytes_for_whole+1 if is_padding_required else num_bytes_for_whole 252 | 253 | num_bytes_for_frac = num_bytes_for_unsigned_integer(frac) 254 | 255 | total_length = (2 + # length of whole (nnint) 256 | num_bytes_to_pack_for_whole + # whole (bejInteger) 257 | 1 + num_bytes_for_unsigned_integer(num_leading_zeros) + # leading zero count for frac (nnint) 258 | 1 + num_bytes_for_frac + # frac (nnint) 259 | 2) # length of exp (nnint) 260 | 261 | num_bytes_packed = bej_pack_sfl(stream, seq_num, BEJ_FORMAT_REAL, total_length, format_flags) 262 | 263 | # pack the value 264 | num_bytes_packed += bej_pack_nnint(stream, num_bytes_to_pack_for_whole, 0) 265 | num_bytes_packed += bej_pack_v_integer(stream, whole, num_bytes_for_whole, is_padding_required) 266 | num_bytes_packed += bej_pack_nnint(stream, num_leading_zeros, 0) 267 | num_bytes_packed += bej_pack_nnint(stream, frac, 0) 268 | num_bytes_packed += bej_pack_nnint(stream, 0, 0) # Length of exp == 0 269 | 270 | return num_bytes_packed 271 | 272 | 273 | def bej_pack_sflv_enum(stream, seq_num, value, format_flags): 274 | enum_value_size = num_bytes_for_unsigned_integer(value) + 1 # enum value size as nnint 275 | num_bytes_packed = bej_pack_sfl(stream, seq_num, BEJ_FORMAT_ENUM, enum_value_size, format_flags) 276 | num_bytes_packed += bej_pack_nnint(stream, value, 0) 277 | 278 | return num_bytes_packed 279 | 280 | 281 | def bej_pack_sflv_resource_link(stream, seq_num, pdr, format_flags): 282 | num_bytes_packed = bej_pack_sfl(stream, seq_num, BEJ_FORMAT_RESOURCE_LINK, num_bytes_for_unsigned_integer(pdr)+1, format_flags) 283 | num_bytes_packed += bej_pack_nnint(stream, pdr, 0) 284 | 285 | return num_bytes_packed 286 | 287 | 288 | # Globals for bej set - Warning! not thread safe 289 | bej_set_stream_stack = [] 290 | 291 | 292 | def bej_pack_set_start(stream, count): 293 | bej_set_stream_stack.append(stream) 294 | 295 | # construct a new stream to start adding set data and pack the count 296 | tmp_stream = io.BytesIO() 297 | bej_pack_nnint(tmp_stream, count, 0) 298 | 299 | return tmp_stream 300 | 301 | 302 | def bej_pack_set_done(stream, seq_num, format_flags=0): 303 | # pop the last stream from the stack and add the s, f and l. Length can now be determined from the current stream 304 | length = len(stream.getvalue()) 305 | prev_stream = bej_set_stream_stack.pop() 306 | num_bytes_packed = bej_pack_sfl(prev_stream, seq_num, BEJ_FORMAT_SET, length, format_flags) 307 | 308 | # append the current stream to the prev and return prev 309 | prev_stream.write(stream.getvalue()) 310 | 311 | return num_bytes_packed + len(stream.getvalue()) 312 | 313 | 314 | def bej_pack_array_start(stream, count): 315 | bej_set_stream_stack.append(stream) 316 | 317 | # construct a new stream to start adding array data and pack the count 318 | tmp_stream = io.BytesIO() 319 | bej_pack_nnint(tmp_stream, count, 0) 320 | 321 | return tmp_stream 322 | 323 | 324 | def bej_pack_array_done(stream, seq_num, format_flags): 325 | # pop the last stream from the stack and add the s, f and l. Length can now be determined from the current stream 326 | length = len(stream.getvalue()) 327 | prev_stream = bej_set_stream_stack.pop() 328 | num_bytes_packed = bej_pack_sfl(prev_stream, seq_num, BEJ_FORMAT_ARRAY, length, format_flags) 329 | 330 | # append the current stream to the prev and return prev 331 | prev_stream.write(stream.getvalue()) 332 | 333 | return num_bytes_packed + len(stream.getvalue()) 334 | 335 |
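# Sketch of the nested-stream pattern used by the set/array packers above
# (illustrative; not part of the original module). Children are packed into a
# temporary stream so the parent's byte length is known before its SFL header
# is written:
#
#   out = io.BytesIO()
#   inner = bej_pack_set_start(out, 1)        # packs the member count (nnint)
#   bej_pack_sflv_boolean(inner, 0, True, 0)  # pack children into the inner stream
#   bej_pack_set_done(inner, 0)               # writes SFL header + body into out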
336 | def bej_pack_property_annotation_start(stream): 337 | bej_set_stream_stack.append(stream) 338 | 339 | # construct a new stream to start adding annotation data 340 | tmp_stream = io.BytesIO() 341 | return tmp_stream 342 | 343 | 344 | def bej_pack_property_annotation_done(stream, prop_seq, format_flags=0): 345 | # pop the last stream from the stack and add the s, f and l. Length can now be determined from the current stream 346 | length = len(stream.getvalue()) 347 | prev_stream = bej_set_stream_stack.pop() 348 | num_bytes_packed = bej_pack_sfl(prev_stream, prop_seq, BEJ_FORMAT_PROPERTY_ANNOTATION, length, format_flags) 349 | 350 | # append the current stream to the prev and return prev 351 | prev_stream.write(stream.getvalue()) 352 | 353 | return num_bytes_packed + len(stream.getvalue()) 354 | 355 | 356 | current_available_pdr = 0 357 | 358 | 359 | def load_dictionary_subset_by_key_name(schema_dict, offset, child_count): 360 | schema_dict_stream = DictionaryByteArrayStream(schema_dict, offset, child_count) 361 | 362 | entry_dict = {} 363 | while schema_dict_stream.has_entry(): 364 | entry = schema_dict_stream.get_next_entry() 365 | entry_dict[entry[DICTIONARY_ENTRY_NAME]] = entry 366 | 367 | return entry_dict 368 | 369 | 370 | def is_payload_annotation(property): 371 | if '@' in property: 372 | return True 373 | return False 374 | 375 | 376 | def get_annotation_parts(property): 377 | """ 378 | Returns the schema property name (if present) and the annotation property name 379 | 380 | Returns: schema property name, annotation property name 381 | """ 382 | m = re.compile(r'(.*)(@.*\..*)').match(property) 383 | 384 | return m.group(1), m.group(2) 385 | 386 | 387 | def get_annotation_name(annotation_property): 388 | m = re.compile(r'.*@.*\.(.*)').match(annotation_property) 389 | return m.group(1) 390 | 391 |
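# Behavior of the annotation helpers above (illustrative; not part of the
# original module):
#
#   get_annotation_parts('Status@Message.ExtendedInfo')  -> ('Status', '@Message.ExtendedInfo')
#   get_annotation_parts('@Redfish.Settings')            -> ('', '@Redfish.Settings')
#   get_annotation_name('@Message.ExtendedInfo')         -> 'ExtendedInfo'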
392 | odata_dictionary_entries = {} 393 | 394 | 395 | def get_annotation_dictionary_entries(annot_dict): 396 | # TODO: cache the main annotations 397 | base_entry = DictionaryByteArrayStream(annot_dict, 0, -1).get_next_entry() 398 | return load_dictionary_subset_by_key_name(annot_dict, base_entry[DICTIONARY_ENTRY_OFFSET], 399 | base_entry[DICTIONARY_ENTRY_CHILD_COUNT]) 400 | 401 | 402 | def bej_encode_enum(output_stream, dict_to_use, dict_entry, sequence_number_with_dictionary_selector, enum_value, format_flags): 403 | # get the sequence number for the enum value from the dictionary 404 | enum_dict_stream = DictionaryByteArrayStream(dict_to_use, dict_entry[DICTIONARY_ENTRY_OFFSET], 405 | dict_entry[DICTIONARY_ENTRY_CHILD_COUNT]) 406 | value = None 407 | while enum_dict_stream.has_entry(): 408 | enum_entry = enum_dict_stream.get_next_entry() 409 | 410 | if enum_entry[DICTIONARY_ENTRY_NAME] == enum_value: 411 | value = enum_entry[DICTIONARY_ENTRY_SEQUENCE_NUMBER] 412 | break 413 | 414 | bej_pack_sflv_enum(output_stream, sequence_number_with_dictionary_selector, value, format_flags) 415 | 416 | def is_dict_entry_nullable(dict_entry): 417 | """ 418 | Return True if the dictionary entry is nullable, False otherwise 419 | """ 420 | if dict_entry[DICTIONARY_ENTRY_FLAGS] & 0x4: 421 | return True 422 | return False 423 | 424 | 425 | def bej_encode_sflv(output_stream, schema_dict, annot_dict, dict_to_use, dict_entry, seq, format, json_value, 426 | pdr_map, format_flags, verbose, is_strict, preserve_odata_id_strings): 427 | success = True 428 | if is_dict_entry_nullable(dict_entry) and json_value is None: 429 | bej_pack_sfl(output_stream, seq, BEJ_FORMAT_NULL, 0, format_flags) 430 | 431 | elif format == BEJ_FORMAT_STRING and isinstance(json_value, str): 432 | bej_pack_sflv_string(output_stream, seq, json_value, format_flags) 433 | 434 | elif format == BEJ_FORMAT_INTEGER and isinstance(json_value, int): 435 | bej_pack_sflv_integer(output_stream, seq, json_value, format_flags) 436 | 437 | elif format == BEJ_FORMAT_REAL and isinstance(json_value, (float, int)): 438 | bej_pack_sflv_real(output_stream, seq, json_value, format_flags) 439 | 440 | elif format == BEJ_FORMAT_BOOLEAN and isinstance(json_value, bool): 441 | bej_pack_sflv_boolean(output_stream, seq, json_value, format_flags) 442 | 443 | elif format == BEJ_FORMAT_ENUM and isinstance(json_value, str): 444 | bej_encode_enum(output_stream, dict_to_use, dict_entry, seq, json_value, format_flags) 445 | 446 | elif format == BEJ_FORMAT_RESOURCE_LINK and isinstance(json_value, str): 447 | global current_available_pdr 448 | # add an entry to the PDR 449 | if json_value not in pdr_map: 450 | if is_strict: 451 | return False 452 | pdr_map[json_value] = current_available_pdr 453 | current_available_pdr += 1 454 | new_pdr_num = pdr_map[json_value] 455 | bej_pack_sflv_resource_link(output_stream, seq, new_pdr_num, format_flags) 456 | 457 | elif format == BEJ_FORMAT_SET: 458 | nested_set_stream = bej_pack_set_start(output_stream, len(json_value)) 459 | success = bej_encode_stream(nested_set_stream, json_value, schema_dict, 460 | annot_dict, dict_to_use, pdr_map, dict_entry[DICTIONARY_ENTRY_OFFSET], 461 | dict_entry[DICTIONARY_ENTRY_CHILD_COUNT], verbose, is_strict, preserve_odata_id_strings) 462 | bej_pack_set_done(nested_set_stream, seq, format_flags) 463 | 464 | elif format == BEJ_FORMAT_ARRAY: 465 | count = len(json_value) 466 | array_dict_stream = DictionaryByteArrayStream(dict_to_use, dict_entry[DICTIONARY_ENTRY_OFFSET], 467 | dict_entry[DICTIONARY_ENTRY_CHILD_COUNT]) 468 | array_dict_entry = array_dict_stream.get_next_entry() 469 | 470 | nested_stream = bej_pack_array_start(output_stream, count) 471 | tmp_seq, selector = bej_decode_sequence_number(seq) 472 | for i in range(count): 473 | success = bej_encode_sflv(nested_stream, schema_dict, annot_dict, dict_to_use, array_dict_entry, 474 | (i << 1) | selector, array_dict_entry[DICTIONARY_ENTRY_FORMAT], 475 | json_value[i], pdr_map, 0, verbose, is_strict, preserve_odata_id_strings) 476 | if not success: 477 | break 478 | 479 | bej_pack_array_done(nested_stream, seq, format_flags) 480 | 481 | else: 482 | if verbose: 483 | print('Failed to encode value:', json_value) 484 | success = False 485 | 486 | return success 487 | 488 |
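# For reference (added for clarity; not part of the original module): every
# branch in bej_encode_sflv above emits the same SFLV wire layout via
# bej_pack_sfl:
#
#   [sequence number (nnint)] [format byte = (bejFormat << 4) | flags] [length (nnint)] [value bytes]
#
# where the low bit of the sequence number selects the major-schema vs.
# annotation dictionary (see bej_decode_sequence_number).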
489 | def bej_encode_stream(output_stream, json_data, schema_dict, annot_dict, dict_to_use, pdr_map, offset=0, 490 | child_count=-1, verbose=False, is_strict=False, preserve_odata_id_strings=False): 491 | global current_available_pdr 492 | dict_entries = load_dictionary_subset_by_key_name(dict_to_use, offset, child_count) 493 | success = True 494 | 495 | for prop in json_data: 496 | if prop in dict_entries or is_payload_annotation(prop): 497 | tmp_dict_to_use = dict_to_use 498 | entry = [] 499 | format_flags = 0 500 | 501 | 502 | if is_payload_annotation(prop): 503 | # two kinds - property annotation (e.g. Status@Message.ExtendedInfo) or payload annotation 504 | schema_property, annotation_property = get_annotation_parts(prop) 505 | entry = get_annotation_dictionary_entries(annot_dict)[annotation_property] 506 | dictionary_selector_bit_value = BEJ_DICTIONARY_SELECTOR_ANNOTATION 507 | tmp_dict_to_use = annot_dict 508 | if dict_to_use == annot_dict: 509 | format_flags |= BEJ_FLAG_NESTED_TOP_LEVEL_ANNOTATION 510 | 511 | if schema_property != '': # this is a property annotation (e.g. Status@Message.ExtendedInfo) 512 | prop_format = BEJ_FORMAT_PROPERTY_ANNOTATION 513 | else: 514 | prop_format = entry[DICTIONARY_ENTRY_FORMAT] 515 | 516 | else: 517 | entry = dict_entries[prop] 518 | dictionary_selector_bit_value = BEJ_DICTIONARY_SELECTOR_MAJOR_SCHEMA \ 519 | if dict_to_use == schema_dict else BEJ_DICTIONARY_SELECTOR_ANNOTATION 520 | prop_format = entry[DICTIONARY_ENTRY_FORMAT] 521 | 522 | sequence_number_with_dictionary_selector = (entry[DICTIONARY_ENTRY_SEQUENCE_NUMBER] << 1) \ 523 | | dictionary_selector_bit_value 524 | 525 | if prop_format == BEJ_FORMAT_PROPERTY_ANNOTATION: 526 | # Seq(Prop_name) 527 | # Format(bejPropertyAnnotation) 528 | # Length 529 | # Seq(Annotation_name) 530 | # Format(format of annotation value) 531 | # Length 532 | # Value(value: can be a complex type) 533 | # e.g. Status@Message.ExtendedInfo 534 | schema_property, annotation_property = get_annotation_parts(prop) 535 | prop_seq = (dict_entries[schema_property][DICTIONARY_ENTRY_SEQUENCE_NUMBER] << 1) \ 536 | | BEJ_DICTIONARY_SELECTOR_MAJOR_SCHEMA 537 | 538 | nested_stream = bej_pack_property_annotation_start(output_stream) 539 | 540 | success = bej_encode_sflv(nested_stream, schema_dict, annot_dict, tmp_dict_to_use, entry, 541 | sequence_number_with_dictionary_selector, entry[DICTIONARY_ENTRY_FORMAT], 542 | json_data[prop], pdr_map, format_flags, verbose, is_strict, preserve_odata_id_strings) 543 | 544 | bej_pack_property_annotation_done(nested_stream, prop_seq) 545 | else: 546 | json_value = json_data[prop] 547 | # Special handling for '@odata.id' deferred binding string 548 | if prop == '@odata.id' and prop_format == BEJ_FORMAT_STRING and not preserve_odata_id_strings: 549 | if is_strict: 550 | prop_format = BEJ_FORMAT_RESOURCE_LINK 551 | else: 552 | global current_available_pdr 553 | # Add an entry to the PDR map 554 | # Special case frags by only including the string preceding the '#' into 555 | # the PDR map 556 | res_link_parts = json_value.split('#') 557 | if res_link_parts[0] not in pdr_map: 558 | pdr_map[res_link_parts[0]] = current_available_pdr 559 | current_available_pdr += 1 560 | new_pdr_num = pdr_map[res_link_parts[0]] 561 | json_value = '%L' + str(new_pdr_num) 562 | if len(res_link_parts) > 1: # add the frag portion to the deferred binding string if any 563 | json_value += '#' + res_link_parts[1] 564 | format_flags |= BEJ_FLAG_DEFERRED # deferred binding flag 565 | 566 | success = bej_encode_sflv(output_stream, schema_dict, annot_dict, tmp_dict_to_use, entry, 567 | sequence_number_with_dictionary_selector, prop_format, json_value, pdr_map, 568 | format_flags, verbose, is_strict, preserve_odata_id_strings) 569 | else: 570 | if verbose: 571 | print('Property cannot be encoded - missing dictionary entry', prop) 572 | success = False 573 | 574 | if not success: 575 | break 576 | 577 | return success 578 | 579 |
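# Illustration of the '@odata.id' deferred-binding rewrite in bej_encode_stream
# above (added for clarity; not part of the original module). In non-strict
# mode each new base URI is assigned the next free PDR number and the encoded
# string becomes '%L<pdr>' plus any '#' fragment, with BEJ_FLAG_DEFERRED set:
#
#   '/redfish/v1/Systems/1/Storage/1'            -> '%L0'
#   '/redfish/v1/Systems/1/Storage/1#/Drives/0'  -> '%L0#/Drives/0'  (same base URI reuses PDR 0)
#   '/redfish/v1/Chassis/1'                      -> '%L1'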
580 | def bej_action_encode(output_stream, json_data, schema_dict, annot_dict, action_name, verbose=False, 581 | resource_link_to_pdr_map=None, version=None, preserve_odata_id_strings=False): 582 | """ 583 | BEJ encode Action request payload JSON data into an output stream 584 | 585 | Args: 586 | output_stream: Stream to dump BEJ data into 587 | json_data: JSON data as a Python dict 588 | schema_dict: The RDE schema dictionary to use to encode the BEJ 589 | annot_dict: The RDE annotation dictionary to use to encode the BEJ 590 | action_name: The field string (name) of the particular Action being requested 591 | resource_link_to_pdr_map: Map of uri to resource id 592 | version: BEJ version to use in the payload 593 | 594 | Return: 595 | Returns a tuple (success, pdr_map); success is True if encoding succeeded, False otherwise. 596 | """ 597 | bej_version = 0xF1F0F000 598 | pdr_map = {} 599 | is_strict = False 600 | if version: 601 | bej_version = version 602 | if resource_link_to_pdr_map: 603 | pdr_map = resource_link_to_pdr_map 604 | is_strict = True 605 | 606 | # Skip ahead to Action subset in dictionary 607 | dict_stream = DictionaryByteArrayStream(schema_dict) 608 | resource_entry = dict_stream.get_next_entry() 609 | resource_prop_entries = load_dictionary_subset_by_key_name(schema_dict, resource_entry[DICTIONARY_ENTRY_OFFSET], 610 | resource_entry[DICTIONARY_ENTRY_CHILD_COUNT]) 611 | actions_entry = resource_prop_entries['Actions'] 612 | actions_subset_entries = load_dictionary_subset_by_key_name(schema_dict, actions_entry[DICTIONARY_ENTRY_OFFSET], 613 | actions_entry[DICTIONARY_ENTRY_CHILD_COUNT]) 614 | requested_action_entry = actions_subset_entries[action_name] 615 | 616 | # Add header info 617 | output_stream.write(bej_version.to_bytes(4, 'little')) # BEJ Version 618 | output_stream.write(0x0000.to_bytes(2, 'little')) # BEJ flags 619 | output_stream.write(0x00.to_bytes(1, 'little')) # schemaClass - MAJOR only for now 620 | 621 | # Encode the bejTuple 622 | new_stream = bej_pack_set_start(output_stream, len(json_data)) 623 | success = bej_encode_stream(new_stream, json_data, schema_dict, annot_dict, schema_dict, pdr_map, requested_action_entry[DICTIONARY_ENTRY_OFFSET], 624 | requested_action_entry[DICTIONARY_ENTRY_CHILD_COUNT], verbose, is_strict, preserve_odata_id_strings) 625 | if success: 626 | bej_pack_set_done(new_stream, 0) 627 | return success, pdr_map 628 | 629 | 630 | def bej_encode(output_stream, json_data, schema_dict, annot_dict, verbose=False, resource_link_to_pdr_map=None, 631 | version=None, preserve_odata_id_strings=False, fixed_int_len=0): 632 | """ 633 | BEJ encode JSON data into an output stream 634 | 635 | Args: 636 | output_stream: Stream to dump BEJ data into 637 | json_data: JSON data as a Python dict 638 | schema_dict: The RDE schema dictionary to use to encode the BEJ 639 | annot_dict: The RDE annotation dictionary to use to encode the BEJ 640 | resource_link_to_pdr_map: Map of uri to resource id 641 | version: BEJ version to use in the payload 642 | 643 | Return: 644 | Returns a tuple (success, pdr_map); success is True if encoding succeeded, False otherwise.
645 | """ 646 | 647 | global fixed_integer_length 648 | fixed_integer_length = fixed_int_len 649 | bej_version = 0xF1F0F000 650 | pdr_map = {} 651 | is_strict = False 652 | if version: 653 | bej_version = version 654 | if resource_link_to_pdr_map: 655 | pdr_map = resource_link_to_pdr_map 656 | is_strict = True 657 | # Add header info 658 | output_stream.write(bej_version.to_bytes(4, 'little')) # BEJ Version 659 | output_stream.write(0x0000.to_bytes(2, 'little')) # BEJ flags 660 | output_stream.write(0x00.to_bytes(1, 'little')) # schemaClass - MAJOR only for now 661 | 662 | # Encode the bejTuple 663 | new_stream = bej_pack_set_start(output_stream, len(json_data)) 664 | dict_stream = DictionaryByteArrayStream(schema_dict) 665 | entry = dict_stream.get_next_entry() 666 | success = bej_encode_stream(new_stream, json_data, schema_dict, annot_dict, schema_dict, pdr_map, entry[DICTIONARY_ENTRY_OFFSET], 667 | entry[DICTIONARY_ENTRY_CHILD_COUNT], verbose, is_strict, preserve_odata_id_strings) 668 | if success: 669 | bej_pack_set_done(new_stream, 0) 670 | return success, pdr_map 671 | 672 | 673 | def print_encode_summary(json_to_encode, encoded_bytes): 674 | total_json_size = len(json.dumps(json_to_encode, separators=(',', ':'))) 675 | print_hex(encoded_bytes) 676 | print('JSON size:', total_json_size) 677 | print('Total encode size:', len(encoded_bytes)) 678 | print('Compression ratio(%):', (1.0 - len(encoded_bytes) / total_json_size) * 100) 679 | -------------------------------------------------------------------------------- /test/features/encode_decode.feature: -------------------------------------------------------------------------------- 1 | @fixture.schema_source 2 | Feature: The dictionary can be used to encode/decode BEJ 3 | 4 | Scenario: Encoding JSON into BEJ using dictionaries 5 | Given a CSDL schema file Storage_v1.xml and entity Storage.Storage 6 | When the dictionary is generated with Copyright set to Copyright (c) 2018 DMTF 7 | Then the following JSON is encoded using the dictionary successfully 8 | """ 9 | { 10 | "@odata.type": "#Storage.v1_3_0.Storage", 11 | "@odata.context": "/redfish/v1/$metadata#Storage.Storage", 12 | "@odata.id": "/redfish/v1/Systems/1/Storage/1", 13 | "Id": "RAID Controller 1", 14 | "Name": "RAID Controller", 15 | "Description": "RAID Controller", 16 | "Status": { 17 | "State": "Enabled", 18 | "Health": "OK", 19 | "HealthRollup": "OK" 20 | }, 21 | "StorageControllers": [ 22 | { 23 | "@odata.id": "/redfish/v1/Systems/1/Storage/1#/StorageControllers/0", 24 | "@odata.type": "#Storage.v1_3_0.StorageController", 25 | "MemberId": "0", 26 | "Name": "SAS RAID Controller", 27 | "Status": { 28 | "State": "Enabled", 29 | "Health": "OK" 30 | }, 31 | "Identifiers": [ 32 | { 33 | "DurableNameFormat": "NAA", 34 | "DurableName": "5045594843305852483430304E452000" 35 | } 36 | ], 37 | "Manufacturer": "Consorto", 38 | "Model": "Consorty RAID Controller XYZ", 39 | "SerialNumber": "PEYHC0XRH400NE", 40 | "PartNumber": "7334534", 41 | "SpeedGbps": 12.0, 42 | "FirmwareVersion": "1.00", 43 | "SupportedControllerProtocols": [ 44 | "PCIe" 45 | ], 46 | "SupportedDeviceProtocols": [ 47 | "SAS", 48 | "SATA" 49 | ] 50 | } 51 | ], 52 | "Drives": [ 53 | { 54 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.1" 55 | }, 56 | { 57 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.2" 58 | }, 59 | { 60 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.3" 61 | }, 62 | { 63 | "@odata.id": 
"/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.4" 64 | }, 65 | { 66 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.5" 67 | }, 68 | { 69 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1/Drives/Disk.Bay.6" 70 | } 71 | ], 72 | "Volumes": { 73 | "@odata.id": "/redfish/v1/volcollection" 74 | }, 75 | "Links": { 76 | "Enclosures": [ 77 | { 78 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1" 79 | } 80 | ] 81 | } 82 | } 83 | """ 84 | And the BEJ can be successfully decoded back to JSON 85 | 86 | 87 | Scenario: Encoding large JSON into BEJ using dictionaries 88 | Given a CSDL schema file Storage_v1.xml and entity Storage.Storage 89 | When the dictionary is generated with Copyright set to Copyright (c) 2018 DMTF 90 | Then the following JSON is encoded using the dictionary successfully 91 | """ 92 | { 93 | "@odata.type": "#Storage.v1_3_0.Storage", 94 | "@odata.context": "/redfish/v1/$metadata#Storage.Storage", 95 | "@odata.id": "/redfish/v1/Systems/1/Storage/1", 96 | "Id": "RAID Controller 1", 97 | "Name": "RAID Controller", 98 | "Description": "RAID Controller", 99 | "Status": { 100 | "State": "Enabled", 101 | "Health": "OK", 102 | "HealthRollup": "OK" 103 | }, 104 | "StorageControllers": [ 105 | { 106 | "@odata.id": "/redfish/v1/Systems/1/Storage/1#/StorageControllers/0", 107 | "@odata.type": "#Storage.v1_3_0.StorageController", 108 | "MemberId": "0", 109 | "Name": "SAS RAID Controller", 110 | "Status": { 111 | "State": "Enabled", 112 | "Health": "OK" 113 | }, 114 | "Identifiers": [ 115 | { 116 | "DurableNameFormat": "NAA", 117 | "DurableName": "5045594843305852483430304E452000" 118 | } 119 | ], 120 | "Manufacturer": "Consorto", 121 | "Model": "Consorty RAID Controller XYZ", 122 | "SerialNumber": "PEYHC0XRH400NE", 123 | "PartNumber": "7334534", 124 | "SpeedGbps": 12, 125 | "FirmwareVersion": "1.00", 126 | "SupportedControllerProtocols": [ 127 | "PCIe" 128 | ], 129 | "SupportedDeviceProtocols": [ 130 | "SAS", 131 | "SATA" 132 | ] 133 | } 134 | ], 135 | "Drives": [ 136 | { 137 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 138 | }, 139 | { 140 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/2" 141 | }, 142 | { 143 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/3" 144 | }, 145 | { 146 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/4" 147 | }, 148 | { 149 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/5" 150 | }, 151 | { 152 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/6" 153 | }, 154 | { 155 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/7" 156 | }, 157 | { 158 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/8" 159 | }, 160 | { 161 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/9" 162 | }, 163 | { 164 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/10" 165 | }, 166 | { 167 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/11" 168 | }, 169 | { 170 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/12" 171 | }, 172 | { 173 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/13" 174 | }, 175 | { 176 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/14" 177 | }, 178 | { 179 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/15" 180 | }, 181 | { 182 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/16" 183 | }, 184 | { 185 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/17" 186 | }, 187 | { 188 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/18" 189 | }, 190 | { 191 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/19" 192 | }, 193 | { 194 | 
"@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/20" 195 | }, 196 | { 197 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/21" 198 | }, 199 | { 200 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/22" 201 | }, 202 | { 203 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/23" 204 | }, 205 | { 206 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/24" 207 | }, 208 | { 209 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/25" 210 | }, 211 | { 212 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/26" 213 | }, 214 | { 215 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/27" 216 | }, 217 | { 218 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/28" 219 | }, 220 | { 221 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/29" 222 | }, 223 | { 224 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/30" 225 | }, 226 | { 227 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/31" 228 | }, 229 | { 230 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/32" 231 | }, 232 | { 233 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/33" 234 | }, 235 | { 236 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/34" 237 | }, 238 | { 239 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/35" 240 | }, 241 | { 242 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/36" 243 | }, 244 | { 245 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/37" 246 | }, 247 | { 248 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/38" 249 | }, 250 | { 251 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/39" 252 | }, 253 | { 254 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/40" 255 | }, 256 | { 257 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/41" 258 | }, 259 | { 260 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/42" 261 | }, 262 | { 263 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/43" 264 | }, 265 | { 266 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/44" 267 | }, 268 | { 269 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/45" 270 | }, 271 | { 272 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/46" 273 | }, 274 | { 275 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/47" 276 | }, 277 | { 278 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/48" 279 | }, 280 | { 281 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/49" 282 | }, 283 | { 284 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/50" 285 | }, 286 | { 287 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/51" 288 | }, 289 | { 290 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/52" 291 | }, 292 | { 293 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/53" 294 | }, 295 | { 296 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/54" 297 | }, 298 | { 299 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/55" 300 | }, 301 | { 302 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/56" 303 | }, 304 | { 305 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/57" 306 | }, 307 | { 308 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/58" 309 | }, 310 | { 311 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/59" 312 | }, 313 | { 314 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/60" 315 | }, 316 | { 317 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/61" 318 | }, 319 | { 320 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/62" 321 | }, 322 | { 323 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/63" 324 | }, 325 | { 326 | "@odata.id": 
"/redfish/v1/Systems/1/Storage/1/Drives/64" 327 | }, 328 | { 329 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/65" 330 | }, 331 | { 332 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/66" 333 | }, 334 | { 335 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/67" 336 | }, 337 | { 338 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/68" 339 | }, 340 | { 341 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/69" 342 | }, 343 | { 344 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/70" 345 | }, 346 | { 347 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/71" 348 | }, 349 | { 350 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/72" 351 | }, 352 | { 353 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/73" 354 | }, 355 | { 356 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/74" 357 | }, 358 | { 359 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/75" 360 | }, 361 | { 362 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/76" 363 | }, 364 | { 365 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 366 | }, 367 | { 368 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 369 | }, 370 | { 371 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 372 | }, 373 | { 374 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 375 | }, 376 | { 377 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 378 | }, 379 | { 380 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 381 | }, 382 | { 383 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 384 | }, 385 | { 386 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 387 | }, 388 | { 389 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 390 | }, 391 | { 392 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 393 | }, 394 | { 395 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 396 | }, 397 | { 398 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 399 | }, 400 | { 401 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 402 | }, 403 | { 404 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 405 | }, 406 | { 407 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 408 | }, 409 | { 410 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 411 | }, 412 | { 413 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 414 | }, 415 | { 416 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 417 | }, 418 | { 419 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 420 | }, 421 | { 422 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 423 | }, 424 | { 425 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 426 | }, 427 | { 428 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 429 | }, 430 | { 431 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 432 | }, 433 | { 434 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 435 | }, 436 | { 437 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 438 | }, 439 | { 440 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/2" 441 | }, 442 | { 443 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/3" 444 | }, 445 | { 446 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/4" 447 | }, 448 | { 449 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/5" 450 | }, 451 | { 452 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/6" 453 | }, 454 | { 455 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/7" 456 | }, 457 | { 458 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/8" 459 | }, 460 | { 461 | 
"@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/9" 462 | }, 463 | { 464 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/10" 465 | }, 466 | { 467 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/11" 468 | }, 469 | { 470 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/12" 471 | }, 472 | { 473 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/13" 474 | }, 475 | { 476 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/14" 477 | }, 478 | { 479 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/15" 480 | }, 481 | { 482 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/16" 483 | }, 484 | { 485 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/17" 486 | }, 487 | { 488 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/18" 489 | }, 490 | { 491 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/19" 492 | }, 493 | { 494 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/20" 495 | }, 496 | { 497 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/21" 498 | }, 499 | { 500 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/22" 501 | }, 502 | { 503 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/23" 504 | }, 505 | { 506 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/24" 507 | }, 508 | { 509 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/25" 510 | }, 511 | { 512 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/26" 513 | }, 514 | { 515 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/27" 516 | }, 517 | { 518 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/28" 519 | }, 520 | { 521 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/29" 522 | }, 523 | { 524 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/30" 525 | }, 526 | { 527 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/31" 528 | }, 529 | { 530 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/32" 531 | }, 532 | { 533 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/33" 534 | }, 535 | { 536 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/34" 537 | }, 538 | { 539 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/35" 540 | }, 541 | { 542 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/36" 543 | }, 544 | { 545 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/37" 546 | }, 547 | { 548 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/38" 549 | }, 550 | { 551 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/39" 552 | }, 553 | { 554 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/40" 555 | }, 556 | { 557 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/41" 558 | }, 559 | { 560 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/42" 561 | }, 562 | { 563 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/43" 564 | }, 565 | { 566 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/44" 567 | }, 568 | { 569 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/45" 570 | }, 571 | { 572 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/46" 573 | }, 574 | { 575 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/47" 576 | }, 577 | { 578 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/48" 579 | }, 580 | { 581 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/49" 582 | }, 583 | { 584 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/50" 585 | }, 586 | { 587 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/51" 588 | }, 589 | { 590 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/52" 591 | }, 592 | { 593 | "@odata.id": 
"/redfish/v1/Systems/1/Storage/1/Drives/53" 594 | }, 595 | { 596 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/54" 597 | }, 598 | { 599 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/55" 600 | }, 601 | { 602 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/56" 603 | }, 604 | { 605 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/57" 606 | }, 607 | { 608 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/58" 609 | }, 610 | { 611 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/59" 612 | }, 613 | { 614 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/60" 615 | }, 616 | { 617 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/61" 618 | }, 619 | { 620 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/62" 621 | }, 622 | { 623 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/63" 624 | }, 625 | { 626 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/64" 627 | }, 628 | { 629 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/65" 630 | }, 631 | { 632 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/66" 633 | }, 634 | { 635 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/67" 636 | }, 637 | { 638 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/68" 639 | }, 640 | { 641 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/69" 642 | }, 643 | { 644 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/70" 645 | }, 646 | { 647 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/71" 648 | }, 649 | { 650 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/72" 651 | }, 652 | { 653 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/73" 654 | }, 655 | { 656 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/74" 657 | }, 658 | { 659 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/75" 660 | }, 661 | { 662 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/76" 663 | }, 664 | { 665 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 666 | }, 667 | { 668 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 669 | }, 670 | { 671 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 672 | }, 673 | { 674 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 675 | }, 676 | { 677 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 678 | }, 679 | { 680 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 681 | }, 682 | { 683 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 684 | }, 685 | { 686 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 687 | }, 688 | { 689 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 690 | }, 691 | { 692 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 693 | }, 694 | { 695 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 696 | }, 697 | { 698 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 699 | }, 700 | { 701 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 702 | }, 703 | { 704 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 705 | }, 706 | { 707 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 708 | }, 709 | { 710 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 711 | }, 712 | { 713 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 714 | }, 715 | { 716 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 717 | }, 718 | { 719 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 720 | }, 721 | { 722 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 723 | }, 724 | { 725 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 726 | }, 
727 | { 728 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 729 | }, 730 | { 731 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 732 | }, 733 | { 734 | "@odata.id": "/redfish/v1/Systems/1/Storage/1/Drives/1" 735 | } 736 | ], 737 | "Volumes": { 738 | "@odata.id": "/redfish/v1/volcollection" 739 | }, 740 | "Links": { 741 | "Enclosures": [ 742 | { 743 | "@odata.id": "/redfish/v1/Chassis/StorageEnclosure1" 744 | } 745 | ] 746 | } 747 | } 748 | """ 749 | And the BEJ can be successfully decoded back to JSON 750 | --------------------------------------------------------------------------------
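Taken together, a minimal driver for the encoder exercised by the scenarios above might look like the following sketch. It is illustrative only: the dictionary file names are hypothetical placeholders for binary RDE dictionaries produced by rde_schema_dictionary_gen.py, and it assumes DictionaryByteArrayStream accepts a plain bytes buffer.

#!/usr/bin/python3
# Illustrative driver (a sketch; not part of the repository).
import io
import json

from rdebej import encode

# Hypothetical dictionary files generated by rde_schema_dictionary_gen.py
with open('Storage.bin', 'rb') as f:
    schema_dict = f.read()
with open('annotation.bin', 'rb') as f:
    annot_dict = f.read()

with open('test/storage.json') as f:
    json_data = json.load(f)

output = io.BytesIO()
success, pdr_map = encode.bej_encode(output, json_data, schema_dict, annot_dict)
if success:
    encode.print_encode_summary(json_data, output.getvalue())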