├── redfish_service_validator ├── __init__.py ├── schema_pack.py ├── config.py ├── logger.py ├── helper.py ├── RedfishServiceValidator.py ├── traverse.py ├── tohtml.py ├── RedfishServiceValidatorGui.py ├── validateResource.py ├── RedfishLogo.py └── metadata.py ├── requirements.txt ├── redfish.ico ├── .gitignore ├── AUTHORS.md ├── test_conf.json ├── RedfishServiceValidatorGui.py ├── config └── example.ini ├── tests ├── testdata │ └── payloads │ │ ├── simple.json │ │ ├── simple_bad.json │ │ └── system-bad-additional.json └── test_catalog.py ├── RedfishServiceValidator.py ├── RedfishServiceValidator.exe.spec ├── setup.py ├── LICENSE.md ├── CONTRIBUTING.md ├── .github └── workflows │ └── release.yml ├── README.md └── CHANGELOG.md /redfish_service_validator/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | redfish>=3.1.5 2 | requests 3 | beautifulsoup4>=4.6.0 4 | lxml 5 | -------------------------------------------------------------------------------- /redfish.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DMTF/Redfish-Service-Validator/main/redfish.ico -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | build/ 3 | dist/ 4 | logs/ 5 | SchemaFiles/ 6 | *.pyc 7 | *.spec 8 | *.egg-info/ 9 | -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | # Original Contribution: 2 | 3 | * Dell Inc. 
-- Dell Redfish Validation Team 4 | 5 | # Other Key Contributions: 6 | 7 | * Majec Systems 8 | -------------------------------------------------------------------------------- /test_conf.json: -------------------------------------------------------------------------------- 1 | { 2 | "test": { 3 | "command": "$interpreter RedfishServiceValidator.py --ip $target_system -u $username -p $password --logdir $output_subdir" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /RedfishServiceValidatorGui.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: 4 | # https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 5 | 6 | from redfish_service_validator.RedfishServiceValidatorGui import main 7 | 8 | if __name__ == '__main__': 9 | main() 10 | -------------------------------------------------------------------------------- /config/example.ini: -------------------------------------------------------------------------------- 1 | [Tool] 2 | verbose = 3 | 4 | [Host] 5 | ip = http://localhost:8000 6 | username = MyUser 7 | password = MyPass 8 | description = MySystem 9 | forceauth = False 10 | authtype = Basic 11 | token = 12 | 13 | [Validator] 14 | payload = 15 | logdir = ./logs 16 | oemcheck = True 17 | debugging = False 18 | schema_directory = ./SchemaFiles/metadata 19 | collectionlimit = LogEntry 20 20 | -------------------------------------------------------------------------------- /tests/testdata/payloads/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "@odata.type":"#Example.v1_0_0.Example", 3 | "pDeprecatedEnum":"On", 4 | "pDeprecatedEnum2":["On"], 5 | "pEnum":"Off", 6 | "pString":"None", 7 | "pDateTimeOffset":"None", 8 | "pGuid":"None", 9 | "pInt16":10, 10 | "pInt32":20, 11 | 
"pInt64":30, 12 | "pInt64Low":1, 13 | "pInt64High":9, 14 | "pDecimal":1.1, 15 | "pPrimitive": true 16 | } 17 | -------------------------------------------------------------------------------- /tests/testdata/payloads/simple_bad.json: -------------------------------------------------------------------------------- 1 | { 2 | "@odata.type":"#Example.v1_0_0.Example", 3 | "pDeprecatedEnum":"None", 4 | "pDeprecatedEnum2":0, 5 | "pEnum":"None", 6 | "pString":0, 7 | "pDateTimeOffset":"None", 8 | "pGuid":"None", 9 | "pInt16":"None", 10 | "pInt32":"None", 11 | "pInt64":"None", 12 | "pInt64Low":-10, 13 | "pInt64High":99, 14 | "pDecimal":"None", 15 | "pPrimitive":{} 16 | } 17 | -------------------------------------------------------------------------------- /RedfishServiceValidator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: 4 | # https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 5 | 6 | import sys 7 | import logging 8 | from redfish_service_validator.RedfishServiceValidator import main 9 | 10 | my_logger = logging.getLogger('rsv') 11 | my_logger.setLevel(logging.DEBUG) 12 | 13 | if __name__ == '__main__': 14 | try: 15 | sys.exit(main()) 16 | except Exception as e: 17 | my_logger.exception("Program finished prematurely: %s", e) 18 | raise 19 | -------------------------------------------------------------------------------- /RedfishServiceValidator.exe.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python ; coding: utf-8 -*- 2 | 3 | 4 | block_cipher = None 5 | 6 | 7 | a = Analysis(['RedfishServiceValidatorGui.py'], 8 | pathex=[], 9 | binaries=[], 10 | datas=[], 11 | hiddenimports=[], 12 | hookspath=[], 13 | runtime_hooks=[], 14 | excludes=[], 15 | win_no_prefer_redirects=False, 16 | win_private_assemblies=False, 17 | 
cipher=block_cipher, 18 | noarchive=False) 19 | pyz = PYZ(a.pure, a.zipped_data, 20 | cipher=block_cipher) 21 | 22 | exe = EXE(pyz, 23 | a.scripts, 24 | a.binaries, 25 | a.zipfiles, 26 | a.datas, 27 | [], 28 | name='RedfishServiceValidator.exe', 29 | debug=False, 30 | bootloader_ignore_signals=False, 31 | strip=False, 32 | upx=True, 33 | upx_exclude=[], 34 | runtime_tmpdir=None, 35 | console=False, 36 | disable_windowed_traceback=False, 37 | target_arch=None, 38 | codesign_identity=None, 39 | entitlements_file=None , icon='redfish.ico') 40 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: 4 | # https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 5 | 6 | from setuptools import setup 7 | from codecs import open 8 | 9 | with open("README.md", "r", "utf-8") as f: 10 | long_description = f.read() 11 | 12 | setup( 13 | name="redfish_service_validator", 14 | version="2.5.1", 15 | description="Redfish Service Validator", 16 | long_description=long_description, 17 | long_description_content_type="text/markdown", 18 | author="DMTF, https://www.dmtf.org/standards/feedback", 19 | license="BSD 3-clause \"New\" or \"Revised License\"", 20 | classifiers=[ 21 | "Development Status :: 5 - Production/Stable", 22 | "License :: OSI Approved :: BSD License", 23 | "Programming Language :: Python", 24 | "Topic :: Communications" 25 | ], 26 | keywords="Redfish", 27 | url="https://github.com/DMTF/Redfish-Protocol-Validator", 28 | packages=["redfish_service_validator"], 29 | entry_points={ 30 | 'console_scripts': [ 31 | 'rf_service_validator=redfish_service_validator.RedfishServiceValidator:main', 32 | 'rf_service_validator_gui=redfish_service_validator.RedfishServiceValidatorGui:main' 33 | ] 34 | }, 35 | 
install_requires=[ 36 | "redfish>=3.1.5", 37 | "requests", 38 | "beautifulsoup4>=4.6.0", 39 | "lxml" 40 | ] 41 | ) 42 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2016-2025, Contributing Member(s) of Distributed Management Task 4 | Force, Inc.. All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without modification, 7 | are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation and/or 14 | other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its contributors 17 | may be used to endorse or promote products derived from this software without 18 | specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 21 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 22 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 24 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 25 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 26 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 27 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 29 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /tests/testdata/payloads/system-bad-additional.json: -------------------------------------------------------------------------------- 1 | { 2 | "@odata.type": "#ComputerSystem.v1_15_0.ComputerSystem", 3 | "Id": "529QB9450R6", 4 | "Name": "Bladed System", 5 | "SystemType": "Physical", 6 | "Manufacturer": "Contoso", 7 | "Model": "SX1000", 8 | "SKU": "6914260", 9 | "SerialNumbers": "529QB9450R6", 10 | "PartNumbers": "166480-S23", 11 | "Status": { 12 | "State": "Enabled", 13 | "Health": "OK", 14 | "HealthRollup": "OK" 15 | }, 16 | "IndicatorLED": "Off", 17 | "PowerState": "On", 18 | "Boot": { 19 | "BootSourceOverrieEnabled": "Disabled", 20 | "BootSourceverrideTarget": "None", 21 | "BootSourceOverrideTarget@Redfish.AllowableValues": [ 22 | "None", 23 | "Pxe", 24 | "Floppy", 25 | "Cd", 26 | "Usb", 27 | "Hdd", 28 | "BiosSetup" 29 | ] 30 | }, 31 | "Bioserson": "P86 v1.58 (10/15/2015)", 32 | "ProcessorSummary": { 33 | "Count": 1, 34 | "Model": "Multi-Core Intel(R) Xeon(R) processor E5-16xx Series", 35 | "Status": { 36 | "State": "Enabled", 37 | "Health": "OK", 38 | "HealthRollup": "OK" 39 | } 40 | }, 41 | "MemorySummary": { 42 | "TotalSystemMemoryGiB": 64, 43 | "Status": { 44 | "State": "Enabled", 45 | "Health": "OK", 46 | "HealthRollup": "OK" 47 | } 48 | }, 49 | "Processors": { 50 | "@odata.id": "/redfish/v1/Systems/529QB9450R6/Processors" 51 | }, 52 | "SimpleStorage": { 53 | "@odata.id": "/redfish/v1/Systems/529QB9450R6/SimpleStorage" 54 | }, 55 | "Links": { 56 | "Chassis": [ 57 | { 58 | "@odata.id": "/redfish/v1/Chassis/Blade1" 59 | } 60 | ], 61 | "ManagedBy": [ 62 | { 63 | "@odata.id": "/redfish/v1/Managers/Blade1BMC" 64 | } 65 | ] 66 | }, 67 | "Actions": { 68 | "#ComputerSystem.Reset": { 69 | "target": "/redfish/v1/Systems/529QB9450R6/Actions/ComputerSystem.Reset", 70 | "ResetType@Redfish.AllowableValues": [ 71 | "On", 72 | "ForceOff", 73 | "GracefulShutdown", 74 | "GracefulRestart", 
75 | "ForceRestart", 76 | "Nmi", 77 | "ForceOn", 78 | "PushPowerButton" 79 | ] 80 | } 81 | }, 82 | "@odata.id": "/redfish/v1/Systems/529QB9450R6", 83 | "@Redfish.Copyright": "Copyright 2016-2025 DMTF. For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 84 | } -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Overview 4 | 5 | This repository is maintained by the [DMTF](https://www.dmtf.org/ "https://www.dmtf.org/"). All contributions are reviewed and approved by members of the organization. 6 | 7 | ## Submitting Issues 8 | 9 | Bugs, feature requests, and questions are all submitted in the "Issues" section for the project. DMTF members are responsible for triaging and addressing issues. 10 | 11 | ## Contribution Process 12 | 13 | 1. Fork the repository. 14 | 2. Make and commit changes. 15 | 3. Make a pull request. 16 | 17 | All contributions must adhere to the BSD 3-Clause License described in the LICENSE.md file, and the [Developer Certificate of Origin](#developer-certificate-of-origin). 18 | 19 | Pull requests are reviewed and approved by DMTF members. 20 | 21 | ## Developer Certificate of Origin 22 | 23 | All contributions must adhere to the [Developer Certificate of Origin (DCO)](http://developercertificate.org "http://developercertificate.org"). 24 | 25 | The DCO is an attestation attached to every contribution made by every developer. In the commit message of the contribution, the developer adds a "Signed-off-by" statement and thereby agrees to the DCO. This can be added by using the `--signoff` parameter with `git commit`. 26 | 27 | Full text of the DCO: 28 | 29 | ``` 30 | Developer Certificate of Origin 31 | Version 1.1 32 | 33 | Copyright (C) 2004, 2006 The Linux Foundation and its contributors. 
34 | 35 | Everyone is permitted to copy and distribute verbatim copies of this 36 | license document, but changing it is not allowed. 37 | 38 | 39 | Developer's Certificate of Origin 1.1 40 | 41 | By making a contribution to this project, I certify that: 42 | 43 | (a) The contribution was created in whole or in part by me and I 44 | have the right to submit it under the open source license 45 | indicated in the file; or 46 | 47 | (b) The contribution is based upon previous work that, to the best 48 | of my knowledge, is covered under an appropriate open source 49 | license and I have the right under that license to submit that 50 | work with modifications, whether created in whole or in part 51 | by me, under the same open source license (unless I am 52 | permitted to submit under a different license), as indicated 53 | in the file; or 54 | 55 | (c) The contribution was provided directly to me by some other 56 | person who certified (a), (b) or (c) and I have not modified 57 | it. 58 | 59 | (d) I understand and agree that this project and the contribution 60 | are public and that a record of the contribution (including all 61 | personal information I submit with it, including my sign-off) is 62 | maintained indefinitely and may be redistributed consistent with 63 | this project or the open source license(s) involved. 64 | ``` 65 | -------------------------------------------------------------------------------- /redfish_service_validator/schema_pack.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 4 | 5 | import argparse 6 | import os 7 | import zipfile 8 | from io import BytesIO 9 | import logging 10 | import requests 11 | 12 | my_logger = logging.getLogger('rsv') 13 | my_logger.setLevel(logging.DEBUG) 14 | 15 | # live_zip_uri = 'http://redfish.dmtf.org/schemas/DSP8010_2021.1.zip' 16 | live_zip_uri = 'https://www.dmtf.org/sites/default/files/standards/documents/DSP8010.zip' 17 | 18 | 19 | def setup_schema_pack(uri, local_dir, http_proxy='', https_proxy=''): 20 | proxies, timeout = None, 20 21 | if http_proxy != '' or https_proxy != '': 22 | proxies = {} 23 | if http_proxy != '': proxies['http'] = http_proxy 24 | if https_proxy != '': proxies['https'] = https_proxy 25 | if uri == 'latest': 26 | uri = live_zip_uri 27 | my_logger.info('Unpacking schema pack... {}'.format(uri)) 28 | try: 29 | if not os.path.isdir(local_dir): 30 | os.makedirs(local_dir) 31 | response = requests.get(uri, timeout=timeout, proxies=proxies) 32 | expCode = [200] 33 | elapsed = response.elapsed.total_seconds() 34 | statusCode = response.status_code 35 | my_logger.debug('{}, {}, {},\nTIME ELAPSED: {}'.format(statusCode, expCode, response.headers, elapsed)) 36 | if statusCode in expCode: 37 | if not zipfile.is_zipfile(BytesIO(response.content)): 38 | my_logger.error('Schema Unpacking Error: This URL did not return a valid zipfile') 39 | pass 40 | else: 41 | zf = zipfile.ZipFile(BytesIO(response.content)) 42 | zf.testzip() 43 | for name in zf.namelist(): 44 | if '.xml' in name: 45 | cpath = '{}/{}'.format(local_dir, name.split('/')[-1]) 46 | my_logger.debug((name, cpath)) 47 | item = zf.open(name) 48 | with open(cpath, 'wb') as f: 49 | f.write(item.read()) 50 | item.close() 51 | zf.close() 52 | except Exception as ex: 53 | my_logger.error("Schema Unpacking Error: A problem when getting resource has occurred {}".format(uri)) 54 | my_logger.error("output: ", exc_info=True) 55 | return True 56 | 
57 | 58 | if __name__ == '__main__': 59 | argget = argparse.ArgumentParser(description='Acquire schema_pack from DMTF website') 60 | 61 | # config 62 | argget.add_argument('--source', type=str, default=live_zip_uri, help='URL of the given schemapack, if unspecified, always grab latest') 63 | argget.add_argument('--schema_directory', type=str, default='./SchemaFiles/metadata', help='directory for local schema files') 64 | 65 | args = argget.parse_args() 66 | 67 | setup_schema_pack(args.source, args.schema_directory) 68 | -------------------------------------------------------------------------------- /redfish_service_validator/config.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 4 | 5 | import configparser 6 | import logging 7 | import json 8 | 9 | my_logger = logging.getLogger('rsv') 10 | my_logger.setLevel(logging.DEBUG) 11 | 12 | config_struct = { 13 | 'Tool': ['verbose'], 14 | 'Host': ['ip', 'username', 'password', 'description', 'forceauth', 'authtype', 'token', 'ext_http_proxy', 'ext_https_proxy', 'serv_http_proxy', 'serv_https_proxy'], 15 | 'Validator': ['payload', 'logdir', 'oemcheck', 'debugging', 'schema_directory', 'uricheck', 'mockup', 'collectionlimit', 'requesttimeout', 'requestattempts'] 16 | } 17 | 18 | config_options = [x for name in config_struct for x in config_struct[name]] 19 | 20 | 21 | def convert_args_to_config(args): 22 | # Disable interpolation (https://docs.python.org/3/library/configparser.html#interpolation-of-values) 23 | my_config = configparser.ConfigParser(interpolation=None) 24 | for section in ['Tool', 'Host', 'Validator']: 25 | my_config.add_section(section) 26 | for option in config_struct[section]: 27 | if option not in ['password', 'token']: 28 | my_var = vars(args)[option] 29 | if 
isinstance(my_var, list): 30 | my_var = ' '.join(my_var) 31 | my_config.set(section, option, str(my_var) if my_var is not None else '') 32 | else: 33 | my_config.set(section, option, '******') 34 | return my_config 35 | 36 | 37 | def convert_config_to_args(args, config): 38 | my_config = configparser.ConfigParser() 39 | if isinstance(config, configparser.ConfigParser): 40 | my_config = config 41 | elif isinstance(config, str): 42 | with open(config, 'r') as f: 43 | my_config.read_file(f) 44 | elif isinstance(config, dict): 45 | my_config.read_dict(config) 46 | for section in config_struct: 47 | if section in my_config: 48 | for option in my_config[section]: 49 | if option.lower() not in config_options: 50 | if option.lower() not in ['version', 'copyright']: 51 | my_logger.error('Tool Configuration Error: Option {} not supported!'.format(option), extra={"result": "unsupportedOption"}) 52 | elif my_config[section][option] not in ['', None]: 53 | if option.lower() == 'payload' or option.lower() == 'collectionlimit': 54 | setattr(args, option, my_config[section][option].split(' ')) 55 | elif option.lower() in ['requesttimeout', 'requestattempts']: 56 | setattr(args, option, int(my_config[section][option])) 57 | else: 58 | setattr(args, option, my_config[section][option]) 59 | if option.lower() in ['password', 'token']: 60 | my_config.set(section, option, '******') 61 | my_config_dict = config_parse_to_dict(my_config) 62 | print(json.dumps(my_config_dict, indent=4)) 63 | 64 | 65 | def config_parse_to_dict(config): 66 | my_dict = {} 67 | for section in config: 68 | my_dict[section] = {} 69 | for option in [x for x in config[section] if x not in ['version', 'copyright']]: 70 | my_dict[section][option] = {} 71 | my_dict[section][option]['value'] = config[section][option] 72 | my_dict[section][option]['description'] = "TBD" 73 | return my_dict 74 | -------------------------------------------------------------------------------- /redfish_service_validator/logger.py: 
-------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 4 | 5 | import logging 6 | import sys 7 | from enum import IntEnum 8 | from types import SimpleNamespace 9 | 10 | # List and set up custom debug levels 11 | class Level(IntEnum): 12 | DEBUG = logging.DEBUG 13 | VERBOSE2 = logging.INFO-2 14 | VERBOSE1 = logging.INFO-1 15 | INFO = logging.INFO 16 | WARN = logging.WARN 17 | ERROR = logging.ERROR 18 | 19 | logging.addLevelName(Level.VERBOSE1, "Level.VERBOSE1") 20 | logging.addLevelName(Level.VERBOSE2, "Level.VERBOSE2") 21 | 22 | # Entries for HTML log 23 | LOG_ENTRY = ('name', 'value', 'type', 'exists', 'result') 24 | COUNT_ENTRY = ('id', 'msg', 'level') 25 | 26 | def create_entry(name, value, my_type, exists, result): 27 | return SimpleNamespace(**{ 28 | "name": name, 29 | "value": value, 30 | "type": my_type, 31 | "exists": exists, 32 | "result": result 33 | }) 34 | 35 | def create_count(id_, msg, level): 36 | return SimpleNamespace(**{ 37 | "id": id_, 38 | "msg": msg, 39 | "level": level 40 | }) 41 | 42 | # Handler for log counts to flush (example: per Resource validated) 43 | class RecordHandler(logging.Handler): 44 | def __init__(self): 45 | self.record_collection = [] 46 | super().__init__() 47 | 48 | def emit(self, record): 49 | result = record.__dict__.get('result') 50 | if record.levelno > logging.INFO or result is not None: 51 | self.record_collection.append(record) 52 | 53 | def flush(self): 54 | output = self.record_collection 55 | self.record_collection = [] 56 | return output 57 | 58 | class RecordFormatter(logging.Formatter): 59 | def __init__(self): 60 | self.current_uri = [None] 61 | super().__init__() 62 | 63 | def format(self, record): 64 | msg = "{} - {}".format(record.levelname, record.getMessage()) 65 | result = 
record.__dict__.get('result') 66 | record.result = result 67 | uri = record.__dict__.get('uri', self.current_uri[-1]) 68 | record.uri = uri 69 | if result or record.levelno > logging.INFO: 70 | append = " ... " 71 | append += "{} ".format(result) if result else " " 72 | append += "at {}".format(uri) if uri else "" 73 | msg += append 74 | return msg 75 | 76 | def create_logging_file_handler(level, file_name): 77 | file_handler = logging.FileHandler(file_name) 78 | file_handler.setLevel(min(level, standard_out.level)) 79 | file_handler.setFormatter(RecordFormatter()) 80 | my_logger.addHandler(file_handler) 81 | 82 | def push_uri(self, uri): 83 | """Pushes uri of text logger formatter. 84 | 85 | Args: 86 | uri (str, optional): URI to change to. Defaults to None. 87 | """ 88 | 89 | for handler in self.handlers: 90 | if isinstance(handler, logging.FileHandler): 91 | handler.formatter.current_uri.append(uri) 92 | 93 | def pop_uri(self): 94 | """Pops uri of text logger formatter. 95 | """ 96 | 97 | for handler in self.handlers: 98 | if isinstance(handler, logging.FileHandler): 99 | if len(handler.formatter.current_uri) > 1: 100 | handler.formatter.current_uri.pop() 101 | 102 | my_logger = logging.getLogger('rsv') 103 | my_logger.setLevel(logging.DEBUG) 104 | 105 | standard_out = logging.StreamHandler(sys.stdout) 106 | standard_out.setLevel(logging.INFO) 107 | my_logger.addHandler(standard_out) 108 | 109 | # Functions to set up externally 110 | def set_standard_out(new_level): 111 | standard_out.setLevel(new_level) 112 | 113 | record_capture = RecordHandler() 114 | my_logger.addHandler(record_capture) 115 | 116 | # Verbose printing functions 117 | def print_verbose_1(self, msg, *args, **kwargs): 118 | if self.isEnabledFor(Level.VERBOSE1): 119 | self._log(Level.VERBOSE1, msg, args, **kwargs) 120 | 121 | def print_verbose_2(self, msg, *args, **kwargs): 122 | if self.isEnabledFor(Level.VERBOSE2): 123 | self._log(Level.VERBOSE2, msg, args, **kwargs) 124 | 125 | 
logging.Logger.verbose1 = print_verbose_1 126 | logging.Logger.verbose2 = print_verbose_2 127 | logging.Logger.push_uri = push_uri 128 | logging.Logger.pop_uri = pop_uri 129 | 130 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release and Publish 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | version: 6 | description: 'Version number' 7 | required: true 8 | changes_1: 9 | description: 'Change entry' 10 | required: true 11 | changes_2: 12 | description: 'Change entry' 13 | required: false 14 | changes_3: 15 | description: 'Change entry' 16 | required: false 17 | changes_4: 18 | description: 'Change entry' 19 | required: false 20 | changes_5: 21 | description: 'Change entry' 22 | required: false 23 | changes_6: 24 | description: 'Change entry' 25 | required: false 26 | changes_7: 27 | description: 'Change entry' 28 | required: false 29 | changes_8: 30 | description: 'Change entry' 31 | required: false 32 | jobs: 33 | release_build: 34 | name: Build the release 35 | runs-on: ubuntu-latest 36 | steps: 37 | - uses: actions/checkout@v2 38 | with: 39 | token: ${{secrets.GITHUB_TOKEN}} 40 | - name: Build the changelog text 41 | run: | 42 | echo 'CHANGES<> $GITHUB_ENV 43 | echo "## [${{github.event.inputs.version}}] - $(date +'%Y-%m-%d')" >> $GITHUB_ENV 44 | echo "- ${{github.event.inputs.changes_1}}" >> $GITHUB_ENV 45 | if [[ -n "${{github.event.inputs.changes_2}}" ]]; then echo "- ${{github.event.inputs.changes_2}}" >> $GITHUB_ENV; fi 46 | if [[ -n "${{github.event.inputs.changes_3}}" ]]; then echo "- ${{github.event.inputs.changes_3}}" >> $GITHUB_ENV; fi 47 | if [[ -n "${{github.event.inputs.changes_4}}" ]]; then echo "- ${{github.event.inputs.changes_4}}" >> $GITHUB_ENV; fi 48 | if [[ -n "${{github.event.inputs.changes_5}}" ]]; then echo "- ${{github.event.inputs.changes_5}}" >> $GITHUB_ENV; fi 49 | if [[ -n 
"${{github.event.inputs.changes_6}}" ]]; then echo "- ${{github.event.inputs.changes_6}}" >> $GITHUB_ENV; fi 50 | if [[ -n "${{github.event.inputs.changes_7}}" ]]; then echo "- ${{github.event.inputs.changes_7}}" >> $GITHUB_ENV; fi 51 | if [[ -n "${{github.event.inputs.changes_8}}" ]]; then echo "- ${{github.event.inputs.changes_8}}" >> $GITHUB_ENV; fi 52 | echo "" >> $GITHUB_ENV 53 | echo 'EOF' >> $GITHUB_ENV 54 | - name: Update version numbers 55 | run: | 56 | sed -i -E 's/ version=.+,/ version="'${{github.event.inputs.version}}'",/' setup.py 57 | sed -i -E 's/tool_version = .+/tool_version = '\'${{github.event.inputs.version}}\''/' redfish_service_validator/RedfishServiceValidator.py 58 | - name: Update the changelog 59 | run: | 60 | ex CHANGELOG.md <" 74 | git add * 75 | git commit -s -m "${{github.event.inputs.version}} versioning" 76 | git push origin main 77 | - name: Set up Python 78 | uses: actions/setup-python@v2 79 | with: 80 | python-version: '3.x' 81 | - name: Install dependencies 82 | run: | 83 | python -m pip install --upgrade pip 84 | pip install setuptools wheel twine 85 | - name: Build the distribution 86 | run: | 87 | python setup.py sdist bdist_wheel 88 | - name: Upload to pypi 89 | uses: pypa/gh-action-pypi-publish@release/v1 90 | with: 91 | password: ${{ secrets.PYPI_API_TOKEN }} 92 | - name: Build Windows GUI executable 93 | uses: JackMcKew/pyinstaller-action-windows@main 94 | with: 95 | path: . 96 | - name: Package the Windows GUI executable 97 | run: | 98 | mkdir Redfish-Service-Validator-${{github.event.inputs.version}}-Windows 99 | cp dist/windows/RedfishServiceValidator.exe Redfish-Service-Validator-${{github.event.inputs.version}}-Windows/. 100 | cp README.md Redfish-Service-Validator-${{github.event.inputs.version}}-Windows/. 
101 | zip -r Redfish-Service-Validator-${{github.event.inputs.version}}-Windows.zip Redfish-Service-Validator-${{github.event.inputs.version}}-Windows 102 | - name: Make the release 103 | env: 104 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 105 | run: | 106 | gh release create ${{github.event.inputs.version}} -t ${{github.event.inputs.version}} -n "Changes since last release:"$'\n\n'"$CHANGES" *.zip 107 | -------------------------------------------------------------------------------- /redfish_service_validator/helper.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 4 | 5 | import re 6 | import logging 7 | 8 | my_logger = logging.getLogger('rsv') 9 | my_logger.setLevel(logging.DEBUG) 10 | 11 | VERSION_PATTERN = 'v[0-9]+_[0-9]+_[0-9]+' 12 | 13 | def splitVersionString(v_string): 14 | """ 15 | Split x.y.z and Namespace.vX_Y_Z, vX_Y_Z type version strings into tuples of integers 16 | 17 | :return: tuple of integers 18 | """ 19 | if(re.match(r'([a-zA-Z0-9_.-]*\.)+[a-zA-Z0-9_.-]*', v_string) is not None): 20 | new_string = getVersion(v_string) 21 | if new_string is not None: 22 | v_string = new_string 23 | if ('_' in v_string): 24 | v_string = v_string.replace('v', '') 25 | payload_split = v_string.split('_') 26 | else: 27 | payload_split = v_string.split('.') 28 | if len(payload_split) != 3: 29 | return tuple([0, 0, 0]) 30 | return tuple([int(v) for v in payload_split]) 31 | 32 | 33 | def stripCollection(typename): 34 | """ 35 | Remove "Collection()" from a type string 36 | """ 37 | if 'Collection(' in typename: 38 | typename = typename.replace('Collection(', "").replace(')', "") 39 | return typename 40 | 41 | 42 | def navigateJsonFragment(decoded, URILink): 43 | if '#' in URILink: 44 | URIfragless, frag = 
def getNamespace(string: str):
    """Return the namespace of a type string, version included.

    Example: '#Example.v1_0_0.Example' -> 'Example.v1_0_0'

    :param string: A type string, optionally prefixed with '#'
    :type string: str
    """
    if '#' in string:
        string = string.rsplit('#', 1)[1]
    return string.rsplit('.', 1)[0]


def getVersion(string: str):
    """Return the version segment of a type/namespace string, if possible.

    Returns None when no segment matches VERSION_PATTERN (module constant).

    :param string: A type/namespace string
    :type string: str
    """
    regcap = re.search(VERSION_PATTERN, string)
    return regcap.group() if regcap else None


def getNamespaceUnversioned(string: str):
    """Return the namespace of a type string, version NOT included.

    Example: '#Example.v1_0_0.Example' -> 'Example'

    :param string: A type string, optionally prefixed with '#'
    :type string: str
    """
    if '#' in string:
        string = string.rsplit('#', 1)[1]
    return string.split('.', 1)[0]


def getType(string: str):
    """Return the type portion (right hand side) of a type string.

    Example: '#Example.v1_0_0.Example' -> 'Example'

    :param string: A type string, optionally prefixed with '#'
    :type string: str
    """
    if '#' in string:
        string = string.rsplit('#', 1)[1]
    return string.rsplit('.', 1)[-1]


def createContext(typestring: str):
    """Create an @odata.context string from a type string.

    Example: '#Example.v1_0_0.Example' -> '/redfish/v1/$metadata#Example.Example'

    :param typestring: A type string
    :type typestring: str
    """
    ns_name = getNamespaceUnversioned(typestring)
    type_name = getType(typestring)
    context = '/redfish/v1/$metadata' + '#' + ns_name + '.' + type_name
    return context


def checkPayloadConformance(jsondata, uri):
    """Check @odata annotations in a payload for conformance.

    These entries are not checked in the normal validation loop.

    :param jsondata: decoded JSON payload (dict)
    :param uri: URI the payload was retrieved from; '' skips the @odata.id
        cross-check against the link
    :returns: tuple (success, info); success is True when every annotation
        passed, info maps each annotation key to a
        (value, 'odata', 'Exists', status) record
    """
    info = {}
    decoded = jsondata
    success = True
    for key in [k for k in decoded if '@odata' in k]:
        paramPass = False

        property_name, odata_name = tuple(key.rsplit('@', maxsplit=1))

        if odata_name == 'odata.id':
            # Guard with isinstance before re.match: previously the
            # isinstance result was discarded and re.match raised a
            # TypeError when the value was not a string.
            paramPass = isinstance(decoded[key], str) and re.match(
                r'(\/.*)+(#([a-zA-Z0-9_.-]*\.)+[a-zA-Z0-9_.-]*)?', decoded[key]) is not None
            if not paramPass:
                my_logger.error("Payload Conformance Error: {} {}, Expected format is /path/to/uri, but received: {}".format(uri, key, decoded[key]))
            else:
                # '/redfish/v1' vs '/redfish/v1/' for the service root is tolerated
                if uri != '' and decoded[key] != uri and not (uri == "/redfish/v1/" and decoded[key] == "/redfish/v1"):
                    my_logger.warning("Payload Conformance Error: {} {}, Expected @odata.id to match URI link {}".format(uri, key, decoded[key]))
        elif odata_name == 'odata.count':
            paramPass = isinstance(decoded[key], int)
            if not paramPass:
                my_logger.error("Payload Conformance Error: {} {}, Expected an integer, but received: {}".format(uri, key, decoded[key]))
        elif odata_name == 'odata.context':
            paramPass = isinstance(decoded[key], str) and re.match(
                r'/redfish/v1/\$metadata#([a-zA-Z0-9_.-]*\.)[a-zA-Z0-9_.-]*', decoded[key]) is not None
            if not paramPass:
                # A malformed context is only a warning; record it and skip
                # the common bookkeeping below so it does not fail the payload.
                my_logger.warning("Payload Conformance Error: {} {}, Expected format is /redfish/v1/$metadata#ResourceType, but received: {}".format(uri, key, decoded[key]))
                info[key] = (decoded[key], 'odata', 'Exists', 'WARN')
                continue
        elif odata_name == 'odata.type':
            paramPass = isinstance(decoded[key], str) and re.match(
                r'#([a-zA-Z0-9_.-]*\.)+[a-zA-Z0-9_.-]*', decoded[key]) is not None
            if not paramPass:
                my_logger.error("Payload Conformance Error: {} {}, Expected format is #Namespace.Type, but received: {}".format(uri, key, decoded[key]))
        else:
            # Unrecognized @odata annotation; its presence alone passes.
            paramPass = True

        success = success and paramPass

        info[key] = (decoded[key], 'odata', 'Exists', 'PASS' if paramPass else 'FAIL')

    return success, info
#
# Unit tests for the redfish_service_validator.catalog module
#

import unittest
import sys
import pprint

sys.path.append('../')

import redfish_service_validator.catalog as catalog

import logging

logging.getLoggerClass().verbose1 = logging.Logger.debug
logging.getLoggerClass().verbose2 = logging.Logger.debug

class TestCatalog(unittest.TestCase):
    """Exercises schema catalogs, schema documents, types and properties."""

    def test_fuzzy(self):
        print('\n')
        # A near-miss name is suggested when the exact name is absent...
        self.assertEqual(
            catalog.get_fuzzy_property('PropertyA', {'Name': 'Payload', 'PropertyB': False}),
            'PropertyB')
        # ...but an exact match always wins.
        self.assertEqual(
            catalog.get_fuzzy_property('PropertyA', {'Name': 'Payload', 'PropertyB': False, 'PropertyA': False}),
            'PropertyA')
        # With an allow-list, only listed names may be suggested.
        self.assertEqual(catalog.get_fuzzy_property('PropertyA', {'Name': 'Payload'}, []), 'PropertyA')
        self.assertEqual(catalog.get_fuzzy_property('PropertyA', {'Name': 'Payload'}, ['PropertyB']), 'PropertyA')
        # OK

    def test_catalog(self):
        print('\n')
        cat = catalog.SchemaCatalog('./tests/testdata/schemas/')

        cat.getSchemaDocByClass('Example')

        # Unknown classes must raise, not return a stub.
        self.assertRaises(catalog.MissingSchemaError, cat.getSchemaDocByClass, 'NotExample')

        cat.getSchemaInCatalog('Example.v1_0_0')

        cat.getTypeInCatalog('Example.v1_7_0.Example')
        cat.getTypeInCatalog('Example.v1_2_0.Links')
        # OK

    def test_schema_doc(self):
        print('\n')
        cat = catalog.SchemaCatalog('./tests/testdata/schemas/')
        with open('./tests/testdata/schemas/Example_v1.xml') as xml_file:
            doc = catalog.SchemaDoc(xml_file.read(), cat, 'Example_v1.xml')

        # All of these references should resolve without raising.
        for ref_name in ('ExampleResource', 'ExampleResource.v1_0_0',
                         'ExampleResource.v1_0_1', 'ExampleResource.v1_9_9',
                         'Redfish', 'RedfishExtension.v1_0_0'):
            doc.getReference(ref_name)

        found_type = doc.getTypeInSchemaDoc('Example.v1_0_0.Example')
        # Passing an already-resolved type back in is accepted.
        doc.getTypeInSchemaDoc(found_type)
        doc.getTypeInSchemaDoc('Example.v1_9_9.Example')
        doc.getTypeInSchemaDoc('Example.v1_0_0.Actions')
        doc.getTypeInSchemaDoc('ExampleResource.v1_0_0.ExampleResource')
        self.assertRaises(catalog.MissingSchemaError, doc.getTypeInSchemaDoc, 'NoExample.v1_0_0.NoExample')

    def test_schema_class(self):
        print('\n')
        cat = catalog.SchemaCatalog('./tests/testdata/schemas/')
        cat.getSchemaDocByClass('Example.v1_0_0')
        cat.getSchemaInCatalog('Example.v1_0_0')

    def test_basic_properties(self):
        print('\nTesting basic types as json')
        # Each populated basic property must serialize without raising.
        for type_name, value in (("Edm.Int", 1),
                                 ("Edm.Decimal", 1.1),
                                 ("Edm.Guid", "123"),
                                 ("Edm.Guid", catalog.REDFISH_ABSENT)):
            print(catalog.RedfishProperty(type_name).populate(value).as_json())

    def test_basic_properties_check(self):
        print('\nTesting check values')
        # populate(check=True) should tolerate both matching and
        # mismatching value types without raising.
        for type_name, value in (("Edm.Int", 1),
                                 ("Edm.Int", 1.1),
                                 ("Edm.Int", "1"),
                                 ("Edm.Decimal", 1.1),
                                 ("Edm.Decimal", "1.1"),
                                 ("Edm.String", "1"),
                                 ("Edm.String", 1),
                                 ("Edm.Guid", "123"),
                                 ("Edm.Guid", catalog.REDFISH_ABSENT)):
            catalog.RedfishProperty(type_name).populate(value, check=True)

    def test_object(self):
        print('\nTesting object values')
        cat = catalog.SchemaCatalog('./tests/testdata/schemas/')
        doc = cat.getSchemaDocByClass("ExampleResource.v1_0_0.ExampleResource")
        resource_type = doc.getTypeInSchemaDoc("ExampleResource.v1_0_0.ExampleResource")

        # Unpopulated object: both serialization and link extraction work.
        empty_obj = catalog.RedfishObject( resource_type )
        pprint.pprint(empty_obj.as_json(), indent=2)
        empty_obj.as_json()
        empty_obj.getLinks()

        # Populated object with null-valued properties behaves the same.
        filled_obj = catalog.RedfishObject( resource_type ).populate({"Id": None, "Description": None})
        pprint.pprint(filled_obj.as_json(), indent=2)
        filled_obj.as_json()
        filled_obj.getLinks()

    def test_capabilities(self):
        cat = catalog.SchemaCatalog('./tests/testdata/schemas/')
        doc = cat.getSchemaDocByClass("Example.v1_0_0.Example")
        example_type = doc.getTypeInSchemaDoc("Example.v1_0_0.Example")
        caps = example_type.getCapabilities()

        # Capabilities dictionary and the attribute shortcuts must agree.
        self.assertTrue(caps['CanUpdate'])
        self.assertFalse(caps['CanInsert'])
        self.assertFalse(caps['CanDelete'])

        self.assertTrue(example_type.CanUpdate)
        self.assertFalse(example_type.CanInsert)
        self.assertFalse(example_type.CanDelete)

    def test_expected_uris(self):
        print('\nTesting expected Uris')
        cat = catalog.SchemaCatalog('./tests/testdata/schemas/')
        doc = cat.getSchemaDocByClass("Example.v1_0_0.Example")
        example_type = doc.getTypeInSchemaDoc("Example.v1_0_0.Example")
        blank_obj = catalog.RedfishObject( example_type )

        self.assertEqual(len(blank_obj.Type.getUris()), 3)

        # (payload, expected HasValidUri, expected HasValidUriStrict or None to skip)
        cases = [
            ({"@odata.id": "/redfish/v1/Example", "Id": "Example", "Description": None}, True, None),
            ({"@odata.id": "/redfish/v1/Examples", "Id": "Examples", "Description": None}, False, None),
            ({"@odata.id": "/redfish/v1/Examples/FunnyId", "Id": 'FunnyId', "Description": None}, True, True),
            ({"@odata.id": "/redfish/v1/Examples/SubObject/FunnyId", "Id": 'FunnyId', "Description": None}, True, True),
            ({"@odata.id": "/redfish/v1/Examples/SubObject/FunnyId", "Description": None}, True, True),
            ({"@odata.id": "/redfish/v1/Examples/WrongId", "Id": 'FunnyId', "Description": None}, True, False),
            ({"@odata.id": "/redfish/v1/Examples/NoId", "Id": None, "Description": None}, True, True),
        ]
        for payload, expect_valid, expect_strict in cases:
            populated = catalog.RedfishObject( example_type ).populate(payload)
            valid_check = self.assertTrue if expect_valid else self.assertFalse
            valid_check(populated.HasValidUri)
            if expect_strict is not None:
                strict_check = self.assertTrue if expect_strict else self.assertFalse
                strict_check(populated.HasValidUriStrict)



if __name__ == '__main__':
    unittest.main()
2 | 3 | # Redfish Service Validator 4 | 5 | ## About 6 | 7 | The Redfish Service Validator is a Python3 tool for checking conformance of any "device" with a Redfish interface against Redfish CSDL schema. 8 | The tool is designed to be device-agnostic and is driven based on the Redfish specifications and schema intended to be supported by the device. 9 | 10 | ## Installation 11 | 12 | 13 | From PyPI: 14 | 15 | pip install redfish_service_validator 16 | 17 | From GitHub: 18 | 19 | git clone https://github.com/DMTF/Redfish-Service-Validator.git 20 | cd Redfish-Service-Validator 21 | python setup.py sdist 22 | pip install dist/redfish_service_validator-x.x.x.tar.gz 23 | 24 | ## Requirements 25 | 26 | External modules: 27 | 28 | * beautifulsoup4 - https://pypi.python.org/pypi/beautifulsoup4 29 | * requests - https://github.com/kennethreitz/requests (Documentation is available at http://docs.python-requests.org/) 30 | * lxml - https://pypi.python.org/pypi/lxml 31 | 32 | You may install the prerequisites by running: 33 | 34 | pip3 install -r requirements.txt 35 | 36 | If you have a previous beautifulsoup4 installation, use the following command: 37 | 38 | pip3 install beautifulsoup4 --upgrade 39 | 40 | There is no dependency based on Windows or Linux OS. 41 | The result logs are generated in HTML format and an appropriate browser, such as Chrome, Firefox, or Edge, is required to view the logs on the client system. 42 | 43 | ## Usage 44 | 45 | Example usage without providing a configuration file: 46 | 47 | rf_service_validator -u root -p root -r https://192.168.1.1 48 | 49 | Example usage with a configuration file: 50 | 51 | rf_service_validator -c config/example.ini 52 | 53 | The following sections describe the arguments and configuration file options. 54 | The file `config/example.ini` can be used as a template configuration file. 55 | At a minimum, the `ip`, `username`, and `password` options must be modified. 
56 | 57 | ### [Tool] 58 | 59 | | Variable | CLI Argument | Type | Definition | 60 | | :--- | :--- | :--- | :--- | 61 | | `verbose` | `-v` | integer | Verbosity of tool in stdout; 0 to 3, 3 being the greatest level of verbosity. | 62 | 63 | ### [Host] 64 | 65 | | Variable | CLI Argument | Type | Definition | 66 | | :--- | :--- | :--- | :--- | 67 | | `ip` | `-r` | string | The address of the Redfish service (with scheme); example: 'https://123.45.6.7:8000'. | 68 | | `username` | `-u` | string | The username for authentication. | 69 | | `password` | `-p` | string | The password for authentication. | 70 | | `description` | `--description` | string | The description of the system for identifying logs; if none is given, a value is produced from information in the service root. | 71 | | `forceauth` | `--forceauth` | boolean | Force authentication on unsecure connections; 'True' or 'False'. | 72 | | `authtype` | `--authtype` | string | Authorization type; 'None', 'Basic', 'Session', or 'Token'. | 73 | | `token` | `--token` | string | Token when 'authtype' is 'Token'. | 74 | | `ext_http_proxy` | `--ext_http_proxy` | string | URL of the HTTP proxy for accessing external sites. | 75 | | `ext_https_proxy` | `--ext_https_proxy` | string | URL of the HTTPS proxy for accessing external sites. | 76 | | `serv_http_proxy` | `--serv_http_proxy` | string | URL of the HTTP proxy for accessing the service. | 77 | | `serv_https_proxy` | `--serv_https_proxy` | string | URL of the HTTPS proxy for accessing the service. | 78 | 79 | ### [Validator] 80 | 81 | | Variable | CLI Argument | Type | Definition | 82 | | :--- | :--- |:--------| :--- | 83 | | `payload` | `--payload` | string | The mode to validate payloads ('Tree', 'Single', 'SingleFile', or 'TreeFile') followed by resource/filepath; see below. | 84 | | `logdir` | `--logdir` | string | The directory for generated report files; default: 'logs'. 
| 85 | | `oemcheck` | `--nooemcheck` | boolean | Whether to check OEM items on service; 'True' or 'False'. | 86 | | `uricheck` | `--uricheck` | boolean | Allow URI checking on services below RedfishVersion 1.6.0; 'True' or 'False'. | 87 | | `debugging` | `--debugging` | boolean | Output debug statements to text log, otherwise it only uses INFO; 'True' or 'False'. | 88 | | `schema_directory` | `--schema_directory` | string | Directory for local schema files. | 89 | | `mockup` | `--mockup` | string | Directory tree for local mockup files. This option enables insertion of local mockup resources to replace missing, incomplete, or incorrect implementations retrieved from the service that may hinder full validation coverage. | 90 | | `collectionlimit` | `--collectionlimit` | string | Sets a limit to links gathered from collections by type (schema name).
Example 1: `ComputerSystem 20` limits ComputerSystemCollection to 20 links.
Example 2: `ComputerSystem 20 LogEntry 10` limits ComputerSystemCollection to 20 links and LogEntryCollection to 10 links. | 91 | `requesttimeout` | `--requesttimeout` | integer | Timeout in seconds for HTTP request waiting for response. | 92 | `requestattempts` | `--requestattempts` | integer | Number of attempts after failed HTTP requests. | 93 | 94 | ### Payload Option 95 | 96 | The `payload` option takes two parameters as strings. 97 | 98 | The first parameter specifies how to test the payload URI given, which can be 'Single', 'SingleFile', 'Tree', or 'TreeFile'. 99 | 'Single' and 'SingleFile' will test and give a report on a single resource. 100 | 'Tree' and 'TreeFile' will test and give a report on the resource and every link from that resource. 101 | 102 | The second parameter specifies a URI of the target payload to test or a filename of a local file to test. 103 | 104 | For example, `--payload Single /redfish/v1/AccountService` will perform validation of the URI `/redfish/v1/AccountService` and no other resources. 105 | 106 | ### Mockup Option 107 | 108 | The `mockup` option takes a single parameter as a string. The parameter specifies a local directory path to the `ServiceRoot` resource of a Redfish mockup tree. 109 | 110 | This option provides a powerful debugging tool as it allows local "mockup" JSON payloads to replace those retrieved from the unit under test. This can aid testers by allowing the tool to skip over problematic resources, which may cause the tool to crash, or more likely, miss portions of the implemented resources due to missing or invalid link properties or values. 111 | 112 | The mockup files follow the Redfish mockup style, with the directory tree matching the URI segments under /redfish/v1, and with a single `index.json` file in each subdirectory as desired. For examples of full mockups, see the Redfish Mockup Bundle (DSP2043) at https://www.dmtf.org/sites/default/files/standards/documents/DSP2043_2024.1.zip. 
113 | 114 | Populate the mockup directory tree with `index.json` files wherever problematic resources need to be replaced. Any replaced resource will report a Warning in the report to indicate a workaround was used. 115 | 116 | ## Execution Flow 117 | 118 | 1. The Redfish Service Validator starts by querying the service root resource from the target service and collects information about the service. 119 | * Collects all CSDL from the service. 120 | 2. For each resource found, it performs the following: 121 | * Reads all the URIs referenced in the resource. 122 | * Reads the schema file related to the particular resource and builds a model of expected properties. 123 | * Tests each property in the resource against the model built from the schema. 124 | 3. Step 2 repeats until all resources are covered. 125 | 126 | When validating a resource, the following types of tests may occur for each property: 127 | 128 | * Verify `@odata` properties against known patterns, such as `@odata.id`. 129 | * Check if the property is defined in the resource's schema. 130 | * Check if the value of the property matches the expected type, such as integer, string, boolean, array, or object. 131 | * Check if the property is mandatory. 132 | * Check if the property is allowed to be `null`. 133 | * For string properties with a regular expression, check if the value passes the regular expression. 134 | * For enumerations, check if the value is within the enumeration list. 135 | * For numeric properties with defined ranges, check if the value is within the specified range. 136 | * For object properties, check the properties inside the object against the object's schema definition. 137 | * For links, check that the URI referenced matches the expected resource type. 138 | 139 | CSDL syntax errors will cause testing to halt and move on to other resources. 
140 | The OData CSDL Validator (https://github.com/DMTF/Redfish-Tools/tree/main/odata-csdl-validator) can be used to identify schema errors prior to testing. 141 | 142 | ## Conformance Logs - Summary and Detailed Conformance Report 143 | 144 | The Redfish Service Validator generates an HTML report under the 'logs' folder and is named as 'ConformanceHtmlLog_MM_DD_YYYY_HHMMSS.html', along with a text and config file. 145 | The report gives the detailed view of the individual properties checked, with pass, fail, skip, or warning status for each resource checked for conformance. 146 | 147 | Additionally, there is a verbose text log file that may be referenced to diagnose tool or schema problems when the HTML log is insufficient. 148 | 149 | ## The Test Status 150 | 151 | The test result for each GET operation will be reported as follows: 152 | 153 | * PASS: If the operation is successful and returns a success code, such as `200 OK`. 154 | * FAIL: If the operation failed for reasons mentioned in GET method execution, or some configuration. 155 | * SKIP: If the property or method being checked is not mandatory or is not supported by the service. 156 | 157 | ## Limitations 158 | 159 | The Redfish Service Validator only performs GET operations on the service. 160 | Below are certain items that are not in scope for the tool. 161 | 162 | * Other HTTP methods, such as PATCH, are not covered. 163 | * Query parameters, such as $top and $skip, are not covered. 164 | * Multiple services are not tested simultaneously. 
# Copyright Notice:
# Copyright 2016-2025 DMTF. All rights reserved.
# License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md

import sys
import os
import argparse
import logging
import json
from datetime import datetime
from urllib.parse import urlparse
from collections import Counter

from redfish_service_validator.metadata import getSchemaDetails
from redfish_service_validator.config import convert_config_to_args, convert_args_to_config
from redfish_service_validator.validateResource import validateSingleURI, validateURITree
from redfish_service_validator import tohtml, schema_pack, traverse, logger

tool_version = '2.5.1'

def validate(argslist=None, configfile=None):
    """Main command

    Args:
        argslist (list, optional): List of arguments in the form of argv. Defaults to None.
        configfile (str, optional): Path to an ini configuration file; when None,
            falls back to the ``--config`` argument. Defaults to None.

    Returns:
        tuple: (status_code, html_report_path_or_None, status_message); status_code
        is 0 on success, 1 on any configuration/service/validation failure.
    """
    argget = argparse.ArgumentParser(description='DMTF tool to test a service against a collection of Schema, version {}'.format(tool_version))

    # base tool
    argget.add_argument('-v', '--verbose', action='count', default=0, help='Verbosity of tool in stdout')
    argget.add_argument('-c', '--config', type=str, help='Configuration for this tool')

    # host info
    argget.add_argument('-i', '--ip', '--rhost', '-r', type=str, help='The address of the Redfish service (with scheme); example: \'https://123.45.6.7:8000\'')
    argget.add_argument('-u', '--username', '-user', type=str, help='The username for authentication')
    argget.add_argument('-p', '--password', type=str, help='The password for authentication')
    argget.add_argument('--description', type=str, help='The description of the system for identifying logs; if none is given, a value is produced from information in the service root')
    argget.add_argument('--forceauth', action='store_true', help='Force authentication on unsecure connections')
    argget.add_argument('--authtype', type=str, default='Basic', help='Authorization type; \'None\', \'Basic\', \'Session\', or \'Token\'')
    argget.add_argument('--token', type=str, help='Token when \'authtype\' is \'Token\'')
    argget.add_argument('--ext_http_proxy', type=str, default='', help='URL of the HTTP proxy for accessing external sites')
    argget.add_argument('--ext_https_proxy', type=str, default='', help='URL of the HTTPS proxy for accessing external sites')
    argget.add_argument('--serv_http_proxy', type=str, default='', help='URL of the HTTP proxy for accessing the service')
    argget.add_argument('--serv_https_proxy', type=str, default='', help='URL of the HTTPS proxy for accessing the service')

    # validator options
    argget.add_argument('--payload', type=str, help='The mode to validate payloads (\'Tree\', \'Single\', \'SingleFile\', or \'TreeFile\') followed by resource/filepath', nargs=2)
    argget.add_argument('--logdir', '--report-dir', type=str, default='./logs', help='The directory for generated report files; default: \'logs\'')
    argget.add_argument('--nooemcheck', action='store_false', dest='oemcheck', help='Don\'t check OEM items')
    argget.add_argument('--debugging', action="store_true", help='Output debug statements to text log, otherwise it only uses INFO')
    argget.add_argument('--uricheck', action="store_true", help='Allow URI checking on services below RedfishVersion 1.6.0')
    argget.add_argument('--schema_directory', type=str, default='./SchemaFiles/metadata', help='Directory for local schema files')
    argget.add_argument('--mockup', type=str, default='', help='Enables insertion of local mockup resources to replace missing, incomplete, or incorrect implementations retrieved from the service that may hinder full validation coverage')
    argget.add_argument('--collectionlimit', type=str, default=['LogEntry', '20'], help='apply a limit to collections (format: RESOURCE1 COUNT1 RESOURCE2 COUNT2...)', nargs='+')
    argget.add_argument('--requesttimeout', type=int, default=10, help='Timeout in seconds for HTTP requests waiting for response')
    argget.add_argument('--requestattempts', type=int, default=10, help='Number of attempts after failed HTTP requests')

    # parse...
    args = argget.parse_args(argslist)

    # explicit configfile parameter wins over the --config argument
    if configfile is None:
        configfile = args.config

    # set logging file
    start_tick = datetime.now()

    logger.set_standard_out(logger.Level.INFO - args.verbose if args.verbose < 3 else logger.Level.DEBUG)

    logpath = args.logdir

    if not os.path.isdir(logpath):
        os.makedirs(logpath)

    log_level = logger.Level.INFO if not args.debugging else logger.Level.DEBUG
    file_name = datetime.strftime(start_tick, os.path.join(logpath, "ConformanceLog_%m_%d_%Y_%H%M%S.txt"))

    logger.create_logging_file_handler(log_level, file_name)

    my_logger = logging.getLogger('rsv')
    my_logger.setLevel(logging.DEBUG)

    # begin logging
    my_logger.info("Redfish Service Validator, version {}".format(tool_version))
    my_logger.info("")

    # config verification
    if args.ip is None and configfile is None:
        my_logger.error('Configuration Error: No IP or Config Specified')
        argget.print_help()
        return 1, None, 'Configuration Incomplete'

    if configfile:
        convert_config_to_args(args, configfile)
    else:
        # no config given: record the effective configuration for reproducibility
        my_logger.info('Writing config file to log directory')
        configfilename = datetime.strftime(start_tick, os.path.join(logpath, "ConfigFile_%m_%d_%Y_%H%M%S.ini"))
        my_config = convert_args_to_config(args)
        with open(configfilename, 'w') as f:
            my_config.write(f)

    scheme, netloc, _path, _params, _query, _fragment = urlparse(args.ip)
    if scheme not in ['http', 'https', 'http+unix']:
        my_logger.error('Configuration Error: IP is missing http or https or http+unix')
        return 1, None, 'IP Incomplete'

    if netloc == '':
        my_logger.error('Configuration Error: IP is missing ip/host')
        return 1, None, 'IP Incomplete'

    # collectionlimit arrives as alternating ResourceType/Count tokens
    if len(args.collectionlimit) % 2 != 0:
        my_logger.error('Configuration Error: Collection Limit requires two arguments per entry (ResourceType Count)')
        return 1, None, 'Collection Limit Incomplete'

    # start printing config details, remove redundant/private info from print
    my_logger.info('Target URI: {}'.format(args.ip))
    my_logger.info('\n'.join(
        ['{}: {}'.format(x, vars(args)[x] if x not in ['password'] else '******') for x in sorted(list(vars(args).keys() - set(['description']))) if vars(args)[x] not in ['', None]]))
    my_logger.info('Start time: {}'.format(start_tick.strftime('%x - %X')))
    my_logger.info("")

    # schema and service init
    schemadir = args.schema_directory

    if not os.path.isdir(schemadir):
        my_logger.info('Downloading initial schemas from online')
        my_logger.info('The tool will, by default, attempt to download and store XML files to relieve traffic from DMTF/service')
        schema_pack.setup_schema_pack('latest', args.schema_directory, args.ext_http_proxy, args.ext_https_proxy)

    try:
        currentService = traverse.rfService(vars(args))
    except Exception as ex:
        # exc_info=True (not the magic 1) is the documented logging idiom
        my_logger.verbose1('Exception caught while creating Service', exc_info=True)
        my_logger.error("Redfish Service Error: Service could not be started: {}".format(repr(ex)))
        my_logger.error("Try running the Redfish Protocol Validator to ensure the service meets basic protocol conformance")
        return 1, None, 'Service Exception'

    # derive a description from the service root if none was supplied
    if args.description is None and currentService.service_root:
        my_version = currentService.service_root.get('RedfishVersion', 'No Given Version')
        my_name = currentService.service_root.get('Name', '')
        my_uuid = currentService.service_root.get('UUID', 'No Given UUID')
        setattr(args, 'description', 'System Under Test - {} version {}, {}'.format(my_name, my_version, my_uuid))

    my_logger.info('Description of service: {}'.format(args.description))

    # Start main
    status_code = 1
    json_data = None

    if args.payload:
        pmode, ppath = args.payload
    else:
        pmode, ppath = 'Default', ''
    pmode = pmode.lower()

    if pmode not in ['tree', 'single', 'singlefile', 'treefile', 'default']:
        pmode = 'Default'
        my_logger.error('Configuration Error: PayloadMode or path invalid, using Default behavior')
    if 'file' in pmode:
        if ppath is not None and os.path.isfile(ppath):
            # context manager closes the file; the old explicit close() was redundant
            with open(ppath) as f:
                json_data = json.load(f)
        else:
            my_logger.error('Configuration Error: File not found for payload: {}'.format(ppath))
            return 1, None, 'File not found for payload: {}'.format(ppath)
    try:
        if 'single' in pmode:
            my_logger.push_uri(ppath)
            success, my_result, reference_only_links, top_object = validateSingleURI(currentService, ppath, expectedJson=json_data)
            results = {'Target': my_result}
            my_logger.pop_uri()
        elif 'tree' in pmode:
            success, results, reference_only_links, top_object = validateURITree(currentService, ppath, 'Target', expectedJson=json_data)
        else:
            success, results, reference_only_links, top_object = validateURITree(currentService, '/redfish/v1/', 'ServiceRoot', expectedJson=json_data)
    except traverse.AuthenticationError as e:
        # log authentication error and terminate program
        my_logger.error('Authentication Error: {}'.format(e))
        return 1, None, 'Failed to authenticate with the service'

    currentService.close()

    # get final counts
    metadata = currentService.metadata
    my_logger.verbose1('\nMetadata: Namespaces referenced in service: {}'.format(metadata.get_service_namespaces()))
    my_logger.info('Metadata: Namespaces missing from $metadata: {}'.format(metadata.get_missing_namespaces()))

    if len(metadata.get_missing_namespaces()) > 0:
        my_logger.error('Metadata Error: Metadata is missing Namespaces that are referenced by the service.')

    nowTick = datetime.now()
    my_logger.info('\nElapsed time: {}'.format(str(nowTick-start_tick).rsplit('.', 1)[0]))

    final_counts = Counter()

    my_logger.info('\nListing any warnings and errors: ')

    for k, my_result in results.items():

        for record in my_result['records']:
            if record.result:
                final_counts[record.result] += 1

        warns = [x for x in my_result['records'] if x.levelno == logger.Level.WARN]
        errors = [x for x in my_result['records'] if x.levelno == logger.Level.ERROR]
        if len(warns + errors):
            my_logger.info(" ")
            my_logger.info(my_result['uri'])

        if len(warns):
            my_logger.info("Warnings")
            for record in warns:
                final_counts[record.levelname.lower()] += 1
                my_logger.log(record.levelno, ", ".join([x for x in [record.msg, record.result] if x]))

        if len(errors):
            my_logger.info("Errors")
            for record in errors:
                final_counts[record.levelname.lower()] += 1
                my_logger.log(record.levelno, ", ".join([x for x in [record.msg, record.result] if x]))

    final_counts.update({x: k for x, k in metadata.get_counter().items() if k > 0})

    html_str = tohtml.renderHtml(results, tool_version, start_tick, nowTick, currentService)

    lastResultsPage = datetime.strftime(start_tick, os.path.join(logpath, "ConformanceHtmlLog_%m_%d_%Y_%H%M%S.html"))

    tohtml.writeHtml(html_str, lastResultsPage)

    my_logger.info("\nResults Summary:")
    my_logger.info(", ".join([
        'Pass: {}'.format(final_counts['pass']),
        'Fail: {}'.format(final_counts['error']),
        'Warning: {}'.format(final_counts['warning']),
    ]))

    # dump cache info to debug log
    my_logger.debug('getSchemaDetails() -> {}'.format(getSchemaDetails.cache_info()))
    my_logger.debug('callResourceURI() -> {}'.format(currentService.cache_order))

    success = final_counts['error'] == 0

    if not success:
        my_logger.error("Validation has failed: {} problems found".format(final_counts['error']))
    else:
        my_logger.info("Validation has succeeded.")
        status_code = 0

    return status_code, lastResultsPage, 'Validation done'


def main():
    """
    Entry point for the program.

    Returns:
        int: 0 on validation success, 1 otherwise (used as the process exit code).
    """
    status_code, _, _ = validate()
    return status_code


if __name__ == '__main__':
    try:
        sys.exit(main())
    except Exception as e:
        logger.my_logger.exception("Program finished prematurely: %s", e)
        raise
config 37 | self.logger = my_logger 38 | 39 | self.cache, self.cache_order = {}, [] 40 | 41 | self.config['configuri'] = self.config['ip'] 42 | self.config['metadatafilepath'] = self.config['schema_directory'] 43 | self.config['usessl'] = urlparse(self.config['configuri']).scheme in ['https'] 44 | self.config['certificatecheck'] = False 45 | self.config['certificatebundle'] = None 46 | 47 | # NOTE: this is a validator limitation. maybe move this to its own config 48 | if self.config['collectionlimit']: 49 | total_len = len(self.config['collectionlimit']) / 2 50 | limit_string = ' '.join(self.config['collectionlimit']) 51 | limit_array = [tuple(found_item.split(' ')) for found_item in re.findall(r"[A-Za-z]+ [0-9]+", limit_string)] 52 | if len(limit_array) != total_len: 53 | raise ValueError('Collection Limit array seems malformed, use format: RESOURCE1 COUNT1 RESOURCE2 COUNT2)...') 54 | self.config['collectionlimit'] = {x[0]: int(x[1]) for x in limit_array} 55 | 56 | # Log into the service 57 | if not self.config['usessl'] and not self.config['forceauth']: 58 | if self.config['username'] not in ['', None] or self.config['password'] not in ['', None]: 59 | my_logger.warning('Authentication Credentials Warning: Attempting to authenticate on unchecked http/https protocol is insecure, if necessary please use ForceAuth option. 
Clearing auth credentials...') 60 | self.config['username'] = '' 61 | self.config['password'] = '' 62 | rhost, user, passwd = self.config['configuri'], self.config['username'], self.config['password'] 63 | 64 | proxies = None 65 | if self.config['serv_http_proxy'] != '' or self.config['serv_https_proxy'] != '': 66 | proxies = {} 67 | if self.config['serv_http_proxy'] != '': 68 | proxies['http'] = self.config['serv_http_proxy'] 69 | if self.config['serv_https_proxy'] != '': 70 | proxies['https'] = self.config['serv_https_proxy'] 71 | 72 | self.ext_proxies = None 73 | if self.config['ext_http_proxy'] != '' or self.config['ext_https_proxy'] != '': 74 | self.ext_proxies = {} 75 | if self.config['ext_http_proxy'] != '': 76 | self.ext_proxies['http'] = self.config['ext_http_proxy'] 77 | if self.config['ext_https_proxy'] != '': 78 | self.ext_proxies['https'] = self.config['ext_https_proxy'] 79 | 80 | self.context = rf.redfish_client(base_url=rhost, username=user, password=passwd, timeout=self.config['requesttimeout'], max_retry=self.config['requestattempts'], proxies=proxies) 81 | self.context.login(auth=self.config['authtype'].lower()) 82 | 83 | # Go through $metadata and download any additional schema files needed 84 | success, data, response, delay = self.callResourceURI(Metadata.metadata_uri) 85 | if success and data is not None and response.status in range(200, 210): 86 | self.metadata = Metadata(data, self) 87 | self.metadata.elapsed_secs = delay 88 | else: 89 | self.metadata = Metadata(None, self) 90 | 91 | # Build the data model based on cached schema files 92 | self.catalog = catalog.SchemaCatalog(self.config['metadatafilepath']) 93 | 94 | target_version = 'n/a' 95 | 96 | # get Version 97 | success, data, response, delay = self.callResourceURI('/redfish/v1') 98 | if not success: 99 | my_logger.warning('Service Warning: Could not get ServiceRoot') 100 | else: 101 | if 'RedfishVersion' not in data: 102 | my_logger.warning('Service Warning: Could not get 
RedfishVersion from ServiceRoot') 103 | else: 104 | my_logger.info('Redfish Version of Service: {}'.format(data['RedfishVersion'])) 105 | target_version = data['RedfishVersion'] 106 | if target_version in ['1.0.0', 'n/a']: 107 | my_logger.warning('Service Version Warning: !!Version of target may produce issues!!') 108 | if splitVersionString(target_version) < splitVersionString('1.6.0') and not self.config['uricheck']: 109 | my_logger.warning('Service Version Warning: RedfishVersion below 1.6.0, disabling uri checks') 110 | self.catalog.flags['ignore_uri_checks'] = True 111 | else: 112 | self.catalog.flags['ignore_uri_checks'] = False 113 | self.config['uricheck'] = True 114 | 115 | self.service_root = data 116 | 117 | self.active = True 118 | 119 | def close(self): 120 | self.active = False 121 | 122 | def callResourceURI(self, link_uri): 123 | """ 124 | Makes a call to a given URI or URL 125 | 126 | param arg1: path to URI "/example/1", or URL "http://example.com" 127 | return: (success boolean, data, request status code) 128 | """ 129 | # rs-assertions: 6.4.1, including accept, content-type and odata-versions 130 | # rs-assertion: handle redirects? 
and target permissions 131 | # rs-assertion: require no auth for serviceroot calls 132 | # TODO: Written with "success" values, should replace with Exception and catches 133 | if link_uri is None: 134 | my_logger.warning("URI Request Warning: Supplied URI is empty!") 135 | return False, None, None, 0 136 | 137 | config = self.config 138 | # proxies = self.proxies 139 | ConfigIP, UseSSL, AuthType, ChkCert, ChkCertBundle, timeout, Token = config['configuri'], config['usessl'], config['authtype'], \ 140 | config['certificatecheck'], config['certificatebundle'], config['requesttimeout'], config['token'] 141 | 142 | scheme, netloc, path, params, query, fragment = urlparse(link_uri) 143 | inService = scheme == '' and netloc == '' 144 | if inService: 145 | my_destination = urlunparse((scheme, netloc, path, '', '', '')) # URILink 146 | else: 147 | my_destination = urlunparse((scheme, netloc, path, params, query, fragment)) 148 | 149 | payload, statusCode, elapsed, auth, noauthchk = None, '', 0, None, True 150 | 151 | isXML = False 152 | if "$metadata" in path or ".xml" in path[:-5]: 153 | isXML = True 154 | my_logger.debug('Should be XML') 155 | 156 | # determine if we need to Auth... 
157 | if inService: 158 | noauthchk = link_uri in ['/redfish', '/redfish/v1', '/redfish/v1/odata'] or\ 159 | '/redfish/v1/$metadata' in link_uri 160 | 161 | auth = None if noauthchk else (config.get('username'), config.get('password')) 162 | my_logger.debug('dont chkauth' if noauthchk else 'chkauth') 163 | 164 | # rs-assertion: do not send auth over http 165 | # remove UseSSL if necessary if you require unsecure auth 166 | if (not UseSSL and not config['forceauth']) or not inService or AuthType != 'Basic': 167 | auth = None 168 | 169 | # only send token when we're required to chkauth, during a Session, and on Service and Secure 170 | headers = {"Accept-Encoding": "*"} 171 | 172 | certVal = ChkCertBundle if ChkCert and ChkCertBundle not in [None, ""] else ChkCert 173 | 174 | # rs-assertion: must have application/json or application/xml 175 | my_logger.debug('callingResourceURI {}with authtype {} and ssl {}: {} {}'.format( 176 | 'out of service ' if not inService else '', AuthType, UseSSL, link_uri, headers)) 177 | response = None 178 | try: 179 | startTick = datetime.now() 180 | if my_destination not in self.cache: 181 | mockup_file_path = os.path.join(config['mockup'], my_destination.replace('/redfish/v1/', '', 1).strip('/'), 'index.json') 182 | if not inService: 183 | req = requests.get(my_destination, proxies=self.ext_proxies, verify=False) 184 | content = req.json if not isXML else req.text 185 | response = rf.rest.v1.StaticRestResponse(Status=req.status_code, Headers={x:req.headers[x] for x in req.headers}, Content=req.text) 186 | elif config['mockup'] != '' and os.path.isfile(mockup_file_path): 187 | content = {} 188 | with open(mockup_file_path) as mockup_file: 189 | content = json.load(mockup_file) 190 | response = rf.rest.v1.StaticRestResponse(Status=200, Headers={'Content-Type': 'application/json', 'X-Redfish-Mockup': 'true'}, Content=content) 191 | else: 192 | response = self.context.get(my_destination, headers=headers) 193 | self.cache[my_destination] = 
response 194 | self.cache_order.append(my_destination) 195 | if len(self.cache) > CACHE_SIZE: 196 | del self.cache[self.cache_order.pop(0)] 197 | else: 198 | my_logger.debug("CACHE HIT {} {}".format(my_destination, link_uri)) 199 | 200 | response = self.cache[my_destination] 201 | 202 | elapsed = datetime.now() - startTick 203 | statusCode = response.status 204 | 205 | my_logger.debug('{}, {},\nTIME ELAPSED: {}'.format(statusCode, response.getheaders(), elapsed)) 206 | if statusCode in [200]: 207 | contenttype = response.getheader('content-type') 208 | if contenttype is None: 209 | my_logger.error("Missing ContentType Error: Content-type not found in header: {}".format(link_uri)) 210 | contenttype = '' 211 | if 'application/json' in contenttype: 212 | my_logger.debug("This is a JSON response") 213 | decoded = response.dict 214 | 215 | # navigate fragment 216 | decoded = navigateJsonFragment(decoded, link_uri) 217 | if decoded is None: 218 | my_logger.error("JSON Pointer Error: The JSON pointer in the fragment of this URI is not constructed properly: {}".format(link_uri)) 219 | elif 'application/xml' in contenttype: 220 | decoded = response.text 221 | elif 'text/xml' in contenttype: 222 | # non-service schemas can use "text/xml" Content-Type 223 | if inService: 224 | my_logger.warning("Response ContentType Warning: Incorrect content type 'text/xml' for file within service {}".format(link_uri)) 225 | decoded = response.text 226 | else: 227 | my_logger.error("Redfish Response Error: This URI did NOT return XML or Json contenttype, is this not a Redfish resource (is this redirected?): {}".format(link_uri)) 228 | decoded = None 229 | if isXML: 230 | my_logger.info('Attempting to interpret as XML') 231 | decoded = response.text 232 | else: 233 | try: 234 | json.loads(response.text) 235 | my_logger.info('Attempting to interpret as JSON') 236 | decoded = response.dict 237 | except ValueError: 238 | pass 239 | 240 | return decoded is not None, decoded, response, elapsed 241 
| elif statusCode == 401: 242 | if inService and AuthType in ['Basic', 'Token']: 243 | if AuthType == 'Token': 244 | cred_type = 'token' 245 | else: 246 | cred_type = 'username and password' 247 | raise AuthenticationError('Error accessing URI {}. Status code "{} {}". Check {} supplied for "{}" authentication.' 248 | .format(link_uri, statusCode, responses[statusCode], cred_type, AuthType)) 249 | 250 | except AuthenticationError as e: 251 | raise e # re-raise exception 252 | except Exception as e: 253 | my_logger.warning("A problem when getting resource {} has occurred: {}".format(link_uri, repr(e))) 254 | my_logger.debug("output: ", exc_info=True) 255 | if response and response.text: 256 | my_logger.debug("payload: {}".format(response.text)) 257 | 258 | if payload is not None: 259 | return True, payload, response, 0 260 | return False, None, response, elapsed 261 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | ## [2.5.1] - 2025-07-15 4 | - Cleaned up error and warning reporting 5 | - Added test results summary to the top of the results file 6 | - Added support for validating 'ActionInfo' for OEM actions 7 | 8 | ## [2.5.0] - 2025-05-16 9 | - Corrected regex pattern for date-time properties 10 | 11 | ## [2.4.9] - 2024-09-27 12 | - Added 'requesttimeout' and 'requestattempts' arguments 13 | 14 | ## [2.4.8] - 2024-07-26 15 | - Fixed link verification to ensure the linked resource is of the expected data type 16 | - Fixed usage of collection limiting to apply limits for all aspects of validation 17 | - Fixed URI cache handling for nested JSON objects that contain URI fragments 18 | 19 | ## [2.4.7] - 2024-06-28 20 | - Corrected handling of 'collectionlimit' when specified in a config file 21 | 22 | ## [2.4.6] - 2024-06-14 23 | - Hide password and token parameters from console output when running the tool from a 
config file 24 | 25 | ## [2.4.5] - 2024-05-10 26 | - Updated action testing to allow for '@Redfish.AllowablePattern' and '@Redfish.AllowableNumbers' annotations on action parameters 27 | 28 | ## [2.4.4] - 2024-05-03 29 | - Added test to ensure write-only properties are null in responses 30 | 31 | ## [2.4.3] - 2024-04-12 32 | - Reduced severity of missing schema for an action from error to warning to be consistent with other warnings 33 | 34 | ## [2.4.2] - 2024-03-22 35 | - Made corrections to skipping OEM checks when '--nooemcheck' is specified 36 | 37 | ## [2.4.1] - 2024-02-09 38 | - Added 'collectionlimit' argument to limit the number of entries checked in a collection 39 | 40 | ## [2.4.0] - 2024-01-26 41 | - Added missing break out of object schema mapping if discovered property is not an object 42 | 43 | ## [2.3.9] - 2024-01-19 44 | - Minor changes to fix Python 3.12 warnings with usage of raw strings 45 | - Added warning and failure totals 46 | 47 | ## [2.3.8] - 2023-11-03 48 | - Added breakout condition when tracing a definition's type tree to prevent hangs due to invalid schemas with type loops 49 | 50 | ## [2.3.7] - 2023-10-06 51 | - Added exception handling when traversing links if the schema definition for the link is invalid 52 | 53 | ## [2.3.6] - 2023-09-22 54 | - Updated regex used to determine if a property is an array to not collide with the collection capabilities term 55 | 56 | ## [2.3.5] - 2023-09-08 57 | - Made change to allow for action names to be annotated 58 | 59 | ## [2.3.4] - 2023-08-11 60 | - Corrected array testing to show errors when array properties are not represented as arrays and vice versa 61 | 62 | ## [2.3.3] - 2023-08-04 63 | - Added check to ensure the '@odata.id' value is a string before trying to follow the link 64 | 65 | ## [2.3.2] - 2023-07-14 66 | - Corrected the usage of return values for script entry points 67 | 68 | ## [2.3.1] - 2023-04-21 69 | - Corrected object mapping logic to handle cases where a common base 
definition is used for multiple object definitions 70 | 71 | ## [2.3.0] - 2023-03-17 72 | - Corrected the regex pattern for duration properties to not allow for negative durations 73 | - Added validation of action name in responses against the action definitions in schema 74 | 75 | ## [2.2.9] - 2023-03-10 76 | - Added handling of cases where a payload annotation's definition cannot be located in schema files 77 | 78 | ## [2.2.8] - 2023-03-03 79 | - Added deprecated version info to results 80 | 81 | ## [2.2.7] - 2023-02-16 82 | - Corrected URI checking for resources referenced by payload annotations 83 | 84 | ## [2.2.6] - 2023-02-14 85 | - Additional fixes to 'Id' property validation with the URI of the resource 86 | 87 | ## [2.2.5] - 2023-02-13 88 | - Made fixes to 'Id' property validation with the URI of the resource 89 | 90 | ## [2.2.4] - 2023-02-05 91 | - Added validation of the last segment of the URI to ensure it matches the Id property or MemberId property where appropriate 92 | 93 | ## [2.2.3] - 2023-01-27 94 | - Added checking for the presence of '@odata.id' at the root of resources and referenceable members 95 | - Corrected return code from the script to show failures 96 | 97 | ## [2.2.2] - 2022-11-07 98 | - Removed certificate checks when downloading schema files referenced by the service 99 | 100 | ## [2.2.1] - 2022-08-12 101 | - Corrected link traversal logic when objects or arrays of objects are 'null' 102 | 103 | ## [2.2.0] - 2022-08-05 104 | - Removed conditional import statements 105 | 106 | ## [2.1.9] - 2022-07-28 107 | - Fixed import statement in schema pack module 108 | 109 | ## [2.1.8] - 2022-07-22 110 | - Modified project for PyPI publication 111 | 112 | ## [2.1.8] - 2022-07-22 113 | - Modified project for PyPI publication 114 | 115 | ## [2.1.8] - 2022-07-22 116 | - Modified project for PyPI publication 117 | 118 | ## [2.1.8] - 2022-07-22 119 | - Modified project for PyPI publication 120 | 121 | ## [2.1.7] - 2022-07-15 122 | - Added check to on 
URIs for validation for external reference to ensure it does not go through the Redfish library 123 | - Added scrollbars to the 'Edit Config' window 124 | - Added check to ensure an action is shown in the response as an object before trying to discover if it has action info 125 | 126 | ## [2.1.6] - 2022-05-19 127 | - Added shebang statements at the beginning of the tool in order to execute it directly 128 | - Added support for verifying the data type of '@odata.count' annotations 129 | - Minor enhancements to debug statements 130 | 131 | ## [2.1.5] - 2022-04-08 132 | - Added check to ensure array properties are returned as arrays before iterating over their values 133 | - Added context-driven pattern checking for 'DurableName' 134 | - Added proxy arguments for controlling proxy usage with external sites and the service 135 | 136 | ## [2.1.4] - 2022-04-01 137 | - Corrected parent reference to allow for '@odata.id' testing on referenceable members 138 | - Extended argument usage to allow for argument forms used in other tools, such as 'rhost' instead of 'ip' 139 | 140 | ## [2.1.3] - 2022-03-25 141 | - Corrected order of operations for initializing the tool to collect additional schema files from $metadata before building the data model for payload validation 142 | - Suppress warning messages for deprecated properties when the property is also marked as required 143 | - Added support for Unix sockets 144 | 145 | ## [2.1.2] - 2022-03-21 146 | - Added '--mockup' option to override payloads from a service with a mockup in a directory 147 | - Added support for ensuring properties that are JSON objects that are marked as 'required' are present in payloads 148 | 149 | ## [2.1.1] - 2022-03-04 150 | - Added check to ensure a reference object is an object prior to attempting to extract the link information 151 | 152 | ## [2.1.0] - 2022-02-05 153 | - Added 'Accept-Encoding' header to allow for any encoding in responses 154 | - Corrected '@odata.id' inspection to not crash the 
tool if the string is empty 155 | 156 | ## [2.0.9] - 2022-01-14 157 | - Removed statements that prevented building models for objects contained within OEM extensions 158 | - Some configuration fixes for handling '--nooemcheck' 159 | 160 | ## [2.0.8] - 2022-01-10 161 | - Several enhancements and fixes to CSV handling 162 | 163 | ## [2.0.7] - 2021-12-01 164 | - Corrected validation of null objects 165 | - Added checks to ensure URIs match patterns defined in schema 166 | - Added checks to ensure the 'Allow' HTTP header does not go beyond the capabilities definitions in schema 167 | - Added validation of registries if discovered via the 'Uri' property in 'MessageRegistryFile' resources 168 | 169 | ## [2.0.6] - 2021-10-27 170 | - Corrected use of iterators to be compatible with versions of Python older than 3.8 171 | 172 | ## [2.0.5] - 2021-10-15 173 | - Corrected namespace referencing for objects nested in objects 174 | 175 | ## [2.0.4] - 2021-10-04 176 | - Updated schema pack link to point to the latest DSP8010 bundle 177 | 178 | ## [2.0.3] - 2021-09-07 179 | - Refactored code to make schema parsing and structure building for resource definitions to be modular 180 | 181 | ## [2.0.2] - 2021-08-30 182 | - Corrected usage of the 'oemcheck' flag to not skip over OEM object validation when enabled 183 | 184 | ## [2.0.1] - 2021-08-09 185 | - Various fixes from previous changes to refactor the arguments with the tool 186 | 187 | ## [2.0.0] - 2021-08-06 188 | - Significant changes to the CLI arguments with the tool to reduce complexity for users 189 | - Added support for validating excerpts 190 | 191 | ## [1.4.1] - 2021-06-18 192 | - Modified calls to requests package to reuse HTTP sessions for better performance 193 | 194 | ## [1.4.0] - 2021-04-16 195 | - Fixed 'is' and 'is not' comparisions that are not allowed in Python3.8+ 196 | 197 | ## [1.3.9] - 2020-09-16 198 | - Several fixes in handling of detection of a proper version of a JSON object within a resource 199 | 200 | 
## [1.3.8] - 2020-07-06 201 | - Added exception in link validation for `Resource.Resource` to allow for any type of resource to be found 202 | 203 | ## [1.3.7] - 2020-06-13 204 | - Additional fixes to handling of version detection of resources 205 | 206 | ## [1.3.6] - 2020-05-15 207 | - Corrected handling of version detection of resources 208 | 209 | ## [1.3.5] - 2020-03-21 210 | - Added more descriptive text to `@odata.type` format errors 211 | - Downgraded `@odata.context` format errors to warnings 212 | 213 | ## [1.3.4] - 2019-11-08 214 | - Fixed handling of null objects in arrays of objects 215 | 216 | ## [1.3.3] - 2019-11-01 217 | - Additional fixes for handling schema version checking 218 | 219 | ## [1.3.2] - 2019-10-18 220 | - Clarified error message when a JSON pointer in an `@odata.id` property is invalid 221 | - Fixed some handling of properties than cannot be resolved in order to have better error messages 222 | - Enhanced schema version checking to allow for double digits 223 | 224 | ## [1.3.1] - 2019-08-09 225 | - Added special handling with `OriginOfCondition` to allow for the Resource to not exist 226 | 227 | ## [1.3.0] - 2019-07-19 228 | - Downgraded messages related to not finding `@odata.type` within nested objects of a resource 229 | - Fixed parent validation for registry resources 230 | 231 | ## [1.2.9] - 2019-06-28 232 | - Added special handling with `EventDestination` to allow for `HttpHeaders` to be null per description in the schema 233 | - Made change to make `@odata.context` optional in responses 234 | 235 | ## [1.2.8] - 2019-05-31 236 | - Updated schema bundle reference to 2019.1 237 | - Improved error messages for GET failures 238 | - Removed warnings for @odata.etag properties 239 | - Removed deprecated StopIteration exception 240 | 241 | ## [1.2.7] - 2019-04-26 242 | - Added enhancement to verify `@odata.id` is present when following a navigation property 243 | 244 | ## [1.2.6] - 2019-04-11 245 | - Added missing @odata.context 
initialization for Message Registries 246 | - Fix to counter for reference links ending in trailing slash 247 | 248 | ## [1.2.5] - 2019-02-01 249 | - Updated schema bundle reference to 2018.3 250 | - Fixed handling of Edm.Duration 251 | - Fixed handling of Redfish.Revision term 252 | 253 | ## [1.2.4] - 2018-11-09 254 | - Fixed check for empty strings to only report warnings if the property is writable 255 | - Added JSON output to expandable tag in the HTML report 256 | - Cleanup of the summary section of the HTML report 257 | 258 | ## [1.2.3] - 2018-10-19 259 | - Fixed regex usage when verifying URIs 260 | 261 | ## [1.2.2] - 2018-10-11 262 | - Added automatic file caching of schemca pulled from the DMTF website and the Service 263 | - Added proper error message for navigating links to Entities with incorrect types 264 | - Added logic to verify that an @odata.id property with a JSON fragment resolves properly 265 | - Updated current schema pack zip to 2018.2 266 | - Fixed missing default option for usessl 267 | 268 | ## [1.2.1] - 2018-10-04 269 | - Made fix to send traceback to debug logging only, not to HTML report 270 | 271 | ## [1.2.0] - 2018-09-21 272 | - Added option to enable/disable protocol version checking 273 | - Various cleanup to error messages 274 | 275 | ## [1.1.9] - 2018-09-14 276 | - Added fixes to OEM checks 277 | - Added support for URI checking as an option with the tool 278 | 279 | ## [1.1.8] - 2018-09-07 280 | - Added additional sanity checking for managing cases where a type cannot be found 281 | 282 | ## [1.1.7] - 2018-08-31 283 | - Added support for following `@odata.id` reference for auto expanded resources 284 | - Added handling for trying to resolve the proper schema file if it's not found 285 | - Added support for following `@odata.nextLink` in collections 286 | - Added handling for resolving the proper ComplexType version based on the reported `@odata.type` value for the a resource 287 | - Added case insensitive checking on invalid 
properties for giving hints in error messages 288 | - Added warnings for empty strings in payloads if the property is read only 289 | - Added hints in error messages for unknown properties 290 | - Added hint in the error message for enum values if the service returns the string "null" rather than the JSON value null 291 | 292 | ## [1.1.6] - 2018-08-17 293 | - Fixed several cases where exception tracebacks were being printed in the output 294 | 295 | ## [1.1.5] - 2018-08-03 296 | - Added missing start session 297 | - Added exceptions for bad credentials 298 | - Modified the report output to improve readability 299 | - Refactor areas of code to enable automated unit testing 300 | 301 | ## [1.1.4] - 2018-07-06 302 | - Additional fixes to OEM object handling within Actions 303 | 304 | ## [1.1.3] - 2018-06-29 305 | - Fixed annotations being treated as unknown properties 306 | - Fixed handling of dynamic properties patterns that was introduced as part of the OEM object validation 307 | 308 | ## [1.1.2] - 2018-06-22 309 | - Added support for verifying OEM objects in responses 310 | 311 | ## [1.1.1] - 2018-06-01 312 | - Added option to force authentication if using an unsecure connection 313 | - Added error checking for @Redfish.Copyright in payloads 314 | 315 | ## [1.1.0] - 2018-05-11 316 | - Allow for text/xml in schema responses from external sites 317 | - Added console output when running the test via the GUI 318 | - Added Schema Pack option 319 | - Downgraded several messages from Error to Warning 320 | 321 | ## [1.0.9] - 2018-05-04 322 | - Corrected problem when reading metadata from local cache 323 | - Made changes to clean the standard output 324 | 325 | ## [1.0.8] - 2018-04-27 326 | - Enhanced $metadata validation to check if a referenced namespace exists in the referenced schema file 327 | - Enhanced handling of properties found in payloads that are not defined in the schema file 328 | - Added new configuration options to the GUI to make it easier to save/load 
other configuration files 329 | 330 | ## [1.0.7] - 2018-04-20 331 | - Enhanced authentication error handling for basic and session authentication 332 | - Changed term "collection" in the report to say "array" 333 | - Added method for running the tool via a GUI 334 | - Fixed the Action object validation to allow for the "title" property 335 | - Added support for allowing dynamic properties with @Redfish, @Message, and @odata terms 336 | 337 | ## [1.0.6] - 2018-04-13 338 | - Enhanced validation of Action objects; allow for annotations and Action Info resources, and require the target property 339 | - Added $metadata validation report 340 | - Fixed handling of the Location header when creating a Session to allow for both absolute and relative URIs 341 | 342 | ## [1.0.5] - 2018-03-09 343 | - Changed deprecated property reporting from error to warning 344 | 345 | ## [1.0.4] - 2018-03-02 346 | - Enhanced URI handling in MessageRegistryFile validation 347 | 348 | ## [1.0.3] - 2018-02-15 349 | - Improved display of array members in the HTML report 350 | - Added text in the report to point to other payload reports when testing Referenceable Members 351 | 352 | ## [1.0.2] - 2018-02-09 353 | - Made fixes to proxy support 354 | - Added better handling for when incorrect namespaces are referenced 355 | - Improvements to error messages 356 | - Fixed handling of resolving external ComplexType definitions 357 | - Added argument to control debug output 358 | 359 | ## [1.0.1] - 2018-02-02 360 | - Fixed the display of null types in the report 361 | - Fixed the display of data types found in registries 362 | - Added validation of primitive types 363 | 364 | ## [1.0.0] - 2018-01-26 365 | - Various bug fixes; getting into standard release cadence 366 | 367 | ## [0.9.0] - 2016-09-06 368 | - Initial Public Release 369 | -------------------------------------------------------------------------------- /redfish_service_validator/tohtml.py: 
-------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 4 | 5 | from types import SimpleNamespace 6 | from collections import Counter 7 | import json 8 | import re 9 | from redfish_service_validator.helper import getType 10 | import redfish_service_validator.RedfishLogo as logo 11 | from redfish_service_validator.logger import LOG_ENTRY, Level 12 | 13 | if __name__ == '__main__': 14 | import argparse 15 | from bs4 import BeautifulSoup 16 | import os 17 | import csv 18 | 19 | 20 | # hack in tagnames into module namespace 21 | tag = SimpleNamespace(**{tagName: lambda string, attr=None, tag=tagName: wrapTag(string, tag=tag, attr=attr)\ 22 | for tagName in ['tr', 'td', 'th', 'div', 'b', 'table', 'body', 'head', 'summary']}) 23 | 24 | 25 | def wrapTag(string, tag='div', attr=None): 26 | string = str(string) 27 | ltag, rtag = '<{}>'.format(tag), ''.format(tag) 28 | if attr is not None: 29 | ltag = '<{} {}>'.format(tag, attr) 30 | return ltag + string + rtag 31 | 32 | 33 | def infoBlock(strings, split='
', ffunc=None, sort=True): 34 | if isinstance(strings, dict): 35 | infos = [tag.b('{}: '.format(y)) + str(x) for y, x in (sorted(strings.items()) if sort else strings.items())] 36 | else: 37 | infos = strings 38 | return split.join([ffunc(*x) for x in enumerate(infos)] if ffunc is not None else infos) 39 | 40 | 41 | def tableBlock(lines, titles, widths=None, ffunc=None): 42 | widths = widths if widths is not None else [100 for x in range(len(titles))] 43 | attrlist = ['style="width:{}%"'.format(str(x)) for x in widths] 44 | tableHeader = tag.tr(''.join([tag.th(x, y) for x, y in zip(titles,attrlist)])) 45 | for line in lines: 46 | tableHeader += tag.tr(''.join([ffunc(cnt, x) if ffunc is not None else tag.td(x) for cnt, x in enumerate(line)])) 47 | return tag.table(tableHeader) 48 | 49 | 50 | def applySuccessColor(num, entry): 51 | if num < 4: 52 | return wrapTag(entry, 'td') 53 | success_col = str(entry) 54 | if any(x.upper() in str(success_col).upper() for x in ['FAIL', 'errorExcerpt']): 55 | entry = '' + str(success_col) + '' 56 | elif str(success_col).upper() in ['DEPRECATED', 'INVALID', 'WARN']: 57 | entry = '' + str(success_col) + '' 58 | elif any(x in str(success_col).upper() for x in ['DEPRECATED', 'INVALID', 'WARN']): 59 | entry = '' + str(success_col) + '' 60 | elif 'PASS' in str(success_col).upper(): 61 | entry = '' + str(success_col) + '' 62 | else: 63 | entry = '' + str(success_col) + '' 64 | return entry 65 | 66 | 67 | def applyInfoSuccessColor(num, entry): 68 | if any(x in entry for x in ['fail', 'exception', 'error', 'problem', 'err']): 69 | style = 'class="fail"' 70 | elif 'warn' in entry: 71 | style = 'class="warn"' 72 | else: 73 | style = None 74 | return tag.div(entry, attr=style) 75 | 76 | 77 | def renderHtml(results, tool_version, startTick, nowTick, service): 78 | # Render html 79 | config = service.config 80 | sysDescription, ConfigURI = (config['description'], config['ip']) 81 | 82 | # wrap html 83 | htmlPage = '' 84 | htmlStrTop = 
'Conformance Test Summary\ 85 | \ 109 | ' 110 | htmlStrBodyHeader = '' 111 | # Logo and logname 112 | infos = [wrapTag('##### Redfish Conformance Test Report #####', 'h2')] 113 | infos.append(wrapTag('DMTF Redfish Logo', 'h4')) 115 | infos.append('

' 116 | 'https://github.com/DMTF/Redfish-Service-Validator

') 117 | infos.append('Tool Version: {}'.format(tool_version)) 118 | infos.append(startTick.strftime('%c')) 119 | infos.append('(Run time: {})'.format(str(nowTick - startTick).rsplit('.', 1)[0])) 120 | infos.append('

This tool is provided and maintained by the DMTF. ' 121 | 'For feedback, please open issues
in the tool\'s Github repository: ' 122 | '' 123 | 'https://github.com/DMTF/Redfish-Service-Validator/issues

') 124 | 125 | htmlStrBodyHeader += tag.tr(tag.th(infoBlock(infos))) 126 | 127 | htmlStrBodyHeader += tag.tr(tag.th('Test Summary', 'class="bluebg titlerow"')) 128 | infos = {'System': ConfigURI, 'Description': sysDescription} 129 | infos['Target'] = ", ".join(service.config['payload']) if service.config['payload'] else 'Complete System Test' 130 | htmlStrBodyHeader += tag.tr(tag.th(infoBlock(infos, sort=False))) 131 | 132 | summary = Counter() 133 | 134 | for k, my_result in results.items(): 135 | for record in my_result['records']: 136 | if record.levelname.lower() in ['error', 'warning']: 137 | summary[record.levelname.lower()] += 1 138 | if record.result: 139 | summary[record.result] += 1 140 | 141 | important_block = tag.div('Results Summary') 142 | important_block += tag.div(", ".join([ 143 | 'Pass: {}'.format(summary['pass']), 144 | 'Fail: {}'.format(summary['error']), 145 | 'Warning: {}'.format(summary['warning']) 146 | ])) 147 | htmlStrBodyHeader += tag.tr(tag.td(important_block, 'class="center"')) 148 | 149 | infos = {x: config[x] for x in config if x not in ['systeminfo', 'ip', 'password', 'description']} 150 | infos_left, infos_right = dict(), dict() 151 | for key in sorted(infos.keys()): 152 | if len(infos_left) <= len(infos_right): 153 | infos_left[key] = infos[key] 154 | else: 155 | infos_right[key] = infos[key] 156 | 157 | htmlButtons = '
Expand All
' 158 | htmlButtons += '
Collapse All
' 159 | htmlButtons += tag.div('Show Configuration', attr='class="button pass" onClick="document.getElementById(\'resNumConfig\').classList.toggle(\'resultsShow\');"') 160 | 161 | htmlStrBodyHeader += tag.tr(tag.th('Full Test Report', 'class="titlerow bluebg"')) 162 | htmlStrBodyHeader += tag.tr(tag.th(htmlButtons)) 163 | 164 | block = tag.td(tag.div(infoBlock(infos_left), 'class=\'column log\'') \ 165 | + tag.div(infoBlock(infos_right), 'class=\'column log\''), 166 | 'id=\'resNumConfig\' class=\'results\'') 167 | 168 | htmlStrBodyHeader += tag.tr(block) 169 | 170 | if service.metadata is not None: 171 | htmlPage = service.metadata.to_html() 172 | 173 | for cnt, item in enumerate(results): 174 | entry = [] 175 | my_result = results[item] 176 | rtime = '(response time: {})'.format(my_result['rtime']) 177 | rcode = my_result['rcode'] 178 | if rcode == -1 or my_result['rtime'] == 0: 179 | rtime = '' 180 | payload = my_result.get('payload', {}) 181 | 182 | # uri block 183 | prop_type, type_name = my_result['fulltype'], '-' 184 | if prop_type is not None: 185 | type_name = getType(prop_type) 186 | 187 | infos = [str(my_result.get(x)) for x in ['uri', 'samplemapped'] if my_result.get(x) not in ['',None]] 188 | infos.append(rtime) 189 | infos.append(type_name) 190 | uriTag = tag.tr(tag.th(infoBlock(infos, ' '), 'class="titlerow bluebg"')) 191 | entry.append(uriTag) 192 | 193 | # info block 194 | # infos = [str(my_result.get(x)) for x in ['uri'] if my_result.get(x) not in ['',None]] 195 | # if rtime: 196 | # infos.append(rtime) 197 | infos = [] 198 | infos_buttons = tag.div('Show Results', attr='class="button warn"\ 199 | onClick="document.getElementById(\'payload{}\').classList.remove(\'resultsShow\');\ 200 | document.getElementById(\'resNum{}\').classList.toggle(\'resultsShow\');"'.format(cnt, cnt)) 201 | infos_buttons += tag.div('Show Payload', attr='class="button pass"\ 202 | onClick="document.getElementById(\'payload{}\').classList.toggle(\'resultsShow\');\ 203 | 
document.getElementById(\'resNum{}\').classList.add(\'resultsShow\');"'.format(cnt, cnt)) 204 | infos.append(infos_buttons) 205 | buttonTag = tag.td(infoBlock(infos), 'class="title" style="width:30%"') 206 | 207 | infos = [str(my_result.get(x)) for x in ['context', 'origin', 'fulltype']] 208 | infos = {y: x for x, y in zip(infos, ['Context', 'File Origin', 'Resource Type'])} 209 | infosTag = tag.td(infoBlock(infos), 'class="titlesub log" style="width:40%"') 210 | 211 | success = my_result['success'] 212 | if success: 213 | if rcode != -1: 214 | getTag = tag.td('GET Success HTTP Code ({})'.format(rcode), 'class="pass"') 215 | else: 216 | getTag = tag.td('GET Success', 'class="pass"') 217 | else: 218 | getTag = tag.td('GET Failure HTTP Code ({})'.format(rcode), 'class="fail"') 219 | 220 | my_summary = Counter() 221 | 222 | for k_e, val in my_result['messages'].items(): 223 | if val.result.lower() == 'pass': 224 | my_summary['pass'] += 1 225 | 226 | for record in my_result['records']: 227 | if record.levelname.lower() in ['error', 'warning']: 228 | my_summary[record.levelname.lower()] += 1 229 | if record.result: 230 | my_summary[record.result] += 1 231 | 232 | countsTag = tag.td(infoBlock(my_summary, split='', ffunc=applyInfoSuccessColor), 'class="log"') 233 | 234 | rhead = ''.join([buttonTag, infosTag, getTag, countsTag]) 235 | for x in [('tr',), ('table', 'class=titletable'), ('td', 'class=titlerow'), ('tr')]: 236 | rhead = wrapTag(''.join(rhead), *x) 237 | entry.append(rhead) 238 | 239 | # actual table 240 | 241 | rows = [list([str(vars(m)[x]) for x in LOG_ENTRY]) for m in my_result['messages'].values()] 242 | titles = ['Name', 'Value', 'Type', 'Exists', 'Result'] 243 | widths = ['20', '30', '25', '5', '10'] 244 | tableHeader = tableBlock(rows, titles, widths, ffunc=applySuccessColor) 245 | 246 | # lets wrap table and errors and warns into one single column table 247 | tableHeader = tag.tr(tag.td((tableHeader))) 248 | 249 | infos_a = [str(my_result.get(x)) for x 
in ['uri'] if my_result.get(x) not in ['',None]] 250 | infos_a.append(rtime) 251 | 252 | errors = [x for x in my_result['records'] if x.levelno == Level.ERROR] 253 | warns = [x for x in my_result['records'] if x.levelno == Level.WARN] 254 | 255 | # warns and errors 256 | errors = ['No errors'] if len(errors) == 0 else [x.msg for x in errors] 257 | errorTags = tag.tr(tag.td(infoBlock(errors), 'class="fail log"')) 258 | 259 | warns = ['No warns'] if len(warns) == 0 else [x.msg for x in warns] 260 | warnTags = tag.tr(tag.td(infoBlock(warns), 'class="warn log"')) 261 | 262 | payloadTag = tag.td(json.dumps(payload, indent=4, sort_keys=True), 'id=\'payload{}\' class=\'payload log\''.format(cnt)) 263 | 264 | tableHeader += errorTags 265 | tableHeader += warnTags 266 | tableHeader += payloadTag 267 | tableHeader = tag.table(tableHeader) 268 | tableHeader = tag.td(tableHeader, 'class="results" id=\'resNum{}\''.format(cnt)) 269 | 270 | entry.append(tableHeader) 271 | 272 | # append 273 | htmlPage += ''.join([tag.tr(x) for x in entry]) 274 | 275 | return wrapTag(wrapTag(htmlStrTop + wrapTag(htmlStrBodyHeader + htmlPage, 'table'), 'body'), 'html') 276 | 277 | 278 | def writeHtml(string, path): 279 | with open(path, 'w', encoding='utf-8') as f: 280 | f.write(string) 281 | 282 | 283 | def htmlLogScraper(htmlReport, output_name=None): 284 | outputLogName = os.path.split(htmlReport)[-1] if not output_name else output_name 285 | output = open('./{}.csv'.format(outputLogName),'w',newline='') 286 | csv_output = csv.writer(output) 287 | csv_output.writerow(['URI','Status','Response Time','Context','File Origin','Resource Type','Property Name','Value','Expected','Actual','Result']) 288 | htmlLog = open(htmlReport,'r') 289 | soup = BeautifulSoup(htmlLog, 'html.parser') 290 | glanceDetails = {} 291 | table = soup.find_all('table', {'class':'titletable'}) 292 | for tbl in table: 293 | tr = tbl.find('tr') 294 | URIresp = tr.find('td',{'class':'title'}) # URI, response time, show results 
button 295 | URI = URIresp.text.partition('(')[0] 296 | responseTime = URIresp.text.partition('response time')[2].split(')')[0].strip(':s') 297 | StatusGET = tr.find('td',{'class':'pass'}) or tr.find('td',{'class':'fail'}) 298 | if 'Success' in StatusGET.text: 299 | Status = '200' 300 | else: 301 | Status = '400' 302 | 303 | context,FileOrigin,ResourceType = ' ',' ',' ' 304 | if 'Context:' in tr.find_all('td')[1].text: 305 | context = tr.find_all('td')[1].text.split('Context:')[1].split('File')[0] 306 | if 'File Origin'in tr.find_all('td')[1].text: 307 | FileOrigin = tr.find_all('td')[1].text.split('File Origin:')[1].split('Resource')[0] 308 | if 'Resource Type'in tr.find_all('td')[1].text: 309 | ResourceType = tr.find_all('td')[1].text.split('Resource Type:')[1] 310 | resNumHtml = tr.find('div', {'class':'button warn'}) 311 | resNum = resNumHtml.attrs['onclick'].split(";") 312 | resNum = resNum[0].split("'")[1] if len(resNum) < 3 else resNum[1].split("'")[1] 313 | results = [ URI, Status, responseTime, context, FileOrigin, ResourceType ] 314 | glanceDetails[resNum] = results # mapping of results to their respective tables 315 | 316 | properties = soup.findAll('td',{'class':'results'}) 317 | data = [] 318 | for table in properties: 319 | tableID = table.attrs.get('id') 320 | if len(table.find_all('table')) == 0 or tableID in ['resMetadata', None]: 321 | continue 322 | tableBody = table.find_all('table')[-1] 323 | tableRows = tableBody.find_all('tr')[1:] #get rows from property tables excluding header 324 | for tr in tableRows: 325 | td = tr.find_all('td') 326 | row = [i.text for i in td] 327 | if tableID in glanceDetails: 328 | data.append(glanceDetails[tableID] + row) 329 | csv_output.writerows(data) 330 | output.close() 331 | 332 | 333 | if __name__ == '__main__': 334 | parser = argparse.ArgumentParser(description='Get an excel sheet of details shown in the HTML reports for the Redfish Service Validator') 335 | parser.add_argument('htmllog' ,type=str, help = 
'Path of the HTML log to be converted to csv format' ) 336 | parser.add_argument('--dest' ,type=str, help = 'Name of output' ) 337 | args = parser.parse_args() 338 | 339 | htmlLogScraper(args.htmllog, args.dest) 340 | -------------------------------------------------------------------------------- /redfish_service_validator/RedfishServiceValidatorGui.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 4 | 5 | """ 6 | Redfish Service Validator GUI 7 | 8 | File : RedfishServiceValidatorGui.py 9 | 10 | Brief : This file contains the GUI to interact with the RedfishServiceValidator 11 | """ 12 | 13 | import configparser 14 | import os 15 | import threading 16 | import tkinter as tk 17 | from tkinter import filedialog as tkFileDialog 18 | import traceback 19 | import webbrowser 20 | 21 | import redfish_service_validator.RedfishLogo as logo 22 | import redfish_service_validator.RedfishServiceValidator as rsv 23 | 24 | g_config_file_name = "config/config.ini" 25 | 26 | g_config_defaults = { 27 | "Tool": { 28 | "verbose": { 29 | "value": "", 30 | "description": "Level of verbosity (0-3)" 31 | } 32 | }, 33 | "Host": { 34 | "ip": { 35 | "value": "http://localhost:8000", 36 | "description": "Host of testing system, formatted as https:// ip : port (can use http as well)" 37 | }, 38 | "username": { 39 | "value": "MyUser", 40 | "description": "Username for Basic authentication" 41 | }, 42 | "password": { 43 | "value": "MyPass", 44 | "description": "Password for Basic authentication" 45 | }, 46 | "description": { 47 | "value": "MySystem", 48 | "description": "Description of system being tested (optional)" 49 | }, 50 | "forceauth": { 51 | "value": "False", 52 | "description": "Force authentication even on http servers" 53 | }, 54 | "authtype": 
{ 55 | "value": "Basic", 56 | "description": "Authorization type (Basic | Session | Token | None)" 57 | }, 58 | "token": { 59 | "value": "False", 60 | "description": "Token string for Token authentication" 61 | }, 62 | "ext_http_proxy": { 63 | "value": "", 64 | "description": "URL of the HTTP proxy for accessing external sites" 65 | }, 66 | "ext_https_proxy": { 67 | "value": "", 68 | "description": "URL of the HTTPS proxy for accessing external sites" 69 | }, 70 | "serv_http_proxy": { 71 | "value": "", 72 | "description": "URL of the HTTP proxy for accessing the service" 73 | }, 74 | "serv_https_proxy": { 75 | "value": "", 76 | "description": "URL of the HTTPS proxy for accessing the service" 77 | } 78 | }, 79 | "Validator": { 80 | "payload": { 81 | "value": "", 82 | "description": "Option to test a specific payload or resource tree (see README)" 83 | }, 84 | "logdir": { 85 | "value": "./logs", 86 | "description": "Place to save logs and run configs" 87 | }, 88 | "oemcheck": { 89 | "value": "True", 90 | "description": "Whether to check Oem items on service" 91 | }, 92 | "debugging": { 93 | "value": "False", 94 | "description": "Whether to print debug to log" 95 | }, 96 | "uricheck": { 97 | "value": "False", 98 | "description": "Whether to force urichecking if under RedfishVersion 1.6.0" 99 | }, 100 | "schema_directory": { 101 | "value": "./SchemaFiles/metadata", 102 | "description": "Where schema is located/saved on system" 103 | }, 104 | "mockup": { 105 | "value": "", 106 | "description": "Enables insertion of local mockup resources to replace payloads from the service" 107 | } 108 | } 109 | } 110 | 111 | class RSVGui: 112 | """ 113 | Main class for the GUI 114 | 115 | Args: 116 | parent (Tk): Parent Tkinter object 117 | """ 118 | 119 | def __init__( self, parent ): 120 | # Set up the configuration 121 | self.config = {} 122 | for section in g_config_defaults: 123 | self.config[section] = {} 124 | for option in g_config_defaults[section]: 125 | 
self.config[section][option] = g_config_defaults[section][option] 126 | 127 | # Read in the config file, and apply any valid settings 128 | self.config_file = g_config_file_name 129 | self.system_under_test = tk.StringVar() 130 | self.parse_config() 131 | 132 | # Initialize the window 133 | self.parent = parent 134 | self.parent.title( "Redfish Service Validator {}".format( rsv.tool_version ) ) 135 | 136 | # Add the menu bar 137 | menu_bar = tk.Menu( self.parent ) 138 | file_menu = tk.Menu( menu_bar, tearoff = 0 ) 139 | file_menu.add_command( label = "Open Config", command = self.open_config ) 140 | file_menu.add_command( label = "Save Config", command = self.save_config ) 141 | file_menu.add_command( label = "Save Config As", command = self.save_config_as ) 142 | file_menu.add_command( label = "Edit Config", command = self.edit_config ) 143 | file_menu.add_separator() 144 | file_menu.add_command( label = "Exit", command = self.parent.destroy ) 145 | menu_bar.add_cascade( label = "File", menu = file_menu ) 146 | self.parent.config( menu = menu_bar ) 147 | 148 | # Add the logo 149 | image = tk.PhotoImage( data = logo.logo ) 150 | label = tk.Label( self.parent, image = image, width = 384 ) 151 | label.image = image 152 | label.pack( side = tk.TOP ) 153 | 154 | # Add the system under test label 155 | tk.Label( self.parent, textvariable = self.system_under_test, font = ( None, 12 ) ).pack( side = tk.TOP ) 156 | 157 | # Add the buttons 158 | button_frame = tk.Frame( self.parent ) 159 | button_frame.pack( side = tk.TOP, fill = tk.X ) 160 | self.run_button_text = tk.StringVar() 161 | self.run_button_text.set( "Run Test" ) 162 | self.run_button = tk.Button( button_frame, textvariable = self.run_button_text, command = self.run ) 163 | self.run_button.pack( side = tk.LEFT ) 164 | self.run_label_text = tk.StringVar() 165 | self.run_label_text.set( "" ) 166 | tk.Label( button_frame, textvariable = self.run_label_text ).pack( side = tk.LEFT ) 167 | tk.Button( button_frame, text 
= "Exit", command = self.parent.destroy ).pack( side = tk.RIGHT ) 168 | 169 | def update_sut( self ): 170 | """ 171 | Updates the System Under Test string 172 | """ 173 | self.system_under_test.set( "System Under Test: " + self.config["Host"]["ip"]["value"] ) 174 | 175 | def parse_config( self ): 176 | """ 177 | Parses the configuration settings from a file 178 | """ 179 | config_parser = configparser.ConfigParser() 180 | config_parser.optionxform = str 181 | config_parser.read( self.config_file ) 182 | for section in config_parser.sections(): 183 | for option in config_parser.options( section ): 184 | if section in self.config: 185 | if option in self.config[section]: 186 | self.config[section][option]["value"] = config_parser.get( section, option ) 187 | self.update_sut() 188 | 189 | def build_config_parser( self, preserve_case ): 190 | """ 191 | Builds a config parser element from the existing configuration 192 | 193 | Args: 194 | preserve_case (bool): True if the casing of the options is to be preserved 195 | 196 | Returns: 197 | ConfigParser: A ConfigParser object generated from the configuration data 198 | """ 199 | config_parser = configparser.ConfigParser() 200 | if preserve_case: 201 | config_parser.optionxform = str 202 | for section in self.config: 203 | config_parser.add_section( section ) 204 | for option in self.config[section]: 205 | config_parser.set( section, option, self.config[section][option]["value"] ) 206 | return config_parser 207 | 208 | def open_config( self ): 209 | """ 210 | Opens the configuration settings from a file 211 | """ 212 | filename = tkFileDialog.askopenfilename( initialdir = os.getcwd(), title = "Open", filetypes = ( ( "INI", "*.ini" ), ( "All Files", "*.*" ) ) ) 213 | if filename == "": 214 | # User closed the box; just return 215 | return 216 | self.config_file = filename 217 | self.parse_config() 218 | 219 | def edit_config( self ): 220 | """ 221 | Edits the configuration settings 222 | """ 223 | option_win = tk.Toplevel() 
224 | option_win_frame = tk.Frame( option_win ) 225 | option_win_canvas = tk.Canvas( option_win_frame ) 226 | option_y_scroll = tk.Scrollbar( option_win_frame, orient = "vertical", command = option_win_canvas.yview ) 227 | option_y_scroll.pack( side = tk.RIGHT, fill = tk.Y ) 228 | option_x_scroll = tk.Scrollbar( option_win, orient = "horizontal", command = option_win_canvas.xview ) 229 | option_x_scroll.pack( side = tk.BOTTOM, fill = tk.X ) 230 | option_win_frame.pack( side = tk.TOP, fill = tk.BOTH, expand = True ) 231 | option_win_canvas.pack( side = tk.LEFT, fill = tk.BOTH, expand = True ) 232 | option_win_canvas.bind( "", lambda e: option_win_canvas.configure( scrollregion = option_win_canvas.bbox( "all" ) ) ) 233 | option_win_contents = tk.Frame( option_win_canvas ) 234 | option_win_canvas.create_window( ( 0, 0 ), window = option_win_contents ) 235 | config_values = {} 236 | 237 | # Iterate through the config file options to build the window 238 | for section in self.config: 239 | config_values[section] = {} 240 | section_frame = tk.Frame( option_win_contents ) 241 | section_frame.pack( side = tk.TOP ) 242 | tk.Label( section_frame, text = section, anchor = "center", font = ( None, 16 ) ).pack( side = tk.LEFT ) 243 | for option in self.config[section]: 244 | option_frame = tk.Frame( option_win_contents ) 245 | option_frame.pack( side = tk.TOP, fill = tk.X ) 246 | tk.Label( option_frame, text = option, width = 16, anchor = "w" ).pack( side = tk.LEFT ) 247 | config_values[section][option] = tk.StringVar() 248 | config_values[section][option].set( self.config[section][option]["value"] ) 249 | if "options" in self.config[section][option]: 250 | option_menu = tk.OptionMenu( option_frame, config_values[section][option], *self.config[section][option]["options"] ) 251 | option_menu.configure( width = 26 ) # Need a better way to fine tune this so it lines up nicely with the text boxes 252 | option_menu.pack( side = tk.LEFT ) 253 | else: 254 | tk.Entry( option_frame, 
width = 32, textvariable = config_values[section][option] ).pack( side = tk.LEFT ) 255 | tk.Label( option_frame, text = self.config[section][option]["description"], anchor = "w" ).pack( side = tk.LEFT ) 256 | tk.Button( option_win_contents, text = "Apply", command = lambda: self.apply_config( option_win, config_values ) ).pack( side = tk.BOTTOM ) 257 | option_win_contents.update() 258 | option_win_canvas.config( xscrollcommand = option_x_scroll.set, yscrollcommand = option_y_scroll.set, width = option_win_contents.winfo_width(), height = option_win_contents.winfo_height() ) 259 | 260 | def apply_config( self, window, config_values ): 261 | """ 262 | Applies the configuration settings from the edit window 263 | 264 | Args: 265 | window (Toplevel): Tkinter Toplevel object with text boxes to apply 266 | config_values (Array): An array of StringVar objects with the user input 267 | """ 268 | for section in self.config: 269 | for option in self.config[section]: 270 | self.config[section][option]["value"] = config_values[section][option].get() 271 | self.update_sut() 272 | window.destroy() 273 | 274 | def save_config( self ): 275 | """ 276 | Saves the config file 277 | """ 278 | config_parser = self.build_config_parser( True ) 279 | with open( self.config_file, "w" ) as config_file: 280 | config_parser.write( config_file ) 281 | 282 | def save_config_as( self ): 283 | """ 284 | Saves the config file as a new file 285 | """ 286 | filename = tkFileDialog.asksaveasfilename( initialdir = os.getcwd(), title = "Save As", filetypes = ( ( "INI", "*.ini" ), ( "All Files", "*.*" ) ) ) 287 | if filename == "": 288 | # User closed the box; just return 289 | return 290 | self.config_file = filename 291 | if not self.config_file.lower().endswith( ".ini" ): 292 | self.config_file = self.config_file + ".ini" 293 | self.save_config() 294 | 295 | def run( self ): 296 | """ 297 | Runs the service validator 298 | """ 299 | self.run_button_text.set( "Running" ) 300 | self.run_button.config( 
state = tk.DISABLED ) 301 | run_thread = threading.Thread( target = self.run_imp ) 302 | run_thread.daemon = True 303 | run_thread.start() 304 | 305 | def run_imp( self ): 306 | """ 307 | Thread for running the service validator so the GUI doesn't freeze 308 | """ 309 | self.run_label_text.set( "Test running; please wait" ) 310 | 311 | run_window = tk.Toplevel() 312 | run_text_frame = tk.Frame( run_window ) 313 | run_text_frame.pack( side = tk.TOP ) 314 | run_scroll = tk.Scrollbar( run_text_frame ) 315 | run_scroll.pack( side = tk.RIGHT, fill = tk.Y ) 316 | run_text = tk.Text( run_text_frame, height = 48, width = 128, yscrollcommand = run_scroll.set ) 317 | rsv.my_logger.handlers[0].stream = RunOutput( run_text ) 318 | run_text.pack( side = tk.TOP ) 319 | run_button_frame = tk.Frame( run_window ) 320 | run_button_frame.pack( side = tk.BOTTOM ) 321 | tk.Button( run_button_frame, text = "OK", command = run_window.destroy ).pack( side = tk.LEFT ) 322 | tk.Button( run_button_frame, text = "Copy", command = lambda: self.copy_text( run_text ) ).pack( side = tk.RIGHT ) 323 | 324 | # Launch the validator 325 | try: 326 | rsv_config = self.build_config_parser( False ) 327 | status_code, last_results_page, exit_string = rsv.validate( configfile = rsv_config ) 328 | if last_results_page is not None: 329 | webbrowser.open_new( last_results_page ) 330 | else: 331 | # The validation could not take place (for a controlled reason) 332 | notification_window = tk.Toplevel() 333 | tk.Label( notification_window, text = "Test aborted: " + exit_string, anchor = "center" ).pack( side = tk.TOP ) 334 | tk.Button( notification_window, text = "OK", command = notification_window.destroy ).pack( side = tk.BOTTOM ) 335 | except: 336 | oops_window = tk.Toplevel() 337 | tk.Label( oops_window, text = "Please copy the info below and file an issue on GitHub!", width = 64, anchor = "center" ).pack( side = tk.TOP ) 338 | oops_text_frame = tk.Frame( oops_window ) 339 | oops_text_frame.pack( side = 
tk.TOP ) 340 | oops_scroll = tk.Scrollbar( oops_text_frame ) 341 | oops_scroll.pack( side = tk.RIGHT, fill = tk.Y ) 342 | oops_text = tk.Text( oops_text_frame, height = 32, width = 64, yscrollcommand = oops_scroll.set ) 343 | oops_text.insert( tk.END, traceback.format_exc() ) 344 | oops_text.pack( side = tk.TOP ) 345 | oops_button_frame = tk.Frame( oops_window ) 346 | oops_button_frame.pack( side = tk.BOTTOM ) 347 | tk.Button( oops_button_frame, text = "OK", command = oops_window.destroy ).pack( side = tk.LEFT ) 348 | tk.Button( oops_button_frame, text = "Copy", command = lambda: self.copy_text( oops_text ) ).pack( side = tk.RIGHT ) 349 | self.run_button.config( state = tk.NORMAL ) 350 | self.run_button_text.set( "Run Test" ) 351 | self.run_label_text.set( "Test Complete" ) 352 | 353 | def copy_text( self, text ): 354 | """ 355 | Copies text to the system clipboard 356 | 357 | Args: 358 | text (Text): Tkinter Text object with text to copy 359 | """ 360 | self.parent.clipboard_clear() 361 | self.parent.clipboard_append( text.get( 1.0, tk.END ) ) 362 | 363 | class RunOutput( object ): 364 | """ 365 | Runtime output class 366 | 367 | Args: 368 | text (Text): Tkinter Text object to use as the output 369 | """ 370 | 371 | def __init__( self, text ): 372 | self.output = text 373 | 374 | def write( self, string ): 375 | """ 376 | Writes to the output object 377 | 378 | Args: 379 | string (string): The string to output 380 | """ 381 | if self.output.winfo_exists(): 382 | self.output.insert( tk.END, string ) 383 | self.output.see( tk.END ) 384 | 385 | def main(): 386 | """ 387 | Entry point for the GUI 388 | """ 389 | root = tk.Tk() 390 | RSVGui( root ) 391 | root.mainloop() 392 | 393 | if __name__ == '__main__': 394 | main() 395 | -------------------------------------------------------------------------------- /redfish_service_validator/validateResource.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # 
Copyright 2016-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 4 | 5 | import logging 6 | 7 | import redfish_service_validator.traverse as traverse 8 | import redfish_service_validator.catalog as catalog 9 | from redfish_service_validator.validateRedfish import checkPropertyConformance, displayValue 10 | from redfish_service_validator.helper import getNamespace, getType, createContext, checkPayloadConformance, navigateJsonFragment 11 | from redfish_service_validator.logger import record_capture, create_entry, Level 12 | 13 | my_logger = logging.getLogger('rsv') 14 | my_logger.setLevel(logging.DEBUG) 15 | 16 | RESULT_ENTRY = ('uri', 'success', 'counts', 'entries') 17 | 18 | def validateSingleURI(service, URI, expectedType=None, expectedJson=None, parent=None): 19 | # rs-assertion: 9.4.1 20 | # Initial startup here 21 | my_logger.verbose1("\n*** %s", URI) 22 | my_logger.verbose1("\n*** {}, {}".format(expectedType, expectedJson is not None)) 23 | message_table = {} 24 | 25 | record_capture.flush() 26 | 27 | me = {'uri': URI, 28 | 'success': False, 29 | 'records': [], 30 | 'messages': message_table, 31 | 'rtime': '', 32 | 'rcode': 0, 33 | 'fulltype': '', 34 | 'context': '...', 35 | 'payload': {}} 36 | 37 | # check for @odata mandatory stuff 38 | # check for version numbering problems # check id if its the same as URI 39 | # check @odata.context instead of local. 
Realize that @odata is NOT a "property" 40 | 41 | # Attempt to get a list of properties 42 | if URI is None: 43 | URI = '/Missing URI Link' 44 | if parent: 45 | URI = str(parent.payload.get('@odata.id')) + URI 46 | my_logger.warning('Missing URI Warning: Tool appears to be missing vital URI information, replacing URI w/: {}'.format(URI)) 47 | # Generate dictionary of property info 48 | try: 49 | if expectedJson is None: 50 | ret = service.callResourceURI(URI) 51 | success, me['payload'], response, me['rtime'] = ret 52 | me['rcode'] = response.status if response else -1 53 | else: 54 | success, me['payload'], me['rcode'], me['rtime'] = True, expectedJson, -1, 0 55 | response = None 56 | 57 | if not success: 58 | my_logger.error('Get URI Error: URI did not return resource {}'.format(URI)) 59 | # Failure to connect to the scheme is an important error that must be included in FAILS 60 | me['records'] = record_capture.flush() 61 | return False, me, None, None 62 | 63 | # verify basic odata strings 64 | if me['payload'] is not None: 65 | successPayload, odataMessages = checkPayloadConformance(me['payload'], URI) 66 | for m in odataMessages: 67 | msg = create_entry(m, *odataMessages[m]) 68 | message_table[msg.name] = msg 69 | else: 70 | successPayload = True 71 | 72 | my_type = me['payload'].get('@odata.type', expectedType) 73 | me['fulltype'] = str(my_type) 74 | if my_type is None: 75 | redfish_obj = None 76 | else: 77 | # TODO: don't have the distinction between Property Type and a Normal Type 78 | if isinstance(my_type, catalog.RedfishType): 79 | my_type = my_type.fulltype 80 | redfish_schema = service.catalog.getSchemaDocByClass(my_type) 81 | redfish_type = redfish_schema.getTypeInSchemaDoc(my_type) 82 | 83 | redfish_obj = catalog.RedfishObject(redfish_type, 'Object', parent=parent).populate(me['payload']) if redfish_type else None 84 | 85 | if redfish_obj: 86 | me['fulltype'] = redfish_obj.Type.fulltype 87 | else: 88 | my_logger.error('A problem has occurred when 
creating redfish object {}'.format(URI)) 89 | me['records'] = record_capture.flush() 90 | return False, me, None, None 91 | except traverse.AuthenticationError as e: 92 | raise # re-raise exception 93 | except Exception as e: 94 | my_logger.verbose1('Resource Object Exception: caught while creating ResourceObj', exc_info=1) 95 | my_logger.error('Unable to gather property info from schema for URI {}; check its schema definition for schema errors: {}'.format(URI, repr(e))) 96 | # ExceptionResource is an important error that must be included in FAILS 97 | me['records'] = record_capture.flush() 98 | return False, me, None, None 99 | 100 | # counts['passGet'] += 1 101 | 102 | # verify odata_id properly resolves to its parent if holding fragment 103 | odata_id = me['payload'].get('@odata.id') 104 | if odata_id is None: 105 | # Do not error for namespace.type MessageRegistry.MessageRegistry, etc 106 | if any(['{}.{}'.format(x, x) in redfish_obj.Type.getTypeTree() for x in ['MessageRegistry', 'AttributeRegistry', 'PrivilegeRegistry']]): 107 | my_logger.debug('No @odata.id was found in this resource, but not needed') 108 | else: 109 | my_logger.error('Missing OdataId Error: No @odata.id was found in this resource') 110 | message_table['@odata.id'] = create_entry('@odata.id', '-', '-', 'DNE', 'FAIL') 111 | 112 | if odata_id is not None and '#' in odata_id: 113 | if parent is not None: 114 | payload_resolve = navigateJsonFragment(parent.payload, URI) 115 | if parent.payload.get('@odata.id') not in URI: 116 | my_logger.info('@odata.id of ReferenceableMember was referenced elsewhere...: {}'.format(odata_id)) 117 | elif payload_resolve is None: 118 | my_logger.error('OdataId Reference Error: @odata.id of ReferenceableMember does not contain a valid JSON pointer for this payload: {}'.format(odata_id)) 119 | elif payload_resolve != me['payload']: 120 | my_logger.error('OdataId Reference Error: @odata.id of ReferenceableMember does not point to the correct object: 
{}'.format(odata_id)) 121 | _, end_fragment = tuple(odata_id.split('#', 1)) 122 | my_member_id = me['payload'].get('MemberId') 123 | if not my_member_id: 124 | my_logger.error('MemberId Missing Error: ReferenceableMember MemberId does not exist...') 125 | elif my_member_id not in end_fragment.split('/'): 126 | my_logger.error('MemberId Mismatch Error: ReferenceableMember MemberId does not match id: {} {}'.format(my_member_id, odata_id)) 127 | else: 128 | my_logger.warning('Parent Test Warning: No parent found with which to test @odata.id of ReferenceableMember') 129 | 130 | if service.config['uricheck']: 131 | my_uris = redfish_obj.Type.getUris() 132 | if odata_id is not None and redfish_obj.Populated and len(my_uris) > 0: 133 | if redfish_obj.HasValidUri: 134 | if not redfish_obj.HasValidUriStrict and redfish_obj.payload.get('Id') is not None: 135 | message_table['@odata.id'].result = 'FAIL' 136 | my_logger.error("URI Check Error: The Id property does not match the last segment of the URI {}".format(odata_id)) 137 | else: 138 | if '/Oem/' in odata_id: 139 | message_table['@odata.id'].result = 'WARN' 140 | my_logger.warning('URI Check Warning: URI {} does not match the following required URIs in Schema of {}'.format(odata_id, redfish_obj.Type)) 141 | else: 142 | message_table['@odata.id'].result = 'FAIL' 143 | my_logger.error('URI Check Error: URI {} does not match the following required URIs in Schema of {}'.format(odata_id, redfish_obj.Type)) 144 | 145 | if response and response.getheader('Allow'): 146 | allowed_responses = [x.strip().upper() for x in response.getheader('Allow').split(',')] 147 | if not redfish_obj.Type.CanInsert and 'POST' in allowed_responses: 148 | my_logger.error('Response Header Error: Allow header should NOT contain POST for {}'.format(redfish_obj.Type)) 149 | if not redfish_obj.Type.CanDelete and 'DELETE' in allowed_responses: 150 | my_logger.error('Response Header Error: Allow header should NOT contain DELETE for 
{}'.format(redfish_obj.Type)) 151 | if not redfish_obj.Type.CanUpdate and any([x in allowed_responses for x in ['PATCH', 'PUT']]): 152 | my_logger.warning('Response Header Warning: Allow header should NOT contain PATCH or PUT for {}'.format(redfish_obj.Type)) 153 | 154 | if response and response.getheader('x-Redfish-Mockup'): 155 | my_logger.warning('Response payload loaded from mockup, not the service under test') 156 | 157 | if not successPayload: 158 | my_logger.error(str(URI) + ': payload error, @odata property non-conformant') 159 | 160 | # if URI was sampled, get the notation text from traverseService.uri_sample_map 161 | me['uri'] = (str(URI)) 162 | me['context'] = createContext(me['fulltype']) 163 | me['origin'] = redfish_obj.Type.owner.parent_doc.name 164 | me['success'] = True 165 | 166 | my_logger.info("\t Type (%s), GET SUCCESS (time: %s)", me['fulltype'], me['rtime']) 167 | 168 | for prop_name, prop in redfish_obj.properties.items(): 169 | try: 170 | if not prop.HasSchema and not prop.Exists: 171 | my_logger.verbose1('No Schema for property {}'.format(prop.Name)) 172 | continue 173 | elif not prop.HasSchema: 174 | my_logger.error('Missing Schema Error: No Schema for property {}'.format(prop.Name)) 175 | continue 176 | propMessages = checkPropertyConformance(service, prop_name, prop) 177 | 178 | propMessages = {x: create_entry(x, *y) if isinstance(y, tuple) else y for x, y in propMessages.items()} 179 | 180 | if 'MessageRegistry.MessageRegistry' not in redfish_obj.Type.getTypeTree(): 181 | if '@Redfish.Copyright' in propMessages: 182 | modified_entry = propMessages['@Redfish.Copyright'] 183 | modified_entry.result = 'FAIL' 184 | my_logger.error('Present Copyright Error: @Redfish.Copyright is only allowed for mockups, and should not be allowed in official implementations') 185 | 186 | message_table.update(propMessages) 187 | except traverse.AuthenticationError as e: 188 | raise # re-raise exception 189 | except Exception as ex: 190 | 
my_logger.verbose1('Exception caught while validating single URI', exc_info=1) 191 | my_logger.error('Validation Exception Error: Could not finish check on this property {} ({})'.format(prop_name, str(ex))) 192 | message_table[prop_name] = create_entry(prop_name, '', '', '...', 'exception') 193 | 194 | SchemaFullType, jsonData = me['fulltype'], me['payload'] 195 | SchemaNamespace, SchemaType = getNamespace(SchemaFullType), getType(SchemaFullType) 196 | 197 | # List all items checked and unchecked 198 | # current logic does not check inside complex types 199 | fmt = '%-30s%30s' 200 | my_logger.verbose1('%s, %s, %s', URI, SchemaNamespace, SchemaType) 201 | 202 | for key in jsonData: 203 | my_logger.verbose1(fmt % (key, message_table[key].result if key in message_table else 'Exists, no schema check')) 204 | 205 | allowAdditional = redfish_obj.Type.HasAdditional 206 | for key in [k for k in jsonData if k not in message_table and k not in redfish_obj.properties and '@' not in k]: 207 | # note: extra messages for "unchecked" properties 208 | item = jsonData.get(key) 209 | if not allowAdditional: 210 | my_logger.error('Additional Property Error: {} not defined in schema {} (check version, spelling and casing)'.format(key, SchemaNamespace)) 211 | message_table[key] = create_entry(key, displayValue(item), '-', '-', 'FAIL') 212 | else: 213 | my_logger.warning('Additional Property Warning: {} not defined in schema {} (check version, spelling and casing)'.format(key, SchemaNamespace)) 214 | message_table[key] = create_entry(key, displayValue(item), '-', '-', 'Additional') 215 | 216 | fuzz = catalog.get_fuzzy_property(key, redfish_obj.properties) 217 | if fuzz != key and fuzz in redfish_obj.properties: 218 | message_table[fuzz] = create_entry(fuzz, '-', '-', '-', 'INVALID') 219 | my_logger.error('Invalid Property Error: {} not found, attempting {} instead'.format(key, fuzz)) 220 | my_new_obj = redfish_obj.properties[fuzz].populate(item) 221 | new_msgs = 
def validateURITree(service, URI, uriName, expectedType=None, expectedJson=None, parent=None, all_links_traversed=None, in_annotation=False):
    """Validate a resource at URI, then recursively validate every resource it links to.

    From the given URI, validate it, then follow its links like nodes in a tree.
    Other than expecting a valid URI, on success (real URI) expects valid links;
    valid links come from the gathered links of validateSingleURI and include info
    such as expected values. As long as that info can be passed along, this
    should not crash.

    :param service: the service connection/context object (provides config,
        catalog and callResourceURI) -- project type, see traverse module
    :param URI: the URI of the resource to validate
    :param uriName: human-readable breadcrumb name for this resource in results
    :param expectedType: expected Redfish type of the resource, if known
    :param expectedJson: pre-fetched payload to validate instead of GETting URI
    :param parent: the RedfishObject this URI was reached from, if any
    :param all_links_traversed: set of URIs already visited (None at the root;
        shared by reference across all recursive calls)
    :param in_annotation: True when this link came from an annotation, which
        temporarily disables URI checks
    :return: tuple of (validateSuccess, results dict keyed by uriName,
        referenced_links list of (link, parent-obj) pairs, top RedfishObject)
    """
    # If this is our first called URI, we own the traversal set and must also
    # process indirect references at the end.
    top_of_tree = all_links_traversed is None
    if top_of_tree:
        all_links_traversed = set()
    all_links_traversed.add(URI)

    results = {}

    # Links that are not direct, usually "Redundancy"; validated only once we
    # are back at the top of the tree, so direct traversal takes precedence.
    referenced_links = []

    # Annotation-sourced links are exempt from strict URI checks
    if in_annotation and service.config['uricheck']:
        service.catalog.flags['ignore_uri_checks'] = True
    my_logger.info("\n*** Validating %s", URI)
    my_logger.verbose1("\n*** %s", uriName)
    my_logger.push_uri(URI)
    validateSuccess, my_results, gathered_links, thisobj = validateSingleURI(service, URI, expectedType, expectedJson, parent)
    my_logger.pop_uri()
    results[uriName] = my_results
    if in_annotation and service.config['uricheck']:
        service.catalog.flags['ignore_uri_checks'] = False

    # If successful and a MessageRegistryFile, also follow its Location Uris
    # (e.g. thisobj['Location'].Collection[0]['Uri'])
    if validateSuccess and 'MessageRegistryFile.MessageRegistryFile' in thisobj.Type.getTypeTree():
        if 'Location' in thisobj:
            if thisobj['Location'].IsCollection:
                val_list = thisobj['Location'].Value
            else:
                val_list = [thisobj['Location'].Value]
            for sub_obj in val_list:
                if 'Uri' in sub_obj:
                    gathered_links.append(sub_obj)

    # If successful...
    if validateSuccess:
        # Bring Registries to front if possible (sort key is False for
        # Registries.Registries, so they sort first)
        for link in sorted(gathered_links, key=lambda link: (link.Type.fulltype != 'Registries.Registries')):
            if link is None or link.Value is None:
                my_logger.warning('Empty Link Warning: Link is None, does it exist?')
                continue

            # get Uri or @odata.id
            if not isinstance(link.Value, dict):
                my_logger.error('Payload Link Error: {} is expected to be an object containing @odata.id'.format(link.Name))
                continue
            link_destination = link.Value.get('@odata.id', link.Value.get('Uri'))

            # Excerpts point back at data validated elsewhere
            if link.IsExcerpt or link.Type.Excerpt:
                continue
            if not service.config['oemcheck']:
                if link_destination and '/Oem/' in link_destination or link and 'Resource.OemObject' in link.Type.getTypeTree():
                    my_logger.info('Oem link skipped: {}'.format(link_destination))
                    continue
            # Cross-references (RelatedItem, Redundancy, ...) are deferred to
            # the referenced_links pass at the top of the tree
            if any(x in str(link.parent.Type) or x in link.Name for x in ['RelatedItem', 'Redundancy', 'Links', 'OriginOfCondition']) and not link.IsAutoExpanded:
                referenced_links.append((link, thisobj))
                continue
            if link_destination in all_links_traversed:
                my_logger.verbose1('Link repeated {}'.format(link_destination))
                continue
            elif link_destination is None:
                my_logger.error('Missing Odata Error: URI for NavigationProperty is missing {}'.format(uriName))
                continue
            elif link_destination.split('#')[0].endswith('/'):
                # (elegantly) add warn message to resource html
                my_logger.warning('Trailing Slash Warning: URI acquired ends in slash: {}'.format(link_destination))
                # Strip the trailing slash and re-check for duplicates.
                # NOTE: joining with '/' (not '') so the normalized URI keeps
                # its path separators and can actually match traversed URIs.
                newLink = '/'.join(link_destination.split('/')[:-1])
                if newLink in all_links_traversed:
                    my_logger.verbose1('Link repeated {}'.format(link_destination))
                    continue

            if link.Type is not None and link.IsAutoExpanded:
                returnVal = validateURITree(service, link_destination, uriName + ' -> ' + link.Name, link.Type, link.Value, thisobj, all_links_traversed, link.InAnnotation)
            else:
                returnVal = validateURITree(service, link_destination, uriName + ' -> ' + link.Name, parent=parent, all_links_traversed=all_links_traversed, in_annotation=link.InAnnotation)
            success, link_results, xlinks, xobj = returnVal

            my_logger.verbose1('%s, %s', link.Name, len(link_results))

            referenced_links.extend(xlinks)

            results.update(link_results)

    if top_of_tree:
        # TODO: consolidate above code block with this
        for link in referenced_links:
            link, refparent = link
            # get Uri or @odata.id
            if link is None or link.Value is None:
                my_logger.warning('Empty Link Warning: Link is None, does it exist?')
                continue
            link_destination = link.Value.get('@odata.id', link.Value.get('Uri'))
            if link.IsExcerpt or link.Type.Excerpt:
                continue
            elif link_destination is None:
                my_logger.error('Missing Odata Error: Referenced URI for NavigationProperty is missing {}'.format(uriName))
                continue
            elif not isinstance(link_destination, str):
                my_logger.error('Invalid Reference Error: URI for NavigationProperty is not a string {} {} {}'.format(link_destination, link.Name, link.parent))
                continue
            elif link_destination.split('#')[0].endswith('/'):
                # (elegantly) add warn message to resource html
                my_logger.warning('Trailing Slash Warning: Referenced URI acquired ends in slash: {}'.format(link_destination))
                # Same trailing-slash normalization as above: keep '/' separators
                # so the dedup lookup against all_links_traversed can succeed.
                newLink = '/'.join(link_destination.split('/')[:-1])
                if newLink in all_links_traversed:
                    my_logger.verbose1('Link repeated {}'.format(link_destination))
                    continue

            if link_destination not in all_links_traversed:
                my_logger.verbose1('{}, {}'.format(link.Name, link))
            else:
                continue

            my_link_type = link.Type.fulltype
            success, my_data, _, _ = service.callResourceURI(link_destination)
            # Using None instead of refparent simply because the parent is not where the link comes from
            returnVal = validateURITree(service, link_destination, uriName + ' -> ' + link.Name, my_link_type, my_data, None, all_links_traversed)
            success, link_results, xlinks, xobj = returnVal
            # refLinks.update(xlinks)

            if not success:
                if 'OriginOfCondition' in link.Name or 'OriginOfCondition' in link.parent.Name:
                    # OriginOfCondition targets may legitimately be gone
                    my_logger.info('Link was unsuccessful, but non mandatory')
                else:
                    results.update(link_results)
            else:
                results.update(link_results)

    return validateSuccess, results, referenced_links, thisobj
For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md 4 | 5 | """ 6 | Redfish Logo 7 | 8 | File : RedfishLogo.py 9 | 10 | Brief : This file contains the Base64 encoded image data for the Redfish Logo 11 | """ 12 | 13 | logo = "R0lGODlhLAHTAHAAACH5BAEAAPwALAAAAAAsAdMAhwAAAAAAMwAAZgAAmQAAzAAA/wArAAArMwArZgArmQArzAAr/wBVAABVMwBVZgBVmQBVzABV/wCAAACAMwCAZgCAmQCAzACA/wCqAACqMwCqZgCqmQCqzACq/wDVAADVMwDVZgDVmQDVzADV/wD/AAD/MwD/ZgD/mQD/zAD//zMAADMAMzMAZjMAmTMAzDMA/zMrADMrMzMrZjMrmTMrzDMr/zNVADNVMzNVZjNVmTNVzDNV/zOAADOAMzOAZjOAmTOAzDOA/zOqADOqMzOqZjOqmTOqzDOq/zPVADPVMzPVZjPVmTPVzDPV/zP/ADP/MzP/ZjP/mTP/zDP//2YAAGYAM2YAZmYAmWYAzGYA/2YrAGYrM2YrZmYrmWYrzGYr/2ZVAGZVM2ZVZmZVmWZVzGZV/2aAAGaAM2aAZmaAmWaAzGaA/2aqAGaqM2aqZmaqmWaqzGaq/2bVAGbVM2bVZmbVmWbVzGbV/2b/AGb/M2b/Zmb/mWb/zGb//5kAAJkAM5kAZpkAmZkAzJkA/5krAJkrM5krZpkrmZkrzJkr/5lVAJlVM5lVZplVmZlVzJlV/5mAAJmAM5mAZpmAmZmAzJmA/5mqAJmqM5mqZpmqmZmqzJmq/5nVAJnVM5nVZpnVmZnVzJnV/5n/AJn/M5n/Zpn/mZn/zJn//8wAAMwAM8wAZswAmcwAzMwA/8wrAMwrM8wrZswrmcwrzMwr/8xVAMxVM8xVZsxVmcxVzMxV/8yAAMyAM8yAZsyAmcyAzMyA/8yqAMyqM8yqZsyqmcyqzMyq/8zVAMzVM8zVZszVmczVzMzV/8z/AMz/M8z/Zsz/mcz/zMz///8AAP8AM/8AZv8Amf8AzP8A//8rAP8rM/8rZv8rmf8rzP8r//9VAP9VM/9VZv9Vmf9VzP9V//+AAP+AM/+AZv+Amf+AzP+A//+qAP+qM/+qZv+qmf+qzP+q///VAP/VM//VZv/Vmf/VzP/V////AP//M///Zv//mf//zP///wAAAAAAAAAAAAAAAAj/APcJHEiwoMGDCBMqXMiwocOF52C9gnXuocWLGDNq3Mixo8ePIENqbJeqZCpwtl65E8mypcuXMGPKnLmvHiyTqWDZ2vkqFc2fQIMKHUr0YK9UPWHdzMYLpa2SFYtKnUq1qtWB7lJ9e6rK1jdVr2yhzAbWFqyVV/fRc4c2rdu3cBty3alN7M2wtsy9Aovy1S2r9MyWpBi3sOG3JO+CqyWWcU6xgnVCTtVu6q3JqtiZpXe4s+eg9JTaqlUWsi1tSJ2+olYLZdPB9Ya622mu19NX53hFtPW5t2+WR3eeRmp6Z05ek2Eh75sq5V+gT83pfo2bFy92t7IR+829u8WszReD/+tZl/G38SVd3wyPnNrJ221hEuvZPLe5m7CG1a4Nrrn3/wAWlNJeOKnS3E5OPaUTSv29Ms16NzHn30v1NHdfKgZiWFJeuuV1WypRBShib+eEl1Iq2hj4ijatOQXOXeaYVlKGYfESI2olbQechbZEhJSK2vRizi21tYPfOTvFN+KSboUWVlgJxigcglQmCE5TMTbVVHFNTcTZR1nl1YuN19lnjn5j1kbmOcLkdk4tzzEpZ1VPCedUa4wx6NVoVDbF2JQu7rSllFeq0otHsNSyHzi9MHplL0LaIqR959zipo1CvjjMnJzKdqBXKNVSF4J/Wonc
VlUi56GHV4qlJYOp6P+I0VG8hGPjLZhKF+ml0wmZq5CQbhlbp8TGJFFxCxbXGJV7mhZojE5B66qHMeoFTjQWhXalmtIN2RSa0tmHq5vA6mrOfue8smmx7IZ0VGsnsibeifThNa8t2SQ6rS1N3cTsqqJ5yMtNhzZ0lJQSqaIUkTYmPFFY9tmYkk7TnQNsLf4q2e7GD7kjEUrr9WSWcapMY2DJn46WoSouMtdTq96u1xxy901TkkoKgTcMOBnilEptMpfknqS53WISRRXzYqB7jfLG8dNy4eUYgaqYx1Wegr0Ca05IncoThns5u5SBKMWIX2NxElTPiT/zsuGVL1qYnq/+Soceaq8kLXJJvLT/s1PBUAdOEDHp6YRjXalMI9w36e201WO1YPgNLA+6ZovNOpWUZdzNieyazRLzgpqO2KYraUm39AfLfqLXV1K5XGHXOIqXlrw6VLqVaMuwgj+9tkmOm6U1V1uBbKJx7ymOPKEYiyUylWGZUzJtKXVF/YZqmdWtgTEi1WjDM3PfI3W96O53Tm1KDEs7SoOo385gAd47u8HhPd5pf96mE8YbwrsaXgjaC5TstBOTVYtK5tiQrYymuKI1qD85kZL0DkSNV/CnF0lJYCr00yEMhStSESzXdEoCrvfx7Evz4xQ9JoInAxVHPG8TC9kCSLkqnUhV0bpNqwQmGH7ZZWbg6J4q/wY2s/HxTXW40s1erpS48vGLZ6lIn90MVK7y6YaE+wFWO0SWwjkZzSs6WQ9PXiSqp7Bmf8Rxyl2QlRM9JQhDCPRQKrIxrZepqnoXEsv3XNigc0kqgxjK1W0sVhskIYV99gEW7gipyOao4i30IMa6uugQwp1kIou5SVcmNxqnuBBeQ0NVNtpIqgfaYhfUQw9klrOTuv2QQzECy4W0kSuoPCUVYzrHhcAxDBcC6ym0jBEIVXGpcgXSTW3qBTtukhunVcVJidsJRzIBADEYQAydqpdwwkKWV0zuRVB6xcs6eTRx5kRrWNOknmQEi7JNS4O0aZ/CBPOYy/1BJxPRXPskk/8hJq5mNGK8VPde4bcsDqkkiARWbo7pTKlEIxy2GEaJutIfFFpEDDfYx3aIAQA5HcUkdVlRSi53oFowZlQbOo3KbuIeF5qGMdPIW5/AAUEfQouJXSEUfWyWHrMlDico6cUoeToYEJrEZjmt2OWGyMj9YFFI6SPJSYxUFGxpdBrDaEc75mESA/nhkRZRRkYBkAkDZGISmRiRMoijDbzghY7HGlV5wGiWxQQQUM5qpYtYaUNBQYZBr3IWzVblokhFCjLnUipik3ZYXprrl1ciVyH1KAxYTAVfvWjHMLJKjIL2IhzD8MtD0LqPFShDDGnFpnf0sY+HpkJUzQsLLCaXJ33/6Us8CyoVAbcCLz7dSU+MkZblgiioHYIDVwJz1K2EySjpcIhIvkJSU3yVxehqqR25nE5t2CGkhJ6pkG3aWYiEItSsahYdmtWsO9Kr1YgooyFn3YcB0GDafWT0PySRrdZmy1988cR5L7qfTjhJxt9OqUX/wqtwWLmcLbnmgMiRrg9plqUeIcluEbYFdnTFqA5dsJA2upTFapU0yYY3G2kDymxuwd6smlernGWvLVJ8kHrEoLT7QMNZ09obq9bEXxPxr2z/m5IX/RdrdOzkn+A14OLodjHmcOcTXQXYaUmYSO/sUV6uhKss0Wy5vPjehce0rV5JJ2no6luv2FTd+8gq/yiX0ap6XdwOdNDZxe4wb0Qm2VqDoOG++5jEjbtjtObRcciHNjIdMbaVusxVjXpdlpX+KhwpVXqVOcyyO6fTqgtLKi8x0qWWIxZZ61xYN7kUE4jJ9avtAkuZypyxUPQRDY+x487naEeus3qOYawXxlqVZHt3p5BMxOAGMVDtb9whTtkeWiL5SlSi7neaII90cvlkEUpQFVvI9NY8WGuMvlDVqnIXl5VGpjJyJLbgbU3Jh6c+rK20m6oSbwmR/DkLUSCaXidCtHx3xi6k2pFn
gkuUF0aTn4hYKxCuoOgVGKNaV/crqp4YqHgyk1nVqMTT/aGkQDGM3IxSusMZUe+WPv/DFdzWowq/5aZnGVpdhjVUC0K2r0DNHZPDgYUchb+EtcsYyDCaw+uP+iyz6kU5sFGOE4uKyJLarMsoYZGNfDqolUnZkL5yUsO9PGgrEIopZG7yoESpyCsGgkVb11NuFDWuR/lsp0Qi2KO7lMVug5G7zaDboPRgCkI8MZBuhIE3zfkNHLyDiY8FEpoNwRiKkAELNXx9vgy1o7PgeQVEIaiTEf3OZHF/mIFY5DywmH1DbyfLgZhjONRT9GtVSind8wJH5AnGnRvKUji6doukQLCZRsONrl5mm5KEw1++Qo2Gz3GO8WSmF5rkBdNwNpQv+iu9zNTsPEpEmWGQfegtT2//TgjeDmX0h8WxAtBRzBmWwSymrZ1rvUjnOBzWgDE1fzrnifAp9tEMjZ1xM0Bu8x7Khy8H0i8sYx0oYTKv8R5tZCOqkyV59Bp5gR/hEja9wjfcZw7cR2M04THNMQ8Rkl7cQ3lDRxntYCBZZTOdJVE5QWfBRw8b4nSeMXSoUBZKwTiqEG2k0RwhFTmbJBniVDX4VEF/FU1udDbGgyrO0xy1QA3ttBz+oicTtGU6hGW34SG5AUF+gxzesx8WaBu5ITIXRjaXYkDAohk+MROJ13C2wD5adRMsllU9kVkuhiEvplV/gILEoEHrRQzz0B/gAB5G4nOHsRbugCQxJRF4kyg43lJ1I3MzRVY9JkJ2O8EYAnSJH9ccxRNDYlE4qRFAqvAN68YcfiUyoaYgHdYqgYRlOdFM6XIcHAg+sMBdRNRhQ1KHYGiIiicQQ7c+wCYycmYg4YBdWoVQd1YSBXcU65NVs5ETa5UK7rB9sECDvSFUYoQxPbgTh6YK5eGIT6E4mJgTZXQ5HzMle5Esz+MUjyFAg+Uvf9U+rgFFrYIcoPMon/gzQ9IfqnAuQhIh1mEr6ZJUsfiGmKKBZjFraiFGEpVZxIAaACdVvLBeWYVQndVZUkV5+UWR7fBFMrhB62V+Df8VF4tXEFKVL1oDFizUTjdjZPxzIHQ0Q4txdnsyHtNQKum4FbzwFQfyIEf4HleCHKMEPV3xKkghXa1Sh3JEMYViIdTlL0hSkJeCIimhMTRxC+szgnQmjJrFTJuFXXgIY712SFnFjJRnfifBVSAyD531UXz2G9FAjokyGPkChHjBSfAHMrc0UnMXKLfUTl8DQD2RZBgSXHu5JXnhQvthO1yWE/3IL7bCjxt2Sx2GLzkFXfIYZvdRFrnhHrhEE9HAcAIxH+AAlriEXbumfOnVILnWKMgIbAZicFLVa6Z5C2vVFblmJK84kr8RDvtFOWChITkxOdK2jeOBHjB3EvryQ3b/JUM8ZTKkhBTCWRbzeHuI6TMb8iqumQrWUYHYyZ3VEmZDaR8OdzQ0IZoDAQs36DPGiHRymF620DMa8obYRZGvSQyjhJ0Ep0E+Aw70UCIAMjHYZnr5YhYY1xPo1EpVR3VUZyf6EjakMjESMW1jh5LnOCjVU1ytxIgLUorIARa69D3awKHtZEW6cR9aI0gDWhlDgUHDcBoTCg69Rpub1VbFaF4Do3aYdHnsFRHrU3B+oxR1wSj9NjI6cQ6S5A66EaDiZKD+lSjNM1umIXXFQW5WoyfFwyx6ckd7RWV2AmGgllwkxmXdIqbS5Y8R5i25kV2MhBLWGBPEsDq5RpvAhqO7/6ZZM/piwzAPLZZe5oVnlDcPvuaMlPdr60UPBDcPswEgoYEv0mag7QRx+OSk+1NXbsUs+dNJBERA1Rkog6UlVCYloLo5g9UoYmgd5iIpidVcNDNmBoVBt4CeIFGSNZE6w9ALt8pd2NWQeZpedFqWBWeCxmhnPCpncias7pAM6ZUMf6hZgDgMgdGLAlEP0JAQ1ToQxIAktwCtEOFW/RUWdUFgDxpgTzZ2f9Vby6KlzCItrAQtrGQbWvhE3zOmEmM3x1Ut3zM+aWKq2mVd
ukQYs0oQQbcPXShRutae7ECjuzajeGqwLtaaxfqnwfaVF6lewMYWl8enyqBV67WoMCGrVv8pdN8JC7RKEBOxaOA4ZOgURnaFMffzJ1CiW6UkFtHCGOVwcu1qU0+EofVoK1OmK+PTYZgSlG5CLdSFcKxTRaoSsh0RGJaia8PQNxI1tb1mDuzjsMbYb8DGXnYYsXKGkX7Ko3nGpzGWZ/TAa0CRHwnxl2DTMxVRsqaDSbLFaDyRWyybTgjmOICJrpqorjM1ZXvFKB5iG1FGM82lctOlJZGZakl7QbqxYfv6WFLCEkbCJrlhjLrRDmeysA5rMVCLp2J5q+zVghYrrDzaWYi6pxSJseW3Vb3ggS5xDn8AVgfBcjE1I/lJGQhRIcKZGqeXOOtBeqPUMz45QMFpL/snnXj/oVOh6CECdDLUg4Ds4Z3CqXmmxi8mITGNQiAz8gq+Yil/mUW39GYbQQwKMnGXK7omkacX0lU94muapCGwkFm5yX0t54zQhxN+QZHxKZ06YSRMyxEDWxP7oAxLwZux4Q6oUEFcgx+DkQp/kBCU87zEQXUq8sAfF78Esiz0QUoGqCIG4pN21bZP1HsrQx/UchfH4bMNzDU/w4oVhJvg88DR5CZA+IrTgVQ3yBnXWsAKsQz6wDsIcjQ40Z43c5oSZXcd3Dco1xM2ow0uZjSWN7Y3AxYZ4mu3xL0TMhO3QFQhO7tcY3FTVxImw5sNFxbn8T/wNzz5SE96UhInNRhhgyo3/0FutyQlNgNBlpPHmlGFTZkvc0S4NxFMQvSiqjMRDcQvgqe5uqkcb2IgT/gK3NU9tahZooURwzANXYkizsh9mgVBu0cZUgsVW9WROSGCJ4Go9OA260MMNaWC4kcZazEPqAEOycA97qAPRvKmLAEeVowKbSgQWWFOKLMeU9cTqIAQ6XIsYjTIjMBJG9JNfCIYdUE5OjFKo8J5q1Iasgclm7hunygZgYQfWnITijIu6cF9uJEapqY5lUl3UKE6qYYiYVaaqYAKAWwQanerhvSGyhRap+l4r2OMCIV05pgV/fiVhChVmWWRYNmMWvUUt0APN9iVvMgS+pDFArSGBmFGP/9icQQCIanwXgZBOCCzjYPRPIzTHIqxbZl4P1BkL2GTIDbzQhhSco9pPBYULTXUKsr3YMTRnUhyEySWQKogDJp5S9hVHYMENPVxJvsEC7JqFIMhftPAYuVzFAltJJoFFRLFfS42DLtHUASjVQ29QdhltScolhgysQ3ykbYQDi4RmtGAQh+FH3PH0QURfCWTFDYD0tA5XlgBrhF3SROKKnC0Ip3TSsMJPcfzMlgDoTRlHADEjwekOgqYF0lRzrJ4OgnYfDaSnQsFIuYQmbJISObIaccxKTfRPQ2lD0HHWub7cePTz1arTO/ZtV7d1fdbtS8YIX+qjHgmVZhXIvmxVe7/YDTVWBK8LBKNpyIIylMIwbZt68IdbJXMpl+FCTYBFo4n8norvTiBx8Y17RRfMTw4RDZb0jXqQUrTZYFhVtO1QXvcKSaKnAq2IkSoahsxlRuYMki6ESGWe4I9Mhjkuw+uopvvK1X+bJoJ+9W9RBkzWtBapdwMDboktLVtXZbjl3QWArstYTQ3gzlQyDXKPHHBCSEVdBMHrlFaA66+K3bNsyF4M83WnD/8mE7Hs5eUFoof95iNEjmrIzDWTDPknN8uJF26dHdXhEv7wRUg5prz61zHYR8AeRwv8iUeU0jDgDdwCNy3PUqeq0yz+T6vuTNIgcq/GpvCilCXxw7jR3kk/xXMLgEeLXwXHWyNH0l10tnXJQPGek0QuvMkpmdxjZHjlrht7sfBPukY9TSTAvjjPlRTNDNDgzKF0gHfXggi7MYLaEjfOYeBZqIgk3WUmTWAY2JAEjVjKcHPfQOV2KUgq4mHuoqrWOSC/VhwEcJszeHhKIh0Jzionny2RvKWIREN9UBrNfGXDuLAmkQgm0KrGkIgD/LRt2vG+8BAwil3P0LEKBGcMeeE
DurjPVh1ThyFvTVOV7IX3ZQhO3Gmd4FcEJR2qeAeAXmQJ4EpBLLR9VHb8Zk39gE2fQ4i9ougr5NF54MbmcUz9EEjeZprpEyHNKIhLHZL9I4iFetrr7nnG/9C5y3BfQIfnGSswsSEEJZE7xvNcgmRdXO0cZaZdSzkfN0uUs2ZhQnSVjHn8vP4GNf50fLCYIMcOhJR7eKkXFoWNkVbdSdDz0mrOuTCjw7CF1HbvgZCRxYzyd53EmiS9XanHNj14NyXWWJPwStiXnPnwGGL61yLH/kMEo1XEjhid/lixSgZ6IKeOCwfwwi1u5LaLBizaAHWZHnFICdFs/9S833ll1Tol1ZGLVSWrwyyKwoYMQzDHzYSLrlS+dqlXWvKam4iWWJvtRZz9bmJalbbnlBLmwzrq8fKXgfbYr86uu2AEj8h4tYNFkSvknEvjQlh+ypsEkw7MOCUg/sjGtr/SEbTpo2tJBx44m3r9Pw7cbPTUo+WI4FTBqrSxTAcAjsS49+saqqPpZldplAV82qZWzHcxSanPvp9k0vCgJpiT5uhj7kNm7ViL7oGa4ddu7Wlm2cA4Y6XrXr7DB5EmFDhQob73KWCqOpVKlgSU1mElerVRImvMtZCqC8hPVjTIkIM19BgNlsbbdWCZYulNlvgXn2LaSsmOJg1Y36zhbOmLaLgiNYaCvSlUV5GiZorGnWgUXBNbZm7BRVq04HmBoZrmrVm015XeYHlJdZcL15Qz6U15/Vtr7Vyeb292xbv23a8er0FPMxrO7q9+p6jK1ixuXOCGxvmRbjx5GG92A07/9eOsOZhm9t17tXZnWF3nTtrhnVO5WrWIWGhUpUqW8RX00q+UjVbG25YuCdmdLeanrtz54Kv1ik0GzhYSF+1JPrzlc2ksJQSDRoT6VKiSqlij0q0a9SqRQeOtXXOFtirY7dypduV6zmv4OjaKuw2rzmwgPP6R8wczP6LLL+/CiNMwHOEQUwywx7EzLMHNTMswso+o4xCzSg0LbNe3OkrltZGXK0X2mKraDaKNJJIlYxq6y03j1IhkbV6NorpuZaYs0UbpFgCaqfmckpqOvKGckqqocoxD5yqqOLKrLGebK+t9Gzxq7y3wKoKrF7sGwivrPICR7C78GMnLwIBY/C/xP/eWquvwwZs8C/HIsNMGMLYgWyzyxozZ7PTOsts0HZKK02zRE2DpZ0aH0UIGog0gmii32LjyFJKM6LIt1RsgZSh55rThtMha3nOoo52lGlSioZ6qdVXYTVqo1eggrUll3BtyihYchqoqd4qmm68Hj2K6SxemhJmt5joO3Mgj/Dz6ktbXNztKrYAgwmWuhDU6RWwDouMF49qQQyzsj51sczJwv21FgyHYW5YbQit7NdhzSHmsw1hASfUUG+5SDYXU9GmYBZTocY3ai6KDWGIYMkmI1AFPqgliptb8bmZZFNYlWlewrThiF8COCOJfIS1llRMUgW78rKJTRWjoHJq0iv/jYrY1fZchajhtuhSJeIny2LHlmliq2otcICO7Sq+XBUMQY5SKZMxywiG6F4+zZnUpEo1i0js2AyzBWiIbhkmHIhgntjfW2ApCGMSH6q0YNxYrG1TjaYxZKJsPE1471Rusdul5yZa+hVqdGopap8m6hEimS2vdeJUkDIKqItwy8aqzH0DzxyVazNv4vIS9jZtmwdyOZWtMM3IL6/YiUVzbU2nKC5eVM4LRYgWBGzviQ7UM23bPm0w7VcC5NoyiAoEO5XGXralneuxnicjWC5jh3XUHLWbRFtQuYhmgxf+bcVpfDOJ2BY9mqjjZSAlRqPeZEJx71qC1IhNkLK05kSN/2WOWxzTLkKkWkgugFhKG0WehrX2cMocTMPSBZ+DFajgRifMO8sFD3eOStniYVfBigUzoo34WCw+JEwFXSZWvb9EhlPnsNha1BO1pzXqTFjDTGcmgpiL+KsdFmtHbBqDIYocMRXhME07LHex8rXGRHr73MJotrwVvQhTsnnR53LTm0mR
bzXRMAgvJiaTD2Znc/FaIFJiAhHmPBApEtkJBSfinbQZZTaq4AVQbAKwyE1lIFhT46eoIpGn1GR4lKtSbOqjCq9oBCxjAqLl1sILpvWFLrhZS0sQY5LDfOkVkRle1cBmmBx2BomWMZEqkqYR7bEyk6nwTNoalREBHWoYFP85ThVZE0FMFbNjX8wNi2jmGzyCTiMesc0rEgaI56GxIfq4hTQ1Qg2YDI4nxMIawGBkHUd+inQ6sQlFrlPHn4TnWjqCVVM0WROm1PEpiaxSBHnXuk9+allO82Bb8kIXF65lNlfJyhXrQp8rVu1rf7EYoAbUDl1uhnKTkWIM3QaLKCIRIh0yUaMgYpjPpEaYI+JU4Yz5x/apjFIWMdn75tewTmGqYpViGz2U4Q5llOVFz5kYPM+1TOhYDidIuUgtEkZBo0BkO7HCnlO8MxGaJClnRoLgkx6YloO254O/GxlXwME09QyNjm9Jj1sm0rReUI5cFxnoXSDyrcRUT4d3OVD/A7/xoIxkDTPDS1s21OXEMkEkl03kJTF0UreTIkdvsVnmilrUNc11yrLeqw3thrW33sxoRa4aXEtogiKleoQlv6JjTWYEubFi7TdAQao2VCFI1TKSVkTRiHmmJLmbiYdpXbmIliZ4jrWahZOf8ko4rvKq/djOYsrqa34kSSDD2s5qMSNeudrBOuIlzBaYaczxwMbRJSLxbJwh4SvmkdpgNlYl9MibRYJmqfhx8TdiW6r+9Oub2dwGYTPqSHMm+CtpbmSZzHFOUPDYrZcBDCmxIwpvXSdHp1DOKZyL3K26Ax4PbmVKkPQV8yBIkd+ROCs2AWTT0jLXsqrHK9FlTESt/6QRNr3lYaqkUwCrNickNmaFe4olOwimig39EmsZ5YwTYUGSw7l3RJCFMjPrZ5GrLfVg3sNI2eg3rE7tr8HGW1m3wPEH26BqWCzJlFEa+EyXNvV9nf2Imh2ZG5nF6mnPSRLOHvidqjCtLLz71LEQVk6ncMp2Y8JNVcSCGHZEDS8TxA4sGrat9CTyW3UZRmzwqkPBOC+IkCbKUjszRZ5NIzTeG0rUoNJeJzcEvqaCiPpatJuCkdGLs6mf+gQXI9BdbWOWitiLhvUNoOKROgiGrE5gwqKlWe45XBRZcMPzm6CQR2W4ckpT/VlPLGHuZz2b52cnpQ0r0YVr+ymLXP35lv+nRVtnDK3LRf7CoPwEt2qBWSEsMxoycGivfq56hWYi6KpSqabVrYHhZE31PtpILL7A3k2LaHZr7/VmN6YlVsXeXFoCWxwmpRoVThxME9UOO1fSjBdPsFMLo7AsV0TB6jeC5auhWGUq5rk5T6p6pWUxZ+dfgkt7ygMVutRnSn6xinkAdKZK+ydBu4OMYPiTPX1/qbdBxEy2bzGv4tREPUs+OEopxXAqP/ZlDtsm3475G9zAxLQ13TKyRlWxxfUIJjAh9o7qvrhBJmnu3MEwOrezTu4Qnlaiww7Oco6foecTKmDxCrXaYxfA1AUra6ph5TPflmH8Rxh12fSDJuMXxDz/hh2Amfe85FQoQ1WoMYwyeNhbwzAxgqwkHTGZ4aScPshi9r8SYbjF6eeswW32184qcI6gQyrVNj87naMnkgjJZyRVnypPoorin6SVJ7XlFm3pkla+FJcxzYet9DlxCCu90LvAaaE6XOhfGKND+nSGMY4hnvZOv3rVeygz/8cQ7dGMQsmMfwkY2bubgZOslzqmhYEytYOfzuoIF6GfTmmYYdmYiukWt2MjUpmOUumJaWGJDqyFPyiEP9iCQqiCQmDBQkjBFoTBFyyERWDBRdgCGgyDGbxBG9RBGhSDGTSEReACRqBBRpjBH1yEHzRCRgiDIGQEJywEIpTCKAyDRWAEOjFYhCRcBEOQQiusQkZgQikMgy5kQi8kQzC0wiyUQiLEwjXUQjBEQziEQzFYwzFkBEkgjCT8i1dgLAT/rBF6gCG+EZ4syoibgsDc8yLQ6Y2bkgnjewW66zi6c5ZG5ECY
sIlaQZmX0IZFAIBO9MRPBMVQFMVRJMVSNMVTRMVUVMVVZMUwaIcV2IIwgAGO8kO7qYfj0ojZsI34ob2JkSnC0Z9oEjY80p+NQD44w4m425/e+Kb/uruY8JEt68QDOIAAoEZrrMZr1EZqzEZrxEZvzMZwBMdx7MZyNEdyLEdyVMduXMd2TMdyFIBOFIN5pMd6tEd7RIN7nEc0iAEA2IJeCIMYoMNDqsXy0YdwyJTK8o1EXJ5EXJHi+5uFpLuaoazNehmFScYv0hueCJk/6MQH0IEcUIAcEEmShIAZ/yjJHDhJHUiAHNABkCxJGoBJCMgBmaSBkgxJmnzJkHyAm9TJB5gBlnRJoHzJoQzKlixKmRxKHZiBnxzKpyzKlwzKB1hKlazKn5zKHAgAAGg1TQAAKuCFLSDCGygYVECcgrRFhJwU75mvYqqNGaEYYryNiJmGBvKs4uMInFigt3ORCLIN1HqVpdoNbSiETpyBBKBKxJyBB0iAxXTMloTMHIjMlgTKBGjMB0hMyZTMyzzMw8wBxjxMoGRMyRTNxpzMzVxMyfRMxTTNx3TNyTxM05TN0FzNz1zMrYQGJ8sEAKiCcxCDMNiCKpCgDyIGtCyfeuiF/JKvMGLLhFGFBhochf9byBnxiMGknf5Csxf5hgVbCuw5p5boRNtsScfUTMp0TNE8z/MkycrUTNEsz8+0zdQkT6B8T/TMTPvUzNQ0z8ykzPdsSfgEUKr0T83cSsZShn3IzWXQBzQ60Nw8UAQtiNysh9w8iN1UAQ2JjdOgqE+hB+MsH0Asic9pSL4BNvrxDW2iCI3DLAmES2SphYcBzHrRSzIaNKXiRACIzNFEzAGFTfuMTf98zPLsTMykTdDUUR5VTR89Us5c0sV0T8uMzcWUzQF9ANw0CGVgxVB80En4SgKEDQ65M/w4CJEwzofwiF74kA5djXpwooJZpoZ804ybHzjrsokcnKSyDuqMOKX/ujsDoh+jKEwAEFL0PE3FXNL3RFIlpUr8NE8kLVTSTM/SdNT6XM8ZSE0e1YFOrBsszdJPfNDdpALtGTVcIqmQcgejKE4PNQgFdJWY2Dp3IFOFSM619BQ6hURazYg3q0CKeJFJ/I01iwnreJEIyonm6MT9PE/GtEwdVdLJVNLNhNTKrE3OPE1JfVRkVVL9TEwpVU3LhMzKtM/I3MoH5VQACABrlAEEmAF1RQAESNd2ddd27cQtBQAVmAwYwpce8hdbOMuQqEWVqqxJ+YO15NdhgNWDaIcJUsTPqcDeaBiM65TdqKNseNHc2piJwbVTYZpkO61jHdAxIAMyAAKQJYMc/yAD/bxW/xTNNFAD9dzP+oTMBCiGbGVMZY1NxMTZZbXMASVJlBRQzSTJ8fRRCNDUKx1Fa1zUpKRKmpxXgwBVUTUy1oMhIzqiYTAIaypITuGshWmcnhEb/bmFv7iFN6OIX3xYmuKyLqNO1EKWjGQJzBFWili2IQFPQZ3MBNAE1lAGSlhW/bRN95TSvDVUIOVPoN2H8/xWYliGZaCHZVAGx62Hx23cx43cZUiDy0wANWgNODBU9bTSfeDUCSWGSbiBTwwAdSXUxWzafdjN3oyQjyKpivoMw+ghVd0HEgK+ibhASmlAimxVzalOtcUjYdSfmyqz9JG73JKYIamfdJIm5//4SJfN2xGJBsJ9zMoszhwI0hydVsnM28v1z1QlETUYTZRsjctNTfssWtDtRIXQBBXwxABoT81cXQtdkM+4iOzBjL7yjGEgmDNVVXrIL05hH5ixlTFau5uyNbiMH74EGouTLF9riYKJtphZtohRBY8EgEhNADh4FGLY2ZfF3Csd3G/12281CE0I4c28nxpRYSlNg9ZoWWl1z8/lVIbwSk9kT8RcXS4N1eIYhvxylQoR1R6aB4Rs4autxfwRHu+xDeArmGkoBLNNyNsoRF5zRNYxvu3cCIZ5xEY0UY0YTJ/omOgl1OndB5JU4xxgWTRGCEiFTL89COvd4c2cXmJ40s//
1ARNUIY93uMk9mNi0ARQIAbtTUwdOIiQdEk1VuT6PM8CNVqubAgY8MRGrd8uvRB6wWKSiiIT6bfPoAk0IlOslb0JWh8q8z2J66ITPRj6qUuHVUZjlLuNycTn4FOS4w5bVjlYCM8cxduD2FGclVIEyAHxNYj2TF/PPIhNIGYR/tvFRICDiIYVbkwEUFacjeFj9lZv1dEZ+IE5LtxIFedxjeTVEANPLM9OVFMuFYMN4eQNQa8AjCINbZQx7cOwUwb18ZuVckguo7tHrDgEiruKqZhhO5UMPBZlgwVnGRKhAJgbhU0EoARgzs9wbUmEiIYfZVI3JtQkjdSJpuhClc/M/53j/rTN+S1p9SzfbIUAAwCATe3EGxADNMgEhihdABAA+gWAT/XHzZiQJQoiQUmyfvmMzpibe65FrtHah0EmYFORZZrTjMW45HM7H7k4kGPe/TmtB7O7mBjB3ujl9vxlg6Dmit5MhFBhZGZMY1ZhZ/7WBEiIRD3PSSjpbjXkzhyDkB5Ubs3jBLBhUYwBq02IFehEB1BdAGjh3XRFICtqCdmQJSJADe2M9UiIWA27elCeL8qU3KspLF6m6ozlMzM+DswWqx4SNANBQmoJnIBo/wTpfUBdDi5PuK7rEI7mhIjjOq5MzUUIOJjWwiXpY3bZJwWCgzhhR47ZxczUl47kUf80ZmjoxAA4yUvmggyJEMOA7MoYlERJMn/phVoIDmioB1KWvYw6mKV6kdxbnkc80cu6KeQ7xoJOvr3Tux1JMLyDxkBV6w4GZpoF2uDeBzWwZpSkTIUgA8Ne1O11gHtO68j87dcmTR59gLw2CCMVZ/wUzb8uRTFAiHPe4AfoYQBY7Al556DWDF8gqaIGke2mqH41TqUxu73pr4UZRiujVc96LDMzHEyhiVKJHMvp8VpNBY/lTARAYya9cMgk03r4T8gkg4UIYXHOAYWghyUtXATg7TQGykg1XOEuXEZ9z/UtV1K8AYSI7hy4ZCqICQ1xopgxomtpVcoYjYIxWHaABVb/q0V6qB8EKiaH/K+X2giJgNFMWZmuiSAYsb1XQKpfhI4pqw0NZtQjp9n0HOt9UNbPrPQ37msWhnIf9WU0Ls9wlUwK34dtztnuzVFyZt9TLHOD8HCiBQA1BdXYEBRXKZPPOHTZ+g30mpRXSJT1IghVPQcM5ghcCzZFXFG6A6MVMtHh5VWkkohBEiOKIYoD2w6PJdQs516axfKDGIM4Hut68GCDoAfWjNYpPwhQv/DyzHKPVlJENgiWZdkxUAN6TwMyyNaWhORVP0W6XnUcuGQxMIlb/42M2LrM6KvOmIchM+q5ytAVn5heEGWDoFDZ04dbowiRecRjv0htIiNipLuK/2GaQP84uKSYpFqq5ECRlNHvu0VjBKjWdT8I8k3N2wZd2tbmZwbKB90Hlj2IH8hRIbVMf9dybHXMb2YN0VRSDUfFB+1EBAhxMcgIw9Dd7gGiz1CiKHqZZMio7dEMYoAhejCd9kLqVts35gQ+iuGNLjLERBQttouNb8iW+umWxXkV55CI7agFIi/PcS914D7uizYIvk1MnFeGSj/8zmVMhBD8fWjrbnVZv5/UxleJjhbN5YbpVGz1fjzXnXZaEX+ae6nadpiIrXOlGGoHdBjVQzmLzqB1QtEIg21x2z30+ekiSmE7MtKmlWlYaOoUnXjib8rILw5RaKwFQwhPHz3y7v+t4/88CDhQ15bMcs5Fd7L2TzTm2+pfhmaGz3CFcCS3T1JXicYUT3EFAAod81MsiHM+gBAPA+8yoiC++owSkNOAiH7RUIhAFE4KOIMFiHm2UrnbZ/AgwoQKFzJMSA8WtVSvqL1S9SpVNliwVKWCle0VrIodX10E+WrgtJAVbXmsBStVKlsmbWmDSdMlyFosXwEAkCPBjAdBEVA6mCDBzxxClyZFmuDBQUpKg6o5OOaosoOajk6FevDB0YPRZjht2jTNQbNll+r4+tSs0KlIl86A0LOeQWU99/Lt2xPN
vkx8swYGIGagrXPterVr9zLcsMYvbzU+Rwxmu8iZ22GO3Gv/mqrGw9wN7HhQ375oDVezVtgLZsXYFrONtOgxle2UIDO+BAnTJc5pJ8GdDNlRpq1pqbTVYm6r59ygZDUdRCC3aVDsOQ6mAfoAwbK0YMcgRCA9QVWDW2eAT/v0PHY47pPSdUreIFmyS72X7Z8jAACE6eUXgT3FsA8aPQFImGBh1ETZMOcMM0xNtrQjIUcSeoabaMRk1tmFEhHjTmOI0eNHL62puOJBL32EG0kvwViTiy9xRJxxqmTUETUugQNjSLZkYwtF09gCDks22SITdGrJZ9BR2b33gFD+HfRDUgMcVM9+4RmkxnsJIHQUWNQZNMZ+SvWX3j45SEcXXT+kpRR2/0xN+eZdeRW45z6TDGaQYFu8ZOFimV0EYTs/EjrPOTB51lg7HI2WGUftzDPMQ6mEMw9HqdDDIqiruQOTRBzd1pFEMPp2kUWongRbpy/B8g0sMrnUEXCyKqfKHz3RF1QCRUF5nVNS/vSAmfvo8BRSBylTLFpG/fSGVtaRRa1BcAAF11DJqmVsswaBRdavVbr5gFkAQqPnngQiyNenfQJQhXKZMSZZKuBENkxpGskISzskbkjQZqQmCVMv84gUU6gNLxQObDX5Vmqpr2hTEUcVKSdTTRzRxpFMxCEZsUu0wqRjrwDAyV6yUaLrX1kCeLmPdUhtpx5XYCH0UwJ4GXTuVP8HEcNsnWSxqea3VJIx38pwOoXuAwD6PGC7fe0jxp+FUaGKvooxNoxNEeJLairURCaMwJylslmEMp4MTjseup1Kig7bvU89F4XkkUa67b2bxTnNGhJxHNvS3N6wNLdkrdocjiRLtfT0JrDJWtc05egKzVUC3taX7LPR7rNVlUGJ5Z1aPz3ZJrBdNXXfPpm/We4MSalrEDFV97XCPn0xCEAYmSkWoWaQfg2phJCOdu++AW8GqfIkOn/hYrDEe3fDjcY2UuAX/713RiB9P/hJr4CTq/m1FHdSLeAU0tPLTa1+1NPg8jyzDmD95O1aYho1M1wSQJg2Ye47wmoTzJoip5//3ak+PyHXe+wCgKnpji8H8h2gAMCFxgjjaxL64KOSp5kJPW9faZuUh4bhocy4IxkCU4bzbHEL7NmtN98zCd9wGDjFecRWGqGVTGABDiEWh1bf2AmTADC7oxzQP+PS31GStQxr6UxaxercQWb2LOwI0D2yE8rREmgfKx6rXF2pHVJutw+qVRANgsman4LXC68Nr45su9CjqKfCxtxrhSaE1ApZOA93RIYe7hCGLQpCw1C54xUpWU5FsmERHNIKJELiocVg4b2S8dBHJ2HJkYTICPjBBwEt60/t4nIU0bXpWOzJoneMtZAzvoxNYCrg0QoYlLYwMCmy649QJEjBCvZk/x8rwGBhtnBHDRWKhCAUTWaIF7DmjTBtKyRGZK4Zt8iQCBaLdNhAdJhD7wFOI+Yr3+HIyRJauURIQtKJTngCgOi87IA1ow9/cjDA0XFllwfJRANn4ICZGWQZsdyP6DRxz/4kYBLuKWACYEdPMcJlQeyqYAzeyBcvxdEd5+BFoYShPDyyjZDUK6HzPORHEsHQhZZaIQznQY+4zcND5/hmwzIlkfCFpFXeC8mO/mVJWjHHNkB0SY5S4atYHmV+zKJfUNJQjIRsZU0HARMtw8WdgGr1WQl8QBi/+JMFIvCgmUtdntZIzAAR6HdV0FehGHOLWkGGeow5R60Sucdz3IIltf+CKwohJxoStfAcS2IMId0xjxniNHu4GQlMPgabnopEkhsLUi1IVSvzychjKZtLU5KlCWIogxigUAZBEUK6pjyAGPNZC1kQ4lXa+Q8/dqpSsOaEOaRINAe0pFwD/wOAdam1gmJIkF9+t7W1Fao0MdkMY14DE7PxkWykUsVMI+NcxhCDHcRglMEUS0h63LSx4JQIRTqyN9wMKRXTUN96X9Gc2DzScTKZJJJgsdQ3YRFUPPtVAogbu5U5
BS2o6U7RvgPAOyXFAWEEMH14WVaKNk1qF62aRpEZqJrAtTQPaiY7TNUR0TQqFeaYI8TWNhpbWCRD0JTuoGh6Idea12Gjig3/33Y0scj1lG8w0YhEZLIjx53kfSoDbrJYo4z/PnBbtd0ibMHiv7HcFo3IsorTeJas+gA3AaJLSurMtTI1snFPuSsQYeIY49I4Jl+aCceNhpGSZJRIFYnMzDwQNo+XTKPNvRivdBMVk5nSoxe2qDH2BmKbV2GkxxnJyMYGMjiJ2Fdvi5OnfxBQlXqgJiHL0IQmcoAAok0Jac6CormcQh0wUc6V6NHKV2FHS9chRRNw0AREU4cUCBhgghcuUIaTm8Ew/AgyjUpYjPcVE4FwKDM0ulRmXnLXVIzmIoqB1DRWHBNCwqK8iL4bPWiDw5g4Eleb7ciQPlk25HzjRn818qwTUTDqowiA3vKm31R8aS6ezfusSNHPPmCmSlHTL7j8KThwwQVVVMMnoEGx6D7OvKcbVC3Nhkk2Hmti7BG24xYSMelA9EXYi0AGU9GmW2K5udh8Gf/62zjljMVO5psh7oYjPppVqYDIqlroxMi61B9Y7NSU+u3Hyq0DpnR+C6cEN/xcYi0WulL5dF0/QAdpvRoajjuJrMfAMGI47teznnWwi8FngdpYZJJ3EQtJMzI/AliJOtK8l1xbMxmiM6YmRBrMuNy8KlGFychd1Iqws1atWueNWEKcyf0cgg/8+a90TVvaObCArF3i5V8WJqXbrz9qdNbXbyAGGoNKMIDR5GL2VSHkbeYWJFGpoiDVSLo1ZoXWjltjCLnncPQd0e6ISE+FE8SSbBYnuDpcbwo3yiM3funNP7quMW9WAv+2LFP3zxezv/SrH0TADjO9QRp5DsSupJn/95I0pNCPzTynAm3cHAbd388oW1yv9zWWtN5MwhHmDGlWLR6ikADJRTQJ0Ukf0z1f0wydGYmRv7kS9TUNRUXgT3weTvmJGCDELcCVtRXPvkAMwExIo3BNLwwRh7xfm5mDSbVDYthf79FDaYAGTAhHkMygcrTPSXRKRKRMDiiAUuiA6zgffSwgrf1gEJaFEAod1NCJ0P1gdiRgEQ5h/DBF1QEIooEfQjRSL7wEXPVChHhIOOVeO1wE2ahCLaRNYyBMOyQDX7EgG56DbfDGOhHO3kTOOgVREikIAABIAOghH+ZhH+5hHgaiHgoiIf5hIR7iICYiIi5iIiqiIzIiIQIA/wyswAFMYgwcwArAwCVq4gHEgCbGACh6YieKoifGAIBcoEI0SmKIzfOMIKJkRjgcyQhuhsB8VwZqF+mxIQviyEl8RCcBB3P00HDohH6t1Z6oAF8g414oY08wozE+Y184YwVJ4zHuBQwACDRWDTICBkPIEB994whRU3axUDRJTzu4IO/pojruwzCQIZENEVHxWH4RY/ogCTjc45HYIz7eoz7yYz7+oz4eiS2Yw5Hwgj2awz0SJEIipEEuZDiYw0OCwzkw5DmAA0JOpESagzlM5EZqZC905ERyZDtoZEiG5EeOpGJsJPWYg10lii34zDoahGN4EEoNDzTd0TRtk/XEZP9MQgwOIUc2NEeuiAwotVOtDMmR6ERSLsnhMOWSQM5TDuSSGCQvCCQ4VCVBTiWSZCVBGpYt8AJBVmVVjiBVjuVA9gIvhAMv3AIvVOVHndiJ8UIvqGUvfORHgqVdwmVe8sJH7eWF1AJj8eQ+xKLwcNPxjIZmpBDuacYKCmZMPsQ5FU6t8ND51ErJ1OERpdOStJMtHBE+diZTZiZU2qNAliZpUmVfUeWSKORXDiQ4kOVA3oJGtuVVDiRYftRH2QJZWiRY9mZf6qU58KVw4mZe2uVHJQosKBJPukO3pR03fVAz5V7zGBpMOmZMqmL5tASRMQ5vcFYdbpYQgeb5bCaS6IT/PZqnUkKOQa5ma14l5CBJBmJle8rQVRrkRy5JXyFJWgZnLDKlQt6loZmDbMrmXQpnYgynXMKlheQlv3yUS1hnLN7C
MABWTg4DOgwDd0mGclqnY9IIxYTMRQhHp0jEyJCKcuCGU/7Ij0HlwsBGVPJCLXQK5BCkdXWEPY7oybTm3IBMcLoNDHbbWZpDp3ylgMKljLzCXhoWqQzDiSlpjVqnPsACKnTK8ewdZjhGYHIoh9KDe5mKUkqMbbSYTmTMesXEEdkIjAgkbagXSsTEkbDK2tVm2YAECcYEL8iK+DgSLBjkjzVHqjipv4hhcKLlyeRLgvKlzKnCoJ6YosEEXIIN/4pqg4wcmnWOyo8xZgymwlx1mpZ26j5IV74oTouZg/mURgCGJ5KogvDVREzQhlMOyjfY4x/AAkFGkqKtp6m650oQR77Y5o+8gjkMCkSCJUfwKa3ipauOIEyQYGLEJW60T0e8ZRYqBy9gBl9KGjv0AjtQCEx0aokFjIy0wx9km6eWq0G4YEwQEbk1pU1MaqVZEkmkW0d4piM5JUosCcgkh5sapEUQ5FXCgvANinveqsC2ZbVyDYu9QlqCpRZWpdxdxFUGKEcQpEXw5UfCxERKCly6l3GWhqfqgyowRkVMaJaaa7lyhk7AhOMI0Y8YycWQ4ePERn4dSb4uybJG5XGQ5v/bkSZuiOX5qAKtoqh9lkaw9ipZFutAZANavmZsTORACmlMfCRcXuty3OVHyWW1psL4sYPHIWk7gFRFeFO5eqXjbKjJmmxy9AbyKdqtlCqSXIR9CaTMHsmzRuVFHBGuBpmcGirkkOjaheWvhoPe8CcvuCo4WASBFq02JOgcwS1eDmc4FWdIwkS2fhRubOQc2QIqmG2nVufZfu4+WCoR2Wyr1mxvDOObxsR47ipoouh7KppVdoRq0kjCbqXNqsKd7utVJp5EBGdirF1utuWg4CZeEmRKAGeCcsRHTqhEDANYDsMrpCPoTu/0quKQqCgJngRzkKDjtM+STMQnkWBm0m3/qaqm3jqs6lalsEJsgCasOdgE4cKEw+7pROZubW6kMETEWdon1dIv454YZmys1n6UNuQi9R6wyULmeh3OjRDjSKToin6vjTIlbuyCaQbZU+4qn9povgqoGCaSonzU4WptciDsBuumggorgSKrRUircL4v3ZyY4NpZYyKwDX+u2tjExRRqDGbw28LsU5KKGOZsaUbEwHYWqxhqo5KNclqqEIPlGD6WRg4oOwxvW25k/1osFn8UR2SrOUDqTiaEatwwGWspxECOJunNcpgnU6roUo7MS3DYi9rscEhlzNkGe/KipiwE7EpkX+HfvqJlcJ7D4M5RW4pwRwCnXTqKkXJu/xk/cqe24ysc0fjOLGhq5VOycWmWpme6ZlXWZn2ypr9+5RA5skHUg/de8dTi5rAKqEF+7VseKG4y6V3qJTuMn1dCsi5/LjgYgn2NZ/sQIziIpia/rkDWQlbi6nt+ch77LCzUDWtkoXuCpWxS5dIKMlrS5dQq8okVJ1i+ginvsjg7Jj2QRMhYZVP+VRuT51NasGk6pXyuJyhjZa3UH2tEKZII8sIapAjL5VUKw1VeLV8NcvHipjAQcsmOs0JrKWcsCRApnkNn8iYbM676qzKTcmv6K8A4DF5dZTXX7wgKw0PyJYG+prQqssUSx0KvtMlCTGhG9ESzcS2IpWlWJUar5v9VDqtMJHSouN7VjqChAShBvzBI8eVBI+grtANLL3W5cuknEZl1hSfOCsfrDrFFRCWadoQ92w2XpqlGFulGxMQWL7FF2CVgMjVan+xHyMRjLYxVZlaaFuTJSFKIfmVvYGyN5bBugoMw3DUKoy+MuNeeJmdaF3bnLsnG2KOl3Ve+kOhmJh7F9mqaulw9CGsv5J+hfqRl06jWGrZnd6qlrvPN5utxECSN2DSv0i1B2J87LG7TbrYVE6TYfjZtc+j5jKfauhNxWATk3O1THmzIqGMGmo/9Hm2+qGAi1bZycyhpzEraTjQGkyrQgnJJbDVrL24vuGpcStLh8PRyf7c6OsZx3Motb3/vVfett/FkX4HMIufLO4A3fFtnaUBOZk7w
yDjOLghRp+pDhQRnsKZ3fAf4OjZ0ejrSJ/GCxLCEUpdrI2kEOAg4hDum4pRmsfatpkY4hmc4QryD6urNMU+D52q4iAu46wnkESHpiKe4hquE+aj/uItreIi/uIzPOI3XuI3fOI7nuI7vOI/3uI//OJAHuRgLOZEzxBgPuUIcuctx6j4weZE/uZbGOJTruDJUuZUrmRiEouhlQu9ZeWlN+Y7jQF8YsLPsSVY4uZLfTV8kGZi7XJq3xhi/OYdiTdYkRJlZjUGkgSeSR6d5X6gsAzK1uf0RkxhwOegi117s00FUjWrQ+V7gVKALet913VpJwuc6ek8oury0Czv6haFjjz5EuqR/G6Wv1YGYLKJnukLAgF+ggTJAwyQcSKr3BBd8k6iPeo0d0zOeerlieoAohKcrhJ/0BSpiD6DXOa4jGqtDo0OV66wr+p0vxJ1rusOMebK7/1yp98QNTMLWZTmf9LqoS1xPFHtCZBg30lCoI/u1N9YB9MWnH8Ss98S7c+izJ4S4G0ZDQAMxZIKUV7u6r/s3ZTsANHu5+8UNgLu6B9u891403DrA05DAL7xB6DpfOLuolxnBs6HDP/zdUHxPZDxC+DoAIHyiFzxfSDxlbzzHO8yy74UbMESw+Vqn1jtC3DvKu5y1r3zA9wXIw7tfMERWyLlstcYyoIacz4yvp5bNoztDFH1CQEOkp9Zp4E1q6PxCRDxDDHvFt0YmZALYZUIuUvtC7LvWzTvNl/nJ342XEEMafD2ZL7q6K0MmTMLXvQGNqYaTW71BCHzPG4Rf8PpCZP+Cx/dFDIi9Qoi8gRCGr39K1omewMdA6GU5jYmB6Dl+QqCGPsjAnpz7lgQ64mt7v1u9wL+8QsQ75yNEPayVkukOYNT75xOIoQ++sFcQsP/JvRcIm+u9QbR72jtLJgg89/3TMwZ9Xgh/Bc09Br1+sO9Dr239QSg/X/AOQlhNzJu57icE8FfQwStE9evO2xujwGcF9Pe+wMsWgRhA9ucizz/j9SNEyxujtJ9/oXM7xfmFCtQ+gcTADYBiu4QHQIQBMJBgQYOZ9u1bYTBhwhsGYzTcl4mgAWUS6xk0KGZSpkliNA68KJFkSZMnUaZUuZJlS5cvW8YIOXPgCmgmA2hEWJL/osE0EntuLAky5EViaDoSJXjD46RJaIglNMCwocaSygCIMTkTjcmHBrvCFDuWbFmzZyXKpKlRq8mgBEe6tVpV58lJRUliLbizpFqCEiGajEsyZNSTbNEmVryYsUu/a8XwNfkYQFiUlA3v02jZpFK4JIkZnHRyYUHABuu53JzSM4AbjWHHln32AEONEVPqNa1S98Cwd6mihKaRHsm3ACRLnLo7IV6Wc1Eexz2benXrJCnvCx04ukEYmoZpyjSefHnurdumLP1ZYm/kJ7M3BA5x8OHgJ91Pv76fP2MYVFsDYLSTAlyLpoQoS64zg5Yx7iD7mGuOpgEhHEg/kzIq6ML+njjsEKb49lmuoPoa+srAExFEbaXj6tuOIAqxu09Cmm4gUbOCXktpuIIM8NDHH1mqLcKQ4DvRSKlkPIlF0B40ScSBvDKQsxkHSg8/7oDMUssUI3SRoA33MbFKNBpBAykxkCLTzI/SRIpLgm5Sacn2RCsyQozEvI0wHFXK8MstAQXyvzsDnHIfLgyyMSaDMkOptfqOU5BKKFmjyUqhdMQy/9BN+QOxIcoAaHQfNOokK8GV8myxyb6SdAtUAOIyKEeU8uPU1v2E/KskIiXyEoAeVxIjhvpI1XAljSBdlaQnAXDpuPcmnfVKY2+tdjZPG3r2Qo1EvWqpXqEjENm8Si1pPUpbOi49TFPS1Np3FRtU16EQ+zTcrcrVc9pEmdzrJGbzisFKjMCi69vc3IVX4bKwVY7bhtwTySRfJXao3n7HBbcgGNO6Tykb3aNQVt4SXtjkl3JFF1+D4tznVaaUqccjZn0jOKRMLlJGmTxHdNDflXWNuNuMbyRoYKABAPPkpVVqGCiNVjjNyIQL
# Copyright Notice:
# Copyright 2016-2025 DMTF. All rights reserved.
# License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/main/LICENSE.md
"""Parsing and validation of a Redfish service's $metadata document."""

import os
import logging
from collections import Counter, OrderedDict, defaultdict
from collections import namedtuple
from bs4 import BeautifulSoup
from functools import lru_cache
import os.path

from redfish_service_validator.helper import getNamespace, getNamespaceUnversioned

# Shared validator logger; DEBUG here, handlers decide what is emitted.
my_logger = logging.getLogger('rsv')
my_logger.setLevel(logging.DEBUG)

# XML namespaces for OData CSDL elements (Edm) and the EDMX wrapper document.
EDM_NAMESPACE = "http://docs.oasis-open.org/odata/ns/edm"
EDMX_NAMESPACE = "http://docs.oasis-open.org/odata/ns/edmx"
# Element names that are legal within each namespace; anything else is flagged
# by the predicates below.
EDM_TAGS = ['Action', 'Annotation', 'Collection', 'ComplexType', 'EntityContainer', 'EntityType', 'EnumType', 'Key',
            'Member', 'NavigationProperty', 'Parameter', 'Property', 'PropertyRef', 'PropertyValue', 'Record',
            'Schema', 'Singleton', 'Term', 'TypeDefinition']
EDMX_TAGS = ['DataServices', 'Edmx', 'Include', 'Reference']


def bad_edm_tags(tag):
    """find_all predicate: tag is in the Edm namespace but is not a recognized Edm element."""
    return tag.namespace == EDM_NAMESPACE and tag.name not in EDM_TAGS


def bad_edmx_tags(tag):
    """find_all predicate: tag is in the Edmx namespace but is not a recognized Edmx element."""
    return tag.namespace == EDMX_NAMESPACE and tag.name not in EDMX_TAGS


def other_ns_tags(tag):
    """find_all predicate: tag belongs to neither the Edm nor the Edmx namespace."""
    return tag.namespace != EDM_NAMESPACE and tag.namespace != EDMX_NAMESPACE


def reference_missing_uri_attr(tag):
    """find_all predicate: an edmx:Reference element lacking its required Uri attribute."""
    return tag.name == 'Reference' and tag.get('Uri') is None


def include_missing_namespace_attr(tag):
    """find_all predicate: an edmx:Include element lacking its required Namespace attribute."""
    return tag.name == 'Include' and tag.get('Namespace') is None


def format_tag_string(tag):
    """Format a tag as 'prefix:name attr="value" ...' for use in problem reports."""
    tag_name = tag.name if tag.prefix is None else tag.prefix + ':' + tag.name
    tag_attr = ''
    for attr in tag.attrs:
        tag_attr += '{}="{}" '.format(attr, tag.attrs[attr])
    return (tag_name + ' ' + tag_attr).strip()
    ' 56 | for entry in entries: 57 | html_str += '
  • {}
  • '.format(entry) 58 | html_str += '
' 59 | return html_str 60 | 61 | 62 | def tag_list_html(tags_dict): 63 | html_str = '
    ' 64 | for tag in tags_dict: 65 | html_str += '
  • {} {}
  • ' \ 66 | .format(tag, '(' + str(tags_dict[tag]) + ' occurrences)' if tags_dict[tag] > 1 else '') 67 | html_str += '
class Metadata(object):
    """
    Wraps the service's /redfish/v1/$metadata document and the validation
    checks performed against it (tag sanity, reference attributes, namespace
    includes, and the RedfishExtensions 'Redfish' alias).
    """
    # canonical URI and pseudo-type for the metadata document
    metadata_uri = '/redfish/v1/$metadata'
    schema_type = '$metadata'

    def __init__(self, data, service):
        """
        Parse the $metadata payload and run all checks.

        :param data: raw XML text of the $metadata document (falsy if the GET failed)
        :param service: service object used to resolve referenced schema URIs
        """
        my_logger.info('Constructing metadata...')
        self.success_get = False
        self.service = service
        # map of schema URI -> list of namespaces included from that URI
        self.uri_to_namespaces = defaultdict(list)
        self.metadata_namespaces = set()
        self.service_namespaces = set()
        # problems found by check_tags(), each as {description: occurrence count}
        self.bad_tags = dict()
        self.bad_tag_ns = dict()
        self.refs_missing_uri = dict()
        self.includes_missing_ns = dict()
        # problems found by check_namespaces_in_schemas()
        self.bad_schema_uris = set()
        self.bad_namespace_include = set()
        self.counter = OrderedCounter()
        self.redfish_extensions_alias_ok = False

        self.md_soup = None
        self.service_refs = None
        uri = Metadata.metadata_uri

        # -1 marks "no timing recorded"; to_html() reports this value
        self.elapsed_secs = -1
        self.schema_obj = None
        if data:
            self.md_soup = BeautifulSoup(data, "xml")
            self.service_refs = getReferenceDetails(self.md_soup)
            self.success_get = True
            # set of namespaces included in $metadata
            self.metadata_namespaces = {k for k in self.service_refs.keys()}
            # create map of schema URIs to namespaces from $metadata
            for k in self.service_refs.keys():
                self.uri_to_namespaces[self.service_refs[k][1]].append(self.service_refs[k][0])
            my_logger.debug('Metadata: uri = {}'.format(uri))
            my_logger.debug('Metadata: metadata_namespaces: {} = {}'
                            .format(type(self.metadata_namespaces), self.metadata_namespaces))
            # check for Redfish alias for RedfishExtensions.v1_0_0
            ref = self.service_refs.get('Redfish')
            if ref is not None and ref[0] == 'RedfishExtensions.v1_0_0':
                self.redfish_extensions_alias_ok = True
            my_logger.debug('Metadata: redfish_extensions_alias_ok = {}'.format(self.redfish_extensions_alias_ok))
            # check for XML tag problems
            self.check_tags()
            # check that all namespace includes are found in the referenced schema
            my_logger.debug('Metadata: bad_tags = {}'.format(self.bad_tags))
            my_logger.debug('Metadata: bad_tag_ns = {}'.format(self.bad_tag_ns))
            my_logger.debug('Metadata: refs_missing_uri = {}'.format(self.refs_missing_uri))
            my_logger.debug('Metadata: includes_missing_ns = {}'.format(self.includes_missing_ns))
            my_logger.debug('Metadata: bad_schema_uris = {}'.format(self.bad_schema_uris))
            my_logger.debug('Metadata: bad_namespace_include = {}'.format(self.bad_namespace_include))
            # Warm the schema cache with every referenced schema, then clear it.
            # NOTE(review): loop indentation was lost in extraction; cache_clear()
            # and check_namespaces_in_schemas() are placed after the loop, which
            # matches the apparent intent — confirm against upstream history.
            for ref in self.service_refs:
                name, uri = self.service_refs[ref]
                success, soup, origin = getSchemaDetails(service, getNamespace(name), uri)
            getSchemaDetails.cache_clear()
            self.check_namespaces_in_schemas()
        else:
            my_logger.warning('Metadata Warning: getSchemaDetails() did not return success')

    def get_schema_obj(self):
        # schema_obj is set to None in __init__ and never reassigned in this
        # module; accessor kept for interface compatibility
        return self.schema_obj

    def get_soup(self):
        """Return the BeautifulSoup object for $metadata (None if the GET failed)."""
        return self.md_soup

    def get_service_refs(self):
        """Return the alias -> (Namespace, Uri) reference dict parsed from $metadata."""
        return self.service_refs

    def get_metadata_namespaces(self):
        """Return the set of namespaces included in $metadata."""
        return self.metadata_namespaces

    def get_service_namespaces(self):
        """Return the set of namespaces seen while walking the service."""
        return self.service_namespaces

    def add_service_namespace(self, namespace):
        """Record a namespace encountered in a service resource."""
        self.service_namespaces.add(namespace)

    def get_missing_namespaces(self):
        """Return namespaces used by the service but absent from $metadata."""
        return self.service_namespaces - self.metadata_namespaces

    def get_schema_uri(self, namespace):
        """Return the schema URI that $metadata maps *namespace* to, or None."""
        ref = self.service_refs.get(namespace)
        if ref is not None:
            return ref[1]
        else:
            return None
163 | """ 164 | try: 165 | for tag in self.md_soup.find_all(bad_edm_tags): 166 | tag_str = format_tag_string(tag) 167 | self.bad_tags[tag_str] = self.bad_tags.get(tag_str, 0) + 1 168 | for tag in self.md_soup.find_all(bad_edmx_tags): 169 | tag_str = format_tag_string(tag) 170 | self.bad_tags[tag_str] = self.bad_tags.get(tag_str, 0) + 1 171 | for tag in self.md_soup.find_all(reference_missing_uri_attr): 172 | tag_str = format_tag_string(tag) 173 | self.refs_missing_uri[tag_str] = self.refs_missing_uri.get(tag_str, 0) + 1 174 | for tag in self.md_soup.find_all(include_missing_namespace_attr): 175 | tag_str = format_tag_string(tag) 176 | self.includes_missing_ns[tag_str] = self.includes_missing_ns.get(tag_str, 0) + 1 177 | for tag in self.md_soup.find_all(other_ns_tags): 178 | tag_str = tag.name if tag.prefix is None else tag.prefix + ':' + tag.name 179 | tag_ns = 'xmlns{}="{}"'.format(':' + tag.prefix if tag.prefix is not None else '', tag.namespace) 180 | tag_str = tag_str + ' ' + tag_ns 181 | self.bad_tag_ns[tag_str] = self.bad_tag_ns.get(tag_str, 0) + 1 182 | except Exception as e: 183 | my_logger.warning('Metadata Warning: Problem parsing $metadata document: {}'.format(e)) 184 | 185 | def check_namespaces_in_schemas(self): 186 | """ 187 | Check that all namespaces included from a schema URI are actually in that schema 188 | """ 189 | for k in self.uri_to_namespaces.keys(): 190 | schema_uri = k 191 | if '#' in schema_uri: 192 | schema_uri, frag = k.split('#', 1) 193 | schema_type = os.path.basename(os.path.normpath(k)).strip('.xml').strip('_v1') 194 | success, soup, _ = getSchemaDetails(self.service, getNamespace(schema_type), schema_uri) 195 | if success: 196 | for namespace in self.uri_to_namespaces[k]: 197 | if soup.find('Schema', attrs={'Namespace': namespace}) is None: 198 | msg = 'Namespace {} not found in schema {}'.format(namespace, k) 199 | my_logger.debug('Metadata: {}'.format(msg)) 200 | self.bad_namespace_include.add(msg) 201 | else: 202 | 
my_logger.error('Metadata Warning: failure opening schema {} of type {}'.format(schema_uri, schema_type)) 203 | self.bad_schema_uris.add(schema_uri) 204 | 205 | def get_counter(self): 206 | """ 207 | Create a Counter instance containing the counts of any errors found 208 | """ 209 | counter = OrderedCounter() 210 | # informational counters 211 | counter['metadataNamespaces'] = len(self.metadata_namespaces) 212 | counter['serviceNamespaces'] = len(self.service_namespaces) 213 | # error counters 214 | counter['missingRedfishAlias'] = 0 if self.redfish_extensions_alias_ok else 1 215 | counter['missingNamespaces'] = len(self.get_missing_namespaces()) 216 | counter['badTags'] = len(self.bad_tags) 217 | counter['missingUriAttr'] = len(self.refs_missing_uri) 218 | counter['missingNamespaceAttr'] = len(self.includes_missing_ns) 219 | counter['badTagNamespaces'] = len(self.bad_tag_ns) 220 | counter['badSchemaUris'] = len(self.bad_schema_uris) 221 | counter['badNamespaceInclude'] = len(self.bad_namespace_include) 222 | self.counter = counter 223 | return self.counter 224 | 225 | def to_html(self): 226 | """ 227 | Convert the $metadata validation results to HTML 228 | """ 229 | time_str = 'response time {}s'.format(self.elapsed_secs) 230 | section_title = '{} ({})'.format(Metadata.metadata_uri, time_str) 231 | 232 | counter = self.get_counter() 233 | 234 | html_str = '' 235 | html_str += '{}'\ 236 | .format(section_title) 237 | html_str += '' 238 | html_str += ''.format('') 241 | html_str += ''\ 242 | .format(Metadata.metadata_uri, Metadata.schema_type) 243 | html_str += '' 245 | html_str += '' 259 | html_str += '
    def to_html(self):
        """
        Convert the $metadata validation results to an HTML report section.

        NOTE(review): the HTML tag literals in this method were corrupted when
        this file was extracted (markup was rendered away); the tags/classes
        below are a best-effort reconstruction. All control flow, counters and
        message text match the recovered source — verify markup upstream.
        """
        time_str = 'response time {}s'.format(self.elapsed_secs)
        section_title = '{} ({})'.format(Metadata.metadata_uri, time_str)

        counter = self.get_counter()

        html_str = ''
        html_str += '<table><tr><th class="titlerow bluebg"><b>{}</b></th></tr>'\
            .format(section_title)
        html_str += '<tr><td class="titlesub log">'
        html_str += '<div>{}</div>'.format('<b>Show results</b>')
        html_str += '<div>Schema File: {}</div><div>Resource Type: {}</div>'\
            .format(Metadata.metadata_uri, Metadata.schema_type)
        html_str += '<div ' + ('class="pass"> GET Success' if self.success_get else 'class="fail"> GET Failure') + '</div>'

        # render every non-zero counter; any non-zero 'bad*'/'missing*' counter
        # marks the whole section as failed
        errors_found = False
        for count_type in counter.keys():
            style = 'class=log'
            if 'bad' in count_type or 'missing' in count_type:
                if counter[count_type] > 0:
                    errors_found = True
                    style = 'class="fail log"'
            if counter.get(count_type, 0) > 0:
                html_str += '<div {style}>{p}: {q}</div>'.format(
                    p=count_type, q=counter.get(count_type, 0), style=style)

        html_str += '</td></tr>'
        html_str += '<tr><th class="titlesub">$metadata validation results</th></tr>'

        if self.success_get and not errors_found:
            html_str += '<tr><td class="pass log">Validation successful</td></tr>'
        elif not self.success_get:
            html_str += '<tr><td class="fail log">ERROR - Unable to retrieve $metadata resource at {}</td></tr>'\
                .format(Metadata.metadata_uri)
        else:
            # one row per category of problem found
            if not self.redfish_extensions_alias_ok:
                html_str += '<tr><td class="fail log">ERROR - $metadata does not include the required "RedfishExtensions.v1_0_0" namespace with an alias of "Redfish"</td></tr>'
            if len(self.get_missing_namespaces()) > 0:
                html_str += '<tr><td class="fail log">ERROR - The following namespaces are referenced by the service, but are not included in $metadata:<ul>'
                for ns in self.get_missing_namespaces():
                    html_str += '<li>{}</li>'.format(ns)
                html_str += '</ul></td></tr>'
            if len(self.bad_tags) > 0:
                html_str += '<tr><td class="fail log">ERROR - The following tag names in $metadata are unrecognized (check spelling or case):'
                html_str += tag_list_html(self.bad_tags)
                html_str += '</td></tr>'
            if len(self.refs_missing_uri) > 0:
                html_str += '<tr><td class="fail log">ERROR - The following Reference tags in $metadata are missing the expected Uri attribute (check spelling or case):'
                html_str += tag_list_html(self.refs_missing_uri)
                html_str += '</td></tr>'
            if len(self.includes_missing_ns) > 0:
                html_str += '<tr><td class="fail log">ERROR - The following Include tags in $metadata are missing the expected Namespace attribute (check spelling or case):'
                html_str += tag_list_html(self.includes_missing_ns)
                html_str += '</td></tr>'
            if len(self.bad_tag_ns) > 0:
                html_str += '<tr><td class="fail log">ERROR - The following tags in $metadata have an unexpected namespace:'
                html_str += tag_list_html(self.bad_tag_ns)
                html_str += '</td></tr>'
            if len(self.bad_schema_uris) > 0:
                html_str += '<tr><td class="fail log">ERROR - The following schema URIs referenced from $metadata could not be retrieved:'
                html_str += list_html(self.bad_schema_uris)
                html_str += '</td></tr>'
            if len(self.bad_namespace_include) > 0:
                html_str += '<tr><td class="fail log">ERROR - The following namespaces included in $metadata could not be found in the referenced schema URI:'
                html_str += list_html(self.bad_namespace_include)
                html_str += '</td></tr>'
        html_str += '</table>'

        return html_str
class OrderedCounter(Counter, OrderedDict):
    """Counter that remembers the order elements are first encountered"""

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, OrderedDict(self))

    def __reduce__(self):
        # rebuild from an OrderedDict of the counts so pickling preserves order
        return self.__class__, (OrderedDict(self),)


def storeSchemaToLocal(xml_data, origin, service):
    """storeSchemaToLocal

    Moves data pulled from service/online to local schema storage.

    Only schemas fetched from the network are written: origins marked
    'localFile' or containing '$metadata' are skipped, and an existing file
    is never overwritten.

    NOTE(review): the original docstring said "Does NOT do so if preferonline
    is specified", but no such option is checked in this code — likely stale.

    :param xml_data: XML payload being written
    :param origin: origin of the XML pulled (URI or 'localFile:...' marker)
    :param service: service object; service.config['metadatafilepath'] is the target directory
    """
    config = service.config
    SchemaLocation = config['metadatafilepath']
    if not os.path.isdir(SchemaLocation):
        os.makedirs(SchemaLocation)
    if 'localFile' not in origin and '$metadata' not in origin:
        # file name is the last segment of the origin URI
        __, xml_name = origin.rsplit('/', 1)
        new_file = os.path.join(SchemaLocation, xml_name)
        if not os.path.isfile(new_file):
            with open(new_file, "w") as filehandle:
                filehandle.write(xml_data)
            my_logger.info('Writing online XML to file: {}'.format(xml_name))
        else:
            my_logger.info('NOT writing online XML to file: {}'.format(xml_name))
@lru_cache(maxsize=64)
def getSchemaDetails(service, SchemaType, SchemaURI):
    """
    Find Schema file for given Namespace.

    Results are memoized via lru_cache; Metadata.__init__ clears the cache with
    getSchemaDetails.cache_clear() after warming it.
    NOTE(review): caching on `service` requires it to be hashable and keeps it
    alive for the cache's lifetime — confirm this is acceptable.

    param service: service object for HTTP access (None -> local lookup only)
    param SchemaType: Schema Namespace, such as ServiceRoot
    param SchemaURI: uri to grab schema, given LocalOnly is False
    return: (success boolean, a Soup object, origin)
    """
    my_logger.debug('getting Schema of {} {}'.format(SchemaType, SchemaURI))

    if SchemaType is None:
        return False, None, None

    if service is None:
        # no live service: only the local schema directory can be consulted
        return getSchemaDetailsLocal(SchemaType, SchemaURI, {})

    success, soup, origin = getSchemaDetailsLocal(SchemaType, SchemaURI, service.config)
    if success:
        return success, soup, origin

    xml_suffix = '_v1.xml'

    # Fixed: condition was "(SchemaURI is not None) or (SchemaURI is not None
    # and '/redfish/v1/$metadata' in SchemaURI)" — the second clause is subsumed
    # by the first, so the test reduces to a plain None check.
    if SchemaURI is not None:
        # Get our expected Schema file here
        # if success, generate Soup, then check for frags to parse
        # start by parsing references, then check for the refLink
        if '#' in SchemaURI:
            base_schema_uri, frag = tuple(SchemaURI.rsplit('#', 1))
        else:
            base_schema_uri, frag = SchemaURI, None
        success, data, response, elapsed = service.callResourceURI(base_schema_uri)
        if success:
            soup = BeautifulSoup(data, "xml")
            # if frag, look inside xml for real target as a reference
            if frag is not None:
                # prefer type over frag, truncated down
                # using frag, check references
                frag = getNamespace(SchemaType)
                frag = frag.split('.', 1)[0]
                refType, refLink = getReferenceDetails(
                    soup, name=base_schema_uri).get(frag, (None, None))
                if refLink is not None:
                    success, linksoup, newlink = getSchemaDetails(service, refType, refLink)
                    if success:
                        return True, linksoup, newlink
                    else:
                        my_logger.error("Metadata Error: SchemaURI couldn't call reference link {} inside {}".format(frag, base_schema_uri))
                else:
                    my_logger.error("Metadata Error: SchemaURI missing reference link {} inside {}".format(frag, base_schema_uri))
                # error reported; assume likely schema uri to allow continued validation
                uri = 'http://redfish.dmtf.org/schemas/v1/{}{}'.format(frag, xml_suffix)
                my_logger.info("Continue assuming schema URI for {} is {}".format(SchemaType, uri))
                return getSchemaDetails(service, SchemaType, uri)
            else:
                # whole-document fetch succeeded: cache it locally for next run
                storeSchemaToLocal(data, base_schema_uri, service)
                return True, soup, base_schema_uri
        else:
            my_logger.debug("SchemaURI called unsuccessfully: {}".format(base_schema_uri))
    return getSchemaDetailsLocal(SchemaType, SchemaURI, service.config)
def getSchemaDetailsLocal(SchemaType, SchemaURI, config):
    """
    Find Schema file for given Namespace, from the local schema directory.

    param SchemaType: Schema Namespace, such as ServiceRoot
    param SchemaURI: uri to grab schema (used only to derive the xml file name)
    param config: config dict; 'metadatafilepath' is the local schema directory
    return: (success boolean, a Soup object, origin)
    """
    Alias = getNamespaceUnversioned(SchemaType)
    SchemaLocation, SchemaSuffix = config['metadatafilepath'], '_v1.xml'
    if SchemaURI is not None:
        # file name is the last path segment, with any '#fragment' removed
        uriparse = SchemaURI.split('/')[-1].split('#')
        xml = uriparse[0]
    else:
        # no URI available: retry with the conventional file name for the type
        # (fixed: a stray trailing comma previously made this statement a tuple)
        my_logger.warning("Metadata Warning: SchemaURI was empty, must generate xml name from type {}".format(SchemaType))
        return getSchemaDetailsLocal(SchemaType, Alias + SchemaSuffix, config)
    my_logger.debug(('local', SchemaType, SchemaURI, SchemaLocation + '/' + xml))
    filestring = Alias + SchemaSuffix if xml is None else xml
    try:
        # get file (filestring == xml here; used consistently with the logs below)
        with open(SchemaLocation + '/' + filestring, "r") as filehandle:
            data = filehandle.read()

        # parse tags and confirm the schema's own namespace matches the request
        soup = BeautifulSoup(data, "xml")
        edmxTag = soup.find('Edmx', recursive=False)
        parentTag = edmxTag.find('DataServices', recursive=False)
        child = parentTag.find('Schema', recursive=False)
        SchemaNamespace = child['Namespace']
        FoundAlias = SchemaNamespace.split(".")[0]
        my_logger.debug(FoundAlias)

        # NOTE(review): substring test accepts partial matches (e.g. 'Resource'
        # in 'ResourceCollection'); equality may be intended — confirm before
        # tightening, as callers may rely on the looser behavior
        if FoundAlias in Alias:
            return True, soup, "localFile:" + SchemaLocation + '/' + filestring

    except FileNotFoundError:
        # if we're looking for $metadata locally... ditch looking for it, go straight to file
        if '/redfish/v1/$metadata' in SchemaURI and Alias != '$metadata':
            my_logger.debug("Unable to find a xml of {} at {}, defaulting to {}".format(SchemaURI, SchemaLocation, Alias + SchemaSuffix))
            return getSchemaDetailsLocal(SchemaType, Alias + SchemaSuffix, config)
        else:
            my_logger.warning("Schema file {} not found in {}".format(filestring, SchemaLocation))
            if Alias == '$metadata':
                my_logger.warning("Metadata Warning: If $metadata cannot be found, Annotations may be unverifiable")
    except Exception:
        my_logger.error("Metadata Error: A problem when getting a local schema has occurred {}".format(SchemaURI))
        my_logger.error("output: ", exc_info=True)
    return False, None, None
" + 460 | "This may cause properties of the form [PropertyName]@Redfish.TermName to be unrecognized.") 461 | my_logger.error(msg.format(name, namespace, 'missing' if alias is None else "'" + str(alias) + "'")) 462 | return False 463 | return True 464 | 465 | 466 | def getReferenceDetails(soup, metadata_dict=None, name='xml'): 467 | """ 468 | Create a reference dictionary from a soup file 469 | 470 | param arg1: soup 471 | param metadata_dict: dictionary of service metadata, compare with 472 | return: dictionary 473 | """ 474 | includeTuple = namedtuple('include', ['Namespace', 'Uri']) 475 | refDict = {} 476 | 477 | maintag = soup.find("Edmx", recursive=False) 478 | reftags = maintag.find_all('Reference', recursive=False) 479 | for ref in reftags: 480 | includes = ref.find_all('Include', recursive=False) 481 | for item in includes: 482 | uri = ref.get('Uri') 483 | ns, alias = (item.get(x) for x in ['Namespace', 'Alias']) 484 | if ns is None or uri is None: 485 | my_logger.error("Metadata Error: Reference incorrect for: {}".format(item)) 486 | continue 487 | if alias is None: 488 | alias = ns 489 | refDict[alias] = includeTuple(ns, uri) 490 | # Check for proper Alias for RedfishExtensions 491 | if name == '$metadata' and ns.startswith('RedfishExtensions.'): 492 | check_bool = check_redfish_extensions_alias(name, ns, alias) 493 | 494 | cntref = len(refDict) 495 | if metadata_dict is not None: 496 | refDict.update(metadata_dict) 497 | my_logger.debug("METADATA: References generated from {}: {} out of {}".format(name, cntref, len(refDict))) 498 | return refDict --------------------------------------------------------------------------------