├── redfish_interop_validator ├── __init__.py ├── config.py ├── helper.py ├── logger.py ├── session.py ├── profile.py ├── RedfishInteropValidator.py ├── tohtml.py ├── validateResource.py ├── RedfishLogo.py ├── traverseInterop.py └── interop.py ├── AUTHORS.md ├── requirements.txt ├── redfish.ico ├── .gitignore ├── test_conf.json ├── config └── example.ini ├── test-profiles ├── ChassisType_6.json ├── ChassisType_2.json ├── ChassisType_5.json ├── ChassisType_3.json ├── ChassisType_4.json ├── ChassisType_1.json ├── UseCaseType_ChassisType.json └── UseCaseType_ProcessorType.json ├── RedfishInteropValidator.py ├── setup.py ├── LICENSE.md ├── CONTRIBUTING.md ├── .github └── workflows │ └── release.yml ├── tests └── interoptests.py ├── CHANGELOG.md └── README.md /redfish_interop_validator/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | # Original Contribution: 2 | 3 | * Majec Systems 4 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4>=4.6.0 2 | lxml 3 | requests 4 | jsonschema 5 | -------------------------------------------------------------------------------- /redfish.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DMTF/Redfish-Interop-Validator/HEAD/redfish.ico -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | build/ 3 | dist/ 4 | logs/ 5 | *.pyc 6 | *.spec 7 | *.egg-info/ 8 | -------------------------------------------------------------------------------- /test_conf.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "test": { 3 | "command": "$interpreter RedfishInteropValidator.py --ip $target_system -u $username -p $password --logdir $output_subdir --schema profiles/RedfishInteroperabilityProfile.v1_0_0.json profiles/SampleProfile.json" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /config/example.ini: -------------------------------------------------------------------------------- 1 | [Tool] 2 | verbose = 3 | 4 | [Host] 5 | ip = http://localhost:8000 6 | username = MyUser 7 | password = MyPass 8 | description = MySystem 9 | forceauth = False 10 | authtype = Basic 11 | token = 12 | 13 | [Validator] 14 | payload = 15 | logdir = ./logs 16 | oemcheck = True 17 | online_profiles = True 18 | debugging = False 19 | collectionlimit = LogEntry 20 20 | -------------------------------------------------------------------------------- /test-profiles/ChassisType_6.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaDefinition": "RedfishInteroperabilityProfile.v1_5_0", 3 | "ProfileName": "ChassisType5", 4 | "ProfileVersion": "1.0.0", 5 | "Purpose": "Ensures at least one chassis is an 'Enclosure'. This test should pass.", 6 | "Resources": { 7 | "Chassis": { 8 | "PropertyRequirements": { 9 | "ChassisType": { 10 | "Comparison": "AllOf", 11 | "Values": [ 12 | "Enclosure" 13 | ] 14 | } 15 | } 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /test-profiles/ChassisType_2.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaDefinition": "RedfishInteroperabilityProfile.v1_5_0", 3 | "ProfileName": "ChassisType1", 4 | "ProfileVersion": "1.0.0", 5 | "Purpose": "Ensures at least one chassis is an 'Enclosure'. 
This test should pass (MultiBladeEncl is an Enclosure).", 6 | "Resources": { 7 | "Chassis": { 8 | "PropertyRequirements": { 9 | "ChassisType": { 10 | "Comparison": "AnyOf", 11 | "Values": [ 12 | "Enclosure" 13 | ] 14 | } 15 | } 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /test-profiles/ChassisType_5.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaDefinition": "RedfishInteroperabilityProfile.v1_5_0", 3 | "ProfileName": "ChassisType4", 4 | "ProfileVersion": "1.0.0", 5 | "Purpose": "Ensures at least one chassis is an 'Enclosure' and a 'Blade'. This test should pass.", 6 | "Resources": { 7 | "Chassis": { 8 | "PropertyRequirements": { 9 | "ChassisType": { 10 | "Comparison": "AllOf", 11 | "Values": [ 12 | "Enclosure", 13 | "Blade" 14 | ] 15 | } 16 | } 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /RedfishInteropValidator.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2017-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/Redfish-Interop-Validator/blob/master/LICENSE.md 4 | 5 | import logging 6 | import sys 7 | 8 | from redfish_interop_validator.RedfishInteropValidator import main 9 | 10 | my_logger = logging.getLogger('rsv') 11 | my_logger.setLevel(logging.DEBUG) 12 | 13 | if __name__ == '__main__': 14 | try: 15 | status_code, lastResultsPage, exit_string = main() 16 | sys.exit(status_code) 17 | except Exception as e: 18 | my_logger.exception("Program finished prematurely: %s", e) 19 | raise 20 | -------------------------------------------------------------------------------- /test-profiles/ChassisType_3.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaDefinition": "RedfishInteroperabilityProfile.v1_5_0", 3 | "ProfileName": "ChassisType2", 4 | "ProfileVersion": "1.0.0", 5 | "Purpose": "Ensures at least one chassis is an 'Enclosure' or a 'Rack'. This test should pass (MultiBladeEncl is an Enclosure).", 6 | "Resources": { 7 | "Chassis": { 8 | "PropertyRequirements": { 9 | "ChassisType": { 10 | "Comparison": "AnyOf", 11 | "Values": [ 12 | "Enclosure", 13 | "Rack" 14 | ] 15 | } 16 | } 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /test-profiles/ChassisType_4.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaDefinition": "RedfishInteroperabilityProfile.v1_5_0", 3 | "ProfileName": "ChassisType3", 4 | "ProfileVersion": "1.0.0", 5 | "Purpose": "Ensures at least one chassis is an 'Enclosure' and a 'Rack'. 
This test should fail (there are no 'Rack' chassis).", 6 | "Resources": { 7 | "Chassis": { 8 | "PropertyRequirements": { 9 | "ChassisType": { 10 | "Comparison": "AllOf", 11 | "Values": [ 12 | "Enclosure", 13 | "Rack" 14 | ] 15 | } 16 | } 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /test-profiles/ChassisType_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaDefinition": "RedfishInteroperabilityProfile.v1_5_0", 3 | "ProfileName": "ChassisType5", 4 | "ProfileVersion": "1.0.0", 5 | "Purpose": "Ensures the Thermal temperature readings include both 'Intake' and 'CPU' PhysicalContext values. This test should pass.", 6 | "Resources": { 7 | "Thermal": { 8 | "PropertyRequirements": { 9 | "Temperatures": { 10 | "PropertyRequirements": { 11 | "PhysicalContext": { 12 | "Comparison": "AllOf", 13 | "Values": [ "Intake", "CPU" ] 14 | } 15 | } 16 | } 17 | } 18 | } 19 | } 20 | } -------------------------------------------------------------------------------- /test-profiles/UseCaseType_ChassisType.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaDefinition": "RedfishInteroperabilityProfile.v1_5_0", 3 | "ProfileName": "UseCaseType_ChassisType", 4 | "ProfileVersion": "1.0.0", 5 | "Purpose": "Test for UseCaseType 'ChassisType'. 
Should pass one test for every Sensor in public-rackmount1.", 6 | "Resources": { 7 | "Sensor": { 8 | "UseCases": [ 9 | { 10 | "UseCaseTitle": "Chassis Sensor", 11 | "UseCaseType": "ChassisType", 12 | "UseCaseComparison": "Equal", 13 | "UseCaseKeyValues": [ 14 | "RackMount" 15 | ], 16 | "PropertyRequirements": { 17 | "PhysicalContext": {} 18 | } 19 | } 20 | ] 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /test-profiles/UseCaseType_ProcessorType.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaDefinition": "RedfishInteroperabilityProfile.v1_5_0", 3 | "ProfileName": "UseCaseType_ProcessorType", 4 | "ProfileVersion": "1.0.0", 5 | "Purpose": "Test for UseCaseType 'ProcessorType'. Should pass one test for each SubProcessor in public-tower.", 6 | "Resources": { 7 | "Processor": { 8 | "UseCases": [ 9 | { 10 | "UseCaseTitle": "Sub Processor", 11 | "UseCaseType": "ProcessorType", 12 | "UseCaseComparison": "Equal", 13 | "UseCaseKeyValues": [ 14 | "CPU" 15 | ], 16 | "PropertyRequirements": { 17 | "ProcessorType": { 18 | "ReadRequirement": "Mandatory", 19 | "Comparison": "AnyOf", 20 | "Values": [ 21 | "Thread", 22 | "Core" 23 | ] 24 | } 25 | } 26 | } 27 | ] 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2017-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. 
For full text see link: 4 | # https://github.com/DMTF/Redfish-Interop-Validator/blob/main/LICENSE.md 5 | 6 | from setuptools import setup 7 | from codecs import open 8 | 9 | with open("README.md", "r", "utf-8") as f: 10 | long_description = f.read() 11 | 12 | setup( 13 | name="redfish_interop_validator", 14 | version="2.3.3", 15 | description="Redfish Interop Validator", 16 | long_description=long_description, 17 | long_description_content_type="text/markdown", 18 | author="DMTF, https://www.dmtf.org/standards/feedback", 19 | license="BSD 3-clause \"New\" or \"Revised License\"", 20 | classifiers=[ 21 | "Development Status :: 5 - Production/Stable", 22 | "License :: OSI Approved :: BSD License", 23 | "Programming Language :: Python", 24 | "Topic :: Communications" 25 | ], 26 | keywords="Redfish", 27 | url="https://github.com/DMTF/Redfish-Interop-Validator", 28 | packages=["redfish_interop_validator"], 29 | entry_points={ 30 | 'console_scripts': [ 31 | 'rf_interop_validator=redfish_interop_validator.RedfishInteropValidator:main' 32 | ] 33 | }, 34 | install_requires=[ 35 | "requests", 36 | "beautifulsoup4>=4.6.0", 37 | "lxml", 38 | "jsonschema" 39 | ] 40 | ) 41 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2017-2025, Contributing Member(s) of Distributed Management Task 4 | Force, Inc.. All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without modification, 7 | are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. 
Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation and/or 14 | other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its contributors 17 | may be used to endorse or promote products derived from this software without 18 | specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 21 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 22 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 24 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 25 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 26 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 27 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 29 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Overview 4 | 5 | This repository is maintained by the [DMTF](https://www.dmtf.org/ "https://www.dmtf.org/"). All contributions are reviewed and approved by members of the organization. 6 | 7 | ## Submitting Issues 8 | 9 | Bugs, feature requests, and questions are all submitted in the "Issues" section for the project. DMTF members are responsible for triaging and addressing issues. 10 | 11 | ## Contribution Process 12 | 13 | 1. Fork the repository. 14 | 2. Make and commit changes. 15 | 3. Make a pull request. 
16 | 17 | All contributions must adhere to the BSD 3-Clause License described in the LICENSE.md file, and the [Developer Certificate of Origin](#developer-certificate-of-origin). 18 | 19 | Pull requests are reviewed and approved by DMTF members. 20 | 21 | ## Developer Certificate of Origin 22 | 23 | All contributions must adhere to the [Developer Certificate of Origin (DCO)](http://developercertificate.org "http://developercertificate.org"). 24 | 25 | The DCO is an attestation attached to every contribution made by every developer. In the commit message of the contribution, the developer adds a "Signed-off-by" statement and thereby agrees to the DCO. This can be added by using the `--signoff` parameter with `git commit`. 26 | 27 | Full text of the DCO: 28 | 29 | ``` 30 | Developer Certificate of Origin 31 | Version 1.1 32 | 33 | Copyright (C) 2004, 2006 The Linux Foundation and its contributors. 34 | 35 | Everyone is permitted to copy and distribute verbatim copies of this 36 | license document, but changing it is not allowed. 37 | 38 | 39 | Developer's Certificate of Origin 1.1 40 | 41 | By making a contribution to this project, I certify that: 42 | 43 | (a) The contribution was created in whole or in part by me and I 44 | have the right to submit it under the open source license 45 | indicated in the file; or 46 | 47 | (b) The contribution is based upon previous work that, to the best 48 | of my knowledge, is covered under an appropriate open source 49 | license and I have the right under that license to submit that 50 | work with modifications, whether created in whole or in part 51 | by me, under the same open source license (unless I am 52 | permitted to submit under a different license), as indicated 53 | in the file; or 54 | 55 | (c) The contribution was provided directly to me by some other 56 | person who certified (a), (b) or (c) and I have not modified 57 | it. 
58 | 59 | (d) I understand and agree that this project and the contribution 60 | are public and that a record of the contribution (including all 61 | personal information I submit with it, including my sign-off) is 62 | maintained indefinitely and may be redistributed consistent with 63 | this project or the open source license(s) involved. 64 | ``` 65 | -------------------------------------------------------------------------------- /redfish_interop_validator/config.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2017-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/master/LICENSE.md 4 | 5 | import configparser 6 | import logging 7 | 8 | my_logger = logging.getLogger('rsv') 9 | my_logger.setLevel(logging.DEBUG) 10 | 11 | config_struct = { 12 | 'Tool': ['verbose'], 13 | 'Host': ['ip', 'username', 'password', 'description', 'forceauth', 'authtype', 'token'], 14 | 'Validator': ['payload', 'logdir', 'oemcheck', 'online_profiles', 'debugging', 'required_profiles_dir', 'collectionlimit'] 15 | } 16 | 17 | config_options = [x for name in config_struct for x in config_struct[name]] 18 | 19 | 20 | def convert_args_to_config(args): 21 | my_config = configparser.ConfigParser() 22 | for section in ['Tool', 'Host', 'Validator']: 23 | my_config.add_section(section) 24 | for option in config_struct[section]: 25 | if option not in ['password', 'token']: 26 | my_var = vars(args)[option] 27 | if isinstance(my_var, list): 28 | my_var = ' '.join(my_var) 29 | my_config.set(section, option, str(my_var) if my_var else '') 30 | else: 31 | my_config.set(section, option, '******') 32 | return my_config 33 | 34 | 35 | def convert_config_to_args(args, config): 36 | my_config = configparser.ConfigParser() 37 | if isinstance(config, configparser.ConfigParser): 38 | my_config = config 39 | elif isinstance(config, str): 40 
| with open(config, 'r') as f: 41 | my_config.read_file(f) 42 | elif isinstance(config, dict): 43 | my_config.read_dict(config) 44 | for section in config_struct: 45 | if section in my_config: 46 | for option in my_config[section]: 47 | if option.lower() not in config_options: 48 | if option.lower() not in ['version', 'copyright']: 49 | my_logger.error('Tool Configuration Error: Option {} not supported!'.format(option), extra={"result": "unsupportedOption"}) 50 | elif my_config[section][option] not in ['', None]: 51 | if option.lower() == 'payload' or option.lower() == 'collectionlimit': 52 | setattr(args, option, my_config[section][option].split(' ')) 53 | else: 54 | setattr(args, option, my_config[section][option]) 55 | if option.lower() in ['password', 'token']: 56 | my_config.set(section, option, '******') 57 | my_config_dict = config_parse_to_dict(my_config) 58 | import json 59 | print(json.dumps(my_config_dict, indent=4)) 60 | 61 | 62 | def config_parse_to_dict(config): 63 | my_dict = {} 64 | for section in config: 65 | my_dict[section] = {} 66 | for option in [x for x in config[section] if x not in ['version', 'copyright']]: 67 | my_dict[section][option] = {} 68 | my_dict[section][option]['value'] = config[section][option] 69 | my_dict[section][option]['description'] = "TBD" 70 | return my_dict 71 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release and Publish 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | version: 6 | description: 'Version number' 7 | required: true 8 | changes_1: 9 | description: 'Change entry' 10 | required: true 11 | changes_2: 12 | description: 'Change entry' 13 | required: false 14 | changes_3: 15 | description: 'Change entry' 16 | required: false 17 | changes_4: 18 | description: 'Change entry' 19 | required: false 20 | changes_5: 21 | description: 'Change entry' 22 | 
required: false 23 | changes_6: 24 | description: 'Change entry' 25 | required: false 26 | changes_7: 27 | description: 'Change entry' 28 | required: false 29 | changes_8: 30 | description: 'Change entry' 31 | required: false 32 | jobs: 33 | release_build: 34 | name: Build the release 35 | runs-on: ubuntu-latest 36 | steps: 37 | - uses: actions/checkout@v2 38 | with: 39 | token: ${{secrets.GITHUB_TOKEN}} 40 | - name: Build the changelog text 41 | run: | 42 | echo 'CHANGES<> $GITHUB_ENV 43 | echo "## [${{github.event.inputs.version}}] - $(date +'%Y-%m-%d')" >> $GITHUB_ENV 44 | echo "- ${{github.event.inputs.changes_1}}" >> $GITHUB_ENV 45 | if [[ -n "${{github.event.inputs.changes_2}}" ]]; then echo "- ${{github.event.inputs.changes_2}}" >> $GITHUB_ENV; fi 46 | if [[ -n "${{github.event.inputs.changes_3}}" ]]; then echo "- ${{github.event.inputs.changes_3}}" >> $GITHUB_ENV; fi 47 | if [[ -n "${{github.event.inputs.changes_4}}" ]]; then echo "- ${{github.event.inputs.changes_4}}" >> $GITHUB_ENV; fi 48 | if [[ -n "${{github.event.inputs.changes_5}}" ]]; then echo "- ${{github.event.inputs.changes_5}}" >> $GITHUB_ENV; fi 49 | if [[ -n "${{github.event.inputs.changes_6}}" ]]; then echo "- ${{github.event.inputs.changes_6}}" >> $GITHUB_ENV; fi 50 | if [[ -n "${{github.event.inputs.changes_7}}" ]]; then echo "- ${{github.event.inputs.changes_7}}" >> $GITHUB_ENV; fi 51 | if [[ -n "${{github.event.inputs.changes_8}}" ]]; then echo "- ${{github.event.inputs.changes_8}}" >> $GITHUB_ENV; fi 52 | echo "" >> $GITHUB_ENV 53 | echo 'EOF' >> $GITHUB_ENV 54 | - name: Update version numbers 55 | run: | 56 | sed -i -E 's/ version=.+,/ version="'${{github.event.inputs.version}}'",/' setup.py 57 | sed -i -E 's/tool_version = .+/tool_version = '\'${{github.event.inputs.version}}\''/' redfish_interop_validator/RedfishInteropValidator.py 58 | - name: Update the changelog 59 | run: | 60 | ex CHANGELOG.md <" 74 | git add * 75 | git commit -s -m "${{github.event.inputs.version}} versioning" 76 
| git push origin main 77 | - name: Make the release 78 | env: 79 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 80 | run: | 81 | gh release create ${{github.event.inputs.version}} -t ${{github.event.inputs.version}} -n "Changes since last release:"$'\n\n'"$CHANGES" 82 | - name: Set up Python 83 | uses: actions/setup-python@v2 84 | with: 85 | python-version: '3.x' 86 | - name: Install dependencies 87 | run: | 88 | python -m pip install --upgrade pip 89 | pip install setuptools wheel twine 90 | - name: Build the distribution 91 | run: | 92 | python setup.py sdist bdist_wheel 93 | - name: Upload to pypi 94 | uses: pypa/gh-action-pypi-publish@release/v1 95 | with: 96 | password: ${{ secrets.PYPI_API_TOKEN }} 97 | -------------------------------------------------------------------------------- /redfish_interop_validator/helper.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2017-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/master/LICENSE.md 4 | 5 | import re 6 | import logging 7 | 8 | my_logger = logging.getLogger() 9 | my_logger.setLevel(logging.DEBUG) 10 | 11 | """ 12 | Power.1.1.1.Power , Power.v1_0_0.Power 13 | """ 14 | 15 | versionpattern = 'v[0-9]+_[0-9]+_[0-9]+' 16 | 17 | 18 | def splitVersionString(version): 19 | v_payload = version 20 | if(re.match('([a-zA-Z0-9_.-]*\.)+[a-zA-Z0-9_.-]*', version) is not None): 21 | new_payload = getVersion(version) 22 | if new_payload is not None: 23 | v_payload = new_payload 24 | if ('_' in v_payload): 25 | v_payload = v_payload.replace('v', '') 26 | payload_split = v_payload.split('_') 27 | else: 28 | payload_split = v_payload.split('.') 29 | if len(payload_split) != 3: 30 | return (0, 0, 0) 31 | return tuple([int(v) for v in payload_split]) 32 | 33 | 34 | def compareMinVersion(version, min_version): 35 | """ 36 | Checks for the minimum version of a resource's type 37 | """ 38 | # If version doesn't contain version as is, try it as v#_#_# 39 | # get version from payload 40 | min_split = splitVersionString(min_version) 41 | payload_split = splitVersionString(version) 42 | 43 | # use array comparison, which compares each sequential number 44 | return min_split < payload_split 45 | 46 | def navigateJsonFragment(decoded, URILink): 47 | if '#' in URILink: 48 | URIfragless, frag = tuple(URILink.rsplit('#', 1)) 49 | fragNavigate = frag.split('/') 50 | for item in fragNavigate: 51 | if item == '': 52 | continue 53 | if isinstance(decoded, dict): 54 | decoded = decoded.get(item) 55 | elif isinstance(decoded, list): 56 | if not item.isdigit(): 57 | my_logger.error("URI Destination Error: This URI ({}) is accessing an array, but this is not an index: {}".format(URILink, item)) 58 | return None 59 | if int(item) >= len(decoded): 60 | my_logger.error("URI Destination Error: This URI ({}) is accessing an array, but the index is too large for an array of size {}: 
{}".format(URILink, len(decoded), item)) 61 | return None 62 | decoded = decoded[int(item)] 63 | else: 64 | my_logger.error("URI Destination Error: This URI ({}) has resolved to an invalid object that is neither an array or dictionary".format(URILink)) 65 | return None 66 | return decoded 67 | 68 | 69 | def getNamespace(string: str): 70 | """getNamespace 71 | 72 | Gives namespace of a type string, version included 73 | 74 | :param string: A type string 75 | :type string: str 76 | """ 77 | if '#' in string: 78 | string = string.rsplit('#', 1)[1] 79 | return string.rsplit('.', 1)[0] 80 | 81 | 82 | def getVersion(string: str): 83 | """getVersion 84 | 85 | Gives version stripped from type/namespace string, if possible 86 | 87 | :param string: A type/namespace string 88 | :type string: str 89 | """ 90 | regcap = re.search(versionpattern, string) 91 | return regcap.group() if regcap else None 92 | 93 | 94 | def getNamespaceUnversioned(string: str): 95 | """getNamespaceUnversioned 96 | 97 | Gives namespace of a type string, version NOT included 98 | 99 | :param string: 100 | :type string: str 101 | """ 102 | if '#' in string: 103 | string = string.rsplit('#', 1)[1] 104 | return string.split('.', 1)[0] 105 | 106 | 107 | def getType(string: str): 108 | """getType 109 | 110 | Gives type of a type string (right hand side) 111 | 112 | :param string: 113 | :type string: str 114 | """ 115 | if '#' in string: 116 | string = string.rsplit('#', 1)[1] 117 | return string.rsplit('.', 1)[-1] 118 | 119 | 120 | def createContext(typestring: str): 121 | """createContext 122 | 123 | Create an @odata.context string from a type string 124 | 125 | :param typestring: 126 | :type string: str 127 | """ 128 | ns_name = getNamespaceUnversioned(typestring) 129 | type_name = getType(typestring) 130 | context = '/redfish/v1/$metadata' + '#' + ns_name + '.' 
+ type_name 131 | return context 132 | -------------------------------------------------------------------------------- /redfish_interop_validator/logger.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2017-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Interop-Validator/blob/main/LICENSE.md 4 | 5 | import logging 6 | import sys 7 | from enum import IntEnum 8 | from types import SimpleNamespace 9 | 10 | # List and set up custom debug levels 11 | class Level(IntEnum): 12 | DEBUG = logging.DEBUG 13 | VERBOSE2 = logging.INFO-2 14 | VERBOSE1 = logging.INFO-1 15 | INFO = logging.INFO 16 | WARN = logging.WARN 17 | ERROR = logging.ERROR 18 | 19 | logging.addLevelName(Level.VERBOSE1, "Level.VERBOSE1") 20 | logging.addLevelName(Level.VERBOSE2, "Level.VERBOSE2") 21 | 22 | # Entries for HTML log 23 | LOG_ENTRY = ('name', 'value', 'type', 'exists', 'result') 24 | COUNT_ENTRY = ('id', 'msg', 'level') 25 | 26 | def create_entry(name, value, my_type, exists, result): 27 | return SimpleNamespace(**{ 28 | "name": name, 29 | "value": value, 30 | "type": my_type, 31 | "exists": exists, 32 | "result": result 33 | }) 34 | 35 | def create_count(id_, msg, level): 36 | return SimpleNamespace(**{ 37 | "id": id_, 38 | "msg": msg, 39 | "level": level 40 | }) 41 | 42 | # Handler for log counts to flush (example: per Resource validated) 43 | class RecordHandler(logging.Handler): 44 | def __init__(self): 45 | self.record_collection = [] 46 | super().__init__() 47 | 48 | def emit(self, record): 49 | result = record.__dict__.get('result') 50 | if record.levelno > logging.INFO or result is not None: 51 | self.record_collection.append(record) 52 | 53 | def flush(self): 54 | output = self.record_collection 55 | self.record_collection = [] 56 | return output 57 | 58 | class RecordFormatter(logging.Formatter): 59 | def __init__(self): 60 | 
self.current_uri = [None] 61 | super().__init__() 62 | 63 | def format(self, record): 64 | msg = "{} - {}".format(record.levelname, record.getMessage()) 65 | result = record.__dict__.get('result') 66 | record.result = result 67 | uri = record.__dict__.get('uri', self.current_uri[-1]) 68 | record.uri = uri 69 | if result or record.levelno > logging.INFO: 70 | append = " ... " 71 | append += "{} ".format(result) if result else " " 72 | append += "at {}".format(uri) if uri else "" 73 | msg += append 74 | return msg 75 | 76 | def create_logging_file_handler(level, file_name): 77 | file_handler = logging.FileHandler(file_name) 78 | file_handler.setLevel(min(level, standard_out.level)) 79 | file_handler.setFormatter(RecordFormatter()) 80 | my_logger.addHandler(file_handler) 81 | 82 | def push_uri(self, uri): 83 | """Pushes uri of text logger formatter. 84 | 85 | Args: 86 | uri (str, optional): URI to change to. Defaults to None. 87 | """ 88 | 89 | for handler in self.handlers: 90 | if isinstance(handler, logging.FileHandler): 91 | handler.formatter.current_uri.append(uri) 92 | 93 | def pop_uri(self): 94 | """Pops uri of text logger formatter. 
95 | """ 96 | 97 | for handler in self.handlers: 98 | if isinstance(handler, logging.FileHandler): 99 | if len(handler.formatter.current_uri) > 1: 100 | handler.formatter.current_uri.pop() 101 | 102 | my_logger = logging.getLogger('rsv') 103 | my_logger.setLevel(logging.DEBUG) 104 | 105 | standard_out = logging.StreamHandler(sys.stdout) 106 | standard_out.setLevel(logging.INFO) 107 | my_logger.addHandler(standard_out) 108 | 109 | # Functions to set up externally 110 | def set_standard_out(new_level): 111 | standard_out.setLevel(new_level) 112 | 113 | record_capture = RecordHandler() 114 | my_logger.addHandler(record_capture) 115 | 116 | # Verbose printing functions 117 | def print_verbose_1(self, msg, *args, **kwargs): 118 | if self.isEnabledFor(Level.VERBOSE1): 119 | self._log(Level.VERBOSE1, msg, args, **kwargs) 120 | 121 | def print_verbose_2(self, msg, *args, **kwargs): 122 | if self.isEnabledFor(Level.VERBOSE2): 123 | self._log(Level.VERBOSE2, msg, args, **kwargs) 124 | 125 | logging.Logger.verbose1 = print_verbose_1 126 | logging.Logger.verbose2 = print_verbose_2 127 | logging.Logger.push_uri = push_uri 128 | logging.Logger.pop_uri = pop_uri 129 | 130 | -------------------------------------------------------------------------------- /tests/interoptests.py: -------------------------------------------------------------------------------- 1 | 2 | # Copyright Notice: 3 | # Copyright 2017-2025 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Usecase-Checkers/LICENSE.md 5 | # 6 | # Unit tests for RedfishInteropValidator.py 7 | # 8 | 9 | from unittest import TestCase 10 | 11 | import redfish_interop_validator.interop as riv 12 | 13 | import logging 14 | 15 | logging.Logger.verbose1 = logging.Logger.debug 16 | logging.Logger.verbose2 = logging.Logger.debug 17 | 18 | 19 | class ValidatorTest(TestCase): 20 | 21 | # can we test writeable, find_prop, conditional 22 | # propReadRequirements? 
23 | 24 | def test_no_test(self): 25 | self.assertTrue(True, 'Huh?') 26 | 27 | def test_requirement(self): 28 | entries = ['Mandatory', 'Recommended', 'Mandatory', 'Recommended'] 29 | vals = [{}, riv.REDFISH_ABSENT, riv.REDFISH_ABSENT, {}] 30 | boolist = [True, riv.testResultEnum.NA, False, True] 31 | for e, v, b in zip(entries, vals, boolist): 32 | _msg, result_value = riv.validateRequirement(e, v) 33 | self.assertTrue(result_value == b, str(e + ' ' + str(v))) 34 | 35 | def test_mincount(self): 36 | x = 'x' 37 | entries = [1, 2, 3, 4] 38 | vals = [[x, x, x], [x], [x, x, x, x], [x, x, x, x]] 39 | annotations = [3, 1, 4, 4] 40 | boolist = [True, False, True, True] 41 | for e, v, a, b in zip(entries, vals, annotations, boolist): 42 | self.assertTrue(riv.validateMinCount(v, e, a)[1] == b) 43 | 44 | def test_supportedvals(self): 45 | x, y, z = 'x', 'y', 'z' 46 | entries = [[x, y], [x], [x, y, z]] 47 | vals = [[x, y], [x, y], [x, y]] 48 | boolist = [True, True, False] 49 | for e, v, b in zip(entries, vals, boolist): 50 | self.assertTrue(riv.validateSupportedValues(e, v)[1] == b) 51 | 52 | def test_comparison_1(self): 53 | x, y, z = 'x', 'y', 'z' 54 | comp = ['AnyOf', 'AllOf', 'AllOf'] 55 | entries = [[x, y], [x], [x, y, z]] 56 | vals = [[x, y], [x, y], [x, y, y]] 57 | boolist = [True, True, False] 58 | for c, e, v, b in zip(comp, entries, vals, boolist): 59 | self.assertTrue(riv.checkComparison(v, c, e)[1] == b) 60 | 61 | def test_minversion(self): 62 | entries = ['1.0.1', '1.0.1', '1.2.0', '1.0.0', '1.0', '1.1'] 63 | vals = ['#ComputerSystem.1.0.1.ComputerSystem', '#ComputerSystem.v1_1_1.ComputerSystem', 64 | '#ComputerSystem.v1_1_1.ComputerSystem', '1.0.0', '1.0.0', '1.0.0'] 65 | boolist = [True, True, False, True, True, False] 66 | for e, v, b in zip(entries, vals, boolist): 67 | self.assertTrue(riv.validateMinVersion(v, e)[1] == b, "Failed on {} {} {}".format(e, v, b)) 68 | 69 | def test_action(self): 70 | interopdict = { 71 | "ResetType@Redfish.AllowableValues": 
["On", "ForceOff"], 72 | "target": "/redfish/v1/Chassis/System.Embedded.1/Actions/Chassis.Reset"} 73 | vals = [interopdict, 74 | riv.REDFISH_ABSENT, riv.REDFISH_ABSENT, interopdict, {}] 75 | entries = [{ 76 | "ReadRequirement": "Mandatory", 77 | "Parameters": { 78 | "ResetType": { 79 | "AllowableValues": ["On", "ForceOff"], 80 | "ReadRequirement": "Mandatory" 81 | } 82 | } 83 | }, { 84 | "ReadRequirement": "Mandatory", 85 | }, { 86 | "ReadRequirement": "Recommended", 87 | }, { 88 | "ReadRequirement": "Recommended", 89 | "Parameters": { 90 | "ResetType": { 91 | "AllowableValues": ["ForceOff", "PowerCycle"], 92 | "ReadRequirement": "Mandatory" 93 | } 94 | } 95 | }, { 96 | "ReadRequirement": "Recommended", 97 | "Parameters": { 98 | "ResetType": { 99 | "AllowableValues": ["ForceOff", "PowerCycle"], 100 | "ReadRequirement": "Mandatory" 101 | } 102 | } 103 | }] 104 | boolist = [riv.testResultEnum.PASS, riv.testResultEnum.FAIL, riv.testResultEnum.PASS, riv.testResultEnum.PASS, riv.testResultEnum.PASS] 105 | for e, v, b in zip(entries, vals, boolist): 106 | self.assertTrue(riv.validateActionRequirement(e, (v, None), '#Chassis.Reset')[0][0].success == b,"Failed on {}".format((e, v, b))) 107 | -------------------------------------------------------------------------------- /redfish_interop_validator/session.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2017-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/master/LICENSE.md 4 | 5 | from datetime import datetime, timedelta 6 | import requests 7 | import logging 8 | import sys 9 | from requests.packages.urllib3.exceptions import InsecureRequestWarning 10 | from http.client import responses 11 | 12 | commonHeader = {'OData-Version': '4.0'} 13 | 14 | requests.packages.urllib3.disable_warnings(InsecureRequestWarning) 15 | 16 | 17 | class rfSession: 18 | def __init__(self, user, password, server, logger=None, chkCert=True, proxies=None): 19 | self.user = user 20 | self.pwd = password 21 | self.server = server 22 | self.chkCert = chkCert 23 | self.proxies = {} if proxies is None else proxies 24 | self.key = None 25 | self.loc = None 26 | self.timeout = None 27 | self.tick = None 28 | self.started = False 29 | self.chkCert = False 30 | 31 | if logger is None: 32 | self.logger = logging.getLogger('rsv') 33 | self.logger.setLevel(logging.DEBUG) 34 | ch = logging.StreamHandler(sys.stdout) 35 | ch.setLevel(logging.INFO) 36 | self.logger.addHandler(ch) 37 | else: 38 | self.logger = logger 39 | 40 | def startSession(self): 41 | payload = { 42 | 'UserName': self.user, 43 | 'Password': self.pwd 44 | } 45 | sr = requests.get(self.server + '/redfish/v1/', verify=self.chkCert, headers=commonHeader, proxies=self.proxies) 46 | success = sr.status_code in [200, 204] and sr.json() is not None 47 | if not success: 48 | self.logger.error('Session Error: Could not retrieve service root to start Session') 49 | return False 50 | links = sr.json().get('Links') 51 | if links is not None: 52 | sessionsObj = links.get('Sessions') 53 | if sessionsObj is None: 54 | sessionsURI = '/redfish/v1/SessionService/Sessions' 55 | self.logger.info('using default URI {}'.format(sessionsURI)) 56 | else: 57 | sessionsURI = sessionsObj.get('@odata.id', '/redfish/v1/SessionService/Sessions') 58 | else: 59 | self.logger.error('Session Error: Could not retrieve service root link to start 
Session') 60 | return False 61 | 62 | response = requests.post(self.server + sessionsURI, json=payload, verify=self.chkCert, 63 | headers=commonHeader, proxies=self.proxies) 64 | statusCode = response.status_code 65 | ourSessionKey = response.headers.get('X-Auth-Token') 66 | ourSessionLocation = response.headers.get('Location', '/None') 67 | if ourSessionLocation.startswith('/'): 68 | ourSessionLocation = self.server + ourSessionLocation 69 | success = statusCode in range(200, 204) and ourSessionKey is not None 70 | 71 | self.key = ourSessionKey 72 | self.loc = ourSessionLocation 73 | self.timeout = timedelta(minutes=30) 74 | self.tick = datetime.now() 75 | self.started = success 76 | 77 | if success: 78 | self.logger.info('Session successfully created') 79 | else: 80 | if response.text is not None: 81 | self.logger.info('Response body from session creation:') 82 | self.logger.info('{}'.format(response.text)) 83 | self.logger.info('Headers: {}'.format(response.headers)) 84 | if statusCode in [400, 401]: 85 | self.logger.error('Session Error: Status code "{} {}". Check supplied username and password.' 86 | .format(statusCode, responses[statusCode])) 87 | raise ValueError('Bad Username or Password') 88 | else: 89 | self.logger.error('Session Error: Error creating session. Status code "{} {}".' 
90 | .format(statusCode, responses[statusCode])) 91 | raise ValueError('Bad response from service') 92 | 93 | return success 94 | 95 | def isSessionOld(self): 96 | return datetime.now() - self.tick > self.timeout 97 | 98 | def getSessionKey(self): 99 | if not self.started: 100 | self.logger.error('Session Warning: This session is not started') 101 | return None 102 | if self.isSessionOld(): 103 | self.logger.warning('Session Warning: This session is old') 104 | self.tick = datetime.now() 105 | return self.key 106 | 107 | def killSession(self): 108 | if self.started and not self.isSessionOld(): 109 | headers = {'X-Auth-Token': self.getSessionKey()} 110 | headers.update(commonHeader) 111 | try: 112 | requests.delete(self.loc, verify=self.chkCert, headers=headers, proxies=self.proxies) 113 | except Exception as e: 114 | self.logger.warning('Session Warning: Error deleting current session: {}'.format(e)) 115 | self.started = False 116 | return True 117 | 118 | def getLogger(self): 119 | """ 120 | Grab logger for tools that might use this lib 121 | """ 122 | return self.logger 123 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | ## [2.3.3] - 2025-09-26 4 | - Fixed internal logging usage to avoid exceptions on newer versions of Python 5 | 6 | ## [2.3.2] - 2025-08-29 7 | - Cleaned up error and warning reporting 8 | - Added test results summary to the top of the results file 9 | 10 | ## [2.3.1] - 2025-05-23 11 | - Updated reporting of AllOf and AnyOf tests to not penalize resources for not contributing to passing criteria 12 | 13 | ## [2.3.0] - 2025-05-02 14 | - Fixed tool to allow for profiles that do not define a 'Resources' section 15 | - Fixed regular expression for variable URI segments to not require 'Id' at the end of the string 16 | 17 | ## [2.2.9] - 2025-03-07 18 | - Corrected the testing for 'IfPopulated' when 
used as a resource-level read requirement to not fail the test if not found 19 | 20 | ## [2.2.8] - 2025-02-21 21 | - Added support for the 'RequiredResourceProfile' term 22 | 23 | ## [2.2.7] - 2024-11-15 24 | - Added support for 'ChassisType', 'DriveProtocol', 'MemoryType', 'PortProtocol', and 'ProcessorType' values for 'UseCaseType' 25 | 26 | ## [2.2.6] - 2024-07-26 27 | - Fixed reporting of resource response times in the test report 28 | 29 | ## [2.2.5] - 2024-06-28 30 | - Corrected handling of 'collectionlimit' when specified in a config file 31 | 32 | ## [2.2.4] - 2024-06-14 33 | - Hide password and token parameters from console output when running the tool from a config file 34 | 35 | ## [2.2.3] - 2024-05-31 36 | - Added support for 'UseCaseType' term for 'AbsentResource' usage 37 | 38 | ## [2.2.2] - 2024-05-03 39 | - Added support for testing 'ActionInfo' requirements 40 | - Corrected comparison testing behavior when a property is not present to not produce false failures 41 | 42 | ## [2.2.1] - 2024-04-19 43 | - Fixed use case checking to only report an error if zero resources are found that match a mandatory use case 44 | 45 | ## [2.2.0] - 2024-03-22 46 | - Corrected 'WriteRequirement' checking to not produce errors when a property is marked as 'Recommended', but writes are not supported 47 | 48 | ## [2.1.9] - 2024-02-26 49 | - Added property WriteRequirement checks based on the usage of the 'Allow' response header and the '@Redfish.WriteableProperties' term 50 | 51 | ## [2.1.8] - 2024-02-09 52 | - Added 'collectionlimit' argument to limit the number of entries checked in a collection 53 | 54 | ## [2.1.7] - 2024-01-08 55 | - Fixed crash condition if a 'LinkToResource' comparison is performed and the link is not present 56 | - Changed results for 'Recommended' and 'IfImplemented' properties to show 'Not Tested' if the property is not present 57 | 58 | ## [2.1.6] - 2023-11-03 59 | - Corrected ConditionalRequirements to test Comparison and Values inside of 
matching ConditionalRequirements 60 | 61 | ## [2.1.5] - 2023-10-27 62 | - Refactored project to publish tool on PyPI 63 | 64 | ## [2.1.4] - 2023-07-20 65 | - Added support for testing 'ReplacedProperty' and 'ReplacedByProperty' statements 66 | - Added support for testing profiles with 'UseCases' statements 67 | 68 | ## [2.1.3] - 2023-04-27 69 | - Corrected behavior with the 'nooemcheck' flag to skip over any resources found in the 'Oem' object 70 | - Downgraded errors to warnings for resources not accessible during testing 71 | 72 | ## [2.1.2] - 2023-01-09 73 | - Corrected usage of AnyOf and AllOf checks to be across all resources 74 | 75 | ## [2.1.1] - 2022-10-14 76 | - Added resource-level requirement checking, including conditional requirements 77 | 78 | ## [2.1.0] - 2022-06-24 79 | - Fixed the resource GET result when running multiple profiles 80 | 81 | ## [2.0.9] - 2022-06-17 82 | - Fixed the conditional requirements on list properties 83 | 84 | ## [2.0.8] - 2022-05-22 85 | - Made correction to conditional checks for nested properties 86 | 87 | ## [2.0.7] - 2022-05-13 88 | - Added 'required_profiles_dir' argument to specify directory containing required profiles 89 | - Minor enhancements to debug output 90 | 91 | ## [2.0.6] - 2022-03-25 92 | - Added missing step to log out of the session when done testing 93 | - Added support for finding required profiles when the profile under test is in a different directory than the tool 94 | 95 | ## [2.0.5] - 2022-03-18 96 | - Corrected comparison checks with integer properties 97 | - Corrected test_action unit test 98 | - Updated logging calls to use non-deprecated methods 99 | 100 | ## [2.0.4] - 2022-03-04 101 | - Corrected URI checking to act as a filter for whether or not to apply the requirements 102 | 103 | ## [2.0.3] - 2022-01-31 104 | - Added support for JSON Pointer syntax in 'CompareProperty' 105 | 106 | ## [2.0.2] - 2022-01-10 107 | - Fixed version number comparisons for when a version segment reaches two digits 
108 | 109 | ## [2.0.1] - 2021-09-17 110 | - Fixed console status reporting of whether or not errors were found 111 | 112 | ## [2.0.0] - 2021-08-30 113 | - Significant changes to the CLI arguments with the tool to reduce complexity for users 114 | - Removed need for scanning schema files for performing testing of a service 115 | 116 | ## [1.1.8] - 2021-06-18 117 | - Corrected conditional requirements to properly account for all values specified 118 | 119 | ## [1.1.7] - 2020-03-21 120 | - Resynched common validation code with the Service Validator 121 | 122 | ## [1.1.6] - 2020-03-13 123 | - Added support for `IfPopulated` expressions 124 | - Added support for `@Redfish.ActionInfo` on actions 125 | 126 | ## [1.1.5] - 2020-01-17 127 | - Added htmlLogScraper.py to generate a CSV style report 128 | 129 | ## [1.1.4] - 2019-07-19 130 | - Downgraded several messages not related to interop profile conformance to be informational 131 | - Fixes to handling of conditional requirements to not produce false errors 132 | 133 | ## [1.1.3] - 2019-06-21 134 | - Added support for new URIs requirement added to 1.1.0 of the profile specification 135 | - Made fixes to the handling of the `CompareProperty` term 136 | - Made fix to the handling of `IfImplemented` to not treat it as mandatory 137 | - Made fix to tracking of Service Root requirements 138 | - Made enhancements to debug log output 139 | 140 | ## [1.1.2] - 2019-05-31 141 | - Updated schema pack to 2019.1 142 | 143 | ## [1.1.1] - 2019-05-10 144 | - Made fixes to version comparison testing 145 | 146 | ## [1.1.0] - 2019-04-12 147 | - Added missing @odata.context initialization for Message Registries 148 | 149 | ## [1.0.9] - 2019-02-08 150 | - Updated schema pack to 2018.3 151 | - Fixed handling of the Redfish.Revisions term 152 | 153 | ## [1.0.8] - 2018-10-19 154 | - Fixed how single entry comparisons were performed 155 | 156 | ## [1.0.7] - 2018-09-21 157 | - Various bug fixes 158 | - Added tool versioning 159 | - Added profile 
names and hashes to test output 160 | 161 | ## [1.0.6] - 2018-09-07 162 | - More updates to leverage common code with the Redfish-Service-Validator tool 163 | 164 | ## [1.0.5] - 2018-08-17 165 | - Refactored project to leverage common service traversal code used in the Redfish-Service-Validator tool 166 | 167 | ## [1.0.4] - 2018-07-06 168 | - Added support for validating requirements described by profiles listed in "RequiredProfiles" 169 | 170 | ## [1.0.3] - 2018-04-13 171 | - Added prevention of invalid properties from being checked further 172 | 173 | ## [1.0.2] - 2018-03-16 174 | - Fixed usage of the Protocol property to allow for it to be missing in the profile 175 | - Added checking for invalid properties in payloads 176 | 177 | ## [1.0.1] - 2018-03-02 178 | - Change "comply" to "conform" in various output messages 179 | 180 | ## [1.0.0] - 2018-01-26 181 | - Initial release; conformant with version 1.0.0 of DSP0272 182 | -------------------------------------------------------------------------------- /redfish_interop_validator/profile.py: -------------------------------------------------------------------------------- 1 | 2 | # Copyright Notice: 3 | # Copyright 2017-2025 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. 
For full text see link: 5 | # https://github.com/DMTF/Redfish-Service-Validator/LICENSE.md 6 | 7 | import os 8 | import re 9 | import glob 10 | import json 11 | import logging 12 | 13 | from urllib.request import urlopen 14 | from collections.abc import Mapping 15 | 16 | from redfish_interop_validator.helper import splitVersionString, versionpattern 17 | 18 | my_logger = logging.getLogger() 19 | my_logger.setLevel(logging.DEBUG) 20 | 21 | 22 | def hashProfile(profile): 23 | from hashlib import md5 24 | my_md5 = md5(json.dumps(profile, sort_keys=True).encode()) 25 | return my_md5.hexdigest() 26 | 27 | 28 | def checkProfileAgainstSchema(profile, schema): 29 | """ 30 | Checks if a profile is conformant 31 | """ 32 | # what is required in a profile? use the json schema 33 | import jsonschema 34 | try: 35 | jsonschema.validate(profile, schema) 36 | except jsonschema.ValidationError as e: 37 | my_logger.exception(e) 38 | my_logger.info('ValidationError') 39 | return False 40 | except jsonschema.SchemaError as e: 41 | my_logger.exception(e) 42 | my_logger.info('SchemaError') 43 | return False 44 | # consider @odata.type, with regex 45 | return True 46 | 47 | 48 | defaultrepository = 'http://redfish.dmtf.org/profiles' 49 | 50 | 51 | def getProfilesMatchingName(name, directories): 52 | pattern = r'\.{}\.'.format(versionpattern) 53 | filepattern = re.compile(pattern.join(name.split('.')) + "|{}".format(name.replace('.', '\.'))) 54 | for dirname in directories: 55 | for file in glob.glob(os.path.join(dirname, '*.json')): 56 | if filepattern.match(os.path.basename(file)): 57 | yield file 58 | 59 | def dict_merge(dct, merge_dct): 60 | """ 61 | https://gist.github.com/angstwad/bf22d1822c38a92ec0a9 modified 62 | Recursive dict merge. Inspired by :meth:``dict.update()``, instead of 63 | updating only top-level keys, dict_merge recurses down into dicts nested 64 | to an arbitrary depth, updating keys. The ``merge_dct`` is merged into 65 | ``dct``. 
66 | :param dct: dict onto which the merge is executed 67 | :param merge_dct: dct merged into dct 68 | :return: None 69 | """ 70 | for k in merge_dct: 71 | if (k in dct and isinstance(dct[k], dict) 72 | and isinstance(merge_dct[k], Mapping)): 73 | dict_merge(dct[k], merge_dct[k]) 74 | else: 75 | dct[k] = merge_dct[k] 76 | 77 | 78 | def updateWithProfile(profile, data): 79 | dict_merge(data, profile) 80 | return data 81 | 82 | 83 | def getProfileFromRepo(profilename, repo=None): 84 | try: 85 | if repo is None: 86 | repo = 'http://redfish.dmtf.org/profiles' 87 | 88 | urlpath = urlopen(repo) 89 | string = urlpath.read().decode('utf-8') 90 | 91 | pattern = r'\.{}\.'.format(versionpattern) 92 | filepattern = re.compile(pattern.join(profilename.split('.'))) 93 | 94 | filelist = filepattern.findall(string) 95 | 96 | profilename = None 97 | for filename in filelist: 98 | filename = filename[:-1] 99 | if profilename is None: 100 | profilename = filename 101 | continue 102 | profilename = max(profilename, filename) 103 | if profilename is None: 104 | return None 105 | 106 | remotefile = urlopen(repo + '/' + profilename) 107 | return remotefile.read() 108 | except Exception as e: 109 | print(e) 110 | return None 111 | 112 | # Presumes the cache does not need to handle multiple of the same profile with different versions 113 | profile_cache = {} 114 | 115 | def parseProfileInclude(target_name, target_profile_info, directories, online): 116 | # Grab data of profile from online or locally 117 | 118 | min_version = splitVersionString(target_profile_info.get('MinVersion', '1.0.0')) 119 | target_version = 'v{}_{}_{}'.format(*min_version) 120 | target_file = '{}.{}'.format(target_name, 'json') 121 | 122 | if target_file in profile_cache: 123 | return profile_cache[target_file] 124 | 125 | # get max filename 126 | repo = target_profile_info.get('Repository') 127 | if online: 128 | data = getProfileFromRepo(target_file, repo) 129 | else: 130 | data = None 131 | 132 | if data is None: 
133 | target_list = list(getProfilesMatchingName(target_file, directories)) 134 | if len(target_list) > 0: 135 | max_version = (1,0,0) 136 | for target_name in target_list: 137 | with open(target_name) as f: 138 | my_profile = json.load(f) 139 | my_version = splitVersionString(my_profile.get('ProfileVersion', '1.0.0')) 140 | max_version = max(max_version, my_version) 141 | if my_version == max_version or data is None: 142 | data = my_profile 143 | if min_version > max_version: 144 | my_logger.warning('RequiredProfile Version Warning: File version smaller than target MinVersion') 145 | else: 146 | my_logger.error('RequiredProfile Import Error: Could not acquire this profile from local source or online {} {}'.format(target_name, repo)) 147 | data = None 148 | 149 | profile_cache[target_file] = data 150 | return data 151 | 152 | 153 | def getProfiles(profile, directories, chain=None, online=False): 154 | profile_includes, required_by_resource = [], [] 155 | 156 | # Prevent cyclical imports when possible 157 | profile_name = profile.get('ProfileName') 158 | if chain is None: 159 | chain = [] 160 | if profile_name in chain: 161 | my_logger.error('RequiredProfiles Import Error: Suspected duplicate/cyclical import error: {} {}'.format(chain, profile_name)) 162 | return [], [] 163 | chain.append(profile_name) 164 | 165 | # Gather all included profiles, these are each run independently in validateResource. 
166 | # TODO: Process them simultaneously in validateResource, to avoid polling the target machine multiple times 167 | required_profiles = profile.get('RequiredProfiles', {}) 168 | for target_name, target_profile_info in required_profiles.items(): 169 | profile_data = parseProfileInclude(target_name, target_profile_info, directories, online) 170 | 171 | if profile_data: 172 | profile_includes.append(profile_data) 173 | 174 | inner_includes, inner_reqs = getProfiles(profile_data, directories, chain) 175 | profile_includes.extend(inner_includes) 176 | required_by_resource.extend(inner_reqs) 177 | 178 | # Process all RequiredResourceProfile by modifying profiles 179 | profile_resources = profile.get('Resources', {}) 180 | 181 | for resource_name, resource in profile_resources.items(): 182 | # Modify just the resource or its UseCases. Should not have concurrent UseCases and RequiredResourceProfile in Resource 183 | if 'UseCases' not in resource: 184 | modifying_objects = [resource] 185 | else: 186 | modifying_objects = resource['UseCases'] 187 | # Use same format as RequiredProfiles 188 | for inner_object in modifying_objects: 189 | required_profiles = inner_object.get('RequiredResourceProfile', {}) 190 | for target_name, target_profile_info in required_profiles.items(): 191 | profile_data = parseProfileInclude(target_name, target_profile_info, directories, online) 192 | 193 | if profile_data: 194 | target_resources = profile_data.get('Resources') 195 | # Merge if our data exists 196 | if resource_name in target_resources: 197 | dict_merge(inner_object, target_resources[resource_name]) 198 | required_by_resource.append(profile_data) 199 | else: 200 | my_logger.error('RequiredProfiles Import Error: Import {} does not have Resource {}'.format(target_name, resource_name)) 201 | 202 | return profile_includes, required_by_resource 203 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | Copyright 2017-2025 DMTF. All rights reserved. 2 | 3 | # Redfish Interop Validator 4 | 5 | ## About 6 | 7 | The Redfish Interop Validator is a python3 tool that will validate a service based on an Interoperability profile given to the tool. The purpose of the tool is to guarantee that a specific service is compatible with vendor systems or system tools based on a vendor's specification in a profile. 8 | 9 | ## Introduction 10 | 11 | This tool is designed to accept a profile conformant to the schematics specified by the DMTF Redfish Profile schema, and run against any valid Redfish service for a given device. It is not biased to any specific hardware, only dependent on the current Redfish specification. 12 | 13 | ## Installation 14 | 15 | From PyPI: 16 | 17 | pip install redfish_interop_validator 18 | 19 | From GitHub: 20 | 21 | git clone https://github.com/DMTF/Redfish-Interop-Validator.git 22 | cd Redfish-Interop-Validator 23 | python setup.py sdist 24 | pip install dist/redfish_interop_validator-x.x.x.tar.gz 25 | 26 | ## Requirements 27 | 28 | External modules: 29 | 30 | * beautifulsoup4 - https://pypi.python.org/pypi/beautifulsoup4 31 | * requests - https://github.com/kennethreitz/requests (Documentation is available at http://docs.python-requests.org/) 32 | * lxml - https://pypi.python.org/pypi/lxml 33 | * jsonschema - https://pypi.org/project/jsonschema 34 | 35 | You may install the prerequisites by running: 36 | 37 | pip3 install -r requirements.txt 38 | 39 | If you have a previous beautifulsoup4 installation, use the following command: 40 | 41 | pip3 install beautifulsoup4 --upgrade 42 | 43 | There is no dependency based on Windows or Linux OS. 44 | The result logs are generated in HTML format and an appropriate browser, such as Chrome, Firefox, or Edge, is required to view the logs on the client system. 
45 | 46 | ## Execution Steps 47 | 48 | The Redfish Interop Validator is designed to execute as a purely command line interface tool with no intermediate inputs expected during tool execution. Below are the step by step instructions on setting up the tool for execution on any identified Redfish device for conformance test: 49 | 50 | Modify the config\example.ini file to enter the system details under below section 51 | 52 | ### [Tool] 53 | 54 | | Variable | CLI Argument | Type | Definition | 55 | | :--- | :--- | :--- | :--- | 56 | | `verbose` | `-v` | integer | Verbosity of tool in stdout; 0 to 3, 3 being the greatest level of verbosity. | 57 | 58 | ### [Interop] 59 | 60 | Note: These arguments are only supplied via the CLI to the tool and are not specified in the configuration file. 61 | 62 | | CLI Argument | Type | Definition | 63 | | :--- | :--- | :--- | 64 | | `profile` | string | The name of the testing profile (mandatory). | 65 | | `--schema` | string | The filepath and name of the schema file to verify the format of the profile. | 66 | 67 | ### [Host] 68 | 69 | | Variable | CLI Argument | Type | Definition | 70 | | :--- | :--- | :--- | :--- | 71 | | `ip` | `-r` | string | The address of the Redfish service (with scheme); example: 'https://123.45.6.7:8000'. | 72 | | `username` | `-u` | string | The username for authentication. | 73 | | `password` | `-p` | string | The password for authentication. | 74 | | `description` | `--description` | string | The description of the system for identifying logs; if none is given, a value is produced from information in the service root. | 75 | | `forceauth` | `--forceauth` | boolean | Force authentication on unsecure connections; 'True' or 'False'. | 76 | | `authtype` | `--authtype` | string | Authorization type; 'None', 'Basic', 'Session', or 'Token'. | 77 | | `token` | `--token` | string | Token when 'authtype' is 'Token'. 
| 78 | 79 | ### [Validator] 80 | 81 | | Variable | CLI Argument | Type | Definition | 82 | | :--- | :--- | :--- | :--- | 83 | | `payload` | `--payload` | string | The mode to validate payloads ('Tree', 'Single', 'SingleFile', or 'TreeFile') followed by resource/filepath; see below. | 84 | | `logdir` | `--logdir` | string | The directory for generated report files; default: 'logs'. | 85 | | `oemcheck` | `--nooemcheck` | boolean | Whether to check OEM items on service; 'True' or 'False'. | 86 | | `online_profiles` | `--no_online_profiles` | boolean | Whether to download online profiles; 'True' or 'False'. | 87 | | `debugging` | `--debugging` | boolean | Output debug statements to text log, otherwise it only uses INFO; 'True' or 'False'. | 88 | | `required_profiles_dir` | `--required_profiles_dir` | string | Option to set the root folder of required profiles | 89 | | `collectionlimit` | `--collectionlimit` | string | Sets a limit to links gathered from collections by type (schema name).
Example 1: `ComputerSystem 20` limits ComputerSystemCollection to 20 links.
Example 2: `ComputerSystem 20 LogEntry 10` limits ComputerSystemCollection to 20 links and LogEntryCollection to 10 links. | 90 | 91 | ### Payload options 92 | 93 | The payload option takes two parameters as "option uri" 94 | 95 | (Single, SingleFile, Tree, TreeFile) 96 | How to test the payload URI given. Single tests will only give a report on a single resource, while Tree will report on every link from that resource 97 | 98 | ([Filename], [uri]) 99 | 100 | URI of the target payload, or filename of a local file. 101 | 102 | ### HTML Log 103 | 104 | To convert a previous HTML log into a csv file, use the following command: 105 | 106 | `python3 tohtml.py htmllogfile` 107 | 108 | ## Execution flow 109 | 110 | * 1. Redfish Interop Validator starts with the Service root Resource Schema by querying the service with the service root URI and getting all the device information, the resources supported and their links. Once the response of the Service root query is verified against a given profile (given the profile contains specifications for ServiceRoot), the tool traverses through all the collections and Navigation properties returned by the service. 111 | * 2. For each navigation property/Collection of resource returned, it does following operations: 112 | ** i. Reads all the Navigation/collection of resources. 113 | ** ii. Queries the service with the individual resource uri and validates all Resource returned by the service that are included in the profile specified to the tool. 114 | * 3. Step 2 repeats till all the URIs and resources are covered. 115 | 116 | Upon validation of a resource, the following types of tests may occur: 117 | 118 | * **Unlike** the Service Validator, the program will not necessarily list and warn problematic Resources, it will expect those problems to be found with the Service Validator and are ignored in the process here. 
119 | * When a Resource is found, check if this resource exists in the Profile provided, otherwise ignore it and move on to the next available resources via its Links. 120 | * With the Resource initiated, begin to validate itself and the Properties that exist in the Profile given to the program with the following possible tests: 121 | * MinVersion - Test the @odata.type/version of the Resource which is being tested, which must be GREATER than the given MinVersion in the profile 122 | * MinCount - Test based on the @odata.count annotation, determine the size of a given Collection or List, which must be GREATER than the given MinCount in the profile 123 | * ReadRequirement - Test the existence of a Property or Resource, depending on whether it is Recommended or Mandatory (others unimplemented) in the profile 124 | * Members - Test a Resource's "Members" property, which includes MinCount test 125 | * MinSupportedValues - Test the enumerations of a particular Property, based on the annotation @odata.SupportedValues and the values given in the profile 126 | * Writeable/WriteRequirement - Test if the Property is ReadWrite capable, depending on if it is required in the profile 127 | * Comparison - Test between an Enum Property's value and values in the Profile, with a particular set of comparisons available: 128 | * AnyOf, AllOf = compare if any or all of the given values exist in a List or single Enum 129 | * GreaterThan, LessThan, Equal, ...
= compare based on common comparisons Less, Greater or Equal 130 | * Absent, Present = compare if a property exist or does not 131 | * ConditionalRequirements - Perform some of the above tests above if one of the specified requirements are True: 132 | * Subordinate - Test if this Resource is a child/link of the type tree listed 133 | * Comparison - Test if a Comparison is True to a certain value 134 | * ActionRequirements - Perform tests based on what Actions require, such as ReadRequirement, AllowableValues 135 | * Check whether a Property is at first able to be nulled or is mandatory, and pass based on its Requirement or Nullability 136 | * For collections, validate each property inside of itself, and expects a list rather than a single Property, otherwise validate normally: 137 | 138 | ## Conformance Logs - Summary and Detailed Conformance Report 139 | 140 | The Redfish Interop Validator generates reports in the "logs" folder: a text version named "InteropLog_MM_DD_YYYY_HHMMSS.txt" and an html version named "InteropHtmlLog_MM_DD_YYYY_HHMMSS.html". The reports give the detailed view of the individual properties checked, with the Pass/Fail/Skip/Warning status for each resource checked for conformance. 141 | 142 | There is a verbose log file that may be referenced to diagnose tool problems when the stdout print out is insufficient, located in logs/ConformanceLog_MM_DD_YYYY_HHMMSS.html 143 | 144 | ## Release Process 145 | 146 | 1. Go to the "Actions" page 147 | 2. Select the "Release and Publish" workflow 148 | 3. Click "Run workflow" 149 | 4. Fill out the form 150 | 5. Click "Run workflow" 151 | -------------------------------------------------------------------------------- /redfish_interop_validator/RedfishInteropValidator.py: -------------------------------------------------------------------------------- 1 | 2 | # Copyright Notice: 3 | # Copyright 2017-2025 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. 
def main(argslist=None, configfile=None):
    """Main command.

    Runs the Interop Validator against a live service (or an offline payload)
    using one or more interop profiles, then writes text/html logs.

    Args:
        argslist (list, optional): List of arguments in the form of argv. Defaults to None.
        configfile (str, optional): Path to a configuration file; overrides args.config. Defaults to None.

    Returns:
        tuple: (status_code, lastResultsPage, exit_string) where status_code is 0 on success,
               lastResultsPage is the path to the HTML report (or None on early exit).
    """
    argget = argparse.ArgumentParser(description='DMTF tool to test a service against a interop profile, version {}'.format(tool_version))

    # base tool
    argget.add_argument('-v', '--verbose', action='count', default=0, help='Verbosity of tool in stdout')
    argget.add_argument('-c', '--config', type=str, help='Configuration for this tool')

    # host info
    argget.add_argument('-i', '--ip', '--rhost', '-r', type=str, help='Address of host to test against, using http or https (example: https://123.45.6.7:8000)')
    argget.add_argument('-u', '--username', type=str, help='Username for Authentication')
    argget.add_argument('-p', '--password', type=str, help='Password for Authentication')
    argget.add_argument('--description', type=str, help='sysdescription for identifying logs, if none is given, draw from serviceroot')
    argget.add_argument('--forceauth', action='store_true', help='Force authentication on unsecure connections')
    argget.add_argument('--authtype', type=str, default='Basic', help='authorization type (None|Basic|Session|Token)')
    argget.add_argument('--token', type=str, help='bearer token for authtype Token')

    # validator options
    argget.add_argument('--payload', type=str, help='mode to validate payloads [Tree, Single, SingleFile, TreeFile] followed by resource/filepath', nargs=2)
    argget.add_argument('--logdir', type=str, default='./logs', help='directory for log files')
    argget.add_argument('--nooemcheck', action='store_false', dest='oemcheck', help='Don\'t check OEM items')
    argget.add_argument('--debugging', action="store_true", help='Output debug statements to text log, otherwise it only uses INFO')
    argget.add_argument('--required_profiles_dir', type=str, help='root directory for required profiles')
    argget.add_argument('--collectionlimit', type=str, default=['LogEntry', '20'], help='apply a limit to collections (format: RESOURCE1 COUNT1 RESOURCE2 COUNT2...)', nargs='+')

    # Config information unique to Interop Validator
    argget.add_argument('profile', type=str, default='sample.json', nargs='+', help='interop profile with which to validate service against')
    argget.add_argument('--schema', type=str, default=None, help='schema with which to validate interop profile against')
    argget.add_argument('--no_online_profiles', action='store_false', dest='online_profiles', help='Don\'t acquire profiles automatically from online')
    argget.add_argument('--warnrecommended', action='store_true', help='warn on recommended instead of pass')

    # todo: write patches
    argget.add_argument('--writecheck', action='store_true', help='(unimplemented) specify to allow WriteRequirement checks')

    args = argget.parse_args(argslist)

    if configfile is None:
        configfile = args.config

    start_tick = datetime.now()

    # Set logging file; -vvv and beyond clamps stdout to DEBUG
    logger.set_standard_out(logger.Level.INFO - args.verbose if args.verbose < 3 else logger.Level.DEBUG)

    logpath = args.logdir

    if not os.path.isdir(logpath):
        os.makedirs(logpath)

    log_level = logger.Level.INFO if not args.debugging else logger.Level.DEBUG
    file_name = datetime.strftime(start_tick, os.path.join(logpath, "ConformanceLog_%m_%d_%Y_%H%M%S.txt"))

    logger.create_logging_file_handler(log_level, file_name)

    my_logger = logging.getLogger('rsv')
    my_logger.setLevel(logging.DEBUG)

    # Begin of log
    my_logger.info("Redfish Interop Validator, version {}".format(tool_version))
    my_logger.info("")

    if args.ip is None and configfile is None:
        my_logger.error('Configuration Error: No IP or Config Specified')
        argget.print_help()
        return 1, None, 'Configuration Incomplete'

    if configfile:
        from redfish_interop_validator.config import convert_config_to_args
        convert_config_to_args(args, configfile)
    else:
        # No config given: snapshot the effective arguments next to the logs for reproducibility
        from redfish_interop_validator.config import convert_args_to_config
        my_logger.info('Writing config file to log directory')
        configfilename = datetime.strftime(start_tick, os.path.join(logpath, "ConfigFile_%m_%d_%Y_%H%M%S.ini"))
        my_config = convert_args_to_config(args)
        with open(configfilename, 'w') as f:
            my_config.write(f)

    # Check if our URL is consistent
    scheme, netloc, _, _, _, _ = urlparse(args.ip)
    if scheme not in ['http', 'https']:
        my_logger.error('Configuration Error: IP is missing http or https')
        return 1, None, 'IP Incomplete'

    if netloc == '':
        my_logger.error('Configuration Error: IP is missing ip/host')
        return 1, None, 'IP Incomplete'

    if len(args.collectionlimit) % 2 != 0:
        my_logger.error('Configuration Error: Collection Limit requires two arguments per entry (ResourceType Count)')
        return 1, None, 'Collection Limit Incomplete'

    # Start printing config details, remove redundant/private info from print
    my_logger.info('Target URI: ' + args.ip)
    my_logger.info('\n'.join(
        ['{}: {}'.format(x, vars(args)[x] if x not in ['password'] else '******') for x in sorted(list(vars(args).keys() - set(['description']))) if vars(args)[x] not in ['', None]]))
    my_logger.info('Start time: ' + start_tick.strftime('%x - %X'))
    my_logger.info("")

    # Start our service
    try:
        currentService = traverseInterop.startService(vars(args))
    except Exception as ex:
        my_logger.debug('Exception caught while creating Service', exc_info=1)
        my_logger.error("Service Exception Error: Service could not be started: {}".format(ex))
        return 1, None, 'Service Exception'

    # Create a description of our service if there is none given
    if args.description is None and currentService.service_root:
        my_version = currentService.service_root.get('RedfishVersion', 'No Version')
        my_name = currentService.service_root.get('Name', '')
        my_uuid = currentService.service_root.get('UUID', 'No UUID')
        setattr(args, 'description', 'My Target System {}, version {}, {}'.format(my_name, my_version, my_uuid))

    my_logger.info('Description of service: {}\n'.format(args.description))

    # Interop Profile handling
    my_profiles = []
    my_paths = []
    for filename in args.profile:
        with open(filename) as f:
            my_profiles.append((filename, json.loads(f.read())))
        my_paths.append(os.path.split(filename)[0])
    if args.schema is not None:
        with open(args.schema) as f:
            schema = json.loads(f.read())
        for name, profile in my_profiles:
            success = checkProfileAgainstSchema(profile, schema)
            if not success:
                my_logger.info("Profile {} did not conform to the given schema...".format(name))
                return 1, None, 'Profile Did Not Conform'

    if args.required_profiles_dir is not None:
        my_paths += glob.glob("{}/**/".format(args.required_profiles_dir), recursive=True)

    my_logger.info('Profile Hashes (run-time): ')
    for file_name, profile in my_profiles:
        profile_name = profile.get('ProfileName')
        profile_version = profile.get('ProfileVersion')
        my_logger.info('profile: {} {} {}, dict md5 hash: {}'.format(file_name, profile_name, profile_version, hashProfile(profile)))

    # Start main
    status_code = 1
    jsonData = None

    # Set our mode for reading new payloads
    if args.payload:
        pmode, ppath = args.payload
    else:
        pmode, ppath = 'Default', ''
    pmode = pmode.lower()

    if pmode not in ['tree', 'single', 'singlefile', 'treefile', 'default']:
        pmode = 'Default'
        my_logger.warning('Configuration Warning: PayloadMode or path invalid, using Default behavior')
    if 'file' in pmode:
        if ppath is not None and os.path.isfile(ppath):
            with open(ppath) as f:
                jsonData = json.load(f)
        else:
            my_logger.error('File not found for payload: {}'.format(ppath))
            return 1, None, 'File not found for payload: {}'.format(ppath)

    try:
        results = None
        processed_profiles = set()
        for file_name, profile in my_profiles:
            profile_name = profile.get('ProfileName')
            profile_version = profile.get('ProfileVersion')

            # Create a list of profiles, required imports, and show their hashes
            included_profiles, required_by_resource = getProfiles(profile, [os.getcwd()] + my_paths, online=args.online_profiles)

            all_profiles = [profile] + included_profiles

            my_logger.info('Profile Hashes (included by {}): '.format(file_name))
            for inner_profile in included_profiles:
                # BUGFIX: read name/version/hash from the included profile itself,
                # not from the outer 'profile' being iterated above
                inner_profile_name = inner_profile.get('ProfileName')
                inner_profile_version = inner_profile.get('ProfileVersion')
                my_logger.info('\t{} {}, dict md5 hash: {}'.format(inner_profile_name, inner_profile_version, hashProfile(inner_profile)))

            my_logger.info('Profile Hashes (required by Resource): ')
            for inner_profile in required_by_resource:
                inner_profile_name = inner_profile.get('ProfileName')
                inner_profile_version = inner_profile.get('ProfileVersion')
                my_logger.info('\t{} {}, dict md5 hash: {}'.format(inner_profile_name, inner_profile_version, hashProfile(inner_profile)))

            for profile_to_process in all_profiles:
                processing_profile_name = profile_to_process.get('ProfileName')
                if processing_profile_name not in processed_profiles:
                    # BUGFIX: record the profile actually being processed so the
                    # duplicate check below can ever trigger for included profiles
                    processed_profiles.add(processing_profile_name)
                else:
                    # BUGFIX: format the profile name (was a literal '{}')
                    my_logger.warning("Import Warning: Profile {} already processed".format(processing_profile_name))

                if 'single' in pmode:
                    success, new_results, _, _ = validateSingleURI(ppath, profile_to_process, 'Target', expectedJson=jsonData)
                elif 'tree' in pmode:
                    success, new_results, _, _ = validateURITree(ppath, profile_to_process, 'Target', expectedJson=jsonData)
                else:
                    success, new_results, _, _ = validateURITree('/redfish/v1/', profile_to_process, 'ServiceRoot', expectedJson=jsonData)
                if results is None:
                    results = new_results
                else:
                    # Merge per-profile results, prefixing message names with the profile
                    for item_name, item in new_results.items():
                        for x in item['messages']:
                            x.name = profile_name + ' -- ' + x.name
                        if item_name in results:
                            results[item_name]['messages'].extend(item['messages'])
                        else:
                            results[item_name] = item
    except traverseInterop.AuthenticationError as e:
        # log authentication error and terminate program
        my_logger.error('{}'.format(e))
        return 1, None, 'Failed to authenticate with the service'

    # Close the connection
    try:
        currentService.close()
    except Exception as e:
        my_logger.error('Service Closure Error: Failed to log out of service; session may still be active ({})'.format(e))

    now_tick = datetime.now()
    my_logger.info('Elapsed time: {}'.format(str(now_tick - start_tick).rsplit('.', 1)[0]))

    final_counts = Counter()

    my_logger.info('\nListing any warnings and errors: ')

    for my_result in results.values():

        for msg in my_result['messages']:
            if msg.result in [testResultEnum.PASS]:
                final_counts['pass'] += 1
            if msg.result in [testResultEnum.NOT_TESTED]:
                final_counts['not_tested'] += 1

        warns = [x for x in my_result['records'] if x.levelno == logger.Level.WARN]
        errors = [x for x in my_result['records'] if x.levelno == logger.Level.ERROR]
        if len(warns + errors):
            my_logger.info(" ")
            my_logger.info(my_result['uri'])

        if len(warns):
            my_logger.info("Warnings")
            for record in warns:
                final_counts[record.levelname.lower()] += 1
                my_logger.log(record.levelno, ", ".join([x for x in [record.msg, record.result] if x]))

        if len(errors):
            my_logger.info("Errors")
            for record in errors:
                final_counts[record.levelname.lower()] += 1
                my_logger.log(record.levelno, ", ".join([x for x in [record.msg, record.result] if x]))

    import redfish_interop_validator.tohtml as tohtml

    html_str = tohtml.renderHtml(results, tool_version, start_tick, now_tick, currentService)

    lastResultsPage = datetime.strftime(start_tick, os.path.join(logpath, "InteropHtmlLog_%m_%d_%Y_%H%M%S.html"))

    tohtml.writeHtml(html_str, lastResultsPage)

    my_logger.info("\nResults Summary:")
    my_logger.info(", ".join([
        'Pass: {}'.format(final_counts['pass']),
        'Fail: {}'.format(final_counts['error']),
        'Warning: {}'.format(final_counts['warning']),
        # BUGFIX: key is 'not_tested' (as counted above); 'nottested' was always 0
        'Not Tested: {}'.format(final_counts['not_tested']),
    ]))

    success = final_counts['error'] == 0

    if not success:
        my_logger.error("Validation has failed: {} problems found".format(final_counts['error']))
    else:
        my_logger.info("Validation has succeeded.")
        status_code = 0

    return status_code, lastResultsPage, 'Validation done'
def tableBlock(lines, titles, widths=None, ffunc=None):
    """Build an HTML table: one header row from titles (with percentage widths),
    then one row per entry in lines; ffunc, when given, renders each cell from
    its (index, value) pair, otherwise cells are plain <td> elements."""
    if widths is None:
        widths = [100] * len(titles)
    attrlist = ['style="width:{}%"'.format(str(w)) for w in widths]
    # Header row pairs each title with its width attribute
    rendered = tag.tr(''.join(tag.th(title, attr) for title, attr in zip(titles, attrlist)))
    for row in lines:
        cells = []
        for idx, cell in enumerate(row):
            cells.append(ffunc(idx, cell) if ffunc is not None else tag.td(cell))
        rendered += tag.tr(''.join(cells))
    return tag.table(rendered)
config['logdir'] 86 | 87 | # wrap html 88 | htmlPage = '' 89 | htmlStrTop = 'Conformance Test Summary\ 90 | \ 114 | ' 115 | htmlStrBodyHeader = '' 116 | # Logo and logname 117 | infos = [wrapTag('##### Redfish Conformance Test Report #####', 'h2')] 118 | infos.append(wrapTag('DMTF Redfish Logo', 'h4')) 120 | infos.append('

' 121 | 'https://github.com/DMTF/Redfish-Interop-Validator

') 122 | infos.append('Tool Version: {}'.format(tool_version)) 123 | infos.append(startTick.strftime('%c')) 124 | infos.append('(Run time: {})'.format( 125 | str(nowTick-startTick).rsplit('.', 1)[0])) 126 | infos.append('

This tool is provided and maintained by the DMTF. ' 127 | 'For feedback, please open issues
in the tool\'s Github repository: ' 128 | '' 129 | 'https://github.com/DMTF/Redfish-Interop-Validator/issues

') 130 | 131 | htmlStrBodyHeader += tag.tr(tag.th(infoBlock(infos))) 132 | 133 | htmlStrBodyHeader += tag.tr(tag.th('Test Summary', 'class="bluebg titlerow"')) 134 | infos = {'System': ConfigURI, 'Description': sysDescription} 135 | infos['Target'] = ", ".join(service.config['payload']) if service.config['payload'] else 'Complete System Test' 136 | htmlStrBodyHeader += tag.tr(tag.th(infoBlock(infos))) 137 | 138 | infos = {'Profile': config['profile'], 'Schema': config['schema']} 139 | htmlStrBodyHeader += tag.tr(tag.th(infoBlock(infos))) 140 | 141 | summary = Counter() 142 | 143 | for k, my_result in results.items(): 144 | for msg in my_result['messages']: 145 | if msg.result in [testResultEnum.PASS]: 146 | summary['pass'] += 1 147 | if msg.result in [testResultEnum.NOT_TESTED]: 148 | summary['not_tested'] += 1 149 | for record in my_result['records']: 150 | if record.levelname.lower() in ['error', 'warning']: 151 | summary[record.levelname.lower()] += 1 152 | if record.result: 153 | summary[record.result] += 1 154 | 155 | important_block = tag.div('Results Summary') 156 | important_block += tag.div(", ".join([ 157 | 'Pass: {}'.format(summary['pass']), 158 | 'Fail: {}'.format(summary['error']), 159 | 'Warning: {}'.format(summary['warning']), 160 | 'Not Tested: {}'.format(summary['not_tested']), 161 | ])) 162 | htmlStrBodyHeader += tag.tr(tag.td(important_block, 'class="center"')) 163 | 164 | infos = {x: config[x] for x in config if x not in ['systeminfo', 'ip', 'password', 'description']} 165 | infos_left, infos_right = dict(), dict() 166 | for key in sorted(infos.keys()): 167 | if len(infos_left) <= len(infos_right): 168 | infos_left[key] = infos[key] 169 | else: 170 | infos_right[key] = infos[key] 171 | 172 | htmlButtons = '
Expand All
' 173 | htmlButtons += '
Collapse All
' 174 | htmlButtons += tag.div('Show Configuration', attr='class="button pass" onClick="document.getElementById(\'resNumConfig\').classList.toggle(\'resultsShow\');"') 175 | 176 | htmlStrBodyHeader += tag.tr(tag.th('Full Test Report', 'class="titlerow bluebg"')) 177 | htmlStrBodyHeader += tag.tr(tag.th(htmlButtons)) 178 | 179 | block = tag.td(tag.div(infoBlock(infos_left), 'class=\'column log\'') \ 180 | + tag.div(infoBlock(infos_right), 'class=\'column log\''), 181 | 'id=\'resNumConfig\' class=\'results\'') 182 | 183 | htmlStrBodyHeader += tag.tr(block) 184 | 185 | for cnt, item in enumerate(results): 186 | entry = [] 187 | my_result = results[item] 188 | response_time = my_result.get('rtime') 189 | rtime = '(response time: {})'.format(my_result['rtime']) 190 | rcode = my_result['rcode'] 191 | if rcode == -1 or my_result['rtime'] == 0: 192 | rtime = '' 193 | 194 | if len(my_result['messages']) == 0 and len(my_result['records']) == 0: 195 | continue 196 | 197 | # uri block 198 | prop_type, type_name = my_result['fulltype'], '' 199 | if prop_type is not None: 200 | type_name = getType(prop_type) 201 | 202 | infos_a = [str(my_result.get(x)) for x in ['uri', 'samplemapped'] if my_result.get(x) not in ['', None]] 203 | if rtime != '': 204 | infos_a.append(rtime) 205 | if type_name: 206 | infos_a.append(type_name) 207 | uriTag = tag.tr(tag.th(infoBlock(infos_a, ' '), 'class="titlerow bluebg"')) 208 | entry.append(uriTag) 209 | 210 | # info block 211 | infos_b = [str(my_result.get(x)) for x in ['uri'] if my_result.get(x) not in ['',None]] 212 | infos_b.append(rtime) 213 | infos_b.append(tag.div('Show Results', attr='class="button warn" onClick="document.getElementById(\'resNum{}\').classList.toggle(\'resultsShow\');"'.format(cnt))) 214 | buttonTag = tag.td(infoBlock(infos_b), 'class="title" style="width:30%"') 215 | 216 | infos_content = [str(my_result.get(x)) for x in ['context', 'origin', 'fulltype']] 217 | infos_c = {y: x for x,y in zip(infos_content, ['Context', 
'File Origin', 'Resource Type'])} 218 | infosTag = tag.td(infoBlock(infos_c), 'class="titlesub log" style="width:40%"') 219 | 220 | success = my_result['success'] 221 | if success: 222 | if rcode != -1: 223 | getTag = tag.td('GET Success HTTP Code ({})'.format(rcode), 'class="pass"') 224 | else: 225 | getTag = tag.td('GET Success', 'class="pass"') 226 | else: 227 | getTag = tag.td('GET Inaccessible HTTP Code ({})'.format(rcode), 'class="warn"') 228 | 229 | if rcode == None: 230 | getTag = tag.td('-', 'class="pass"') 231 | 232 | my_summary = Counter() 233 | 234 | for msg in my_result['messages']: 235 | if msg.result in [testResultEnum.PASS]: 236 | my_summary['pass'] += 1 237 | if msg.result in [testResultEnum.NOT_TESTED]: 238 | my_summary['not_tested'] += 1 239 | 240 | for record in my_result['records']: 241 | if record.levelname.lower() in ['error', 'warning']: 242 | my_summary[record.levelname.lower()] += 1 243 | if record.result: 244 | my_summary[record.result] += 1 245 | 246 | countsTag = tag.td(infoBlock(my_summary, split='', ffunc=applyInfoSuccessColor), 'class="log"') 247 | 248 | rhead = ''.join([buttonTag, infosTag, getTag, countsTag]) 249 | for x in [('tr',), ('table', 'class=titletable'), ('td', 'class=titlerow'), ('tr')]: 250 | rhead = wrapTag(''.join(rhead), *x) 251 | entry.append(rhead) 252 | 253 | # actual table 254 | rows = [(str(i.name), 255 | str(i.entry), str(i.expected), str(i.actual), str(i.result.value)) for i in my_result['messages']] 256 | titles = ['Name', 'Value', 'Expected', 'Actual', 'Result'] 257 | widths = ['15','30','30','10','15'] 258 | tableHeader = tableBlock(rows, titles, widths, ffunc=applySuccessColor) 259 | 260 | # lets wrap table and errors and warns into one single column table 261 | tableHeader = tag.tr(tag.td((tableHeader))) 262 | 263 | infos_a = [str(my_result.get(x)) for x in ['uri'] if my_result.get(x) not in ['',None]] 264 | infos_a.append(rtime) 265 | 266 | errors = [x for x in my_result['records'] if x.levelno == 
def htmlLogScraper(htmlReport):
    """Convert an HTML interop report into a CSV placed under ./logs.

    Scrapes the per-resource title tables for URI/status/response-time/context
    info, then joins each property-result row to its resource via the report's
    internal resNum element ids.

    Args:
        htmlReport (str): path to an InteropHtmlLog report produced by this tool.
    """
    outputLogName = os.path.split(htmlReport)[-1]
    # BUGFIX: both file handles are now context-managed; previously the input
    # report handle was never closed at all
    with open('./logs/{}.csv'.format(outputLogName), 'w', newline='') as output, \
            open(htmlReport, 'r') as htmlLog:
        csv_output = csv.writer(output)
        csv_output.writerow(['URI','Status','Response Time','Context','File Origin','Resource Type','Property Name','Value','Expected','Actual','Result'])
        soup = BeautifulSoup(htmlLog, 'html.parser')
        glanceDetails = {}
        table = soup.find_all('table', {'class':'titletable'})
        for tbl in table:
            tr = tbl.find('tr')
            URIresp = tr.find('td',{'class':'title'}) # URI, response time, show results button
            URI = URIresp.text.partition('(')[0]
            responseTime = URIresp.text.partition('response time')[2].split(')')[0].strip(':s')
            # NOTE(review): assumes every title row has a pass/fail GET cell;
            # a malformed report would raise AttributeError here
            StatusGET = tr.find('td',{'class':'pass'}) or tr.find('td',{'class':'fail'})
            if 'Success' in StatusGET.text:
                Status = '200'
            else:
                Status = '400'

            context,FileOrigin,ResourceType = ' ',' ',' '
            if 'Context:' in tr.find_all('td')[1].text:
                context = tr.find_all('td')[1].text.split('Context:')[1].split('File')[0]
            if 'File Origin'in tr.find_all('td')[1].text:
                FileOrigin = tr.find_all('td')[1].text.split('File Origin:')[1].split('Resource')[0]
            if 'Resource Type'in tr.find_all('td')[1].text:
                ResourceType = tr.find_all('td')[1].text.split('Resource Type:')[1]
            # Recover the element id (resNum) from the Show Results button's onClick
            resNumHtml = str(tr.find('div', {'class':'button warn'}))
            resNum = resNumHtml.split('.')[1].split('getElementById')[1].strip("()'")
            results = URI+'*'+Status+'*'+responseTime+'*'+context+'*'+FileOrigin+'*'+ResourceType+'*' #using * for csv splitting since some values have commas
            glanceDetails[results] = resNum # mapping of results to their respective tables

        # find_all (not the deprecated findAll alias)
        properties = soup.find_all('td',{'class':'results'})
        data = []
        for table in properties:
            tableToStr = str(table)
            tableID = tableToStr.split('id=')[1].split('>')[0].strip('"')
            if len(table.find_all('table')) == 0:
                continue
            tableBody = table.find_all('table')[-1]
            tableRows = tableBody.find_all('tr')[1:] #get rows from property tables excluding header
            for tr in tableRows:
                td = tr.find_all('td')
                row = [i.text for i in td]
                for k,v in glanceDetails.items():
                    if v == tableID:
                        data.append(k+'*'.join(row))
        csv_output.writerows([x.split('*') for x in data]) #using * for csv splitting since some values have commas
def validateSingleURI(URI, profile, uriName='', expectedType=None, expectedSchema=None, expectedJson=None, parent=None):
    """
    Validates a single URI that is given, returning its ResourceObject, counts and links

    Args:
        URI: target URI to GET and validate; None is tolerated and replaced with a
            placeholder derived from the parent's URI.
        profile: interop profile dict whose 'Resources' section is matched against
            the payload's @odata.type.
        uriName: label used as the key of the returned results dict.
        expectedType/expectedSchema/expectedJson: optional expectations forwarded
            to traverseInterop.createResourceObject.
        parent: resource object that linked to this URI; used to resolve
            fragment (#) @odata.id references.

    Returns:
        tuple: (traversable, results, links, resource_obj) where traversable is False
        only when the resource could not be fetched/created, results maps uriName to
        a per-resource result dict, links is a (links, limited_links) pair (or None
        on failure), and resource_obj is the created resource (or None on failure).
    """
    # rs-assertion: 9.4.1
    # Initial startup here
    results, messages = {}, []

    # Discard any log records captured for the previous resource
    record_capture.flush()

    # Skeleton result entry; fields are filled in as validation progresses
    results[uriName] = {'uri': URI,
                        'success': False,
                        'messages': messages,
                        'records': [],
                        'rtime': 'n/a',
                        'context': '',
                        'fulltype': '',
                        'rcode': 0,
                        'payload': {}}

    # check for @odata mandatory stuff
    # check for version numbering problems
    # check id if it's the same as URI
    # check @odata.context instead of local.  Realize that @odata is NOT a "property"

    # Attempt to get a list of properties
    if URI is None:
        # A link with no target: synthesize a placeholder URI so logs stay readable
        if parent is not None:
            parentURI = parent.uri
        else:
            parentURI = 'MissingParent'
        URI = parentURI + '/Missing URI Link'
        my_logger.warning('Missing URI Warning: Tool appears to be missing vital URI information, replacing URI w/: {}'.format(URI))
    # Generate dictionary of property info
    try:
        resource_obj, return_status = traverseInterop.createResourceObject(
            uriName, URI, expectedJson, expectedType, expectedSchema, parent)

        results[uriName]['rcode'] = return_status

        if not resource_obj:
            # Resource could not be fetched; capture records and bail out as non-traversable
            my_logger.warning('Bad Resource Error: This {} is inaccessible and cannot be validated or traversed for links.'.format(URI))
            results[uriName]['records'] = record_capture.flush()
            results[uriName]['payload'] = {}
            return False, results, None, None
        else:
            results[uriName]['payload'] = resource_obj.jsondata

    except traverseInterop.AuthenticationError:
        raise  # re-raise exception
    except Exception as e:
        my_logger.debug('Exception caught while creating ResourceObj', exc_info=1)
        my_logger.error('Exception Resource Error: Unable to gather property info for URI {}: {}'.format(URI, repr(e)))
        results[uriName]['records'] = record_capture.flush()
        return False, results, None, None

    results[uriName]['success'] = True

    # Verify odata type
    profile_resources = profile.get('Resources')

    my_logger.verbose1("*** {}, {}".format(uriName, URI))
    uriName, SchemaFullType, jsondata = uriName, uriName, resource_obj.jsondata
    SchemaType = getType(jsondata.get('@odata.type', 'NoType'))

    oemcheck = traverseInterop.config.get('oemcheck', True)

    collection_limit = traverseInterop.config.get('collectionlimit', {'LogEntry': 20})

    if SchemaType not in profile_resources:
        # Type not named by the profile: nothing to validate, but still harvest links
        my_logger.verbose1('Visited {}, type {}'.format(URI, SchemaType))
        # Get all links available
        links, limited_links = getURIsInProperty(jsondata, uriName, oemcheck, collection_limit)
        return True, results, (links, limited_links), resource_obj

    # Track how many instances of this profile resource type were encountered
    if '_count' not in profile_resources[SchemaType]:
        profile_resources[SchemaType]['_count'] = 0
    profile_resources[SchemaType]['_count'] += 1

    # Verify odata_id properly resolves to its parent if holding fragment
    odata_id = resource_obj.jsondata.get('@odata.id', '')
    if '#' in odata_id:
        if parent is not None:
            payload_resolve = traverseInterop.navigateJsonFragment(parent.jsondata, URI)
            if payload_resolve is None:
                my_logger.error('Referenceable Member Error: @odata.id of ReferenceableMember does not contain a valid JSON pointer for this payload: {}'.format(odata_id))
            elif payload_resolve != resource_obj.jsondata:
                my_logger.error('Referenceable Member Error: @odata.id of ReferenceableMember does not point to the correct object: {}'.format(odata_id))
        else:
            my_logger.warning('No parent found with which to test @odata.id of ReferenceableMember')

    # If URI was sampled, get the notation text from traverseInterop.uri_sample_map
    sample_string = traverseInterop.uri_sample_map.get(URI)
    sample_string = sample_string + ', ' if sample_string is not None else ''

    results[uriName]['uri'] = (str(URI))
    results[uriName]['samplemapped'] = (str(sample_string))
    results[uriName]['rtime'] = resource_obj.rtime
    results[uriName]['payload'] = resource_obj.jsondata
    results[uriName]['context'] = resource_obj.context
    results[uriName]['fulltype'] = resource_obj.typename

    my_logger.info('\n')
    my_logger.info("*** %s, %s", URI, SchemaType)
    my_logger.debug("*** %s, %s, %s", expectedType, expectedSchema is not None, expectedJson is not None)
    my_logger.info("\t Type (%s), GET SUCCESS (time: %s)", resource_obj.typename, resource_obj.rtime)

    # Run the interop assertions for this resource type against the payload
    profile_resources = profile_resources.get(SchemaType)
    try:
        propMessages = interop.validateInteropResource(resource_obj, profile_resources, jsondata)
        messages.extend(propMessages)
    except Exception:
        my_logger.exception("Something went wrong")
        my_logger.error('Exception Error: Could not finish validation check on this payload')
    my_logger.info('%s \n', SchemaFullType)

    # Get all links available
    links, limited_links = getURIsInProperty(resource_obj.jsondata, uriName, oemcheck, collection_limit)

    results[uriName]['records'] = record_capture.flush()

    # PASS/FAIL for this resource is decided purely by captured ERROR records
    my_errors = [x for x in results[uriName]['records'] if x.levelno == Level.ERROR]

    pass_val = len(my_errors) == 0
    my_logger.info("\t {}".format('PASS' if pass_val else ' FAIL...'))

    return True, results, (links, limited_links), resource_obj
resource_obj.typename, resource_obj.rtime) 124 | 125 | profile_resources = profile_resources.get(SchemaType) 126 | try: 127 | propMessages = interop.validateInteropResource(resource_obj, profile_resources, jsondata) 128 | messages.extend(propMessages) 129 | except Exception: 130 | my_logger.exception("Something went wrong") 131 | my_logger.error('Exception Error: Could not finish validation check on this payload') 132 | my_logger.info('%s \n', SchemaFullType) 133 | 134 | # Get all links available 135 | links, limited_links = getURIsInProperty(resource_obj.jsondata, uriName, oemcheck, collection_limit) 136 | 137 | results[uriName]['records'] = record_capture.flush() 138 | 139 | my_errors = [x for x in results[uriName]['records'] if x.levelno == Level.ERROR] 140 | 141 | pass_val = len(my_errors) == 0 142 | my_logger.info("\t {}".format('PASS' if pass_val else ' FAIL...')) 143 | 144 | return True, results, (links, limited_links), resource_obj 145 | 146 | 147 | urlCheck = re.compile(r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") 148 | allowable_annotations = ['@odata.id'] 149 | 150 | def getURIsInProperty(property, name='Root', oemcheck=True, collection_limit={}): 151 | my_links, limited_links = {}, {} 152 | 153 | # Return nothing if we are Oem 154 | if not oemcheck and name == 'Oem': 155 | return my_links, limited_links 156 | if isinstance(property, dict): 157 | for sub_name, value in property.items(): 158 | if '@' in sub_name and sub_name.lower() not in allowable_annotations: 159 | continue 160 | if isinstance(value, str) and sub_name.lower() in ['@odata.id']: 161 | my_link = getURIfromOdata(value) 162 | if my_link: 163 | if '/Oem/' not in my_link: 164 | my_links[name] = my_link 165 | if '/Oem/' in my_link and oemcheck: 166 | my_links[name] = my_link 167 | else: 168 | new_links, new_limited_links = getURIsInProperty(value, "{}:{}".format(name, sub_name), oemcheck) 169 | limited_links.update(new_limited_links) 170 | parent_type = 
def getURIfromOdata(property):
    """Return *property* when it looks like a followable Redfish URI, else None.

    Candidates whose body (everything but the trailing five characters)
    contains '.json' are rejected; the rest qualify when they are
    service-rooted ('/redfish/v1') or match the absolute-URL pattern.
    """
    trimmed = property[:-5].lower()
    if '.json' in trimmed:
        return None
    is_service_rooted = '/redfish/v1' in property
    if is_service_rooted or urlCheck.match(property):
        return property
    return None
    # parent first, then child execution:
    # do top level root first, then do each child root, then their children...
    # hold refs for last (less recursion)
    if validateSuccess:
        # ServiceRoot only: compare the reported RedfishVersion against the
        # profile's Protocol.MinVersion (default '1.0.0')
        serviceVersion = profile.get("Protocol")
        if serviceVersion is not None and uriName == 'ServiceRoot':
            serviceVersion = serviceVersion.get('MinVersion', '1.0.0')
            msg, m_success = interop.validateMinVersion(resource_obj.jsondata.get("RedfishVersion", "0"), serviceVersion)
            message_list.append(msg)

        # Breadth-first walk of the link tree; each entry is (name, uri, parent)
        currentLinks = [(link, links[link], resource_obj) for link in links]
        # todo : churning a lot of links, causing possible slowdown even with set checks
        while len(currentLinks) > 0:
            newLinks = list()
            for linkName, link, parent in currentLinks:

                # Skip missing links and URIs already visited
                if link is None or link.rstrip('/') in allLinks:
                    continue

                if '#' in link:
                    # NOTE: Skips referenced Links (using pound signs), this program currently only works with direct links
                    continue

                # Defer reference-style links (Links/RelatedItem/Redundancy)
                # until all direct subordinate links have been processed
                if refLinks is not currentLinks and ('Links' in linkName.split('.') or 'RelatedItem' in linkName.split('.') or 'Redundancy' in linkName.split('.')):
                    refLinks.append((linkName, link, parent))
                    continue

                # NOTE: unable to determine autoexpanded resources without Schema
                else:
                    linkSuccess, linkResults, inner_links, linkobj = \
                        validateSingleURI(link, profile, linkName, parent=parent)

                    allLinks.add(link.rstrip('/'))

                    results.update(linkResults)

                    if not linkSuccess:
                        continue

                    inner_links, inner_limited_links = inner_links

                    # Links dropped by the collection limit still count as visited
                    for skipped_link in inner_limited_links:
                        allLinks.add(inner_limited_links[skipped_link])

                    innerLinksTuple = [(link, inner_links[link], linkobj) for link in inner_links]
                    newLinks.extend(innerLinksTuple)
                    SchemaType = getType(linkobj.jsondata.get('@odata.type', 'NoType'))

                    # Record the chain of ancestor resource types for this link
                    subordinate_tree = []

                    current_parent = linkobj.parent
                    while current_parent:
                        parentType = getType(current_parent.jsondata.get('@odata.type', 'NoType'))
                        subordinate_tree.append(parentType)
                        current_parent = current_parent.parent

                    # Search for UseCase.USECASENAME
                    usecases_found = [msg.name.split('.')[-1] for msg in linkResults[linkName]['messages'] if 'UseCase' == msg.name.split('.')[0]]

                    # Accumulate per-type statistics for the service-level checks below
                    if resource_stats.get(SchemaType) is None:
                        resource_stats[SchemaType] = {
                            "Exists": True,
                            "Writeable": False,
                            "URIsFound": [link.rstrip('/')],
                            "SubordinateTo": set([tuple(reversed(subordinate_tree))]),
                            "UseCasesFound": set(usecases_found),
                        }
                    else:
                        resource_stats[SchemaType]['Exists'] = True
                        resource_stats[SchemaType]['URIsFound'].append(link.rstrip('/'))
                        resource_stats[SchemaType]['SubordinateTo'].add(tuple(reversed(subordinate_tree)))
                        resource_stats[SchemaType]['UseCasesFound'] = resource_stats[SchemaType]['UseCasesFound'].union(usecases_found)

            # When the direct tree is exhausted, process the deferred refLinks once
            if refLinks is not currentLinks and len(newLinks) == 0 and len(refLinks) > 0:
                currentLinks = refLinks
            else:
                currentLinks = newLinks

    my_logger.info('Service Level Checks')
    # NOTE: readrequirements will likely be errors when using --payload outside of root

    # For every resource check ReadRequirement
    # TODO: verify if IfImplemented should report a fail if any fails exist. Also verify the same for Recommended
    resources_in_profile = profile.get('Resources', [])
    for resource_type in resources_in_profile:
        profile_entry = resources_in_profile[resource_type]

        if 'PropertyRequirements' in profile_entry:
            msgs = interop.validateComparisonAnyOfAllOf(profile_entry['PropertyRequirements'], resource_type)
            message_list.extend(msgs)

        does_resource_exist, expected_requirement = False, None

        resource_exists, uris_found, subs_found = False, [], []

        # If exist and for what URIs...
        if resource_type in resource_stats:
            resource_exists = resource_stats[resource_type]['Exists']
            uris_found = resource_stats[resource_type]['URIsFound']
            subs_found = resource_stats[resource_type]['SubordinateTo']
            # NOTE(review): usecases_found is only (re)bound when the type was
            # seen; a UseCase check for an absent type reads whatever value is
            # left over from a previous iteration -- confirm intended
            usecases_found = resource_stats[resource_type]['UseCasesFound']

        # Before all else, UseCases takes priority
        if 'UseCases' in profile_entry:
            # For each use case, apply the Requirement
            for use_case in profile_entry['UseCases']:
                entry_title = use_case.get("UseCaseTitle", "NoName").replace(' ', '_')
                expected_requirement = use_case.get("ReadRequirement", "Mandatory")
                uris_applied = use_case.get("URIs")

                # A use case with URIs only "exists" if one of the found URIs matches
                if uris_applied:
                    does_resource_exist = any([interop.compareRedfishURI(uris_applied, uri) for uri in uris_found])
                else:
                    does_resource_exist = resource_exists

                does_resource_exist = does_resource_exist and entry_title in usecases_found

                my_logger.info('Validating UseCase {} of {} ReadRequirement'.format(entry_title, resource_type))

                my_msg, _ = interop.validateRequirementResource(expected_requirement, 'Exists' if does_resource_exist else REDFISH_ABSENT)
                my_msg.name = 'UseCase.{}.{}'.format(entry_title, my_msg.name)
                if uris_applied:
                    my_msg.expected = "{} at {}".format(my_msg.expected, ", ".join(uris_applied))
                message_list.append(my_msg)
            # UseCases replace the plain ReadRequirement check entirely
            continue

        # Check conditionals, if it applies, get its requirement
        elif "ConditionalRequirements" in profile_entry:
            for condition in profile_entry['ConditionalRequirements']:
                uris_applied = condition.get("URIs")
                subordinate_condition = condition.get("SubordinateToResource")
                # Check if we have valid URIs for this conditional
                if uris_applied:
                    does_resource_exist = any([interop.compareRedfishURI(uris_applied, uri) for uri in uris_found])
                    my_logger.info('Checking if any {} in {}: {}'.format(uris_found, uris_applied, does_resource_exist))
                # Or check if we are underneath the correct resource chain
                elif subordinate_condition:
                    does_resource_exist = any([(tuple((subordinate_condition))) == chain[-len(subordinate_condition):] for chain in subs_found])
                    my_logger.info('Checking if any {} matches {}: {}'.format([x for x in subs_found], subordinate_condition, does_resource_exist))
                # warn user if Conditional has no appropriate conditions to use
                else:
                    does_resource_exist = resource_exists
                    my_logger.warning('Missing Conditional Warning: This resource {} has no valid Conditional in ConditionalRequirements'.format(resource_type))

                # if we have a ReadRequirement...
                expected_requirement = condition.get("ReadRequirement")
                if expected_requirement:
                    my_logger.info('Validating {} Conditional ReadRequirement'.format(resource_type))
                    my_msg, _ = interop.validateRequirementResource(expected_requirement, 'Exists' if does_resource_exist else REDFISH_ABSENT)
                    my_msg.name = '{}.Conditional.{}'.format(resource_type, my_msg.name)
                    if uris_applied:
                        my_msg.expected = "{} at {}".format(my_msg.expected, ", ".join(uris_applied))
                    if subordinate_condition:
                        my_msg.expected = "{} under {}".format(my_msg.expected, ", ".join(subordinate_condition))
                    message_list.append(my_msg)

        # Outside of ConditionalRequirements, check just for URIs
        # TODO: Verify if this should run if ConditionalRequirements exists
        expected_requirement = profile_entry.get("ReadRequirement", "Mandatory")
        uris_applied = profile_entry.get("URIs")

        if uris_applied:
            does_resource_exist = any([interop.compareRedfishURI(uris_applied, uri) for uri in uris_found])
        else:
            does_resource_exist = resource_exists

        my_logger.info('Validating {} ReadRequirement'.format(resource_type))
        my_msg, _ = interop.validateRequirementResource(expected_requirement, 'Exists' if does_resource_exist else REDFISH_ABSENT)
        my_msg.name = '{}.{}'.format(resource_type, my_msg.name)
        if uris_applied:
            my_msg.expected = "{} at {}".format(my_msg.expected, ", ".join(uris_applied))
        message_list.append(my_msg)

    # interop service level checks
    finalResults = {}

    # Synthetic 'n/a' entry carrying the service-level messages
    finalResults['n/a'] = {'uri': "Service Level Requirements", 'success': False,  # FIX
                           'messages': message_list, 'records': [], 'rcode': None,
                           'rtime': None, 'context': '', 'fulltype': ''}
    finalResults.update(results)

    return validateSuccess, finalResults, refLinks, resource_obj
/redfish_interop_validator/RedfishLogo.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2017-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/master/LICENSE.md 4 | 5 | """ 6 | Redfish Logo 7 | 8 | File : RedfishLogo.py 9 | 10 | Brief : This file contains the Base64 encoded image data for the Redfish Logo 11 | """ 12 | 13 | logo = "R0lGODlhLAHTAHAAACH5BAEAAPwALAAAAAAsAdMAhwAAAAAAMwAAZgAAmQAAzAAA/wArAAArMwArZgArmQArzAAr/wBVAABVMwBVZgBVmQBVzABV/wCAAACAMwCAZgCAmQCAzACA/wCqAACqMwCqZgCqmQCqzACq/wDVAADVMwDVZgDVmQDVzADV/wD/AAD/MwD/ZgD/mQD/zAD//zMAADMAMzMAZjMAmTMAzDMA/zMrADMrMzMrZjMrmTMrzDMr/zNVADNVMzNVZjNVmTNVzDNV/zOAADOAMzOAZjOAmTOAzDOA/zOqADOqMzOqZjOqmTOqzDOq/zPVADPVMzPVZjPVmTPVzDPV/zP/ADP/MzP/ZjP/mTP/zDP//2YAAGYAM2YAZmYAmWYAzGYA/2YrAGYrM2YrZmYrmWYrzGYr/2ZVAGZVM2ZVZmZVmWZVzGZV/2aAAGaAM2aAZmaAmWaAzGaA/2aqAGaqM2aqZmaqmWaqzGaq/2bVAGbVM2bVZmbVmWbVzGbV/2b/AGb/M2b/Zmb/mWb/zGb//5kAAJkAM5kAZpkAmZkAzJkA/5krAJkrM5krZpkrmZkrzJkr/5lVAJlVM5lVZplVmZlVzJlV/5mAAJmAM5mAZpmAmZmAzJmA/5mqAJmqM5mqZpmqmZmqzJmq/5nVAJnVM5nVZpnVmZnVzJnV/5n/AJn/M5n/Zpn/mZn/zJn//8wAAMwAM8wAZswAmcwAzMwA/8wrAMwrM8wrZswrmcwrzMwr/8xVAMxVM8xVZsxVmcxVzMxV/8yAAMyAM8yAZsyAmcyAzMyA/8yqAMyqM8yqZsyqmcyqzMyq/8zVAMzVM8zVZszVmczVzMzV/8z/AMz/M8z/Zsz/mcz/zMz///8AAP8AM/8AZv8Amf8AzP8A//8rAP8rM/8rZv8rmf8rzP8r//9VAP9VM/9VZv9Vmf9VzP9V//+AAP+AM/+AZv+Amf+AzP+A//+qAP+qM/+qZv+qmf+qzP+q///VAP/VM//VZv/Vmf/VzP/V////AP//M///Zv//mf//zP///wAAAAAAAAAAAAAAAAj/APcJHEiwoMGDCBMqXMiwocOF52C9gnXuocWLGDNq3Mixo8ePIENqbJeqZCpwtl65E8mypcuXMGPKnLmvHiyTqWDZ2vkqFc2fQIMKHUr0YK9UPWHdzMYLpa2SFYtKnUq1qtWB7lJ9e6rK1jdVr2yhzAbWFqyVV/fRc4c2rdu3cBty3alN7M2wtsy9Aovy1S2r9MyWpBi3sOG3JO+CqyWWcU6xgnVCTtVu6q3JqtiZpXe4s+eg9JTaqlUWsi1tSJ2+olYLZdPB9Ya622mu19NX53hFtPW5t2+WR3eeRmp6Z05ek2Eh75sq5V+gT83pfo2bFy92t7IR+829u8WszReD/+tZl/G38SVd3wyPnNrJ221hEuvZPLe5m7CG1a4Nrrn3/wAWlNJeOKnS3E5OPaUTSv29Ms16NzHn30v1NHdf
KgZiWFJeuuV1WypRBShib+eEl1Iq2hj4ijatOQXOXeaYVlKGYfESI2olbQechbZEhJSK2vRizi21tYPfOTvFN+KSboUWVlgJxigcglQmCE5TMTbVVHFNTcTZR1nl1YuN19lnjn5j1kbmOcLkdk4tzzEpZ1VPCedUa4wx6NVoVDbF2JQu7rSllFeq0otHsNSyHzi9MHplL0LaIqR959zipo1CvjjMnJzKdqBXKNVSF4J/WoncVlUi56GHV4qlJYOp6P+I0VG8hGPjLZhKF+ml0wmZq5CQbhlbp8TGJFFxCxbXGJV7mhZojE5B66qHMeoFTjQWhXalmtIN2RSa0tmHq5vA6mrOfue8smmx7IZ0VGsnsibeifThNa8t2SQ6rS1N3cTsqqJ5yMtNhzZ0lJQSqaIUkTYmPFFY9tmYkk7TnQNsLf4q2e7GD7kjEUrr9WSWcapMY2DJn46WoSouMtdTq96u1xxy901TkkoKgTcMOBnilEptMpfknqS53WISRRXzYqB7jfLG8dNy4eUYgaqYx1Wegr0Ca05IncoThns5u5SBKMWIX2NxElTPiT/zsuGVL1qYnq/+Soceaq8kLXJJvLT/s1PBUAdOEDHp6YRjXalMI9w36e201WO1YPgNLA+6ZovNOpWUZdzNieyazRLzgpqO2KYraUm39AfLfqLXV1K5XGHXOIqXlrw6VLqVaMuwgj+9tkmOm6U1V1uBbKJx7ymOPKEYiyUylWGZUzJtKXVF/YZqmdWtgTEi1WjDM3PfI3W96O53Tm1KDEs7SoOo385gAd47u8HhPd5pf96mE8YbwrsaXgjaC5TstBOTVYtK5tiQrYymuKI1qD85kZL0DkSNV/CnF0lJYCr00yEMhStSESzXdEoCrvfx7Evz4xQ9JoInAxVHPG8TC9kCSLkqnUhV0bpNqwQmGH7ZZWbg6J4q/wY2s/HxTXW40s1erpS48vGLZ6lIn90MVK7y6YaE+wFWO0SWwjkZzSs6WQ9PXiSqp7Bmf8Rxyl2QlRM9JQhDCPRQKrIxrZepqnoXEsv3XNigc0kqgxjK1W0sVhskIYV99gEW7gipyOao4i30IMa6uugQwp1kIou5SVcmNxqnuBBeQ0NVNtpIqgfaYhfUQw9klrOTuv2QQzECy4W0kSuoPCUVYzrHhcAxDBcC6ym0jBEIVXGpcgXSTW3qBTtukhunVcVJidsJRzIBADEYQAydqpdwwkKWV0zuRVB6xcs6eTRx5kRrWNOknmQEi7JNS4O0aZ/CBPOYy/1BJxPRXPskk/8hJq5mNGK8VPde4bcsDqkkiARWbo7pTKlEIxy2GEaJutIfFFpEDDfYx3aIAQA5HcUkdVlRSi53oFowZlQbOo3KbuIeF5qGMdPIW5/AAUEfQouJXSEUfWyWHrMlDico6cUoeToYEJrEZjmt2OWGyMj9YFFI6SPJSYxUFGxpdBrDaEc75mESA/nhkRZRRkYBkAkDZGISmRiRMoijDbzghY7HGlV5wGiWxQQQUM5qpYtYaUNBQYZBr3IWzVblokhFCjLnUipik3ZYXprrl1ciVyH1KAxYTAVfvWjHMLJKjIL2IhzD8MtD0LqPFShDDGnFpnf0sY+HpkJUzQsLLCaXJ33/6Us8CyoVAbcCLz7dSU+MkZblgiioHYIDVwJz1K2EySjpcIhIvkJSU3yVxehqqR25nE5t2CGkhJ6pkG3aWYiEItSsahYdmtWsO9Kr1YgooyFn3YcB0GDafWT0PySRrdZmy1988cR5L7qfTjhJxt9OqUX/wqtwWLmcLbnmgMiRrg9plqUeIcluEbYFdnTFqA5dsJA2upTFapU0yYY3G2kDymxuwd6smlernGWvLVJ8kHrEoLT7QMNZ09obq9bEXxPxr2z/m5IX/RdrdOzkn+A14OLodjHmcOcTXQXYaUmYSO/sUV6uhKss0Wy5vPjehce0rV5JJ2no6luv2FTd+8gq/yiX0ap6XdwOdNDZxe4wb0Qm2VqDoOG++5jE
jbtjtObRcciHNjIdMbaVusxVjXpdlpX+KhwpVXqVOcyyO6fTqgtLKi8x0qWWIxZZ61xYN7kUE4jJ9avtAkuZypyxUPQRDY+x487naEeus3qOYawXxlqVZHt3p5BMxOAGMVDtb9whTtkeWiL5SlSi7neaII90cvlkEUpQFVvI9NY8WGuMvlDVqnIXl5VGpjJyJLbgbU3Jh6c+rK20m6oSbwmR/DkLUSCaXidCtHx3xi6k2pFngkuUF0aTn4hYKxCuoOgVGKNaV/crqp4YqHgyk1nVqMTT/aGkQDGM3IxSusMZUe+WPv/DFdzWowq/5aZnGVpdhjVUC0K2r0DNHZPDgYUchb+EtcsYyDCaw+uP+iyz6kU5sFGOE4uKyJLarMsoYZGNfDqolUnZkL5yUsO9PGgrEIopZG7yoESpyCsGgkVb11NuFDWuR/lsp0Qi2KO7lMVug5G7zaDboPRgCkI8MZBuhIE3zfkNHLyDiY8FEpoNwRiKkAELNXx9vgy1o7PgeQVEIaiTEf3OZHF/mIFY5DywmH1DbyfLgZhjONRT9GtVSind8wJH5AnGnRvKUji6doukQLCZRsONrl5mm5KEw1++Qo2Gz3GO8WSmF5rkBdNwNpQv+iu9zNTsPEpEmWGQfegtT2//TgjeDmX0h8WxAtBRzBmWwSymrZ1rvUjnOBzWgDE1fzrnifAp9tEMjZ1xM0Bu8x7Khy8H0i8sYx0oYTKv8R5tZCOqkyV59Bp5gR/hEja9wjfcZw7cR2M04THNMQ8Rkl7cQ3lDRxntYCBZZTOdJVE5QWfBRw8b4nSeMXSoUBZKwTiqEG2k0RwhFTmbJBniVDX4VEF/FU1udDbGgyrO0xy1QA3ttBz+oicTtGU6hGW34SG5AUF+gxzesx8WaBu5ITIXRjaXYkDAohk+MROJ13C2wD5adRMsllU9kVkuhiEvplV/gILEoEHrRQzz0B/gAB5G4nOHsRbugCQxJRF4kyg43lJ1I3MzRVY9JkJ2O8EYAnSJH9ccxRNDYlE4qRFAqvAN68YcfiUyoaYgHdYqgYRlOdFM6XIcHAg+sMBdRNRhQ1KHYGiIiicQQ7c+wCYycmYg4YBdWoVQd1YSBXcU65NVs5ETa5UK7rB9sECDvSFUYoQxPbgTh6YK5eGIT6E4mJgTZXQ5HzMle5Esz+MUjyFAg+Uvf9U+rgFFrYIcoPMon/gzQ9IfqnAuQhIh1mEr6ZJUsfiGmKKBZjFraiFGEpVZxIAaACdVvLBeWYVQndVZUkV5+UWR7fBFMrhB62V+Df8VF4tXEFKVL1oDFizUTjdjZPxzIHQ0Q4txdnsyHtNQKum4FbzwFQfyIEf4HleCHKMEPV3xKkghXa1Sh3JEMYViIdTlL0hSkJeCIimhMTRxC+szgnQmjJrFTJuFXXgIY712SFnFjJRnfifBVSAyD531UXz2G9FAjokyGPkChHjBSfAHMrc0UnMXKLfUTl8DQD2RZBgSXHu5JXnhQvthO1yWE/3IL7bCjxt2Sx2GLzkFXfIYZvdRFrnhHrhEE9HAcAIxH+AAlriEXbumfOnVILnWKMgIbAZicFLVa6Z5C2vVFblmJK84kr8RDvtFOWChITkxOdK2jeOBHjB3EvryQ3b/JUM8ZTKkhBTCWRbzeHuI6TMb8iqumQrWUYHYyZ3VEmZDaR8OdzQ0IZoDAQs36DPGiHRymF620DMa8obYRZGvSQyjhJ0Ep0E+Aw70UCIAMjHYZnr5YhYY1xPo1EpVR3VUZyf6EjakMjESMW1jh5LnOCjVU1ytxIgLUorIARa69D3awKHtZEW6cR9aI0gDWhlDgUHDcBoTCg69Rpub1VbFaF4Do3aYdHnsFRHrU3B+oxR1wSj9NjI6cQ6S5A66EaDiZKD+lSjNM1umIXXFQW5WoyfFwyx6ckd7RWV2AmGgllwkxmXdIqbS5Y8R5i25kV2MhBLWGBPEsDq5RpvAhqO7/6ZZM/piwzAPLZZe5oVnlDcPvuaMlPdr
60UPBDcPswEgoYEv0mag7QRx+OSk+1NXbsUs+dNJBERA1Rkog6UlVCYloLo5g9UoYmgd5iIpidVcNDNmBoVBt4CeIFGSNZE6w9ALt8pd2NWQeZpedFqWBWeCxmhnPCpncias7pAM6ZUMf6hZgDgMgdGLAlEP0JAQ1ToQxIAktwCtEOFW/RUWdUFgDxpgTzZ2f9Vby6KlzCItrAQtrGQbWvhE3zOmEmM3x1Ut3zM+aWKq2mVdukQYs0oQQbcPXShRutae7ECjuzajeGqwLtaaxfqnwfaVF6lewMYWl8enyqBV67WoMCGrVv8pdN8JC7RKEBOxaOA4ZOgURnaFMffzJ1CiW6UkFtHCGOVwcu1qU0+EofVoK1OmK+PTYZgSlG5CLdSFcKxTRaoSsh0RGJaia8PQNxI1tb1mDuzjsMbYb8DGXnYYsXKGkX7Ko3nGpzGWZ/TAa0CRHwnxl2DTMxVRsqaDSbLFaDyRWyybTgjmOICJrpqorjM1ZXvFKB5iG1FGM82lctOlJZGZakl7QbqxYfv6WFLCEkbCJrlhjLrRDmeysA5rMVCLp2J5q+zVghYrrDzaWYi6pxSJseW3Vb3ggS5xDn8AVgfBcjE1I/lJGQhRIcKZGqeXOOtBeqPUMz45QMFpL/snnXj/oVOh6CECdDLUg4Ds4Z3CqXmmxi8mITGNQiAz8gq+Yil/mUW39GYbQQwKMnGXK7omkacX0lU94muapCGwkFm5yX0t54zQhxN+QZHxKZ06YSRMyxEDWxP7oAxLwZux4Q6oUEFcgx+DkQp/kBCU87zEQXUq8sAfF78Esiz0QUoGqCIG4pN21bZP1HsrQx/UchfH4bMNzDU/w4oVhJvg88DR5CZA+IrTgVQ3yBnXWsAKsQz6wDsIcjQ40Z43c5oSZXcd3Dco1xM2ow0uZjSWN7Y3AxYZ4mu3xL0TMhO3QFQhO7tcY3FTVxImw5sNFxbn8T/wNzz5SE96UhInNRhhgyo3/0FutyQlNgNBlpPHmlGFTZkvc0S4NxFMQvSiqjMRDcQvgqe5uqkcb2IgT/gK3NU9tahZooURwzANXYkizsh9mgVBu0cZUgsVW9WROSGCJ4Go9OA260MMNaWC4kcZazEPqAEOycA97qAPRvKmLAEeVowKbSgQWWFOKLMeU9cTqIAQ6XIsYjTIjMBJG9JNfCIYdUE5OjFKo8J5q1Iasgclm7hunygZgYQfWnITijIu6cF9uJEapqY5lUl3UKE6qYYiYVaaqYAKAWwQanerhvSGyhRap+l4r2OMCIV05pgV/fiVhChVmWWRYNmMWvUUt0APN9iVvMgS+pDFArSGBmFGP/9icQQCIanwXgZBOCCzjYPRPIzTHIqxbZl4P1BkL2GTIDbzQhhSco9pPBYULTXUKsr3YMTRnUhyEySWQKogDJp5S9hVHYMENPVxJvsEC7JqFIMhftPAYuVzFAltJJoFFRLFfS42DLtHUASjVQ29QdhltScolhgysQ3ykbYQDi4RmtGAQh+FH3PH0QURfCWTFDYD0tA5XlgBrhF3SROKKnC0Ip3TSsMJPcfzMlgDoTRlHADEjwekOgqYF0lRzrJ4OgnYfDaSnQsFIuYQmbJISObIaccxKTfRPQ2lD0HHWub7cePTz1arTO/ZtV7d1fdbtS8YIX+qjHgmVZhXIvmxVe7/YDTVWBK8LBKNpyIIylMIwbZt68IdbJXMpl+FCTYBFo4n8norvTiBx8Y17RRfMTw4RDZb0jXqQUrTZYFhVtO1QXvcKSaKnAq2IkSoahsxlRuYMki6ESGWe4I9Mhjkuw+uopvvK1X+bJoJ+9W9RBkzWtBapdwMDboktLVtXZbjl3QWArstYTQ3gzlQyDXKPHHBCSEVdBMHrlFaA66+K3bNsyF4M83WnD/8mE7Hs5eUFoof95iNEjmrIzDWTDPknN8uJF26dHdXhEv7wRUg5prz61zHYR8AeRwv8iUeU0jDgDdwCNy3PUqeq0yz+T6vuTNIgcq/
GpvCilCXxw7jR3kk/xXMLgEeLXwXHWyNH0l10tnXJQPGek0QuvMkpmdxjZHjlrht7sfBPukY9TSTAvjjPlRTNDNDgzKF0gHfXggi7MYLaEjfOYeBZqIgk3WUmTWAY2JAEjVjKcHPfQOV2KUgq4mHuoqrWOSC/VhwEcJszeHhKIh0Jzionny2RvKWIREN9UBrNfGXDuLAmkQgm0KrGkIgD/LRt2vG+8BAwil3P0LEKBGcMeeEDurjPVh1ThyFvTVOV7IX3ZQhO3Gmd4FcEJR2qeAeAXmQJ4EpBLLR9VHb8Zk39gE2fQ4i9ougr5NF54MbmcUz9EEjeZprpEyHNKIhLHZL9I4iFetrr7nnG/9C5y3BfQIfnGSswsSEEJZE7xvNcgmRdXO0cZaZdSzkfN0uUs2ZhQnSVjHn8vP4GNf50fLCYIMcOhJR7eKkXFoWNkVbdSdDz0mrOuTCjw7CF1HbvgZCRxYzyd53EmiS9XanHNj14NyXWWJPwStiXnPnwGGL61yLH/kMEo1XEjhid/lixSgZ6IKeOCwfwwi1u5LaLBizaAHWZHnFICdFs/9S833ll1Tol1ZGLVSWrwyyKwoYMQzDHzYSLrlS+dqlXWvKam4iWWJvtRZz9bmJalbbnlBLmwzrq8fKXgfbYr86uu2AEj8h4tYNFkSvknEvjQlh+ypsEkw7MOCUg/sjGtr/SEbTpo2tJBx44m3r9Pw7cbPTUo+WI4FTBqrSxTAcAjsS49+saqqPpZldplAV82qZWzHcxSanPvp9k0vCgJpiT5uhj7kNm7ViL7oGa4ddu7Wlm2cA4Y6XrXr7DB5EmFDhQob73KWCqOpVKlgSU1mElerVRImvMtZCqC8hPVjTIkIM19BgNlsbbdWCZYulNlvgXn2LaSsmOJg1Y36zhbOmLaLgiNYaCvSlUV5GiZorGnWgUXBNbZm7BRVq04HmBoZrmrVm015XeYHlJdZcL15Qz6U15/Vtr7Vyeb292xbv23a8er0FPMxrO7q9+p6jK1ixuXOCGxvmRbjx5GG92A07/9eOsOZhm9t17tXZnWF3nTtrhnVO5WrWIWGhUpUqW8RX00q+UjVbG25YuCdmdLeanrtz54Kv1ik0GzhYSF+1JPrzlc2ksJQSDRoT6VKiSqlij0q0a9SqRQeOtXXOFtirY7dypduV6zmv4OjaKuw2rzmwgPP6R8wczP6LLL+/CiNMwHOEQUwywx7EzLMHNTMswso+o4xCzSg0LbNe3OkrltZGXK0X2mKraDaKNJJIlYxq6y03j1IhkbV6NorpuZaYs0UbpFgCaqfmckpqOvKGckqqocoxD5yqqOLKrLGebK+t9Gzxq7y3wKoKrF7sGwivrPICR7C78GMnLwIBY/C/xP/eWquvwwZs8C/HIsNMGMLYgWyzyxozZ7PTOsts0HZKK02zRE2DpZ0aH0UIGog0gmii32LjyFJKM6LIt1RsgZSh55rThtMha3nOoo52lGlSioZ6qdVXYTVqo1eggrUll3BtyihYchqoqd4qmm68Hj2K6SxemhJmt5joO3Mgj/Dz6ktbXNztKrYAgwmWuhDU6RWwDouMF49qQQyzsj51sczJwv21FgyHYW5YbQit7NdhzSHmsw1hASfUUG+5SDYXU9GmYBZTocY3ai6KDWGIYMkmI1AFPqgliptb8bmZZFNYlWlewrThiF8COCOJfIS1llRMUgW78rKJTRWjoHJq0iv/jYrY1fZchajhtuhSJeIny2LHlmliq2otcICO7Sq+XBUMQY5SKZMxywiG6F4+zZnUpEo1i0js2AyzBWiIbhkmHIhgntjfW2ApCGMSH6q0YNxYrG1TjaYxZKJsPE1471Rusdul5yZa+hVqdGopap8m6hEimS2vdeJUkDIKqItwy8aqzH0DzxyVazNv4vIS9jZtmwdyOZWtMM3IL6/YiUVzbU2nKC5eVM4LRYgWBGzviQ7UM23bPm0w7VcC5NoyiAoEO5XGXralneuxnicjWC5j
h3XUHLWbRFtQuYhmgxf+bcVpfDOJ2BY9mqjjZSAlRqPeZEJx71qC1IhNkLK05kSN/2WOWxzTLkKkWkgugFhKG0WehrX2cMocTMPSBZ+DFajgRifMO8sFD3eOStniYVfBigUzoo34WCw+JEwFXSZWvb9EhlPnsNha1BO1pzXqTFjDTGcmgpiL+KsdFmtHbBqDIYocMRXhME07LHex8rXGRHr73MJotrwVvQhTsnnR53LTm0mRbzXRMAgvJiaTD2Znc/FaIFJiAhHmPBApEtkJBSfinbQZZTaq4AVQbAKwyE1lIFhT46eoIpGn1GR4lKtSbOqjCq9oBCxjAqLl1sILpvWFLrhZS0sQY5LDfOkVkRle1cBmmBx2BomWMZEqkqYR7bEyk6nwTNoalREBHWoYFP85ThVZE0FMFbNjX8wNi2jmGzyCTiMesc0rEgaI56GxIfq4hTQ1Qg2YDI4nxMIawGBkHUd+inQ6sQlFrlPHn4TnWjqCVVM0WROm1PEpiaxSBHnXuk9+allO82Bb8kIXF65lNlfJyhXrQp8rVu1rf7EYoAbUDl1uhnKTkWIM3QaLKCIRIh0yUaMgYpjPpEaYI+JU4Yz5x/apjFIWMdn75tewTmGqYpViGz2U4Q5llOVFz5kYPM+1TOhYDidIuUgtEkZBo0BkO7HCnlO8MxGaJClnRoLgkx6YloO254O/GxlXwME09QyNjm9Jj1sm0rReUI5cFxnoXSDyrcRUT4d3OVD/A7/xoIxkDTPDS1s21OXEMkEkl03kJTF0UreTIkdvsVnmilrUNc11yrLeqw3thrW33sxoRa4aXEtogiKleoQlv6JjTWYEubFi7TdAQao2VCFI1TKSVkTRiHmmJLmbiYdpXbmIliZ4jrWahZOf8ko4rvKq/djOYsrqa34kSSDD2s5qMSNeudrBOuIlzBaYaczxwMbRJSLxbJwh4SvmkdpgNlYl9MibRYJmqfhx8TdiW6r+9Oub2dwGYTPqSHMm+CtpbmSZzHFOUPDYrZcBDCmxIwpvXSdHp1DOKZyL3K26Ax4PbmVKkPQV8yBIkd+ROCs2AWTT0jLXsqrHK9FlTESt/6QRNr3lYaqkUwCrNickNmaFe4olOwimig39EmsZ5YwTYUGSw7l3RJCFMjPrZ5GrLfVg3sNI2eg3rE7tr8HGW1m3wPEH26BqWCzJlFEa+EyXNvV9nf2Imh2ZG5nF6mnPSRLOHvidqjCtLLz71LEQVk6ncMp2Y8JNVcSCGHZEDS8TxA4sGrat9CTyW3UZRmzwqkPBOC+IkCbKUjszRZ5NIzTeG0rUoNJeJzcEvqaCiPpatJuCkdGLs6mf+gQXI9BdbWOWitiLhvUNoOKROgiGrE5gwqKlWe45XBRZcMPzm6CQR2W4ckpT/VlPLGHuZz2b52cnpQ0r0YVr+ymLXP35lv+nRVtnDK3LRf7CoPwEt2qBWSEsMxoycGivfq56hWYi6KpSqabVrYHhZE31PtpILL7A3k2LaHZr7/VmN6YlVsXeXFoCWxwmpRoVThxME9UOO1fSjBdPsFMLo7AsV0TB6jeC5auhWGUq5rk5T6p6pWUxZ+dfgkt7ygMVutRnSn6xinkAdKZK+ydBu4OMYPiTPX1/qbdBxEy2bzGv4tREPUs+OEopxXAqP/ZlDtsm3475G9zAxLQ13TKyRlWxxfUIJjAh9o7qvrhBJmnu3MEwOrezTu4Qnlaiww7Oco6foecTKmDxCrXaYxfA1AUra6ph5TPflmH8Rxh12fSDJuMXxDz/hh2Amfe85FQoQ1WoMYwyeNhbwzAxgqwkHTGZ4aScPshi9r8SYbjF6eeswW32184qcI6gQyrVNj87naMnkgjJZyRVnypPoorin6SVJ7XlFm3pkla+FJcxzYet9DlxCCu90LvAaaE6XOhfGKND+nSGMY4hnvZOv3rVeygz/8cQ7dGMQsmMfwkY2bubgZOslzqmhYEytYOfzuoIF6GfTmmYYdmYiukWt2MjUpmO
UumJaWGJDqyFPyiEP9iCQqiCQmDBQkjBFoTBFyyERWDBRdgCGgyDGbxBG9RBGhSDGTSEReACRqBBRpjBH1yEHzRCRgiDIGQEJywEIpTCKAyDRWAEOjFYhCRcBEOQQiusQkZgQikMgy5kQi8kQzC0wiyUQiLEwjXUQjBEQziEQzFYwzFkBEkgjCT8i1dgLAT/rBF6gCG+EZ4syoibgsDc8yLQ6Y2bkgnjewW66zi6c5ZG5ECYsIlaQZmX0IZFAIBO9MRPBMVQFMVRJMVSNMVTRMVUVMVVZMUwaIcV2IIwgAGO8kO7qYfj0ojZsI34ob2JkSnC0Z9oEjY80p+NQD44w4m425/e+Kb/uruY8JEt68QDOIAAoEZrrMZr1EZqzEZrxEZvzMZwBMdx7MZyNEdyLEdyVMduXMd2TMdyFIBOFIN5pMd6tEd7RIN7nEc0iAEA2IJeCIMYoMNDqsXy0YdwyJTK8o1EXJ5EXJHi+5uFpLuaoazNehmFScYv0hueCJk/6MQH0IEcUIAcEEmShIAZ/yjJHDhJHUiAHNABkCxJGoBJCMgBmaSBkgxJmnzJkHyAm9TJB5gBlnRJoHzJoQzKlixKmRxKHZiBnxzKpyzKlwzKB1hKlazKn5zKHAgAAGg1TQAAKuCFLSDCGygYVECcgrRFhJwU75mvYqqNGaEYYryNiJmGBvKs4uMInFigt3ORCLIN1HqVpdoNbSiETpyBBKBKxJyBB0iAxXTMloTMHIjMlgTKBGjMB0hMyZTMyzzMw8wBxjxMoGRMyRTNxpzMzVxMyfRMxTTNx3TNyTxM05TN0FzNz1zMrYQGJ8sEAKiCcxCDMNiCKpCgDyIGtCyfeuiF/JKvMGLLhFGFBhochf9byBnxiMGknf5Csxf5hgVbCuw5p5boRNtsScfUTMp0TNE8z/MkycrUTNEsz8+0zdQkT6B8T/TMTPvUzNQ0z8ykzPdsSfgEUKr0T83cSsZShn3IzWXQBzQ60Nw8UAQtiNysh9w8iN1UAQ2JjdOgqE+hB+MsH0Asic9pSL4BNvrxDW2iCI3DLAmES2SphYcBzHrRSzIaNKXiRACIzNFEzAGFTfuMTf98zPLsTMykTdDUUR5VTR89Us5c0sV0T8uMzcWUzQF9ANw0CGVgxVB80En4SgKEDQ65M/w4CJEwzofwiF74kA5djXpwooJZpoZ804ybHzjrsokcnKSyDuqMOKX/ujsDoh+jKEwAEFL0PE3FXNL3RFIlpUr8NE8kLVTSTM/SdNT6XM8ZSE0e1YFOrBsszdJPfNDdpALtGTVcIqmQcgejKE4PNQgFdJWY2Dp3IFOFSM619BQ6hURazYg3q0CKeJFJ/I01iwnreJEIyonm6MT9PE/GtEwdVdLJVNLNhNTKrE3OPE1JfVRkVVL9TEwpVU3LhMzKtM/I3MoH5VQACABrlAEEmAF1RQAESNd2ddd27cQtBQAVmAwYwpce8hdbOMuQqEWVqqxJ+YO15NdhgNWDaIcJUsTPqcDeaBiM65TdqKNseNHc2piJwbVTYZpkO61jHdAxIAMyAAKQJYMc/yAD/bxW/xTNNFAD9dzP+oTMBCiGbGVMZY1NxMTZZbXMASVJlBRQzSTJ8fRRCNDUKx1Fa1zUpKRKmpxXgwBVUTUy1oMhIzqiYTAIaypITuGshWmcnhEb/bmFv7iFN6OIX3xYmuKyLqNO1EKWjGQJzBFWili2IQFPQZ3MBNAE1lAGSlhW/bRN95TSvDVUIOVPoN2H8/xWYliGZaCHZVAGx62Hx23cx43cZUiDy0wANWgNODBU9bTSfeDUCSWGSbiBTwwAdSXUxWzafdjN3oyQjyKpivoMw+ghVd0HEgK+ibhASmlAimxVzalOtcUjYdSfmyqz9JG73JKYIamfdJIm5//4SJfN2xGJBsJ9zMoszhwI0hydVsnM28v1z1QlETUYTZRsjctNTfssWtDtRIXQBBXwxABoT81cXQtdkM+4iOzBjL7yjGEgmDNVVXrI
L05hH5ixlTFau5uyNbiMH74EGouTLF9riYKJtphZtohRBY8EgEhNADh4FGLY2ZfF3Csd3G/12281CE0I4c28nxpRYSlNg9ZoWWl1z8/lVIbwSk9kT8RcXS4N1eIYhvxylQoR1R6aB4Rs4autxfwRHu+xDeArmGkoBLNNyNsoRF5zRNYxvu3cCIZ5xEY0UY0YTJ/omOgl1OndB5JU4xxgWTRGCEiFTL89COvd4c2cXmJ40s//1ARNUIY93uMk9mNi0ARQIAbtTUwdOIiQdEk1VuT6PM8CNVqubAgY8MRGrd8uvRB6wWKSiiIT6bfPoAk0IlOslb0JWh8q8z2J66ITPRj6qUuHVUZjlLuNycTn4FOS4w5bVjlYCM8cxduD2FGclVIEyAHxNYj2TF/PPIhNIGYR/tvFRICDiIYVbkwEUFacjeFj9lZv1dEZ+IE5LtxIFedxjeTVEANPLM9OVFMuFYMN4eQNQa8AjCINbZQx7cOwUwb18ZuVckguo7tHrDgEiruKqZhhO5UMPBZlgwVnGRKhAJgbhU0EoARgzs9wbUmEiIYfZVI3JtQkjdSJpuhClc/M/53j/rTN+S1p9SzfbIUAAwCATe3EGxADNMgEhihdABAA+gWAT/XHzZiQJQoiQUmyfvmMzpibe65FrtHah0EmYFORZZrTjMW45HM7H7k4kGPe/TmtB7O7mBjB3ujl9vxlg6Dmit5MhFBhZGZMY1ZhZ/7WBEiIRD3PSSjpbjXkzhyDkB5Ubs3jBLBhUYwBq02IFehEB1BdAGjh3XRFICtqCdmQJSJADe2M9UiIWA27elCeL8qU3KspLF6m6ozlMzM+DswWqx4SNANBQmoJnIBo/wTpfUBdDi5PuK7rEI7mhIjjOq5MzUUIOJjWwiXpY3bZJwWCgzhhR47ZxczUl47kUf80ZmjoxAA4yUvmggyJEMOA7MoYlERJMn/phVoIDmioB1KWvYw6mKV6kdxbnkc80cu6KeQ7xoJOvr3Tux1JMLyDxkBV6w4GZpoF2uDeBzWwZpSkTIUgA8Ne1O11gHtO68j87dcmTR59gLw2CCMVZ/wUzb8uRTFAiHPe4AfoYQBY7Al556DWDF8gqaIGke2mqH41TqUxu73pr4UZRiujVc96LDMzHEyhiVKJHMvp8VpNBY/lTARAYya9cMgk03r4T8gkg4UIYXHOAYWghyUtXATg7TQGykg1XOEuXEZ9z/UtV1K8AYSI7hy4ZCqICQ1xopgxomtpVcoYjYIxWHaABVb/q0V6qB8EKiaH/K+X2giJgNFMWZmuiSAYsb1XQKpfhI4pqw0NZtQjp9n0HOt9UNbPrPQ37msWhnIf9WU0Ls9wlUwK34dtztnuzVFyZt9TLHOD8HCiBQA1BdXYEBRXKZPPOHTZ+g30mpRXSJT1IghVPQcM5ghcCzZFXFG6A6MVMtHh5VWkkohBEiOKIYoD2w6PJdQs516axfKDGIM4Hut68GCDoAfWjNYpPwhQv/DyzHKPVlJENgiWZdkxUAN6TwMyyNaWhORVP0W6XnUcuGQxMIlb/42M2LrM6KvOmIchM+q5ytAVn5heEGWDoFDZ04dbowiRecRjv0htIiNipLuK/2GaQP84uKSYpFqq5ECRlNHvu0VjBKjWdT8I8k3N2wZd2tbmZwbKB90Hlj2IH8hRIbVMf9dybHXMb2YN0VRSDUfFB+1EBAhxMcgIw9Dd7gGiz1CiKHqZZMio7dEMYoAhejCd9kLqVts35gQ+iuGNLjLERBQttouNb8iW+umWxXkV55CI7agFIi/PcS914D7uizYIvk1MnFeGSj/8zmVMhBD8fWjrbnVZv5/UxleJjhbN5YbpVGz1fjzXnXZaEX+ae6nadpiIrXOlGGoHdBjVQzmLzqB1QtEIg21x2z30+ekiSmE7MtKmlWlYaOoUnXjib8rILw5RaKwFQwhPHz3y7v+t4/88CDhQ15bMcs5Fd7L2TzTm2+pfhmaGz3CFcCS3T1JXicYUT3EF
AAod81MsiHM+gBAPA+8yoiC++owSkNOAiH7RUIhAFE4KOIMFiHm2UrnbZ/AgwoQKFzJMSA8WtVSvqL1S9SpVNliwVKWCle0VrIodX10E+WrgtJAVbXmsBStVKlsmbWmDSdMlyFosXwEAkCPBjAdBEVA6mCDBzxxClyZFmuDBQUpKg6o5OOaosoOajk6FevDB0YPRZjht2jTNQbNll+r4+tSs0KlIl86A0LOeQWU99/Lt2xPNvkx8swYGIGagrXPterVr9zLcsMYvbzU+Rwxmu8iZ22GO3Gv/mqrGw9wN7HhQ375oDVezVtgLZsXYFrONtOgxle2UIDO+BAnTJc5pJ8GdDNlRpq1pqbTVYm6r59ygZDUdRCC3aVDsOQ6mAfoAwbK0YMcgRCA9QVWDW2eAT/v0PHY47pPSdUreIFmyS72X7Z8jAACE6eUXgT3FsA8aPQFImGBh1ETZMOcMM0xNtrQjIUcSeoabaMRk1tmFEhHjTmOI0eNHL62puOJBL32EG0kvwViTiy9xRJxxqmTUETUugQNjSLZkYwtF09gCDks22SITdGrJZ9BR2b33gFD+HfRDUgMcVM9+4RmkxnsJIHQUWNQZNMZ+SvWX3j45SEcXXT+kpRR2/0xN+eZdeRW45z6TDGaQYFu8ZOFimV0EYTs/EjrPOTB51lg7HI2WGUftzDPMQ6mEMw9HqdDDIqiruQOTRBzd1pFEMPp2kUWongRbpy/B8g0sMrnUEXCyKqfKHz3RF1QCRUF5nVNS/vSAmfvo8BRSBylTLFpG/fSGVtaRRa1BcAAF11DJqmVsswaBRdavVbr5gFkAQqPnngQiyNenfQJQhXKZMSZZKuBENkxpGskISzskbkjQZqQmCVMv84gUU6gNLxQObDX5Vmqpr2hTEUcVKSdTTRzRxpFMxCEZsUu0wqRjrwDAyV6yUaLrX1kCeLmPdUhtpx5XYCH0UwJ4GXTuVP8HEcNsnWSxqea3VJIx38pwOoXuAwD6PGC7fe0jxp+FUaGKvooxNoxNEeJLairURCaMwJylslmEMp4MTjseup1Kig7bvU89F4XkkUa67b2bxTnNGhJxHNvS3N6wNLdkrdocjiRLtfT0JrDJWtc05egKzVUC3taX7LPR7rNVlUGJ5Z1aPz3ZJrBdNXXfPpm/We4MSalrEDFV97XCPn0xCEAYmSkWoWaQfg2phJCOdu++AW8GqfIkOn/hYrDEe3fDjcY2UuAX/713RiB9P/hJr4CTq/m1FHdSLeAU0tPLTa1+1NPg8jyzDmD95O1aYho1M1wSQJg2Ye47wmoTzJoip5//3ak+PyHXe+wCgKnpji8H8h2gAMCFxgjjaxL64KOSp5kJPW9faZuUh4bhocy4IxkCU4bzbHEL7NmtN98zCd9wGDjFecRWGqGVTGABDiEWh1bf2AmTADC7oxzQP+PS31GStQxr6UxaxercQWb2LOwI0D2yE8rREmgfKx6rXF2pHVJutw+qVRANgsman4LXC68Nr45su9CjqKfCxtxrhSaE1ApZOA93RIYe7hCGLQpCw1C54xUpWU5FsmERHNIKJELiocVg4b2S8dBHJ2HJkYTICPjBBwEt60/t4nIU0bXpWOzJoneMtZAzvoxNYCrg0QoYlLYwMCmy649QJEjBCvZk/x8rwGBhtnBHDRWKhCAUTWaIF7DmjTBtKyRGZK4Zt8iQCBaLdNhAdJhD7wFOI+Yr3+HIyRJauURIQtKJTngCgOi87IA1ow9/cjDA0XFllwfJRANn4ICZGWQZsdyP6DRxz/4kYBLuKWACYEdPMcJlQeyqYAzeyBcvxdEd5+BFoYShPDyyjZDUK6HzPORHEsHQhZZaIQznQY+4zcND5/hmwzIlkfCFpFXeC8mO/mVJWjHHNkB0SY5S4atYHmV+zKJfUNJQjIRsZU0HARMtw8WdgGr1WQl8QBi/+JMFIvCgmUtdntZIzAAR6HdV0FehGHOLWkGGeow5R60Sucdz3IIltf+CKwoh
JxoStfAcS2IMId0xjxniNHu4GQlMPgabnopEkhsLUi1IVSvzychjKZtLU5KlCWIogxigUAZBEUK6pjyAGPNZC1kQ4lXa+Q8/dqpSsOaEOaRINAe0pFwD/wOAdam1gmJIkF9+t7W1Fao0MdkMY14DE7PxkWykUsVMI+NcxhCDHcRglMEUS0h63LSx4JQIRTqyN9wMKRXTUN96X9Gc2DzScTKZJJJgsdQ3YRFUPPtVAogbu5U5BS2o6U7RvgPAOyXFAWEEMH14WVaKNk1qF62aRpEZqJrAtTQPaiY7TNUR0TQqFeaYI8TWNhpbWCRD0JTuoGh6Idea12Gjig3/33Y0scj1lG8w0YhEZLIjx53kfSoDbrJYo4z/PnBbtd0ibMHiv7HcFo3IsorTeJas+gA3AaJLSurMtTI1snFPuSsQYeIY49I4Jl+aCceNhpGSZJRIFYnMzDwQNo+XTKPNvRivdBMVk5nSoxe2qDH2BmKbV2GkxxnJyMYGMjiJ2Fdvi5OnfxBQlXqgJiHL0IQmcoAAok0Jac6CormcQh0wUc6V6NHKV2FHS9chRRNw0AREU4cUCBhgghcuUIaTm8Ew/AgyjUpYjPcVE4FwKDM0ulRmXnLXVIzmIoqB1DRWHBNCwqK8iL4bPWiDw5g4Eleb7ciQPlk25HzjRn818qwTUTDqowiA3vKm31R8aS6ezfusSNHPPmCmSlHTL7j8KThwwQVVVMMnoEGx6D7OvKcbVC3Nhkk2Hmti7BG24xYSMelA9EXYi0AGU9GmW2K5udh8Gf/62zjljMVO5psh7oYjPppVqYDIqlroxMi61B9Y7NSU+u3Hyq0DpnR+C6cEN/xcYi0WulL5dF0/QAdpvRoajjuJrMfAMGI47teznnWwi8FngdpYZJJ3EQtJMzI/AliJOtK8l1xbMxmiM6YmRBrMuNy8KlGFychd1Iqws1atWueNWEKcyf0cgg/8+a90TVvaObCArF3i5V8WJqXbrz9qdNbXbyAGGoNKMIDR5GL2VSHkbeYWJFGpoiDVSLo1ZoXWjltjCLnncPQd0e6ISE+FE8SSbBYnuDpcbwo3yiM3funNP7quMW9WAv+2LFP3zxezv/SrH0TADjO9QRp5DsSupJn/95I0pNCPzTynAm3cHAbd388oW1yv9zWWtN5MwhHmDGlWLR6ikADJRTQJ0Ukf0z1f0wydGYmRv7kS9TUNRUXgT3weTvmJGCDELcCVtRXPvkAMwExIo3BNLwwRh7xfm5mDSbVDYthf79FDaYAGTAhHkMygcrTPSXRKRKRMDiiAUuiA6zgffSwgrf1gEJaFEAod1NCJ0P1gdiRgEQ5h/DBF1QEIooEfQjRSL7wEXPVChHhIOOVeO1wE2ahCLaRNYyBMOyQDX7EgG56DbfDGOhHO3kTOOgVREikIAABIAOghH+ZhH+5hHgaiHgoiIf5hIR7iICYiIi5iIiqiIzIiIQIA/wyswAFMYgwcwArAwCVq4gHEgCbGACh6YieKoifGAIBcoEI0SmKIzfOMIKJkRjgcyQhuhsB8VwZqF+mxIQviyEl8RCcBB3P00HDohH6t1Z6oAF8g414oY08wozE+Y184YwVJ4zHuBQwACDRWDTICBkPIEB994whRU3axUDRJTzu4IO/pojruwzCQIZENEVHxWH4RY/ogCTjc45HYIz7eoz7yYz7+oz4eiS2Yw5Hwgj2awz0SJEIipEEuZDiYw0OCwzkw5DmAA0JOpESagzlM5EZqZC905ERyZDtoZEiG5EeOpGJsJPWYg10lii34zDoahGN4EEoNDzTd0TRtk/XEZP9MQgwOIUc2NEeuiAwotVOtDMmR6ERSLsnhMOWSQM5TDuSSGCQvCCQ4VCVBTiWSZCVBGpYt8AJBVmVVjiBVjuVA9gIvhAMv3AIvVOVHndiJ8UIvqGUvfORHgqVdwmVe8sJH7eWF1AJj8eQ+xKLwcNPxjIZmpBDuacYKCmZMPsQ5FU6t8ND51ErJ1OERpdOS
tJMtHBE+diZTZiZU2qNAliZpUmVfUeWSKORXDiQ4kOVA3oJGtuVVDiRYftRH2QJZWiRY9mZf6qU58KVw4mZe2uVHJQosKBJPukO3pR03fVAz5V7zGBpMOmZMqmL5tASRMQ5vcFYdbpYQgeb5bCaS6IT/PZqnUkKOQa5ma14l5CBJBmJle8rQVRrkRy5JXyFJWgZnLDKlQt6loZmDbMrmXQpnYgynXMKlheQlv3yUS1hnLN7CMABWTg4DOgwDd0mGclqnY9IIxYTMRQhHp0jEyJCKcuCGU/7Ij0HlwsBGVPJCLXQK5BCkdXWEPY7oybTm3IBMcLoNDHbbWZpDp3ylgMKljLzCXhoWqQzDiSlpjVqnPsACKnTK8ewdZjhGYHIoh9KDe5mKUkqMbbSYTmTMesXEEdkIjAgkbagXSsTEkbDK2tVm2YAECcYEL8iK+DgSLBjkjzVHqjipv4hhcKLlyeRLgvKlzKnCoJ6YosEEXIIN/4pqg4wcmnWOyo8xZgymwlx1mpZ26j5IV74oTouZg/mURgCGJ5KogvDVREzQhlMOyjfY4x/AAkFGkqKtp6m650oQR77Y5o+8gjkMCkSCJUfwKa3ipauOIEyQYGLEJW60T0e8ZRYqBy9gBl9KGjv0AjtQCEx0aokFjIy0wx9km6eWq0G4YEwQEbk1pU1MaqVZEkmkW0d4piM5JUosCcgkh5sapEUQ5FXCgvANinveqsC2ZbVyDYu9QlqCpRZWpdxdxFUGKEcQpEXw5UfCxERKCly6l3GWhqfqgyowRkVMaJaaa7lyhk7AhOMI0Y8YycWQ4ePERn4dSb4uybJG5XGQ5v/bkSZuiOX5qAKtoqh9lkaw9ipZFutAZANavmZsTORACmlMfCRcXuty3OVHyWW1psL4sYPHIWk7gFRFeFO5eqXjbKjJmmxy9AbyKdqtlCqSXIR9CaTMHsmzRuVFHBGuBpmcGirkkOjaheWvhoPe8CcvuCo4WASBFq02JOgcwS1eDmc4FWdIwkS2fhRubOQc2QIqmG2nVufZfu4+WCoR2Wyr1mxvDOObxsR47ipoouh7KppVdoRq0kjCbqXNqsKd7utVJp5EBGdirF1utuWg4CZeEmRKAGeCcsRHTqhEDANYDsMrpCPoTu/0quKQqCgJngRzkKDjtM+STMQnkWBm0m3/qaqm3jqs6lalsEJsgCasOdgE4cKEw+7pROZubW6kMETEWdon1dIv454YZmys1n6UNuQi9R6wyULmeh3OjRDjSKToin6vjTIlbuyCaQbZU+4qn9povgqoGCaSonzU4WptciDsBuumggorgSKrRUircL4v3ZyY4NpZYyKwDX+u2tjExRRqDGbw28LsU5KKGOZsaUbEwHYWqxhqo5KNclqqEIPlGD6WRg4oOwxvW25k/1osFn8UR2SrOUDqTiaEatwwGWspxECOJunNcpgnU6roUo7MS3DYi9rscEhlzNkGe/KipiwE7EpkX+HfvqJlcJ7D4M5RW4pwRwCnXTqKkXJu/xk/cqe24ysc0fjOLGhq5VOycWmWpme6ZlXWZn2ypr9+5RA5skHUg/de8dTi5rAKqEF+7VseKG4y6V3qJTuMn1dCsi5/LjgYgn2NZ/sQIziIpia/rkDWQlbi6nt+ch77LCzUDWtkoXuCpWxS5dIKMlrS5dQq8okVJ1i+ginvsjg7Jj2QRMhYZVP+VRuT51NasGk6pXyuJyhjZa3UH2tEKZII8sIapAjL5VUKw1VeLV8NcvHipjAQcsmOs0JrKWcsCRApnkNn8iYbM676qzKTcmv6K8A4DF5dZTXX7wgKw0PyJYG+prQqssUSx0KvtMlCTGhG9ESzcS2IpWlWJUar5v9VDqtMJHSouN7VjqChAShBvzBI8eVBI+grtANLL3W5cuknEZl1hSfOCsfrDrFFRCWadoQ92w2XpqlGFulGxMQWL7FF2CVgMjVan+xHyMRjLYxVZlaaFuTJSFKIfmVvYGyN5bBugoMw3DUK
oy+MuNeeJmdaF3bnLsnG2KOl3Ve+kOhmJh7F9mqaulw9CGsv5J+hfqRl06jWGrZnd6qlrvPN5utxECSN2DSv0i1B2J87LG7TbrYVE6TYfjZtc+j5jKfauhNxWATk3O1THmzIqGMGmo/9Hm2+qGAi1bZycyhpzEraTjQGkyrQgnJJbDVrL24vuGpcStLh8PRyf7c6OsZx3Motb3/vVfett/FkX4HMIufLO4A3fFtnaUBOZk7wyDjOLghRp+pDhQRnsKZ3fAf4OjZ0ejrSJ/GCxLCEUpdrI2kEOAg4hDum4pRmsfatpkY4hmc4QryD6urNMU+D52q4iAu46wnkESHpiKe4hquE+aj/uItreIi/uIzPOI3XuI3fOI7nuI7vOI/3uI//OJAHuRgLOZEzxBgPuUIcuctx6j4weZE/uZbGOJTruDJUuZUrmRiEouhlQu9ZeWlN+Y7jQF8YsLPsSVY4uZLfTV8kGZi7XJq3xhi/OYdiTdYkRJlZjUGkgSeSR6d5X6gsAzK1uf0RkxhwOegi117s00FUjWrQ+V7gVKALet913VpJwuc6ek8oury0Czv6haFjjz5EuqR/G6Wv1YGYLKJnukLAgF+ggTJAwyQcSKr3BBd8k6iPeo0d0zOeerlieoAohKcrhJ/0BSpiD6DXOa4jGqtDo0OV66wr+p0vxJ1rusOMebK7/1yp98QNTMLWZTmf9LqoS1xPFHtCZBg30lCoI/u1N9YB9MWnH8Ss98S7c+izJ4S4G0ZDQAMxZIKUV7u6r/s3ZTsANHu5+8UNgLu6B9u891403DrA05DAL7xB6DpfOLuolxnBs6HDP/zdUHxPZDxC+DoAIHyiFzxfSDxlbzzHO8yy74UbMESw+Vqn1jtC3DvKu5y1r3zA9wXIw7tfMERWyLlstcYyoIacz4yvp5bNoztDFH1CQEOkp9Zp4E1q6PxCRDxDDHvFt0YmZALYZUIuUvtC7LvWzTvNl/nJ342XEEMafD2ZL7q6K0MmTMLXvQGNqYaTW71BCHzPG4Rf8PpCZP+Cx/dFDIi9Qoi8gRCGr39K1omewMdA6GU5jYmB6Dl+QqCGPsjAnpz7lgQ64mt7v1u9wL+8QsQ75yNEPayVkukOYNT75xOIoQ++sFcQsP/JvRcIm+u9QbR72jtLJgg89/3TMwZ9Xgh/Bc09Br1+sO9Dr239QSg/X/AOQlhNzJu57icE8FfQwStE9evO2xujwGcF9Pe+wMsWgRhA9ucizz/j9SNEyxujtJ9/oXM7xfmFCtQ+gcTADYBiu4QHQIQBMJBgQYOZ9u1bYTBhwhsGYzTcl4mgAWUS6xk0KGZSpkliNA68KJFkSZMnUaZUuZJlS5cvW8YIOXPgCmgmA2hEWJL/osE0EntuLAky5EViaDoSJXjD46RJaIglNMCwocaSygCIMTkTjcmHBrvCFDuWbFmzZyXKpKlRq8mgBEe6tVpV58lJRUliLbizpFqCEiGajEsyZNSTbNEmVryYsUu/a8XwNfkYQFiUlA3v02jZpFK4JIkZnHRyYUHABuu53JzSM4AbjWHHln32AEONEVPqNa1S98Cwd6mihKaRHsm3ACRLnLo7IV6Wc1Eexz2benXrJCnvCx04ukEYmoZpyjSefHnurdumLP1ZYm/kJ7M3BA5x8OHgJ91Pv76fP2MYVFsDYLSTAlyLpoQoS64zg5Yx7iD7mGuOpgEhHEg/kzIq6ML+njjsEKb49lmuoPoa+srAExFEbaXj6tuOIAqxu09Cmm4gUbOCXktpuIIM8NDHH1mqLcKQ4DvRSKlkPIlF0B40ScSBvDKQsxkHSg8/7oDMUssUI3SRoA33MbFKNBpBAykxkCLTzI/SRIpLgm5Sacn2RCsyQozEvI0wHFXK8MstAQXyvzsDnHIfLgyyMSaDMkOptfqOU5BKKFmjyUqhdMQy/9BN+QOxIcoAaHQfNOokK8GV8myxyb6SdAtUAOIyKEeU8uPU1v2E/KskIiXyEoAeVxIjhvpI
1XAljSBdlaQnAXDpuPcmnfVKY2+tdjZPG3r2Qo1EvWqpXqEjENm8Si1pPUpbOi49TFPS1Np3FRtU16EQ+zTcrcrVc9pEmdzrJGbzisFKjMCi69vc3IVX4bKwVY7bhtwTySRfJXao3n7HBbcgGNO6Tykb3aNQVt4SXtjkl3JFF1+D4tznVaaUqccjZn0jOKRMLlJGmTxHdNDflXWNuNuMbyRoYKABAPPkpVVqGCiNVjjNyIQLnBpWcje2c96QYnCDGE0KlHogaQUzCHxYptFeSV6Vo2Q34hPJNtjIR30mSNIQ77N6oEbZ3dfCtAFPyek9ldX7aInOPbBY9hryleOGEtfYyORGVqnkwAO/t0KCUmuoah4VlWhxrhOi++mfd5URmlf5JfxvhKnFXPbqiBF2IzQ6b2kSMQ0QI3TqlEGDshjQ+H32xAICADs=" 14 | -------------------------------------------------------------------------------- /redfish_interop_validator/traverseInterop.py: -------------------------------------------------------------------------------- 1 | # Copyright Notice: 2 | # Copyright 2017-2025 DMTF. All rights reserved. 3 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/master/LICENSE.md 4 | 5 | import re 6 | import os 7 | import json 8 | from functools import lru_cache 9 | import logging 10 | from urllib.parse import urlparse, urlunparse 11 | from http.client import responses 12 | from collections import OrderedDict 13 | 14 | import requests 15 | from requests.packages.urllib3.exceptions import InsecureRequestWarning 16 | 17 | from redfish_interop_validator.helper import createContext, getNamespace, getNamespaceUnversioned, getType, navigateJsonFragment 18 | from redfish_interop_validator.session import rfSession 19 | 20 | my_logger = logging.getLogger('rsv') 21 | 22 | currentService = None 23 | config = {} 24 | 25 | commonHeader = {'OData-Version': '4.0'} 26 | requests.packages.urllib3.disable_warnings(InsecureRequestWarning) 27 | 28 | # dictionary to hold sampling notation strings for URIs 29 | uri_sample_map = dict() 30 | 31 | class AuthenticationError(Exception): 32 | """Exception used for failed basic auth or token auth""" 33 | def __init__(self, msg=None): 34 | super(AuthenticationError, self).__init__(msg) 35 | 36 | 37 | def getLogger(): 38 | """ 39 | Grab logger for tools that might use 
this lib 40 | """ 41 | return my_logger 42 | 43 | 44 | def startService(config): 45 | """startService 46 | 47 | Begin service to use, sets as global 48 | 49 | Notes: Strip globals, turn into normal factory 50 | 51 | :param config: configuration of service 52 | :param defaulted: config options not specified by the user 53 | """ 54 | global currentService 55 | if currentService is not None: 56 | currentService.close() 57 | currentService = rfService(config) 58 | config = currentService.config 59 | return currentService 60 | 61 | 62 | class rfService(): 63 | def __init__(self, my_config): 64 | my_logger.info('Setting up service...') 65 | global config 66 | config = my_config 67 | self.config = my_config 68 | # self.proxies = dict() 69 | self.active = False 70 | # Create a Session to optimize connection times 71 | self.session = requests.Session() 72 | 73 | # setup URI 74 | self.config['configuri'] = self.config['ip'] 75 | self.config['usessl'] = urlparse(self.config['configuri']).scheme in ['https'] 76 | self.config['certificatecheck'] = False 77 | self.config['certificatebundle'] = None 78 | self.config['timeout'] = 10 79 | 80 | # NOTE: this is a validator limitation. 
maybe move this to its own config inside validateResource 81 | if self.config['collectionlimit']: 82 | total_len = len(self.config['collectionlimit']) / 2 83 | limit_string = ' '.join(self.config['collectionlimit']) 84 | limit_array = [tuple(found_item.split(' ')) for found_item in re.findall(r"[A-Za-z]+ [0-9]+", limit_string)] 85 | if len(limit_array) != total_len: 86 | raise ValueError('Collection Limit array seems malformed, use format: RESOURCE1 COUNT1 RESOURCE2 COUNT2)...') 87 | self.config['collectionlimit'] = {x[0]: int(x[1]) for x in limit_array} 88 | 89 | # httpprox = config['httpproxy'] 90 | # httpsprox = config['httpsproxy'] 91 | # self.proxies['http'] = httpprox if httpprox != "" else None 92 | # self.proxies['https'] = httpsprox if httpsprox != "" else None 93 | 94 | # Convert list of strings to dict 95 | # self.chkcertbundle = config['certificatebundle'] 96 | # chkcertbundle = self.chkcertbundle 97 | # if chkcertbundle not in [None, ""] and config['certificatecheck']: 98 | # if not os.path.isfile(chkcertbundle) and not os.path.isdir(chkcertbundle): 99 | # self.chkcertbundle = None 100 | # traverseLogger.error('ChkCertBundle is not found, defaulting to None') 101 | # else: 102 | # config['certificatebundle'] = None 103 | 104 | self.currentSession = None 105 | if not self.config['usessl'] and not self.config['forceauth']: 106 | if config['username'] not in ['', None] or config['password'] not in ['', None]: 107 | my_logger.warning('Authentication Credentials Warning: Attempting to authenticate on unchecked http/https protocol is insecure, if necessary please use ForceAuth option. 
Clearing auth credentials...') 108 | config['username'] = '' 109 | config['password'] = '' 110 | if config['authtype'].lower() == 'session': 111 | # certVal = chkcertbundle if ChkCert and chkcertbundle is not None else ChkCert 112 | # no proxy for system under test 113 | # self.currentSession = rfSession(config['username'], config['password'], config['configuri'], None, certVal, self.proxies) 114 | self.currentSession = rfSession(config['username'], config['password'], config['configuri'], None) 115 | self.currentSession.startSession() 116 | 117 | target_version = 'n/a' 118 | 119 | # get Version 120 | success, data, status, delay, _ = self.callResourceURI('/redfish/v1') 121 | if not success: 122 | my_logger.warning('Service Warning: Could not get ServiceRoot') 123 | else: 124 | if 'RedfishVersion' not in data: 125 | my_logger.warning('Service Warning: Could not get RedfishVersion from ServiceRoot') 126 | else: 127 | my_logger.info('Redfish Version of Service: {}'.format(data['RedfishVersion'])) 128 | target_version = data['RedfishVersion'] 129 | if target_version in ['1.0.0', 'n/a']: 130 | my_logger.warning('Service Version Warning: !!Version of target may produce issues!!') 131 | 132 | self.service_root = data 133 | 134 | # with Version, get default and compare to user defined values 135 | # default_config_target = defaultconfig_by_version.get(target_version, dict()) 136 | # override_with = {k: default_config_target[k] for k in default_config_target if k in default_entries} 137 | # if len(override_with) > 0: 138 | # traverseLogger.info('CONFIG: RedfishVersion {} has augmented these tool defaults {}'.format(target_version, override_with)) 139 | # self.config.update(override_with) 140 | 141 | self.active = True 142 | 143 | def close(self): 144 | if self.currentSession is not None and self.currentSession.started: 145 | self.currentSession.killSession() 146 | self.active = False 147 | 148 | def getFromCache(URILink, CacheDir): 149 | CacheDir = os.path.join(CacheDir + 
URILink) 150 | payload = None 151 | if os.path.isfile(CacheDir): 152 | with open(CacheDir) as f: 153 | payload = f.read() 154 | if os.path.isfile(os.path.join(CacheDir, 'index.xml')): 155 | with open(os.path.join(CacheDir, 'index.xml')) as f: 156 | payload = f.read() 157 | if os.path.isfile(os.path.join(CacheDir, 'index.json')): 158 | with open(os.path.join(CacheDir, 'index.json')) as f: 159 | payload = json.loads(f.read()) 160 | payload = navigateJsonFragment(payload, URILink) 161 | return payload 162 | 163 | @lru_cache(maxsize=128) 164 | def callResourceURI(self, URILink): 165 | """ 166 | Makes a call to a given URI or URL 167 | 168 | param arg1: path to URI "/example/1", or URL "http://example.com" 169 | return: (success boolean, data, request status code, full response) 170 | """ 171 | # rs-assertions: 6.4.1, including accept, content-type and odata-versions 172 | # rs-assertion: handle redirects? and target permissions 173 | # rs-assertion: require no auth for serviceroot calls 174 | if URILink is None: 175 | my_logger.warning("Missing URI Warning: Given URI is empty!") 176 | return False, None, -1, 0, None 177 | 178 | config = self.config 179 | # proxies = self.proxies 180 | ConfigIP, UseSSL, AuthType, ChkCert, ChkCertBundle, timeout, Token = config['configuri'], config['usessl'], config['authtype'], \ 181 | config['certificatecheck'], config['certificatebundle'], config['timeout'], config['token'] 182 | # CacheMode, CacheDir = config['cachemode'], config['cachefilepath'] 183 | 184 | scheme, netloc, path, params, query, fragment = urlparse(URILink) 185 | inService = scheme == '' and netloc == '' 186 | if inService: 187 | scheme, netloc, _path, __params, ___query, ____fragment = urlparse(ConfigIP) 188 | URLDest = urlunparse((scheme, netloc, path, params, query, fragment)) 189 | else: 190 | URLDest = urlunparse((scheme, netloc, path, params, query, fragment)) 191 | 192 | payload, statusCode, elapsed, auth, noauthchk = None, '', 0, None, True 193 | 194 | isXML = 
False 195 | if "$metadata" in path or ".xml" in path[:-5]: 196 | isXML = True 197 | my_logger.debug('Should be XML') 198 | 199 | ExtraHeaders = None 200 | if 'extrajsonheaders' in config and not isXML: 201 | ExtraHeaders = config['extrajsonheaders'] 202 | elif 'extraxmlheaders' in config and isXML: 203 | ExtraHeaders = config['extraxmlheaders'] 204 | 205 | # determine if we need to Auth... 206 | if inService: 207 | noauthchk = URILink in ['/redfish', '/redfish/v1', '/redfish/v1/odata'] or\ 208 | '/redfish/v1/$metadata' in URILink 209 | 210 | auth = None if noauthchk else (config.get('username'), config.get('password')) 211 | my_logger.debug('dont chkauth' if noauthchk else 'chkauth') 212 | 213 | # if CacheMode in ["Fallback", "Prefer"]: 214 | # payload = rfService.getFromCache(URILink, CacheDir) 215 | 216 | # if not inService and config['schema_origin'].lower() == 'service': 217 | # traverseLogger.debug('Disallowed out of service URI ' + URILink) 218 | # return False, None, -1, 0 219 | 220 | # rs-assertion: do not send auth over http 221 | # remove UseSSL if necessary if you require unsecure auth 222 | if (not UseSSL and not config['forceauth']) or not inService or AuthType != 'Basic': 223 | auth = None 224 | 225 | # only send token when we're required to chkauth, during a Session, and on Service and Secure 226 | headers = {} 227 | headers.update(commonHeader) 228 | if not noauthchk and inService and UseSSL: 229 | my_logger.debug('successauthchk') 230 | if AuthType == 'Session': 231 | currentSession = currentService.currentSession 232 | headers.update({"X-Auth-Token": currentSession.getSessionKey()}) 233 | elif AuthType == 'Token': 234 | headers.update({"Authorization": "Bearer " + Token}) 235 | 236 | if ExtraHeaders is not None: 237 | headers.update(ExtraHeaders) 238 | 239 | certVal = ChkCertBundle if ChkCert and ChkCertBundle not in [None, ""] else ChkCert 240 | 241 | # rs-assertion: must have application/json or application/xml 242 | 
my_logger.debug('callingResourceURI {}with authtype {} and ssl {}: {} {}'.format( 243 | 'out of service ' if not inService else '', AuthType, UseSSL, URILink, headers)) 244 | response = None 245 | try: 246 | if payload is not None: # and CacheMode == 'Prefer': 247 | return True, payload, -1, 0, response 248 | response = self.session.get(URLDest, headers=headers, auth=auth, verify=certVal, timeout=timeout) # only proxy non-service 249 | expCode = [200] 250 | elapsed = response.elapsed.total_seconds() 251 | statusCode = response.status_code 252 | my_logger.debug('{}, {}, {},\nTIME ELAPSED: {}'.format(statusCode, expCode, response.headers, elapsed)) 253 | if statusCode in expCode: 254 | contenttype = response.headers.get('content-type') 255 | if contenttype is None: 256 | my_logger.error("Missing ContentType Error: Content-type not found in header: {}".format(URILink)) 257 | contenttype = '' 258 | if 'application/json' in contenttype: 259 | my_logger.debug("This is a JSON response") 260 | decoded = response.json(object_pairs_hook=OrderedDict) 261 | # navigate fragment 262 | decoded = navigateJsonFragment(decoded, URILink) 263 | if decoded is None: 264 | my_logger.error("JSON Pointer Error: The JSON pointer in the fragment of this URI is not constructed properly: {}".format(URILink)) 265 | elif 'application/xml' in contenttype: 266 | decoded = response.text 267 | elif 'text/xml' in contenttype: 268 | # non-service schemas can use "text/xml" Content-Type 269 | if inService: 270 | my_logger.warning("Response Content-Type :Warning: Incorrect content type 'text/xml' for file within service {}".format(URILink)) 271 | decoded = response.text 272 | else: 273 | my_logger.error("Redfish Response Error: {} did NOT return XML or Json contenttype, is this not a Redfish resource (is this redirected?)".format(URILink)) 274 | decoded = None 275 | if isXML: 276 | my_logger.info('Attempting to interpret as XML') 277 | decoded = response.text 278 | else: 279 | try: 280 | 
json.loads(response.text) 281 | my_logger.info('Attempting to interpret as JSON') 282 | decoded = response.json(object_pairs_hook=OrderedDict) 283 | except ValueError: 284 | pass 285 | 286 | return decoded is not None, decoded, statusCode, elapsed, response 287 | elif statusCode == 401: 288 | if inService and AuthType in ['Basic', 'Token']: 289 | if AuthType == 'Token': 290 | cred_type = 'token' 291 | else: 292 | cred_type = 'username and password' 293 | raise AuthenticationError('Error accessing URI {}. Status code "{} {}". Check {} supplied for "{}" authentication.\nAborting test due to invalid credentials.' 294 | .format(URILink, statusCode, responses[statusCode], cred_type, AuthType)) 295 | elif statusCode >= 400: 296 | # Error accessing the resource (beyond auth errors) 297 | return False, None, statusCode, elapsed, response 298 | 299 | except requests.exceptions.SSLError as e: 300 | my_logger.warning("SSLError on {}: {}".format(URILink, repr(e))) 301 | my_logger.debug("output: ", exc_info=True) 302 | except requests.exceptions.ConnectionError as e: 303 | my_logger.warning("ConnectionError on {}: {}".format(URILink, repr(e))) 304 | my_logger.debug("output: ", exc_info=True) 305 | except requests.exceptions.Timeout as e: 306 | my_logger.warning("Request has timed out ({}s) on resource {}".format(timeout, URILink)) 307 | my_logger.debug("output: ", exc_info=True) 308 | except requests.exceptions.RequestException as e: 309 | my_logger.warning("Request has encounted a problem when getting resource {}: {}".format(URILink, repr(e))) 310 | my_logger.debug("output: ", exc_info=True) 311 | except AuthenticationError as e: 312 | raise e # re-raise exception 313 | except Exception as e: 314 | my_logger.warning("A problem when getting resource {} has occurred: {}".format(URILink, repr(e))) 315 | my_logger.debug("output: ", exc_info=True) 316 | if response and response.text: 317 | my_logger.debug("payload: {}".format(response.text)) 318 | 319 | if payload is not None: 320 
| return True, payload, -1, 0, response 321 | return False, None, statusCode, elapsed, response 322 | 323 | 324 | def callResourceURI(URILink): 325 | if currentService is None: 326 | my_logger.warning("Service Setup Warning: The current service is not setup! Program must configure the service before contacting URIs") 327 | raise RuntimeError 328 | else: 329 | return currentService.callResourceURI(URILink) 330 | 331 | 332 | def createResourceObject(name, uri, jsondata=None, typename=None, context=None, parent=None, isComplex=False): 333 | """ 334 | Factory for resource object, move certain work here 335 | """ # Create json from service or from given 336 | 337 | if jsondata is None and not isComplex: 338 | success, jsondata, status, response_time, response = callResourceURI(uri) 339 | my_logger.debug('{}, {}, {}'.format(success, jsondata, status)) 340 | if not success: 341 | my_logger.error('Request Error: URI {} could not be acquired ({})'.format(uri, status)) 342 | return None, status 343 | else: 344 | success, jsondata, status, response_time, response = True, jsondata, -1, 0, None 345 | 346 | # Collect our resource header 347 | if response: 348 | my_header = response.headers 349 | elif parent and parent.headers: 350 | my_header = parent.headers 351 | else: 352 | my_header = None 353 | 354 | newResource = ResourceObj(name, uri, jsondata, typename, context, parent, isComplex, headers=my_header) 355 | 356 | newResource.rtime = response_time 357 | 358 | return newResource, status 359 | 360 | 361 | class ResourceObj: 362 | def __init__(self, name: str, uri: str, jsondata: dict, typename: str, context: str, parent=None, isComplex=False, forceType=False, headers=None): 363 | self.initiated = False 364 | self.parent = parent 365 | self.uri, self.name = uri, name 366 | self.rtime = 0 367 | self.headers = headers 368 | self.status = -1 369 | self.isRegistry = False 370 | 371 | oem = config.get('oemcheck', True) 372 | acquiredtype = typename if forceType else 
jsondata.get('@odata.type', typename) 373 | 374 | # Check if we provide a valid json 375 | self.jsondata = jsondata 376 | 377 | my_logger.debug("payload: {}".format(json.dumps(self.jsondata, indent=4, sort_keys=True))) 378 | 379 | if not isinstance(self.jsondata, dict): 380 | my_logger.error("Resource Value Error: Json Data was not a dictionary") 381 | raise ValueError('This Resource is no longer a Dictionary') 382 | 383 | # Check for @odata.id (todo: regex) 384 | odata_id = self.jsondata.get('@odata.id') 385 | if odata_id is None and not isComplex: 386 | if self.isRegistry: 387 | my_logger.debug('{}: @odata.id missing, but not required for Registry resource'.format(self.uri)) 388 | else: 389 | my_logger.error('Missing Odata.Id Error: {} does not contain @odata.id'.format(self.uri)) 390 | 391 | # Get our real type (check for version) 392 | if acquiredtype is None: 393 | my_logger.error('Missing Type Error: {} does not contain @odata.type or NavType'.format(uri)) 394 | raise ValueError 395 | if acquiredtype is not typename and isComplex: 396 | context = None 397 | 398 | if typename is not None: 399 | if not oem and 'OemObject' in typename: 400 | acquiredtype = typename 401 | 402 | if currentService: 403 | if not oem and 'OemObject' in acquiredtype: 404 | pass 405 | 406 | # Provide a context for this (todo: regex) 407 | if context is None: 408 | context = self.jsondata.get('@odata.context') 409 | if context is None: 410 | context = createContext(acquiredtype) 411 | if self.isRegistry: 412 | # If this is a Registry resource, @odata.context is not required; do our best to construct one 413 | my_logger.debug('{}: @odata.context missing from Registry resource; constructed context {}' 414 | .format(acquiredtype, context)) 415 | elif isComplex: 416 | pass 417 | else: 418 | my_logger.debug('{}: Json does not contain @odata.context'.format(uri)) 419 | 420 | self.context = context 421 | 422 | # Check if we provide a valid type (todo: regex) 423 | self.typename = acquiredtype 
424 | typename = self.typename 425 | 426 | self.initiated = True 427 | 428 | @staticmethod 429 | def checkPayloadConformance(jsondata, uri): 430 | """ 431 | checks for @odata entries and their conformance 432 | These are not checked in the normal loop 433 | """ 434 | messages = dict() 435 | decoded = jsondata 436 | success = True 437 | for key in [k for k in decoded if '@odata' in k]: 438 | paramPass = False 439 | 440 | if key == '@odata.id': 441 | paramPass = isinstance(decoded[key], str) 442 | paramPass = re.match( 443 | '(\/.*)+(#([a-zA-Z0-9_.-]*\.)+[a-zA-Z0-9_.-]*)?', decoded[key]) is not None 444 | if not paramPass: 445 | my_logger.error("Payload Conformance Error: {} {}, Expected format is /path/to/uri, but received: {}".format(uri, key, decoded[key])) 446 | else: 447 | if decoded[key] != uri: 448 | my_logger.warning("Payload Conformance Error: {} {}, Expected @odata.id to match URI link {}".format(uri, key, decoded[key])) 449 | elif key == '@odata.count': 450 | paramPass = isinstance(decoded[key], int) 451 | if not paramPass: 452 | my_logger.error("Payload Conformance Error: {} {}, Expected an integer, but received: {}".format(uri, key, decoded[key])) 453 | elif key == '@odata.context': 454 | paramPass = isinstance(decoded[key], str) 455 | paramPass = re.match( 456 | '/redfish/v1/\$metadata#([a-zA-Z0-9_.-]*\.)[a-zA-Z0-9_.-]*', decoded[key]) is not None 457 | if not paramPass: 458 | my_logger.warning("Payload Conformance Error: {} {}, Expected format is /redfish/v1/$metadata#ResourceType, but received: {}".format(uri, key, decoded[key])) 459 | messages[key] = (decoded[key], 'odata', 460 | 'Exists', 461 | 'WARN') 462 | continue 463 | elif key == '@odata.type': 464 | paramPass = isinstance(decoded[key], str) 465 | paramPass = re.match( 466 | '#([a-zA-Z0-9_.-]*\.)+[a-zA-Z0-9_.-]*', decoded[key]) is not None 467 | if not paramPass: 468 | my_logger.error("Payload Conformance Error: {} {}, Expected format is #Namespace.Type, but received: {}".format(uri, key, 
decoded[key])) 469 | else: 470 | paramPass = True 471 | 472 | success = success and paramPass 473 | 474 | messages[key] = (decoded[key], 'odata', 'Exists', 'PASS' if paramPass else 'FAIL') -------------------------------------------------------------------------------- /redfish_interop_validator/interop.py: -------------------------------------------------------------------------------- 1 | 2 | # Copyright Notice: 3 | # Copyright 2017-2025 DMTF. All rights reserved. 4 | # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Interop-Validator/blob/master/LICENSE.md 5 | 6 | import re 7 | from enum import Enum 8 | 9 | import logging 10 | from redfish_interop_validator.helper import getNamespaceUnversioned, getType, getNamespace 11 | from redfish_interop_validator.traverseInterop import callResourceURI 12 | 13 | my_logger = logging.getLogger('rsv') 14 | my_logger.setLevel(logging.DEBUG) 15 | 16 | config = {'WarnRecommended': False, 'WriteCheck': False} 17 | 18 | class testResultEnum(Enum): 19 | FAIL = 'FAIL' 20 | NOPASS = 'NO PASS' 21 | PASS = 'PASS' 22 | WARN = 'WARN' 23 | OK = 'OK' 24 | NA = 'N/A' 25 | NOT_TESTED = 'NOT TESTED' 26 | 27 | 28 | REDFISH_ABSENT = 'n/a' 29 | 30 | 31 | class msgInterop: 32 | def __init__(self, name, profile_entry, expected, actual, result): 33 | self.name = name 34 | self.entry = profile_entry 35 | self.expected = expected 36 | self.actual = actual 37 | self.ignore = False 38 | if isinstance(result, bool): 39 | self.result = testResultEnum.PASS if result else testResultEnum.FAIL 40 | else: 41 | self.result = result 42 | self.parent_results = None 43 | 44 | 45 | def validateComparisonAnyOfAllOf(profile_entry, property_path="Unspecified"): 46 | """ 47 | Gather comparison information after processing all Resources on system 48 | """ 49 | all_msgs = [] 50 | for key in profile_entry: 51 | property_profile = profile_entry[key] 52 | my_compare = property_profile.get('Comparison', 'AnyOf') 53 | 54 | if 
property_profile.get('Values') and my_compare in ['AnyOf', 'AllOf']: 55 | my_msgs = property_profile.get('_msgs', []) 56 | my_values, expected_values = [m.actual for m in my_msgs], property_profile['Values'] 57 | 58 | my_logger.info('Validating {} Comparison for {} : {}'.format(my_compare, property_path, key)) 59 | my_logger.info(" {}, Expecting {}".format(my_values, expected_values)) 60 | 61 | if not len(my_msgs) and property_profile.get('ReadRequirement', 'Mandatory') != 'Mandatory': 62 | continue 63 | 64 | msg_name = 'Comparison.{}.{}'.format(property_path, key) 65 | 66 | top_msg = msgInterop(msg_name, my_compare, expected_values, my_values, False) 67 | all_msgs.append(top_msg) 68 | 69 | # OK if value does not contribute 70 | # PASS if value contributes 71 | for msg in my_msgs: 72 | msg.ignore = False 73 | msg.result = testResultEnum.OK 74 | msg.expected = '{} {} ({})'.format(msg.expected, expected_values, "Across All Resources") 75 | 76 | if my_compare == 'AnyOf': 77 | if any([x in my_values for x in expected_values]): 78 | my_logger.info(' PASS') 79 | top_msg.result = testResultEnum.PASS 80 | for msg in my_msgs: 81 | if msg.actual in expected_values: 82 | msg.result = testResultEnum.PASS 83 | else: 84 | my_logger.info(' FAIL') 85 | 86 | if my_compare == 'AllOf': 87 | if all([x in my_values for x in expected_values]): 88 | my_logger.info(' PASS') 89 | top_msg.result = testResultEnum.PASS 90 | for msg in my_msgs: 91 | if msg.actual in expected_values: 92 | msg.result = testResultEnum.PASS 93 | else: 94 | my_logger.info(' FAIL') 95 | for msg in my_msgs: 96 | if msg.actual in expected_values: 97 | msg.result = testResultEnum.PASS 98 | 99 | if property_profile.get('PropertyRequirements'): 100 | new_msgs = validateComparisonAnyOfAllOf(property_profile.get('PropertyRequirements'), '.'.join([property_path, key])) 101 | all_msgs.extend(new_msgs) 102 | 103 | return all_msgs 104 | 105 | 106 | def validateRequirementResource(profile_entry, rf_payload_item=None, 
parent_object_tuple=None): 107 | """ 108 | Validates Requirement profile_entry for Resources 109 | 110 | Has different resolutions than when validating at a per Property level 111 | """ 112 | resource_exists = (rf_payload_item != REDFISH_ABSENT) 113 | my_logger.debug('Testing Resource ReadRequirement \n\texpected:' + str(profile_entry) + ', exists: ' + str(resource_exists)) 114 | 115 | original_profile_entry = profile_entry 116 | 117 | if profile_entry == "IfPopulated": 118 | profile_entry = "Recommended" 119 | 120 | paramPass = profile_entry != "Mandatory" or \ 121 | profile_entry == "Mandatory" and resource_exists 122 | 123 | if profile_entry == "IfImplemented": 124 | if not resource_exists: 125 | paramPass = testResultEnum.NA 126 | else: 127 | my_logger.debug('\tItem cannot be tested for Implementation') 128 | 129 | if profile_entry == "Recommended" and not resource_exists: 130 | my_logger.info('\tItem is recommended but does not exist') 131 | if config['WarnRecommended']: 132 | my_logger.warning('Recommended Resource Warning: Item is recommended but does not exist, escalating to WARN') 133 | paramPass = testResultEnum.WARN 134 | else: 135 | paramPass = testResultEnum.NA 136 | 137 | my_logger.debug('\tpass ' + str(paramPass)) 138 | return msgInterop('ReadRequirement', original_profile_entry, 'Must Exist' if profile_entry == "Mandatory" else 'Any', 'Exists' if resource_exists else 'DNE', paramPass),\ 139 | paramPass 140 | 141 | 142 | def validateRequirement(profile_entry, rf_payload_item=None, conditional=False, parent_object_tuple=None): 143 | """ 144 | Validates Requirement profile_entry 145 | 146 | By default, only the first parameter is necessary and will always Pass if none given 147 | """ 148 | prop_exists = (rf_payload_item != REDFISH_ABSENT) 149 | my_logger.debug('Testing ReadRequirement \n\texpected:' + str(profile_entry) + ', exists: ' + str(prop_exists)) 150 | # If we're not mandatory, pass automatically, else fail 151 | # However, we have other 
entries "IfImplemented" and "Conditional" 152 | # note: Mandatory is default!! if present in the profile. Make sure this is made sure. 153 | # For DNE entries "IfImplemented" and "Recommended" result with not applicable 154 | original_profile_entry = profile_entry 155 | 156 | if profile_entry == "IfPopulated": 157 | my_status = 'Enabled' 158 | if parent_object_tuple: 159 | my_state = parent_object_tuple[0].get('Status') 160 | my_status = my_state.get('State') if my_state else my_status 161 | if my_status != 'Absent': 162 | profile_entry = 'Mandatory' 163 | else: 164 | profile_entry = 'Recommended' 165 | 166 | if profile_entry == "Conditional" and conditional: 167 | profile_entry = "Mandatory" 168 | 169 | paramPass = profile_entry != "Mandatory" or \ 170 | profile_entry == "Mandatory" and prop_exists 171 | 172 | if profile_entry == "IfImplemented": 173 | if not prop_exists: 174 | paramPass = testResultEnum.NA 175 | else: 176 | my_logger.debug('\tItem cannot be tested for Implementation') 177 | 178 | if profile_entry == "Recommended" and not prop_exists: 179 | my_logger.info('\tItem is recommended but does not exist') 180 | if config['WarnRecommended']: 181 | my_logger.warning('Recommended Item Warning: Item is recommended but does not exist, escalating to WARN') 182 | paramPass = testResultEnum.WARN 183 | else: 184 | paramPass = testResultEnum.NA 185 | 186 | my_logger.debug('\tpass ' + str(paramPass)) 187 | return msgInterop('ReadRequirement', original_profile_entry, 'Must Exist' if profile_entry == "Mandatory" else 'Any', 'Exists' if prop_exists else 'DNE', paramPass),\ 188 | paramPass 189 | 190 | 191 | def isPropertyValid(profilePropName, rObj): 192 | for prop in rObj.getResourceProperties(): 193 | if profilePropName == prop.propChild: 194 | return None, True 195 | my_logger.error('Resource Schema Error: {} - Does not exist in ResourceType Schema, please consult profile provided'.format(profilePropName)) 196 | return msgInterop('PropertyValidity', profilePropName, 
'Should Exist', 'in ResourceType Schema', False), False 197 | 198 | 199 | def validateMinCount(alist, length, annotation=0): 200 | """ 201 | Validates Mincount annotation 202 | """ 203 | my_logger.debug('Testing minCount \n\texpected:' + str(length) + ', val:' + str(annotation)) 204 | paramPass = len(alist) >= length or annotation >= length 205 | my_logger.debug('\tpass ' + str(paramPass)) 206 | return msgInterop('MinCount', length, '<=', annotation if annotation > len(alist) else len(alist), paramPass),\ 207 | paramPass 208 | 209 | 210 | def validateSupportedValues(enumlist, annotation): 211 | """ 212 | Validates SupportedVals annotation 213 | """ 214 | my_logger.debug('Testing supportedValues \n\t:' + str(enumlist) + ', exists:' + str(annotation)) 215 | paramPass = True 216 | for item in enumlist: 217 | paramPass = item in annotation 218 | if not paramPass: 219 | break 220 | my_logger.debug('\tpass ' + str(paramPass)) 221 | return msgInterop('SupportedValues', enumlist, 'included in...', annotation, paramPass),\ 222 | paramPass 223 | 224 | 225 | def findPropItemforString(propObj, itemname): 226 | """ 227 | Finds an appropriate object for an item 228 | """ 229 | for prop in propObj.getResourceProperties(): 230 | rf_payloadName = prop.name.split(':')[-1] 231 | if itemname == rf_payloadName: 232 | return prop 233 | return None 234 | 235 | 236 | def validateWriteRequirement(profile_entry, parent_object_payload, resource_headers, item_name): 237 | """ 238 | Validates if a property is WriteRequirement or not 239 | """ 240 | my_logger.verbose1('Is property writeable \n\t' + str(profile_entry)) 241 | 242 | if profile_entry == "Mandatory" or profile_entry == "Supported": 243 | result_not_supported = testResultEnum.FAIL 244 | expected_str = "Must Be Writable" 245 | elif profile_entry == "Recommended": 246 | if config['WarnRecommended']: 247 | result_not_supported = testResultEnum.WARN 248 | else: 249 | result_not_supported = testResultEnum.NA 250 | expected_str = 
"Recommended" 251 | else: 252 | result_not_supported = testResultEnum.NA 253 | expected_str = "Any" 254 | 255 | # Check for Allow header, warn if missing 256 | if resource_headers and 'Allow' in resource_headers: 257 | writeable = 'PATCH' in resource_headers['Allow'] 258 | if not writeable: 259 | if profile_entry == "Mandatory": 260 | my_logger.error('Allow Header Error: PATCH in Allow header not available, property is not writeable ' + str(profile_entry)) 261 | return msgInterop('WriteRequirement', profile_entry, expected_str, 'PATCH not supported', result_not_supported), True 262 | else: 263 | my_logger.warning('Allow Header Warning: Unable to test writeable property, no Allow header available ' + str(profile_entry)) 264 | return msgInterop('WriteRequirement', profile_entry, expected_str, 'No Allow response header', testResultEnum.NOT_TESTED), True 265 | 266 | redfish_payload, _ = parent_object_payload 267 | 268 | # Get Writeable Properties 269 | if '@Redfish.WriteableProperties' not in redfish_payload: 270 | my_logger.warning('WriteableProperties Warning: Unable to test writeable property, no @Redfish.WriteableProperties available at the property level ' + str(profile_entry)) 271 | return msgInterop('WriteRequirement', profile_entry, expected_str, '@Redfish.WriteableProperties not in response', testResultEnum.NOT_TESTED), True 272 | 273 | writeable_properties = redfish_payload['@Redfish.WriteableProperties'] 274 | if not isinstance(writeable_properties, list): 275 | my_logger.warning('WriteableProperties Warning: Unable to test writeable property, @Redfish.WriteableProperties is not an array ' + str(profile_entry)) 276 | return msgInterop('WriteRequirement', profile_entry, expected_str, '@Redfish.WriteableProperties not an array', testResultEnum.WARN), True 277 | 278 | is_writeable = item_name in writeable_properties 279 | 280 | return msgInterop('WriteRequirement', profile_entry, expected_str, 'Writable' if is_writeable else 'Not Writable', 281 | 
testResultEnum.PASS if is_writeable else result_not_supported), True 282 | 283 | 284 | def checkComparison(val, compareType, target): 285 | """ 286 | Validate a given comparison option, given a value and a target set 287 | """ 288 | my_logger.verbose1('Testing a comparison \n\t' + str((val, compareType, target))) 289 | vallist = val if isinstance(val, list) else [val] 290 | paramPass = False 291 | if compareType is None: 292 | my_logger.error('CompareType Profile Error: CompareType not available in profile or missing') 293 | 294 | # NOTE: In our current usage, AnyOf and AllOf in this context is only for ConditionalRequirements -> CompareProperty 295 | # Which checks if a particular property inside of this instance applies 296 | if compareType == "AnyOf": 297 | for item in vallist: 298 | paramPass = item in target 299 | if paramPass: 300 | break 301 | else: 302 | continue 303 | 304 | if compareType == "AllOf": 305 | alltarget = set() 306 | for item in vallist: 307 | paramPass = item in target and item not in alltarget 308 | if paramPass: 309 | alltarget.add(item) 310 | if len(alltarget) == len(target): 311 | break 312 | else: 313 | continue 314 | paramPass = len(alltarget) == len(target) 315 | 316 | if compareType == "LinkToResource": 317 | if val == REDFISH_ABSENT: 318 | paramPass = False 319 | else: 320 | vallink = val.get('@odata.id') 321 | success, rf_payload, code, elapsed, _ = callResourceURI(vallink) 322 | if success: 323 | ourType = rf_payload.get('@odata.type') 324 | if ourType is not None: 325 | SchemaType = getType(ourType) 326 | paramPass = SchemaType in target 327 | else: 328 | paramPass = False 329 | else: 330 | paramPass = False 331 | 332 | if compareType == "Absent": 333 | paramPass = val == REDFISH_ABSENT 334 | if compareType == "Present": 335 | paramPass = val != REDFISH_ABSENT 336 | 337 | if isinstance(target, list) and val != REDFISH_ABSENT: 338 | if compareType == "Equal": 339 | paramPass = val in target 340 | elif compareType == "NotEqual": 341 
| paramPass = val not in target 342 | else: 343 | for value in target: 344 | if compareType == "GreaterThan": 345 | paramPass = val > value 346 | if compareType == "GreaterThanOrEqual": 347 | paramPass = val >= value 348 | if compareType == "LessThan": 349 | paramPass = val < value 350 | if compareType == "LessThanOrEqual": 351 | paramPass = val <= value 352 | if paramPass is False: 353 | break 354 | elif compareType in ["Equal", "NotEqual", "GreaterThan", "GreaterThanOrEqual", "LessThan", "LessThanOrEqual"]: 355 | if not isinstance(target, list): 356 | my_logger.warning('CompareType Profile Warning: CompareType {} requires a list of values'.format(compareType)) 357 | 358 | my_logger.debug('\tpass ' + str(paramPass)) 359 | return msgInterop('Comparison', target, compareType, val, paramPass),\ 360 | paramPass 361 | 362 | 363 | def validateMinVersion(version, profile_entry): 364 | """ 365 | Checks for the minimum version of a resource's type 366 | """ 367 | my_logger.debug('Testing minVersion \n\t' + str((version, profile_entry))) 368 | # If version doesn't contain version as is, try it as v#_#_# 369 | profile_entry_split = profile_entry.split('.') 370 | # get version from payload 371 | if(re.match('#([a-zA-Z0-9_.-]*\.)+[a-zA-Z0-9_.-]*', version) is not None): 372 | v_payload = getNamespace(version).split('.', 1)[-1] 373 | v_payload = v_payload.replace('v', '') 374 | if ('_' in v_payload): 375 | payload_split = v_payload.split('_') 376 | else: 377 | payload_split = v_payload.split('.') 378 | else: 379 | payload_split = version.split('.') 380 | 381 | paramPass = True 382 | for a, b in zip(profile_entry_split, payload_split): 383 | if b.isnumeric() and a.isnumeric() and b is not None and a is not None: 384 | b = int(b) 385 | a = int(a) 386 | else: 387 | b = 0 if b is None else b 388 | a = 0 if a is None else b 389 | if type(b) is not type(a): 390 | break 391 | if (b > a): 392 | break 393 | if (b < a): 394 | paramPass = False 395 | break 396 | 397 | # use string 
comparison, given version numbering is accurate to regex 398 | my_logger.debug('\tpass ' + str(paramPass)) 399 | return msgInterop('MinVersion', profile_entry, '<=', version, paramPass),\ 400 | paramPass 401 | 402 | 403 | def checkConditionalRequirement(propResourceObj, profile_entry, rf_payload_tuple): 404 | """ 405 | Returns boolean if profile_entry's conditional is true or false 406 | """ 407 | my_logger.debug('Evaluating conditionalRequirements') 408 | if "SubordinateToResource" in profile_entry: 409 | isSubordinate = False 410 | # iterate through parents via resourceObj 411 | # list must be reversed to work backwards 412 | resourceParent = propResourceObj.parent 413 | for expectedParent in reversed(profile_entry["SubordinateToResource"]): 414 | if resourceParent is not None: 415 | parentType = getType(resourceParent.jsondata.get('@odata.type', 'NoType')) 416 | isSubordinate = parentType == expectedParent 417 | my_logger.debug('\tsubordinance ' + 418 | str(parentType) + ' ' + str(isSubordinate)) 419 | resourceParent = resourceParent.parent 420 | else: 421 | my_logger.debug('no parent') 422 | isSubordinate = False 423 | return isSubordinate 424 | elif "CompareProperty" in profile_entry: 425 | # find property in json payload by working backwards thru objects 426 | # rf_payload tuple is designed just for this piece, since there is 427 | # no parent in dictionaries 428 | if profile_entry["CompareProperty"][0] == '/': 429 | comparePropNames = profile_entry["CompareProperty"].split('/')[1:] 430 | else: 431 | comparePropNames = [profile_entry["CompareProperty"]] 432 | if "CompareType" not in profile_entry: 433 | my_logger.error("Invalid Profile Error: CompareType is required for CompareProperty but not found") 434 | raise ValueError('CompareType missing with CompareProperty') 435 | if "CompareValues" not in profile_entry and profile_entry['CompareType'] not in ['Absent', 'Present']: 436 | my_logger.error("Invalid Profile Error: CompareValues is required for 
CompareProperty but not found") 437 | raise ValueError('CompareValues missing with CompareProperty') 438 | if "CompareValues" in profile_entry and profile_entry['CompareType'] in ['Absent', 'Present']: 439 | my_logger.warning("Invalid Profile Warning: CompareValues found, but is not required for CompareProperty Absent or Present ") 440 | 441 | rf_payload_item, rf_payload = rf_payload_tuple 442 | while rf_payload is not None and (not isinstance(rf_payload_item, dict) or comparePropNames[0] not in rf_payload_item): 443 | rf_payload_item, rf_payload = rf_payload 444 | 445 | if rf_payload_item is None: 446 | my_logger.error('CompareProperty Error: Could not acquire expected CompareProperty {}, Profile path did not resolve correctly or property does not exist'.format(comparePropNames[0])) 447 | return False 448 | 449 | compareProp = rf_payload_item.get(comparePropNames[0], REDFISH_ABSENT) 450 | if (compareProp != REDFISH_ABSENT) and len(comparePropNames) > 1: 451 | for comparePropName in comparePropNames[1:]: 452 | compareProp = compareProp.get(comparePropName, REDFISH_ABSENT) 453 | if compareProp == REDFISH_ABSENT: 454 | break 455 | # compatability with old version, deprecate with versioning 456 | compareType = profile_entry.get("CompareType", profile_entry.get("Comparison")) 457 | return checkComparison(compareProp, compareType, profile_entry.get("CompareValues", []))[1] 458 | else: 459 | my_logger.error("Invalid Profile Error: No conditional given for Comparison") 460 | raise ValueError('No conditional given for Comparison') 461 | 462 | 463 | def find_key_in_payload(path_to_key, redfish_parent_payload): 464 | """ 465 | Finds a key in the payload tuple provided 466 | 467 | :param path_to_key: Single key name or RFC6901 JSON Pointer 468 | :param redfish_parent_payload: Payload Tuple (payload, parent_payload) 469 | :return: True if exist, False otherwise 470 | :rtype: boolean 471 | """ 472 | # Profile entry is a path 473 | key_exists = False 474 | if path_to_key[0] == 
'/': 475 | # Generate RFC6901 Json Pointer 476 | replaced_by_property_path = path_to_key.split('/')[1:] 477 | # Get our complete payload 478 | my_parent_payload = redfish_parent_payload 479 | while my_parent_payload is not None: 480 | current_target, my_parent_payload = my_parent_payload 481 | 482 | key_exists = True 483 | for key in replaced_by_property_path: 484 | if isinstance(current_target, dict) and current_target.get(key) is not None: 485 | current_target = current_target[key] 486 | continue 487 | else: 488 | key_exists = False 489 | # Profile entry is a single variable 490 | else: 491 | replaced_by_property_name = path_to_key 492 | current_target, my_parent_payload = redfish_parent_payload 493 | if current_target.get(replaced_by_property_name) is not None: 494 | key_exists = True 495 | else: 496 | key_exists = False 497 | return key_exists 498 | 499 | 500 | def validatePropertyRequirement(propResourceObj, profile_entry, rf_payload_tuple, item_name): 501 | """ 502 | Validate PropertyRequirements 503 | """ 504 | msgs = [] 505 | 506 | # TODO: Change rf_payload_tuple to a more natural implementation (like an object) 507 | redfish_value, redfish_parent_payload = rf_payload_tuple 508 | 509 | if profile_entry is None or len(profile_entry) == 0: 510 | my_logger.debug('there are no requirements for this prop') 511 | else: 512 | my_logger.debug('propRequirement with value: ' + str(redfish_value if not isinstance(redfish_value, dict) else 'dict')) 513 | 514 | if "ReplacesProperty" in profile_entry and redfish_value == REDFISH_ABSENT: 515 | my_path_entry = profile_entry.get("ReplacesProperty") 516 | replacement_property_exists = find_key_in_payload(my_path_entry, redfish_parent_payload) 517 | 518 | new_msg = msgInterop("{}.{}".format(item_name, "ReplacesProperty"), profile_entry["ReplacesProperty"], "-", 519 | "Exists" if replacement_property_exists else "DNE", testResultEnum.WARN if replacement_property_exists else testResultEnum.OK) 520 | msgs.append(new_msg) 521 | 
if replacement_property_exists: 522 | my_logger.warning('Deprecated Property Warning: {} replaces deprecated property {}, but does not exist, service should implement {}'.format(item_name, my_path_entry, item_name)) 523 | return msgs 524 | else: 525 | if profile_entry.get('ReadRequirement', 'Mandatory'): 526 | my_logger.error('Deprecated Property Error: {}... replaced property {} does not exist, {} should be implemented'.format(item_name, my_path_entry, item_name)) 527 | 528 | if "ReplacedByProperty" in profile_entry: 529 | my_path_entry = profile_entry.get("ReplacedByProperty") 530 | replacement_property_exists = find_key_in_payload(my_path_entry, redfish_parent_payload) 531 | 532 | new_msg = msgInterop("{}.{}".format(item_name, "ReplacedByProperty"), profile_entry["ReplacedByProperty"], "-", 533 | "Exists" if replacement_property_exists else "DNE", testResultEnum.PASS if replacement_property_exists else testResultEnum.OK) 534 | msgs.append(new_msg) 535 | if replacement_property_exists: 536 | my_logger.info('{}: Replacement property exists, step out of validating'.format(item_name)) 537 | return msgs 538 | else: 539 | my_logger.info('{}: Replacement property does not exist, continue validating'.format(item_name)) 540 | 541 | # Check the conditional requirements first or the requirements won't apply correctly against 542 | # a list. 
543 | if "ConditionalRequirements" in profile_entry: 544 | innerList = profile_entry["ConditionalRequirements"] 545 | for item in innerList: 546 | try: 547 | if checkConditionalRequirement(propResourceObj, item, rf_payload_tuple): 548 | my_logger.info("\tCondition DOES apply") 549 | conditionalMsgs = validatePropertyRequirement( 550 | propResourceObj, item, rf_payload_tuple, item_name) 551 | for item in conditionalMsgs: 552 | item.name = item.name.replace('.', '.Conditional.', 1) 553 | msgs.extend(conditionalMsgs) 554 | else: 555 | my_logger.info("\tCondition does not apply") 556 | except ValueError as e: 557 | my_logger.info("\tCondition was skipped due to payload error") 558 | # counts['errorProfileComparisonError'] += 1 559 | 560 | # If we're working with a list, then consider MinCount, Comparisons, then execute on each item 561 | # list based comparisons include AnyOf and AllOf 562 | if isinstance(redfish_value, list): 563 | my_logger.debug("inside of a list: " + item_name) 564 | if "MinCount" in profile_entry: 565 | msg, success = validateMinCount(redfish_value, profile_entry["MinCount"], 566 | redfish_parent_payload[0].get(item_name.split('.')[-1] + '@odata.count', 0)) 567 | if not success: 568 | my_logger.error("MinCount Error: Number of elements less than expected") 569 | msgs.append(msg) 570 | msg.name = item_name + '.' + msg.name 571 | cnt = 0 572 | for item in redfish_value: 573 | listmsgs = validatePropertyRequirement( 574 | propResourceObj, profile_entry, (item, redfish_parent_payload), item_name + '#' + str(cnt)) 575 | msgs.extend(listmsgs) 576 | cnt += 1 577 | 578 | else: 579 | # consider requirement before anything else 580 | # problem: if dne, skip? 
581 | 582 | # Read Requirement is default mandatory if not present 583 | requirement_entry = profile_entry.get('ReadRequirement', 'Mandatory') 584 | msg, requirement_success = validateRequirement(requirement_entry, redfish_value, parent_object_tuple=redfish_parent_payload) 585 | msgs.append(msg) 586 | msg.name = item_name + '.' + msg.name 587 | if not requirement_success: 588 | my_logger.error("Read Requirement Error: Property '{}' not found.".format(item_name)) 589 | 590 | if "WriteRequirement" in profile_entry: 591 | headers = propResourceObj.headers 592 | msg, success = validateWriteRequirement(profile_entry.get('WriteRequirement', 'Mandatory'), redfish_parent_payload, headers, item_name) 593 | msgs.append(msg) 594 | msg.name = item_name + '.' + msg.name 595 | if not success: 596 | my_logger.error("Write Requirement Error: Property '{}' not writable.".format(item_name)) 597 | 598 | if "MinSupportValues" in profile_entry: 599 | msg, success = validateSupportedValues( 600 | profile_entry["MinSupportValues"], 601 | redfish_parent_payload[0].get(item_name.split('.')[-1] + '@Redfish.AllowableValues', [])) 602 | msgs.append(msg) 603 | msg.name = item_name + '.' + msg.name 604 | if not success: 605 | my_logger.error("Supported Values Error: Property '{}' does not support all required values.".format(item_name)) 606 | 607 | if "Values" in profile_entry: 608 | # Default to AnyOf 609 | 610 | my_compare = profile_entry.get("Comparison", "AnyOf") 611 | my_values = profile_entry.get("Values", []) 612 | # If absent and not comparing for absence... 613 | if redfish_value == REDFISH_ABSENT and my_compare not in ['Absent', 'Present']: 614 | msg, success = msgInterop('Comparison', my_values, my_compare, REDFISH_ABSENT, testResultEnum.NOT_TESTED), True 615 | else: 616 | msg, success = checkComparison(redfish_value, my_compare, my_values) 617 | msgs.append(msg) 618 | msg.name = item_name + '.' 
+ msg.name 619 | 620 | # Embed test results into profile, going forward seems to be the quick option outside of making a proper test object 621 | if my_compare in ['AnyOf', 'AllOf']: 622 | msg.ignore = True 623 | if not profile_entry.get('_msgs'): 624 | profile_entry['_msgs'] = [] 625 | profile_entry['_msgs'].append(msg) 626 | elif not success: 627 | my_logger.error("Comparison Error: Expected {} of {}".format(my_compare, my_values)) 628 | 629 | if "PropertyRequirements" in profile_entry: 630 | innerDict = profile_entry["PropertyRequirements"] 631 | if isinstance(redfish_value, dict): 632 | for item in innerDict: 633 | my_logger.debug('inside complex ' + item_name + '.' + item) 634 | complexMsgs = validatePropertyRequirement( 635 | propResourceObj, innerDict[item], (redfish_value.get(item, REDFISH_ABSENT), rf_payload_tuple), item) 636 | msgs.extend(complexMsgs) 637 | else: 638 | my_logger.info('complex {} is missing or not a dictionary'.format(item_name)) 639 | return msgs 640 | 641 | 642 | def validateActionRequirement(profile_entry, rf_payload_tuple, actionname): 643 | """ 644 | Validate Requirements for one action 645 | """ 646 | rf_payload_item, rf_payload = rf_payload_tuple 647 | rf_payload_action = None 648 | msgs = [] 649 | my_logger.verbose1('actionRequirement \n\tval: ' + str(rf_payload_item if not isinstance( 650 | rf_payload_item, dict) else 'dict') + ' ' + str(profile_entry)) 651 | 652 | action_readrequirement = profile_entry.get('ReadRequirement', "Mandatory") 653 | actioninfo_requirement = profile_entry.get('ActionInfo', "None") 654 | 655 | if "ReadRequirement" in profile_entry: 656 | # problem: if dne, skip 657 | msg, success = validateRequirement(action_readrequirement, rf_payload_item) 658 | msgs.append(msg) 659 | msg.name = actionname + '.' 
+ msg.name 660 | msg.result = testResultEnum.PASS if success else testResultEnum.FAIL 661 | 662 | propDoesNotExist = (rf_payload_item == REDFISH_ABSENT) 663 | if propDoesNotExist: 664 | return msgs 665 | 666 | if "@Redfish.ActionInfo" in rf_payload_item: 667 | vallink = rf_payload_item['@Redfish.ActionInfo'] 668 | success, rf_payload_action, code, elapsed, _ = callResourceURI(vallink) 669 | if not success: 670 | rf_payload_action = None 671 | 672 | if 'ActionInfo' in profile_entry and actioninfo_requirement in ["None"]: 673 | # Create message if None is explicitly listed in the profile 674 | msg = msgInterop('ActionInfo', 'None', '-', '-', testResultEnum.OK) 675 | msg.name = actionname + '.' + msg.name 676 | msgs.append(msg) 677 | 678 | if actioninfo_requirement not in ["None"]: 679 | if propDoesNotExist: 680 | # not tested if action isn't present 681 | msg = msgInterop('ActionInfo', actioninfo_requirement, '-', '-', testResultEnum.NOT_TESTED) 682 | 683 | elif actioninfo_requirement == "Mandatory": 684 | if rf_payload_action is None: 685 | if "@Redfish.ActionInfo" in rf_payload_item: 686 | my_logger.error('ActionInfo Payload Error: Mandatory @Redfish.ActionInfo for {} listed on action but URI get was not successful'.format(actionname)) 687 | else: 688 | my_logger.error('ActionInfo Payload Error: @Redfish.ActionInfo for {} not listed, but is Mandatory'.format(actionname)) 689 | msg = msgInterop('ActionInfo', actioninfo_requirement, '-', '-', testResultEnum.FAIL) 690 | else: 691 | msg = msgInterop('ActionInfo', actioninfo_requirement, '-', '-', testResultEnum.PASS) 692 | 693 | elif actioninfo_requirement == "Recommended": 694 | if rf_payload_action is None: 695 | if "@Redfish.ActionInfo" in rf_payload_item: 696 | my_logger.warn('ActionInfo Payload Warning: Recommended @Redfish.ActionInfo for {} listed on action but URI get was not successful'.format(actionname)) 697 | msg = msgInterop('ActionInfo', actioninfo_requirement, '-', '-', testResultEnum.WARN) 698 | else: 
699 | my_logger.info('ActionInfo Payload Warning: Recommended @Redfish.ActionInfo for {} not listed'.format(actionname)) 700 | msg = msgInterop('ActionInfo', actioninfo_requirement, '-', '-', testResultEnum.PASS) 701 | else: 702 | msg = msgInterop('ActionInfo', actioninfo_requirement, '-', '-', testResultEnum.PASS) 703 | 704 | else: 705 | my_logger.warning('ActionInfo Profile Warning: Term "ActionInfo" has unknown value {}'.format(actioninfo_requirement)) 706 | msg = msgInterop('ActionInfo', actioninfo_requirement, '-', '-', testResultEnum.WARN) 707 | msg.name = actionname + '.' + msg.name 708 | msgs.append(msg) 709 | 710 | # problem: if dne, skip 711 | if "Parameters" in profile_entry: 712 | parameter_dictionary = profile_entry["Parameters"] 713 | # problem: if dne, skip 714 | # assume mandatory 715 | for param in parameter_dictionary: 716 | item = parameter_dictionary[param] 717 | # Get Allowable Values for parameter 718 | values_array = None 719 | # If our action info exists at all, prefer it 720 | if rf_payload_action is not None: 721 | parameter_by_name = rf_payload_action['Parameters'] 722 | my_parameter = [x for x in parameter_by_name if x['Name'] == param] 723 | if my_parameter: 724 | values_array = my_parameter[0].get('AllowableValues') 725 | # Otherwise check for AllowableValues as additional property 726 | if values_array is None: 727 | values_array = rf_payload_item.get(str(param) + '@Redfish.AllowableValues', REDFISH_ABSENT) 728 | if values_array == REDFISH_ABSENT: 729 | my_logger.warning('Missing ActionInfo Warning: No such ActionInfo exists for this Action, and no AllowableValues exists. 
Cannot validate the following parameters: {}'.format(param)) 730 | msg = msgInterop('', item, '-', '-', testResultEnum.WARN) 731 | msg.name = "{}.{}.{}".format(actionname, param, msg.name) 732 | msgs.append(msg) 733 | else: 734 | msg, success = validateRequirement(item.get('ReadRequirement', "Mandatory"), values_array) 735 | msgs.append(msg) 736 | msg.name = "{}.{}.{}".format(actionname, param, msg.name) 737 | if "ParameterValues" in item: 738 | msg, success = validateSupportedValues( 739 | item["ParameterValues"], values_array) 740 | msgs.append(msg) 741 | msg.name = "{}.{}.{}".format(actionname, param, msg.name) 742 | if "RecommendedValues" in item: 743 | msg, success = validateSupportedValues( 744 | item["RecommendedValues"], values_array) 745 | msg.name = msg.name.replace('Supported', 'Recommended') 746 | if config['WarnRecommended'] and not success: 747 | my_logger.warning('Missing Parameters Warning: Recommended parameters do not all exist, escalating to WARN') 748 | msg.result = testResultEnum.WARN 749 | elif not success: 750 | my_logger.info('Recommended parameters do not all exist, but are not Mandatory') 751 | msg.result = testResultEnum.PASS 752 | 753 | msgs.append(msg) 754 | msg.name = "{}.{}.{}".format(actionname, param, msg.name) 755 | # consider requirement before anything else, what if action 756 | # if the action doesn't exist, you can't check parameters 757 | # if it doesn't exist, what should not be checked for action 758 | return msgs 759 | 760 | 761 | URI_ID_REGEX = '\{[A-Za-z0-9]+\}' 762 | 763 | VALID_ID_REGEX = '[A-Za-z0-9.!#$&-;=?\[\]_~]+' 764 | 765 | 766 | def compareRedfishURI(expected_uris, uri): 767 | success = False 768 | # If we have our URIs 769 | if expected_uris is not None: 770 | regex_pattern = "^{}$".format("|".join(expected_uris)) 771 | regex_pattern = re.sub(URI_ID_REGEX, VALID_ID_REGEX, regex_pattern) 772 | success = re.fullmatch(regex_pattern, uri) is not None 773 | else: 774 | success = True 775 | return success 776 | 777 | 
def checkInteropURI(r_obj, profile_entry):
    """
    Checks if the profile's URI applies to the particular resource

    :param r_obj: resource object under test (uses its .uri)
    :param profile_entry: list of URI templates from the profile
    :return: True when the resource's URI matches the profile's URIs
    """
    my_logger.debug('Testing URI \n\t' + str((r_obj.uri, profile_entry)))

    # NOTE: the payload's Id is intentionally not inspected here; the match is
    # purely on the resource URI
    return compareRedfishURI(profile_entry, r_obj.uri)


# Maps a UseCaseType term to the (Expected Type, Key) it is checked against
entry_type_table = {
    'ChassisType': ("Chassis", "ChassisType"),
    'DriveProtocol': ("Drive", "Protocol"),
    'MemoryType': ("Memory", "MemoryType"),
    'PortProtocol': ("Port", "PortProtocol"),
    'ProcessorType': ("Processor", "ProcessorType"),
}


def validateInteropResource(propResourceObj, interop_profile, rf_payload):
    """
    Base function that validates a single Interop Resource by its profile_entry

    Handles UseCases (recursing into each applicable case), URI applicability,
    MinVersion, PropertyRequirements and ActionRequirements; Create/Delete/
    UpdateResource requirements are currently skipped.
    """
    msgs = []
    my_logger.info('### Validating an InteropResource')
    my_logger.debug(str(interop_profile))
    # rf_payload_tuple provides the chain of dicts containing dicts, needed for CompareProperty
    rf_payload_tuple = (rf_payload, None)

    if "UseCases" in interop_profile:
        for use_case in interop_profile['UseCases']:
            entry_title = use_case.get("UseCaseTitle", "NoName").replace(' ', '_')
            entry_type = use_case.get("UseCaseType", "Normal")
            my_parent = propResourceObj.parent
            my_logger.debug('UseCase {} {}'.format(entry_title, entry_type))

            # Check if we have a valid UseCase
            if ('URIs' not in use_case) and ('UseCaseKeyProperty' not in use_case) and (entry_type not in ['AbsentResource'] + list(entry_type_table.keys())):
                my_logger.error('UseCase Profile Error: UseCase does not have URIs or valid UseCase...')

            if entry_type == 'AbsentResource':
                # Applies only when the resource reports Status.State == 'Absent'
                my_status = rf_payload.get('Status')
                if my_status:
                    use_case_applies = my_status.get('State') == 'Absent'
                else:
                    use_case_applies = False
                if 'URIs' in use_case:
                    use_case_applies = use_case_applies and checkInteropURI(propResourceObj, use_case['URIs'])

            elif entry_type in entry_type_table:
                target_type_found = False
                target_type, entry_key = entry_type_table[entry_type]
                entry_comparison, entry_values = use_case['UseCaseComparison'], use_case['UseCaseKeyValues']

                # Iterate until we find our target type, if not found then use case cannot apply
                while my_parent is not None and not target_type_found:
                    parent_type = getType(my_parent.jsondata.get('@odata.type', 'NoType'))
                    target_type_found = parent_type == target_type
                    if not target_type_found:
                        my_parent = my_parent.parent

                if target_type_found:
                    # Compare the key property of the found ancestor
                    target_payload = my_parent.jsondata
                    _, use_case_applies = checkComparison(target_payload.get(entry_key, REDFISH_ABSENT), entry_comparison, entry_values)
                else:
                    my_logger.verbose1('Type {} was not found in parent typechain'.format(target_type))
                    use_case_applies = False

            elif 'UseCaseKeyProperty' in use_case:
                entry_key, entry_comparison, entry_values = use_case['UseCaseKeyProperty'], use_case['UseCaseComparison'], use_case['UseCaseKeyValues']

                _, use_case_applies = checkComparison(rf_payload.get(entry_key, REDFISH_ABSENT), entry_comparison, entry_values)

                # Check if URI applies to this usecase as well
                if 'URIs' in use_case:
                    use_case_applies = checkInteropURI(propResourceObj, use_case['URIs']) and use_case_applies

            elif 'URIs' in use_case:
                use_case_applies = checkInteropURI(propResourceObj, use_case['URIs'])

            else:
                use_case_applies = False

            if use_case_applies:
                my_msg = msgInterop("UseCase.{}".format(entry_title), '-', '-', '-', testResultEnum.OK)

                msgs.append(my_msg)

                my_logger.info('Validating using UseCase {}'.format(entry_title))

                # Remove URIs so the recursive call does not re-filter on them
                new_case = {key: val for key, val in use_case.items() if key not in ['URIs']}

                new_msgs = validateInteropResource(propResourceObj, new_case, rf_payload)

                # The UseCase marker message mirrors any failure found inside it
                if any(new_msg.result == testResultEnum.FAIL for new_msg in new_msgs):
                    my_msg.result = testResultEnum.FAIL

                msgs.extend(new_msgs)

            else:
                my_logger.info('UseCase {} does not apply'.format(entry_title))

        return msgs
    if "URIs" in interop_profile:
        # Check if the profile requirements apply to this particular instance
        if not checkInteropURI(propResourceObj, interop_profile['URIs']):
            my_logger.info('Skipping resource; URI is not listed')
            return msgs
    if "MinVersion" in interop_profile:
        my_type = propResourceObj.jsondata.get('@odata.type', 'NoType')
        msg, _ = validateMinVersion(my_type, interop_profile['MinVersion'])
        msgs.append(msg)
    if "PropertyRequirements" in interop_profile:
        innerDict = interop_profile["PropertyRequirements"]
        for item in innerDict:
            # NOTE: Program no longer performs fuzzy checks for misnamed properties, since there is no schema
            my_logger.info('### Validating PropertyRequirements for {}'.format(item))
            pmsgs = validatePropertyRequirement(propResourceObj, innerDict[item], (rf_payload.get(item, REDFISH_ABSENT), rf_payload_tuple), item)
            msgs.extend(pmsgs)
    if "ActionRequirements" in interop_profile:
        innerDict = interop_profile["ActionRequirements"]
        actionsJson = rf_payload.get('Actions', {})
        rf_payloadInnerTuple = (actionsJson, rf_payload_tuple)
        for item in innerDict:
            # Actions are keyed '#Namespace.Action'; warn if the leading '#' is missing
            my_type = getNamespaceUnversioned(propResourceObj.jsondata.get('@odata.type', 'NoType'))
            actionName = my_type + '.' + item
            if actionName in actionsJson:
                my_logger.warning('ActionName Payload Warning: {} should be #{}'.format(actionName, actionName))
            else:
                actionName = '#' + my_type + '.' + item

            amsgs = validateActionRequirement(innerDict[item], (actionsJson.get(
                actionName, REDFISH_ABSENT), rf_payloadInnerTuple), actionName)
            msgs.extend(amsgs)
    if "CreateResource" in interop_profile:
        my_logger.info('Skipping CreateResource')
    if "DeleteResource" in interop_profile:
        my_logger.info('Skipping DeleteResource')
    if "UpdateResource" in interop_profile:
        my_logger.info('Skipping UpdateResource')

    return msgs