├── os_collect_config ├── __init__.py ├── tests │ ├── __init__.py │ ├── test_merger.py │ ├── test_heat_local.py │ ├── test_keystone.py │ ├── test_cache.py │ ├── test_config_drive.py │ ├── test_local.py │ ├── test_ec2.py │ ├── test_heat.py │ ├── test_request.py │ ├── test_cfn.py │ ├── test_zaqar.py │ └── test_collect.py ├── common.py ├── version.py ├── merger.py ├── heat_local.py ├── exc.py ├── cache.py ├── ec2.py ├── request.py ├── local.py ├── heat.py ├── keystone.py ├── config_drive.py ├── cfn.py ├── zaqar.py └── collect.py ├── .stestr.conf ├── pyproject.toml ├── .gitreview ├── releasenotes └── notes │ ├── remove-py38-c31bbace9fe0b2a7.yaml │ ├── messaging-v2-b4310b93ffcd1c01.yaml │ └── drop-python2.7-support-f133b716812d39f0.yaml ├── os-collect-config-and-friends.odg ├── .coveragerc ├── zuul.d └── layout.yaml ├── test-requirements.txt ├── .gitignore ├── requirements.txt ├── setup.py ├── tox.ini ├── setup.cfg ├── README.rst ├── LICENSE └── os-collect-config-and-friends.svg /os_collect_config/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /os_collect_config/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /os_collect_config/common.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | __all__ = ['requests'] 4 | -------------------------------------------------------------------------------- /.stestr.conf: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | test_path=./os_collect_config/tests 3 | top_dir=./ 4 | 5 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | 
[build-system] 2 | requires = ["pbr>=6.1.1"] 3 | build-backend = "pbr.build" 4 | -------------------------------------------------------------------------------- /.gitreview: -------------------------------------------------------------------------------- 1 | [gerrit] 2 | host=review.opendev.org 3 | port=29418 4 | project=openstack/os-collect-config.git 5 | -------------------------------------------------------------------------------- /releasenotes/notes/remove-py38-c31bbace9fe0b2a7.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | upgrade: 3 | - | 4 | Python 3.8 is no longer supported. 5 | -------------------------------------------------------------------------------- /os-collect-config-and-friends.odg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openstack/os-collect-config/HEAD/os-collect-config-and-friends.odg -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = os_collect_config 4 | omit = os_collect_config/tests/* 5 | 6 | [report] 7 | ignore_errors = True 8 | -------------------------------------------------------------------------------- /zuul.d/layout.yaml: -------------------------------------------------------------------------------- 1 | - project: 2 | templates: 3 | - check-requirements 4 | - openstack-cover-jobs 5 | - openstack-python3-jobs 6 | -------------------------------------------------------------------------------- /releasenotes/notes/messaging-v2-b4310b93ffcd1c01.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | fixes: 3 | - | 4 | Zaqar collector now uses v2 messaging API, instead of v1.1 messaging API. 
5 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | hacking>=6.1.0,<6.2.0 # Apache-2.0 2 | 3 | coverage>=4.0 # Apache-2.0 4 | fixtures>=3.0.0 # Apache-2.0/BSD 5 | stestr>=2.0.0 # Apache-2.0 6 | testtools>=2.2.0 # MIT 7 | pyflakes>=2.2.0 8 | -------------------------------------------------------------------------------- /releasenotes/notes/drop-python2.7-support-f133b716812d39f0.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | upgrade: 3 | - | 4 | Python 2.7 support has been dropped. Last release os-collect-config support 5 | py2.7 is OpenStack Train. The minimum version of Python now 6 | supported by os-collect-config is Python 3.6. 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | 3 | # C extensions 4 | *.so 5 | 6 | # Packages 7 | *.egg* 8 | dist 9 | build 10 | eggs 11 | parts 12 | bin 13 | var 14 | sdist 15 | develop-eggs 16 | .installed.cfg 17 | lib 18 | lib64 19 | 20 | # Installer logs 21 | pip-log.txt 22 | 23 | # Unit test / coverage reports 24 | .coverage 25 | cover 26 | .stestr/ 27 | .tox 28 | 29 | # Translations 30 | *.mo 31 | 32 | # Mr Developer 33 | .mr.developer.cfg 34 | .project 35 | .pydevproject 36 | 37 | # OpenStack Generated Files 38 | AUTHORS 39 | ChangeLog 40 | 41 | # Editors 42 | *~ 43 | *.swp 44 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # Requirements lower bounds listed here are our best effort to keep them up to 2 | # date but we do not test them so no guarantee of having them all correct. If 3 | # you find any incorrect lower bounds, let us know or propose a fix. 
4 | 5 | pbr>=2.0.0 # Apache-2.0 6 | 7 | python-keystoneclient>=3.8.0 # Apache-2.0 8 | python-heatclient>=1.10.0 # Apache-2.0 9 | python-zaqarclient>=1.0.0 # Apache-2.0 10 | requests>=2.14.2 # Apache-2.0 11 | lxml>=3.4.1 # BSD 12 | oslo.config>=5.2.0 # Apache-2.0 13 | oslo.log>=3.36.0 # Apache-2.0 14 | dogpile.cache>=0.6.2 # BSD 15 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import setuptools 17 | 18 | 19 | setuptools.setup( 20 | setup_requires=['pbr>=2.0.0'], 21 | pbr=True) 22 | -------------------------------------------------------------------------------- /os_collect_config/version.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 4 | # not use this file except in compliance with the License. 
You may obtain 5 | # a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | 16 | import pbr.version 17 | 18 | version_info = pbr.version.VersionInfo('os-collect-config') 19 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py3,pep8 3 | minversion = 3.18.0 4 | 5 | [testenv] 6 | usedevelop = True 7 | deps = 8 | -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} 9 | -r{toxinidir}/requirements.txt 10 | -r{toxinidir}/test-requirements.txt 11 | commands = 12 | stestr run --slowest {posargs} 13 | 14 | [testenv:pep8] 15 | commands = flake8 16 | 17 | [testenv:cover] 18 | setenv = 19 | PYTHON=coverage run --source os_collect_config --parallel-mode 20 | commands = 21 | coverage erase 22 | stestr run {posargs} 23 | coverage combine 24 | coverage html -d cover 25 | coverage xml -o cover/coverage.xml 26 | coverage report 27 | 28 | [testenv:venv] 29 | commands = {posargs} 30 | 31 | [flake8] 32 | # H904: Delay string interpolations at logging calls 33 | enable-extensions = H904 34 | # H405 multi line docstring summary not separated with an empty line 35 | # W503 line break before binary operator 36 | # W504 line break after binary operator 37 | # W605 invalid escape sequence 38 | ignore = H405,W503,W504,W605 39 | exclude = .venv,.tox,dist,doc,*.egg 40 | show-source = true 41 | -------------------------------------------------------------------------------- /setup.cfg: 
-------------------------------------------------------------------------------- 1 | [metadata] 2 | name = os-collect-config 3 | author = OpenStack 4 | author_email = openstack-discuss@lists.openstack.org 5 | summary = Collect and cache metadata, run hooks on changes. 6 | description_file = 7 | README.rst 8 | home_page = https://opendev.org/openstack/os-collect-config 9 | python_requires = >=3.9 10 | classifier = 11 | Development Status :: 4 - Beta 12 | Environment :: Console 13 | Environment :: OpenStack 14 | Intended Audience :: Developers 15 | Intended Audience :: Information Technology 16 | License :: OSI Approved :: Apache Software License 17 | Operating System :: OS Independent 18 | Programming Language :: Python 19 | Programming Language :: Python :: 3 20 | Programming Language :: Python :: 3 :: Only 21 | Programming Language :: Python :: 3.9 22 | Programming Language :: Python :: 3.10 23 | Programming Language :: Python :: 3.11 24 | Programming Language :: Python :: 3.12 25 | 26 | [files] 27 | packages = 28 | os_collect_config 29 | 30 | [entry_points] 31 | console_scripts = 32 | os-collect-config = os_collect_config.collect:main 33 | -------------------------------------------------------------------------------- /os_collect_config/merger.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 11 | # implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | 16 | from oslo_log import log 17 | 18 | 19 | logger = log.getLogger(__name__) 20 | 21 | 22 | def merged_list_from_content(final_content, deployment_keys, collector_name): 23 | final_list = [] 24 | for depkey in deployment_keys: 25 | if depkey in final_content: 26 | deployments = final_content[depkey] 27 | if not isinstance(deployments, list): 28 | logger.warning( 29 | 'Deployment-key %s was found but does not contain a ' 30 | 'list.' % (depkey,)) 31 | continue 32 | logger.debug( 33 | 'Deployment found for {}'.format(depkey)) 34 | for deployment in deployments: 35 | if 'name' not in deployment: 36 | logger.warning( 37 | 'No name found for a deployment under %s.' % 38 | (depkey,)) 39 | continue 40 | if deployment.get('group', 'Heat::Ungrouped') in ( 41 | 'os-apply-config', 'Heat::Ungrouped'): 42 | final_list.append((deployment['name'], 43 | deployment['config'])) 44 | final_list.insert(0, (collector_name, final_content)) 45 | return final_list 46 | -------------------------------------------------------------------------------- /os_collect_config/heat_local.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | import json 17 | import os 18 | 19 | from oslo_config import cfg 20 | from oslo_log import log 21 | 22 | from os_collect_config import exc 23 | 24 | HEAT_METADATA_PATH = ['/var/lib/heat-cfntools/cfn-init-data'] 25 | CONF = cfg.CONF 26 | 27 | opts = [ 28 | cfg.MultiStrOpt('path', 29 | default=HEAT_METADATA_PATH, 30 | help='Local path(s) to read for Metadata.') 31 | ] 32 | name = 'heat_local' 33 | logger = log.getLogger(__name__) 34 | 35 | 36 | class Collector: 37 | def __init__(self, requests_impl=None): 38 | pass 39 | 40 | def collect(self): 41 | final_content = None 42 | for path in cfg.CONF.heat_local.path: 43 | if os.path.exists(path): 44 | with open(path) as metadata: 45 | try: 46 | value = json.loads(metadata.read()) 47 | except ValueError as e: 48 | logger.info( 49 | '{} is not valid JSON ({})'.format(path, e)) 50 | continue 51 | if final_content: 52 | final_content.update(value) 53 | else: 54 | final_content = value 55 | if not final_content: 56 | logger.info('Local metadata not found (%s)' % 57 | cfg.CONF.heat_local.path) 58 | raise exc.HeatLocalMetadataNotAvailable 59 | return [('heat_local', final_content)] 60 | -------------------------------------------------------------------------------- /os_collect_config/exc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | 17 | class SourceNotAvailable(RuntimeError): 18 | """The requested data source is unavailable.""" 19 | 20 | 21 | class SourceNotConfigured(RuntimeError): 22 | """The requested data source is not configured.""" 23 | 24 | 25 | class Ec2MetadataNotAvailable(SourceNotAvailable): 26 | """The EC2 metadata service is not available.""" 27 | 28 | 29 | class CfnMetadataNotAvailable(SourceNotAvailable): 30 | """The cfn metadata service is not available.""" 31 | 32 | 33 | class HeatMetadataNotAvailable(SourceNotAvailable): 34 | """The heat metadata service is not available.""" 35 | 36 | 37 | class CfnMetadataNotConfigured(SourceNotConfigured): 38 | """The cfn metadata service is not fully configured.""" 39 | 40 | 41 | class HeatMetadataNotConfigured(SourceNotConfigured): 42 | """The heat metadata service is not fully configured.""" 43 | 44 | 45 | class HeatLocalMetadataNotAvailable(SourceNotAvailable): 46 | """The local Heat metadata is not available.""" 47 | 48 | 49 | class LocalMetadataNotAvailable(SourceNotAvailable): 50 | """The local metadata is not available.""" 51 | 52 | 53 | class RequestMetadataNotAvailable(SourceNotAvailable): 54 | """The request metadata is not available.""" 55 | 56 | 57 | class RequestMetadataNotConfigured(SourceNotAvailable): 58 | """The request metadata is not fully configured.""" 59 | 60 | 61 | class ZaqarMetadataNotConfigured(SourceNotConfigured): 62 | """The zaqar metadata service is not fully configured.""" 63 | 64 | 65 | class ZaqarMetadataNotAvailable(SourceNotAvailable): 66 | """The Zaqar metadata is not available.""" 67 | 68 | 69 | class InvalidArguments(ValueError): 70 | """Invalid arguments.""" 71 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | os-collect-config 3 
| ================= 4 | 5 | .. image:: https://governance.openstack.org/tc/badges/os-collect-config.svg 6 | 7 | .. Change things from this point on 8 | 9 | ------------------------------------------------- 10 | Collect configuration from cloud metadata sources 11 | ------------------------------------------------- 12 | 13 | What does it do? 14 | ================ 15 | 16 | It collects data from defined configuration sources and runs a defined 17 | hook whenever the metadata has been changed. 18 | 19 | .. image:: os-collect-config-and-friends.svg 20 | 21 | [#update_svg]_ 22 | 23 | Usage 24 | ===== 25 | 26 | You must define what sources to collect configuration data from in 27 | */etc/os-collect-config.conf*. 28 | 29 | The format of this file is:: 30 | 31 | [DEFAULT] 32 | command=os-refresh-config 33 | 34 | [cfn] 35 | metadata_url=http://192.0.2.99:8000/v1/ 36 | access_key_id = ABCDEFGHIJLMNOP01234567890 37 | secret_access_key = 01234567890ABCDEFGHIJKLMNOP 38 | path = MyResource 39 | stack_name = my.stack 40 | 41 | These sources will be polled and whenever any of them is changed, 42 | *default.command* will be run. A file will be written to the cache 43 | dir, os_config_files.json, which will be a json list of the file paths 44 | to the current copy of each metadata source. This list will also be 45 | set as a colon separated list in the environment variable 46 | *OS_CONFIG_FILES* for the command that is run. So in the example 47 | above, *os-refresh-config* would be executed with something like this 48 | in *OS_CONFIG_FILES*:: 49 | 50 | /var/lib/os-collect-config/ec2.json:/var/lib/os-collect-config/cfn.json 51 | 52 | The previous version of the metadata from a source (if available) is present at $FILENAME.last. 53 | 54 | When run without a command, the metadata sources are printed as a json document. 
55 | 56 | Quick Start 57 | =========== 58 | 59 | Install:: 60 | 61 | sudo pip install -U git+https://opendev.org/openstack/os-collect-config.git 62 | 63 | Run it on an OpenStack instance with access to ec2 metadata:: 64 | 65 | os-collect-config 66 | 67 | That should print out a json representation of the entire ec2 metadata tree. 68 | 69 | .. [#update_svg] Recommend using LibreOffice draw to edit os-collect-config-and-friends.odg and regenerate the svg file. Alternatively edit the svg directly, but remove the .odg file if that is done. 70 | -------------------------------------------------------------------------------- /os_collect_config/cache.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | """Metadata cache. 17 | 18 | Files within the cache as passed to hook commands invoked by 19 | os-collect-command. 20 | 21 | The cache also stores the last version of a file in order to detect changes 22 | that occur - hook commands are only automatically invoked when one or more 23 | metadata sources have changed things. 24 | 25 | The last version of a file is available under $FILENAME.last. 
26 | """ 27 | 28 | import json 29 | import os 30 | import shutil 31 | import tempfile 32 | 33 | from oslo_config import cfg 34 | 35 | 36 | def get_path(name): 37 | return os.path.join(cfg.CONF.cachedir, '%s.json' % name) 38 | 39 | 40 | def store(name, content): 41 | if not os.path.exists(cfg.CONF.cachedir): 42 | os.mkdir(cfg.CONF.cachedir) 43 | 44 | changed = False 45 | dest_path = get_path(name) 46 | orig_path = '%s.orig' % dest_path 47 | last_path = '%s.last' % dest_path 48 | 49 | with tempfile.NamedTemporaryFile( 50 | dir=cfg.CONF.cachedir, 51 | delete=False) as new: 52 | new.write(json.dumps(content, indent=1).encode('utf-8')) 53 | new.flush() 54 | if not os.path.exists(orig_path): 55 | shutil.copy(new.name, orig_path) 56 | changed = True 57 | os.rename(new.name, dest_path) 58 | 59 | if not changed: 60 | if os.path.exists(last_path): 61 | with open(last_path) as then: 62 | then_value = json.load(then) 63 | if then_value != content: 64 | changed = True 65 | else: 66 | changed = True 67 | return (changed, dest_path) 68 | 69 | 70 | def commit(name): 71 | dest_path = get_path(name) 72 | if os.path.exists(dest_path): 73 | shutil.copy(dest_path, '%s.last' % dest_path) 74 | 75 | 76 | def store_meta_list(name, data_keys): 77 | '''Store a json list of the files that should be present after store.''' 78 | final_list = [get_path(k) for k in data_keys] 79 | dest = get_path(name) 80 | with tempfile.NamedTemporaryFile(prefix='tmp_meta_list.', 81 | dir=os.path.dirname(dest), 82 | delete=False) as out: 83 | out.write(json.dumps(final_list).encode('utf-8')) 84 | os.rename(out.name, dest) 85 | return dest 86 | -------------------------------------------------------------------------------- /os_collect_config/ec2.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 
2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import json 17 | import os 18 | 19 | from oslo_config import cfg 20 | from oslo_log import log 21 | 22 | from os_collect_config import cache 23 | from os_collect_config import common 24 | from os_collect_config import config_drive 25 | from os_collect_config import exc 26 | 27 | EC2_METADATA_URL = 'http://169.254.169.254/latest/meta-data' 28 | CONF = cfg.CONF 29 | 30 | opts = [ 31 | cfg.URIOpt('metadata-url', 32 | default=EC2_METADATA_URL, 33 | help='URL to query for EC2 Metadata'), 34 | cfg.FloatOpt('timeout', default=10, 35 | help='Seconds to wait for the connection and read request' 36 | ' timeout.') 37 | ] 38 | name = 'ec2' 39 | 40 | 41 | class Collector: 42 | def __init__(self, requests_impl=common.requests): 43 | self._requests_impl = requests_impl 44 | self.session = requests_impl.Session() 45 | 46 | def _fetch_metadata(self, fetch_url, timeout): 47 | try: 48 | r = self.session.get(fetch_url, timeout=timeout) 49 | r.raise_for_status() 50 | except self._requests_impl.exceptions.RequestException as e: 51 | log.getLogger(__name__).warn(e) 52 | raise exc.Ec2MetadataNotAvailable 53 | content = r.text 54 | if fetch_url[-1] == '/': 55 | new_content = {} 56 | for subkey in content.split("\n"): 57 | if '=' in subkey: 58 | subkey = subkey[:subkey.index('=')] + '/' 59 | sub_fetch_url = fetch_url + subkey 60 | if subkey[-1] == '/': 61 | subkey = subkey[:-1] 62 | 
new_content[subkey] = self._fetch_metadata( 63 | sub_fetch_url, timeout) 64 | content = new_content 65 | return content 66 | 67 | def collect(self): 68 | cache_path = cache.get_path('ec2') 69 | if os.path.exists(cache_path): 70 | with open(cache_path) as f: 71 | try: 72 | metadata = json.load(f) 73 | except ValueError as e: 74 | log.getLogger(__name__).warn(e) 75 | metadata = None 76 | if metadata: 77 | return [('ec2', metadata)] 78 | 79 | md = config_drive.get_metadata() 80 | if md: 81 | return [('ec2', md)] 82 | 83 | root_url = '%s/' % (CONF.ec2.metadata_url) 84 | return [('ec2', self._fetch_metadata(root_url, CONF.ec2.timeout))] 85 | -------------------------------------------------------------------------------- /os_collect_config/tests/test_merger.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | import testtools 17 | 18 | from os_collect_config import merger 19 | 20 | 21 | META_DATA = {'int1': 1, 22 | 'strfoo': 'foo', 23 | 'map_ab': { 24 | 'a': 'apple', 25 | 'b': 'banana', 26 | }} 27 | 28 | 29 | SOFTWARE_CONFIG_DATA = { 30 | 'old-style': 'value', 31 | 'deployments': [ 32 | { 33 | 'inputs': [ 34 | { 35 | 'type': 'String', 36 | 'name': 'input1', 37 | 'value': 'value1' 38 | } 39 | ], 40 | 'group': 'Heat::Ungrouped', 41 | 'name': 'dep-name1', 42 | 'outputs': None, 43 | 'options': None, 44 | 'config': { 45 | 'config1': 'value1' 46 | } 47 | }, 48 | { 49 | 'inputs': [ 50 | { 51 | 'type': 'String', 52 | 'name': 'input1', 53 | 'value': 'value1' 54 | } 55 | ], 56 | 'group': 'os-apply-config', 57 | 'name': 'dep-name2', 58 | 'outputs': None, 59 | 'options': None, 60 | 'config': { 61 | 'config2': 'value2' 62 | } 63 | }, 64 | { 65 | 'inputs': [ 66 | { 67 | 'type': 'String', 68 | 'name': 'input1', 69 | 'value': 'value1' 70 | } 71 | ], 72 | 'name': 'dep-name3', 73 | 'outputs': None, 74 | 'options': None, 75 | 'config': { 76 | 'config3': 'value3' 77 | } 78 | }, 79 | { 80 | 'inputs': [], 81 | 'group': 'ignore_me', 82 | 'name': 'ignore_me_name', 83 | 'outputs': None, 84 | 'options': None, 85 | 'config': 'ignore_me_config' 86 | }, 87 | { 88 | 'inputs': [], # to test missing name 89 | } 90 | ] 91 | } 92 | 93 | 94 | class TestMerger(testtools.TestCase): 95 | 96 | def test_merged_list_from_content(self): 97 | req_md = merger.merged_list_from_content( 98 | SOFTWARE_CONFIG_DATA, 99 | ['deployments'], 100 | 'collectme') 101 | self.assertEqual(4, len(req_md)) 102 | self.assertEqual( 103 | SOFTWARE_CONFIG_DATA['deployments'], req_md[0][1]['deployments']) 104 | self.assertEqual( 105 | ('dep-name1', {'config1': 'value1'}), req_md[1]) 106 | self.assertEqual( 107 | ('dep-name2', {'config2': 'value2'}), req_md[2]) 108 | self.assertEqual( 109 | ('dep-name3', {'config3': 'value3'}), req_md[3]) 110 | -------------------------------------------------------------------------------- 
/os_collect_config/tests/test_heat_local.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import json 17 | import os.path 18 | import tempfile 19 | 20 | import fixtures 21 | from oslo_config import cfg 22 | import testtools 23 | from testtools import matchers 24 | 25 | from os_collect_config import collect 26 | from os_collect_config import exc 27 | from os_collect_config import heat_local 28 | 29 | 30 | META_DATA = {'localstrA': 'A', 31 | 'localint9': 9, 32 | 'localmap_xy': { 33 | 'x': 42, 34 | 'y': 'foo', 35 | }} 36 | 37 | 38 | class TestHeatLocal(testtools.TestCase): 39 | def setUp(self): 40 | super().setUp() 41 | self.log = self.useFixture(fixtures.FakeLogger()) 42 | collect.setup_conf() 43 | self.orig_cfg_CONF = cfg.CONF 44 | 45 | def tearDown(self): 46 | cfg.CONF = self.orig_cfg_CONF 47 | cfg.CONF.reset() 48 | super().tearDown() 49 | 50 | def _call_collect(self, *temp_name): 51 | cfg.CONF.heat_local.path = list(temp_name) 52 | md = heat_local.Collector().collect() 53 | self.assertEqual('heat_local', md[0][0]) 54 | return md[0][1] 55 | 56 | def test_collect_heat_local(self): 57 | with tempfile.NamedTemporaryFile() as md: 58 | md.write(json.dumps(META_DATA).encode('utf-8')) 59 | md.flush() 60 | local_md = self._call_collect(md.name) 61 | 62 | 
self.assertThat(local_md, matchers.IsInstance(dict)) 63 | 64 | for k in ('localstrA', 'localint9', 'localmap_xy'): 65 | self.assertIn(k, local_md) 66 | self.assertEqual(local_md[k], META_DATA[k]) 67 | 68 | self.assertEqual('', self.log.output) 69 | 70 | def test_collect_heat_local_twice(self): 71 | with tempfile.NamedTemporaryFile() as md: 72 | md.write(json.dumps(META_DATA).encode('utf-8')) 73 | md.flush() 74 | local_md = self._call_collect(md.name, md.name) 75 | 76 | self.assertThat(local_md, matchers.IsInstance(dict)) 77 | 78 | for k in ('localstrA', 'localint9', 'localmap_xy'): 79 | self.assertIn(k, local_md) 80 | self.assertEqual(local_md[k], META_DATA[k]) 81 | 82 | self.assertEqual('', self.log.output) 83 | 84 | def test_collect_heat_local_with_invalid_metadata(self): 85 | with tempfile.NamedTemporaryFile() as md: 86 | md.write(b"{'invalid' => 'INVALID'}") 87 | md.flush() 88 | self.assertRaises(exc.HeatLocalMetadataNotAvailable, 89 | self._call_collect, md.name) 90 | self.assertIn('Local metadata not found', self.log.output) 91 | 92 | def test_collect_ec2_nofile(self): 93 | tdir = self.useFixture(fixtures.TempDir()) 94 | test_path = os.path.join(tdir.path, 'does-not-exist.json') 95 | self.assertRaises(exc.HeatLocalMetadataNotAvailable, 96 | self._call_collect, test_path) 97 | self.assertIn('Local metadata not found', self.log.output) 98 | -------------------------------------------------------------------------------- /os_collect_config/request.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
class Collector:
    """Collects deployment metadata from a plain HTTP(S) URL.

    A HEAD request is issued before the GET so that an unchanged
    Last-Modified header can short-circuit the collection without
    re-downloading the body.
    """

    def __init__(self, requests_impl=common.requests):
        # requests_impl is injectable so tests can substitute a fake
        # requests module.
        self._requests_impl = requests_impl
        self._session = requests_impl.Session()
        # Epoch timestamp (UTC seconds) of the most recently collected
        # metadata; None until the first successful collect().
        self.last_modified = None

    def check_fetch_content(self, headers):
        '''Decide from response headers whether metadata should be fetched.

        Returns the parsed Last-Modified timestamp (epoch seconds) when
        a fetch should proceed, or None when the server sent no
        Last-Modified header.  Raises RequestMetadataNotAvailable when
        the content has not changed since the previous collection.
        '''

        # no last-modified header, so fetch
        lm = headers.get('last-modified')
        if not lm:
            return

        last_modified = calendar.timegm(
            time.strptime(lm, '%a, %d %b %Y %H:%M:%S %Z'))

        # first run, so fetch
        if not self.last_modified:
            return last_modified

        # A timestamp moving backwards is suspicious but still treated
        # as "unchanged" below; warn so operators can investigate.
        if last_modified < self.last_modified:
            logger.warning(
                'Last-Modified is older than previous collection')

        if last_modified <= self.last_modified:
            raise exc.RequestMetadataNotAvailable
        return last_modified

    def collect(self):
        """Fetch, parse, and merge metadata from the configured URL.

        Raises RequestMetadataNotConfigured when no URL is set, and
        RequestMetadataNotAvailable on transport, HTTP-status, or JSON
        parse failures (and when content is unchanged).
        """
        if CONF.request.metadata_url is None:
            logger.info('No metadata_url configured.')
            raise exc.RequestMetadataNotConfigured
        url = CONF.request.metadata_url
        timeout = CONF.request.timeout
        final_content = {}

        try:
            # HEAD first: check_fetch_content() aborts the collection
            # early when Last-Modified shows nothing has changed.
            head = self._session.head(url, timeout=timeout)
            last_modified = self.check_fetch_content(head.headers)

            content = self._session.get(url, timeout=timeout)
            content.raise_for_status()
            # Record the new timestamp only after the GET succeeded, so
            # a failed fetch is retried on the next poll.
            self.last_modified = last_modified

        except self._requests_impl.exceptions.RequestException as e:
            logger.warning(str(e))
            raise exc.RequestMetadataNotAvailable
        try:
            value = json.loads(content.text)
        except ValueError as e:
            logger.warning(
                'Failed to parse as json. (%s)' % e)
            raise exc.RequestMetadataNotAvailable
        final_content.update(value)

        # Split any deployment_key entries out into separate configs.
        final_list = merger.merged_list_from_content(
            final_content, cfg.CONF.deployment_key, name)
        return final_list
class FakeKeystoneDiscoverNone(test_heat.FakeKeystoneDiscover):
    """Discover double reporting that no v3 endpoint is available."""

    def url_for(self, version):
        return None


class FakeKeystoneDiscoverError(test_heat.FakeKeystoneDiscover):
    """Discover double whose version discovery always fails."""

    def url_for(self, version):
        raise ks_exc.DiscoveryFailure()


class FakeKeystoneDiscoverBase(test_heat.FakeKeystoneDiscover):
    """Discover double returning a fixed unversioned endpoint URL."""

    def url_for(self, version):
        return 'http://192.0.2.1:5000/'


class KeystoneTest(testtools.TestCase):
    """Tests for the keystone.Keystone auth/caching wrapper."""

    def setUp(self):
        super().setUp()
        self.addCleanup(cfg.CONF.reset)
        collect.setup_conf()
        self.useFixture(fixtures.NestedTempfile())
        # Give each test its own on-disk token cache.
        self.cachedir = tempfile.mkdtemp()
        cfg.CONF.set_override('cache_dir', self.cachedir, group='keystone')

    def test_discover_fail(self):
        # When discovery raises, the wrapper falls back to rewriting the
        # v2.0 URL into the conventional v3 path.
        ks = keystone.Keystone(
            'http://192.0.2.1:5000/v2.0', 'auser', 'apassword', 'aproject',
            test_heat.FakeKeystoneClient(self),
            FakeKeystoneDiscoverError)
        self.assertEqual(ks.auth_url, 'http://192.0.2.1:5000/v3')

    def test_discover_v3_unsupported(self):
        # When discovery finds no v3 endpoint, the original URL is kept.
        ks = keystone.Keystone(
            'http://192.0.2.1:5000/v2.0', 'auser', 'apassword', 'aproject',
            test_heat.FakeKeystoneClient(self),
            FakeKeystoneDiscoverNone)
        self.assertEqual(ks.auth_url, 'http://192.0.2.1:5000/v2.0')

    def test_cache_is_created(self):
        ks = keystone.Keystone(
            'http://192.0.2.1:5000/', 'auser', 'apassword', 'aproject',
            test_heat.FakeKeystoneClient(self),
            test_heat.FakeKeystoneDiscover)
        self.assertIsNotNone(ks.cache)

    def _make_ks(self, client):
        # Build a Keystone wrapper around the given fake client class.
        class Configs:
            auth_url = 'http://192.0.2.1:5000/'
            user_id = 'auser'
            password = 'apassword'
            project_id = 'aproject'

        return keystone.Keystone(
            'http://192.0.2.1:5000/', 'auser', 'apassword', 'aproject',
            client(self, Configs),
            FakeKeystoneDiscoverBase)

    def test_cache_auth_ref(self):
        ks = self._make_ks(test_heat.FakeKeystoneClient)
        auth_ref = ks.auth_ref
        # Client must fail now - we should make no client calls
        ks2 = self._make_ks(test_heat.FakeFailKeystoneClient)
        auth_ref2 = ks2.auth_ref
        self.assertEqual(auth_ref, auth_ref2)
        # And can we invalidate
        ks2.invalidate_auth_ref()
        # auth_ref is a property, so let assertRaises trigger it through
        # getattr instead of the old try/except + assertTrue(False) idiom.
        self.assertRaises(ks_exc.AuthorizationFailure,
                          getattr, ks2, 'auth_ref')
class DummyConf:
    """Minimal stand-in for the cache module's oslo.config namespace.

    Exposes only ``CONF.cachedir``, which is all cache.py reads.
    """

    def __init__(self, cachedir):
        class CONFobj:
            def __init__(self, cachedir):
                self.cachedir = cachedir
        self.CONF = CONFobj(cachedir)


class TestCache(testtools.TestCase):
    """Tests for the change-detecting on-disk metadata cache."""

    def setUp(self):
        super().setUp()
        cache_root = self.useFixture(fixtures.TempDir())
        self.cache_dir = os.path.join(cache_root.path, 'cache')
        # Point the cache module at a per-test directory so tests never
        # touch the real cache location.
        self.useFixture(fixtures.MonkeyPatch('os_collect_config.cache.cfg',
                                             DummyConf(self.cache_dir)))

    # NOTE: the previous no-op tearDown override (which only called
    # super().tearDown()) was removed; testtools handles it already.

    def test_cache(self):
        # Never seen, so changed is expected.
        (changed, path) = cache.store('foo', {'a': 1})
        self.assertTrue(changed)
        self.assertTrue(os.path.exists(self.cache_dir))
        self.assertTrue(os.path.exists(path))
        orig_path = '%s.orig' % path
        self.assertTrue(os.path.exists(orig_path))
        last_path = '%s.last' % path
        self.assertFalse(os.path.exists(last_path))

        # .orig exists now but not .last so this will shortcut to changed
        (changed, path) = cache.store('foo', {'a': 2})
        self.assertTrue(changed)
        orig_path = '%s.orig' % path
        with open(path) as now:
            with open(orig_path) as then:
                self.assertNotEqual(now.read(), then.read())

        # Saves the current copy as .last
        cache.commit('foo')
        last_path = '%s.last' % path
        self.assertTrue(os.path.exists(last_path))

        # We committed this already, so we should have no changes
        (changed, path) = cache.store('foo', {'a': 2})
        self.assertFalse(changed)

        cache.commit('foo')
        # Fully exercising the line-by-line matching now that a .last exists
        (changed, path) = cache.store('foo', {'a': 3})
        self.assertTrue(changed)
        self.assertTrue(os.path.exists(path))

        # And the meta list
        list_path = cache.store_meta_list('foo_list', ['foo'])
        self.assertTrue(os.path.exists(list_path))
        with open(list_path) as list_file:
            list_list = json.loads(list_file.read())
        self.assertThat(list_list, matchers.IsInstance(list))
        self.assertIn(path, list_list)

    def test_cache_ignores_json_inequality(self):
        # Same JSON object serialized with different key order must not
        # register as a change.
        content1 = '{"a": "value-a", "b": "value-b"}'
        content2 = '{"b": "value-b", "a": "value-a"}'
        value1 = json.loads(content1)
        value2 = json.loads(content2)
        self.assertEqual(value1, value2)
        (changed, path) = cache.store('content', value1)
        self.assertTrue(changed)
        cache.commit('content')
        (changed, path) = cache.store('content', value1)
        self.assertFalse(changed)
        (changed, path) = cache.store('content', value2)
        self.assertFalse(changed)

    def test_commit_no_cache(self):
        # Committing a name that was never stored is a harmless no-op.
        self.assertIsNone(cache.commit('neversaved'))
class Collector:
    """Collects metadata from JSON files in configured local directories."""

    def __init__(self, requests_impl=None):
        # requests_impl is accepted only for interface parity with the
        # other collectors; this one never makes HTTP requests.
        pass

    def collect(self):
        """Return [(filename, parsed_json), ...] sorted by C-locale name.

        Raises LocalMetadataNotAvailable when no paths are configured,
        when a path or file looks insecure, or when a file is not valid
        JSON.  Missing directories are skipped with a warning.
        """
        if len(cfg.CONF.local.path) == 0:
            raise exc.LocalMetadataNotAvailable
        entries = []
        for dirpath in cfg.CONF.local.path:
            try:
                os.stat(dirpath)
            except OSError:
                logger.warning("%s not found. Skipping", dirpath)
                continue
            if _dest_looks_insecure(dirpath):
                raise exc.LocalMetadataNotAvailable
            for fname in os.listdir(dirpath):
                # Skip hidden files and nested directories.
                if fname.startswith('.'):
                    continue
                full_path = os.path.join(dirpath, fname)
                if os.path.isdir(full_path):
                    continue
                # A world-writable metadata file could let anyone inject
                # configuration; refuse to proceed.
                if os.stat(full_path).st_mode & stat.S_IWOTH:
                    logger.error(
                        '%s is world writable. This is a security risk.' %
                        full_path)
                    raise exc.LocalMetadataNotAvailable
                with open(full_path) as metadata:
                    try:
                        value = json.loads(metadata.read())
                    except ValueError as e:
                        logger.error(
                            '{} is not valid JSON ({})'.format(full_path, e))
                        raise exc.LocalMetadataNotAvailable
                entries.append((os.path.basename(full_path), value))
        if not entries:
            logger.info('No local metadata found (%s)' %
                        cfg.CONF.local.path)

        # Sort by filename under the C locale so the ordering is stable
        # regardless of the host's locale settings.
        save_locale = locale.getlocale()
        try:
            locale.setlocale(locale.LC_ALL, 'C')
            sorted_entries = sorted(
                entries, key=lambda item: locale.strxfrm(item[0]))
        finally:
            locale.setlocale(locale.LC_ALL, save_locale)

        return sorted_entries
class Collector:
    """Collects deployment metadata for one resource via the Heat API."""

    def __init__(self,
                 keystoneclient=keystoneclient,
                 heatclient=heatclient,
                 discover_class=None):
        # Client modules/classes are injectable so tests can supply
        # fakes instead of real service clients.
        self.keystoneclient = keystoneclient
        self.heatclient = heatclient
        self.discover_class = discover_class

    def collect(self):
        """Fetch and merge resource metadata from Heat.

        Raises HeatMetadataNotConfigured when any required option is
        missing, and HeatMetadataNotAvailable on any failure talking to
        keystone or Heat.
        """
        # Every one of these options is required to reach Heat; bail out
        # with "not configured" if any is missing.
        if CONF.heat.auth_url is None:
            logger.info('No auth_url configured.')
            raise exc.HeatMetadataNotConfigured
        if CONF.heat.password is None:
            logger.info('No password configured.')
            raise exc.HeatMetadataNotConfigured
        if CONF.heat.project_id is None:
            logger.info('No project_id configured.')
            raise exc.HeatMetadataNotConfigured
        if CONF.heat.user_id is None:
            logger.info('No user_id configured.')
            raise exc.HeatMetadataNotConfigured
        if CONF.heat.stack_id is None:
            logger.info('No stack_id configured.')
            raise exc.HeatMetadataNotConfigured
        if CONF.heat.resource_name is None:
            logger.info('No resource_name configured.')
            raise exc.HeatMetadataNotConfigured
        # NOTE(flwang): To be compatible with old versions, we won't throw
        # error here if there is no region name.

        try:
            # Authenticate via the keystone wrapper (handles v3
            # discovery and token caching).
            ks = keystone.Keystone(
                auth_url=CONF.heat.auth_url,
                user_id=CONF.heat.user_id,
                password=CONF.heat.password,
                project_id=CONF.heat.project_id,
                keystoneclient=self.keystoneclient,
                discover_class=self.discover_class).client
            kwargs = {'service_type': 'orchestration',
                      'endpoint_type': 'publicURL'}
            if CONF.heat.region_name:
                kwargs['region_name'] = CONF.heat.region_name
            endpoint = ks.service_catalog.url_for(**kwargs)
            logger.debug('Fetching metadata from %s' % endpoint)
            heat = self.heatclient.Client(
                '1', endpoint, token=ks.auth_token)
            r = heat.resources.metadata(CONF.heat.stack_id,
                                        CONF.heat.resource_name)

            # Split any deployment_key entries into separate configs.
            final_list = merger.merged_list_from_content(
                r, cfg.CONF.deployment_key, name)
            return final_list

        # Deliberately broad: any failure in the auth/fetch sequence is
        # downgraded to "metadata not available" so polling can retry.
        except Exception as e:
            logger.warning(str(e))
            raise exc.HeatMetadataNotAvailable
class Keystone:
    '''A keystone wrapper class.

    This wrapper is used to encapsulate any keystone related operations
    os-collect-config may need to perform. Includes a dogpile cache to
    support memoization so we can reuse auth references stored on disk
    in subsequent invocations of os-collect-config.
    '''
    def __init__(self, auth_url, user_id, password, project_id,
                 keystoneclient=None, discover_class=None):
        '''Initialize Keystone wrapper.

        @param string auth_url auth_url for keystoneclient
        @param string user_id user_id for keystoneclient
        @param string password password for keystoneclient
        @param string project_id project_id for keystoneclient
        @param object keystoneclient optional keystoneclient implementation.
                      Uses keystoneclient.v3 if unspecified.
        @param object discover_class optional keystoneclient.discover.Discover
                      class.
        '''
        self.keystoneclient = keystoneclient or ks_keystoneclient
        self.discover_class = discover_class or ks_discover.Discover
        self.user_id = user_id
        self.password = password
        self.project_id = project_id
        self._client = None
        try:
            # Probe the unversioned endpoint for a v3 URL; keep the
            # configured URL when v3 is not advertised.
            auth_url_noneversion = auth_url.replace('/v2.0', '/')
            discover = self.discover_class(auth_url=auth_url_noneversion)
            v3_auth_url = discover.url_for('3.0')
            if v3_auth_url:
                self.auth_url = v3_auth_url
            else:
                self.auth_url = auth_url
        except ks_exc.ClientException:
            # Discovery failed entirely; assume a v3 endpoint lives at
            # the conventional path.
            self.auth_url = auth_url.replace('/v2.0', '/v3')
        if CONF.keystone.cache_dir:
            if not os.path.isdir(CONF.keystone.cache_dir):
                # Tokens are secrets: keep the cache directory private.
                os.makedirs(CONF.keystone.cache_dir, mode=0o700)

            dbm_path = os.path.join(CONF.keystone.cache_dir, 'keystone.db')
            self.cache = cache.make_region().configure(
                'dogpile.cache.dbm',
                expiration_time=CONF.keystone.cache_ttl,
                arguments={"filename": dbm_path})
        else:
            self.cache = None

    def _make_key(self, key):
        # Scope cache entries by endpoint/user/project so different
        # credentials never share an auth reference.
        m = hashlib.sha256()
        m.update(self.auth_url.encode('utf-8'))
        m.update(self.user_id.encode('utf-8'))
        m.update(self.project_id.encode('utf-8'))
        m.update(key.encode('utf-8'))
        return m.hexdigest()

    @property
    def client(self):
        # Lazily construct the keystone client, reusing a cached auth
        # reference when one is available to avoid re-authenticating.
        if not self._client:
            ref = self._get_auth_ref_from_cache()
            if ref:
                self._client = self.keystoneclient.Client(
                    auth_ref=ref)
            else:
                self._client = self.keystoneclient.Client(
                    auth_url=self.auth_url,
                    user_id=self.user_id,
                    password=self.password,
                    project_id=self.project_id)
        return self._client

    def _get_auth_ref_from_cache(self):
        # Returns the cached auth reference, or None when caching is
        # disabled or nothing has been stored yet.
        if self.cache:
            key = self._make_key('auth_ref')
            return self.cache.get(key)

    @property
    def auth_ref(self):
        # Cached-through accessor: fetch from keystone only on a cache
        # miss, then persist for subsequent invocations.
        ref = self._get_auth_ref_from_cache()
        if not ref:
            ref = self.client.get_auth_ref()
            if self.cache:
                self.cache.set(self._make_key('auth_ref'), ref)
        return ref

    def invalidate_auth_ref(self):
        # Drop the cached auth reference (e.g. after an auth failure).
        if self.cache:
            key = self._make_key('auth_ref')
            return self.cache.delete(key)

    @property
    def service_catalog(self):
        # A stale cached token can cause AuthorizationFailure; clear the
        # cache and retry once with fresh credentials.
        try:
            return self.client.service_catalog
        except ks_exc.AuthorizationFailure:
            self.invalidate_auth_ref()
            return self.client.service_catalog
# Canned "blkid -o export" stanzas used by the tests below.
BLKID_CONFIG_DRIVE = '''DEVNAME=/dev/sr0
UUID=2016-09-12-02-14-09-00
LABEL=config-2
TYPE=iso9660'''

BLKID_RESPONSE = BLKID_CONFIG_DRIVE + '''

DEVNAME=/dev/block/253:1
UUID=f13d84b4-c756-4d89-9d5e-6b534397aa14
TYPE=xfs
'''


class TestConfigDrive(testtools.TestCase):
    """Tests for config-drive discovery, mounting, and metadata reads."""

    def setUp(self):
        super().setUp()
        self.log = self.useFixture(fixtures.FakeLogger())

    @mock.patch.object(subprocess, 'check_output')
    def test_all_devices(self, co):
        # Two devices in blkid output: a config-drive and a plain xfs
        # volume; both must be parsed, only the first is a candidate.
        co.return_value = BLKID_RESPONSE
        bds = list(config_drive.all_block_devices())
        self.assertEqual(2, len(bds))

        self.assertEqual('/dev/sr0', bds[0].devname)
        self.assertEqual('iso9660', bds[0].type)
        self.assertEqual('config-2', bds[0].label)
        self.assertTrue(bds[0].config_drive_candidate())
        self.assertEqual('/dev/sr0: TYPE="iso9660" LABEL="config-2"',
                         str(bds[0]))

        self.assertEqual('/dev/block/253:1', bds[1].devname)
        self.assertEqual('xfs', bds[1].type)
        self.assertIsNone(bds[1].label)
        self.assertFalse(bds[1].config_drive_candidate())
        self.assertEqual('/dev/block/253:1: TYPE="xfs" LABEL="None"',
                         str(bds[1]))

    @mock.patch.object(subprocess, 'check_output')
    def test_config_drive(self, co):
        # config_drive() picks the candidate device out of blkid output.
        co.return_value = BLKID_RESPONSE
        bd = config_drive.config_drive()
        self.assertTrue(bd.config_drive_candidate())
        self.assertEqual('/dev/sr0: TYPE="iso9660" LABEL="config-2"',
                         str(bd))

    def test_parse_shell_var(self):
        # KEY=value parsing, including embedded '=', escaped spaces,
        # empty values, and malformed/None input.
        psv = config_drive.BlockDevice.parse_shell_var
        self.assertEqual(('foo', 'bar'), psv('foo=bar'))
        self.assertEqual(('foo', 'bar=baz'), psv('foo=bar=baz'))
        self.assertEqual(('foo', 'bar baz'), psv('foo=bar baz'))
        self.assertEqual(('foo', 'bar baz'), psv(r'foo=bar\ baz'))
        self.assertEqual(('foo', ''), psv('foo='))
        self.assertEqual((None, None), psv('foo'))
        self.assertEqual((None, None), psv(None))

    @mock.patch.object(subprocess, 'check_output')
    def test_ensure_mounted(self, co):
        # With an empty /proc/mounts, ensure_mounted() must mount the
        # device read-only itself and cleanup() must unmount it again.
        bd = config_drive.BlockDevice.from_blkid_export(BLKID_CONFIG_DRIVE)
        self.assertTrue(bd.config_drive_candidate())
        proc = self.useFixture(fixtures.TempDir())
        config_drive.PROC_MOUNTS_PATH = os.path.join(proc.path, 'mount')
        with open(config_drive.PROC_MOUNTS_PATH, 'w') as md:
            md.write('')

        self.assertIsNone(bd.mountpoint)
        self.assertFalse(bd.unmount)

        bd.ensure_mounted()
        mountpoint = bd.mountpoint
        self.assertIsNotNone(mountpoint)
        self.assertTrue(bd.unmount)
        self.assertTrue(os.path.isdir(mountpoint))
        co.assert_called_with([
            'mount', '/dev/sr0', mountpoint, '-o', 'ro'
        ])

        bd.cleanup()
        self.assertIsNone(bd.mountpoint)
        self.assertFalse(bd.unmount)
        self.assertFalse(os.path.isdir(mountpoint))
        co.assert_called_with([
            'umount', '-l', mountpoint
        ])

    @mock.patch.object(subprocess, 'check_output')
    def test_already_mounted(self, co):
        # When /proc/mounts already lists the device, the existing
        # mountpoint is reused and no mount/umount commands run.
        bd = config_drive.BlockDevice.from_blkid_export(BLKID_CONFIG_DRIVE)
        self.assertTrue(bd.config_drive_candidate())
        proc = self.useFixture(fixtures.TempDir())
        mountpoint = self.useFixture(fixtures.TempDir()).path
        config_drive.PROC_MOUNTS_PATH = os.path.join(proc.path, 'mount')
        with open(config_drive.PROC_MOUNTS_PATH, 'w') as md:
            md.write('{} {} r 0 0\n'.format(bd.devname, mountpoint))

        self.assertIsNone(bd.mountpoint)
        self.assertFalse(bd.unmount)

        bd.ensure_mounted()
        self.assertEqual(mountpoint, bd.mountpoint)
        self.assertFalse(bd.unmount)
        co.assert_not_called()

        bd.cleanup()
        self.assertIsNone(bd.mountpoint)
        self.assertFalse(bd.unmount)
        co.assert_not_called()

    @mock.patch.object(config_drive.BlockDevice, 'ensure_mounted')
    @mock.patch.object(config_drive.BlockDevice, 'cleanup')
    def test_get_metadata(self, cleanup, ensure_mounted):
        # Missing meta-data.json yields {}; a present file is parsed.
        bd = config_drive.BlockDevice.from_blkid_export(BLKID_CONFIG_DRIVE)
        bd.mountpoint = self.useFixture(fixtures.TempDir()).path

        md = bd.get_metadata()
        self.assertEqual({}, md)

        md_dir = os.path.join(bd.mountpoint, 'ec2', 'latest')
        os.makedirs(md_dir)
        md_path = os.path.join(md_dir, 'meta-data.json')
        with open(md_path, 'w') as md:
            json.dump(test_ec2.META_DATA_RESOLVED, md)

        md = bd.get_metadata()
        self.assertEqual(test_ec2.META_DATA_RESOLVED, md)
# Sample metadata payloads written to JSON files by the tests below.
META_DATA = {'localstrA': 'A',
             'localint9': 9,
             'localmap_xy': {
                 'x': 42,
                 'y': 'foo',
             }}
META_DATA2 = {'localstrA': 'Z',
              'localint9': 9}


class TestLocal(testtools.TestCase):
    """Tests for the local-directory metadata collector."""

    def setUp(self):
        super().setUp()
        self.log = self.useFixture(fixtures.FakeLogger())
        self.useFixture(fixtures.NestedTempfile())
        # Per-test directory that the collector is pointed at.
        self.tdir = tempfile.mkdtemp()
        collect.setup_conf()
        self.addCleanup(cfg.CONF.reset)
        cfg.CONF.register_cli_opts(local.opts, group='local')
        cfg.CONF.set_override(name='path',
                              override=[self.tdir],
                              group='local')

    def _call_collect(self):
        """Run the collector against the configured path."""
        md = local.Collector().collect()
        return md

    def _setup_test_json(self, data, md_base='test.json'):
        """Write *data* as JSON under self.tdir; returns the file path."""
        md_name = os.path.join(self.tdir, md_base)
        with open(md_name, 'w') as md:
            md.write(json.dumps(data))
        return md_name

    def test_collect_local(self):
        self._setup_test_json(META_DATA)
        local_md = self._call_collect()

        # Result shape: a list of (filename, metadata-dict) tuples.
        self.assertThat(local_md, matchers.IsInstance(list))
        self.assertEqual(1, len(local_md))
        self.assertThat(local_md[0], matchers.IsInstance(tuple))
        self.assertEqual(2, len(local_md[0]))
        self.assertEqual('test.json', local_md[0][0])

        only_md = local_md[0][1]
        self.assertThat(only_md, matchers.IsInstance(dict))

        for k in ('localstrA', 'localint9', 'localmap_xy'):
            self.assertIn(k, only_md)
            self.assertEqual(only_md[k], META_DATA[k])

        self.assertEqual('', self.log.output)

    def test_collect_local_world_writable(self):
        # A world-writable metadata file must abort collection.
        md_name = self._setup_test_json(META_DATA)
        os.chmod(md_name, 0o666)
        self.assertRaises(exc.LocalMetadataNotAvailable, self._call_collect)
        self.assertIn('%s is world writable. This is a security risk.' %
                      md_name, self.log.output)

    def test_collect_local_world_writable_dir(self):
        # A world-writable containing directory must abort collection.
        self._setup_test_json(META_DATA)
        os.chmod(self.tdir, 0o666)
        self.assertRaises(exc.LocalMetadataNotAvailable, self._call_collect)
        self.assertIn('%s is world writable. This is a security risk.' %
                      self.tdir, self.log.output)

    def test_collect_local_owner_not_uid(self):
        # Pretend the directory belongs to another user by shifting the
        # uid that os.getuid() reports.
        self._setup_test_json(META_DATA)
        real_getuid = os.getuid

        def fake_getuid():
            return real_getuid() + 1
        self.useFixture(fixtures.MonkeyPatch('os.getuid', fake_getuid))
        self.assertRaises(exc.LocalMetadataNotAvailable, self._call_collect)
        self.assertIn('%s is owned by another user. This is a security risk.' %
                      self.tdir, self.log.output)

    def test_collect_local_orders_multiple(self):
        self._setup_test_json(META_DATA, '00test.json')
        self._setup_test_json(META_DATA2, '99test.json')

        # Monkey Patch os.listdir so it _always_ returns the wrong sort
        unpatched_listdir = os.listdir

        def wrong_sort_listdir(path):
            ret = unpatched_listdir(path)
            save_locale = locale.getlocale()
            try:
                locale.setlocale(locale.LC_ALL, 'C')
                bad_sort = sorted(ret, reverse=True)
            finally:
                locale.setlocale(locale.LC_ALL, save_locale)
            return bad_sort

        self.useFixture(fixtures.MonkeyPatch('os.listdir', wrong_sort_listdir))
        local_md = self._call_collect()

        # The collector must re-sort, so order is by C-locale filename.
        self.assertThat(local_md, matchers.IsInstance(list))
        self.assertEqual(2, len(local_md))
        self.assertThat(local_md[0], matchers.IsInstance(tuple))

        self.assertEqual('00test.json', local_md[0][0])
        md1 = local_md[0][1]
        self.assertEqual(META_DATA, md1)

        self.assertEqual('99test.json', local_md[1][0])
        md2 = local_md[1][1]
        self.assertEqual(META_DATA2, md2)

    def test_collect_invalid_json_fail(self):
        # One malformed file poisons the whole collection.
        self._setup_test_json(META_DATA)
        with open(os.path.join(self.tdir, 'bad.json'), 'w') as badjson:
            badjson.write('{')
        self.assertRaises(exc.LocalMetadataNotAvailable, self._call_collect)
        self.assertIn('is not valid JSON', self.log.output)

    def test_collect_local_path_nonexist(self):
        # A missing configured path is skipped, yielding an empty list.
        cfg.CONF.set_override(name='path',
                              override=['/this/doesnt/exist'],
                              group='local')
        local_md = self._call_collect()
        self.assertThat(local_md, matchers.IsInstance(list))
        self.assertEqual(0, len(local_md))

import json
import os
import subprocess
import tempfile

from oslo_log import log


logger = log.getLogger('os-collect-config')


PROC_MOUNTS_PATH = '/proc/mounts'


class BlockDevice:
    # One block device as reported by ``blkid -o export``; used to locate
    # and read an OpenStack v2 config-drive.

    # Device node path, e.g. /dev/vdb (from blkid DEVNAME).
    devname = None

    # Filesystem type (from blkid TYPE).
    type = None

    # Filesystem label (from blkid LABEL).
    label = None

    # Where the device is (or was) mounted; None when not mounted.
    mountpoint = None

    # True only when ensure_mounted() created the mount itself, so
    # cleanup() knows whether it is responsible for unmounting.
    unmount = False

    # Mapping from blkid export keys to our attribute names.
    ATTR_MAP = {
        'DEVNAME': 'devname',
        'TYPE': 'type',
        'LABEL': 'label'
    }

    @staticmethod
    def parse_shell_var(line):
        # parse shell-style KEY=value; returns (None, None) when the line
        # has no '=' or is not a string.
        try:
            ieq = line.index('=')
        except (ValueError, AttributeError):
            return None, None
        value = line[ieq + 1:]
        # unescape backslash escaped spaces
        value = value.replace('\\ ', ' ')
        return line[:ieq], value

    @classmethod
    def from_blkid_export(cls, export_str):
        '''Construct BlockDevice from export formatted blkid output.'''
        bd = cls()
        for line in export_str.splitlines():
            var, value = cls.parse_shell_var(line)
            if var in cls.ATTR_MAP:
                setattr(bd, cls.ATTR_MAP[var], value)
        return bd

    def config_drive_candidate(self):
        '''Whether this block device is a v2 config-drive.'''
        return self.label == 'config-2' and self.type in (
            'vfat', 'iso9660')

    def ensure_mounted(self):
        '''Finds an existing mountpoint or mounts to a temp directory.

        On failure mountpoint is left as None; self.unmount records
        whether this call performed the mount (and so must undo it).
        '''
        self.unmount = False
        # check if already mounted, if so use that
        with open(PROC_MOUNTS_PATH) as f:
            for line in f.read().splitlines():
                values = line.split()
                if values[0] == self.devname:
                    self.mountpoint = values[1]
                    logger.debug('Found existing mounted config-drive: %s' %
                                 self.mountpoint)
                    return

        # otherwise mount readonly to a temp directory
        self.mountpoint = tempfile.mkdtemp(prefix='config-2-')
        cmd = ['mount', self.devname, self.mountpoint, '-o', 'ro']
        logger.debug('Mounting {} at : {}'.format(
            self.devname, self.mountpoint))
        try:
            subprocess.check_output(cmd)
        except subprocess.CalledProcessError as e:
            logger.error('Problem running "%s": %s', ' '.join(cmd), e)
            # mount failed: remove the now-unused temp directory
            os.rmdir(self.mountpoint)
            self.mountpoint = None
        else:
            self.unmount = True

    def cleanup(self):
        '''Unmounts device if mounted by ensure_mounted.

        Pre-existing mounts (unmount False) are left alone.
        '''
        if not self.unmount:
            self.mountpoint = None
            return
        if not self.mountpoint:
            self.unmount = False
            return

        # -l (lazy) detaches even if the mountpoint is briefly busy.
        cmd = ['umount', '-l', self.mountpoint]
        logger.debug('Unmounting: %s' % self.mountpoint)
        try:
            subprocess.check_output(cmd)
        except subprocess.CalledProcessError as e:
            logger.error('Problem running "%s": %s', ' '.join(cmd), e)
        else:
            os.rmdir(self.mountpoint)
            self.mountpoint = None
            self.unmount = False

    def get_metadata(self):
        '''Load and return ec2/latest/meta-data.json from config drive.

        Returns {} on any failure; always unmounts what it mounted.
        '''
        try:
            self.ensure_mounted()
            if not self.mountpoint:
                return {}

            md_path = os.path.join(self.mountpoint,
                                   'ec2', 'latest', 'meta-data.json')
            if not os.path.isfile(md_path):
                logger.warning('No expected file at path: %s' % md_path)
                return {}
            with open(md_path) as f:
                return json.load(f)
        except Exception as e:
            # best-effort: any error (mount, IO, JSON) degrades to no data
            logger.error('Problem getting metadata: %s', e)
            return {}
        finally:
            self.cleanup()

    def __repr__(self):
        return '{}: TYPE="{}" LABEL="{}"'.format(self.devname,
                                                 self.type,
                                                 self.label)


def all_block_devices():
    '''Run blkid and yield a BlockDevice for all devices.'''
    try:
        cmd = ['blkid', '-o', 'export']
        out = subprocess.check_output(cmd, universal_newlines=True)
    except Exception as e:
        logger.error('Problem running "%s": %s', ' '.join(cmd), e)
    else:
        # with -o export, devices are separated by a blank line
        for device in out.split('\n\n'):
            yield BlockDevice.from_blkid_export(device)


def config_drive():
    """Return the first device expected to contain a v2 config drive.

    Disk needs to be:
    * either vfat or iso9660 formated
    * labeled with 'config-2'
    """
    for bd in all_block_devices():
        if bd.config_drive_candidate():
            return bd


def get_metadata():
    """Return discovered config drive metadata, or an empty dict."""
    bd = config_drive()
    if bd:
        return bd.get_metadata()
    return {}
--------------------------------------------------------------------------------
/os_collect_config/cfn.py:
--------------------------------------------------------------------------------
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
15 | 16 | import json 17 | import os 18 | 19 | from keystoneclient.contrib.ec2 import utils as ec2_utils 20 | from lxml import etree 21 | from oslo_config import cfg 22 | from oslo_log import log 23 | import urllib.parse as urlparse 24 | 25 | from os_collect_config import common 26 | from os_collect_config import exc 27 | from os_collect_config import merger 28 | 29 | CONF = cfg.CONF 30 | logger = log.getLogger(__name__) 31 | 32 | opts = [ 33 | cfg.URIOpt('metadata-url', 34 | help='URL to query for CloudFormation Metadata'), 35 | cfg.StrOpt('heat-metadata-hint', 36 | default='/var/lib/heat-cfntools/cfn-metadata-server', 37 | help='Local file to read for metadata url if not explicitly ' 38 | ' specified'), 39 | cfg.StrOpt('ca_certificate', help='CA Certificate path'), 40 | cfg.StrOpt('stack-name', 41 | help='Stack name to describe'), 42 | cfg.MultiStrOpt('path', 43 | help='Path to Metadata'), 44 | cfg.StrOpt('secret-access-key', 45 | secret=True, 46 | help='Secret Access Key'), 47 | cfg.StrOpt('access-key-id', 48 | help='Access Key ID'), 49 | cfg.MultiStrOpt('deployment-key', 50 | default=['deployments'], 51 | help='DEPRECATED, use global configuration option ' 52 | '"deployment-key"'), 53 | cfg.FloatOpt('timeout', default=10, 54 | help='Seconds to wait for the connection and read request' 55 | ' timeout.') 56 | ] 57 | name = 'cfn' 58 | 59 | 60 | class Collector: 61 | 62 | def __init__(self, requests_impl=common.requests): 63 | self._requests_impl = requests_impl 64 | self._session = requests_impl.Session() 65 | 66 | def collect(self): 67 | if CONF.cfn.metadata_url is None: 68 | if (CONF.cfn.heat_metadata_hint 69 | and os.path.exists(CONF.cfn.heat_metadata_hint)): 70 | with open(CONF.cfn.heat_metadata_hint) as hint: 71 | CONF.cfn.metadata_url = '%s/v1/' % hint.read().strip() 72 | else: 73 | logger.info('No metadata_url configured.') 74 | raise exc.CfnMetadataNotConfigured 75 | if CONF.cfn.access_key_id is None: 76 | logger.info('No Access Key ID configured.') 77 | 
raise exc.CfnMetadataNotConfigured 78 | if CONF.cfn.secret_access_key is None: 79 | logger.info('No Secret Access Key configured.') 80 | raise exc.CfnMetadataNotConfigured 81 | url = CONF.cfn.metadata_url 82 | stack_name = CONF.cfn.stack_name 83 | headers = {'Content-Type': 'application/json'} 84 | final_content = {} 85 | if CONF.cfn.path is None: 86 | logger.info('No path configured') 87 | raise exc.CfnMetadataNotConfigured 88 | 89 | signer = ec2_utils.Ec2Signer(secret_key=CONF.cfn.secret_access_key) 90 | for path in CONF.cfn.path: 91 | if '.' not in path: 92 | logger.error('Path not in format resource.field[.x.y] (%s)' % 93 | path) 94 | raise exc.CfnMetadataNotConfigured 95 | resource, field = path.split('.', 1) 96 | if '.' in field: 97 | field, sub_path = field.split('.', 1) 98 | else: 99 | sub_path = '' 100 | params = {'Action': 'DescribeStackResource', 101 | 'StackName': stack_name, 102 | 'LogicalResourceId': resource, 103 | 'AWSAccessKeyId': CONF.cfn.access_key_id, 104 | 'SignatureVersion': '2'} 105 | parsed_url = urlparse.urlparse(url) 106 | credentials = {'params': params, 107 | 'verb': 'GET', 108 | 'host': parsed_url.netloc, 109 | 'path': parsed_url.path} 110 | params['Signature'] = signer.generate(credentials) 111 | try: 112 | content = self._session.get( 113 | url, params=params, headers=headers, 114 | verify=CONF.cfn.ca_certificate, 115 | timeout=CONF.cfn.timeout) 116 | content.raise_for_status() 117 | except self._requests_impl.exceptions.RequestException as e: 118 | logger.warning(e) 119 | raise exc.CfnMetadataNotAvailable 120 | map_content = etree.fromstring(content.text) 121 | resource_detail = map_content.find( 122 | 'DescribeStackResourceResult').find('StackResourceDetail') 123 | sub_element = resource_detail.find(field) 124 | if sub_element is None: 125 | logger.warnng('Path %s does not exist.' 
% (path)) 126 | raise exc.CfnMetadataNotAvailable 127 | try: 128 | value = json.loads(sub_element.text) 129 | except ValueError as e: 130 | logger.warning( 131 | 'Path {} failed to parse as json. ({})'.format(path, e)) 132 | raise exc.CfnMetadataNotAvailable 133 | if sub_path: 134 | for subkey in sub_path.split('.'): 135 | try: 136 | value = value[subkey] 137 | except KeyError: 138 | logger.warning( 139 | 'Sub-key {} does not exist. ({})'.format( 140 | subkey, path)) 141 | raise exc.CfnMetadataNotAvailable 142 | final_content.update(value) 143 | final_list = merger.merged_list_from_content( 144 | final_content, cfg.CONF.cfn.deployment_key, name) 145 | return final_list 146 | -------------------------------------------------------------------------------- /os_collect_config/tests/test_ec2.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 

import json
import os
from unittest import mock
import uuid

import fixtures
from oslo_config import cfg
import requests
import testtools
import urllib.parse as urlparse

from os_collect_config import collect
from os_collect_config import config_drive
from os_collect_config import ec2
from os_collect_config import exc


# Raw EC2-style metadata as the metadata service would serve it: keys
# ending in '/' are directory listings the collector must resolve.
META_DATA = {
    'local-ipv4': '192.0.2.1',
    'reservation-id': str(uuid.uuid1()),
    'local-hostname': 'foo',
    'ami-launch-index': '0',
    'public-hostname': 'foo',
    'hostname': 'foo',
    'ami-id': str(uuid.uuid1()),
    'instance-action': 'none',
    'public-ipv4': '192.0.2.1',
    'instance-type': 'flavor.small',
    'placement/': 'availability-zone',
    'placement/availability-zone': 'foo-az',
    'mpi/': 'foo-keypair',
    'mpi/foo-keypair': '192.0.2.1 slots=1',
    'block-device-mapping/': "ami\nroot\nephemeral0",
    'block-device-mapping/ami': 'vda',
    'block-device-mapping/root': '/dev/vda',
    'block-device-mapping/ephemeral0': '/dev/vdb',
    'public-keys/': '0=foo-keypair',
    'public-keys/0': 'openssh-key',
    'public-keys/0/': 'openssh-key',
    'public-keys/0/openssh-key': 'ssh-rsa AAAAAAAAABBBBBBBBCCCCCCCC',
    'instance-id': str(uuid.uuid1())
}


# The same metadata after the collector resolves directory entries into
# nested dicts.
META_DATA_RESOLVED = {
    'local-ipv4': '192.0.2.1',
    'reservation-id': META_DATA['reservation-id'],
    'local-hostname': 'foo',
    'ami-launch-index': '0',
    'public-hostname': 'foo',
    'hostname': 'foo',
    'ami-id': META_DATA['ami-id'],
    'instance-action': 'none',
    'public-ipv4': '192.0.2.1',
    'instance-type': 'flavor.small',
    'placement': {'availability-zone': 'foo-az'},
    'mpi': {'foo-keypair': '192.0.2.1 slots=1'},
    'public-keys': {'0': {'openssh-key': 'ssh-rsa AAAAAAAAABBBBBBBBCCCCCCCC'}},
    'block-device-mapping': {'ami': 'vda',
                             'ephemeral0': '/dev/vdb',
                             'root': '/dev/vda'},
    'instance-id': META_DATA['instance-id']
}


class FakeResponse(dict):
    # Minimal stand-in for requests.Response: only .text and
    # raise_for_status() are used by the collector.
    def __init__(self, text):
        self.text = text

    def raise_for_status(self):
        pass


class FakeRequests:
    # Fake requests module serving META_DATA like the EC2 metadata API.
    exceptions = requests.exceptions

    class Session:
        def get(self, url, timeout=None):
            url = urlparse.urlparse(url)

            if url.path == '/latest/meta-data/':
                # Remove keys which have anything after /
                ks = [x for x in META_DATA.keys() if (
                    '/' not in x or not len(x.split('/')[1]))]
                return FakeResponse("\n".join(ks))

            path = url.path
            path = path.replace('/latest/meta-data/', '')
            return FakeResponse(META_DATA[path])


class FakeFailRequests:
    # Fake requests module whose every GET fails with 403.
    exceptions = requests.exceptions

    class Session:
        def get(self, url, timeout=None):
            raise requests.exceptions.HTTPError(403, 'Forbidden')


class TestEc2(testtools.TestCase):
    def setUp(self):
        super().setUp()
        self.log = self.useFixture(fixtures.FakeLogger())

    @mock.patch.object(config_drive, 'config_drive')
    def test_collect_ec2(self, cd):
        # no config drive present, so metadata comes from the fake service
        cd.return_value = None
        collect.setup_conf()
        ec2_md = ec2.Collector(requests_impl=FakeRequests).collect()
        self.assertEqual([('ec2', META_DATA_RESOLVED)], ec2_md)
        self.assertEqual('', self.log.output)

    @mock.patch.object(config_drive, 'config_drive')
    def test_collect_ec2_fail(self, cd):
        cd.return_value = None
        collect.setup_conf()
        collect_ec2 = ec2.Collector(requests_impl=FakeFailRequests)
        self.assertRaises(exc.Ec2MetadataNotAvailable, collect_ec2.collect)
        self.assertIn('Forbidden', self.log.output)

    @mock.patch.object(config_drive, 'config_drive')
    def test_collect_ec2_invalid_cache(self, cd):
        # an empty (unparseable) cache file must be ignored, not fatal
        cd.return_value = None
        collect.setup_conf()
        cache_dir = self.useFixture(fixtures.TempDir())
        self.addCleanup(cfg.CONF.reset)
        cfg.CONF.set_override('cachedir', cache_dir.path)
        ec2_path = os.path.join(cache_dir.path, 'ec2.json')
        with open(ec2_path, 'w') as f:
            f.write('')

        ec2_md = ec2.Collector(requests_impl=FakeRequests).collect()
        self.assertEqual([('ec2', META_DATA_RESOLVED)], ec2_md)

    @mock.patch.object(config_drive, 'config_drive')
    def test_collect_ec2_collected(self, cd):
        # with a valid cache, no network access is needed at all
        cd.return_value = None
        collect.setup_conf()
        cache_dir = self.useFixture(fixtures.TempDir())
        self.addCleanup(cfg.CONF.reset)
        cfg.CONF.set_override('cachedir', cache_dir.path)
        ec2_path = os.path.join(cache_dir.path, 'ec2.json')
        with open(ec2_path, 'w') as f:
            json.dump(META_DATA, f)

        collect_ec2 = ec2.Collector(requests_impl=FakeFailRequests)
        self.assertEqual([('ec2', META_DATA)], collect_ec2.collect())

    @mock.patch.object(config_drive, 'config_drive')
    def test_collect_config_drive(self, cd):
        # config-drive data wins even when the metadata service fails
        cd.return_value.get_metadata.return_value = META_DATA_RESOLVED
        collect.setup_conf()
        ec2_md = ec2.Collector(requests_impl=FakeFailRequests).collect()
        self.assertEqual([('ec2', META_DATA_RESOLVED)], ec2_md)
        self.assertEqual('', self.log.output)
--------------------------------------------------------------------------------
/os_collect_config/zaqar.py:
--------------------------------------------------------------------------------
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json

from keystoneclient.v3 import client as keystoneclient
from oslo_config import cfg
from oslo_log import log
from zaqarclient.queues.v2 import client as zaqarclient
from zaqarclient import transport
from zaqarclient.transport import request

from os_collect_config import exc
from os_collect_config import keystone
from os_collect_config import merger

CONF = cfg.CONF
logger = log.getLogger(__name__)

opts = [
    cfg.StrOpt('user-id',
               help='User ID for API authentication'),
    cfg.StrOpt('password',
               secret=True,
               help='Password for API authentication'),
    cfg.StrOpt('project-id',
               help='ID of project for API authentication'),
    cfg.URIOpt('auth-url',
               help='URL for API authentication'),
    cfg.StrOpt('queue-id',
               help='ID of the queue to be checked'),
    cfg.BoolOpt('use-websockets',
                default=False,
                help='Use the websocket transport to connect to Zaqar.'),
    cfg.StrOpt('region-name',
               help='Region Name for extracting Zaqar endpoint'),
    cfg.BoolOpt('ssl-certificate-validation',
                help='ssl certificat validation flag for connect to Zaqar',
                default=False),
    cfg.StrOpt('ca-file',
               help='CA Cert file for connect to Zaqar'),
]
name = 'zaqar'


class Collector:
    """Collect deployment metadata from a Zaqar messaging queue."""

    def __init__(self,
                 keystoneclient=keystoneclient,
                 zaqarclient=zaqarclient,
                 discover_class=None,
                 transport=transport):
        # All client classes are injectable so tests can supply fakes.
        self.keystoneclient = keystoneclient
        self.zaqarclient = zaqarclient
        self.discover_class = discover_class
        self.transport = transport

    def get_data_wsgi(self, ks, conf):
        """Pop one message from the queue over the WSGI (HTTP) transport.

        Returns the message body.
        """
        kwargs = {'service_type': 'messaging', 'endpoint_type': 'publicURL'}
        if CONF.zaqar.region_name:
            kwargs['region_name'] = CONF.zaqar.region_name
        endpoint = ks.service_catalog.url_for(**kwargs)
        logger.debug('Fetching metadata from %s' % endpoint)
        # v2 messaging API (v1.1 is no longer used)
        zaqar = self.zaqarclient.Client(endpoint, conf=conf, version=2)

        queue = zaqar.queue(CONF.zaqar.queue_id)
        r = next(queue.pop())
        return r.body

    def _create_req(self, endpoint, action, body):
        # Build a websocket transport request with a JSON-encoded body.
        return request.Request(endpoint, action, content=json.dumps(body))

    def get_data_websocket(self, ks, conf):
        """Receive one message from the queue over the websocket transport.

        Creates the queue and subscribes first, then drains any message
        that arrived before the subscription; otherwise blocks in recv().
        Returns the message body.
        """
        kwargs = {'service_type': 'messaging-websocket',
                  'endpoint_type': 'publicURL'}
        if CONF.zaqar.region_name:
            kwargs['region_name'] = CONF.zaqar.region_name
        endpoint = ks.service_catalog.url_for(**kwargs)

        logger.debug('Fetching metadata from %s' % endpoint)

        with self.transport.get_transport_for(endpoint, options=conf) as ws:
            # create queue
            req = self._create_req(endpoint, 'queue_create',
                                   {'queue_name': CONF.zaqar.queue_id})
            ws.send(req)
            # subscribe to queue messages
            req = self._create_req(endpoint, 'subscription_create',
                                   {'queue_name': CONF.zaqar.queue_id,
                                    'ttl': 10000})
            ws.send(req)

            # check for pre-existing messages
            req = self._create_req(endpoint, 'message_delete_many',
                                   {'queue_name': CONF.zaqar.queue_id,
                                    'pop': 1})
            resp = ws.send(req)
            messages = json.loads(resp.content).get('messages', [])

            if len(messages) > 0:
                # NOTE(dprince) In this case we are checking for queue
                # messages that arrived before we subscribed.
                logger.debug('Websocket message found...')
                msg_0 = messages[0]
                data = msg_0['body']

            else:
                # NOTE(dprince) This will block until there is data available
                # or the socket times out. Because we subscribe to the queue
                # it will allow us to process data immediately.
                logger.debug('websocket recv()')
                data = ws.recv()['body']

        return data

    def collect(self):
        """Validate configuration, fetch queue data, return merged list.

        Raises ZaqarMetadataNotConfigured for missing options and
        ZaqarMetadataNotAvailable for any runtime failure.
        """
        if CONF.zaqar.auth_url is None:
            logger.warning('No auth_url configured.')
            raise exc.ZaqarMetadataNotConfigured()
        if CONF.zaqar.password is None:
            logger.warning('No password configured.')
            raise exc.ZaqarMetadataNotConfigured()
        if CONF.zaqar.project_id is None:
            logger.warning('No project_id configured.')
            raise exc.ZaqarMetadataNotConfigured()
        if CONF.zaqar.user_id is None:
            logger.warning('No user_id configured.')
            raise exc.ZaqarMetadataNotConfigured()
        if CONF.zaqar.queue_id is None:
            logger.warning('No queue_id configured.')
            raise exc.ZaqarMetadataNotConfigured()
        if CONF.zaqar.ssl_certificate_validation is True and (
                CONF.zaqar.ca_file is None):
            logger.warning('No CA file configured when flag ssl certificate '
                           'validation is on.')
            raise exc.ZaqarMetadataNotConfigured()
        # NOTE(flwang): To be compatible with old versions, we won't throw
        # error here if there is no region name.

        try:
            ks = keystone.Keystone(
                auth_url=CONF.zaqar.auth_url,
                user_id=CONF.zaqar.user_id,
                password=CONF.zaqar.password,
                project_id=CONF.zaqar.project_id,
                keystoneclient=self.keystoneclient,
                discover_class=self.discover_class).client

            conf = {
                'auth_opts': {
                    'backend': 'keystone',
                    'options': {
                        'os_auth_token': ks.auth_token,
                        'os_project_id': CONF.zaqar.project_id,
                        'insecure': not CONF.zaqar.ssl_certificate_validation,
                        'cacert': CONF.zaqar.ca_file
                    }
                }
            }

            if CONF.zaqar.use_websockets:
                data = self.get_data_websocket(ks, conf)
            else:
                data = self.get_data_wsgi(ks, conf)

            final_list = merger.merged_list_from_content(
                data, cfg.CONF.deployment_key, name)
            return final_list

        except Exception as e:
            # any failure (auth, transport, parse) degrades to NotAvailable
            logger.warning(str(e))
            raise exc.ZaqarMetadataNotAvailable()
--------------------------------------------------------------------------------
/os_collect_config/tests/test_heat.py:
--------------------------------------------------------------------------------
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import fixtures
from keystoneclient import exceptions as ks_exc
from oslo_config import cfg
import testtools
from testtools import matchers

from os_collect_config import collect
from os_collect_config import exc
from os_collect_config import heat


META_DATA = {'int1': 1,
             'strfoo': 'foo',
             'map_ab': {
                 'a': 'apple',
                 'b': 'banana',
             }}


SOFTWARE_CONFIG_DATA = {
    'old-style': 'value',
    'deployments': [
        {
            'inputs': [
                {
                    'type': 'String',
                    'name': 'input1',
                    'value': 'value1'
                }
            ],
            'group': 'Heat::Ungrouped',
            'name': 'dep-name1',
            'outputs': None,
            'options': None,
            'config': {
                'config1': 'value1'
            }
        }
    ]
}


# 'deployments' is not a list here; used to exercise bad-input handling.
SOFTWARE_CONFIG_IMPOSTER_DATA = {
    'old-style': 'value',
    'deployments': {
        "not": "a list"
    }
}


class FakeKeystoneDiscover:
    # Stands in for keystone version discovery; returns a fixed v3 URL.

    def __init__(self, auth_url):
        pass

    def url_for(self, version):
        return 'http://192.0.2.1:5000/v3'


class FakeKeystoneClient:
    # Doubles as both the keystone client and its service catalog, and
    # asserts the collector passes through the configured credentials.

    def __init__(self, testcase, configs=None):
        self._test = testcase
        self.service_catalog = self
        self.auth_token = 'atoken'
        if configs is None:
            configs = cfg.CONF.heat
        self.configs = configs

    def Client(self, auth_url, user_id, password, project_id):
        self._test.assertEqual(self.configs.auth_url, auth_url)
        self._test.assertEqual(self.configs.user_id, user_id)
        self._test.assertEqual(self.configs.password, password)
        self._test.assertEqual(self.configs.project_id, project_id)
        return self

    def url_for(self, service_type, endpoint_type):
        self._test.assertEqual('orchestration', service_type)
        self._test.assertEqual('publicURL', endpoint_type)
        return 'http://192.0.2.1:8004/v1'

    def get_auth_ref(self):
        return 'this is an auth_ref'


class FakeFailKeystoneClient(FakeKeystoneClient):
    # Simulates a keystone auth failure.

    def Client(self, auth_url, user_id, password, project_id):
        raise ks_exc.AuthorizationFailure('Forbidden')


class FakeHeatClient:
    # Fake heatclient returning META_DATA for the configured resource.
    def __init__(self, testcase):
        self._test = testcase
        self.resources = self

    def Client(self, version, endpoint, token):
        self._test.assertEqual('1', version)
        self._test.assertEqual('http://192.0.2.1:8004/v1', endpoint)
        self._test.assertEqual('atoken', token)
        return self

    def metadata(self, stack_id, resource_name):
        self._test.assertEqual(cfg.CONF.heat.stack_id, stack_id)
        self._test.assertEqual(cfg.CONF.heat.resource_name, resource_name)
        return META_DATA


class FakeHeatClientSoftwareConfig(FakeHeatClient):
    # Variant serving software-config style metadata with deployments.

    def metadata(self, stack_id, resource_name):
        return SOFTWARE_CONFIG_DATA


class TestHeatBase(testtools.TestCase):
    # Shared setUp: fake logger, temp files, and a fully configured
    # [heat] section so individual tests only unset what they exercise.
    def setUp(self):
        super().setUp()
        self.log = self.useFixture(fixtures.FakeLogger())
        self.useFixture(fixtures.NestedTempfile())
        collect.setup_conf()
        cfg.CONF.heat.auth_url = 'http://192.0.2.1:5000/v3'
        cfg.CONF.heat.user_id = '0123456789ABCDEF'
        cfg.CONF.heat.password = 'FEDCBA9876543210'
        cfg.CONF.heat.project_id = '9f6b09df-4d7f-4a33-8ec3-9924d8f46f10'
        cfg.CONF.heat.stack_id = 'a/c482680f-7238-403d-8f76-36acf0c8e0aa'
        cfg.CONF.heat.resource_name = 'server'


class TestHeat(TestHeatBase):
    def test_collect_heat(self):
        heat_md = heat.Collector(keystoneclient=FakeKeystoneClient(self),
                                 heatclient=FakeHeatClient(self),
                                 discover_class=FakeKeystoneDiscover).collect()
        self.assertThat(heat_md, matchers.IsInstance(list))
        self.assertEqual('heat', heat_md[0][0])
        heat_md = heat_md[0][1]

        for k in ('int1', 'strfoo', 'map_ab'):
            self.assertIn(k, heat_md)
            self.assertEqual(heat_md[k], META_DATA[k])

        # FIXME(yanyanhu): Temporary hack to deal with possible log
        # level setting for urllib3.connectionpool.
        self.assertTrue(
            self.log.output == '' or
            self.log.output == 'Starting new HTTP connection (1): 192.0.2.1\n')

    def test_collect_heat_fail(self):
        heat_collect = heat.Collector(
            keystoneclient=FakeFailKeystoneClient(self),
            heatclient=FakeHeatClient(self),
            discover_class=FakeKeystoneDiscover)
        self.assertRaises(exc.HeatMetadataNotAvailable, heat_collect.collect)
        self.assertIn('Forbidden', self.log.output)

    def test_collect_heat_no_auth_url(self):
        cfg.CONF.heat.auth_url = None
        heat_collect = heat.Collector()
        self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
        self.assertIn('No auth_url configured', self.log.output)

    def test_collect_heat_no_password(self):
        cfg.CONF.heat.password = None
        heat_collect = heat.Collector()
        self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
        self.assertIn('No password configured', self.log.output)

    def test_collect_heat_no_project_id(self):
        cfg.CONF.heat.project_id = None
        heat_collect = heat.Collector()
        self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
        self.assertIn('No project_id configured', self.log.output)

    def test_collect_heat_no_user_id(self):
        cfg.CONF.heat.user_id = None
        heat_collect = heat.Collector()
        self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
        self.assertIn('No user_id configured', self.log.output)

    def test_collect_heat_no_stack_id(self):
        cfg.CONF.heat.stack_id = None
        heat_collect = heat.Collector()
        self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
        self.assertIn('No stack_id configured', self.log.output)

    def test_collect_heat_no_resource_name(self):
        cfg.CONF.heat.resource_name = None
        heat_collect = heat.Collector()
        self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
        self.assertIn('No resource_name configured', self.log.output)


class TestHeatSoftwareConfig(TestHeatBase):
    def test_collect_heat(self):
        heat_md = heat.Collector(
            keystoneclient=FakeKeystoneClient(self),
            heatclient=FakeHeatClientSoftwareConfig(self),
            discover_class=FakeKeystoneDiscover).collect()
        self.assertThat(heat_md, matchers.IsInstance(list))
        self.assertEqual(2, len(heat_md))
        self.assertEqual('heat', heat_md[0][0])
        self.assertEqual(
            SOFTWARE_CONFIG_DATA['deployments'], heat_md[0][1]['deployments'])
        self.assertEqual(
            ('dep-name1', {'config1': 'value1'}), heat_md[1])
--------------------------------------------------------------------------------
/os_collect_config/tests/test_request.py:
--------------------------------------------------------------------------------
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import calendar
import json
import time

import fixtures
from oslo_config import cfg
import requests
import testtools
from testtools import matchers

from os_collect_config import collect
from os_collect_config import exc
from os_collect_config import request


META_DATA = {'int1': 1,
             'strfoo': 'foo',
             'map_ab': {
                 'a': 'apple',
                 'b': 'banana',
             }}


SOFTWARE_CONFIG_DATA = {
    'old-style': 'value',
    'deployments': [
        {
            'inputs': [
                {
                    'type': 'String',
                    'name': 'input1',
                    'value': 'value1'
                }
            ],
            'group': 'Heat::Ungrouped',
            'name': 'dep-name1',
            'outputs': None,
            'options': None,
            'config': {
                'config1': 'value1'
            }
        },
        {
            'inputs': [
                {
                    'type': 'String',
                    'name': 'input1',
                    'value': 'value1'
                }
            ],
            'group': 'os-apply-config',
            'name': 'dep-name2',
            'outputs': None,
            'options': None,
            'config': {
                'config2': 'value2'
            }
        },
        {
            'inputs': [
                {
                    'type': 'String',
                    'name': 'input1',
                    'value': 'value1'
                }
            ],
            'name': 'dep-name3',
            'outputs': None,
            'options': None,
            'config': {
                'config3': 'value3'
            }
        },
        {
            'inputs': [],
            'group': 'ignore_me',
            'name': 'ignore_me_name',
            'outputs': None,
            'options': None,
            'config': 'ignore_me_config'
        }
    ]
}


class FakeResponse(dict):
    # Minimal stand-in for requests.Response; only .text, .headers and
    # raise_for_status() are used by the collector.
    def __init__(self, text, headers=None):
        self.text = text
        self.headers = headers

    def raise_for_status(self):
        pass


class FakeRequests:
    # Fake requests module serving META_DATA with a fresh last-modified.
    exceptions = requests.exceptions

    class Session:
        def get(self, url, timeout=None):
            return FakeResponse(json.dumps(META_DATA))

        def head(self, url, timeout=None):
            return FakeResponse('', headers={
                'last-modified': time.strftime(
                    "%a, %d %b %Y %H:%M:%S %Z", time.gmtime())})


class FakeFailRequests:
    # Fake requests module whose every call fails with 403.
    exceptions = requests.exceptions

    class Session:
        def get(self, url, timeout=None):
            raise requests.exceptions.HTTPError(403, 'Forbidden')

        def head(self, url, timeout=None):
            raise requests.exceptions.HTTPError(403, 'Forbidden')


class FakeRequestsSoftwareConfig:
    # Fake requests module serving software-config style metadata.

    class Session:
        def get(self, url, timeout=None):
            return FakeResponse(json.dumps(SOFTWARE_CONFIG_DATA))

        def head(self, url, timeout=None):
            return FakeResponse('', headers={
                'last-modified': time.strftime(
                    "%a, %d %b %Y %H:%M:%S %Z", time.gmtime())})


class TestRequestBase(testtools.TestCase):
    # Shared setUp: fake logger and a configured request metadata_url.
    def setUp(self):
        super().setUp()
        self.log = self.useFixture(fixtures.FakeLogger())
        collect.setup_conf()
        cfg.CONF.request.metadata_url = 'http://192.0.2.1:8000/my_metadata'


class TestRequest(TestRequestBase):

    def test_collect_request(self):
        req_collect = request.Collector(requests_impl=FakeRequests)
        self.assertIsNone(req_collect.last_modified)
        req_md = req_collect.collect()
        # a successful collect records the last-modified timestamp
        self.assertIsNotNone(req_collect.last_modified)
        self.assertThat(req_md, matchers.IsInstance(list))
        self.assertEqual('request', req_md[0][0])
        req_md = req_md[0][1]

        for k in ('int1', 'strfoo', 'map_ab'):
            self.assertIn(k, req_md)
            self.assertEqual(req_md[k], META_DATA[k])

        self.assertEqual('', self.log.output)

    def test_collect_request_fail(self):
        req_collect = request.Collector(requests_impl=FakeFailRequests)
        self.assertRaises(exc.RequestMetadataNotAvailable, req_collect.collect)
        self.assertIn('Forbidden', self.log.output)

    def test_collect_request_no_metadata_url(self):
        cfg.CONF.request.metadata_url = None
        req_collect = request.Collector(requests_impl=FakeRequests)
        self.assertRaises(exc.RequestMetadataNotConfigured,
                          req_collect.collect)
        self.assertIn('No metadata_url configured', self.log.output)

    def test_check_fetch_content(self):
        """Exercises the last-modified freshness check in isolation."""
        req_collect = request.Collector()

        now_secs = calendar.timegm(time.gmtime())
        now_str = time.strftime("%a, %d %b %Y %H:%M:%S %Z",
                                time.gmtime(now_secs))

        future_secs = calendar.timegm(time.gmtime()) + 10
        future_str = time.strftime("%a, %d %b %Y %H:%M:%S %Z",
                                   time.gmtime(future_secs))

        past_secs = calendar.timegm(time.gmtime()) - 10
        past_str = time.strftime("%a, %d %b %Y %H:%M:%S %Z",
                                 time.gmtime(past_secs))

        self.assertIsNone(req_collect.last_modified)

        # first run always collects
        self.assertEqual(
            now_secs,
            req_collect.check_fetch_content({'last-modified': now_str}))

        # second run unmodified, does not collect
        req_collect.last_modified = now_secs
        self.assertRaises(exc.RequestMetadataNotAvailable,
                          req_collect.check_fetch_content,
                          {'last-modified': now_str})

        # run with later date, collects
        self.assertEqual(
            future_secs,
            req_collect.check_fetch_content({'last-modified': future_str}))

        # run with earlier date, does not collect
        self.assertRaises(exc.RequestMetadataNotAvailable,
                          req_collect.check_fetch_content,
                          {'last-modified': past_str})

        # run no last-modified header, collects
        self.assertIsNone(req_collect.check_fetch_content({}))


class TestRequestSoftwareConfig(TestRequestBase):

    def test_collect_request(self):
        req_collect = request.Collector(
            requests_impl=FakeRequestsSoftwareConfig)
        req_md = req_collect.collect()
        self.assertEqual(4, len(req_md))
        self.assertEqual(
            SOFTWARE_CONFIG_DATA['deployments'],
req_md[0][1]['deployments']) 234 | self.assertEqual( 235 | ('dep-name1', {'config1': 'value1'}), req_md[1]) 236 | self.assertEqual( 237 | ('dep-name2', {'config2': 'value2'}), req_md[2]) 238 | self.assertEqual( 239 | ('dep-name3', {'config3': 'value3'}), req_md[3]) 240 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | -------------------------------------------------------------------------------- /os_collect_config/tests/test_cfn.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import tempfile

import fixtures
from lxml import etree
from oslo_config import cfg
import requests
import testtools
from testtools import content as test_content
from testtools import matchers
import urllib.parse as urlparse

from os_collect_config import cfn
from os_collect_config import collect
from os_collect_config import exc


# Plain (old-style) metadata payload served by the fake CFN endpoint.
META_DATA = {
    'int1': 1,
    'strfoo': 'foo',
    'map_ab': {
        'a': 'apple',
        'b': 'banana',
    },
}


# Software-config style payload; deployments in unknown groups such as
# 'ignore_me' must not be exposed as separate config sources.
SOFTWARE_CONFIG_DATA = {
    'old-style': 'value',
    'deployments': [
        {
            'inputs': [
                {
                    'type': 'String',
                    'name': 'input1',
                    'value': 'value1',
                },
            ],
            'group': 'Heat::Ungrouped',
            'name': 'dep-name1',
            'outputs': None,
            'options': None,
            'config': {
                'config1': 'value1',
            },
        },
        {
            'inputs': [
                {
                    'type': 'String',
                    'name': 'input1',
                    'value': 'value1',
                },
            ],
            'group': 'os-apply-config',
            'name': 'dep-name2',
            'outputs': None,
            'options': None,
            'config': {
                'config2': 'value2',
            },
        },
        {
            'inputs': [
                {
                    'type': 'String',
                    'name': 'input1',
                    'value': 'value1',
                },
            ],
            'name': 'dep-name3',
            'outputs': None,
            'options': None,
            'config': {
                'config3': 'value3',
            },
        },
        {
            'inputs': [],
            'group': 'ignore_me',
            'name': 'ignore_me_name',
            'outputs': None,
            'options': None,
            'config': 'ignore_me_config',
        },
    ],
}


# 'deployments' that is not a list must be passed through untouched.
SOFTWARE_CONFIG_IMPOSTER_DATA = {
    'old-style': 'value',
    'deployments': {
        "not": "a list",
    },
}


class FakeResponse(dict):
    """Minimal stand-in for a requests.Response object."""

    def __init__(self, text):
        self.text = text

    def raise_for_status(self):
        # Always succeeds; errors are exercised via FakeFailRequests.
        pass


class FakeReqSession:
    """Session double that validates the signed DescribeStackResource call.

    Every assertion on the URL, headers and query parameters runs inside
    get(), so a mis-built request fails the test that issued it.
    """

    SESSION_META_DATA = META_DATA

    def __init__(self, testcase, expected_netloc):
        self._test = testcase
        self._expected_netloc = expected_netloc
        self.verify = False

    def get(self, url, params, headers, verify=None, timeout=None):
        self._test.addDetail('url', test_content.text_content(url))
        url = urlparse.urlparse(url)
        self._test.assertEqual(self._expected_netloc, url.netloc)
        self._test.assertEqual('/v1/', url.path)
        self._test.assertEqual('application/json',
                               headers['Content-Type'])
        self._test.assertIn('SignatureVersion', params)
        self._test.assertEqual('2', params['SignatureVersion'])
        self._test.assertIn('Signature', params)
        self._test.assertIn('Action', params)
        self._test.assertEqual('DescribeStackResource',
                               params['Action'])
        self._test.assertIn('LogicalResourceId', params)
        self._test.assertEqual('foo', params['LogicalResourceId'])
        self._test.assertEqual(10, timeout)
        # Build the XML response the real CFN API would return, with the
        # metadata embedded as a JSON string in <Metadata>.
        root = etree.Element('DescribeStackResourceResponse')
        result = etree.SubElement(root, 'DescribeStackResourceResult')
        detail = etree.SubElement(result, 'StackResourceDetail')
        metadata = etree.SubElement(detail, 'Metadata')
        metadata.text = json.dumps(self.SESSION_META_DATA)
        if verify is not None:
            # Record that the caller passed a CA verification argument.
            self.verify = True
        return FakeResponse(etree.tostring(root))


class FakeRequests:
    """Fake requests module producing FakeReqSession sessions."""

    exceptions = requests.exceptions

    def __init__(self, testcase, expected_netloc='192.0.2.1:8000'):
        self._test = testcase
        self._expected_netloc = expected_netloc

    def Session(self):
        return FakeReqSession(self._test, self._expected_netloc)


class FakeReqSessionSoftwareConfig(FakeReqSession):

    SESSION_META_DATA = SOFTWARE_CONFIG_DATA


class FakeRequestsSoftwareConfig(FakeRequests):

    FAKE_SESSION = FakeReqSessionSoftwareConfig

    def Session(self):
        return self.FAKE_SESSION(self._test, self._expected_netloc)


class FakeReqSessionConfigImposter(FakeReqSession):

    SESSION_META_DATA = SOFTWARE_CONFIG_IMPOSTER_DATA


class FakeRequestsConfigImposter(FakeRequestsSoftwareConfig):

    FAKE_SESSION = FakeReqSessionConfigImposter


class FakeFailRequests:
    """Fake requests module whose every call raises HTTP 403."""

    exceptions = requests.exceptions

    class Session:
        def get(self, url, params, headers, verify=None, timeout=None):
            raise requests.exceptions.HTTPError(403, 'Forbidden')


class TestCfnBase(testtools.TestCase):
    """Shared setup: captured logging, a hint file and CFN credentials."""

    def setUp(self):
        super().setUp()
        self.log = self.useFixture(fixtures.FakeLogger())
        self.useFixture(fixtures.NestedTempfile())
        self.hint_file = tempfile.NamedTemporaryFile()
        self.hint_file.write(b'http://192.0.2.1:8000')
        self.hint_file.flush()
        self.addCleanup(self.hint_file.close)
        collect.setup_conf()
        cfg.CONF.cfn.heat_metadata_hint = self.hint_file.name
        cfg.CONF.cfn.metadata_url = None
        cfg.CONF.cfn.path = ['foo.Metadata']
        cfg.CONF.cfn.access_key_id = '0123456789ABCDEF'
        cfg.CONF.cfn.secret_access_key = 'FEDCBA9876543210'


class TestCfn(TestCfnBase):
    def test_collect_cfn(self):
        """A successful fetch yields the old-style metadata unchanged."""
        cfn_md = cfn.Collector(requests_impl=FakeRequests(self)).collect()
        self.assertThat(cfn_md, matchers.IsInstance(list))
        self.assertEqual('cfn', cfn_md[0][0])
        cfn_md = cfn_md[0][1]

        for key in ('int1', 'strfoo', 'map_ab'):
            self.assertIn(key, cfn_md)
            self.assertEqual(cfn_md[key], META_DATA[key])

        self.assertEqual('', self.log.output)

    def test_collect_with_ca_cert(self):
        """Configuring a CA certificate makes the session verify TLS."""
        cfn.CONF.cfn.ca_certificate = "foo"
        collector = cfn.Collector(requests_impl=FakeRequests(self))
        collector.collect()
        self.assertTrue(collector._session.verify)

    def test_collect_cfn_fail(self):
        """HTTP errors surface as CfnMetadataNotAvailable and are logged."""
        cfn_collect = cfn.Collector(requests_impl=FakeFailRequests)
        self.assertRaises(exc.CfnMetadataNotAvailable, cfn_collect.collect)
        self.assertIn('Forbidden', self.log.output)

    def test_collect_cfn_no_path(self):
        """A missing path option is a configuration error."""
        cfg.CONF.cfn.path = None
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        self.assertRaises(exc.CfnMetadataNotConfigured, cfn_collect.collect)
        self.assertIn('No path configured', self.log.output)

    def test_collect_cfn_bad_path(self):
        """A path without a dotted sub-key is a configuration error."""
        cfg.CONF.cfn.path = ['foo']
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        self.assertRaises(exc.CfnMetadataNotConfigured, cfn_collect.collect)
        self.assertIn('Path not in format', self.log.output)

    def test_collect_cfn_no_metadata_url(self):
        """Neither a hint file nor a metadata_url is a configuration error."""
        cfg.CONF.cfn.heat_metadata_hint = None
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        self.assertRaises(exc.CfnMetadataNotConfigured, cfn_collect.collect)
        self.assertIn('No metadata_url configured', self.log.output)

    def test_collect_cfn_missing_sub_path(self):
        """A sub-key absent from the metadata is not-available, not fatal."""
        cfg.CONF.cfn.path = ['foo.Metadata.not_there']
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        self.assertRaises(exc.CfnMetadataNotAvailable, cfn_collect.collect)
        self.assertIn('Sub-key not_there does not exist', self.log.output)

    def test_collect_cfn_sub_path(self):
        """A deeper path returns only the selected sub-mapping."""
        cfg.CONF.cfn.path = ['foo.Metadata.map_ab']
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        content = cfn_collect.collect()
        self.assertThat(content, matchers.IsInstance(list))
        self.assertEqual('cfn', content[0][0])
        content = content[0][1]
        self.assertIn('b', content)
        self.assertEqual('banana', content['b'])

    def test_collect_cfn_metadata_url_overrides_hint(self):
        """An explicit metadata_url wins over the heat_metadata_hint file."""
        cfg.CONF.cfn.metadata_url = 'http://127.0.1.1:8000/v1/'
        cfn_collect = cfn.Collector(
            requests_impl=FakeRequests(self,
                                       expected_netloc='127.0.1.1:8000'))
        cfn_collect.collect()


class TestCfnSoftwareConfig(TestCfnBase):
    def test_collect_cfn_software_config(self):
        """Deployments in handled groups become separate config sources."""
        cfn_md = cfn.Collector(
            requests_impl=FakeRequestsSoftwareConfig(self)).collect()
        self.assertThat(cfn_md, matchers.IsInstance(list))
        self.assertEqual('cfn', cfn_md[0][0])
        cfn_config = cfn_md[0][1]
        self.assertThat(cfn_config, matchers.IsInstance(dict))
        self.assertEqual({'old-style', 'deployments'},
                         set(cfn_config.keys()))
        self.assertIn('deployments', cfn_config)
        self.assertThat(cfn_config['deployments'], matchers.IsInstance(list))
        self.assertEqual(4, len(cfn_config['deployments']))
        deployment = cfn_config['deployments'][0]
        self.assertIn('inputs', deployment)
        self.assertThat(deployment['inputs'], matchers.IsInstance(list))
        self.assertEqual(1, len(deployment['inputs']))
        self.assertEqual('dep-name1', cfn_md[1][0])
        self.assertEqual('value1', cfn_md[1][1]['config1'])
        self.assertEqual('dep-name2', cfn_md[2][0])
        self.assertEqual('value2', cfn_md[2][1]['config2'])

    def test_collect_cfn_deployments_not_list(self):
        """A non-list 'deployments' value is passed through untouched."""
        cfn_md = cfn.Collector(
            requests_impl=FakeRequestsConfigImposter(self)).collect()
        self.assertEqual(1, len(cfn_md))
        self.assertEqual('cfn', cfn_md[0][0])
        self.assertIn('not', cfn_md[0][1]['deployments'])
        self.assertEqual('a list', cfn_md[0][1]['deployments']['not'])
-------------------------------------------------------------------------------- /os_collect_config/tests/test_zaqar.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 11 | # implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import json 16 | from unittest import mock 17 | 18 | import fixtures 19 | from keystoneclient import discover as ks_discover 20 | from oslo_config import cfg 21 | from oslo_config import fixture as config_fixture 22 | import testtools 23 | from testtools import matchers 24 | from zaqarclient.queues.v2 import message 25 | from zaqarclient import transport 26 | from zaqarclient.transport import response 27 | 28 | from os_collect_config import collect 29 | from os_collect_config import exc 30 | from os_collect_config.tests import test_heat 31 | from os_collect_config import zaqar 32 | 33 | 34 | class FakeKeystoneClient(test_heat.FakeKeystoneClient): 35 | 36 | def url_for(self, service_type, endpoint_type): 37 | self._test.assertEqual('messaging', service_type) 38 | self._test.assertEqual('publicURL', endpoint_type) 39 | return 'http://192.0.2.1:8888/' 40 | 41 | 42 | class FakeKeystoneClientWebsocket(test_heat.FakeKeystoneClient): 43 | 44 | def url_for(self, service_type, endpoint_type): 45 | self._test.assertEqual('messaging-websocket', service_type) 46 | self._test.assertEqual('publicURL', endpoint_type) 47 | return 'ws://127.0.0.1:9000/' 48 | 49 | 50 | class 
FakeZaqarClient: 51 | 52 | def __init__(self, testcase): 53 | self._test = testcase 54 | 55 | def Client(self, endpoint, conf, version): 56 | self._test.assertEqual(2, version) 57 | self._test.assertEqual('http://192.0.2.1:8888/', endpoint) 58 | return self 59 | 60 | def queue(self, queue_id): 61 | self._test.assertEqual( 62 | '4f3f46d3-09f1-42a7-8c13-f91a5457192c', queue_id) 63 | return FakeQueue() 64 | 65 | 66 | class FakeZaqarWebsocketClient: 67 | 68 | def __init__(self, options, messages=None, testcase=None): 69 | self._messages = messages 70 | self._test = testcase 71 | 72 | def send(self, request): 73 | self._test.assertEqual('ws://127.0.0.1:9000/', request.endpoint) 74 | if request.operation == 'message_list': 75 | body = json.loads(request.content) 76 | self._test.assertEqual( 77 | '4f3f46d3-09f1-42a7-8c13-f91a5457192c', body['queue_name']) 78 | return response.Response(request, content=json.dumps(self._messages), 79 | status_code=200) 80 | 81 | def recv(self): 82 | return {'body': test_heat.META_DATA} 83 | 84 | def __enter__(self): 85 | return self 86 | 87 | def __exit__(self, *exc): 88 | pass 89 | 90 | 91 | class FakeQueue: 92 | 93 | def pop(self): 94 | return iter([message.Message( 95 | queue=self, ttl=10, age=10, body=test_heat.META_DATA, href='')]) 96 | 97 | 98 | class FakeZaqarClientSoftwareConfig: 99 | 100 | def __init__(self, testcase): 101 | self._test = testcase 102 | 103 | def Client(self, endpoint, conf, version): 104 | self._test.assertEqual(2, version) 105 | self._test.assertEqual('http://192.0.2.1:8888/', endpoint) 106 | return self 107 | 108 | def queue(self, queue_id): 109 | self._test.assertEqual( 110 | '4f3f46d3-09f1-42a7-8c13-f91a5457192c', queue_id) 111 | return FakeQueueSoftwareConfig() 112 | 113 | 114 | class FakeQueueSoftwareConfig: 115 | 116 | def pop(self): 117 | return iter([message.Message( 118 | queue=self, ttl=10, age=10, body=test_heat.SOFTWARE_CONFIG_DATA, 119 | href='')]) 120 | 121 | 122 | class 
TestZaqar(testtools.TestCase): 123 | def setUp(self): 124 | super().setUp() 125 | self.log = self.useFixture(fixtures.FakeLogger()) 126 | self.useFixture(fixtures.NestedTempfile()) 127 | collect.setup_conf() 128 | 129 | conf = config_fixture.Config() 130 | self.useFixture(conf) 131 | conf.config(group='zaqar', use_websockets=False) 132 | conf.config(group='zaqar', auth_url='http://192.0.2.1:5000/v3') 133 | conf.config(group='zaqar', user_id='0123456789ABCDEF') 134 | conf.config(group='zaqar', password='FEDCBA9876543210') 135 | conf.config(group='zaqar', 136 | project_id='9f6b09df-4d7f-4a33-8ec3-9924d8f46f10') 137 | conf.config(group='zaqar', 138 | queue_id='4f3f46d3-09f1-42a7-8c13-f91a5457192c') 139 | conf.config(group='zaqar', ssl_certificate_validation=True) 140 | conf.config(group='zaqar', ca_file='/foo/bar') 141 | 142 | @mock.patch.object(ks_discover.Discover, '__init__') 143 | @mock.patch.object(ks_discover.Discover, 'url_for') 144 | def test_collect_zaqar(self, mock_url_for, mock___init__): 145 | mock___init__.return_value = None 146 | mock_url_for.return_value = cfg.CONF.zaqar.auth_url 147 | zaqar_md = zaqar.Collector( 148 | keystoneclient=FakeKeystoneClient(self, cfg.CONF.zaqar), 149 | zaqarclient=FakeZaqarClient(self), 150 | discover_class=test_heat.FakeKeystoneDiscover).collect() 151 | self.assertThat(zaqar_md, matchers.IsInstance(list)) 152 | self.assertEqual('zaqar', zaqar_md[0][0]) 153 | zaqar_md = zaqar_md[0][1] 154 | 155 | for k in ('int1', 'strfoo', 'map_ab'): 156 | self.assertIn(k, zaqar_md) 157 | self.assertEqual(zaqar_md[k], test_heat.META_DATA[k]) 158 | 159 | @mock.patch.object(ks_discover.Discover, '__init__') 160 | @mock.patch.object(ks_discover.Discover, 'url_for') 161 | def test_collect_zaqar_deployments(self, mock_url_for, mock___init__): 162 | mock___init__.return_value = None 163 | mock_url_for.return_value = cfg.CONF.zaqar.auth_url 164 | zaqar_md = zaqar.Collector( 165 | keystoneclient=FakeKeystoneClient(self, cfg.CONF.zaqar), 166 | 
zaqarclient=FakeZaqarClientSoftwareConfig(self), 167 | discover_class=test_heat.FakeKeystoneDiscover).collect() 168 | self.assertThat(zaqar_md, matchers.IsInstance(list)) 169 | self.assertEqual('zaqar', zaqar_md[0][0]) 170 | self.assertEqual(2, len(zaqar_md)) 171 | self.assertEqual('zaqar', zaqar_md[0][0]) 172 | self.assertEqual( 173 | test_heat.SOFTWARE_CONFIG_DATA['deployments'], 174 | zaqar_md[0][1]['deployments']) 175 | self.assertEqual( 176 | ('dep-name1', {'config1': 'value1'}), zaqar_md[1]) 177 | 178 | @mock.patch.object(ks_discover.Discover, '__init__') 179 | @mock.patch.object(ks_discover.Discover, 'url_for') 180 | def test_collect_zaqar_fail(self, mock_url_for, mock___init__): 181 | mock___init__.return_value = None 182 | mock_url_for.return_value = cfg.CONF.zaqar.auth_url 183 | zaqar_collect = zaqar.Collector( 184 | keystoneclient=test_heat.FakeFailKeystoneClient( 185 | self, cfg.CONF.zaqar), 186 | zaqarclient=FakeZaqarClient(self), 187 | discover_class=test_heat.FakeKeystoneDiscover) 188 | self.assertRaises(exc.ZaqarMetadataNotAvailable, zaqar_collect.collect) 189 | self.assertIn('Forbidden', self.log.output) 190 | 191 | def test_collect_zaqar_no_auth_url(self): 192 | cfg.CONF.zaqar.auth_url = None 193 | zaqar_collect = zaqar.Collector() 194 | self.assertRaises( 195 | exc.ZaqarMetadataNotConfigured, zaqar_collect.collect) 196 | self.assertIn('No auth_url configured', self.log.output) 197 | 198 | def test_collect_zaqar_no_password(self): 199 | cfg.CONF.zaqar.password = None 200 | zaqar_collect = zaqar.Collector() 201 | self.assertRaises( 202 | exc.ZaqarMetadataNotConfigured, zaqar_collect.collect) 203 | self.assertIn('No password configured', self.log.output) 204 | 205 | def test_collect_zaqar_no_project_id(self): 206 | cfg.CONF.zaqar.project_id = None 207 | zaqar_collect = zaqar.Collector() 208 | self.assertRaises( 209 | exc.ZaqarMetadataNotConfigured, zaqar_collect.collect) 210 | self.assertIn('No project_id configured', self.log.output) 211 | 212 | 
    def test_collect_zaqar_no_user_id(self):
        """Missing user_id marks the collector as unconfigured."""
        cfg.CONF.zaqar.user_id = None
        zaqar_collect = zaqar.Collector()
        self.assertRaises(
            exc.ZaqarMetadataNotConfigured, zaqar_collect.collect)
        self.assertIn('No user_id configured', self.log.output)

    def test_collect_zaqar_no_queue_id(self):
        """Missing queue_id marks the collector as unconfigured."""
        cfg.CONF.zaqar.queue_id = None
        zaqar_collect = zaqar.Collector()
        self.assertRaises(
            exc.ZaqarMetadataNotConfigured, zaqar_collect.collect)
        self.assertIn('No queue_id configured', self.log.output)

    def test_collect_zaqar_no_ca_file(self):
        """SSL validation enabled without a CA file is unconfigured."""
        cfg.CONF.zaqar.ssl_certificate_validation = True
        cfg.CONF.zaqar.ca_file = None
        zaqar_collect = zaqar.Collector()
        self.assertRaises(
            exc.ZaqarMetadataNotConfigured, zaqar_collect.collect)
        expected = ('No CA file configured when flag ssl certificate '
                    'validation is on.')
        self.assertIn(expected, self.log.output)

    @mock.patch.object(transport, 'get_transport_for')
    @mock.patch.object(ks_discover.Discover, '__init__')
    @mock.patch.object(ks_discover.Discover, 'url_for')
    def test_collect_zaqar_websocket(self, mock_url_for, mock___init__,
                                     mock_transport):
        """The websocket transport delivers metadata like the HTTP path."""
        mock___init__.return_value = None
        mock_url_for.return_value = cfg.CONF.zaqar.auth_url
        conf = config_fixture.Config()
        self.useFixture(conf)
        conf.config(group='zaqar', use_websockets=True)
        messages = {'messages': [{'body': test_heat.META_DATA, 'id': 1}]}
        ws = FakeZaqarWebsocketClient({}, messages=messages, testcase=self)
        mock_transport.return_value = ws
        zaqar_md = zaqar.Collector(
            keystoneclient=FakeKeystoneClientWebsocket(self, cfg.CONF.zaqar)
        ).collect()
        self.assertThat(zaqar_md, matchers.IsInstance(list))
        self.assertEqual('zaqar', zaqar_md[0][0])
        zaqar_md = zaqar_md[0][1]

        for k in ('int1', 'strfoo', 'map_ab'):
            self.assertIn(k, zaqar_md)
            self.assertEqual(zaqar_md[k], test_heat.META_DATA[k])

    @mock.patch.object(transport, 'get_transport_for')
    @mock.patch.object(ks_discover.Discover, '__init__')
    @mock.patch.object(ks_discover.Discover, 'url_for')
    def test_collect_zaqar_websocket_recv(self, mock_url_for, mock___init__,
                                          mock_transport):
        """With no queued messages the websocket recv() path is exercised."""
        mock___init__.return_value = None
        mock_url_for.return_value = cfg.CONF.zaqar.auth_url
        ws = FakeZaqarWebsocketClient({}, messages={}, testcase=self)
        mock_transport.return_value = ws
        conf = config_fixture.Config()
        self.useFixture(conf)
        conf.config(group='zaqar', use_websockets=True)
        zaqar_md = zaqar.Collector(
            keystoneclient=FakeKeystoneClientWebsocket(self, cfg.CONF.zaqar),
        ).collect()
        self.assertThat(zaqar_md, matchers.IsInstance(list))
        self.assertEqual('zaqar', zaqar_md[0][0])
        zaqar_md = zaqar_md[0][1]

        for k in ('int1', 'strfoo', 'map_ab'):
            self.assertIn(k, zaqar_md)
            self.assertEqual(zaqar_md[k], test_heat.META_DATA[k])
--------------------------------------------------------------------------------
/os_collect_config/collect.py:
--------------------------------------------------------------------------------
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
15 | 16 | import hashlib 17 | import json 18 | import os 19 | import random 20 | import shutil 21 | import signal 22 | import subprocess 23 | import sys 24 | import time 25 | 26 | from oslo_config import cfg 27 | from oslo_log import log 28 | 29 | from os_collect_config import cache 30 | from os_collect_config import cfn 31 | from os_collect_config import ec2 32 | from os_collect_config import exc 33 | from os_collect_config import heat 34 | from os_collect_config import heat_local 35 | from os_collect_config import keystone 36 | from os_collect_config import local 37 | from os_collect_config import request 38 | from os_collect_config import version 39 | from os_collect_config import zaqar 40 | 41 | DEFAULT_COLLECTORS = ['heat_local', 'ec2', 'cfn', 'heat', 'request', 'local', 42 | 'zaqar'] 43 | 44 | opts = [ 45 | cfg.StrOpt('command', short='c', 46 | help='Command to run on metadata changes. If specified,' 47 | ' os-collect-config will continue to run until killed. If' 48 | ' not specified, os-collect-config will print the' 49 | ' collected data as a json map and exit.'), 50 | cfg.StrOpt('cachedir', 51 | default='/var/lib/os-collect-config', 52 | help='Directory in which to store local cache of metadata'), 53 | cfg.StrOpt('backup-cachedir', 54 | default='/var/run/os-collect-config', 55 | help='Copy cache contents to this directory as well.'), 56 | cfg.MultiStrOpt( 57 | 'collectors', 58 | positional=True, 59 | default=DEFAULT_COLLECTORS, 60 | help='List the collectors to use. When command is specified the' 61 | 'collections will be emitted in the order given by this option.' 62 | ' (default: %s)' % ' '.join(DEFAULT_COLLECTORS)), 63 | cfg.BoolOpt('one-time', 64 | default=False, 65 | help='Pass this option to make os-collect-config exit after' 66 | ' one execution of command. 
This behavior is implied if no' 67 | ' command is specified.'), 68 | cfg.FloatOpt('min-polling-interval', default=1, 69 | help='When running continuously, pause a minimum of this' 70 | ' many seconds between collecting data.'), 71 | cfg.FloatOpt('polling-interval', short='i', default=30, 72 | help='When running continuously, pause a maximum of this' 73 | ' many seconds between collecting data. If changes' 74 | ' are detected shorter sleeps intervals are gradually' 75 | ' increased to this maximum polling interval.'), 76 | cfg.BoolOpt('print-cachedir', 77 | default=False, 78 | help='Print out the value of cachedir and exit immediately.'), 79 | cfg.BoolOpt('force', 80 | default=False, 81 | help='Pass this to force running the command even if nothing' 82 | ' has changed. Implies --one-time.'), 83 | cfg.BoolOpt('print', dest='print_only', 84 | default=False, 85 | help='Query normally, print the resulting configs as a json' 86 | ' map, and exit immediately without running command if it is' 87 | ' configured.'), 88 | cfg.MultiStrOpt('deployment-key', 89 | default=['deployments'], 90 | help='Key(s) to explode into multiple collected outputs. ' 91 | 'Parsed according to the expected Metadata created by ' 92 | 'OS::Heat::StructuredDeployment. Only exploded if seen at ' 93 | 'the root of the Metadata.'), 94 | cfg.FloatOpt('splay', 95 | default=0, 96 | help='Use this option to sleep for a random amount of time ' 97 | 'prior to starting the collect process. Takes a maximum ' 98 | 'number of seconds to wait before beginning collection ' 99 | 'as an argument. Disabled when set to 0. This option ' 100 | 'can help ensure that multiple collect processes ' 101 | '(on different hosts) do not attempt to poll at the ' 102 | 'exact same time if they were all started at the same ' 103 | 'time. 
Ignored if --one-time or --force is used.'), 104 | ] 105 | 106 | CONF = cfg.CONF 107 | logger = log.getLogger('os-collect-config') 108 | 109 | COLLECTORS = {ec2.name: ec2, 110 | cfn.name: cfn, 111 | heat.name: heat, 112 | heat_local.name: heat_local, 113 | local.name: local, 114 | request.name: request, 115 | zaqar.name: zaqar} 116 | 117 | 118 | def setup_conf(): 119 | ec2_group = cfg.OptGroup(name='ec2', 120 | title='EC2 Metadata options') 121 | 122 | cfn_group = cfg.OptGroup(name='cfn', 123 | title='CloudFormation API Metadata options') 124 | 125 | heat_local_group = cfg.OptGroup(name='heat_local', 126 | title='Heat Local Metadata options') 127 | 128 | local_group = cfg.OptGroup(name='local', 129 | title='Local Metadata options') 130 | 131 | heat_group = cfg.OptGroup(name='heat', 132 | title='Heat Metadata options') 133 | 134 | zaqar_group = cfg.OptGroup(name='zaqar', 135 | title='Zaqar queue options') 136 | 137 | request_group = cfg.OptGroup(name='request', 138 | title='Request Metadata options') 139 | 140 | keystone_group = cfg.OptGroup(name='keystone', 141 | title='Keystone auth options') 142 | 143 | CONF.register_group(ec2_group) 144 | CONF.register_group(cfn_group) 145 | CONF.register_group(heat_local_group) 146 | CONF.register_group(local_group) 147 | CONF.register_group(heat_group) 148 | CONF.register_group(request_group) 149 | CONF.register_group(keystone_group) 150 | CONF.register_group(zaqar_group) 151 | CONF.register_cli_opts(ec2.opts, group='ec2') 152 | CONF.register_cli_opts(cfn.opts, group='cfn') 153 | CONF.register_cli_opts(heat_local.opts, group='heat_local') 154 | CONF.register_cli_opts(local.opts, group='local') 155 | CONF.register_cli_opts(heat.opts, group='heat') 156 | CONF.register_cli_opts(request.opts, group='request') 157 | CONF.register_cli_opts(keystone.opts, group='keystone') 158 | CONF.register_cli_opts(zaqar.opts, group='zaqar') 159 | 160 | CONF.register_cli_opts(opts) 161 | log.register_options(CONF) 162 | 163 | 164 | def 
collect_all(collectors, store=False, collector_kwargs_map=None): 165 | changed_keys = set() 166 | all_keys = list() 167 | if store: 168 | paths_or_content = [] 169 | else: 170 | paths_or_content = {} 171 | 172 | for collector in collectors: 173 | module = COLLECTORS[collector] 174 | if collector_kwargs_map and collector in collector_kwargs_map: 175 | collector_kwargs = collector_kwargs_map[collector] 176 | else: 177 | collector_kwargs = {} 178 | 179 | try: 180 | content = module.Collector(**collector_kwargs).collect() 181 | except exc.SourceNotAvailable: 182 | logger.warning('Source [%s] Unavailable.' % collector) 183 | continue 184 | except exc.SourceNotConfigured: 185 | logger.debug('Source [%s] Not configured.' % collector) 186 | continue 187 | 188 | if store: 189 | for output_key, output_content in content: 190 | all_keys.append(output_key) 191 | (changed, path) = cache.store(output_key, output_content) 192 | if changed: 193 | changed_keys.add(output_key) 194 | paths_or_content.append(path) 195 | else: 196 | paths_or_content.update(content) 197 | 198 | if changed_keys: 199 | cache.store_meta_list('os_config_files', all_keys) 200 | if os.path.exists(CONF.backup_cachedir): 201 | shutil.rmtree(CONF.backup_cachedir) 202 | if os.path.exists(CONF.cachedir): 203 | shutil.copytree(CONF.cachedir, CONF.backup_cachedir) 204 | return (changed_keys, paths_or_content) 205 | 206 | 207 | def reexec_self(signal=None, frame=None): 208 | if signal: 209 | logger.info('Signal received. 
Re-executing %s' % sys.argv) 210 | # Close all but stdin/stdout/stderr 211 | os.closerange(3, 255) 212 | os.execv(sys.argv[0], sys.argv) 213 | 214 | 215 | def call_command(files, command): 216 | env = dict(os.environ) 217 | env["OS_CONFIG_FILES"] = ':'.join(files) 218 | logger.info("Executing %s with OS_CONFIG_FILES=%s" % 219 | (command, env["OS_CONFIG_FILES"])) 220 | subprocess.check_call(CONF.command, env=env, shell=True) 221 | 222 | 223 | def getfilehash(files): 224 | """Calculates the md5sum of the contents of a list of files. 225 | 226 | For each readable file in the provided list returns the md5sum of the 227 | concatenation of each file 228 | :param files: a list of files to be read 229 | :returns: string -- resulting md5sum 230 | """ 231 | m = hashlib.md5() 232 | for filename in files: 233 | try: 234 | with open(filename) as fp: 235 | data = fp.read() 236 | m.update(data.encode('utf-8')) 237 | except OSError: 238 | pass 239 | return m.hexdigest() 240 | 241 | 242 | def main(args=sys.argv, collector_kwargs_map=None): 243 | signal.signal(signal.SIGHUP, reexec_self) 244 | # NOTE(bnemec): We need to exit on SIGPIPEs so systemd can restart us. 245 | # See lp 1795030 246 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) 247 | setup_conf() 248 | CONF(args=args[1:], prog="os-collect-config", 249 | version=version.version_info.version_string()) 250 | 251 | # This resets the logging infrastructure which prevents capturing log 252 | # output in tests cleanly, so should only be called if there isn't already 253 | # handlers defined i.e. not in unit tests 254 | if not log.getLogger(None).logger.handlers: 255 | log.setup(CONF, "os-collect-config") 256 | 257 | if CONF.print_cachedir: 258 | print(CONF.cachedir) 259 | return 260 | 261 | unknown_collectors = set(CONF.collectors) - set(COLLECTORS.keys()) 262 | if unknown_collectors: 263 | raise exc.InvalidArguments( 264 | 'Unknown collectors %s. 
Valid collectors are: %s' % 265 | (list(unknown_collectors), DEFAULT_COLLECTORS)) 266 | 267 | if CONF.force: 268 | CONF.set_override('one_time', True) 269 | 270 | if CONF.splay > 0 and not CONF.one_time: 271 | # sleep splay seconds in the beginning to prevent multiple collect 272 | # processes from all running at the same time 273 | time.sleep(random.randrange(0, CONF.splay)) 274 | 275 | exitval = 0 276 | config_files = CONF.config_file 277 | config_hash = getfilehash(config_files) 278 | exponential_sleep_time = CONF.min_polling_interval 279 | while True: 280 | # shorter sleeps while changes are detected allows for faster 281 | # software deployment dependency processing 282 | store_and_run = bool(CONF.command and not CONF.print_only) 283 | (changed_keys, content) = collect_all( 284 | cfg.CONF.collectors, 285 | store=store_and_run, 286 | collector_kwargs_map=collector_kwargs_map) 287 | if store_and_run: 288 | if changed_keys or CONF.force: 289 | # ignore HUP now since we will reexec after commit anyway 290 | signal.signal(signal.SIGHUP, signal.SIG_IGN) 291 | try: 292 | call_command(content, CONF.command) 293 | except subprocess.CalledProcessError as e: 294 | exitval = e.returncode 295 | logger.error('Command failed, will not cache new data. 
%s' 296 | % e) 297 | else: 298 | for changed in changed_keys: 299 | cache.commit(changed) 300 | if not CONF.one_time: 301 | new_config_hash = getfilehash(config_files) 302 | if config_hash != new_config_hash: 303 | reexec_self() 304 | else: 305 | logger.debug("No changes detected.") 306 | if CONF.one_time: 307 | break 308 | else: 309 | logger.info("Sleeping %.2f seconds.", exponential_sleep_time) 310 | time.sleep(exponential_sleep_time) 311 | 312 | exponential_sleep_time *= 2 313 | if exponential_sleep_time > CONF.polling_interval: 314 | exponential_sleep_time = CONF.polling_interval 315 | else: 316 | print(json.dumps(content, indent=1)) 317 | break 318 | return exitval 319 | -------------------------------------------------------------------------------- /os-collect-config-and-friends.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | Heat localMetadata 63 | 64 | 65 | 66 | 67 | 68 | 69 | EC2 meta-dataservice 70 | 71 | 72 | 73 | 74 | 75 | 76 | Heat Metadataservice 77 | 78 | 79 | 80 | 81 | 82 | 83 | os-collect-config 84 | 85 | 86 | 87 | 88 | 89 | 90 | os-refresh-config(+ scripts) 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | Local MetadataCache 104 | 105 | 106 | 107 | 108 | 109 | 110 | os-apply-config 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | In-imagetemplates 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 
178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | Local Configs 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | ephemeral systemstate 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | -------------------------------------------------------------------------------- /os_collect_config/tests/test_collect.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 

import copy
import json
import os
import signal
import sys
import tempfile
from unittest import mock

import fixtures
from oslo_config import cfg
import testtools
from testtools import matchers

from os_collect_config import cache
from os_collect_config import collect
from os_collect_config import config_drive
from os_collect_config import exc
from os_collect_config.tests import test_cfn
from os_collect_config.tests import test_ec2
from os_collect_config.tests import test_heat
from os_collect_config.tests import test_heat_local
from os_collect_config.tests import test_local
from os_collect_config.tests import test_request
from os_collect_config.tests import test_zaqar


def _setup_heat_local_metadata(test_case):
    # Write the shared heat_local sample metadata to a temp file and
    # return its path.
    test_case.useFixture(fixtures.NestedTempfile())
    local_md = tempfile.NamedTemporaryFile(delete=False)
    local_md.write(json.dumps(test_heat_local.META_DATA).encode('utf-8'))
    local_md.flush()
    return local_md.name


def _setup_local_metadata(test_case):
    # Write the 'local' collector sample metadata into a temp dir and
    # return the directory path.
    tmpdir = fixtures.TempDir()
    test_case.useFixture(tmpdir)
    local_data_path = tmpdir.path + '/local'
    with open(local_data_path, 'w') as local_data:
        json.dump(test_local.META_DATA, local_data)
    return tmpdir.path


class TestCollect(testtools.TestCase):

    def setUp(self):
        super().setUp()
        self.useFixture(fixtures.FakeLogger())
        collect.setup_conf()
        self.addCleanup(cfg.CONF.reset)

    def _call_main(self, fake_args):
        """Run collect.main() with fake implementations for every collector."""
        # make sure we don't run forever!
        if '--one-time' not in fake_args:
            fake_args.append('--one-time')
        collector_kwargs_map = {
            'ec2': {'requests_impl': test_ec2.FakeRequests},
            'cfn': {'requests_impl': test_cfn.FakeRequests(self)},
            'heat': {
                'keystoneclient': test_heat.FakeKeystoneClient(self),
                'heatclient': test_heat.FakeHeatClient(self),
                'discover_class': test_heat.FakeKeystoneDiscover
            },
            'request': {'requests_impl': test_request.FakeRequests},
            'zaqar': {
                'keystoneclient': test_zaqar.FakeKeystoneClient(self),
                'zaqarclient': test_zaqar.FakeZaqarClient(self),
                'discover_class': test_heat.FakeKeystoneDiscover
            },
        }
        with mock.patch.object(config_drive, 'get_metadata') as gm:
            gm.return_value = {}
            return collect.main(args=fake_args,
                                collector_kwargs_map=collector_kwargs_map)

    def _fake_popen_call_main(self, occ_args):
        """Run main() with a fake Popen; return the captured popen calls."""
        calls = []

        def capture_popen(proc_args):
            calls.append(proc_args)
            return dict(returncode=0)
        self.useFixture(fixtures.FakePopen(capture_popen))
        self.assertEqual(0, self._call_main(occ_args))
        return calls

    def test_main(self):
        """End-to-end run: command executed once, cache files written."""
        expected_cmd = self.getUniqueString()
        cache_dir = self.useFixture(fixtures.TempDir())
        backup_cache_dir = self.useFixture(fixtures.TempDir())
        fake_metadata = _setup_heat_local_metadata(self)
        occ_args = [
            'os-collect-config',
            '--command',
            expected_cmd,
            '--cachedir',
            cache_dir.path,
            '--backup-cachedir',
            backup_cache_dir.path,
            '--config-file',
            '/dev/null',
            '--cfn-metadata-url',
            'http://192.0.2.1:8000/v1/',
            '--cfn-stack-name',
            'foo',
            '--cfn-path',
            'foo.Metadata',
            '--cfn-access-key-id',
            '0123456789ABCDEF',
            '--cfn-secret-access-key',
            'FEDCBA9876543210',
            '--heat_local-path',
            fake_metadata,
            '--heat-user-id',
            'FEDCBA9876543210',
            '--heat-password',
            '0123456789ABCDEF',
            '--heat-project-id',
            '9f6b09df-4d7f-4a33-8ec3-9924d8f46f10',
            '--heat-auth-url',
            'http://192.0.2.1:5000/v3',
            '--heat-stack-id',
            'a/c482680f-7238-403d-8f76-36acf0c8e0aa',
            '--heat-resource-name',
            'server'
        ]
        calls = self._fake_popen_call_main(occ_args)
        # The Python 3 platform module makes a popen call, filter this out
        proc_calls = [call for call in calls if call['args'] == expected_cmd]
        self.assertEqual(len(proc_calls), 1)
        proc_args = proc_calls[0]
        for test_dir in (cache_dir, backup_cache_dir):
            list_path = os.path.join(test_dir.path, 'os_config_files.json')
            with open(list_path) as list_file:
                config_list = json.loads(list_file.read())
            self.assertThat(config_list, matchers.IsInstance(list))
            env_config_list = proc_args['env']['OS_CONFIG_FILES'].split(':')
            self.assertEqual(env_config_list, config_list)
            keys_found = set()
            for path in env_config_list:
                self.assertTrue(os.path.exists(path))
                with open(path) as cfg_file:
                    contents = json.loads(cfg_file.read())
                keys_found.update(set(contents.keys()))
            # From test_ec2.FakeRequests
            self.assertIn("local-ipv4", keys_found)
            self.assertIn("reservation-id", keys_found)
            # From test_cfn.FakeRequests
            self.assertIn("int1", keys_found)
            self.assertIn("map_ab", keys_found)

    def test_main_just_local(self):
        """Running with only the 'local' collector succeeds."""
        fake_md = _setup_heat_local_metadata(self)
        occ_args = [
            'os-collect-config',
            '--print',
            '--local-path', os.path.dirname(fake_md),
            'local',
        ]
        self._call_main(occ_args)

    def test_main_force_command(self):
        """--force reruns the command even when nothing changed."""
        cache_dir = self.useFixture(fixtures.TempDir())
        backup_cache_dir = self.useFixture(fixtures.TempDir())
        fake_metadata = _setup_heat_local_metadata(self)
        occ_args = [
            'os-collect-config',
            '--command', 'foo',
            '--cachedir', cache_dir.path,
            '--backup-cachedir', backup_cache_dir.path,
            '--config-file', '/dev/null',
            '--heat_local-path', fake_metadata,
            '--force',
        ]
        calls = self._fake_popen_call_main(occ_args)
        self.assertIn('OS_CONFIG_FILES', calls[0]['env'])
        cfg.CONF.reset()
        # First time caches data, run again, make sure we run command again
        calls = self._fake_popen_call_main(occ_args)
        self.assertIn('OS_CONFIG_FILES', calls[0]['env'])

    def test_main_command_failed_no_caching(self):
        """A failing command must leave no committed cache entries."""
        cache_dir = self.useFixture(fixtures.TempDir())
        backup_cache_dir = self.useFixture(fixtures.TempDir())
        fake_metadata = _setup_heat_local_metadata(self)
        occ_args = [
            'os-collect-config',
            '--command',
            'foo',
            '--cachedir',
            cache_dir.path,
            '--backup-cachedir',
            backup_cache_dir.path,
            '--config-file',
            '/dev/null',
            '--heat_local-path',
            fake_metadata,
        ]
        calls = []

        def capture_popen(proc_args):
            calls.append(proc_args)
            return dict(returncode=1)
        self.useFixture(fixtures.FakePopen(capture_popen))
        self.assertEqual(1, self._call_main(occ_args))
        for test_dir in (cache_dir, backup_cache_dir):
            cache_contents = os.listdir(test_dir.path)
            last_files = [n for n in cache_contents if n.endswith('last')]
            self.assertEqual([], last_files)

    def test_main_no_command(self):
        """Without --command the collected data is printed as JSON."""
        fake_args = [
            'os-collect-config',
            '--config-file',
            '/dev/null',
            '--cfn-metadata-url',
            'http://192.0.2.1:8000/v1/',
            '--cfn-stack-name',
            'foo',
            '--cfn-path',
            'foo.Metadata',
            '--cfn-access-key-id',
            '0123456789ABCDEF',
            '--cfn-secret-access-key',
            'FEDCBA9876543210',
        ]
        fake_metadata = _setup_heat_local_metadata(self)
        fake_args.append('--heat_local-path')
        fake_args.append(fake_metadata)
        output = self.useFixture(fixtures.StringStream('stdout'))
        self.useFixture(
            fixtures.MonkeyPatch('sys.stdout', output.stream))
        self._call_main(fake_args)
        out_struct = json.loads(output.getDetails()['stdout'].as_text())
        self.assertThat(out_struct, matchers.IsInstance(dict))
        self.assertIn('ec2', out_struct)
        self.assertIn('cfn', out_struct)

    def test_main_print_cachedir(self):
        """--print-cachedir prints the cache path and exits."""
        fake_cachedir = self.useFixture(fixtures.TempDir())
        fake_args = [
            'os-collect-config',
            '--cachedir', fake_cachedir.path,
            '--config-file', '/dev/null',
            '--print-cachedir',
        ]

        output = self.useFixture(fixtures.StringStream('stdout'))
        self.useFixture(
            fixtures.MonkeyPatch('sys.stdout', output.stream))
        self._call_main(fake_args)
        cache_dir = output.getDetails()['stdout'].as_text().strip()
        self.assertEqual(fake_cachedir.path, cache_dir)

    def test_main_print_only(self):
        """--print prints JSON and never executes the configured command."""
        cache_dir = self.useFixture(fixtures.TempDir())
        backup_cache_dir = self.useFixture(fixtures.TempDir())
        fake_metadata = _setup_heat_local_metadata(self)
        args = [
            'os-collect-config',
            '--command', 'bar',
            '--cachedir', cache_dir.path,
            '--backup-cachedir', backup_cache_dir.path,
            '--config-file', '/dev/null',
            '--print',
            '--cfn-metadata-url',
            'http://192.0.2.1:8000/v1/',
            '--cfn-stack-name',
            'foo',
            '--cfn-path',
            'foo.Metadata',
            '--cfn-access-key-id',
            '0123456789ABCDEF',
            '--cfn-secret-access-key',
            'FEDCBA9876543210',
            '--heat_local-path', fake_metadata,
        ]

        def fake_popen(args):
            self.fail('Called command instead of printing')
        self.useFixture(fixtures.FakePopen(fake_popen))
        output = self.useFixture(fixtures.StringStream('stdout'))
        self.useFixture(
            fixtures.MonkeyPatch('sys.stdout', output.stream))
        self._call_main(args)
        out_struct = json.loads(output.getDetails()['stdout'].as_text())
        self.assertThat(out_struct, matchers.IsInstance(dict))
        self.assertIn('cfn', out_struct)
        self.assertIn('heat_local', out_struct)
        self.assertIn('ec2', out_struct)

    def test_main_invalid_collector(self):
        """An unknown collector name raises InvalidArguments."""
        fake_args = ['os-collect-config', 'invalid']
        self.assertRaises(exc.InvalidArguments, self._call_main, fake_args)

    def test_main_sleep(self):
        """The polling loop sleeps for the -i interval between passes."""
        class ExpectedException(Exception):
            pass

        def fake_sleep(sleep_time):
            if sleep_time == 10:
                raise ExpectedException

        self.useFixture(fixtures.MonkeyPatch('time.sleep', fake_sleep))
        try:
            collect.main(['os-collect-config', 'heat_local', '-i', '10',
                          '-c', 'true'])
        except ExpectedException:
            pass

    def test_main_no_sleep_with_no_command(self):
        """Without a command main() exits instead of entering the loop."""
        def fake_sleep(sleep_time):
            raise Exception(cfg.CONF.command)

        self.useFixture(fixtures.MonkeyPatch('time.sleep', fake_sleep))
        collect.main(['os-collect-config', 'heat_local', '--config-file',
                      '/dev/null', '-i', '10'])

    def test_main_min_polling_interval(self):
        """--min-polling-interval sets the initial sleep time."""
        class ExpectedException(Exception):
            pass

        def fake_sleep(sleep_time):
            if sleep_time == 20:
                raise ExpectedException

        self.useFixture(fixtures.MonkeyPatch('time.sleep', fake_sleep))
        self.assertRaises(ExpectedException, collect.main,
                          ['os-collect-config', 'heat_local', '-i', '10',
                           '--min-polling-interval', '20', '-c', 'true'])

    @mock.patch('time.sleep')
    @mock.patch('random.randrange')
    def test_main_with_splay(self, randrange_mock, sleep_mock):
        """--splay sleeps a random duration bounded by the splay value."""
        randrange_mock.return_value = 4
        collect.main(args=['os-collect-config', 'heat_local', '-i', '10',
                           '--min-polling-interval', '20', '-c', 'true',
                           '--print', '--splay', '29'])
        randrange_mock.assert_called_with(0, 29)
        sleep_mock.assert_called_with(4)


class TestCollectAll(testtools.TestCase):

    def setUp(self):
        """Point every collector at fake endpoints and temp cache dirs."""
        super().setUp()
        self.log = self.useFixture(fixtures.FakeLogger())
        collect.setup_conf()
        self.cache_dir = self.useFixture(fixtures.TempDir())
        self.backup_cache_dir = self.useFixture(fixtures.TempDir())
        self.clean_conf = copy.copy(cfg.CONF)

        def restore_copy():
            cfg.CONF = self.clean_conf
        self.addCleanup(restore_copy)

        cfg.CONF.cachedir = self.cache_dir.path
        cfg.CONF.backup_cachedir = self.backup_cache_dir.path
        cfg.CONF.cfn.metadata_url = 'http://192.0.2.1:8000/v1/'
        cfg.CONF.cfn.stack_name = 'foo'
        cfg.CONF.cfn.path = ['foo.Metadata']
        cfg.CONF.cfn.access_key_id = '0123456789ABCDEF'
        cfg.CONF.cfn.secret_access_key = 'FEDCBA9876543210'
        cfg.CONF.heat_local.path = [_setup_heat_local_metadata(self)]
        cfg.CONF.heat.auth_url = 'http://192.0.2.1:5000/v3'
        cfg.CONF.heat.user_id = '0123456789ABCDEF'
        cfg.CONF.heat.password = 'FEDCBA9876543210'
        cfg.CONF.heat.project_id = '9f6b09df-4d7f-4a33-8ec3-9924d8f46f10'
        cfg.CONF.heat.stack_id = 'a/c482680f-7238-403d-8f76-36acf0c8e0aa'
        cfg.CONF.heat.resource_name = 'server'
        cfg.CONF.local.path = [_setup_local_metadata(self)]
        cfg.CONF.request.metadata_url = 'http://192.0.2.1:8000/my_metadata/'
        cfg.CONF.zaqar.auth_url = 'http://192.0.2.1:5000/v3'
        cfg.CONF.zaqar.user_id = '0123456789ABCDEF'
        cfg.CONF.zaqar.password = 'FEDCBA9876543210'
        cfg.CONF.zaqar.project_id = '9f6b09df-4d7f-4a33-8ec3-9924d8f46f10'
        cfg.CONF.zaqar.queue_id = '4f3f46d3-09f1-42a7-8c13-f91a5457192c'

    def _call_collect_all(self, store, collector_kwargs_map=None,
                          collectors=None):
        """Invoke collect.collect_all() with fakes for every collector."""
        if collector_kwargs_map is None:
            collector_kwargs_map = {
                'ec2': {'requests_impl': test_ec2.FakeRequests},
                'cfn': {'requests_impl': test_cfn.FakeRequests(self)},
                'heat': {
                    'keystoneclient': test_heat.FakeKeystoneClient(self),
                    'heatclient': test_heat.FakeHeatClient(self),
                    'discover_class': test_heat.FakeKeystoneDiscover
                },
                'request': {'requests_impl': test_request.FakeRequests},
                'zaqar': {
                    'keystoneclient': test_zaqar.FakeKeystoneClient(self),
                    'zaqarclient': test_zaqar.FakeZaqarClient(self),
                    'discover_class': test_heat.FakeKeystoneDiscover
                },
            }
        if collectors is None:
            collectors = cfg.CONF.collectors
        with mock.patch.object(config_drive, 'get_metadata') as gm:
            gm.return_value = {}
            return collect.collect_all(
                collectors,
                store=store,
                collector_kwargs_map=collector_kwargs_map)

    def _test_collect_all_store(self, collector_kwargs_map=None,
                                expected_changed=None):
        """Assert collect_all(store=True) writes caches and .orig copies."""
        (changed_keys, paths) = self._call_collect_all(
            store=True, collector_kwargs_map=collector_kwargs_map)
        if expected_changed is None:
            expected_changed = {'heat_local', 'cfn', 'ec2',
                                'heat', 'local', 'request', 'zaqar'}
        self.assertEqual(expected_changed, changed_keys)
        self.assertThat(paths, matchers.IsInstance(list))
        for path in paths:
            self.assertTrue(os.path.exists(path))
            self.assertTrue(os.path.exists('%s.orig' % path))

    def test_collect_all_store(self):
        self._test_collect_all_store()

    def test_collect_all_store_softwareconfig(self):
        """Software-config data also produces per-deployment cache keys."""
        soft_config_map = {
            'ec2': {'requests_impl': test_ec2.FakeRequests},
            'cfn': {
                'requests_impl': test_cfn.FakeRequestsSoftwareConfig(self)},
            'heat': {
                'keystoneclient': test_heat.FakeKeystoneClient(self),
                'heatclient': test_heat.FakeHeatClient(self),
                'discover_class': test_heat.FakeKeystoneDiscover
            },
            'request': {'requests_impl': test_request.FakeRequests},
            'zaqar': {
                'keystoneclient': test_zaqar.FakeKeystoneClient(self),
                'zaqarclient': test_zaqar.FakeZaqarClient(self),
                'discover_class': test_heat.FakeKeystoneDiscover
            },
        }
        expected_changed = {
            'heat_local', 'ec2', 'cfn', 'heat', 'local', 'request',
            'dep-name1', 'dep-name2', 'dep-name3', 'zaqar'}
        self._test_collect_all_store(collector_kwargs_map=soft_config_map,
                                     expected_changed=expected_changed)

    def test_collect_all_store_alt_order(self):
        """Cache paths are emitted in the order collectors were given."""
        # Ensure different than default
        new_list = list(reversed(cfg.CONF.collectors))
        (changed_keys, paths) = self._call_collect_all(
            store=True, collectors=new_list)
        self.assertEqual(set(cfg.CONF.collectors), changed_keys)
        self.assertThat(paths, matchers.IsInstance(list))
        expected_paths = [
            os.path.join(self.cache_dir.path, '%s.json' % collector)
            for collector in new_list]
        self.assertEqual(expected_paths, paths)

    def test_collect_all_no_change(self):
        """After commit, a second run reports no changed keys."""
        (changed_keys, paths) = self._call_collect_all(store=True)
        self.assertEqual(set(cfg.CONF.collectors), changed_keys)
        # Commit
        for changed in changed_keys:
            cache.commit(changed)
        (changed_keys, paths2) = self._call_collect_all(store=True)
        self.assertEqual(set(), changed_keys)
        self.assertEqual(paths, paths2)

    def test_collect_all_no_change_softwareconfig(self):
        """Cached data is reused when a collector becomes unavailable."""
        soft_config_map = {
            'ec2': {'requests_impl': test_ec2.FakeRequests},
            'cfn': {
                'requests_impl': test_cfn.FakeRequestsSoftwareConfig(self)},
            'heat': {
                'keystoneclient': test_heat.FakeKeystoneClient(self),
                'heatclient': test_heat.FakeHeatClient(self),
                'discover_class': test_heat.FakeKeystoneDiscover
            },
            'request': {'requests_impl': test_request.FakeRequests},
            'zaqar': {
                'keystoneclient': test_zaqar.FakeKeystoneClient(self),
                'zaqarclient': test_zaqar.FakeZaqarClient(self),
                'discover_class': test_heat.FakeKeystoneDiscover
            },
        }
        (changed_keys, paths) = self._call_collect_all(
            store=True, collector_kwargs_map=soft_config_map)
        expected_changed = set(cfg.CONF.collectors)
        expected_changed.add('dep-name1')
        expected_changed.add('dep-name2')
        expected_changed.add('dep-name3')
        self.assertEqual(expected_changed, changed_keys)
        # Commit
        for changed in changed_keys:
            cache.commit(changed)

        # Replace the ec2 requests with a failing one to simulate a transient
        # network failure
        soft_config_map['ec2'] = {'requests_impl': test_ec2.FakeFailRequests}
        (changed_keys, paths2) = self._call_collect_all(
            store=True, collector_kwargs_map=soft_config_map)
        self.assertEqual(set(), changed_keys)

        # check the second collect includes cached ec2 data despite network
        # failure
        self.assertEqual(paths, paths2)

    def test_collect_all_nostore(self):
        """store=False returns merged content instead of cache paths."""
        (changed_keys, content) = self._call_collect_all(store=False)
        self.assertEqual(set(), changed_keys)
        self.assertThat(content, matchers.IsInstance(dict))
        for collector in cfg.CONF.collectors:
            self.assertIn(collector, content)
            self.assertThat(content[collector], matchers.IsInstance(dict))

    def test_collect_all_ec2_unavailable(self):
        """An unavailable source is skipped and omitted from the result."""
        collector_kwargs_map = {
            'ec2': {'requests_impl': test_ec2.FakeFailRequests},
            'cfn': {'requests_impl': test_cfn.FakeRequests(self)}
        }
        (changed_keys, content) = self._call_collect_all(
            store=False, collector_kwargs_map=collector_kwargs_map,
            collectors=['ec2', 'cfn'])
        self.assertEqual(set(), changed_keys)
        self.assertThat(content, matchers.IsInstance(dict))
        self.assertNotIn('ec2', content)

    def test_collect_all_cfn_unconfigured(self):
        """An unconfigured source is skipped with a debug log."""
        collector_kwargs_map = {
            'cfn': {'requests_impl': test_cfn.FakeRequests(self)}
        }
        cfg.CONF.cfn.metadata_url = None
        (changed_keys, content) =
self._call_collect_all( 546 | store=False, collector_kwargs_map=collector_kwargs_map, 547 | collectors=['heat_local', 'cfn']) 548 | self.assertIn('No metadata_url configured', self.log.output) 549 | self.assertNotIn('cfn', content) 550 | self.assertIn('heat_local', content) 551 | self.assertEqual(test_heat_local.META_DATA, content['heat_local']) 552 | 553 | 554 | class TestConf(testtools.TestCase): 555 | 556 | def test_setup_conf(self): 557 | collect.setup_conf() 558 | self.assertEqual('/var/lib/os-collect-config', cfg.CONF.cachedir) 559 | self.assertTrue(hasattr(cfg.CONF, 'ec2')) 560 | self.assertTrue(hasattr(cfg.CONF, 'cfn')) 561 | 562 | 563 | class TestHup(testtools.TestCase): 564 | 565 | def setUp(self): 566 | super().setUp() 567 | self.log = self.useFixture(fixtures.FakeLogger()) 568 | 569 | def fake_closerange(low, high): 570 | self.assertEqual(3, low) 571 | self.assertEqual(255, high) 572 | 573 | def fake_execv(path, args): 574 | self.assertEqual(sys.argv[0], path) 575 | self.assertEqual(sys.argv, args) 576 | 577 | self.useFixture(fixtures.MonkeyPatch('os.execv', fake_execv)) 578 | self.useFixture(fixtures.MonkeyPatch('os.closerange', fake_closerange)) 579 | 580 | def test_reexec_self_signal(self): 581 | collect.reexec_self(signal.SIGHUP, None) 582 | self.assertIn('Signal received', self.log.output) 583 | 584 | def test_reexec_self(self): 585 | collect.reexec_self() 586 | self.assertNotIn('Signal received', self.log.output) 587 | 588 | 589 | class TestFileHash(testtools.TestCase): 590 | def setUp(self): 591 | super().setUp() 592 | 593 | # Deletes tempfiles during teardown 594 | self.useFixture(fixtures.NestedTempfile()) 595 | 596 | self.file_1 = tempfile.mkstemp()[1] 597 | with open(self.file_1, "w") as fp: 598 | fp.write("test string") 599 | 600 | self.file_2 = tempfile.mkstemp()[1] 601 | with open(self.file_2, "w") as fp: 602 | fp.write("test string2") 603 | 604 | def test_getfilehash_nofile(self): 605 | h = collect.getfilehash([]) 606 | 
self.assertEqual(h, "d41d8cd98f00b204e9800998ecf8427e") 607 | 608 | def test_getfilehash_onefile(self): 609 | h = collect.getfilehash([self.file_1]) 610 | self.assertEqual(h, "6f8db599de986fab7a21625b7916589c") 611 | 612 | def test_getfilehash_twofiles(self): 613 | h = collect.getfilehash([self.file_1, self.file_2]) 614 | self.assertEqual(h, "a8e1b2b743037b1ec17b5d4b49369872") 615 | 616 | def test_getfilehash_filenotfound(self): 617 | self.assertEqual( 618 | collect.getfilehash([self.file_1, self.file_2]), 619 | collect.getfilehash([self.file_1, "/i/dont/exist", self.file_2]) 620 | ) 621 | --------------------------------------------------------------------------------