├── .circleci └── config.yml ├── .gitignore ├── LICENSE ├── README.md ├── cloudify ├── __init__.py ├── amqp_client.py ├── amqp_client_utils.py ├── broker_config.py ├── celery │ ├── __init__.py │ ├── app.py │ ├── gate_keeper.py │ └── logging_server.py ├── cluster.py ├── compute │ └── __init__.py ├── conflict_handlers.py ├── constants.py ├── context.py ├── ctx_wrappers │ ├── __init__.py │ └── ctx-py.py ├── decorators.py ├── dispatch.py ├── endpoint.py ├── event.py ├── exceptions.py ├── logs.py ├── lru_cache.py ├── manager.py ├── mocks.py ├── plugins │ ├── __init__.py │ ├── lifecycle.py │ └── workflows.py ├── proxy │ ├── __init__.py │ ├── client.py │ └── server.py ├── state.py ├── test_utils │ ├── __init__.py │ ├── dispatch_helper.py │ └── local_workflow_decorator.py ├── tests │ ├── __init__.py │ ├── file_server.py │ ├── mocks │ │ ├── __init__.py │ │ └── mock_rest_client.py │ ├── resources │ │ ├── blueprints │ │ │ ├── default_tenant │ │ │ │ ├── for_test.txt │ │ │ │ ├── for_test_bp_resource.txt │ │ │ │ └── test_blueprint │ │ │ │ │ └── blueprint.yaml │ │ │ ├── execute_operation.yaml │ │ │ ├── execute_operation_workflow.yaml │ │ │ ├── get_attribute.yaml │ │ │ ├── get_attribute_multi_instance.yaml │ │ │ ├── get_attribute_multi_instance2.yaml │ │ │ ├── install-new-agents-blueprint.yaml │ │ │ ├── not_exist_op_workflow.yaml │ │ │ ├── relationship_context.yaml │ │ │ ├── resources │ │ │ │ ├── extended_rendered_template.conf │ │ │ │ ├── for_template_rendering_tests.conf │ │ │ │ └── rendered_template.conf │ │ │ ├── test-blueprint-ignore-failure.yaml │ │ │ ├── test-context-node.yaml │ │ │ ├── test-get-resource-template.yaml │ │ │ ├── test-heal-correct-order-blueprint.yaml │ │ │ ├── test-install-agent-blueprint.yaml │ │ │ ├── test-lifecycle-retry-blueprint.yaml │ │ │ ├── test-operation-retry-blueprint.yaml │ │ │ ├── test-relationship-order-blueprint.yaml │ │ │ ├── test-scale-blueprint.yaml │ │ │ ├── test-subgraph-blueprint.yaml │ │ │ ├── test-task-retry-blueprint.yaml │ │ │ ├── test-task-retry-event-context-blueprint.yaml │ │ │ ├── test-task-subgraph-blueprint.yaml │ │ │ ├── test-uninstall-ignore-failure-parameter-blueprint.yaml │ │ │ └── test-validate-version-blueprint.yaml │ │ └── deployments │ │ │ └── default_tenant │ │ │ └── dep1 │ │ │ ├── for_test.txt │ │ │ └── for_test_only_dep.txt │ ├── test_builtin_workflows.py │ ├── test_context.py │ ├── test_ctx_relationships.py │ ├── test_decorators.py │ ├── test_dispatch.py │ ├── test_event.py │ ├── test_gate_keeper.py │ ├── test_install_agent_local_workflow.py │ ├── test_install_new_agents_workflow.py │ ├── test_lifecycle_retry.py │ ├── test_local_get_attribute.py │ ├── test_local_workflows.py │ ├── test_local_workflows_init.py │ ├── test_logging_server.py │ ├── test_logs.py │ ├── test_lru_cache.py │ ├── test_missing_operation.py │ ├── test_node_state.py │ ├── test_operation_retry.py │ ├── test_proxy.py │ ├── test_state.py │ ├── test_task_retry.py │ ├── test_task_retry_event_context.py │ ├── test_task_subgraph.py │ ├── test_utils.py │ └── workflows.py ├── utils.py └── workflows │ ├── __init__.py │ ├── events.py │ ├── local.py │ ├── tasks.py │ ├── tasks_graph.py │ ├── workflow_api.py │ └── workflow_context.py ├── ctx_wrappers └── ctx-sh ├── dev-requirements.txt ├── docs ├── Makefile ├── conf.py ├── context.rst ├── decorators.rst ├── exceptions.rst ├── index.rst ├── logs.rst ├── manager.rst ├── mocks.rst ├── utils.rst ├── workflow_api.rst ├── workflow_context.rst ├── workflow_tasks_graph.rst └── workflows.rst ├── packaging ├── Vagrantfile └── provision.sh ├── setup.py ├── test-requirements.txt └── tox.ini /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | checkout: 4 | post: 5 | - > 6 | if [ -n "$CI_PULL_REQUEST" ]; then 7 | PR_ID=${CI_PULL_REQUEST##*/} 8 | git fetch origin +refs/pull/$PR_ID/merge: 9 | git checkout -qf FETCH_HEAD 10 | fi 11 | 12 | defaults: 13 | - &test_defaults 14 | docker: 15 | - image: circleci/python:2.7 16 | steps: 17 | - checkout 18 | - run: 19 | name: Install tox, NOTICE we use an old version of tox because of CFY-6398 ( relying dict ordering) 20 | command: sudo pip install tox==1.6.1 21 | - run: 22 | name: Run tox of specfic environment 23 | command: tox -e $DO_ENV 24 | 25 | - &test_defaults_for_python26 26 | docker: 27 | - image: circleci/python:2.7 28 | steps: 29 | - checkout 30 | - run: 31 | name: Install and set python version with pyenv 32 | command: | 33 | git clone https://github.com/yyuu/pyenv.git ~/.pyenv 34 | export PYENV_ROOT="$HOME/.pyenv" 35 | export PATH="$PYENV_ROOT/bin:$PATH" 36 | sudo apt-get install -y build-essential libssl1.0-dev zlib1g-dev xz-utils 37 | pyenv install 2.6.9 38 | pyenv local 2.6.9 39 | - run: 40 | name: Install tox, NOTICE we use an old version of tox because of CFY-6398 (relying dict ordering) 41 | command: sudo pip install tox==1.6.1 tox-pyenv 42 | - run: 43 | name: Run tox of specfic environment 44 | command: tox -e $DO_ENV 45 | 46 | jobs: 47 | flake8: 48 | <<: *test_defaults 49 | environment: 50 | DO_ENV: flake8 51 | 52 | test_py27: 53 | <<: *test_defaults 54 | environment: 55 | DO_ENV: test_py27 56 | 57 | test_py26: 58 | <<: *test_defaults_for_python26 59 | environment: 60 | DO_ENV: test_py26 61 | 62 | workflows: 63 | version: 2 64 | 65 | build_and_test: 66 | jobs: 67 | - flake8 68 | - test_py27 69 | - test_py26 70 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | bin/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # Installer logs 26 | pip-log.txt 27 | pip-delete-this-directory.txt 28 | 29 | # Unit test / coverage reports 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .cache 34 | nosetests.xml 35 | coverage.xml 36 | 37 | # Translations 38 | *.mo 39 | 40 | # Mr Developer 41 | .mr.developer.cfg 42 | .project 43 | .pydevproject 44 | 45 | # Rope 46 | .ropeproject 47 | 48 | # Django stuff: 49 | *.log 50 | *.pot 51 | 52 | # Sphinx documentation 53 | docs/_build/ 54 | 55 | *.iml 56 | 57 | *COMMIT_MSG 58 | 59 | # QuickBuild 60 | .qbcache/ 61 | .noseids 62 | 63 | # Vim 64 | *.swp 65 | 66 | # IDEA 67 | .idea/* 68 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Cloudify Plugins Common 2 | 3 | [![Circle CI](https://circleci.com/gh/cloudify-cosmo/cloudify-plugins-common/tree/master.svg?&style=shield)](https://circleci.com/gh/cloudify-cosmo/cloudify-plugins-common/tree/master) 4 | [![PyPI](http://img.shields.io/pypi/dm/cloudify-plugins-common.svg)](http://img.shields.io/pypi/dm/cloudify-plugins-common.svg) 5 | [![PypI](http://img.shields.io/pypi/v/cloudify-plugins-common.svg)](http://img.shields.io/pypi/v/cloudify-plugins-common.svg) 6 | 7 | 8 | This package contains common modules that are mandatory for Cloudify's plugins. 9 | 10 | See [ReadTheDocs](http://cloudify-plugins-common.readthedocs.org/en/latest/) for an API reference. 11 | -------------------------------------------------------------------------------- /cloudify/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | from cloudify.state import ctx # noqa 18 | -------------------------------------------------------------------------------- /cloudify/amqp_client.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import json 17 | import logging 18 | import threading 19 | 20 | import pika 21 | import pika.exceptions 22 | 23 | from cloudify import broker_config 24 | from cloudify import cluster 25 | from cloudify import exceptions 26 | from cloudify import utils 27 | from cloudify.constants import BROKER_PORT_SSL, BROKER_PORT_NO_SSL 28 | 29 | logger = logging.getLogger(__name__) 30 | 31 | 32 | class AMQPClient(object): 33 | 34 | EVENTS_EXCHANGE_NAME = 'cloudify-events' 35 | SOCKET_TIMEOUT = 5 36 | CONNECTION_ATTEMPTS = 3 37 | LOGS_EXCHANGE_NAME = 'cloudify-logs' 38 | channel_settings = { 39 | 'auto_delete': False, 40 | 'durable': True, 41 | } 42 | 43 | def __init__(self, 44 | amqp_user, 45 | amqp_pass, 46 | amqp_host, 47 | amqp_vhost, 48 | ssl_enabled, 49 | ssl_cert_path): 50 | self.connection = None 51 | self.channel = None 52 | self._is_closed = False 53 | credentials = pika.credentials.PlainCredentials( 54 | username=amqp_user, 55 | password=amqp_pass) 56 | ssl_options = utils.internal.get_broker_ssl_options(ssl_enabled, 57 | ssl_cert_path) 58 | self._connection_parameters = pika.ConnectionParameters( 59 | host=amqp_host, 60 | port=BROKER_PORT_SSL if ssl_enabled else BROKER_PORT_NO_SSL, 61 | virtual_host=amqp_vhost, 62 | socket_timeout=self.SOCKET_TIMEOUT, 63 | connection_attempts=self.CONNECTION_ATTEMPTS, 64 | credentials=credentials, 65 | ssl=bool(ssl_enabled), 66 | ssl_options=ssl_options) 67 | self._connect() 68 | 69 | def _connect(self): 70 | self.connection = pika.BlockingConnection(self._connection_parameters) 71 | self.channel = self.connection.channel() 72 | self.channel.confirm_delivery() 73 | for exchange in [self.EVENTS_EXCHANGE_NAME, self.LOGS_EXCHANGE_NAME]: 74 | self.channel.exchange_declare(exchange=exchange, type='fanout', 75 | **self.channel_settings) 76 | 77 | def publish_message(self, message, message_type): 78 | if self._is_closed: 79 | raise exceptions.ClosedAMQPClientException( 80 | 'Publish failed, AMQP client already closed') 81 | if message_type == 'event': 82 | exchange = self.EVENTS_EXCHANGE_NAME 83 | else: 84 | exchange = self.LOGS_EXCHANGE_NAME 85 | routing_key = '' 86 | body = json.dumps(message) 87 | try: 88 | self.channel.basic_publish(exchange=exchange, 89 | routing_key=routing_key, 90 | body=body) 91 | except pika.exceptions.ConnectionClosed as e: 92 | logger.warn( 93 | 'Connection closed unexpectedly for thread {0}, ' 94 | 'reconnecting. ({1}: {2})' 95 | .format(threading.current_thread(), type(e).__name__, repr(e))) 96 | # obviously, there is no need to close the current 97 | # channel/connection. 98 | self._connect() 99 | self.channel.basic_publish(exchange=exchange, 100 | routing_key=routing_key, 101 | body=body) 102 | 103 | def close(self): 104 | if self._is_closed: 105 | return 106 | self._is_closed = True 107 | thread = threading.current_thread() 108 | if self.channel: 109 | logger.debug('Closing amqp channel of thread {0}'.format(thread)) 110 | try: 111 | self.channel.close() 112 | except Exception as e: 113 | # channel might be already closed, log and continue 114 | logger.debug('Failed to close amqp channel of thread {0}, ' 115 | 'reported error: {1}'.format(thread, repr(e))) 116 | 117 | if self.connection: 118 | logger.debug('Closing amqp connection of thread {0}' 119 | .format(thread)) 120 | try: 121 | self.connection.close() 122 | except Exception as e: 123 | # connection might be already closed, log and continue 124 | logger.debug('Failed to close amqp connection of thread {0}, ' 125 | 'reported error: {1}'.format(thread, repr(e))) 126 | 127 | 128 | def create_client(amqp_host=None, 129 | amqp_user=None, 130 | amqp_pass=None, 131 | amqp_vhost=None, 132 | ssl_enabled=None, 133 | ssl_cert_path=None): 134 | thread = threading.current_thread() 135 | 136 | # there's 3 possible sources of the amqp settings: passed in arguments, 137 | # current cluster active manager (if any), and broker_config; use the first 138 | # that is defined, in that order 139 | defaults = { 140 | 'amqp_host': broker_config.broker_hostname, 141 | 'amqp_user': broker_config.broker_username, 142 | 'amqp_pass': broker_config.broker_password, 143 | 'amqp_vhost': broker_config.broker_vhost, 144 | 'ssl_enabled': broker_config.broker_ssl_enabled, 145 | 'ssl_cert_path': broker_config.broker_cert_path 146 | } 147 | defaults.update(cluster.get_cluster_amqp_settings()) 148 | amqp_settings = { 149 | 'amqp_user': amqp_user or defaults['amqp_user'], 150 | 'amqp_host': amqp_host or defaults['amqp_host'], 151 | 'amqp_pass': amqp_pass or defaults['amqp_pass'], 152 | 'amqp_vhost': amqp_vhost or defaults['amqp_vhost'], 153 | 'ssl_enabled': ssl_enabled or defaults['ssl_enabled'], 154 | 'ssl_cert_path': ssl_cert_path or defaults['ssl_cert_path'] 155 | } 156 | 157 | try: 158 | client = AMQPClient(**amqp_settings) 159 | logger.debug('AMQP client created for thread {0}'.format(thread)) 160 | except Exception as e: 161 | logger.warning( 162 | 'Failed to create AMQP client for thread: {0} ({1}: {2})' 163 | .format(thread, type(e).__name__, e)) 164 | raise 165 | return client 166 | -------------------------------------------------------------------------------- /cloudify/amqp_client_utils.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from threading import Thread, RLock, Event 17 | from Queue import Queue 18 | 19 | from cloudify import amqp_client 20 | from cloudify.exceptions import ClosedAMQPClientException 21 | 22 | 23 | class AMQPWrappedThread(Thread): 24 | """ 25 | creates an amqp client before calling the target method. 26 | This thread is always set as a daemon. 27 | """ 28 | 29 | def __init__(self, target, *args, **kwargs): 30 | 31 | def wrapped_target(*inner_args, **inner_kwargs): 32 | with global_amqp_client: 33 | self.target_method(*inner_args, **inner_kwargs) 34 | 35 | self.target_method = target 36 | super(AMQPWrappedThread, self).__init__(target=wrapped_target, *args, 37 | **kwargs) 38 | self.started_amqp_client = global_amqp_client.client_started 39 | self.daemon = True 40 | 41 | 42 | _STOP = object() 43 | 44 | 45 | class _GlobalAMQPClient(object): 46 | def __init__(self, *client_args, **client_kwargs): 47 | self.client_started = Event() 48 | self._connect_lock = RLock() 49 | self._callers = 0 50 | self._thread = None 51 | self._queue = Queue() 52 | self._client_args = client_args 53 | self._client_kwargs = client_kwargs 54 | 55 | def register_caller(self): 56 | with self._connect_lock: 57 | if not self.client_started.is_set(): 58 | self._connect() 59 | self._callers += 1 60 | 61 | def unregister_caller(self): 62 | with self._connect_lock: 63 | self._callers -= 1 64 | if self._callers == 0: 65 | self._disconnect() 66 | self._thread.join() 67 | 68 | def __enter__(self): 69 | self.register_caller() 70 | return self 71 | 72 | def __exit__(self, exc, val, tb): 73 | self.unregister_caller() 74 | 75 | def publish_message(self, message, message_type): 76 | self._queue.put((message, message_type)) 77 | 78 | def close(self): 79 | self.unregister_caller() 80 | self._client.close() 81 | 82 | def _make_client(self): 83 | return amqp_client.create_client(*self._client_args, 84 | **self._client_kwargs) 85 | 86 | def _connect(self): 87 | self._client = self._make_client() 88 | self.client_started.set() 89 | self._thread = Thread(target=self._handle_publish_message) 90 | self._thread.start() 91 | 92 | def _disconnect(self): 93 | self._queue.put(_STOP) 94 | 95 | def _handle_publish_message(self): 96 | while True: 97 | request = self._queue.get() 98 | if request is _STOP: 99 | break 100 | try: 101 | self._client.publish_message(*request) 102 | except ClosedAMQPClientException: 103 | with self._connect_lock: 104 | self._client = self._make_client() 105 | self._client.publish_message(*request) 106 | self._client.close() 107 | self.client_started.clear() 108 | 109 | 110 | def init_amqp_client(): 111 | global_amqp_client.register_caller() 112 | global_event_amqp_client.register_caller() 113 | 114 | 115 | def get_amqp_client(): 116 | return global_amqp_client 117 | 118 | 119 | def get_event_amqp_client(): 120 | """ 121 | Returns an amqp client for publishing events/logs 122 | :param create: If set to True, a new client object will be created if one 123 | does not exist 124 | """ 125 | return global_event_amqp_client 126 | 127 | 128 | def close_amqp_client(): 129 | global_amqp_client.unregister_caller() 130 | global_event_amqp_client.unregister_caller() 131 | 132 | 133 | global_amqp_client = _GlobalAMQPClient() 134 | global_event_amqp_client = _GlobalAMQPClient(amqp_vhost='/') 135 | -------------------------------------------------------------------------------- /cloudify/broker_config.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | # AMQP broker configuration for agents and manager 17 | # Primarily used by celery, so provided with variables it understands 18 | from __future__ import absolute_import 19 | 20 | import json 21 | import os 22 | import ssl 23 | 24 | from cloudify.constants import BROKER_PORT_SSL, BROKER_PORT_NO_SSL 25 | 26 | workdir_path = os.getenv('CELERY_WORK_DIR') 27 | if workdir_path is None: 28 | # We are not in an appropriately configured celery environment 29 | config = {} 30 | else: 31 | conf_file_path = os.path.join(workdir_path, 'broker_config.json') 32 | if os.path.isfile(conf_file_path): 33 | with open(conf_file_path) as conf_handle: 34 | conf_file = conf_handle.read() 35 | config = json.loads(conf_file) 36 | else: 37 | config = {} 38 | 39 | # Provided as variables for retrieval by amqp_client and logger as required 40 | broker_cert_path = config.get('broker_cert_path', '') 41 | broker_username = config.get('broker_username', 'guest') 42 | broker_password = config.get('broker_password', 'guest') 43 | broker_hostname = config.get('broker_hostname', 'localhost') 44 | broker_vhost = config.get('broker_vhost', '/') 45 | broker_ssl_enabled = config.get('broker_ssl_enabled', False) 46 | broker_port = BROKER_PORT_SSL if broker_ssl_enabled else BROKER_PORT_NO_SSL 47 | 48 | # only enable heartbeat by default for agents connected to a cluster 49 | DEFAULT_HEARTBEAT = 30 50 | if os.name == 'nt': 51 | # celery doesn't support broker_heartbeat on windows 52 | broker_heartbeat = None 53 | else: 54 | broker_heartbeat = config.get('broker_heartbeat', DEFAULT_HEARTBEAT) 55 | 56 | 57 | if broker_ssl_enabled: 58 | BROKER_USE_SSL = { 59 | 'ca_certs': broker_cert_path, 60 | 'cert_reqs': ssl.CERT_REQUIRED, 61 | } 62 | 63 | if broker_heartbeat: 64 | options = '?heartbeat={heartbeat}'.format(heartbeat=broker_heartbeat) 65 | else: 66 | options = '' 67 | 68 | # BROKER_URL is held in the config to avoid the password appearing 69 | # in ps listings 70 | URL_TEMPLATE = \ 71 | 'amqp://{username}:{password}@{hostname}:{port}/{vhost}{options}' 72 | if config.get('cluster'): 73 | BROKER_URL = ';'.join(URL_TEMPLATE.format(username=broker_username, 74 | password=broker_password, 75 | hostname=node_ip, 76 | port=broker_port, 77 | vhost=broker_vhost, 78 | options=options) 79 | for node_ip in config['cluster']) 80 | else: 81 | BROKER_URL = URL_TEMPLATE.format( 82 | username=broker_username, 83 | password=broker_password, 84 | hostname=broker_hostname, 85 | port=broker_port, 86 | vhost=broker_vhost, 87 | options=options 88 | ) 89 | 90 | # celery will not use the failover strategy if there is only one broker url; 91 | # we need it to try and failover even with one initial manager, because 92 | # another node might've been added dynamically, while the worker was already 93 | # running; we add an empty broker url so that celery always sees at least two - 94 | # the failover strategy we're using (defined in cloudify_agent.app) filters out 95 | # the empty one 96 | BROKER_URL += ';' 97 | 98 | CELERY_RESULT_BACKEND = BROKER_URL 99 | CELERY_TASK_RESULT_EXPIRES = 600 100 | CELERYD_PREFETCH_MULTIPLIER = 1 101 | CELERY_ACKS_LATE = False 102 | -------------------------------------------------------------------------------- /cloudify/celery/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | -------------------------------------------------------------------------------- /cloudify/celery/app.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from cloudify import broker_config 17 | from cloudify.utils import internal 18 | from cloudify.constants import (CELERY_TASK_RESULT_EXPIRES, 19 | MGMTWORKER_QUEUE, 20 | BROKER_PORT_SSL, 21 | BROKER_PORT_NO_SSL) 22 | 23 | 24 | def get_celery_app(broker_url=None, 25 | broker_ssl_cert_path=None, 26 | broker_ssl_enabled=None, 27 | max_retries=3, 28 | tenant=None, 29 | target=None): 30 | """ 31 | Return a Celery app 32 | 33 | :param broker_url: If supplied, will be used as the broker URL 34 | :param broker_ssl_cert_path: If not supplied, default is in broker_config 35 | :param broker_ssl_enabled: Decides whether SSL should be enabled 36 | :param tenant: If supplied, and if target isn't the mgmtworker queue, 37 | the broker URL will be derived from the data kept in it 38 | :param target: The target queue; see `tenant` 39 | :param max_retries: 40 | :return: A celery.Celery object 41 | """ 42 | # celery is imported locally since it's not used by any other method, and 43 | # we want this utils module to be usable even if celery is not available 44 | from celery import Celery 45 | 46 | if broker_ssl_enabled is None: 47 | broker_ssl_enabled = broker_config.broker_ssl_enabled 48 | 49 | broker_url = broker_url or _get_broker_url(tenant, 50 | target, 51 | broker_ssl_enabled) 52 | broker_ssl_options = internal.get_broker_ssl_options( 53 | ssl_enabled=broker_ssl_enabled, 54 | cert_path=broker_ssl_cert_path or broker_config.broker_cert_path 55 | ) 56 | 57 | celery_client = Celery() 58 | celery_client.conf.update( 59 | BROKER_URL=broker_url, 60 | CELERY_RESULT_BACKEND=broker_url, 61 | BROKER_USE_SSL=broker_ssl_options, 62 | CELERY_TASK_RESULT_EXPIRES=CELERY_TASK_RESULT_EXPIRES 63 | ) 64 | 65 | # Connect eagerly to error out as early as possible, and to force choosing 66 | # the broker if multiple urls were passed. 67 | # If max_retries is provided and >0, we will raise an exception if we 68 | # can't connect; otherwise we'll keep retrying forever. 69 | # Need to raise an exception in the case of a cluster, so that the 70 | # next node can be tried 71 | celery_client.pool.connection.ensure_connection(max_retries=max_retries) 72 | return celery_client 73 | 74 | 75 | def get_cluster_celery_app(broker_urls, cluster, ssl_enabled, 76 | broker_ssl_cert_path=None): 77 | err = None 78 | for broker_url in broker_urls: 79 | try: 80 | return get_celery_app( 81 | broker_url=broker_url, 82 | broker_ssl_cert_path=broker_ssl_cert_path, 83 | broker_ssl_enabled=ssl_enabled, 84 | max_retries=1) 85 | except Exception as err: 86 | continue 87 | if err is not None: 88 | raise err 89 | 90 | 91 | def _get_broker_url(tenant, target, broker_ssl_enabled): 92 | """ 93 | If the target is the mgmtworker queue, or if no tenants was passed use 94 | the default broker URL. Otherwise, create a tenant-specific one 95 | """ 96 | if target == MGMTWORKER_QUEUE or not tenant: 97 | return broker_config.BROKER_URL 98 | else: 99 | return _get_tenant_broker_url(tenant, broker_ssl_enabled) 100 | 101 | 102 | def _get_tenant_broker_url(tenant, broker_ssl_enabled): 103 | return broker_config.URL_TEMPLATE.format( 104 | username=tenant['rabbitmq_username'], 105 | password=tenant['rabbitmq_password'], 106 | hostname=broker_config.broker_hostname, 107 | port=BROKER_PORT_SSL if broker_ssl_enabled else BROKER_PORT_NO_SSL, 108 | vhost=tenant['rabbitmq_vhost'], 109 | options='' 110 | ) 111 | -------------------------------------------------------------------------------- /cloudify/celery/logging_server.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from __future__ import absolute_import 17 | 18 | import functools 19 | import json 20 | import logging 21 | import logging.handlers 22 | import os 23 | import random 24 | import threading 25 | import tempfile 26 | 27 | import zmq 28 | from celery import bootsteps 29 | from celery.bin import Option 30 | from celery.utils.log import get_logger 31 | 32 | from cloudify.proxy import server 33 | from cloudify.lru_cache import lru_cache 34 | 35 | logger = get_logger(__name__) 36 | 37 | 38 | LOGFILE_SIZE_BYTES = 5 * 1024 * 1024 39 | LOGFILE_BACKUP_COUNT = 5 40 | 41 | 42 | def configure_app(app): 43 | app.user_options['worker'].add( 44 | Option('--with-logging-server', action='store_true', 45 | default=False, help='Enable logging server')) 46 | app.user_options['worker'].add( 47 | Option('--logging-server-logdir', action='store', 48 | help='logdir location')) 49 | app.user_options['worker'].add( 50 | Option('--logging-server-handler-cache-size', action='store', 51 | type='int', default=100, 52 | help='Maximum number of file handlers that can be open at any ' 53 | 'given time')) 54 | app.steps['worker'].add(ZMQLoggingServerBootstep) 55 | 56 | 57 | class ZMQLoggingServerBootstep(bootsteps.StartStopStep): 58 | 59 | label = 'logging server' 60 | conditional = True 61 | 62 | def __init__(self, worker, 63 | with_logging_server=False, 64 | logging_server_logdir=None, 65 | logging_server_handler_cache_size=100, 66 | **kwargs): 67 | worker.logging_server = self 68 | self.enabled = with_logging_server 69 | self.logging_server = None 70 | self.logdir = logging_server_logdir 71 | self.cache_size = logging_server_handler_cache_size 72 | self.thread = None 73 | self.socket_url = None 74 | 75 | def info(self, worker): 76 | return { 77 | 'logging_server': { 78 | 'enabled': self.enabled, 79 | 'logdir': self.logdir, 80 | 'socket_url': self.socket_url, 81 | 'cache_size': self.cache_size 82 | } 83 | } 84 | 85 | def start(self, worker): 86 | log_prefix = '| {0}: {1}'.format(type(worker).__name__, self.label) 87 | if not self.enabled: 88 | logger.debug('{0}: enabled={1}'.format(log_prefix, self.enabled)) 89 | return 90 | if not self.logdir: 91 | raise ValueError('--logging-server-logdir must be supplied') 92 | if os.name == 'nt': 93 | self.socket_url = 'tcp://127.0.0.1:{0}'.format( 94 | server.get_unused_port()) 95 | else: 96 | suffix = '%05x' % random.randrange(16 ** 5) 97 | self.socket_url = ('ipc://{0}/cloudify-logging-server-{1}.socket' 98 | .format(tempfile.gettempdir(), suffix)) 99 | if not os.path.exists(self.logdir): 100 | os.makedirs(self.logdir) 101 | self.logging_server = ZMQLoggingServer(socket_url=self.socket_url, 102 | logdir=self.logdir, 103 | cache_size=self.cache_size) 104 | self.thread = threading.Thread(target=self.logging_server.start) 105 | self.thread.start() 106 | logger.debug('{0}: enabled={1}, logdir={2}, socket_url={3}' 107 | .format(log_prefix, 108 | self.enabled, 109 | self.logdir, 110 | self.socket_url)) 111 | 112 | def _stop_logging_server(self, worker): 113 | if not self.enabled: 114 | return 115 | self.logging_server.close() 116 | 117 | stop = _stop_logging_server 118 | close = _stop_logging_server 119 | shutdown = _stop_logging_server 120 | 121 | 122 | class ZMQLoggingServer(object): 123 | 124 | def __init__(self, logdir, socket_url, cache_size): 125 | self.closed = False 126 | self.zmq_context = zmq.Context(io_threads=1) 127 | self.socket = self.zmq_context.socket(zmq.PULL) 128 | self.socket.bind(socket_url) 129 | self.poller = zmq.Poller() 130 | self.poller.register(self.socket, zmq.POLLIN) 131 | self.logdir = logdir 132 | 133 | # on the management server, log files are handled by logrotate 134 | # with copytruncate so we use the simple FileHandler. 135 | # on agent hosts, we want to rotate the logs using python's 136 | # RotatingFileHandler. 137 | if os.environ.get('MGMTWORKER_HOME'): 138 | self.handler_func = logging.FileHandler 139 | else: 140 | self.handler_func = functools.partial( 141 | logging.handlers.RotatingFileHandler, 142 | maxBytes=LOGFILE_SIZE_BYTES, 143 | backupCount=LOGFILE_BACKUP_COUNT) 144 | 145 | # wrap the _get_handler method with an lru cache decorator 146 | # so we only keep the last 'cache_size' used handlers in in turn 147 | # have at most 'cache_size' file descriptors open 148 | cache_decorator = lru_cache(maxsize=cache_size, 149 | on_purge=lambda handler: handler.close()) 150 | self._get_handler = cache_decorator(self._get_handler) 151 | 152 | def start(self): 153 | while not self.closed: 154 | try: 155 | if self.poller.poll(1000): 156 | message = json.loads(self.socket.recv(), encoding='utf-8') 157 | self._process(message) 158 | except Exception: 159 | if not self.closed: 160 | logger.warning('Error raised during record processing', 161 | exc_info=True) 162 | 163 | def close(self): 164 | if not self.closed: 165 | self.closed = True 166 | self.socket.close() 167 | self.zmq_context.term() 168 | self._get_handler.clear() 169 | 170 | def _process(self, entry): 171 | handler = self._get_handler(entry['context']) 172 | handler.emit(Record(entry['message'])) 173 | 174 | def _get_handler(self, handler_context): 175 | logfile = os.path.join(self.logdir, '{0}.log'.format(handler_context)) 176 | handler = self.handler_func(logfile) 177 | handler.setFormatter(Formatter) 178 | return handler 179 | 180 | 181 | class Record(object): 182 | def __init__(self, message): 183 | self.message = message 184 | filename = None 185 | lineno = None 186 | 187 | 188 | class Formatter(object): 189 | @staticmethod 190 | def format(record): 191 | return record.message 192 | -------------------------------------------------------------------------------- /cloudify/cluster.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2017 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import os 17 | import json 18 | import pika 19 | import types 20 | import requests 21 | import itertools 22 | 23 | from cloudify import constants 24 | 25 | from cloudify_rest_client import CloudifyClient 26 | from cloudify_rest_client.client import HTTPClient 27 | from cloudify_rest_client.exceptions import (CloudifyClientError, 28 | NotClusterMaster) 29 | 30 | 31 | def _get_cluster_settings_file(filename=None): 32 | if filename is None \ 33 | and constants.CLUSTER_SETTINGS_PATH_KEY not in os.environ: 34 | return None 35 | return filename or os.environ[constants.CLUSTER_SETTINGS_PATH_KEY] 36 | 37 | 38 | def get_cluster_settings(filename=None): 39 | filename = _get_cluster_settings_file(filename=filename) 40 | if not filename: 41 | return None 42 | try: 43 | with open(filename) as f: 44 | return json.load(f) 45 | except (IOError, ValueError): 46 | return None 47 | 48 | 49 | def set_cluster_settings(settings, filename=None): 50 | filename = _get_cluster_settings_file(filename=filename) 51 | if not filename: 52 | return None 53 | dirname = os.path.dirname(filename) 54 | if dirname and not os.path.exists(dirname): 55 | os.makedirs(dirname) 56 | 57 | with open(filename, 'w') as f: 58 | json.dump(settings, f, indent=4, sort_keys=True) 59 | 60 | 61 | def get_cluster_nodes(filename=None): 62 | settings = get_cluster_settings(filename=filename) 63 | if not settings: 64 | return None 65 | return settings.get('nodes') 66 | 67 | 68 | def set_cluster_nodes(nodes, filename=None): 69 | settings = get_cluster_settings(filename=filename) or {} 70 | if not settings and not nodes: 71 | return 72 | settings['nodes'] = nodes 73 | set_cluster_settings(settings, filename=filename) 74 | active = get_cluster_active(filename=filename) 75 | if active is None: 76 | set_cluster_active(nodes[0], filename=filename) 77 | return nodes 78 | 79 | 80 | def get_cluster_active(filename=None): 81 | settings = get_cluster_settings(filename=filename) 82 | if not settings: 83 | return None 84 | 85 | active = settings.get('active') 86 | if active: 87 | return active 88 | # when we don't know which node is the active, try the first one on the 89 | # list - if it's a replica, we'll failover normally 90 | nodes = get_cluster_nodes(filename=filename) 91 | if nodes: 92 | set_cluster_active(nodes[0], filename=filename) 93 | return nodes[0] 94 | 95 | 96 | def set_cluster_active(node, filename=None): 97 | settings = get_cluster_settings(filename=filename) or {} 98 | if not settings and not node: 99 | return 100 | settings['active'] = node 101 | set_cluster_settings(settings, filename=filename) 102 | 103 | 104 | def get_cluster_amqp_settings(): 105 | active = get_cluster_active() 106 | if active: 107 | return {'amqp_host': active} 108 | else: 109 | return {} 110 | 111 | 112 | def delete_cluster_settings(filename=None): 113 | """Remove all cluster settings. 114 | 115 | Delete the settings file, and also all stored certificates - find the 116 | certs dir first. 117 | """ 118 | filename = _get_cluster_settings_file(filename) 119 | if not filename: 120 | return 121 | try: 122 | os.remove(filename) 123 | except (OSError, IOError): 124 | # the file doesn't exist? 125 | pass 126 | 127 | 128 | def is_cluster_configured(filename=None): 129 | path = _get_cluster_settings_file(filename=filename) 130 | if not path or not os.path.exists(path): 131 | return False 132 | return get_cluster_active(filename=path) is not None 133 | 134 | 135 | def _parse_url(broker_url): 136 | params = pika.URLParameters(broker_url) 137 | return params.host 138 | 139 | 140 | def config_from_broker_urls(active, nodes): 141 | settings = { 142 | 'active': _parse_url(active), 143 | 'nodes': [_parse_url(node) for node in nodes if node] 144 | } 145 | set_cluster_settings(settings) 146 | 147 | 148 | class ClusterHTTPClient(HTTPClient): 149 | default_timeout_sec = 5 150 | retries = 30 151 | retry_interval = 3 152 | 153 | def __init__(self, *args, **kwargs): 154 | super(ClusterHTTPClient, self).__init__(*args, **kwargs) 155 | 156 | def do_request(self, *args, **kwargs): 157 | kwargs.setdefault('timeout', self.default_timeout_sec) 158 | 159 | copied_data = None 160 | if isinstance(kwargs.get('data'), types.GeneratorType): 161 | copied_data = itertools.tee(kwargs.pop('data'), self.retries) 162 | 163 | for retry in range(self.retries): 164 | active = get_cluster_active() 165 | if active is not None: 166 | self._use_node(active) 167 | if copied_data is not None: 168 | kwargs['data'] = copied_data[retry] 169 | 170 | try: 171 | return super(ClusterHTTPClient, self).do_request(*args, 172 | **kwargs) 173 | except (NotClusterMaster, requests.exceptions.ConnectionError): 174 | continue 175 | 176 | raise CloudifyClientError('No active node in the cluster!') 177 | 178 | def _use_node(self, node_ip): 179 | self.host = node_ip 180 | 181 | 182 | class CloudifyClusterClient(CloudifyClient): 183 | client_class = ClusterHTTPClient 184 | -------------------------------------------------------------------------------- /cloudify/compute/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import email 17 | from email.mime.multipart import MIMEMultipart 18 | from email.mime.text import MIMEText 19 | 20 | from cloudify import exceptions 21 | 22 | SCRIPT_MIME_TYPE = 'text/x-shellscript' 23 | mapping_prefixes = { 24 | '#include': 'text/x-include-url', 25 | '#include-once': 'text/x-include-once-url', 26 | '#cloud-config': 'text/cloud-config', 27 | '#cloud-config-archive': 'text/cloud-config-archive', 28 | '#upstart-job': 'text/upstart-job', 29 | '#part-handler': 'text/part-handler', 30 | '#cloud-boothook': 'text/cloud-boothook', 31 | '#!': SCRIPT_MIME_TYPE, 32 | 'rem cmd': SCRIPT_MIME_TYPE, 33 | '#ps1_sysnative': SCRIPT_MIME_TYPE, 34 | '#ps1_x86': SCRIPT_MIME_TYPE 35 | } 36 | 37 | script_extensions = { 38 | '#!': 'sh', 39 | 'rem cmd': 'cmd', 40 | '#ps1_sysnative': 'ps1', 41 | '#ps1_x86': 'ps1' 42 | } 43 | 44 | 45 | def _find_type(line): 46 | sorted_prefixes = sorted(mapping_prefixes, key=lambda e: -len(e)) 47 | for possible_prefix in sorted_prefixes: 48 | if line.startswith(possible_prefix): 49 | return mapping_prefixes[possible_prefix] 50 | raise exceptions.NonRecoverableError( 51 | 'Unhandled userdata that starts with: {0}'.format(line)) 52 | 53 | 54 | def _find_extension(line): 55 | sorted_prefixes = sorted(script_extensions, key=lambda e: -len(e)) 56 | for possible_prefix in sorted_prefixes: 57 | if line.startswith(possible_prefix): 58 | return script_extensions[possible_prefix] 59 | return None 60 | 61 | 62 | def create_multi_mimetype_userdata(userdatas): 63 | """Compose a multi mime message from provided userdata parts 64 | 65 | See https://lists.ubuntu.com/archives/ubuntu-cloud/2013-March/000887.html 66 | for a better understanding on the order in which parts will be processed 67 | by cloud-init 68 | 69 | :param userdatas: list of userdata parts 70 | :return: Multi mime type message that composes all provided userdata parts 71 | into a single message in the order in which they were provided 72 | """ 73 | 74 | index = 0 75 | outer = MIMEMultipart() 76 | for userdata in userdatas: 77 | parsed = email.message_from_string(userdata) 78 | if parsed.is_multipart(): 79 | for msg in parsed.walk(): 80 | if msg.get_content_maintype().lower() != 'multipart': 81 | prefix = '{0:03d}'.format(index) 82 | filename = msg.get_param('filename', 83 | header='Content-Disposition') 84 | suffix = '' 85 | if filename: 86 | suffix = '.{0}'.format(filename.split('.')[-1]) 87 | filename = '{0}{1}'.format(prefix, suffix) 88 | try: 89 | msg.replace_header('Content-Disposition', 'attachment') 90 | msg.set_param('filename', filename, 91 | header='Content-Disposition') 92 | except KeyError: 93 | msg.add_header('Content-Disposition', 'attachment', 94 | filename=filename) 95 | outer.attach(msg) 96 | index += 1 97 | else: 98 | first_line = userdata.split('\n')[0] 99 | mtype = _find_type(first_line) 100 | # extension is required by older versions of cloudbase-init 101 | # to select the proper shell 102 | extension = _find_extension(first_line) 103 | _, subtype = mtype.split('/', 1) 104 | msg = MIMEText(userdata, _subtype=subtype) 105 | prefix = '{0:03d}'.format(index) 106 | suffix = '' 107 | if extension: 108 | suffix = '.{0}'.format(extension) 109 | filename = '{0}{1}'.format(prefix, suffix) 110 | msg.add_header('Content-Disposition', 'attachment', 111 | filename=filename) 112 | outer.attach(msg) 113 | index += 1 114 | # cloudbase-init doesn't handle the From prefix well 115 | return outer.as_string(unixfrom=False) 116 | -------------------------------------------------------------------------------- /cloudify/conflict_handlers.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | def simple_merge_handler(previous_props, next_props): 18 | """Merge properties if the keys on old and new are disjoint. 19 | 20 | Return a mapping containing the values from both old and new properties, 21 | but only if there is no key that exists in both. 22 | """ 23 | for name, value in previous_props.items(): 24 | if name in next_props and value != next_props[name]: 25 | raise ValueError('Cannot merge - {0} changed (old value: {1}, ' 26 | ' new value: {2})'.format( 27 | name, value, next_props[name])) 28 | if name not in next_props: 29 | next_props[name] = value 30 | return next_props 31 | -------------------------------------------------------------------------------- /cloudify/constants.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | REST_HOST_KEY = 'REST_HOST' 17 | REST_PORT_KEY = 'REST_PORT' 18 | CELERY_WORK_DIR_KEY = 'CELERY_WORK_DIR' 19 | MANAGER_FILE_SERVER_URL_KEY = 'MANAGER_FILE_SERVER_URL' 20 | MANAGER_FILE_SERVER_ROOT_KEY = 'MANAGER_FILE_SERVER_ROOT' 21 | FILE_SERVER_RESOURCES_FOLDER = 'resources' 22 | FILE_SERVER_BLUEPRINTS_FOLDER = 'blueprints' 23 | FILE_SERVER_DEPLOYMENTS_FOLDER = 'deployments' 24 | FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER = 'uploaded-blueprints' 25 | FILE_SERVER_SNAPSHOTS_FOLDER = 'snapshots' 26 | FILE_SERVER_PLUGINS_FOLDER = 'plugins' 27 | FILE_SERVER_GLOBAL_RESOURCES_FOLDER = 'global-resources' 28 | FILE_SERVER_TENANT_RESOURCES_FOLDER = 'tenant-resources' 29 | FILE_SERVER_AUTHENTICATORS_FOLDER = 'authenticators' 30 | 31 | AGENT_INSTALL_METHOD_NONE = 'none' 32 | AGENT_INSTALL_METHOD_REMOTE = 'remote' 33 | AGENT_INSTALL_METHOD_INIT_SCRIPT = 'init_script' 34 | AGENT_INSTALL_METHOD_PROVIDED = 'provided' 35 | AGENT_INSTALL_METHOD_PLUGIN = 'plugin' 36 | AGENT_INSTALL_METHODS = [ 37 | AGENT_INSTALL_METHOD_NONE, 38 | AGENT_INSTALL_METHOD_REMOTE, 39 | AGENT_INSTALL_METHOD_INIT_SCRIPT, 40 | AGENT_INSTALL_METHOD_PROVIDED, 41 | AGENT_INSTALL_METHOD_PLUGIN 42 | ] 43 | AGENT_INSTALL_METHODS_SCRIPTS = [ 44 | AGENT_INSTALL_METHOD_INIT_SCRIPT, 45 | AGENT_INSTALL_METHOD_PROVIDED, 46 | AGENT_INSTALL_METHOD_PLUGIN 47 | ] 48 | 49 | COMPUTE_NODE_TYPE = 'cloudify.nodes.Compute' 50 | 51 | BROKER_PORT_NO_SSL = 5672 52 | BROKER_PORT_SSL = 5671 53 | CELERY_TASK_RESULT_EXPIRES = 600 54 | CLOUDIFY_TOKEN_AUTHENTICATION_HEADER = 'Authentication-Token' 55 | LOCAL_REST_CERT_FILE_KEY = 'LOCAL_REST_CERT_FILE' 56 | SECURED_PROTOCOL = 'https' 57 | 58 | BROKER_SSL_CERT_PATH = 'BROKER_SSL_CERT_PATH' 59 | BYPASS_MAINTENANCE = 'BYPASS_MAINTENANCE' 60 | LOGGING_CONFIG_FILE = '/etc/cloudify/logging.conf' 61 | CLUSTER_SETTINGS_PATH_KEY = 'CLOUDIFY_CLUSTER_SETTINGS_PATH' 62 | 63 | MGMTWORKER_QUEUE = 'cloudify.management' 64 | DEPLOYMENT = 'deployment' 65 | NODE_INSTANCE = 'node-instance' 66 | RELATIONSHIP_INSTANCE = 'relationship-instance' 67 | 68 | DEFAULT_NETWORK_NAME = 'default' 69 | -------------------------------------------------------------------------------- /cloudify/ctx_wrappers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cloudify-cosmo/cloudify-plugins-common/03f8f0a9d27081c0b412b263f4bda63556019d00/cloudify/ctx_wrappers/__init__.py -------------------------------------------------------------------------------- /cloudify/decorators.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | def operation(func=None, **kwargs): 18 | """This decorator does not do anything and is kept for backwards 19 | compatibility. It is not required for operations to work. 20 | """ 21 | return func or operation 22 | 23 | 24 | def workflow(func=None, system_wide=False, **kwargs): 25 | """This decorator should only be used to decorate system wide 26 | workflows. It is not required for regular workflows. 27 | """ 28 | if func: 29 | func.workflow_system_wide = system_wide 30 | return func 31 | else: 32 | return lambda fn: workflow(fn, system_wide) 33 | 34 | 35 | task = operation 36 | -------------------------------------------------------------------------------- /cloudify/event.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from StringIO import StringIO 17 | 18 | HIGH_VERBOSE = 3 19 | MEDIUM_VERBOSE = 2 20 | LOW_VERBOSE = 1 21 | NO_VERBOSE = 0 22 | 23 | 24 | class Event(object): 25 | 26 | def __init__(self, event, verbosity_level=None): 27 | self._event = event 28 | self._verbosity_level = verbosity_level 29 | 30 | def __str__(self): 31 | deployment_id = self.deployment_id 32 | printable_timestamp = self.printable_timestamp 33 | event_type_indicator = self.event_type_indicator 34 | message = self.text 35 | info = self.operation_info 36 | 37 | if info: # spacing in between of the info and the message 38 | info += ' ' 39 | 40 | return '{0} {1} {2} {3}{4}'.format( 41 | printable_timestamp, 42 | event_type_indicator, 43 | deployment_id, 44 | info, 45 | message) 46 | 47 | @property 48 | def has_output(self): 49 | return (not self.is_log_message or 50 | self._verbosity_level >= MEDIUM_VERBOSE or 51 | self.log_level != 'DEBUG') 52 | 53 | @property 54 | def operation_info(self): 55 | operation = self.operation 56 | node_id = self.node_id 57 | source_id = self.source_id 58 | target_id = self.target_id 59 | 60 | context = self._event['context'] 61 | group = context.get('group') 62 | policy = context.get('policy') 63 | trigger = context.get('trigger') 64 | 65 | if source_id is not None: 66 | info = '{0}->{1}|{2}'.format(source_id, target_id, operation) 67 | else: 68 | info_elements = [ 69 | e for e in [node_id, operation, group, policy, trigger] 70 | if e is not None] 71 | info = '.'.join(info_elements) 72 | if info: 73 | info = '[{0}]'.format(info) 74 | return info 75 | 76 | @property 77 | def text(self): 78 | message = self._event['message']['text'].encode('utf-8') 79 | if self.is_log_message: 80 | message = '{0}: {1}'.format(self.log_level, message) 81 | elif (self.event_type in ('task_rescheduled', 'task_failed') and 82 | self._verbosity_level > NO_VERBOSE): 83 | causes = self._event['context'].get('task_error_causes', []) 84 | if causes: 85 | multiple_causes = len(causes) > 1 86 | causes_out = StringIO() 87 | if multiple_causes: 88 | causes_out.write('Causes (most recent cause last):\n') 89 | for cause in causes: 90 | if multiple_causes: 91 | causes_out.write('{0}\n'.format('-' * 32)) 92 | causes_out.write(cause.get('traceback', '')) 93 | 94 | message = '{0}\n{1}'.format(message, causes_out.getvalue()) 95 | return message 96 | 97 | @property 98 | def log_level(self): 99 | return self._event['level'].upper() 100 | 101 | @property 102 | def timestamp(self): 103 | return self._event.get('@timestamp') or self._event['timestamp'] 104 | 105 | @property 106 | def printable_timestamp(self): 107 | return self.timestamp.replace('T', ' ').replace('Z', '') 108 | 109 | @property 110 | def event_type_indicator(self): 111 | return 'LOG' if self.is_log_message else 'CFY' 112 | 113 | @property 114 | def operation(self): 115 | op = self._event['context'].get('operation') 116 | if op is None: 117 | return None 118 | return op.split('.')[-1] 119 | 120 | @property 121 | def node_id(self): 122 | return self._event['context'].get('node_id') 123 | 124 | @property 125 | def source_id(self): 126 | return self._event['context'].get('source_id') 127 | 128 | @property 129 | def target_id(self): 130 | return self._event['context'].get('target_id') 131 | 132 | @property 133 | def deployment_id(self): 134 | return '<{0}>'.format(self._event['context']['deployment_id']) 135 | 136 | @property 137 | def event_type(self): 138 | return self._event.get('event_type') # not available for logs 139 | 140 | @property 141 | def is_log_message(self): 142 | return 'cloudify_log' in self._event['type'] 143 | -------------------------------------------------------------------------------- /cloudify/exceptions.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | class NonRecoverableError(Exception): 18 | """ 19 | An error raised by plugins to denote that no retry should be attempted by 20 | by the executing workflow engine. 21 | """ 22 | def __init__(self, *args, **kwargs): 23 | self.causes = kwargs.pop('causes', []) or [] 24 | super(NonRecoverableError, self).__init__(*args, **kwargs) 25 | 26 | 27 | class RecoverableError(Exception): 28 | """ 29 | An error raised by plugins to explicitly denote that this is a recoverable 30 | error (note that this is the default behavior). It is possible specifying 31 | how many seconds should pass before a retry is attempted thus overriding 32 | the bootstrap context configuration parameter: 33 | ``cloudify.workflows.retry_interval`` 34 | 35 | :param retry_after: How many seconds should the workflow engine wait 36 | before re-executing the task the raised this 37 | exception. (only applies when the workflow engine 38 | decides that this task should be retried) 39 | """ 40 | 41 | def __init__(self, message='', retry_after=None, causes=None, **kwargs): 42 | if retry_after is not None: 43 | suffix = '[retry_after={0}]'.format(retry_after) 44 | if suffix not in message: 45 | message = '{0} {1}'.format(message, suffix) 46 | self.retry_after = retry_after 47 | self.causes = causes or [] 48 | super(RecoverableError, self).__init__(message, **kwargs) 49 | 50 | 51 | class OperationRetry(RecoverableError): 52 | """ 53 | An error raised internally when an operation uses the ctx.operation.retry 54 | API for specifying that an operation should be retried. 55 | """ 56 | pass 57 | 58 | 59 | class HttpException(NonRecoverableError): 60 | """ 61 | Wraps HTTP based exceptions that may be raised. 62 | 63 | :param url: The url the request was made to. 64 | :param code: The response status code. 65 | :param message: The underlying reason for the error. 66 | 67 | """ 68 | 69 | def __init__(self, url, code, message, causes=None, **kwargs): 70 | self.url = url 71 | self.code = code 72 | self.message = message 73 | super(HttpException, self).__init__(str(self), causes=causes, **kwargs) 74 | 75 | def __str__(self): 76 | return "{0} ({1}) : {2}".format(self.code, self.url, self.message) 77 | 78 | 79 | class CommandExecutionError(RuntimeError): 80 | 81 | """ 82 | Indicates a command failed to execute. note that this is different than 83 | the CommandExecutionException in that in this case, the command 84 | execution did not even start, and therefore there is not return code or 85 | stdout output. 86 | 87 | :param command: The command executed 88 | :param error: the error preventing the command from executing 89 | """ 90 | 91 | def __init__(self, command, error=None): 92 | self.command = command 93 | self.error = error 94 | super(RuntimeError, self).__init__(self.__str__()) 95 | 96 | def __str__(self): 97 | return "Failed executing command: {0}." \ 98 | "\nerror: {1}".format(self.command, self.error) 99 | 100 | 101 | class CommandExecutionException(Exception): 102 | 103 | """ 104 | Indicates a command was executed, however some sort of error happened, 105 | resulting in a non-zero return value of the process. 106 | 107 | :param command: The command executed 108 | :param code: process exit code 109 | :param error: process stderr output 110 | :param output: process stdout output 111 | """ 112 | 113 | def __init__(self, command, error, output, code): 114 | self.command = command 115 | self.error = error 116 | self.code = code 117 | self.output = output 118 | Exception.__init__(self, self.__str__()) 119 | 120 | def __str__(self): 121 | return "Command '{0}' executed with an error." \ 122 | "\ncode: {1}" \ 123 | "\nerror: {2}" \ 124 | "\noutput: {3}" \ 125 | .format(self.command, self.code, 126 | self.error or None, 127 | self.output or None) 128 | 129 | 130 | class TimeoutException(Exception): 131 | """Indicates some kind of timeout happened.""" 132 | pass 133 | 134 | 135 | class ProcessExecutionError(RuntimeError): 136 | """Raised by the workflow engine when workflow execution fails.""" 137 | 138 | def __init__(self, message, error_type=None, traceback=None, causes=None, 139 | **kwargs): 140 | super(ProcessExecutionError, self).__init__(message, **kwargs) 141 | self.error_type = error_type 142 | self.traceback = traceback 143 | self.causes = causes 144 | 145 | def __str__(self): 146 | if self.error_type: 147 | return '{0}: {1}'.format(self.error_type, self.message) 148 | return self.message 149 | 150 | 151 | class ClosedAMQPClientException(Exception): 152 | """Raised when attempting to use a closed AMQP client""" 153 | pass 154 | -------------------------------------------------------------------------------- /cloudify/lru_cache.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | # Copied and modified from 17 | # http://code.activestate.com/recipes/498245-lru-and-lfu-cache-decorators/ 18 | 19 | import collections 20 | import functools 21 | from itertools import ifilterfalse 22 | 23 | 24 | def lru_cache(maxsize=100, on_purge=None): 25 | """Least-recently-used cache decorator. 26 | 27 | Arguments to the cached function must be hashable. 28 | Clear the cache with f.clear(). 29 | """ 30 | maxqueue = maxsize * 10 31 | 32 | def decorating_function(user_function): 33 | cache = {} 34 | queue = collections.deque() 35 | refcount = collections.defaultdict(int) 36 | sentinel = object() 37 | kwd_mark = object() 38 | 39 | # lookup optimizations (ugly but fast) 40 | queue_append, queue_popleft = queue.append, queue.popleft 41 | queue_appendleft, queue_pop = queue.appendleft, queue.pop 42 | 43 | @functools.wraps(user_function) 44 | def wrapper(*args, **kwargs): 45 | # cache key records both positional and keyword args 46 | key = args 47 | if kwargs: 48 | key += (kwd_mark,) + tuple(sorted(kwargs.items())) 49 | 50 | # record recent use of this key 51 | queue_append(key) 52 | refcount[key] += 1 53 | 54 | # get cache entry or compute if not found 55 | try: 56 | result = cache[key] 57 | except KeyError: 58 | result = user_function(*args, **kwargs) 59 | cache[key] = result 60 | 61 | # purge least recently used cache entry 62 | if len(cache) > maxsize: 63 | key = queue_popleft() 64 | refcount[key] -= 1 65 | while refcount[key]: 66 | key = queue_popleft() 67 | refcount[key] -= 1 68 | if on_purge: 69 | on_purge(cache[key]) 70 | del cache[key], refcount[key] 71 | 72 | # periodically compact the queue by eliminating duplicate keys 73 | # while preserving order of most recent access 74 | if len(queue) > maxqueue: 75 | refcount.clear() 76 | queue_appendleft(sentinel) 77 | for key in ifilterfalse(refcount.__contains__, 78 | iter(queue_pop, sentinel)): 79 | queue_appendleft(key) 80 | refcount[key] = 1 81 | return result 82 | 83 | def clear(): 84 | if on_purge: 85 | for value in cache.itervalues(): 86 | on_purge(value) 87 | cache.clear() 88 | queue.clear() 89 | refcount.clear() 90 | 91 | wrapper._cache = cache 92 | wrapper.clear = clear 93 | return wrapper 94 | return decorating_function 95 | -------------------------------------------------------------------------------- /cloudify/mocks.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | from cloudify.context import (CloudifyContext, 18 | ContextCapabilities, 19 | BootstrapContext) 20 | from cloudify.utils import setup_logger 21 | 22 | 23 | class MockRelationshipSubjectContext(object): 24 | def __init__(self, node, instance): 25 | self.node = node 26 | self.instance = instance 27 | 28 | 29 | class MockRelationshipContext(object): 30 | def __init__(self, target, type=None): 31 | self.target = target 32 | self.type = type 33 | 34 | 35 | class MockNodeInstanceContext(object): 36 | 37 | def __init__(self, id=None, runtime_properties=None, relationships=None): 38 | self._id = id 39 | self._runtime_properties = runtime_properties 40 | if relationships is None: 41 | relationships = [] 42 | self._relationships = relationships 43 | 44 | @property 45 | def id(self): 46 | return self._id 47 | 48 | @property 49 | def runtime_properties(self): 50 | return self._runtime_properties 51 | 52 | @property 53 | def relationships(self): 54 | return self._relationships 55 | 56 | def update(self): 57 | pass 58 | 59 | 60 | class MockNodeContext(object): 61 | 62 | def __init__(self, id=None, properties=None): 63 | self._id = id 64 | self._properties = properties 65 | 66 | @property 67 | def id(self): 68 | return self._id 69 | 70 | @property 71 | def name(self): 72 | return self._id 73 | 74 | @property 75 | def properties(self): 76 | return self._properties 77 | 78 | 79 | class MockContext(dict): 80 | 81 | def __init__(self, values=None): 82 | self.update(values or {}) 83 | 84 | def __getattr__(self, item): 85 | return self[item] 86 | 87 | 88 | class MockCloudifyContext(CloudifyContext): 89 | """ 90 | Cloudify context mock that can be used when testing Cloudify plugins. 91 | """ 92 | 93 | def __init__(self, 94 | node_id=None, 95 | node_name=None, 96 | blueprint_id=None, 97 | deployment_id=None, 98 | execution_id=None, 99 | properties=None, 100 | runtime_properties=None, 101 | relationships=None, 102 | capabilities=None, 103 | related=None, 104 | source=None, 105 | target=None, 106 | operation=None, 107 | resources=None, 108 | tenant=None, 109 | rest_token=None, 110 | provider_context=None, 111 | bootstrap_context=None): 112 | tenant = tenant or {} 113 | super(MockCloudifyContext, self).__init__({ 114 | 'blueprint_id': blueprint_id, 115 | 'deployment_id': deployment_id, 116 | 'node_id': node_id, 117 | 'node_name': node_name, 118 | 'node_properties': properties, 119 | 'operation': operation, 120 | 'tenant': tenant, 121 | 'rest_token': rest_token 122 | }) 123 | self._node_id = node_id 124 | self._node_name = node_name 125 | self._deployment_id = deployment_id 126 | self._execution_id = execution_id 127 | self._properties = properties or {} 128 | self._runtime_properties = \ 129 | runtime_properties if runtime_properties is not None else {} 130 | self._resources = resources or {} 131 | self._source = source 132 | self._target = target 133 | if capabilities and not isinstance(capabilities, ContextCapabilities): 134 | raise ValueError( 135 | "MockCloudifyContext(capabilities=?) must be " 136 | "instance of ContextCapabilities, not {0}".format( 137 | capabilities)) 138 | self._related = related 139 | self._provider_context = provider_context or {} 140 | self._bootstrap_context = bootstrap_context or BootstrapContext({}) 141 | self._mock_context_logger = setup_logger('mock-context-logger') 142 | if node_id: 143 | self._instance = MockNodeInstanceContext( 144 | id=node_id, 145 | runtime_properties=self._runtime_properties, 146 | relationships=relationships) 147 | self._capabilities = capabilities or ContextCapabilities( 148 | self._endpoint, self._instance) 149 | self._node = MockNodeContext(node_name, properties) 150 | if self._source is None and self._target: 151 | self._source = MockContext({ 152 | 'instance': None, 153 | 'node': None 154 | }) 155 | 156 | @property 157 | def execution_id(self): 158 | return self._execution_id 159 | 160 | @property 161 | def capabilities(self): 162 | return self._capabilities 163 | 164 | @property 165 | def logger(self): 166 | return self._mock_context_logger 167 | 168 | @property 169 | def provider_context(self): 170 | return self._provider_context 171 | 172 | @property 173 | def bootstrap_context(self): 174 | return self._bootstrap_context 175 | 176 | def download_resource(self, resource_path, target_path=None): 177 | if target_path: 178 | raise RuntimeError("MockCloudifyContext does not support " 179 | "download_resource() with target_path yet") 180 | if resource_path not in self._resources: 181 | raise RuntimeError( 182 | "Resource '{0}' was not found. " 183 | "Available resources: {1}".format(resource_path, 184 | self._resources.keys())) 185 | return self._resources[resource_path] 186 | 187 | def get_resource(self, resource_path): 188 | raise RuntimeError('get_resource() not implemented in context mock') 189 | 190 | def __contains__(self, key): 191 | return key in self._properties or key in self._runtime_properties 192 | 193 | def __setitem__(self, key, value): 194 | self._runtime_properties[key] = value 195 | 196 | def __getitem__(self, key): 197 | if key in self._properties: 198 | return self._properties[key] 199 | return self._runtime_properties[key] 200 | -------------------------------------------------------------------------------- /cloudify/plugins/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | -------------------------------------------------------------------------------- /cloudify/proxy/__init__.py: -------------------------------------------------------------------------------- 1 | ######### 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | -------------------------------------------------------------------------------- /cloudify/proxy/client.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ######### 3 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # * See the License for the specific language governing permissions and 15 | # * limitations under the License. 16 | 17 | import os 18 | import urllib2 19 | import json 20 | import argparse 21 | import sys 22 | 23 | 24 | # Environment variable for the socket url 25 | # (used by clients to locate the socket [http, zmq(unix, tcp)]) 26 | CTX_SOCKET_URL = 'CTX_SOCKET_URL' 27 | 28 | 29 | class ScriptException(Exception): 30 | def __init__(self, message=None, retry=False): 31 | super(Exception, self).__init__(message) 32 | self.retry = retry 33 | 34 | 35 | class RequestError(RuntimeError): 36 | 37 | def __init__(self, ex_message, ex_type, ex_traceback): 38 | super(RequestError, self).__init__( 39 | self, 40 | '{0}: {1}'.format(ex_type, ex_message)) 41 | self.ex_type = ex_type 42 | self.ex_message = ex_message 43 | self.ex_traceback = ex_traceback 44 | 45 | 46 | def zmq_client_req(socket_url, request, timeout): 47 | import zmq 48 | context = zmq.Context() 49 | sock = context.socket(zmq.REQ) 50 | try: 51 | sock.connect(socket_url) 52 | sock.send_json(request) 53 | if sock.poll(1000 * timeout): 54 | return sock.recv_json() 55 | else: 56 | raise RuntimeError('Timed out while waiting for response') 57 | finally: 58 | sock.close() 59 | context.term() 60 | 61 | 62 | def http_client_req(socket_url, request, timeout): 63 | response = urllib2.urlopen(socket_url, 64 | data=json.dumps(request), 65 | timeout=timeout) 66 | if response.code != 200: 67 | raise RuntimeError('Request failed: {0}'.format(response)) 68 | return json.loads(response.read()) 69 | 70 | 71 | def client_req(socket_url, args, timeout=5): 72 | request = { 73 | 'args': args 74 | } 75 | 76 | schema, _ = socket_url.split('://') 77 | if schema in ['ipc', 'tcp']: 78 | request_method = zmq_client_req 79 | elif schema in ['http']: 80 | request_method = http_client_req 81 | else: 82 | raise RuntimeError('Unsupported protocol: {0}'.format(schema)) 83 | 84 | response = request_method(socket_url, request, timeout) 85 | payload = response['payload'] 86 | response_type = response.get('type') 87 | if response_type == 'error': 88 | ex_type = payload['type'] 89 | ex_message = payload['message'] 90 | ex_traceback = payload['traceback'] 91 | raise RequestError(ex_message, 92 | ex_type, 93 | ex_traceback) 94 | elif response_type == 'stop_operation': 95 | raise SystemExit(payload['message']) 96 | else: 97 | return payload 98 | 99 | 100 | def parse_args(args=None): 101 | parser = argparse.ArgumentParser() 102 | parser.add_argument('-t', '--timeout', type=int, default=30) 103 | parser.add_argument('--socket-url', default=os.environ.get(CTX_SOCKET_URL)) 104 | parser.add_argument('--json-arg-prefix', default='@') 105 | parser.add_argument('-j', '--json-output', action='store_true') 106 | parser.add_argument('args', nargs='*') 107 | args = parser.parse_args(args) 108 | if not args.socket_url: 109 | raise RuntimeError('Missing CTX_SOCKET_URL environment variable' 110 | ' or socket_url command line argument') 111 | return args 112 | 113 | 114 | def process_args(json_prefix, args): 115 | processed_args = [] 116 | for arg in args: 117 | if arg.startswith(json_prefix): 118 | arg = json.loads(arg[1:]) 119 | processed_args.append(arg) 120 | return processed_args 121 | 122 | 123 | def main(args=None): 124 | args = parse_args(args) 125 | response = client_req(args.socket_url, 126 | process_args(args.json_arg_prefix, 127 | args.args), 128 | args.timeout) 129 | if args.json_output: 130 | response = json.dumps(response) 131 | else: 132 | if not response: 133 | response = '' 134 | response = str(response) 135 | sys.stdout.write(response) 136 | 137 | 138 | if __name__ == '__main__': 139 | main() 140 | -------------------------------------------------------------------------------- /cloudify/state.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | import threading 18 | from contextlib import contextmanager 19 | 20 | from proxy_tools import proxy 21 | 22 | 23 | class NotInContext(RuntimeError): 24 | """Attempted accesing a context, but no context is available. 25 | """ 26 | 27 | 28 | class CtxParameters(dict): 29 | 30 | def __init__(self, parameters): 31 | parameters = parameters or {} 32 | super(CtxParameters, self).__init__(parameters) 33 | 34 | def __getattr__(self, attr): 35 | if attr in self: 36 | return self.get(attr) 37 | else: 38 | raise KeyError(attr) 39 | 40 | 41 | class CurrentContext(threading.local): 42 | 43 | def set(self, ctx, parameters=None): 44 | self.ctx = ctx 45 | self.parameters = CtxParameters(parameters) 46 | 47 | def get_ctx(self): 48 | return self._get('ctx') 49 | 50 | def get_parameters(self): 51 | return self._get('parameters') 52 | 53 | def _get(self, attribute): 54 | if not hasattr(self, attribute): 55 | raise NotInContext('No context set in current execution thread') 56 | result = getattr(self, attribute) 57 | if result is None: 58 | raise NotInContext('No context set in current execution thread') 59 | return result 60 | 61 | def clear(self): 62 | if hasattr(self, 'ctx'): 63 | delattr(self, 'ctx') 64 | if hasattr(self, 'parameters'): 65 | delattr(self, 'parameters') 66 | 67 | @contextmanager 68 | def push(self, ctx, parameters=None): 69 | try: 70 | previous_ctx = self.get_ctx() 71 | except NotInContext: 72 | previous_ctx = None 73 | try: 74 | previous_parameters = self.get_parameters() 75 | except NotInContext: 76 | previous_parameters = None 77 | 78 | self.set(ctx, parameters) 79 | try: 80 | yield self 81 | finally: 82 | try: 83 | self.set(previous_ctx, previous_parameters) 84 | except Exception: 85 | # this can only happen during interpreter shutdown, if running 86 | # inside a daemon thread; in that case, things can fail 87 | # semi-randomly, and we should just ignore the exceptions 88 | # (see CFY-6802) 89 | pass 90 | 91 | 92 | current_ctx = CurrentContext() 93 | current_workflow_ctx = CurrentContext() 94 | 95 | 96 | @proxy 97 | def ctx(): 98 | return current_ctx.get_ctx() 99 | 100 | 101 | @proxy 102 | def ctx_parameters(): 103 | return current_ctx.get_parameters() 104 | 105 | 106 | @proxy 107 | def workflow_ctx(): 108 | return current_workflow_ctx.get_ctx() 109 | 110 | 111 | @proxy 112 | def workflow_parameters(): 113 | return current_workflow_ctx.get_parameters() 114 | -------------------------------------------------------------------------------- /cloudify/test_utils/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from local_workflow_decorator import WorkflowTestDecorator as workflow_test # NOQA 17 | -------------------------------------------------------------------------------- /cloudify/test_utils/dispatch_helper.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | from cloudify import dispatch 18 | 19 | 20 | def run(fn, *args, **kwargs): 21 | cloudify_context = kwargs.get('__cloudify_context', {}) 22 | cloudify_context.update({ 23 | 'type': 'operation', 24 | 'task_name': '{0}.{1}'.format(fn.__module__, fn.__name__) 25 | }) 26 | kwargs.pop('__cloudify_context', None) 27 | return dispatch.dispatch(cloudify_context, *args, **kwargs) 28 | -------------------------------------------------------------------------------- /cloudify/tests/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import logging 17 | import sys 18 | 19 | 20 | def get_logger(name): 21 | ch = logging.StreamHandler(sys.stdout) 22 | ch.setLevel(logging.DEBUG) 23 | formatter = logging.Formatter(fmt='%(asctime)s [%(levelname)s] ' 24 | '[%(name)s] %(message)s', 25 | datefmt='%H:%M:%S') 26 | ch.setFormatter(formatter) 27 | 28 | logger = logging.getLogger(name) 29 | for handler in logger.handlers: 30 | logger.removeHandler(handler) 31 | 32 | logger.addHandler(ch) 33 | logger.setLevel(logging.DEBUG) 34 | return logger 35 | -------------------------------------------------------------------------------- /cloudify/tests/file_server.py: -------------------------------------------------------------------------------- 1 | ######### 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | import subprocess 18 | import SimpleHTTPServer 19 | import SocketServer 20 | import os 21 | import sys 22 | import socket 23 | import time 24 | from multiprocessing import Process 25 | 26 | from cloudify.utils import setup_logger 27 | from cloudify import exceptions 28 | 29 | PORT = 53229 30 | FNULL = open(os.devnull, 'w') 31 | 32 | 33 | logger = setup_logger('cloudify.plugin.tests.file_server') 34 | 35 | 36 | class FileServer(object): 37 | 38 | def __init__(self, root_path, use_subprocess=False, timeout=5): 39 | self.root_path = root_path 40 | self.process = Process(target=self.start_impl) 41 | self.use_subprocess = use_subprocess 42 | self.timeout = timeout 43 | 44 | def start(self): 45 | logger.info('Starting file server') 46 | if self.use_subprocess: 47 | subprocess.Popen( 48 | [sys.executable, __file__, self.root_path], 49 | stdin=FNULL, 50 | stdout=FNULL, 51 | stderr=FNULL) 52 | else: 53 | self.process.start() 54 | 55 | end_time = time.time() + self.timeout 56 | 57 | while end_time > time.time(): 58 | if self.is_alive(): 59 | logger.info('File server is up and serving from {0}' 60 | .format(self.root_path)) 61 | return 62 | logger.info('File server is not responding. waiting 10ms') 63 | time.sleep(0.1) 64 | raise exceptions.TimeoutException('Failed starting ' 65 | 'file server in {0} seconds' 66 | .format(self.timeout)) 67 | 68 | def stop(self): 69 | try: 70 | logger.info('Shutting down file server') 71 | self.process.terminate() 72 | while self.is_alive(): 73 | logger.info('File server is still up. waiting for 10ms') 74 | time.sleep(0.1) 75 | logger.info('File server has shut down') 76 | except BaseException as e: 77 | logger.warning(str(e)) 78 | 79 | def start_impl(self): 80 | logger.info('Starting file server and serving files from: %s', 81 | self.root_path) 82 | os.chdir(self.root_path) 83 | 84 | class TCPServer(SocketServer.TCPServer): 85 | allow_reuse_address = True 86 | httpd = TCPServer(('0.0.0.0', PORT), 87 | SimpleHTTPServer.SimpleHTTPRequestHandler) 88 | httpd.serve_forever() 89 | 90 | @staticmethod 91 | def is_alive(): 92 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 93 | try: 94 | s.connect(('localhost', PORT)) 95 | s.close() 96 | return True 97 | except socket.error: 98 | return False 99 | 100 | 101 | if __name__ == '__main__': 102 | FileServer(sys.argv[1]).start_impl() 103 | -------------------------------------------------------------------------------- /cloudify/tests/mocks/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'ran' 2 | -------------------------------------------------------------------------------- /cloudify/tests/mocks/mock_rest_client.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); 2 | # you may not use this file except in compliance with the License. 3 | # You may obtain a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, 9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 10 | # See the License for the specific language governing permissions and 11 | # limitations under the License. 12 | ############ 13 | 14 | from cloudify_rest_client import CloudifyClient 15 | from cloudify_rest_client.node_instances import NodeInstance 16 | from cloudify_rest_client.executions import Execution 17 | 18 | 19 | node_instances = {} 20 | 21 | 22 | def put_node_instance(node_instance_id, 23 | state='started', 24 | runtime_properties=None, 25 | relationships=None): 26 | node_instances[node_instance_id] = NodeInstance({ 27 | 'id': node_instance_id, 28 | 'state': state, 29 | 'version': 0, 30 | 'runtime_properties': runtime_properties, 31 | 'relationships': relationships 32 | }) 33 | 34 | 35 | class MockRestclient(CloudifyClient): 36 | 37 | def __init__(self): 38 | pass 39 | 40 | @property 41 | def node_instances(self): 42 | return MockNodeInstancesClient() 43 | 44 | @property 45 | def nodes(self): 46 | return MockNodesClient() 47 | 48 | @property 49 | def executions(self): 50 | return MockExecutionsClient() 51 | 52 | @property 53 | def manager(self): 54 | return MockManagerClient() 55 | 56 | 57 | class MockNodesClient(object): 58 | 59 | def list(self, deployment_id): 60 | return [] 61 | 62 | 63 | class MockNodeInstancesClient(object): 64 | 65 | def get(self, node_instance_id, evaluate_functions=False): 66 | if node_instance_id not in node_instances: 67 | raise RuntimeError( 68 | 'No info for node with id {0}'.format(node_instance_id)) 69 | return node_instances[node_instance_id] 70 | 71 | def list(self, deployment_id): 72 | return [] 73 | 74 | 75 | class MockExecutionsClient(object): 76 | 77 | def update(self, *args, **kwargs): 78 | return None 79 | 80 | def get(self, id): 81 | return Execution({ 82 | 'id': '111', 83 | 'status': 'terminated' 84 | }) 85 | 86 | 87 | class MockManagerClient(object): 88 | 89 | def get_context(self): 90 | return {'context': {}} 91 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/default_tenant/for_test.txt: -------------------------------------------------------------------------------- 1 | Hello from test -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/default_tenant/for_test_bp_resource.txt: -------------------------------------------------------------------------------- 1 | Hello from test -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/default_tenant/test_blueprint/blueprint.yaml: -------------------------------------------------------------------------------- 1 | Hello from tenant test -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/execute_operation.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - http://www.getcloudify.org/spec/cloudify/4.4.dev1/types.yaml 5 | 6 | node_templates: 7 | node1: 8 | type: mock_type1 9 | node2: 10 | type: mock_type2 11 | instances: 12 | deploy: 2 13 | relationships: 14 | - target: node1 15 | type: cloudify.relationships.connected_to 16 | node3: 17 | type: mock_type3 18 | relationships: 19 | - target: node2 20 | type: cloudify.relationships.connected_to 21 | 22 | node_types: 23 | mock_type1: 24 | derived_from: cloudify.nodes.Root 25 | interfaces: 26 | cloudify.interfaces.lifecycle: 27 | # basic operation for most of the tests 28 | create: mock.cloudify.tests.test_builtin_workflows.exec_op_test_operation 29 | 30 | # same basic operation, but with inputs defined 31 | configure: 32 | implementation: mock.cloudify.tests.test_builtin_workflows.exec_op_test_operation 33 | inputs: 34 | operation_param_key: 35 | default: operation_param_static_value 36 | 37 | # operation for testing dependencies order 38 | start: mock.cloudify.tests.test_builtin_workflows.exec_op_dependency_order_test_operation 39 | 40 | test: 41 | retry: mock.cloudify.tests.test_builtin_workflows.retry 42 | fail: mock.cloudify.tests.test_builtin_workflows.fail 43 | 44 | mock_type2: 45 | derived_from: mock_type1 46 | mock_type3: 47 | derived_from: mock_type2 48 | 49 | plugins: 50 | mock: 51 | executor: central_deployment_agent 52 | install: false 53 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/execute_operation_workflow.yaml: -------------------------------------------------------------------------------- 1 | plugins: 2 | common_mock: 3 | executor: central_deployment_agent 4 | install: false 5 | 6 | workflows: 7 | execute_operation: 8 | mapping: common_mock.cloudify.tests.workflows.execute_operation 9 | parameters: 10 | operation: 11 | description: The operation's full name 12 | nodes: 13 | description: Node ID's to operate on 14 | testing: 15 | description: The test case 16 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/get_attribute.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_2 2 | 3 | node_templates: 4 | node1: 5 | type: mock_type1 6 | interfaces: 7 | test: 8 | setup: mock.cloudify.tests.test_local_get_attribute.populate 9 | op: 10 | implementation: mock.cloudify.tests.test_local_get_attribute.op 11 | inputs: 12 | self_ref: { get_attribute: [SELF, self_ref_property] } 13 | node_ref: { get_attribute: [node2, node_ref_property] } 14 | static: { get_attribute: [node1, static_property ]} 15 | relationships: 16 | - target: node2 17 | type: cloudify.relationships.contained_in 18 | source_interfaces: 19 | test: 20 | op: 21 | implementation: mock.cloudify.tests.test_local_get_attribute.op 22 | inputs: 23 | source_ref: { get_attribute: [SOURCE, source_ref_property] } 24 | target_ref: { get_attribute: [TARGET, target_ref_property] } 25 | target_interfaces: 26 | test: 27 | op: 28 | implementation: mock.cloudify.tests.test_local_get_attribute.op 29 | inputs: 30 | source_ref: { get_attribute: [SOURCE, source_ref_property] } 31 | target_ref: { get_attribute: [TARGET, target_ref_property] } 32 | node2: 33 | type: mock_type1 34 | interfaces: 35 | test: 36 | setup: mock.cloudify.tests.test_local_get_attribute.populate 37 | 38 | relationships: 39 | cloudify.relationships.contained_in: {} 40 | 41 | node_types: 42 | mock_type1: 43 | properties: 44 | static_property: 45 | default: static_property_value 46 | 47 | plugins: 48 | mock: 49 | executor: central_deployment_agent 50 | install: false 51 | 52 | workflows: 53 | setup: mock.cloudify.tests.test_local_get_attribute.populate_runtime_properties 54 | run: mock.cloudify.tests.test_local_get_attribute.run_all_operations 55 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/get_attribute_multi_instance.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | node_templates: 4 | node1: 5 | type: type 6 | interfaces: 7 | test: 8 | setup: p.cloudify.tests.test_local_get_attribute.populate_multi 9 | op: 10 | implementation: p.cloudify.tests.test_local_get_attribute.op_multi 11 | inputs: 12 | node_ref: { get_attribute: [node2, node_ref_property] } 13 | relationships: 14 | - target: node2 15 | type: cloudify.relationships.connected_to 16 | node2: 17 | type: type 18 | interfaces: 19 | test: 20 | setup: p.cloudify.tests.test_local_get_attribute.populate_multi 21 | 22 | relationships: 23 | cloudify.relationships.connected_to: 24 | properties: 25 | connection_type: 26 | default: all_to_all 27 | 28 | node_types: 29 | type: {} 30 | 31 | plugins: 32 | p: 33 | executor: central_deployment_agent 34 | install: false 35 | 36 | workflows: 37 | setup: p.cloudify.tests.test_local_get_attribute.populate_runtime_properties 38 | run: p.cloudify.tests.test_local_get_attribute.run_multi 39 | 40 | groups: 41 | group: 42 | members: [node1, node2] 43 | 44 | policies: 45 | policy: 46 | type: cloudify.policies.scaling 47 | targets: [group] 48 | properties: 49 | default_instances: 2 50 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/get_attribute_multi_instance2.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | node_templates: 4 | node1: 5 | type: type 6 | interfaces: 7 | test: 8 | setup: p.cloudify.tests.test_local_get_attribute.populate_multi 9 | op: 10 | implementation: p.cloudify.tests.test_local_get_attribute.op_multi 11 | inputs: 12 | node_ref: { get_attribute: [node2, node_ref_property] } 13 | node2: 14 | type: type 15 | interfaces: 16 | test: 17 | setup: p.cloudify.tests.test_local_get_attribute.populate_multi 18 | 19 | relationships: 20 | cloudify.relationships.connected_to: 21 | properties: 22 | connection_type: 23 | default: all_to_all 24 | 25 | node_types: 26 | type: {} 27 | 28 | plugins: 29 | p: 30 | executor: central_deployment_agent 31 | install: false 32 | 33 | workflows: 34 | setup: p.cloudify.tests.test_local_get_attribute.populate_runtime_properties 35 | run: p.cloudify.tests.test_local_get_attribute.run_multi 36 | 37 | groups: 38 | group: 39 | members: [node1, node2] 40 | 41 | policies: 42 | policy: 43 | type: cloudify.policies.scaling 44 | targets: [group] 45 | properties: 46 | default_instances: 2 47 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/install-new-agents-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - http://www.getcloudify.org/spec/cloudify/4.4.dev1/types.yaml 5 | 6 | plugins: 7 | p: 8 | executor: central_deployment_agent 9 | install: false 10 | 11 | inputs: 12 | host_a_validation_result: 13 | default: {} 14 | host_b_validation_result: 15 | default: {} 16 | node_types: 17 | no_agents_compute: 18 | derived_from: cloudify.nodes.Compute 19 | interfaces: 20 | cloudify.interfaces.cloudify_agent: 21 | create: {} 22 | configure: {} 23 | start: {} 24 | stop: {} 25 | delete: {} 26 | validate_amqp: p.cloudify.tests.test_install_new_agents_workflow.validate_amqp 27 | create_amqp: p.cloudify.tests.test_install_new_agents_workflow.create_amqp 28 | properties: 29 | validation_result: 30 | default: {} 31 | 32 | node_templates: 33 | node: 34 | type: cloudify.nodes.Root 35 | 36 | host_a: 37 | type: no_agents_compute 38 | properties: 39 | validation_result: { get_input: host_a_validation_result } 40 | 41 | host_b: 42 | type: no_agents_compute 43 | properties: 44 | validation_result: { get_input: host_b_validation_result } 45 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/not_exist_op_workflow.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - http://www.getcloudify.org/spec/cloudify/4.4.dev1/types.yaml 5 | 6 | node_templates: 7 | node1: 8 | type: cloudify.nodes.Root 9 | 10 | workflows: 11 | not_exist_op_workflow: 12 | mapping: mock.cloudify.tests.test_missing_operation.not_exist_op_workflow 13 | parameters: {} 14 | not_exist_interface_workflow: 15 | mapping: mock.cloudify.tests.test_missing_operation.not_exist_interface_workflow 16 | parameters: {} 17 | stop_workflow: 18 | mapping: mock.cloudify.tests.test_missing_operation.stop_workflow 19 | parameters: {} 20 | 21 | plugins: 22 | mock: 23 | executor: central_deployment_agent 24 | install: false 25 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/relationship_context.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_2 2 | 3 | node_templates: 4 | node1: 5 | type: mock_type 6 | relationships: 7 | - target: node2 8 | type: cloudify.relationships.contained_in2 9 | properties: 10 | prop: node1_static_prop_value 11 | node2: 12 | type: mock_type 13 | relationships: 14 | - target: node3 15 | type: cloudify.relationships.contained_in3 16 | properties: 17 | prop: node2_static_prop_value 18 | node3: 19 | type: mock_type 20 | properties: 21 | prop: node3_static_prop_value 22 | 23 | relationships: 24 | cloudify.relationships.contained_in: 25 | source_interfaces: 26 | test: 27 | assert_not_modifiable: 28 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_not_modifiable 29 | assert_modifiable: 30 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_modifiable 31 | assert_relationships: 32 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_relationships 33 | assert_capabilities: 34 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_capabilities 35 | 36 | target_interfaces: 37 | test: 38 | assert_not_modifiable: 39 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_not_modifiable 40 | assert_modifiable: 41 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_modifiable 42 | assert_relationships: 43 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_relationships 44 | assert_capabilities: 45 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_capabilities 46 | 47 | cloudify.relationships.contained_in2: 48 | derived_from: cloudify.relationships.contained_in 49 | 50 | cloudify.relationships.contained_in3: 51 | derived_from: cloudify.relationships.contained_in2 52 | 53 | node_types: 54 | mock_type: 55 | interfaces: 56 | test: 57 | assert_not_modifiable: 58 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_not_modifiable 59 | assert_modifiable: 60 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_modifiable 61 | assert_relationships: 62 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_relationships 63 | update_runtime_properties: 64 | implementation: mock.cloudify.tests.test_ctx_relationships.update_runtime_properties 65 | assert_immutable_properties: 66 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_immutable_properties 67 | assert_capabilities: 68 | implementation: mock.cloudify.tests.test_ctx_relationships.assert_capabilities 69 | asset_2_hops: 70 | implementation: mock.cloudify.tests.test_ctx_relationships.asset_2_hops 71 | properties: 72 | prop: {} 73 | 74 | plugins: 75 | mock: 76 | executor: central_deployment_agent 77 | install: false 78 | 79 | workflows: 80 | execute_operation: 81 | mapping: mock.cloudify.tests.test_ctx_relationships.execute_operation 82 | parameters: 83 | op: {} 84 | rel: 85 | default: '' 86 | node: 87 | default: node1 88 | kwargs: 89 | default: {} 90 | execute_task: 91 | mapping: mock.cloudify.tests.test_ctx_relationships.execute_task 92 | parameters: 93 | task: {} 94 | 95 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/resources/extended_rendered_template.conf: -------------------------------------------------------------------------------- 1 | deployment_id: local 2 | node_name: node1 3 | url: my_url.html 4 | port: 8888 5 | 6 | test:value -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/resources/for_template_rendering_tests.conf: -------------------------------------------------------------------------------- 1 | deployment_id: {{ctx.deployment.id}} 2 | node_name: {{ctx.node.name}} 3 | url: {{ctx.node.properties.my_url}} 4 | port: {{ctx.node.properties.port}} 5 | 6 | test:{{key}} -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/resources/rendered_template.conf: -------------------------------------------------------------------------------- 1 | deployment_id: local 2 | node_name: node1 3 | url: my_url.html 4 | port: 8888 5 | 6 | test: -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-blueprint-ignore-failure.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - http://www.getcloudify.org/spec/cloudify/4.0m1/types.yaml 5 | 6 | plugins: 7 | p: 8 | executor: central_deployment_agent 9 | install: false 10 | 11 | node_templates: 12 | 13 | node1: 14 | type: cloudify.nodes.Compute 15 | properties: 16 | install_agent: false 17 | 18 | node2: 19 | type: cloudify.nodes.Root 20 | interfaces: 21 | cloudify.interfaces.lifecycle: 22 | create: p.cloudify.tests.test_builtin_workflows.node_operation 23 | stop: p.cloudify.tests.test_builtin_workflows.fail_stop 24 | relationships: 25 | - type: cloudify.relationships.contained_in 26 | target: node1 27 | - type: cloudify.relationships.depends_on 28 | target: node3 29 | 30 | node3: 31 | type: cloudify.nodes.Root 32 | relationships: 33 | - type: cloudify.relationships.connected_to 34 | target: node1 35 | target_interfaces: 36 | cloudify.interfaces.relationship_lifecycle: 37 | establish: p.cloudify.tests.test_builtin_workflows.target_operation 38 | unlink: p.cloudify.tests.test_builtin_workflows.target_operation 39 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-context-node.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_1 2 | 3 | imports: 4 | - execute_operation_workflow.yaml 5 | 6 | node_types: 7 | test.node1.type: 8 | interfaces: 9 | test.interface: 10 | create: common_mock.cloudify.tests.test_context.get_node_type 11 | 12 | test.node2.type: 13 | derived_from: test.node1.type 14 | 15 | node_templates: 16 | node1: 17 | type: test.node1.type 18 | 19 | node2: 20 | type: test.node2.type 21 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-get-resource-template.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - execute_operation_workflow.yaml 5 | 6 | node_types: 7 | custom_type: 8 | properties: 9 | port: 10 | type: integer 11 | default: 8888 12 | my_url: 13 | type: string 14 | interfaces: 15 | test: 16 | get_template: 17 | implementation: common_mock.cloudify.tests.test_context.get_template 18 | 19 | download_template: 20 | implementation: common_mock.cloudify.tests.test_context.download_template 21 | 22 | node_templates: 23 | node1: 24 | type: custom_type 25 | properties: 26 | my_url: my_url.html 27 | node2: 28 | type: custom_type 29 | properties: 30 | my_url: my_url2.html 31 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-heal-correct-order-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - http://www.getcloudify.org/spec/cloudify/4.4.dev1/types.yaml 5 | 6 | plugins: 7 | p: 8 | executor: central_deployment_agent 9 | install: false 10 | 11 | node_templates: 12 | 13 | node1: 14 | type: cloudify.nodes.Compute 15 | properties: 16 | install_agent: false 17 | 18 | node2: 19 | type: cloudify.nodes.Root 20 | interfaces: 21 | cloudify.interfaces.lifecycle: 22 | create: p.cloudify.tests.test_builtin_workflows.node_operation 23 | stop: p.cloudify.tests.test_builtin_workflows.node_operation 24 | relationships: 25 | - type: cloudify.relationships.contained_in 26 | target: node1 27 | - type: cloudify.relationships.depends_on 28 | target: node3 29 | 30 | node3: 31 | type: cloudify.nodes.Root 32 | relationships: 33 | - type: cloudify.relationships.connected_to 34 | target: node1 35 | target_interfaces: 36 | cloudify.interfaces.relationship_lifecycle: 37 | establish: p.cloudify.tests.test_builtin_workflows.target_operation 38 | unlink: p.cloudify.tests.test_builtin_workflows.target_operation 39 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-install-agent-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_2 2 | 3 | node_types: 4 | cloudify.nodes.Compute: 5 | properties: 6 | install_agent: 7 | default: true 8 | 9 | node_templates: 10 | node: 11 | type: cloudify.nodes.Compute 12 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-lifecycle-retry-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - http://www.getcloudify.org/spec/cloudify/4.4.dev1/types.yaml 5 | 6 | dsl_definitions: 7 | - &op 8 | implementation: p.cloudify.tests.test_lifecycle_retry.operation 9 | inputs: 10 | descriptor: 11 | default: { get_input: descriptor } 12 | - &node_interfaces 13 | cloudify.interfaces.lifecycle: 14 | create: *op 15 | configure: *op 16 | start: *op 17 | stop: *op 18 | delete: *op 19 | - &rel_interfaces 20 | cloudify.interfaces.relationship_lifecycle: 21 | preconfigure: *op 22 | postconfigure: *op 23 | establish: *op 24 | unlink: *op 25 | 26 | inputs: 27 | descriptor: {} 28 | 29 | plugins: 30 | p: 31 | executor: central_deployment_agent 32 | install: false 33 | 34 | node_types: 35 | type: 36 | derived_from: cloudify.nodes.Root 37 | interfaces: *node_interfaces 38 | 39 | relationships: 40 | relationship: 41 | derived_from: cloudify.relationships.contained_in 42 | source_interfaces: *rel_interfaces 43 | 44 | node_templates: 45 | node1: 46 | type: type 47 | node2: 48 | type: type 49 | relationships: 50 | - target: node1 51 | type: relationship 52 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-operation-retry-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_2 2 | 3 | plugins: 4 | mock: 5 | source: source 6 | executor: central_deployment_agent 7 | install: false 8 | 9 | node_types: 10 | custom_type: 11 | interfaces: 12 | lifecycle: 13 | start: mock.cloudify.tests.test_operation_retry.node_operation_retry 14 | stop: mock.cloudify.tests.test_operation_retry.node_operation_retry 15 | 16 | node_templates: 17 | node: 18 | type: custom_type 19 | 20 | workflows: 21 | execute_operation: 22 | mapping: mock.cloudify.tests.test_operation_retry.execute_operation 23 | parameters: 24 | operation: 25 | description: The operation full name. 26 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-relationship-order-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - http://www.getcloudify.org/spec/cloudify/4.4.dev1/types.yaml 5 | 6 | node_templates: 7 | 8 | main: 9 | type: cloudify.nodes.Root 10 | relationships: 11 | - type: connected_to 12 | target: node1 13 | - type: connected_to 14 | target: node2 15 | - type: contained_in 16 | target: main_compute 17 | - type: connected_to 18 | target: node4 19 | - type: connected_to 20 | target: node5 21 | 22 | node1: 23 | type: cloudify.nodes.Root 24 | 25 | node2: 26 | type: cloudify.nodes.Root 27 | 28 | main_compute: 29 | type: cloudify.nodes.Compute 30 | properties: 31 | agent_config: 32 | install_method: none 33 | 34 | node4: 35 | type: cloudify.nodes.Root 36 | 37 | node5: 38 | type: cloudify.nodes.Root 39 | 40 | depends: 41 | type: cloudify.nodes.Root 42 | relationships: 43 | - type: connected_to 44 | target: main_compute 45 | - type: connected_to 46 | target: main 47 | 48 | dsl_definitions: 49 | - &relationship_interfaces 50 | source_interfaces: 51 | cloudify.interfaces.relationship_lifecycle: 52 | establish: p.cloudify.tests.test_builtin_workflows.source_operation 53 | unlink: p.cloudify.tests.test_builtin_workflows.source_operation 54 | 55 | relationships: 56 | connected_to: 57 | derived_from: cloudify.relationships.connected_to 58 | <<: *relationship_interfaces 59 | 60 | contained_in: 61 | derived_from: cloudify.relationships.contained_in 62 | <<: *relationship_interfaces 63 | 64 | plugins: 65 | p: 66 | executor: central_deployment_agent 67 | install: false 68 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-scale-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_2 2 | 3 | plugins: 4 | plugin: 5 | executor: central_deployment_agent 6 | install: false 7 | 8 | node_types: 9 | type: {} 10 | 11 | node_templates: 12 | node: 13 | type: type 14 | 15 | workflows: 16 | scale: 17 | mapping: plugin.cloudify.plugins.workflows.scale_entity 18 | parameters: 19 | scalable_entity_name: {} 20 | delta: 21 | default: 1 22 | scale_compute: 23 | default: false 24 | 25 | scale_old: 26 | mapping: plugin.cloudify.plugins.workflows.scale 27 | parameters: 28 | node_id: {} 29 | delta: 30 | default: 1 31 | scale_compute: 32 | default: true 33 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-subgraph-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - http://www.getcloudify.org/spec/cloudify/4.4.dev1/types.yaml 5 | 6 | plugins: 7 | p: 8 | executor: central_deployment_agent 9 | install: false 10 | 11 | node_types: 12 | type: 13 | derived_from: cloudify.nodes.Root 14 | 15 | node_templates: 16 | node1: 17 | type: type 18 | relationships: 19 | - type: cloudify.relationships.connected_to 20 | target: node2 21 | target_interfaces: 22 | cloudify.interfaces.relationship_lifecycle: 23 | establish: p.cloudify.tests.test_builtin_workflows.target_operation 24 | unlink: p.cloudify.tests.test_builtin_workflows.target_operation 25 | source_interfaces: 26 | cloudify.interfaces.relationship_lifecycle: 27 | establish: p.cloudify.tests.test_builtin_workflows.source_operation 28 | unlink: p.cloudify.tests.test_builtin_workflows.source_operation 29 | - type: cloudify.relationships.connected_to 30 | target: node3 31 | target_interfaces: 32 | cloudify.interfaces.relationship_lifecycle: 33 | establish: p.cloudify.tests.test_builtin_workflows.target_operation 34 | unlink: p.cloudify.tests.test_builtin_workflows.target_operation 35 | source_interfaces: 36 | cloudify.interfaces.relationship_lifecycle: 37 | establish: p.cloudify.tests.test_builtin_workflows.source_operation 38 | unlink: p.cloudify.tests.test_builtin_workflows.source_operation 39 | node2_host: 40 | type: cloudify.nodes.Compute 41 | properties: 42 | install_agent: false 43 | node2: 44 | type: type 45 | relationships: 46 | - type: cloudify.relationships.contained_in 47 | target: node2_host 48 | node3: 49 | type: type 50 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-task-retry-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_2 2 | 3 | plugins: 4 | mock: 5 | source: source 6 | executor: central_deployment_agent 7 | install: false 8 | 9 | node_types: 10 | custom_type: {} 11 | 12 | node_templates: 13 | node: 14 | type: custom_type 15 | 16 | workflows: 17 | fail_execute_task: mock.cloudify.tests.test_task_retry.fail_execute_task 18 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-task-retry-event-context-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_2 2 | 3 | plugins: 4 | mock: 5 | source: source 6 | executor: central_deployment_agent 7 | install: false 8 | 9 | node_types: 10 | custom_type: 11 | interfaces: 12 | test: 13 | op: mock.cloudify.tests.test_task_retry_event_context.op 14 | 15 | node_templates: 16 | node: 17 | type: custom_type 18 | 19 | workflows: 20 | execute_operation: 21 | mapping: mock.cloudify.tests.test_task_retry_event_context.execute_operation 22 | parameters: 23 | retry_type: {} 24 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-task-subgraph-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_2 2 | 3 | plugins: 4 | mock: 5 | source: source 6 | executor: central_deployment_agent 7 | install: false 8 | 9 | node_types: 10 | custom_type: {} 11 | 12 | node_templates: 13 | node: 14 | type: custom_type 15 | interfaces: 16 | interface: 17 | operation: mock.cloudify.tests.test_task_subgraph.operation 18 | 19 | workflows: 20 | workflow: 21 | mapping: mock.cloudify.tests.test_task_subgraph.workflow 22 | parameters: 23 | test: {} 24 | 25 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-uninstall-ignore-failure-parameter-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_3 2 | 3 | imports: 4 | - http://www.getcloudify.org/spec/cloudify/4.4.dev1/types.yaml 5 | 6 | dsl_definitions: 7 | - &node_interfaces 8 | cloudify.interfaces.lifecycle: 9 | create: p.cloudify.tests.test_lifecycle_retry.operation 10 | configure: p.cloudify.tests.test_lifecycle_retry.operation 11 | start: p.cloudify.tests.test_lifecycle_retry.operation 12 | stop: p.cloudify.tests.test_lifecycle_retry.operation_failing_stop 13 | delete: p.cloudify.tests.test_lifecycle_retry.operation_delete 14 | 15 | plugins: 16 | p: 17 | executor: central_deployment_agent 18 | install: false 19 | 20 | node_types: 21 | type: 22 | derived_from: cloudify.nodes.Root 23 | interfaces: *node_interfaces 24 | 25 | node_templates: 26 | node1: 27 | type: type 28 | -------------------------------------------------------------------------------- /cloudify/tests/resources/blueprints/test-validate-version-blueprint.yaml: -------------------------------------------------------------------------------- 1 | tosca_definitions_version: cloudify_dsl_1_0 2 | 3 | description: | 4 | The test using this blueprint, expectes the version to remain 1_0, 5 | please don't change it 6 | 7 | node_types: 8 | type: {} 9 | 10 | node_templates: 11 | node: 12 | type: type 13 | -------------------------------------------------------------------------------- /cloudify/tests/resources/deployments/default_tenant/dep1/for_test.txt: -------------------------------------------------------------------------------- 1 | belongs to dep1 -------------------------------------------------------------------------------- /cloudify/tests/resources/deployments/default_tenant/dep1/for_test_only_dep.txt: -------------------------------------------------------------------------------- 1 | belongs to dep1 -------------------------------------------------------------------------------- /cloudify/tests/test_decorators.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | import testtools 18 | 19 | from mock import patch 20 | from nose import tools 21 | 22 | from cloudify import ctx as ctx_proxy 23 | from cloudify import manager 24 | from cloudify import context 25 | from cloudify.decorators import operation, workflow 26 | from cloudify.exceptions import NonRecoverableError 27 | 28 | from cloudify.test_utils.dispatch_helper import run 29 | import cloudify.tests.mocks.mock_rest_client as rest_client_mock 30 | 31 | 32 | class MockNotPicklableException(Exception): 33 | """Non-picklable exception""" 34 | def __init__(self, custom_error): 35 | self.message = custom_error 36 | 37 | def __str__(self): 38 | return self.message 39 | 40 | 41 | class MockPicklableException(Exception): 42 | """Non-picklable exception""" 43 | def __init__(self, custom_error): 44 | super(Exception, self).__init__(custom_error) 45 | 46 | 47 | @operation 48 | def acquire_context(*args, **kwargs): 49 | return run(acquire_context_impl, *args, **kwargs) 50 | 51 | 52 | def acquire_context_impl(a, b, ctx, **kwargs): 53 | return ctx 54 | 55 | 56 | @operation 57 | def some_operation(**kwargs): 58 | return run(some_operation_impl, **kwargs) 59 | 60 | 61 | def some_operation_impl(**kwargs): 62 | from cloudify import ctx 63 | return ctx 64 | 65 | 66 | @tools.nottest 67 | @operation 68 | def test_op(**kwargs): 69 | run(test_op_impl, **kwargs) 70 | 71 | 72 | @tools.nottest 73 | def test_op_impl(ctx, test_case, **kwargs): 74 | test_case.assertEqual(ctx, ctx_proxy) 75 | 76 | 77 | class OperationTest(testtools.TestCase): 78 | def test_empty_ctx(self): 79 | ctx = acquire_context(0, 0) 80 | self.assertIsInstance(ctx, context.CloudifyContext) 81 | 82 | def test_provided_ctx(self): 83 | ctx = {'node_id': '1234'} 84 | kwargs = {'__cloudify_context': ctx} 85 | ctx = acquire_context(0, 0, **kwargs) 86 | self.assertIsInstance(ctx, context.CloudifyContext) 87 | self.assertEquals('1234', ctx.instance.id) 88 | 89 | def test_proxied_ctx(self): 90 | 91 | self.assertRaises(RuntimeError, 92 | lambda: ctx_proxy.instance.id) 93 | 94 | test_op(test_case=self) 95 | 96 | self.assertRaises(RuntimeError, 97 | lambda: ctx_proxy.instance.id) 98 | 99 | def test_provided_capabilities(self): 100 | ctx = { 101 | 'node_id': '5678', 102 | } 103 | 104 | # using a mock rest client 105 | manager.get_rest_client = \ 106 | lambda: rest_client_mock.MockRestclient() 107 | 108 | rest_client_mock.put_node_instance( 109 | '5678', 110 | relationships=[{'target_id': 'some_node', 111 | 'target_name': 'some_node'}]) 112 | rest_client_mock.put_node_instance('some_node', 113 | runtime_properties={'k': 'v'}) 114 | 115 | kwargs = {'__cloudify_context': ctx} 116 | ctx = acquire_context(0, 0, **kwargs) 117 | self.assertIn('k', ctx.capabilities) 118 | self.assertEquals('v', ctx.capabilities['k']) 119 | 120 | def test_capabilities_clash(self): 121 | ctx = { 122 | 'node_id': '5678', 123 | } 124 | 125 | # using a mock rest client 126 | manager.get_rest_client = \ 127 | lambda: rest_client_mock.MockRestclient() 128 | 129 | rest_client_mock.put_node_instance( 130 | '5678', 131 | relationships=[{'target_id': 'node1', 132 | 'target_name': 'node1'}, 133 | {'target_id': 'node2', 134 | 'target_name': 'node2'}]) 135 | 136 | rest_client_mock.put_node_instance('node1', 137 | runtime_properties={'k': 'v1'}) 138 | rest_client_mock.put_node_instance('node2', 139 | runtime_properties={'k': 'v2'}) 140 | 141 | kwargs = {'__cloudify_context': ctx} 142 | ctx = acquire_context(0, 0, **kwargs) 143 | self.assertRaises(NonRecoverableError, ctx.capabilities.__contains__, 144 | 'k') 145 | 146 | def test_instance_update(self): 147 | with patch.object(context.NodeInstanceContext, 148 | 'update') as mock_update: 149 | kwargs = {'__cloudify_context': { 150 | 'node_id': '5678' 151 | }} 152 | some_operation(**kwargs) 153 | mock_update.assert_called_once_with() 154 | 155 | def test_source_target_update_in_relationship(self): 156 | with patch.object(context.NodeInstanceContext, 157 | 'update') as mock_update: 158 | kwargs = {'__cloudify_context': { 159 | 'node_id': '5678', 160 | 'relationships': ['1111'], 161 | 'related': { 162 | 'node_id': '1111', 163 | 'is_target': True 164 | } 165 | }} 166 | some_operation(**kwargs) 167 | self.assertEqual(2, mock_update.call_count) 168 | 169 | def test_backwards(self): 170 | @operation 171 | def o1(): 172 | return 'o1' 173 | 174 | @operation(some_unused_kwargs='value') 175 | def o2(): 176 | return 'o2' 177 | 178 | @workflow 179 | def w1(): 180 | return 'w1' 181 | 182 | @workflow(system_wide=True) 183 | def w2(): 184 | return 'w2' 185 | 186 | self.assertEqual(o1(), 'o1') 187 | self.assertEqual(o2(), 'o2') 188 | self.assertEqual(w1(), 'w1') 189 | self.assertEqual(w2(), 'w2') 190 | -------------------------------------------------------------------------------- /cloudify/tests/test_event.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import sys 17 | 18 | import testtools 19 | 20 | from cloudify import utils 21 | from cloudify import event 22 | 23 | 24 | class TestEvent(testtools.TestCase): 25 | 26 | def test_event_has_output(self): 27 | test_event = _event('cloudify_event') 28 | self.assertTrue(test_event.has_output) 29 | test_event = _event('cloudify_log', level='INFO') 30 | self.assertTrue(test_event.has_output) 31 | test_event = _event('cloudify_log', level='DEBUG') 32 | self.assertFalse(test_event.has_output) 33 | test_event = _event('cloudify_log', level='DEBUG', 34 | verbosity_level=event.MEDIUM_VERBOSE) 35 | self.assertTrue(test_event.has_output) 36 | 37 | def test_task_failure_causes(self): 38 | message = 'test_message' 39 | test_event = _event('cloudify_event', 40 | event_type='task_failed', 41 | message=message) 42 | self.assertEqual(test_event.text, message) 43 | causes = [] 44 | test_event = _event('cloudify_event', 45 | event_type='task_failed', 46 | message=message, 47 | causes=causes) 48 | self.assertEqual(test_event.text, message) 49 | try: 50 | raise RuntimeError() 51 | except RuntimeError: 52 | _, ex, tb = sys.exc_info() 53 | causes = [utils.exception_to_error_cause(ex, tb)] 54 | test_event = _event('cloudify_event', 55 | event_type='task_failed', 56 | message=message, 57 | causes=causes) 58 | self.assertEqual(test_event.text, message) 59 | test_event = _event('cloudify_event', 60 | event_type='task_failed', 61 | message=message, 62 | causes=causes, 63 | verbosity_level=event.LOW_VERBOSE) 64 | text = test_event.text 65 | self.assertIn(message, text) 66 | self.assertNotIn('Causes (most recent cause last):', text) 67 | self.assertEqual(1, text.count(causes[0]['traceback'])) 68 | causes = causes + causes 69 | test_event = _event('cloudify_event', 70 | event_type='task_failed', 71 | message=message, 72 | causes=causes, 73 | verbosity_level=event.LOW_VERBOSE) 74 | text = test_event.text 75 | self.assertIn(message, text) 76 | self.assertIn('Causes (most recent cause last):', text) 77 | self.assertEqual(2, text.count(causes[0]['traceback'])) 78 | 79 | # one test with task_rescheduled 80 | test_event = _event('cloudify_event', 81 | event_type='task_rescheduled', 82 | message=message, 83 | causes=causes, 84 | verbosity_level=event.LOW_VERBOSE) 85 | text = test_event.text 86 | self.assertIn(message, text) 87 | self.assertIn('Causes (most recent cause last):', text) 88 | self.assertEqual(2, text.count(causes[0]['traceback'])) 89 | 90 | 91 | def _event(type, event_type=None, level=None, message=None, 92 | causes=None, verbosity_level=None): 93 | result = {'type': type, 'context': {}} 94 | if event_type: 95 | result['event_type'] = event_type 96 | if level: 97 | result['level'] = level 98 | if message: 99 | result['message'] = {'text': message} 100 | if causes: 101 | result['context']['task_error_causes'] = causes 102 | return event.Event(result, verbosity_level=verbosity_level) 103 | -------------------------------------------------------------------------------- /cloudify/tests/test_gate_keeper.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import random 17 | 18 | import mock 19 | import testtools 20 | 21 | from cloudify.celery import gate_keeper 22 | 23 | 24 | class GateKeeperTest(testtools.TestCase): 25 | 26 | def test_gate_keeper(self): 27 | # This tests emulates the existence of 100 deployments 28 | # and 100 tasks for each (once for workflows and once 29 | # for operations). Total of 100 * 100 * 2 tasks that are "waiting" 30 | # in the gate_keeper queues. 31 | # Then, until no more tasks are left, each iteration, a different 32 | # workflows or operations queue is selected and one task execution 33 | # is simulated. After that, it verifies the internal state is 34 | # valid. 35 | # This test exists mostly to "stress test" this component and make 36 | # sure it does not introduce any noticeable bottleneck. 37 | 38 | num_deployments = 100 39 | num_operations = 100 40 | requests = [] 41 | for i in range(num_deployments): 42 | op_deployment_requests = Requests(self, i, op) 43 | workflow_deployment_requests = Requests(self, i, workflow) 44 | for j in range(num_operations): 45 | op_deployment_requests.begin_new_request() 46 | workflow_deployment_requests.begin_new_request() 47 | requests.append(op_deployment_requests) 48 | requests.append(workflow_deployment_requests) 49 | while requests: 50 | deployment_requests = requests[random.randint(0, 51 | len(requests) - 1)] 52 | if not deployment_requests.end_first_running_request(): 53 | requests.remove(deployment_requests) 54 | 55 | def setUp(self): 56 | super(GateKeeperTest, self).setUp() 57 | self.bucket_size = 5 58 | self.current = set() 59 | self.keeper = gate_keeper.GateKeeper( 60 | with_gate_keeper=True, 61 | gate_keeper_bucket_size=self.bucket_size, 62 | worker=mock.Mock()) 63 | 64 | def tearDown(self): 65 | for q in self.keeper._current.values(): 66 | self.assertTrue(q.empty()) 67 | for q in self.keeper._on_hold.values(): 68 | self.assertTrue(q.empty()) 69 | super(GateKeeperTest, self).tearDown() 70 | 71 | def begin(self, task): 72 | request = RequestMock(task, self) 73 | self.keeper.task_received(request, request.handler) 74 | return request 75 | 76 | 77 | class Requests(object): 78 | 79 | def __init__(self, test, deployment_id, task_func): 80 | self.deployment_id = str(deployment_id) 81 | self.task_func = task_func 82 | self.test = test 83 | self.requests = [] 84 | self.running_requests = [] 85 | self.next_index = 0 86 | 87 | def begin_new_request(self): 88 | request = self.test.begin(self.task_func(self.deployment_id)) 89 | self.requests.append(request) 90 | self._update_running() 91 | self.validate_expected_state() 92 | 93 | def end_first_running_request(self): 94 | if self.running_requests: 95 | request = self.running_requests.pop(0) 96 | request.end() 97 | self._update_running() 98 | self.validate_expected_state() 99 | return bool(self.running_requests) 100 | 101 | def _update_running(self): 102 | if len(self.running_requests) < self.test.bucket_size: 103 | if self.next_index < len(self.requests): 104 | request = self.requests[self.next_index] 105 | self.next_index += 1 106 | self.running_requests.append(request) 107 | 108 | def validate_expected_state(self): 109 | for request in self.requests: 110 | if request in self.running_requests: 111 | request.assert_running() 112 | else: 113 | request.assert_not_running() 114 | 115 | 116 | class RequestMock(object): 117 | 118 | counter = 0 119 | 120 | def __init__(self, task, test): 121 | self.id = RequestMock.counter 122 | RequestMock.counter += 1 123 | deployment_id = task['deployment_id'] 124 | task_type = task['type'] 125 | self.kwargs = { 126 | '__cloudify_context': { 127 | 'deployment_id': deployment_id, 128 | 'type': task_type 129 | } 130 | } 131 | self.bucket_key = deployment_id 132 | if task_type == 'workflow': 133 | self.bucket_key = '{0}_workflows'.format(self.bucket_key) 134 | self.test = test 135 | 136 | on_success = mock.Mock() 137 | 138 | @property 139 | def running(self): 140 | return self in self.test.current 141 | 142 | def assert_running(self): 143 | self.test.assertIn(self, self.test.current) 144 | 145 | def assert_not_running(self): 146 | self.test.assertNotIn(self, self.test.current) 147 | 148 | def handler(self): 149 | self.assert_not_running() 150 | self.test.current.add(self) 151 | 152 | def end(self): 153 | self.assert_running() 154 | self.test.current.remove(self) 155 | self.test.keeper.task_ended(self.bucket_key) 156 | 157 | def __str__(self): 158 | return '({0} @ {1})'.format(self.id, self.bucket_key) 159 | 160 | __repr__ = __str__ 161 | 162 | 163 | def op(deployment_id): 164 | return {'deployment_id': deployment_id, 'type': 'operation'} 165 | 166 | 167 | def workflow(deployment_id): 168 | return {'deployment_id': deployment_id, 'type': 'workflow'} 169 | -------------------------------------------------------------------------------- /cloudify/tests/test_install_agent_local_workflow.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import os 17 | 18 | import testtools 19 | from testtools.testcase import ExpectedException 20 | 21 | from cloudify.workflows import local 22 | 23 | 24 | class InstallAgentTest(testtools.TestCase): 25 | 26 | def test_install_agent(self): 27 | blueprint_path = os.path.join( 28 | os.path.dirname(os.path.realpath(__file__)), 29 | "resources/blueprints/test-install-agent-blueprint.yaml") 30 | 31 | with ExpectedException(ValueError, 32 | "'install_agent': true is not supported*"): 33 | self.env = local.init_env(blueprint_path) 34 | -------------------------------------------------------------------------------- /cloudify/tests/test_install_new_agents_workflow.py: -------------------------------------------------------------------------------- 1 | ######### 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from os import path 17 | 18 | import testtools 19 | 20 | from cloudify.constants import COMPUTE_NODE_TYPE 21 | from cloudify.decorators import operation 22 | from cloudify.exceptions import NonRecoverableError 23 | from cloudify.test_utils import workflow_test 24 | 25 | 26 | _VALIDATION_SUCCESS = { 27 | 'agent_alive_crossbroker': True 28 | } 29 | _VALIDATION_FAIL = { 30 | 'agent_alive_crossbroker': False 31 | } 32 | 33 | 34 | @operation 35 | def validate_amqp(ctx, current_amqp=True, **_): 36 | status = ctx.node.properties['validation_result'] 37 | if status: 38 | ctx.instance.runtime_properties['agent_status'] = status 39 | if not current_amqp and not status['agent_alive_crossbroker']: 40 | raise NonRecoverableError() 41 | 42 | 43 | @operation 44 | def create_amqp(ctx, **_): 45 | ctx.instance.runtime_properties['created'] = True 46 | 47 | 48 | class TestInstallNewAgentsWorkflow(testtools.TestCase): 49 | blueprint_path = path.join('resources', 'blueprints', 50 | 'install-new-agents-blueprint.yaml') 51 | 52 | def _assert_all_computes_created(self, env, created): 53 | for node_instance in env.storage.get_node_instances(): 54 | node = env.storage.get_node(node_instance['name']) 55 | is_compute = COMPUTE_NODE_TYPE in node['type_hierarchy'] 56 | expected_created = created and is_compute 57 | if expected_created: 58 | self.assertTrue(node_instance.runtime_properties.get( 59 | 'created')) 60 | else: 61 | self.assertNotIn('created', node_instance.runtime_properties) 62 | 63 | @workflow_test(blueprint_path) 64 | def test_not_installed(self, cfy_local): 65 | with testtools.ExpectedException(RuntimeError, ".*is not started.*"): 66 | cfy_local.execute('install_new_agents') 67 | self._assert_all_computes_created(cfy_local, created=False) 68 | 69 | @workflow_test(blueprint_path, inputs={ 70 | 'host_a_validation_result': _VALIDATION_SUCCESS, 71 | 'host_b_validation_result': _VALIDATION_SUCCESS}) 72 | def test_correct(self, cfy_local): 73 | cfy_local.execute('install') 74 | cfy_local.execute('install_new_agents') 75 | self._assert_all_computes_created(cfy_local, created=True) 76 | 77 | @workflow_test(blueprint_path, inputs={ 78 | 'host_a_validation_result': _VALIDATION_SUCCESS, 79 | 'host_b_validation_result': _VALIDATION_FAIL}) 80 | def test_failed_validation(self, cfy_local): 81 | cfy_local.execute('install') 82 | with testtools.ExpectedException(RuntimeError, 83 | ".*Task failed.*validate_amqp.*"): 84 | cfy_local.execute('install_new_agents') 85 | self._assert_all_computes_created(cfy_local, created=False) 86 | 87 | @workflow_test(blueprint_path, inputs={ 88 | 'host_a_validation_result': _VALIDATION_SUCCESS, 89 | 'host_b_validation_result': _VALIDATION_SUCCESS}) 90 | def test_validation_only(self, cfy_local): 91 | cfy_local.execute('install') 92 | cfy_local.execute('install_new_agents', parameters={'install': False}, 93 | allow_custom_parameters=True) 94 | self._assert_all_computes_created(cfy_local, created=False) 95 | -------------------------------------------------------------------------------- /cloudify/tests/test_local_get_attribute.py: -------------------------------------------------------------------------------- 1 | ######### 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | import os 18 | import shutil 19 | import tempfile 20 | 21 | import testtools 22 | 23 | from cloudify import constants 24 | from cloudify.workflows import local 25 | from cloudify.decorators import operation 26 | from cloudify.decorators import workflow 27 | from cloudify import ctx as operation_ctx 28 | from cloudify.workflows import ctx as workflow_ctx 29 | 30 | 31 | class TestLocalWorkflowGetAttribute(testtools.TestCase): 32 | 33 | def test_in_memory_storage(self): 34 | self._test() 35 | 36 | def test_file_storage(self): 37 | tempdir = tempfile.mkdtemp() 38 | storage = local.FileStorage(tempdir) 39 | try: 40 | self._test(storage) 41 | finally: 42 | shutil.rmtree(tempdir) 43 | 44 | def test_file_storage_payload(self): 45 | tempdir = tempfile.mkdtemp() 46 | storage = local.FileStorage(tempdir) 47 | try: 48 | self._test(storage) 49 | 50 | # update payload 51 | with storage.payload() as payload: 52 | payload['payload_key'] = 'payload_key_value' 53 | 54 | # read payload 55 | storage2 = local.FileStorage(tempdir) 56 | local.load_env(self.env.name, storage=storage2) 57 | with storage2.payload() as payload: 58 | self.assertEqual(payload['payload_key'], 'payload_key_value') 59 | finally: 60 | shutil.rmtree(tempdir) 61 | 62 | def test_multi_instance_relationship_ambiguity_resolution(self): 63 | self._test(blueprint='get_attribute_multi_instance.yaml') 64 | 65 | def test_multi_instance_scaling_group_ambiguity_resolution(self): 66 | self._test(blueprint='get_attribute_multi_instance2.yaml') 67 | 68 | def _test(self, storage=None, blueprint='get_attribute.yaml'): 69 | blueprint_path = os.path.join( 70 | os.path.dirname(os.path.realpath(__file__)), 71 | 'resources/blueprints/{0}'.format(blueprint)) 72 | self.env = local.init_env(blueprint_path, storage=storage) 73 | self.env.execute('setup', task_retries=0) 74 | self.env.execute('run', task_retries=0) 75 | 76 | 77 | @workflow 78 | def populate_runtime_properties(**_): 79 | for node in workflow_ctx.nodes: 80 | for instance in node.instances: 81 | instance.execute_operation('test.setup') 82 | 83 | 84 | @workflow 85 | def run_all_operations(**_): 86 | node = workflow_ctx.get_node('node1') 87 | instance = next(node.instances) 88 | instance.execute_operation('test.op') 89 | relationship = next(instance.relationships) 90 | relationship.execute_source_operation('test.op') 91 | relationship.execute_target_operation('test.op') 92 | 93 | 94 | @operation 95 | def populate(**_): 96 | operation_ctx.instance.runtime_properties.update({ 97 | 'self_ref_property': 'self_ref_value', 98 | 'node_ref_property': 'node_ref_value', 99 | 'source_ref_property': 'source_ref_value', 100 | 'target_ref_property': 'target_ref_value', 101 | }) 102 | 103 | 104 | @operation 105 | def op(self_ref=None, 106 | node_ref=None, 107 | source_ref=None, 108 | target_ref=None, 109 | static=None, 110 | **_): 111 | if operation_ctx.type == constants.NODE_INSTANCE: 112 | assert self_ref == 'self_ref_value', \ 113 | 'self: {0}'.format(self_ref) 114 | assert node_ref == 'node_ref_value', \ 115 | 'node: {0}'.format(self_ref) 116 | assert source_ref is None, \ 117 | 'source: {0}'.format(source_ref) 118 | assert source_ref is None, \ 119 | 'target: {0}'.format(target_ref) 120 | assert static == 'static_property_value', \ 121 | 'static: {0}'.format(static) 122 | else: 123 | assert self_ref is None, \ 124 | 'self: {0}'.format(self_ref) 125 | assert node_ref is None, \ 126 | 'node: {0}'.format(self_ref) 127 | assert source_ref == 'source_ref_value', \ 128 | 'source: {0}'.format(source_ref) 129 | assert target_ref == 'target_ref_value', \ 130 | 'target: {0}'.format(target_ref) 131 | 132 | 133 | @workflow 134 | def run_multi(**_): 135 | node = workflow_ctx.get_node('node1') 136 | for instance in node.instances: 137 | instance.execute_operation('test.op') 138 | 139 | 140 | @operation 141 | def populate_multi(**_): 142 | operation_ctx.instance.runtime_properties.update({ 143 | 'node_ref_property': 'node_ref_value_{0}'.format( 144 | operation_ctx.instance.id), 145 | }) 146 | 147 | 148 | @operation 149 | def op_multi(node_ref, **_): 150 | operation_ctx.logger.info(node_ref) 151 | assert node_ref.startswith('node_ref_value_node2_'), \ 152 | 'node: {0}'.format(operation_ctx.instance.id) 153 | -------------------------------------------------------------------------------- /cloudify/tests/test_local_workflows_init.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import os 17 | 18 | import testtools 19 | 20 | from dsl_parser import exceptions as dsl_exceptions 21 | 22 | from cloudify.workflows import local 23 | 24 | 25 | class LocalWorkflowInitTest(testtools.TestCase): 26 | 27 | def test_init_env_validate_definitions(self): 28 | blueprint_path = os.path.join( 29 | os.path.dirname(os.path.realpath(__file__)), 30 | "resources/blueprints/test-validate-version-blueprint.yaml") 31 | self.assertRaises( 32 | dsl_exceptions.DSLParsingException, 33 | local.init_env, blueprint_path, 34 | validate_version=True) 35 | local.init_env(blueprint_path, validate_version=False) 36 | -------------------------------------------------------------------------------- /cloudify/tests/test_logging_server.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import logging 17 | import logging.handlers 18 | import os 19 | import shutil 20 | import tempfile 21 | import time 22 | 23 | import mock 24 | from mock import patch 25 | import testtools 26 | 27 | from cloudify import logs 28 | from cloudify.celery import logging_server 29 | 30 | 31 | class TestLoggingServer(testtools.TestCase): 32 | 33 | HANDLER_CONTEXT = 'logger' 34 | 35 | def setUp(self): 36 | self.worker = mock.Mock() 37 | super(TestLoggingServer, self).setUp() 38 | self.workdir = tempfile.mkdtemp(prefix='cloudify-logging-server-') 39 | self.addCleanup(lambda: shutil.rmtree(self.workdir, 40 | ignore_errors=True)) 41 | 42 | def test_basic(self): 43 | message = 'MESSAGE TEXT' 44 | server = self._start_server() 45 | self.assertEqual(server, self.worker.logging_server) 46 | logger = self._logger(server) 47 | logger.info(message) 48 | self._assert_in_log(message) 49 | return server, logger 50 | 51 | def test_disabled(self): 52 | server = self._start_server(enable=False) 53 | self.assertIsNone(server.logging_server) 54 | self.assertIsNone(server.socket_url) 55 | 56 | def test_handler_cache_size(self): 57 | num_loggers = 7 58 | cache_size = 3 59 | server = self._start_server(cache_size=cache_size) 60 | get_handler = server.logging_server._get_handler 61 | get_handler_cache = get_handler._cache 62 | logger_names = ['logger{0}'.format(i) for i in range(num_loggers)] 63 | for i in range(num_loggers): 64 | logger_name = logger_names[i] 65 | logger = self._logger(server, logger_name) 66 | logger.info(logger_name) 67 | self._assert_in_log(logger_name, logger_name) 68 | self.assertEqual(min(i+1, cache_size), len(get_handler_cache)) 69 | actual_handler = get_handler_cache[(logger_name,)] 70 | expected_handler = get_handler(logger_name) 71 | self.assertEqual(expected_handler, actual_handler) 72 | for j in range(max(0, i-2), i+1): 73 | self.assertIn((logger_names[j],), get_handler_cache) 74 | 75 | def test_stop(self): 76 | server, logger = self.test_basic() 77 | logger.handlers[0]._socket.close() 78 | handler_cache = server.logging_server._get_handler._cache 79 | self.assertEqual(1, len(handler_cache)) 80 | server_handler = next(handler_cache.itervalues()) 81 | self.assertIsNotNone(server_handler.stream) 82 | server.stop(self.worker) 83 | server.thread.join() 84 | self.assertEqual(0, len(handler_cache)) 85 | self.assertIsNone(server_handler.stream) 86 | 87 | def test_server_logging_handler_type_on_management(self): 88 | with patch.dict(os.environ, {'MGMTWORKER_HOME': 'stub'}): 89 | self._test_server_logging_type(logging.FileHandler) 90 | 91 | def test_server_logging_handler_type_on_agent(self): 92 | self._test_server_logging_type(logging.handlers.RotatingFileHandler) 93 | 94 | def _test_server_logging_type(self, expected_type): 95 | server, _ = self.test_basic() 96 | handler_cache = server.logging_server._get_handler._cache 97 | server_handler = handler_cache[(self.HANDLER_CONTEXT,)] 98 | # type(server_handler) doesn't do the right thing on 2.6 99 | self.assertEqual(server_handler.__class__, expected_type) 100 | 101 | def test_error_on_processing(self): 102 | server, logger = self.test_basic() 103 | for i in range(10): 104 | logger.handlers[0]._socket.send('Certainly not json') 105 | good_message = 'some new good message' 106 | logger.info(good_message) 107 | self._assert_in_log(good_message) 108 | 109 | def _start_server(self, enable=True, cache_size=10): 110 | server = logging_server.ZMQLoggingServerBootstep( 111 | self.worker, 112 | with_logging_server=enable, 113 | logging_server_logdir=self.workdir, 114 | logging_server_handler_cache_size=cache_size) 115 | self.addCleanup(lambda: server.stop(self.worker)) 116 | server.start(self.worker) 117 | return server 118 | 119 | def _logger(self, server, handler_context=HANDLER_CONTEXT): 120 | import zmq 121 | context = server.logging_server.zmq_context 122 | socket = context.socket(zmq.PUSH) 123 | socket.connect(server.socket_url) 124 | logger = logging.getLogger(handler_context) 125 | logger.handlers = [] 126 | handler = logs.ZMQLoggingHandler(handler_context, socket, 127 | fallback_logger=logging.getLogger()) 128 | handler.setFormatter(logging.Formatter('%(message)s')) 129 | handler.setLevel(logging.DEBUG) 130 | logger.addHandler(handler) 131 | logger.setLevel(logging.DEBUG) 132 | logger.propagate = False 133 | self.addCleanup(lambda: socket.close()) 134 | return logger 135 | 136 | def _assert_in_log(self, message, handler_context=HANDLER_CONTEXT): 137 | attempts = 0 138 | current = None 139 | while attempts < 500: 140 | try: 141 | with open(os.path.join( 142 | self.workdir, '{0}.log'.format(handler_context))) as f: 143 | current = f.read() 144 | self.assertIn(message, current) 145 | return 146 | except Exception: 147 | attempts += 1 148 | time.sleep(0.01) 149 | self.fail('failed asserting logs: {0}'.format(current)) 150 | -------------------------------------------------------------------------------- /cloudify/tests/test_logs.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import testtools 17 | 18 | from cloudify import logs 19 | 20 | 21 | class TestLogs(testtools.TestCase): 22 | 23 | def test_create_event_message_prefix(self): 24 | test_event = {'type': 'cloudify_log', 25 | 'level': 'INFO', 26 | 'context': {'deployment_id': ''}, 27 | 'timestamp': '', 28 | 'message': {'text': 'message'}} 29 | self.assertIn('message', logs.create_event_message_prefix(test_event)) 30 | test_event['level'] = 'DEBUG' 31 | self.assertIsNone(logs.create_event_message_prefix(test_event)) 32 | -------------------------------------------------------------------------------- /cloudify/tests/test_lru_cache.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import unittest 17 | import collections 18 | 19 | from cloudify.lru_cache import lru_cache 20 | 21 | 22 | class TestLRUCacheDecorator(unittest.TestCase): 23 | 24 | def test_max_size(self): 25 | size = 10 26 | counters = collections.defaultdict(int) 27 | 28 | @lru_cache(maxsize=size) 29 | def func(index): 30 | result = counters[index] 31 | counters[index] = result + 1 32 | return result 33 | 34 | for time in range(2): 35 | for multiplier in range(2): 36 | for _ in range(3): 37 | for i in range(size*multiplier, size*(multiplier+1)): 38 | self.assertEqual(time, func(i)) 39 | 40 | def test_on_purge(self): 41 | test_index = 1 42 | purges = [] 43 | 44 | @lru_cache(on_purge=lambda index: purges.append(index), maxsize=1) 45 | def func(index): 46 | return index 47 | 48 | for _ in range(2): 49 | func(test_index) 50 | self.assertEqual(0, len(purges)) 51 | func(test_index + 1) 52 | self.assertEqual(1, len(purges)) 53 | self.assertEqual(test_index, purges[0]) 54 | 55 | def test_clear(self): 56 | size = 3 57 | purges = [] 58 | 59 | @lru_cache(maxsize=size, on_purge=lambda index: purges.append(index)) 60 | def func(index): 61 | return index 62 | 63 | for i in range(size): 64 | func(i) 65 | self.assertEqual(0, len(purges)) 66 | self.assertEqual(size, len(func._cache)) 67 | func.clear() 68 | self.assertEqual(set([0, 1, 2]), set(purges)) 69 | self.assertEqual(0, len(func._cache)) 70 | -------------------------------------------------------------------------------- /cloudify/tests/test_missing_operation.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from os import path 17 | 18 | import testtools 19 | 20 | from cloudify.decorators import workflow 21 | from cloudify.test_utils import workflow_test 22 | 23 | 24 | @workflow 25 | def not_exist_op_workflow(ctx, **kwargs): 26 | for node in ctx.nodes: 27 | for instance in node.instances: 28 | instance.execute_operation( 29 | 'cloudify.interfaces.lifecycle.op_not_exist') 30 | 31 | 32 | @workflow 33 | def not_exist_interface_workflow(ctx, **kwargs): 34 | for node in ctx.nodes: 35 | for instance in node.instances: 36 | instance.execute_operation( 37 | 'cloudify.interfaces.interfaces_not_exist.create') 38 | 39 | 40 | @workflow 41 | def stop_workflow(ctx, **kwargs): 42 | for node in ctx.nodes: 43 | for instance in node.instances: 44 | instance.execute_operation( 45 | 'cloudify.interfaces.lifecycle.stop') 46 | 47 | 48 | class TestExecuteNotExistOperationWorkflow(testtools.TestCase): 49 | 50 | execute_blueprint_path = path.join('resources', 'blueprints', 51 | 'not_exist_op_workflow.yaml') 52 | 53 | @workflow_test(execute_blueprint_path) 54 | def test_execute_not_exist_operation(self, cfy_local): 55 | node_id = cfy_local.plan.get('node_instances')[0].get('id') 56 | try: 57 | cfy_local.execute('not_exist_op_workflow') 58 | self.fail('Expected exception due to operation not exist') 59 | except Exception as e: 60 | self.assertTrue('operation of node instance {0} does not exist' 61 | .format(node_id) in e.message) 62 | 63 | @workflow_test(execute_blueprint_path) 64 | def test_execute_not_exist_interface(self, cfy_local): 65 | node_id = cfy_local.plan.get('node_instances')[0].get('id') 66 | try: 67 | cfy_local.execute('not_exist_interface_workflow') 68 | self.fail('Expected exception due to operation not exist') 69 | except Exception as e: 70 | self.assertTrue('operation of node instance {0} does not exist' 71 | .format(node_id) in e.message) 72 | 73 | @workflow_test(execute_blueprint_path) 74 | def test_execute_stop_operation(self, cfy_local): 75 | # checks that an operation that exists in a builtin interface 76 | # does not raise an exception if it is not declared in the blueprint 77 | cfy_local.execute('stop_workflow') 78 | -------------------------------------------------------------------------------- /cloudify/tests/test_node_state.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | import testtools 18 | 19 | from cloudify import exceptions 20 | from cloudify.manager import NodeInstance 21 | 22 | 23 | class NodeStateTest(testtools.TestCase): 24 | 25 | def test_put_get(self): 26 | node = NodeInstance('instance_id', 'node_id', {}) 27 | node['key'] = 'value' 28 | self.assertEqual('value', node['key']) 29 | props = node.runtime_properties 30 | self.assertEqual(1, len(props)) 31 | self.assertEqual('value', props['key']) 32 | 33 | def test_no_updates_to_empty_node(self): 34 | node = NodeInstance('instance_id', 'node_id') 35 | self.assertEqual(0, len(node.runtime_properties)) 36 | 37 | def test_put_new_property(self): 38 | node = NodeInstance('instance_id', 'node_id') 39 | node.put('key', 'value') 40 | self.assertEqual('value', node.get('key')) 41 | props = node.runtime_properties 42 | self.assertEqual(1, len(props)) 43 | self.assertEqual('value', props['key']) 44 | 45 | def test_put_several_properties(self): 46 | node = NodeInstance('instance_id', 'node_id', {'key0': 'value0'}) 47 | node.put('key1', 'value1') 48 | node.put('key2', 'value2') 49 | props = node.runtime_properties 50 | self.assertEqual(3, len(props)) 51 | self.assertEqual('value0', props['key0']) 52 | self.assertEqual('value1', props['key1']) 53 | self.assertEqual('value2', props['key2']) 54 | 55 | def test_update_property(self): 56 | node = NodeInstance('instance_id', 'node_id') 57 | node.put('key', 'value') 58 | self.assertEqual('value', node.get('key')) 59 | props = node.runtime_properties 60 | self.assertEqual(1, len(props)) 61 | self.assertEqual('value', props['key']) 62 | 63 | def test_put_new_property_twice(self): 64 | node = NodeInstance('instance_id', 'node_id') 65 | node.put('key', 'value') 66 | node.put('key', 'v') 67 | self.assertEqual('v', node.get('key')) 68 | props = node.runtime_properties 69 | self.assertEqual(1, len(props)) 70 | self.assertEqual('v', props['key']) 71 | 72 | def test_delete_property(self): 73 | node = NodeInstance('instance_id', 'node_id') 74 | node.put('key', 'value') 75 | self.assertEquals('value', node.get('key')) 76 | node.delete('key') 77 | self.assertNotIn('key', node) 78 | 79 | def test_delete_property_sugared_syntax(self): 80 | node = NodeInstance('instance_id', 'node_id') 81 | node.put('key', 'value') 82 | self.assertEquals('value', node.get('key')) 83 | del(node['key']) 84 | self.assertNotIn('key', node) 85 | 86 | def test_delete_nonexistent_property(self): 87 | node = NodeInstance('instance_id', 'node_id') 88 | self.assertRaises(KeyError, node.delete, 'key') 89 | 90 | def test_delete_makes_properties_dirty(self): 91 | node = NodeInstance('instance_id', 'node_id', 92 | runtime_properties={'preexisting-key': 'val'}) 93 | self.assertFalse(node.dirty) 94 | del(node['preexisting-key']) 95 | self.assertTrue(node.dirty) 96 | 97 | def test_setting_runtime_properties(self): 98 | """Assignment to .runtime_properties is possible and stores them.""" 99 | node = NodeInstance('instance_id', 'node_id', 100 | runtime_properties={'preexisting-key': 'val'}) 101 | node.runtime_properties = {'other key': 'other val'} 102 | self.assertEqual({'other key': 'other val'}, node.runtime_properties) 103 | 104 | def test_setting_runtime_properties_sets_dirty(self): 105 | """Assignment to .runtime_properties sets the dirty flag.""" 106 | node = NodeInstance('instance_id', 'node_id', 107 | runtime_properties={'preexisting-key': 'val'}) 108 | node.runtime_properties = {'other key': 'other val'} 109 | self.assertTrue(node.runtime_properties.dirty) 110 | 111 | def test_setting_runtime_properties_checks_modifiable(self): 112 | """Cannot assign to .runtime_properties if modifiable is false.""" 113 | node = NodeInstance('instance_id', 'node_id', 114 | runtime_properties={'preexisting-key': 'val'}) 115 | node.runtime_properties.modifiable = False 116 | try: 117 | node.runtime_properties = {'other key': 'other val'} 118 | except exceptions.NonRecoverableError: 119 | pass 120 | else: 121 | self.fail( 122 | 'Error should be raised when assigning runtime_properties ' 123 | 'with the modifiable flag set to False') 124 | -------------------------------------------------------------------------------- /cloudify/tests/test_operation_retry.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | from StringIO import StringIO 18 | from os import path 19 | 20 | import testtools 21 | from mock import patch 22 | 23 | from cloudify import context 24 | from cloudify import decorators 25 | from cloudify import exceptions 26 | from cloudify import logs 27 | from cloudify.workflows import tasks as workflow_tasks 28 | from cloudify.test_utils import workflow_test 29 | from cloudify.test_utils import dispatch_helper 30 | 31 | RETRY_MESSAGE = 'operation will be retried' 32 | RETRY_AFTER = 10 33 | 34 | 35 | @decorators.operation 36 | def retry_operation(ctx, **_): 37 | return dispatch_helper.run(retry_operation_impl, **_) 38 | 39 | 40 | def retry_operation_impl(ctx, **_): 41 | return ctx.operation.retry(message=RETRY_MESSAGE, retry_after=RETRY_AFTER) 42 | 43 | 44 | class OperationRetryTests(testtools.TestCase): 45 | def test_operation_retry_api(self): 46 | op_name = 'operation' 47 | ctx = context.CloudifyContext({ 48 | 'operation': { 49 | 'name': op_name, 50 | 'retry_number': 0, 51 | 'max_retries': 10 52 | } 53 | }) 54 | self.assertEqual(op_name, ctx.operation.name) 55 | self.assertEqual(0, ctx.operation.retry_number) 56 | self.assertEqual(10, ctx.operation.max_retries) 57 | 58 | def test_operation_retry(self): 59 | ctx = context.CloudifyContext({}) 60 | e = self.assertRaises(exceptions.OperationRetry, 61 | retry_operation, 62 | ctx) 63 | self.assertEqual(RETRY_AFTER, e.retry_after) 64 | self.assertIn(RETRY_MESSAGE, str(e)) 65 | 66 | 67 | @decorators.operation 68 | def node_operation_retry(ctx, **kwargs): 69 | if 'counter' not in ctx.instance.runtime_properties: 70 | ctx.instance.runtime_properties['counter'] = 0 71 | counter = ctx.instance.runtime_properties['counter'] 72 | if counter != ctx.operation.retry_number: 73 | raise exceptions.NonRecoverableError( 74 | 'counter({0}) != ctx.operation.retry_number({1})'.format( 75 | counter, ctx.operation.retry_number)) 76 | expected_max_retries = 3 77 | if ctx.operation.max_retries != expected_max_retries: 78 | raise exceptions.NonRecoverableError( 79 | 'ctx.operation.max_retries is expected to be {0} but ' 80 | 'is {1}'.format(expected_max_retries, ctx.operation.max_retries)) 81 | ctx.instance.runtime_properties['counter'] = counter + 1 82 | if ctx.operation.retry_number < ctx.operation.max_retries: 83 | return ctx.operation.retry(message='Operation will be retried', 84 | retry_after=3) 85 | 86 | 87 | @decorators.workflow 88 | def execute_operation(ctx, operation, **kwargs): 89 | ignore_operations = ['lifecycle.stop'] 90 | 91 | graph = ctx.graph_mode() 92 | for instance in next(ctx.nodes).instances: 93 | task = instance.execute_operation(operation) 94 | if operation in ignore_operations: 95 | def ignore(tsk): 96 | return workflow_tasks.HandlerResult.ignore() 97 | 98 | task.on_failure = ignore 99 | graph.add_task(task) 100 | graph.execute() 101 | 102 | 103 | class OperationRetryWorkflowTests(testtools.TestCase): 104 | 105 | blueprint_path = path.join('resources', 'blueprints', 106 | 'test-operation-retry-blueprint.yaml') 107 | 108 | @workflow_test(blueprint_path) 109 | def test_operation_retry(self, cfy_local): 110 | cfy_local.execute('execute_operation', 111 | task_retries=3, 112 | task_retry_interval=1, 113 | parameters={ 114 | 'operation': 'lifecycle.start' 115 | }) 116 | instance = cfy_local.storage.get_node_instances()[0] 117 | self.assertEqual(4, instance['runtime_properties']['counter']) 118 | 119 | def test_operation_retry_task_message(self): 120 | output_buffer = StringIO() 121 | original_event_out = logs.stdout_event_out 122 | 123 | # Provide same interface for all event output 124 | def event_output(log, ctx=None): 125 | original_event_out(log) 126 | output_buffer.write('{0}\n'.format(log['message']['text'])) 127 | 128 | with patch('cloudify.logs.stdout_event_out', event_output): 129 | self.test_operation_retry() 130 | self.assertIn('Task rescheduled', output_buffer.getvalue()) 131 | self.assertIn('Operation will be retried', 132 | output_buffer.getvalue()) 133 | 134 | @workflow_test(blueprint_path) 135 | def test_ignore_operation_retry(self, cfy_local): 136 | cfy_local.execute('execute_operation', 137 | task_retries=3, 138 | task_retry_interval=1, 139 | parameters={ 140 | 'operation': 'lifecycle.stop' 141 | }) 142 | instance = cfy_local.storage.get_node_instances()[0] 143 | self.assertEqual(4, instance['runtime_properties']['counter']) 144 | -------------------------------------------------------------------------------- /cloudify/tests/test_state.py: -------------------------------------------------------------------------------- 1 | 2 | import threading 3 | import unittest 4 | from Queue import Queue 5 | 6 | 7 | from cloudify.state import ctx, current_ctx 8 | 9 | from cloudify.mocks import MockCloudifyContext 10 | 11 | 12 | class TestCurrentContextAndCtxLocalProxy(unittest.TestCase): 13 | 14 | def test_basic(self): 15 | self.assertRaises(RuntimeError, current_ctx.get_ctx) 16 | self.assertRaises(RuntimeError, lambda: ctx.instance.id) 17 | value = MockCloudifyContext(node_id='1') 18 | current_ctx.set(value) 19 | self.assertEqual(value, current_ctx.get_ctx()) 20 | self.assertEqual(value.instance.id, ctx.instance.id) 21 | current_ctx.clear() 22 | self.assertRaises(RuntimeError, current_ctx.get_ctx) 23 | self.assertRaises(RuntimeError, lambda: ctx.instance.id) 24 | 25 | def test_threads(self): 26 | num_iterations = 1000 27 | num_threads = 10 28 | for _ in range(num_iterations): 29 | queues = [Queue() for _ in range(num_threads)] 30 | 31 | def run(queue, value): 32 | try: 33 | self.assertRaises(RuntimeError, current_ctx.get_ctx) 34 | self.assertRaises(RuntimeError, lambda: ctx.instance.id) 35 | current_ctx.set(value) 36 | self.assertEqual(value, current_ctx.get_ctx()) 37 | self.assertEqual(value.instance.id, ctx.instance.id) 38 | current_ctx.clear() 39 | self.assertRaises(RuntimeError, current_ctx.get_ctx) 40 | self.assertRaises(RuntimeError, lambda: ctx.instance.id) 41 | except Exception as e: 42 | queue.put(e) 43 | else: 44 | queue.put('ok') 45 | 46 | threads = [] 47 | for index, queue in enumerate(queues): 48 | value = MockCloudifyContext(node_id=str(index)) 49 | threads.append(threading.Thread(target=run, 50 | args=(queue, value))) 51 | 52 | for thread in threads: 53 | thread.start() 54 | 55 | for queue in queues: 56 | self.assertEqual('ok', queue.get()) 57 | -------------------------------------------------------------------------------- /cloudify/tests/test_task_retry.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from os import path 17 | 18 | import testtools 19 | 20 | from cloudify import decorators 21 | from cloudify import exceptions 22 | from cloudify.test_utils import workflow_test 23 | 24 | 25 | @decorators.operation 26 | def fail_operation(**_): 27 | raise ExpectedException('TEST_EXPECTED_FAIL') 28 | 29 | 30 | @decorators.workflow 31 | def fail_execute_task(ctx, **kwargs): 32 | try: 33 | ctx.execute_task( 34 | task_name='cloudify.tests.test_task_retry.fail_operation').get() 35 | except ExpectedException: 36 | pass 37 | else: 38 | raise AssertionError() 39 | 40 | 41 | class TaskRetryWorkflowTests(testtools.TestCase): 42 | 43 | retry_blueprint_yaml = path.join('resources', 'blueprints', 44 | 'test-task-retry-blueprint.yaml') 45 | 46 | @workflow_test(retry_blueprint_yaml) 47 | def test_task_retry(self, cfy_local): 48 | cfy_local.execute('fail_execute_task', 49 | task_retries=1, 50 | task_retry_interval=0) 51 | 52 | 53 | class ExpectedException(exceptions.RecoverableError): 54 | pass 55 | -------------------------------------------------------------------------------- /cloudify/tests/test_task_retry_event_context.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from os import path 17 | 18 | import testtools 19 | from mock import patch 20 | 21 | from cloudify import decorators 22 | from cloudify import exceptions 23 | from cloudify import logs 24 | from cloudify.test_utils import workflow_test 25 | 26 | 27 | @decorators.operation 28 | def op(ctx, retry_type, **_): 29 | if 0 <= ctx.operation.max_retries <= ctx.operation.retry_number: 30 | return 31 | if retry_type == 'retry': 32 | ctx.operation.retry() 33 | elif retry_type == 'error': 34 | raise RuntimeError() 35 | elif retry_type == 'non-recoverable': 36 | raise exceptions.NonRecoverableError() 37 | 38 | 39 | @decorators.workflow 40 | def execute_operation(ctx, retry_type, **_): 41 | instance = next(next(ctx.nodes).instances) 42 | instance.execute_operation('test.op', kwargs={ 43 | 'retry_type': retry_type, 44 | }) 45 | 46 | 47 | class TaskRetryEventContextTests(testtools.TestCase): 48 | 49 | blueprint_path = path.join('resources', 'blueprints', 50 | 'test-task-retry-event-context-blueprint.yaml') 51 | 52 | @workflow_test(blueprint_path) 53 | def test_operation_retry(self, cfy_local): 54 | self._test_impl(cfy_local, 'retry', task_retries=2) 55 | 56 | @workflow_test(blueprint_path) 57 | def test_recoverable_retry(self, cfy_local): 58 | self._test_impl(cfy_local, 'error', task_retries=2) 59 | 60 | @workflow_test(blueprint_path) 61 | def test_infinite_retries(self, cfy_local): 62 | self._test_impl(cfy_local, 'non-recoverable', task_retries=-1) 63 | 64 | def _test_impl(self, cfy_local, retry_type, task_retries): 65 | events = [] 66 | original_event_out = logs.stdout_event_out 67 | 68 | # Provide same interface for all event outputs 69 | def event_output(event, ctx=None): 70 | original_event_out(event) 71 | events.append(event) 72 | with patch('cloudify.logs.stdout_event_out', event_output): 73 | try: 74 | cfy_local.execute('execute_operation', 75 | task_retries=task_retries, 76 | task_retry_interval=0, 77 | parameters={ 78 | 'retry_type': retry_type 79 | }) 80 | except exceptions.NonRecoverableError: 81 | pass 82 | events = [e for e in events if 83 | e.get('event_type', '').startswith('task_') or 84 | e.get('event_type', '').startswith('sending_task')] 85 | range_size = task_retries if task_retries > 0 else 1 86 | for i in range(range_size): 87 | # The following assertions are mostly here for sanity. 88 | # we generally want to make sure all task event contain 89 | # current_retries and total_retries so all tests try to go though 90 | # all event types. 91 | self.assertEqual('sending_task', events[i*3 + 0]['event_type']) 92 | self.assertEqual('task_started', events[i*3 + 1]['event_type']) 93 | if retry_type == 'retry': 94 | if i < task_retries: 95 | expected_type = 'task_rescheduled' 96 | else: 97 | expected_type = 'task_succeeded' 98 | elif retry_type == 'error': 99 | if i < task_retries: 100 | expected_type = 'task_failed' 101 | else: 102 | expected_type = 'task_succeeded' 103 | elif retry_type == 'non-recoverable': 104 | expected_type = 'task_failed' 105 | else: 106 | raise RuntimeError('We should not have arrived here') 107 | self.assertEqual(expected_type, events[i*3 + 2]['event_type']) 108 | 109 | # The following are the actual test assertions 110 | for j in range(3): 111 | context = events[i*3 + j]['context'] 112 | self.assertEqual(i, context['task_current_retries']) 113 | self.assertEqual(task_retries, context['task_total_retries']) 114 | -------------------------------------------------------------------------------- /cloudify/tests/test_task_subgraph.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | import testtools 18 | 19 | from cloudify import decorators 20 | from cloudify.workflows import tasks 21 | 22 | from cloudify.test_utils import workflow_test 23 | 24 | 25 | @decorators.operation 26 | def operation(ctx, arg, total_retries=0, **_): 27 | runtime_properties = ctx.instance.runtime_properties 28 | invocations = runtime_properties.get('invocations', []) 29 | invocations.append(arg) 30 | ctx.instance.runtime_properties['invocations'] = invocations 31 | 32 | current_retries = runtime_properties.get('current_retries', {}) 33 | invocation_current_retries = current_retries.get(arg, 0) 34 | if invocation_current_retries < total_retries: 35 | current_retries[arg] = invocation_current_retries + 1 36 | runtime_properties['current_retries'] = current_retries 37 | return ctx.operation.retry() 38 | 39 | 40 | @decorators.workflow 41 | def workflow(ctx, test, **_): 42 | instance = next(next(ctx.nodes).instances) 43 | graph = ctx.graph_mode() 44 | tests = { 45 | 'subgraph': _test_subgraph, 46 | 'nested_subgraph': _test_nested_subgraph, 47 | 'empty_subgraph': _test_empty_subgrap, 48 | 'task_in_subgraph_retry': _test_task_in_subgraph_retry, 49 | 'subgraph_retry': _test_subgraph_retry, 50 | 'subgraph_retry_failure': _test_subgraph_retry_failure, 51 | 'task_in_two_subgraphs': _test_task_in_two_subgraphs 52 | } 53 | tests[test](ctx, graph, instance) 54 | graph.execute() 55 | 56 | 57 | def _test_subgraph(ctx, graph, instance): 58 | seq = graph.sequence() 59 | for i in range(2): 60 | subgraph = graph.subgraph('sub{0}'.format(i)) 61 | subseq = subgraph.sequence() 62 | for j in range(2): 63 | subseq.add(instance.execute_operation( 64 | 'interface.operation', 65 | kwargs={'arg': (i, j)})) 66 | seq.add(subgraph) 67 | 68 | 69 | def _test_nested_subgraph(ctx, graph, instance): 70 | seq = graph.sequence() 71 | for i in range(2): 72 | subgraph = graph.subgraph('sub{0}'.format(i)) 73 | subseq = subgraph.sequence() 74 | for j in range(2): 75 | subsubgraph = subgraph.subgraph('subsub{0}_{1}'.format(i, j)) 76 | subsubseq = subsubgraph.sequence() 77 | for k in range(2): 78 | subsubseq.add(instance.execute_operation( 79 | 'interface.operation', 80 | kwargs={'arg': (i, j, k)})) 81 | subseq.add(subsubgraph) 82 | seq.add(subgraph) 83 | 84 | 85 | def _test_empty_subgrap(ctx, graph, instance): 86 | graph.subgraph('empty') 87 | 88 | 89 | def _test_task_in_subgraph_retry(ctx, graph, instance): 90 | seq = graph.sequence() 91 | for i in range(2): 92 | subgraph = graph.subgraph('sub{0}'.format(i)) 93 | subseq = subgraph.sequence() 94 | for j in range(2): 95 | subseq.add(instance.execute_operation( 96 | 'interface.operation', 97 | kwargs={'arg': (i, j), 98 | 'total_retries': 1})) 99 | seq.add(subgraph) 100 | 101 | 102 | def _test_subgraph_retry(ctx, graph, instance): 103 | 104 | def build_graph(total_retries): 105 | result = graph.subgraph('retried') 106 | result.add_task(instance.execute_operation( 107 | 'interface.operation', kwargs={'arg': '', 108 | 'total_retries': total_retries})) 109 | return result 110 | 111 | subgraph = build_graph(total_retries=2) 112 | 113 | def retry_handler(subgraph2): 114 | if subgraph2.failed_task.name != ('cloudify.tests.test_task_subgraph.' 115 | 'operation'): 116 | return tasks.HandlerResult.fail() 117 | result = tasks.HandlerResult.retry() 118 | result.retried_task = build_graph(total_retries=0) 119 | result.retried_task.current_retries = subgraph2.current_retries + 1 120 | return result 121 | subgraph.on_failure = retry_handler 122 | 123 | 124 | def _test_subgraph_retry_failure(ctx, graph, instance): 125 | 126 | def build_graph(): 127 | result = graph.subgraph('retried') 128 | result.add_task(instance.execute_operation( 129 | 'interface.operation', kwargs={'arg': '', 130 | 'total_retries': 20})) 131 | return result 132 | 133 | subgraph = build_graph() 134 | 135 | def retry_handler(subgraph2): 136 | result = tasks.HandlerResult.retry() 137 | result.retried_task = build_graph() 138 | result.retried_task.on_failure = retry_handler 139 | result.retried_task.current_retries = subgraph2.current_retries + 1 140 | return result 141 | subgraph.on_failure = retry_handler 142 | 143 | 144 | def _test_task_in_two_subgraphs(ctx, graph, instance): 145 | sub1 = graph.subgraph('sub1') 146 | sub2 = graph.subgraph('sub2') 147 | task = instance.execute_operation('interface.operation') 148 | sub1.add_task(task) 149 | sub2.add_task(task) 150 | 151 | 152 | class TaskSubgraphWorkflowTests(testtools.TestCase): 153 | 154 | @workflow_test('resources/blueprints/test-task-subgraph-blueprint.yaml') 155 | def setUp(self, env=None): 156 | super(TaskSubgraphWorkflowTests, self).setUp() 157 | self.env = env 158 | 159 | def _run(self, test, subgraph_retries=0): 160 | self.env.execute('workflow', 161 | parameters={'test': test}, 162 | task_retries=1, 163 | task_retry_interval=0, 164 | subgraph_retries=subgraph_retries) 165 | 166 | @property 167 | def invocations(self): 168 | return self.env.storage.get_node_instances()[0].runtime_properties[ 169 | 'invocations'] 170 | 171 | def test_task_subgraph(self): 172 | self._run('subgraph') 173 | invocations = self.invocations 174 | self.assertEqual(len(invocations), 4) 175 | self.assertEqual(invocations, sorted(invocations)) 176 | 177 | def test_nested_task_subgraph(self): 178 | self._run('nested_subgraph') 179 | invocations = self.invocations 180 | self.assertEqual(len(invocations), 8) 181 | self.assertEqual(invocations, sorted(invocations)) 182 | 183 | def test_empty_subgraph(self): 184 | self._run('empty_subgraph') 185 | 186 | def test_task_in_subgraph_retry(self): 187 | self._run('task_in_subgraph_retry') 188 | invocations = self.invocations 189 | self.assertEqual(len(invocations), 8) 190 | self.assertEqual(invocations, sorted(invocations)) 191 | for i in range(4): 192 | self.assertEqual(invocations[2*i], invocations[2*i+1]) 193 | 194 | def test_subgraph_retry_sanity(self): 195 | self.assertRaises(RuntimeError, 196 | self._run, 'subgraph_retry', subgraph_retries=0) 197 | self.assertEqual(len(self.invocations), 2) 198 | 199 | def test_subgraph_retry(self): 200 | self._run('subgraph_retry', subgraph_retries=1) 201 | self.assertEqual(len(self.invocations), 3) 202 | 203 | def test_subgraph_retry_failure(self): 204 | self.assertRaises(RuntimeError, 205 | self._run, 'subgraph_retry_failure', 206 | subgraph_retries=2) 207 | self.assertEqual(len(self.invocations), 6) 208 | 209 | def test_invalid_task_in_two_subgraphs(self): 210 | self.assertRaises(RuntimeError, 211 | self._run, 'task_in_two_subgraphs') 212 | -------------------------------------------------------------------------------- /cloudify/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import os 17 | import mock 18 | import logging 19 | import tempfile 20 | import unittest 21 | 22 | from cloudify.exceptions import CommandExecutionException 23 | from cloudify.utils import setup_logger, get_exec_tempdir, LocalCommandRunner 24 | 25 | 26 | class LocalCommandRunnerTest(unittest.TestCase): 27 | 28 | runner = None 29 | 30 | @classmethod 31 | def setUpClass(cls): 32 | cls.logger = setup_logger(cls.__name__) 33 | cls.logger.setLevel(logging.DEBUG) 34 | cls.runner = LocalCommandRunner( 35 | logger=cls.logger) 36 | 37 | def test_run_command_success(self): 38 | response = self.runner.run('echo Hello') 39 | self.assertEqual('Hello', response.std_out) 40 | self.assertEqual(0, response.return_code) 41 | self.assertEqual('', response.std_err) 42 | 43 | def test_run_command_error(self): 44 | try: 45 | self.runner.run('/bin/sh -c bad') 46 | self.fail('Expected CommandExecutionException due to Bad command') 47 | except CommandExecutionException as e: 48 | self.assertTrue(1, e.code) 49 | 50 | def test_run_command_with_env(self): 51 | response = self.runner.run('env', 52 | execution_env={'TEST_KEY': 'TEST_VALUE'}) 53 | self.assertTrue('TEST_KEY=TEST_VALUE' in response.std_out) 54 | 55 | 56 | class TempdirTest(unittest.TestCase): 57 | def test_executable_no_override(self): 58 | sys_default_tempdir = tempfile.gettempdir() 59 | self.assertEqual(sys_default_tempdir, get_exec_tempdir()) 60 | 61 | @mock.patch.dict(os.environ, {'CFY_EXEC_TEMP': '/fake/temp'}) 62 | def test_executable_override(self): 63 | self.assertEqual('/fake/temp', get_exec_tempdir()) 64 | -------------------------------------------------------------------------------- /cloudify/tests/workflows.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from cloudify.decorators import workflow 17 | 18 | 19 | @workflow 20 | def execute_operation(ctx, operation, testing, nodes, **_): 21 | for node_id in nodes: 22 | node = ctx.get_node(node_id) 23 | instance = next(node.instances) 24 | instance.execute_operation(operation, kwargs={'testing': testing}) 25 | -------------------------------------------------------------------------------- /cloudify/workflows/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | __author__ = 'dank' 17 | 18 | from cloudify.workflows import workflow_api as api # noqa 19 | from cloudify.state import workflow_ctx as ctx # noqa 20 | from cloudify.state import workflow_parameters as parameters # noqa 21 | -------------------------------------------------------------------------------- /cloudify/workflows/workflow_api.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | EXECUTION_CANCELLED_RESULT = 'execution_cancelled' 18 | 19 | cancel_request = False 20 | 21 | 22 | def has_cancel_request(): 23 | """ 24 | Checks for requests to cancel the workflow execution. 25 | This should be used to allow graceful termination of workflow executions. 26 | 27 | If this method is not used and acted upon, a simple 'cancel' 28 | request for the execution will have no effect - 'force-cancel' will have 29 | to be used to abruptly terminate the execution instead. 30 | 31 | Note: When this method returns True, the workflow should make the 32 | appropriate cleanups and then it must raise an ExecutionCancelled error 33 | if the execution indeed gets cancelled (i.e. if it's too late to cancel 34 | there is no need to raise this exception and the workflow should end 35 | normally). 36 | 37 | :return: whether there was a request to cancel the workflow execution 38 | """ 39 | return cancel_request 40 | 41 | 42 | class ExecutionCancelled(Exception): 43 | """ 44 | This exception should be raised when a workflow has been cancelled, 45 | once appropriate cleanups have taken place. 46 | """ 47 | pass 48 | -------------------------------------------------------------------------------- /ctx_wrappers/ctx-sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | function ctx() { 4 | command ctx "$@" || exit $? 5 | } -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | https://github.com/cloudify-cosmo/cloudify-dsl-parser/archive/master.zip 2 | https://github.com/cloudify-cosmo/cloudify-rest-client/archive/master.zip 3 | pyzmq==15.1.0 4 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/cloudify-plugins-common.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/cloudify-plugins-common.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/cloudify-plugins-common" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/cloudify-plugins-common" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/context.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | Context 3 | ========== 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.context 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | :exclude-members: ImmutableProperties 13 | -------------------------------------------------------------------------------- /docs/decorators.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | Decorators 3 | ========== 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.decorators 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | :exclude-members: task 13 | -------------------------------------------------------------------------------- /docs/exceptions.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | Exceptions 3 | ========== 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.exceptions 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. cloudify-cli documentation master file, created by 2 | sphinx-quickstart on Thu Jun 12 15:30:03 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to cloudify-plugin-common's documentation! 7 | ================================================== 8 | 9 | This is the API reference to the cloudify-plugins-common module which is required when writing any cloudify plugin (and workflow). 10 | 11 | Contents: 12 | 13 | .. toctree:: 14 | :maxdepth: 2 15 | 16 | context 17 | decorators 18 | exceptions 19 | manager 20 | mocks 21 | utils 22 | logs 23 | workflows 24 | 25 | Indices and tables 26 | ================== 27 | 28 | * :ref:`genindex` 29 | * :ref:`modindex` 30 | * :ref:`search` 31 | 32 | -------------------------------------------------------------------------------- /docs/logs.rst: -------------------------------------------------------------------------------- 1 | ==== 2 | Logs 3 | ==== 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.logs 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: -------------------------------------------------------------------------------- /docs/manager.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Manager 3 | ======= 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.manager 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | :exclude-members: DirtyTrackingDict, get_host_node_instance_ip, dirty, host_id, version, get, put -------------------------------------------------------------------------------- /docs/mocks.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Mocks 3 | ===== 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.mocks 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: -------------------------------------------------------------------------------- /docs/utils.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Utils 3 | ===== 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.utils 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | :exclude-members: setup_logger, create_temp_folder, ctx, workflow_ctx 13 | -------------------------------------------------------------------------------- /docs/workflow_api.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Workflow API 3 | ============ 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.workflows.workflow_api 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: -------------------------------------------------------------------------------- /docs/workflow_context.rst: -------------------------------------------------------------------------------- 1 | ================ 2 | Workflow Context 3 | ================ 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.workflows.workflow_context 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | :exclude-members: internal, CloudifyWorkflowContextInternal -------------------------------------------------------------------------------- /docs/workflow_tasks_graph.rst: -------------------------------------------------------------------------------- 1 | ==================== 2 | Workflow Tasks Graph 3 | ==================== 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | .. automodule:: cloudify.workflows.tasks_graph 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | :exclude-members: done_states 13 | -------------------------------------------------------------------------------- /docs/workflows.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Workflows 3 | ========= 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | workflow_tasks_graph 9 | workflow_api 10 | workflow_context -------------------------------------------------------------------------------- /packaging/provision.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | SUDO="" 4 | 5 | function print_plugins_params() { 6 | 7 | echo "## print common parameters" 8 | 9 | declare -A params=( ["PLUGIN_NAME"]=$PLUGIN_NAME ["PLUGIN_TAG_NAME"]=$PLUGIN_TAG_NAME \ 10 | ["PLUGIN_S3_FOLDER"]=$PLUGIN_S3_FOLDER ) 11 | for param in "${!params[@]}" 12 | do 13 | echo "$param - ${params["$param"]}" 14 | done 15 | } 16 | 17 | function install_dependencies(){ 18 | echo "## Installing necessary dependencies" 19 | 20 | if which yum; then 21 | sudo yum -y install python-devel gcc openssl git libxslt-devel libxml2-devel openldap-devel libffi-devel openssl-devel libvirt-devel 22 | elif which apt-get; then 23 | sudo apt-get update && 24 | sudo apt-get -y install build-essential python-dev gcc openssl libffi-dev libssl-dev libvirt-dev 25 | else 26 | echo 'probably windows machine' 27 | pip install virtualenv 28 | return 29 | fi 30 | sudo pip install virtualenv 31 | } 32 | 33 | function install_wagon(){ 34 | echo "## installing wagon" 35 | virtualenv env 36 | source env/bin/activate 37 | if which yum; then 38 | echo 'redaht/centos machine' 39 | elif which apt-get; then 40 | echo 'ubuntu/debian machine' 41 | else 42 | echo 'probably windows machine' 43 | fi 44 | pip install --upgrade pip==9.0.1 45 | pip install wagon==0.3.2 46 | } 47 | 48 | function wagon_create_package(){ 49 | 50 | echo "## wagon create package" 51 | echo "git clone https://$GITHUB_USERNAME:$GITHUB_PASSWORD@github.com/$GITHUB_ORGANIZATION/$PLUGIN_NAME.git" 52 | git clone https://$GITHUB_USERNAME:$GITHUB_PASSWORD@github.com/$GITHUB_ORGANIZATION/$PLUGIN_NAME.git 53 | pushd $PLUGIN_NAME 54 | if [ "$PLUGIN_TAG_NAME" == "master" ];then 55 | git checkout master 56 | else 57 | git checkout -b $PLUGIN_TAG_NAME origin/$PLUGIN_TAG_NAME 58 | fi 59 | popd 60 | echo "manylinux1_compatible = False" > "env/bin/_manylinux.py" 61 | mkdir create_wagon ; cd create_wagon 62 | if [ ! -z "$CONSTRAINTS_FILE" ] && [ -f "/vagrant/$CONSTRAINTS_FILE" ];then 63 | wagon create -s ../$PLUGIN_NAME/ --validate -v -f -a '--no-cache-dir -c /vagrant/'$CONSTRAINTS_FILE'' 64 | else 65 | wagon create -s ../$PLUGIN_NAME/ --validate -v -f 66 | fi 67 | } 68 | 69 | 70 | 71 | # VERSION/PRERELEASE/BUILD must be exported as they is being read as an env var by the cloudify-agent-packager 72 | export CORE_TAG_NAME="4.4.dev1" 73 | export CORE_BRANCH="master" 74 | curl https://raw.githubusercontent.com/cloudify-cosmo/cloudify-packager/$CORE_BRANCH/common/provision.sh -o ./common-provision.sh && 75 | source common-provision.sh 76 | 77 | 78 | GITHUB_USERNAME=$1 79 | GITHUB_PASSWORD=$2 80 | AWS_ACCESS_KEY_ID=$3 81 | AWS_ACCESS_KEY=$4 82 | PLUGIN_NAME=$5 83 | PLUGIN_TAG_NAME=$6 84 | PLUGIN_S3_FOLDER=$7 85 | GITHUB_ORGANIZATION=$8 86 | CONSTRAINTS_FILE=$9 87 | 88 | export AWS_S3_BUCKET="cloudify-release-eu" 89 | export AWS_S3_PATH="cloudify/wagons/$PLUGIN_NAME/$PLUGIN_S3_FOLDER" 90 | 91 | install_common_prereqs && 92 | print_plugins_params 93 | install_dependencies && 94 | install_wagon && 95 | wagon_create_package && 96 | create_md5 "wgn" && 97 | [ -z ${AWS_ACCESS_KEY} ] || upload_to_s3 "wgn" && upload_to_s3 "md5" 98 | 99 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from setuptools import setup 17 | 18 | install_requires = [ 19 | 'cloudify-rest-client==4.4.dev1', 20 | 'pika==0.9.14', 21 | 'networkx==1.9.1', 22 | 'proxy_tools==0.1.0', 23 | 'bottle==0.12.7', 24 | 'jinja2==2.7.2' 25 | ] 26 | 27 | try: 28 | import importlib # noqa 29 | except ImportError: 30 | install_requires.append('importlib') 31 | 32 | try: 33 | import argparse # NOQA 34 | except ImportError as e: 35 | install_requires.append('argparse==1.2.2') 36 | 37 | 38 | try: 39 | from collections import OrderedDict # noqa 40 | except ImportError: 41 | install_requires.append('ordereddict==1.1') 42 | 43 | 44 | setup( 45 | name='cloudify-plugins-common', 46 | version='4.4.dev1', 47 | author='cosmo-admin', 48 | author_email='cosmo-admin@gigaspaces.com', 49 | packages=['cloudify', 50 | 'cloudify.compute', 51 | 'cloudify.workflows', 52 | 'cloudify.plugins', 53 | 'cloudify.celery', 54 | 'cloudify.proxy', 55 | 'cloudify.test_utils', 56 | 'cloudify.ctx_wrappers'], 57 | license='LICENSE', 58 | description='Contains necessary decorators and utility methods for ' 59 | 'writing Cloudify plugins', 60 | zip_safe=False, 61 | install_requires=install_requires, 62 | entry_points={ 63 | 'console_scripts': [ 64 | 'ctx = cloudify.proxy.client:main', 65 | ] 66 | }, 67 | package_data={'cloudify.ctx_wrappers': ['ctx.py']}, 68 | scripts=[ 69 | 'ctx_wrappers/ctx-sh' 70 | ] 71 | ) 72 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | testtools 2 | mock==1.0.1 3 | celery==3.1.17 4 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # content of: tox.ini , put in same dir as setup.py 2 | [tox] 3 | envlist=flake8, 4 | test_{py27,py26} 5 | 6 | [testenv] 7 | deps = 8 | -rdev-requirements.txt 9 | -rtest-requirements.txt 10 | 11 | [testenv:test_py27] 12 | deps = 13 | # this fixes issue with tox installing coverage --pre 14 | coverage==3.7.1 15 | nose 16 | nose-cov 17 | testfixtures 18 | {[testenv]deps} 19 | commands=nosetests -s --with-cov --cov cloudify cloudify/tests 20 | 21 | [testenv:test_py26] 22 | deps = 23 | # this fixes issue with tox installing coverage --pre 24 | coverage==3.7.1 25 | nose 26 | nose-cov 27 | testfixtures 28 | {[testenv]deps} 29 | commands=nosetests -s --with-cov --cov cloudify cloudify/tests 30 | 31 | [testenv:flake8] 32 | deps = 33 | flake8 34 | {[testenv]deps} 35 | commands=flake8 cloudify 36 | --------------------------------------------------------------------------------