├── .circleci └── config.yml ├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── README.rst ├── docs ├── Makefile ├── conf.py └── index.rst ├── dsl_parser ├── __init__.py ├── constants.py ├── elements │ ├── __init__.py │ ├── blueprint.py │ ├── data_types.py │ ├── imports.py │ ├── misc.py │ ├── node_templates.py │ ├── node_types.py │ ├── operation.py │ ├── plugins.py │ ├── policies.py │ ├── relationships.py │ ├── scalable.py │ ├── types.py │ ├── version.py │ └── workflows.py ├── exceptions.py ├── framework │ ├── __init__.py │ ├── elements.py │ ├── parser.py │ └── requirements.py ├── functions.py ├── holder.py ├── import_resolver │ ├── __init__.py │ ├── abstract_import_resolver.py │ └── default_import_resolver.py ├── interfaces │ ├── __init__.py │ ├── constants.py │ ├── interfaces_merger.py │ ├── interfaces_parser.py │ ├── operation_merger.py │ └── utils.py ├── models.py ├── multi_instance.py ├── parser.py ├── rel_graph.py ├── scan.py ├── tasks.py ├── tests │ ├── __init__.py │ ├── abstract_test_parser.py │ ├── interfaces │ │ ├── __init__.py │ │ ├── test_interfaces_merger.py │ │ ├── test_interfaces_parser.py │ │ └── test_operation_merger.py │ ├── scaling │ │ ├── __init__.py │ │ ├── test_groups.py │ │ ├── test_modify.py │ │ ├── test_multi_instance.py │ │ └── test_scaling_policies_and_groups.py │ ├── test_data_types.py │ ├── test_deafult_import_resolver.py │ ├── test_deployment_update.py │ ├── test_framework_parser.py │ ├── test_functions.py │ ├── test_get_attribute.py │ ├── test_get_secret.py │ ├── test_import_resolver.py │ ├── test_inputs.py │ ├── test_outputs.py │ ├── test_parse_with_resolver.py │ ├── test_parser_api.py │ ├── test_parser_format_exceptions.py │ ├── test_parser_logic_exceptions.py │ ├── test_plugins.py │ ├── test_plugins_to_install.py │ ├── test_register_function.py │ ├── test_yaml_anchors.py │ └── utils.py ├── utils.py ├── version.py └── yaml_loader.py ├── setup.py ├── test-requirements.txt └── tox.ini /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | checkout: 4 | post: 5 | - > 6 | if [ -n "$CI_PULL_REQUEST" ]; then 7 | PR_ID=${CI_PULL_REQUEST##*/} 8 | git fetch origin +refs/pull/$PR_ID/merge: 9 | git checkout -qf FETCH_HEAD 10 | fi 11 | 12 | defaults: 13 | - &test_defaults 14 | docker: 15 | - image: circleci/python:2.7 16 | steps: 17 | - checkout 18 | - run: 19 | name: Install tox, NOTICE we use an old version of tox because of CFY-6398 ( relying dict ordering) 20 | command: sudo pip install tox==1.6.1 21 | - run: 22 | name: Run tox of specfic environment 23 | command: tox -e $DO_ENV 24 | 25 | - &test_defaults_for_python26 26 | docker: 27 | - image: circleci/python:2.7 28 | steps: 29 | - checkout 30 | - run: 31 | name: Install and set python version with pyenv 32 | command: | 33 | git clone https://github.com/yyuu/pyenv.git ~/.pyenv 34 | export PYENV_ROOT="$HOME/.pyenv" 35 | export PATH="$PYENV_ROOT/bin:$PATH" 36 | sudo apt-get install -y build-essential libssl1.0-dev zlib1g-dev xz-utils 37 | pyenv install 2.6.9 38 | pyenv local 2.6.9 39 | - run: 40 | name: Install tox, NOTICE we use an old version of tox because of CFY-6398 (relying dict ordering) 41 | command: sudo pip install tox==1.6.1 tox-pyenv 42 | - run: 43 | name: Run tox of specfic environment 44 | command: tox -e $DO_ENV 45 | 46 | jobs: 47 | flake8: 48 | <<: *test_defaults 49 | environment: 50 | DO_ENV: flake8 51 | 52 | test_py27: 53 | <<: *test_defaults 54 | environment: 55 | DO_ENV: test_py27 56 | 57 | test_py26: 58 | <<: *test_defaults_for_python26 59 | environment: 60 | DO_ENV: test_py26 61 | 62 | workflows: 63 | version: 2 64 | 65 | build_and_test: 66 | jobs: 67 | - flake8 68 | - test_py27 69 | - test_py26 70 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | bin/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # Installer logs 26 | pip-log.txt 27 | pip-delete-this-directory.txt 28 | 29 | # Unit test / coverage reports 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .cache 34 | nosetests.xml 35 | coverage.xml 36 | 37 | # Translations 38 | *.mo 39 | 40 | # IDE 41 | .idea/ 42 | .mr.developer.cfg 43 | .project 44 | .pydevproject 45 | 46 | # Rope 47 | .ropeproject 48 | 49 | # Django stuff: 50 | *.log 51 | *.pot 52 | 53 | # Sphinx documentation 54 | docs/_build/ 55 | 56 | *.iml 57 | 58 | *COMMIT_MSG 59 | 60 | *.noseids 61 | 62 | # QuickBuild 63 | .qbcache/ 64 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, and 10 | distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by the copyright 13 | owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all other entities 16 | that control, are controlled by, or are under common control with that entity. 17 | For the purposes of this definition, "control" means (i) the power, direct or 18 | indirect, to cause the direction or management of such entity, whether by 19 | contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the 20 | outstanding shares, or (iii) beneficial ownership of such entity. 21 | 22 | "You" (or "Your") shall mean an individual or Legal Entity exercising 23 | permissions granted by this License. 24 | 25 | "Source" form shall mean the preferred form for making modifications, including 26 | but not limited to software source code, documentation source, and configuration 27 | files. 28 | 29 | "Object" form shall mean any form resulting from mechanical transformation or 30 | translation of a Source form, including but not limited to compiled object code, 31 | generated documentation, and conversions to other media types. 32 | 33 | "Work" shall mean the work of authorship, whether in Source or Object form, made 34 | available under the License, as indicated by a copyright notice that is included 35 | in or attached to the work (an example is provided in the Appendix below). 36 | 37 | "Derivative Works" shall mean any work, whether in Source or Object form, that 38 | is based on (or derived from) the Work and for which the editorial revisions, 39 | annotations, elaborations, or other modifications represent, as a whole, an 40 | original work of authorship. For the purposes of this License, Derivative Works 41 | shall not include works that remain separable from, or merely link (or bind by 42 | name) to the interfaces of, the Work and Derivative Works thereof. 43 | 44 | "Contribution" shall mean any work of authorship, including the original version 45 | of the Work and any modifications or additions to that Work or Derivative Works 46 | thereof, that is intentionally submitted to Licensor for inclusion in the Work 47 | by the copyright owner or by an individual or Legal Entity authorized to submit 48 | on behalf of the copyright owner. For the purposes of this definition, 49 | "submitted" means any form of electronic, verbal, or written communication sent 50 | to the Licensor or its representatives, including but not limited to 51 | communication on electronic mailing lists, source code control systems, and 52 | issue tracking systems that are managed by, or on behalf of, the Licensor for 53 | the purpose of discussing and improving the Work, but excluding communication 54 | that is conspicuously marked or otherwise designated in writing by the copyright 55 | owner as "Not a Contribution." 56 | 57 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf 58 | of whom a Contribution has been received by Licensor and subsequently 59 | incorporated within the Work. 60 | 61 | 2. Grant of Copyright License. 62 | 63 | Subject to the terms and conditions of this License, each Contributor hereby 64 | grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, 65 | irrevocable copyright license to reproduce, prepare Derivative Works of, 66 | publicly display, publicly perform, sublicense, and distribute the Work and such 67 | Derivative Works in Source or Object form. 68 | 69 | 3. Grant of Patent License. 70 | 71 | Subject to the terms and conditions of this License, each Contributor hereby 72 | grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, 73 | irrevocable (except as stated in this section) patent license to make, have 74 | made, use, offer to sell, sell, import, and otherwise transfer the Work, where 75 | such license applies only to those patent claims licensable by such Contributor 76 | that are necessarily infringed by their Contribution(s) alone or by combination 77 | of their Contribution(s) with the Work to which such Contribution(s) was 78 | submitted. If You institute patent litigation against any entity (including a 79 | cross-claim or counterclaim in a lawsuit) alleging that the Work or a 80 | Contribution incorporated within the Work constitutes direct or contributory 81 | patent infringement, then any patent licenses granted to You under this License 82 | for that Work shall terminate as of the date such litigation is filed. 83 | 84 | 4. Redistribution. 85 | 86 | You may reproduce and distribute copies of the Work or Derivative Works thereof 87 | in any medium, with or without modifications, and in Source or Object form, 88 | provided that You meet the following conditions: 89 | 90 | You must give any other recipients of the Work or Derivative Works a copy of 91 | this License; and 92 | You must cause any modified files to carry prominent notices stating that You 93 | changed the files; and 94 | You must retain, in the Source form of any Derivative Works that You distribute, 95 | all copyright, patent, trademark, and attribution notices from the Source form 96 | of the Work, excluding those notices that do not pertain to any part of the 97 | Derivative Works; and 98 | If the Work includes a "NOTICE" text file as part of its distribution, then any 99 | Derivative Works that You distribute must include a readable copy of the 100 | attribution notices contained within such NOTICE file, excluding those notices 101 | that do not pertain to any part of the Derivative Works, in at least one of the 102 | following places: within a NOTICE text file distributed as part of the 103 | Derivative Works; within the Source form or documentation, if provided along 104 | with the Derivative Works; or, within a display generated by the Derivative 105 | Works, if and wherever such third-party notices normally appear. The contents of 106 | the NOTICE file are for informational purposes only and do not modify the 107 | License. You may add Your own attribution notices within Derivative Works that 108 | You distribute, alongside or as an addendum to the NOTICE text from the Work, 109 | provided that such additional attribution notices cannot be construed as 110 | modifying the License. 111 | You may add Your own copyright statement to Your modifications and may provide 112 | additional or different license terms and conditions for use, reproduction, or 113 | distribution of Your modifications, or for any such Derivative Works as a whole, 114 | provided Your use, reproduction, and distribution of the Work otherwise complies 115 | with the conditions stated in this License. 116 | 117 | 5. Submission of Contributions. 118 | 119 | Unless You explicitly state otherwise, any Contribution intentionally submitted 120 | for inclusion in the Work by You to the Licensor shall be under the terms and 121 | conditions of this License, without any additional terms or conditions. 122 | Notwithstanding the above, nothing herein shall supersede or modify the terms of 123 | any separate license agreement you may have executed with Licensor regarding 124 | such Contributions. 125 | 126 | 6. Trademarks. 127 | 128 | This License does not grant permission to use the trade names, trademarks, 129 | service marks, or product names of the Licensor, except as required for 130 | reasonable and customary use in describing the origin of the Work and 131 | reproducing the content of the NOTICE file. 132 | 133 | 7. Disclaimer of Warranty. 134 | 135 | Unless required by applicable law or agreed to in writing, Licensor provides the 136 | Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, 137 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, 138 | including, without limitation, any warranties or conditions of TITLE, 139 | NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are 140 | solely responsible for determining the appropriateness of using or 141 | redistributing the Work and assume any risks associated with Your exercise of 142 | permissions under this License. 143 | 144 | 8. Limitation of Liability. 145 | 146 | In no event and under no legal theory, whether in tort (including negligence), 147 | contract, or otherwise, unless required by applicable law (such as deliberate 148 | and grossly negligent acts) or agreed to in writing, shall any Contributor be 149 | liable to You for damages, including any direct, indirect, special, incidental, 150 | or consequential damages of any character arising as a result of this License or 151 | out of the use or inability to use the Work (including but not limited to 152 | damages for loss of goodwill, work stoppage, computer failure or malfunction, or 153 | any and all other commercial damages or losses), even if such Contributor has 154 | been advised of the possibility of such damages. 155 | 156 | 9. Accepting Warranty or Additional Liability. 157 | 158 | While redistributing the Work or Derivative Works thereof, You may choose to 159 | offer, and charge a fee for, acceptance of support, warranty, indemnity, or 160 | other liability obligations and/or rights consistent with this License. However, 161 | in accepting such obligations, You may act only on Your own behalf and on Your 162 | sole responsibility, not on behalf of any other Contributor, and only if You 163 | agree to indemnify, defend, and hold each Contributor harmless for any liability 164 | incurred by, or claims asserted against, such Contributor by reason of your 165 | accepting any such warranty or additional liability. 166 | 167 | END OF TERMS AND CONDITIONS 168 | 169 | APPENDIX: How to apply the Apache License to your work 170 | 171 | To apply the Apache License to your work, attach the following boilerplate 172 | notice, with the fields enclosed by brackets "[]" replaced with your own 173 | identifying information. (Don't include the brackets!) The text should be 174 | enclosed in the appropriate comment syntax for the file format. We also 175 | recommend that a file or class name and description of purpose be included on 176 | the same "printed page" as the copyright notice for easier identification within 177 | third-party archives. 178 | 179 | Copyright [yyyy] [name of copyright owner] 180 | 181 | Licensed under the Apache License, Version 2.0 (the "License"); 182 | you may not use this file except in compliance with the License. 183 | You may obtain a copy of the License at 184 | 185 | http://www.apache.org/licenses/LICENSE-2.0 186 | 187 | Unless required by applicable law or agreed to in writing, software 188 | distributed under the License is distributed on an "AS IS" BASIS, 189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 190 | See the License for the specific language governing permissions and 191 | limitations under the License. 192 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: release install files test docs prepare publish 2 | 3 | all: 4 | @echo "make release - prepares a release and publishes it" 5 | @echo "make dev - prepares a development environment" 6 | @echo "make install - install on local system" 7 | @echo "make files - update changelog and todo files" 8 | @echo "make test - run tox" 9 | @echo "make docs - build docs" 10 | @echo "prepare - prepare module for release (CURRENTLY IRRELEVANT)" 11 | @echo "make publish - upload to pypi" 12 | 13 | release: test docs publish 14 | 15 | dev: 16 | pip install -rdev-requirements.txt 17 | python setup.py develop 18 | 19 | install: 20 | python setup.py install 21 | 22 | files: 23 | grep '# TODO' -rn * --exclude-dir=docs --exclude-dir=build --exclude=TODO.md | sed 's/: \+#/: # /g;s/:#/: # /g' | sed -e 's/^/- /' | grep -v Makefile > TODO.md 24 | git log --oneline --decorate --color > CHANGELOG 25 | 26 | test: 27 | pip install tox==1.6.1 28 | tox 29 | 30 | docs: 31 | pip install sphinx sphinx-rtd-theme 32 | cd docs && make html 33 | pandoc README.md -f markdown -t rst -s -o README.rst 34 | 35 | prepare: 36 | python scripts/make-release.py 37 | 38 | publish: 39 | python setup.py sdist upload -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Cloudify DSL Parser 2 | =================== 3 | 4 | [![Build Status](https://travis-ci.org/cloudify-cosmo/cloudify-dsl-parser.svg?branch=master)](https://travis-ci.org/cloudify-cosmo/cloudify-dsl-parser) 5 | [![Circle CI](https://circleci.com/gh/cloudify-cosmo/cloudify-system-tests/tree/master.svg?&style=shield)](https://circleci.com/gh/cloudify-cosmo/cloudify-dsl-parser/tree/master) 6 | [![PyPI](http://img.shields.io/pypi/dm/cloudify-dsl-parser.svg)](http://img.shields.io/pypi/dm/cloudify-dsl-parser.svg) 7 | [![PypI](http://img.shields.io/pypi/v/cloudify-dsl-parser.svg)](http://img.shields.io/pypi/v/cloudify-dsl-parser.svg) 8 | 9 | 10 | Cloudify DSL parsing package 11 | 12 | ## Reference 13 | 14 | For details on the DSL specification see [DSL Specification](http://docs.getcloudify.org/latest/blueprints/overview/). 15 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | cosmo-plugin-dsl-parser 2 | ======================= 3 | 4 | This package contains a plugin that manipulates the recipe dsl 5 | 6 | - Build Status (master) |Build Status| 7 | 8 | .. |Build Status| image:: https://secure.travis-ci.org/CloudifySource/cosmo-plugin-dsl-parser.png?branch=master 9 | :target: http://travis-ci.org/CloudifySource/cosmo-plugin-dsl-parser 10 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/cloudify-dsl-parser.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/cloudify-dsl-parser.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/cloudify-dsl-parser" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/cloudify-dsl-parser" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # flake8: NOQA 2 | # -*- coding: utf-8 -*- 3 | # 4 | # packman documentation build configuration file, created by 5 | # sphinx-quickstart on Thu Apr 3 23:59:36 2014. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True' 19 | 20 | if not on_rtd: # only import and set the theme if we're building docs locally 21 | import sphinx_rtd_theme 22 | html_theme = 'sphinx_rtd_theme' 23 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 24 | 25 | # General information about the project. 26 | project = 'cloudify-dsl-parser' 27 | package = 'dsl_parser' 28 | author = 'Gigaspaces' 29 | copyright = '2014, Gigaspaces' 30 | 31 | # If extensions (or modules to document with autodoc) are in another directory, 32 | # add these directories to sys.path here. If the directory is relative to the 33 | # documentation root, use os.path.abspath to make it absolute, like shown here. 34 | sys.path.insert(0, os.path.abspath('../{}'.format(package))) 35 | sys.path.insert(0, os.path.abspath('..')) 36 | sys.path.insert(0, os.path.abspath('../..')) 37 | 38 | # -- General configuration ------------------------------------------------ 39 | 40 | # If your documentation needs a minimal Sphinx version, state it here. 41 | #needs_sphinx = '1.0' 42 | 43 | # Add any Sphinx extension module names here, as strings. They can be 44 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 45 | # ones. 46 | extensions = [ 47 | 'sphinx.ext.autodoc', 48 | 'sphinx.ext.doctest', 49 | 'sphinx.ext.coverage', 50 | 'sphinx.ext.ifconfig', 51 | 'sphinx.ext.viewcode' 52 | ] 53 | 54 | # Add any paths that contain templates here, relative to this directory. 55 | templates_path = ['_templates'] 56 | 57 | # The suffix of source filenames. 58 | source_suffix = '.rst' 59 | 60 | # The encoding of source files. 61 | #source_encoding = 'utf-8-sig' 62 | 63 | # The master toctree document. 64 | master_doc = 'index' 65 | 66 | # The version info for the project you're documenting, acts as replacement for 67 | # |version| and |release|, also used in various other places throughout the 68 | # built documents. 69 | # 70 | # The short X.Y version. 71 | 72 | import pkg_resources 73 | try: 74 | release = pkg_resources.get_distribution(project).version 75 | except pkg_resources.DistributionNotFound: 76 | print 'To build the documentation, The distribution information of packm' 77 | print 'Has to be available. Either install the package into your' 78 | print 'development environment or run "setup.py develop" to setup the' 79 | print 'metadata. A virtualenv is recommended!' 80 | sys.exit(1) 81 | except Exception as e: 82 | print e 83 | del pkg_resources 84 | # release = '0.1.0' 85 | version = '.'.join(release.split('.')[:2]) 86 | # The language for content autogenerated by Sphinx. Refer to documentation 87 | # for a list of supported languages. 88 | #language = None 89 | 90 | # There are two options for replacing |today|: either, you set today to some 91 | # non-false value, then it is used: 92 | #today = '' 93 | # Else, today_fmt is used as the format for a strftime call. 94 | #today_fmt = '%B %d, %Y' 95 | 96 | # List of patterns, relative to source directory, that match files and 97 | # directories to ignore when looking for source files. 98 | exclude_patterns = ['_build'] 99 | 100 | # The reST default role (used for this markup: `text`) to use for all 101 | # documents. 102 | #default_role = None 103 | 104 | # If true, '()' will be appended to :func: etc. cross-reference text. 105 | #add_function_parentheses = True 106 | 107 | # If true, the current module name will be prepended to all description 108 | # unit titles (such as .. function::). 109 | #add_module_names = True 110 | 111 | # If true, sectionauthor and moduleauthor directives will be shown in the 112 | # output. They are ignored by default. 113 | #show_authors = False 114 | 115 | # The name of the Pygments (syntax highlighting) style to use. 116 | pygments_style = 'sphinx' 117 | 118 | # A list of ignored prefixes for module index sorting. 119 | #modindex_common_prefix = [] 120 | 121 | # If true, keep warnings as "system message" paragraphs in the built documents. 122 | #keep_warnings = False 123 | 124 | # -- Options for HTML output ---------------------------------------------- 125 | 126 | # The theme to use for HTML and HTML Help pages. See the documentation for 127 | # a list of builtin themes. 128 | # html_theme = 'sphinx_rtd_theme' 129 | 130 | # Theme options are theme-specific and customize the look and feel of a theme 131 | # further. For a list of options available for each theme, see the 132 | # documentation. 133 | #html_theme_options = {} 134 | 135 | # Add any paths that contain custom themes here, relative to this directory. 136 | # html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 137 | 138 | # The name for this set of Sphinx documents. If None, it defaults to 139 | # " v documentation". 140 | #html_title = None 141 | 142 | # A shorter title for the navigation bar. Default is the same as html_title. 143 | #html_short_title = None 144 | 145 | # The name of an image file (relative to this directory) to place at the top 146 | # of the sidebar. 147 | #html_logo = None 148 | 149 | # The name of an image file (within the static path) to use as favicon of the 150 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 151 | # pixels large. 152 | #html_favicon = None 153 | 154 | # Add any paths that contain custom static files (such as style sheets) here, 155 | # relative to this directory. They are copied after the builtin static files, 156 | # so a file named "default.css" will overwrite the builtin "default.css". 157 | html_static_path = ['_static'] 158 | 159 | # Add any extra paths that contain custom files (such as robots.txt or 160 | # .htaccess) here, relative to this directory. These files are copied 161 | # directly to the root of the documentation. 162 | #html_extra_path = [] 163 | 164 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 165 | # using the given strftime format. 166 | #html_last_updated_fmt = '%b %d, %Y' 167 | 168 | # If true, SmartyPants will be used to convert quotes and dashes to 169 | # typographically correct entities. 170 | #html_use_smartypants = True 171 | 172 | # Custom sidebar templates, maps document names to template names. 173 | #html_sidebars = {} 174 | 175 | # Additional templates that should be rendered to pages, maps page names to 176 | # template names. 177 | #html_additional_pages = {} 178 | 179 | # If false, no module index is generated. 180 | #html_domain_indices = True 181 | 182 | # If false, no index is generated. 183 | #html_use_index = True 184 | 185 | # If true, the index is split into individual pages for each letter. 186 | #html_split_index = False 187 | 188 | # If true, links to the reST sources are added to the pages. 189 | #html_show_sourcelink = True 190 | 191 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 192 | #html_show_sphinx = True 193 | 194 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 195 | #html_show_copyright = True 196 | 197 | # If true, an OpenSearch description file will be output, and all pages will 198 | # contain a tag referring to it. The value of this option must be the 199 | # base URL from which the finished HTML is served. 200 | #html_use_opensearch = '' 201 | 202 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 203 | #html_file_suffix = None 204 | 205 | # Output file base name for HTML help builder. 206 | htmlhelp_basename = '{0}doc'.format(project) 207 | 208 | 209 | # -- Options for LaTeX output --------------------------------------------- 210 | 211 | latex_elements = { 212 | # The paper size ('letterpaper' or 'a4paper'). 213 | #'papersize': 'letterpaper', 214 | 215 | # The font size ('10pt', '11pt' or '12pt'). 216 | #'pointsize': '10pt', 217 | 218 | # Additional stuff for the LaTeX preamble. 219 | #'preamble': '', 220 | } 221 | 222 | # Grouping the document tree into LaTeX files. List of tuples 223 | # (source start file, target name, title, 224 | # author, documentclass [howto, manual, or own class]). 225 | latex_documents = [ 226 | ('index', '{0}.tex'.format(project), u'{0} Documentation'.format(project), 227 | u'{}'.format(author), 'manual'), 228 | ] 229 | 230 | # The name of an image file (relative to this directory) to place at the top of 231 | # the title page. 232 | #latex_logo = None 233 | 234 | # For "manual" documents, if this is true, then toplevel headings are parts, 235 | # not chapters. 236 | #latex_use_parts = False 237 | 238 | # If true, show page references after internal links. 239 | #latex_show_pagerefs = False 240 | 241 | # If true, show URL addresses after external links. 242 | #latex_show_urls = False 243 | 244 | # Documents to append as an appendix to all manuals. 245 | #latex_appendices = [] 246 | 247 | # If false, no module index is generated. 248 | #latex_domain_indices = True 249 | 250 | 251 | # -- Options for manual page output --------------------------------------- 252 | 253 | # One entry per manual page. List of tuples 254 | # (source start file, name, description, authors, manual section). 255 | man_pages = [ 256 | ('index', project, u'{0} Documentation'.format(project), 257 | [u'{}'.format(author)], 1) 258 | ] 259 | 260 | # If true, show URL addresses after external links. 261 | #man_show_urls = False 262 | 263 | 264 | # -- Options for Texinfo output ------------------------------------------- 265 | 266 | # Grouping the document tree into Texinfo files. List of tuples 267 | # (source start file, target name, title, author, 268 | # dir menu entry, description, category) 269 | texinfo_documents = [ 270 | ('index', project, u'{0} Documentation'.format(project), 271 | u'{}'.format(author), project, 'One line description of project.', 272 | 'Miscellaneous'), 273 | ] 274 | 275 | # Documents to append as an appendix to all manuals. 276 | #texinfo_appendices = [] 277 | 278 | # If false, no module index is generated. 279 | #texinfo_domain_indices = True 280 | 281 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 282 | #texinfo_show_urls = 'footnote' 283 | 284 | # If true, do not generate a @detailmenu in the "Top" node's menu. 285 | #texinfo_no_detailmenu = False 286 | 287 | autodoc_member_order = 'bysource' 288 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. cloudify-cli documentation master file, created by 2 | sphinx-quickstart on Thu Jun 12 15:30:03 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to cloudify-dsl-parser's documentation! 7 | ======================================== 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | .. automodule:: dsl_parser.parser 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | 19 | .. automodule:: dsl_parser.tasks 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | 24 | 25 | Indices and tables 26 | ================== 27 | 28 | * :ref:`genindex` 29 | * :ref:`modindex` 30 | * :ref:`search` 31 | 32 | -------------------------------------------------------------------------------- /dsl_parser/__init__.py: -------------------------------------------------------------------------------- 1 | ######### 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | -------------------------------------------------------------------------------- /dsl_parser/constants.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | DSL_DEFINITIONS = 'dsl_definitions' 17 | DESCRIPTION = 'description' 18 | METADATA = 'metadata' 19 | NODE_TEMPLATES = 'node_templates' 20 | IMPORTS = 'imports' 21 | NODE_TYPES = 'node_types' 22 | PLUGINS = 'plugins' 23 | INTERFACES = 'interfaces' 24 | SOURCE_INTERFACES = 'source_interfaces' 25 | TARGET_INTERFACES = 'target_interfaces' 26 | WORKFLOWS = 'workflows' 27 | RELATIONSHIPS = 'relationships' 28 | PROPERTIES = 'properties' 29 | PARAMETERS = 'parameters' 30 | TYPE_HIERARCHY = 'type_hierarchy' 31 | POLICY_TRIGGERS = 'policy_triggers' 32 | POLICY_TYPES = 'policy_types' 33 | POLICIES = 'policies' 34 | GROUPS = 'groups' 35 | INPUTS = 'inputs' 36 | OUTPUTS = 'outputs' 37 | DERIVED_FROM = 'derived_from' 38 | DATA_TYPES = 'data_types' 39 | 40 | HOST_TYPE = 'cloudify.nodes.Compute' 41 | DEPENDS_ON_REL_TYPE = 'cloudify.relationships.depends_on' 42 | CONTAINED_IN_REL_TYPE = 'cloudify.relationships.contained_in' 43 | CONNECTED_TO_REL_TYPE = 'cloudify.relationships.connected_to' 44 | 45 | SCALING_POLICY = 'cloudify.policies.scaling' 46 | 47 | CENTRAL_DEPLOYMENT_AGENT = 'central_deployment_agent' 48 | HOST_AGENT = 'host_agent' 49 | PLUGIN_EXECUTOR_KEY = 'executor' 50 | PLUGIN_SOURCE_KEY = 'source' 51 | PLUGIN_INSTALL_KEY = 'install' 52 | PLUGIN_INSTALL_ARGUMENTS_KEY = 'install_arguments' 53 | PLUGIN_NAME_KEY = 'name' 54 | PLUGIN_PACKAGE_NAME = 'package_name' 55 | PLUGIN_PACKAGE_VERSION = 'package_version' 56 | PLUGIN_SUPPORTED_PLATFORM = 'supported_platform' 57 | PLUGIN_DISTRIBUTION = 'distribution' 58 | PLUGIN_DISTRIBUTION_VERSION = 'distribution_version' 59 | PLUGIN_DISTRIBUTION_RELEASE = 'distribution_release' 60 | PLUGINS_TO_INSTALL = 'plugins_to_install' 61 | DEPLOYMENT_PLUGINS_TO_INSTALL = 'deployment_plugins_to_install' 62 | WORKFLOW_PLUGINS_TO_INSTALL = 'workflow_plugins_to_install' 63 | VERSION = 'version' 64 | CLOUDIFY = 'cloudify' 65 | 66 | SCRIPT_PLUGIN_NAME = 'script' 67 | SCRIPT_PLUGIN_RUN_TASK = 'script_runner.tasks.run' 68 | SCRIPT_PLUGIN_EXECUTE_WORKFLOW_TASK = 'script_runner.tasks.execute_workflow' 69 | SCRIPT_PATH_PROPERTY = 'script_path' 70 | 71 | FUNCTION_NAME_PATH_SEPARATOR = '__sep__' 72 | 73 | NODES = 'nodes' 74 | NODE_INSTANCES = 'node_instances' 75 | 76 | IMPORT_RESOLVER_KEY = 'import_resolver' 77 | VALIDATE_DEFINITIONS_VERSION = 'validate_definitions_version' 78 | RESOLVER_IMPLEMENTATION_KEY = 'implementation' 79 | RESLOVER_PARAMETERS_KEY = 'parameters' 80 | 81 | USER_PRIMITIVE_TYPES = ['string', 'integer', 'float', 'boolean'] 82 | 83 | UNBOUNDED_LITERAL = 'UNBOUNDED' 84 | UNBOUNDED = -1 85 | 86 | SCALING_GROUPS = 'scaling_groups' 87 | -------------------------------------------------------------------------------- /dsl_parser/elements/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | PRIMITIVE_TYPES = (list, bool, int, float, long, basestring, dict) 18 | -------------------------------------------------------------------------------- /dsl_parser/elements/blueprint.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser import (constants, 17 | models) 18 | from dsl_parser.elements import (imports, 19 | misc, 20 | plugins, 21 | node_types, 22 | node_templates, 23 | relationships, 24 | workflows, 25 | policies, 26 | data_types, 27 | version as _version) 28 | from dsl_parser.framework.elements import Element 29 | from dsl_parser.framework.requirements import Value 30 | 31 | 32 | class BlueprintVersionExtractor(Element): 33 | 34 | schema = { 35 | 'tosca_definitions_version': _version.ToscaDefinitionsVersion, 36 | # here so it gets version validated 37 | 'dsl_definitions': misc.DSLDefinitions, 38 | } 39 | requires = { 40 | _version.ToscaDefinitionsVersion: ['version', 41 | Value('plan_version')] 42 | } 43 | 44 | def parse(self, version, plan_version): 45 | return { 46 | 'version': version, 47 | 'plan_version': plan_version 48 | } 49 | 50 | 51 | class BlueprintImporter(Element): 52 | 53 | schema = { 54 | 'imports': imports.ImportsLoader, 55 | } 56 | requires = { 57 | imports.ImportsLoader: ['resource_base'] 58 | } 59 | 60 | def parse(self, resource_base): 61 | return { 62 | 'merged_blueprint': self.child(imports.ImportsLoader).value, 63 | 'resource_base': resource_base 64 | } 65 | 66 | 67 | class Blueprint(Element): 68 | 69 | schema = { 70 | 'tosca_definitions_version': _version.ToscaDefinitionsVersion, 71 | 'description': misc.Description, 72 | 'imports': imports.Imports, 73 | 'dsl_definitions': misc.DSLDefinitions, 74 | 'metadata': misc.Metadata, 75 | 'inputs': misc.Inputs, 76 | 'plugins': plugins.Plugins, 77 | 'node_types': node_types.NodeTypes, 78 | 'relationships': relationships.Relationships, 79 | 'node_templates': node_templates.NodeTemplates, 80 | 'policy_types': policies.PolicyTypes, 81 | 'policy_triggers': policies.PolicyTriggers, 82 | 'groups': policies.Groups, 83 | 'policies': policies.Policies, 84 | 'workflows': workflows.Workflows, 85 | 'outputs': misc.Outputs, 86 | 'data_types': data_types.DataTypes 87 | } 88 | 89 | requires = { 90 | node_templates.NodeTemplates: ['deployment_plugins_to_install'], 91 | workflows.Workflows: ['workflow_plugins_to_install'], 92 | policies.Policies: ['scaling_groups'] 93 | } 94 | 95 | def parse(self, workflow_plugins_to_install, 96 | deployment_plugins_to_install, 97 | scaling_groups): 98 | return models.Plan({ 99 | constants.DESCRIPTION: self.child(misc.Description).value, 100 | constants.METADATA: self.child(misc.Metadata).value, 101 | constants.NODES: self.child(node_templates.NodeTemplates).value, 102 | constants.RELATIONSHIPS: self.child( 103 | relationships.Relationships).value, 104 | constants.WORKFLOWS: self.child(workflows.Workflows).value, 105 | constants.POLICY_TYPES: self.child(policies.PolicyTypes).value, 106 | constants.POLICY_TRIGGERS: 107 | self.child(policies.PolicyTriggers).value, 108 | constants.POLICIES: 109 | self.child(policies.Policies).value, 110 | constants.GROUPS: self.child(policies.Groups).value, 111 | constants.SCALING_GROUPS: scaling_groups or {}, 112 | constants.INPUTS: self.child(misc.Inputs).value, 113 | constants.OUTPUTS: self.child(misc.Outputs).value, 114 | constants.DEPLOYMENT_PLUGINS_TO_INSTALL: 115 | deployment_plugins_to_install, 116 | constants.WORKFLOW_PLUGINS_TO_INSTALL: workflow_plugins_to_install, 117 | constants.VERSION: self.child( 118 | _version.ToscaDefinitionsVersion).value 119 | }) 120 | -------------------------------------------------------------------------------- /dsl_parser/elements/data_types.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser import constants 17 | from dsl_parser import elements 18 | from dsl_parser import exceptions 19 | from dsl_parser import utils 20 | from dsl_parser.elements import types, version as _version 21 | from dsl_parser.framework.elements import ( 22 | Element, 23 | Dict, 24 | DictElement, 25 | Leaf) 26 | from dsl_parser.framework.requirements import ( 27 | Value, 28 | Requirement, 29 | sibling_predicate) 30 | 31 | 32 | class SchemaPropertyDescription(Element): 33 | 34 | schema = Leaf(type=str) 35 | 36 | 37 | class SchemaPropertyType(Element): 38 | 39 | schema = Leaf(type=str) 40 | 41 | # requires will be modified later. 42 | requires = {} 43 | 44 | provides = ['component_types'] 45 | 46 | def validate(self, data_type, **kwargs): 47 | if self.initial_value and self.initial_value not in \ 48 | constants.USER_PRIMITIVE_TYPES and not data_type: 49 | raise exceptions.DSLParsingLogicException( 50 | exceptions.ERROR_UNKNOWN_TYPE, 51 | "Illegal type name '{0}'".format(self.initial_value)) 52 | 53 | def calculate_provided(self, component_types, **kwargs): 54 | return {'component_types': component_types} 55 | 56 | 57 | class SchemaPropertyDefault(Element): 58 | 59 | schema = Leaf(type=elements.PRIMITIVE_TYPES) 60 | 61 | requires = { 62 | SchemaPropertyType: [Requirement('component_types', 63 | required=False, 64 | predicate=sibling_predicate)] 65 | } 66 | 67 | def parse(self, component_types): 68 | type_name = self.sibling(SchemaPropertyType).value 69 | initial_value = self.initial_value 70 | if initial_value is None: 71 | if type_name is not None \ 72 | and type_name not in constants.USER_PRIMITIVE_TYPES: 73 | initial_value = {} 74 | else: 75 | return None 76 | component_types = component_types or {} 77 | prop_name = self.ancestor(SchemaProperty).name 78 | undefined_property_error = 'Undefined property {1} in default' \ 79 | ' value of type {0}' 80 | current_type = self.ancestor(Schema).parent().name 81 | return utils.parse_value( 82 | value=initial_value, 83 | type_name=type_name, 84 | data_types=component_types, 85 | undefined_property_error_message=undefined_property_error, 86 | missing_property_error_message='illegal state', 87 | node_name=current_type, 88 | path=[prop_name], 89 | raise_on_missing_property=False 90 | ) 91 | 92 | 93 | class SchemaPropertyRequired(Element): 94 | 95 | schema = Leaf(type=bool) 96 | 97 | requires = { 98 | _version.ToscaDefinitionsVersion: ['version'], 99 | 'inputs': ['validate_version'] 100 | } 101 | 102 | def validate(self, version, validate_version): 103 | if validate_version: 104 | self.validate_version(version, (1, 2)) 105 | 106 | 107 | class SchemaProperty(Element): 108 | 109 | schema = { 110 | 'required': SchemaPropertyRequired, 111 | 'default': SchemaPropertyDefault, 112 | 'description': SchemaPropertyDescription, 113 | 'type': SchemaPropertyType, 114 | } 115 | 116 | def parse(self): 117 | result = dict((child.name, child.value) for child in self.children() 118 | if child.defined) 119 | if isinstance(self.parent(), SchemaWithInitialDefault): 120 | initial_default = self.child(SchemaPropertyDefault).initial_value 121 | result.update({ 122 | 'initial_default': initial_default 123 | }) 124 | return result 125 | 126 | 127 | class Schema(DictElement): 128 | 129 | schema = Dict(type=SchemaProperty) 130 | 131 | 132 | class SchemaWithInitialDefault(Schema): 133 | pass 134 | 135 | 136 | class DataTypeDescription(Element): 137 | 138 | schema = Leaf(type=str) 139 | 140 | 141 | class DataTypeVersion(Element): 142 | 143 | schema = Leaf(type=str) 144 | 145 | 146 | class DataType(types.Type): 147 | 148 | schema = { 149 | 'properties': SchemaWithInitialDefault, 150 | 'description': DataTypeDescription, 151 | 'derived_from': types.DataTypeDerivedFrom, 152 | 'version': DataTypeVersion 153 | } 154 | 155 | requires = { 156 | 'self': [ 157 | Requirement('component_types', 158 | multiple_results=True, 159 | required=False, 160 | predicate=lambda source, target: 161 | target.name in source.direct_component_types), 162 | Value('super_type', 163 | predicate=types.derived_from_predicate, 164 | required=False) 165 | ] 166 | } 167 | 168 | provides = ['component_types'] 169 | 170 | def __init__(self, *args, **kwargs): 171 | super(DataType, self).__init__(*args, **kwargs) 172 | self._direct_component_types = None 173 | self.component_types = {} 174 | 175 | def validate(self, **kwargs): 176 | if self.name in constants.USER_PRIMITIVE_TYPES: 177 | raise exceptions.DSLParsingLogicException( 178 | exceptions.ERROR_INVALID_TYPE_NAME, 179 | 'Can\'t redefine primitive type {0}'.format(self.name) 180 | ) 181 | 182 | def parse(self, super_type, component_types): 183 | merged_component_types = {} 184 | for component in component_types: 185 | merged_component_types.update(component) 186 | self.component_types.update(merged_component_types) 187 | result = self.build_dict_result() 188 | if constants.PROPERTIES not in result: 189 | result[constants.PROPERTIES] = {} 190 | if super_type: 191 | result[constants.PROPERTIES] = utils.merge_schemas( 192 | overridden_schema=super_type.get('properties', {}), 193 | overriding_schema=result.get('properties', {}), 194 | data_types=merged_component_types) 195 | self.fix_properties(result) 196 | self.component_types[self.name] = result 197 | return result 198 | 199 | def calculate_provided(self, **kwargs): 200 | return {'component_types': self.component_types} 201 | 202 | @property 203 | def direct_component_types(self): 204 | if self._direct_component_types is None: 205 | direct_component_types = set() 206 | parent_type = self.initial_value.get(constants.DERIVED_FROM) 207 | if parent_type: 208 | direct_component_types.add(parent_type) 209 | for desc in self.descendants(SchemaPropertyType): 210 | direct_component_types.add(desc.initial_value) 211 | self._direct_component_types = direct_component_types 212 | return self._direct_component_types 213 | 214 | 215 | class DataTypes(types.Types): 216 | 217 | schema = Dict(type=DataType) 218 | 219 | requires = { 220 | _version.ToscaDefinitionsVersion: ['version'], 221 | 'inputs': ['validate_version'] 222 | } 223 | 224 | def validate(self, version, validate_version): 225 | if validate_version: 226 | self.validate_version(version, (1, 2)) 227 | 228 | 229 | # source: element describing data_type name 230 | # target: data_type 231 | def _has_type(source, target): 232 | return source.initial_value == target.name 233 | 234 | 235 | SchemaPropertyType.requires[DataType] = [ 236 | Value('data_type', predicate=_has_type, required=False), 237 | Requirement('component_types', predicate=_has_type, required=False) 238 | ] 239 | -------------------------------------------------------------------------------- /dsl_parser/elements/misc.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser import elements 17 | from dsl_parser.elements import ( 18 | data_types, 19 | version as element_version) 20 | from dsl_parser.framework.elements import (DictElement, 21 | Element, 22 | Leaf, 23 | Dict) 24 | 25 | 26 | class OutputDescription(Element): 27 | 28 | schema = Leaf(type=str) 29 | 30 | 31 | class OutputValue(Element): 32 | 33 | required = True 34 | schema = Leaf(type=elements.PRIMITIVE_TYPES) 35 | 36 | 37 | class Output(Element): 38 | 39 | schema = { 40 | 'description': OutputDescription, 41 | 'value': OutputValue 42 | } 43 | 44 | 45 | class Outputs(DictElement): 46 | 47 | schema = Dict(type=Output) 48 | 49 | 50 | class Inputs(data_types.Schema): 51 | pass 52 | 53 | 54 | class DSLDefinitions(Element): 55 | 56 | schema = Leaf(type=[dict, list]) 57 | requires = { 58 | element_version.ToscaDefinitionsVersion: ['version'], 59 | 'inputs': ['validate_version'] 60 | } 61 | 62 | def validate(self, version, validate_version): 63 | if validate_version: 64 | self.validate_version(version, (1, 2)) 65 | 66 | 67 | class Description(Element): 68 | 69 | schema = Leaf(type=str) 70 | 71 | requires = { 72 | element_version.ToscaDefinitionsVersion: ['version'], 73 | 'inputs': ['validate_version'] 74 | } 75 | 76 | def validate(self, version, validate_version): 77 | if validate_version: 78 | self.validate_version(version, (1, 2)) 79 | 80 | 81 | class Metadata(Element): 82 | 83 | schema = Leaf(type=dict) 84 | -------------------------------------------------------------------------------- /dsl_parser/elements/node_types.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | from dsl_parser import (constants, 18 | utils) 19 | from dsl_parser.interfaces import interfaces_parser 20 | from dsl_parser.elements import (operation, 21 | data_types as _data_types, 22 | types) 23 | from dsl_parser.framework import requirements 24 | from dsl_parser.framework.elements import Dict 25 | 26 | 27 | class NodeType(types.Type): 28 | 29 | schema = { 30 | 'derived_from': types.TypeDerivedFrom, 31 | 'interfaces': operation.NodeTypeInterfaces, 32 | 'properties': _data_types.SchemaWithInitialDefault, 33 | } 34 | requires = { 35 | 'self': [requirements.Value('super_type', 36 | predicate=types.derived_from_predicate, 37 | required=False)], 38 | _data_types.DataTypes: [requirements.Value('data_types')] 39 | } 40 | 41 | def parse(self, super_type, data_types): 42 | node_type = self.build_dict_result() 43 | if not node_type.get('derived_from'): 44 | node_type.pop('derived_from', None) 45 | if super_type: 46 | node_type[constants.PROPERTIES] = utils.merge_schemas( 47 | overridden_schema=super_type.get('properties', {}), 48 | overriding_schema=node_type.get('properties', {}), 49 | data_types=data_types) 50 | node_type[constants.INTERFACES] = interfaces_parser. \ 51 | merge_node_type_interfaces( 52 | overridden_interfaces=super_type[constants.INTERFACES], 53 | overriding_interfaces=node_type[constants.INTERFACES]) 54 | node_type[constants.TYPE_HIERARCHY] = self.create_type_hierarchy( 55 | super_type) 56 | self.fix_properties(node_type) 57 | return node_type 58 | 59 | 60 | class NodeTypes(types.Types): 61 | 62 | schema = Dict(type=NodeType) 63 | provides = ['host_types'] 64 | 65 | def calculate_provided(self): 66 | return { 67 | 'host_types': self._types_derived_from(constants.HOST_TYPE) 68 | } 69 | 70 | def _types_derived_from(self, derived_from): 71 | return set(type_name for type_name, _type in self.value.items() 72 | if derived_from in _type[constants.TYPE_HIERARCHY]) 73 | -------------------------------------------------------------------------------- /dsl_parser/elements/plugins.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser import (constants, 17 | exceptions) 18 | from dsl_parser.elements import version as element_version 19 | from dsl_parser.framework.elements import (DictElement, 20 | Element, 21 | Leaf, 22 | Dict) 23 | 24 | 25 | class PluginExecutor(Element): 26 | 27 | required = True 28 | schema = Leaf(type=str) 29 | 30 | def validate(self): 31 | if self.initial_value not in [constants.CENTRAL_DEPLOYMENT_AGENT, 32 | constants.HOST_AGENT]: 33 | raise exceptions.DSLParsingLogicException( 34 | 18, "Plugin '{0}' has an illegal " 35 | "'{1}' value '{2}'; value " 36 | "must be either '{3}' or '{4}'" 37 | .format(self.ancestor(Plugin).name, 38 | self.name, 39 | self.initial_value, 40 | constants.CENTRAL_DEPLOYMENT_AGENT, 41 | constants.HOST_AGENT)) 42 | 43 | 44 | class PluginSource(Element): 45 | 46 | schema = Leaf(type=str) 47 | 48 | 49 | class PluginInstall(Element): 50 | 51 | schema = Leaf(type=bool) 52 | 53 | def parse(self): 54 | value = self.initial_value 55 | return value if value is not None else True 56 | 57 | 58 | class PluginVersionValidatedElement(Element): 59 | 60 | schema = Leaf(type=str) 61 | requires = { 62 | element_version.ToscaDefinitionsVersion: ['version'], 63 | 'inputs': ['validate_version'] 64 | } 65 | min_version = None 66 | 67 | def validate(self, version, validate_version): 68 | if not self.min_version: 69 | raise RuntimeError('Illegal state, please specify min_version') 70 | if validate_version: 71 | self.validate_version(version, self.min_version) 72 | 73 | 74 | class PluginInstallArguments(PluginVersionValidatedElement): 75 | min_version = (1, 1) 76 | 77 | 78 | class PluginPackageName(PluginVersionValidatedElement): 79 | min_version = (1, 2) 80 | 81 | 82 | class PluginPackageVersion(PluginVersionValidatedElement): 83 | min_version = (1, 2) 84 | 85 | 86 | class PluginSupportedPlatform(PluginVersionValidatedElement): 87 | min_version = (1, 2) 88 | 89 | 90 | class PluginDistribution(PluginVersionValidatedElement): 91 | min_version = (1, 2) 92 | 93 | 94 | class PluginDistributionVersion(PluginVersionValidatedElement): 95 | min_version = (1, 2) 96 | 97 | 98 | class PluginDistributionRelease(PluginVersionValidatedElement): 99 | min_version = (1, 2) 100 | 101 | 102 | class Plugin(DictElement): 103 | 104 | schema = { 105 | 'source': PluginSource, 106 | 'executor': PluginExecutor, 107 | 'install': PluginInstall, 108 | 'install_arguments': PluginInstallArguments, 109 | 'package_name': PluginPackageName, 110 | 'package_version': PluginPackageVersion, 111 | 'supported_platform': PluginSupportedPlatform, 112 | 'distribution': PluginDistribution, 113 | 'distribution_version': PluginDistributionVersion, 114 | 'distribution_release': PluginDistributionRelease 115 | } 116 | 117 | def validate(self): 118 | if self.child(PluginInstall).value: 119 | if not (self.child(PluginSource).value or 120 | self.child(PluginPackageName).value): 121 | raise exceptions.DSLParsingLogicException( 122 | 50, 123 | "Plugin '{0}' needs to be installed, " 124 | "but does not declare a source or package_name property" 125 | .format(self.name)) 126 | 127 | def parse(self): 128 | result = super(Plugin, self).parse() 129 | result['name'] = self.name 130 | return result 131 | 132 | 133 | class Plugins(DictElement): 134 | 135 | schema = Dict(type=Plugin) 136 | -------------------------------------------------------------------------------- /dsl_parser/elements/relationships.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser import (constants, 17 | utils) 18 | from dsl_parser.interfaces import interfaces_parser 19 | from dsl_parser.elements import (data_types as _data_types, 20 | operation, 21 | plugins as _plugins, 22 | types) 23 | from dsl_parser.framework.requirements import Value, Requirement 24 | from dsl_parser.framework.elements import Dict 25 | 26 | 27 | class Relationship(types.Type): 28 | 29 | schema = { 30 | 'derived_from': types.RelationshipDerivedFrom, 31 | 'properties': _data_types.SchemaWithInitialDefault, 32 | 'source_interfaces': operation.NodeTypeInterfaces, 33 | 'target_interfaces': operation.NodeTypeInterfaces, 34 | } 35 | requires = { 36 | 'inputs': [Requirement('resource_base', required=False)], 37 | _plugins.Plugins: [Value('plugins')], 38 | 'self': [Value('super_type', 39 | predicate=types.derived_from_predicate, 40 | required=False)], 41 | _data_types.DataTypes: [Value('data_types')] 42 | } 43 | 44 | def parse(self, super_type, plugins, resource_base, data_types): 45 | relationship_type = self.build_dict_result() 46 | if not relationship_type.get('derived_from'): 47 | relationship_type.pop('derived_from', None) 48 | relationship_type_name = self.name 49 | 50 | if super_type: 51 | relationship_type[constants.PROPERTIES] = utils.merge_schemas( 52 | overridden_schema=super_type.get('properties', {}), 53 | overriding_schema=relationship_type.get('properties', {}), 54 | data_types=data_types) 55 | for interfaces in [constants.SOURCE_INTERFACES, 56 | constants.TARGET_INTERFACES]: 57 | relationship_type[interfaces] = interfaces_parser. \ 58 | merge_relationship_type_interfaces( 59 | overriding_interfaces=relationship_type[interfaces], 60 | overridden_interfaces=super_type[interfaces]) 61 | 62 | _validate_relationship_fields( 63 | rel_obj=relationship_type, 64 | plugins=plugins, 65 | rel_name=relationship_type_name, 66 | resource_base=resource_base) 67 | relationship_type['name'] = relationship_type_name 68 | relationship_type[ 69 | constants.TYPE_HIERARCHY] = self.create_type_hierarchy(super_type) 70 | self.fix_properties(relationship_type) 71 | return relationship_type 72 | 73 | 74 | class Relationships(types.Types): 75 | 76 | schema = Dict(type=Relationship) 77 | 78 | 79 | def _validate_relationship_fields(rel_obj, plugins, rel_name, resource_base): 80 | for interfaces in [constants.SOURCE_INTERFACES, 81 | constants.TARGET_INTERFACES]: 82 | for interface_name, interface in rel_obj[interfaces].items(): 83 | operation.process_interface_operations( 84 | interface=interface, 85 | plugins=plugins, 86 | error_code=19, 87 | partial_error_message="Relationship '{0}'".format(rel_name), 88 | resource_bases=resource_base) 89 | -------------------------------------------------------------------------------- /dsl_parser/elements/scalable.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser import (exceptions, 17 | constants, 18 | functions) 19 | from dsl_parser.framework.elements import (DictElement, 20 | Element, 21 | Leaf) 22 | 23 | 24 | class Instances(Element): 25 | 26 | schema = Leaf(type=(int, dict)) 27 | default_value = None 28 | 29 | def validate(self): 30 | value = self.initial_value 31 | if isinstance(value, dict): 32 | function = functions.parse(value) 33 | if not isinstance(function, functions.Function): 34 | raise exceptions.DSLParsingLogicException( 35 | exceptions.ERROR_INVALID_DICT_VALUE, 36 | '{0} should be a valid intrinsic function or a value.' 37 | .format(self.name)) 38 | return True 39 | return False 40 | 41 | def parse(self): 42 | if self.initial_value is None: 43 | return self.default_value 44 | return self.initial_value 45 | 46 | 47 | class NonNegativeInstances(Instances): 48 | 49 | def validate(self): 50 | if super(NonNegativeInstances, self).validate(): 51 | return 52 | if self.initial_value is not None and self.initial_value < 0: 53 | raise exceptions.DSLParsingLogicException( 54 | exceptions.ERROR_INVALID_INSTANCES, 55 | '{0} should be a non negative value.' 56 | .format(self.name)) 57 | 58 | 59 | class DefaultInstances(NonNegativeInstances): 60 | default_value = 1 61 | 62 | 63 | class MinInstances(NonNegativeInstances): 64 | default_value = 0 65 | 66 | 67 | class MaxInstances(Instances): 68 | 69 | schema = Leaf(type=(int, basestring, dict)) 70 | default_value = constants.UNBOUNDED 71 | 72 | def validate(self): 73 | if super(MaxInstances, self).validate(): 74 | return 75 | value = self.initial_value 76 | if value is None: 77 | return 78 | if isinstance(value, basestring): 79 | if value != constants.UNBOUNDED_LITERAL: 80 | raise exceptions.DSLParsingLogicException( 81 | exceptions.ERROR_INVALID_LITERAL_INSTANCES, 82 | 'The only valid string for {0} is {1}.' 83 | .format(self.name, 84 | constants.UNBOUNDED_LITERAL)) 85 | return 86 | if value == constants.UNBOUNDED: 87 | return 88 | if value < 1: 89 | raise exceptions.DSLParsingLogicException( 90 | exceptions.ERROR_INVALID_INSTANCES, 91 | '{0} should be a positive value.' 92 | .format(self.name)) 93 | 94 | def parse(self): 95 | if self.initial_value == constants.UNBOUNDED_LITERAL: 96 | return constants.UNBOUNDED 97 | return super(MaxInstances, self).parse() 98 | 99 | 100 | class Properties(DictElement): 101 | 102 | DEFAULT = { 103 | 'min_instances': MinInstances.default_value, 104 | 'max_instances': MaxInstances.default_value, 105 | 'default_instances': DefaultInstances.default_value, 106 | 'current_instances': DefaultInstances.default_value, 107 | 'planned_instances': DefaultInstances.default_value 108 | } 109 | 110 | schema = { 111 | 'min_instances': MinInstances, 112 | 'max_instances': MaxInstances, 113 | 'default_instances': DefaultInstances 114 | } 115 | 116 | def validate(self): 117 | result = self.build_dict_result() 118 | min_instances = result.get('min_instances', 119 | self.DEFAULT['min_instances']) 120 | max_instances = result.get('max_instances', 121 | self.DEFAULT['max_instances']) 122 | default_instances = result.get('default_instances', 123 | self.DEFAULT['default_instances']) 124 | check_min = not isinstance(min_instances, dict) 125 | check_max = all([not isinstance(max_instances, dict), 126 | max_instances != constants.UNBOUNDED]) 127 | check_default = not isinstance(default_instances, dict) 128 | if check_min and check_default and default_instances < min_instances: 129 | raise exceptions.DSLParsingLogicException( 130 | exceptions.ERROR_INVALID_INSTANCES, 131 | 'default_instances ({0}) cannot be smaller than ' 132 | 'min_instances ({1})' 133 | .format(default_instances, min_instances)) 134 | if not check_max: 135 | return 136 | if check_min and min_instances > max_instances: 137 | raise exceptions.DSLParsingLogicException( 138 | exceptions.ERROR_INVALID_INSTANCES, 139 | 'min_instances ({0}) cannot be greater than ' 140 | 'max_instances ({1})' 141 | .format(min_instances, max_instances)) 142 | if check_default and default_instances > max_instances: 143 | raise exceptions.DSLParsingLogicException( 144 | exceptions.ERROR_INVALID_INSTANCES, 145 | 'default_instances ({0}) cannot be greater than ' 146 | 'max_instances ({1})' 147 | .format(default_instances, max_instances)) 148 | 149 | def parse(self, **kwargs): 150 | result = self.build_dict_result() 151 | result['default_instances'] = result.get( 152 | 'default_instances', self.DEFAULT['default_instances']) 153 | result['min_instances'] = result.get( 154 | 'min_instances', self.DEFAULT['min_instances']) 155 | result['max_instances'] = result.get( 156 | 'max_instances', self.DEFAULT['max_instances']) 157 | result['current_instances'] = result['default_instances'] 158 | result['planned_instances'] = result['default_instances'] 159 | return result 160 | -------------------------------------------------------------------------------- /dsl_parser/elements/types.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser import exceptions 17 | from dsl_parser.framework.elements import (DictElement, 18 | Element, 19 | Leaf) 20 | 21 | 22 | class Types(DictElement): 23 | pass 24 | 25 | 26 | class Type(Element): 27 | 28 | def create_type_hierarchy(self, super_type): 29 | if super_type: 30 | type_hierarchy = super_type['type_hierarchy'][:] 31 | else: 32 | type_hierarchy = [] 33 | type_hierarchy.append(self.name) 34 | return type_hierarchy 35 | 36 | @staticmethod 37 | def fix_properties(value): 38 | for key, value in value['properties'].iteritems(): 39 | value.pop('initial_default', None) 40 | 41 | 42 | class DerivedFrom(Element): 43 | 44 | schema = Leaf(type=str) 45 | descriptor = '' 46 | 47 | def validate(self): 48 | if self.initial_value is None: 49 | return 50 | 51 | if self.initial_value not in self.ancestor(Types).initial_value: 52 | raise exceptions.DSLParsingLogicException( 53 | exceptions.ERROR_UNKNOWN_TYPE, 54 | "Missing definition for {0} '{1}' which is declared as " 55 | "derived by {0} '{2}'" 56 | .format(self.descriptor, 57 | self.initial_value, 58 | self.ancestor(Type).name)) 59 | 60 | 61 | class RelationshipDerivedFrom(DerivedFrom): 62 | 63 | descriptor = 'relationship' 64 | 65 | 66 | class TypeDerivedFrom(DerivedFrom): 67 | 68 | descriptor = 'type' 69 | 70 | 71 | class DataTypeDerivedFrom(DerivedFrom): 72 | 73 | descriptor = 'data type' 74 | 75 | 76 | def derived_from_predicate(source, target): 77 | try: 78 | derived_from = source.child(DerivedFrom).initial_value 79 | return derived_from and derived_from == target.name 80 | except exceptions.DSLParsingElementMatchException: 81 | return False 82 | -------------------------------------------------------------------------------- /dsl_parser/elements/version.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | from dsl_parser import (version as _version, 16 | exceptions, 17 | models) 18 | from dsl_parser.framework.elements import Element, Leaf 19 | 20 | 21 | class ToscaDefinitionsVersion(Element): 22 | 23 | schema = Leaf(type=str) 24 | provides = ['version'] 25 | 26 | def validate(self): 27 | if self.initial_value is None: 28 | raise exceptions.DSLParsingLogicException( 29 | 27, '{0} field must appear in the main blueprint file'.format( 30 | _version.VERSION)) 31 | 32 | _version.validate_dsl_version(self.initial_value) 33 | 34 | def parse(self): 35 | return models.Version(_version.process_dsl_version(self.initial_value)) 36 | 37 | def calculate_provided(self): 38 | return { 39 | 'version': _version.parse_dsl_version(self.initial_value) 40 | } 41 | -------------------------------------------------------------------------------- /dsl_parser/elements/workflows.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser.elements import (data_types, 17 | plugins as _plugins, 18 | operation) 19 | from dsl_parser.framework.requirements import Value, Requirement 20 | from dsl_parser.framework.elements import (DictElement, 21 | Element, 22 | Leaf, 23 | Dict) 24 | 25 | 26 | class WorkflowMapping(Element): 27 | 28 | required = True 29 | schema = Leaf(type=str) 30 | 31 | 32 | class Workflow(Element): 33 | 34 | required = True 35 | schema = [ 36 | Leaf(type=str), 37 | { 38 | 'mapping': WorkflowMapping, 39 | 'parameters': data_types.Schema 40 | } 41 | ] 42 | requires = { 43 | 'inputs': [Requirement('resource_base', required=False)], 44 | _plugins.Plugins: [Value('plugins')] 45 | } 46 | 47 | def parse(self, plugins, resource_base): 48 | if isinstance(self.initial_value, str): 49 | operation_content = {'mapping': self.initial_value, 50 | 'parameters': {}} 51 | else: 52 | operation_content = self.build_dict_result() 53 | return operation.process_operation( 54 | plugins=plugins, 55 | operation_name=self.name, 56 | operation_content=operation_content, 57 | error_code=21, 58 | partial_error_message='', 59 | resource_bases=resource_base, 60 | is_workflows=True) 61 | 62 | 63 | class Workflows(DictElement): 64 | 65 | schema = Dict(type=Workflow) 66 | requires = { 67 | _plugins.Plugins: [Value('plugins')] 68 | } 69 | provides = ['workflow_plugins_to_install'] 70 | 71 | def calculate_provided(self, plugins): 72 | workflow_plugins = [] 73 | workflow_plugin_names = set() 74 | for workflow, op_struct in self.value.items(): 75 | if op_struct['plugin'] not in workflow_plugin_names: 76 | plugin_name = op_struct['plugin'] 77 | workflow_plugins.append(plugins[plugin_name]) 78 | workflow_plugin_names.add(plugin_name) 79 | return { 80 | 'workflow_plugins_to_install': workflow_plugins 81 | } 82 | -------------------------------------------------------------------------------- /dsl_parser/exceptions.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | class MissingRequiredInputError(Exception): 18 | """ 19 | An error raised when a deployment is created and a required input 20 | was not specified on its creation. 21 | """ 22 | def __init__(self, *args, **kwargs): 23 | super(MissingRequiredInputError, self).__init__(*args, **kwargs) 24 | 25 | 26 | class UnknownInputError(Exception): 27 | """ 28 | An error raised when an unknown input is specified on deployment creation. 29 | """ 30 | def __init__(self, *args, **kwargs): 31 | super(UnknownInputError, self).__init__(*args, **kwargs) 32 | 33 | 34 | class FunctionEvaluationError(Exception): 35 | """ 36 | An error raised when an intrinsic function was unable to get evaluated. 37 | """ 38 | def __init__(self, func_name, message=None): 39 | msg = 'Unable to evaluate {0} function'.format(func_name) 40 | if message: 41 | msg = '{0}: {1}'.format(msg, message) 42 | super(FunctionEvaluationError, self).__init__(msg) 43 | 44 | 45 | class UnknownSecretError(Exception): 46 | """ 47 | An error raised when a deployment is created and a required secret 48 | does not exist. 49 | """ 50 | def __init__(self, *args, **kwargs): 51 | super(UnknownSecretError, self).__init__(*args, **kwargs) 52 | 53 | 54 | class UnsupportedGetSecretError(Exception): 55 | """ 56 | An error raised when a deployment is created and the unsupported get_secret 57 | intrinsic function appears in the blueprint 58 | """ 59 | def __init__(self, *args, **kwargs): 60 | super(UnsupportedGetSecretError, self).__init__(*args, **kwargs) 61 | 62 | 63 | class DSLParsingException(Exception): 64 | def __init__(self, err_code, *args): 65 | super(DSLParsingException, self).__init__(*args) 66 | self.err_code = err_code 67 | self.element = None 68 | 69 | def __str__(self): 70 | message = super(DSLParsingException, self).__str__() 71 | if not self.element: 72 | return message 73 | return '{0} {1}'.format(message, self.element) 74 | 75 | 76 | class DSLParsingLogicException(DSLParsingException): 77 | pass 78 | 79 | 80 | class DSLParsingFormatException(DSLParsingException): 81 | pass 82 | 83 | 84 | class DSLParsingInputTypeException(DSLParsingException): 85 | pass 86 | 87 | 88 | class DSLParsingElementMatchException(DSLParsingException): 89 | """ 90 | An error raised when element child/ancestor lookup fails (element not 91 | found) 92 | """ 93 | pass 94 | 95 | 96 | class DSLParsingSchemaAPIException(DSLParsingException): 97 | """ 98 | An error raised due to invalid usage of framework 99 | """ 100 | pass 101 | 102 | 103 | class IllegalConnectedToConnectionType(Exception): 104 | pass 105 | 106 | 107 | class UnsupportedRelationship(Exception): 108 | pass 109 | 110 | 111 | class IllegalAllToOneState(Exception): 112 | pass 113 | 114 | 115 | class UnsupportedAllToOneInGroup(Exception): 116 | pass 117 | 118 | 119 | ERROR_CODE_CYCLE = 100 120 | ERROR_CODE_ILLEGAL_VALUE_ACCESS = 101 121 | ERROR_CODE_DSL_DEFINITIONS_VERSION_MISMATCH = 102 122 | ERROR_UNKNOWN_TYPE = 103 123 | ERROR_INVALID_TYPE_NAME = 104 124 | ERROR_VALUE_DOES_NOT_MATCH_TYPE = 105 125 | ERROR_INVALID_CHARS = 108 126 | ERROR_GROUP_CYCLE = 200 127 | ERROR_MULTIPLE_GROUPS = 201 128 | ERROR_NON_CONTAINED_GROUP_MEMBERS = 202 129 | ERROR_UNSUPPORTED_POLICY = 204 130 | ERROR_NON_GROUP_TARGET = 205 131 | ERROR_NO_TARGETS = 206 132 | ERROR_INVALID_INSTANCES = 207 133 | ERROR_INVALID_LITERAL_INSTANCES = 208 134 | ERROR_INSTANCES_DEPLOY_AND_CAPABILITIES = 209 135 | ERROR_INVALID_DICT_VALUE = 210 136 | ERROR_GROUP_AND_NODE_TEMPLATE_SAME_NAME = 211 137 | -------------------------------------------------------------------------------- /dsl_parser/framework/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | -------------------------------------------------------------------------------- /dsl_parser/framework/elements.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import copy 17 | from StringIO import StringIO 18 | 19 | from dsl_parser import exceptions 20 | from dsl_parser import holder 21 | from dsl_parser import version as _version 22 | 23 | 24 | class Unparsed(object): 25 | pass 26 | 27 | 28 | UNPARSED = Unparsed() 29 | 30 | 31 | class ElementType(object): 32 | 33 | def __init__(self, type): 34 | if isinstance(type, list): 35 | type = tuple(type) 36 | self.type = type 37 | 38 | 39 | class Leaf(ElementType): 40 | pass 41 | 42 | 43 | class Dict(ElementType): 44 | pass 45 | 46 | 47 | class List(ElementType): 48 | pass 49 | 50 | 51 | class Element(object): 52 | 53 | schema = None 54 | required = False 55 | requires = {} 56 | provides = [] 57 | 58 | def __init__(self, context, initial_value, name=None): 59 | self.context = context 60 | initial_value = holder.Holder.of(initial_value) 61 | self.initial_value_holder = initial_value 62 | self._initial_value = initial_value.restore() 63 | self.start_line = initial_value.start_line 64 | self.start_column = initial_value.start_column 65 | self.end_line = initial_value.end_line 66 | self.end_column = initial_value.end_column 67 | self.filename = initial_value.filename 68 | name = holder.Holder.of(name) 69 | self.name = name.restore() 70 | self.name_start_line = name.start_line 71 | self.name_start_column = name.start_column 72 | self.name_end_line = name.end_line 73 | self.name_end_column = name.end_column 74 | self._parsed_value = UNPARSED 75 | self._provided = None 76 | 77 | def __str__(self): 78 | message = StringIO() 79 | if self.filename: 80 | message.write('\n in: {0}'.format(self.filename)) 81 | if self.name_start_line >= 0: 82 | message.write('\n in line: {0}, column: {1}' 83 | .format(self.name_start_line + 1, 84 | self.name_start_column)) 85 | elif self.start_line >= 0: 86 | message.write('\n in line {0}, column {1}' 87 | .format(self.start_line + 1, self.start_column)) 88 | message.write('\n path: {0}'.format(self.path)) 89 | message.write('\n value: {0}'.format(self._initial_value)) 90 | 91 | return message.getvalue() 92 | 93 | def validate(self, **kwargs): 94 | pass 95 | 96 | def parse(self, **kwargs): 97 | return self.initial_value 98 | 99 | @property 100 | def index(self): 101 | """Alias name for list based elements""" 102 | return self.name 103 | 104 | @property 105 | def initial_value(self): 106 | return copy.deepcopy(self._initial_value) 107 | 108 | @property 109 | def value(self): 110 | if self._parsed_value == UNPARSED: 111 | raise exceptions.DSLParsingSchemaAPIException( 112 | exceptions.ERROR_CODE_ILLEGAL_VALUE_ACCESS, 113 | 'Cannot access element value before parsing') 114 | return copy.deepcopy(self._parsed_value) 115 | 116 | @value.setter 117 | def value(self, val): 118 | self._parsed_value = val 119 | 120 | def calculate_provided(self, **kwargs): 121 | return {} 122 | 123 | @property 124 | def provided(self): 125 | return copy.deepcopy(self._provided) 126 | 127 | @provided.setter 128 | def provided(self, value): 129 | self._provided = value 130 | 131 | @property 132 | def path(self): 133 | elements = [str(e.name) for e in self.context.ancestors_iter(self)] 134 | if elements: 135 | elements.pop() 136 | elements.reverse() 137 | elements.append(str(self.name)) 138 | return '.'.join(elements) 139 | 140 | @property 141 | def defined(self): 142 | return self.value is not None or self.start_line is not None 143 | 144 | def parent(self): 145 | return next(self.context.ancestors_iter(self)) 146 | 147 | def ancestor(self, element_type): 148 | matches = [e for e in self.context.ancestors_iter(self) 149 | if isinstance(e, element_type)] 150 | if not matches: 151 | raise exceptions.DSLParsingElementMatchException( 152 | "No matches found for '{0}'".format(element_type)) 153 | if len(matches) > 1: 154 | raise exceptions.DSLParsingElementMatchException( 155 | "Multiple matches found for '{0}'".format(element_type)) 156 | return matches[0] 157 | 158 | def descendants(self, element_type): 159 | return [e for e in self.context.descendants(self) 160 | if isinstance(e, element_type)] 161 | 162 | def child(self, element_type): 163 | matches = [e for e in self.context.child_elements_iter(self) 164 | if isinstance(e, element_type)] 165 | if not matches: 166 | raise exceptions.DSLParsingElementMatchException( 167 | "No matches found for '{0}'".format(element_type)) 168 | if len(matches) > 1: 169 | raise exceptions.DSLParsingElementMatchException( 170 | "Multiple matches found for '{0}'".format(element_type)) 171 | return matches[0] 172 | 173 | def build_dict_result(self): 174 | return dict((child.name, child.value) 175 | for child in self.context.child_elements_iter(self)) 176 | 177 | def children(self): 178 | return list(self.context.child_elements_iter(self)) 179 | 180 | def sibling(self, element_type): 181 | return self.parent().child(element_type) 182 | 183 | def validate_version(self, version, min_version): 184 | if self.initial_value is not None and version < min_version: 185 | raise exceptions.DSLParsingLogicException( 186 | exceptions.ERROR_CODE_DSL_DEFINITIONS_VERSION_MISMATCH, 187 | '{0} not supported in version {1}, it was added in {2}'.format( 188 | self.name, 189 | _version.version_description(version), 190 | _version.version_description(min_version) 191 | ) 192 | ) 193 | 194 | 195 | class DictElement(Element): 196 | 197 | def parse(self, **kwargs): 198 | return self.build_dict_result() 199 | 200 | 201 | class UnknownSchema(object): 202 | pass 203 | 204 | 205 | class UnknownElement(Element): 206 | 207 | schema = UnknownSchema() 208 | -------------------------------------------------------------------------------- /dsl_parser/framework/requirements.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | class Requirement(object): 18 | 19 | def __init__(self, 20 | name, 21 | parsed=False, 22 | multiple_results=False, 23 | required=True, 24 | predicate=None): 25 | self.name = name 26 | self.parsed = parsed 27 | self.multiple_results = multiple_results 28 | self.required = required 29 | self.predicate = predicate 30 | 31 | 32 | class Value(Requirement): 33 | 34 | def __init__(self, 35 | name, 36 | multiple_results=False, 37 | required=True, 38 | predicate=None): 39 | super(Value, self).__init__(name, 40 | parsed=True, 41 | multiple_results=multiple_results, 42 | required=required, 43 | predicate=predicate) 44 | 45 | 46 | def sibling_predicate(source, target): 47 | return source.parent() == target.parent() 48 | -------------------------------------------------------------------------------- /dsl_parser/holder.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | class Holder(object): 18 | 19 | def __init__(self, 20 | value, 21 | start_line=None, 22 | start_column=None, 23 | end_line=None, 24 | end_column=None, 25 | filename=None): 26 | self.value = value 27 | self.start_line = start_line 28 | self.start_column = start_column 29 | self.end_line = end_line 30 | self.end_column = end_column 31 | self.filename = filename 32 | 33 | def __str__(self): 34 | return '{0}<{1}.{2}-{3}.{4} [{5}]>'.format( 35 | self.value, 36 | self.start_line, 37 | self.start_column, 38 | self.end_line, 39 | self.end_column, 40 | self.filename) 41 | 42 | def __repr__(self): 43 | return self.__str__() 44 | 45 | def __hash__(self): 46 | return hash(self.value) 47 | 48 | def __eq__(self, other): 49 | return isinstance(other, Holder) and self.value == other.value 50 | 51 | def __contains__(self, key): 52 | key_holder, value_holder = self.get_item(key) 53 | return value_holder is not None 54 | 55 | def get_item(self, key): 56 | if not isinstance(self.value, dict): 57 | raise ValueError('Value is expected to be of type dict while it' 58 | 'is in fact of type {0}' 59 | .format(type(self.value).__name__)) 60 | for key_holder, value_holder in self.value.iteritems(): 61 | if key_holder.value == key: 62 | return key_holder, value_holder 63 | return None, None 64 | 65 | def restore(self): 66 | if isinstance(self.value, dict): 67 | return dict((key_holder.restore(), value_holder.restore()) 68 | for key_holder, value_holder in self.value.iteritems()) 69 | elif isinstance(self.value, list): 70 | return [value_holder.restore() for value_holder in self.value] 71 | elif isinstance(self.value, set): 72 | return set((value_holder.restore() for value_holder in self.value)) 73 | else: 74 | return self.value 75 | 76 | @staticmethod 77 | def of(obj, filename=None): 78 | if isinstance(obj, Holder): 79 | return obj 80 | if isinstance(obj, dict): 81 | result = dict((Holder.of(key, filename=filename), 82 | Holder.of(value, filename=filename)) 83 | for key, value in obj.iteritems()) 84 | elif isinstance(obj, list): 85 | result = [Holder.of(item, filename=filename) for item in obj] 86 | elif isinstance(obj, set): 87 | result = set((Holder.of(item, filename=filename) for item in obj)) 88 | else: 89 | result = obj 90 | return Holder(result, filename=filename) 91 | 92 | def copy(self): 93 | return Holder(value=self.value, 94 | start_line=self.start_line, 95 | start_column=self.start_column, 96 | end_line=self.end_line, 97 | end_column=self.end_column, 98 | filename=self.filename) 99 | -------------------------------------------------------------------------------- /dsl_parser/import_resolver/__init__.py: -------------------------------------------------------------------------------- 1 | ######### 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | -------------------------------------------------------------------------------- /dsl_parser/import_resolver/abstract_import_resolver.py: -------------------------------------------------------------------------------- 1 | ######### 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import abc 17 | import contextlib 18 | import urllib2 19 | 20 | import requests 21 | from retrying import retry 22 | 23 | from dsl_parser import exceptions 24 | 25 | 26 | DEFAULT_RETRY_DELAY = 1 27 | MAX_NUMBER_RETRIES = 5 28 | DEFAULT_REQUEST_TIMEOUT = 10 29 | 30 | 31 | class AbstractImportResolver(object): 32 | """ 33 | This class is abstract and should be inherited by concrete 34 | implementations of import resolver. 35 | The only mandatory implementation is of resolve, which is expected 36 | to open the import url and return its data. 37 | """ 38 | 39 | __metaclass__ = abc.ABCMeta 40 | 41 | @abc.abstractmethod 42 | def resolve(self, import_url): 43 | raise NotImplementedError 44 | 45 | def fetch_import(self, import_url): 46 | url_parts = import_url.split(':') 47 | if url_parts[0] in ['http', 'https', 'ftp', 'file']: 48 | return self.resolve(import_url) 49 | return read_import(import_url) 50 | 51 | 52 | def read_import(import_url): 53 | error_str = 'Import failed: Unable to open import url' 54 | if import_url.startswith('file:'): 55 | try: 56 | request = urllib2.Request(import_url) 57 | with contextlib.closing(urllib2.urlopen(request)) as f: 58 | return f.read() 59 | except Exception, ex: 60 | ex = exceptions.DSLParsingLogicException( 61 | 13, '{0} {1}; {2}'.format(error_str, import_url, ex)) 62 | raise ex 63 | else: 64 | number_of_attempts = MAX_NUMBER_RETRIES + 1 65 | 66 | # Defines on which errors we should retry the import. 67 | def _is_recoverable_error(e): 68 | return isinstance(e, (requests.ConnectionError, requests.Timeout)) 69 | 70 | # Defines on which return values we should retry the import. 71 | def _is_internal_error(result): 72 | return hasattr(result, 'status_code') and result.status_code >= 500 73 | 74 | @retry(stop_max_attempt_number=number_of_attempts, 75 | wait_fixed=DEFAULT_RETRY_DELAY, 76 | retry_on_exception=_is_recoverable_error, 77 | retry_on_result=_is_internal_error) 78 | def get_import(): 79 | response = requests.get(import_url, 80 | timeout=DEFAULT_REQUEST_TIMEOUT) 81 | # The response is a valid one, and the content should be returned 82 | if 200 <= response.status_code < 300: 83 | return response.text 84 | # If the response status code is above 500, an internal server 85 | # error has occurred. The return value would be caught by 86 | # _is_internal_error (as specified in the decorator), and retried. 87 | elif response.status_code >= 500: 88 | return response 89 | # Any other response should raise an exception. 90 | else: 91 | invalid_url_err = exceptions.DSLParsingLogicException( 92 | 13, '{0} {1}; status code: {2}'.format( 93 | error_str, import_url, response.status_code)) 94 | raise invalid_url_err 95 | 96 | try: 97 | import_result = get_import() 98 | # If the error is an internal error only. A custom exception should 99 | # be raised. 100 | if _is_internal_error(import_result): 101 | msg = 'Import failed {0} times, due to internal server error' \ 102 | '; {1}'.format(number_of_attempts, import_result.text) 103 | raise exceptions.DSLParsingLogicException(13, msg) 104 | return import_result 105 | # If any ConnectionError, Timeout or URLRequired should rise 106 | # after the retrying mechanism, a custom exception will be raised. 107 | except (requests.ConnectionError, requests.Timeout, 108 | requests.URLRequired) as err: 109 | 110 | raise exceptions.DSLParsingLogicException( 111 | 13, '{0} {1}; {2}'.format(error_str, import_url, err)) 112 | -------------------------------------------------------------------------------- /dsl_parser/import_resolver/default_import_resolver.py: -------------------------------------------------------------------------------- 1 | ######### 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | from dsl_parser.exceptions import DSLParsingLogicException 16 | 17 | from dsl_parser.import_resolver.abstract_import_resolver \ 18 | import AbstractImportResolver, read_import 19 | 20 | DEFAULT_RULES = [] 21 | DEFAULT_RESLOVER_RULES_KEY = 'rules' 22 | 23 | 24 | class DefaultResolverValidationException(Exception): 25 | pass 26 | 27 | 28 | class DefaultImportResolver(AbstractImportResolver): 29 | """ 30 | This class is a default implementation of an import resolver. 31 | This resolver uses the rules to replace URL's prefix with another prefix 32 | and tries to resolve the new URL (after the prefix has been replaced). 33 | If there aren't any rules, none of the rules matches or 34 | none of the prefix replacements works, 35 | the resolver will try to use the original URL. 36 | 37 | Each rule in the ``rules`` list is expected to be 38 | a dictionary with one (key, value) pair which represents 39 | a prefix and its replacement which can be used to resolve the import url. 40 | 41 | The resolver will go over the rules and for each matching rule 42 | (its key is a prefix of the url) it will replace the prefix 43 | with the value and will try to resolve the new url. 44 | 45 | For example: 46 | The rules list: [ 47 | {'http://prefix1': 'http://prefix1_replacement'}, 48 | {'http://prefix2': 'http://prefix2_replacement1'}, 49 | {'http://prefix2': 'http://prefix2_replacement2'} 50 | ] 51 | contains three rules that can be used for resolve URLs that 52 | starts with 'http://prefix1' and 'http://prefix2'. 53 | If the url is 'http://prefix2.suffix2.org' than the resolve method 54 | will find a match in both the second and the third rules. 55 | 56 | It will first try to apply the second rule by replacing the url's 57 | prefix with the second rule value ('http://prefix2_replacement1') 58 | and will try to resolve the new url: 59 | 'http://prefix2_replacement1.suffix2.org'. 60 | 61 | In case this url cannot be resolved, it will try to apply 62 | the third rule by replacing the url's prefix with 63 | the third rule value ('http://prefix2_replacement2') 64 | and will try to resolve the url: 65 | 'http://prefix2_replacement2.suffix2.org'. 66 | 67 | If this url, also, cannot be resolved, 68 | it will try to resolve the original url, 69 | i.e. http://prefix2.suffix2.org' 70 | 71 | In case that all the resolve attempts will fail, 72 | a DSLParsingLogicException will be raise. 73 | """ 74 | 75 | def __init__(self, rules=None): 76 | # set the rules 77 | self.rules = rules 78 | if self.rules is None: 79 | self.rules = DEFAULT_RULES 80 | self._validate_rules() 81 | 82 | def resolve(self, import_url): 83 | failed_urls = {} 84 | # trying to find a matching rule that can resolve this url 85 | for rule in self.rules: 86 | # the validate method checks that the dict has exactly 1 element 87 | prefix, value = list(rule.items())[0] 88 | prefix_len = len(prefix) 89 | if prefix == import_url[:prefix_len]: 90 | # found a matching rule 91 | url_to_resolve = value + import_url[prefix_len:] 92 | # trying to resolve the resolved_url 93 | if url_to_resolve not in failed_urls: 94 | # there is no point to try to resolve the same url twice 95 | try: 96 | return read_import(url_to_resolve) 97 | except DSLParsingLogicException as ex: 98 | # failed to resolve current rule, 99 | # continue to the next one 100 | failed_urls[url_to_resolve] = str(ex) 101 | 102 | # failed to resolve the url using the rules 103 | # trying to open the original url 104 | try: 105 | return read_import(import_url) 106 | except DSLParsingLogicException as ex: 107 | if not self.rules: 108 | raise 109 | if not failed_urls: 110 | # no matching rules 111 | msg = 'None of the resolver rules {0} was applicable, ' \ 112 | 'failed to resolve the original import url: {1} '\ 113 | .format(self.rules, ex) 114 | else: 115 | # all urls failed to be resolved 116 | msg = 'Failed to resolve the following urls: {0}. ' \ 117 | 'In addition, failed to resolve the original ' \ 118 | 'import url - {1}'.format(failed_urls, ex) 119 | ex = DSLParsingLogicException(13, msg) 120 | ex.failed_import = import_url 121 | raise ex 122 | 123 | def _validate_rules(self): 124 | if not isinstance(self.rules, list): 125 | raise DefaultResolverValidationException( 126 | 'Invalid parameters supplied for the default resolver: ' 127 | 'The `{0}` parameter must be a list but it is of type {1}.' 128 | .format( 129 | DEFAULT_RESLOVER_RULES_KEY, 130 | type(self.rules).__name__)) 131 | for rule in self.rules: 132 | if not isinstance(rule, dict): 133 | raise DefaultResolverValidationException( 134 | 'Invalid parameters supplied for the default resolver: ' 135 | 'Each rule must be a dictionary but the rule ' 136 | '[{0}] is of type {1}.' 137 | .format(rule, type(rule).__name__)) 138 | keys = rule.keys() 139 | if not len(keys) == 1: 140 | raise DefaultResolverValidationException( 141 | 'Invalid parameters supplied for the default resolver: ' 142 | 'Each rule must be a dictionary with one (key,value) pair ' 143 | 'but the rule [{0}] has {1} keys.' 144 | .format(rule, len(keys))) 145 | -------------------------------------------------------------------------------- /dsl_parser/interfaces/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cloudify-cosmo/cloudify-dsl-parser/7b2b6a86c5f3c3e2c91af67c050d28283cf86342/dsl_parser/interfaces/__init__.py -------------------------------------------------------------------------------- /dsl_parser/interfaces/constants.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser.interfaces import utils 17 | 18 | INTERFACES = 'interfaces' 19 | SOURCE_INTERFACES = 'source_interfaces' 20 | TARGET_INTERFACES = 'target_interfaces' 21 | NO_OP = utils.no_op() 22 | -------------------------------------------------------------------------------- /dsl_parser/interfaces/interfaces_merger.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser.interfaces.operation_merger import OperationMerger 17 | 18 | 19 | class InterfaceMerger(object): 20 | 21 | def __init__(self, 22 | overriding_interface, 23 | overridden_interface, 24 | operation_merger=OperationMerger): 25 | 26 | self.overriding_interface = overriding_interface 27 | self.overridden_interface = overridden_interface 28 | self.operation_merger = operation_merger 29 | 30 | def merge(self): 31 | 32 | merged_interface = {} 33 | 34 | for overridden_operation_name, overridden_operation \ 35 | in self.overridden_interface.items(): 36 | 37 | overriding_operation = self.overriding_interface.get( 38 | overridden_operation_name, 39 | None) 40 | 41 | merger = self.operation_merger( 42 | overriding_operation=overriding_operation, 43 | overridden_operation=overridden_operation 44 | ) 45 | merged_operation = merger.merge() 46 | merged_interface[overridden_operation_name] = merged_operation 47 | 48 | for overriding_operation_name, overriding_operation \ 49 | in self.overriding_interface.items(): 50 | 51 | overridden_operation = self.overridden_interface.get( 52 | overriding_operation_name, 53 | None) 54 | 55 | merger = self.operation_merger( 56 | overriding_operation=overriding_operation, 57 | overridden_operation=overridden_operation 58 | ) 59 | merged_operation = merger.merge() 60 | merged_interface[overriding_operation_name] = merged_operation 61 | 62 | return merged_interface 63 | 64 | 65 | class InterfacesMerger(object): 66 | 67 | def __init__(self, 68 | overriding_interfaces, 69 | overridden_interfaces, 70 | operation_merger): 71 | 72 | self.overriding_interfaces = overriding_interfaces 73 | self.overridden_interfaces = overridden_interfaces 74 | self.operation_merger = operation_merger 75 | self.interface_merger = InterfaceMerger 76 | 77 | def merge(self): 78 | 79 | merged_interfaces = {} 80 | 81 | for overridden_interface_name, overridden_interface \ 82 | in self.overridden_interfaces.items(): 83 | 84 | overriding_interface = self.overriding_interfaces.get( 85 | overridden_interface_name, {}) 86 | 87 | interface_merger = self.interface_merger( 88 | overriding_interface=overriding_interface, 89 | overridden_interface=overridden_interface, 90 | operation_merger=self.operation_merger 91 | ) 92 | merged_interface = interface_merger.merge() 93 | merged_interfaces[overridden_interface_name] = merged_interface 94 | 95 | for overriding_interface_name, overriding_interface \ 96 | in self.overriding_interfaces.items(): 97 | 98 | overridden_interface = self.overridden_interfaces.get( 99 | overriding_interface_name, {}) 100 | 101 | interface_merger = self.interface_merger( 102 | overriding_interface=overriding_interface, 103 | overridden_interface=overridden_interface, 104 | operation_merger=self.operation_merger 105 | ) 106 | merged_interface = interface_merger.merge() 107 | merged_interfaces[overriding_interface_name] = merged_interface 108 | 109 | return merged_interfaces 110 | -------------------------------------------------------------------------------- /dsl_parser/interfaces/interfaces_parser.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser.interfaces.interfaces_merger import InterfacesMerger 17 | from dsl_parser.interfaces.operation_merger import ( 18 | NodeTypeNodeTypeOperationMerger, 19 | RelationshipTypeRelationshipInstanceOperationMerger, 20 | RelationshipTypeRelationshipTypeOperationMerger, 21 | NodeTemplateNodeTypeOperationMerger) 22 | 23 | 24 | def merge_node_type_interfaces(overriding_interfaces, 25 | overridden_interfaces): 26 | return InterfacesMerger( 27 | overriding_interfaces=overriding_interfaces, 28 | overridden_interfaces=overridden_interfaces, 29 | operation_merger=NodeTypeNodeTypeOperationMerger 30 | ).merge() 31 | 32 | 33 | def merge_node_type_and_node_template_interfaces(node_type_interfaces, 34 | node_template_interfaces): 35 | return InterfacesMerger( 36 | overriding_interfaces=node_template_interfaces, 37 | overridden_interfaces=node_type_interfaces, 38 | operation_merger=NodeTemplateNodeTypeOperationMerger 39 | ).merge() 40 | 41 | 42 | def merge_relationship_type_interfaces( 43 | overriding_interfaces, 44 | overridden_interfaces): 45 | return InterfacesMerger( 46 | overriding_interfaces=overriding_interfaces, 47 | overridden_interfaces=overridden_interfaces, 48 | operation_merger=RelationshipTypeRelationshipTypeOperationMerger 49 | ).merge() 50 | 51 | 52 | def merge_relationship_type_and_instance_interfaces( 53 | relationship_type_interfaces, 54 | relationship_instance_interfaces): 55 | return InterfacesMerger( 56 | overriding_interfaces=relationship_instance_interfaces, 57 | overridden_interfaces=relationship_type_interfaces, 58 | operation_merger=RelationshipTypeRelationshipInstanceOperationMerger 59 | ).merge() 60 | -------------------------------------------------------------------------------- /dsl_parser/interfaces/operation_merger.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser.interfaces.constants import NO_OP 17 | from dsl_parser.interfaces.utils import (operation_mapping, 18 | merge_schema_and_instance_inputs) 19 | 20 | 21 | class OperationMerger(object): 22 | 23 | @staticmethod 24 | def _create_operation(raw_operation): 25 | if raw_operation is None: 26 | return None 27 | if isinstance(raw_operation, str): 28 | return operation_mapping( 29 | implementation=raw_operation, 30 | inputs={}, 31 | executor=None, 32 | max_retries=None, 33 | retry_interval=None 34 | ) 35 | if isinstance(raw_operation, dict): 36 | return operation_mapping( 37 | implementation=raw_operation.get('implementation', ''), 38 | inputs=raw_operation.get('inputs', {}), 39 | executor=raw_operation.get('executor', None), 40 | max_retries=raw_operation.get('max_retries', None), 41 | retry_interval=raw_operation.get('retry_interval', None) 42 | ) 43 | 44 | def merge(self): 45 | raise NotImplementedError('Must be implemented by subclasses') 46 | 47 | 48 | class NodeTemplateNodeTypeOperationMerger(OperationMerger): 49 | 50 | def __init__(self, 51 | overriding_operation, 52 | overridden_operation): 53 | self.node_type_operation = self._create_operation( 54 | overridden_operation) 55 | self.node_template_operation = self._create_operation( 56 | overriding_operation) 57 | 58 | def _derive_implementation(self): 59 | merged_operation_implementation = \ 60 | self.node_template_operation['implementation'] 61 | if not merged_operation_implementation: 62 | # node template does not define an implementation 63 | # this means we want to inherit the implementation 64 | # from the type 65 | merged_operation_implementation = \ 66 | self.node_type_operation['implementation'] 67 | return merged_operation_implementation 68 | 69 | def _derive_inputs(self, merged_operation_implementation): 70 | if merged_operation_implementation == \ 71 | self.node_type_operation['implementation']: 72 | # this means the node template inputs should adhere to 73 | # the node type inputs schema (since its the same implementation) 74 | merged_operation_inputs = merge_schema_and_instance_inputs( 75 | schema_inputs=self.node_type_operation['inputs'], 76 | instance_inputs=self.node_template_operation['inputs'] 77 | ) 78 | else: 79 | # the node template implementation overrides 80 | # the node type implementation. this means 81 | # we take the inputs defined in the node template 82 | merged_operation_inputs = \ 83 | self.node_template_operation['inputs'] 84 | 85 | return merged_operation_inputs 86 | 87 | def _derive_executor(self, merged_operation_implementation): 88 | return self._derive_with_impl('executor', 89 | merged_operation_implementation) 90 | 91 | def _derive_max_retries(self, merged_operation_implementation): 92 | return self._derive_with_impl('max_retries', 93 | merged_operation_implementation) 94 | 95 | def _derive_retry_interval(self, merged_operation_implementation): 96 | return self._derive_with_impl('retry_interval', 97 | merged_operation_implementation) 98 | 99 | def _derive_with_impl(self, field_name, merged_operation_implementation): 100 | node_type_operation_value = self.node_type_operation[ 101 | field_name] 102 | node_template_operation_value = self.node_template_operation[ 103 | field_name] 104 | if merged_operation_implementation != \ 105 | self.node_type_operation['implementation']: 106 | # this means the node template operation value will take 107 | # precedence (even if it is None, in which case, 108 | # the default value will apply (plugin for executor, and global 109 | # config for retry params) 110 | return node_template_operation_value 111 | if node_template_operation_value is not None: 112 | # node template operation value is declared 113 | # explicitly, use it 114 | return node_template_operation_value 115 | return node_type_operation_value 116 | 117 | def merge(self): 118 | 119 | if self.node_type_operation is None: 120 | 121 | # the operation is not defined in the type 122 | # should be merged by the node template operation 123 | 124 | return self.node_template_operation 125 | 126 | if self.node_template_operation is None: 127 | 128 | # the operation is not defined in the template 129 | # should be merged by the node type operation 130 | # this will validate that all schema inputs have 131 | # default values 132 | 133 | return operation_mapping( 134 | implementation=self.node_type_operation['implementation'], 135 | inputs=merge_schema_and_instance_inputs( 136 | schema_inputs=self.node_type_operation['inputs'], 137 | instance_inputs={} 138 | ), 139 | executor=self.node_type_operation['executor'], 140 | max_retries=self.node_type_operation['max_retries'], 141 | retry_interval=self.node_type_operation['retry_interval'], 142 | ) 143 | 144 | if self.node_template_operation == NO_OP: 145 | # no-op overrides 146 | return NO_OP 147 | if self.node_type_operation == NO_OP: 148 | # no-op overridden 149 | return self.node_template_operation 150 | 151 | merged_operation_implementation = self._derive_implementation() 152 | merged_operation_inputs = self._derive_inputs( 153 | merged_operation_implementation) 154 | merged_operation_executor = self._derive_executor( 155 | merged_operation_implementation) 156 | merged_operation_retries = self._derive_max_retries( 157 | merged_operation_implementation) 158 | merged_operation_retry_interval = self._derive_retry_interval( 159 | merged_operation_implementation) 160 | 161 | return operation_mapping( 162 | implementation=merged_operation_implementation, 163 | inputs=merged_operation_inputs, 164 | executor=merged_operation_executor, 165 | max_retries=merged_operation_retries, 166 | retry_interval=merged_operation_retry_interval 167 | ) 168 | 169 | 170 | class NodeTypeNodeTypeOperationMerger(OperationMerger): 171 | 172 | def __init__(self, 173 | overriding_operation, 174 | overridden_operation): 175 | self.overridden_node_type_operation = self._create_operation( 176 | overridden_operation) 177 | self.overriding_node_type_operation = self._create_operation( 178 | overriding_operation) 179 | 180 | def merge(self): 181 | 182 | if self.overriding_node_type_operation is None: 183 | return self.overridden_node_type_operation 184 | 185 | if self.overriding_node_type_operation == NO_OP: 186 | return NO_OP 187 | 188 | merged_operation_implementation = \ 189 | self.overriding_node_type_operation['implementation'] 190 | 191 | merged_operation_inputs = \ 192 | self.overriding_node_type_operation['inputs'] 193 | 194 | merged_operation_executor = \ 195 | self.overriding_node_type_operation['executor'] 196 | 197 | merged_operation_max_retries = \ 198 | self.overriding_node_type_operation['max_retries'] 199 | 200 | merged_operation_retry_interval = \ 201 | self.overriding_node_type_operation['retry_interval'] 202 | 203 | return operation_mapping( 204 | implementation=merged_operation_implementation, 205 | inputs=merged_operation_inputs, 206 | executor=merged_operation_executor, 207 | max_retries=merged_operation_max_retries, 208 | retry_interval=merged_operation_retry_interval 209 | ) 210 | 211 | 212 | RelationshipTypeRelationshipTypeOperationMerger = \ 213 | NodeTypeNodeTypeOperationMerger 214 | RelationshipTypeRelationshipInstanceOperationMerger = \ 215 | NodeTemplateNodeTypeOperationMerger 216 | -------------------------------------------------------------------------------- /dsl_parser/interfaces/utils.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | from dsl_parser import (functions, 18 | utils) 19 | from dsl_parser.exceptions import DSLParsingLogicException 20 | 21 | 22 | def validate_missing_inputs(inputs, schema_inputs): 23 | """Check that all inputs defined in schema_inputs exist in inputs""" 24 | 25 | missing_inputs = set(schema_inputs) - set(inputs) 26 | if missing_inputs: 27 | if len(missing_inputs) == 1: 28 | message = "Input '{0}' is missing a value".format( 29 | missing_inputs.pop()) 30 | else: 31 | formatted_inputs = ', '.join("'{0}'".format(input_name) 32 | for input_name in missing_inputs) 33 | message = "Inputs {0} are missing a value".format(formatted_inputs) 34 | 35 | raise DSLParsingLogicException(107, message) 36 | 37 | 38 | def validate_inputs_types(inputs, schema_inputs): 39 | for input_key, _input in schema_inputs.iteritems(): 40 | input_type = _input.get('type') 41 | if input_type is None: 42 | # no type defined - no validation 43 | continue 44 | input_val = inputs[input_key] 45 | 46 | if functions.parse(input_val) != input_val: 47 | # intrinsic function - not validated at the moment 48 | continue 49 | 50 | if input_type == 'integer': 51 | if isinstance(input_val, (int, long)) and not \ 52 | isinstance(input_val, bool): 53 | continue 54 | elif input_type == 'float': 55 | if isinstance(input_val, (int, float, long)) and not \ 56 | isinstance(input_val, bool): 57 | continue 58 | elif input_type == 'boolean': 59 | if isinstance(input_val, bool): 60 | continue 61 | elif input_type == 'string': 62 | continue 63 | else: 64 | raise DSLParsingLogicException( 65 | 80, "Unexpected type defined in inputs schema " 66 | "for input '{0}' - unknown type is {1}" 67 | .format(input_key, input_type)) 68 | 69 | raise DSLParsingLogicException( 70 | 50, "Input type validation failed: Input '{0}' type " 71 | "is '{1}', yet it was assigned with the value '{2}'" 72 | .format(input_key, input_type, input_val)) 73 | 74 | 75 | def merge_schema_and_instance_inputs(schema_inputs, 76 | instance_inputs): 77 | 78 | flattened_schema_inputs = utils.flatten_schema(schema_inputs) 79 | merged_inputs = dict( 80 | flattened_schema_inputs.items() + 81 | instance_inputs.items()) 82 | 83 | validate_missing_inputs(merged_inputs, schema_inputs) 84 | validate_inputs_types(merged_inputs, schema_inputs) 85 | return merged_inputs 86 | 87 | 88 | def operation_mapping(implementation, inputs, executor, 89 | max_retries, retry_interval): 90 | return { 91 | 'implementation': implementation, 92 | 'inputs': inputs, 93 | 'executor': executor, 94 | 'max_retries': max_retries, 95 | 'retry_interval': retry_interval 96 | } 97 | 98 | 99 | def no_op(): 100 | return operation_mapping( 101 | implementation='', 102 | inputs={}, 103 | executor=None, 104 | max_retries=None, 105 | retry_interval=None, 106 | ) 107 | 108 | 109 | def no_op_operation(operation_name): 110 | return operation( 111 | name=operation_name, 112 | plugin_name='', 113 | operation_mapping='', 114 | operation_inputs={}, 115 | executor=None, 116 | max_retries=None, 117 | retry_interval=None 118 | ) 119 | 120 | 121 | def operation(name, 122 | plugin_name, 123 | operation_mapping, 124 | operation_inputs, 125 | executor, 126 | max_retries, 127 | retry_interval): 128 | return { 129 | 'name': name, 130 | 'plugin': plugin_name, 131 | 'operation': operation_mapping, 132 | 'executor': executor, 133 | 'inputs': operation_inputs, 134 | 'has_intrinsic_functions': False, 135 | 'max_retries': max_retries, 136 | 'retry_interval': retry_interval 137 | } 138 | 139 | 140 | def workflow_operation(plugin_name, 141 | workflow_mapping, 142 | workflow_parameters): 143 | return { 144 | 'plugin': plugin_name, 145 | 'operation': workflow_mapping, 146 | 'parameters': workflow_parameters 147 | } 148 | -------------------------------------------------------------------------------- /dsl_parser/models.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | class Version(dict): 18 | 19 | def __init__(self, version): 20 | self.update(version) 21 | 22 | @property 23 | def raw(self): 24 | return self['raw'] 25 | 26 | @property 27 | def definitions_name(self): 28 | return self['definitions_name'] 29 | 30 | @property 31 | def definitions_version(self): 32 | return self['definitions_version'] 33 | 34 | 35 | class Plan(dict): 36 | 37 | def __init__(self, plan): 38 | self.update(plan) 39 | 40 | @property 41 | def version(self): 42 | return self['version'] 43 | 44 | @property 45 | def inputs(self): 46 | return self['inputs'] 47 | 48 | @property 49 | def outputs(self): 50 | return self['outputs'] 51 | 52 | @property 53 | def node_templates(self): 54 | return self['nodes'] 55 | -------------------------------------------------------------------------------- /dsl_parser/multi_instance.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | import copy 18 | 19 | from dsl_parser import (models, 20 | rel_graph, 21 | constants) 22 | 23 | 24 | def create_deployment_plan(plan): 25 | """ 26 | Expand node instances based on number of instances to deploy and 27 | defined relationships 28 | """ 29 | deployment_plan = copy.deepcopy(plan) 30 | plan_node_graph = rel_graph.build_node_graph( 31 | nodes=deployment_plan['nodes'], 32 | scaling_groups=deployment_plan['scaling_groups']) 33 | deployment_node_graph, ctx = rel_graph.build_deployment_node_graph( 34 | plan_node_graph) 35 | node_instances = rel_graph.extract_node_instances( 36 | node_instances_graph=deployment_node_graph, 37 | ctx=ctx) 38 | deployment_plan[constants.NODE_INSTANCES] = node_instances 39 | return models.Plan(deployment_plan) 40 | 41 | 42 | def modify_deployment(nodes, 43 | previous_nodes, 44 | previous_node_instances, 45 | modified_nodes, 46 | scaling_groups): 47 | """ 48 | modifies deployment according to the expected nodes. based on 49 | previous_node_instances 50 | :param nodes: the entire set of expected nodes. 51 | :param previous_node_instances: 52 | :param modified_nodes: existing nodes whose instance number has changed 53 | Add a line note 54 | :return: a dict of add,extended,reduced and removed instances 55 | Add a line note 56 | """ 57 | 58 | plan_node_graph = rel_graph.build_node_graph( 59 | nodes=nodes, 60 | scaling_groups=scaling_groups) 61 | previous_plan_node_graph = rel_graph.build_node_graph( 62 | nodes=previous_nodes, 63 | scaling_groups=scaling_groups) 64 | previous_deployment_node_graph, previous_deployment_contained_graph = \ 65 | rel_graph.build_previous_deployment_node_graph( 66 | plan_node_graph=previous_plan_node_graph, 67 | previous_node_instances=previous_node_instances) 68 | new_deployment_node_graph, ctx = rel_graph.build_deployment_node_graph( 69 | plan_node_graph=plan_node_graph, 70 | previous_deployment_node_graph=previous_deployment_node_graph, 71 | previous_deployment_contained_graph=previous_deployment_contained_graph, # noqa 72 | modified_nodes=modified_nodes) 73 | 74 | # Any node instances which were added or removed 75 | added_and_related = rel_graph.extract_added_node_instances( 76 | previous_deployment_node_graph, new_deployment_node_graph, 77 | ctx=ctx) 78 | removed_and_related = rel_graph.extract_removed_node_instances( 79 | previous_deployment_node_graph, new_deployment_node_graph, 80 | ctx=ctx) 81 | 82 | # Any node instances which had a modification to their relationship. 83 | # (newly introduced and removed nodes) 84 | extended_and_related = rel_graph.extract_added_relationships( 85 | previous_deployment_node_graph, new_deployment_node_graph, 86 | ctx=ctx) 87 | reduced_and_related = rel_graph.extract_removed_relationships( 88 | previous_deployment_node_graph, new_deployment_node_graph, 89 | ctx=ctx) 90 | 91 | # The extracted extended and reduced relationships hold the new and old 92 | # node instances. These are not required, since the change is on 93 | # node instance level (and not the relationship level) 94 | extended_and_related = \ 95 | filter_out_node_instances(added_and_related, extended_and_related) 96 | reduced_and_related = \ 97 | filter_out_node_instances(removed_and_related, reduced_and_related) 98 | 99 | return { 100 | 'added_and_related': added_and_related, 101 | 'extended_and_related': extended_and_related, 102 | 'reduced_and_related': reduced_and_related, 103 | 'removed_and_related': removed_and_related 104 | } 105 | 106 | 107 | def filter_out_node_instances(node_instances_to_filter_out, 108 | base_node_instances): 109 | instance_ids_to_remove = [n['id'] for n in node_instances_to_filter_out 110 | if 'modification' in n] 111 | return [n for n in base_node_instances 112 | if n['id'] not in instance_ids_to_remove] 113 | -------------------------------------------------------------------------------- /dsl_parser/parser.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser import (functions, 17 | utils) 18 | from dsl_parser.framework import parser 19 | from dsl_parser.elements import blueprint 20 | from dsl_parser.import_resolver.default_import_resolver import \ 21 | DefaultImportResolver 22 | 23 | 24 | def parse_from_path(dsl_file_path, 25 | resources_base_path=None, 26 | resolver=None, 27 | validate_version=True, 28 | additional_resource_sources=()): 29 | with open(dsl_file_path, 'r') as f: 30 | dsl_string = f.read() 31 | return _parse(dsl_string, 32 | resources_base_path=resources_base_path, 33 | dsl_location=dsl_file_path, 34 | resolver=resolver, 35 | validate_version=validate_version, 36 | additional_resource_sources=additional_resource_sources) 37 | 38 | 39 | def parse(dsl_string, 40 | resources_base_path=None, 41 | dsl_location=None, 42 | resolver=None, 43 | validate_version=True): 44 | return _parse(dsl_string, 45 | resources_base_path=resources_base_path, 46 | dsl_location=dsl_location, 47 | resolver=resolver, 48 | validate_version=validate_version) 49 | 50 | 51 | def _parse(dsl_string, 52 | resources_base_path, 53 | dsl_location=None, 54 | resolver=None, 55 | validate_version=True, 56 | additional_resource_sources=()): 57 | parsed_dsl_holder = utils.load_yaml(raw_yaml=dsl_string, 58 | error_message='Failed to parse DSL', 59 | filename=dsl_location) 60 | 61 | if not resolver: 62 | resolver = DefaultImportResolver() 63 | 64 | # validate version schema and extract actual version used 65 | result = parser.parse( 66 | parsed_dsl_holder, 67 | element_cls=blueprint.BlueprintVersionExtractor, 68 | inputs={ 69 | 'validate_version': validate_version 70 | }, 71 | strict=False) 72 | version = result['plan_version'] 73 | 74 | # handle imports 75 | result = parser.parse( 76 | value=parsed_dsl_holder, 77 | inputs={ 78 | 'main_blueprint_holder': parsed_dsl_holder, 79 | 'resources_base_path': resources_base_path, 80 | 'blueprint_location': dsl_location, 81 | 'version': version, 82 | 'resolver': resolver, 83 | 'validate_version': validate_version 84 | }, 85 | element_cls=blueprint.BlueprintImporter, 86 | strict=False) 87 | resource_base = [result['resource_base']] 88 | if additional_resource_sources: 89 | resource_base.extend(additional_resource_sources) 90 | 91 | merged_blueprint_holder = result['merged_blueprint'] 92 | 93 | # parse blueprint 94 | plan = parser.parse( 95 | value=merged_blueprint_holder, 96 | inputs={ 97 | 'resource_base': resource_base, 98 | 'validate_version': validate_version 99 | }, 100 | element_cls=blueprint.Blueprint) 101 | 102 | functions.validate_functions(plan) 103 | return plan 104 | -------------------------------------------------------------------------------- /dsl_parser/scan.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | NODE_TEMPLATE_SCOPE = 'node_template' 18 | NODE_TEMPLATE_RELATIONSHIP_SCOPE = 'node_template_relationship' 19 | OUTPUTS_SCOPE = 'outputs' 20 | POLICIES_SCOPE = 'policies' 21 | SCALING_GROUPS_SCOPE = 'scaling_groups' 22 | 23 | # Searching for secrets in the blueprint only one time of the few times 24 | # that scan_service_template is called 25 | collect_secrets = False 26 | secrets = set() 27 | 28 | 29 | def scan_properties(value, 30 | handler, 31 | scope=None, 32 | context=None, 33 | path='', 34 | replace=False, 35 | recursive=True): 36 | """ 37 | Scans properties dict recursively and applies the provided handler 38 | method for each property. 39 | 40 | The handler method should have the following signature: 41 | def handler(value, scope, context, path): 42 | 43 | * value - the value of the property. 44 | * scope - scope of the operation (string). 45 | * context - scanner context (i.e. actual node template). 46 | * path - current property path. 47 | * replace - replace current dict/list values of scanned properties. 48 | 49 | :param value: The properties container (dict/list). 50 | :param handler: A method for applying for to each property. 51 | :param path: The properties base path (for debugging purposes). 52 | """ 53 | if isinstance(value, dict): 54 | for k, v in value.iteritems(): 55 | current_path = '{0}.{1}'.format(path, k) 56 | result = handler(v, scope, context, current_path) 57 | _collect_secret(result) 58 | if replace and result != v: 59 | value[k] = result 60 | if recursive: 61 | scan_properties(v, handler, 62 | scope=scope, 63 | context=context, 64 | path=current_path, 65 | replace=replace) 66 | elif isinstance(value, list): 67 | for index, item in enumerate(value): 68 | current_path = '{0}[{1}]'.format(path, index) 69 | result = handler(item, scope, context, current_path) 70 | _collect_secret(result) 71 | if replace and result != item: 72 | value[index] = result 73 | if recursive: 74 | scan_properties(item, 75 | handler, 76 | scope=scope, 77 | context=context, 78 | path=path, 79 | replace=replace) 80 | 81 | 82 | def _collect_secret(value): 83 | if collect_secrets and isinstance(value, dict) and 'get_secret' in value: 84 | secrets.add(value['get_secret']) 85 | 86 | 87 | def _scan_operations(operations, 88 | handler, 89 | scope=None, 90 | context=None, 91 | path='', 92 | replace=False): 93 | for name, definition in operations.iteritems(): 94 | if isinstance(definition, dict) and 'inputs' in definition: 95 | context = context.copy() if context else {} 96 | context['operation'] = definition 97 | scan_properties(definition['inputs'], 98 | handler, 99 | scope=scope, 100 | context=context, 101 | path='{0}.{1}.inputs'.format(path, name), 102 | replace=replace) 103 | 104 | 105 | def scan_node_operation_properties(node_template, handler, replace=False): 106 | _scan_operations(node_template['operations'], 107 | handler, 108 | scope=NODE_TEMPLATE_SCOPE, 109 | context=node_template, 110 | path='{0}.operations'.format(node_template['name']), 111 | replace=replace) 112 | for r in node_template.get('relationships', []): 113 | context = {'node_template': node_template, 'relationship': r} 114 | _scan_operations(r.get('source_operations', {}), 115 | handler, 116 | scope=NODE_TEMPLATE_RELATIONSHIP_SCOPE, 117 | context=context, 118 | path='{0}.{1}'.format(node_template['name'], 119 | r['type']), 120 | replace=replace) 121 | _scan_operations(r.get('target_operations', {}), 122 | handler, 123 | scope=NODE_TEMPLATE_RELATIONSHIP_SCOPE, 124 | context=context, 125 | path='{0}.{1}'.format(node_template['name'], 126 | r['type']), 127 | replace=replace) 128 | 129 | 130 | def scan_service_template(plan, handler, replace=False, search_secrets=False): 131 | global collect_secrets 132 | collect_secrets = search_secrets 133 | 134 | for node_template in plan.node_templates: 135 | scan_properties(node_template['properties'], 136 | handler, 137 | scope=NODE_TEMPLATE_SCOPE, 138 | context=node_template, 139 | path='{0}.properties'.format( 140 | node_template['name']), 141 | replace=replace) 142 | for name, capability in node_template.get('capabilities', {}).items(): 143 | scan_properties(capability.get('properties', {}), 144 | handler, 145 | scope=NODE_TEMPLATE_SCOPE, 146 | context=node_template, 147 | path='{0}.capabilities.{1}'.format( 148 | node_template['name'], 149 | name), 150 | replace=replace) 151 | scan_node_operation_properties(node_template, handler, replace=replace) 152 | for output_name, output in plan.outputs.iteritems(): 153 | scan_properties(output, 154 | handler, 155 | scope=OUTPUTS_SCOPE, 156 | context=plan.outputs, 157 | path='outputs.{0}'.format(output_name), 158 | replace=replace) 159 | for policy_name, policy in plan.get('policies', {}).items(): 160 | scan_properties(policy.get('properties', {}), 161 | handler, 162 | scope=POLICIES_SCOPE, 163 | context=policy, 164 | path='policies.{0}.properties'.format(policy_name), 165 | replace=replace) 166 | for group_name, scaling_group in plan.get('scaling_groups', {}).items(): 167 | scan_properties(scaling_group.get('properties', {}), 168 | handler, 169 | scope=SCALING_GROUPS_SCOPE, 170 | context=scaling_group, 171 | path='scaling_groups.{0}.properties'.format( 172 | group_name), 173 | replace=replace) 174 | 175 | if collect_secrets and len(secrets) > 0: 176 | plan['secrets'] = list(secrets) 177 | secrets.clear() 178 | -------------------------------------------------------------------------------- /dsl_parser/tasks.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import copy 17 | import json 18 | 19 | from dsl_parser import (functions, 20 | exceptions, 21 | scan, 22 | models, 23 | parser, 24 | multi_instance) 25 | from dsl_parser.multi_instance import modify_deployment 26 | 27 | 28 | __all__ = [ 29 | 'modify_deployment' 30 | ] 31 | 32 | 33 | def parse_dsl(dsl_location, 34 | resources_base_path, 35 | resolver=None, 36 | validate_version=True, 37 | additional_resources=()): 38 | return parser.parse_from_path( 39 | dsl_file_path=dsl_location, 40 | resources_base_path=resources_base_path, 41 | resolver=resolver, 42 | validate_version=validate_version, 43 | additional_resource_sources=additional_resources) 44 | 45 | 46 | def _set_plan_inputs(plan, inputs=None): 47 | inputs = inputs if inputs else {} 48 | # Verify inputs satisfied 49 | missing_inputs = [] 50 | for input_name, input_def in plan['inputs'].iteritems(): 51 | if input_name in inputs: 52 | try: 53 | str(json.dumps(inputs[input_name], ensure_ascii=False)) 54 | except UnicodeEncodeError: 55 | raise exceptions.DSLParsingInputTypeException( 56 | exceptions.ERROR_INVALID_CHARS, 57 | 'Illegal characters in input: {0}. ' 58 | 'Only valid ascii chars are supported.'.format(input_name)) 59 | else: 60 | if 'default' in input_def and input_def['default'] is not None: 61 | inputs[input_name] = input_def['default'] 62 | else: 63 | missing_inputs.append(input_name) 64 | 65 | if missing_inputs: 66 | raise exceptions.MissingRequiredInputError( 67 | "Required inputs {0} were not specified - expected " 68 | "inputs: {1}".format(missing_inputs, plan['inputs'].keys()) 69 | ) 70 | # Verify all inputs appear in plan 71 | not_expected = [input_name for input_name in inputs.keys() 72 | if input_name not in plan['inputs']] 73 | if not_expected: 74 | raise exceptions.UnknownInputError( 75 | "Unknown inputs {0} specified - " 76 | "expected inputs: {1}".format(not_expected, 77 | plan['inputs'].keys())) 78 | 79 | plan['inputs'] = inputs 80 | 81 | 82 | def _process_functions(plan): 83 | handler = functions.plan_evaluation_handler(plan) 84 | scan.scan_service_template( 85 | plan, handler, replace=True, search_secrets=True) 86 | 87 | 88 | def _validate_secrets(plan, get_secret_method): 89 | if 'secrets' not in plan: 90 | return 91 | 92 | # Mainly for local workflow that doesn't support secrets 93 | if get_secret_method is None: 94 | raise exceptions.UnsupportedGetSecretError( 95 | "The get_secret intrinsic function is not supported" 96 | ) 97 | 98 | invalid_secrets = [] 99 | for secret_key in plan['secrets']: 100 | try: 101 | get_secret_method(secret_key) 102 | except Exception as exception: 103 | if hasattr(exception, 'http_code') and exception.http_code == 404: 104 | invalid_secrets.append(secret_key) 105 | else: 106 | raise 107 | plan.pop('secrets') 108 | 109 | if invalid_secrets: 110 | raise exceptions.UnknownSecretError( 111 | "Required secrets {0} don't exist in this tenant" 112 | .format(invalid_secrets) 113 | ) 114 | 115 | 116 | def prepare_deployment_plan( 117 | plan, get_secret_method=None, inputs=None, **kwargs): 118 | """ 119 | Prepare a plan for deployment 120 | """ 121 | plan = models.Plan(copy.deepcopy(plan)) 122 | _set_plan_inputs(plan, inputs) 123 | _process_functions(plan) 124 | _validate_secrets(plan, get_secret_method) 125 | return multi_instance.create_deployment_plan(plan) 126 | -------------------------------------------------------------------------------- /dsl_parser/tests/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | -------------------------------------------------------------------------------- /dsl_parser/tests/abstract_test_parser.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | import tempfile 18 | import shutil 19 | import os 20 | import uuid 21 | from functools import wraps 22 | from multiprocessing import Process 23 | 24 | import testtools 25 | from mock import MagicMock 26 | 27 | from dsl_parser.exceptions import DSLParsingException 28 | from dsl_parser.parser import parse as dsl_parse 29 | from dsl_parser.parser import parse_from_path as dsl_parse_from_path 30 | from dsl_parser.import_resolver.default_import_resolver import \ 31 | DefaultImportResolver 32 | from dsl_parser.version import DSL_VERSION_PREFIX 33 | from dsl_parser.multi_instance import (create_deployment_plan, 34 | modify_deployment) 35 | 36 | 37 | def timeout(seconds=10): 38 | def decorator(func): 39 | def wrapper(*args, **kwargs): 40 | process = Process(None, func, None, args, kwargs) 41 | process.start() 42 | process.join(seconds) 43 | if process.is_alive(): 44 | process.terminate() 45 | raise RuntimeError( 46 | 'test timeout exceeded [timeout={0}]'.format(seconds)) 47 | if process.exitcode != 0: 48 | raise RuntimeError() 49 | return wraps(func)(wrapper) 50 | return decorator 51 | 52 | 53 | class AbstractTestParser(testtools.TestCase): 54 | BASIC_VERSION_SECTION_DSL_1_0 = """ 55 | tosca_definitions_version: cloudify_dsl_1_0 56 | """ 57 | 58 | BASIC_VERSION_SECTION_DSL_1_1 = """ 59 | tosca_definitions_version: cloudify_dsl_1_1 60 | """ 61 | 62 | BASIC_VERSION_SECTION_DSL_1_2 = """ 63 | tosca_definitions_version: cloudify_dsl_1_2 64 | """ 65 | 66 | BASIC_VERSION_SECTION_DSL_1_3 = """ 67 | tosca_definitions_version: cloudify_dsl_1_3 68 | """ 69 | 70 | BASIC_NODE_TEMPLATES_SECTION = """ 71 | node_templates: 72 | test_node: 73 | type: test_type 74 | properties: 75 | key: "val" 76 | """ 77 | 78 | BASIC_PLUGIN = """ 79 | plugins: 80 | test_plugin: 81 | executor: central_deployment_agent 82 | source: dummy 83 | """ 84 | 85 | PLUGIN_WITH_INSTALL_ARGS = """ 86 | plugins: 87 | test_plugin: 88 | executor: central_deployment_agent 89 | source: dummy 90 | install_arguments: -r requirements.txt 91 | """ 92 | 93 | BASIC_TYPE = """ 94 | node_types: 95 | test_type: 96 | interfaces: 97 | test_interface1: 98 | install: 99 | implementation: test_plugin.install 100 | inputs: {} 101 | terminate: 102 | implementation: test_plugin.terminate 103 | inputs: {} 104 | properties: 105 | install_agent: 106 | default: 'false' 107 | key: {} 108 | """ 109 | 110 | BASIC_INPUTS = """ 111 | inputs: 112 | test_input: 113 | type: string 114 | default: test_input_default_value 115 | """ 116 | 117 | BASIC_OUTPUTS = """ 118 | outputs: 119 | test_output: 120 | value: test_output_value 121 | """ 122 | 123 | # note that some tests extend the BASIC_NODE_TEMPLATES 'inline', 124 | # which is why it's appended in the end 125 | MINIMAL_BLUEPRINT = """ 126 | node_types: 127 | test_type: 128 | properties: 129 | key: 130 | default: 'default' 131 | """ + BASIC_NODE_TEMPLATES_SECTION 132 | 133 | BLUEPRINT_WITH_INTERFACES_AND_PLUGINS = BASIC_NODE_TEMPLATES_SECTION + \ 134 | BASIC_PLUGIN + BASIC_TYPE 135 | 136 | PLUGIN_WITH_INTERFACES_AND_PLUGINS_WITH_INSTALL_ARGS = \ 137 | BASIC_NODE_TEMPLATES_SECTION + PLUGIN_WITH_INSTALL_ARGS + BASIC_TYPE 138 | 139 | def setUp(self): 140 | super(AbstractTestParser, self).setUp() 141 | self._temp_dir = tempfile.mkdtemp() 142 | 143 | def tearDown(self): 144 | shutil.rmtree(self._temp_dir) 145 | super(AbstractTestParser, self).tearDown() 146 | 147 | def make_file_with_name(self, content, filename, base_dir=None): 148 | base_dir = os.path.join(self._temp_dir, base_dir) \ 149 | if base_dir else self._temp_dir 150 | filename_path = os.path.join(base_dir, filename) 151 | if not os.path.exists(base_dir): 152 | os.makedirs(base_dir) 153 | with open(filename_path, 'w') as f: 154 | f.write(content) 155 | return filename_path 156 | 157 | def make_yaml_file(self, content, as_uri=False): 158 | filename = 'tempfile{0}.yaml'.format(uuid.uuid4()) 159 | filename_path = self.make_file_with_name(content, filename) 160 | return filename_path if not as_uri else self._path2url(filename_path) 161 | 162 | def _path2url(self, path): 163 | from urllib import pathname2url 164 | from urlparse import urljoin 165 | return urljoin('file:', pathname2url(path)) 166 | 167 | def create_yaml_with_imports(self, contents, as_uri=False): 168 | yaml = """ 169 | imports:""" 170 | for content in contents: 171 | filename = self.make_yaml_file(content) 172 | yaml += """ 173 | - {0}""".format(filename if not as_uri else self._path2url(filename)) 174 | return yaml 175 | 176 | def parse(self, dsl_string, 177 | resources_base_path=None, 178 | dsl_version=BASIC_VERSION_SECTION_DSL_1_0, 179 | resolver=None, 180 | validate_version=True): 181 | # add dsl version if missing 182 | if DSL_VERSION_PREFIX not in dsl_string: 183 | dsl_string = dsl_version + dsl_string 184 | if not resolver: 185 | resolver = DefaultImportResolver() 186 | return dsl_parse(dsl_string, 187 | resources_base_path=resources_base_path, 188 | resolver=resolver, 189 | validate_version=validate_version) 190 | 191 | def parse_1_0(self, dsl_string, resources_base_path=None): 192 | return self.parse(dsl_string, resources_base_path, 193 | dsl_version=self.BASIC_VERSION_SECTION_DSL_1_0) 194 | 195 | def parse_1_1(self, dsl_string, resources_base_path=None): 196 | return self.parse(dsl_string, resources_base_path, 197 | dsl_version=self.BASIC_VERSION_SECTION_DSL_1_1) 198 | 199 | def parse_1_2(self, dsl_string, resources_base_path=None): 200 | return self.parse(dsl_string, resources_base_path, 201 | dsl_version=self.BASIC_VERSION_SECTION_DSL_1_2) 202 | 203 | def parse_1_3(self, dsl_string, resources_base_path=None): 204 | return self.parse(dsl_string, resources_base_path, 205 | dsl_version=self.BASIC_VERSION_SECTION_DSL_1_3) 206 | 207 | def parse_from_path(self, dsl_path, resources_base_path=None): 208 | return dsl_parse_from_path(dsl_path, resources_base_path) 209 | 210 | def parse_multi(self, yaml): 211 | return create_deployment_plan(self.parse_1_3(yaml)) 212 | 213 | @staticmethod 214 | def modify_multi(plan, modified_nodes): 215 | return modify_deployment( 216 | nodes=plan['nodes'], 217 | previous_nodes=plan['nodes'], 218 | previous_node_instances=plan['node_instances'], 219 | modified_nodes=modified_nodes, 220 | scaling_groups=plan['scaling_groups']) 221 | 222 | def _assert_dsl_parsing_exception_error_code( 223 | self, dsl, 224 | expected_error_code, exception_type=DSLParsingException, 225 | parsing_method=None): 226 | if not parsing_method: 227 | parsing_method = self.parse 228 | try: 229 | parsing_method(dsl) 230 | self.fail() 231 | except exception_type as ex: 232 | self.assertEquals(expected_error_code, ex.err_code) 233 | return ex 234 | 235 | def get_node_by_name(self, plan, name): 236 | return [x for x in plan.node_templates if x['name'] == name][0] 237 | 238 | @staticmethod 239 | def _sort_result_nodes(result_nodes, ordered_nodes_ids): 240 | ordered_nodes = [] 241 | 242 | for node_id in ordered_nodes_ids: 243 | for result_node in result_nodes: 244 | if result_node['id'] == node_id: 245 | ordered_nodes.append(result_node) 246 | break 247 | 248 | return ordered_nodes 249 | 250 | @staticmethod 251 | def _get_secret_mock(secret_id): 252 | secret_mock = MagicMock() 253 | secret_mock.value = secret_id + '_value' 254 | return secret_mock 255 | -------------------------------------------------------------------------------- /dsl_parser/tests/interfaces/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser.framework import parser 17 | from dsl_parser.framework.elements import Element 18 | from dsl_parser.elements import data_types, version 19 | 20 | 21 | def validate(obj, element_cls): 22 | class TestElement(Element): 23 | schema = { 24 | 'tosca_definitions_version': version.ToscaDefinitionsVersion, 25 | 'test': element_cls, 26 | 'data_types': data_types.DataTypes 27 | } 28 | obj = { 29 | 'tosca_definitions_version': 'cloudify_dsl_1_1', 30 | 'test': obj 31 | } 32 | parser.parse(obj, 33 | element_cls=TestElement, 34 | inputs={ 35 | 'validate_version': True 36 | }, 37 | strict=True) 38 | -------------------------------------------------------------------------------- /dsl_parser/tests/interfaces/test_interfaces_merger.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import testtools 17 | 18 | from dsl_parser.interfaces.interfaces_merger import InterfaceMerger 19 | from dsl_parser.interfaces.interfaces_merger import InterfacesMerger 20 | from dsl_parser.interfaces.operation_merger import OperationMerger 21 | 22 | 23 | class InterfaceMergerTest(testtools.TestCase): 24 | 25 | def _assert_interface(self, 26 | overriding_interface, 27 | overridden_interface, 28 | expected_merged_interface_keys): 29 | 30 | class MockOperationMerger(OperationMerger): 31 | 32 | def __init__(self, 33 | overriding_operation, 34 | overridden_operation): 35 | pass 36 | 37 | def merge(self): 38 | return None 39 | 40 | merger = InterfaceMerger( 41 | overriding_interface=overriding_interface, 42 | overridden_interface=overridden_interface, 43 | operation_merger=MockOperationMerger 44 | ) 45 | actual_merged_interface_keys = set(merger.merge().keys()) 46 | self.assertEqual(expected_merged_interface_keys, 47 | actual_merged_interface_keys) 48 | 49 | def test_merge_operations(self): 50 | 51 | overriding_interface = { 52 | 'stop': None 53 | } 54 | overridden_interface = { 55 | 'start': None 56 | } 57 | 58 | expected_merged_interface_keys = set(['stop', 'start']) 59 | 60 | self._assert_interface( 61 | overriding_interface=overriding_interface, 62 | overridden_interface=overridden_interface, 63 | expected_merged_interface_keys=expected_merged_interface_keys 64 | ) 65 | 66 | def test_override_operation(self): 67 | 68 | overriding_interface = { 69 | 'stop': None 70 | } 71 | overridden_interface = { 72 | 'stop': None 73 | } 74 | 75 | expected_merged_interface_keys = set(['stop']) 76 | 77 | self._assert_interface( 78 | overriding_interface=overriding_interface, 79 | overridden_interface=overridden_interface, 80 | expected_merged_interface_keys=expected_merged_interface_keys 81 | ) 82 | 83 | 84 | class InterfacesMergerTest(testtools.TestCase): 85 | 86 | def _assert_interfaces(self, 87 | overriding_interfaces, 88 | overridden_interfaces, 89 | expected_merged_interfaces_keys): 90 | 91 | class MockOperationMerger(OperationMerger): 92 | 93 | def __init__(self, 94 | overriding_operation, 95 | overridden_operation): 96 | pass 97 | 98 | def merge(self): 99 | return None 100 | 101 | merger = InterfacesMerger( 102 | overriding_interfaces=overriding_interfaces, 103 | overridden_interfaces=overridden_interfaces, 104 | operation_merger=MockOperationMerger 105 | ) 106 | actual_merged_interfaces_keys = set(merger.merge().keys()) 107 | self.assertEqual(expected_merged_interfaces_keys, 108 | actual_merged_interfaces_keys) 109 | 110 | def test_merge_interfaces(self): 111 | 112 | overriding_interfaces = { 113 | 'interface1': {} 114 | } 115 | overridden_interfaces = { 116 | 'interface2': {} 117 | } 118 | 119 | expected_merged_interfaces_keys = set(['interface1', 'interface2']) 120 | self._assert_interfaces( 121 | overriding_interfaces=overriding_interfaces, 122 | overridden_interfaces=overridden_interfaces, 123 | expected_merged_interfaces_keys=expected_merged_interfaces_keys 124 | ) 125 | 126 | def test_override_interface(self): 127 | 128 | overriding_interfaces = { 129 | 'interface1': {} 130 | } 131 | overridden_interfaces = { 132 | 'interface1': {} 133 | } 134 | 135 | expected_merged_interfaces_keys = set(['interface1']) 136 | self._assert_interfaces( 137 | overriding_interfaces=overriding_interfaces, 138 | overridden_interfaces=overridden_interfaces, 139 | expected_merged_interfaces_keys=expected_merged_interfaces_keys 140 | ) 141 | -------------------------------------------------------------------------------- /dsl_parser/tests/scaling/__init__.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import itertools 17 | 18 | from dsl_parser import rel_graph 19 | from dsl_parser.tests.abstract_test_parser import AbstractTestParser 20 | 21 | 22 | class BaseTestMultiInstance(AbstractTestParser): 23 | 24 | BASE_BLUEPRINT = """ 25 | node_types: 26 | cloudify.nodes.Compute: 27 | properties: 28 | x: 29 | default: y 30 | db: {} 31 | webserver: {} 32 | db_dependent: {} 33 | type: {} 34 | network: {} 35 | relationships: 36 | test_relationship: {} 37 | cloudify.relationships.depends_on: 38 | properties: 39 | connection_type: 40 | default: 'all_to_all' 41 | cloudify.relationships.contained_in: 42 | derived_from: cloudify.relationships.depends_on 43 | cloudify.relationships.connected_to: 44 | derived_from: cloudify.relationships.depends_on 45 | 46 | node_templates: 47 | """ 48 | 49 | @staticmethod 50 | def _relationships_by_target_name(relationships, name): 51 | return [rel for rel in relationships if rel['target_name'] == name] 52 | 53 | @staticmethod 54 | def _nodes_by_name(nodes, name): 55 | return [node for node in nodes if node['name'] == name] 56 | 57 | @staticmethod 58 | def _node_ids(nodes): 59 | return [node['id'] for node in nodes] 60 | 61 | def _assert_each_node_valid_hosted(self, nodes, hosts): 62 | node_ids = self._node_ids(nodes) 63 | host_ids = self._node_ids(hosts) 64 | self.assertEqual(len(node_ids) % len(host_ids), 0) 65 | self.assertEqual(len(node_ids), len(set(node_ids))) 66 | node_host_ids = [node['host_id'] for node in nodes] 67 | for node_host_id in node_host_ids: 68 | self.assertIn(node_host_id, host_ids) 69 | 70 | def key_fun(n): 71 | return n['host_id'] 72 | 73 | for _, g in itertools.groupby(sorted(nodes, key=key_fun), key=key_fun): 74 | self.assertEqual(len(list(g)), len(node_ids) / len(host_ids)) 75 | 76 | def _assert_contained(self, source_relationships, node_ids, target_name): 77 | relationships = self._relationships_by_target_name( 78 | source_relationships, target_name) 79 | target_ids = [rel['target_id'] for rel in relationships] 80 | self.assertEqual(set(node_ids), set(target_ids)) 81 | 82 | def _assert_all_to_one(self, source_relationships, node_ids, target_name): 83 | relationships = self._relationships_by_target_name( 84 | source_relationships, target_name) 85 | target_ids = [rel['target_id'] for rel in relationships] 86 | self.assertEqual(1, len(set(target_ids))) 87 | self.assertIn(target_ids[0], node_ids) 88 | return target_ids[0] 89 | 90 | def _assert_all_to_all(self, source_relationships_lists, 91 | node_ids, target_name): 92 | for source_relationships in source_relationships_lists: 93 | relationships = self._relationships_by_target_name( 94 | source_relationships, target_name) 95 | target_ids = [rel['target_id'] for rel in relationships] 96 | self.assertEqual(set(node_ids), set(target_ids)) 97 | 98 | @staticmethod 99 | def _nodes_relationships(nodes, target_name=None): 100 | relationships = [] 101 | for node in nodes: 102 | for rel in node['relationships']: 103 | if target_name and rel['target_name'] != target_name: 104 | continue 105 | relationships.append(rel) 106 | return relationships 107 | 108 | def _assert_added_not_in_previous(self, plan, modification): 109 | plan_node_graph = rel_graph.build_node_graph( 110 | nodes=plan['nodes'], 111 | scaling_groups=plan['scaling_groups']) 112 | previous_node_instances = plan['node_instances'] 113 | added_and_related = modification['added_and_related'] 114 | previous_graph, _ = rel_graph.build_previous_deployment_node_graph( 115 | plan_node_graph=plan_node_graph, 116 | previous_node_instances=previous_node_instances) 117 | added_nodes_graph, _ = rel_graph.build_previous_deployment_node_graph( 118 | plan_node_graph=plan_node_graph, 119 | previous_node_instances=added_and_related) 120 | for instance_id, data in added_nodes_graph.nodes_iter(data=True): 121 | instance = data['node'] 122 | if instance.get('modification') == 'added': 123 | self.assertNotIn(instance_id, previous_graph) 124 | else: 125 | self.assertIn(instance_id, previous_graph) 126 | for source, target, in added_nodes_graph.edges_iter(): 127 | self.assertFalse(previous_graph.has_edge(source, target)) 128 | 129 | def _assert_removed_in_previous(self, plan, modification): 130 | plan_node_graph = rel_graph.build_node_graph( 131 | nodes=plan['nodes'], 132 | scaling_groups=plan['scaling_groups']) 133 | previous_node_instances = plan['node_instances'] 134 | removed_and_related = modification['removed_and_related'] 135 | previous_graph, _ = rel_graph.build_previous_deployment_node_graph( 136 | plan_node_graph=plan_node_graph, 137 | previous_node_instances=previous_node_instances) 138 | removed_nodes_graph, _ = rel_graph.build_previous_deployment_node_graph( # noqa 139 | plan_node_graph=plan_node_graph, 140 | previous_node_instances=removed_and_related) 141 | for instance_id, data in removed_nodes_graph.nodes_iter(data=True): 142 | self.assertIn(instance_id, previous_graph) 143 | for source, target, in removed_nodes_graph.edges_iter(): 144 | self.assertTrue(previous_graph.has_edge(source, target)) 145 | 146 | def _assert_modification(self, 147 | modification, 148 | expected_added_and_related_count, 149 | expected_removed_and_related_count, 150 | expected_added_count, 151 | expected_removed_count): 152 | added_and_related = modification['added_and_related'] 153 | removed_and_related = modification['removed_and_related'] 154 | added = [instance for instance in added_and_related 155 | if instance.get('modification') == 'added'] 156 | removed = [instance for instance in removed_and_related 157 | if instance.get('modification') == 'removed'] 158 | 159 | self.assertEqual(expected_added_and_related_count, 160 | len(added_and_related)) 161 | self.assertEqual(expected_removed_and_related_count, 162 | len(removed_and_related)) 163 | self.assertEqual(expected_added_count, 164 | len(added)) 165 | self.assertEqual(expected_removed_count, 166 | len(removed)) 167 | -------------------------------------------------------------------------------- /dsl_parser/tests/test_deployment_update.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | import copy 16 | 17 | from dsl_parser.multi_instance import modify_deployment 18 | from dsl_parser.tests.abstract_test_parser import AbstractTestParser 19 | 20 | 21 | class TestDeploymentUpdate(AbstractTestParser): 22 | BASE_BLUEPRINT = """ 23 | node_types: 24 | cloudify.nodes.Compute: 25 | properties: 26 | x: 27 | default: y 28 | db: {} 29 | webserver: {} 30 | db_dependent: {} 31 | type: {} 32 | network: {} 33 | relationships: 34 | cloudify.relationships.depends_on: 35 | properties: 36 | connection_type: 37 | default: 'all_to_all' 38 | cloudify.relationships.contained_in: 39 | derived_from: cloudify.relationships.depends_on 40 | cloudify.relationships.connected_to: 41 | derived_from: cloudify.relationships.depends_on 42 | 43 | node_templates: 44 | """ 45 | 46 | BASE_NODES = """ 47 | without_rel: 48 | type: type 49 | with_rel: 50 | type: type 51 | """ 52 | 53 | @staticmethod 54 | def modify_multi(plan, modified_nodes): 55 | return modify_deployment( 56 | nodes=modified_nodes, 57 | previous_nodes=plan['nodes'], 58 | previous_node_instances=plan['node_instances'], 59 | modified_nodes=(), 60 | scaling_groups={}) 61 | 62 | def test_add_node(self): 63 | blueprint = self.BASE_BLUEPRINT + self.BASE_NODES 64 | 65 | plan = self.parse_multi(blueprint) 66 | plan['nodes'].append({ 67 | 'name': 'new_node', 68 | 'id': 'new_node', 69 | 'type': 'new_type', 70 | 'number_of_instances': 1, 71 | 'deploy_number_of_instances': 1, 72 | 'min_number_of_instances': 1, 73 | 'max_number_of_instances': 1, 74 | 'relationships': [ 75 | {'type': 'cloudify.relationships.connected_to', 76 | 'target_id': 'without_rel', 77 | 'type_hierarchy': ['cloudify.relationships.connected_to'], 78 | 'properties': { 79 | 'connection_type': 'all_to_all' 80 | }, 81 | } 82 | ] 83 | }) 84 | 85 | modified_nodes = plan['nodes'] 86 | node_instances = self.modify_multi(plan, modified_nodes=modified_nodes) 87 | 88 | self.assertEqual(len(node_instances['added_and_related']), 2) 89 | added_and_related = node_instances['added_and_related'] 90 | added = [n for n in added_and_related if 'modification' in n] 91 | related = [n for n in added_and_related if n not in added] 92 | self.assertEqual(len(added), 1) 93 | self.assertEqual(len(related), 1) 94 | self.assertEqual(len(node_instances['removed_and_related']), 0) 95 | self.assertEqual(len(node_instances['extended_and_related']), 0) 96 | self.assertEqual(len(node_instances['reduced_and_related']), 0) 97 | 98 | def test_remove_node(self): 99 | blueprint = self.BASE_BLUEPRINT + self.BASE_NODES + """ 100 | relationships: 101 | - type: cloudify.relationships.connected_to 102 | target: without_rel 103 | """ 104 | 105 | plan = self.parse_multi(blueprint) 106 | nodes = \ 107 | copy.deepcopy( 108 | [n for n in plan['nodes'] if n['id'] != 'without_rel']) 109 | with_rel_node = nodes[0] 110 | with_rel_node['relationships'] = [r for r in 111 | with_rel_node['relationships'] 112 | if r['target_id'] != 'without_rel'] 113 | node_instances = self.modify_multi(plan, modified_nodes=nodes) 114 | 115 | self.assertEqual(len(node_instances['added_and_related']), 0) 116 | self.assertEqual(len(node_instances['removed_and_related']), 2) 117 | removed_and_related = node_instances['removed_and_related'] 118 | removed = [n for n in removed_and_related if 'modification' in n] 119 | related = [n for n in removed_and_related if n not in removed] 120 | self.assertEqual(len(removed), 1) 121 | self.assertEqual(len(related), 1) 122 | self.assertEqual(len(node_instances['extended_and_related']), 0) 123 | self.assertEqual(len(node_instances['reduced_and_related']), 1) 124 | reduced_and_related = node_instances['reduced_and_related'] 125 | reduced = [n for n in reduced_and_related if 'modification' in n] 126 | self.assertEqual(len(reduced), 1) 127 | 128 | def test_add_relationship(self): 129 | blueprint = self.BASE_BLUEPRINT + self.BASE_NODES 130 | 131 | rel_type = 'cloudify.relationships.connected_to' 132 | plan = self.parse_multi(blueprint) 133 | 134 | with_rel = [n for n in plan['nodes'] if n['id'] == 'with_rel'][0] 135 | without_rel = [n for n in plan['nodes'] if n['id'] == 'without_rel'][0] 136 | with_rel['relationships'] = \ 137 | [{'type': rel_type, 138 | 'type_hierarchy': [rel_type], 139 | 'target_id': without_rel['id'], 140 | 'source_interface': { 141 | 'cloudify.interfaces.relationship_lifecycle': { 142 | 'preconfigure': 'scripts/increment.sh', 143 | 'establish': 'scripts/increment.sh', 144 | 'postconfigure': 'scripts/increment.sh' 145 | } 146 | }, 147 | 'properties': { 148 | 'connection_type': 'all_to_all' 149 | }}] 150 | modified_nodes = [with_rel, without_rel] 151 | node_instances = self.modify_multi(plan, modified_nodes=modified_nodes) 152 | 153 | self.assertEqual(len(node_instances['added_and_related']), 0) 154 | self.assertEqual(len(node_instances['removed_and_related']), 0) 155 | self.assertEqual(len(node_instances['extended_and_related']), 2) 156 | extended_and_related = node_instances['extended_and_related'] 157 | extended = [n for n in extended_and_related if 'modification' in n] 158 | related = [n for n in extended_and_related if n not in extended] 159 | self.assertEqual(len(extended), 1) 160 | self.assertEqual(len(related), 1) 161 | self.assertEqual(len(node_instances['reduced_and_related']), 0) 162 | 163 | def test_remove_relationship(self): 164 | blueprint = self.BASE_BLUEPRINT + self.BASE_NODES + """ 165 | relationships: 166 | - type: cloudify.relationships.connected_to 167 | target: without_rel 168 | """ 169 | 170 | plan = self.parse_multi(blueprint) 171 | 172 | nodes = copy.deepcopy(plan['nodes']) 173 | node_with_rel = [n for n in nodes if n['id'] == 'with_rel'][0] 174 | relationships = [r for r in node_with_rel['relationships'] 175 | if r['target_id'] != 'without_rel'] 176 | node_with_rel['relationships'] = relationships 177 | 178 | node_instances = self.modify_multi(plan, modified_nodes=nodes) 179 | 180 | self.assertEqual(len(node_instances['added_and_related']), 0) 181 | self.assertEqual(len(node_instances['removed_and_related']), 0) 182 | self.assertEqual(len(node_instances['extended_and_related']), 0) 183 | self.assertEqual(len(node_instances['reduced_and_related']), 2) 184 | reduced_and_related = node_instances['reduced_and_related'] 185 | reduced = [n for n in reduced_and_related if 'modification' in n] 186 | related = [n for n in reduced_and_related if n not in reduced] 187 | self.assertEqual(len(reduced), 1) 188 | self.assertEqual(len(related), 1) 189 | -------------------------------------------------------------------------------- /dsl_parser/tests/test_import_resolver.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import testtools 17 | 18 | from dsl_parser import utils 19 | from dsl_parser.constants import RESOLVER_IMPLEMENTATION_KEY, \ 20 | RESLOVER_PARAMETERS_KEY 21 | from dsl_parser.import_resolver.abstract_import_resolver import \ 22 | AbstractImportResolver 23 | from dsl_parser.import_resolver.default_import_resolver import \ 24 | DefaultResolverValidationException, DefaultImportResolver, \ 25 | DEFAULT_RESLOVER_RULES_KEY 26 | 27 | default_resolver_class_path = "%s:%s" % ( 28 | DefaultImportResolver.__module__, DefaultImportResolver.__name__) 29 | 30 | 31 | class MockCustomImportResolverException(Exception): 32 | pass 33 | 34 | 35 | class CustomImportResolver(AbstractImportResolver): 36 | def __init__(self, custom_reolver_parameters): 37 | self.custom_resolver_parameters = custom_reolver_parameters 38 | 39 | def resolve(self, import_url): 40 | pass 41 | 42 | 43 | custom_resolver_class_path = "%s:%s" % ( 44 | CustomImportResolver.__module__, CustomImportResolver.__name__) 45 | 46 | 47 | class CustomImportResolverWithoutInit(AbstractImportResolver): 48 | def resolve(self, import_url): 49 | pass 50 | 51 | 52 | custom_no_init_resolver_class_path = "%s:%s" % ( 53 | CustomImportResolverWithoutInit.__module__, 54 | CustomImportResolverWithoutInit.__name__) 55 | 56 | 57 | class FailedToInitializeCustomImportResolver(AbstractImportResolver): 58 | def __init__(self): 59 | raise MockCustomImportResolverException('mock exception') 60 | 61 | def resolve(self, import_url): 62 | pass 63 | 64 | 65 | failed_custom_resolver_class_path = "%s:%s" % ( 66 | FailedToInitializeCustomImportResolver.__module__, 67 | FailedToInitializeCustomImportResolver.__name__) 68 | 69 | 70 | class CreateImportResolverTests(testtools.TestCase): 71 | 72 | def _test_create_import_resolver(self, 73 | resolver_configuration=None, 74 | expected_resolver=None, 75 | expected_params_name=None, 76 | err_msg_regex=None): 77 | if expected_resolver: 78 | resolver = utils.create_import_resolver(resolver_configuration) 79 | self.assertEqual(resolver.__class__, expected_resolver.__class__) 80 | if expected_params_name: 81 | self.assertEqual( 82 | resolver.__getattribute__(expected_params_name), 83 | expected_resolver.__getattribute__(expected_params_name)) 84 | else: 85 | self.assertRaisesRegexp(utils.ResolverInstantiationError, 86 | err_msg_regex, 87 | utils.create_import_resolver, 88 | resolver_configuration) 89 | 90 | def test_no_configuration_specified(self): 91 | self._test_create_import_resolver( 92 | expected_resolver=DefaultImportResolver(), 93 | expected_params_name=DEFAULT_RESLOVER_RULES_KEY) 94 | 95 | def test_specified_default_class_path_and_params(self): 96 | parameters = { 97 | DEFAULT_RESLOVER_RULES_KEY: [{'rules1key': 'rules1value'}] 98 | } 99 | resolver_configuration = { 100 | RESOLVER_IMPLEMENTATION_KEY: default_resolver_class_path, 101 | RESLOVER_PARAMETERS_KEY: parameters 102 | } 103 | self._test_create_import_resolver( 104 | resolver_configuration=resolver_configuration, 105 | expected_resolver=DefaultImportResolver(**parameters), 106 | expected_params_name=DEFAULT_RESLOVER_RULES_KEY) 107 | 108 | def test_specified_default_class_path_no_params(self): 109 | resolver_configuration = { 110 | RESOLVER_IMPLEMENTATION_KEY: default_resolver_class_path 111 | } 112 | self._test_create_import_resolver( 113 | resolver_configuration=resolver_configuration, 114 | expected_resolver=DefaultImportResolver(), 115 | expected_params_name=DEFAULT_RESLOVER_RULES_KEY) 116 | 117 | def test_specified_params_no_class_path(self): 118 | parameters = { 119 | DEFAULT_RESLOVER_RULES_KEY: [{'rules1key': 'rules1value'}] 120 | } 121 | resolver_configuration = { 122 | RESLOVER_PARAMETERS_KEY: parameters 123 | } 124 | self._test_create_import_resolver( 125 | resolver_configuration=resolver_configuration, 126 | expected_resolver=DefaultImportResolver(**parameters), 127 | expected_params_name=DEFAULT_RESLOVER_RULES_KEY) 128 | 129 | def test_create_custom_resolver(self): 130 | parameters = { 131 | 'custom_reolver_parameters': {} 132 | } 133 | resolver_configuration = { 134 | RESOLVER_IMPLEMENTATION_KEY: custom_resolver_class_path, 135 | RESLOVER_PARAMETERS_KEY: parameters 136 | } 137 | self._test_create_import_resolver( 138 | resolver_configuration=resolver_configuration, 139 | expected_resolver=CustomImportResolver( 140 | custom_reolver_parameters={}), 141 | expected_params_name='custom_resolver_parameters') 142 | 143 | def test_create_custom_resolver_without_init(self): 144 | resolver_configuration = { 145 | RESOLVER_IMPLEMENTATION_KEY: custom_no_init_resolver_class_path, 146 | } 147 | self._test_create_import_resolver( 148 | resolver_configuration=resolver_configuration, 149 | expected_resolver=CustomImportResolverWithoutInit() 150 | ) 151 | 152 | def test_failed_to_initialize_default_resolver(self): 153 | 154 | def mock_default_resolver_init(*_): 155 | raise DefaultResolverValidationException('mock exception') 156 | 157 | resolver_configuration = { 158 | DEFAULT_RESLOVER_RULES_KEY: '' 159 | } 160 | original_init = DefaultImportResolver.__init__ 161 | DefaultImportResolver.__init__ = mock_default_resolver_init 162 | try: 163 | self._test_create_import_resolver( 164 | resolver_configuration=resolver_configuration, 165 | err_msg_regex='Failed to instantiate resolver ' 166 | '\({0}\)\. ' 167 | 'mock exception' 168 | .format(DefaultImportResolver.__name__)) 169 | finally: 170 | DefaultImportResolver.__init__ = original_init 171 | 172 | def test_failed_to_initialize_custom_resolver(self): 173 | resolver_configuration = { 174 | RESOLVER_IMPLEMENTATION_KEY: failed_custom_resolver_class_path, 175 | } 176 | self._test_create_import_resolver( 177 | resolver_configuration=resolver_configuration, 178 | err_msg_regex='Failed to instantiate resolver ' 179 | '\({0}\).*mock exception' 180 | .format(failed_custom_resolver_class_path)) 181 | 182 | def test_create_resolver_illegal_params_type(self): 183 | resolver_configuration = { 184 | RESOLVER_IMPLEMENTATION_KEY: default_resolver_class_path, 185 | RESLOVER_PARAMETERS_KEY: 'wrong parameters type' 186 | } 187 | self._test_create_import_resolver( 188 | resolver_configuration=resolver_configuration, 189 | err_msg_regex='Invalid parameters supplied for the ' 190 | 'resolver \({0}\): parameters must be ' 191 | 'a dictionary and not str' 192 | .format(default_resolver_class_path)) 193 | 194 | def test_create_default_resolver_illegal_params(self): 195 | resolver_configuration = { 196 | RESOLVER_IMPLEMENTATION_KEY: default_resolver_class_path, 197 | RESLOVER_PARAMETERS_KEY: {'wrong parameter name': ''} 198 | } 199 | self._test_create_import_resolver( 200 | resolver_configuration=resolver_configuration, 201 | err_msg_regex='Failed to instantiate resolver \({0}\).*' 202 | '__init__\(\) got an unexpected keyword argument ' 203 | '\'wrong parameter name\'' 204 | .format(default_resolver_class_path)) 205 | 206 | def test_create_resolver_illegal_class_path(self): 207 | 208 | resolver_configuration = { 209 | RESOLVER_IMPLEMENTATION_KEY: 'wrong class path', 210 | } 211 | self._test_create_import_resolver( 212 | resolver_configuration=resolver_configuration, 213 | err_msg_regex='Failed to instantiate resolver ' 214 | '\(wrong class path\).*Invalid class path') 215 | -------------------------------------------------------------------------------- /dsl_parser/tests/test_parse_with_resolver.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser.tests.abstract_test_parser import AbstractTestParser 17 | from dsl_parser.import_resolver.abstract_import_resolver import \ 18 | AbstractImportResolver 19 | 20 | BLUEPRINT_1 = """ 21 | node_types: 22 | resolver_type_1: 23 | properties: 24 | key: 25 | default: 'default' 26 | node_templates: 27 | resolver_1: 28 | type: resolver_type_1 29 | properties: 30 | key: value_1 31 | """ 32 | BLUEPRINT_2 = """ 33 | node_types: 34 | resolver_type_2: 35 | properties: 36 | key: 37 | default: 'default' 38 | """ 39 | 40 | 41 | class TestParseWithResolver(AbstractTestParser): 42 | 43 | def test_parse_using_resolver(self): 44 | 45 | yaml_to_parse = """ 46 | imports: 47 | - http://url1 48 | - http://url2""" 49 | 50 | urls = [] 51 | 52 | class CustomResolver(AbstractImportResolver): 53 | def resolve(self, url): 54 | urls.append(url) 55 | if len(urls) == 2: 56 | return BLUEPRINT_2 57 | return BLUEPRINT_1 58 | custom_resolver = CustomResolver() 59 | self.parse(yaml_to_parse, resolver=custom_resolver) 60 | 61 | self.assertEqual(len(urls), 2) 62 | self.assertIn('http://url1', urls) 63 | self.assertIn('http://url2', urls) 64 | -------------------------------------------------------------------------------- /dsl_parser/tests/test_plugins.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | from yaml import safe_load as yaml_load 18 | from yaml import safe_dump as yaml_dump 19 | 20 | from dsl_parser.exceptions import DSLParsingLogicException 21 | from dsl_parser import constants 22 | from dsl_parser.tests.abstract_test_parser import AbstractTestParser 23 | 24 | 25 | class PluginsTest(AbstractTestParser): 26 | 27 | def test_plugin_with_install_true_existing_source(self): 28 | self._test(install=True, 29 | source='dummy') 30 | 31 | def test_plugin_with_install_true_existing_package_name(self): 32 | self._test(install=True, 33 | package_name='package') 34 | 35 | def test_plugin_with_install_false_existing_source(self): 36 | self._test(install=False, 37 | source='dummy') 38 | 39 | def test_plugin_with_install_false_existing_package_name(self): 40 | self._test(install=False, 41 | package_name='package') 42 | 43 | def test_plugin_with_install_false_missing_source_and_package(self): 44 | self._test(install=False) 45 | 46 | def test_plugin_with_missing_install_existing_source(self): 47 | self._test(source='dummy') 48 | 49 | def test_plugin_with_missing_install_existing_package(self): 50 | self._test(package_name='package') 51 | 52 | def test_plugin_with_missing_install_missing_source_and_package(self): 53 | self._test(expected_error_code=50) 54 | 55 | def test_plugin_with_install_true_missing_source_and_package(self): 56 | self._test(install=True, expected_error_code=50) 57 | 58 | def _test(self, install=None, source=None, package_name=None, 59 | expected_error_code=None): 60 | yaml = """ 61 | plugins: 62 | test_plugin: {} 63 | 64 | node_templates: 65 | test_node: 66 | type: type 67 | interfaces: 68 | test_interface1: 69 | install: test_plugin.install 70 | 71 | node_types: 72 | type: {} 73 | """ 74 | raw_parsed_yaml = yaml_load(yaml) 75 | plugin = { 76 | 'executor': 'central_deployment_agent' 77 | } 78 | 79 | if install is not None: 80 | plugin['install'] = install 81 | if source is not None: 82 | plugin['source'] = source 83 | if package_name is not None: 84 | plugin['package_name'] = package_name 85 | raw_parsed_yaml['plugins']['test_plugin'] = plugin 86 | result = yaml_dump(raw_parsed_yaml) 87 | yaml = '\n{0}'.format(result) 88 | if expected_error_code: 89 | self._assert_dsl_parsing_exception_error_code( 90 | yaml, expected_error_code, DSLParsingLogicException) 91 | else: 92 | result = self.parse_1_2(yaml) 93 | plugin = result['nodes'][0][ 94 | constants.DEPLOYMENT_PLUGINS_TO_INSTALL][0] 95 | if install is not None: 96 | self.assertEqual(install, plugin['install']) 97 | if source is not None: 98 | self.assertEqual(source, plugin['source']) 99 | if package_name is not None: 100 | self.assertEqual(package_name, plugin['package_name']) 101 | -------------------------------------------------------------------------------- /dsl_parser/tests/test_register_function.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser import functions 17 | from dsl_parser.tasks import prepare_deployment_plan 18 | from dsl_parser.tests.abstract_test_parser import AbstractTestParser 19 | 20 | 21 | class TestFunctionRegistration(AbstractTestParser): 22 | 23 | def setUp(self): 24 | super(TestFunctionRegistration, self).setUp() 25 | self.addCleanup(self.cleanup) 26 | 27 | def cleanup(self): 28 | functions.unregister('to_upper') 29 | 30 | def test_registration(self): 31 | @functions.register(name='to_upper') 32 | class ToUpper(functions.Function): 33 | 34 | def __init__(self, args, **kwargs): 35 | self.arg = None 36 | super(ToUpper, self).__init__(args, **kwargs) 37 | 38 | def parse_args(self, args): 39 | self.arg = args 40 | 41 | def evaluate_runtime(self, storage): 42 | return self.evaluate(plan=None) 43 | 44 | def evaluate(self, plan): 45 | if functions.parse(self.arg) != self.arg: 46 | return self.raw 47 | return str(self.arg).upper() 48 | 49 | def validate(self, plan): 50 | pass 51 | 52 | yaml = """ 53 | node_types: 54 | webserver_type: 55 | properties: 56 | property: 57 | default: property_value 58 | node_templates: 59 | webserver: 60 | type: webserver_type 61 | outputs: 62 | output1: 63 | value: { to_upper: first } 64 | output2: 65 | value: { to_upper: { get_property: [webserver, property] } } 66 | output3: 67 | value: { to_upper: { get_attribute: [webserver, attribute] } } 68 | output4: 69 | value: { to_upper: { get_secret: secret } } 70 | """ 71 | parsed = prepare_deployment_plan(self.parse(yaml), 72 | self._get_secret_mock) 73 | outputs = parsed['outputs'] 74 | self.assertEqual('FIRST', outputs['output1']['value']) 75 | self.assertEqual('PROPERTY_VALUE', outputs['output2']['value']) 76 | self.assertEqual({'to_upper': {'get_attribute': ['webserver', 77 | 'attribute']}}, 78 | outputs['output3']['value']) 79 | 80 | def get_node_instances(node_id=None): 81 | return [ 82 | NodeInstance({ 83 | 'id': 'webserver1', 84 | 'node_id': 'webserver', 85 | 'runtime_properties': { 86 | 'attribute': 'attribute_value' 87 | } 88 | }) 89 | ] 90 | 91 | def get_node_instance(node_instance_id): 92 | return get_node_instances()[0] 93 | 94 | def get_node(node_id): 95 | return Node({'id': node_id}) 96 | 97 | o = functions.evaluate_outputs(parsed['outputs'], 98 | get_node_instances, 99 | get_node_instance, 100 | get_node, 101 | self._get_secret_mock) 102 | 103 | self.assertEqual('FIRST', o['output1']) 104 | self.assertEqual('PROPERTY_VALUE', o['output2']) 105 | self.assertEqual('ATTRIBUTE_VALUE', o['output3']) 106 | self.assertEqual('SECRET_VALUE', o['output4']) 107 | 108 | 109 | class NodeInstance(dict): 110 | @property 111 | def id(self): 112 | return self.get('id') 113 | 114 | @property 115 | def node_id(self): 116 | return self.get('node_id') 117 | 118 | @property 119 | def runtime_properties(self): 120 | return self.get('runtime_properties') 121 | 122 | 123 | class Node(dict): 124 | @property 125 | def id(self): 126 | return self.get('id') 127 | 128 | @property 129 | def properties(self): 130 | return self.get('properties', {}) 131 | -------------------------------------------------------------------------------- /dsl_parser/tests/test_yaml_anchors.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from dsl_parser.tests.abstract_test_parser import AbstractTestParser 17 | 18 | 19 | class TestAnchors(AbstractTestParser): 20 | 21 | @staticmethod 22 | def _get_node_properties(plan, id): 23 | return next((node['properties'] for node in plan['nodes'] 24 | if node['id'] == id)) 25 | 26 | def test_anchors_append(self): 27 | bp_yaml = """ 28 | node_types: 29 | my_type: 30 | properties: 31 | prop1: 32 | default: 0 33 | prop2: 34 | default: 0 35 | 36 | node_templates: 37 | node1: 38 | type: my_type 39 | properties: &props1 40 | prop1: 1 41 | node2: 42 | type: my_type 43 | properties: 44 | <<: *props1 45 | prop2: 2 46 | node3: 47 | type: my_type 48 | properties: 49 | <<: *props1 50 | prop2: 3 51 | """ 52 | parsed_plan = self.parse(bp_yaml) 53 | 54 | expected_node_properties = { 55 | 'node1': {'prop1': 1, 'prop2': 0}, 56 | 'node2': {'prop1': 1, 'prop2': 2}, 57 | 'node3': {'prop1': 1, 'prop2': 3} 58 | } 59 | 60 | for node, expected_value in expected_node_properties.iteritems(): 61 | self.assertEquals(expected_value, 62 | self._get_node_properties(parsed_plan, node)) 63 | 64 | def test_anchors_override(self): 65 | bp_yaml = """ 66 | node_types: 67 | my_type: 68 | properties: 69 | prop1: 70 | default: 0 71 | prop2: 72 | default: 0 73 | 74 | node_templates: 75 | node1: 76 | type: my_type 77 | properties: &props1 78 | prop1: 1 79 | prop2: 1 80 | node2: 81 | type: my_type 82 | properties: &props2 83 | <<: *props1 84 | prop2: 2 85 | node3: 86 | type: my_type 87 | properties: 88 | <<: *props2 89 | prop2: 3 90 | """ 91 | parsed_plan = self.parse(bp_yaml) 92 | 93 | expected_node_properties = { 94 | 'node1': {'prop1': 1, 'prop2': 1}, 95 | 'node2': {'prop1': 1, 'prop2': 2}, 96 | 'node3': {'prop1': 1, 'prop2': 3} 97 | } 98 | 99 | for node_id, expected_value in expected_node_properties.iteritems(): 100 | self.assertEquals(expected_value, 101 | self._get_node_properties(parsed_plan, node_id)) 102 | -------------------------------------------------------------------------------- /dsl_parser/tests/utils.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | import os 17 | 18 | from dsl_parser.parser import parse_from_path as _parse_from_path 19 | from dsl_parser.tests.resources import dsl 20 | 21 | 22 | def parse_dsl_resource(path): 23 | 24 | dsl_dir = os.path.dirname(dsl.__file__) 25 | return _parse_from_path( 26 | dsl_file_path=os.path.join(dsl_dir, path), 27 | ) 28 | -------------------------------------------------------------------------------- /dsl_parser/version.py: -------------------------------------------------------------------------------- 1 | import collections 2 | 3 | from dsl_parser.exceptions import DSLParsingLogicException 4 | 5 | VERSION = 'tosca_definitions_version' 6 | DSL_VERSION_PREFIX = 'cloudify_dsl_' 7 | DSL_VERSION_1_0 = DSL_VERSION_PREFIX + '1_0' 8 | DSL_VERSION_1_1 = DSL_VERSION_PREFIX + '1_1' 9 | DSL_VERSION_1_2 = DSL_VERSION_PREFIX + '1_2' 10 | DSL_VERSION_1_3 = DSL_VERSION_PREFIX + '1_3' 11 | SUPPORTED_VERSIONS = [ 12 | DSL_VERSION_1_0, 13 | DSL_VERSION_1_1, 14 | DSL_VERSION_1_2, 15 | DSL_VERSION_1_3 16 | ] 17 | 18 | 19 | def validate_dsl_version(dsl_version): 20 | if dsl_version not in SUPPORTED_VERSIONS: 21 | raise DSLParsingLogicException( 22 | 29, 'Unexpected tosca_definitions_version {0}; Currently ' 23 | 'supported versions are: {1}'.format(dsl_version, 24 | SUPPORTED_VERSIONS)) 25 | 26 | 27 | def parse_dsl_version(dsl_version): 28 | 29 | if not dsl_version: 30 | raise DSLParsingLogicException(71, '{0} is missing or empty' 31 | .format(VERSION)) 32 | 33 | if not isinstance(dsl_version, basestring): 34 | raise DSLParsingLogicException(72, 'Invalid {0}: {1} is not a string' 35 | .format(VERSION, dsl_version)) 36 | 37 | # handle the 'dsl_version_' prefix 38 | if dsl_version.startswith(DSL_VERSION_PREFIX): 39 | short_dsl_version = dsl_version[len(DSL_VERSION_PREFIX):] 40 | else: 41 | raise DSLParsingLogicException(73, "Invalid {0}: '{1}', expected a " 42 | "value following this format: '{2}'" 43 | .format(VERSION, dsl_version, 44 | DSL_VERSION_1_0)) 45 | 46 | if not short_dsl_version.__contains__("_"): 47 | raise DSLParsingLogicException(73, "Invalid {0}: '{1}', expected a " 48 | "value following this format: '{2}'" 49 | .format(VERSION, dsl_version, 50 | DSL_VERSION_1_0)) 51 | 52 | version_parts = short_dsl_version.split('_') 53 | version_details = collections.namedtuple('version_details', 54 | ['major', 'minor', 'micro']) 55 | major = version_parts[0] 56 | minor = version_parts[1] 57 | micro = None 58 | if len(version_parts) > 2: 59 | micro = version_parts[2] 60 | 61 | if not major.isdigit(): 62 | raise DSLParsingLogicException(74, 63 | "Invalid {0}: '{1}', major version " 64 | "is '{2}' while expected to be a number" 65 | .format(VERSION, dsl_version, major)) 66 | 67 | if not minor.isdigit(): 68 | raise DSLParsingLogicException(75, 69 | "Invalid {0}: '{1}', minor version " 70 | "is '{2}' while expected to be a number" 71 | .format(VERSION, dsl_version, minor)) 72 | 73 | if micro and not micro.isdigit(): 74 | raise DSLParsingLogicException(76, 75 | "Invalid {0}: '{1}', micro version " 76 | "is '{2}' while expected to be a number" 77 | .format(VERSION, dsl_version, micro)) 78 | 79 | return version_details(int(major), int(minor), 80 | int(micro) if micro else None) 81 | 82 | 83 | def process_dsl_version(dsl_version): 84 | version_definitions_name = DSL_VERSION_PREFIX[:-1] 85 | version_definitions_version = parse_dsl_version(dsl_version) 86 | if version_definitions_version.micro is None: 87 | version_definitions_version = (version_definitions_version.major, 88 | version_definitions_version.minor) 89 | return { 90 | 'raw': dsl_version, 91 | 'definitions_name': version_definitions_name, 92 | 'definitions_version': tuple(version_definitions_version) 93 | } 94 | 95 | 96 | def version_description(dsl_version_tuple): 97 | version = [] 98 | for i in range(2): 99 | if i < len(dsl_version_tuple): 100 | version.append(dsl_version_tuple[i]) 101 | else: 102 | version.append(0) 103 | major, minor = version 104 | return '{0}{1}_{2}'.format(DSL_VERSION_PREFIX, major, minor) 105 | -------------------------------------------------------------------------------- /dsl_parser/yaml_loader.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | from yaml.reader import Reader 17 | from yaml.scanner import Scanner 18 | from yaml.composer import Composer 19 | from yaml.resolver import Resolver 20 | from yaml.parser import Parser 21 | from yaml.constructor import SafeConstructor 22 | 23 | from dsl_parser import holder 24 | from .exceptions import DSLParsingInputTypeException, ERROR_INVALID_CHARS 25 | 26 | 27 | class HolderConstructor(SafeConstructor): 28 | 29 | def __init__(self, filename): 30 | SafeConstructor.__init__(self) 31 | self.filename = filename 32 | 33 | def construct_yaml_null(self, node): 34 | obj = SafeConstructor.construct_yaml_null(self, node) 35 | return self._holder(obj, node) 36 | 37 | def construct_yaml_bool(self, node): 38 | obj = SafeConstructor.construct_yaml_bool(self, node) 39 | return self._holder(obj, node) 40 | 41 | def construct_yaml_int(self, node): 42 | obj = SafeConstructor.construct_yaml_int(self, node) 43 | return self._holder(obj, node) 44 | 45 | def construct_yaml_float(self, node): 46 | obj = SafeConstructor.construct_yaml_float(self, node) 47 | return self._holder(obj, node) 48 | 49 | def construct_yaml_binary(self, node): 50 | obj = SafeConstructor.construct_yaml_binary(self, node) 51 | return self._holder(obj, node) 52 | 53 | def construct_yaml_timestamp(self, node): 54 | obj = SafeConstructor.construct_yaml_timestamp(self, node) 55 | return self._holder(obj, node) 56 | 57 | def construct_yaml_omap(self, node): 58 | obj, = SafeConstructor.construct_yaml_omap(self, node) 59 | return self._holder(obj, node) 60 | 61 | def construct_yaml_pairs(self, node): 62 | obj, = SafeConstructor.construct_yaml_pairs(self, node) 63 | return self._holder(obj, node) 64 | 65 | def construct_yaml_set(self, node): 66 | obj, = SafeConstructor.construct_yaml_set(self, node) 67 | return self._holder(obj, node) 68 | 69 | def construct_yaml_str(self, node): 70 | obj = SafeConstructor.construct_yaml_str(self, node) 71 | try: 72 | obj = str(obj) 73 | except UnicodeEncodeError: 74 | raise DSLParsingInputTypeException( 75 | ERROR_INVALID_CHARS, 76 | 'illegal characters in line: {0}, column: {1}. ' 77 | 'Only valid ascii chars are supported.'.format( 78 | node.start_mark.line, node.start_mark.column)) 79 | return self._holder(obj, node) 80 | 81 | def construct_yaml_seq(self, node): 82 | obj, = SafeConstructor.construct_yaml_seq(self, node) 83 | return self._holder(obj, node) 84 | 85 | def construct_yaml_map(self, node): 86 | obj, = SafeConstructor.construct_yaml_map(self, node) 87 | return self._holder(obj, node) 88 | 89 | def _holder(self, obj, node): 90 | return holder.Holder(value=obj, 91 | start_line=node.start_mark.line, 92 | start_column=node.start_mark.column, 93 | end_line=node.end_mark.line, 94 | end_column=node.end_mark.column, 95 | filename=self.filename) 96 | 97 | 98 | HolderConstructor.add_constructor( 99 | u'tag:yaml.org,2002:null', 100 | HolderConstructor.construct_yaml_null) 101 | 102 | HolderConstructor.add_constructor( 103 | u'tag:yaml.org,2002:bool', 104 | HolderConstructor.construct_yaml_bool) 105 | 106 | HolderConstructor.add_constructor( 107 | u'tag:yaml.org,2002:int', 108 | HolderConstructor.construct_yaml_int) 109 | 110 | HolderConstructor.add_constructor( 111 | u'tag:yaml.org,2002:float', 112 | HolderConstructor.construct_yaml_float) 113 | 114 | HolderConstructor.add_constructor( 115 | u'tag:yaml.org,2002:binary', 116 | HolderConstructor.construct_yaml_binary) 117 | 118 | HolderConstructor.add_constructor( 119 | u'tag:yaml.org,2002:timestamp', 120 | HolderConstructor.construct_yaml_timestamp) 121 | 122 | HolderConstructor.add_constructor( 123 | u'tag:yaml.org,2002:omap', 124 | HolderConstructor.construct_yaml_omap) 125 | 126 | HolderConstructor.add_constructor( 127 | u'tag:yaml.org,2002:pairs', 128 | HolderConstructor.construct_yaml_pairs) 129 | 130 | HolderConstructor.add_constructor( 131 | u'tag:yaml.org,2002:set', 132 | HolderConstructor.construct_yaml_set) 133 | 134 | HolderConstructor.add_constructor( 135 | u'tag:yaml.org,2002:str', 136 | HolderConstructor.construct_yaml_str) 137 | 138 | HolderConstructor.add_constructor( 139 | u'tag:yaml.org,2002:seq', 140 | HolderConstructor.construct_yaml_seq) 141 | 142 | HolderConstructor.add_constructor( 143 | u'tag:yaml.org,2002:map', 144 | HolderConstructor.construct_yaml_map) 145 | 146 | 147 | class MarkedLoader(Reader, Scanner, Parser, Composer, HolderConstructor, 148 | Resolver): 149 | def __init__(self, stream, filename=None): 150 | Reader.__init__(self, stream) 151 | Scanner.__init__(self) 152 | Parser.__init__(self) 153 | Composer.__init__(self) 154 | HolderConstructor.__init__(self, filename) 155 | Resolver.__init__(self) 156 | 157 | 158 | def load(stream, filename): 159 | result = MarkedLoader(stream, filename).get_single_data() 160 | if result is None: 161 | # load of empty string returns None so we convert it to an empty 162 | # dict 163 | result = holder.Holder.of({}, filename=filename) 164 | return result 165 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | ######## 2 | # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # * See the License for the specific language governing permissions and 14 | # * limitations under the License. 15 | 16 | 17 | from setuptools import setup 18 | 19 | 20 | install_requires = [ 21 | 'PyYAML==3.10', 22 | 'networkx==1.9.1', 23 | 'requests>=2.7.0,<3.0.0', 24 | 'retrying==1.3.3' 25 | ] 26 | 27 | try: 28 | from collections import OrderedDict # NOQA 29 | except ImportError, e: 30 | install_requires.append('ordereddict==1.1') 31 | 32 | try: 33 | import importlib # NOQA 34 | except ImportError: 35 | install_requires.append('importlib') 36 | 37 | setup( 38 | name='cloudify-dsl-parser', 39 | version='4.4.dev1', 40 | author='Gigaspaces', 41 | author_email='cosmo-admin@gigaspaces.com', 42 | packages=['dsl_parser', 43 | 'dsl_parser.interfaces', 44 | 'dsl_parser.framework', 45 | 'dsl_parser.elements', 46 | 'dsl_parser.import_resolver'], 47 | license='LICENSE', 48 | description='Cloudify DSL parser', 49 | zip_safe=False, 50 | install_requires=install_requires 51 | ) 52 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | mock>=1.0.1 2 | testtools -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # content of: tox.ini , put in same dir as setup.py 2 | [tox] 3 | envlist=flake8 4 | test_{py26,py27} 5 | 6 | [testenv] 7 | deps = 8 | -rtest-requirements.txt 9 | nose 10 | nose-cov 11 | testfixtures 12 | 13 | [testenv:test_py26] 14 | deps = 15 | {[testenv]deps} 16 | commands=nosetests --with-cov --cov-report term-missing --cov dsl_parser dsl_parser/tests 17 | 18 | [testenv:test_py27] 19 | deps = 20 | {[testenv]deps} 21 | commands=nosetests --with-cov --cov-report term-missing --cov dsl_parser dsl_parser/tests 22 | 23 | [testenv:flake8] 24 | deps = 25 | flake8 26 | commands=flake8 dsl_parser 27 | --------------------------------------------------------------------------------