├── .gitignore ├── .gitreview ├── .stestr.conf ├── .zuul.yaml ├── CONTRIBUTING.rst ├── LICENSE ├── README.rst ├── doc └── source │ ├── conf.py │ ├── index.rst │ └── specs ├── requirements.txt ├── setup.cfg ├── setup.py ├── specs ├── mitaka │ └── implemented │ │ ├── mistral-fail-transition-message.rst │ │ ├── mistral-rerun-update-env.rst │ │ ├── mistral-workflow-resource-sharing.rst │ │ └── use-workflow-id-in-rest-api.rst ├── newton │ ├── approved │ │ └── mistral-custom-actions-api.rst │ └── implemented │ │ ├── event-notification-trigger.rst │ │ ├── mistral-item-filtering.rst │ │ └── mistral-multi-vim-support.rst ├── ocata │ ├── approved │ │ ├── mistral-custom-actions-api.rst │ │ └── workflow-global-context.rst │ └── implemented │ │ ├── publish-on-failure.rst │ │ └── yaql-tasks-function.rst ├── pike │ ├── approved │ │ ├── advanced_publishing.rst │ │ ├── create-and-run-workflows-within-a-namespace.rst │ │ ├── ha.rst │ │ ├── mistral-custom-actions-api.rst │ │ ├── mistral-extra.rst │ │ ├── secure-sensitive-data.rst │ │ └── workflow-error-analysis.rst │ └── implemented │ │ └── mistral-multi-region-support.rst ├── policy-template.rst ├── policy │ └── patch-abandonment.rst ├── rocky │ └── approved │ │ └── custom-context-for-executions.rst ├── template.rst └── train │ └── .placeholder ├── test-requirements.txt ├── tests ├── __init__.py ├── test_directories.py └── test_titles.py └── tox.ini /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | *.sqlite 3 | 4 | # C extensions 5 | *.so 6 | 7 | # Packages 8 | *.egg 9 | *.egg-info 10 | dist 11 | build 12 | .venv 13 | eggs 14 | parts 15 | bin 16 | var 17 | sdist 18 | develop-eggs 19 | .installed.cfg 20 | lib 21 | lib64 22 | 23 | # Installer logs 24 | pip-log.txt 25 | 26 | # Unit test / coverage reports 27 | .coverage 28 | .tox 29 | nosetests.xml 30 | cover/* 31 | .stestr/ 32 | subunit.log 33 | .mistral.conf 34 | AUTHORS 35 | ChangeLog 36 | 37 | # Translations 38 | *.mo 39 | 40 | # Mr Developer 41 | .mr.developer.cfg 42 | .project 43 | .pydevproject 44 | .idea 45 | .DS_Store 46 | etc/*.conf 47 | tools/lintstack.head.py 48 | tools/pylint_exceptions 49 | 50 | #Linux swap file 51 | *.swp 52 | -------------------------------------------------------------------------------- /.gitreview: -------------------------------------------------------------------------------- 1 | [gerrit] 2 | host=review.opendev.org 3 | port=29418 4 | project=openstack/mistral-specs.git 5 | -------------------------------------------------------------------------------- /.stestr.conf: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | test_path=./tests 3 | top_dir=. 4 | -------------------------------------------------------------------------------- /.zuul.yaml: -------------------------------------------------------------------------------- 1 | - project: 2 | templates: 3 | - openstack-specs-jobs 4 | check: 5 | jobs: 6 | - openstack-tox-py37 7 | gate: 8 | jobs: 9 | - openstack-tox-py37 10 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | The source repository for this project can be found at: 2 | 3 | https://opendev.org/openstack/mistral-specs 4 | 5 | Pull requests submitted through GitHub are not monitored. 
6 | 7 | To start contributing to OpenStack, follow the steps in the contribution guide 8 | to set up and use Gerrit: 9 | 10 | https://docs.openstack.org/contributors/code-and-documentation/quick-start.html 11 | 12 | This repository is for doing design review on feature proposal in Heat. 13 | Please refer to `this document `_ 14 | for more information. 15 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ======================== 2 | Team and repository tags 3 | ======================== 4 | 5 | .. image:: https://governance.openstack.org/tc/badges/mistral-specs.svg 6 | :target: https://governance.openstack.org/tc/reference/tags/index.html 7 | 8 | .. Change things from this point on 9 | 10 | ======================================== 11 | OpenStack Workflow Engine Specifications 12 | ======================================== 13 | 14 | This git repository is used to hold approved design specifications for Mistral 15 | project. Reviews of the specs are done in gerrit, using a similar workflow to 16 | how we review and merge changes to the code itself. 17 | 18 | This would apply to new blueprints proposed in Mistral project from Mitaka, 19 | this new process provides a way to fast-track the feature history of Mistral, 20 | which is very useful for new comers to learn how Mistral evolves, where we 21 | are, and where we're going. 22 | 23 | First, create a blueprint in launchpad and populate it with your spec's 24 | heading. Then, propose a spec following the template which can be found at 25 | ``specs/template.rst``. This will be given an initial, high-level review to 26 | determine whether it is in scope and in alignment with project direction, 27 | which will be reflected on the review comments. If the spec is approved, you 28 | can continue with your code implementation, and update launchpad to set the 29 | specification URL to the spec's location on:: 30 | 31 | https://specs.openstack.org/openstack/mistral-specs/ 32 | 33 | The Mistral PTL(or someone else on behalf of him) will update the release 34 | target, priority, and status accordingly. 
35 | 36 | If a specification has been approved but not completed within one or more 37 | releases since the approval, it may be re-reviewed to make sure it still makes 38 | sense as written. Specifications are proposed by adding them to the 39 | ``specs//approved`` directory and posting it for review. When a spec is 40 | fully implemented, it should be moved to ``specs//implemented``. 41 | 42 | You are welcome to submit patches associated with a blueprint, whose 43 | specification may have not been approved, but they will have a -2 ("do not 44 | merge") until the specification has been approved. This is to ensure that the 45 | patches don't get accidentally merged beforehand. You will still be able to 46 | get reviewer feedback and push new patch sets, even with a -2. 47 | -------------------------------------------------------------------------------- /doc/source/conf.py: -------------------------------------------------------------------------------- 1 | # Mistral documentation build configuration file, created by 2 | # sphinx-quickstart on Tue May 21 17:43:32 2013. 3 | # 4 | # This file is execfile()d with the current directory set to its containing dir. 5 | # 6 | # Note that not all possible configuration values are present in this 7 | # autogenerated file. 8 | # 9 | # All configuration values have a default; values that are commented out 10 | # serve to show the default. 11 | 12 | import datetime 13 | 14 | # If extensions (or modules to document with autodoc) are in another directory, 15 | # add these directories to sys.path here. If the directory is relative to the 16 | # documentation root, use os.path.abspath to make it absolute, like shown here. 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | # -- General configuration ----------------------------------------------------- 20 | 21 | # If your documentation needs a minimal Sphinx version, state it here. 22 | #needs_sphinx = '1.0' 23 | 24 | # Add any Sphinx extension module names here, as strings. They can be extensions 25 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 26 | extensions = [ 27 | 'sphinx.ext.todo', 28 | 'sphinx.ext.viewcode', 29 | 'yasfb', 30 | 'openstackdocstheme' 31 | ] 32 | 33 | # Feed configuration for yasfb 34 | feed_base_url = 'https://specs.openstack.org/openstack/mistral-specs' 35 | feed_author = 'OpenStack Mistral Team' 36 | 37 | todo_include_todos = True 38 | 39 | # Add any paths that contain templates here, relative to this directory. 40 | templates_path = ['_templates'] 41 | 42 | # The suffix of source filenames. 43 | source_suffix = '.rst' 44 | 45 | html_theme = 'openstackdocs' 46 | openstackdocs_repo_name = 'openstack/mistral-specs' 47 | openstackdocs_auto_name = False 48 | openstackdocs_bug_project = 'mistral' 49 | openstackdocs_bug_tag = 'specs' 50 | 51 | 52 | # The encoding of source files. 53 | #source_encoding = 'utf-8-sig' 54 | 55 | # The master toctree document. 56 | master_doc = 'index' 57 | 58 | # General information about the project. 59 | project = 'Mistral Specs' 60 | copyright = '%s, OpenStack Mistral Team' % datetime.date.today().year 61 | 62 | # The language for content autogenerated by Sphinx. Refer to documentation 63 | # for a list of supported languages. 64 | #language = None 65 | 66 | # There are two options for replacing |today|: either, you set today to some 67 | # non-false value, then it is used: 68 | #today = '' 69 | # Else, today_fmt is used as the format for a strftime call. 
70 | #today_fmt = '%B %d, %Y' 71 | 72 | # List of patterns, relative to source directory, that match files and 73 | # directories to ignore when looking for source files. 74 | exclude_patterns = [ 75 | '_build', 76 | '**/template.rst', 77 | '**/policy-template.rst' 78 | ] 79 | 80 | # The reST default role (used for this markup: `text`) to use for all documents. 81 | #default_role = None 82 | 83 | # If true, '()' will be appended to :func: etc. cross-reference text. 84 | #add_function_parentheses = True 85 | 86 | # If true, the current module name will be prepended to all description 87 | # unit titles (such as .. function::). 88 | add_module_names = False 89 | 90 | # If true, sectionauthor and moduleauthor directives will be shown in the 91 | # output. They are ignored by default. 92 | show_authors = False 93 | 94 | # The name of the Pygments (syntax highlighting) style to use. 95 | pygments_style = 'native' 96 | 97 | # A list of ignored prefixes for module index sorting. 98 | modindex_common_prefix = ['mistral-specs.'] 99 | 100 | # -- Options for man page output ---------------------------------------------- 101 | man_pages = [] 102 | 103 | # -- Options for HTML output --------------------------------------------------- 104 | 105 | # The theme to use for HTML and HTML Help pages. See the documentation for 106 | # a list of builtin themes. 107 | html_theme = 'openstackdocs' 108 | 109 | # Theme options are theme-specific and customize the look and feel of a theme 110 | # further. For a list of options available for each theme, see the 111 | # documentation. 112 | #html_theme_options = {} 113 | 114 | # Add any paths that contain custom themes here, relative to this directory. 115 | #html_theme_path = [] 116 | 117 | # The name for this set of Sphinx documents. If None, it defaults to 118 | # " v documentation". 119 | #html_title = None 120 | 121 | # A shorter title for the navigation bar. Default is the same as html_title. 122 | #html_short_title = None 123 | 124 | # The name of an image file (relative to this directory) to place at the top 125 | # of the sidebar. 126 | #html_logo = None 127 | 128 | # The name of an image file (within the static path) to use as favicon of the 129 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 130 | # pixels large. 131 | #html_favicon = None 132 | 133 | # If true, SmartyPants will be used to convert quotes and dashes to 134 | # typographically correct entities. 135 | #html_use_smartypants = True 136 | 137 | # Custom sidebar templates, maps document names to template names. 138 | #html_sidebars = {} 139 | 140 | # Additional templates that should be rendered to pages, maps page names to 141 | # template names. 142 | #html_additional_pages = {} 143 | 144 | # If false, no module index is generated. 145 | html_domain_indices = False 146 | 147 | # If false, no index is generated. 148 | html_use_index = False 149 | 150 | # If true, the index is split into individual pages for each letter. 151 | #html_split_index = False 152 | 153 | # If true, links to the reST sources are added to the pages. 154 | #html_show_sourcelink = True 155 | 156 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 157 | #html_show_sphinx = True 158 | 159 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 160 | #html_show_copyright = True 161 | 162 | # If true, an OpenSearch description file will be output, and all pages will 163 | # contain a tag referring to it. 
The value of this option must be the 164 | # base URL from which the finished HTML is served. 165 | #html_use_opensearch = '' 166 | 167 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 168 | #html_file_suffix = None 169 | 170 | # Output file base name for HTML help builder. 171 | htmlhelp_basename = 'Mistral-Specsdoc' 172 | 173 | 174 | # -- Options for LaTeX output -------------------------------------------------- 175 | 176 | latex_elements = { 177 | # The paper size ('letterpaper' or 'a4paper'). 178 | #'papersize': 'letterpaper', 179 | 180 | # The font size ('10pt', '11pt' or '12pt'). 181 | #'pointsize': '10pt', 182 | 183 | # Additional stuff for the LaTeX preamble. 184 | #'preamble': '', 185 | } 186 | 187 | # Grouping the document tree into LaTeX files. List of tuples 188 | # (source start file, target name, title, author, documentclass [howto/manual]). 189 | latex_documents = [ 190 | ('index', 'Mistral-specs.tex', 'Mistral Specs', 191 | 'OpenStack Mistral Team', 'manual'), 192 | ] 193 | 194 | # The name of an image file (relative to this directory) to place at the top of 195 | # the title page. 196 | #latex_logo = None 197 | 198 | # For "manual" documents, if this is true, then toplevel headings are parts, 199 | # not chapters. 200 | #latex_use_parts = False 201 | 202 | # If true, show page references after internal links. 203 | #latex_show_pagerefs = False 204 | 205 | # If true, show URL addresses after external links. 206 | #latex_show_urls = False 207 | 208 | # Documents to append as an appendix to all manuals. 209 | #latex_appendices = [] 210 | 211 | # If false, no module index is generated. 212 | #latex_domain_indices = True 213 | 214 | # -- Options for Texinfo output ------------------------------------------------ 215 | 216 | # Grouping the document tree into Texinfo files. List of tuples 217 | # (source start file, target name, title, author, 218 | # dir menu entry, description, category) 219 | texinfo_documents = [ 220 | ('index', 'Mistral-specs', 'Mistral Design Specs', 221 | 'OpenStack Mistral Team', 'mistral-specs', 222 | 'Design specifications for the Mistral project.', 'Miscellaneous'), 223 | ] 224 | 225 | # Documents to append as an appendix to all manuals. 226 | #texinfo_appendices = [] 227 | 228 | # If false, no module index is generated. 229 | #texinfo_domain_indices = True 230 | 231 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 232 | #texinfo_show_urls = 'footnote' 233 | 234 | 235 | # -- Options for Epub output --------------------------------------------------- 236 | 237 | # Bibliographic Dublin Core info. 238 | epub_title = 'Mistral Specs' 239 | epub_author = 'OpenStack Mistral Team' 240 | epub_publisher = 'OpenStack Mistral Team' 241 | epub_copyright = '2015, OpenStack Mistral Team' 242 | 243 | # The language of the text. It defaults to the language option 244 | # or en if the language is not set. 245 | #epub_language = '' 246 | 247 | # The scheme of the identifier. Typical schemes are ISBN or URL. 248 | #epub_scheme = '' 249 | 250 | # The unique identifier of the text. This can be a ISBN number 251 | # or the project homepage. 252 | #epub_identifier = '' 253 | 254 | # A unique identification for the text. 255 | #epub_uid = '' 256 | 257 | # A tuple containing the cover image and cover page html template filenames. 258 | #epub_cover = () 259 | 260 | # HTML files that should be inserted before the pages created by sphinx. 261 | # The format is a list of tuples containing the path and title. 
262 | #epub_pre_files = [] 263 | 264 | # HTML files shat should be inserted after the pages created by sphinx. 265 | # The format is a list of tuples containing the path and title. 266 | #epub_post_files = [] 267 | 268 | # A list of files that should not be packed into the epub file. 269 | #epub_exclude_files = [] 270 | 271 | # The depth of the table of contents in toc.ncx. 272 | #epub_tocdepth = 3 273 | 274 | # Allow duplicate toc entries. 275 | #epub_tocdup = True 276 | -------------------------------------------------------------------------------- /doc/source/index.rst: -------------------------------------------------------------------------------- 1 | .. mistral-specs documentation master file 2 | 3 | =============================== 4 | OpenStack Mistral Project Plans 5 | =============================== 6 | 7 | Approved specifications 8 | ======================= 9 | 10 | These specifications have been approved but have not been completely 11 | implemented. Approved specifications are grouped in the development cycles. If 12 | a specification has been approved but not completed within one or more 13 | releases since the approval, it may be re-reviewed to make sure it still makes 14 | sense as written. So, a specification may be seen in different groups: 15 | 16 | Newton 17 | ------ 18 | 19 | .. toctree:: 20 | :glob: 21 | :maxdepth: 1 22 | 23 | specs/newton/approved/* 24 | 25 | Ocata 26 | ----- 27 | 28 | .. toctree:: 29 | :glob: 30 | :maxdepth: 1 31 | 32 | specs/ocata/approved/* 33 | 34 | Pike 35 | ---- 36 | 37 | .. toctree:: 38 | :glob: 39 | :maxdepth: 1 40 | 41 | specs/pike/approved/* 42 | 43 | Rocky 44 | ----- 45 | 46 | .. toctree:: 47 | :glob: 48 | :maxdepth: 1 49 | 50 | specs/rocky/approved/* 51 | 52 | Implemented specifications 53 | ========================== 54 | 55 | These specifications have been implemented and are grouped by the development 56 | cycles in which they were completed. 57 | 58 | Mitaka 59 | ------ 60 | 61 | .. toctree:: 62 | :glob: 63 | :maxdepth: 1 64 | 65 | specs/mitaka/implemented/* 66 | 67 | Newton 68 | ------ 69 | 70 | .. toctree:: 71 | :glob: 72 | :maxdepth: 1 73 | 74 | specs/newton/implemented/* 75 | 76 | Ocata 77 | ----- 78 | 79 | .. toctree:: 80 | :glob: 81 | :maxdepth: 1 82 | 83 | specs/ocata/implemented/* 84 | 85 | Pike 86 | ----- 87 | 88 | .. toctree:: 89 | :glob: 90 | :maxdepth: 1 91 | 92 | specs/pike/implemented/* 93 | 94 | ======================== 95 | Mistral Project Policies 96 | ======================== 97 | 98 | Team decisions and policies that are not limited to a specific release. 99 | 100 | .. 
toctree:: 101 | :glob: 102 | :maxdepth: 1 103 | 104 | specs/policy/* 105 | 106 | 107 | Indices and tables 108 | ================== 109 | 110 | * :ref:`search` 111 | -------------------------------------------------------------------------------- /doc/source/specs: -------------------------------------------------------------------------------- 1 | ../../specs -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pbr>=2.0,!=2.1.0 # Apache-2.0 2 | sphinx>=2.0.0,!=2.1.0 # BSD 3 | openstackdocstheme>=2.2.1 # Apache-2.0 4 | testtools>=0.9.34 5 | yasfb 6 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = mistral-specs 3 | summary = OpenStack Workflow Engine Program Development Specs 4 | description-file = 5 | README.rst 6 | author = OpenStack 7 | author-email = openstack-discuss@lists.openstack.org 8 | home-page = https://specs.openstack.org/openstack/mistral-specs/ 9 | classifier = 10 | Intended Audience :: Developers 11 | License :: OSI Approved :: Apache Software License 12 | Operating System :: POSIX :: Linux 13 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 13 | # implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | # THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT 18 | import setuptools 19 | 20 | setuptools.setup( 21 | setup_requires=['pbr'], 22 | pbr=True) 23 | -------------------------------------------------------------------------------- /specs/mitaka/implemented/mistral-fail-transition-message.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ================================ 8 | Customize message for Transition 9 | ================================ 10 | 11 | https://blueprints.launchpad.net/mistral/+spec/mistral-fail-transition-message 12 | 13 | 14 | Problem description 15 | =================== 16 | 17 | Currently, "fail" command fails the workflow and the user needs to check the 18 | different entities and flow to find why workflow has been failed. So, the 19 | workflow execution is failed but there is no meaningful error message. 20 | 21 | Use Cases 22 | --------- 23 | 24 | As a user, I want to provide customized message for fail/success/pause 25 | transition in 'on-success', 'on-complete' and 'on-error'. 
26 | 27 | 28 | Proposed change 29 | =============== 30 | 31 | To solve this problem, I would like to add 'msg' input argument for 32 | fail/success/pause commands. This argument will be publish to the 33 | context before running fail/success/pause. Please consider following 34 | example:: 35 | 36 | ... 37 | tasks: 38 | t1: 39 | action: ... 40 | on-success: 41 | - fail(msg='var1 is null.'): <% $.var1 = null %> 42 | - succeed(msg='var1 is not null.') 43 | 44 | Alternatives 45 | ------------ 46 | 47 | None 48 | 49 | Data model impact 50 | ----------------- 51 | 52 | None 53 | 54 | REST API impact 55 | --------------- 56 | 57 | None 58 | 59 | End user impact 60 | --------------- 61 | 62 | User can pass 'msg' argument for fail/success/pause commands. This argument 63 | will be optional. 64 | 65 | Performance Impact 66 | ------------------ 67 | 68 | None 69 | 70 | Deployer impact 71 | --------------- 72 | 73 | None 74 | 75 | 76 | Implementation 77 | ============== 78 | 79 | Assignee(s) 80 | ----------- 81 | 82 | hardik 83 | 84 | Work Items 85 | ---------- 86 | 87 | * Add input argument 'msg' for fail/success/pause commands. 88 | 89 | 90 | Dependencies 91 | ============ 92 | 93 | None 94 | 95 | 96 | Testing 97 | ======= 98 | 99 | Tests should cover all the scenarios mentioned in use cases section. 100 | 101 | 102 | References 103 | ========== 104 | 105 | https://review.openstack.org/#/c/276625/ 106 | -------------------------------------------------------------------------------- /specs/mitaka/implemented/mistral-rerun-update-env.rst: -------------------------------------------------------------------------------- 1 | =============================== 2 | Allow env update on task re-run 3 | =============================== 4 | 5 | https://blueprints.launchpad.net/mistral/+spec/mistral-rerun-update-env 6 | 7 | Problem description 8 | =================== 9 | On rerunning (and resuming) a workflow execution, allow changes to the 10 | environment variables that were provided at the start of workflow execution. 11 | 12 | Use Cases 13 | --------- 14 | Given the use case where a workflow execution failed because of environment 15 | related issue(s) (i.e. endpoint unavailable, etc.), it is possible that as 16 | part of resolving the environment related issue(s), the endpoint is replaced 17 | (i.e. different host/ip) or that the token passed as credential has expired. 18 | Endpoints and credentials can be passed on workflow invocation under the env 19 | param and then accessed by workflow tasks using the env() function. In these 20 | circumstances, the user will need to be able to update the env variables prior 21 | to re-running the workflow task(s). This also applies to workflow that are 22 | manually paused (i.e. for maintenance) and now resumed but the token passed as 23 | credential in the env has expired. 24 | 25 | Proposed change 26 | =============== 27 | To change environment variables, this will be a two step process. First is to 28 | overlay the new set of env variables to the workflow execution context so any 29 | new task executions will pick up the changes. Second to overlay the new set 30 | to the in context of the existing tasks to be rerun. Any existing tasks that 31 | have completed successfully will not be modified. 32 | 33 | Alternatives 34 | ------------ 35 | None 36 | 37 | Data model impact 38 | ----------------- 39 | - New env property for the Task API resource model to pass the new set of 40 | environment variables. 
41 | 42 | REST API impact 43 | --------------- 44 | Update to the env is only permitted on task re-run or workflow resume. 45 | 46 | For workflow resume, the PUT method of the execution controller will be 47 | affected. The user will pass the new set of environment variables via 48 | params in the Execution resource model. Then the put operation for the 49 | executions controller will pass the updated env to resume_workflow (i.e. 50 | rpc.engineclient().resume_workflow(wf_ex_id, env=env)). The 51 | resume_workflow method will merge the new set of env to the workflow 52 | execution appropriately. 53 | 54 | The following is the data for the PUT request to the execution controller. 55 | 56 | .. code-block:: json 57 | 58 | { 59 | "state": "RUNNING", 60 | "params": "{'env': {'k1': 'v1'}}" 61 | } 62 | 63 | For task re-run, the PUT method of the task controller will be affected. 64 | The user will pass the new set of environment variables via the env 65 | property in the Task resource model. Then the put operation for the 66 | tasks controller will pass the updated env to rerun_workflow (i.e. 67 | rpc.engineclient().rerun_workflow(wf_ex_id, task_ex_id, env=env)). The 68 | rerun_workflow method will merge the new set of env to the workflow 69 | execution and the task execution appropriately. 70 | 71 | The following is the data for the PUT request to the task controller. 72 | 73 | .. code-block:: none 74 | 75 | { 76 | 'state': 'RUNNING', 77 | 'reset': True, 78 | 'env': '{"k1": "v1"}' 79 | } 80 | 81 | End user impact 82 | --------------- 83 | - Add --env option to `mistral execution-update` to pick up a json string or 84 | path to a json file containing the list of variables to update. 85 | - Add --env option to `mistral task-rerun` to pick up a json string or 86 | path to a json file containing the list of variables to update. 87 | 88 | Performance Impact 89 | ------------------ 90 | None 91 | 92 | Deployer impact 93 | --------------- 94 | None 95 | 96 | Implementation 97 | ============== 98 | 99 | Assignee(s) 100 | ----------- 101 | 102 | Primary assignee: 103 | m4dcoder 104 | 105 | Work Items 106 | ---------- 107 | - Add DB API method to update env in execution. 108 | - Update resume_workflow in default engine. 109 | - Update rerun_workflow in default engine. 110 | - Update PUT in execution controller. 111 | - Update Task API resource model. 112 | - Update PUT in task controller. 113 | - Update execution-update command in mistral client. 114 | - Update task-rerun command in mistral client. 115 | 116 | Dependencies 117 | ============ 118 | None 119 | 120 | Testing 121 | ======= 122 | - Test that environment is updated and workflow can rerun successfully. 123 | - Test update of workflow execution and task execution in different states. 124 | Test exception cases where certain states are not allowed (i.e. SUCCESS). 125 | 126 | References 127 | ========== 128 | None 129 | -------------------------------------------------------------------------------- /specs/mitaka/implemented/mistral-workflow-resource-sharing.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 
4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | =================================== 8 | Support workflow sharing in Mistral 9 | =================================== 10 | 11 | https://blueprints.launchpad.net/mistral/+spec/mistral-workflow-resource-sharing 12 | 13 | 14 | Problem description 15 | =================== 16 | 17 | Currently, we support creating public scope resource in Mistral, a public 18 | resource(e.g. workflow) could be visible and used by any other tenant of the 19 | system. The motivation of this feature is to avoid too many 'noisy' workflows 20 | when you do ``mistral workflow-list`` in CLI, because you will passively see 21 | all the resources with 'public' property, which you don't care about and will 22 | never use, the situation will get worse especially in scenario of public 23 | cloud. 24 | 25 | Use Cases 26 | --------- 27 | 28 | * As a tenant, I want to share my workflows to a specific tenant, rather than 29 | all other tenants in the system. 30 | 31 | * As a tenant, I want to have the capability to accept or reject workflows 32 | shared to me. 33 | 34 | * As a tenant, I can see the workflows list containing all of the following: 35 | all public workflows, my own workflows and the workflows that I am a member 36 | of. 37 | 38 | 39 | Proposed change 40 | =============== 41 | 42 | To solve those problems, I propose adding new resource sharing API for this 43 | feature, please see detailed information in the following sections. 44 | 45 | Users should always use UUID for using workflow sharing feature, because using 46 | name will cause a lot of confusion and lead to plenty of bugs, e.g. different 47 | tenants could have same name workflows, they can share these workflows to 48 | another tenant, who will be confused when he operates with the workflow name. 49 | Workflow UUID in API has been supported[1] in Mistral. 50 | 51 | Alternatives 52 | ------------ 53 | 54 | None 55 | 56 | Data model impact 57 | ----------------- 58 | 59 | A new table is needed for this feature, the following table constructs would 60 | suffice :: 61 | 62 | __tablename__ = 'resource_members_v2' 63 | __table_args__ = ( 64 | sa.UniqueConstraint( 65 | 'resource_identifier', 66 | 'resource_type', 67 | 'member_id' 68 | ), 69 | ) 70 | 71 | id = mb.id_column() 72 | resource_identifier = sa.Column(sa.String(80), nullable=False) 73 | resource_type = sa.Column( 74 | sa.String(50), 75 | nullable=False, 76 | default='workflow' 77 | ) 78 | project_id = sa.Column(sa.String(80), default=security.get_project_id) 79 | member_id = sa.Column(sa.String(80), nullable=False) 80 | status = sa.Column(sa.String(20), nullable=False, default="pending") 81 | 82 | Database migration is also needed. 83 | 84 | REST API impact 85 | --------------- 86 | 87 | 1. Shares the workflow to a new member. 88 | 89 | POST http://127.0.0.1:8989/v2/workflows//members 90 | 91 | request body:: 92 | 93 | {'member_id': XXX} 94 | 95 | response:: 96 | 97 | { 98 | 'resource_identifier': , 99 | 'resource_type': 'workflow', 100 | 'project_id': , 101 | 'member_id': , 102 | 'status': 'pending' 103 | } 104 | 105 | 2. Sets the status for a workflow member. 106 | 107 | PUT http://127.0.0.1:8989/v2/workflows//members/ 108 | 109 | request body:: 110 | 111 | {'status': } 112 | 113 | Only user with whom this workflow is shared could make this call, to 114 | accept or reject the sharing, or remain what the status was. Other users 115 | making this call may get HTTP 404 status code. 116 | 117 | 3. Shows workflow member details. 
118 | 119 | GET http://127.0.0.1:8989/v2/workflows//members/ 120 | 121 | Response body is a single workflow member entity, user must be the owner 122 | or a member of the workflow. 123 | 124 | 4. Return all members with whom the workflow has been shared. 125 | 126 | GET http://127.0.0.1:8989/v2/workflows//members 127 | 128 | If a user with whom this workflow is shared makes this call, the member 129 | list contains only information for that user. 130 | If a user with whom this workflow has not been shared makes this call, the 131 | call returns the HTTP 404 status code. 132 | 133 | 5. Deletes a member from the member list of a workflow. 134 | 135 | DELETE http://127.0.0.1:8989/v2/workflows//members/ 136 | 137 | Users making this call must be the owner of the workflow. Please note, 138 | check should be done before the member relationship is deleted, since 139 | after deletion, other users can not use that workflow any more, which may 140 | cause error to existing executions or cron-triggers. 141 | 142 | End user impact 143 | --------------- 144 | 145 | Besides the new API, users can use new commands in CLI for resource sharing 146 | feature. For instance:: 147 | 148 | mistral resource-member-create --type workflow --id \ 149 | --member 150 | 151 | Performance Impact 152 | ------------------ 153 | 154 | None 155 | 156 | Deployer impact 157 | --------------- 158 | 159 | None 160 | 161 | 162 | Implementation 163 | ============== 164 | 165 | Assignee(s) 166 | ----------- 167 | 168 | kong 169 | 170 | Work Items 171 | ---------- 172 | 173 | * Add new db schema for resource_member. 174 | * Add db operations for resource members. 175 | * Add new API for workflow sharing. 176 | * Including workflows shared to user, when user query workflows. 177 | 178 | 179 | Dependencies 180 | ============ 181 | 182 | None 183 | 184 | 185 | Testing 186 | ======= 187 | 188 | Tests should cover all the scenarios memtioned in use cases section. 189 | 190 | 191 | References 192 | ========== 193 | 194 | [1]: https://blueprints.launchpad.net/python-mistralclient/+spec/support-id-in-workflow-operation 195 | -------------------------------------------------------------------------------- /specs/mitaka/implemented/use-workflow-id-in-rest-api.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ========================================= 8 | Workflow UUID support in Mistral REST API 9 | ========================================= 10 | 11 | https://blueprints.launchpad.net/mistral/+spec/use-workflow-id-in-rest-api 12 | 13 | 14 | Problem description 15 | =================== 16 | 17 | Currently, we identify a workflow by its name, of course, the workflow name is 18 | unique within a tenant's scope. However, when we use 'public' workflows or we 19 | want to get benifits from workflow sharing feature[1], we may see more than 20 | one workflows with the same name (you can see a related bug which has been 21 | fixed here[2]). Even worse, users will never see other 'same-name' workflows 22 | when performing ``mistral workflow-get `` command, since it always 23 | returns the first one. 
24 | 25 | Look at almost all other projects, they always use UUID as globally unique 26 | resource identifier, especially in the REST API, the resource name is a string 27 | that can be duplicated throughout the whole system, and may be changed 28 | frequently as the time goes by. 29 | 30 | So, I propose we use UUID as the workflow global unique identifier in the REST 31 | API. What's more, we can consider using UUID for other resources after that. 32 | 33 | Use Cases 34 | --------- 35 | 36 | * As a user, I want to see the definition of a public scope workflow, whose 37 | name is the same with one of my private workflows or another public 38 | workflow. 39 | 40 | * As a user or application developer, I want to send REST API to Mistral with 41 | resource UUID contained, rather than resource name. 42 | 43 | 44 | Proposed change 45 | =============== 46 | 47 | We need to support workflow UUID as a parameter of pecan WorkflowsController 48 | related methods(GET, PUT, DELETE), we still support workflow name for backward 49 | compatibility, the magic will be happened in the db api layer. At the 50 | meanwhile, workflow UUID needs to be exposed to end users, using which users 51 | could do operations they want. 52 | 53 | Things will be a little complicated for PUT method. Before, when we want to 54 | update a workflow definition, Mistral only accepts URL like 55 | http://localhost:8989/v2/workflows and the new workflow definition content as 56 | request body, which means workflow name is an identifier when updating 57 | workflow definition, and can't be changed. In order to support UUID, a new 58 | parameter with an appropriate default value will be added to PUT method, with 59 | the UUID, workflow name can be changed. In addition, when updating a workflow 60 | with UUID provided, only one workflow definition could be contained in request 61 | body. 62 | 63 | Since UUID is so important right now, users should see that both via REST API 64 | or Mistral client, a good news is, UUID has already be supported from both 65 | server side and client side[3]. 66 | 67 | Changes should also be made for db interface, operations should be supported 68 | based on UUID. 69 | 70 | Alternatives 71 | ------------ 72 | 73 | Without the UUID support in REST API, those problems mentioned in the first 74 | section can't be solved totally. 75 | 76 | Data model impact 77 | ----------------- 78 | 79 | None 80 | 81 | REST API impact 82 | --------------- 83 | 84 | All the REST API requests for workflow resource will support UUID, the request 85 | and response body are the same with before, the only exception to this is PUT 86 | HTTP method. 87 | 88 | When updating a workflow with UUID provided, only one workflow definition 89 | could be contained in request body. 90 | 91 | End user impact 92 | --------------- 93 | 94 | It's strongly recommended that users use UUID in URL of HTTP request or in the 95 | command line. 96 | 97 | Performance Impact 98 | ------------------ 99 | 100 | None 101 | 102 | Deployer impact 103 | --------------- 104 | 105 | None 106 | 107 | 108 | Implementation 109 | ============== 110 | 111 | Assignee(s) 112 | ----------- 113 | 114 | Primary assignee: 115 | kong 116 | 117 | Work Items 118 | ---------- 119 | 120 | * Add UUID support for GET, PUT, DELETE mothod of workflow REST API. 121 | 122 | 123 | Dependencies 124 | ============ 125 | 126 | None 127 | 128 | 129 | Testing 130 | ======= 131 | 132 | * Test the UUID support in REST API and/or Mistral client side. 
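For illustration, the following minimal sketch (not part of the original proposal) shows the kind of request such tests would exercise once workflows can be addressed by UUID; the UUID and token values are placeholders only:

.. code-block:: python

    # Sketch of fetching a single workflow by UUID rather than by name.
    # The UUID and token below are placeholder values, not real identifiers.
    import requests

    MISTRAL_URL = 'http://localhost:8989/v2'
    WF_ID = '1f4977b5-32d9-47c5-b33a-0a3a8e0d9a4e'  # placeholder UUID

    resp = requests.get(
        '%s/workflows/%s' % (MISTRAL_URL, WF_ID),
        headers={'X-Auth-Token': 'AUTH_TOKEN'},  # placeholder token
    )
    resp.raise_for_status()

    workflow = resp.json()
    print(workflow['id'], workflow['name'])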
133 | 134 | 135 | References 136 | ========== 137 | 138 | [1]: https://blueprints.launchpad.net/mistral/+spec/mistral-resource-sharing 139 | 140 | [2]: https://bugs.launchpad.net/python-mistralclient/+bug/1518276 141 | 142 | [3]: https://review.openstack.org/248031 143 | -------------------------------------------------------------------------------- /specs/newton/approved/mistral-custom-actions-api.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ================== 8 | Custom Actions API 9 | ================== 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-custom-actions-api 14 | 15 | This specification sets a formal basis for those Mistral users who want to 16 | create their own actions and make them available to use as part of Mistral 17 | workflows. The number one question that the spec addresses is "What is 18 | available in Mistral code base in order to implement custom actions?" 19 | 20 | 21 | Problem description 22 | =================== 23 | 24 | Custom actions are now possible to create and it's as simple as just 25 | implementing a class inherited from mistral.actions.base.Action that 26 | has 3 methods: 27 | 28 | * run() - executes main action logic, **mandatory** to implement 29 | 30 | * test() - execute action in test mode, related to future dry-run 31 | functionality, optional to implement 32 | 33 | * is_sync() - must return **True** if action returns its result right from 34 | method run() or **False** if method run() only starts action logic and 35 | result is supposed to be delivered later via public Mistral API 36 | 37 | There's also a mechanism based on stevedore library that allows to plug in 38 | new actions via adding new entry points in setup.cfg file. 39 | 40 | If a custom action doesn't require any integration neither with Mistral 41 | nor with OpenStack this is enough to know in order to implement it. 42 | 43 | However, if this action needs to leverage more advanced capabilities 44 | provided by Mistral and OpenStack then Action class itself doesn't 45 | give any knowledge about means that can be used to achieve that. 46 | A simple example of integration with OpenStack infrastructure is the need 47 | to call endpoints of OpenStack services. In this case, at minimum, action 48 | needs to be able to authenticate with Keystone, i.e., have access to 49 | Mistral security context. 50 | 51 | Use Cases 52 | --------- 53 | 54 | Simple OpenStack actions 55 | ^^^^^^^^^^^^^^^^^^^^^^^^ 56 | As a user of Mistral I want to create actions that call OpenStack services. 57 | In this case action needs to be able to access Mistral security context 58 | that contains auth token to be able to pass it to a corresponding service. 59 | Note: This use case is generally implemented within Mistral but it needs 60 | to be rethought since OpenStack actions that are implemented now in Mistral 61 | use Mistral Python code that is not assumed to be a public API and hence 62 | stable. 63 | 64 | Complex OpenStack actions 65 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 66 | As a user of Mistral I want to create actions that call multiple OpenStack 67 | services from within one action. 68 | 69 | For example, we may want to create action 70 | "create_cinder_volume_and_attach_to_vm" that creates a Cinder volume and 71 | attaches it to a virtual instance. 
In this case action needs to have access 72 | to Mistral security context that contains auth token so that it can pass 73 | that token to Cinder and Nova. 74 | 75 | Reusing existing actions 76 | ^^^^^^^^^^^^^^^^^^^^^^^^ 77 | 78 | As a user of Mistral I want to be able to reuse existing actions while 79 | implementing my new actions so that I don't have to reimplement similar 80 | functionality. 81 | 82 | For example, I want to create action that checks if a certain virtual 83 | instance exists in the tenant by calling Nova and if it does the action 84 | runs a number of secure shell commands to configure it. In this scenario, 85 | we need to call Nova and do ssh. Both already exist in Mistral as actions 86 | "nova.servers_get" and "std.ssh". So there should be a mechanism allowing 87 | to reuse those actions while creating a new more complex action. 88 | 89 | Proposed change 90 | =============== 91 | 92 | General idea 93 | ------------ 94 | 95 | We need to have one or more Python packages in Mistral that are designed 96 | and documented as a public Python API for developers that want to create 97 | custom actions. These packages should effectively provide a number of 98 | classes that can be used directly or inherited as needed. They should 99 | cover the following aspects of action development: 100 | 101 | * Base class or a number of classes that can be extended in order to build 102 | new Mistral actions. Currently existing **mistral.actions.base.Action** 103 | is an example of such class. 104 | 105 | * Module that provides access to security context associated with the 106 | current workflow that this action belongs to. Security context should 107 | at least include user, project/tenant, auth token. 108 | 109 | * Module that provides access to current Mistral execution context. That 110 | context should include: 111 | 112 | * Current workflow execution id 113 | 114 | * Current task execution id 115 | 116 | * Current action execution id 117 | 118 | * Package with most frequently used utils and data types used during 119 | custom actions development. For example, class 120 | mistral.workflow.utils.Result that now exists in the code base is 121 | needed by actions but it's not clear that it's part of Python API. 122 | 123 | * Module that allows to get and reuse existing actions 124 | 125 | Since these Python entities must be available for both engine and 126 | executor they should be moved to a separate subproject of Mistral, for 127 | example, **mistral-actions-api**. 128 | 129 | Existing OpenStack actions should be moved out of mistral project into 130 | a different Mistral subproject. The proposal is to use **mistral-extra** 131 | repo for this purpose because although we use it only for collecting 132 | Mistral examples its initial idea was also to have additional tools 133 | and extensions in it. 134 | 135 | Specific entities 136 | ----------------- 137 | 138 | mistral.actions.api 139 | ^^^^^^^^^^^^^^^^^^^ 140 | Main Python package that contains all modules and classes which are part 141 | of Custom Actions API. 142 | 143 | mistral.actions.api.base 144 | ^^^^^^^^^^^^^^^^^^^^^^^^ 145 | Python module that contains base classes for custom actions. Currently 146 | module **mistral.actions.base** performs similar function. 147 | 148 | Note: Specific content of this module is out of scope of this spec and 149 | must be defined at implementation stage. 
150 | 151 | mistral.actions.api.security 152 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 153 | Python module that contains required functions to get all required 154 | information related to current OpenStack security context. At minimum: 155 | user, project, auth token. 156 | 157 | Note: Specific content of this module is out of scope of this spec and 158 | must be defined at implementation stage. 159 | 160 | mistral.actions.api.types 161 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 162 | Python module that contains all data types that custom actions need to 163 | use. One candidate to go to that module that now exists is 164 | **mistral.workflow.utils.Result**. 165 | 166 | Note: Specific content of this module is out of scope of this spec and 167 | defined at implementation stage. 168 | 169 | mistral.actions.api.utils 170 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 171 | Python module that contains additional functions helpful for creating 172 | new Mistral actions. At minimum: functions to get instances of existing 173 | actions so that action developers could re-use functionality of existing 174 | actions. Return type for these actions though must be rather a wrapper 175 | that doesn't just call **Action.run()** method but instead uses Mistral 176 | action execution machinery to actually call action just like as if it 177 | was called as part of workflow (taking care of data transformations, 178 | fulfilling security and execution context etc.) 179 | 180 | Note: Specific content of this module is out of scope of this spec and 181 | must be defined at implementation stage. 182 | 183 | Alternatives 184 | ------------ 185 | 186 | None. 187 | 188 | Data model impact 189 | ----------------- 190 | 191 | None. 192 | 193 | REST API impact 194 | --------------- 195 | 196 | None. 197 | 198 | End user impact 199 | --------------- 200 | 201 | REST API users 202 | ^^^^^^^^^^^^^^ 203 | No impact. 204 | 205 | Custom actions developers 206 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 207 | Having to use Custom Actions API described in this spec whereas now they 208 | can only use **mistral.actions.base** safely. 209 | 210 | Performance Impact 211 | ------------------ 212 | 213 | No significant impact is expected. Minor is possible. 214 | 215 | Deployer impact 216 | --------------- 217 | 218 | Deployers will need to make sure to install a new library containing 219 | Custom Action API packages, modules and classes. However, this impact 220 | is not supposed to be severe because all dependencies must be handled 221 | smoothly by Pip. 222 | 223 | In case if there's an existing Mistral installation with installed 224 | actions, some DB migration might be required. Changes in DB schema are 225 | not expected though. If so, Mistral project should provide convenient 226 | tools to help make this transition to using new actions. 227 | 228 | Implementation 229 | ============== 230 | 231 | Assignee(s) 232 | ----------- 233 | 234 | To be found based on discussions around the spec. 235 | 236 | Work Items 237 | ---------- 238 | 239 | * Create a new repo containing the code of Custom Actions API (e.g. 
240 | **mistral-lib** or **mistral-common**, particular name is to be defined) 241 | * Design and implement modules listed in Specific Entities section 242 | * Provide deprecation mechanism so that during some period of time it 243 | would be possible to use the old approach for implementing Mistral 244 | actions (with **mistral.actions.base**) and the new one 245 | * Fix existing action implementations so that they use new API 246 | * Fix Mistral Executor accordingly 247 | * Fix Mistral Engine accordingly 248 | * Revisit and restructure repo **mistral-extra** 249 | * Move existing OpenStack actions into **mistral-extra** 250 | 251 | 252 | Dependencies 253 | ============ 254 | 255 | No additional dependencies are required. 256 | 257 | Testing 258 | ======= 259 | 260 | Custom Actions API can be tested on devstack based OpenStack CI gates 261 | such as gate-mistral-devstack-dsvm by creating and running custom 262 | actions that use this API. 263 | 264 | References 265 | ========== 266 | 267 | Initial patch for TripleO/Mistral integration: 268 | https://review.openstack.org/#/c/282366/ 269 | -------------------------------------------------------------------------------- /specs/newton/implemented/mistral-item-filtering.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ========================== 8 | Items filtering in Mistral 9 | ========================== 10 | 11 | https://blueprints.launchpad.net/mistral/+spec/mistral-items-filtering 12 | 13 | 14 | Problem description 15 | =================== 16 | 17 | Currently, we don't have any support for items filtering on the server side 18 | which affects performance badly. In future, we may have thousands of actions. 19 | So, we should be able to filter items before transferring them over the 20 | network. Also, it is very difficult to find specific data in the list of 21 | returned items. For example, we have a lot of 'standard' OpenStack actions. 22 | So it is not very convenient to look through all of them to find actions 23 | for nova. 24 | 25 | Use Cases 26 | --------- 27 | 28 | As a user, I want to have an ability to filter data. 29 | 30 | 31 | Proposed change 32 | =============== 33 | 34 | To solve this problem, I would like to implement customized filter for 35 | database. This filter will support "in", "nin", ">", ">=", "<" and "<=" 36 | operations. Also, new parameters will be allowed in REST API. These 37 | parameters are the name of columns by which users are allowed to filter 38 | data. 39 | 40 | Alternatives 41 | ------------ 42 | 43 | None 44 | 45 | Data model impact 46 | ----------------- 47 | 48 | None 49 | 50 | REST API impact 51 | --------------- 52 | 53 | 1. Gets only those actions which have '' as value. 54 | 55 | GET http://127.0.0.1:8989/v2/actions?= 56 | 57 | 58 | 2. Gets only those actions which have '' or '' as 59 | value. 60 | 61 | GET http://127.0.0.1:8989/v2/actions?=in:, 62 | 63 | 64 | 3. Gets only those actions which have not '' or '' 65 | as value. 66 | 67 | GET http://127.0.0.1:8989/v2/actions?=nin:, 68 | 69 | 70 | 4. Gets only those actions which have not as value. 71 | 72 | GET http://127.0.0.1:8989/v2/actions?=neq: 73 | 74 | 75 | 5. Gets only those actions which have greater value than as 76 | value. 77 | 78 | GET http://127.0.0.1:8989/v2/actions?=gt: 79 | 80 | 81 | 6. 
Gets only those actions which have greater than or equal to as 82 | value. 83 | 84 | GET http://127.0.0.1:8989/v2/actions?=gte: 85 | 86 | 87 | 7. Gets only those actions which have less value than as 88 | value. 89 | 90 | GET http://127.0.0.1:8989/v2/actions?=lt: 91 | 92 | 93 | 8. Gets only those actions which have less than or equal to as 94 | value. 95 | 96 | GET http://127.0.0.1:8989/v2/actions?=lte: 97 | 98 | Same changes are proposed for workflows, workbooks, cron triggers, executions, 99 | tasks etc. 100 | 101 | End user impact 102 | --------------- 103 | 104 | User can add new option in CLI. For instance :: 105 | 106 | mistral action-list --filter =: 107 | 108 | Here '' can be "eq", "neq", "gt", "gte", "lt", "lte", "in" or "nin". 109 | Multiple values can be provided by using ";" separator. 110 | 111 | Performance Impact 112 | ------------------ 113 | 114 | Only requested/filtered data will be fetched from database and transmitted over 115 | network. So, it will improve the response time for the request. 116 | 117 | Deployer impact 118 | --------------- 119 | 120 | None 121 | 122 | 123 | Implementation 124 | ============== 125 | 126 | Assignee(s) 127 | ----------- 128 | 129 | hardik 130 | 131 | Work Items 132 | ---------- 133 | 134 | * Add new parameter to REST api. 135 | * Add new option for CLI. 136 | 137 | Dependencies 138 | ============ 139 | 140 | None 141 | 142 | 143 | Testing 144 | ======= 145 | 146 | Tests should cover all the scenarios memtioned in use cases section. 147 | 148 | 149 | References 150 | ========== 151 | 152 | [1]: https://review.openstack.org/269971 153 | [2]: http://specs.openstack.org/openstack/api-wg/guidelines/pagination_filter_sort.html 154 | -------------------------------------------------------------------------------- /specs/newton/implemented/mistral-multi-vim-support.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ================================================ 8 | Support connection to arbitrary Openstack clouds 9 | ================================================ 10 | 11 | https://blueprints.launchpad.net/mistral/+spec/mistral-multi-vim-support 12 | 13 | 14 | Problem description 15 | =================== 16 | 17 | Mistral is configured to execute workflows on a specific cloud (VIM (Virtual 18 | Infrastructure Manager) in ETSI MANO terms). In order to execute workflows on a 19 | different cloud one needs to start a new Mistral instance with the cloud 20 | specific configuration. When Mistral is used in standalone mode, this is an 21 | undesired overhead and can cause unpredictable resource usage patterns. 22 | 23 | This blueprint proposes to extend the workflow execution parameters to make it 24 | possible to target a specific cloud without modifying the configuration of the 25 | Mistral service. 26 | 27 | 28 | Use Cases 29 | --------- 30 | 31 | * As a tenant, I want to execute the same workflow on different clouds without 32 | having to start a new Mistral instance with changed configuration. 33 | 34 | 35 | Proposed change 36 | =============== 37 | 38 | Both the Mistral server and command line client are affected. 39 | 40 | The 'execution-create' functionality should take parameters that describe the 41 | target cloud for the execution. 
The following optional parameters should be 42 | added: 43 | 44 | - target_auth_url: auth URL of the target cloud 45 | - target_auth_token: valid authentication token for the target cloud 46 | - target_ca_cert: CA cert of target cloud 47 | 48 | If both present, these are used to execute the workflow actions. If none are 49 | present, then the current legacy operation mode is used, and the preconfigured 50 | authentication URL is used in the actions. In any other case the execution 51 | creation should fail. 52 | 53 | These parameters are stored in the context field of the execution. 54 | 55 | The parameters are added as headers to the HTTP API request. 56 | 57 | The self authentication of Mistral should be carried out the same way as done 58 | currently. 59 | 60 | Cron triggers need the admin credentials for the tartget cloud. We do not 61 | consider this case in the current proposal. 62 | 63 | Alternatives 64 | ------------ 65 | 66 | * Start mulitple Mistral instances with different settings. 67 | * This is an inflexible solution for the problem and the setup of the new 68 | instances incurs significant administrative overhead. 69 | 70 | 71 | Data model impact 72 | ----------------- 73 | 74 | The change affects the `context` information of the execution object by 75 | including the target auth URL and target token. The context is stored as Json, 76 | so this change should not require DB migration. 77 | 78 | 79 | REST API impact 80 | --------------- 81 | 82 | *POST /executions* 83 | 84 | This method should now accept the new optional target_* parameters according 85 | to the specified rules. 86 | 87 | Output should not change. 88 | 89 | 90 | End user impact 91 | --------------- 92 | 93 | The python-mistralclient should accept the following parameters: 94 | 95 | - TARGET_OS_USERNAME: user name on the target cloud 96 | - TARGET_OS_PASSWORD: password on the target cloud 97 | - TARGET_OS_TENANT: tenant on the target cloud 98 | - TARGET_OS_AUTH_URL: keystone URL of the target cloud 99 | - TARGET_OS_CA_CERT: CA cert of target cloud 100 | 101 | The client should authenticate both towards the OS_AUTH_URL and 102 | TARGET_OS_AUTH_URL keystones. 103 | 104 | Either all or none of these parameters should be set. 105 | 106 | 107 | Performance Impact 108 | ------------------ 109 | 110 | None. 111 | 112 | 113 | Deployer impact 114 | --------------- 115 | 116 | This change takes effect immediately after deployment. 117 | 118 | 119 | Implementation 120 | ============== 121 | 122 | Assignee(s) 123 | ----------- 124 | 125 | Primary assignee: 126 | 127 | Other contributors: 128 | 129 | 130 | Work Items 131 | ---------- 132 | 133 | * Implement the changes in the python-mistralclient 134 | 135 | * Add support for taking new parameters from environment and command line 136 | 137 | 138 | * Implement the changes in Mistral 139 | 140 | * Inject auth URI in execution context 141 | * Reason: Actions require the target Auth URI 142 | * Tasks: 143 | 144 | * Set auth_uri in context either from the TARGET_OS_AUTH_URL header or 145 | from CONF. 146 | 147 | * Eliminate admin user for Keystone 148 | 149 | * Reason: Admin credentials should not be required to connect to target 150 | cloud. 
151 | * Tasks: 152 | 153 | * Use non-admin Keystone client 154 | * Use 'tokens' API to retrieve service endpoints 155 | 156 | * Use auth URL from context to create service clients 157 | 158 | * Reason: service clients need to connect to target cloud 159 | * Task: 160 | 161 | * Do as stated above 162 | 163 | * Add new headers to allowed_headers 164 | 165 | * Reason: this feature may be used in the future 166 | 167 | 168 | Dependencies 169 | ============ 170 | 171 | None. 172 | 173 | 174 | Testing 175 | ======= 176 | 177 | * Execute a Mistral workflow with targeting a different cloud than what is 178 | declared in the configuration. 179 | * Create two simultaneously ran executions of 180 | the same mistral workflow targeting different clouds/regions/tenants. Check 181 | if both succeed. 182 | 183 | 184 | References 185 | ========== 186 | 187 | None. 188 | -------------------------------------------------------------------------------- /specs/ocata/approved/mistral-custom-actions-api.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ================== 8 | Custom Actions API 9 | ================== 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-custom-actions-api 14 | 15 | This specification sets a formal basis for those Mistral users who want to 16 | create their own actions and make them available to use as part of Mistral 17 | workflows. The number one question that the spec addresses is "What is 18 | available in Mistral code base in order to implement custom actions?" 19 | 20 | 21 | Problem description 22 | =================== 23 | 24 | Custom actions are now possible to create and it's as simple as just 25 | implementing a class inherited from mistral.actions.base.Action that 26 | has 3 methods: 27 | 28 | * run() - executes main action logic, **mandatory** to implement 29 | 30 | * test() - execute action in test mode, related to future dry-run 31 | functionality, optional to implement 32 | 33 | * is_sync() - must return **True** if action returns its result right from 34 | method run() or **False** if method run() only starts action logic and 35 | result is supposed to be delivered later via public Mistral API 36 | 37 | There's also a mechanism based on stevedore library that allows to plug in 38 | new actions via adding new entry points in setup.cfg file. 39 | 40 | If a custom action doesn't require any integration neither with Mistral 41 | nor with OpenStack this is enough to know in order to implement it. 42 | 43 | However, if this action needs to leverage more advanced capabilities 44 | provided by Mistral and OpenStack then Action class itself doesn't 45 | give any knowledge about means that can be used to achieve that. 46 | A simple example of integration with OpenStack infrastructure is the need 47 | to call endpoints of OpenStack services. In this case, at minimum, action 48 | needs to be able to authenticate with Keystone, i.e., have access to 49 | Mistral security context. 50 | 51 | Use Cases 52 | --------- 53 | 54 | Simple OpenStack actions 55 | ^^^^^^^^^^^^^^^^^^^^^^^^ 56 | As a user of Mistral I want to create actions that call OpenStack services. 57 | In this case action needs to be able to access Mistral security context 58 | that contains auth token to be able to pass it to a corresponding service. 
59 | Note: This use case is generally implemented within Mistral but it needs 60 | to be rethought since OpenStack actions that are implemented now in Mistral 61 | use Mistral Python code that is not assumed to be a public API and hence 62 | stable. 63 | 64 | Complex OpenStack actions 65 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 66 | As a user of Mistral I want to create actions that call multiple OpenStack 67 | services from within one action. 68 | 69 | For example, we may want to create action 70 | "create_cinder_volume_and_attach_to_vm" that creates a Cinder volume and 71 | attaches it to a virtual instance. In this case action needs to have access 72 | to Mistral security context that contains auth token so that it can pass 73 | that token to Cinder and Nova. 74 | 75 | Reusing existing actions 76 | ^^^^^^^^^^^^^^^^^^^^^^^^ 77 | 78 | As a user of Mistral I want to be able to reuse existing actions while 79 | implementing my new actions so that I don't have to reimplement similar 80 | functionality. 81 | 82 | For example, I want to create action that checks if a certain virtual 83 | instance exists in the tenant by calling Nova and if it does the action 84 | runs a number of secure shell commands to configure it. In this scenario, 85 | we need to call Nova and do ssh. Both already exist in Mistral as actions 86 | "nova.servers_get" and "std.ssh". So there should be a mechanism allowing 87 | to reuse those actions while creating a new more complex action. 88 | 89 | Proposed change 90 | =============== 91 | 92 | General idea 93 | ------------ 94 | 95 | We need to have one or more Python packages in Mistral that are designed 96 | and documented as a public Python API for developers that want to create 97 | custom actions. These packages should effectively provide a number of 98 | classes that can be used directly or inherited as needed. They should 99 | cover the following aspects of action development: 100 | 101 | * Base class or a number of classes that can be extended in order to build 102 | new Mistral actions. Currently existing **mistral.actions.base.Action** 103 | is an example of such class. 104 | 105 | * Module that provides access to security context associated with the 106 | current workflow that this action belongs to. Security context should 107 | at least include user, project/tenant, auth token. 108 | 109 | * Module that provides access to current Mistral execution context. That 110 | context should include: 111 | 112 | * Current workflow execution id 113 | 114 | * Current task execution id 115 | 116 | * Current action execution id 117 | 118 | * Package with most frequently used utils and data types used during 119 | custom actions development. For example, class 120 | mistral.workflow.utils.Result that now exists in the code base is 121 | needed by actions but it's not clear that it's part of Python API. 122 | 123 | * Module that allows to get and reuse existing actions 124 | 125 | Since these Python entities must be available for both engine and 126 | executor they should be moved to a separate subproject of Mistral, for 127 | example, **mistral-actions-api**. 128 | 129 | Existing OpenStack actions should be moved out of mistral project into 130 | a different Mistral subproject. The proposal is to use **mistral-extra** 131 | repo for this purpose because although we use it only for collecting 132 | Mistral examples its initial idea was also to have additional tools 133 | and extensions in it. 
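
As an illustration of how these pieces could fit together, the sketch below
shows a custom action that checks whether a Nova server exists in the current
tenant. Every name related to the new API (the **mistral.actions.api** package,
the **security** module, its accessor and the attributes of the returned
context) is a placeholder, since the actual modules are only defined at the
implementation stage.

::

    # Illustrative sketch only: the mistral.actions.api package and the
    # security.get_security_context() accessor are placeholders for
    # whatever the Custom Actions API finally exposes.
    from keystoneauth1.identity import v3
    from keystoneauth1 import session as ks_session
    from novaclient import client as nova_client

    from mistral.actions.api import base       # placeholder module
    from mistral.actions.api import security   # placeholder module


    class ServerExistsAction(base.Action):
        """Checks whether a server with the given name exists in the tenant."""

        def __init__(self, server_name):
            self.server_name = server_name

        def run(self):
            # The security context is expected to expose at least the user,
            # the project/tenant and the auth token (attribute names are
            # assumptions here).
            ctx = security.get_security_context()

            auth = v3.Token(
                auth_url=ctx.auth_url,
                token=ctx.auth_token,
                project_id=ctx.project_id,
            )
            nova = nova_client.Client(
                '2', session=ks_session.Session(auth=auth))

            return any(s.name == self.server_name
                       for s in nova.servers.list())

        def is_sync(self):
            return True

A similar accessor in the execution context module would expose the current
workflow, task and action execution ids listed above, and the utils module
would provide a way to obtain and run instances of existing actions (such as
"nova.servers_get" or "std.ssh") from within a new action.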
134 | 135 | Specific entities 136 | ----------------- 137 | 138 | mistral.actions.api 139 | ^^^^^^^^^^^^^^^^^^^ 140 | Main Python package that contains all modules and classes which are part 141 | of Custom Actions API. 142 | 143 | mistral.actions.api.base 144 | ^^^^^^^^^^^^^^^^^^^^^^^^ 145 | Python module that contains base classes for custom actions. Currently 146 | module **mistral.actions.base** performs similar function. 147 | 148 | Note: Specific content of this module is out of scope of this spec and 149 | must be defined at implementation stage. 150 | 151 | mistral.actions.api.security 152 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 153 | Python module that contains required functions to get all required 154 | information related to current OpenStack security context. At minimum: 155 | user, project, auth token. 156 | 157 | Note: Specific content of this module is out of scope of this spec and 158 | must be defined at implementation stage. 159 | 160 | mistral.actions.api.types 161 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 162 | Python module that contains all data types that custom actions need to 163 | use. One candidate to go to that module that now exists is 164 | **mistral.workflow.utils.Result**. 165 | 166 | Note: Specific content of this module is out of scope of this spec and 167 | defined at implementation stage. 168 | 169 | mistral.actions.api.utils 170 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 171 | Python module that contains additional functions helpful for creating 172 | new Mistral actions. At minimum: functions to get instances of existing 173 | actions so that action developers could re-use functionality of existing 174 | actions. Return type for these actions though must be rather a wrapper 175 | that doesn't just call **Action.run()** method but instead uses Mistral 176 | action execution machinery to actually call action just like as if it 177 | was called as part of workflow (taking care of data transformations, 178 | fulfilling security and execution context etc.) 179 | 180 | Note: Specific content of this module is out of scope of this spec and 181 | must be defined at implementation stage. 182 | 183 | Alternatives 184 | ------------ 185 | 186 | None. 187 | 188 | Data model impact 189 | ----------------- 190 | 191 | None. 192 | 193 | REST API impact 194 | --------------- 195 | 196 | None. 197 | 198 | End user impact 199 | --------------- 200 | 201 | REST API users 202 | ^^^^^^^^^^^^^^ 203 | No impact. 204 | 205 | Custom actions developers 206 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 207 | Having to use Custom Actions API described in this spec whereas now they 208 | can only use **mistral.actions.base** safely. 209 | 210 | Performance Impact 211 | ------------------ 212 | 213 | No significant impact is expected. Minor is possible. 214 | 215 | Deployer impact 216 | --------------- 217 | 218 | Deployers will need to make sure to install a new library containing 219 | Custom Action API packages, modules and classes. However, this impact 220 | is not supposed to be severe because all dependencies must be handled 221 | smoothly by Pip. 222 | 223 | In case if there's an existing Mistral installation with installed 224 | actions, some DB migration might be required. Changes in DB schema are 225 | not expected though. If so, Mistral project should provide convenient 226 | tools to help make this transition to using new actions. 227 | 228 | Implementation 229 | ============== 230 | 231 | Assignee(s) 232 | ----------- 233 | 234 | To be found based on discussions around the spec. 
235 | 236 | Work Items 237 | ---------- 238 | 239 | * Create a new repo containing the code of Custom Actions API (e.g. 240 | **mistral-lib** or **mistral-common**, particular name is to be defined) 241 | * Design and implement modules listed in Specific Entities section 242 | * Provide deprecation mechanism so that during some period of time it 243 | would be possible to use the old approach for implementing Mistral 244 | actions (with **mistral.actions.base**) and the new one 245 | * Fix existing action implementations so that they use new API 246 | * Fix Mistral Executor accordingly 247 | * Fix Mistral Engine accordingly 248 | * Revisit and restructure repo **mistral-extra** 249 | * Move existing OpenStack actions into **mistral-extra** 250 | 251 | 252 | Dependencies 253 | ============ 254 | 255 | No additional dependencies are required. 256 | 257 | Testing 258 | ======= 259 | 260 | Custom Actions API can be tested on devstack based OpenStack CI gates 261 | such as gate-mistral-devstack-dsvm by creating and running custom 262 | actions that use this API. 263 | 264 | References 265 | ========== 266 | 267 | Initial patch for TripleO/Mistral integration: 268 | https://review.openstack.org/#/c/282366/ 269 | -------------------------------------------------------------------------------- /specs/ocata/approved/workflow-global-context.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ======================= 8 | Workflow Global Context 9 | ======================= 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-global-wf-context 14 | 15 | Workflow global context will allow to store variables not associated with 16 | particular workflow branches. 17 | 18 | Problem description 19 | =================== 20 | 21 | Currently 'publish' keyword in Mistral saves variables into a storage 22 | (context) which is associated only with a branch. 23 | 24 | Example: 25 | 26 | :: 27 | 28 | --- 29 | version: '2.0' 30 | 31 | wf: 32 | tasks: 33 | A: 34 | action: std.noop 35 | publish: 36 | my_var: 1 37 | on-success: A1 38 | 39 | A1: 40 | action: my_action param1=<% $.my_var %> 41 | 42 | B: 43 | action: std.noop 44 | publish: 45 | my_var: 2 46 | on-success: B1 47 | 48 | B1: 49 | action: my_action param1=<% $.my_var %> 50 | 51 | 52 | The expression "$.my_var" in the declaration of A1 will always evaluate to 1, 53 | for B1 it will always evaluate to 2. This doesn't depend on the order in which 54 | A and B will run. This is because we have two branches (A -> A1 and B -> B1) 55 | for which the variable "my_var" has its own different version. 56 | 57 | Sometimes though we need to be able to share data across branches which is now 58 | impossible due to aforementioned semantics. 59 | The concept of workflow global context can help solve this problem. The word 60 | "global" here means "accessible from any workflow branch". 61 | 62 | We also need an ability to make atomic updates of global workflow context. 63 | It's necessary when we, for example, want to create a global counter (e.g. 64 | counter of network calls to external systems performed by a workflow). 65 | 66 | Use Cases 67 | --------- 68 | 69 | * Building conditions based on events happened in parallel workflow branches. 70 | Example: one branch needs to notify the other one that it should stop. 
71 | * Passing data between branches. Example: one branch needs to wait till the 72 | other one produces some expected result. This is, essentially, creating 73 | a cross-branch mutex. 74 | * Counters that need to decrement or increment atomically. 75 | 76 | Proposed change 77 | =============== 78 | 79 | In order to achieve this goal the proposal is: 80 | 81 | * Add the new keyword "publish-global" which is similar to "publish" 82 | with the difference that it publishes variables into workflow global 83 | context instead of branch workflow context. It's important to note 84 | that this is an unprotected way of modifying data because race 85 | conditions are possible when writing different values for same 86 | variables in the global context from parallel branches. In other 87 | words, if we have branches A and B and there are tasks in these 88 | branches writing different values to the variable X in the global 89 | context Mistral won't provide any guarantees as far as what value 90 | is going to be assigned to X and what value will be lost. Users need 91 | to understand possible consequences. 92 | For instance, using this keyword it's impossible to create an atomic 93 | counter since it doesn't assume acquiring a lock under which we can 94 | safely perform multiple operations (e.g. read and then write). 95 | However, for many scenarios even this model can be useful. For example, 96 | if there's only one branch writing values and others are only readers. 97 | * Add the new YAQL/Jinja function "global()" to explicitly access 98 | variables in workflow global context. 99 | * Make global variables also accessible using "$." in YAQL and "_." in 100 | Jinja in a way that branch variables can shadow them if they are 101 | published in the current branch. 102 | * Add the new keyword "publish-global-atomic" which is similar to 103 | "publish-global" but allows to atomically read and write variables 104 | in workflow global context by acquiring a temporary lock on it. 105 | Unlike 'publish-global' this will allow to create atomic counters 106 | when we need to perform multiple operations against the storage 107 | atomically. 108 | 109 | Example #1 (writing and reading global variables): 110 | 111 | :: 112 | 113 | --- 114 | version: '2.0' 115 | 116 | wf: 117 | tasks: 118 | A: 119 | action: std.noop 120 | publish: 121 | my_var: "branch value" 122 | publish-global: 123 | my_var: "global value" 124 | on-success: A1 125 | 126 | A1: 127 | # $.my_var will always evaluate to "branch value" because A1 belongs 128 | # to the same branch as A and runs after A. When using "$" to access 129 | # context variables branch values have higher priority. 130 | # In order to access global context reliably we need to use YAQL/Jinja 131 | # function 'global'. So global(my_var) will always evaluate to 132 | # 'global value'. 133 | action: my_action1 param1=<% $.my_var %> param2=<% global(my_var) %> 134 | 135 | B: 136 | # $.my_var will evaluate to "global value" if task A completes 137 | # before task B and "null", if not. It's because A and B are 138 | # parallel and 'publish' in A doesn't apply to B, only 139 | # 'publish-global' does. In this example global(my_var) has the same 140 | # meaning as $.my_var because there's no ambiguity from what context 141 | # we should take variable 'my_var'. 
142 | action: my_action2 param1=<% $.my_var %> param2=<% global(my_var) %> 143 | 144 | 145 | Example #2 (writing global variables atomically): 146 | 147 | :: 148 | 149 | --- 150 | version: '2.0' 151 | 152 | vars: 153 | - my_global_var: 0 154 | 155 | wf: 156 | tasks: 157 | task1: 158 | action: std.noop 159 | publish-global-atomic: 160 | counter: <% global(my_global_var) + 1 %> 161 | 162 | task2: 163 | action: std.noop 164 | publish-global-atomic: 165 | counter: <% global(my_global_var) + 1 %> 166 | 167 | 168 | Alternatives 169 | ------------ 170 | 171 | None. 172 | 173 | Data model impact 174 | ----------------- 175 | 176 | Workflow execution object already has the field "context" which is now 177 | immutable and initialized with openstack specific data, execution id and 178 | environment variables. In order to get the full context for evaluating a 179 | YAQL/Jinja expression in a task declaration we always build a context view 180 | merged from workflow input, workflow execution "context" field and branch 181 | specific context (e.g. task inbound context when evaluating action 182 | parameters). The field "context" can play the role of workflow global 183 | context. However, the idea to reuse this field can be revisited during 184 | the implementation phase. 185 | 186 | REST API impact 187 | --------------- 188 | 189 | None. 190 | 191 | End user impact 192 | --------------- 193 | 194 | New workflow language feature that allows to store global variables into 195 | workflow context. 196 | 197 | Performance Impact 198 | ------------------ 199 | 200 | When using "publish-global-atomic" we'll need to use locking in order 201 | to prevent concurrent modifications of global workflow context while 202 | reading and modifying it when processing a certain task. In fact, this is 203 | equal to locking the whole execution object and hence will have a serious 204 | performance impact in case of many parallel tasks. For this reason, 205 | "publish-global-atomic" needs to be well documented and used with 206 | precaution. 207 | 208 | Deployer impact 209 | --------------- 210 | 211 | None. 212 | 213 | 214 | Implementation 215 | ============== 216 | 217 | Assignee(s) 218 | ----------- 219 | 220 | Primary assignee: 221 | rakhmerov 222 | 223 | Other contributors: 224 | melisha 225 | 226 | Work Items 227 | ---------- 228 | 229 | * Add 'publish-global' and 'publish-global-atomic' into the direct workflow 230 | specification. 231 | * Make changes in Mistral engine to publish variables into global context 232 | (preliminarily it will be the field 'context' of workflow execution object). 233 | * Implement YAQL/Jinja function 'global' to explicitly read variables from 234 | workflow global context. 235 | * Add locking workflow global context (i.e. workflow execution) in case of 236 | using 'publish-global-atomic'. A thread that acquires a lock must first 237 | refresh state of workflow execution and then proceed with publishing etc. 238 | 239 | Dependencies 240 | ============ 241 | 242 | None. 243 | 244 | 245 | Testing 246 | ======= 247 | 248 | * Unit tests for 'publish-global' keyword and 'global' function in different 249 | cases: parallel branches, sequential branches. 250 | * Unit tests to check that branch-local variables take precedence when 251 | reading variables using '$.' in YAQL and '_.' in Jinja. 252 | * Unit tests for 'publish-global-atomic' that checks atomicity of reads and 253 | writes of global variables. Although unit tests can't fully test this 254 | feature. 
In order to fully test it we need to have a test with multiple 255 | Mistral engines to make sure we have concurrent access to workflow execution. 256 | 257 | References 258 | ========== 259 | 260 | None. 261 | -------------------------------------------------------------------------------- /specs/ocata/implemented/publish-on-failure.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | =============================================== 8 | Publish/output in case of task/workflow failure 9 | =============================================== 10 | 11 | https://blueprints.launchpad.net/mistral/+spec/mistral-publish-on-error 12 | 13 | Currently it is not possible to provide any reasonable output in case of a 14 | task or workflow failure. Implementing this would greatly simplify error 15 | handling in workflows. 16 | 17 | 18 | Problem description 19 | =================== 20 | 21 | This blueprint is a proposal to introduce two new attributes, 22 | publish-on-error for tasks and output-on-error for workflows for this purpose. 23 | 24 | 25 | Use Cases 26 | --------- 27 | 28 | * As a user, I would like to define a workflow with a generic error 29 | handler task. 30 | * As a user, I would like to simplify error handling in my complex 31 | workflow system. 32 | 33 | Proposed change 34 | =============== 35 | 36 | To solve the problem I would like to introduce the following new task and 37 | workflow attributes: 38 | 39 | * Task - publish-on-error: Any data structure arbitrarily containing YAQL 40 | expressions that defines output of a task to be published into workflow 41 | context if it goes into error state. 42 | 43 | * Workflow - output-on-error: Any data structure arbitrarily containing YAQL 44 | expressions that defines output of a workflow to be returned if it goes into 45 | error state. 46 | 47 | Example workbook: 48 | 49 | .. code-block:: yaml 50 | 51 | workflows: 52 | main: 53 | tasks: 54 | task_1: 55 | workflow: sub-workflow 56 | publish-on-error: 57 | failure_cause: <% task(task_1).result.failure_cause %> 58 | detailed_cause: <% task(task_1).result.detailed_cause %> 59 | on-error: 60 | - error-handler 61 | task_2: 62 | ... 63 | publish-on-error: 64 | failure_cause: <% task(task_2).result.failure_cause %> 65 | detailed_cause: <% task(task_2).result.detailed_cause %> 66 | on-error: 67 | - error-handler 68 | 69 | error-handler: 70 | action: send_email 71 | input: 72 | body: | 73 | <% $.failure_cause %> 74 | Details: 75 | <% $.detailed_cause %> 76 | 77 | sub-workflow: 78 | output: 79 | result: <% $.result %> 80 | output-on-error: 81 | failure_cause: <% $.failure_cause %> 82 | detailed_cause: <% $.detailed_cause %> 83 | 84 | tasks: 85 | task1: 86 | ... 87 | publish-on-error: 88 | failure_cause: "Failure in sub-workflow.task1!" 89 | detailed_cause: <% task(task1).result %> 90 | on-success: 91 | - task2 92 | task2: 93 | ... 94 | publish-on-error: 95 | failure_cause: "Failure in sub-workflow.task2!" 
96 | detailed_cause: <% task(task2).result %> 97 | 98 | 99 | Alternatives 100 | ------------ 101 | 102 | N/A 103 | 104 | Data model impact 105 | ----------------- 106 | Two new fields introduced: 107 | * Task spec - publish-on-error 108 | * Workflow spec - output-on-error 109 | 110 | REST API impact 111 | --------------- 112 | None 113 | 114 | End user impact 115 | --------------- 116 | Workflow language additions that allow to handle errors in a more flexible way. 117 | Existing workflows will work without any change. 118 | 119 | Performance Impact 120 | ------------------ 121 | None 122 | 123 | Deployer impact 124 | --------------- 125 | None 126 | 127 | 128 | Implementation 129 | ============== 130 | 131 | Assignee(s) 132 | ----------- 133 | 134 | Primary assignee: 135 | István Imre 136 | 137 | Other contributors: 138 | Endre János Kovács 139 | 140 | Work Items 141 | ---------- 142 | * add publish-on-error to task spec 143 | * add output-on-error to workflow spec 144 | * documentation 145 | 146 | 147 | Dependencies 148 | ============ 149 | None 150 | 151 | Testing 152 | ======= 153 | * new engine test for the two new attributes 154 | 155 | 156 | References 157 | ========== 158 | None 159 | -------------------------------------------------------------------------------- /specs/ocata/implemented/yaql-tasks-function.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | =================== 8 | Yaql Tasks Function 9 | =================== 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/yaql-tasks-function 14 | 15 | This new function will allow user to get a list of tasks matching certain 16 | filters. For example: only task in state ERROR from the current execution. 17 | 18 | Work on this draft started before **Jinja2** support was added. That said, a 19 | user will be able to use this function both as part of **YAQL** expression and 20 | as part of **Jinja2** expression. 21 | 22 | 23 | Problem description 24 | =================== 25 | 26 | There's no easy way in the error handler to know which task failed and failure 27 | information. 28 | 29 | Use Cases 30 | --------- 31 | 32 | * When an error happens and default on-error is triggered, decide how to handle 33 | it according to the task that failed, even if it is in a nested workflow. 34 | 35 | * Be able to get all tasks that failed in a workflow easily. Including nested 36 | tasks. 37 | 38 | Proposed change 39 | =============== 40 | 41 | Add a new **YAQL** function called **tasks**, which will be similar to the 42 | **task** function we have today. The main difference is that **tasks** function 43 | will return a list and will have more querying options. 44 | 45 | Positional parameters for the new function: 46 | 47 | #. ``wf-execution-id`` (optional) - will allow to get tasks information from a 48 | specific workflow execution (either the current execution or a different 49 | one), if not passed, it will list all tasks. 50 | #. ``recursive`` (optional. Default: false) - if true treat all tasks in 51 | nested workflows as if they where also a part of all higher level 52 | wf-executions. Relevant mostly when filtering using ``wf-execution-id``. 53 | #. ``state`` (optional) - get only tasks with the given state, for example 54 | all ERROR tasks. If not passed, it will list all tasks. 55 | #. ``flat`` (optional. 
Default: false) - if true, only list the tasks that 56 | match at least one of the next conditions: 57 | 58 | * tasks of type action 59 | * tasks of type workflow, that also have a different state than the one 60 | of the nested workflow execution that was executed because of the task. 61 | 62 | 63 | workflow with **YAQL** function example: 64 | 65 | :: 66 | 67 | --- 68 | version: '2.0' 69 | wf: 70 | type: direct 71 | input: 72 | - tasks_in_error: [] 73 | tasks: 74 | my_example: 75 | action: std.noop 76 | publish: 77 | # publish all tasks in state ERROR of the current execution only 78 | tasks_in_error: <% tasks(execution().id, ERROR) %> 79 | 80 | When running 'mistral task-get-published $TASK_ID' where TASK_ID is the ID 81 | of my_example task execution, mistral will return: 82 | 83 | :: 84 | 85 | { 86 | "tasks_in_error": [] 87 | } 88 | 89 | 90 | The items in the list (when the list isn't empty) are from the same type and 91 | structure as returned today from the task function. 92 | 93 | Alternatives 94 | ------------ 95 | 96 | The parameters order and function name can be different. 97 | We might want to pass a list of states and not just one. 98 | 99 | Data model impact 100 | ----------------- 101 | 102 | This change doesn't have to influence the data model. 103 | 104 | However, right now the behavior is that a task state is updated before 105 | publishing values are evaluated, and that is what is visible from the context 106 | of the function. We might want to change it in the future. 107 | 108 | REST API impact 109 | --------------- 110 | 111 | None. 112 | 113 | End user impact 114 | --------------- 115 | 116 | The user will have a new function that can be used as part of **YAQL** or 117 | **Jinja2** expressions. 118 | 119 | Performance Impact 120 | ------------------ 121 | 122 | There is a possibility each call for this new function will trigger multiple 123 | DB queries. The more nested the workflow is, the more queries. This is not 124 | very efficient, but we can improve this later on if necessary. 125 | 126 | Another thing that might happen is, if the user will not filter the tasks, 127 | the amount of data might cause a timeout. 128 | 129 | Example for clarity - a workflow and the published result 130 | :: 131 | 132 | --- 133 | version: '2.0' 134 | 135 | wf: 136 | type: direct 137 | input: 138 | - tasks_in_error: [] 139 | tasks: 140 | my_example: 141 | action: std.noop 142 | publish: 143 | # publish all the tasks of the current execution only 144 | all_tasks_in_execution: <% tasks(execution().id) %> 145 | 146 | The result of publishing: 147 | 148 | :: 149 | 150 | { 151 | "all_tasks_in_execution": [ 152 | { 153 | "state_info": null, 154 | "name": "my_example", 155 | "spec": { 156 | "action": "std.noop", 157 | "version": "2.0", 158 | "type": "direct", 159 | "name": "my_example", 160 | "publish": { 161 | "all_tasks_in_execution": "<% tasks(execution().id) %>" 162 | } 163 | }, 164 | "state": "SUCCESS", 165 | "result": null, 166 | "published": {}, 167 | "id": "a8b4787c-5b10-488a-8539-8370488fed8c" 168 | } 169 | ] 170 | } 171 | 172 | To summarize the issue: 173 | Right now the state of the task when publishing is SUCCESS and not RUNNING, 174 | even though a user might expect it to be RUNNING. We don't have to do 175 | anything about it, but we should document this really well and say this might 176 | change in the future. 177 | 178 | Deployer impact 179 | --------------- 180 | 181 | None. 
182 | 183 | 184 | Implementation 185 | ============== 186 | 187 | Assignee(s) 188 | ----------- 189 | 190 | Primary assignee: 191 | michal-gershenzon 192 | 193 | Other contributors: 194 | melisha 195 | 196 | Work Items 197 | ---------- 198 | 199 | * implement the new function and filters based on argument position in the 200 | function. 201 | * write tests. 202 | 203 | Dependencies 204 | ============ 205 | 206 | None. 207 | 208 | 209 | Testing 210 | ======= 211 | 212 | Examples for the next scenario: a mistral setup with 3 workflow executions, 213 | each execution started from a different workflow (just to make it more easy): 214 | 215 | :: 216 | 217 | execution of workflow1 (workflow execution id = 1) 218 | |-top_level_wf1_task_1 SUCCESS (workflow execution id = 1) 219 | |---second_level_wf1_task_1 SUCCESS (workflow execution id = 2) 220 | |-----third_level_wf1_task_1 SUCCESS (workflow execution id = 3) 221 | |-----third_level_wf1_task_2 SUCCESS (workflow execution id = 3) 222 | |-----third_level_wf1_task_3 ERROR (workflow execution id = 3) 223 | |---second_level_wf1_task_2 SUCCESS (workflow execution id = 2) 224 | |---second_level_wf1_task_3 SUCCESS (workflow execution id = 2) 225 | |-top_level_wf1_task_2 SUCCESS (workflow execution id = 1) 226 | 227 | execution of workflow2 (workflow execution id = 1001) 228 | |-top_level_wf2_task_1 SUCCESS (workflow execution id = 1001) 229 | |-top_level_wf2_task_2 SUCCESS (workflow execution id = 1001) 230 | 231 | execution of workflow3 (workflow execution id = 300001) 232 | |-top_level_wf3_task_1 ERROR (workflow execution id = 300001) 233 | |---second_level_wf3_task_1 ERROR (workflow execution id = 300002) 234 | |-----third_level_wf3_task_1 SUCCESS (workflow execution id = 300003) 235 | |-----third_level_wf3_task_2 SUCCESS (workflow execution id = 300003) 236 | |-----third_level_wf3_task_3 ERROR (workflow execution id = 300003) 237 | |---second_level_wf3_task_2 SUCCESS (workflow execution id = 300002) 238 | |---second_level_wf3_task_3 SUCCESS (workflow execution id = 300002) 239 | |-top_level_wf3_task_2 ERROR (workflow execution id = 300001) 240 | 241 | 242 | Here is a table representation with additional info: 243 | 244 | :: 245 | 246 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 247 | | top level execution id | execution id | task name | task state | task type | inner execution state if exist | inner execution id if exist | 248 | +========================+==============+==============================+================+===============+=================================+==============================+ 249 | | 1 | 1 | top_level_wf1_task_1 | SUCCESS | WORKFLOW | SUCCESS | 2 | 250 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 251 | | 1 | 1 | top_level_wf1_task_2 | SUCCESS | ACTION | - | - | 252 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 253 | | 1 | 2 | second_level_wf1_task_1 | SUCCESS | WORKFLOW | ERROR | 3 | 254 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 255 | | 1 | 2 | second_level_wf1_task_2 | SUCCESS | ACTION | - | - | 256 | 
+------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 257 | | 1 | 2 | second_level_wf1_task_3 | SUCCESS | ACTION | - | - | 258 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 259 | | 1 | 3 | third_level_wf1_task_1 | SUCCESS | ACTION | - | - | 260 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 261 | | 1 | 3 | third_level_wf1_task_2 | ERROR | ACTION | - | - | 262 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 263 | | 1 | 3 | third_level_wf1_task_3 | SUCCESS | ACTION | - | - | 264 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 265 | | 1001 | 1001 | top_level_wf2_task_1 | SUCCESS | ACTION | - | - | 266 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 267 | | 1001 | 1001 | top_level_wf2_task_2 | SUCCESS | ACTION | - | - | 268 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 269 | | 300001 | 300001 | top_level_wf3_task_1 | ERROR | WORKFLOW | ERROR | 300002 | 270 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 271 | | 300001 | 300001 | top_level_wf3_task_2 | ERROR | ACTION | - | - | 272 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 273 | | 300001 | 300002 | second_level_wf3_task_1 | ERROR | WORKFLOW | ERROR | 300003 | 274 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 275 | | 300001 | 300002 | second_level_wf3_task_2 | SUCCESS | ACTION | - | - | 276 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 277 | | 300001 | 300002 | second_level_wf3_task_3 | SUCCESS | ACTION | - | - | 278 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 279 | | 300001 | 300003 | third_level_wf3_task_1 | SUCCESS | ACTION | - | - | 280 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 281 | | 300001 | 300003 | third_level_wf3_task_2 | SUCCESS | ACTION | - | - | 282 | +------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 283 | | 300001 | 300003 | third_level_wf3_task_3 | ERROR | ACTION | - | - | 284 | 
+------------------------+--------------+------------------------------+----------------+---------------+---------------------------------+------------------------------+ 285 | 286 | reminder: the order of the function arguments is: 287 | 288 | 1. wf-execution-id 289 | 2. recursive 290 | 3. state 291 | 4. flat 292 | 293 | calling 'tasks()' will: 294 | return all 18 tasks. 295 | 296 | calling 'tasks(1)' or 'tasks(1, false)' will: 297 | return 2 tasks of workflow1 execution (only tasks with execution id of 1): 298 | * top_level_task_1 299 | * top_level_task_2 300 | 301 | calling 'tasks(1, true)' will: 302 | return all 8 tasks of the workflow1 workflow execution. 303 | 304 | calling 'tasks(1001)' or 'tasks(1001, true)' or 'tasks(1001, false)' will: 305 | return the 2 tasks of workflow2 execution. 306 | 307 | calling 'tasks(1, true, ERROR)' will: 308 | return 1 task of workflow1 execution: 309 | * third_level_wf1_task_3 310 | 311 | calling 'tasks(1, false, ERROR)' will: 312 | return an empty list. 313 | 314 | calling 'tasks(300001, true, ERROR)' or 'tasks(300001, true, ERROR, false)' 315 | will: 316 | return 4 task of workflow3 execution: 317 | * top_level_wf3_task_1 318 | * second_level_wf3_task_1 319 | * third_level_wf3_task_3 320 | * top_level_wf3_task_2 321 | 322 | calling 'tasks(300001, true, ERROR, true)' will: 323 | return 2 task of workflow3 execution: 324 | * third_level_wf3_task_3 325 | * top_level_wf3_task_2 326 | 327 | calling 'tasks(1, true, SUCCESS, true)' will: 328 | return 6 tasks of workflow1 execution: 329 | * top_level_wf1_task_2 330 | * second_level_wf1_task_1 331 | * second_level_wf1_task_2 332 | * second_level_wf1_task_3 333 | * third_level_wf1_task_1 334 | * third_level_wf1_task_3 335 | 336 | 337 | References 338 | ========== 339 | 340 | None. 341 | -------------------------------------------------------------------------------- /specs/pike/approved/advanced_publishing.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ============================ 8 | Workflow Advanced publishing 9 | ============================ 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-advanced-publishing 14 | 15 | Advanced publishing will eliminate confusions around current workflow 16 | publishing mechanism, unify publishing for all clauses 'on-success', 17 | 'on-error' and 'on-complete' and define publishing of variable scopes into 18 | different scopes: branch, global, global atomic. 19 | The proposed workflow language syntax will also be extensible if such a need 20 | occurs in the future. 21 | 22 | 23 | Problem description 24 | =================== 25 | 26 | Currently 'publish' keyword in Mistral saves variables into a storage 27 | (context) which is associated only with a branch. 28 | 29 | Example: 30 | 31 | :: 32 | 33 | --- 34 | version: '2.0' 35 | 36 | wf: 37 | tasks: 38 | A: 39 | action: std.noop 40 | publish: 41 | my_var: 1 42 | on-success: A1 43 | 44 | A1: 45 | action: my_action param1=<% $.my_var %> 46 | 47 | B: 48 | action: std.noop 49 | publish: 50 | my_var: 2 51 | on-success: B1 52 | 53 | B1: 54 | action: my_action param1=<% $.my_var %> 55 | 56 | 57 | The expression "$.my_var" in the declaration of A1 will always evaluate to 1, 58 | for B1 it will always evaluate to 2. This doesn't depend on the order in which 59 | A and B will run. 
This is because we have two branches (A -> A1 and B -> B1) 60 | for which the variable "my_var" has its own different version. 61 | 62 | Sometimes though we need to be able to share data across branches which is now 63 | impossible due to aforementioned semantics. 64 | The concept of workflow global context can help solve this problem. The word 65 | "global" here means "accessible from any workflow branch". 66 | 67 | We also need an ability to make atomic updates of global workflow context. 68 | It's necessary when we, for example, want to create a global counter (e.g. 69 | counter of network calls to external systems performed by a workflow). 70 | 71 | Currently, we also have the keyword 'publish-on-error' that works the same way 72 | as 'publish' but in case of an error of task action (or workflow). 73 | 74 | If we take the path of adding new keywords for all these cases we'd come up 75 | with the following keywords: 76 | 77 | * 'publish' - for branch publishing, currently exists 78 | * 'publish-on-error' - for branch publishing in case of error, currently 79 | exists 80 | * 'publish-global' - for global publishing, doesn't exist 81 | * 'publish-global-atomic' - for global atomic publishing, doesn't exist 82 | * 'publish-global-on-error' - for global publishing in case of error, doesn't 83 | exist 84 | * 'publish-global-atomic-on-error' - for global atomic publishing in case of 85 | error, doesn't exist 86 | 87 | So, we see that a number of keywords is growing and so does the length of the 88 | keywords. This all leads to a non-concise messy syntax. 89 | This approach is also not extensible because every time we'd like to add a new 90 | scope we'll have to add two new keywords (one for 'on-success' and one for 91 | 'on-error'). 92 | 93 | Use Cases 94 | --------- 95 | 96 | * Building conditions based on events happened in parallel workflow branches. 97 | Example: one branch needs to notify the other one that it should stop. 98 | * Passing data between branches. Example: one branch needs to wait till the 99 | other one produces some expected result. This is, essentially, creating 100 | a cross-branch mutex. 101 | * Counters that need to decrement or increment atomically. 102 | 103 | Proposed change 104 | =============== 105 | 106 | In order to achieve this goal the proposal is: 107 | 108 | * Extend semantics of existing 'on-success', 'on-error' and 'on-complete' 109 | keywords so that along with next tasks they can also have a description of 110 | publishing into different scopes. Now 'on-success', 'on-error' and 111 | 'on-complete' can be either a string, a list of strings or a list of 112 | dictionaries. In order to include publishing they should be dictionaries 113 | with two keys 'publish' and 'next'. See the detailed example below. 114 | * 'publish' defined under 'on-success', 'on-error' and 'on-complete' can 115 | optionally define scopes (but at least one) to be able to publish into 116 | different scopes: 'branch', 'global' and 'atomic' for global atomic 117 | publishing. An alternative for 'atomic' could be 'global-atomic' but just 118 | 'atomic' seems to be sufficient because atomic publishing for a branch is 119 | pointless. Specifying variables under 'branch' will make Mistral publish 120 | into a branch workflow context just like 'publish' and 'publish-on-error' 121 | currently do. Specifying variables under 'global' will make Mistral publish 122 | into a global workflow context. 
It's important to note that this is an 123 | unprotected way of modifying data because race conditions are possible when 124 | writing different values for same variables in the global context from 125 | parallel branches. In other words, if we have branches A and B and there are 126 | tasks in these branches that first read global variable X, then increment it 127 | and write the new value Mistral won't provide any guarantee that the result 128 | value after finishing tasks A and B will be X + 2. In some cases it can be 129 | X + 1 because the following may happen: task A read X, Task B read X, Task B 130 | incremented X, Task B wrote X + 1, Task A incremented X (the old one, not 131 | incremented by B), Task A wrote X + 1. 132 | 'atomic' scope can address the aforementioned problem by providing a 133 | guarantee that after task A read a global variable X no other task can read 134 | variable X until a transaction processing task A is committed. 135 | * If 'publish' is defined in 'on-complete' and also in 'on-success' and/or 136 | 'on-error' then the result of publishing will be a merge of what 137 | 'on-complete' publishes with what 'on-success' or 'on-error' publishes 138 | depending on the task status. If 'on-complete' publishes variables that are 139 | also published by 'on-success' or 'on-error' then latter take precedence. 140 | In other words, 'on-complete' in this case is considered a default which 141 | can be overridden by more specific 'on-XXX' clause. 142 | * The keyword 'next' defined under 'on-success'/'on-error'/'on-complete' 143 | becomes optional since the only purpose of 'on-xxx' clause may be publishing. 144 | * Currently existing 'publish' and 'publish-on-error' will still be available 145 | with the same semantics and may be deprecated in the future versions of the 146 | language. 147 | * Add the new YAQL/Jinja function "global()" to explicitly access 148 | variables in workflow global context. 149 | * Make global variables also accessible using "$." in YAQL and "_." in 150 | Jinja in a way that branch variables can shadow them if they are 151 | published in the current branch. 152 | 153 | Another positive effect of this change would be defining of what should happen 154 | in case of success or error at the same place: both publishing and scheduling 155 | next tasks. 156 | 157 | Example #1 (writing and reading global variables): 158 | 159 | :: 160 | 161 | --- 162 | version: '2.0' 163 | 164 | wf: 165 | tasks: 166 | A: 167 | action: std.noop 168 | on-success: 169 | publish: 170 | branch: 171 | my_var: "branch value" 172 | global: 173 | my_var: "global value" 174 | next: A1 175 | 176 | A1: 177 | # $.my_var will always evaluate to "branch value" because A1 belongs 178 | # to the same branch as A and runs after A. When using "$" to access 179 | # context variables branch values have higher priority. 180 | # In order to access global context reliably we need to use YAQL/Jinja 181 | # function 'global'. So global(my_var) will always evaluate to 182 | # 'global value'. 183 | action: my_action1 param1=<% $.my_var %> param2=<% global(my_var) %> 184 | 185 | B: 186 | # $.my_var will evaluate to "global value" if task A completes 187 | # before task B and "null", if not. It's because A and B are 188 | # parallel and 'publish' in A doesn't apply to B, only 189 | # 'publish-global' does. In this example global(my_var) has the same 190 | # meaning as $.my_var because there's no ambiguity from what context 191 | # we should take variable 'my_var'. 
192 | action: my_action2 param1=<% $.my_var %> param2=<% global(my_var) %> 193 | 194 | 195 | Example #2 (writing global variables atomically): 196 | 197 | :: 198 | 199 | --- 200 | version: '2.0' 201 | 202 | wf: 203 | vars: 204 | - counter: 0 205 | 206 | output: 207 | counter: <% $.counter %> 208 | 209 | tasks: 210 | task1: 211 | action: std.noop 212 | on-success: 213 | publish: 214 | atomic: 215 | counter: <% global(counter) + 1 %> 216 | 217 | task2: 218 | action: std.noop 219 | on-success: 220 | publish: 221 | atomic: 222 | counter: <% global(counter) + 1 %> 223 | 224 | 225 | After running this workflow its output must always be 2. 226 | 227 | Alternatives 228 | ------------ 229 | 230 | None. 231 | 232 | Data model impact 233 | ----------------- 234 | 235 | Workflow execution object already has the field "context" which is now 236 | immutable and initialized with openstack specific data, execution id and 237 | environment variables. In order to get the full context for evaluating a 238 | YAQL/Jinja expression in a task declaration we always build a context view 239 | merged from workflow input, workflow execution "context" field and branch 240 | specific context (e.g. task inbound context when evaluating action 241 | parameters). The field "context" can play the role of workflow global 242 | context. However, the idea to reuse this field can be revisited during 243 | the implementation phase. 244 | 245 | REST API impact 246 | --------------- 247 | 248 | None. 249 | 250 | End user impact 251 | --------------- 252 | 253 | New workflow language feature that explicitly allows to define scopes(contexts) 254 | of published variables. 255 | 256 | Performance Impact 257 | ------------------ 258 | 259 | When using the scope "atomic" we'll need to use locking in order to prevent 260 | concurrent modifications of global workflow context while reading and modifying 261 | it at publishing stage of a certain task. In fact, this is equal to locking the 262 | whole workflow execution object and hence will have a significant performance 263 | impact in case of many parallel tasks. For this reason, "atomic" needs to be 264 | well documented and used with precaution. 265 | 266 | Deployer impact 267 | --------------- 268 | 269 | None. 270 | 271 | 272 | Implementation 273 | ============== 274 | 275 | Assignee(s) 276 | ----------- 277 | 278 | Primary assignee: 279 | rakhmerov 280 | 281 | Other contributors: 282 | melisha 283 | 284 | Work Items 285 | ---------- 286 | 287 | * Change 'on-success', 'on-error' and 'on-complete' in the specification of 288 | direct workflow task in the way described above. 289 | * Make changes in Mistral engine to publish variables into global context 290 | (preliminarily it will be the field 'context' of workflow execution object). 291 | * Implement YAQL/Jinja function 'global' to explicitly read variables from 292 | workflow global context. 293 | * Add locking of workflow global context (i.e. workflow execution) in case of 294 | using 'atomic' scope. A thread that acquires a lock must first refresh state 295 | of workflow execution and then proceed with publishing etc. 296 | 297 | Dependencies 298 | ============ 299 | 300 | None. 301 | 302 | 303 | Testing 304 | ======= 305 | 306 | * Unit tests for publishing into all scopes and using 'global' function for 307 | different cases: parallel tasks, sequential tasks. 308 | * Unit tests to check that branch-local variables take precedence when 309 | reading variables using '$.' in YAQL and '_.' in Jinja. 
310 | * Unit tests for 'atomic' scope that checks atomicity of reads and writes of 311 | global variables. Although unit tests can't fully test this feature. In order 312 | to fully test it we need to have a test with multiple Mistral engines to make 313 | sure we have concurrent access to workflow execution. 314 | 315 | References 316 | ========== 317 | 318 | None. 319 | -------------------------------------------------------------------------------- /specs/pike/approved/create-and-run-workflows-within-a-namespace.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | =========================================== 8 | Create and run workflows within a namespace 9 | =========================================== 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/create-and-run-workflows-within-a-namespace 14 | 15 | Creating and running workflows within a namespace will allow users to create 16 | many workflows with the same name. This is useful when a user already has many 17 | workflows that are connected to each other implemented and one of the workflow 18 | names is already in use and the user does not want to edit that workflow and 19 | all the ones referencing it or merge them to a workbook. This is possible 20 | because the namespace is not a part of the Mistral language. 21 | 22 | 23 | Problem description 24 | =================== 25 | 26 | When a workflow name is already in use, it takes editing the workflow the user 27 | wants to add to mistral or delete the existing one. 28 | 29 | Use Cases 30 | --------- 31 | 32 | * When there are many users writing workflows in the same tenant simultaneously 33 | each can use a different namespace, so there will be no clashes preventing 34 | any of the users to add the workflow he created to mistral. 35 | 36 | * If a workflow definition is not allowed to be modified, but a workflow with 37 | the same name already exists in Mistral and the user wants to upload the 38 | workflow and create an execution from it. 39 | 40 | 41 | Proposed change 42 | =============== 43 | 44 | Add a new namespace parameter to both workflow definition creation, workflow 45 | definition deletion, workflow execution creation and other relevant APIs. 46 | 47 | If a workflow definition creation request has a namespace specified in it, the 48 | workflow name should be unique only within the namespace. If no namespace is 49 | passed, the name should be unique within the group of workflow definitions 50 | without a namespace (will be referred as the default namespace from here on). 51 | 52 | If a workflow execution request from the user, has a namespace specified in it, 53 | mistral will search for the workflow definition within the namespace only. 54 | However, if the workflow execution came from the mistral engine (e.g 55 | sub-workflow execution), mistral will first try finding the workflow definition 56 | within the namespace, and only if one does not exist, mistral will try finding 57 | it the default namespace. By default the namespace passes to the sub-workflows 58 | execution. If a workflow definition only exists in another namespace that is 59 | not the default namespace, it will not be found, and the create workflow 60 | execution request will fail. 
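To make the resolution order concrete, the following minimal Python sketch captures the rule described above. The function name, the in-memory store and the ``from_engine`` flag are illustrative assumptions only and do not reflect Mistral's actual lookup code.

.. code-block:: python

    DEFAULT_NAMESPACE = ''

    # Illustrative (name, namespace) -> definition mapping standing in for
    # the workflow definitions table.
    definitions = {
        ('some_wf', 'abc'): 'definition of some_wf in namespace abc',
        ('some_sub_wf', DEFAULT_NAMESPACE): 'definition of some_sub_wf',
    }


    def find_workflow_definition(name, namespace, from_engine=False):
        # A user request searches the given namespace only.
        definition = definitions.get((name, namespace))

        # An engine-initiated request (e.g. a sub-workflow execution) falls
        # back to the default namespace when the name is missing in the
        # propagated namespace.
        if definition is None and from_engine:
            definition = definitions.get((name, DEFAULT_NAMESPACE))

        if definition is None:
            raise LookupError(
                "Workflow not found [name=%s, namespace=%s]"
                % (name, namespace)
            )

        return definition

With this store, ``find_workflow_definition('some_sub_wf', 'abc', from_engine=True)`` resolves to the definition in the default namespace, while the same call with ``from_engine=False`` fails, matching the behaviour of user-initiated execution requests.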
61 | 62 | In the future we might want to add a namespace for actions and workbooks (but 63 | for now workbooks and workflows created from them, are always in the default 64 | namespace). 65 | 66 | An example to explain how the namespace moves recursively to sub-workflow 67 | executions. 68 | 69 | Given there are 3 workflows in the workflow table. Two workflows definitions 70 | called 'wf' and 'sub_sub_workflow'are in the same namespace - a namespace we 71 | will call 'abc', and one workflow definition called 'sub_workflow' in the 72 | default namespace. 73 | 74 | Visualization of a partial workflow definitions table: 75 | 76 | +----+---------------------+-----------+ 77 | | ID | name | namespace | 78 | +----+---------------------+-----------+ 79 | | 1 | wf | abc | 80 | +----+---------------------+-----------+ 81 | | 2 | sub_wf | | 82 | +----+---------------------+-----------+ 83 | | 3 | sub_sub_wf | abc | 84 | +----+---------------------+-----------+ 85 | | 4 | sub_sub_wf | | 86 | +----+---------------------+-----------+ 87 | 88 | Workflow definition for 'wf' with ID 1: 89 | .. code-block:: yaml 90 | 91 | --- 92 | version: '2.0' 93 | wf: 94 | tasks: 95 | t1: 96 | workflow: sub_wf 97 | 98 | 99 | Workflow definition for 'sub_wf' with ID 2: 100 | .. code-block:: yaml 101 | 102 | --- 103 | version: '2.0' 104 | sub_wf: 105 | tasks: 106 | t2: 107 | workflow: sub_sub_wf 108 | 109 | Workflow definition for 'sub_sub_wf' with ID 3: 110 | .. code-block:: yaml 111 | 112 | --- 113 | version: '2.0' 114 | sub_sub_wf: 115 | tasks: 116 | t3: 117 | action: std.noop 118 | 119 | Workflow definition for 'sub_sub_wf' with ID 4: 120 | .. code-block:: yaml 121 | 122 | --- 123 | version: '2.0' 124 | sub_sub_wf: 125 | tasks: 126 | should_not_run: 127 | action: std.fail 128 | 129 | As you notice, namespace is not and should never be a part of the language. 130 | 131 | By calling the execution of workflow with name 'wf' within namespace 'abc', it 132 | is required for workflow with name 'wf' in namespace 'abc' to run, and when 133 | task t1 is executed to call workflow 'sub_wf' within the default namespace 134 | (since no workflow with name 'sub_wf' exist within namespace 'abc'), but still 135 | remember the namespace is 'abc' so that when task t2 will be executed, the 136 | workflow that will be executed is workflow 'sub_sub_wf' in namespace 'abc' with 137 | ID '3', rather than workflow 'sub_sub_wf' in the default namespace with ID '4'. 138 | The execution described above should result in success. 139 | 140 | More strictly speaking, when it comes to calling nested workflows the namespace 141 | of the top most workflow is propagated down to its children. So that when 142 | Mistral needs to resolve a workflow name, it first searches the configured name 143 | in that propagated namespace, and if it doesn't exist there, Mistral will try 144 | to find it in the default namespace. 145 | 146 | A workflow execution can only trigger an execution of a workflow within both 147 | the same tenant and namespace or within both the same tenant and the default 148 | namespace. 149 | 150 | For workbooks that means that all workflows within the workbook could only call 151 | workflows in the default namespace. 152 | 153 | Leading suggestion for the creation API of the 'wf' execution is this: 154 | .. 
code-block:: 155 | 156 | POST /v2/executions 157 | { 158 | "workflow_name": "wf", 159 | "workflow_namespace": "abc" 160 | } 161 | 162 | Leading suggestion for passing the namespace from execution to sub-execution 163 | recursively is putting it in the params of the execution possible under env. 164 | A user is not allowed to add any key that starts with two underscores to the 165 | env. 166 | 167 | Example of how such row might look like in the database: 168 | .. code-block:: 169 | 170 | mysql> select * from workflow_executions_v2 where id='3'\G; 171 | *************************** 1. row *************************** 172 | created_at: 2017-06-19 10:59:29 173 | updated_at: 2017-06-19 10:59:30 174 | scope: private 175 | project_id: 1 176 | id: 3 177 | name: sub_sub_wf 178 | description: 179 | workflow_name: sub_sub_wf 180 | workflow_namespace: abc 181 | workflow_id: 3 182 | spec: {"tasks": {"t3": {"action": "std.noop", "version": "2.0", "type": "direct", "name": "t3"}}, "name": "sub_sub_wf", "version": "2.0"} 183 | state: SUCCESS 184 | state_info: NULL 185 | tags: NULL 186 | runtime_context: {"index": 0} 187 | accepted: 1 188 | input: {} 189 | output: {} 190 | params: {"env": {"__namespace": "abc"}} 191 | 192 | Notice the last line where under params->env we have a key called '__namespace' 193 | 194 | In the example described above, if a user decides to add a workflow with the 195 | name 'sub_wf' to the 'abc' namespace, the next time the workflow will be 196 | executed, the new workflow called 'sub_wf' from the 'abc' namespace will be 197 | triggered by the workflow with the name 'wf' from the 'abc' namespace, instead 198 | of the workflow 'sub_wf' from the default namespace. 199 | 200 | Regarding the results of the current APIs see the next examples that all assume 201 | that the workflows described in the next table are the only one that exist. 202 | 203 | Table: 204 | +----+---------------------+-----------+ 205 | | ID | name | namespace | 206 | +----+---------------------+-----------+ 207 | | 1 | wf | abc | 208 | +----+---------------------+-----------+ 209 | | 2 | sub_wf | | 210 | +----+---------------------+-----------+ 211 | | 3 | sub_sub_wf | abc | 212 | +----+---------------------+-----------+ 213 | | 4 | sub_sub_wf | | 214 | +----+---------------------+-----------+ 215 | | 5 | example_wf | example_1 | 216 | +----+---------------------+-----------+ 217 | | 6 | example_wf | example_a | 218 | +----+---------------------+-----------+ 219 | 220 | Examples: 221 | 222 | * **GET /v2/workflows** 223 | Will return all 6 workflows 224 | 225 | * **GET /v2/workflows/wf** 226 | Will return an error "workflow not found [workflow_identifier=wf] 227 | 228 | * **GET /v2/workflows/sub_wf** 229 | Will return workflow 'sub_wf' from the default namespace (ID=2). 230 | 231 | * **GET /v2/workflows/sub_sub_wf** 232 | Will return workflow 'sub_sub_wf' from the default namespace (ID=4). 233 | 234 | * **GET /v2/workflows/example_wf** 235 | Will return an error "workflow not found [workflow_identifier=example_wf] 236 | 237 | * **DELETE /v2/workflows/wf** 238 | Will throw an exception, because no namespace supplied and no such workflow 239 | exist in the default namespace 240 | 241 | * **DELETE /v2/workflows/sub_wf** 242 | Will delete the workflow with the name 'sub_wf' from the default namespace 243 | (ID=2). 244 | This should be allowed in order to let users that don't use namespaces to 245 | work as they are used to. 
246 | 247 | * **DELETE /v2/workflows/sub_sub_wf** 248 | Will delete the workflow with the name 'sub_sub_wf' from the default 249 | namespace (ID=4). 250 | 251 | * **DELETE /v2/workflows/example_wf** 252 | Will return an error "workflow not found [workflow_identifier=example_wf] 253 | 254 | * PUT will have similar results to DELETE 255 | 256 | 257 | Alternatives 258 | ------------ 259 | 260 | We can try and use workbooks, but the down side is it forces the user to merge 261 | his workflows, and might result in a hugh file, that a user might find to be 262 | hard to edit and read. 263 | 264 | For the described namespace design, we can use different names. For example in 265 | the create execution API we can call the new key 'workflow_namespace' instead 266 | of 'namespace'. Also the default namespace currently described is the empty 267 | string (''), but it can be something like "". We should also 268 | consider saving some namespaces to future system use (for example namespaces 269 | that starts with 2 underscores '__') 270 | 271 | 272 | Data model impact 273 | ----------------- 274 | 275 | The proposed change must come with a change to the data model. 276 | 277 | For workflow definition, a namespace should be added to the model and the DB 278 | workflow_definitions_v2 table. And the same for workflow execution, plus it 279 | should also be under env in params, so it will seep easily to the sub-workflow 280 | executions. In the case of workflow execution there is also the option of just 281 | adding it to the env under params. 282 | 283 | In the future we might create a namespace table. Migration from current 284 | suggested model to one that includes a separate table for namespace, should be 285 | easy using SQLAlchemy. 286 | 287 | REST API impact 288 | --------------- 289 | 290 | Optional namespace parameter will be added to relevant requests: 291 | 292 | * create workflow definition within a namespace:: 293 | 294 | POST /v2/workflows?namespace=NAMESPACE 295 | RAW_WF_DEFINITION 296 | 297 | * delete workflow definition within a namespace:: 298 | 299 | DELETE /v2/workflows/WORKFLOW_IDENTIFIER?namespace=NAMESPACE 300 | 301 | * get a workflow definition within a namespace:: 302 | 303 | GET /v2/workflows/WORKFLOW_IDENTIFIER?namespace=NAMESPACE 304 | 305 | * get all the workflow definitions within a given namespace:: 306 | 307 | GET /v2/workflows?namespace=NAMESPACE 308 | 309 | * update a workflow definition within a given namespace:: 310 | 311 | PUT /v2/workflows?namespace=NAMESPACE 312 | RAW_WF_DEFINITION 313 | 314 | * create an execution of a workflow where the workflow belongs to given:: 315 | 316 | POST /v2/executions 317 | { 318 | "workflow_name": "WORKFLOW_NAME", 319 | "workflow_namespace": "NAMESPACE" 320 | } 321 | 322 | * get a list of all the namespaces:: 323 | 324 | GET /v2/namespaces 325 | 326 | 327 | End user impact 328 | --------------- 329 | 330 | The new namespace request parameter should be added to the python-mistralclient 331 | as well. 332 | 333 | Performance Impact 334 | ------------------ 335 | 336 | None. 337 | 338 | Deployer impact 339 | --------------- 340 | 341 | Database migration should be done when upgrading mistral to a version that 342 | includes this change. 
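The migration itself is expected to be small. The Alembic-style sketch below only illustrates the kind of schema change implied by the data model section; the revision identifiers, column length and any adjustment of the existing unique constraint on workflow names are placeholders to be settled during implementation.

.. code-block:: python

    from alembic import op
    import sqlalchemy as sa

    # Placeholder revision identifiers.
    revision = 'add_namespace_column'
    down_revision = 'previous_revision'


    def upgrade():
        # The empty string represents the default namespace.
        op.add_column(
            'workflow_definitions_v2',
            sa.Column('namespace', sa.String(length=255), nullable=True)
        )
        op.add_column(
            'workflow_executions_v2',
            sa.Column('workflow_namespace', sa.String(length=255),
                      nullable=True)
        )
        # Note: the unique constraint on workflow definition names would
        # also need to include the new column so that a name only has to
        # be unique within its namespace.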
343 | 344 | 345 | Implementation 346 | ============== 347 | 348 | Assignee(s) 349 | ----------- 350 | 351 | Primary assignee: 352 | michal-gershenzon 353 | 354 | Other contributors: 355 | melisha 356 | 357 | Work Items 358 | ---------- 359 | 360 | * Adding namespace parameter to create workflow definition, delete workflow 361 | definition and create workflow execution requests. 362 | 363 | * Change the way workflow definition are queried during execution. 364 | 365 | * Tests 366 | 367 | * Database migration script 368 | 369 | * Documentation 370 | 371 | * Add new parameter to python-mistralclient 372 | 373 | 374 | Nice to have work items: 375 | 376 | * Adding namespace as a filter parameter of get workflow definition 377 | 378 | * Adding namespace as a filter parameter of update workflow definition 379 | 380 | * Adding namespace as a filter parameter of get workflow executions 381 | 382 | * Supporting the namespace feature with workbooks 383 | 384 | * Adding namespaces API endpoint 385 | 386 | 387 | Dependencies 388 | ============ 389 | 390 | None. 391 | 392 | 393 | Testing 394 | ======= 395 | 396 | * Create a workflow under some namespace that already exist in the default 397 | namespace. 398 | 399 | * Create a workflow under the default namespace that calls the workflow above. 400 | Run it once under the default namespace and once under the namespace from 401 | previous section and see each time the expected sub-workflow execution is 402 | created. 403 | 404 | * Create a workflow under some namespace that does not exist in the default 405 | namespace and see trying to execute it without specifying a namespace fails. 406 | 407 | 408 | References 409 | ========== 410 | 411 | None. 412 | -------------------------------------------------------------------------------- /specs/pike/approved/ha.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ========================== 8 | Mistral HA and Scalability 9 | ========================== 10 | 11 | Launchpad blueprints: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-ha-spec 14 | https://blueprints.launchpad.net/mistral/+spec/mistral-ha-gate 15 | https://blueprints.launchpad.net/mistral/+spec/mistral-multinode-tests 16 | https://blueprints.launchpad.net/mistral/+spec/mistral-fault-tolerance 17 | https://blueprints.launchpad.net/mistral/+spec/mistral-scale-up-down 18 | 19 | Mistral needs to be highly available (HA) and scalable. 20 | 21 | This specification doesn't attempt to describe all steps and measures that 22 | need to be taken in order to achieve these goals. It rather aims to set a 23 | common ground for understanding what tasks and challenges the team needs to 24 | solve and proposes approximate approaches for that. 25 | 26 | Problem description 27 | =================== 28 | 29 | In a nutshell, being highly available and scalable means that we should 30 | be able to run more than one Mistral cluster component of any type (api, 31 | engine, executor) and gain more reliability and performance. 32 | Originally, Mistral was designed to be fully asynchronous and thus easily 33 | scalable, so we know that Mistral, in fact, works with multiple APIs, engines 34 | and executors and its performance becomes higher in this case. 
35 | High availability though is a complex non functional requirement that is 36 | usually achieved by making lots of changes to account for various failure 37 | situations that can happen with multiple nodes of the systems. Particularly, 38 | the system should gracefully handle cluster topology changes: node addition 39 | and node departure. These events should not affect normal work of workflows 40 | that have already been started. 41 | So far, the Mistral team hasn't spent significant time on HA. Although Mistral 42 | partially implements these two non functional requirements we need to make 43 | sure that all of their corresponding aspects are implemented and tested. This 44 | specification aims to clearly define these aspects and roughly propose 45 | approaches for taking care of them. 46 | 47 | Use Cases 48 | --------- 49 | 50 | * Scalability is needed when performance of a Mistral cluster component, 51 | such as Mistral engine, limited by resources (I/O, RAM, CPU) of a single 52 | machine, is not sufficient. Those are any use cases that assume significant 53 | load on Mistral, essentially lots of tasks processed by Mistral per time 54 | unit. 55 | Example: hosted workflow service in a public cloud. 56 | * High availability is critically important for use cases where workflows 57 | managed by Mistral should live even in case of infrastructure failures. 58 | To deal with these failures the system needs to have redundancy, i.e. 59 | extra instances of cluster components. Losing part of them doesn't affect 60 | normal work of the system, all functionality continues to be accessible. 61 | 62 | 63 | 64 | Proposed change 65 | =============== 66 | 67 | **Testing infrastructure** 68 | 69 | Currently, Mistral team doesn't have any infrastructure that would allow to 70 | automate testing of multi-node Mistral, whether we want to test scalability 71 | properties or reliability and availability. The first step towards achieving 72 | the goals defined by the spec is to create such an infrastructure. 73 | 74 | Currently, the requirements for this infrastructure are seen as follows: 75 | 76 | * New gate in OpenStack CI that will allow to run any number of Mistral 77 | cluster components as separate processes 78 | * Python utility methods that will allow to start, stop and find running 79 | Mistral cluster components. This is required because we should be able to 80 | have different number of cluster components for different tests within the 81 | same gate. It will also allow to test addition and departure of cluster 82 | nodes (scale up and down). 83 | * Means that will allow to imitate infrastructure failures. One of the most 84 | important is lost messages in RPC. 85 | * A tool for benchmarking Mistral performance when it's installed in multi-node 86 | mode. Ideally, it needs to be a separate gate (most likely based on Rally) 87 | 88 | 89 | **Mistral stable multi-node topology** 90 | 91 | This work item mostly assumes testing Mistral with multiple nodes when the 92 | number of node is stable. The most important part is to test multiple engines 93 | because engine is the part of the system that creates complex database 94 | transactions and there used to be known issues related to that like deadlocks. 95 | These known issues seem to have been fixed but we need to create test harness 96 | for this and make sure it's tested automatically on every commit. Potentially, 97 | we might still have some issues like that. 
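The Python utility methods mentioned in the testing infrastructure requirements could look roughly like the sketch below. The ``ComponentHandle`` helper and the exact ``mistral-server`` command-line flags are illustrative assumptions; the real interface is to be designed together with the gate.

.. code-block:: python

    import signal
    import subprocess


    class ComponentHandle(object):
        """Represents one running Mistral component in a test cluster."""

        def __init__(self, kind, process):
            self.kind = kind          # 'api', 'engine' or 'executor'
            self.process = process

        def stop(self, graceful=True):
            # SIGTERM imitates a planned scale down, SIGKILL a crash.
            self.process.send_signal(
                signal.SIGTERM if graceful else signal.SIGKILL)
            self.process.wait()


    def start_component(kind, config_file='mistral.conf'):
        proc = subprocess.Popen(
            ['mistral-server', '--server', kind,
             '--config-file', config_file])
        return ComponentHandle(kind, proc)


    # Example topology for a test: one API, two engines, two executors.
    cluster = [start_component(kind) for kind in
               ('api', 'engine', 'engine', 'executor', 'executor')]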
98 | 99 | **Mistral variable multi-node topology (scale up/down)** 100 | 101 | Once multi-node Mistral is tested with a stable cluster topology we need to 102 | make sure to create tests where nodes are added and removed while workflows 103 | are running. These workflows must finish successfully regardless of cluster 104 | topology changes, only if there's at least one running component of every 105 | type (engine, executor, api). 106 | In this part we expect issues related to lost RPC messages for communications 107 | where message acknowledgements are not used. It's possible that component A 108 | will send a message component B, and if component B polls the message from 109 | a message queue and goes down before the message is fully processed and 110 | result is sent back, then state of a corresponding object in the database 111 | won't be updated (action execution, task execution, workflow execution). 112 | Having that said, as of now, there's no opportunity to fix this when using 113 | oslo.messaging based RPC implementation because it doesn't support message 114 | acknowledgement needed for the "Work queue" pattern, i.e. "at-least-once" 115 | delivery. See https://www.rabbitmq.com/tutorials/tutorial-two-python.html for 116 | details. For now, we have to use Kombu-based Mistral RPC which supports this 117 | mechanism. However, it's known that message acknowledgements should be used 118 | in more cases where the mistral services communicate, than currently enabled. 119 | For example, it is enabled on the executor side so that a message sent to an 120 | executor (to run an action) is acknowledged after it's fully processed. But 121 | it's known that it's not enabled on the engine side when, for example, 122 | executor sends a result of the action back to Mistral. Part of this work 123 | item will be identifying all such gaps in the Mistral communication protocol 124 | and fix them. 125 | Another visible problem is graceful scale down, when the node that is 126 | intentionally leaving the cluster should stop handling new incoming RPC 127 | requests but complete requests that are already being processed. If this 128 | concern is not taken care of, we'll be getting repeated message processing 129 | problem, i.e. when in fact a message was processed more than once that in turn 130 | leads to breaking normal logic of workflow processing. The example: an engine 131 | sends a "run action" request to an executor, executor polls it, runs an action 132 | and immediately after that goes down w/o having a chance to send the result 133 | back to the engine acknowledging the initial message. So, in fact, the acton 134 | already ran once. Then, since the message is not acknowledged, a message queue 135 | broker will resend it to a different executor and it'll be processed again. 136 | The solution: graceful shutdown when executor fully processes the message, 137 | sends the result back, acknowledges the message and only then is allowed to 138 | shut down. On this matter, we need to distinguish between idempotent and 139 | non-idempotent actions. The described problem applies only to non-idempotent 140 | actions since, from their definition, every execution of such action changes 141 | the state of the system. 
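A minimal Kombu-style sketch of the "acknowledge only after full processing" behaviour on the executor side is given below. The queue name, the placeholder callbacks and the shutdown flag are purely illustrative and do not correspond to Mistral's actual RPC layer.

.. code-block:: python

    import socket

    from kombu import Connection, Queue

    task_queue = Queue('mistral_executor_tasks', durable=True)
    shutdown_requested = False


    def run_action(request):
        pass  # placeholder: run the requested action


    def send_result_to_engine(result):
        pass  # placeholder: report the action result back to the engine


    def on_message(body, message):
        result = run_action(body)
        send_result_to_engine(result)
        # Acknowledge last: if the executor dies before this point the
        # broker re-delivers the message to another executor
        # ("at-least-once" delivery).
        message.ack()


    with Connection('amqp://guest:guest@localhost//') as conn:
        consumer = conn.Consumer(task_queue, callbacks=[on_message],
                                 accept=['json'])
        with consumer:
            # Graceful scale down: stop draining new messages once a
            # shutdown is requested, but a message already being processed
            # is finished, reported and acknowledged before exiting.
            while not shutdown_requested:
                try:
                    conn.drain_events(timeout=1)
                except socket.timeout:
                    continue

Note that even with this pattern a crash after ``run_action()`` but before ``message.ack()`` still leads to the action running twice after re-delivery, which is exactly why the graceful shutdown path and the distinction between idempotent and non-idempotent actions matter.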
142 | 143 | **Handling infrastructure failures** 144 | 145 | Mistral should be able to gracefully handle the following infrastructure 146 | outages: 147 | 148 | * Temporary lost connectivity with the database 149 | * Temporary lost connectivity with the message broker 150 | 151 | Currently, there aren't automatic tests that would guarantee that Mistral 152 | handles these situations gracefully w/o losing ability to complete workflows 153 | that are already running. So this work stream assumes creating tests that 154 | imitate these failures and fixing possible issues that may occur in this case. 155 | The most important part is execution objects stuck in a non-terminal state 156 | like RUNNING due to a temporary outage of the message broker or the database. 157 | It is known that there can be situations when an RPC message is completely 158 | lost. For example, RabbitMQ does not guarantee that if it accepted a message 159 | it will be delivered to consumers. Automatic recovery looks impossible in this 160 | case but we need to come up with approaches how to help operators identify 161 | these situations and recover from them. This should be figured out during the 162 | implementation phase. 163 | 164 | **Benchmarking of multi-node Mistral** 165 | 166 | We already have a CI gate based on Rally that allows benchmarking a single-node 167 | Mistral. We need to create a gate and a tool set that will allow benchmarking 168 | Mistral that has different number of nodes so that we see how scaling up/down 169 | changes Mistral performance. When we're talking about Mistral performance, 170 | until not explained, it's not clear what it means because if we mean workflow 171 | execution time (from when it was started to when it was completed) then it 172 | totally depends on what kind of workflow it is, whether it's highly parallel 173 | or has only sequences, whether it has joins, "with-items" tasks, task policies 174 | etc. 175 | The proposal is to come up with several workflows that could be called 176 | "reference workflows" and to measure execution time of these workflows in 177 | seconds (minimal, average, highest) as well as "processing time per task" for 178 | each one of them that would show how much time is needed to process an 179 | individual task for all such workflows. These reference workflows together 180 | should describe all kinds of elementary workflow blocks that any more complex 181 | workflow can consist of. Thus these workflows will give us understanding about 182 | performance of all most important workflow topologies. Plus we can have one or 183 | more complex workflow built of these elementary blocks and measure its 184 | execution time as well. 185 | 186 | Currently, the proposed reference workflows are: 187 | 188 | * Sequence of N tasks where each task is started only after the previous one. 189 | Without parallelism at all. 190 | * N parallel tasks w/o connections between each other. Fully parallel workflow. 191 | * X parallel sequences where each contains N tasks. Mixed sequential 192 | and parallel workflows. 193 | * N parallel tasks joined by a task marked as "join: all" 194 | * X parallel sequences where each contains N tasks joined by a task marked as 195 | "join: all" 196 | * One task configured as "with-items" that processes N elements 197 | * Complex workflow where all the previous items are combined together 198 | 199 | Alternatives 200 | ------------ 201 | 202 | None. 203 | 204 | Data model impact 205 | ----------------- 206 | 207 | None. 
208 | 209 | REST API impact 210 | --------------- 211 | 212 | None. 213 | 214 | End user impact 215 | --------------- 216 | 217 | Higher uptime of Mistral service (if by end user we mean those who call 218 | Mistral API). 219 | 220 | Performance Impact 221 | ------------------ 222 | 223 | * Performance of Mistral with more nodes will be higher than performance of 224 | Mistral with less nodes. 225 | 226 | Deployer impact 227 | --------------- 228 | 229 | Deployers will be able to run more Mistral nodes to increase performance and 230 | redundancy/availability of the service. 231 | 232 | Implementation 233 | ============== 234 | 235 | Assignee(s) 236 | ----------- 237 | 238 | Primary assignee: 239 | rakhmerov 240 | 241 | Other contributors: 242 | melisha 243 | 244 | Work Items 245 | ---------- 246 | 247 | * OpenStack CI job for testing Mistral in HA 248 | * Make multi-node Mistral work reliably with a stable cluster topology 249 | (includes automatic testing) 250 | * Make multi-node Mistral work reliably with a variable cluster topology 251 | (includes automatic testing) 252 | * Make Mistral handle infrastructure failures 253 | (includes automatic testing) 254 | * Fix known RPC communication issues (acknowledgements etc.) 255 | * Create a set of automatic benchmarks (most likely with Rally) that will 256 | show how Mistral cluster scale up / scale down influence performance 257 | 258 | Dependencies 259 | ============ 260 | 261 | None. 262 | 263 | 264 | Testing 265 | ======= 266 | 267 | * Use a new CI gate that will allow to run multiple cluster nodes and test 268 | how Mistral scales up and down and how it reacts on various infrastructure 269 | failures (more details above) 270 | 271 | References 272 | ========== 273 | 274 | None. 275 | -------------------------------------------------------------------------------- /specs/pike/approved/mistral-custom-actions-api.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ================== 8 | Custom Actions API 9 | ================== 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-custom-actions-api 14 | 15 | This specification sets a formal basis for those Mistral users who want to 16 | create their own actions and make them available to use as part of Mistral 17 | workflows. The number one question that the spec addresses is "What is 18 | available in Mistral code base in order to implement custom actions?" 19 | 20 | 21 | Problem description 22 | =================== 23 | 24 | Custom actions are now possible to create and it's as simple as just 25 | implementing a class inherited from mistral.actions.base.Action that 26 | has 3 methods: 27 | 28 | * run() - executes main action logic, **mandatory** to implement 29 | 30 | * test() - execute action in test mode, related to future dry-run 31 | functionality, optional to implement 32 | 33 | * is_sync() - must return **True** if action returns its result right from 34 | method run() or **False** if method run() only starts action logic and 35 | result is supposed to be delivered later via public Mistral API 36 | 37 | There's also a mechanism based on stevedore library that allows to plug in 38 | new actions via adding new entry points in setup.cfg file. 
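For illustration, a minimal action using only this mechanism might look like the sketch below; the class, its behaviour and the entry point shown in the trailing comment are examples only, not part of any shipped code.

.. code-block:: python

    from mistral.actions import base


    class EchoAction(base.Action):
        """Trivial synchronous action that returns its input."""

        def __init__(self, output):
            self.output = output

        def run(self):
            # Main action logic; the result is returned directly.
            return self.output

        def test(self):
            # Placeholder for the future dry-run functionality.
            return None

        def is_sync(self):
            return True

    # Plugged in via stevedore by adding an entry point to setup.cfg of
    # the package that ships the action, for example:
    #
    #   [entry_points]
    #   mistral.actions =
    #       my.echo = my_package.actions:EchoAction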
39 | 40 | If a custom action doesn't require any integration neither with Mistral 41 | nor with OpenStack this is enough to know in order to implement it. 42 | 43 | However, if this action needs to leverage more advanced capabilities 44 | provided by Mistral and OpenStack then Action class itself doesn't 45 | give any knowledge about means that can be used to achieve that. 46 | A simple example of integration with OpenStack infrastructure is the need 47 | to call endpoints of OpenStack services. In this case, at minimum, action 48 | needs to be able to authenticate with Keystone, i.e., have access to 49 | Mistral security context. 50 | 51 | Use Cases 52 | --------- 53 | 54 | Simple OpenStack actions 55 | ^^^^^^^^^^^^^^^^^^^^^^^^ 56 | As a user of Mistral I want to create actions that call OpenStack services. 57 | In this case action needs to be able to access Mistral security context 58 | that contains auth token to be able to pass it to a corresponding service. 59 | Note: This use case is generally implemented within Mistral but it needs 60 | to be rethought since OpenStack actions that are implemented now in Mistral 61 | use Mistral Python code that is not assumed to be a public API and hence 62 | stable. 63 | 64 | Complex OpenStack actions 65 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 66 | As a user of Mistral I want to create actions that call multiple OpenStack 67 | services from within one action. 68 | 69 | For example, we may want to create action 70 | "create_cinder_volume_and_attach_to_vm" that creates a Cinder volume and 71 | attaches it to a virtual instance. In this case action needs to have access 72 | to Mistral security context that contains auth token so that it can pass 73 | that token to Cinder and Nova. 74 | 75 | Reusing existing actions 76 | ^^^^^^^^^^^^^^^^^^^^^^^^ 77 | 78 | As a user of Mistral I want to be able to reuse existing actions while 79 | implementing my new actions so that I don't have to reimplement similar 80 | functionality. 81 | 82 | For example, I want to create action that checks if a certain virtual 83 | instance exists in the tenant by calling Nova and if it does the action 84 | runs a number of secure shell commands to configure it. In this scenario, 85 | we need to call Nova and do ssh. Both already exist in Mistral as actions 86 | "nova.servers_get" and "std.ssh". So there should be a mechanism allowing 87 | to reuse those actions while creating a new more complex action. 88 | 89 | Proposed change 90 | =============== 91 | 92 | General idea 93 | ------------ 94 | 95 | We need to have one or more Python packages in Mistral that are designed 96 | and documented as a public Python API for developers that want to create 97 | custom actions. These packages should effectively provide a number of 98 | classes that can be used directly or inherited as needed. They should 99 | cover the following aspects of action development: 100 | 101 | * Base class or a number of classes that can be extended in order to build 102 | new Mistral actions. Currently existing **mistral.actions.base.Action** 103 | is an example of such class. 104 | 105 | * Module that provides access to security context associated with the 106 | current workflow that this action belongs to. Security context should 107 | at least include user, project/tenant, auth token. 108 | 109 | * Module that provides access to current Mistral execution context. 
That 110 | context should include: 111 | 112 | * Current workflow execution id 113 | 114 | * Current task execution id 115 | 116 | * Current action execution id 117 | 118 | * Package with most frequently used utils and data types used during 119 | custom actions development. For example, class 120 | mistral.workflow.utils.Result that now exists in the code base is 121 | needed by actions but it's not clear that it's part of Python API. 122 | 123 | * Module that allows to get and reuse existing actions 124 | 125 | Since these Python entities must be available for both engine and 126 | executor they should be moved to a separate subproject of Mistral, for 127 | example, **mistral-actions-api**. 128 | 129 | Existing OpenStack actions should be moved out of mistral project into 130 | a different Mistral subproject. The proposal is to use **mistral-extra** 131 | repo for this purpose because although we use it only for collecting 132 | Mistral examples its initial idea was also to have additional tools 133 | and extensions in it. 134 | 135 | Specific entities 136 | ----------------- 137 | 138 | mistral.actions.api 139 | ^^^^^^^^^^^^^^^^^^^ 140 | Main Python package that contains all modules and classes which are part 141 | of Custom Actions API. 142 | 143 | mistral.actions.api.base 144 | ^^^^^^^^^^^^^^^^^^^^^^^^ 145 | Python module that contains base classes for custom actions. Currently 146 | module **mistral.actions.base** performs similar function. 147 | 148 | Note: Specific content of this module is out of scope of this spec and 149 | must be defined at implementation stage. 150 | 151 | mistral.actions.api.security 152 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 153 | Python module that contains required functions to get all required 154 | information related to current OpenStack security context. At minimum: 155 | user, project, auth token. 156 | 157 | Note: Specific content of this module is out of scope of this spec and 158 | must be defined at implementation stage. 159 | 160 | mistral.actions.api.types 161 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 162 | Python module that contains all data types that custom actions need to 163 | use. One candidate to go to that module that now exists is 164 | **mistral.workflow.utils.Result**. 165 | 166 | Note: Specific content of this module is out of scope of this spec and 167 | defined at implementation stage. 168 | 169 | mistral.actions.api.utils 170 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 171 | Python module that contains additional functions helpful for creating 172 | new Mistral actions. At minimum: functions to get instances of existing 173 | actions so that action developers could re-use functionality of existing 174 | actions. Return type for these actions though must be rather a wrapper 175 | that doesn't just call **Action.run()** method but instead uses Mistral 176 | action execution machinery to actually call action just like as if it 177 | was called as part of workflow (taking care of data transformations, 178 | fulfilling security and execution context etc.) 179 | 180 | Note: Specific content of this module is out of scope of this spec and 181 | must be defined at implementation stage. 182 | 183 | Alternatives 184 | ------------ 185 | 186 | None. 187 | 188 | Data model impact 189 | ----------------- 190 | 191 | None. 192 | 193 | REST API impact 194 | --------------- 195 | 196 | None. 197 | 198 | End user impact 199 | --------------- 200 | 201 | REST API users 202 | ^^^^^^^^^^^^^^ 203 | No impact. 
204 | 205 | Custom actions developers 206 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 207 | Having to use Custom Actions API described in this spec whereas now they 208 | can only use **mistral.actions.base** safely. 209 | 210 | Performance Impact 211 | ------------------ 212 | 213 | No significant impact is expected. Minor is possible. 214 | 215 | Deployer impact 216 | --------------- 217 | 218 | Deployers will need to make sure to install a new library containing 219 | Custom Action API packages, modules and classes. However, this impact 220 | is not supposed to be severe because all dependencies must be handled 221 | smoothly by Pip. 222 | 223 | In case if there's an existing Mistral installation with installed 224 | actions, some DB migration might be required. Changes in DB schema are 225 | not expected though. If so, Mistral project should provide convenient 226 | tools to help make this transition to using new actions. 227 | 228 | Implementation 229 | ============== 230 | 231 | Assignee(s) 232 | ----------- 233 | 234 | To be found based on discussions around the spec. 235 | 236 | Work Items 237 | ---------- 238 | 239 | * Create a new repo containing the code of Custom Actions API (e.g. 240 | **mistral-lib** or **mistral-common**, particular name is to be defined) 241 | * Design and implement modules listed in Specific Entities section 242 | * Provide deprecation mechanism so that during some period of time it 243 | would be possible to use the old approach for implementing Mistral 244 | actions (with **mistral.actions.base**) and the new one 245 | * Fix existing action implementations so that they use new API 246 | * Fix Mistral Executor accordingly 247 | * Fix Mistral Engine accordingly 248 | * Revisit and restructure repo **mistral-extra** 249 | * Move existing OpenStack actions into **mistral-extra** 250 | 251 | 252 | Dependencies 253 | ============ 254 | 255 | No additional dependencies are required. 256 | 257 | Testing 258 | ======= 259 | 260 | Custom Actions API can be tested on devstack based OpenStack CI gates 261 | such as gate-mistral-devstack-dsvm by creating and running custom 262 | actions that use this API. 263 | 264 | References 265 | ========== 266 | 267 | Initial patch for TripleO/Mistral integration: 268 | https://review.openstack.org/#/c/282366/ 269 | -------------------------------------------------------------------------------- /specs/pike/approved/mistral-extra.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ======================= 8 | Mistral Actions Library 9 | ======================= 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-actions-api-separate-openstack-actions 14 | 15 | This specification outlines a proposal to move the OpenStack actions to 16 | mistral-extra and define a process for accepting other third party actions 17 | in mistral-extra. 18 | 19 | 20 | Problem description 21 | =================== 22 | 23 | Mistral ships with a set of standard generic actions and a set of actions 24 | specific to OpenStack. The OpenStack actions help improve the utility of 25 | Mistral but including them in Mistral itself raises a number of limitations. 26 | 27 | - Some users won't want OpenStack actions. (i.e. Mistral running outside 28 | OpenStack). 29 | - Some users will only want specific OpenStack actions. (i.e. 
their deployment 30 | only includes certain OpenStack projects). 31 | - OpenStack actions can easily become out of date as the OpenStack clients can 32 | evolve quickly. Releases to these actions require full Mistral releases. 33 | 34 | The OpenStack actions make Mistral much more useful in OpenStack environments 35 | but there isn't a clear place to collect or develop other Mistral actions. 36 | Without a wider set of actions that integrate with other services and projects 37 | Mistral is unlikely to become used more widely. 38 | 39 | 40 | Use Cases 41 | --------- 42 | 43 | - As an administrator and deployer I want control over the actions included 44 | with Mistral. 45 | - As a developer I want a clear place to contribute to actions and add 46 | support for third party actions. 47 | - As an end user I want my workflows to have access to a library of actions 48 | that allow my workflows to do more without having to create custom actions. 49 | 50 | 51 | Proposed change 52 | =============== 53 | 54 | This specification proposes moving the current OpenStack actions to the 55 | mistral-extra repo, which can be then referred to as the Mistral actions 56 | library. This repository will depend on mistral-lib, as being developed under 57 | the `custom actions API spec `_. 58 | 59 | mistral-extra will contain a Python namespace for all the OpenStack actions, 60 | this will likely be ``mistral_extra.openstack``. The action names will remain 61 | as they are now to maintain backwards compatability. It will also allow other 62 | actions to be added to this project if they meet the requirements. 63 | 64 | For an action to be considered for mistral-extra it should be generic and 65 | useful for a wide range of users and not specific to one user or project. There 66 | should also be a way to automate the testing of this action to verify that it 67 | continues to work over time. Without the ability to do an integration test the 68 | actions will become impossible to maintain as the library grows. Generally 69 | useful actions are also candidates to be included in Mistral itself with the 70 | other ``std.*`` actions. 71 | 72 | A configuration file will be added that allows users to specify which actions 73 | they want to include. By default this config will match the existing behaviour 74 | and include all OpenStack actions, but other actions will need to be enabled 75 | for each project. 76 | 77 | 78 | Alternatives 79 | ------------ 80 | 81 | Alternatively we could continue storing the actions in Mistral and third party 82 | actions could be developed outside of Mistral entirely. This may work, but it 83 | would make it much harder to grow a cohesive library that is easy to use. 84 | 85 | 86 | Data model impact 87 | ----------------- 88 | 89 | No data model changes are required. 90 | 91 | 92 | REST API impact 93 | --------------- 94 | 95 | No API changes are required. 96 | 97 | 98 | Python API 99 | ---------- 100 | 101 | The mistral-extra repository will also provide a stable Python API that can be 102 | used by action developers as a library. This will allow them to access the 103 | OpenStack actions and extend or customise them. The API is still to be 104 | determined but usage will likely look something like this. 
105 | 106 | :: 107 | 108 | from mistral_extra import openstack 109 | 110 | class CustomNovaAction(openstack.NovaAction): 111 | 112 | def run(self, context): 113 | 114 | nova_client = self.get_client(context) 115 | # do something custom and return 116 | 117 | 118 | Access to the Python clients for OpenStack projects will also be possible. This 119 | will allow custom action developers to easily consume multiple OpenStack 120 | clients. 121 | 122 | :: 123 | 124 | from mistral_extra import openstack 125 | from mistral_lib import actions 126 | 127 | class CustomAction(actions.Action): 128 | 129 | def run(self, context): 130 | 131 | nova_client = openstack.NovaAction.get_client(context) 132 | glance_client = openstack.GlanceAction.get_client(context) 133 | 134 | 135 | End user impact 136 | --------------- 137 | 138 | Users will primarily interact with the feature by customising the mistral-extra 139 | config file. This will need to be documented. 140 | 141 | 142 | Performance Impact 143 | ------------------ 144 | 145 | No performance impact. 146 | 147 | 148 | Deployer impact 149 | --------------- 150 | 151 | No additional steps will be required by default for deployers. However, they 152 | will have greater control of their deployment and which actions are included 153 | and available to users. 154 | 155 | 156 | Implementation 157 | ============== 158 | 159 | Assignee(s) 160 | ----------- 161 | 162 | Primary assignee: 163 | d0ugal 164 | 165 | Other contributors: 166 | None 167 | 168 | 169 | Work Items 170 | ---------- 171 | 172 | - Copy OpenStack actions to mistral-extra 173 | - Package and release mistral-extra and update Mistral to depend on it 174 | - Remove OpenStack actions from mistral 175 | - Update the documentation to reflect the configuration of mistral-lib 176 | 177 | 178 | Dependencies 179 | ============ 180 | 181 | * `Custom Actions API `_ 182 | 183 | 184 | Testing 185 | ======= 186 | 187 | This will reduce the testing burden on the main Mistral repository. However, 188 | additional test cases will be needed for mistral-extra. It may also be wise to 189 | setup a periodic test to verify that a recent version of Mistral master still 190 | works with mistral-extra. 191 | 192 | 193 | References 194 | ========== 195 | 196 | None 197 | -------------------------------------------------------------------------------- /specs/pike/approved/secure-sensitive-data.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ======================= 8 | Securing Sensitive Data 9 | ======================= 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-secure-sensitive-data 14 | 15 | This specification describes additions/changes to the DSL to allow a workflow 16 | author to describe fields that should be treated as sensitive. By marking 17 | a field as sensitive, it will signal that the engine should redact said 18 | field from logs. 19 | 20 | Additionally, a decarator will be made available to Action developers to mark 21 | parameters as secret, thus treating them in a sensitive way. 22 | 23 | Problem description 24 | =================== 25 | 26 | Currently, there is no regard for sensitive data when writing information to 27 | logs, and other places, mainly because Mistral is agnostic to the values 28 | passed to it. 
It cannot currently distinguish sensitive data from "normal" 29 | data. In order for Mistral to make this distinction, some additions to the DSL 30 | must be made in order to signify data that is sensitive, and should be 31 | redacted. 32 | 33 | Use Cases 34 | --------- 35 | 36 | Hiding passwords from logs 37 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ 38 | As a workflow author, I want to have a password as input, and when Mistral 39 | logs information about my workflow, I do not want the password to be in 40 | plain text. It should be displayed as '*********'. 41 | 42 | Proposed change 43 | =============== 44 | 45 | Add a 'hidden' section to the workflow DSL. One can use a regex to specify a 46 | pattern of values to hide. 47 | Add a decorator to be used when developing custom actions. 48 | Create a special data type, for example class Hidden, with string 49 | representation "******" so that if something is wrapped into it we'll never 50 | see it in logs in its initial form. 51 | Store Hidden instances in encoded form in DB, decode them when fetched 52 | from DB. 53 | 54 | 55 | .. code-block:: yaml 56 | 57 | --- 58 | version: "2.0" 59 | wf: 60 | input: 61 | - username 62 | hidden: 63 | - keyA 64 | - secret_* 65 | tasks: 66 | taskA: 67 | action: my_action 68 | publish: 69 | keyA: <% task(taskA).result.foobar %> 70 | secret_value: <% task(taskA).result.baz %> 71 | on-success: 72 | - taskB 73 | taskB: 74 | action: my_action2 75 | input: 76 | arg1: <% $.keyA %> 77 | secret_value: <% $.secret_value %> 78 | 79 | Note: arg1 will now be in the clear. If desired, arg1 would need to be added 80 | to the 'hidden' list. 81 | 82 | 83 | An example for actions: 84 | 85 | .. code-block:: python 86 | 87 | from mistral_lib.secret import protect 88 | 89 | @hidden(['password']) 90 | class MyAction(Action): 91 |      92 |     def init(self, password): 93 |         # do something 94 | 95 | Alternatives 96 | ------------ 97 | 98 | Alternatively, we could specify a hidden property at the task level. Nothing 99 | in the spec precludes this in the future. 100 | 101 | .. code-block:: yaml 102 | 103 | tasks: 104 | taskA: 105 | action: my_action 106 | hidden: 107 | - password 108 | input: 109 | password: <%...%> 110 | 111 | One could also use a yaql expression. There was not much support for this 112 | approach. 113 | 114 | .. code-block:: yaml 115 | 116 | tasks: 117 | taskA: 118 | action: my_action 119 | input: 120 | username: <% ... %> 121 | password: $.secret(<% ... %>) 122 | 123 | Another option would be to extend the syntax of the workflow input section: 124 | 125 | .. code-block:: yaml 126 | 127 | --- 128 | version: "2.0" 129 | wf: 130 | input: 131 | username: "" 132 | password: 133 | hidden: true 134 | 135 | 136 | Data model impact 137 | ----------------- 138 | 139 | None. 140 | 141 | REST API impact 142 | --------------- 143 | 144 | None. 145 | 146 | End user impact 147 | --------------- 148 | 149 | REST API users 150 | ^^^^^^^^^^^^^^ 151 | No impact. 152 | 153 | Custom actions developers 154 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 155 | 156 | Can use the decorator to mark sensitive inputs. 157 | 158 | Workflow developers 159 | ^^^^^^^^^^^^^^^^^^^ 160 | 161 | Can use the 'secret' section to signify sensitive fields. 162 | 163 | Performance Impact 164 | ------------------ 165 | 166 | No significant impact is expected. Minor is possible. 167 | 168 | Deployer impact 169 | --------------- 170 | 171 | None. 172 | 173 | Implementation 174 | ============== 175 | 176 | Assignee(s) 177 | ----------- 178 | 179 | Brad P. 
Crochet , IRC: thrash 180 | 181 | Secondary 182 | ^^^^^^^^^ 183 | 184 | Dougal Matthews , IRC: d0ugal 185 | 186 | Work Items 187 | ---------- 188 | 189 | * Implement new section 'hidden' in workflow DSL 190 | * Implement new decorator for custom actions 191 | * Store 'hidden' data to DB in some type of encoded form, or perhaps just 192 | masked. 193 | 194 | Dependencies 195 | ============ 196 | 197 | No additional dependencies are required. 198 | 199 | Testing 200 | ======= 201 | 202 | The action decorator can be tested via unit tests. 203 | The workflow DSL additions will likely need to be tested on devstack via 204 | tempest. 205 | 206 | References 207 | ========== 208 | 209 | Launchpad bug: 210 | https://bugs.launchpad.net/mistral/+bug/1337268 211 | -------------------------------------------------------------------------------- /specs/pike/approved/workflow-error-analysis.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ======================= 8 | Workflow Error Analysis 9 | ======================= 10 | 11 | Include the URL of your launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-error-analysis 14 | 15 | This specification will outline the need for error analysis command or method 16 | within Mistral. 17 | 18 | 19 | Problem description 20 | =================== 21 | 22 | Currently there is not a central way or single command which can be issued to 23 | determine the root cause of an error that has occurred upon failure of a 24 | mistral workflow. The proposed functionality would give the developer and 25 | operator a method which can help debug errors which may stem from syntax errors 26 | within the workbook or reveal actual bugs, by reporting the necessary 27 | information from the execution to the client. 28 | 29 | 30 | Use Cases 31 | --------- 32 | 33 | The main uses for this feature would involve post workflow runs which involve 34 | but not limited to OpenStack post deployment and workflow run investigation. 35 | 36 | 37 | Proposed change 38 | =============== 39 | 40 | Provide a command line interface and public API which the operator can use to 41 | trigger the analysis of errors. 42 | 43 | The table below is a draft example and subject to change once reviews are 44 | complete. 
45 | 46 | * 'mistral report-generate ' 47 | 48 | +----------------------+--------------------------------------------+ 49 | |Field | Value | 50 | +======================+============================================+ 51 | |Workflow_name | my_workflow | 52 | +----------------------+--------------------------------------------+ 53 | |Workflow_ID | xxxxx-xxxx-xxx-xxxxxxx | 54 | +----------------------+--------------------------------------------+ 55 | |Workflow_State | [Error | Success ] | 56 | +----------------------+--------------------------------------------+ 57 | |\**Workflow_State_info| \*** | 58 | +----------------------+--------------------------------------------+ 59 | |Task_name | my_task | 60 | +----------------------+--------------------------------------------+ 61 | |Task_ID | xxxxx-xxxx-xxx-xxxxxxx | 62 | +----------------------+--------------------------------------------+ 63 | |Task_State | [Error | Success] | 64 | +----------------------+--------------------------------------------+ 65 | |Task_State_info | | 66 | +----------------------+--------------------------------------------+ 67 | 68 | * 'mistral report-generate --include-trace ' 69 | 70 | +------------------------+--------------------------------------------+ 71 | |Field | Value | 72 | +========================+============================================+ 73 | |Workflow_name | my_workflow | 74 | +------------------------+--------------------------------------------+ 75 | |Workflow_ID | xxxxx-xxxx-xxx-xxxxxxx | 76 | +------------------------+--------------------------------------------+ 77 | |Workflow_State | [Error | Success ] | 78 | +------------------------+--------------------------------------------+ 79 | |\**Workflow_State_info | \*** | 80 | +------------------------+--------------------------------------------+ 81 | |Task_name | my_task | 82 | +------------------------+--------------------------------------------+ 83 | |Task_ID | xxxxx-xxxx-xxx-xxxxxxx | 84 | +------------------------+--------------------------------------------+ 85 | |Task_State | [Error | Success] | 86 | +------------------------+--------------------------------------------+ 87 | |Task_State_info | | 88 | +------------------------+--------------------------------------------+ 89 | |\****Workflow_traceback | my_workflow ERROR | 90 | | | task_2 ERROR | 91 | | | workflow: my_other_workflow | 92 | | | task_b: Error | 93 | | | action: somethingbroken | 94 | +------------------------+--------------------------------------------+ 95 | 96 | \** State info would report in the case where no error is generated. 97 | 98 | \*** Task name and cause, the cause would be evaluated from an enum value. 99 | 100 | \**** Workflow traceback would report a more verbose output of errors; this 101 | output could be controlled with the CLI switch --include-trace. Without the 102 | flag, the operator would just receive the enum value with a brief description. 103 | 104 | Example: 105 | * E101 -- task contains syntax error 106 | * E120 -- task missing input 107 | * E201 -- action failed to complete 108 | 109 | 110 | 111 | 112 | Alternatives 113 | ------------ 114 | 115 | The current method of determining an error would involve looking through the 116 | workflow execution id list to determine what is in an error state.
117 | 118 | * 'mistral task-list ' and see which tasks are in ERROR 119 | * For each failed task execution, run: 120 | - 'mistral action-execution-list' and see which are in ERROR 121 | * For each failed action, run: 122 | - 'mistral action-execution-get-output ' to see the description of the 123 | error 124 | * For each failed task execution of type Workflow, find the sub-workflow 125 | execution ID, and go back to the first bullet. 126 | 127 | Data model impact 128 | ----------------- 129 | 130 | None. 131 | 132 | REST API impact 133 | --------------- 134 | 135 | This is still in discussion. 136 | 137 | * A separate REST API endpoint to build reports on the current status of 138 | execution and/or error analysis 139 | 140 | End user impact 141 | --------------- 142 | 143 | The end user would have a newly documented method/function to call to start 144 | the error analysis. 145 | 146 | 147 | Performance Impact 148 | ------------------ 149 | 150 | If this is implemented on the server side, the performance impact should be 151 | small, as the number of REST calls would be drastically reduced. 152 | 153 | Deployer impact 154 | --------------- 155 | 156 | This would provide additional information to help the operator correct errors 157 | in the deployment, or it will provide enough information that can be attached 158 | to a bug report to help developers correct the offending source. 159 | 160 | 161 | Implementation 162 | ============== 163 | 164 | Assignee(s) 165 | ----------- 166 | 167 | Primary assignee: 168 | toure 169 | 170 | Other contributors: 171 | rakhmerov 172 | 173 | Work Items 174 | ---------- 175 | 176 | * Create new Mistral engine error analysis functionality. 177 | * Update python-mistralclient to include new API changes. 178 | * Update documentation to explain usage. 179 | * Create CI scripts/jobs to mimic errors in workflows. 180 | 181 | 182 | Dependencies 183 | ============ 184 | 185 | None. 186 | 187 | Testing 188 | ======= 189 | 190 | Functional tests that imitate workflow failures and make sure that we 191 | get the right report. 192 | 193 | 194 | References 195 | ========== 196 | 197 | None. 198 | -------------------------------------------------------------------------------- /specs/pike/implemented/mistral-multi-region-support.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | =================================== 8 | Support region in OpenStack actions 9 | =================================== 10 | 11 | Launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/mistral-multi-region-support 14 | 15 | This blueprint intends to support specifying a region name for OpenStack 16 | actions, so that an authorized user can run OpenStack actions in different 17 | OpenStack regions within one workflow. This is extremely useful for operators 18 | who want to execute maintenance tasks across different regions within a 19 | single workflow. 20 | 21 | 22 | Problem description 23 | =================== 24 | 25 | Currently, a Mistral end user can specify a region name (or target region 26 | name) when running an OpenStack workflow, which means all the tasks inside the 27 | workflow have to run within that specific region. It's impossible to run 28 | different tasks in different regions.
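For illustration only: the region is effectively fixed once per workflow today
because it is only taken into account when the OpenStack client for an action
is built. A minimal, hypothetical sketch of that kind of client construction
follows (names, credentials and values here are illustrative assumptions, not
the actual Mistral code):

.. code-block:: python

    from keystoneauth1.identity import v3
    from keystoneauth1 import session
    from novaclient import client as nova_client

    def build_nova_client(region_name):
        # Placeholder credentials; in Mistral these come from the security
        # context of the workflow execution, not from literals like these.
        auth = v3.Password(auth_url='http://keystone.example.com:5000/v3',
                           username='operator', password='secret',
                           project_name='admin',
                           user_domain_id='default',
                           project_domain_id='default')
        sess = session.Session(auth=auth)
        # The region is chosen here and, today, the same value applies to
        # every task in the workflow, which is the limitation described
        # above. The proposed 'action_region' input would let each task
        # supply its own value at this point.
        return nova_client.Client('2', session=sess,
                                  region_name=region_name)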
29 | 30 | Use Cases 31 | --------- 32 | 33 | * A user with credentials in region-1 can specify region-2 for the whole 34 | workflow (Keystone is shared between different regions). 35 | 36 | * A user with credentials in region-1 can specify a different region in 37 | different tasks. 38 | 39 | 40 | Proposed change 41 | =============== 42 | 43 | The basic idea is to simply support 'action_region' as an extra input param 44 | for actions of configured OpenStack projects. 45 | 46 | 1. Add a config option called ``modules_support_region``; the value will be a 47 | list of module names for which the 'action_region' input param will be added 48 | to each of its actions. The reason for adding this option is that there may 49 | be some OpenStack services (e.g. Keystone) that already support a region (or 50 | similar concept) in their action input params, and we should not add a param 51 | with the same meaning; e.g. if Keystone is shared between regions in an 52 | OpenStack deployment, 'keystone' should be excluded from the list. 53 | 54 | 2. Insert 'action_region' as an optional input param when registering OpenStack 55 | actions. 56 | 57 | Alternatives 58 | ------------ 59 | 60 | As mentioned in the problem description section, it's impossible to run 61 | multi-region workflows without this feature. 62 | 63 | Data model impact 64 | ----------------- 65 | 66 | This change does not affect the data model. 67 | 68 | REST API impact 69 | --------------- 70 | 71 | None. 72 | 73 | End user impact 74 | --------------- 75 | 76 | With this change, users can specify a region name as an action input. 77 | 78 | Performance Impact 79 | ------------------ 80 | 81 | None. 82 | 83 | Deployer impact 84 | --------------- 85 | 86 | When upgrading, the deployer should run the ``tools/sync_db.py`` script to 87 | delete all OpenStack actions and re-create them. 'action_region' will be added 88 | as an input param automatically. 89 | 90 | 91 | Implementation 92 | ============== 93 | 94 | Assignee(s) 95 | ----------- 96 | 97 | Primary assignee: 98 | Lingxian Kong 99 | 100 | Work Items 101 | ---------- 102 | 103 | * Add 'action_region' as an action input param for modules specified in 104 | the ``modules_support_region`` option. Use 'action_region' to construct the 105 | OpenStack service client. 106 | 107 | 108 | Dependencies 109 | ============ 110 | 111 | None. 112 | 113 | 114 | Testing 115 | ======= 116 | 117 | * Write tests to make sure 'action_region' is supported for configured 118 | OpenStack modules. 119 | 120 | * Write tests to make sure 'action_region' is properly used to construct the 121 | OpenStack service client. 122 | 123 | 124 | References 125 | ========== 126 | 127 | None. 128 | -------------------------------------------------------------------------------- /specs/policy-template.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This template should be in ReSTructured text. For help with syntax, 3 | see http://sphinx-doc.org/rest.html 4 | 5 | To test out your formatting, build the docs using tox, or see: 6 | http://rst.ninjs.org 7 | 8 | The filename in the git repository should match the launchpad URL, 9 | for example a URL of 10 | https://blueprints.launchpad.net/oslo?searchtext=awesome-thing should be 11 | named awesome-thing.rst. 12 | 13 | For specs targeted at a single project, please prefix the first line 14 | of your commit message with the name of the project.
For example, 15 | if you're submitting a new feature for oslo.config, your git commit 16 | message should start something like: "config: My new feature". 17 | 18 | Wrap text at 79 columns. 19 | 20 | Do not delete any of the sections in this template. If you have 21 | nothing to say for a whole section, just write: None 22 | 23 | If you would like to provide a diagram with your spec, ascii diagrams are 24 | required. http://asciiflow.com/ is a very nice tool to assist with making 25 | ascii diagrams. The reason for this is that the tool used to review specs is 26 | based purely on plain text. Plain text will allow review to proceed without 27 | having to look at additional files which can not be viewed in gerrit. It 28 | will also allow inline feedback on the diagram itself. 29 | 30 | ========================= 31 | The title of the policy 32 | ========================= 33 | 34 | Introduction paragraph -- why are we doing anything? 35 | 36 | Problem Description 37 | =================== 38 | 39 | A detailed description of the problem. 40 | 41 | Policy 42 | ====== 43 | 44 | Here is where you cover the change you propose to make in detail. How do you 45 | propose to solve this problem? 46 | 47 | If the policy seeks to modify a process or workflow followed by the 48 | team, explain how and why. 49 | 50 | If this is one part of a larger effort make it clear where this piece ends. In 51 | other words, what's the scope of this policy? 52 | 53 | Alternatives & History 54 | ====================== 55 | 56 | What other ways could we do this thing? Why aren't we using those? This doesn't 57 | have to be a full literature review, but it should demonstrate that thought has 58 | been put into why the proposed solution is an appropriate one. 59 | 60 | If the policy changes over time, summarize the changes here. The exact 61 | details are always available by looking at the git history, but 62 | summarizing them will make it easier for anyone to follow the desired 63 | policy and understand when and why it might have changed. 64 | 65 | Implementation 66 | ============== 67 | 68 | Author(s) 69 | --------- 70 | 71 | Who is leading the writing of the policy? If more than one person is 72 | working on it, please designate the primary author and contact. 73 | 74 | Primary author: 75 | 76 | 77 | Other contributors: 78 | 79 | 80 | Milestones 81 | ---------- 82 | 83 | When will the policy go into effect? 84 | 85 | If there is a built-in deprecation period for the policy, or criteria 86 | that would trigger it no longer being in effect, describe them. 87 | 88 | Work Items 89 | ---------- 90 | 91 | List any concrete steps we need to take to implement the policy. 92 | 93 | References 94 | ========== 95 | 96 | Please add any useful references here. You are not required to have 97 | any references. Moreover, this policy should still make sense when 98 | your references are unavailable. Examples of what you could include 99 | are: 100 | 101 | * Links to mailing list or IRC discussions 102 | 103 | * Links to notes from a summit session 104 | 105 | * Links to relevant research, if appropriate 106 | 107 | * Related policies as appropriate 108 | 109 | * Anything else you feel it is worthwhile to refer to 110 | 111 | Revision History 112 | ================ 113 | 114 | .. list-table:: Revisions 115 | :header-rows: 1 116 | 117 | * - Release Name 118 | - Description 119 | * - 120 | - Introduced 121 | 122 | .. note:: 123 | 124 | This work is licensed under a Creative Commons Attribution 3.0 125 | Unported License. 
126 | http://creativecommons.org/licenses/by/3.0/legalcode 127 | -------------------------------------------------------------------------------- /specs/policy/patch-abandonment.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Patch Abandonment 3 | ================= 4 | 5 | Goal 6 | ==== 7 | 8 | Provide a basic policy that core reviewers can apply to outstanding reviews. As 9 | always, it is up to the core reviewers discretion on whether a patch should or 10 | should not be abandoned. This policy is just a baseline with some basic rules. 11 | 12 | Problem Description 13 | =================== 14 | 15 | Mistral consists of a number of different git repositories and there are open 16 | and stale patches that have not been updated in a long time. This can make it 17 | hard to assess the current state of reviews, since any report is cluttered by 18 | old and idle reviews. 19 | 20 | When to Abandon 21 | =============== 22 | 23 | If a proposed patch has sat idle for more than 180 days with a -1 from a 24 | reviewer or CI. A core reviewer should abandon the change with a reference to 25 | this policy. 26 | 27 | The following message can be used when abandoning patches. 28 | 29 | :: 30 | 31 | Abandoning this patch per the Mistral Patch Abandonment guidelines 32 | (https://specs.openstack.org/openstack/mistral-specs/specs/policy/patch-abandonment.html). 33 | If you wish to have this restored and cannot do so yourself, please reach 34 | out via #openstack-mistral on OFTC or the OpenStack Dev mailing list. 35 | 36 | When NOT to Abandon 37 | =================== 38 | 39 | If a proposed patch has no feedback but has a +1 from CI, a core reviewer 40 | should not abandon such changes. This change should be reviewed and moved 41 | forward towards being updated or merged. 42 | 43 | 44 | Restoration 45 | =========== 46 | 47 | Anyone should feel free to restore their own patches. If a change has been 48 | abandoned, anyone can request the restoration of the patch by asking a core 49 | reviewer on IRC in #openstack-mistral on OFTC or by sending a request to 50 | the openstack-dev mailing list. Should the patch again become stale it may be 51 | abandoned again. 52 | 53 | Alternative & History 54 | ===================== 55 | 56 | This plan is based on similar approaches taken in other OpenStack projects, 57 | such as TripleO. This plan was discussed on openstack-dev [1]_. 58 | 59 | Implementation 60 | ============== 61 | 62 | Author(s) 63 | --------- 64 | 65 | Primary author: 66 | d0ugal 67 | 68 | References 69 | ========== 70 | 71 | .. [1] http://lists.openstack.org/pipermail/openstack-dev/2018-July/132073.html 72 | 73 | Milestones 74 | ---------- 75 | 76 | Rocky 77 | 78 | Work Items 79 | ---------- 80 | 81 | - Perform the initial cleanup and abandonment. 82 | - Create a script to automate the process. 83 | 84 | .. note:: 85 | 86 | This work is licensed under a Creative Commons Attribution 3.0 87 | Unported License. 88 | http://creativecommons.org/licenses/by/3.0/legalcode 89 | -------------------------------------------------------------------------------- /specs/rocky/approved/custom-context-for-executions.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 
4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ========================================== 8 | Custom Context for Executions 9 | ========================================== 10 | 11 | https://blueprints.launchpad.net/mistral/+spec/mistral-custom-context-for-executions 12 | 13 | Currently it is not possible to send contextual parameters to actions. The 14 | context contains only the OS auth parameters in the SecurityContext and some 15 | details about the execution in the ExecutionContext. There is a general demand 16 | to be able to pass parameters at this level so that they are pervasively 17 | present throughout the execution without contaminating the workflow definition. 18 | 19 | 20 | Problem description 21 | =================== 22 | 23 | Use Cases 24 | --------- 25 | 26 | 1. Pass context parameters to custom actions 27 | 28 | Custom actions may need various contextual parameters, e.g. request/correlation 29 | IDs, authentication tokens, miscellaneous data. These pieces of data should 30 | not be passed as regular input parameters, both to avoid having them logged 31 | and to make them accessible in any part of the execution. 32 | 33 | 2. Provide auth parameters for OpenStack actions 34 | 35 | Currently the OpenStack actions use the SecurityContext to get the 36 | authentication data. The X-Target-* headers provide an alternative way to 37 | override these parameters but the size of these headers is limited and already 38 | makes them unable to handle larger OpenStack installations. [1,2] 39 | 40 | 3. Clearly separate the Mistral API authentication parameters from the 41 | OpenStack Action authentication parameters 42 | 43 | The Mistral API has different plugins for authentication: Keystone and Keycloak 44 | at the time of writing this specification. OpenStack actions should not rely 45 | on the parameters used for the Mistral API authentication as this tight 46 | coupling introduces unwanted restrictions. E.g. it is fairly awkward to 47 | use Keycloak authentication and run OpenStack actions in the same execution. 48 | 49 | This separation also helps the efforts to separate OpenStack actions from 50 | the Mistral Core. 51 | 52 | 53 | Proposed change 54 | =============== 55 | 56 | Add a custom_context dict to the execution type API resources (Execution, 57 | ActionExecution) and use the data in it to update the context. 58 | 59 | The 'os.auth' key is used to store the X-Target-* headers data in its 60 | current format. To decrease the size of the final context, this key 61 | is removed from the custom_context after processing. 62 | 63 | Other keys can be used freely by custom action implementations. These 64 | are stored under the 'custom_context' key. 65 | 66 | 67 | Alternatives 68 | ------------ 69 | 70 | 1. Keep using the X-Target-* headers: there are already bugs filed. 71 | 72 | 2. Send this info hidden in the request body. 73 | E.g. we could add custom_context to the POST request body and we could 74 | add it to the context in the ContextHook. However, the request body is 75 | present only in text form in the ContextHook and the parameter cannot 76 | be left in the request body as it causes the WSME implementation to 77 | fail unmarshalling the input objects. 78 | 79 | Data model impact 80 | ----------------- 81 | 82 | Not affected. 83 | 84 | REST API impact 85 | --------------- 86 | 87 | Add the custom_context dict field to the Execution and ActionExecution 88 | API resources. 89 | 90 | Does not involve new REST API endpoints or methods.
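To make the proposed resource change concrete, the sketch below shows what an
execution-create request carrying the new field might look like. The endpoint
and the pre-existing fields are meant to follow the current Mistral v2 API;
the custom_context field, its special 'os.auth' key and the exact
serialization are assumptions based on this proposal, not an existing API.

.. code-block:: python

    import json

    import requests

    # Assumed endpoint; adjust host/port to the actual deployment.
    MISTRAL_API = 'http://mistral.example.com:8989/v2'
    HEADERS = {
        'X-Auth-Token': 'TOKEN_FOR_THE_MISTRAL_API',
        'Content-Type': 'application/json',
    }

    body = {
        'workflow_name': 'my_workflow',
        'input': json.dumps({'vm_name': 'test_vm'}),
        # Proposed field: arbitrary keys become visible to custom actions
        # under the custom context, while the special 'os.auth' key carries
        # the credentials used by OpenStack actions (shape assumed here).
        'custom_context': {
            'correlation_id': '12da',
            'os.auth': {
                'auth_url': 'http://keystone.example.com:5000/v3',
                'auth_token': 'TOKEN_FOR_OPENSTACK_ACTIONS',
            },
        },
    }

    resp = requests.post(MISTRAL_API + '/executions',
                         headers=HEADERS, data=json.dumps(body))
    print(resp.status_code, resp.json().get('id'))

Against an older Mistral API instance the extra field would simply cause
unmarshalling to fail, which is the backward compatibility point discussed
later in this spec.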
91 | 92 | End user impact 93 | --------------- 94 | 95 | mistral execution-create 96 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 97 | 98 | Add the --custom_context parameter. 99 | 100 | mistral execution-create --custom_context='{"correlation_id":"12da"}' ... 101 | 102 | mistral run-action 103 | ~~~~~~~~~~~~~~~~~~ 104 | 105 | Add the --custom_context parameter. 106 | 107 | mistral run-action --custom_context='{"correlation_id":"12da"}' ... 108 | 109 | 110 | Performance Impact 111 | ------------------ 112 | 113 | A large custom_context can slow down the execution of workflows, as it is 114 | passed in the RPC messages and saved in the delayed_calls table. For the 115 | same reason, it can affect the memory usage of components. 116 | 117 | 118 | Backward compatibility 119 | ---------------------- 120 | 121 | Clients sending the custom_context parameter will not be able to 122 | use the execution-create and run-action APIs with previous Mistral API 123 | instances because the resource unmarshalling step will fail. 124 | 125 | 126 | Deployer impact 127 | --------------- 128 | 129 | None. 130 | 131 | 132 | Implementation 133 | ============== 134 | 135 | Assignee(s) 136 | ----------- 137 | 138 | Primary assignee: 139 | akovi 140 | 141 | Other contributors: 142 | 143 | Work Items 144 | ---------- 145 | 146 | * Add the --custom_context parameter to the mistral run-action command 147 | 148 | * Add the --custom_context parameter to the mistral execution-create command 149 | 150 | * Add the custom_context field to the Execution API resource 151 | 152 | * Add the custom_context field to the ActionExecution API resource 153 | 154 | * Add the support for the special 'os.auth' key (client and API) 155 | 156 | * Add the custom_context to mistral-lib 157 | 158 | 159 | Dependencies 160 | ============ 161 | 162 | None. 163 | 164 | 165 | Testing 166 | ======= 167 | 168 | API tests to ensure the correct parameter handling in Execution and 169 | ActionExecution. 170 | 171 | Check that the custom parameters are loaded in context.custom_context. 172 | 173 | 174 | References 175 | ========== 176 | 177 | 178 | [1] https://bugs.launchpad.net/python-mistralclient/+bug/1702324 179 | [2] https://bugs.launchpad.net/mistral/+bug/1699248 180 | -------------------------------------------------------------------------------- /specs/template.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This work is licensed under a Creative Commons Attribution 3.0 Unported 3 | License. 4 | 5 | http://creativecommons.org/licenses/by/3.0/legalcode 6 | 7 | ========================================== 8 | Example Spec - The title of your blueprint 9 | ========================================== 10 | 11 | Include the URL of your launchpad blueprint: 12 | 13 | https://blueprints.launchpad.net/mistral/+spec/example 14 | 15 | Introduction paragraph -- why are we doing anything? A single paragraph of 16 | prose that operators can understand. The title and this first paragraph 17 | should be used as the subject line and body of the commit message respectively. 18 | 19 | Some notes about the usage of mistral-spec: 20 | 21 | * Not all blueprints need a spec. If a new feature is straightforward enough 22 | that it doesn't need any design discussion, then no spec is required. This 23 | should be decided in an IRC meeting with the whole core team. 24 | 25 | * The aim of this document is first to define the problem we need to solve, 26 | and second to agree on the overall approach to solve that problem.
27 | 28 | * This is not intended to be extensive documentation for a new feature. 29 | For example, there is no need to specify the exact configuration changes, 30 | 31 | nor the exact details of any DB model changes. But you should still define 32 | that such changes are required, and be clear on how that will affect 33 | upgrades. 34 | 35 | * You should aim to get your spec approved before writing your code. 36 | While you are free to write prototypes and code before getting your spec 37 | approved, its possible that the outcome of the spec review process leads 38 | you towards a fundamentally different solution than you first envisaged. 39 | 40 | * But, API changes are held to a much higher level of scrutiny. 41 | As soon as an API change merges, we must assume it could be in production 42 | somewhere, and as such, we then need to support that API change forever. 43 | To avoid getting that wrong, we do want lots of details about API changes 44 | upfront. 45 | 46 | Some notes about using this template: 47 | 48 | * Your spec should be in ReSTructured text, like this template. 49 | 50 | * Please wrap text at 79 columns. 51 | 52 | * The filename in the git repository should match the launchpad URL, for 53 | example a URL of: https://blueprints.launchpad.net/mistral/+spec/awesome-thing 54 | should be named awesome-thing.rst. 55 | 56 | * Please do not delete any of the sections in this template. If you have 57 | nothing to say for a whole section, just write: None. 58 | 59 | * For help with syntax, see http://sphinx-doc.org/rest.html 60 | 61 | 62 | Problem description 63 | =================== 64 | 65 | A detailed description of the problem. What problem is this blueprint 66 | addressing? 67 | 68 | Use Cases 69 | --------- 70 | 71 | What use cases does this address? What impact on actors does this change have? 72 | Ensure you are clear about the actors in each use case: Developer, End User, 73 | Deployer etc. 74 | 75 | 76 | Proposed change 77 | =============== 78 | 79 | Here is where you cover the change you propose to make in detail. How do you 80 | propose to solve this problem? 81 | 82 | If this is one part of a larger effort make it clear where this piece ends. In 83 | other words, what's the scope of this effort? 84 | 85 | Alternatives 86 | ------------ 87 | 88 | What other ways could we do this thing? Why aren't we using those? This doesn't 89 | have to be a full literature review, but it should demonstrate that thought has 90 | been put into why the proposed solution is an appropriate one. 91 | 92 | Data model impact 93 | ----------------- 94 | 95 | Changes which require modifications to the data model often have a wider impact 96 | on the system. The community often has strong opinions on how the data model 97 | should be evolved, from both a functional and performance perspective. It is 98 | therefore important to capture and gain agreement as early as possible on any 99 | proposed changes to the data model. 100 | 101 | Questions which need to be addressed by this section include: 102 | 103 | * What new database schema changes is this going to require? 104 | 105 | * What database migrations will accompany this change. 106 | 107 | * How will the initial set of new data objects be generated, for example if you 108 | need to take into account existing workflow/execution, or modify other 109 | existing data, please describe how that will work. 
110 | 111 | REST API impact 112 | --------------- 113 | 114 | Each API method which is either added or changed should have the following: 115 | 116 | * Specification for the method. 117 | 118 | * A description of the added or changed method. 119 | 120 | * Method type (POST/PUT/GET/DELETE). 121 | 122 | * Normal http response code(s). 123 | 124 | * Expected error http response code(s). 125 | 126 | * URL for the resource. 127 | 128 | * Parameters which can be passed via the url. 129 | 130 | * Example use case including typical API samples for both data supplied 131 | by the caller and the response. 132 | 133 | End user impact 134 | --------------- 135 | 136 | Aside from the API, are there other ways a user will interact with this 137 | feature? 138 | 139 | * Does this change have an impact on python-mistralclient? What does the user 140 | interface there look like? 141 | 142 | Performance Impact 143 | ------------------ 144 | 145 | Describe any potential performance impact on the system, for example 146 | how often will new code be called, and is there a major change to the calling 147 | pattern of existing code. 148 | 149 | Examples of things to consider here include: 150 | 151 | * A small change in a utility function or a commonly used decorator can have a 152 | large impacts on performance. 153 | 154 | * Calls which result in a database queries can have a profound impact on 155 | performance when called in critical sections of the code. 156 | 157 | * Will the change include any locking, and if so what considerations are there 158 | on holding the lock? 159 | 160 | Deployer impact 161 | --------------- 162 | 163 | Discuss things that will affect how you deploy and configure OpenStack 164 | that have not already been mentioned, such as: 165 | 166 | * What config options are being added? Are the default values ones which will 167 | work well in real deployments? 168 | 169 | * Is this a change that takes immediate effect after its merged, or is it 170 | something that has to be explicitly enabled? 171 | 172 | * If this change is a new binary, how would it be deployed? 173 | 174 | * Please state anything that those doing continuous deployment, or those 175 | upgrading from the previous release, need to be aware of. Also describe 176 | any plans to deprecate configuration values or features. 177 | 178 | 179 | Implementation 180 | ============== 181 | 182 | Assignee(s) 183 | ----------- 184 | 185 | Who is leading the writing of the code? Or is this a blueprint where you're 186 | throwing it out there to see who picks it up? 187 | 188 | If more than one person is working on the implementation, please designate the 189 | primary author and contact. 190 | 191 | Primary assignee: 192 | 193 | 194 | Other contributors: 195 | 196 | 197 | Work Items 198 | ---------- 199 | 200 | Work items or tasks -- break the feature up into the things that need to be 201 | done to implement it. Those parts might end up being done by different people, 202 | but we're mostly trying to understand the timeline for implementation. 203 | 204 | 205 | Dependencies 206 | ============ 207 | 208 | * Include specific references to specs and/or blueprints in mistral, or in 209 | other projects, that this one either depends on or is related to. 210 | 211 | * Does this feature require any new library dependencies or code otherwise not 212 | included in Mistral? Or does it depend on a specific version of library? 
213 | 214 | 215 | Testing 216 | ======= 217 | 218 | Please discuss the important scenarios needed to test here, as well as 219 | specific edge cases we should be ensuring work correctly. 220 | 221 | Please discuss how the change will be tested, e.g. how Mistral is deployed? 222 | Does this change need some specific config options? Does this change need 223 | some 3rd party services pre-installed? 224 | 225 | 226 | References 227 | ========== 228 | 229 | Please add any useful references here. You are not required to have any 230 | reference. Moreover, this specification should still make sense when your 231 | references are unavailable. Examples of what you could include are: 232 | 233 | * Links to mailing list or IRC discussions 234 | 235 | * Links to notes from a summit session 236 | 237 | * Links to relevant research, if appropriate 238 | 239 | * Anything else you feel it is worthwhile to refer to 240 | -------------------------------------------------------------------------------- /specs/train/.placeholder: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openstack/mistral-specs/1acd95cf254ef26e18e765bde2ee1923b857fe24/specs/train/.placeholder -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | hacking>=1.1.0,<1.2.0 # Apache-2.0 2 | stestr>=2.0.0 # Apache-2.0 3 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openstack/mistral-specs/1acd95cf254ef26e18e765bde2ee1923b857fe24/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_directories.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | import glob 14 | import os 15 | 16 | import testtools 17 | 18 | 19 | class TestDirectories(testtools.TestCase): 20 | def test_directories(self): 21 | releases = [x.split('/')[1] for x in glob.glob('specs/*/')] 22 | 23 | for release in releases: 24 | 25 | if release == 'policy': 26 | # Policy specs are never "implemented" so they don't need to be 27 | # nested in the same way. 
28 | continue 29 | 30 | files = os.listdir("specs/%s/" % release) 31 | valid_names = ['approved', 'implemented'] 32 | 33 | for name in files: 34 | if name.startswith('.'): 35 | continue 36 | 37 | self.assertIn( 38 | name, 39 | valid_names, 40 | "Found unexpected file in 'specs/%s', specs should be " 41 | "submitted to 'specs/%s/approved'" % (release, release) 42 | ) 43 | -------------------------------------------------------------------------------- /tests/test_titles.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | import glob 14 | import re 15 | 16 | import docutils.core 17 | import testtools 18 | 19 | 20 | class TestTitles(testtools.TestCase): 21 | def _get_title(self, section_tree): 22 | section = { 23 | 'subtitles': [], 24 | } 25 | 26 | for node in section_tree: 27 | if node.tagname == 'title': 28 | section['name'] = node.rawsource 29 | elif node.tagname == 'section': 30 | subsection = self._get_title(node) 31 | section['subtitles'].append(subsection['name']) 32 | 33 | return section 34 | 35 | def _get_titles(self, spec): 36 | titles = {} 37 | 38 | for node in spec: 39 | if node.tagname == 'section': 40 | section = self._get_title(node) 41 | titles[section['name']] = section['subtitles'] 42 | 43 | return titles 44 | 45 | def _check_titles(self, filename, expect, actual): 46 | missing_sections = [x for x in expect.keys() if x not in actual.keys()] 47 | extra_sections = [x for x in actual.keys() if x not in expect.keys()] 48 | 49 | msgs = [] 50 | if len(missing_sections) > 0: 51 | msgs.append("Missing sections: %s" % missing_sections) 52 | if len(extra_sections) > 0: 53 | msgs.append("Extra sections: %s" % extra_sections) 54 | 55 | for section in expect.keys(): 56 | missing_subsections = [x for x in expect[section] 57 | if x not in actual.get(section, {})] 58 | # extra subsections are allowed 59 | if len(missing_subsections) > 0: 60 | msgs.append("Section '%s' is missing subsections: %s" 61 | % (section, missing_subsections)) 62 | 63 | if len(msgs) > 0: 64 | self.fail( 65 | "While checking '%s':\n %s" % (filename, "\n ".join(msgs)) 66 | ) 67 | 68 | def _check_lines_wrapping(self, filename, raw): 69 | code_block = False 70 | 71 | for i, line in enumerate(raw.split("\n")): 72 | # Allow code block lines to be longer than 79. 73 | if code_block: 74 | if not line or line.startswith(" "): 75 | continue 76 | else: 77 | code_block = False 78 | if "::" in line: 79 | code_block = True 80 | if "http://" in line or "https://" in line: 81 | continue 82 | # Allow lines which do not contain any whitespace 83 | if re.match("\s*[^\s]+$", line): 84 | continue 85 | 86 | self.assertTrue( 87 | len(line) < 80, 88 | msg="%s:%d: Line limited to a maximum of 79 characters." 
% 89 | (filename, i + 1) 90 | ) 91 | 92 | def _check_no_cr(self, filename, raw): 93 | matches = re.findall('\r', raw) 94 | 95 | self.assertEqual( 96 | 0, 97 | len(matches), 98 | "Found %s literal carriage returns in file %s" % 99 | (len(matches), filename) 100 | ) 101 | 102 | def _check_trailing_spaces(self, filename, raw): 103 | for i, line in enumerate(raw.split("\n")): 104 | trailing_spaces = re.findall(" +$", line) 105 | 106 | self.assertEqual( 107 | 0, 108 | len(trailing_spaces), 109 | "Found trailing spaces on line %s of %s" % (i + 1, filename) 110 | ) 111 | 112 | def test_template(self): 113 | releases = [x.split('/')[1] for x in glob.glob('specs/*')] 114 | 115 | self.assertTrue(len(releases), "Not able to find spec directories") 116 | 117 | with open("specs/template.rst") as f: 118 | template = f.read() 119 | spec = docutils.core.publish_doctree(template) 120 | template_titles = self._get_titles(spec) 121 | 122 | for release in releases: 123 | files = glob.glob("specs/%s/*/*" % release) 124 | 125 | for filename in files: 126 | self.assertTrue( 127 | filename.endswith(".rst"), 128 | "spec %s must use 'rst' extension." % filename 129 | ) 130 | 131 | with open(filename) as f: 132 | data = f.read() 133 | 134 | spec = docutils.core.publish_doctree(data) 135 | titles = self._get_titles(spec) 136 | 137 | self._check_titles(filename, template_titles, titles) 138 | self._check_lines_wrapping(filename, data) 139 | self._check_no_cr(filename, data) 140 | self._check_trailing_spaces(filename, data) 141 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | minversion = 3.18.0 3 | envlist = pep8,docs 4 | skipsdist = True 5 | ignore_basepython_conflict = True 6 | 7 | [testenv] 8 | basepython = python3 9 | usedevelop = True 10 | setenv = 11 | VIRTUAL_ENV={envdir} 12 | deps = 13 | -r{toxinidir}/requirements.txt 14 | -r{toxinidir}/test-requirements.txt 15 | allowlist_externals = find 16 | commands = 17 | find . -type f -name "*.pyc" -delete 18 | stestr run --slowest {posargs} 19 | 20 | [testenv:venv] 21 | commands = {posargs} 22 | 23 | [testenv:docs] 24 | commands = 25 | find . -type f -name "*.pyc" -delete 26 | sphinx-build -W -b html doc/source doc/build/html 27 | --------------------------------------------------------------------------------