├── .github
└── PULL_REQUEST_TEMPLATE.md
├── .gitignore
├── .travis.yml
├── CHANGELOG.rst
├── CODE_OF_CONDUCT.rst
├── CONFIG_FORMAT.rst
├── CONTRIBUTING.rst
├── LICENSE
├── MANIFEST.in
├── NOTICE
├── README.rst
├── RESOURCES.rst
├── VERSIONING.rst
├── appveyor.yml
├── doc
├── code_of_conduct.rst
├── conf.py
├── config_format.rst
├── contributing.rst
├── index.rst
├── lib
│ ├── api
│ │ └── index.rst
│ └── internal.rst
├── requirements.txt
├── resources.rst
├── spelling_wordlist.txt
└── versioning.rst
├── examples
├── __init__.py
├── src
│ ├── __init__.py
│ └── pylintrc
└── test
│ ├── __init__.py
│ ├── pylintrc
│ └── test_make_tests.py
├── park.cfg
├── requirements.txt
├── setup.cfg
├── setup.py
├── src
├── pipeformer
│ ├── __init__.py
│ ├── deploy.py
│ ├── identifiers.py
│ ├── input_handling.py
│ └── internal
│ │ ├── __init__.py
│ │ ├── arg_parsing.py
│ │ ├── logging_utils.py
│ │ ├── resolve.py
│ │ ├── structures.py
│ │ ├── template_builder.py
│ │ ├── templates
│ │ ├── __init__.py
│ │ ├── codebuild.py
│ │ ├── codepipeline.py
│ │ ├── core.py
│ │ ├── iam.py
│ │ └── inputs.py
│ │ └── util.py
└── pylintrc
├── test
├── __init__.py
├── functional
│ ├── __init__.py
│ ├── functional_test_utils.py
│ └── internal
│ │ ├── __init__.py
│ │ ├── templates
│ │ ├── __init__.py
│ │ ├── test_codebuild.py
│ │ ├── test_codepipeline.py
│ │ ├── test_core.py
│ │ ├── test_iam.py
│ │ └── test_inputs.py
│ │ ├── test_structures.py
│ │ └── test_template_builder.py
├── pylintrc
├── requirements.txt
├── source-build-check.sh
├── unit
│ ├── __init__.py
│ └── internal
│ │ ├── __init__.py
│ │ ├── test_resolve.py
│ │ └── test_util.py
└── vectors
│ ├── README.rst
│ └── chalice
│ ├── codebuild-build.json
│ ├── codepipeline.json
│ ├── config.yaml
│ ├── config_inputs.json
│ ├── core.json
│ ├── iam.json
│ └── inputs.json
└── tox.ini
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | *Issue #, if available:*
2 |
3 | *Description of changes:*
4 |
5 |
6 | By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Vim swap files
2 | *~
3 | *.swp
4 | *.swo
5 |
6 | # OS Artifacts
7 | .DS_Store
8 |
9 | # Build Artifacts
10 | build
11 | dist
12 | docs/build
13 | generated/
14 |
15 | # Bytecode Artifacts
16 | *.pyc
17 | *.pyo
18 | .cache*
19 | __pycache__
20 | *.egg-info
21 |
22 | # Coverage.py
23 | .coverage*
24 |
25 | # MyPy
26 | .mypy_cache
27 |
28 | # PyEnv
29 | .python-version
30 |
31 | # PyTest
32 | .pytest_cache
33 |
34 | # PyCharm
35 | .idea/
36 | venv/
37 |
38 | # Tox
39 | .tox
40 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: true
2 | dist: xenial
3 |
4 | language: python
5 |
6 | cache:
7 | directories:
8 | - $HOME/.cache/pip
9 |
10 | matrix:
11 | include:
12 | # CPython 3.6
13 | - python: 3.6
14 | env: TOXENV=py36-local
15 | stage: Client Tests
16 | # we don't have any yet
17 | #- python: 3.6
18 | # env: TOXENV=py36-integ
19 | # stage: Client Tests
20 | # we don't have any yet
21 | #- python: 3.6
22 | # env: TOXENV=py36-examples
23 | # stage: Client Tests
24 | # CPython 3.7
25 | - python: 3.7
26 | env: TOXENV=py37-local
27 | stage: Client Tests
28 | # we don't have any yet
29 | #- python: 3.7
30 | # env: TOXENV=py37-integ
31 | # stage: Client Tests
32 | # we don't have any yet
33 | #- python: 3.7
34 | # env: TOXENV=py37-examples
35 | # stage: Client Tests
36 | # Security
37 | - python: 3.7
38 | env: TOXENV=bandit
39 | stage: Static Checks
40 | # Linting
41 | # disable for now
42 | #- python: 3.7
43 | # env: TOXENV=lint
44 | # stage: Static Checks
45 | # MyPy
46 | # disable for now
47 | #- python: 3.7
48 | # env: TOXENV=mypy-py3
49 | # stage: Static Checks
50 |
51 | install: pip install tox
52 | script: tox -- -vv
53 |
--------------------------------------------------------------------------------
/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | *********
2 | Changelog
3 | *********
4 |
5 | 0.0.1 -- 2019-02-xx
6 | ===================
7 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.rst:
--------------------------------------------------------------------------------
1 | ***************
2 | Code of Conduct
3 | ***************
4 |
5 | This project has adopted the `Amazon Open Source Code of Conduct`_.
6 | For more information see the `Code of Conduct FAQ`_ or contact
7 | opensource-codeofconduct@amazon.com with any additional questions or comments.
8 |
9 | .. _Amazon Open Source Code of Conduct: https://aws.github.io/code-of-conduct
10 | .. _Code of Conduct FAQ: https://aws.github.io/code-of-conduct-faq
11 |
--------------------------------------------------------------------------------
/CONFIG_FORMAT.rst:
--------------------------------------------------------------------------------
1 | ==================
2 | Config File Format
3 | ==================
4 |
5 | Pipeformer is configured using a YAML file with the below contents.
6 |
7 | Structure
8 | ---------
9 |
10 | * **name** : string (required) : Project name. Used in names and descriptions to identify resources as belonging to this project.
11 | * **description** : string (required) : Project description. *Not currently used.*
12 | * **inputs** : map : Inputs that will be required at stack creation or update.
13 |
14 | * **** : map : Describes an input value.
15 |
16 | * **description** : string (required) : Used for the input prompt and any resource descriptions as appropriate.
17 | * **secret** : boolean (required) : Determines if this input is treated as secret.
18 |
19 | * **roles** : map : Additional permissions to apply to generated IAM Roles. *Not currently used.*
20 |
21 | * **pipeline** : map : Definition of desired pipeline.
22 | Each member defines a stage in the pipeline and is a list of action definitions.
23 |
24 | * : list : Description of pipeline stage actions.
25 |
26 | * **provider** : string (required) : `CodePipeline action provider name`_.
27 | * **run_order** : int : The order in which CodePipeline runs this action (default: 1).
28 | * **inputs** : list of strings : List of CodePipeline input artifact names.
29 | * **outputs** : list of strings : List of CodePipeline output artifact names.
 30 | * **configuration** : map : Additional configuration values to provide in the CodePipeline Action definition.
31 | * **image** : string (required for CodeBuild actions): Docker image to use for CodeBuild action.
32 | * **environment_type** : string : CodeBuild `environment-type`_ value.
33 | *If not provided, we assume Linux unless the image name contains "windows" in any casing.*
34 | * **buildspec** : string : Path to buildspec file in source.
35 | * **compute_type** : string : CodeBuild `compute-type`_ name. (default: BUILD_GENERAL1_SMALL)
36 | * **env** : string-string map : Custom environment variable values to set in action.
37 |
38 | Input Value Resolution
39 | ----------------------
40 |
41 | Input values can be referenced using strings of the form: ``"{INPUT:VariableName}"``
42 |
43 | These values are referenced from their storage locations using `CloudFormation dynamic references`_.
44 |
45 | Example
46 | -------
47 |
48 | .. code-block:: yaml
49 |
50 | name: example project
51 | description: This is an example project.
52 |
53 | inputs:
54 | GitHubToken:
55 | description: GitHub user access token that CodePipeline will use to authenticate to GitHub.
56 | secret: true
57 | GitHubOwner:
58 | description: GitHub user that owns target repository.
59 | secret: false
60 |
61 | pipeline:
62 | source:
63 | - provider: GitHub
64 | outputs:
65 | - SourceOutput
66 | configuration:
67 | Owner: "{INPUT:GitHubOwner}"
68 | Repo: example
69 | Branch: master
70 | OAuthToken: "{INPUT:GitHubToken}"
71 |
72 | build:
73 | - provider: CodeBuild
74 | image: aws/codebuild/python:3.6.5
75 | buildspec: .chalice/buildspec.yaml
76 | env:
77 | key1: value2
78 | key3: value4
79 | inputs:
80 | - SourceOutput
81 | outputs:
82 | - CompiledCfnTemplate
83 |
84 |
85 | .. _CodePipeline action provider name: https://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#actions-valid-providers
86 | .. _environment-type: https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ProjectEnvironment.html#CodeBuild-Type-ProjectEnvironment-type
87 | .. _compute-type: https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ProjectEnvironment.html#CodeBuild-Type-ProjectEnvironment-computeType
88 | .. _CloudFormation dynamic references: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html
89 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | ***********************
2 | Contributing Guidelines
3 | ***********************
4 |
5 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction,
6 | or additional documentation, we greatly value feedback and contributions from our community.
7 |
8 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
9 | information to effectively respond to your bug report or contribution.
10 |
11 |
12 | Reporting Bugs/Feature Requests
13 | ===============================
14 |
15 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
16 |
17 | When filing an issue, please check `existing open`_, or `recently closed`_, issues to make sure somebody else hasn't already
18 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
19 |
20 | * A reproducible test case or series of steps
21 | * The version of our code being used
22 | * Any modifications you've made relevant to the bug
23 | * Anything unusual about your environment or deployment
24 |
25 |
26 | Contributing via Pull Requests
27 | ==============================
28 |
29 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
30 |
31 | 1. You are working against the latest source on the *master* branch.
32 | 2. You check existing open, and recently merged,
33 | pull requests to make sure someone else hasn't addressed the problem already.
34 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
35 |
36 | To send us a pull request, please:
37 |
38 | 1. Fork the repository.
39 | 2. Modify the source; please focus on the specific change you are contributing.
40 | If you also reformat all the code, it will be hard for us to focus on your change.
41 | 3. Ensure local tests pass.
42 | 4. Run ``tox -re autoformat`` to apply our formatting rules.
43 | 5. Run ``tox -re lint`` to verify that the change meets all of our formatting rules that we check in CI.
44 | 6. Commit to your fork using clear commit messages.
45 | 7. Send us a pull request, answering any default questions in the pull request interface.
46 | 8. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
47 |
 48 | GitHub provides additional documentation on `forking a repository`_ and `creating a pull request`_.
49 |
50 |
51 | Finding contributions to work on
52 | ================================
53 |
54 | Looking at the existing issues is a great way to find something to contribute on.
55 | As our projects, by default, use the default GitHub issue labels
56 | (enhancement/bug/duplicate/help wanted/invalid/question/wontfix),
57 | looking at any `help wanted` issues is a great place to start.
58 |
59 |
60 | Code of Conduct
61 | ===============
62 |
63 | This project has adopted the `Amazon Open Source Code of Conduct`_.
64 | For more information see the `Code of Conduct FAQ`_ or contact
65 | opensource-codeofconduct@amazon.com with any additional questions or comments.
66 |
67 |
68 | Security issue notifications
69 | ============================
70 |
71 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our
 72 | `vulnerability reporting page`_. Please do **not** create a public GitHub issue.
73 |
74 |
75 | Licensing
76 | =========
77 |
78 | See the `LICENSE`_ file for our project's licensing. We will ask you to confirm the licensing of your contribution.
79 |
80 | We may ask you to sign a `Contributor License Agreement (CLA)`_ for larger changes.
81 |
82 | .. _existing open: https://github.com/awslabs/pipeformer/issues
83 | .. _recently closed: https://github.com/awslabs/pipeformer/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20
84 | .. _help wanted: https://github.com/awslabs/pipeformer/labels/help%20wanted
85 | .. _LICENSE: https://github.com/awslabs/pipeformer/blob/master/LICENSE
86 | .. _forking a repository: https://help.github.com/en/articles/fork-a-repo
87 | .. _creating a pull request: https://help.github.com/en/articles/creating-a-pull-request
88 | .. _Amazon Open Source Code of Conduct: https://aws.github.io/code-of-conduct
89 | .. _Code of Conduct FAQ: https://aws.github.io/code-of-conduct-faq
90 | .. _vulnerability reporting page: https://aws.amazon.com/security/vulnerability-reporting/
91 | .. _Contributor License Agreement (CLA): https://en.wikipedia.org/wiki/Contributor_License_Agreement
92 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.rst
2 | include CHANGELOG.rst
3 | include CONTRIBUTING.rst
4 | include CODE_OF_CONDUCT.rst
5 | include LICENSE
6 | include requirements.txt
7 |
8 | recursive-include doc *
9 | recursive-exclude doc .DS_Store *.pyc
10 | prune doc/build
11 | prune doc/lib/generated
12 |
13 | recursive-include test *
14 | recursive-exclude test .DS_Store *.pyc
15 |
16 | recursive-include examples *
17 | recursive-exclude examples .DS_Store *.pyc
18 |
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | Pipeformer
2 | Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | ##########
2 | pipeformer
3 | ##########
4 |
5 | .. image:: https://img.shields.io/pypi/v/pipeformer.svg
6 | :target: https://pypi.python.org/pypi/pipeformer
7 | :alt: Latest Version
8 |
9 | .. image:: https://img.shields.io/pypi/pyversions/pipeformer.svg
10 | :target: https://pypi.python.org/pypi/pipeformer
11 | :alt: Supported Python Versions
12 |
13 | .. image:: https://img.shields.io/badge/code_style-black-000000.svg
14 | :target: https://github.com/ambv/black
15 | :alt: Code style: black
16 |
17 | .. image:: https://readthedocs.org/projects/pipeformer/badge/
18 | :target: https://pipeformer.readthedocs.io/en/stable/
19 | :alt: Documentation Status
20 |
21 | .. image:: https://travis-ci.org/awslabs/pipeformer.svg?branch=master
22 | :target: https://travis-ci.org/awslabs/pipeformer
23 |
24 | .. image:: https://ci.appveyor.com/api/projects/status/REPLACEME/branch/master?svg=true
25 | :target: https://ci.appveyor.com/project/REPLACEME
26 |
27 | Tool for generating CodePipeline pipelines and related resources from a simple configuration.
28 |
29 |
30 | .. important::
31 |
32 | Pipeformer is no longer being developed.
33 | See `Shuttering pipeformer`_ for more information.
34 |
35 |
36 | .. _Shuttering pipeformer: https://github.com/awslabs/pipeformer/issues/52
37 |
38 | .. important::
39 |
40 | Pipeformer is currently under development and is not yet available for use.
41 | Watch this space for more information as the project progresses and if you're interested feel free to join in!
42 |
43 | `Security issue notifications`_
44 |
45 | ********
46 | Abstract
47 | ********
48 |
49 | Services like CodePipeline and CodeBuild are great building blocks,
50 | but can be complicated to set up and use in a consistent way.
51 |
52 | CloudFormation makes it possible to create and update resources in a consistent and repeatable way,
53 | but can be complicated and verbose to define.
54 |
 55 | The goal of Pipeformer is to combine these properties by providing a very simple, but extensible,
56 | way to use these services with your project.
57 |
58 | Tenets
59 | ======
60 |
61 | * Simple
62 |
63 | * For the majority of projects, the majority of resource configurations will be identical.
64 | Only require the user to set the values that are actually important to them.
65 | * The user should not need to know about resources that they will not directly touch.
66 |
67 | * Flexible
68 |
69 | * While most users should not need to, users must be able to override most settings if they do need to.
70 |
71 | **********
72 | How to Use
73 | **********
74 |
75 | 1. Define your configuration file.
76 | 2. Deploy with pipeformer.
77 |
78 | User Experience
79 | ===============
80 |
81 | The primary operating mode for pipeformer is to take your configuration,
82 | use it to generate CloudFormation templates that describe the needed resources,
83 | and then deploy those templates.
84 |
85 | The user interface for running ``pipeformer`` is simply to point it at your configuration file
86 | and provide any input values when prompted.
87 |
88 |
 89 | .. code:: bash
90 |
91 | $ pipeformer --config my-config-file.yaml -vv
92 |
93 |
94 | When you run the ``pipeformer`` command line tool, it will:
95 |
96 | #. Parse your config file and determine what inputs are needed.
97 | #. Construct the CloudFormation templates needed to satisfy what you defined in your config file.
98 | #. Prompt you for any needed input values.
99 | #. Deploy the core stack (creates the project bucket and KMS CMK) and all nested stacks.
100 |
101 | #. Once the project bucket and CMK exist, upload all generated templates to the project bucket.
102 | #. Report back to the core stack CloudFormation waiters that the templates are uploaded.
103 | This causes CloudFormation to continue deploying the nested stacks.
104 | #. Once the inputs stack creation is complete:
105 | take the input values that you provided and update the appropriate values in the inputs stack.
106 | #. Report back to the core stack CloudFormation waiter that the input values have been set.
107 | This causes CloudFormation to continue deploying the rest of the nested stacks.
108 |
109 | Configuration
110 | =============
111 |
112 | `Configuration File Format <CONFIG_FORMAT.rst>`_
113 |
114 | What Does it Do?
115 | ================
116 |
117 | `Resources Created <RESOURCES.rst>`_
118 |
119 | ***************
120 | Getting Started
121 | ***************
122 |
123 | Required Prerequisites
124 | ======================
125 |
126 | * Supported Python versions
127 |
128 | * 3.6+
129 |
130 | Installation
131 | ============
132 |
133 | .. code:: bash
134 |
135 | $ pip install pipeformer
136 |
137 | ***********
138 | Development
139 | ***********
140 |
141 | Prerequisites
142 | =============
143 |
144 | * Required
145 |
146 | * Python 3.6+
147 | * `tox`_ : We use tox to drive all of our testing and package management behavior.
148 | Any tests that you want to run should be run using tox.
149 |
150 | * Optional
151 |
152 | * `pyenv`_ : If you want to test against multiple versions of Python and are on Linux or MacOS,
153 | we recommend using pyenv to manage your Python runtimes.
154 | * `tox-pyenv`_ : Plugin for tox that enables it to use pyenv runtimes.
155 | * `detox`_ : Parallel plugin for tox. Useful for running a lot of test environments quickly.
156 |
157 | Setting up pyenv
158 | ----------------
159 |
160 | If you are using pyenv, make sure that you have set up all desired runtimes and configured the environment
161 | before attempting to run any tests.
162 |
163 | #. Install all desired runtimes.
164 |
165 | * ex: ``pyenv install 3.7.0``
166 | * **NOTE:** You can only install one runtime at a time with the ``pyenv install`` command.
167 |
168 | #. In the root of the checked out repository for this package, set the runtimes that pyenv should use.
169 |
170 | * ex: ``pyenv local 3.7.0 3.6.4``
171 | * **NOTE:** This creates the ``.python-version`` file that pyenv will use. Pyenv treats the first
172 | version in that file as the default Python version.
173 |
174 |
175 | Running tests
176 | =============
177 |
178 | There are two criteria to consider when running our tests:
179 | what version of Python do you want to use and what type of tests do you want to run?
180 |
181 | For a full listing of the available types of tests available,
182 | see the ``[testenv]commands`` section of the ``tox.ini`` file.
183 |
184 | All tests should be run using tox.
185 | To do this, identify the test environment that you want tox to run using the ``-e ENV_NAME`` flag.
186 | The standard test environments are named as a combination of the Python version
187 | and the test type in the form ``VERSION-TYPE``.
188 | For example, to run the ``local`` tests against CPython 3.7:
189 |
190 | .. code-block:: bash
191 |
192 | tox -e py37-local
193 |
194 | If you want to provide custom parameters to pytest to manually identify what tests you want to run,
195 | use the ``manual`` test type. Any arguments you want to pass to pytest must follow the ``--`` argument.
196 | Anything before that argument is passed to tox. Everything after that argument is passed to pytest.
197 |
198 | .. code-block:: bash
199 |
200 | tox -e py37-manual -- test/unit/test_example_file.py
201 |
202 | Before submitting a pull request
203 | ================================
204 |
205 | Before submitting a pull request, please run the ``lint`` tox environment.
206 | This will ensure that your submission meets our code formatting requirements
207 | and will pass our continuous integration code formatting tests.
208 |
209 |
210 | .. _tox: http://tox.readthedocs.io/
211 | .. _detox: https://pypi.org/project/detox/
212 | .. _tox-pyenv: https://pypi.org/project/tox-pyenv/
213 | .. _pyenv: https://github.com/pyenv/pyenv
214 | .. _Security issue notifications: https://github.com/aws/pipeformer/tree/master/CONTRIBUTING.md#security-issue-notifications
215 |
--------------------------------------------------------------------------------
/RESOURCES.rst:
--------------------------------------------------------------------------------
1 | =================
2 | Resources Created
3 | =================
4 |
5 | Pipeformer creates all resources necessary to build the pipeline that you describe in your configuration.
6 |
7 | Stand-Alone Mode
8 | ----------------
9 |
10 | In stand-alone mode, pipeformer assumes that all necessary resources need to be created.
11 |
12 | In this operating mode,
13 | all core resources are defined in a central CloudFormation stack that also contains all other CloudFormation stacks.
14 |
15 | Resources
16 | ---------
17 |
18 | Core Resources
19 | ^^^^^^^^^^^^^^
20 |
21 | The core resources are resources that are needed by all other components.
22 | These include:
23 |
24 | * An application resources S3 bucket that is made available to the application for use.
25 | * Artifact S3 bucket for use by Pipeformer and resources.
26 | * IAM Roles for use within Pipeformer for:
27 |
28 | * CloudFormation
29 | * CodePipeline pipelines
30 | * CodeBuild projects
31 |
32 | * A KMS CMK that is used to protect all resources and data managed by pipeformer.
33 |
34 | Inputs
35 | ^^^^^^
36 |
37 | Any input values defined in your configuration need to be stored somewhere.
38 | Pipeformer stores them either in Secrets Manager if the input is marked as secret,
39 | or SSM Parameter Store if it is not.
40 |
41 | All input resources are managed in a separate CloudFormation stack,
42 | with a separate Secrets Manager Secret or Parameter Store Parameter resource for each input.
43 | The values stored in these resources are managed outside of CloudFormation.
44 |
45 | CodeBuild
46 | ^^^^^^^^^
47 |
48 | Pipeline actions that use CodeBuild require a unique CodeBuild project for each action.
49 | Because of this, and to avoid CloudFormation per-stack resource limits,
50 | Pipeformer creates a separate CloudFormation stack for each pipeline stage that contains at least one CodeBuild action.
51 | These stacks contain only CodeBuild resources.
52 |
53 | CodePipeline
54 | ^^^^^^^^^^^^
55 |
56 | Finally, a CloudFormation stack is created that contains the CodePipeline resource itself.
57 | All CodeBuild stacks are created as nested stacks of this stack.
58 |
--------------------------------------------------------------------------------
/VERSIONING.rst:
--------------------------------------------------------------------------------
1 | *****************
2 | Versioning Policy
3 | *****************
4 |
5 | We use a three-part X.Y.Z (Major.Minor.Patch) versioning definition, as follows:
6 |
7 | * **X (Major)** version changes are significant and expected to break backwards compatibility.
8 | * **Y (Minor)** version changes are moderate changes. These include:
9 |
10 | * Significant non-breaking feature additions.
11 | * Any change to the version of a dependency.
12 | * Possible backwards-incompatible changes. These changes will be noted and explained in detail in the release notes.
13 |
14 | * **Z (Patch)** version changes are small changes. These changes will not break backwards compatibility.
15 |
16 |    * Z releases will also include warnings of upcoming breaking changes, whenever possible.
17 |
18 | What this means for you
19 | =======================
20 |
21 | We recommend running the most recent version. Here are our suggestions for managing updates:
22 |
23 | * X changes will require some effort to incorporate.
24 | * Y changes will not require significant effort to incorporate.
25 |
26 | * If you have good unit and integration tests, these changes are generally safe to pick up automatically.
27 |
28 | * Z changes will not require any changes to your code. Z changes are intended to be picked up automatically.
29 |
30 | * Good unit and integration tests are always recommended.
31 |
--------------------------------------------------------------------------------
/appveyor.yml:
--------------------------------------------------------------------------------
1 | # https://packaging.python.org/guides/supporting-windows-using-appveyor/
2 |
3 | environment:
4 |
5 | matrix:
6 |     # The only tests we perform on Windows are our actual code tests. All linting, static
7 |     # analysis, etc. are only run on Linux (via Travis CI).
8 | # Python 3.6
9 | - PYTHON: "C:\\Python36"
10 | TOXENV: "py36-local"
11 | - PYTHON: "C:\\Python36-x64"
12 | TOXENV: "py36-local"
13 | - PYTHON: "C:\\Python36"
14 | TOXENV: "py36-integ"
15 | - PYTHON: "C:\\Python36-x64"
16 | TOXENV: "py36-integ"
17 | - PYTHON: "C:\\Python36"
18 | TOXENV: "py36-examples"
19 | - PYTHON: "C:\\Python36-x64"
20 | TOXENV: "py36-examples"
21 | # Python 3.7
22 | - PYTHON: "C:\\Python37"
23 | TOXENV: "py37-local"
24 | - PYTHON: "C:\\Python37-x64"
25 | TOXENV: "py37-local"
26 | - PYTHON: "C:\\Python37"
27 | TOXENV: "py37-integ"
28 | - PYTHON: "C:\\Python37-x64"
29 | TOXENV: "py37-integ"
30 | - PYTHON: "C:\\Python37"
31 | TOXENV: "py37-examples"
32 | - PYTHON: "C:\\Python37-x64"
33 | TOXENV: "py37-examples"
34 |
35 | install:
36 | # Prepend newly installed Python to the PATH of this build
37 | - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
38 | # Check the Python version to verify the correct version was installed
39 | - "python --version"
40 | - "python -m pip install --upgrade setuptools wheel tox"
41 |
42 | build: off
43 |
44 | test_script:
45 | - "tox -- -vv"
46 |
--------------------------------------------------------------------------------
/doc/code_of_conduct.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CODE_OF_CONDUCT.rst
2 |
--------------------------------------------------------------------------------
/doc/conf.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=invalid-name
2 | """Sphinx configuration."""
3 | import io
4 | import os
5 | import re
6 | from datetime import datetime
7 |
8 | VERSION_RE = re.compile(r"""__version__ = ['"]([0-9.]+)['"]""")
9 | HERE = os.path.abspath(os.path.dirname(__file__))
10 |
11 |
def read(*args):
    """Read and return the complete contents of the requested file.

    :param args: Path components, joined relative to this file's directory
    :returns: Decoded file contents
    :rtype: str
    """
    # Use a context manager so the file handle is closed promptly instead of
    # leaking until garbage collection. Built-in open() is the Python 3 idiom
    # (io.open is an alias for it).
    with open(os.path.join(HERE, *args), encoding="utf-8") as source:
        return source.read()
15 |
16 |
def get_release():
    """Read the release (full three-part version number) from this module."""
    identifiers_source = read("..", "src", "pipeformer", "identifiers.py")
    match = VERSION_RE.search(identifiers_source)
    return match.group(1)
21 |
22 |
def get_version():
    """Read the version (MAJOR.MINOR) from this module."""
    full_release = get_release()
    parts = full_release.split(".")
    # Anything other than a three-part version is returned unchanged.
    if len(parts) != 3:
        return full_release
    return ".".join(parts[:2])
30 |
31 |
# Project identity: version is MAJOR.MINOR, release is the full three-part number.
project = u"pipeformer"
version = get_version()
release = get_release()

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.autosummary",
    "sphinx.ext.napoleon",
    "sphinx.ext.viewcode",
    "sphinx_autodoc_typehints",
    "sphinxcontrib.spelling",
]
# Only document special members (e.g. __init__) that actually have docstrings.
napoleon_include_special_with_doc = False

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

source_suffix = ".rst"  # The suffix of source filenames.
master_doc = "index"  # The master toctree document.

copyright = u"%s, Amazon" % datetime.now().year  # pylint: disable=redefined-builtin

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ["_build"]

pygments_style = "sphinx"

# Concatenate the class docstring and the __init__ docstring when documenting a class.
autoclass_content = "both"
autodoc_default_flags = ["show-inheritance", "members"]
# Document members in the order they appear in the source file.
autodoc_member_order = "bysource"

html_theme = "sphinx_rtd_theme"
html_static_path = ["_static"]
htmlhelp_basename = "%sdoc" % project

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {"python": ("http://docs.python.org/", None)}

# autosummary: generate stub pages for autosummary directives automatically.
autosummary_generate = True

# Spellchecker (sphinxcontrib-spelling) configuration.
spelling_word_list_filename = "spelling_wordlist.txt"
spelling_lang = "en_US"
spelling_ignore_python_builtins = True
spelling_ignore_importable_modules = True
85 |
--------------------------------------------------------------------------------
/doc/config_format.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONFIG_FORMAT.rst
2 |
--------------------------------------------------------------------------------
/doc/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |
3 | .. toctree::
4 | :maxdepth: 2
5 | :caption: Project Operations
6 |
7 | code_of_conduct
8 | contributing
9 | versioning
10 |
11 | .. toctree::
12 | :maxdepth: 2
13 | :caption: Project Details
14 |
15 | config_format
16 | resources
17 |
18 | ***
19 | API
20 | ***
21 |
22 | .. toctree::
23 | :maxdepth: 2
24 | :caption: Public API
25 |
26 | lib/api/index
27 |
28 | .. toctree::
29 | :maxdepth: 1
30 | :caption: Informational Only
31 |
32 | lib/internal
33 |
34 | .. include:: ../CHANGELOG.rst
35 |
--------------------------------------------------------------------------------
/doc/lib/api/index.rst:
--------------------------------------------------------------------------------
1 | Public API
2 | ==========
3 |
4 | .. autosummary::
5 | :toctree: generated
6 |
7 | pipeformer.deploy
8 | pipeformer.identifiers
9 | pipeformer.input_handling
10 |
--------------------------------------------------------------------------------
/doc/lib/internal.rst:
--------------------------------------------------------------------------------
1 | Internal Resources
2 | ==================
3 |
4 | .. warning::
5 |
6 | These are provided for informational purposes only. No guarantee is provided on the modules
7 | and APIs described here remaining consistent. Directly reference at your own risk.
8 |
9 | .. autosummary::
10 | :toctree: generated
11 |
12 |
13 | pipeformer.internal.arg_parsing
14 | pipeformer.internal.logging_utils
15 | pipeformer.internal.resolve
16 | pipeformer.internal.structures
17 | pipeformer.internal.template_builder
18 | pipeformer.internal.util
19 | pipeformer.internal.templates
20 | pipeformer.internal.templates.codebuild
21 | pipeformer.internal.templates.codepipeline
22 | pipeformer.internal.templates.core
23 | pipeformer.internal.templates.iam
24 | pipeformer.internal.templates.inputs
25 |
--------------------------------------------------------------------------------
/doc/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx>=1.3.0
2 | sphinx_rtd_theme
3 | sphinx-autodoc-typehints
4 | sphinxcontrib-spelling
5 |
--------------------------------------------------------------------------------
/doc/resources.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../RESOURCES.rst
2 |
--------------------------------------------------------------------------------
/doc/spelling_wordlist.txt:
--------------------------------------------------------------------------------
1 | # Python/dev-tools
2 | tox
3 | pyenv
4 | cpython
5 | CPython
6 |
7 | # Project-specific
8 | pipeformer
9 |
10 | # AWS
11 | arn
12 | aws
13 | buildspec
14 | cmk
15 | codebuild
16 | codepipeline
17 | iam
18 |
19 | # GitHub tags
20 | wontfix
21 |
22 | # General dev words
23 | arg
24 | args
25 | config
26 | changelog
27 | github
28 | plugin
29 | pre
30 | runtimes
31 | util
32 | utils
33 | versioning
34 |
--------------------------------------------------------------------------------
/doc/versioning.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../VERSIONING.rst
2 |
--------------------------------------------------------------------------------
/examples/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Stub to allow relative imports of examples from tests."""
14 |
--------------------------------------------------------------------------------
/examples/src/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Stub module indicator to make linter configuration simpler."""
14 |
--------------------------------------------------------------------------------
/examples/src/pylintrc:
--------------------------------------------------------------------------------
1 | [BASIC]
2 | # Allow function names up to 50 characters
3 | function-rgx = [a-z_][a-z0-9_]{2,50}$
4 |
5 | [DESIGN]
6 | max-args = 10
7 |
8 | [FORMAT]
9 | max-line-length = 120
10 |
11 | [REPORTS]
12 | msg-template = {path}:{line}: [{msg_id}({symbol}), {obj}] {msg}
13 |
--------------------------------------------------------------------------------
/examples/test/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Stub module indicator to make linter configuration simpler."""
14 |
--------------------------------------------------------------------------------
/examples/test/pylintrc:
--------------------------------------------------------------------------------
1 | [MESSAGES CONTROL]
2 | # Disabling messages that we either don't care about for tests or are necessary to break for tests.
3 | disable =
4 |     invalid-name, # naming in tests often needs to violate many common rules
5 | missing-docstring, # we don't write docstrings for tests
6 | wrong-import-position, # pylint does not identify unknown modules as non-standard-library
7 | import-error, # because the examples are not actually in a module, sys.path is patched to find tests and test utils
8 | duplicate-code, # unit tests for similar things tend to be similar
9 | redefined-outer-name, # raises false positives with fixtures
10 |
11 | [DESIGN]
12 | max-args = 10
13 |
14 | [FORMAT]
15 | max-line-length = 120
16 |
17 | [REPORTS]
18 | msg-template = {path}:{line}: [{msg_id}({symbol}), {obj}] {msg}
19 |
--------------------------------------------------------------------------------
/examples/test/test_make_tests.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Placeholder module to remind you to write tests."""
14 | import pytest
15 |
16 |
@pytest.mark.xfail(strict=True)
@pytest.mark.examples
def test_write_tests():
    # Intentionally failing placeholder: with strict xfail this test "passes"
    # by failing, and the suite errors if it ever unexpectedly succeeds --
    # a reminder to replace it with real example tests.
    assert False
21 |
--------------------------------------------------------------------------------
/park.cfg:
--------------------------------------------------------------------------------
1 | [DEFAULT]
2 | version: 0.0.1
3 | author: Amazon Web Services
4 | author_email: aws-cryptools@amazon.com
5 | url: https://pipeformer.readthedocs.io/en/stable/
6 | description: Did you mean to install pipeformer?
7 | long_description:
8 | This package has been parked by {author} to protect you against packages
9 | adopting names that might be common mistakes when looking for ours. You probably
10 | wanted to install pipeformer. For more information, see {url}.
11 | description_keys:
12 | author
13 | url
14 |
15 | #[names]
16 | #REPLACE_WITH_PACKAGE_NAME_TO_PARK:
17 | #REPLACE_WITH_PACKAGE_NAME_TO_PARK:
18 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | attrs >= 19.1.0
2 | boto3
3 | oyaml
4 | requests
5 | troposphere[policy] >= 2.4.0
6 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [wheel]
2 | universal = 1
3 |
4 | [metadata]
5 | license_file = LICENSE
6 |
7 | [coverage:run]
8 | branch = True
9 |
10 | [coverage:report]
11 | show_missing = True
12 |
13 | [tool:pytest]
14 | log_level = DEBUG
15 | markers =
16 | local: superset of unit and functional (does not require network access)
17 | unit: mark test as a unit test (does not require network access)
18 | functional: mark test as a functional test (does not require network access)
19 | integ: mark a test as an integration test (requires network access)
20 | accept: mark a test as an acceptance test (requires network access)
21 | examples: mark a test as an examples test (requires network access)
22 |
23 | # Flake8 Configuration
24 | [flake8]
25 | max_complexity = 10
26 | max_line_length = 120
27 | application_import_names = pipeformer
28 | builtins = raw_input
29 | ignore =
30 | # E203 is not PEP8 compliant https://github.com/ambv/black#slices
31 | E203,
32 | # W503 is not PEP8 compliant https://github.com/ambv/black#line-breaks--binary-operators
33 | W503,
34 | # Ignoring D202 (no blank lines after function docstring) because mypy confuses flake8
35 | D202
36 |
37 | # Doc8 Configuration
38 | [doc8]
39 | max-line-length = 120
40 |
41 | [isort]
42 | line_length = 120
43 | # https://github.com/timothycrosley/isort#multi-line-output-modes
44 | multi_line_output = 3
45 | include_trailing_comma = True
46 | force_grid_wrap = 0
47 | combine_as_imports = True
48 | not_skip = __init__.py
49 | known_first_party = pipeformer
50 | known_third_party =attr,awacs,boto3,botocore,oyaml,pytest,requests,setuptools,troposphere
51 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """pipeformer."""
14 | import io
15 | import os
16 | import re
17 |
18 | from setuptools import find_packages, setup
19 |
20 | VERSION_RE = re.compile(r"""__version__ = ['"]([0-9.]+)['"]""")
21 | HERE = os.path.abspath(os.path.dirname(__file__))
22 |
23 |
def read(*args):
    """Read and return the complete contents of the requested file.

    :param args: Path components, joined relative to this file's directory
    :returns: Decoded file contents
    :rtype: str
    """
    # Use a context manager so the file handle is closed promptly instead of
    # leaking until garbage collection. Built-in open() is the Python 3 idiom
    # (io.open is an alias for it).
    with open(os.path.join(HERE, *args), encoding="utf-8") as source:
        return source.read()
27 |
28 |
def get_version():
    """Read the version from this module."""
    identifiers_source = read("src", "pipeformer", "identifiers.py")
    match = VERSION_RE.search(identifiers_source)
    return match.group(1)
33 |
34 |
def get_requirements():
    """Read the requirements file.

    :returns: Two lists: plain requirements, and dependency links
        (entries containing ``+``, e.g. VCS URLs)
    """
    stripped = (line.strip() for line in read("requirements.txt").splitlines())
    # Drop blank lines and comments before classifying.
    meaningful = [line for line in stripped if line and not line.startswith("#")]
    requirements = [line for line in meaningful if "+" not in line]
    dependencies = [line for line in meaningful if "+" in line]
    return requirements, dependencies
53 |
54 |
INSTALL_REQUIRES, DEPENDENCY_LINKS = get_requirements()

setup(
    name="pipeformer",
    version=get_version(),
    packages=find_packages("src"),
    package_dir={"": "src"},
    url="https://github.com/awslabs/pipeformer",
    author="Amazon Web Services",
    author_email="aws-cryptools@amazon.com",
    maintainer="Amazon Web Services",
    # Fixed typo: "CloudPipeline" -> "CodePipeline" (the AWS service this tool
    # synthesizes pipelines for; see RESOURCES.rst).
    description="CodePipeline synthesis tool.",
    long_description=read("README.rst"),
    # De-duplicated: "pipeformer" was previously listed twice.
    keywords="pipeformer aws",
    data_files=["README.rst", "CHANGELOG.rst", "LICENSE", "requirements.txt"],
    license="Apache 2.0",
    install_requires=INSTALL_REQUIRES,
    dependency_links=DEPENDENCY_LINKS,
    classifiers=[
        "Development Status :: 1 - Planning",
        "Intended Audience :: Developers",
        "Natural Language :: English",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: Implementation :: CPython",
        "Topic :: Security",
        "Topic :: Security :: Cryptography",
    ],
    entry_points={"console_scripts": ["pipeformer=pipeformer:cli"]},
)
88 |
--------------------------------------------------------------------------------
/src/pipeformer/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """pipeformer."""
14 | import uuid
15 | from typing import Iterator, Optional
16 |
17 | from .deploy import Deployer
18 | from .identifiers import __version__
19 | from .internal.arg_parsing import parse_args
20 | from .internal.logging_utils import setup_logger
21 | from .internal.structures import Config
22 |
23 | __all__ = ("__version__", "cli")
24 |
25 |
def cli(raw_args: Optional[Iterator[str]] = None):
    """CLI entry point. Processes arguments, sets up the key provider, and processes requested action.

    :param raw_args: Raw command-line arguments (optional; parser defaults apply if omitted)
    :returns: Execution return value intended for ``sys.exit()``
    """
    parsed = parse_args(raw_args)
    setup_logger(parsed.verbosity, parsed.quiet)

    # 1. parse config file
    project = Config.from_file(parsed.config)

    # TODO: Use a better prefix
    random_suffix = str(uuid.uuid4()).split("-")[-1]
    prefix = "pipeformer-" + random_suffix

    deployer = Deployer(project=project, stack_prefix=prefix)
    deployer.deploy_standalone()
44 |
--------------------------------------------------------------------------------
/src/pipeformer/deploy.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Tooling to deploy a generated set of templates."""
14 | import json
15 | import logging
16 | import uuid
17 | from functools import partial
18 | from typing import Dict, Optional
19 |
20 | import attr
21 | import boto3
22 | import boto3.session
23 | import botocore.session
24 | import requests
25 | from attr.validators import instance_of, optional
26 | from botocore.exceptions import ClientError
27 | from troposphere import Template, cloudformation, s3
28 |
29 | from pipeformer.identifiers import LOGGER_NAME, VALUE_SEPARATOR
30 | from pipeformer.input_handling import DefaultInputHandler, InputHandler
31 | from pipeformer.internal.structures import Config, ProjectTemplates
32 | from pipeformer.internal.template_builder import config_to_templates
33 | from pipeformer.internal.util import CloudFormationPhysicalResourceCache, resource_name
34 |
35 | _LOGGER = logging.getLogger(LOGGER_NAME)
36 | __all__ = ("Deployer",)
37 |
38 |
39 | @attr.s
40 | class Deployer:
41 | """Helper client to manage deploying a set of stacks.
42 |
43 | :param project: Project configuration to use
44 | :param stack_prefix: Stack prefix (optional: if not provided, the project name from the config will be used)
45 | :param botocore_session: Pre-configured botocore session (optional)
46 | :param input_handler: Pre-configured input handler to use (optional)
47 | """
48 |
49 | _project: Config = attr.ib(validator=instance_of(Config))
50 | _stack_prefix: Optional[str] = attr.ib(default=None, validator=optional(instance_of(str)))
51 | _botocore_session: botocore.session.Session = attr.ib(
52 | default=attr.Factory(botocore.session.Session), validator=instance_of(botocore.session.Session)
53 | )
54 | _input_handler: InputHandler = attr.ib(default=None, validator=optional(instance_of(InputHandler)))
55 | _actual_templates: Optional[ProjectTemplates] = None
56 | _template_urls: Optional[Dict[str, str]] = None
57 | _inputs_collected: Optional[bool] = None
58 | _cache: Optional[CloudFormationPhysicalResourceCache] = None
59 |
    def __attrs_post_init__(self):
        """Initialize all needed clients and resources.

        Runs after attrs-generated ``__init__``: builds the AWS clients, the
        physical-resource-name cache, and fills in defaults for any optional
        attributes the caller did not provide.
        """
        # Derive a boto3 session from the (possibly caller-provided) botocore
        # session so both clients share the same credentials/configuration.
        boto3_session = boto3.session.Session(botocore_session=self._botocore_session)
        self._cloudformation = boto3_session.client("cloudformation")
        self._s3 = boto3_session.client("s3")
        self._inputs_collected = False
        self._template_urls = {}

        # Cache used throughout to resolve logical resource names in the core
        # stack to their physical names.
        self._cache = CloudFormationPhysicalResourceCache(
            client=self._cloudformation, stack_name=self._core_stack_name()
        )

        # Fall back to the project name when no explicit stack prefix was given.
        if self._stack_prefix is None:
            self._stack_prefix = self._project.name

        if self._input_handler is None:
            # stack_namer is a deferred (partial) lookup: the inputs stack name
            # is resolved only when the handler actually needs it.
            self._input_handler = DefaultInputHandler(
                stack_namer=partial(self._cache.physical_resource_name, self._inputs_stack_logical_name()),
                botocore_session=self._botocore_session,
            )
80 |
    def _collect_inputs(self):
        """Collect inputs from user and mark collection as complete.

        Must run before the ``_templates`` property is accessed; that property
        raises if inputs have not been collected yet.
        """
        # TODO: Should we re-prompt for input values that are already known?
        self._input_handler.collect_inputs(self._project.inputs)
        self._inputs_collected = True
86 |
87 | @property
88 | def _templates(self):
89 | """Lazily load templates from config.
90 |
91 | This is necessary because the inputs need to be collected before the templates are built.
92 | """
93 | if not self._inputs_collected:
94 | raise Exception("Inputs have not yet been collected.")
95 |
96 | if self._actual_templates is None:
97 | self._actual_templates = config_to_templates(self._project)
98 |
99 | return self._actual_templates
100 |
    @staticmethod
    def _artifacts_bucket_logical_name() -> str:
        """Determine the logical name for the artifacts S3 bucket.

        :returns: Logical resource name derived from the S3 bucket resource type
            and the "Artifacts" designator
        """
        return resource_name(s3.Bucket, "Artifacts")
105 |
    def _wait_for_artifacts_bucket(self):
        """Wait until the artifacts bucket is created.

        Blocks until the artifacts bucket resource in the core stack reports a
        complete status.
        """
        self._cache.wait_until_resource_is_complete(self._artifacts_bucket_logical_name())
109 |
110 | def _upload_single_template(self, template_type: str, template: Template):
111 | """Upload one template to the artifacts bucket.
112 |
113 | :param template_type: Template type name
114 | :param template: Template to upload
115 | """
116 | bucket_name = self._cache.physical_resource_name(self._artifacts_bucket_logical_name())
117 | _LOGGER.debug('Uploading %s template to bucket "%s"', template_type, bucket_name)
118 | key = f"templates/{uuid.uuid4()}"
119 | body = template.to_json()
120 | self._s3.put_object(Bucket=bucket_name, Key=key, Body=body)
121 | self._template_urls[VALUE_SEPARATOR.join(("Upload", "Template", template_type))] = key
122 |
    def _upload_templates(self):
        """Upload all templates to artifacts bucket.

        Uploads the inputs, IAM, and pipeline templates, then one template per
        CodeBuild stage under a stage-qualified type name.
        """
        self._upload_single_template("Inputs", self._templates.inputs)
        self._upload_single_template("Iam", self._templates.iam)
        self._upload_single_template("Pipeline", self._templates.pipeline)
        for name, stage in self._templates.codebuild.items():
            self._upload_single_template(VALUE_SEPARATOR.join(("CodeBuild", "Stage", name)), stage)
130 |
131 | def _succeed_wait_condition(self, resource_logical_name: str, reason: str, data: str):
132 | """Report success to a CloudFormation wait condition.
133 |
134 | :param resource_logical_name: Logical name of wait condition resource
135 | :param reason: Reason for success
136 | :param data: Data to include in wait condition report
137 | """
138 | _LOGGER.debug('Reporting to wait condition "%s" with data "%s"', resource_logical_name, data)
139 | wait_condition_url = self._cache.physical_resource_name(resource_logical_name)
140 | message = {"Status": "SUCCESS", "Reason": reason, "UniqueId": "n/a", "Data": data}
141 | requests.put(url=wait_condition_url, data=json.dumps(message))
142 |
143 | def _report_templates_uploaded(self):
144 | """Report success for all template upload wait conditions."""
145 | for name, value in self._template_urls.items():
146 | self._succeed_wait_condition(name, "Template uploaded", value)
147 |
    @staticmethod
    def _inputs_stack_logical_name() -> str:
        """Determine the logical name for the inputs stack.

        :returns: CloudFormation logical resource name for the "Inputs" nested stack
        """
        return resource_name(cloudformation.Stack, "Inputs")
152 |
    def _wait_for_inputs_stack(self):
        """Wait until the inputs stack is created.

        Waits first for the inputs-template upload wait condition,
        then for the inputs stack resource itself.
        """
        self._cache.wait_until_resource_is_complete(VALUE_SEPARATOR.join(("WaitFor", "Upload", "Template", "Inputs")))
        self._cache.wait_until_resource_is_complete(self._inputs_stack_logical_name())
157 |
    def _report_input_values_saved(self):
        """Report that the input values have all been saved.

        Signals success to the input-values wait condition so the stack can proceed.
        """
        self._succeed_wait_condition(VALUE_SEPARATOR.join(("Upload", "Input", "Values")), "Inputs saved", "complete")
161 |
162 | def _stack_exists(self, stack_name: str) -> bool:
163 | """Determine if the stack has already been deployed.
164 |
165 | :param stack_name: Name of CloudFormation stack for which to check
166 | """
167 | try:
168 | self._cloudformation.describe_stacks(StackName=stack_name)
169 |
170 | except ClientError as error:
171 | if error.response["Error"]["Message"] == "Stack with id {name} does not exist".format(name=stack_name):
172 | return False
173 | raise
174 |
175 | else:
176 | return True
177 |
178 | def _core_stack_name(self) -> str:
179 | """Determine the core stack name."""
180 | return f"{self._stack_prefix}-core"
181 |
    def _update_existing_core_stack(self):
        """Update an existing core stack with the current core template.

        Kicks off the update and returns immediately; completion is awaited
        separately via :meth:`_wait_for_core_stack`.
        """
        _LOGGER.info("Updating existing core stack.")

        self._cloudformation.update_stack(
            StackName=self._core_stack_name(), TemplateBody=self._templates.core.to_json()
        )
        # We specifically do not want to wait for this to complete.
190 |
    def _deploy_new_core_stack(self):
        """Deploy a new core stack.

        Kicks off stack creation and returns immediately; completion is awaited
        separately via :meth:`_wait_for_core_stack`.
        """
        _LOGGER.info("Bootstrapping new core stack.")
        self._cloudformation.create_stack(
            StackName=self._core_stack_name(),
            TemplateBody=self._templates.core.to_json(),
            # Acknowledge that the template may create IAM resources.
            Capabilities=["CAPABILITY_IAM"],
        )
        # We specifically do not want to wait for this to complete.
200 |
201 | def _deploy_core_stack(self) -> str:
202 | """Deploy or update the core stack."""
203 | if self._stack_exists(self._core_stack_name()):
204 | self._update_existing_core_stack()
205 | return "stack_update_complete"
206 | else:
207 | self._deploy_new_core_stack()
208 | return "stack_create_complete"
209 |
    def _wait_for_core_stack(self, waiter_name: str):
        """Wait for the core stack creation to complete.

        :param waiter_name: Name of the CloudFormation waiter to use
            ("stack_create_complete" or "stack_update_complete",
            as returned by :meth:`_deploy_core_stack`)
        """
        waiter = self._cloudformation.get_waiter(waiter_name)
        waiter.wait(StackName=self._core_stack_name(), WaiterConfig=dict(Delay=10))
        _LOGGER.info("Stack deploy/update complete!")
215 |
    def deploy_standalone(self):
        """Deploy a standalone PipeFormer application.

        This will create all necessary resources including all IAM Roles and a KMS CMK.

        High-level flow:

        1. Collect input values from the user.
        2. Create or update the core stack without blocking on completion.
        3. Once the artifacts bucket exists, upload the nested stack templates
           and report their locations to the gating wait conditions.
        4. Once the inputs stack exists, save the collected input values and
           report completion via the inputs wait condition.
        5. Block until the core stack reaches its terminal complete state.
        """
        _LOGGER.info("Collecting user inputs.")
        self._collect_inputs()

        _LOGGER.info("Starting stack deployment.")
        # The waiter name depends on whether we created or updated the core stack.
        waiter_name = self._deploy_core_stack()

        _LOGGER.info("Waiting for artifacts bucket creation to complete.")
        self._wait_for_artifacts_bucket()

        _LOGGER.info("Uploading nested stack template files.")
        self._upload_templates()

        # TODO: Do wait conditions re-create on stack updates?
        # TODO: If I update a stack with a dynamic reference
        # and the only thing that changed is the value in the
        # referenced location, does the stack update?
        _LOGGER.info("Reporting uploaded template file locations to wait conditions.")
        self._report_templates_uploaded()

        _LOGGER.info("Waiting for inputs stack creation to complete.")
        self._wait_for_inputs_stack()

        _LOGGER.info("Saving inputs values to input stack resources.")
        self._input_handler.save_inputs(inputs=self._project.inputs)

        _LOGGER.info("Reporting inputs status to wait condition.")
        self._report_input_values_saved()

        _LOGGER.info("Waiting for stacks to finish deploying")
        self._wait_for_core_stack(waiter_name)
251 |
--------------------------------------------------------------------------------
/src/pipeformer/identifiers.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
"""Unique identifiers used by pipeformer."""
__all__ = ("__version__", "LOGGER_NAME", "VALUE_SEPARATOR")
# Package version, exposed to the CLI "--version" flag.
__version__ = "0.0.1"
# Name of the logger used throughout pipeformer (configured in internal.logging_utils).
LOGGER_NAME = "pipeformer"
# Separator used when joining name components into logical resource names.
# NOTE(review): "0" is a surprising separator choice -- presumably because
# CloudFormation logical IDs must be alphanumeric; confirm before changing.
VALUE_SEPARATOR: str = "0"
18 |
--------------------------------------------------------------------------------
/src/pipeformer/input_handling.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Tooling for reading and writing inputs."""
14 | import getpass
15 | import logging
16 | import os
17 | from typing import Callable, Dict, Optional
18 |
19 | import attr
20 | import boto3
21 | import boto3.session
22 | import botocore.session
23 | from attr.validators import instance_of, is_callable
24 |
25 | from pipeformer.identifiers import LOGGER_NAME
26 | from pipeformer.internal.structures import Input
27 | from pipeformer.internal.util import CloudFormationPhysicalResourceCache
28 |
29 | _LOGGER = logging.getLogger(LOGGER_NAME)
30 | __all__ = ("DefaultInputHandler", "InputHandler")
31 |
32 |
class InputHandler:
    """Parent class for all classes used for collecting user input.

    Children must implement the four collect/save hooks; ``collect_inputs``
    and ``save_inputs`` dispatch to those hooks based on each input's
    ``secret`` flag.
    """

    def collect_secret(self, secret: Input):
        """Collect a secret input value from the user.

        :param secret: Input to collect from user
        """
        raise NotImplementedError(
            "InputHandler does not provide an implementation of collect_secret. Children must provide their own."
        )

    def save_secret(self, secret: Input):
        """Save a secret input value.

        :param secret: Input to save
        """
        raise NotImplementedError(
            "InputHandler does not provide an implementation of save_secret. Children must provide their own."
        )

    def collect_parameter(self, parameter: Input):
        """Collect a non-secret input value from the user.

        :param parameter: Input to collect from user
        """
        raise NotImplementedError(
            "InputHandler does not provide an implementation of collect_parameter. Children must provide their own."
        )

    def save_parameter(self, parameter: Input):
        """Save a non-secret input value.

        :param parameter: Input to save
        """
        raise NotImplementedError(
            "InputHandler does not provide an implementation of save_parameter. Children must provide their own."
        )

    def collect_inputs(self, inputs: Dict[str, Input]):
        """Collect all input values, routing each to the secret or parameter hook.

        :param inputs: Mapping of input names to inputs
        """
        for value in inputs.values():
            collector = self.collect_secret if value.secret else self.collect_parameter
            collector(value)

    def save_inputs(self, inputs: Dict[str, Input]):
        """Save all input values, routing each to the secret or parameter hook.

        :param inputs: Mapping of input names to inputs
        """
        for value in inputs.values():
            saver = self.save_secret if value.secret else self.save_parameter
            saver(value)
93 |
94 |
@attr.s
class DefaultInputHandler(InputHandler):
    """The default input handler.

    Inputs are collected from the command line.
    Secrets are saved to Secrets Manager.
    Parameters are saved to Parameter Store.

    :param stack_namer: Callable that returns the stack name
    :param botocore_session: Pre-configured botocore session (optional)
    """

    _stack_namer: Callable[[], str] = attr.ib(validator=is_callable())
    _botocore_session: botocore.session.Session = attr.ib(
        default=attr.Factory(botocore.session.Session), validator=instance_of(botocore.session.Session)
    )
    # Intentionally not an attr.ib: plain class-level default that the ``cache``
    # property lazily replaces with a per-instance value once the stack exists.
    _cache: Optional[CloudFormationPhysicalResourceCache] = None

    def __attrs_post_init__(self):
        """Initialize all AWS SDK clients."""
        boto3_session = boto3.session.Session(botocore_session=self._botocore_session)
        self._secrets_manager = boto3_session.client("secretsmanager")
        self._parameter_store = boto3_session.client("ssm")
        self._cloudformation = boto3_session.client("cloudformation")

    @property
    def cache(self):
        """Lazily create the physical resource cache and return it for use.

        This is necessary because the resources do not exist yet when we create this handler
        (needed for collecting inputs)
        but will exist by the time we need to save those inputs.

        :returns: Cache resource
        """
        if self._cache is not None:
            return self._cache

        self._cache = CloudFormationPhysicalResourceCache(
            client=self._cloudformation, stack_name=self._stack_namer()  # pylint: disable=not-callable
        )
        return self._cache

    @staticmethod
    def _input_prompt(value: Input) -> str:
        """Generate the input prompt message for an input.

        :param value: Input for which to create input prompt
        :returns: Formatted input prompt message
        """
        return os.linesep.join((value.description, f"{value.name}: ")).lstrip()

    def collect_secret(self, secret: Input):
        """Collect a secret input value from the user via the CLI.

        :param secret: Input to collect from user
        """
        secret.value = getpass.getpass(prompt=self._input_prompt(secret))

    @staticmethod
    def _assert_input_set(value: Input):
        """Verify that an input has a value set.

        :param value: Input to verify
        :raises ValueError: if value is not set
        """
        if value.value is None:
            raise ValueError(f'Value for input "{value.name}" is not set.')

    def save_secret(self, secret: Input):
        """Save a secret input value to Secrets Manager.

        :param secret: Input to save
        :raises ValueError: if the secret value is not set
        """
        _LOGGER.debug('Saving secret value for input "%s"', secret.name)
        self._assert_input_set(secret)
        secret_id = self.cache.physical_resource_name(secret.resource_name())
        self._secrets_manager.update_secret(SecretId=secret_id, SecretString=secret.value)

    def collect_parameter(self, parameter: Input):
        """Collect a non-secret input value from the user via the CLI.

        :param parameter: Input to collect from user
        """
        parameter.value = input(self._input_prompt(parameter))

    def save_parameter(self, parameter: Input):
        """Save a non-secret input value to Parameter Store.

        :param parameter: Input to save
        :raises ValueError: if the parameter value is not set
        """
        _LOGGER.debug('Saving parameter value for input "%s"', parameter.name)
        self._assert_input_set(parameter)
        parameter_name = self.cache.physical_resource_name(parameter.resource_name())
        # put_parameter returns a response dict; the new parameter version number is
        # under the "Version" key. Previously the whole response dict was assigned to
        # parameter.version, which broke Input.dynamic_reference() -- it interpolates
        # the version into an SSM dynamic reference string.
        parameter.version = self._parameter_store.put_parameter(
            Name=parameter_name, Type="String", Value=parameter.value, Overwrite=True
        )["Version"]
192 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Internal implementation details.
14 |
15 | .. warning::
16 | No guarantee is provided on the modules and APIs within this
17 | namespace staying consistent. Directly reference at your own risk.
18 | """
19 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/arg_parsing.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Helper functions for parsing and processing input arguments."""
14 | import argparse
15 | import os
16 | from typing import Iterator, Optional
17 |
18 | from pipeformer.identifiers import __version__
19 |
20 | __all__ = ("parse_args",)
21 |
22 |
def _build_parser() -> argparse.ArgumentParser:
    """Construct the argument parser.

    :returns: Constructed argument parser
    """
    parser = argparse.ArgumentParser(description="Build continuous delivery pipelines powered by AWS CodePipeline.")

    # Exactly one of --version or --config must be supplied.
    exclusive = parser.add_mutually_exclusive_group(required=True)
    exclusive.add_argument("--version", action="version", version=f"pipeformer/{__version__}")
    exclusive.add_argument("--config", help="Path to pipeformer config file.")

    parser.add_argument(
        "-v",
        dest="verbosity",
        action="count",
        help="Enables logging and sets detail level. Multiple -v options increases verbosity (max: 4).",
    )
    parser.add_argument("-q", "--quiet", action="store_true", help="Suppresses most warning and diagnostic messages")

    return parser
44 |
45 |
def parse_args(raw_args: Optional[Iterator[str]] = None) -> argparse.Namespace:
    """Handle argparse to collect the needed input values.

    :param raw_args: List of arguments
    :returns: parsed arguments
    """
    parser = _build_parser()
    args = parser.parse_args(raw_args)

    # --version exits inside parse_args, so reaching this point means --config was given.
    if not os.path.isfile(args.config):
        parser.error(f'Invalid filename: "{args.config}"')

    return args
59 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/logging_utils.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Logging utilities."""
import logging
from typing import Dict, Iterator, Tuple
16 |
17 | from pipeformer.identifiers import LOGGER_NAME
18 |
19 | __all__ = ("setup_logger",)
20 | _LOGGING_LEVELS: Dict[int, int] = {0: logging.CRITICAL, 1: logging.INFO, 2: logging.DEBUG}
21 | _MAX_LOGGING_LEVEL: int = 2
22 | _FORMAT_STRING: str = "%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s"
23 |
24 |
25 | class _BlacklistFilter(logging.Filter): # pylint: disable=too-few-public-methods
26 | """Logging filter that allows blacklisting of certain logger names.
27 |
28 | :param *args: logger names to ignore
29 | """
30 |
31 | def __init__(self, *args: str):
32 | """Create internal blacklist."""
33 | super(_BlacklistFilter, self).__init__()
34 | self.__blacklist = args
35 |
36 | def filter(self, record: logging.LogRecord) -> bool:
37 | """Determine whether to filter record.
38 |
39 | :param record: Logging record to filter
40 | :type record: logging.LogRecord
41 | """
42 | return record.name not in self.__blacklist
43 |
44 |
45 | def _logging_levels(verbosity: int, quiet: bool) -> Iterator[int]:
46 | """Determine the proper logging levels given required verbosity level and quiet.
47 |
48 | :param verbosity: Requested level of verbosity
49 | :param quiet: Suppresses all logging when true
50 | :returns: local and root logging levels
51 | """
52 | if quiet:
53 | return logging.CRITICAL, logging.CRITICAL
54 |
55 | if verbosity is None or verbosity <= 0:
56 | return logging.WARNING, logging.CRITICAL
57 |
58 | normalized_local = min(verbosity, _MAX_LOGGING_LEVEL)
59 | normalized_root = min(verbosity - normalized_local, _MAX_LOGGING_LEVEL)
60 | return _LOGGING_LEVELS[normalized_local], _LOGGING_LEVELS[normalized_root]
61 |
62 |
def setup_logger(verbosity: int, quiet: bool):
    """Set up the logger.

    :param verbosity: Requested level of verbosity
    :param quiet: Suppresses all logging when true
    """
    local_level, root_level = _logging_levels(verbosity, quiet)
    shared_formatter = logging.Formatter(_FORMAT_STRING)

    def _new_handler() -> logging.StreamHandler:
        # Both loggers get their own stream handler with the shared format.
        handler = logging.StreamHandler()
        handler.setFormatter(shared_formatter)
        return handler

    pipeformer_logger = logging.getLogger(LOGGER_NAME)
    pipeformer_logger.setLevel(local_level)
    pipeformer_logger.addHandler(_new_handler())

    # The root handler filters out pipeformer records so they are not emitted twice.
    root_handler = _new_handler()
    root_handler.addFilter(_BlacklistFilter(LOGGER_NAME))

    root_logger = logging.getLogger()
    root_logger.setLevel(root_level)
    root_logger.addHandler(root_handler)
87 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/resolve.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Helpers for resolving custom formatting."""
14 | from typing import Iterable, Union
15 |
16 | import attr
17 | from attr.validators import deep_mapping, instance_of
18 | from troposphere import Join
19 |
20 | from .structures import Input
21 |
22 | _INPUT_TAG = ["{INPUT:", "}"]
23 | _PRIMITIVE_TYPES = (int, float, complex, bool, type(None))
24 | _PrimitiveTypes = Union[int, float, complex, bool, None]
25 | __all__ = ("InputResolver",)
26 |
27 |
28 | def _tag_in_string(source: str, start: str, end: str) -> bool:
29 | """Determine if a specific tag is in a string.
30 |
31 | :param source: String to evaluate
32 | :param start: String that marks the start of a tag
33 | :param end: String that marks the end of a tag
34 | :returns: Decision
35 | """
36 | if start not in source:
37 | return False
38 |
39 | if end not in source[source.index(start) + len(start) :]:
40 | return False
41 |
42 | return True
43 |
44 |
45 | def _value_to_triplet(source: str, start: str, end: str) -> Iterable[str]:
46 | """Extract the first tag value from a string, splitting the source string into the parts before and after the tag.
47 |
48 | :param source: String to process
49 | :param start: String that marks the start of a tag
50 | :param end: String that marks the end of a tag
51 | :return: Split string values
52 | """
53 | prefix, _value = source.split(start, 1)
54 |
55 | value, suffix = _value.split(end, 1)
56 |
57 | return prefix, value, suffix
58 |
59 |
# cmp=False: the comparison dunders are hand-written below so that comparisons
# delegate to the wrapped object instead of comparing attrs fields.
@attr.s(cmp=False)
class InputResolver:
    """Wraps another structure and injects input references whenever a value is found that contains an input tag.

    As strings are read from the contents of the wrapped structure,
    they are expanded as necessary to CloudFormation dynamic references that will resolve the needed input values.

    Along the way, the referenced inputs are collected and can later be found in ``required_inputs``.
    This can be used to determine what inputs are required as parameters for a given CloudFormation template.

    :param wrapped: Wrapped structure
    :param inputs: Map of input names to :class:`Input` structures
    :param required_inputs: Known required input (optional)
    """

    _wrapped = attr.ib()
    _inputs = attr.ib(validator=deep_mapping(key_validator=instance_of(str), value_validator=instance_of(Input)))
    # This set instance is shared with every child resolver created in __convert_value,
    # so input names found anywhere in the wrapped tree accumulate here.
    required_inputs = attr.ib(default=attr.Factory(set))

    @_wrapped.validator
    def _validate_wrapped(self, attribute, value):  # pylint: disable=unused-argument,no-self-use
        """Validate characteristics about the wrapped object.
        Used by attrs as the validator for the ``_wrapped`` attribute.
        """
        if isinstance(value, InputResolver):
            raise TypeError(f"{InputResolver!r} cannot wrap itself.")

        # required_inputs is exposed on this wrapper; a wrapped object with the same
        # attribute would be silently shadowed, so reject it up front.
        for reserved in ("required_inputs",):
            if hasattr(value, reserved):
                raise TypeError(f'Wrapped object must not have "{reserved}" attribute.')

    def __attrs_post_init__(self):
        """Enable otherwise hidden wrapped methods if those methods are found on the wrapped object."""
        # Alias the public name (e.g. "get") to the private converting implementation
        # (e.g. "_get") only when the wrapped object supports that method.
        for method in ("get", "keys", "values", "items"):
            if hasattr(self._wrapped, method):
                setattr(self, method, getattr(self, f"_{method}"))

    def __expand_values(self, value: str) -> Iterable[str]:
        """Expand a string into a prefix, input reference, and suffix.

        Also records the referenced input name in ``required_inputs``.
        """
        prefix, name, suffix = _value_to_triplet(value, *_INPUT_TAG)

        input_definition = self._inputs[name]
        reference = input_definition.dynamic_reference()

        self.required_inputs.add(name)
        return prefix, reference, suffix

    def __convert_value(self, value) -> Union[_PrimitiveTypes, "InputResolver", str, Join]:
        """Convert a value from the wrapped object to a value that can insert input resolutions."""
        # Primitives cannot contain tags; pass them through untouched.
        if isinstance(value, _PRIMITIVE_TYPES):
            return value

        # Non-string containers/objects are wrapped recursively, sharing required_inputs.
        if not isinstance(value, str):
            return InputResolver(wrapped=value, inputs=self._inputs, required_inputs=self.required_inputs)

        # Plain strings without a complete tag pass through unchanged.
        if not _tag_in_string(value, *_INPUT_TAG):
            return value

        # Tagged strings become a troposphere Join of prefix + dynamic reference + suffix.
        return Join("", self.__expand_values(value))

    def __len__(self):
        """Passthrough length from wrapped."""
        return len(self._wrapped)

    def __eq__(self, other) -> bool:
        """Passthrough eq from wrapped."""
        if isinstance(other, InputResolver):
            return self._wrapped.__eq__(other._wrapped)  # pylint: disable=protected-access
        return self._wrapped.__eq__(other)

    def __lt__(self, other) -> bool:
        """Passthrough lt from wrapped."""
        if isinstance(other, InputResolver):
            return self._wrapped.__lt__(other._wrapped)  # pylint: disable=protected-access
        return self._wrapped.__lt__(other)

    def __gt__(self, other) -> bool:
        """Passthrough gt from wrapped."""
        if isinstance(other, InputResolver):
            return self._wrapped.__gt__(other._wrapped)  # pylint: disable=protected-access
        return self._wrapped.__gt__(other)

    def __le__(self, other) -> bool:
        """Passthrough le from wrapped."""
        if isinstance(other, InputResolver):
            return self._wrapped.__le__(other._wrapped)  # pylint: disable=protected-access
        return self._wrapped.__le__(other)

    def __ge__(self, other) -> bool:
        """Passthrough ge from wrapped."""
        if isinstance(other, InputResolver):
            return self._wrapped.__ge__(other._wrapped)  # pylint: disable=protected-access
        return self._wrapped.__ge__(other)

    def __str__(self) -> str:
        """Passthrough str from wrapped."""
        # TODO: Do we need to convert this?
        return self._wrapped.__str__()

    def __getattr__(self, name):
        """Get an attribute from wrapped and convert it.

        Only invoked for attributes not found on the resolver itself.
        """
        return self.__convert_value(getattr(self._wrapped, name))

    def __call__(self, *args, **kwargs):
        """Call wrapped and convert the result."""
        return self.__convert_value(self._wrapped(*args, **kwargs))

    def __getitem__(self, key):
        """Get an item from wrapped and convert it."""
        return self.__convert_value(self._wrapped[key])

    def __iter__(self) -> Iterable["InputResolver"]:
        """Iterate through wrapped, converting the results."""
        for each in self._wrapped:
            yield self.__convert_value(each)

    def __reversed__(self) -> Iterable["InputResolver"]:
        """Reverse wrapped, converting the result."""
        return self.__convert_value(reversed(self._wrapped))

    def __next__(self) -> "InputResolver":
        """Iterate through wrapped, converting the results."""
        return self.__convert_value(self._wrapped.__next__())

    def _get(self, key, default=None) -> "InputResolver":
        """Call wrapped.get, converting the result."""
        return self.__convert_value(self._wrapped.get(key, default))

    def _items(self) -> Iterable[Iterable["InputResolver"]]:
        """Call wrapped.items, converting the resulting keys and values."""
        for key, value in self._wrapped.items():
            yield (self.__convert_value(key), self.__convert_value(value))

    def _keys(self) -> Iterable["InputResolver"]:
        """Call wrapped.keys, converting the resulting keys."""
        for key in self._wrapped.keys():
            yield self.__convert_value(key)

    def _values(self) -> Iterable["InputResolver"]:
        """Call wrapped.values, converting the resulting values."""
        for value in self._wrapped.values():
            yield self.__convert_value(value)
202 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/structures.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Internal data structures."""
14 | from collections import OrderedDict
15 | from typing import Dict, Iterable, Optional, Set
16 |
17 | import attr
18 | import oyaml as yaml
19 | from attr.validators import deep_iterable, deep_mapping, instance_of, optional
20 | from troposphere import Ref, Sub, Template, cloudformation, secretsmanager, ssm
21 |
22 | from .util import reference_name, resource_name
23 |
24 | __all__ = ("Config", "PipelineStage", "PipelineAction", "Input", "ProjectTemplates", "WaitConditionStack", "Pipeline")
25 | _STRING_STRING_MAP = deep_mapping(key_validator=instance_of(str), value_validator=instance_of(str))
26 |
27 |
def _resolve_parameter(name: Ref, version: str) -> Sub:
    """Build a CloudFormation dynamic reference string structure that resolves a SSM Parameter.

    :param name: Parameter name
    :param version: Parameter version
    :return: Dynamic reference
    """
    # Python interpolates the version; CloudFormation's Sub substitutes ${name}.
    template = f"{{{{resolve:ssm:${{name}}:{version}}}}}"
    return Sub(template, {"name": name})
36 |
37 |
def _resolve_secret(arn: Ref) -> Sub:
    """Build a CloudFormation dynamic reference string structure that resolves a Secrets Manager secret.

    :param arn: Secret Arn
    :return: Dynamic reference
    """
    # Plain literal: unlike the SSM helper, nothing here is interpolated by Python,
    # so the previous "f" prefix was pointless; ${arn} is substituted by Sub.
    return Sub("{{resolve:secretsmanager:${arn}:SecretString}}", {"arn": arn})
45 |
46 |
@attr.s
class WaitConditionStack:
    """Container to hold all resources for a wait-condition-initiated stack.

    :param condition: Wait condition
    :param handle: Wait condition handle
    :param stack: Stack
    """

    # Troposphere wait condition resource.
    condition: cloudformation.WaitCondition = attr.ib(validator=instance_of(cloudformation.WaitCondition))
    # Wait condition handle (the pre-signed URL resource that success is reported to).
    handle: cloudformation.WaitConditionHandle = attr.ib(validator=instance_of(cloudformation.WaitConditionHandle))
    # The nested CloudFormation stack resource itself.
    stack: cloudformation.Stack = attr.ib(validator=instance_of(cloudformation.Stack))
59 |
60 |
61 | class _ConfigStructure:
62 | """Base for configuration structures."""
63 | @staticmethod
64 | def _clean_kwargs(kwargs: Dict):
65 | """Convert keys separators from YAML-valid "-" characters to Python-variable-name-valid "_" characters."""
66 | return {key.replace("-", "_"): value for key, value in kwargs.items()}
67 | @classmethod
68 | def from_dict(cls, kwargs: Dict):
69 | """Load from a dictionary."""
70 | return cls(**cls._clean_kwargs(kwargs))
71 |
72 |
@attr.s
class Input(_ConfigStructure):
    """Container and formatter for input values.

    :param name: Input name
    :param description: Input description
    :param secret: Is this input a secret?
    :param value: Input value (optional)
    """
    name: str = attr.ib(validator=instance_of(str))
    description: str = attr.ib(validator=instance_of(str))
    secret: bool = attr.ib(validator=instance_of(bool))
    value: Optional[str] = attr.ib(default=None, validator=optional(instance_of(str)))
    # Not attr.ib()s: with auto_attribs disabled, attrs ignores these annotations.
    # ``version`` is a plain class-level default that stays None for secrets and is
    # set to 1 for parameters in __attrs_post_init__; ``_value_type`` is a bare
    # annotation only and is always assigned in __attrs_post_init__.
    version: Optional[int] = None
    _value_type: str

    def __attrs_post_init__(self):
        """Set additional configuration values based on input type."""
        if self.secret:
            # Secrets live in Secrets Manager and are referenced by Arn.
            self._resource_type = secretsmanager.Secret
            self._value_type = "Arn"
        else:
            # Parameters live in SSM Parameter Store and are referenced by Name.
            self._resource_type = ssm.Parameter
            self._value_type = "Name"
            self.version = 1

    def resource_name(self) -> str:
        """Build the resource name for this input."""
        return resource_name(self._resource_type, self.name)

    def reference_name(self) -> str:
        """Build the reference name for this input."""
        return reference_name(self.resource_name(), self._value_type)

    def dynamic_reference(self) -> Sub:
        """Build a CloudFormation dynamic reference string structure that resolves this input."""
        if self.secret:
            return _resolve_secret(Ref(self.reference_name()))

        # Parameter references must pin the version so updates re-resolve deliberately.
        return _resolve_parameter(Ref(self.reference_name()), self.version)
113 |
114 |
@attr.s
class PipelineAction(_ConfigStructure):
    """CodePipeline action definition.

    :param provider: Action provider name
        (must be a valid CodePipeline action provider name)
    :param inputs: Names of CodePipeline inputs to collect
    :param outputs: Names of CodePipeline outputs to emit
    :param configuration: Additional string-string map of configuration values to provide in
        CodePipeline action definition
    :param image: Docker image to use with CodeBuild
        (only used for CodeBuild provider actions)
    :param environment_type: CodeBuild environment type name
        (only used for CodeBuild provider actions)
        (if not provided, we will attempt to guess based on the image name)
    :param buildspec: Location of CodeBuild buildspec in source
        (only used for CodeBuild provider actions)
        (in-line buildspec definition not supported)
    :param compute_type: CodeBuild compute type name
        (only used for CodeBuild provider actions)
        (default: ``BUILD_GENERAL1_SMALL``)
    :param env: Mapping of environment variables to set in action environment
    :param run_order: CodePipeline action run order
    """

    provider: str = attr.ib(validator=instance_of(str))
    inputs: Set[str] = attr.ib(
        default=attr.Factory(set), validator=optional(deep_iterable(member_validator=instance_of(str)))
    )
    outputs: Set[str] = attr.ib(
        default=attr.Factory(set), validator=optional(deep_iterable(member_validator=instance_of(str)))
    )
    configuration: Dict[str, str] = attr.ib(default=attr.Factory(dict), validator=optional(_STRING_STRING_MAP))
    image: Optional[str] = attr.ib(default=None, validator=optional(instance_of(str)))
    environment_type: Optional[str] = attr.ib(default=None, validator=optional(instance_of(str)))
    buildspec: Optional[str] = attr.ib(default=None, validator=optional(instance_of(str)))
    compute_type: str = attr.ib(default="BUILD_GENERAL1_SMALL", validator=optional(instance_of(str)))
    env: Dict[str, str] = attr.ib(default=attr.Factory(dict), validator=optional(_STRING_STRING_MAP))
    run_order: int = attr.ib(default=1, validator=optional(instance_of(int)))

    @run_order.validator
    def _check_run_order(self, attribute, value):  # pylint: disable=unused-argument,no-self-use
        """Verify that ``run_order`` value is valid."""
        # CodePipeline run orders are 1-indexed.
        if value < 1:
            raise ValueError("PipelineAction run_order value must be >= 1")

    @image.validator
    def _check_image(self, attribute, value):  # pylint: disable=unused-argument
        """Verify that ``image`` is set if provider type ``CodeBuild`` is used."""
        if self.provider == "CodeBuild" and value is None:
            raise ValueError('image must be defined for actions of type "CodeBuild"')

    @buildspec.validator
    def _check_buildspec(self, attribute, value):  # pylint: disable=unused-argument
        """Verify that ``buildspec`` is set if provider type ``CodeBuild`` is used."""
        if self.provider == "CodeBuild" and value is None:
            raise ValueError('buildspec must be defined for actions of type "CodeBuild"')

    def __attrs_post_init__(self):
        """Set default values for ``environment_type`` when not explicitly provided."""
        if self.provider == "CodeBuild" and self.environment_type is None:
            # Guess the environment type from the image name; default to Linux.
            # ``self.image`` is guaranteed non-None here by _check_image.
            if "windows" in self.image.lower():
                self.environment_type = "WINDOWS_CONTAINER"
            else:
                self.environment_type = "LINUX_CONTAINER"
180 |
181 |
@attr.s
class PipelineStage(_ConfigStructure):
    """CodePipeline stage definition.

    :param name: Stage name
    :param actions: Actions to be taken in stage
    """

    # Stage name as defined in the config file pipeline mapping.
    name: str = attr.ib(validator=instance_of(str))
    # Actions executed within this stage.
    actions: Iterable[PipelineAction] = attr.ib(validator=deep_iterable(member_validator=instance_of(PipelineAction)))
192 |
193 |
@attr.s
class Config(_ConfigStructure):
    """PipeFormer project configuration.

    :param name: Project name
    :param description: Project description
    :param generate_cmk: Should a custom CMK be generated? (reserved for later use: must always be ``True``)
    :param pipeline: Mapping of stage names to pipeline stages
    :param inputs: Mapping of input names to loaded inputs
    """

    name: str = attr.ib(validator=instance_of(str))
    description: str = attr.ib(validator=instance_of(str))
    generate_cmk: bool = attr.ib(validator=instance_of(bool))
    pipeline: Dict[str, PipelineStage] = attr.ib(
        validator=deep_mapping(key_validator=instance_of(str), value_validator=instance_of(PipelineStage))
    )
    inputs: Dict[str, Input] = attr.ib(
        validator=optional(deep_mapping(key_validator=instance_of(str), value_validator=instance_of(Input)))
    )

    @generate_cmk.validator
    def _check_generate_cmk(self, attribute, value):  # pylint: disable=unused-argument,no-self-use
        """Validate that the ``generate_cmk`` value is always ``True``."""
        if not value:
            raise ValueError(
                "Use of AWS-managed CMKs is not supported. Must use customer-managed CMK (generate-cmk: true)."
            )

    @classmethod
    def from_dict(cls, kwargs: Dict):
        """Load a PipeFormer config from a dictionary parsed from a PipeFormer config file.

        :param kwargs: Parsed config file dictionary
        :return: Loaded PipeFormer config
        """
        # Shallow copy so the caller's dictionary is not mutated below.
        loaded = kwargs.copy()

        if "inputs" in loaded:
            # Input names are the mapping keys; inject each name into its definition.
            loaded["inputs"] = {
                key: Input.from_dict(dict(name=key, **value)) for key, value in kwargs["inputs"].items()
            }

        # Stage names are likewise the mapping keys of the pipeline section.
        loaded["pipeline"] = {
            key: PipelineStage(name=key, actions=[PipelineAction.from_dict(value) for value in actions])
            for key, actions in kwargs["pipeline"].items()
        }

        return cls(**cls._clean_kwargs(loaded))

    @classmethod
    def from_file(cls, filename: str):
        """Load a PipeFormer config from an existing file.

        :param filename: Existing filename
        :return: Loaded PipeFormer config
        """
        # safe_load: config files are external input; never use yaml.load here.
        with open(filename, "rb") as config_file:
            raw_parsed = yaml.safe_load(config_file)

        return cls.from_dict(raw_parsed)
255 |
256 |
@attr.s
class Pipeline:
    """Container to hold all templates for a single PipeFormer pipeline.

    :param template: CodePipeline stack template
    :param stage_templates: Mapping of stage names to corresponding CodeBuild templates
        (ordered: insertion order follows pipeline stage order)
    """

    # The CodePipeline stack template itself.
    template: Template = attr.ib(validator=instance_of(Template))
    # Ordered mapping of stage name -> CodeBuild stack template for that stage.
    stage_templates: Dict[str, Template] = attr.ib(
        validator=deep_mapping(
            key_validator=instance_of(str),
            value_validator=instance_of(Template),
            mapping_validator=instance_of(OrderedDict),
        )
    )
273 |
274 |
@attr.s
class ProjectTemplates:
    """Container to hold all templates for a PipeFormer project.

    :param core: Core stack template
    :param inputs: Inputs stack template
    :param iam: IAM stack template
    :param pipeline: CodePipeline stack template
    :param codebuild: Mapping of stage names to corresponding CodeBuild templates
    """

    # Parent stack that wires all of the other templates together.
    core: Template = attr.ib(validator=instance_of(Template))
    inputs: Template = attr.ib(validator=instance_of(Template))
    iam: Template = attr.ib(validator=instance_of(Template))
    pipeline: Template = attr.ib(validator=instance_of(Template))
    # Ordered mapping of stage name -> CodeBuild stack template.
    codebuild: Dict[str, Template] = attr.ib(
        validator=deep_mapping(
            key_validator=instance_of(str),
            value_validator=instance_of(Template),
            mapping_validator=instance_of(OrderedDict),
        )
    )
297 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/template_builder.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Logic for transforming a parsed config to one or more CloudFormation templates."""
14 | from pipeformer.internal.templates import codepipeline, core, iam, inputs
15 |
16 | from .structures import Config, ProjectTemplates
17 |
18 | __all__ = ("config_to_templates",)
19 |
20 |
def config_to_templates(project: Config) -> ProjectTemplates:
    """Construct all standalone templates from project.

    :param project: Source project
    :return: Constructed templates
    """
    # Build the independent stack templates first; the core template references all of them.
    roles = iam.build(project)

    input_values = inputs.build(project)

    pipeline = codepipeline.build(project)

    root = core.build(
        project=project, inputs_template=input_values, iam_template=roles, pipeline_templates=pipeline
    )

    return ProjectTemplates(
        core=root,
        inputs=input_values,
        iam=roles,
        pipeline=pipeline.template,
        codebuild=pipeline.stage_templates,
    )
47 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/templates/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Modules for building CloudFormation templates."""
14 | from troposphere import Tags
15 |
16 | from pipeformer.internal.structures import Config
17 |
18 |
def project_tags(project: Config) -> Tags:
    """Construct default tags for a project.

    :param project: PipeFormer project
    :return: Tags
    """
    # Every pipeformer-managed resource is tagged with the project name.
    default_tags = {"pipeformer": project.name}
    return Tags(**default_tags)
26 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/templates/codebuild.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Logic for building the CodeBuild stack templates."""
14 | import string
15 |
16 | from troposphere import AWS_STACK_NAME, Output, Parameter, Ref, Sub, Tags, Template, codebuild, iam, s3
17 |
18 | from pipeformer.internal.resolve import InputResolver
19 | from pipeformer.internal.structures import Config
20 | from pipeformer.internal.util import reference_name, resource_name
21 |
22 | from . import project_tags
23 |
24 |
def project_name(action_number: int) -> str:
    """Construct the project logical resource name.

    :param action_number: Unique count identifier for project in stack
    :return: Logical resource name
    """
    # Maps the action counter to a single ASCII letter (a-z, then A-Z).
    # NOTE(review): supports at most 52 actions per stage; raises IndexError
    # beyond that — confirm whether that limit is acceptable.
    return resource_name(codebuild.Project, string.ascii_letters[action_number])
32 |
33 |
def _build_project(name: str, action: InputResolver, role: Ref, bucket: Ref, tags: Tags) -> codebuild.Project:
    """Construct a CodeBuild project for the specified action.

    :param name: Logical resource name to use for project
    :param action: Action wrapped in an InputResolver
    :param role: Reference to CodeBuild role
    :param bucket: Reference to application resources bucket
    :param tags: Tags to add to project
    :return: Constructed project
    """
    # Every project receives the resources bucket name first, followed by any
    # action-specific environment variables.
    env_variables = [codebuild.EnvironmentVariable(Name="PIPEFORMER_S3_BUCKET", Value=bucket)]
    for key, value in action.env.items():
        env_variables.append(codebuild.EnvironmentVariable(Name=key, Value=value))

    environment = codebuild.Environment(
        ComputeType=action.compute_type,
        Type=action.environment_type,
        Image=action.image,
        EnvironmentVariables=env_variables,
    )

    return codebuild.Project(
        name,
        Name=Sub(f"${{{AWS_STACK_NAME}}}-{name}"),
        ServiceRole=role,
        Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
        Source=codebuild.Source(Type="CODEPIPELINE", BuildSpec=action.buildspec),
        Environment=environment,
        Tags=tags,
    )
59 |
60 |
def build(project: Config, stage: InputResolver) -> Template:
    """Build a stack template for all CodeBuild actions in a CodePipeline stage.

    :param project: PipeFormer project to build for
    :param stage: Stage for which to construct CodeBuild projects
    :return: Constructed template
    """
    resources = Template(
        Description=f"CodeBuild projects for {stage.name} stage in pipeformer-managed project: {project.name}"
    )

    # set all non-input parameters
    resources_bucket = resources.add_parameter(
        Parameter(reference_name(resource_name(s3.Bucket, "ProjectResources"), "Name"), Type="String")
    )
    role = resources.add_parameter(
        Parameter(reference_name(resource_name(iam.Role, "CodeBuild"), "Arn"), Type="String")
    )

    default_tags = project_tags(project)

    required_inputs = set()

    # Add one project per CodeBuild action. The action's position in the stage
    # is preserved (via enumerate) for stable logical resource naming, even when
    # non-CodeBuild actions are skipped.
    for pos, action in enumerate(stage.actions):
        if action.provider != "CodeBuild":
            continue

        action_resource = resources.add_resource(
            _build_project(
                name=project_name(pos), action=action, role=role.ref(), bucket=resources_bucket.ref(), tags=default_tags
            )
        )
        resources.add_output(Output(reference_name(action_resource.title, "Name"), Value=action_resource.ref()))

        # Track which project inputs this action referenced while being expanded.
        required_inputs.update(action.required_inputs)

    # use collected parameters to set all input values needed as parameters
    for name in required_inputs:
        resources.add_parameter(Parameter(project.inputs[name].reference_name(), Type="String"))

    return resources
105 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/templates/codepipeline.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Logic for building the CodePipeline stack template."""
14 | from collections import OrderedDict
15 | from typing import Dict
16 |
17 | from troposphere import GetAtt, Parameter, Ref, Tags, Template, cloudformation, codepipeline, iam, s3
18 |
19 | from pipeformer.identifiers import VALUE_SEPARATOR
20 | from pipeformer.internal.resolve import InputResolver
21 | from pipeformer.internal.structures import Config, Pipeline
22 | from pipeformer.internal.util import reference_name, resource_name
23 |
24 | from . import codebuild as codebuild_template, project_tags
25 |
26 | __all__ = ("build",)
27 | _ACTION_TYPE_IDS = {
28 | "GitHub": codepipeline.ActionTypeId(Category="Source", Owner="ThirdParty", Provider="GitHub", Version="1"),
29 | "CodeBuild": codepipeline.ActionTypeId(Category="Build", Owner="AWS", Provider="CodeBuild", Version="1"),
30 | "CloudFormation": codepipeline.ActionTypeId(Category="Deploy", Owner="AWS", Provider="CloudFormation", Version="1"),
31 | }
32 | _DEFAULT_PARAMETERS = (
33 | Parameter(reference_name(resource_name(s3.Bucket, "Artifacts"), "Name"), Type="String"),
34 | Parameter(reference_name(resource_name(s3.Bucket, "ProjectResources"), "Name"), Type="String"),
35 | Parameter(reference_name(resource_name(iam.Role, "CodePipeline"), "Arn"), Type="String"),
36 | Parameter(reference_name(resource_name(iam.Role, "CodeBuild"), "Arn"), Type="String"),
37 | Parameter(reference_name(resource_name(iam.Role, "CloudFormation"), "Arn"), Type="String"),
38 | )
39 |
40 |
def _action_configuration(action: InputResolver, stage_name: str, action_number: int) -> Dict[str, str]:
    """Compile a CloudFormation CodePipeline action configuration.

    :param action: PipeFormer action definition
    :param stage_name: Stage name
    :param action_number: Action counter
    :return: CloudFormation action configuration
    """
    codebuild_output = reference_name(codebuild_template.project_name(action_number), "Name")

    # Start from the provider-specific default configuration.
    if action.provider == "GitHub":
        config = dict(PollForSourceChanges=True)
    elif action.provider == "CodeBuild":
        config = dict(ProjectName=GetAtt(_codebuild_stage_name(stage_name), f"Outputs.{codebuild_output}"))
    elif action.provider == "CloudFormation":
        config = dict(RoleArn=Ref(reference_name(resource_name(iam.Role, "CloudFormation"), "Arn")))
    else:
        config = {}

    # expand and re-cast configuration to resolve references
    config.update(dict(**action.configuration))
    return config
60 |
61 |
def _stage_action(stage_name: str, action_number: int, action: InputResolver) -> codepipeline.Actions:
    """Construct a CodePipeline action resource.

    :param stage_name: Stage name
    :param action_number: Action counter
    :param action: PipeFormer action definition
    :return: CloudFormation action definition
    :raises ValueError: if ``action.provider`` is not a supported provider name
    """
    try:
        action_type_id = _ACTION_TYPE_IDS[action.provider]
    except KeyError as error:
        # Chain from the KeyError so tracebacks show the original lookup failure.
        raise ValueError(
            f'Unknown action provider "{action.provider}". Supported providers are: {list(_ACTION_TYPE_IDS.keys())!r}'
        ) from error

    kwargs = dict(
        Name=f"{stage_name}-{action_number}",
        RunOrder=action.run_order,
        ActionTypeId=action_type_id,
        Configuration=_action_configuration(action, stage_name, action_number),
    )

    # Artifact lists are optional; only set them when the action declares any.
    if action.inputs:
        kwargs["InputArtifacts"] = [codepipeline.InputArtifacts(Name=name) for name in action.inputs]

    if action.outputs:
        kwargs["OutputArtifacts"] = [codepipeline.OutputArtifacts(Name=name) for name in action.outputs]

    return codepipeline.Actions(**kwargs)
91 |
92 |
def _stage(stage: InputResolver) -> codepipeline.Stages:
    """Construct a CodePipeline stage resource.

    :param stage: PipeFormer stage definition
    :return: CloudFormation stage definition
    """
    # Action positions are significant: they drive the generated action names.
    actions = [_stage_action(stage.name, pos, action) for pos, action in enumerate(stage.actions)]
    return codepipeline.Stages(Name=stage.name, Actions=actions)
104 |
105 |
def _url_reference(stage_name) -> str:
    """Build a stage stack template URL reference logical resource name.

    :param stage_name: Stage name
    :return: Logical resource name
    """
    base = VALUE_SEPARATOR.join(("Template", "CodeBuild", "Stage", stage_name))
    return reference_name(base, "Url")
113 |
114 |
def _codebuild_stage_name(stage_name) -> str:
    """Build a CodeBuild stage logical resource name.

    :param stage_name: Stage name
    :return: Logical resource name
    """
    suffix = VALUE_SEPARATOR.join(("CodeBuild", "Stage", stage_name))
    return resource_name(cloudformation.Stack, suffix)
122 |
123 |
def _stack(
    project: Config, stage: InputResolver, stage_name: str, default_tags: Tags
) -> (cloudformation.Stack, Parameter):
    """Construct a nested CloudFormation stack template.

    :param project: PipeFormer project
    :param stage: Pipeline stage definition
    :param stage_name: Stage name
    :param default_tags: Default tags to add to resources
    :return: Constructed stack template and a parameter to add to the parent template.
    """
    # Pass through the shared resource references every stage stack needs.
    shared_references = (
        reference_name(resource_name(s3.Bucket, "ProjectResources"), "Name"),
        reference_name(resource_name(iam.Role, "CodeBuild"), "Arn"),
    )
    parameters = {name: Ref(name) for name in shared_references}

    # Forward every input value that this stage's actions require.
    for name in stage.required_inputs:
        input_reference = project.inputs[name].reference_name()
        parameters[input_reference] = Ref(input_reference)

    url_reference = _url_reference(stage_name)

    stack = cloudformation.Stack(
        _codebuild_stage_name(stage_name), TemplateURL=Ref(url_reference), Parameters=parameters, Tags=default_tags
    )
    # The URL is supplied by the parent template through this parameter.
    return stack, Parameter(url_reference, Type="String")
155 |
156 |
def build(project: Config) -> Pipeline:
    """Construct CodePipeline templates for a project.

    :param project: PipeFormer project
    :return: Constructed templates (the pipeline template plus one CodeBuild
        template per stage that contains CodeBuild actions)
    """
    pipeline_template = Template(Description=f"CodePipeline resources for pipeformer-managed project: {project.name}")

    # Add resource parameters
    for param in _DEFAULT_PARAMETERS:
        pipeline_template.add_parameter(param)

    required_inputs = set()

    default_tags = project_tags(project)

    stage_templates = OrderedDict()
    pipeline_stages = []
    for stage_name, stage in project.pipeline.items():
        # Wrap the stage so input references are tracked as they are resolved;
        # stage_loader.required_inputs is read after expansion below.
        stage_loader = InputResolver(wrapped=stage, inputs=project.inputs)

        stage_resources_template = codebuild_template.build(project, stage_loader)

        pipeline_stages.append(_stage(stage_loader))

        stack_resource, stack_parameter = _stack(project, stage_loader, stage_name, default_tags)

        required_inputs.update(stage_loader.required_inputs)

        # Only stages that actually contain CodeBuild projects get a nested stack
        # (and the template-URL parameter that goes with it).
        if stage_resources_template.resources:
            pipeline_template.add_resource(stack_resource)
            stage_templates[stage_name] = stage_resources_template
            pipeline_template.add_parameter(stack_parameter)

    # Add inputs parameters
    for name in required_inputs:
        pipeline_template.add_parameter(Parameter(project.inputs[name].reference_name(), Type="String"))

    # Add pipeline resource

    pipeline_resource = codepipeline.Pipeline(
        resource_name(codepipeline.Pipeline, project.name),
        ArtifactStore=codepipeline.ArtifactStore(
            Type="S3", Location=Ref(reference_name(resource_name(s3.Bucket, "Artifacts"), "Name"))
        ),
        RoleArn=Ref(reference_name(resource_name(iam.Role, "CodePipeline"), "Arn")),
        Stages=pipeline_stages,
    )
    pipeline_template.add_resource(pipeline_resource)

    return Pipeline(template=pipeline_template, stage_templates=stage_templates)
208 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/templates/core.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Logic for building the core stack template."""
14 | from typing import Any, Dict, Iterable, Optional
15 |
16 | from awacs import aws as AWS, kms as KMS
17 | from troposphere import GetAtt, Select, Split, Sub, Tags, Template, cloudformation, kms, s3
18 |
19 | from pipeformer.identifiers import VALUE_SEPARATOR
20 | from pipeformer.internal.structures import Config, Pipeline, WaitConditionStack
21 | from pipeformer.internal.util import account_arn, reference_name, resource_name
22 |
23 | from . import project_tags
24 |
25 | __all__ = ("build",)
26 |
27 |
def _project_key(project: Config) -> kms.Key:
    """Construct the AWS CMK that will be used to protect project resources.

    The key policy contains two statements scoped to this account's root
    principal: one granting data-protection (usage) permissions and one
    granting key-administration permissions.

    :param project: Source project
    :return: Constructed key
    """
    policy = AWS.PolicyDocument(
        Version="2012-10-17",
        Statement=[
            # Usage statement: encrypt/decrypt and data-key operations.
            AWS.Statement(
                Effect=AWS.Allow,
                Principal=AWS.Principal("AWS", account_arn("iam", "root")),
                Action=[
                    KMS.Encrypt,
                    KMS.Decrypt,
                    KMS.ReEncrypt,
                    KMS.GenerateDataKey,
                    KMS.GenerateDataKeyWithoutPlaintext,
                    KMS.DescribeKey,
                    KMS.GetKeyPolicy,
                ],
                Resource=["*"],
            ),
            # Administration statement: policy, alias, rotation, and lifecycle management.
            # TODO: Change admin statement to some other principal?
            AWS.Statement(
                Effect=AWS.Allow,
                Principal=AWS.Principal("AWS", account_arn("iam", "root")),
                Action=[
                    KMS.GetKeyPolicy,
                    KMS.PutKeyPolicy,
                    KMS.ScheduleKeyDeletion,
                    KMS.CancelKeyDeletion,
                    KMS.CreateAlias,
                    KMS.DeleteAlias,
                    KMS.UpdateAlias,
                    KMS.DescribeKey,
                    KMS.EnableKey,
                    KMS.DisableKey,
                    KMS.GetKeyRotationStatus,
                    KMS.EnableKeyRotation,
                    KMS.DisableKeyRotation,
                    KMS.ListKeyPolicies,
                    KMS.ListResourceTags,
                    KMS.TagResource,
                    KMS.UntagResource,
                ],
                Resource=["*"],
            ),
        ],
    )
    return kms.Key(
        resource_name(kms.Key, "Stack"),
        Enabled=True,
        EnableKeyRotation=False,
        KeyPolicy=policy,
        Tags=project_tags(project),
    )
85 |
86 |
def _bucket(name: str, cmk_arn: GetAtt, tags: Tags) -> s3.Bucket:
    """Construct a S3 bucket resource with default SSE-KMS using the specified CMK.

    :param name: Logical resource name
    :param cmk_arn: Reference to Arn of CMK resource
    :param tags: Tags to apply to bucket
    :return: Constructed S3 bucket resource
    """
    # Default server-side encryption for all objects: SSE-KMS with the project CMK.
    sse_default = s3.ServerSideEncryptionByDefault(SSEAlgorithm="aws:kms", KMSMasterKeyID=cmk_arn)
    encryption = s3.BucketEncryption(
        ServerSideEncryptionConfiguration=[s3.ServerSideEncryptionRule(ServerSideEncryptionByDefault=sse_default)]
    )
    return s3.Bucket(resource_name(s3.Bucket, name), BucketEncryption=encryption, Tags=tags)
108 |
109 |
def _wait_condition_data_to_s3_url(condition: cloudformation.WaitCondition, artifacts_bucket: s3.Bucket) -> Sub:
    """Build a CloudFormation ``Sub`` structure that resolves to the S3 key reported to a wait condition.

    :param condition: Wait condition to reference
    :param artifacts_bucket: Bucket to reference
    :return: ``Sub`` structure resolving to an HTTPS URL in the artifacts bucket
    """
    # The wait condition "Data" attribute is a JSON-encoded string.
    # Select(3, Split('"', ...)) pulls out the fourth quote-delimited token,
    # i.e. the first reported value, which is expected to be the S3 key.
    # NOTE(review): assumes exactly one signal whose value is the key — confirm
    # against whatever process signals the wait condition handle.
    return Sub(
        f"https://${{{artifacts_bucket.title}.DomainName}}/${{key}}",
        {"key": Select(3, Split('"', condition.get_att("Data")))},
    )
120 |
121 |
def _wait_condition(
    type_name: str, base_name: str
) -> (cloudformation.WaitCondition, cloudformation.WaitConditionHandle):
    """Construct a wait condition and handle.

    :param type_name: Label describing what is being waited on (e.g. "Template" or "Input"),
        included in the logical resource names
    :param base_name: Base logical name of the resource being waited on
    :return: Constructed wait condition and its handle
    """
    handle = cloudformation.WaitConditionHandle(VALUE_SEPARATOR.join(("Upload", type_name, base_name)))
    # Wait for exactly one signal; time out after 3600 seconds (one hour).
    condition = cloudformation.WaitCondition(
        VALUE_SEPARATOR.join(("WaitFor", handle.title)), Handle=handle.ref(), Count=1, Timeout=3600
    )
    return condition, handle
136 |
137 |
def _wait_condition_stack(
    base_name: str,
    parameters: Dict[str, Any],
    artifacts_bucket: s3.Bucket,
    tags: Tags,
    depends_on: Optional[Iterable] = None,
) -> WaitConditionStack:
    """Construct a wait-condition-managed stack.

    :param base_name: Name to use for base of logical names
    :param parameters: Stack parameters
    :param artifacts_bucket: Artifacts bucket resource
    :param tags: Tags to set on stack
    :param depends_on: Resources that stack will depend on (any iterable of logical names)
    :return: Constructed resources
    """
    # Normalize to a list so any iterable (tuple, generator, set) works, as the
    # ``Iterable`` annotation promises; list concatenation below requires it.
    depends_on = [] if depends_on is None else list(depends_on)

    condition, handle = _wait_condition("Template", base_name)
    stack = cloudformation.Stack(
        resource_name(cloudformation.Stack, base_name),
        # The template URL is reported through the wait condition, so the stack
        # must depend on that condition in addition to any caller-supplied dependencies.
        DependsOn=[condition.title] + depends_on,
        TemplateURL=_wait_condition_data_to_s3_url(condition, artifacts_bucket),
        Parameters=parameters,
        Tags=tags,
    )
    return WaitConditionStack(condition=condition, handle=handle, stack=stack)
166 |
167 |
def build(project: Config, inputs_template: Template, iam_template: Template, pipeline_templates: Pipeline) -> Template:
    """Construct a core stack template for a stand-alone deployment.

    :param project: PipeFormer config
    :param inputs_template: Inputs stack template
    :param iam_template: IAM stack template
    :param pipeline_templates: CodePipeline templates
    :return: Core stack template
    """
    default_tags = project_tags(project)

    template = Template(Description=f"Core resources for pipeformer-managed project: {project.name}")

    def _register_stack(wrapped: WaitConditionStack):
        """Add all three resources of a wait-condition stack to the core template."""
        template.add_resource(wrapped.condition)
        template.add_resource(wrapped.handle)
        template.add_resource(wrapped.stack)

    # Project CMK that protects all project resources.
    cmk = template.add_resource(_project_key(project))
    cmk_arn = cmk.get_att("Arn")

    # One bucket for pipeline artifacts, one for project resources.
    artifacts_bucket = template.add_resource(_bucket(name="Artifacts", cmk_arn=cmk_arn, tags=default_tags))
    project_bucket = template.add_resource(_bucket(name="ProjectResources", cmk_arn=cmk_arn, tags=default_tags))

    # Inputs Stack
    inputs_stack = _wait_condition_stack(
        base_name="Inputs",
        parameters={reference_name(cmk.title, "Arn"): cmk_arn},
        artifacts_bucket=artifacts_bucket,
        tags=default_tags,
    )
    _register_stack(inputs_stack)

    # IAM Stack
    iam_stack = _wait_condition_stack(
        base_name="Iam",
        parameters={
            reference_name(artifacts_bucket.title, "Arn"): artifacts_bucket.get_att("Arn"),
            reference_name(project_bucket.title, "Arn"): project_bucket.get_att("Arn"),
            reference_name(cmk.title, "Arn"): cmk_arn,
        },
        artifacts_bucket=artifacts_bucket,
        tags=default_tags,
    )
    _register_stack(iam_stack)

    # Pipeline Stack and Prerequisites
    pipeline_parameters = {
        # Buckets
        reference_name(artifacts_bucket.title, "Name"): artifacts_bucket.ref(),
        reference_name(project_bucket.title, "Name"): project_bucket.ref(),
    }
    pipeline_depends_on = []

    # Forward every output of the inputs and IAM stacks into the pipeline stack as parameters.
    for nested_template, nested_stack in ((inputs_template, inputs_stack), (iam_template, iam_stack)):
        pipeline_depends_on.append(nested_stack.stack.title)
        for output_name in nested_template.outputs.keys():
            pipeline_parameters[output_name] = GetAtt(nested_stack.stack.title, f"Outputs.{output_name}")

    # Add waiters for each pipeline stage resource stack template.
    for stage in pipeline_templates.stage_templates:
        stage_name = VALUE_SEPARATOR.join(("CodeBuild", "Stage", stage))
        stage_condition, stage_handle = _wait_condition("Template", stage_name)

        template.add_resource(stage_condition)
        template.add_resource(stage_handle)

        pipeline_depends_on.append(stage_condition.title)
        url_parameter = reference_name(VALUE_SEPARATOR.join(("Template", stage_name)), "Url")
        pipeline_parameters[url_parameter] = _wait_condition_data_to_s3_url(stage_condition, artifacts_bucket)

    # Waiter for the input values themselves.
    input_condition, input_handle = _wait_condition("Input", "Values")
    pipeline_depends_on.append(input_condition.title)
    template.add_resource(input_condition)
    template.add_resource(input_handle)

    pipeline_stack = _wait_condition_stack(
        base_name="Pipeline",
        parameters=pipeline_parameters,
        artifacts_bucket=artifacts_bucket,
        tags=default_tags,
        depends_on=pipeline_depends_on,
    )
    _register_stack(pipeline_stack)

    return template
260 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/templates/iam.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Logic for building the IAM stack template."""
14 | from awacs import (
15 | aws as AWS,
16 | awslambda as LAMBDA,
17 | cloudformation as CLOUDFORMATION,
18 | cloudwatch as CLOUDWATCH,
19 | codebuild as CODEBUILD,
20 | codepipeline as CODEPIPELINE,
21 | iam as IAM,
22 | kms as KMS,
23 | logs as LOGS,
24 | s3 as S3,
25 | sts as STS,
26 | )
27 | from awacs.helpers.trust import make_service_domain_name
28 | from troposphere import AWS_STACK_NAME, Output, Parameter, Sub, Template, iam, kms, s3
29 |
30 | from pipeformer.internal.structures import Config
31 | from pipeformer.internal.util import account_arn, reference_name, resource_name
32 |
33 | __all__ = ("build",)
34 |
35 |
def _policy_name(name: str):
    """Construct the policy name given a logical name.

    :param name: Logical name
    :returns: Stack-specific name resolver
    """
    # Prefix with the stack name so the policy name is unique per stack.
    return Sub("${" + AWS_STACK_NAME + "}-" + name)
43 |
44 |
def _cloudformation_role() -> iam.Role:
    """Construct a role for use by CloudFormation.

    :return: Constructed Role
    """
    trust = AWS.PolicyDocument(
        Statement=[
            AWS.Statement(
                Principal=AWS.Principal("Service", make_service_domain_name(CLOUDFORMATION.prefix)),
                Effect=AWS.Allow,
                Action=[STS.AssumeRole],
            )
        ]
    )
    # TODO: Figure out how to scope this down without breaking IAM
    # IAM policies break if there is a * in certain fields,
    # so this does not work:
    #   arn:PARTITION:*:REGION:ACCOUNT:*
    #
    # _desired_policy = AWS.PolicyDocument(
    #     Statement=[
    #         AWS.Statement(
    #             Effect=AWS.Allow,
    #             Action=[AWS.Action("*")],
    #             Resource=[
    #                 account_arn(service_prefix="*", resource="*"),
    #                 account_arn(service_prefix=S3.prefix, resource="*"),
    #                 account_arn(service_prefix=IAM.prefix, resource="*"),
    #             ],
    #         )
    #     ]
    # )
    admin_statement = AWS.Statement(Effect=AWS.Allow, Action=[AWS.Action("*")], Resource=["*"])
    return iam.Role(
        resource_name(iam.Role, "CloudFormation"),
        AssumeRolePolicyDocument=trust,
        Policies=[
            iam.Policy(
                PolicyName=_policy_name("CloudFormation"),
                PolicyDocument=AWS.PolicyDocument(Statement=[admin_statement]),
            )
        ],
    )
83 |
84 |
def _codepipeline_role(artifacts_bucket: Parameter, resources_bucket: Parameter, cmk: Parameter) -> iam.Role:
    """Construct a role for use by CodePipeline.

    :param artifacts_bucket: Artifacts bucket parameter
    :param resources_bucket: Resources bucket parameter
    :param cmk: KMS CMK parameter
    :return: Constructed Role
    """
    trust = AWS.PolicyDocument(
        Statement=[
            AWS.Statement(
                Principal=AWS.Principal("Service", make_service_domain_name(CODEPIPELINE.prefix)),
                Effect=AWS.Allow,
                Action=[STS.AssumeRole],
            )
        ]
    )
    statements = [
        # Bucket-level access to both project buckets.
        AWS.Statement(
            Effect=AWS.Allow,
            Action=[S3.GetBucketVersioning, S3.PutBucketVersioning],
            Resource=[artifacts_bucket.ref(), resources_bucket.ref()],
        ),
        # Object-level access within both project buckets.
        AWS.Statement(
            Effect=AWS.Allow,
            Action=[S3.GetObject, S3.PutObject],
            Resource=[Sub(f"${{{artifacts_bucket.title}}}/*"), Sub(f"${{{resources_bucket.title}}}/*")],
        ),
        # Use the project CMK to protect artifacts.
        AWS.Statement(
            Effect=AWS.Allow, Action=[KMS.Encrypt, KMS.Decrypt, KMS.GenerateDataKey], Resource=[cmk.ref()]
        ),
        AWS.Statement(
            Effect=AWS.Allow,
            Action=[CLOUDWATCH.Action("*")],
            Resource=[account_arn(service_prefix=CLOUDWATCH.prefix, resource="*")],
        ),
        # Hand roles to the services the pipeline drives.
        AWS.Statement(
            Effect=AWS.Allow,
            Action=[IAM.PassRole],
            Resource=[account_arn(service_prefix=IAM.prefix, resource="role/*")],
        ),
        AWS.Statement(
            Effect=AWS.Allow,
            Action=[LAMBDA.InvokeFunction, LAMBDA.ListFunctions],
            Resource=[account_arn(service_prefix=LAMBDA.prefix, resource="*")],
        ),
        # Stack and change-set management for deploy actions.
        AWS.Statement(
            Effect=AWS.Allow,
            Action=[
                CLOUDFORMATION.CreateStack,
                CLOUDFORMATION.DeleteStack,
                CLOUDFORMATION.DescribeStacks,
                CLOUDFORMATION.UpdateStack,
                CLOUDFORMATION.CreateChangeSet,
                CLOUDFORMATION.DeleteChangeSet,
                CLOUDFORMATION.DescribeChangeSet,
                CLOUDFORMATION.ExecuteChangeSet,
                CLOUDFORMATION.SetStackPolicy,
                CLOUDFORMATION.ValidateTemplate,
            ],
            Resource=[account_arn(service_prefix=CLOUDFORMATION.prefix, resource="*")],
        ),
        # Start and monitor CodeBuild jobs.
        AWS.Statement(
            Effect=AWS.Allow,
            Action=[CODEBUILD.BatchGetBuilds, CODEBUILD.StartBuild],
            Resource=[account_arn(service_prefix=CODEBUILD.prefix, resource="*")],
        ),
    ]
    return iam.Role(
        resource_name(iam.Role, "CodePipeline"),
        AssumeRolePolicyDocument=trust,
        Policies=[
            iam.Policy(
                PolicyName=_policy_name("CodePipeline"), PolicyDocument=AWS.PolicyDocument(Statement=statements)
            )
        ],
    )
160 |
161 |
def _codebuild_role(artifacts_bucket: Parameter, resources_bucket: Parameter, cmk: Parameter) -> iam.Role:
    """Construct a role for use by CodeBuild.

    :param artifacts_bucket: Artifacts bucket parameter
    :param resources_bucket: Resources bucket parameter
    :param cmk: KMS CMK parameter
    :return: Constructed Role
    """
    trust = AWS.PolicyDocument(
        Statement=[
            AWS.Statement(
                Principal=AWS.Principal("Service", make_service_domain_name(CODEBUILD.prefix)),
                Effect=AWS.Allow,
                Action=[STS.AssumeRole],
            )
        ]
    )
    statements = [
        # Write build logs to CloudWatch Logs.
        AWS.Statement(
            Effect=AWS.Allow,
            Action=[LOGS.CreateLogGroup, LOGS.CreateLogStream, LOGS.PutLogEvents],
            Resource=[account_arn(service_prefix=LOGS.prefix, resource="*")],
        ),
        # Read and write build artifacts in both project buckets.
        AWS.Statement(
            Effect=AWS.Allow,
            Action=[S3.GetObject, S3.GetObjectVersion, S3.PutObject],
            Resource=[Sub(f"${{{artifacts_bucket.title}}}/*"), Sub(f"${{{resources_bucket.title}}}/*")],
        ),
        # Use the project CMK to protect artifacts.
        AWS.Statement(
            Effect=AWS.Allow, Action=[KMS.Encrypt, KMS.Decrypt, KMS.GenerateDataKey], Resource=[cmk.ref()]
        ),
    ]
    return iam.Role(
        resource_name(iam.Role, "CodeBuild"),
        AssumeRolePolicyDocument=trust,
        Policies=[
            iam.Policy(PolicyName=_policy_name("CodeBuild"), PolicyDocument=AWS.PolicyDocument(Statement=statements))
        ],
    )
201 |
202 |
def build(project: Config) -> Template:
    """Build an IAM stack template for the provided project.

    :param project: Source project
    :return: Generated IAM stack template
    """
    resources = Template(Description=f"IAM resources for pipeformer-managed project: {project.name}")

    artifacts_bucket_arn = resources.add_parameter(
        Parameter(reference_name(resource_name(s3.Bucket, "Artifacts"), "Arn"), Type="String")
    )
    resources_bucket_arn = resources.add_parameter(
        Parameter(reference_name(resource_name(s3.Bucket, "ProjectResources"), "Arn"), Type="String")
    )
    cmk_arn = resources.add_parameter(Parameter(reference_name(resource_name(kms.Key, "Stack"), "Arn"), Type="String"))

    # Each role is added to the template and its Arn exported as a stack output.
    # The previously triplicated add_resource/add_output pattern is factored into one loop;
    # order is preserved: CloudFormation, CodePipeline, CodeBuild.
    roles = (
        _cloudformation_role(),
        _codepipeline_role(artifacts_bucket=artifacts_bucket_arn, resources_bucket=resources_bucket_arn, cmk=cmk_arn),
        _codebuild_role(artifacts_bucket=artifacts_bucket_arn, resources_bucket=resources_bucket_arn, cmk=cmk_arn),
    )
    for role in roles:
        added = resources.add_resource(role)
        resources.add_output(Output(reference_name(added.title, "Arn"), Value=added.get_att("Arn")))

    return resources
235 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/templates/inputs.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Logic for building the inputs stack template."""
14 | from troposphere import Output, Parameter, Ref, Tags, Template, kms, secretsmanager, ssm
15 |
16 | from pipeformer.internal.structures import Config, Input
17 | from pipeformer.internal.util import reference_name, resource_name
18 |
19 | from . import project_tags
20 |
21 | __all__ = ("build",)
22 |
23 |
def _secret_value(resource: Input, tags: Tags, cmk_arn: Ref) -> secretsmanager.Secret:
    """Construct a Secrets Manager secret to store the input value.

    :param resource: Input for which to create secret
    :param tags: Tags to set on secret
    :param cmk_arn: Key with which to protect secret
    :return: Constructed resource
    """
    # The placeholder secret string is replaced with the real value out-of-band.
    return secretsmanager.Secret(
        resource_name(secretsmanager.Secret, resource.name),
        KmsKeyId=cmk_arn,
        SecretString="REPLACEME",
        Tags=tags,
    )
35 |
36 |
def _standard_value(resource: Input) -> ssm.Parameter:
    """Construct a Parameter Store parameter containing the input value.

    :param resource: Input to store
    :return: Constructed resource
    """
    logical_name = resource_name(ssm.Parameter, resource.name)
    return ssm.Parameter(logical_name, Type="String", Value=resource.value)
44 |
45 |
def build(project: Config) -> Template:
    """Build an Inputs stack template from the provided project.

    :param project: Source project
    :return: Generated Inputs stack template
    """
    template = Template(Description=f"Input values for pipeformer-managed project: {project.name}")
    cmk = template.add_parameter(Parameter(reference_name(resource_name(kms.Key, "Stack"), "Arn"), Type="String"))

    default_tags = project_tags(project)

    for value in project.inputs.values():
        # Secret inputs go to Secrets Manager (exported by Arn);
        # everything else goes to Parameter Store (exported by Name).
        if value.secret:
            resource = _secret_value(resource=value, tags=default_tags, cmk_arn=cmk.ref())
            output_type = "Arn"
        else:
            resource = _standard_value(value)
            output_type = "Name"
        template.add_resource(resource)
        template.add_output(Output(reference_name(resource.title, output_type), Value=Ref(resource)))

    return template
68 |
--------------------------------------------------------------------------------
/src/pipeformer/internal/util.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Additional utilities."""
14 | import logging
15 | import time
16 | from typing import Dict
17 |
18 | import attr
19 | import awacs.iam
20 | import awacs.s3
21 | from attr.validators import deep_mapping, instance_of
22 | from botocore.exceptions import ClientError
23 | from troposphere import AWS_ACCOUNT_ID, AWS_PARTITION, AWS_REGION, AWSObject, Sub
24 |
25 | from pipeformer.identifiers import LOGGER_NAME, VALUE_SEPARATOR
26 |
27 | MAX_RESOURCE_ATTEMPTS: int = 20
28 | WAIT_PER_ATTEMPT: int = 5
29 | _LOGGER = logging.getLogger(LOGGER_NAME)
30 | __all__ = ("resource_name", "reference_name", "account_arn", "CloudFormationPhysicalResourceCache")
31 |
32 |
def resource_name(resource_type: AWSObject, name: str) -> str:
    """Build the resource logical name for use in stacks.

    :param resource_type: Resource type
    :param name: Naive logical name
    :return: Specific logical name
    """
    # e.g. "AWS::S3::Bucket" -> "Bucket", then joined with the project-wide separator.
    _, _, type_name = resource_type.resource_type.rpartition("::")
    return VALUE_SEPARATOR.join((type_name, name))
42 |
43 |
def reference_name(name: str, value_type: str) -> str:
    """Build the reference name for a resource. Used in stack outputs and parameters.

    :param name: Resource name
    :param value_type: Value type
    :return: Specific reference name
    """
    return f"{name}{VALUE_SEPARATOR}{value_type}"
52 |
53 |
def account_arn(service_prefix: str, resource: str) -> Sub:
    """Build an IAM policy Arn pattern scoped down as far as possible for the specified service.

    :param service_prefix: Service prefix string
    :param resource: Any resource data to finish Arn
    :return: Constructed Sub structure that will resolve to the scoped down Arn
    """
    # IAM Arns carry no region; S3 Arns carry neither region nor account id.
    region = "" if service_prefix in (awacs.iam.prefix, awacs.s3.prefix) else f"${{{AWS_REGION}}}"
    account_id = "" if service_prefix == awacs.s3.prefix else f"${{{AWS_ACCOUNT_ID}}}"

    return Sub(f"arn:${{{AWS_PARTITION}}}:{service_prefix}:{region}:{account_id}:{resource}")
72 |
73 |
@attr.s
class CloudFormationPhysicalResourceCache:
    """Cache for persistent information about CloudFormation stack resources.

    :param client: Pre-configured boto3 CloudFormation client
    :param stack_name: Name of target stack
    :param cache: Pre-populated cache mapping logical resource names to
        ``describe_stack_resource`` responses (optional)
    """

    _client = attr.ib()
    _stack_name: str = attr.ib(validator=instance_of(str))
    # Cached values are full ``describe_stack_resource`` response dicts, not bare physical
    # name strings: ``physical_resource_name`` stores the whole response and later reads
    # ``["StackResourceDetail"]["PhysicalResourceId"]`` from it. The previous value
    # validator (``instance_of(str)``) contradicted both the ``Dict[str, Dict]`` annotation
    # and that usage, so any pre-populated cache either failed validation (dict values)
    # or broke at lookup time (str values).
    _cache: Dict[str, Dict] = attr.ib(
        default=attr.Factory(dict),
        validator=deep_mapping(key_validator=instance_of(str), value_validator=instance_of(dict)),
    )

    def _describe_resource(self, logical_resource_name: str) -> Dict:
        """Describe the requested resource.

        :param logical_resource_name: Logical resource name of resource to describe
        :returns: result from ``describe_stack_resource`` call
        """
        return self._client.describe_stack_resource(StackName=self._stack_name, LogicalResourceId=logical_resource_name)

    def wait_until_resource_is_complete(self, logical_resource_name: str):
        """Wait until the specified resource reaches a CREATE_COMPLETE or UPDATE_COMPLETE state.

        :param logical_resource_name: Logical resource name of resource
        :raises Exception: if the resource reaches any status other than complete or in-progress
        """
        response = self.wait_until_resource_exists_in_stack(logical_resource_name)
        if not response["StackResourceDetail"].get("ResourceStatus", ""):
            response = self._wait_until_field_exists(logical_resource_name, "ResourceStatus")
        while True:
            status = response["StackResourceDetail"]["ResourceStatus"]
            _LOGGER.debug("Status of resource %s in stack %s is %s", logical_resource_name, self._stack_name, status)

            if status in ("CREATE_COMPLETE", "UPDATE_COMPLETE"):
                break
            elif status in ("CREATE_IN_PROGRESS", "UPDATE_IN_PROGRESS"):
                # Use the shared wait interval rather than a hard-coded sleep.
                time.sleep(WAIT_PER_ATTEMPT)
                response = self._describe_resource(logical_resource_name)
            else:
                raise Exception(f'Resource creation failed. Resource "{logical_resource_name}" status: "{status}"')

    def wait_until_resource_exists_in_stack(self, logical_resource_name: str) -> Dict:
        """Wait until the specified resource exists.

        :param logical_resource_name: Logical resource name of resource
        :returns: result from ``describe_stack_resource`` call
        :raises ClientError: if the resource does not appear within ``MAX_RESOURCE_ATTEMPTS`` attempts
        """
        resource_attempts = 1
        while True:
            _LOGGER.debug(
                "Waiting for creation of resource %s in stack %s to start. Attempt %d of %d",
                logical_resource_name,
                self._stack_name,
                resource_attempts,
                MAX_RESOURCE_ATTEMPTS,
            )
            try:
                return self._describe_resource(logical_resource_name)
            except ClientError as error:
                _LOGGER.debug('Encountered botocore ClientError: "%s"', error.response["Error"]["Message"])
                # Only "resource does not exist yet" is retryable; anything else is re-raised.
                if (
                    error.response["Error"]["Message"]
                    == f"Resource {logical_resource_name} does not exist for stack {self._stack_name}"
                ):
                    resource_attempts += 1

                    if resource_attempts > MAX_RESOURCE_ATTEMPTS:
                        raise
                else:
                    raise

            time.sleep(WAIT_PER_ATTEMPT)

    def _wait_until_field_exists(self, logical_resource_name: str, field_name: str) -> Dict:
        """Keep trying to describe a resource until it has the requested field.

        Wait ``WAIT_PER_ATTEMPT`` seconds between attempts, up to ``MAX_RESOURCE_ATTEMPTS`` attempts.

        :param logical_resource_name: Logical resource name of resource
        :param field_name: Field in resource details to wait for
        :returns: results from ``describe_stack_resource`` call
        :raises Exception: if the field does not appear within the attempt limit
        """
        resource_attempts = 1
        response = self.wait_until_resource_exists_in_stack(logical_resource_name)
        while not response.get("StackResourceDetail", {}).get(field_name, ""):
            # Previously the attempt counter was never incremented and no limit was applied,
            # so the "Attempt N of M" log always reported 1 and this loop could spin forever.
            if resource_attempts >= MAX_RESOURCE_ATTEMPTS:
                raise Exception(
                    f'Timed out waiting for field "{field_name}" on resource "{logical_resource_name}"'
                    f' in stack "{self._stack_name}"'
                )
            resource_attempts += 1
            time.sleep(WAIT_PER_ATTEMPT)

            _LOGGER.debug(
                "Waiting for resource %s in stack %s to have a value for field %s. Attempt %d of %d",
                logical_resource_name,
                self._stack_name,
                field_name,
                resource_attempts,
                MAX_RESOURCE_ATTEMPTS,
            )
            response = self._describe_resource(logical_resource_name)

        return response

    def physical_resource_name(self, logical_resource_name: str) -> str:
        """Find the physical resource name given its logical resource name.

        If the resource does not exist yet, wait until it does.

        :param logical_resource_name: Logical resource name of resource
        :returns: Physical resource id reported by CloudFormation
        """
        try:
            response = self._cache[logical_resource_name]  # attrs confuses pylint: disable=unsubscriptable-object
        except KeyError:
            response = self._wait_until_field_exists(
                logical_resource_name=logical_resource_name, field_name="PhysicalResourceId"
            )
            self._cache[  # attrs confuses pylint: disable=unsupported-assignment-operation
                logical_resource_name
            ] = response

        return response["StackResourceDetail"]["PhysicalResourceId"]
192 |
--------------------------------------------------------------------------------
/src/pylintrc:
--------------------------------------------------------------------------------
1 | [MESSAGE CONTROL]
# Disabling messages that we either don't care about or intentionally break.
3 | disable =
4 | bad-continuation, # we let black handle this
5 | ungrouped-imports, # we let black handle this
6 | no-member, # breaks with attrs
7 | too-few-public-methods, # does not allow data classes
8 | no-else-return, # we omit this on purpose for brevity where it would add no value
9 | attribute-defined-outside-init, # breaks with attrs_post_init
10 | redefined-outer-name, # we do this on purpose in multiple places
11 |
12 | [BASIC]
13 | # Allow function names up to 50 characters
14 | function-rgx = [a-z_][a-z0-9_]{2,50}$
15 | # Allow method names up to 50 characters
16 | method-rgx = [a-z_][a-z0-9_]{2,50}$
17 | # Allow class attribute names up to 50 characters
18 | # Whitelist class attribute names:
19 | class-attribute-rgx = (([A-Za-z_][A-Za-z0-9_]{2,50}|(__.*__))$)
20 | # Whitelist attribute names:
21 | attr-rgx = ([a-z_][a-z0-9_]{2,30}$)
22 | # Whitelist argument names: iv
23 | argument-rgx = ([a-z_][a-z0-9_]{2,30}$)|(^iv$)
24 | # Whitelist variable names: iv
25 | variable-rgx = ([a-z_][a-z0-9_]{2,30}$)|(^iv$)
26 |
27 | [VARIABLES]
28 | additional-builtins = raw_input
29 |
30 | [DESIGN]
31 | max-args = 10
32 |
33 | [FORMAT]
34 | max-line-length = 120
35 |
36 | [REPORTS]
37 | msg-template = {path}:{line}: [{msg_id}({symbol}), {obj}] {msg}
38 |
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Stub to allow relative imports between test groups."""
14 |
--------------------------------------------------------------------------------
/test/functional/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Stub to allow relative imports between test groups."""
14 |
--------------------------------------------------------------------------------
/test/functional/functional_test_utils.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Helper tools for use with functional tests."""
14 | import json
15 | import os
16 | from collections import OrderedDict
from typing import Dict, Tuple
18 |
19 | from troposphere.template_generator import TemplateGenerator
20 |
21 | from pipeformer.internal import structures, util
22 |
23 | _TEST_VECTORS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "vectors")
24 |
25 |
def vector_names():
    """Yield the absolute path of each directory directly under the test vectors tree."""
    for child in os.listdir(_TEST_VECTORS_DIR):
        candidate = os.path.abspath(os.path.join(_TEST_VECTORS_DIR, child))
        if os.path.isdir(candidate):
            yield candidate
31 |
32 |
def check_vector_exists(name: str):
    """Validate that the named test vector directory exists.

    :param name: Vector name to check
    :raises ValueError: if the vector directory is missing
    """
    vector_dir = os.path.join(_TEST_VECTORS_DIR, name)
    if not os.path.isdir(vector_dir):
        raise ValueError(f"Vector name {name!r} does not exist.")
36 |
37 |
def load_vector(vector_name: str, vector_type: str) -> Dict:
    """Load a JSON test vector file.

    :param vector_name: Vector directory name
    :param vector_type: Vector file base name (extension excluded)
    :return: Parsed vector contents
    """
    check_vector_exists(vector_name)

    vector_filename = os.path.join(_TEST_VECTORS_DIR, vector_name, f"{vector_type}.json")

    with open(vector_filename) as f:
        return json.load(f)
45 |
46 |
def load_vector_as_template(vector_name: str, vector_type: str) -> TemplateGenerator:
    """Load a JSON test vector as a troposphere template.

    :param vector_name: Vector directory name
    :param vector_type: Vector file base name (extension excluded)
    :return: Template generated from the vector contents
    """
    return TemplateGenerator(load_vector(vector_name, vector_type))
50 |
51 |
def load_vectors_as_pipeline_templates(vector_name: str) -> structures.Pipeline:
    """Load the CodePipeline template vector along with its per-stage CodeBuild template vectors.

    :param vector_name: Vector directory name
    :return: Pipeline structure with the pipeline template and an ordered mapping of stage templates
    """
    pipeline = load_vector_as_template(vector_name, "codepipeline")
    stages = OrderedDict()

    # Build the stage-parameter prefix from the canonical separator instead of hard-coding
    # "Template0CodeBuild0Stage0", so this stays in sync with pipeformer's VALUE_SEPARATOR.
    stage_prefix = util.VALUE_SEPARATOR.join(("Template", "CodeBuild", "Stage", ""))
    for param in pipeline.parameters:
        if param.startswith(stage_prefix):
            stage_name = param.split(util.VALUE_SEPARATOR)[3]
            stages[stage_name] = load_vector_as_template(vector_name, f"codebuild-{stage_name}")

    return structures.Pipeline(template=pipeline, stage_templates=stages)
62 |
63 |
def load_config(name: str) -> Tuple[str, Dict[str, str]]:
    """Load the config file path and input values for a named test vector.

    The original annotation ``(str, Dict[str, str])`` was a tuple literal, not a valid
    PEP 484 type hint; ``Tuple[...]`` is the correct form.

    :param name: Vector name
    :return: Tuple of (config file path, mapping of input name to input value)
    :raises ValueError: if the vector does not exist
    """
    check_vector_exists(name)

    config_filename = os.path.join(_TEST_VECTORS_DIR, name, "config.yaml")

    with open(os.path.join(_TEST_VECTORS_DIR, name, "config_inputs.json"), "r") as f:
        inputs = json.load(f)

    return config_filename, inputs
73 |
74 |
def populated_config(name: str) -> structures.Config:
    """Build a project config with its input values filled in from the vector's input file.

    :param name: Vector name
    :return: Populated project config
    """
    config_filename, inputs = load_config(name)

    project = structures.Config.from_file(config_filename)
    # attrs confuses pylint: disable=unsubscriptable-object
    for input_name, value in inputs.items():
        project.inputs[input_name].value = value

    return project
83 |
--------------------------------------------------------------------------------
/test/functional/internal/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Stub to allow relative imports between test groups."""
14 |
--------------------------------------------------------------------------------
/test/functional/internal/templates/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Stub to allow relative imports between test groups."""
14 |
--------------------------------------------------------------------------------
/test/functional/internal/templates/test_codebuild.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Functional tests for ``pipeformer.internal.templates.codebuild``."""
14 | import pytest
15 |
16 | from pipeformer.internal.resolve import InputResolver
17 | from pipeformer.internal.templates import codebuild
18 |
19 | from ... import functional_test_utils
20 |
21 | pytestmark = [pytest.mark.local, pytest.mark.functional]
22 |
23 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
@pytest.mark.parametrize("stage_name", ("build",))
def test_parse_config(name: str, stage_name: str):
    """Smoke test: a CodeBuild stage template builds from every parsed config vector."""
    config = functional_test_utils.populated_config(name)
    raw_stage = config.pipeline[stage_name]  # attrs confuses pylint: disable=unsubscriptable-object
    stage = InputResolver(wrapped=raw_stage, inputs=config.inputs)

    _test = codebuild.build(config, stage)
34 |
35 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
@pytest.mark.parametrize("stage_name", ("build",))
def test_generate_template(name: str, stage_name: str):
    """The generated CodeBuild stage template must match the stored vector exactly."""
    config = functional_test_utils.populated_config(name)
    raw_stage = config.pipeline[stage_name]  # attrs confuses pylint: disable=unsubscriptable-object
    stage = InputResolver(wrapped=raw_stage, inputs=config.inputs)
    expected = functional_test_utils.load_vector_as_template(name, f"codebuild-{stage_name}")

    test = codebuild.build(config, stage)

    assert test.to_json() == expected.to_json()
49 |
--------------------------------------------------------------------------------
/test/functional/internal/templates/test_codepipeline.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
"""Functional tests for ``pipeformer.internal.templates.codepipeline``."""
14 | import pytest
15 |
16 | from pipeformer.internal.templates import codepipeline
17 |
18 | from ... import functional_test_utils
19 |
20 | pytestmark = [pytest.mark.local, pytest.mark.functional]
21 |
22 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_parse_config(name: str):
    """The CodePipeline template builder must accept every known config vector."""
    _test = codepipeline.build(functional_test_utils.populated_config(name))
28 |
29 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_generate_template(name: str):
    """The generated CodePipeline template must match the expected vector."""
    project = functional_test_utils.populated_config(name)
    expected = functional_test_utils.load_vector_as_template(name, "codepipeline")

    actual = codepipeline.build(project)

    # Only the "build" stage requires a nested per-stage template.
    assert set(actual.stage_templates) == {"build"}
    assert actual.template.to_json() == expected.to_json()
39 |
--------------------------------------------------------------------------------
/test/functional/internal/templates/test_core.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Functional tests for ``pipeformer.internal.templates.core``."""
14 | import pytest
15 |
16 | from pipeformer.internal.templates import core
17 |
18 | from ... import functional_test_utils
19 |
20 | pytestmark = [pytest.mark.local, pytest.mark.functional]
21 |
22 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_parse_config(name: str):
    """The core template builder must accept every known config vector."""
    project = functional_test_utils.populated_config(name)
    inputs_template, iam_template = (
        functional_test_utils.load_vector_as_template(name, kind) for kind in ("inputs", "iam")
    )
    pipeline_templates = functional_test_utils.load_vectors_as_pipeline_templates(name)

    _test = core.build(project, inputs_template, iam_template, pipeline_templates)
31 |
32 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_generate_template(name: str):
    """The generated core template must match the expected vector."""
    project = functional_test_utils.populated_config(name)
    expected = functional_test_utils.load_vector_as_template(name, "core")
    inputs_template, iam_template = (
        functional_test_utils.load_vector_as_template(name, kind) for kind in ("inputs", "iam")
    )
    pipeline_templates = functional_test_utils.load_vectors_as_pipeline_templates(name)

    actual = core.build(project, inputs_template, iam_template, pipeline_templates)

    assert actual.to_json() == expected.to_json()
44 |
--------------------------------------------------------------------------------
/test/functional/internal/templates/test_iam.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Functional tests for ``pipeformer.internal.templates.iam``."""
14 | import pytest
15 |
16 | from pipeformer.internal.templates import iam
17 |
18 | from ... import functional_test_utils
19 |
20 | pytestmark = [pytest.mark.local, pytest.mark.functional]
21 |
22 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_parse_config(name: str):
    """The IAM template builder must accept every known config vector."""
    _test = iam.build(functional_test_utils.populated_config(name))
28 |
29 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_generate_template(name: str):
    """The generated IAM template must match the expected vector."""
    project = functional_test_utils.populated_config(name)
    expected_json = functional_test_utils.load_vector_as_template(name, "iam").to_json()

    actual = iam.build(project)

    assert actual.to_json() == expected_json
38 |
--------------------------------------------------------------------------------
/test/functional/internal/templates/test_inputs.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Functional tests for ``pipeformer.internal.templates.inputs``."""
14 | import pytest
15 |
16 | from pipeformer.internal.templates import inputs
17 |
18 | from ... import functional_test_utils
19 |
20 | pytestmark = [pytest.mark.local, pytest.mark.functional]
21 |
22 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_parse_config(name: str):
    """The inputs template builder must accept every known config vector."""
    _test = inputs.build(functional_test_utils.populated_config(name))
28 |
29 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_generate_template(name: str):
    """The generated inputs template must match the expected vector."""
    project = functional_test_utils.populated_config(name)
    expected_json = functional_test_utils.load_vector_as_template(name, "inputs").to_json()

    actual = inputs.build(project)

    assert actual.to_json() == expected_json
38 |
--------------------------------------------------------------------------------
/test/functional/internal/test_structures.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Functional tests for ``pipeformer.internal.structures``."""
14 | import pytest
15 |
16 | from pipeformer.internal.structures import Config
17 |
18 | from .. import functional_test_utils
19 |
20 | pytestmark = [pytest.mark.local, pytest.mark.functional]
21 |
22 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_load_config(name: str):
    """``Config.from_file`` must successfully parse every known config vector file."""
    filename, _expected_inputs = functional_test_utils.load_config(name)

    _parsed = Config.from_file(filename)
28 |
--------------------------------------------------------------------------------
/test/functional/internal/test_template_builder.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Functional tests for ``pipeformer.internal.template_builder``."""
14 | import pytest
15 |
16 | from pipeformer.internal import template_builder
17 |
18 | from .. import functional_test_utils
19 |
20 | pytestmark = [pytest.mark.local, pytest.mark.functional]
21 |
22 |
@pytest.mark.parametrize("name", functional_test_utils.vector_names())
def test_parse_config(name: str):
    """``config_to_templates`` must accept every known config vector."""
    _test = template_builder.config_to_templates(functional_test_utils.populated_config(name))
28 |
--------------------------------------------------------------------------------
/test/pylintrc:
--------------------------------------------------------------------------------
1 | [MESSAGES CONTROL]
2 | # Disabling messages that we either don't care about for tests or are necessary to break for tests.
3 | disable =
4 | bad-continuation, # we let black handle this
5 | ungrouped-imports, # we let black handle this
6 | no-member, # breaks with attrs
    invalid-name,  # naming in tests often needs to violate many common rules
8 | too-few-public-methods, # common when setting up mock classes
9 | redefined-outer-name, # raises false positives with fixtures
10 | missing-docstring, # we don't write docstrings for tests
11 | abstract-class-instantiated, # we do this on purpose to test that they are enforced
12 | duplicate-code, # unit tests for similar things tend to be similar
13 | protected-access, # raised when calling _ methods
14 | abstract-method, # we do this on purpose to test that they are enforced
15 | unused-argument, # raised when patches are needed but not called
16 | no-self-use, # raised when pytest tests are grouped in classes
17 | too-many-public-methods, # raised when pytest tests are grouped in classes
18 |
19 | [VARIABLES]
20 | additional-builtins = raw_input
21 |
22 | [DESIGN]
23 | max-args = 10
24 |
25 | [FORMAT]
26 | max-line-length = 120
27 |
28 | [REPORTS]
29 | msg-template = {path}:{line}: [{msg_id}({symbol}), {obj}] {msg}
30 |
--------------------------------------------------------------------------------
/test/requirements.txt:
--------------------------------------------------------------------------------
1 | mock
2 | pytest>=3.3.1
3 | pytest-cov
4 | pytest-mock
5 |
--------------------------------------------------------------------------------
/test/source-build-check.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Verify that tests can be successfully run from the source build.
#
# NOTE: Humans should not run this file directly. If you want to run this check, run "tox -e sourcebuildcheck".

# Fail fast: without this, a failed copy/extract/install step was silently
# ignored and the check could "pass" without actually running the tests.
set -euo pipefail

WORKINGDIR="$1"
DISTDIR="$2"

echo "Locating the source build and copying it into the working directory."
# Pick the last-sorting matching artifact; quote expansions so paths with spaces work.
DISTFILE="$(ls "${DISTDIR}"/pipeformer-*.tar.gz | tail -1)"
cp "${DISTFILE}" "${WORKINGDIR}"
DISTFILE="$(ls "${WORKINGDIR}"/pipeformer-*.tar.gz | tail -1)"

echo "Extracting the source build."
cd "${WORKINGDIR}"
tar xzvf "${DISTFILE}"
rm "${DISTFILE}"
# The tarball was just removed, so the extracted directory should be what remains.
# NOTE(review): assumes the working directory contains no other late-sorting entries.
EXTRACTEDDIR="$(ls | tail -1)"
cd "${EXTRACTEDDIR}"

echo "Installing requirements from extracted source build."
pip install -r test/requirements.txt
pip install -e .

echo "Running tests from extracted source build."
pytest --cov pipeformer -m local
27 |
--------------------------------------------------------------------------------
/test/unit/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Stub to allow relative imports between test groups."""
14 |
--------------------------------------------------------------------------------
/test/unit/internal/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Stub to allow relative imports between test groups."""
14 |
--------------------------------------------------------------------------------
/test/unit/internal/test_resolve.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Unit tests for ``pipeformer.internal.resolve``."""
14 | import itertools
15 | import json
16 | import uuid
17 | from collections import namedtuple
18 | from typing import Union
19 |
20 | import pytest
21 | from troposphere import Join, Ref, Sub
22 |
23 | from pipeformer.internal.resolve import _PRIMITIVE_TYPES, InputResolver
24 | from pipeformer.internal.structures import Input
25 |
26 | pytestmark = [pytest.mark.local]
27 |
# Minimal single-field wrapper used to exercise attribute resolution.
Example = namedtuple("Example", ("value",))
# Shared fixture inputs: one plain value (SSM parameter) and one secret (Secrets Manager).
INPUTS = {
    "ExampleOne": Input(name="ExampleOne", description="Example number one", secret=False),
    "ExampleTwo": Input(name="ExampleTwo", description="Example number two", secret=True),
}
33 |
34 |
def resolved_strings():
    """Yield ``(raw, resolved)`` pairs.

    Covers both input kinds with every prefix/suffix combination, then three
    strings containing no complete input reference, which must pass through
    unchanged.
    """
    for prefix, suffix in itertools.product(("", "prefix"), ("", "suffix")):
        plain = Sub("{{resolve:ssm:${name}:1}}", {"name": Ref("Parameter0ExampleOne0Name")})
        yield f"{prefix}{{INPUT:ExampleOne}}{suffix}", Join("", [prefix, plain, suffix])

        secret = Sub("{{resolve:secretsmanager:${arn}:SecretString}}", {"arn": Ref("Secret0ExampleTwo0Arn")})
        yield f"{prefix}{{INPUT:ExampleTwo}}{suffix}", Join("", [prefix, secret, suffix])

    for untouched in ("NoInput", "{INPUT:Broken", "PUT:Broken}"):
        yield untouched, untouched
55 |
56 |
def resolved_dicts():
    """Build parallel dicts mapping random UUID keys to raw and resolved values."""
    sources, resolutions = {}, {}

    for raw, resolved in resolved_strings():
        key = str(uuid.uuid4())
        sources[key] = raw
        resolutions[key] = resolved

    return sources, resolutions
67 |
68 |
69 | def _normalize_joins(source):
70 | try:
71 | return json.dumps(source.to_dict(), sort_keys=True)
72 | except AttributeError:
73 | return source
74 |
75 |
# Materialized fixtures shared by the parametrized tests below.
SOURCE_DICT, RESOLVED_DICT = resolved_dicts()
# Canonical (JSON-normalized) forms of every expected resolved value.
RESOLVED_VALUES = [_normalize_joins(value) for value in RESOLVED_DICT.values()]
# Inverted map used to exercise resolution of dictionary *keys* as well as values.
SOURCE_INVERSE = {value: key for key, value in SOURCE_DICT.items()}
# NOTE(review): RESOLVED_INVERSE does not appear to be referenced by any test in this
# module — confirm whether it can be removed.
RESOLVED_INVERSE = {value: key for key, value in RESOLVED_DICT.items()}
80 |
81 |
def _invert_dict(source):
    """Invert ``source``, normalizing each value so it can serve as a dict key."""
    inverted = {}
    for key, value in source.items():
        inverted[_normalize_joins(value)] = key
    return inverted
84 |
85 |
def _assert_resolved(actual, expected):
    """Assert equality after canonicalizing any troposphere objects on either side."""
    left, right = _normalize_joins(actual), _normalize_joins(expected)
    assert left == right
88 |
89 |
def _assert_converted(value):
    """Assert that ``value`` was unwrapped to one of the expected output types."""
    allowed_types = (str, Join, InputResolver) + _PRIMITIVE_TYPES
    assert isinstance(value, allowed_types)
92 |
93 |
class TestInputResolver:
    """Unit tests for ``InputResolver`` wrapping, resolution, and proxy behaviors."""

    # Wrapping an InputResolver in another InputResolver must be rejected.
    def test_recurse(self):
        test = InputResolver("asdf", INPUTS)

        with pytest.raises(TypeError) as excinfo:
            InputResolver(test, INPUTS)

        excinfo.match(f"{InputResolver!r} cannot wrap itself.")

    # Attribute access resolves input references embedded in string values.
    @pytest.mark.parametrize("source, expected", resolved_strings())
    def test_attribute_string(self, source: str, expected: Union[str, Join]):
        wrapped = Example(value=source)
        resolver = InputResolver(wrapped, INPUTS)

        test = resolver.value

        _assert_resolved(test, expected)

    # required_inputs is populated exactly when a value was actually resolved.
    @pytest.mark.parametrize("source, _expected", resolved_strings())
    def test_required_inputs_resolution(self, source, _expected):
        wrapped = Example(value=source)
        resolver = InputResolver(wrapped, INPUTS)

        test = resolver.value

        if source != test:
            assert resolver.required_inputs
        else:
            assert not resolver.required_inputs

    # Expanding a wrapped dict with ** yields a plain dict with resolved values.
    def test_expand_and_resolve_dict(self):
        source = {"a": "{INPUT:ExampleTwo}"}
        expected = {
            "a": Join(
                "",
                ["", Sub("{{resolve:secretsmanager:${arn}:SecretString}}", {"arn": Ref("Secret0ExampleTwo0Arn")}), ""],
            )
        }
        resolver = InputResolver(source, INPUTS)

        test = dict(**resolver)
        assert not isinstance(test, InputResolver)
        assert test["a"].to_dict() == expected["a"].to_dict()

    # required_inputs accumulates across nested resolvers returned from attributes.
    def test_transitive_required_inputs(self):
        Test = namedtuple("Test", ("value_1", "value_2"))
        values = Test(value_1="{INPUT:ExampleOne}", value_2={"a": "{INPUT:ExampleTwo}"})

        resolver = InputResolver(wrapped=values, inputs=INPUTS)

        _resolve_example_one = resolver.value_1

        assert resolver.required_inputs == {"ExampleOne"}

        extract_dictionary = resolver.value_2

        assert isinstance(extract_dictionary, InputResolver)

        _resolve_values = dict(**extract_dictionary)

        assert resolver.required_inputs == {"ExampleOne", "ExampleTwo"}

    # str() is proxied through to the wrapped value.
    def test_str(self):
        resolver = InputResolver("test", INPUTS)

        assert str(resolver) == "test"

    # Primitive attribute values are returned unwrapped, preserving their exact type.
    @pytest.mark.parametrize("value", (42, 42.0, complex(42), False, True, None))
    def test_attribute_primitive_types(self, value):
        source = Example(value=value)
        resolver = InputResolver(source, INPUTS)

        test = resolver.value

        assert not isinstance(test, InputResolver)
        assert type(test) is type(value)
        assert test == value

    def test_attribute_other(self):
        source = Example(value=42)
        resolver = InputResolver(source, INPUTS)

        test = resolver.value

        _assert_converted(test)
        assert test == 42

    # Comparison operators are proxied, both between resolvers and against raw values.
    def test_equality(self):
        a = InputResolver(42, INPUTS)
        b = InputResolver(76, INPUTS)
        c = InputResolver(42, INPUTS)

        assert a < b
        assert b > c
        assert a == c
        assert a != b
        assert b >= c
        assert a >= c
        assert a <= b
        assert a <= c

        assert a == 42
        assert a != 99
        assert a > 8
        assert a >= 42
        assert a >= 8
        assert a < 99
        assert a <= 42

    # Item access ([]) is proxied and results are converted.
    def test_item(self):
        source = {"a": 42}
        resolver = InputResolver(source, INPUTS)

        test = resolver["a"]

        _assert_converted(test)
        assert test == 42

    # len() is proxied through to the wrapped value.
    def test_len(self):
        source = "asdf"
        resolver = InputResolver(source, INPUTS)

        assert len(resolver) == len(source)

    # Calling a wrapped callable resolves its return value.
    @pytest.mark.parametrize("source, expected", resolved_strings())
    def test_call(self, source, expected):
        def example():
            return source

        resolver = InputResolver(example, INPUTS)

        _assert_resolved(resolver(), expected)

    # Iteration is proxied and each yielded element is converted.
    @pytest.mark.parametrize("pos", list(range(4)))
    def test_iter(self, pos: int):
        source = [1, 2, 3, 4]
        resolver = InputResolver(source, INPUTS)

        test = [i for i in resolver]

        _assert_converted(test[pos])
        assert test[pos] == source[pos]

    # next() on a wrapped iterator yields converted elements.
    def test_next(self):
        source = iter([1, 2, 3, 4])
        resolver = InputResolver(source, INPUTS)

        a = next(resolver)
        _assert_converted(a)

    # reversed() is proxied and preserves the reversed ordering.
    @pytest.mark.parametrize("pos", list(range(4)))
    def test_reversed(self, pos: int):
        source = [1, 2, 3, 4]
        rev_source = list(reversed(source))
        resolver = InputResolver(source, INPUTS)

        test = [i for i in reversed(resolver)]

        _assert_converted(test[pos])
        assert test[pos] == rev_source[pos]

    # inputs must be a mapping of names to Input instances; plain strings are rejected.
    def test_invalid_inputs_value(self):
        with pytest.raises(TypeError) as _excinfo:
            InputResolver("test", {"a": "b"})

    # Wrapped objects may not already define "required_inputs" (it would be shadowed).
    def test_invalid_wrapped_has_required_inputs(self):
        Invalid = namedtuple("Invalid", ("required_inputs",))

        test = Invalid(required_inputs="asdf")

        with pytest.raises(TypeError) as excinfo:
            InputResolver(test, inputs=INPUTS)

        excinfo.match(r'Wrapped object must not have "required_inputs" attribute.')

    # dict.get() is proxied and its result is resolved.
    @pytest.mark.parametrize("key", SOURCE_DICT.keys())
    def test_get(self, key):
        resolver = InputResolver(SOURCE_DICT, INPUTS)

        test = resolver.get(key)

        _assert_resolved(test, RESOLVED_DICT[key])

    # dict.values() yields converted, resolved values.
    def test_resolve_values(self):
        resolver = InputResolver(SOURCE_DICT, INPUTS)

        for each in resolver.values():
            _assert_converted(each)
            assert _normalize_joins(each) in RESOLVED_VALUES

    # dict.keys() are resolved too (SOURCE_INVERSE keys are the raw source strings).
    def test_resolve_keys(self):
        resolver = InputResolver(SOURCE_INVERSE, INPUTS)

        for each in resolver.keys():
            _assert_converted(each)
            assert _normalize_joins(each) in RESOLVED_VALUES

    # dict.items() resolves both keys and values regardless of orientation.
    @pytest.mark.parametrize("source", (SOURCE_DICT, SOURCE_INVERSE))
    def test_resolve_items(self, source):
        resolver = InputResolver(source, INPUTS)

        for key, value in resolver.items():
            _assert_converted(key)
            assert _normalize_joins(key) in list(RESOLVED_DICT.keys()) + RESOLVED_VALUES

            _assert_converted(value)
            assert _normalize_joins(value) in list(RESOLVED_DICT.keys()) + RESOLVED_VALUES
301 |
--------------------------------------------------------------------------------
/test/unit/internal/test_util.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"). You
4 | # may not use this file except in compliance with the License. A copy of
5 | # the License is located at
6 | #
7 | # http://aws.amazon.com/apache2.0/
8 | #
9 | # or in the "license" file accompanying this file. This file is
10 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 | # ANY KIND, either express or implied. See the License for the specific
12 | # language governing permissions and limitations under the License.
13 | """Unit tests for ``pipeformer.internal.util``."""
14 | import pytest
15 | from troposphere import Sub, kms
16 |
17 | from pipeformer.internal.util import account_arn, reference_name, resource_name
18 |
# NOTE(review): this module lives under test/unit but is marked "functional" as well as
# "local" — compare test/unit/internal/test_resolve.py, which uses only "local".
# Confirm whether the "functional" marker here is intentional or a copy/paste artifact.
pytestmark = [pytest.mark.local, pytest.mark.functional]
20 |
21 |
def test_resource_name():
    """Resource logical names are built as "<ResourceType>0<Name>"."""
    actual = resource_name(kms.Key, "ExampleKey")

    assert actual == "Key0ExampleKey"
24 |
25 |
def test_reference_name():
    """Reference names are built as "<Name>0<Type>"."""
    actual = reference_name("ExampleName", "ExampleType")

    assert actual == "ExampleName0ExampleType"
28 |
29 |
@pytest.mark.parametrize(
    "service_prefix, resource, expected",
    (
        (
            "kms",
            "alias/ExampleAlias",
            Sub("arn:${AWS::Partition}:kms:${AWS::Region}:${AWS::AccountId}:alias/ExampleAlias"),
        ),
        ("s3", "ExampleBucket", Sub("arn:${AWS::Partition}:s3:::ExampleBucket")),
        ("iam", "role/ExampleRole", Sub("arn:${AWS::Partition}:iam::${AWS::AccountId}:role/ExampleRole")),
    ),
)
def test_account_arn(service_prefix, resource, expected):
    """account_arn must build the correctly scoped ARN for each service."""
    actual = account_arn(service_prefix, resource)

    # Compare dict forms: troposphere objects are canonicalized via to_dict().
    assert actual.to_dict() == expected.to_dict()
48 |
--------------------------------------------------------------------------------
/test/vectors/README.rst:
--------------------------------------------------------------------------------
1 | *******************
2 | Static Test Vectors
3 | *******************
4 |
5 | Any static test vectors should be placed in this directory.
6 |
--------------------------------------------------------------------------------
/test/vectors/chalice/codebuild-build.json:
--------------------------------------------------------------------------------
1 | {
2 | "Description": "CodeBuild projects for build stage in pipeformer-managed project: ExampleChaliceApp",
3 | "Parameters": {
4 | "Bucket0ProjectResources0Name": {"Type": "String"},
5 | "Role0CodeBuild0Arn": {"Type": "String"}
6 | },
7 | "Resources": {
8 | "Project0a": {
9 | "Type": "AWS::CodeBuild::Project",
10 | "Properties": {
11 | "Name": {"Fn::Sub": ["${AWS::StackName}-Project0a"]},
12 | "ServiceRole": {"Ref": "Role0CodeBuild0Arn"},
13 | "Artifacts": {
14 | "Type": "CODEPIPELINE"
15 | },
16 | "Source": {
17 | "Type": "CODEPIPELINE",
18 | "BuildSpec": ".chalice/buildspec.yaml"
19 | },
20 | "Environment": {
21 | "ComputeType": "BUILD_GENERAL1_SMALL",
22 | "Type": "LINUX_CONTAINER",
23 | "Image": "aws/codebuild/python:3.6.5",
24 | "EnvironmentVariables": [
25 | {
26 | "Name": "PIPEFORMER_S3_BUCKET",
27 | "Value": {"Ref": "Bucket0ProjectResources0Name"}
28 | },
29 | {
30 | "Name": "key1",
31 | "Value": "value2"
32 | },
33 | {
34 | "Name": "key3",
35 | "Value": "value4"
36 | }
37 | ]
38 | },
39 | "Tags": [
40 | {
41 | "Key": "pipeformer",
42 | "Value": "ExampleChaliceApp"
43 | }
44 | ]
45 | }
46 | }
47 | },
48 | "Outputs": {
49 | "Project0a0Name": {
50 | "Value": {"Ref": "Project0a"}
51 | }
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/test/vectors/chalice/codepipeline.json:
--------------------------------------------------------------------------------
1 | {
2 | "Description": "CodePipeline resources for pipeformer-managed project: ExampleChaliceApp",
3 | "Parameters": {
4 | "Bucket0Artifacts0Name": {"Type": "String"},
5 | "Bucket0ProjectResources0Name": {"Type": "String"},
6 | "Role0CloudFormation0Arn": {"Type": "String"},
7 | "Role0CodeBuild0Arn": {"Type": "String"},
8 | "Role0CodePipeline0Arn": {"Type": "String"},
9 | "Parameter0GitHubOwner0Name": {"Type": "String"},
10 | "Secret0GitHubToken0Arn": {"Type": "String"},
11 | "Template0CodeBuild0Stage0build0Url": {"Type": "String"}
12 | },
13 | "Resources": {
14 | "Stack0CodeBuild0Stage0build": {
15 | "Type": "AWS::CloudFormation::Stack",
16 | "Properties": {
17 | "TemplateURL": {"Ref": "Template0CodeBuild0Stage0build0Url"},
18 | "Parameters": {
19 | "Bucket0ProjectResources0Name": {"Ref": "Bucket0ProjectResources0Name"},
20 | "Role0CodeBuild0Arn": {"Ref": "Role0CodeBuild0Arn"}
21 | },
22 | "Tags": [
23 | {
24 | "Key": "pipeformer",
25 | "Value": "ExampleChaliceApp"
26 | }
27 | ]
28 | }
29 | },
30 | "Pipeline0ExampleChaliceApp": {
31 | "Type": "AWS::CodePipeline::Pipeline",
32 | "Properties": {
33 | "ArtifactStore": {
34 | "Type": "S3",
35 | "Location": {"Ref": "Bucket0Artifacts0Name"}
36 | },
37 | "RoleArn": {"Ref": "Role0CodePipeline0Arn"},
38 | "Stages": [
39 | {
40 | "Name": "source",
41 | "Actions": [
42 | {
43 | "Name": "source-0",
44 | "RunOrder": 1,
45 | "ActionTypeId": {
46 | "Category": "Source",
47 | "Owner": "ThirdParty",
48 | "Provider": "GitHub",
49 | "Version": "1"
50 | },
51 | "Configuration": {
52 | "Owner": {
53 | "Fn::Join": [
54 | "",
55 | [
56 | "",
57 | {
58 | "Fn::Sub": [
59 | "{{resolve:ssm:${name}:1}}",
60 | {
61 | "name": {"Ref": "Parameter0GitHubOwner0Name"}
62 | }
63 | ]
64 | },
65 | ""
66 | ]
67 | ]
68 | },
69 | "Repo": "example",
70 | "Branch": "master",
71 | "OAuthToken": {
72 | "Fn::Join": [
73 | "",
74 | [
75 | "",
76 | {
77 | "Fn::Sub": [
78 | "{{resolve:secretsmanager:${arn}:SecretString}}",
79 | {
80 | "arn": {"Ref": "Secret0GitHubToken0Arn"}
81 | }
82 | ]
83 | },
84 | ""
85 | ]
86 | ]
87 | },
88 | "PollForSourceChanges": true
89 | },
90 | "OutputArtifacts": [
91 | {"Name": "SourceOutput"}
92 | ]
93 | }
94 | ]
95 | },
96 | {
97 | "Name": "build",
98 | "Actions": [
99 | {
100 | "Name": "build-0",
101 | "RunOrder": 1,
102 | "ActionTypeId": {
103 | "Category": "Build",
104 | "Owner": "AWS",
105 | "Provider": "CodeBuild",
106 | "Version": "1"
107 | },
108 | "Configuration": {
109 | "ProjectName": {"Fn::GetAtt": ["Stack0CodeBuild0Stage0build", "Outputs.Project0a0Name"]}
110 | },
111 | "InputArtifacts": [
112 | {"Name": "SourceOutput"}
113 | ],
114 | "OutputArtifacts": [
115 | {"Name": "CompiledCfnTemplate"}
116 | ]
117 | }
118 | ]
119 | },
120 | {
121 | "Name": "deploy",
122 | "Actions": [
123 | {
124 | "Name": "deploy-0",
125 | "RunOrder": 1,
126 | "ActionTypeId": {
127 | "Category": "Deploy",
128 | "Owner": "AWS",
129 | "Provider": "CloudFormation",
130 | "Version": "1"
131 | },
132 | "Configuration": {
133 | "ActionMode": "CHANGE_SET_REPLACE",
134 | "Capabilities": "CAPABILITY_IAM",
135 | "ChangeSetName": "ExampleChaliceAppChangeSet",
136 | "RoleArn": {"Ref": "Role0CloudFormation0Arn"},
137 | "StackName": "ExampleChaliceAppStack",
138 | "TemplatePath": "CompiledCfnTemplate::transformed.yaml"
139 | },
140 | "InputArtifacts": [
141 | {"Name": "CompiledCfnTemplate"}
142 | ]
143 | },
144 | {
145 | "Name": "deploy-1",
146 | "RunOrder": 2,
147 | "ActionTypeId": {
148 | "Category": "Deploy",
149 | "Owner": "AWS",
150 | "Provider": "CloudFormation",
151 | "Version": "1"
152 | },
153 | "Configuration": {
154 | "ActionMode": "CHANGE_SET_EXECUTE",
155 | "ChangeSetName": "ExampleChaliceAppChangeSet",
156 | "RoleArn": {"Ref": "Role0CloudFormation0Arn"},
157 | "StackName": "ExampleChaliceAppStack",
158 | "OutputFileName": "StackOutputs.json"
159 | }
160 | }
161 | ]
162 | }
163 | ]
164 | }
165 | }
166 | }
167 | }
168 |
--------------------------------------------------------------------------------
/test/vectors/chalice/config.yaml:
--------------------------------------------------------------------------------
1 | name: ExampleChaliceApp
2 | description: Example pipeline for deploying a Chalice application.
3 | generate-cmk: true
4 |
5 | inputs:
6 | GitHubToken:
7 | description: GitHub user access token that CodePipeline will use to authenticate to GitHub.
8 | secret: true
9 | GitHubOwner:
10 | description: GitHub user that owns target repository.
11 | secret: false
12 |
13 | pipeline:
14 | source:
15 | - provider: GitHub
16 | outputs:
17 | - SourceOutput
18 | configuration:
19 | Owner: "{INPUT:GitHubOwner}"
20 | Repo: example
21 | Branch: master
22 | OAuthToken: "{INPUT:GitHubToken}"
23 |
24 | build:
25 | - provider: CodeBuild
26 | image: aws/codebuild/python:3.6.5
27 | buildspec: .chalice/buildspec.yaml
28 | env:
29 | key1: value2
30 | key3: value4
31 | inputs:
32 | - SourceOutput
33 | outputs:
34 | - CompiledCfnTemplate
35 |
36 | deploy:
37 | - provider: CloudFormation
38 | run-order: 1
39 | inputs:
40 | - CompiledCfnTemplate
41 | configuration:
42 | StackName: ExampleChaliceAppStack
43 | ChangeSetName: ExampleChaliceAppChangeSet
44 | ActionMode: CHANGE_SET_REPLACE
45 | Capabilities: CAPABILITY_IAM
46 | TemplatePath: CompiledCfnTemplate::transformed.yaml
47 | - provider: CloudFormation
48 | run-order: 2
49 | configuration:
50 | StackName: ExampleChaliceAppStack
51 | ChangeSetName: ExampleChaliceAppChangeSet
52 | ActionMode: CHANGE_SET_EXECUTE
53 | OutputFileName: StackOutputs.json
54 |
--------------------------------------------------------------------------------
/test/vectors/chalice/config_inputs.json:
--------------------------------------------------------------------------------
1 | {
2 | "GitHubOwner": "aws"
3 | }
4 |
--------------------------------------------------------------------------------
/test/vectors/chalice/iam.json:
--------------------------------------------------------------------------------
1 | {
2 | "Description": "IAM resources for pipeformer-managed project: ExampleChaliceApp",
3 | "Parameters": {
4 | "Bucket0Artifacts0Arn": {"Type": "String"},
5 | "Bucket0ProjectResources0Arn": {"Type": "String"},
6 | "Key0Stack0Arn": {"Type": "String"}
7 | },
8 | "Resources": {
9 | "Role0CloudFormation": {
10 | "Type": "AWS::IAM::Role",
11 | "Properties": {
12 | "AssumeRolePolicyDocument": {
13 | "Statement": [
14 | {
15 | "Principal": {
16 | "Service": "cloudformation.amazonaws.com"
17 | },
18 | "Effect": "Allow",
19 | "Action": [
20 | "sts:AssumeRole"
21 | ]
22 | }
23 | ]
24 | },
25 | "Policies": [
26 | {
27 | "PolicyName": {"Fn::Sub": "${AWS::StackName}-CloudFormation"},
28 | "PolicyDocument": {
29 | "Statement": [
30 | {
31 | "Effect": "Allow",
32 | "Action": [
33 | "*"
34 | ],
35 | "Resource": ["*"]
36 | }
37 | ]
38 | }
39 | }
40 | ]
41 | }
42 | },
43 | "Role0CodePipeline": {
44 | "Type": "AWS::IAM::Role",
45 | "Properties": {
46 | "AssumeRolePolicyDocument": {
47 | "Statement": [
48 | {
49 | "Principal": {
50 | "Service": "codepipeline.amazonaws.com"
51 | },
52 | "Effect": "Allow",
53 | "Action": [
54 | "sts:AssumeRole"
55 | ]
56 | }
57 | ]
58 | },
59 | "Policies": [
60 | {
61 | "PolicyName": {"Fn::Sub": "${AWS::StackName}-CodePipeline"},
62 | "PolicyDocument": {
63 | "Statement": [
64 | {
65 | "Effect": "Allow",
66 | "Action": [
67 | "s3:GetBucketVersioning",
68 | "s3:PutBucketVersioning"
69 | ],
70 | "Resource": [
71 | {"Ref": "Bucket0Artifacts0Arn"},
72 | {"Ref": "Bucket0ProjectResources0Arn"}
73 | ]
74 | },
75 | {
76 | "Effect": "Allow",
77 | "Action": [
78 | "s3:GetObject",
79 | "s3:PutObject"
80 | ],
81 | "Resource": [
82 | {"Fn::Sub": "${Bucket0Artifacts0Arn}/*"},
83 | {"Fn::Sub": "${Bucket0ProjectResources0Arn}/*"}
84 | ]
85 | },
86 | {
87 | "Effect": "Allow",
88 | "Action": [
89 | "kms:Encrypt",
90 | "kms:Decrypt",
91 | "kms:GenerateDataKey"
92 | ],
93 | "Resource": [
94 | {"Ref": "Key0Stack0Arn"}
95 | ]
96 | },
97 | {
98 | "Effect": "Allow",
99 | "Action": [
100 | "cloudwatch:*"
101 | ],
102 | "Resource": [
103 | {"Fn::Sub": "arn:${AWS::Partition}:cloudwatch:${AWS::Region}:${AWS::AccountId}:*"}
104 | ]
105 | },
106 | {
107 | "Effect": "Allow",
108 | "Action": [
109 | "iam:PassRole"
110 | ],
111 | "Resource": [
112 | {"Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/*"}
113 | ]
114 | },
115 | {
116 | "Effect": "Allow",
117 | "Action": [
118 | "lambda:InvokeFunction",
119 | "lambda:ListFunctions"
120 | ],
121 | "Resource": [
122 | {"Fn::Sub": "arn:${AWS::Partition}:lambda:${AWS::Region}:${AWS::AccountId}:*"}
123 | ]
124 | },
125 | {
126 | "Effect": "Allow",
127 | "Action": [
128 | "cloudformation:CreateStack",
129 | "cloudformation:DeleteStack",
130 | "cloudformation:DescribeStacks",
131 | "cloudformation:UpdateStack",
132 | "cloudformation:CreateChangeSet",
133 | "cloudformation:DeleteChangeSet",
134 | "cloudformation:DescribeChangeSet",
135 | "cloudformation:ExecuteChangeSet",
136 | "cloudformation:SetStackPolicy",
137 | "cloudformation:ValidateTemplate"
138 | ],
139 | "Resource": [
140 | {"Fn::Sub": "arn:${AWS::Partition}:cloudformation:${AWS::Region}:${AWS::AccountId}:*"}
141 | ]
142 | },
143 | {
144 | "Effect": "Allow",
145 | "Action": [
146 | "codebuild:BatchGetBuilds",
147 | "codebuild:StartBuild"
148 | ],
149 | "Resource": [
150 | {"Fn::Sub": "arn:${AWS::Partition}:codebuild:${AWS::Region}:${AWS::AccountId}:*"}
151 | ]
152 | }
153 | ]
154 | }
155 | }
156 | ]
157 | }
158 | },
159 | "Role0CodeBuild": {
160 | "Type": "AWS::IAM::Role",
161 | "Properties": {
162 | "AssumeRolePolicyDocument": {
163 | "Statement": [
164 | {
165 | "Principal": {
166 | "Service": "codebuild.amazonaws.com"
167 | },
168 | "Effect": "Allow",
169 | "Action": [
170 | "sts:AssumeRole"
171 | ]
172 | }
173 | ]
174 | },
175 | "Policies": [
176 | {
177 | "PolicyName": {"Fn::Sub": "${AWS::StackName}-CodeBuild"},
178 | "PolicyDocument": {
179 | "Statement": [
180 | {
181 | "Effect": "Allow",
182 | "Action": [
183 | "logs:CreateLogGroup",
184 | "logs:CreateLogStream",
185 | "logs:PutLogEvents"
186 | ],
187 | "Resource": [
188 | {"Fn::Sub": "arn:${AWS::Partition}:logs:${AWS::Region}:${AWS::AccountId}:*"}
189 | ]
190 | },
191 | {
192 | "Effect": "Allow",
193 | "Action": [
194 | "s3:GetObject",
195 | "s3:GetObjectVersion",
196 | "s3:PutObject"
197 | ],
198 | "Resource": [
199 | {"Fn::Sub": "${Bucket0Artifacts0Arn}/*"},
200 | {"Fn::Sub": "${Bucket0ProjectResources0Arn}/*"}
201 | ]
202 | },
203 | {
204 | "Effect": "Allow",
205 | "Action": [
206 | "kms:Encrypt",
207 | "kms:Decrypt",
208 | "kms:GenerateDataKey"
209 | ],
210 | "Resource": [
211 | {"Ref": "Key0Stack0Arn"}
212 | ]
213 | }
214 | ]
215 | }
216 | }
217 | ]
218 | }
219 | }
220 | },
221 | "Outputs": {
222 | "Role0CodeBuild0Arn": {
223 | "Value": {"Fn::GetAtt": ["Role0CodeBuild", "Arn"]}
224 | },
225 | "Role0CodePipeline0Arn": {
226 | "Value": {"Fn::GetAtt": ["Role0CodePipeline", "Arn"]}
227 | },
228 | "Role0CloudFormation0Arn": {
229 | "Value": {"Fn::GetAtt": ["Role0CloudFormation", "Arn"]}
230 | }
231 | }
232 | }
233 |
--------------------------------------------------------------------------------
/test/vectors/chalice/inputs.json:
--------------------------------------------------------------------------------
1 | {
2 | "Description": "Input values for pipeformer-managed project: ExampleChaliceApp",
3 | "Parameters": {
4 | "Key0Stack0Arn": {"Type": "String"}
5 | },
6 | "Resources": {
7 | "Secret0GitHubToken": {
8 | "Type": "AWS::SecretsManager::Secret",
9 | "Properties": {
10 | "KmsKeyId": {"Ref": "Key0Stack0Arn"},
11 | "SecretString": "REPLACEME",
12 | "Tags": [
13 | {
14 | "Key": "pipeformer",
15 | "Value": "ExampleChaliceApp"
16 | }
17 | ]
18 | }
19 | },
20 | "Parameter0GitHubOwner": {
21 | "Type": "AWS::SSM::Parameter",
22 | "Properties": {
23 | "Type": "String",
24 | "Value": "aws"
25 | }
26 | }
27 | },
28 | "Outputs": {
29 | "Parameter0GitHubOwner0Name": {
30 | "Value": {"Ref": "Parameter0GitHubOwner"}
31 | },
32 | "Secret0GitHubToken0Arn": {
33 | "Value": {"Ref": "Secret0GitHubToken"}
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 | autoformat,
4 | py{36,37}-{local,integ,examples},
5 | noenvvars, sourcebuildcheck,
6 | {flake8,pylint}{,-tests,-examples},
7 | mypy-py3,
8 | bandit,
9 | doc8, readme, docs,
10 | # prone to false positives
11 | vulture
12 |
13 | ##############################################################################################
14 | # Additional environments: #
15 | # #
16 | # autoformat : Apply all autoformatters. #
17 | # lint :: Run all linters. #
18 | # vulture :: Run vulture. Prone to false-positives. #
19 | # #
20 | # Operational helper environments: #
21 | # #
22 | # docs :: Build Sphinx documentation. #
23 | # autodocs :: Build Sphinx documentation and start server, autobuilding on any file changes. #
24 | # park :: Build name-parking packages using pypi-parker. #
25 | # build :: Build source and wheel dist files. #
26 | # test-release :: Build dist files and upload to testpypi pypirc profile. #
27 | # release :: Build dist files and upload to pypi pypirc profile. #
28 | ##############################################################################################
29 |
30 |
31 | ##############
32 | # Manual Run #
33 | ##############
34 |
35 | [testenv:run]
36 | basepython = python3
37 | passenv = {[testenv]passenv}
38 | sitepackages = False
39 | commands = pipeformer {posargs}
40 |
41 | #########
42 | # Tests #
43 | #########
44 |
45 | [testenv:base-command]
46 | commands = pytest --basetemp={envtmpdir} -l --cov pipeformer {posargs}
47 |
48 | [testenv]
49 | passenv =
50 | # Pass through AWS credentials
51 | AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN \
52 | # Pass through AWS profile name (useful for local testing)
53 | AWS_PROFILE \
54 | # Pass through the default AWS region (used for integration tests)
55 | AWS_DEFAULT_REGION
56 | sitepackages = False
57 | deps = -rtest/requirements.txt
58 | commands =
59 | # Local tests: no network access required
60 | local: {[testenv:base-command]commands} test/ -m local
61 | # Integration tests: requires network access and might require service credentials
62 | integ: {[testenv:base-command]commands} test/ -m integ
63 | # Acceptance tests: testing against static test vectors : same requirements as integ
64 | accept: {[testenv:base-command]commands} test/ -m accept
65 | # Test the examples : same requirements as integ
66 | examples: {[testenv:base-command]commands} examples/test/ -m examples
67 | # Run all known tests : same requirements as integ
68 | all: {[testenv:base-command]commands} test/ examples/test/
69 | # You decide what tests to run
70 | manual: {[testenv:base-command]commands}
71 |
72 | # Verify that local tests work without environment variables present
73 | [testenv:noenvvars]
74 | basepython = python3
75 | sitepackages = False
76 | deps = {[testenv]deps}
77 | commands = {[testenv:base-command]commands} test/ -m local
78 |
79 | # Verify that tests can be successfully run from the source build.
80 | [testenv:sourcebuildcheck]
81 | basepython = python3
82 | sitepackages = False
83 | recreate = True
84 | deps =
85 | {[testenv:build]deps}
86 | commands =
87 | {[testenv:build]commands}
88 | {toxinidir}/test/source-build-check.sh {envtmpdir} {toxinidir}/dist
89 |
90 | ###############
91 | # Type checks #
92 | ###############
93 |
94 | [testenv:mypy-coverage]
95 | commands =
96 | # Make mypy linecoverage report readable by coverage
97 | python -c \
98 | "t = open('.coverage', 'w');\
99 | c = open('build/coverage.json').read();\
100 | t.write('!coverage.py: This is a private format, don\'t read it directly!\n');\
101 | t.write(c);\
102 | t.close()"
103 | coverage report -m
104 |
105 | [testenv:mypy-common]
106 | basepython = python3
107 | deps =
108 | coverage
109 | mypy>=0.650
110 | mypy_extensions
111 | typing>=3.6.2
112 |
113 | [testenv:mypy-py3]
114 | basepython = {[testenv:mypy-common]basepython}
115 | deps = {[testenv:mypy-common]deps}
116 | commands =
117 | python -m mypy \
118 | --linecoverage-report build \
119 | src/pipeformer/ \
120 | {posargs}
121 | {[testenv:mypy-coverage]commands}
122 |
123 | ###############################
124 | # Formatting and style checks #
125 | ###############################
126 |
127 | [testenv:flake8]
128 | basepython = python3
129 | deps =
130 | flake8
131 | flake8-docstrings
132 | flake8-isort
133 | flake8-print>=3.1.0
134 | flake8-bugbear
135 | commands =
136 | flake8 \
137 | src/pipeformer/ \
138 | setup.py \
139 | doc/conf.py
140 |
141 | [testenv:flake8-tests]
142 | basepython = {[testenv:flake8]basepython}
143 | deps = {[testenv:flake8]deps}
144 | commands =
145 | flake8 \
146 | # Ignore F811 redefinition errors in tests (breaks with pytest-mock use)
147 | # Ignore F841 local variable assigned but never used (useful for collecting locals for test reports)
148 | # Ignore D101,D102,D103 docstring requirements for tests
149 | --ignore F811,F841,D101,D102,D103 \
150 | test/
151 |
152 | [testenv:flake8-examples]
153 | basepython = {[testenv:flake8]basepython}
154 | deps = {[testenv:flake8]deps}
155 | commands =
156 | flake8 \
157 | examples/src/
158 | flake8 \
159 | # Ignore F811 redefinition errors in tests (breaks with pytest-mock use)
160 | # Ignore F841 local variable assigned but never used (useful for collecting locals for test reports)
161 | # Ignore D101,D102,D103 docstring requirements for tests
162 | --ignore F811,F841,D101,D102,D103 \
163 | examples/test/
164 |
165 | [testenv:pylint]
166 | basepython = python3
167 | deps =
168 | {[testenv]deps}
169 | pyflakes
170 | pylint>=2.0.0
171 | commands =
172 | pylint \
173 | --rcfile=src/pylintrc \
174 | src/pipeformer/ \
175 | setup.py \
176 | doc/conf.py
177 |
178 | [testenv:pylint-tests]
179 | basepython = {[testenv:pylint]basepython}
180 | deps = {[testenv:pylint]deps}
181 | commands =
182 | pylint \
183 | --rcfile=test/pylintrc \
184 | test/unit/ \
185 | test/functional/
187 |
188 | [testenv:pylint-examples]
189 | basepython = {[testenv:pylint]basepython}
190 | deps = {[testenv:pylint]deps}
191 | commands =
192 | pylint --rcfile=examples/src/pylintrc examples/src/
193 | pylint --rcfile=examples/test/pylintrc examples/test/
194 |
195 | [testenv:bandit]
196 | basepython = python3
197 | deps = bandit
198 | commands = bandit \
199 | # B322: Ignore Python 2 input check: we only support Python 3
200 | -s B322 \
201 | -r src/pipeformer/
202 |
203 | # Prone to false positives: only run manually
204 | [testenv:vulture]
205 | basepython = python3
206 | deps = vulture
207 | commands = vulture src/pipeformer/
208 |
209 | [testenv:blacken-src]
210 | basepython = python3
211 | deps =
212 | black
213 | commands =
214 | black --line-length 120 \
215 | src/pipeformer/ \
216 | setup.py \
217 | doc/conf.py \
218 | test/ \
219 | examples/ \
220 | {posargs}
221 |
222 | [testenv:blacken]
223 | basepython = python3
224 | deps =
225 | {[testenv:blacken-src]deps}
226 | commands =
227 | {[testenv:blacken-src]commands}
228 |
229 | [testenv:isort-seed]
230 | basepython = python3
231 | deps = seed-isort-config
232 | commands = seed-isort-config
233 |
234 | [testenv:isort]
235 | basepython = python3
236 | deps = isort
237 | commands = isort \
238 | -rc \
239 | src \
240 | test \
241 | examples \
242 | doc \
243 | setup.py \
244 | {posargs}
245 |
246 | [testenv:autoformat]
247 | basepython = python3
248 | deps =
249 | {[testenv:isort-seed]deps}
250 | {[testenv:isort]deps}
251 | {[testenv:blacken]deps}
252 | commands =
253 | {[testenv:isort-seed]commands}
254 | {[testenv:isort]commands}
255 | {[testenv:blacken]commands}
256 |
257 | [testenv:doc8]
258 | basepython = python3
259 | whitelist_externals = {[testenv:resetdocs]whitelist_externals}
260 | deps =
261 | sphinx
262 | doc8
263 | commands =
264 | {[testenv:resetdocs]commands}
265 | doc8 doc/index.rst doc/lib/ README.rst CHANGELOG.rst
266 |
267 | [testenv:readme]
268 | basepython = python3
269 | deps = readme_renderer
270 | commands = python setup.py check -r -s
271 |
272 | [testenv:lint]
273 | basepython = python3
274 | whitelist_externals = {[testenv:resetdocs]whitelist_externals}
275 | deps =
276 | {[testenv:autoformat]deps}
277 | {[testenv:flake8]deps}
278 | {[testenv:flake8-tests]deps}
279 | {[testenv:flake8-examples]deps}
280 | {[testenv:pylint]deps}
281 | {[testenv:pylint-tests]deps}
282 | {[testenv:pylint-examples]deps}
283 | {[testenv:doc8]deps}
284 | {[testenv:readme]deps}
285 | {[testenv:bandit]deps}
286 | commands =
287 | {[testenv:autoformat]commands}
288 | {[testenv:flake8]commands}
289 | {[testenv:flake8-tests]commands}
290 | {[testenv:flake8-examples]commands}
291 | {[testenv:pylint]commands}
292 | {[testenv:pylint-tests]commands}
293 | {[testenv:pylint-examples]commands}
294 | {[testenv:doc8]commands}
295 | {[testenv:readme]commands}
296 | {[testenv:bandit]commands}
297 |
298 | #################
299 | # Documentation #
300 | #################
301 |
302 | # Clear out any generated files from doc/
303 | [testenv:resetdocs]
304 | skip_install = true
305 | deps =
306 | whitelist_externals =
307 | mkdir
308 | rm
309 | commands =
310 | # Make sure that the directory exists to avoid
311 | # potential side effects of using rm -f
312 | mkdir -p {toxinidir}/doc/lib/generated
313 | rm -r {toxinidir}/doc/lib/generated
314 |
315 | [testenv:assert-file-is-empty]
316 | basepython = python3
317 | commands =
318 | python -c \
319 | "import sys;\
320 | f = open(sys.argv[-1], 'r');\
321 | contents = f.read();\
322 | sys.exit(contents if contents.strip() else 0);\
323 | f.close()" \
324 | {posargs}
325 |
326 | [testenv:docs-build]
327 | basepython = python3
328 | deps = {[testenv:docs]deps}
329 | commands =
330 | sphinx-build -E -c {toxinidir}/doc/ -b html {toxinidir}/doc/ {toxinidir}/doc/build/html
331 | {[testenv:assert-file-is-empty]commands} "{toxinidir}/doc/build/html/output.txt"
332 |
333 | [testenv:docs-spelling]
334 | basepython = python3
335 | deps = {[testenv:docs]deps}
336 | commands =
337 | sphinx-build -E -c {toxinidir}/doc/ -b spelling {toxinidir}/doc/ {toxinidir}/doc/build/spelling
338 | {[testenv:assert-file-is-empty]commands} "{toxinidir}/doc/build/spelling/output.txt"
339 |
340 | [testenv:docs-linkcheck]
341 | basepython = python3
342 | deps = {[testenv:docs]deps}
343 | commands =
344 | sphinx-build -E -c {toxinidir}/doc/ -b linkcheck {toxinidir}/doc/ {toxinidir}/doc/build/linkcheck
345 | {[testenv:assert-file-is-empty]commands} "{toxinidir}/doc/build/linkcheck/output.txt"
346 |
347 | [testenv:docs]
348 | basepython = python3
349 | deps =
350 | {[testenv]deps}
351 | -r{toxinidir}/doc/requirements.txt
352 | commands =
353 | {[testenv:docs-build]commands}
354 | {[testenv:docs-spelling]commands}
355 | {[testenv:docs-linkcheck]commands}
356 |
357 | [testenv:autodocs]
358 | basepython = python3
359 | deps =
360 | {[testenv:docs]deps}
361 | sphinx-autobuild
362 | commands =
363 | sphinx-autobuild -E -c {toxinidir}/doc/ -b html {toxinidir}/doc/ {toxinidir}/doc/build/html
364 |
365 | ###################
366 | # Release tooling #
367 | ###################
368 |
369 | [testenv:park]
370 | basepython = python3
371 | skip_install = true
372 | deps =
373 | pypi-parker
374 | setuptools
375 | commands = python setup.py park
376 |
377 | [testenv:build]
378 | basepython = python3
379 | skip_install = true
380 | deps =
381 | wheel
382 | setuptools
383 | commands =
384 | python setup.py sdist bdist_wheel
385 |
386 | [testenv:test-release]
387 | basepython = python3
388 | skip_install = true
389 | deps =
390 | {[testenv:park]deps}
391 | {[testenv:build]deps}
392 | twine
393 | commands =
394 | {[testenv:park]commands}
395 | {[testenv:build]commands}
396 | twine upload --skip-existing --repository testpypi dist/*
397 |
398 | [testenv:release]
399 | basepython = python3
400 | skip_install = true
401 | deps =
402 | {[testenv:park]deps}
403 | {[testenv:build]deps}
404 | twine
405 | commands =
406 | {[testenv:park]commands}
407 | {[testenv:build]commands}
408 | twine upload --skip-existing --repository pypi dist/*
409 |
--------------------------------------------------------------------------------