├── .coveragerc
├── .gitignore
├── .travis.yml
├── LICENSE
├── README.md
├── bucket_snake
│   ├── __about__.py
│   ├── __init__.py
│   ├── config.py
│   ├── entrypoints.py
│   ├── iam
│   │   ├── __init__.py
│   │   ├── logic.py
│   │   └── util.py
│   ├── request_schemas.py
│   ├── s3
│   │   ├── __init__.py
│   │   ├── models.py
│   │   └── permissions.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── conf.py
│   │   ├── conftest.py
│   │   ├── templates
│   │   │   ├── accounts.json
│   │   │   └── historical-s3-report.json
│   │   ├── test_config.py
│   │   ├── test_entrypoints.py
│   │   ├── test_fixtures.py
│   │   ├── test_iam.py
│   │   ├── test_models.py
│   │   └── test_s3.py
│   └── util
│       ├── __init__.py
│       └── exceptions.py
├── docs
│   ├── GenerateDocs.md
│   ├── configuration.md
│   ├── howitworks.md
│   ├── installation.md
│   ├── intro.md
│   ├── permissions.md
│   ├── s3background.md
│   └── serverless-examples
│       ├── .serverless-example.yml
│       ├── requirements.txt
│       └── serverless_configs
│           └── environment.yml
├── requirements.txt
├── setup.py
├── tox.ini
└── website
    ├── .gitignore
    ├── core
    │   └── Footer.js
    ├── package.json
    ├── pages
    │   └── en
    │       └── index.js
    ├── sidebars.json
    ├── siteConfig.js
    ├── static
    │   ├── css
    │   │   └── custom.css
    │   └── img
    │       ├── Compute_AWSLambda_LARGE.png
    │       ├── favicon.png
    │       ├── favicon
    │       │   └── favicon.ico
    │       ├── logo.png
    │       └── s3check.png
    └── yarn.lock
/.coveragerc:
--------------------------------------------------------------------------------
1 | [report]
2 | include = bucket_snake/*.py
3 | omit = bucket_snake/__about__.py
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 |
56 | # Flask instance folder
57 | instance/
58 |
59 | # Scrapy stuff:
60 | .scrapy
61 |
62 | # Sphinx documentation
63 | docs/_build/
64 |
65 | # PyBuilder
66 | target/
67 |
68 | # IPython Notebook
69 | .ipynb_checkpoints
70 |
71 | # pyenv
72 | .python-version
73 |
74 | # celery beat schedule file
75 | celerybeat-schedule
76 |
77 | # dotenv
78 | .env
79 |
80 | # virtualenv
81 | venv/
82 | ENV/
83 |
84 | # Spyder project settings
85 | .spyderproject
86 |
87 | # Rope project settings
88 | .ropeproject
89 |
90 | venv
91 | .idea/
92 |
93 | .DS_Store
94 | libs/
95 |
96 | node_modules/
97 | __pycache__
98 |
99 | .serverless/
100 | .requirements/
101 | src/
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 | - "3.6"
4 |
5 | install:
6 | - pip install tox-travis coveralls
7 |
8 | script:
9 | - tox
10 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2018 Netflix, Inc.
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Bucket Snake
2 | =======================
3 | [](http://www.serverless.com) [](https://travis-ci.org/Netflix-Skunkworks/bucketsnake) [](https://coveralls.io/github/Netflix-Skunkworks/bucketsnake)
4 |
5 | ## This project is in heavy development and not yet ready for production use
6 |
7 |
8 |
9 | Bucket Snake is an AWS Lambda function that provisions S3 access for IAM roles. A primary feature
10 | of Bucket Snake is to create IAM roles that reside in the account where the S3 buckets live to facilitate
11 | proper cross-account S3 bucket access (via a role assumption).
12 |
13 | Documentation
14 | -------------
15 | Bucket Snake's documentation [can be found here](https://Netflix-Skunkworks.github.io/bucketsnake).
16 |
--------------------------------------------------------------------------------
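For orientation before the source files below: a minimal sketch of how a caller might invoke the deployed Lambda, based on the request schema in `bucket_snake/request_schemas.py` and the test fixtures in `bucket_snake/tests/conftest.py`. The function name, bucket names, prefixes, and account number are hypothetical.

```python
import json

import boto3

# Hypothetical payload matching the IncomingRequest schema
# (role_name, app_name, account_number, buckets); all names are illustrative.
payload = {
    "role_name": "someAppInstanceProfile",
    "app_name": "someApp",
    "account_number": "012345678910",
    "buckets": {
        "example-bucket": [
            {"prefix": "*", "perms": ["list"]},
            {"prefix": "some/path/*", "perms": ["get", "put"]},
        ]
    },
}

# "bucket-snake" is an assumed deployed function name, not defined by this repo.
lambda_client = boto3.client("lambda", region_name="us-east-1")
lambda_client.invoke(FunctionName="bucket-snake", Payload=json.dumps(payload))
```
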
/bucket_snake/__about__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, print_function
2 |
3 | __all__ = [
4 | "__title__", "__summary__", "__uri__", "__version__", "__author__",
5 | "__email__", "__license__", "__copyright__",
6 | ]
7 |
8 | __title__ = "bucket_snake"
9 | __summary__ = "AWS Lambda function to provision IAM roles for S3 access (both cross-account and same account)."
10 | __uri__ = "https://github.com/Netflix-Skunkworks/BucketSnake"
11 |
12 | __version__ = "0.1.0"
13 |
14 | __author__ = "Mike Grima"
15 | __email__ = "security@netflix.com"
16 |
17 | __license__ = "Apache License, Version 2.0"
18 | __copyright__ = "Copyright 2017 {0}".format(__author__)
19 |
--------------------------------------------------------------------------------
/bucket_snake/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Netflix-Skunkworks/bucketsnake/75438be05d3ed77d9795c135c7a6817bc7c6a8a2/bucket_snake/__init__.py
--------------------------------------------------------------------------------
/bucket_snake/config.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.config
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import os
9 | import logging
10 | from functools import wraps
11 |
12 | from bucket_snake.util.exceptions import MissingRequiredConfigurationItemException
13 |
14 | logging.basicConfig()
15 | log = logging.getLogger("bucket_snake")
16 | log.setLevel(os.environ.get("LOG_LEVEL", logging.INFO))
17 |
18 |
19 | class Config:
20 | """
21 | Class for maintaining the configuration for the entire runtime.
22 | By default, this will set most of the values from the environment variables -- but is also
23 | configurable via other means.
24 | Simply import this and use it:
25 | ```
26 | from bucket_snake.config import CONFIG
27 | ```
28 | """
29 | def __init__(self):
30 | # Bucket Snake specific configuration:
31 | self._blacklisted_source_accounts = os.environ["BLACKLISTED_SOURCE_ACCOUNTS"].split(",") \
32 | if os.environ.get("BLACKLISTED_SOURCE_ACCOUNTS") else []
33 | self._blacklisted_bucket_accounts = os.environ["BLACKLISTED_BUCKET_ACCOUNTS"].split(",") \
34 | if os.environ.get("BLACKLISTED_BUCKET_ACCOUNTS") else []
35 | self._bucket_snake_policy_name = os.environ.get("BUCKET_SNAKE_POLICY_NAME", "BucketSnake")
36 | self._sts_policy_name = os.environ.get("STS_POLICY_NAME", "BucketSnakeAssumeRole")
37 | self._dest_role_description = os.environ.get("DEST_ROLE_DESCRIPTION", "Bucket Snake provisioned role")
38 | self._bucket_snake_role = os.environ.get("BUCKET_SNAKE_ROLE", "BucketSnake")
39 | self._bucket_snake_session_name = os.environ.get("BUCKET_SNAKE_SESSION_NAME", "BucketSnake")
40 | self._iam_region = os.environ.get("IAM_REGION", "us-east-1")
41 |
42 | # Buckets that contain the historical report -- Just give the application access to all of them
43 |         # (At some point in the future we could probably pare down to region, but assume the app could be deployed
44 | # to multiple regions and that the app would pick the bucket within the same region)
45 | # self._app_reports_buckets = os.environ["APP_REPORTS_BUCKETS"].split(",") \
46 | # if os.environ.get("APP_REPORTS_BUCKETS") else None # REQUIRED FIELD
47 | self._app_reports_buckets = os.environ["APP_REPORTS_BUCKETS"].split(",") \
48 | if os.environ.get("APP_REPORTS_BUCKETS") else []
49 |
50 | # SWAG bucket config (REQUIRED FIELDS):
51 | self._swag_bucket = os.environ.get("SWAG_BUCKET")
52 | self._swag_region = os.environ.get("SWAG_REGION")
53 | self._swag_data_file = os.environ.get("SWAG_DATA_FILE")
54 |
55 | # Historical S3 reports dump:
56 | self._reports_bucket = os.environ.get("REPORTS_BUCKET") # REQUIRED FIELD
57 | self._reports_region = os.environ.get("REPORTS_REGION") # REQUIRED FIELD
58 | self._reports_prefix = os.environ.get("REPORTS_PREFIX", "historical-s3-report.json")
59 |
60 | # Required Fields:
61 | self.required_fields = [
62 | "app_reports_buckets",
63 | "swag_bucket",
64 | "swag_region",
65 | "swag_data_file",
66 | "reports_bucket",
67 | "reports_region"
68 | ]
69 |
70 | @property
71 | def blacklisted_source_accounts(self):
72 | return self._blacklisted_source_accounts
73 |
74 | @blacklisted_source_accounts.setter
75 | def blacklisted_source_accounts(self, accounts):
76 | self._blacklisted_source_accounts = accounts
77 |
78 | @property
79 | def blacklisted_bucket_accounts(self):
80 | return self._blacklisted_bucket_accounts
81 |
82 | @blacklisted_bucket_accounts.setter
83 | def blacklisted_bucket_accounts(self, accounts):
84 | self._blacklisted_bucket_accounts = accounts
85 |
86 | @property
87 | def bucket_snake_policy_name(self):
88 | return self._bucket_snake_policy_name
89 |
90 | @bucket_snake_policy_name.setter
91 | def bucket_snake_policy_name(self, policy_name):
92 | self._bucket_snake_policy_name = policy_name
93 |
94 | @property
95 | def sts_policy_name(self):
96 | return self._sts_policy_name
97 |
98 | @sts_policy_name.setter
99 | def sts_policy_name(self, policy_name):
100 | self._sts_policy_name = policy_name
101 |
102 | @property
103 | def dest_role_description(self):
104 | return self._dest_role_description
105 |
106 | @dest_role_description.setter
107 | def dest_role_description(self, description):
108 | self._dest_role_description = description
109 |
110 | @property
111 | def app_reports_buckets(self):
112 | return self._app_reports_buckets
113 |
114 | @app_reports_buckets.setter
115 | def app_reports_buckets(self, buckets):
116 | self._app_reports_buckets = buckets
117 |
118 | @property
119 | def iam_region(self):
120 | return self._iam_region
121 |
122 | @iam_region.setter
123 | def iam_region(self, region):
124 | self._iam_region = region
125 |
126 | @property
127 | def swag_bucket(self):
128 | return self._swag_bucket
129 |
130 | @swag_bucket.setter
131 | def swag_bucket(self, swag_bucket):
132 | self._swag_bucket = swag_bucket
133 |
134 | @property
135 | def swag_region(self):
136 | return self._swag_region
137 |
138 | @swag_region.setter
139 | def swag_region(self, region):
140 | self._swag_region = region
141 |
142 | @property
143 | def swag_data_file(self):
144 | return self._swag_data_file
145 |
146 | @swag_data_file.setter
147 | def swag_data_file(self, swag_data_file):
148 | self._swag_data_file = swag_data_file
149 |
150 | @property
151 | def reports_bucket(self):
152 | return self._reports_bucket
153 |
154 | @reports_bucket.setter
155 | def reports_bucket(self, reports_bucket):
156 | self._reports_bucket = reports_bucket
157 |
158 | @property
159 | def reports_region(self):
160 | return self._reports_region
161 |
162 | @reports_region.setter
163 | def reports_region(self, region):
164 | self._reports_region = region
165 |
166 | @property
167 | def reports_prefix(self):
168 | return self._reports_prefix
169 |
170 | @reports_prefix.setter
171 | def reports_prefix(self, reports_prefix):
172 | self._reports_prefix = reports_prefix
173 |
174 | @property
175 | def bucket_snake_role(self):
176 | return self._bucket_snake_role
177 |
178 | @bucket_snake_role.setter
179 | def bucket_snake_role(self, role):
180 | self._bucket_snake_role = role
181 |
182 | @property
183 | def bucket_snake_session_name(self):
184 | return self._bucket_snake_session_name
185 |
186 | @bucket_snake_session_name.setter
187 | def bucket_snake_session_name(self, session_name):
188 | self._bucket_snake_session_name = session_name
189 |
190 |
191 | CONFIG = Config()
192 |
193 |
194 | def load_and_verify_config(func):
195 | """
196 | Decorator that sets the attributes on the configuration based on the input to the lambda function (if the env
197 | var `CONFIG_FROM_INPUT` is set). This will also verify that the configuration is correct and that required
198 | values are properly configured.
199 |
200 | The values need to be set to the raw values that the configuration needs. For example, if the env var
201 | would take in a comma-separated-list, you would supply an actual list of the items in the JSON, not the
202 | comma-separated-string.
203 | :param func:
204 | :return:
205 | """
206 |
207 | @wraps(func)
208 | def wrapper(event, context):
209 | # Only execute this if the environment variable is set (default should be False)
210 | if os.environ.get("CONFIG_FROM_INPUT", False):
211 | if event.get("config"):
212 | for attribute, value in event["config"].items():
213 | if hasattr(CONFIG, attribute):
214 | setattr(CONFIG, attribute, value)
215 | else:
216 | log.error("[X] Config Attribute: {} is not valid.".format(attribute))
217 |
218 | # Verify that all required configuration items have been set:
219 | for required in CONFIG.required_fields:
220 | if not getattr(CONFIG, required):
221 | raise MissingRequiredConfigurationItemException(
222 | "Item: {} is required, but not specified.".format(required))
223 |
224 | return func(event, context)
225 |
226 | return wrapper
227 |
--------------------------------------------------------------------------------
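A small usage sketch of the `load_and_verify_config` decorator above, assuming `CONFIG_FROM_INPUT` is set: attributes under the event's `config` key are applied to `CONFIG` as raw values (real lists, not comma-separated strings), and the required fields are then verified. Bucket names and regions here are made up.

```python
import os

from bucket_snake.config import CONFIG, load_and_verify_config

os.environ["CONFIG_FROM_INPUT"] = "true"

@load_and_verify_config
def handler(event, context):
    # By the time this runs, the required config fields have been verified.
    return CONFIG.swag_bucket

event = {
    "config": {
        # Raw values, e.g. an actual list rather than "bucket-a,bucket-b":
        "app_reports_buckets": ["example-reports-bucket"],
        "swag_bucket": "example-swag-bucket",
        "swag_region": "us-west-2",
        "swag_data_file": "accounts.json",
        "reports_bucket": "example-reports-bucket",
        "reports_region": "us-west-2",
    }
}

handler(event, None)  # returns "example-swag-bucket"
```
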
/bucket_snake/entrypoints.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.entrypoints
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import logging
9 | import os
10 |
11 | from marshmallow import ValidationError
12 | from raven_python_lambda import RavenLambdaWrapper
13 |
14 | from bucket_snake.config import load_and_verify_config
15 | from bucket_snake.iam.logic import update_instance_profile_s3_permissions, create_destination_roles, \
16 | update_source_assume_role_policy
17 | from bucket_snake.iam.util import get_iam_client, check_for_role
18 | from bucket_snake.request_schemas import incoming_request
19 | from bucket_snake.s3.models import BUCKET_TABLE
20 | from bucket_snake.s3.permissions import build_bucket_account_mapping, collect_policies, create_s3_role_policies, \
21 | create_access_to_reports
22 | from bucket_snake.util.exceptions import InvalidRequestException, SourceRoleDoesNotExistException
23 |
24 | logging.basicConfig()
25 | log = logging.getLogger("bucket_snake")
26 | log.setLevel(os.environ.get("LOG_LEVEL", logging.INFO))
27 |
28 |
29 | def validate_request(payload):
30 | """
31 | This will validate that the payload contains the proper S3 permissions via Marshmallow
32 | :param payload:
33 | :return:
34 | """
35 | try:
36 | request_data = incoming_request.load(payload).data
37 |
38 | except ValidationError as ve:
39 | log.debug("[X] Invalid properties sent in. Here is the specific error details:")
40 | raise InvalidRequestException("Invalid data was sent in through the payload. See: {}".format(str(ve)))
41 |
42 | return request_data
43 |
44 |
45 | def main_logic(request_data):
46 | """
47 | The main logic for the Lambda. This assumes that the input request has been properly validated.
48 |     This means that all requested buckets exist and are permitted for access.
49 | :param request_data:
50 | :return:
51 | """
52 | # STEP 1: VERIFY THAT SOURCE IAM ROLES EXISTS #
53 | log.debug("[~] Checking if the source IAM role: {} exists in {}...".format(request_data["role_name"],
54 | request_data["account_number"]))
55 | iam_client = get_iam_client(request_data["account_number"])
56 | if not check_for_role(request_data["role_name"], iam_client):
57 | log.debug("[X] Source IAM role does NOT exist. That must be created first before this lambda is called.")
58 | raise SourceRoleDoesNotExistException("Source IAM Role: {} does not exist. This must exist before running "
59 | "this script.".format(request_data["role_name"]))
60 |
61 | # STEP 2: BUILD THE S3 PERMISSIONS MATRIX #
62 | # Need to determine which buckets are in the same account, and which are not
63 | log.debug("[~] Building the account->bucket mapping...")
64 | buckets_same, buckets_cross = build_bucket_account_mapping(request_data)
65 | log.debug("[+] Completed the account->bucket mapping.")
66 |
67 | # Calculate the S3 permissions that are required:
68 | log.debug("[~] Calculating the same account S3 permissions required...")
69 | policies_same_account = create_s3_role_policies(collect_policies(buckets_same))
70 | log.debug("[+] Completed calculation of same account S3 permissions.")
71 |
72 | log.debug("[~] Determining the permissions for access to the historical S3 reports...")
73 | create_access_to_reports(policies_same_account, request_data["account_number"])
74 | log.debug("[+] Completed calculation of permissions for historical S3 reports.")
75 |
76 | log.debug("[~] Calculating the cross-account S3 permissions required...")
77 | policies_cross_account = create_s3_role_policies(collect_policies(buckets_cross))
78 | log.debug("[+] Completed calculation of cross-account S3 permissions.")
79 |
80 | # STEP 3: CREATE ROLES AND GRANT THE PERMISSIONS #
81 | # Grant the same-account access:
82 | log.debug("[~] Updating the source role ({source_role})'s S3 permissions "
83 | "(in account: {source_account})...".format(source_role=request_data["app_name"],
84 | source_account=request_data["account_number"]))
85 | update_instance_profile_s3_permissions(policies_same_account, request_data["app_name"],
86 | request_data["role_name"], request_data["account_number"])
87 | log.debug("[+] Updated the source role ({})'s S3 Permissions.".format(request_data["app_name"]))
88 |
89 | # Create the cross-account roles:
90 | log.debug("[~] Creating the destination roles...")
91 | create_destination_roles(policies_cross_account, request_data["app_name"],
92 | request_data["role_name"], request_data["account_number"])
93 | log.debug("[+] Completed destination role creation...")
94 |
95 | # Update the assume role permissions:
96 | log.debug("[~] Updating the source role's role assumption permissions...")
97 | update_source_assume_role_policy(policies_cross_account, request_data["app_name"],
98 | request_data["role_name"], request_data["account_number"])
99 | log.debug("[+] Completed updating the source role's role assumption permissions...")
100 |
101 | # DONE!
102 | log.info("[+] Permissionsss fixed for sssource role: {source}, app: {app}, account: {account}!".format(
103 | source=request_data["role_name"], app=request_data["app_name"], account=request_data["account_number"]
104 | ))
105 |
106 |
107 | @RavenLambdaWrapper()
108 | @load_and_verify_config
109 | def handler(event, context):
110 | """
111 | The main Lambda entrypoint. Validates that all is well before continuing on.
112 | :param event:
113 | :param context:
114 | :return:
115 | """
116 | log.debug("[~] SSSSSSSSSSSSSSsssssssSSSSSSSSSS")
117 |
118 | # Set up the config first:
119 | # set_config_from_input(event)
120 |
121 | # Fetch the Historical S3 Reports data
122 | _ = BUCKET_TABLE.buckets
123 |
124 | # Parse and validate that the payload is correct:
125 | log.debug("[~] Parsing request data...")
126 | request_data = validate_request(event)
127 | log.debug("[+] Successfully loaded incoming request data.")
128 |
129 | # Continue:
130 | main_logic(request_data)
131 | log.debug("[+] Function complete")
132 |
--------------------------------------------------------------------------------
/bucket_snake/iam/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Netflix-Skunkworks/bucketsnake/75438be05d3ed77d9795c135c7a6817bc7c6a8a2/bucket_snake/iam/__init__.py
--------------------------------------------------------------------------------
/bucket_snake/iam/logic.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.iam.logic
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import json
9 | import logging
10 | import os
11 |
12 | from bucket_snake.config import CONFIG
13 | from bucket_snake.iam.util import get_iam_client, check_for_role, create_iam_role, format_role_arn, update_aspd
14 |
15 | logging.basicConfig()
16 | log = logging.getLogger("bucket_snake")
17 | log.setLevel(os.environ.get("LOG_LEVEL", logging.INFO))
18 |
19 |
20 | def create_destination_roles(bucket_policies, app_name, source_role, source_role_account):
21 | """
22 |     This will create the destination IAM roles that the source application can assume into.
23 | These roles only permit S3 access.
24 | :param bucket_policies:
25 | :param app_name:
26 | :param source_role:
27 | :param source_role_account:
28 | :return:
29 | """
30 | for account, policies in bucket_policies.items():
31 | client = get_iam_client(account)
32 |
33 | # Check if the destination role exists:
34 | destination_role_name = "{app}-{account}".format(app=app_name, account=source_role_account)
35 |
36 | log.debug("\t[ ] Checking for destination role in {}...".format(account))
37 |
38 | if not check_for_role(destination_role_name, client):
39 | log.debug("\t[@] Destination role does not exist... Creating...")
40 | # Create the role:
41 | create_iam_role(client, destination_role_name,
42 | format_role_arn(source_role, source_role_account),
43 | CONFIG.dest_role_description)
44 |
45 | log.debug("\t[+] Created the destination role in account {}".format(account))
46 |
47 | else:
48 | log.debug("\t[ ] Updating the ASPD of the role in account {}...".format(account))
49 | update_aspd(client, destination_role_name, format_role_arn(source_role, source_role_account))
50 |
51 | log.debug("\t[ ] Updating the role policy...")
52 | client.put_role_policy(RoleName=destination_role_name, PolicyName=CONFIG.bucket_snake_policy_name,
53 | PolicyDocument=json.dumps(policies, indent=4, sort_keys=True))
54 | log.debug("\t[+] Updated the role policy in account {}".format(account))
55 |
56 |
57 | def update_instance_profile_s3_permissions(bucket_policies, app_name, source_role, source_role_account):
58 | """
59 | This will grant the source application access to S3. This avoids having to assume role within the same
60 | AWS account.
61 | :param bucket_policies:
62 | :param app_name:
63 | :param source_role:
64 | :param source_role_account:
65 | :return:
66 | """
67 | if bucket_policies.get(source_role_account):
68 | client = get_iam_client(source_role_account)
69 |
70 | client.put_role_policy(RoleName=source_role, PolicyName=CONFIG.bucket_snake_policy_name,
71 | PolicyDocument=json.dumps(bucket_policies[source_role_account], indent=4,
72 | sort_keys=True))
73 |
74 |
75 | def update_source_assume_role_policy(cross_account_policies, app_name, source_role, source_account):
76 | """
77 |     This grants the source application the ability to assume into the destination S3 roles.
78 | :param cross_account_policies:
79 | :param app_name:
80 | :param source_role:
81 | :param source_account:
82 | :return:
83 | """
84 | assume_role_perm = {
85 | "Statement": [
86 | {
87 | "Effect": "Allow",
88 | "Action": "sts:AssumeRole",
89 | "Resource": []
90 | }
91 | ]
92 | }
93 |
94 | client = get_iam_client(source_account)
95 |
96 | for account in cross_account_policies.keys():
97 | destination_role_name = "{app}-{account}".format(app=app_name, account=source_account)
98 |
99 | # Is this where we should do the diff logic for old role cleanup?
100 | assume_role_perm["Statement"][0]["Resource"].append(
101 | format_role_arn(destination_role_name, account)
102 | )
103 |
104 | client.put_role_policy(RoleName=source_role, PolicyName=CONFIG.sts_policy_name,
105 | PolicyDocument=json.dumps(assume_role_perm, indent=4, sort_keys=True))
106 |
--------------------------------------------------------------------------------
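To make the result of `update_source_assume_role_policy` above concrete, here is a sketch (not taken from the repo) of the inline policy it would attach to the source role for a hypothetical `app_name="someApp"`, `source_account="012345678910"`, and a single destination account `012345678911`:

```python
# Sketch of the sts:AssumeRole policy produced above for the assumed inputs;
# the destination role name follows the "{app}-{source_account}" convention.
assume_role_policy = {
    "Statement": [
        {
            "Effect": "Allow",
            "Action": "sts:AssumeRole",
            "Resource": [
                "arn:aws:iam::012345678911:role/someApp-012345678910"
            ],
        }
    ]
}
```
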
/bucket_snake/iam/util.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.iam.util
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import json
9 |
10 | import boto3
11 | from botocore.exceptions import ClientError
12 |
13 | from bucket_snake.config import CONFIG
14 |
15 | IAM_CLIENTS = {}
16 |
17 |
18 | def get_client(arn, technology, region="us-east-1"):
19 | """
20 | Generic function to get a boto3 client with the proper assumed role credentials.
21 | :param arn:
22 | :param technology:
23 | :param region:
24 | :return:
25 | """
26 | sts = boto3.client("sts", region_name=region)
27 | ar = sts.assume_role(RoleArn=arn, RoleSessionName=CONFIG.bucket_snake_session_name)
28 |
29 | session = boto3.Session(
30 | region_name=region,
31 | aws_access_key_id=ar["Credentials"]["AccessKeyId"],
32 | aws_secret_access_key=ar["Credentials"]["SecretAccessKey"],
33 | aws_session_token=ar["Credentials"]["SessionToken"]
34 | )
35 |
36 | return session.client(technology)
37 |
38 |
39 | def format_role_arn(role_name, account_id):
40 | """
41 | Gets an IAM ARN string.
42 | :param role_name:
43 | :param account_id:
44 | :return:
45 | """
46 | return "arn:aws:iam::{}:role/{}".format(account_id, role_name)
47 |
48 |
49 | def get_iam_client(account_id):
50 | """
51 | Gets a cached IAM client for all the Bucket Snake IAM actions.
52 | :param account_id:
53 | :return:
54 | """
55 | if IAM_CLIENTS.get(account_id):
56 | return IAM_CLIENTS[account_id]
57 |
58 | client = get_client(format_role_arn(CONFIG.bucket_snake_role, account_id), "iam", region=CONFIG.iam_region)
59 | IAM_CLIENTS[account_id] = client
60 |
61 | return client
62 |
63 |
64 | def check_for_role(role_name, client):
65 | """
66 | Checks for an IAM role in a given account
67 | :param role_name:
68 | :param client:
69 | :return:
70 | """
71 | try:
72 | role = client.get_role(RoleName=role_name)
73 |
74 | return role
75 |
76 | except ClientError as ce:
77 | errors = ["Not Found", "NoSuchEntity"]
78 |
79 | for error in errors:
80 | if error in str(ce):
81 | return
82 |
83 | raise ce
84 |
85 |
86 | def create_iam_role(client, role_name, source_arn, description):
87 | """
88 | Creates an IAM role (the S3-specific IAM role for the application), which only permits the
89 | source application access to assume into it.
90 | :param client:
91 | :param role_name:
92 | :param source_arn:
93 | :param description:
94 | :return:
95 | """
96 | aspd = {
97 | "Statement": [
98 | {
99 | "Effect": "Allow",
100 | "Action": "sts:AssumeRole",
101 | "Principal": {
102 | "AWS": source_arn
103 | }
104 | }
105 | ]
106 | }
107 | return client.create_role(Path="/", RoleName=role_name,
108 | AssumeRolePolicyDocument=json.dumps(aspd, indent=4), Description=description)
109 |
110 |
111 | def update_aspd(client, role_name, source_arn):
112 | """
113 | This updates the existing Assume Role Policy Document for the application's S3-specific IAM role if it already
114 | exists. This is for idempotence.
115 | :param client:
116 | :param role_name:
117 | :param source_arn:
118 | :return:
119 | """
120 | aspd = {
121 | "Statement": [
122 | {
123 | "Effect": "Allow",
124 | "Action": "sts:AssumeRole",
125 | "Principal": {
126 | "AWS": source_arn
127 | }
128 | }
129 | ]
130 | }
131 | client.update_assume_role_policy(RoleName=role_name, PolicyDocument=json.dumps(aspd, indent=4))
132 |
--------------------------------------------------------------------------------
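A brief usage sketch for the helpers above. The account number is made up, and `get_iam_client` performs a real `sts:AssumeRole` call, so it assumes the Bucket Snake role (default name `BucketSnake`) exists in that account and is assumable with the caller's credentials.

```python
from bucket_snake.iam.util import check_for_role, format_role_arn, get_iam_client

# Pure string helper -- no AWS calls:
assert format_role_arn("BucketSnake", "012345678910") == \
    "arn:aws:iam::012345678910:role/BucketSnake"

# Assumes the Bucket Snake role in the target account and caches the IAM client:
client = get_iam_client("012345678910")

# Returns the role data if it exists, or None on "Not Found"/"NoSuchEntity":
role = check_for_role("someAppInstanceProfile", client)
```
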
/bucket_snake/request_schemas.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.request_schemas
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | from marshmallow import Schema, fields, validates_schema, ValidationError, validate
9 | from marshmallow.validate import OneOf
10 | from swag_client.backend import SWAGManager
11 | from swag_client.util import parse_swag_config_options
12 |
13 | from bucket_snake.config import CONFIG
14 | from bucket_snake.s3.models import BUCKET_TABLE
15 | from bucket_snake.util.exceptions import BlacklistedAccountException, S3BucketDoesNotExistException
16 |
17 |
18 | def get_swag():
19 | """
20 | Get account data from SWAG (via S3)
21 | :return:
22 | """
23 | swag_opts = {
24 | 'swag.type': 's3',
25 | 'swag.bucket_name': CONFIG.swag_bucket,
26 | 'swag.data_file': CONFIG.swag_data_file,
27 | 'swag.region': CONFIG.swag_region,
28 | 'swag.cache_expires': 0
29 | }
30 | return SWAGManager(**parse_swag_config_options(swag_opts))
31 |
32 |
33 | class BucketPermission(Schema):
34 | """Permission schema for buckets. Needs a prefix and the corresponding permission"""
35 | prefix = fields.Str(required=True)
36 | perms = fields.List(fields.Str(validate=OneOf(["list", "get", "put", "delete"])), required=True)
37 |
38 | @validates_schema
39 | def validate_prefix(self, data):
40 | if data.get("prefix") == "":
41 | raise ValidationError("Prefix cannot be an empty string. Must include a path to something.")
42 |
43 | @validates_schema
44 | def validate_perms(self, data):
45 | if not data.get("perms"):
46 | raise ValidationError("Must include one of the required permissions: list, get, put, delete.")
47 |
48 |
49 | class BucketDict(fields.Field):
50 | """The schema for a given bucket. Needs to consist of the bucket name, and a list of the bucket permissions"""
51 | def __init__(self, bucket_name_field, bucket_permissions_field, *args, **kwargs):
52 | fields.Field.__init__(self, *args, **kwargs)
53 | self.bucket_name_field = bucket_name_field
54 | self.bucket_permissions_field = bucket_permissions_field
55 |
56 | def _deserialize(self, value, attr, data):
57 | un_serialized_dict = {}
58 |
59 | # K is a string -- The bucket name in the permissions dict
60 | for k, v in value.items():
61 | k = self.bucket_name_field.deserialize(k)
62 | v = self.bucket_permissions_field.deserialize(v)
63 |
64 | # Does this bucket even exist?
65 | bucket_account = BUCKET_TABLE.buckets.get(k)
66 | if not bucket_account:
67 | raise S3BucketDoesNotExistException(k)
68 | elif bucket_account in CONFIG.blacklisted_bucket_accounts:
69 | raise BlacklistedAccountException("Bucket: {bucket} resides in blacklisted bucket "
70 | "account: {account}".format(bucket=k, account=bucket_account))
71 |
72 | un_serialized_dict[k] = v
73 |
74 | return un_serialized_dict
75 |
76 |
77 | class IncomingRequest(Schema):
78 | """The main inbound request that arrives from the Lambda launch"""
79 | role_name = fields.Str(required=True)
80 | app_name = fields.Str(required=True, validate=[validate.Length(min=1, max=48)])
81 | account_number = fields.Str(required=True)
82 | buckets = BucketDict(
83 | fields.Str(),
84 | fields.Nested(BucketPermission, many=True)
85 | )
86 |
87 | @validates_schema
88 | def validate_account_number(self, data):
89 | if data.get("account_number"):
90 | # Make sure the AWS account number exists...
91 | swag = get_swag()
92 | result = swag.get("[?id=='{}']".format(data["account_number"]))
93 |
94 | if not result:
95 | raise ValidationError("Unknown AWS account ID passed in: {}".format(data["account_number"]))
96 |
97 | # Check that the account is not in our blacklisted accounts:
98 | if data["account_number"] in CONFIG.blacklisted_source_accounts:
99 | raise BlacklistedAccountException("This tool does NOT service account: {}".format(
100 | data["account_number"]))
101 |
102 |
103 | bucket_permission = BucketPermission(strict=True)
104 | incoming_request = IncomingRequest(strict=True)
105 |
--------------------------------------------------------------------------------
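A short validation sketch for the schemas above. Only the standalone `BucketPermission` schema is exercised here, since the full `IncomingRequest` schema also consults SWAG and `BUCKET_TABLE` (the test fixtures in `bucket_snake/tests/conftest.py` mock those). The prefix values are illustrative.

```python
from marshmallow import ValidationError

from bucket_snake.request_schemas import bucket_permission

# A well-formed prefix/permission entry loads cleanly:
bucket_permission.load({"prefix": "some/path/*", "perms": ["get", "put"]})

# An empty prefix (or a permission outside list/get/put/delete) is rejected,
# and strict=True makes the schema raise instead of returning errors:
try:
    bucket_permission.load({"prefix": "", "perms": ["get"]})
except ValidationError as ve:
    print(ve.messages)
```
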
/bucket_snake/s3/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Netflix-Skunkworks/bucketsnake/75438be05d3ed77d9795c135c7a6817bc7c6a8a2/bucket_snake/s3/__init__.py
--------------------------------------------------------------------------------
/bucket_snake/s3/models.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.s3.models
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import logging
9 | import os
10 |
11 | import boto3
12 | from marshmallow import Schema
13 | from marshmallow.fields import Field
14 | from retrying import retry
15 |
16 | from bucket_snake.config import CONFIG
17 |
18 | logging.basicConfig()
19 | log = logging.getLogger("bucket_snake")
20 | log.setLevel(os.environ.get("LOG_LEVEL", logging.INFO))
21 |
22 |
23 | class BucketField(Field):
24 | """Field for the bucket dictionary. It's `bucket_name: bucket_account`"""
25 | def _deserialize(self, value, attr, data):
26 | return {name: details["AccountId"] for name, details in data["buckets"].items()}
27 |
28 |
29 | class S3ReportSchema(Schema):
30 | """Schema for the S3 Historical report, which only cares about the buckets."""
31 | buckets = BucketField(required=True, load_from="buckets", load_only=True)
32 |
33 |
34 | class BucketTable:
35 | """
36 | Class that fetches the Historical S3 Table and stores it for later use.
37 |
38 | To use: `from bucket_snake.s3.models import BUCKET_TABLE`
39 | Then, `BUCKET_TABLE.buckets["bucket_name"]` to get the account that the bucket resides in
40 | """
41 | def __init__(self):
42 | self._buckets = None
43 |
44 | @property
45 | def buckets(self):
46 | if not self._buckets:
47 | self._buckets = BucketTable.__get_bucket_table()
48 |
49 | return self._buckets
50 |
51 | @staticmethod
52 | def __get_bucket_table():
53 | """
54 | Fetches the Historical S3 data, and gets back the dictionary mapping of Bucket -> Account
55 | :return:
56 | """
57 | log.debug("[~] Fetching the Historical S3 report data for deserialization...")
58 |
59 | report = BucketTable.__fetch_from_s3()
60 |
61 | return S3ReportSchema(strict=True).loads(report).data["buckets"]
62 |
63 | @staticmethod
64 | @retry(stop_max_attempt_number=3, wait_exponential_multiplier=1000, wait_exponential_max=10000)
65 | def __fetch_from_s3():
66 | """
67 | Fetches the Historical reports data from S3
68 | :return:
69 | """
70 | log.debug("[~] Fetching Historical S3 Report...")
71 | client = boto3.client("s3", region_name=CONFIG.reports_region)
72 |
73 | s3_obj = client.get_object(Bucket=CONFIG.reports_bucket, Key=CONFIG.reports_prefix)
74 | log.debug("[+] Successfully fetched Historical S3 Report...")
75 |
76 | return s3_obj["Body"].read().decode()
77 |
78 |
79 | # Use this for all S3 Historical Bucket related data:
80 | BUCKET_TABLE = BucketTable()
81 |
--------------------------------------------------------------------------------
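For reference, `S3ReportSchema` above consumes a Historical S3 report shaped like the test template later in this dump (`bucket_snake/tests/templates/historical-s3-report.json`) and keeps only the bucket-to-account mapping. A minimal sketch with made-up values:

```python
import json

from bucket_snake.s3.models import S3ReportSchema

# A tiny, made-up report in the same shape as the Historical S3 dump:
report = json.dumps({
    "buckets": {
        "example-bucket": {"AccountId": "012345678910", "Region": "us-east-1"}
    },
    "s3_report_version": 1,
    "generated_date": "2017-11-22T23:17:30Z",
})

# Only the bucket -> owning-account mapping survives deserialization:
buckets = S3ReportSchema(strict=True).loads(report).data["buckets"]
assert buckets == {"example-bucket": "012345678910"}
```
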
/bucket_snake/s3/permissions.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.s3.permissions
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | from bucket_snake.s3.models import BUCKET_TABLE
9 | from bucket_snake.config import CONFIG
10 |
11 | # This is the main lookup table for general access types (list, get, put, delete), and the corresponding
12 | # AWS S3 permissions required to grant:
13 | S3_PERMISSIONS = {
14 | "list": [
15 | "s3:ListBucket",
16 | "s3:ListBucketVersions"
17 | ],
18 | "get": [
19 | "s3:GetObject",
20 | "s3:GetObjectTagging",
21 | "s3:GetObjectVersion",
22 | "s3:GetObjectVersionTagging",
23 | "s3:GetObjectAcl",
24 | "s3:GetObjectVersionAcl"
25 | ],
26 | "put": [
27 | "s3:PutObject",
28 | "s3:PutObjectTagging",
29 | "s3:PutObjectVersionTagging",
30 | "s3:ListMultipartUploadParts*",
31 | "s3:AbortMultipartUpload",
32 | "s3:RestoreObject"
33 | ],
34 | "delete": [
35 | "s3:DeleteObject",
36 | "s3:DeleteObjectTagging",
37 | "s3:DeleteObjectVersion",
38 | "s3:DeleteObjectVersionTagging"
39 | ]
40 | }
41 |
42 |
43 | def check_if_cross_account(source_account_number, bucket):
44 | """Determine if the bucket resides in a different account than the source account"""
45 | if BUCKET_TABLE.buckets[bucket] == source_account_number:
46 | return False
47 |
48 | return True
49 |
50 |
51 | def build_bucket_account_mapping(request_data):
52 | """
53 | This will build a mapping and return two dicts, one with all buckets in the same account as the source
54 | application, and the other with all the cross-account S3 buckets.
55 | :param request_data:
56 | :return:
57 | """
58 | buckets_same_account = {}
59 | buckets_cross_account = {}
60 |
61 | for bucket, permissions in request_data["buckets"].items():
62 | # Determine which account the given bucket is in:
63 | if check_if_cross_account(request_data["account_number"], bucket):
64 | buckets_cross_account[bucket] = dict(permissions=permissions,
65 | account_number=BUCKET_TABLE.buckets[bucket])
66 | else:
67 | buckets_same_account[bucket] = dict(permissions=permissions,
68 | account_number=request_data["account_number"])
69 |
70 | return buckets_same_account, buckets_cross_account
71 |
72 |
73 | def collect_policies(buckets_dict):
74 | """
75 | This creates the mapping of AWS S3 IAM permissions for a given AWS account (for where the bucket resides)
76 | :param buckets_dict:
77 | :return:
78 | """
79 | account_policies = {}
80 |
81 | for bucket, details in buckets_dict.items():
82 | policy = account_policies.get(details["account_number"], {
83 | "list": set(),
84 | "get": set(),
85 | "put": set(),
86 | "delete": set()
87 | })
88 |
89 | for prefix_perms in details["permissions"]:
90 | for perm in prefix_perms["perms"]:
91 | bucket_arn = "arn:aws:s3:::{bucket}".format(bucket=bucket)
92 |
93 | # LIST permissions apply to the whole bucket:
94 | if perm == "list":
95 | policy["list"].add(bucket_arn)
96 |
97 | else:
98 | prefix_arn = "{bucket_arn}/{prefix}".format(bucket_arn=bucket_arn, prefix=prefix_perms["prefix"])
99 | policy[perm].add(prefix_arn)
100 |
101 | # Update the policy for the account:
102 | account_policies[details["account_number"]] = policy
103 |
104 | return account_policies
105 |
106 |
107 | def create_s3_role_policies(collected_policies):
108 | """
109 | This creates the IAM template with the permissions from the mapping created in `collect_policies`.
110 | :param collected_policies:
111 | :return:
112 | """
113 | account_iam_policies = {}
114 |
115 | for account, policies in collected_policies.items():
116 | statements = []
117 |
118 | for perm, arns in policies.items():
119 | # Skip empty permissions:
120 | if len(arns) == 0:
121 | continue
122 |
123 | statements.append({
124 | "Sid": perm.title(),
125 | "Effect": "Allow",
126 | "Action": S3_PERMISSIONS[perm],
127 | "Resource": list(arns)
128 | })
129 |
130 | account_iam_policies[account] = {
131 | "Statement": statements
132 | }
133 |
134 | return account_iam_policies
135 |
136 |
137 | def create_access_to_reports(account_iam_policies, app_account):
138 | """
139 | This creates the permissions required for the application to read from the historical reports bucket.
140 | This is so the application can access the S3 dictionary to determine if role assumption is required or not.
141 |
142 | The buckets that the application will access are defined in the Config's `app_reports_buckets` list.
143 |     The application will not assume any role to access this -- this will be cross-account access, so
144 | the bucket policy needs to be there.
145 | :return:
146 | """
147 | if not account_iam_policies.get(app_account):
148 | account_iam_policies[app_account] = {"Statement": []}
149 |
150 | resources = ["arn:aws:s3:::{b}/{p}".format(b=b, p=CONFIG.reports_prefix) for b in CONFIG.app_reports_buckets]
151 |
152 | account_iam_policies[app_account]["Statement"].append({
153 | "Sid": "HistoricalS3Reports",
154 | "Effect": "Allow",
155 | "Action": "s3:GetObject",
156 | "Resource": resources
157 | })
158 |
--------------------------------------------------------------------------------
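As an illustration of the pipeline above (`build_bucket_account_mapping` → `collect_policies` → `create_s3_role_policies`), here is a sketch that feeds `collect_policies` a pre-built mapping directly, so `BUCKET_TABLE` is never consulted. The bucket name and account number are made up.

```python
from bucket_snake.s3.permissions import collect_policies, create_s3_role_policies

# Shaped like the output of build_bucket_account_mapping for a same-account bucket:
buckets_same = {
    "example-bucket": {
        "permissions": [
            {"prefix": "*", "perms": ["list"]},
            {"prefix": "some/path/*", "perms": ["get"]},
        ],
        "account_number": "012345678910",
    }
}

policies = create_s3_role_policies(collect_policies(buckets_same))

# policies["012345678910"]["Statement"] now holds a "List" statement on
# arn:aws:s3:::example-bucket and a "Get" statement on
# arn:aws:s3:::example-bucket/some/path/*.
```
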
/bucket_snake/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Netflix-Skunkworks/bucketsnake/75438be05d3ed77d9795c135c7a6817bc7c6a8a2/bucket_snake/tests/__init__.py
--------------------------------------------------------------------------------
/bucket_snake/tests/conf.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.tests.conf
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | SWAG_BUCKET = "swagbucket"
9 | HISTORICAL_REPORT_BUCKET = "historical-reports"
10 |
11 | EXISTING_ASPD = {
12 | "Statement": [
13 | {
14 | "Effect": "Allow",
15 | "Principal": "*",
16 | "Action": "sts:AssumeRole"
17 | }
18 | ]
19 | }
20 |
--------------------------------------------------------------------------------
/bucket_snake/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.tests.conftest
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import json
9 | import os
10 |
11 | import boto3
12 | import pytest
13 | from moto import mock_sts, mock_iam, mock_s3
14 |
15 | import bucket_snake.iam.util
16 | import bucket_snake.config
17 | from bucket_snake.config import CONFIG
18 | from bucket_snake.tests.conf import SWAG_BUCKET, HISTORICAL_REPORT_BUCKET, EXISTING_ASPD
19 | import bucket_snake.s3.models
20 | from bucket_snake.s3.models import BUCKET_TABLE, BucketTable
21 |
22 |
23 | class MockContext:
24 | @staticmethod
25 | def get_remaining_time_in_millis():
26 | return 9000
27 |
28 |
29 | def get_json(file):
30 | cwd = os.path.dirname(os.path.realpath(__file__))
31 | object_path = os.path.join(cwd, 'templates/{}'.format(file))
32 |
33 | with open(object_path, 'r') as op:
34 | return op.read()
35 |
36 |
37 | @pytest.yield_fixture(scope="function")
38 | def sts():
39 | mock_sts().start()
40 |
41 | client = boto3.client("sts")
42 |
43 | yield client
44 |
45 | mock_sts().stop()
46 |
47 |
48 | @pytest.yield_fixture(scope="function")
49 | def iam():
50 | mock_iam().start()
51 |
52 | client = boto3.client("iam")
53 |
54 | yield client
55 |
56 | mock_iam().stop()
57 |
58 |
59 | @pytest.yield_fixture(scope="function")
60 | def s3():
61 | mock_s3().start()
62 |
63 | client = boto3.client("s3", region_name="us-west-2")
64 |
65 | yield client
66 |
67 | mock_s3().stop()
68 |
69 |
70 | @pytest.yield_fixture(scope="function")
71 | def config():
72 | old_config = bucket_snake.config.CONFIG
73 | CONFIG.app_reports_buckets = [HISTORICAL_REPORT_BUCKET]
74 | CONFIG.swag_region = "us-west-2"
75 | CONFIG.swag_data_file = "accounts.json"
76 | CONFIG.swag_bucket = SWAG_BUCKET
77 | CONFIG.reports_bucket = HISTORICAL_REPORT_BUCKET
78 | CONFIG.reports_region = "us-west-2"
79 | CONFIG.blacklisted_source_accounts = ["666666666666", "000000000000"]
80 | CONFIG.blacklisted_bucket_accounts = ["989898989898", "898989898989", "666666666666"]
81 |
82 | yield
83 |
84 | bucket_snake.config.CONFIG = old_config
85 |
86 |
87 | @pytest.yield_fixture(scope="function")
88 | def iam_client_dict():
89 | yield
90 | bucket_snake.iam.util.IAM_CLIENTS = {}
91 |
92 |
93 | @pytest.yield_fixture(scope="function")
94 | def buckets(s3):
95 | s3.create_bucket(Bucket=SWAG_BUCKET)
96 | s3.create_bucket(Bucket=HISTORICAL_REPORT_BUCKET)
97 |
98 | bucket_keys = [
99 | (SWAG_BUCKET, "accounts.json"),
100 | (HISTORICAL_REPORT_BUCKET, "historical-s3-report.json")
101 | ]
102 |
103 | for bucket, key in bucket_keys:
104 | s3.put_object(Bucket=bucket, Key=key, Body=get_json(key))
105 |
106 |
107 | @pytest.yield_fixture(scope="function")
108 | def bucket_table(buckets, config):
109 | old_bucket_table = bucket_snake.s3.models.BUCKET_TABLE
110 | bucket_snake.s3.models.BUCKET_TABLE = BucketTable()
111 |
112 | yield BUCKET_TABLE.buckets
113 |
114 | bucket_snake.s3.models.BUCKET_TABLE = old_bucket_table
115 |
116 |
117 | @pytest.fixture(scope="function")
118 | def existing_role(iam):
119 | iam.create_role(Path="/", RoleName="someAppInstanceProfile",
120 | AssumeRolePolicyDocument=json.dumps(EXISTING_ASPD))
121 |
122 | return iam
123 |
124 |
125 | @pytest.fixture(scope="function")
126 | def s3_role_event():
127 | return {
128 | "role_name": "someAppInstanceProfile",
129 | "app_name": "someApp",
130 | "account_number": "012345678910",
131 | "buckets": {
132 | "test-bucket-one": [
133 | {
134 | "prefix": "*",
135 | "perms": [
136 | "list"
137 | ]
138 | },
139 | {
140 | "prefix": "some/path/*",
141 | "perms": [
142 | "get",
143 | "put",
144 | "delete"
145 | ]
146 | }
147 | ],
148 | "test-bucket-two": [
149 | {
150 | "prefix": "*",
151 | "perms": [
152 | "list",
153 | "get"
154 | ]
155 | }
156 | ],
157 | "test-bucket-three": [
158 | {
159 | "prefix": "*",
160 | "perms": [
161 | "list"
162 | ]
163 | },
164 | {
165 | "prefix": "*",
166 | "perms": [
167 | "get"
168 | ]
169 | }
170 | ],
171 | "test-bucket-four": [
172 | {
173 | "prefix": "*",
174 | "perms": [
175 | "list",
176 | "get"
177 | ]
178 | }
179 | ],
180 | }
181 | }
182 |
183 |
184 | @pytest.fixture(scope="function")
185 | def buckets_same_account_mapping():
186 | return {
187 | "test-bucket-one": {
188 | "permissions": [
189 | {
190 | "prefix": "*",
191 | "perms": [
192 | "list"
193 | ]
194 | },
195 | {
196 | "prefix": "some/path/*",
197 | "perms": [
198 | "get",
199 | "put",
200 | "delete"
201 | ]
202 | }
203 | ],
204 | "account_number": "012345678910"
205 | },
206 | "test-bucket-two": {
207 | "permissions": [
208 | {
209 | "prefix": "*",
210 | "perms": [
211 | "list",
212 | "get"
213 | ]
214 | }
215 | ],
216 | "account_number": "012345678910"
217 | }
218 | }
219 |
220 |
221 | @pytest.fixture(scope="function")
222 | def buckets_cross_account_mapping():
223 | return {
224 | "test-bucket-three": {
225 | "permissions": [
226 | {
227 | "prefix": "*",
228 | "perms": [
229 | "list"
230 | ]
231 | },
232 | {
233 | "prefix": "*",
234 | "perms": [
235 | "get"
236 | ]
237 | }
238 | ],
239 | "account_number": "012345678911"
240 | },
241 | "test-bucket-four": {
242 | "permissions": [
243 | {
244 | "prefix": "*",
245 | "perms": [
246 | "list",
247 | "get"
248 | ]
249 | }
250 | ],
251 | "account_number": "012345678911"
252 | }
253 | }
254 |
255 |
256 | @pytest.fixture(scope="function")
257 | def mock_lambda_context():
258 | return MockContext()
259 |
--------------------------------------------------------------------------------
/bucket_snake/tests/templates/accounts.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "owner": "mycompany",
4 | "aliases": [],
5 | "schemaVersion": "2",
6 | "description": "my test account",
7 | "sensitive": false,
8 | "services": [],
9 | "type": "service",
10 | "tags": [],
11 | "environment": "test",
12 | "provider": "aws",
13 | "name": "myaccount",
14 | "id": "012345678910",
15 | "contacts": [
16 | "admin@mycompany.net"
17 | ],
18 | "email": "admin@mycompany.net",
19 | "status": []
20 | }
21 | ]
22 |
--------------------------------------------------------------------------------
/bucket_snake/tests/templates/historical-s3-report.json:
--------------------------------------------------------------------------------
1 | {
2 | "buckets": {
3 | "test-bucket-one": {
4 | "AccountId": "012345678910",
5 | "Region": "us-east-1"
6 | },
7 | "test-bucket-two": {
8 | "AccountId": "012345678910",
9 | "Region": "us-west-2"
10 | },
11 | "test-bucket-three": {
12 | "AccountId": "012345678911",
13 | "Region": "us-east-1"
14 | },
15 | "test-bucket-four": {
16 | "AccountId": "012345678911",
17 | "Region": "us-west-2"
18 | },
19 | "blacklisted-bucket-one": {
20 | "AccountId": "666666666666",
21 | "Region": "us-west-2"
22 | },
23 | "blacklisted-bucket-two": {
24 | "AccountId": "000000000000",
25 | "Region": "us-east-1"
26 | }
27 | },
28 | "s3_report_version": 1,
29 | "generated_date": "2017-11-22T23:17:30Z"
30 | }
31 |
--------------------------------------------------------------------------------
/bucket_snake/tests/test_config.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.tests.test_config
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import pytest
9 | import os
10 |
11 | from bucket_snake.config import Config, load_and_verify_config
12 | from bucket_snake.util.exceptions import MissingRequiredConfigurationItemException
13 |
14 |
15 | def test_config_required_fields():
16 | import bucket_snake.config
17 | old_config = bucket_snake.config.CONFIG
18 | bucket_snake.config.CONFIG = Config()
19 |
20 | os.environ["CONFIG_FROM_INPUT"] = "true"
21 |
22 | @load_and_verify_config
23 | def wrapped_func(event, context):
24 | pass
25 |
26 | # Confirm they are not set:
27 | for required in bucket_snake.config.CONFIG.required_fields:
28 | assert not getattr(bucket_snake.config.CONFIG, required)
29 |
30 | # Set them:
31 | all_required_configs = [
32 | ("app_reports_buckets", ["reportsbucket", "reportsbucket2"]),
33 | ("swag_bucket", "swagbucket"),
34 | ("swag_region", "us-east-1"),
35 | ("swag_data_file", "v2/accounts.json"),
36 | ("reports_bucket", "reportsbucket"),
37 | ("reports_region", "us-east-1") # Don't set right away...
38 | ]
39 |
40 | lambda_event = {
41 | "config": {}
42 | }
43 |
44 | # Test all but the last...
45 | for i in range(0, len(all_required_configs) - 1):
46 | lambda_event["config"][all_required_configs[i][0]] = all_required_configs[i][1]
47 | with pytest.raises(MissingRequiredConfigurationItemException):
48 | wrapped_func(lambda_event, None)
49 |
50 | # Should be good:
51 | lambda_event["config"]["reports_region"] = "us-east-1"
52 | wrapped_func(lambda_event, None)
53 |
54 | # Are they all set right?
55 | for config_item, value in all_required_configs:
56 | assert getattr(bucket_snake.config.CONFIG, config_item) == value
57 |
58 | # Clean-up:
59 | bucket_snake.config.CONFIG = old_config
60 |
61 | # It should all fail, since the env var is set to not take anything from input:
62 | os.environ.pop("CONFIG_FROM_INPUT")
63 | with pytest.raises(MissingRequiredConfigurationItemException):
64 | wrapped_func(lambda_event, None)
65 |
--------------------------------------------------------------------------------
/bucket_snake/tests/test_entrypoints.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.tests.test_entrypoints
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import pytest
9 |
10 | import bucket_snake.iam.util
11 | from bucket_snake.config import CONFIG
12 | from bucket_snake.entrypoints import handler
13 | from bucket_snake.util.exceptions import SourceRoleDoesNotExistException
14 |
15 |
16 | def test_create_successful(s3_role_event, existing_role, sts, config, buckets, mock_lambda_context, iam_client_dict):
17 | bucket_snake.iam.util.IAM_CLIENTS["012345678910"] = existing_role
18 |
19 | handler(s3_role_event, mock_lambda_context)
20 |
21 | # Check that the "BucketSnake" and "BucketSnakeAssumeRole" policies were added to the application's role:
22 | policy_names = existing_role.list_role_policies(RoleName="someAppInstanceProfile")
23 | assert len(policy_names["PolicyNames"]) == 2
24 | assert "BucketSnake" in policy_names["PolicyNames"]
25 | assert "BucketSnakeAssumeRole" in policy_names["PolicyNames"]
26 |
27 | policies = existing_role.get_role_policy(RoleName="someAppInstanceProfile",
28 | PolicyName=CONFIG.sts_policy_name)
29 | assert len(policies["PolicyDocument"]["Statement"]) == 1
30 | assert policies["PolicyDocument"]["Statement"][0]["Action"] == "sts:AssumeRole"
31 | assert len(policies["PolicyDocument"]["Statement"][0]["Resource"]) == 1
32 | assert policies["PolicyDocument"]["Statement"][0]["Resource"][0] == "arn:aws:iam::012345678911:role/someApp" \
33 | "-012345678910"
34 |
35 | policies = existing_role.get_role_policy(RoleName="someAppInstanceProfile",
36 | PolicyName=CONFIG.bucket_snake_policy_name)
37 | assert len(policies["PolicyDocument"]["Statement"]) == 5
38 |
39 | # Check that the last policy added is the Historical S3 Reports policy:
40 | assert policies["PolicyDocument"]["Statement"][4]["Sid"] == "HistoricalS3Reports"
41 | assert policies["PolicyDocument"]["Statement"][4]["Effect"] == "Allow"
42 | assert policies["PolicyDocument"]["Statement"][4]["Action"] == "s3:GetObject"
43 | assert policies["PolicyDocument"]["Statement"][4]["Resource"] == \
44 | ["arn:aws:s3:::historical-reports/historical-s3-report.json"]
45 |
46 | # Cross Account:
47 | policy_names = bucket_snake.iam.util.IAM_CLIENTS["012345678911"].list_role_policies(RoleName="someApp-012345678910")
48 | assert len(policy_names["PolicyNames"]) == 1
49 | assert policy_names["PolicyNames"][0] == "BucketSnake"
50 |
51 | policies = bucket_snake.iam.util.IAM_CLIENTS["012345678911"].get_role_policy(RoleName="someApp-012345678910",
52 | PolicyName="BucketSnake")
53 | assert len(policies["PolicyDocument"]["Statement"]) == 2
54 |
55 | aspd = bucket_snake.iam.util.IAM_CLIENTS["012345678911"].get_role(RoleName="someApp-012345678910")
56 | assert len(aspd["Role"]["AssumeRolePolicyDocument"]["Statement"]) == 1
57 | assert aspd["Role"]["AssumeRolePolicyDocument"]["Statement"][0]["Principal"]["AWS"] == "arn:aws:iam::012345678910" \
58 | ":role/someAppInstance" \
59 | "Profile"
60 |
61 |
62 | def test_without_existing_role(s3_role_event, sts, iam, config, buckets, mock_lambda_context, iam_client_dict):
63 | with pytest.raises(SourceRoleDoesNotExistException):
64 | handler(s3_role_event, mock_lambda_context)
65 |
--------------------------------------------------------------------------------
/bucket_snake/tests/test_fixtures.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.tests.test_fixtures
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import json
9 |
10 | from bucket_snake.tests.conf import SWAG_BUCKET, HISTORICAL_REPORT_BUCKET
11 | from bucket_snake.tests.conftest import get_json
12 | from bucket_snake.config import CONFIG
13 |
14 |
15 | def test_buckets(s3, buckets):
16 | bucket_keys = [
17 | (SWAG_BUCKET, "accounts.json"),
18 | (HISTORICAL_REPORT_BUCKET, "historical-s3-report.json")
19 | ]
20 |
21 | for bucket, key in bucket_keys:
22 | obj = s3.get_object(Bucket=bucket, Key=key)
23 | s3_json = json.loads(obj["Body"].read().decode("utf-8"))
24 | template_json = json.loads(get_json(key))
25 |
26 | # Assures that no formatting and tabbing differences exist:
27 | assert json.dumps(s3_json, indent=4) == json.dumps(template_json, indent=4)
28 |
29 |
30 | def test_bucket_table(bucket_table):
31 | assert len(bucket_table) == 6
32 |
33 | mapping = [
34 | ("test-bucket-one", "012345678910"),
35 | ("test-bucket-two", "012345678910"),
36 | ("test-bucket-three", "012345678911"),
37 | ("test-bucket-four", "012345678911"),
38 | ("blacklisted-bucket-one", "666666666666"),
39 | ("blacklisted-bucket-two", "000000000000")
40 | ]
41 |
42 | for name, account in mapping:
43 | assert bucket_table[name] == account
44 |
45 |
46 | def test_config(config):
47 | assert CONFIG.swag_region == "us-west-2"
48 | assert CONFIG.swag_data_file == "accounts.json"
49 | assert CONFIG.swag_bucket == SWAG_BUCKET
50 | assert CONFIG.reports_bucket == HISTORICAL_REPORT_BUCKET
51 | assert CONFIG.reports_region == "us-west-2"
52 |
53 | for bl in ["666666666666", "000000000000"]:
54 | assert bl in CONFIG.blacklisted_source_accounts
55 |
56 | for bl in ["989898989898", "898989898989", "666666666666"]:
57 | assert bl in CONFIG.blacklisted_bucket_accounts
58 |
--------------------------------------------------------------------------------
/bucket_snake/tests/test_iam.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.tests.test_iam
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import bucket_snake.iam.util
9 |
10 | from bucket_snake.config import CONFIG
11 | from bucket_snake.iam.logic import create_destination_roles, update_instance_profile_s3_permissions, \
12 | update_source_assume_role_policy
13 | from bucket_snake.iam.util import get_client, format_role_arn, get_iam_client, check_for_role, create_iam_role, \
14 | update_aspd
15 | from bucket_snake.s3.permissions import create_s3_role_policies, collect_policies
16 |
17 |
18 | def test_get_client(sts, config):
19 | client = get_client(format_role_arn(CONFIG.bucket_snake_role, "012345678910"), "iam")
20 |
21 | assert client
22 |
23 |
24 | def test_get_iam_client(sts, config, iam_client_dict):
25 | client = get_iam_client("012345678910")
26 |
27 | assert client
28 | assert bucket_snake.iam.util.IAM_CLIENTS["012345678910"] == client
29 |
30 | client = get_iam_client("012345678910")
31 | assert bucket_snake.iam.util.IAM_CLIENTS["012345678910"] == client
32 | assert len(bucket_snake.iam.util.IAM_CLIENTS) == 1
33 |
34 |
35 | def test_check_for_role(iam, existing_role):
36 | assert check_for_role("someAppInstanceProfile", iam)
37 | assert not check_for_role("Idontexist", iam)
38 |
39 |
40 | def test_create_iam_role(iam, config):
41 | created_role = create_iam_role(iam, "destRole",
42 | format_role_arn("sourceRole", "012345678910"),
43 | CONFIG.dest_role_description)
44 | assert created_role
45 | assert created_role["Role"]["RoleName"] == "destRole"
46 | assert len(created_role["Role"]["AssumeRolePolicyDocument"]["Statement"]) == 1
47 | assert \
48 | created_role["Role"]["AssumeRolePolicyDocument"]["Statement"][0]["Principal"]["AWS"] == \
49 | format_role_arn("sourceRole", "012345678910")
50 |
51 |
52 | def test_update_aspd(iam, existing_role):
53 | update_aspd(iam, "someAppInstanceProfile", format_role_arn("sourceRole", "012345678910"))
54 | updated_role = iam.get_role(RoleName="someAppInstanceProfile")
55 |
56 | assert \
57 | updated_role["Role"]["AssumeRolePolicyDocument"]["Statement"][0]["Principal"]["AWS"] == \
58 | format_role_arn("sourceRole", "012345678910")
59 |
60 |
61 | def test_create_destination_roles(iam, sts, config, buckets_cross_account_mapping):
62 | role_policies = create_s3_role_policies(collect_policies(buckets_cross_account_mapping))
63 | create_destination_roles(role_policies, "someApp", "someAppInstanceProfile", "012345678910")
64 |
65 | # Verify:
66 | role = iam.get_role(RoleName="someApp-012345678910")
67 | assert role
68 | assert role["Role"]["AssumeRolePolicyDocument"]["Statement"][0] \
69 | ["Principal"]["AWS"] == format_role_arn("someAppInstanceProfile", "012345678910")
70 |
71 | policies = iam.get_role_policy(RoleName="someApp-012345678910",
72 | PolicyName=CONFIG.bucket_snake_policy_name)
73 |
74 | assert len(policies["PolicyDocument"]["Statement"]) == 2
75 |
76 |
77 | def test_update_instance_profile_s3_permissions(iam, sts, existing_role, buckets_same_account_mapping, config):
78 | role_policies = create_s3_role_policies(collect_policies(buckets_same_account_mapping))
79 |
80 | update_instance_profile_s3_permissions(role_policies, "someApp", "someAppInstanceProfile", "012345678910")
81 |
82 | # Verify:
83 | policies = iam.get_role_policy(RoleName="someAppInstanceProfile",
84 | PolicyName=CONFIG.bucket_snake_policy_name)
85 |
86 | assert len(policies["PolicyDocument"]["Statement"]) == 4
87 |
88 |
89 | def test_update_source_assume_role_policy(iam, sts, existing_role, config, buckets_cross_account_mapping):
90 | role_policies = create_s3_role_policies(collect_policies(buckets_cross_account_mapping))
91 |
92 | update_source_assume_role_policy(role_policies, "someApp", "someAppInstanceProfile", "012345678910")
93 |
94 | # Verify:
95 | policies = iam.get_role_policy(RoleName="someAppInstanceProfile",
96 | PolicyName=CONFIG.sts_policy_name)
97 |
98 | assert len(policies["PolicyDocument"]["Statement"]) == 1
99 | assert policies["PolicyDocument"]["Statement"][0]["Resource"][0] == "arn:aws:iam::012345678911" \
100 | ":role/someApp-012345678910"
101 |
--------------------------------------------------------------------------------
/bucket_snake/tests/test_models.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.tests.test_models
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | import json
9 |
10 | import pytest
11 | from marshmallow import ValidationError
12 |
13 | from bucket_snake.request_schemas import bucket_permission, incoming_request
14 | from bucket_snake.util.exceptions import BlacklistedAccountException, S3BucketDoesNotExistException
15 |
16 | BP_ONE = {
17 | "prefix": "*",
18 | "perms": [
19 | "get",
20 | "put",
21 | "delete"
22 | ]
23 | }
24 |
25 |
26 | def test_bucket_permission_schema():
27 | bp_one = json.dumps(BP_ONE, sort_keys=True, indent=4)
28 |
29 | bp_two = json.dumps({
30 | "prefix": "/items/in/this/prefix/*",
31 | "perms": [
32 | "get",
33 | "put",
34 | "delete"
35 | ]
36 | }, sort_keys=True, indent=4)
37 |
38 | bp_three = json.dumps({
39 | "prefix": "*",
40 | "perms": ["list"]
41 | }, sort_keys=True, indent=4)
42 |
43 | # Empty prefix:
44 | bp_error_one = json.dumps({
45 | "prefix": "",
46 | "perms": ["list"]
47 | }, indent=4)
48 |
49 | # Incorrect permission
50 | bp_error_two = json.dumps({
51 | "prefix": "*",
52 | "perms": [
53 | "get",
54 | "list",
55 | "put",
56 | "not a permission"
57 | ]
58 | }, indent=4)
59 |
60 | # Incorrect data type:
61 | bp_error_three = json.dumps({
62 | "prefix": True,
63 | "perms": [
64 | "get",
65 | "list",
66 | "put",
67 | "not a permission"
68 | ]
69 | }, indent=4)
70 |
71 | # Missing permission:
72 | bp_error_four = json.dumps({
73 | "prefix": "*",
74 | "perms": []
75 | }, indent=4)
76 |
77 | assert json.dumps(bucket_permission.loads(bp_one).data, sort_keys=True, indent=4) == bp_one
78 | assert json.dumps(bucket_permission.loads(bp_two).data, sort_keys=True, indent=4) == bp_two
79 | assert json.dumps(bucket_permission.loads(bp_three).data, sort_keys=True, indent=4) == bp_three
80 |
81 | with pytest.raises(ValidationError):
82 | bucket_permission.loads(bp_error_one)
83 |
84 | with pytest.raises(ValidationError):
85 | bucket_permission.loads(bp_error_two)
86 |
87 | with pytest.raises(ValidationError):
88 | bucket_permission.loads(bp_error_three)
89 |
90 | with pytest.raises(ValidationError):
91 | bucket_permission.loads(bp_error_four)
92 |
93 |
94 | def test_incoming_request_schema(bucket_table):
95 | request_one = json.dumps({
96 | "role_name": "BucketSnakeLambdaProfile",
97 | "app_name": "BucketSnake",
98 | "account_number": "012345678910",
99 | "buckets": {
100 | "test-bucket-one": [
101 | BP_ONE
102 | ]
103 | }
104 | }, sort_keys=True, indent=4)
105 |
106 | # With an invalid permission:
107 | request_err = json.dumps({
108 | "role_name": "BucketSnakeLambdaProfile",
109 | "app_name": "BucketSnake",
110 | "account_number": "012345678910",
111 | "buckets": {
112 | "test-bucket-one": [
113 | {
114 | "prefix": "",
115 | "perms": ["list"]
116 | }
117 | ]
118 | }
119 | }, indent=4)
120 |
121 | # With an account we don't have:
122 | request_err_two = json.dumps({
123 | "role_name": "BucketSnakeLambdaProfile",
124 | "app_name": "BucketSnake",
125 | "account_number": "333333333333",
126 | "buckets": {
127 | "test-bucket-one": [
128 | BP_ONE
129 | ]
130 | }
131 | }, indent=4)
132 |
133 | # With an app name that is more than 48 characters:
134 | request_err_three = json.dumps({
135 | "role_name": "BucketSnakeLambdaProfile",
136 | "app_name": "x" * 49,
137 | "account_number": "012345678910",
138 | "buckets": {
139 | "test-bucket-one": [
140 | BP_ONE
141 | ]
142 | }
143 | }, indent=4)
144 |
145 | # With a blacklisted account:
146 | blacklisted = json.dumps({
147 | "role_name": "BucketSnakeLambdaProfile",
148 | "app_name": "BucketSnake",
149 | "account_number": "666666666666",
150 | "buckets": {
151 | "test-bucket-one": [
152 | BP_ONE
153 | ]
154 | }
155 | }, sort_keys=True, indent=4)
156 |
157 | # With a bucket in a blacklisted account:
158 | blacklisted_bucket = json.dumps({
159 | "role_name": "BucketSnakeLambdaProfile",
160 | "app_name": "BucketSnake",
161 | "account_number": "012345678910",
162 | "buckets": {
163 | "blacklisted-bucket-one": [
164 | BP_ONE
165 | ],
166 |
167 | }
168 | }, sort_keys=True, indent=4)
169 |
170 | # With a bucket that doesn't exist:
171 | nonexisting_bucket = json.dumps({
172 | "role_name": "BucketSnakeLambdaProfile",
173 | "app_name": "BucketSnake",
174 | "account_number": "012345678910",
175 | "buckets": {
176 | "not-a-bucket": [
177 | BP_ONE
178 | ],
179 |
180 | }
181 | }, sort_keys=True, indent=4)
182 |
183 | assert json.dumps(incoming_request.loads(request_one).data, sort_keys=True, indent=4) == request_one
184 |
185 | with pytest.raises(ValidationError):
186 | incoming_request.loads(request_err)
187 |
188 | with pytest.raises(ValidationError):
189 | incoming_request.loads(request_err_two)
190 |
191 | with pytest.raises(ValidationError):
192 | incoming_request.loads(request_err_three)
193 |
194 | with pytest.raises(ValidationError):
195 | incoming_request.loads(blacklisted)
196 |
197 | with pytest.raises(BlacklistedAccountException):
198 | incoming_request.loads(blacklisted_bucket)
199 |
200 | with pytest.raises(S3BucketDoesNotExistException):
201 | incoming_request.loads(nonexisting_bucket)
202 |
--------------------------------------------------------------------------------
/bucket_snake/tests/test_s3.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.tests.test_s3
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 | from bucket_snake.s3.permissions import (
9 | check_if_cross_account,
10 | build_bucket_account_mapping,
11 | create_access_to_reports,
12 | collect_policies,
13 | create_s3_role_policies,
14 | S3_PERMISSIONS
15 | )
16 |
17 |
18 | def test_check_if_cross_account(bucket_table):
19 | assert not check_if_cross_account("012345678910", "test-bucket-one")
20 | assert check_if_cross_account("012345678910", "test-bucket-three")
21 |
22 |
23 | def test_build_bucket_account_mapping(config, bucket_table, s3_role_event):
24 | buckets_same, buckets_cross = build_bucket_account_mapping(s3_role_event)
25 |
26 | assert len(buckets_same) == 2
27 | assert len(buckets_cross) == 2
28 |
29 | # With a bucket that doesn't exist:
30 | s3_role_event["buckets"]["imaginary_bucket"] = [
31 | {
32 | "prefix": "*",
33 | "perms": [
34 | "list"
35 | ]
36 | }
37 | ]
38 | # TODO: Don't forget to test for buckets that don't exist and buckets that are blacklisted
39 |
40 |
41 | def test_collect_policies(buckets_same_account_mapping):
42 | collected_policies = collect_policies(buckets_same_account_mapping)
43 |
44 | list_arns = [
45 | "arn:aws:s3:::test-bucket-one",
46 | "arn:aws:s3:::test-bucket-two"
47 | ]
48 |
49 | get_arns = [
50 | "arn:aws:s3:::test-bucket-one/some/path/*",
51 | "arn:aws:s3:::test-bucket-two/*"
52 | ]
53 |
54 | put_delete_arn = "arn:aws:s3:::test-bucket-one/some/path/*"
55 |
56 | assert len(collected_policies["012345678910"]) == 4
57 |
58 | assert len(collected_policies["012345678910"]["list"]) == len(list_arns)
59 | for list_arn in list_arns:
60 | assert list_arn in collected_policies["012345678910"]["list"]
61 |
62 | assert len(collected_policies["012345678910"]["get"]) == len(get_arns)
63 | for get_arn in get_arns:
64 | assert get_arn in collected_policies["012345678910"]["get"]
65 |
66 | assert len(collected_policies["012345678910"]["put"]) == 1
67 | assert collected_policies["012345678910"]["put"].pop() == put_delete_arn
68 |
69 | assert len(collected_policies["012345678910"]["delete"]) == 1
70 | assert collected_policies["012345678910"]["delete"].pop() == put_delete_arn
71 |
72 |
73 | def test_create_s3_role_policies(buckets_same_account_mapping):
74 | role_policies = create_s3_role_policies(collect_policies(buckets_same_account_mapping))
75 |
76 | test_arns = {
77 | "list": [
78 | "arn:aws:s3:::test-bucket-one",
79 | "arn:aws:s3:::test-bucket-two"
80 | ],
81 | "get": [
82 | "arn:aws:s3:::test-bucket-one/some/path/*",
83 | "arn:aws:s3:::test-bucket-two/*"
84 | ],
85 | "put": ["arn:aws:s3:::test-bucket-one/some/path/*"],
86 | "delete": ["arn:aws:s3:::test-bucket-one/some/path/*"]
87 | }
88 |
89 | assert len(role_policies) == 1
90 | assert len(role_policies["012345678910"]["Statement"]) == 4
91 |
92 | for s in role_policies["012345678910"]["Statement"]:
93 | # Verify that the Sid is correct:
94 | perm = s["Sid"].lower()
95 | assert test_arns.get(perm)
96 |
97 | # Verify all the correct S3 permissions are present for the given policy
98 | assert len(s["Action"]) == len(S3_PERMISSIONS[perm])
99 | for p in S3_PERMISSIONS[perm]:
100 | assert p in s["Action"]
101 |
102 | # Verify all the ARNs are present:
103 | assert len(s["Resource"]) == len(test_arns[perm])
104 | for a in test_arns[perm]:
105 | assert a in s["Resource"]
106 |
107 |
108 | def test_create_access_to_reports(buckets_same_account_mapping, config):
109 | # With existing mapping:
110 | role_policies = create_s3_role_policies(collect_policies(buckets_same_account_mapping))
111 | assert len(role_policies["012345678910"]["Statement"]) == 4
112 | create_access_to_reports(role_policies, "012345678910")
113 | assert len(role_policies["012345678910"]["Statement"]) == 5
114 |
115 | assert role_policies["012345678910"]["Statement"][4]["Sid"] == "HistoricalS3Reports"
116 | assert role_policies["012345678910"]["Statement"][4]["Effect"] == "Allow"
117 | assert role_policies["012345678910"]["Statement"][4]["Action"] == "s3:GetObject"
118 | assert role_policies["012345678910"]["Statement"][4]["Resource"] == \
119 | ["arn:aws:s3:::historical-reports/historical-s3-report.json"]
120 |
121 | # And without:
122 | role_policies = {}
123 | create_access_to_reports(role_policies, "012345678910")
124 | assert len(role_policies["012345678910"]["Statement"]) == 1
125 | assert role_policies["012345678910"]["Statement"][0]["Sid"] == "HistoricalS3Reports"
126 | assert role_policies["012345678910"]["Statement"][0]["Effect"] == "Allow"
127 | assert role_policies["012345678910"]["Statement"][0]["Action"] == "s3:GetObject"
128 | assert role_policies["012345678910"]["Statement"][0]["Resource"] == \
129 | ["arn:aws:s3:::historical-reports/historical-s3-report.json"]
130 |
--------------------------------------------------------------------------------
/bucket_snake/util/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Netflix-Skunkworks/bucketsnake/75438be05d3ed77d9795c135c7a6817bc7c6a8a2/bucket_snake/util/__init__.py
--------------------------------------------------------------------------------
/bucket_snake/util/exceptions.py:
--------------------------------------------------------------------------------
1 | """
2 | .. module: bucket_snake.util.exceptions
3 | :platform: Unix
4 | :copyright: (c) 2017 by Netflix Inc., see AUTHORS for more
5 | :license: Apache, see LICENSE for more details.
6 | .. author:: Mike Grima
7 | """
8 |
9 |
10 | class BucketSnakeException(Exception):
11 | pass
12 |
13 |
14 | class InvalidRequestException(BucketSnakeException):
15 | pass
16 |
17 |
18 | class SourceRoleDoesNotExistException(BucketSnakeException):
19 | pass
20 |
21 |
22 | class BlacklistedAccountException(BucketSnakeException):
23 | pass
24 |
25 |
26 | class S3BucketDoesNotExistException(BucketSnakeException):
27 | pass
28 |
29 |
30 | class MissingRequiredConfigurationItemException(BucketSnakeException):
31 | pass
32 |
--------------------------------------------------------------------------------
/docs/GenerateDocs.md:
--------------------------------------------------------------------------------
1 | How to generate the docs:
2 | --------------------
3 | 1. Install Yarn
4 | 1. Navigate to the `website` dir.
5 | 1. Run `yarn build`
6 | 1. Copy `build/` to a different directory
7 | 1. Checkout the `gh-pages` branch: `git checkout gh-pages`
8 | 1. Copy the contents of `build/bucketsnake/*` over the contents of `/`
9 | 1. Commit the changes and push to the `gh-pages` branch
10 |
--------------------------------------------------------------------------------
/docs/configuration.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: configuration
3 | title: Configuring the Bucket Snake Lambda Function
4 | sidebar_label: Configuration
5 | ---
6 |
7 | Bucket Snake is configured through a number of environment variables. The table below outlines what they are and whether they are required.
8 |
9 |
10 | | Environment Variable | Default Value | Required | Description | Example |
11 | | --- | --- | --- | --- | --- |
12 | | `APP_REPORTS_BUCKETS` | None | Yes | A comma-separated list of S3 bucket names that contain the historical S3 report JSON. This is a list to permit applications to use per-region buckets. | `historical-report-bucket-us-east-1,historical-report-bucket-us-west-2,historical-report-bucket-eu-west-1` (replace with your buckets) |
13 | | `SWAG_BUCKET` | None | Yes | The S3 bucket that contains the SWAG data set. | `swag-data-set-bucket-here` (replace with your bucket) |
14 | | `SWAG_REGION` | None | Yes | The region where the SWAG bucket lives. | `us-east-1` (replace with your SWAG bucket region) |
15 | | `SWAG_DATA_FILE` | None | Yes | The prefix to where the accounts JSON lives in the SWAG bucket. | `v2/accounts.json` (replace with your prefix) |
16 | | `REPORTS_BUCKET` | None | Yes | The S3 bucket that contains the Historical S3 report JSON that Bucket Snake itself will use. This is a single bucket, vs. the list that is granted to the application. | `historical-report-bucket-us-east-1` (replace with your bucket) |
17 | | `REPORTS_REGION` | None | Yes | The region of the S3 bucket that contains the historical report. | `us-east-1` (replace with your historical report bucket region) |
18 | | `REPORTS_PREFIX` | `historical-s3-report.json` | No | The prefix (key) of the historical report JSON within the reports bucket. | See Default |
19 | | `BLACKLISTED_SOURCE_ACCOUNTS` | None | No | A comma-separated list of 12-digit AWS account IDs whose source IAM roles are not permitted to use Bucket Snake for S3 access. Bucket Snake will not operate for source application IAM roles in these accounts. | `012345678910,012345678911` (replace with your account IDs) |
20 | | `BLACKLISTED_BUCKET_ACCOUNTS` | None | No | A comma-separated list of 12-digit AWS account IDs that Bucket Snake should not grant S3 access to, i.e. accounts holding protected buckets. | `012345678910,012345678911` (replace with your account IDs) |
21 | | `BUCKET_SNAKE_POLICY_NAME` | `BucketSnake` | No | The IAM policy name on the IAM role that grants S3 access. | See Default |
22 | | `STS_POLICY_NAME` | `BucketSnakeAssumeRole` | No | The IAM policy name on the source IAM role that grants `sts:AssumeRole` permissions to the destination AWS account S3 roles. | See Default |
23 | | `DEST_ROLE_DESCRIPTION` | `Bucket Snake provisioned role` | No | The description for the destination S3 IAM roles provisioned by Bucket Snake. | See Default |
24 | | `BUCKET_SNAKE_ROLE` | `BucketSnake` | No | The name of the IAM role that Bucket Snake assumes into to perform destination AWS account activities. | See Default |
25 | | `BUCKET_SNAKE_SESSION_NAME` | `BucketSnake` | No | The STS session name that Bucket Snake uses when it assumes into the destination AWS account IAM roles. | See Default |
26 | | `IAM_REGION` | `us-east-1` | No | The AWS region where IAM API commands are sent. | See Default |
27 |
130 | These variables can be supplied in the [serverless](https://github.com/Netflix-Skunkworks/bucketsnake/tree/master/docs/serverless-examples) configuration.
131 |
--------------------------------------------------------------------------------
/docs/howitworks.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: howitworks
3 | title: How Bucket Snake Works
4 | sidebar_label: How it works
5 | ---
6 |
7 | Bucket Snake works by creating and provisioning IAM roles in the accounts that own buckets for a given application.
8 |
9 | The general flow works as follows:
10 | 1. Bucket Snake gets triggered with a payload describing:
11 | - The name of the application
12 | - The IAM role name that application uses
13 | - The AWS account the application's IAM role resides in.
14 | - A set of S3 buckets, and the corresponding permissions to grant
15 | 1. With the above information, Bucket Snake figures out where the buckets are located and creates the necessary roles with the requested access.
16 | - If an S3 bucket resides in the same account as the source application, Bucket Snake simply grants permissions to the source application, so no role assumption is required.
17 |
18 | ## Sources of Truth
19 | Bucket Snake depends on [SWAG](https://github.com/Netflix-Skunkworks/swag-api), [Historical](https://github.com/Netflix-Skunkworks/historical), and the [Historical S3 Report](https://github.com/Netflix-Skunkworks/historical-reports).
20 |
21 | SWAG is a schema for the accounts in your infrastructure. It's a source of truth to know which AWS accounts you have, and some details about them.
22 |
23 | Historical keeps track of all S3 buckets in your infrastructure, and the Historical S3 Report is a JSON file that is a dump of all S3 buckets currently in your infrastructure. This is used as a lookup table to know which buckets you have, and which region and AWS account each resides in.
24 |
25 | ### Historical Reports
26 | The Historical S3 report is needed because S3 ARNs don't provide enough detail on S3 buckets. For example, one cannot determine the account and region from just an S3 ARN or bucket name.
27 |
28 | The Historical S3 report solves this by providing a look-up table of S3 buckets and the corresponding region and AWS account for each bucket. This is what Bucket Snake uses to determine if S3 access is cross-account.
29 |
30 | A Bucket Snake aware client needs access to this data source to determine if role-assumption is required. It is assumed that this report is stored in an S3 bucket that permits multiple accounts in your infrastructure access to it (on the bucket policy). Bucket Snake will grant the application access to this JSON file.
31 |
32 | ## Triggering the Lambda
33 | Bucket Snake is triggered from an AWS lambda function invocation that has a payload with this schema:
34 |
35 | {
36 | "role_name": "AppSourceIamRole",
37 | "app_name": "nameOfAppWithSourceIAMRole",
38 | "account_number": "The12digitAWSAccountIDWhereTheAppSourceIAMRoleLives",
39 | "buckets": {
40 | "name-of-s3-bucket": [
41 | {
42 | "prefix": "*",
43 | "perms": [
44 | "list"
45 | ]
46 | },
47 | {
48 | "prefix": "some/prefix/here",
49 | "perms": [
50 | "get",
51 | "put",
52 | "delete"
53 | ]
54 | }
55 |             ],
56 |             "another-s3-bucket": [
57 |                 {
58 |                     "prefix": "*",
59 |                     "perms": ["get"]
60 |                 }
61 |             ],
62 |             "and-another-s3-bucket": [
63 |                 {
64 |                     "prefix": "some/drop/location",
65 |                     "perms": ["put"]
66 |                 }
67 |             ],
68 | ...
69 | }
70 | }
71 |
72 |
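For illustration, the payload can be delivered with a plain Lambda `Invoke` call. Below is a minimal boto3 sketch; the function name, region, and payload values are assumptions for the example:

    import json

    import boto3

    # Hypothetical payload following the schema above.
    payload = {
        "role_name": "AppSourceIamRole",
        "app_name": "myApp",
        "account_number": "012345678910",
        "buckets": {
            "name-of-s3-bucket": [
                {"prefix": "*", "perms": ["list"]},
                {"prefix": "some/prefix/here", "perms": ["get", "put", "delete"]}
            ]
        }
    }

    lambda_client = boto3.client("lambda", region_name="us-east-1")

    # "Event" invokes asynchronously; use "RequestResponse" to wait for the result.
    lambda_client.invoke(
        FunctionName="BucketSnake",  # assumed function name
        InvocationType="Event",
        Payload=json.dumps(payload).encode("utf-8"),
    )
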
73 | ### Now what?
74 | Bucket Snake would receive the JSON from the lambda invocation, and from that, would:
75 | 1. Verify that the source IAM role exists
76 | 1. Verify that the buckets exist, and are permitted by Bucket Snake (more on this in the configuration section)
77 | 1. Determine which S3 buckets are in the same account as the source application, and which are not
78 | 1. For buckets in the same account, Bucket Snake will add in the proper S3 permissions to the source app IAM role
79 | 1. For buckets that are not in the same account, Bucket Snake will create IAM roles in the destination accounts with access to the respective buckets
80 | - Destination IAM role name follows the format: `AppName-12DigitSourceAccountNumber`.
81 | - This role will have a trust policy that allows the source application `sts:AssumeRole` access to it.
82 | 1. If applicable, a policy will be added to the source IAM role to grant `sts:AssumeRole` access to those destination
83 | IAM roles
84 | 1. And lastly, Bucket Snake will grant access to the Historical S3 report's JSON file so that the application knows
85 | which S3 buckets require the role assumption for access.
86 |
87 | ## How does my application make use of this?
88 | At present no "Bucket Snake aware" client library exists. We are currently in the process of developing one for Python and Java.
89 |
90 | This client would work by:
91 | 1. Fetch the Historical S3 JSON (access granted by Bucket Snake)
92 | 1. Check if the S3 bucket is in the same account (this information lives in the Historical report).
93 |
94 | If it's in the same account, then the client directly accesses the bucket with the on-instance
95 | IAM credentials.
96 |
97 | If the bucket is in a _different account_:
98 | 1. Assume to the destination role (named `AppName-12DigitSourceAccountNumber`),
99 | 1. Cache the credentials for future use (re-assume when expired)
100 | 1. Use the assumed credentials to access the S3 bucket (a rough sketch of such a client is shown below)
101 |
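Until an official client ships, the sketch below illustrates what such a client could look like. The helper name, the report bucket/key parameters, and the region handling are illustrative assumptions; the role ARN follows the `AppName-12DigitSourceAccountNumber` convention described above:

    import json

    import boto3


    def s3_client_for_bucket(bucket, app_name, report_bucket, report_key):
        """Return an S3 client that can access `bucket`, assuming a role if needed.

        Illustrative sketch only: `report_bucket`/`report_key` are assumed to point
        at the Historical S3 report that Bucket Snake granted the application access to.
        """
        s3 = boto3.client("s3")
        sts = boto3.client("sts")

        # 1. Fetch the Historical S3 report (access granted by Bucket Snake):
        report = json.loads(
            s3.get_object(Bucket=report_bucket, Key=report_key)["Body"].read()
        )
        bucket_account = report["buckets"][bucket]["AccountId"]

        # 2. Same account? The on-instance IAM credentials are enough:
        current_account = sts.get_caller_identity()["Account"]
        if bucket_account == current_account:
            return s3

        # 3. Cross-account: assume the destination role that Bucket Snake provisioned
        #    (named AppName-12DigitSourceAccountNumber). A real client would cache these
        #    credentials and re-assume when they expire.
        creds = sts.assume_role(
            RoleArn="arn:aws:iam::{}:role/{}-{}".format(bucket_account, app_name, current_account),
            RoleSessionName=app_name,
        )["Credentials"]

        return boto3.client(
            "s3",
            aws_access_key_id=creds["AccessKeyId"],
            aws_secret_access_key=creds["SecretAccessKey"],
            aws_session_token=creds["SessionToken"],
        )
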
102 | ## What about future access?
103 | Simply pass in a new payload to the lambda with additional buckets to add access to. Bucket Snake is [idempotent](https://en.wikipedia.org/wiki/Idempotence).
104 |
105 | The Bucket Snake policies should not be modified outside of Bucket Snake. You can modify any policy outside
106 | of the Bucket Snake managed ones -- Bucket Snake will not alter or modify them. This is useful should you need to add permissions beyond what Bucket Snake provides.
107 |
108 | ## Which permissions are created?
109 | See the next section for details.
110 |
--------------------------------------------------------------------------------
/docs/installation.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: installation
3 | title: Deploying the Bucket Snake Lambda Function
4 | sidebar_label: Deployment
5 | ---
6 |
7 | Bucket Snake can be deployed with [Serverless](https://serverless.com). See the [docs/serverless-examples](https://github.com/Netflix-Skunkworks/bucketsnake/tree/master/docs/serverless-examples) for an example.
8 |
9 | ## What is required?
10 | Bucket Snake depends on [SWAG](https://github.com/Netflix-Skunkworks/swag-api), [Historical](https://github.com/Netflix-Skunkworks/historical), and the [Historical S3 Report](https://github.com/Netflix-Skunkworks/historical-reports).
11 |
12 | For a summary of these tools, please visit their respective pages and review the documentation.
13 |
14 | ### Historical S3 Report
15 | Bucket Snake has a hard dependency on the Historical S3 Report, which is provided as part of the [Historical-Reports](https://github.com/Netflix-Skunkworks/historical-reports).
16 |
17 | Bucket Snake doesn't require much from the report. It requires:
18 | 1. The bucket name
19 | 1. The bucket region
20 | 1. The bucket account
21 |
22 | This can be achieved with a Historical S3 report that has the `EXCLUDE_FIELDS` environment variable configured with the value:
23 |
24 | Name,_version,Grants,LifecycleRules,Logging,Policy,Tags,Versioning,Website,Cors,Notifications,Acceleration,Replication,CreationDate,AnalyticsConfigurations,MetricsConfigurations,InventoryConfigurations
25 |
26 | A slim report is not required -- but it makes for a smaller JSON file for client applications to fetch from S3 (or have included with their deployment).
27 |
28 | #### Historical Report Bucket
29 | An S3 Bucket that contains this report must be available for the Bucket Snake lambda function. This is required to be set in the configuration. (See the next section for details)
30 |
31 | The Historical S3 report should live in a bucket that all applications in your infrastructure can access. This will allow applications to fetch this file to make a determination if cross-account access is required.
32 |
33 | ### SWAG
34 | SWAG is a hard requirement for Bucket Snake. The SWAG JSON must be accessible in S3, and its location is configurable (see the next section for details).
35 |
36 | ## IAM Roles
37 | Bucket Snake operates on a hub-and-spoke model. The Lambda function itself requires an IAM role, which then assumes into IAM roles in other accounts to provision the S3 access for a given application.
38 |
39 | Please use your favorite tool to sync these roles out across your environment.
40 |
41 | #### Bucket Snake Lambda Function IAM Role
42 |
43 | The trust policy must be similar to:
44 |
45 | {
46 | "Statement": [
47 | {
48 | "Effect": "Allow",
49 | "Action": "sts:AssumeRole",
50 | "Principal": {
51 | "Service": "lambda.amazonaws.com"
52 | }
53 | }
54 | ]
55 | }
56 |
57 | The inline policies must be similar to:
58 |
59 | {
60 | "Statement": [
61 | {
62 | "Sid": "Logs",
63 | "Effect": "Allow",
64 | "Action": [
65 | "logs:CreateLogGroup",
66 | "logs:CreateLogStream",
67 | "logs:PutLogEvents"
68 | ],
69 | "Resource": "*"
70 | },
71 | {
72 | "Sid": "HistoricalS3",
73 | "Effect": "Allow",
74 | "Action": "s3:GetObject",
75 | "Resource": "arn:aws:s3:::historical-s3-report-bucket/prefix/to/historical-s3-reports.json"
76 | },
77 | {
78 | "Sid": "AssumeToRoles",
79 | "Effect": "Allow",
80 | "Action": "sts:AssumeRole",
81 | "Resource": "arn:aws:iam::*:role/BucketSnake"
82 | }
83 | ]
84 | }
85 |
86 |
87 | #### In-account Bucket Snake IAM Role (Destination Roles)
88 |
89 | The trust policy must be similar to:
90 |
91 | {
92 | "Statement": [
93 | {
94 | "Effect": "Allow",
95 | "Action": "sts:AssumeRole",
96 | "Principal": {
97 | "AWS": "arn:aws:iam::SOURCE-BUCKET-SNAKE-LAMBDA-ACCOUNT-HERE:role/BucketSnakeLambdaProfile"
98 | }
99 | }
100 | ]
101 | }
102 |
103 | The inline policies must be similar to:
104 |
105 | {
106 | "Statement": [
107 | {
108 | "Action": [
109 | "iam:CreateRole",
110 | "iam:GetRole",
111 | "iam:PutRolePolicy",
112 | "iam:UpdateAssumeRolePolicy"
113 | ],
114 | "Resource": "*",
115 | "Effect": "Allow"
116 | }
117 | ]
118 | }
119 |
--------------------------------------------------------------------------------
/docs/intro.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: intro
3 | title: Introduction
4 | sidebar_label: Introduction
5 | ---
6 |
7 | **This project is in heavy development and not yet ready for production use!**
8 |
9 | Bucket Snake is an AWS Lambda function that provisions S3 access for IAM roles. A primary feature
10 | of Bucket Snake is to create IAM roles that reside in the account where the S3 buckets live. This facilitates
11 | proper cross-account S3 bucket access (via IAM role assumption).
12 |
13 | S3's permission model complicates S3 access in large environments. This complexity is increased with
14 | S3's use of bucket and object ownership. When the ownership of a bucket and its objects differs,
15 | there can be annoying and hard-to-scale permissions issues. By using IAM roles in the same account
16 | a bucket lives in, you can avoid mis-matched ownership and rely on IAM and/or S3 bucket policies
17 | to permit access to buckets. Avoiding ACLs makes S3 access much easier to manage at scale.
18 |
19 | ## Bucket-Snake aware S3 clients?
20 | Bucket Snake provisions the IAM roles for access. But this is only half of the solution. The second
21 | half is to have an S3 client that is aware of:
22 | 1. The location of the S3 buckets
23 | 1. The role to assume into
24 |
25 | With the above, an application should be able to access any bucket required in your infrastructure without
26 | ever needing to worry about per-object permissions.
27 |
28 | **Currently, there are no Bucket-Snake aware ready-to-use clients. This is under active development!**
29 |
30 | ## S3 Permissions Complexity
31 | Common S3 challenges that one will often encounter:
32 | 1. Which account owns an S3 bucket? (AWS S3 ARNs lack account IDs.)
33 | 1. Which specific S3 permissions are actually required for performing the actions I need?
34 | 1. What about cross-account access? What's the best way of granting permissions for cross-account without
35 | having to rely on ACLs on each and every object?
36 |
37 | Bucket Snake resolves the above by:
38 | 1. Relying on Historical's S3 Report JSON (Full look-up table of all S3 buckets you own, and which account they reside in)
39 | 1. Abstracting S3 permissions into simple `list`, `get`, `put`, and `delete` actions against a bucket and prefix.
40 | 1. Provisioning IAM roles in the destination bucket accounts with the correct `sts:AssumeRole` and S3 permissions, which grants the required
41 |    access and avoids bucket-and-object ownership issues.
42 |
43 | ### Cross-account access issues?
44 | **TL;DR:** You always want to use an IAM role within the bucket-owning account for placing (as well as reading)
45 | objects to avoid bucket-and-object ownership mismatches. Otherwise, the account that owns the objects will need
46 | to explicitly add an ACL to each and every object that requires additional accounts to access. This massively
47 | increases S3 permissions complexity.
48 |
49 | The next section documents this in detail.
50 |
--------------------------------------------------------------------------------
/docs/permissions.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: permissions
3 | title: Bucket Snake S3 Permissions
4 | sidebar_label: Permission Reference
5 | ---
6 |
7 | Bucket Snake abstracts S3 permissions into the following categories:
8 |
9 | 1. `list`
10 | 1. `get`
11 | 1. `put`
12 | 1. `delete`
13 |
14 | ## Bucket vs. Object level permissions
15 | S3 has two types of permissions:
16 | 1. Bucket permissions
17 | 1. Object permissions
18 |
19 | Bucket permissions are applied to an entire bucket, whereas object permissions are applied to a prefix within a bucket.
20 |
21 | _List_ permissions are _bucket-level_ permissions ([more info here](http://docs.aws.amazon.com/AmazonS3/latest/dev/using-with-s3-actions.html#using-with-s3-actions-related-to-buckets)). Granting `list` access to a bucket will grant access to list the contents of an entire S3 bucket. When Bucket Snake generates the IAM permissions, it will always make `list` a separate statement. Thus, lumping `list` permissions with other object-level permissions is fine; Bucket Snake will handle this properly.
22 |
23 | Object-level permissions will be applied to the specified prefix that is provided. Object-level permissions are [defined here](http://docs.aws.amazon.com/AmazonS3/latest/dev/using-with-s3-actions.html#using-with-s3-actions-related-to-objects).
24 |
25 | ## S3 -> IAM Permissions Mapping
26 | The abstracted S3 permissions are transformed into the following IAM permissions (a short code sketch of this mapping follows the list):
27 | 1. `list`
28 | - `s3:ListBucket`
29 | - `s3:ListBucketVersions`
30 | 1. `get`
31 | - `s3:GetObject`
32 | - `s3:GetObjectTagging`
33 | - `s3:GetObjectVersion`
34 | - `s3:GetObjectVersionTagging`
35 | - `s3:GetObjectAcl`
36 | - `s3:GetObjectVersionAcl`
37 | 1. `put`
38 | - `s3:PutObject`
39 | - `s3:PutObjectTagging`
40 | - `s3:PutObjectVersionTagging`
41 | - `s3:ListMultipartUploadParts`*
42 | - `s3:AbortMultipartUpload`
43 | - `s3:RestoreObject`
44 | 1. `delete`
45 | - `s3:DeleteObject`
46 | - `s3:DeleteObjectTagging`
47 | - `s3:DeleteObjectVersion`
48 | - `s3:DeleteObjectVersionTagging`
49 |
50 | _*Listing multipart uploads is typically used when performing multipart uploads, and as such, has been lumped in with `put` permissions._
51 |
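For reference, the mapping above can also be written as a Python dictionary. The sketch below is transcribed from the list in this document for illustration; the canonical mapping used by Bucket Snake is the `S3_PERMISSIONS` constant in `bucket_snake/s3/permissions.py`, and the statement at the end uses a hypothetical bucket ARN:

    # Abstract Bucket Snake permission -> concrete S3 IAM actions,
    # transcribed from the mapping documented above.
    S3_PERMISSION_ACTIONS = {
        "list": [
            "s3:ListBucket",
            "s3:ListBucketVersions",
        ],
        "get": [
            "s3:GetObject",
            "s3:GetObjectTagging",
            "s3:GetObjectVersion",
            "s3:GetObjectVersionTagging",
            "s3:GetObjectAcl",
            "s3:GetObjectVersionAcl",
        ],
        "put": [
            "s3:PutObject",
            "s3:PutObjectTagging",
            "s3:PutObjectVersionTagging",
            "s3:ListMultipartUploadParts",
            "s3:AbortMultipartUpload",
            "s3:RestoreObject",
        ],
        "delete": [
            "s3:DeleteObject",
            "s3:DeleteObjectTagging",
            "s3:DeleteObjectVersion",
            "s3:DeleteObjectVersionTagging",
        ],
    }

    # Example: an IAM statement granting "put" access to one prefix would then
    # look roughly like the "Put" block in the sample policy below.
    put_statement = {
        "Sid": "Put",
        "Effect": "Allow",
        "Action": S3_PERMISSION_ACTIONS["put"],
        "Resource": ["arn:aws:s3:::my-bucket/some/prefix/*"],  # hypothetical ARN
    }
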
52 | ## Sample Generated IAM Policy
53 | The role provisioned in the bucket-residing account would look similar to this:
54 |
55 | {
56 | "Statement": [
57 | {
58 | "Sid": "List",
59 | "Effect": "Allow",
60 | "Action": [
61 | "s3:ListBucket",
62 | "s3:ListBucketVersions"
63 | ],
64 | "Resource": [
65 |                 "arn:aws:s3:::<bucket-name>",
66 | # ... All the buckets that need listing here ...
67 | ]
68 | },
69 | {
70 | "Sid": "Get",
71 | "Effect": "Allow",
72 | "Action": [
73 | "s3:GetObject",
74 | "s3:GetObjectTagging",
75 | "s3:GetObjectVersion",
76 | "s3:GetObjectVersionTagging"
77 | ],
78 | "Resource": [
79 |                 "arn:aws:s3:::<bucket-name>/<prefix>",
80 | # ... All the buckets and prefixes requiring get access here ...
81 | ]
82 | },
83 | {
84 | "Sid": "Put",
85 | "Effect": "Allow",
86 | "Action": [
87 | "s3:PutObject",
88 | "s3:PutObjectTagging",
89 | "s3:PutObjectVersionTagging",
90 | "s3:ListMultipartUploadParts",
91 | "s3:AbortMultipartUpload",
92 | "s3:RestoreObject"
93 | ],
94 | "Resource": [
95 |                 "arn:aws:s3:::<bucket-name>/<prefix>",
96 | # ... All the buckets and prefixes requiring put access here ...
97 | ]
98 | },
99 | {
100 | "Sid": "Delete",
101 | "Effect": "Allow",
102 | "Action": [
103 | "s3:DeleteObject",
104 | "s3:DeleteObjectTagging",
105 | "s3:DeleteObjectVersion",
106 | "s3:DeleteObjectVersionTagging"
107 | ],
108 | "Resource": [
109 |                 "arn:aws:s3:::<bucket-name>/<prefix>",
110 | # ... All the buckets and prefixes requiring delete access here ...
111 | ]
112 | }
113 | ]
114 | }
115 |
116 | The source application would be given an inline policy to assume into the destination IAM role, which looks like:
117 |
118 | {
119 | "Statement": [
120 | {
121 | "Effect": "Allow",
122 | "Action": "sts:AssumeRole",
123 | "Resource": [
124 |                 "arn:aws:iam::<bucket-account-number>:role/<app-name>-<12-digit-app-account-number>",
125 | # ... All the roles to assume into here ...
126 | ]
127 | }
128 | ]
129 | }
130 |
131 | ## Limitations
132 | IAM has a [limitation](http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_iam-limits.html) of 64 characters for IAM role names. The format declared for cross-account destination roles is `<AppName>-<12-digit-AppAccountNumber>`; the hyphen plus the 12-digit account number consume 13 characters, which leaves 51 characters for the application name, as the short check below illustrates. Thus, applications with names longer than 51 characters are not supported by Bucket Snake.
133 |
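A quick check of that arithmetic (the application name here is a hypothetical example):

    # IAM role names are limited to 64 characters. The destination role name is
    # "<AppName>-<12-digit-account-number>", so the hyphen and account number
    # consume 13 characters, leaving 64 - 13 = 51 characters for the app name.
    MAX_ROLE_NAME_LENGTH = 64
    app_name = "someApp"  # hypothetical application name
    dest_role_name = "{}-{}".format(app_name, "012345678910")

    assert len(dest_role_name) <= MAX_ROLE_NAME_LENGTH
    assert MAX_ROLE_NAME_LENGTH - len("-012345678910") == 51
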
--------------------------------------------------------------------------------
/docs/s3background.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: s3background
3 | title: S3 Background Information
4 | sidebar_label: S3 Background
5 | ---
6 |
7 | AWS S3 has a complex permissions model. Before continuing, please review the following documents:
8 |
9 | - [S3 Access Control Overview](http://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-overview.html)
10 | - [How Amazon S3 Authorizes a Request](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-s3-evaluates-access-control.html)
11 | - [How Amazon S3 Authorizes a Request for a Bucket Operation](http://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-auth-workflow-bucket-operation.html)
12 | - **And Perhaps Most Importantly: [How Amazon S3 Authorizes a Request for an Object Operation](http://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-auth-workflow-object-operation.html)**
13 |
14 | ## Bucket Owner and Object Owner?
15 |
16 | S3 has a complex permissions model that examines the "owner" of a resource when making an access decision.
17 |
18 | An "Owner" refers to an AWS account which the resource belongs to. With regards to a bucket, this is the account that a bucket "resides" in.
19 |
20 | So, if account `Foo` had a bucket named `thebucket`, `Foo` would be the owner of `thebucket`.
21 |
22 | Objects also have ownership. Object ownership refers to the AWS account that _put_ the object into the bucket.
23 |
24 | So, if an IAM role in account `Foo` put an object into the `thebucket` bucket, then the object would be owned by `Foo`.
25 |
26 | ### Cross-account ownership
27 |
28 | When bucket and object ownership is the same (that is, the same account owns both the bucket and the object), access is straightforward. In this case, IAM in the account, as well as the bucket policy, determines whether or not access is granted to the object.
29 |
30 | Trouble begins when the ownership is different. Buckets can hold objects that belong to different AWS accounts. For example: Take two AWS accounts: `Foo` and `Bar`. Bucket `thebucket` resides in account `Foo`. `Bar` has an IAM role named `theRole` that can place an object named `theobj` in `thebucket`. If an application with `theRole` (`Bar` account) places the object, the object will reside in `thebucket` (owned by `Foo`), but the object will be owned by `Bar`.
31 |
32 | This may not seem like an issue at first; however, as [AWS's documentation regarding object access](http://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-auth-workflow-object-operation.html) explains, access to this object is ultimately determined by the `Bar` account. Other accounts only gain access when `Bar` places an ACL on `theobj` that explicitly grants it.
33 |
34 | In this example, without any ACLs applied to `theobj`, despite `Foo` being the owner of the `thebucket` bucket, IAM roles in `Foo` won't be able to read the object or otherwise manipulate it. `Foo` will be able to see that the object is there, and can delete the object, but can't otherwise make any modifications to it.
35 |
36 | To resolve this issue, `Bar` would need to add an ACL to `theobj` so that `Foo` has access to it. One common way to resolve this is to put the `bucket-owner-full-control` canned ACL on the object. While that may alleviate access issues in this example, it does not fully resolve the issue.
37 |
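For illustration, this is how an uploader in account `Bar` would apply that canned ACL when writing the object (a minimal boto3 sketch using the hypothetical bucket and object names from the example above):

    import boto3

    s3 = boto3.client("s3")

    # Written from an IAM role in account Bar into a bucket owned by account Foo.
    # The canned ACL gives the bucket owner (Foo) full control of this object,
    # but Bar remains the object owner, so the underlying problem persists.
    s3.put_object(
        Bucket="thebucket",
        Key="theobj",
        Body=b"example payload",
        ACL="bucket-owner-full-control",
    )
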
38 | ### ACLs are a bad solution
39 | The `bucket-owner-full-control` canned ACL may seem like a solution to this problem, but it's not. The owner of the object in this example is still not the same as the owner of the bucket. Thus, the decision to read/write/modify the object is still determined by `Bar` (the object owning account).
40 |
41 | If a new account, `Baz`, requires access to the bucket and objects within it, the ACL of the objects would need to be altered again to permit `Baz` access. This is where scalability becomes very difficult over time. This complexity gets significantly worse as objects and accounts increase.
42 |
43 | ### IAM Roles can permanently solve this problem
44 | Avoiding cross-account bucket-and-object ownership mismatches is *strongly* advised. This is achieved by always performing S3 operations with an IAM role within the bucket-owning account. Doing this ensures that the bucket owner and object owner are the same.
45 |
46 | This also helps simplify access by reducing the number of places where S3 permissions can live, which ultimately improves the security of your infrastructure.
47 |
--------------------------------------------------------------------------------
/docs/serverless-examples/.serverless-example.yml:
--------------------------------------------------------------------------------
1 | service: "bucket-snake"
2 |
3 | provider:
4 | name: aws
5 | runtime: python3.6
6 | memorySize: 512
7 | timeout: 300
8 | deploymentBucket:
9 | name: some-s3-bucket-here
10 |
11 | custom: ${file(serverless_configs/${opt:stage}.yml)}
12 |
13 | functions:
14 | BucketSnake:
15 | handler: bucket_snake.entrypoints.handler
16 | description: Bucket Snake grantsss S3 permissionsss at scale.
17 | tags:
18 | owner: youremail@yourcompanyhere.com
19 | role: arn:aws:iam::${self:custom.accountId}:role/BucketSnakeLambdaProfile
20 | environment:
21 | SENTRY_DSN: ${self:custom.sentryDSN}
22 | BLACKLISTED_SOURCE_ACCOUNTS: your,blacklisted,source,accounts,here,as,comma,separated,list
23 | BLACKLISTED_BUCKET_ACCOUNTS: your,blacklisted,bucket,accounts,here,as,comma,separated,list
24 | APP_REPORTS_BUCKETS: historical,s3,reports,buckets,here,as,comma,separated,list
25 | SWAG_BUCKET: your-swag-bucket-here
26 | SWAG_REGION: swag-region-here
27 | SWAG_DATA_FILE: prefix/to/account.json
28 | REPORTS_BUCKET: historical-s3-reports-bucket-for-bucketsnake
29 | REPORTS_REGION: historical-s3-reports-bucket-region-for-bucketsnake
30 | REPORTS_PREFIX: prefix/to/historical-s3-report.json
31 |
32 | resources:
33 | Resources:
34 | BucketSnakeLogGroup:
35 | Properties:
36 | RetentionInDays: "3"
37 |
38 | plugins:
39 | - serverless-python-requirements
40 | - serverless-prune-plugin
41 |
--------------------------------------------------------------------------------
/docs/serverless-examples/requirements.txt:
--------------------------------------------------------------------------------
1 | git+https://github.com/Netflix-Skunkworks/bucketsnake.git#egg=bucketsnake
2 |
--------------------------------------------------------------------------------
/docs/serverless-examples/serverless_configs/environment.yml:
--------------------------------------------------------------------------------
1 | accountId: YOUR-ACCOUNT-NUMBER-TO-DEPLOY-TO-HERE
2 | accountName: ACCOUNT-NAME-HERE
3 | sentryDSN: SENTRY-DSN-HERE
4 | pythonRequirements:
5 | dockerizePip: true
6 | prune:
7 | automatic: true
8 | number: 3
9 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3 # no-deploy
2 | raven
3 | swag-client
4 | pyyaml
5 | git+https://github.com/Netflix-Skunkworks/raven-python-lambda.git#egg=raven-python-lambda
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """
2 | Bucket Snake
3 | ============
4 | AWS Lambda function that grantsss S3 permissionsss at ssscale.
5 | """
6 | import sys
7 | import os.path
8 |
9 | from setuptools import setup, find_packages
10 |
11 | ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__)))
12 |
13 | # When executing setup.py, we need to be able to import ourselves; this
14 | # means that we need to add the repository root to sys.path.
15 | sys.path.insert(0, ROOT)
16 |
17 | about = {}
18 | with open(os.path.join(ROOT, "bucket_snake", "__about__.py")) as f:
19 | exec(f.read(), about)
20 |
21 | install_requires = [
22 | "boto3",
23 | "swag-client",
24 | "retrying>=1.3.3",
25 | "raven_python_lambda"
26 | ]
27 |
28 | tests_require = [
29 | 'pytest',
30 | 'moto',
31 | 'coveralls',
32 | ]
33 |
34 | setup(
35 | name=about["__title__"],
36 | version=about["__version__"],
37 | author=about["__author__"],
38 | author_email=about["__email__"],
39 | url=about["__uri__"],
40 | description=about["__summary__"],
41 | long_description='See README.md',
42 | packages=find_packages(),
43 | include_package_data=True,
44 | zip_safe=False,
45 | install_requires=install_requires,
46 | extras_require={
47 | 'tests': tests_require
48 | },
49 | entry_points={},
50 | keywords=['aws', 'account_management', "s3", "security", "iam", "lambda", "sss", "snake"]
51 | )
52 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py36
3 |
4 |
5 | [testenv]
6 | passenv = TRAVIS TRAVIS_*
7 | deps=
8 | pytest-cov
9 | commands=
10 | pip install -e ".[tests]"
11 | pytest --cov
12 | coveralls
13 |
--------------------------------------------------------------------------------
/website/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .DS_Store
3 | lib/core/metadata.js
4 | lib/core/MetadataBlog.js
5 | website/translated_docs
6 | website/build/
7 | website/yarn.lock
8 | website/node_modules
9 | i18n/
10 | website/i18n/*
11 | !website/i18n/en.json
12 |
--------------------------------------------------------------------------------
/website/core/Footer.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) 2017-present, Facebook, Inc.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 |
8 | const React = require('react');
9 |
10 | class Footer extends React.Component {
11 | render() {
12 | const currentYear = new Date().getFullYear();
13 | return (
14 |
87 | );
88 | }
89 | }
90 |
91 | module.exports = Footer;
92 |
--------------------------------------------------------------------------------
/website/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "scripts": {
3 | "examples": "docusaurus-examples",
4 | "start": "docusaurus-start",
5 | "build": "docusaurus-build",
6 | "publish-gh-pages": "docusaurus-publish",
7 | "write-translations": "docusaurus-write-translations",
8 | "version": "docusaurus-version",
9 | "rename-version": "docusaurus-rename-version"
10 | },
11 | "devDependencies": {
12 | "docusaurus": "^1.0.3"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/website/pages/en/index.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) 2017-present, Facebook, Inc.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 |
8 | const React = require('react');
9 |
10 | const CompLibrary = require('../../core/CompLibrary.js');
11 | const MarkdownBlock = CompLibrary.MarkdownBlock; /* Used to read markdown */
12 | const Container = CompLibrary.Container;
13 | const GridBlock = CompLibrary.GridBlock;
14 |
15 | const siteConfig = require(process.cwd() + '/siteConfig.js');
16 |
17 | class Button extends React.Component {
18 | render() {
19 | return (
20 |