├── .coveragerc ├── .github └── workflows │ ├── python-publish.yml │ ├── pythonpackage.yml │ └── updater.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── MANIFEST.in ├── README.md ├── policyuniverse ├── __init__.py ├── action.py ├── action_categories.py ├── arn.py ├── common.py ├── data.json ├── expander_minimizer.py ├── organization.py ├── policy.py ├── statement.py └── tests │ ├── __init__.py │ ├── helpers.py │ ├── test_action_categories.py │ ├── test_arn.py │ ├── test_common.py │ ├── test_expander_minimizer.py │ ├── test_organization.py │ ├── test_policy.py │ └── test_statement.py ├── setup.cfg ├── setup.py └── updater ├── awsconsole.js ├── requirements.txt ├── service.py ├── service_action.py ├── test_service.py ├── test_service_action.py └── updater.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | include = policyuniverse/*.py 3 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflows will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: Upload Python Package 5 | 6 | on: 7 | push: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | deploy: 13 | 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v2 18 | - name: Set up Python 19 | uses: actions/setup-python@v2 20 | with: 21 | python-version: '3.x' 22 | - name: Install dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install setuptools wheel twine 26 | - name: Build and publish 27 | env: 28 | TWINE_USERNAME: __token__ 29 | TWINE_PASSWORD: ${{ secrets.pypi_password }} 30 | run: | 31 | python setup.py sdist bdist_wheel 32 | twine upload dist/* 33 | -------------------------------------------------------------------------------- /.github/workflows/pythonpackage.yml: -------------------------------------------------------------------------------- 1 | name: Python package 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ubuntu-latest 13 | strategy: 14 | max-parallel: 5 15 | matrix: 16 | python-version: 17 | - "3.7" 18 | - "3.8" 19 | - "3.9" 20 | - "3.10" 21 | 22 | steps: 23 | - uses: actions/checkout@v3 24 | - name: Set up Python ${{ matrix.python-version }} 25 | uses: actions/setup-python@v4 26 | with: 27 | python-version: ${{ matrix.python-version }} 28 | - name: Install dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | pip install -e .[tests,dev] 32 | - name: Run pre-commit 33 | run: | 34 | pre-commit install 35 | pre-commit run -a 36 | - name: Test with pytest/coverage 37 | run: | 38 | pip install pytest pytest-cov coveralls==2.2.0 39 | pytest --cov 40 | - name: Upload coverage data to coveralls.io 41 | if: success() && matrix.python-version == '3.7' 42 | run: coveralls 43 | env: 44 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 45 | -------------------------------------------------------------------------------- /.github/workflows/updater.yml: -------------------------------------------------------------------------------- 1 | name: "PolicyUniverse Updater - Action" 2 | 3 | on: 4 | schedule: 5 | - cron: '15 10 * * *' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | Update-Job: 10 | 11 | 
runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | - name: Set up Python 3.8 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: 3.8 19 | - name: Set up NodeJS 20 | uses: actions/setup-node@v2 21 | with: 22 | node-version: '14' 23 | - name: Download phantomjs 24 | run: | 25 | pwd &&\ 26 | wget -O phantomjs-2.1.1-linux-x86_64.tar.bz2 https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 &&\ 27 | tar -xf phantomjs-2.1.1-linux-x86_64.tar.bz2 &&\ 28 | ls -al 29 | - name: Install policyuniverse & updater python dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install -e . 33 | pip install -r updater/requirements.txt 34 | - name: Configure AWS Credentials 35 | uses: aws-actions/configure-aws-credentials@v1 36 | with: 37 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 38 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 39 | aws-region: us-west-2 40 | - name: Run the updater 41 | env: 42 | AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }} 43 | AWS_ROLE_NAME: ${{ secrets.AWS_ROLE_NAME }} 44 | id: runupdater 45 | run: | 46 | cd updater 47 | python updater.py > output_summary.txt 48 | output_summary=`cat output_summary.txt` 49 | output_summary="${output_summary//'%'/'%25'}" 50 | output_summary="${output_summary//$'\n'/'%0A'}" 51 | output_summary="${output_summary//$'\r'/'%0D'}" 52 | echo "::set-output name=updatersummary::$output_summary" 53 | rm output_summary.txt 54 | mv output_formatted.json ../policyuniverse/data.json 55 | cd .. 56 | sed -ri "s/(version=\"[0-9]+.[0-9]+.[0-9]+.)([0-9]+)\"/\1`date +"%Y%m%d"`\"/g" setup.py 57 | rm -rf phantomjs-2.1.1-linux-x86_64/ 58 | rm phantomjs-2.1.1-linux-x86_64.tar.bz2 59 | - name: Create Pull Request 60 | id: cpr 61 | uses: peter-evans/create-pull-request@v3 62 | with: 63 | token: ${{ secrets.GITHUB_TOKEN }} 64 | commit-message: Updating PolicyUniverse SDFs 65 | committer: GitHub 66 | author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com> 67 | title: 'Updating PolicyUniverse Data.json' 68 | body: ${{ steps.runupdater.outputs.updatersummary }} 69 | labels: data-update, automated pr 70 | assignees: | 71 | scriptsrc 72 | patricksanders 73 | reviewers: | 74 | scriptsrc 75 | patricksanders 76 | draft: false 77 | branch: auto_data_update 78 | request-to-parent: false 79 | - name: Check outputs 80 | run: | 81 | echo "Pull Request Number - ${{ env.PULL_REQUEST_NUMBER }}" 82 | echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" 83 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | venv/ 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | 56 | # Sphinx documentation 57 | docs/_build/ 58 | 59 | # PyBuilder 60 | target/ 61 | 62 | #Ipython Notebook 63 | .ipynb_checkpoints 64 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v4.2.0 # Use the ref you want to point at 6 | hooks: 7 | - id: trailing-whitespace 8 | - id: check-ast 9 | - id: check-case-conflict 10 | - id: check-yaml 11 | - id: pretty-format-json 12 | args: ["--autofix"] 13 | 14 | - repo: https://github.com/pre-commit/mirrors-autopep8 15 | rev: v1.6.0 16 | hooks: 17 | - id: autopep8 18 | 19 | - repo: https://github.com/pycqa/flake8 20 | rev: 4.0.0 21 | hooks: 22 | - id: flake8 23 | args: ["--exclude", "venv/,.tox/,.eggs/"] 24 | 25 | - repo: https://github.com/pre-commit/mirrors-isort 26 | rev: 'v5.10.1' 27 | hooks: 28 | - id: isort 29 | args: ["--profile", "black"] 30 | 31 | - repo: https://github.com/ambv/black 32 | rev: 22.3.0 33 | hooks: 34 | - id: black 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include setup.py README.md MANIFEST.in LICENSE 2 | recursive-include policyuniverse *.json 3 | global-exclude *~ 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PolicyUniverse 2 | 3 | [![Version](http://img.shields.io/pypi/v/policyuniverse.svg?style=flat)](https://pypi.python.org/pypi/policyuniverse/) 4 | 5 | [![Build Status](https://github.com/Netflix-Skunkworks/policyuniverse/workflows/Python%20package/badge.svg)](https://github.com/Netflix-Skunkworks/policyuniverse/actions) 6 | 7 | [![Updater Status](https://github.com/Netflix-Skunkworks/policyuniverse/actions/workflows/updater.yml/badge.svg)](https://github.com/Netflix-Skunkworks/policyuniverse/actions/workflows/updater.yml) 8 | 9 | [![Coverage Status](https://coveralls.io/repos/github/Netflix-Skunkworks/policyuniverse/badge.svg?branch=master&1)](https://coveralls.io/github/Netflix-Skunkworks/policyuniverse?branch=master) 10 | 11 | [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black) 12 | 13 | This package provides classes to parse AWS IAM and Resource Policies. 14 | 15 | Additionally, this package can expand wildcards in AWS Policies using permissions obtained from the AWS Policy Generator. 16 | 17 | See the [Service and Permissions data](policyuniverse/data.json). 18 | 19 | _This package can also minify an AWS policy to help you stay under policy size limits. Avoid doing this if possible, as it creates ugly policies._ 💩 20 | 21 | # Install: 22 | 23 | `pip install policyuniverse` 24 | 25 | # Usage: 26 | 27 | - [ARN class](#reading-arns) 28 | - [Policy class](#iam-and-resource-policies) 29 | - [Statement class](#statements) 30 | - [Action Categories](#action-categories) 31 | - [Expanding and Minification](#expanding-and-minification) 32 | 33 | ## Reading ARNs 34 | 35 | ```python 36 | from policyuniverse.arn import ARN 37 | arn = ARN('arn:aws:iam::012345678910:role/SomeTestRoleForTesting') 38 | assert arn.error == False 39 | assert arn.tech == 'iam' 40 | assert arn.region == '' # IAM is universal/global 41 | assert arn.account_number == '012345678910' 42 | assert arn.name == 'role/SomeTestRoleForTesting' 43 | assert arn.partition == 'aws' 44 | assert arn.root == False # Not the root ARN 45 | assert arn.service == False # Not an AWS service like lambda.amazonaws.com 46 | 47 | arn = ARN('012345678910') 48 | assert arn.account_number == '012345678910' 49 | 50 | arn = ARN('lambda.amazonaws.com') 51 | assert arn.service == True 52 | assert arn.tech == 'lambda' 53 | ``` 54 | 55 | ## IAM and Resource Policies 56 | 57 | ### Policy with multiple statements 58 | ```python 59 | # Two statements, both with conditions 60 | policy05 = dict( 61 | Version='2010-08-14', 62 | Statement=[ 63 | dict( 64 | Effect='Allow', 65 | Principal='arn:aws:iam::012345678910:root', 66 | Action=['s3:*'], 67 | Resource='*', 68 | Condition={ 69 | 'IpAddress': { 70 | 'AWS:SourceIP': ['0.0.0.0/0'] 71 | }}), 72 | dict( 73 | Effect='Allow', 74 | Principal='arn:aws:iam::*:role/Hello', 75 | Action=['ec2:*'], 76 | Resource='*', 77 | Condition={ 78 | 'StringLike': { 79 | 'AWS:SourceOwner': '012345678910' 80 | }}) 81 | ]) 82 | 83 | from policyuniverse.policy import Policy 84 | from 
policyuniverse.statement import ConditionTuple, PrincipalTuple 85 | 86 | policy = Policy(policy05) 87 | assert policy.whos_allowed() == set([ 88 | PrincipalTuple(category='principal', value='arn:aws:iam::*:role/Hello'), 89 | PrincipalTuple(category='principal', value='arn:aws:iam::012345678910:root'), 90 | ConditionTuple(category='cidr', value='0.0.0.0/0'), 91 | ConditionTuple(category='account', value='012345678910') 92 | ]) 93 | 94 | # The given policy is not internet accessible. 95 | # The first statement is limited by the principal, and the condition is basically a no-op. 96 | # The second statement has a wildcard principal, but uses the condition to lock it down. 97 | assert policy.is_internet_accessible() == False 98 | ``` 99 | 100 | ### Internet Accessible Policy: 101 | 102 | ```python 103 | # An internet accessible policy: 104 | policy01 = dict( 105 | Version='2012-10-08', 106 | Statement=dict( 107 | Effect='Allow', 108 | Principal='*', 109 | Action=['rds:*'], 110 | Resource='*', 111 | Condition={ 112 | 'IpAddress': { 113 | 'AWS:SourceIP': ['0.0.0.0/0'] 114 | } 115 | })) 116 | 117 | policy = Policy(policy01) 118 | assert policy.is_internet_accessible() == True 119 | assert policy.internet_accessible_actions() == set(['rds:*']) 120 | ``` 121 | 122 | ## Statements 123 | 124 | A policy is simply a collection of statements. 125 | 126 | ```python 127 | statement12 = dict( 128 | Effect='Allow', 129 | Principal='*', 130 | Action=['rds:*'], 131 | Resource='*', 132 | Condition={ 133 | 'StringEquals': { 134 | 'AWS:SourceVPC': 'vpc-111111', 135 | 'AWS:Sourcevpce': 'vpce-111111', 136 | 'AWS:SourceOwner': '012345678910', 137 | 'AWS:SourceAccount': '012345678910' 138 | }, 139 | 'StringLike': { 140 | 'AWS:userid': 'AROAI1111111111111111:*' 141 | }, 142 | 'ARNLike': { 143 | 'AWS:SourceArn': 'arn:aws:iam::012345678910:role/Admin' 144 | }, 145 | 'IpAddressIfExists': { 146 | 'AWS:SourceIP': [ 147 | '123.45.67.89', 148 | '10.0.7.0/24', 149 | '172.16.0.0/16'] 150 | } 151 | }) 152 | 153 | from policyuniverse.statement import Statement 154 | from policyuniverse.statement import ConditionTuple, PrincipalTuple 155 | 156 | statement = Statement(statement12) 157 | assert statement.effect == 'Allow' 158 | assert statement.actions == set(['rds:*']) 159 | 160 | # rds:* expands out to ~88 individual permissions 161 | assert len(statement.actions_expanded) == 88 162 | 163 | assert statement.uses_not_principal() == False 164 | assert statement.principals == set(['*']) 165 | assert statement.condition_arns == set(['arn:aws:iam::012345678910:role/Admin']) 166 | assert statement.condition_accounts == set(['012345678910']) 167 | assert statement.condition_userids == set(['AROAI1111111111111111:*']) 168 | assert statement.condition_cidrs == set(['10.0.7.0/24', '172.16.0.0/16', '123.45.67.89']) 169 | assert statement.condition_vpcs == set(['vpc-111111']) 170 | assert statement.condition_vpces == set(['vpce-111111']) 171 | assert statement.is_internet_accessible() == False 172 | assert statement.whos_allowed() == set([ 173 | PrincipalTuple(category='principal', value='*'), 174 | ConditionTuple(category='cidr', value='123.45.67.89'), 175 | ConditionTuple(category='account', value='012345678910'), 176 | ConditionTuple(category='userid', value='AROAI1111111111111111:*'), 177 | ConditionTuple(category='vpc', value='vpc-111111'), 178 | ConditionTuple(category='arn', value='arn:aws:iam::012345678910:role/Admin'), 179 | ConditionTuple(category='cidr', value='172.16.0.0/16'), 180 | ConditionTuple(category='vpce', 
value='vpce-111111'), 181 | ConditionTuple(category='cidr', value='10.0.7.0/24')]) 182 | 183 | ``` 184 | 185 | 186 | ## Action Categories 187 | ```python 188 | policy = { 189 | "Statement": [{ 190 | "Action": ["s3:put*", "sqs:get*", "sns:*"], 191 | "Resource": "*", 192 | "Effect": "Allow" 193 | }] 194 | } 195 | 196 | from policyuniverse.policy import Policy 197 | p = Policy(policy) 198 | for k, v in p.action_summary().items(): 199 | print(k,v) 200 | >>> ('s3', set([u'Write', u'Permissions', u'Tagging'])) 201 | >>> ('sqs', set([u'List'])) 202 | >>> ('sns', set([u'List', u'Read', u'Write', u'Permissions'])) 203 | ``` 204 | Possible categories are `Permissions`, `Write`, `Read`, `Tagging`, and `List`. This data can be used to summarize statements and policies and to look for sensitive permissions. 205 | 206 | ## Expanding and Minification 207 | ```python 208 | from policyuniverse.expander_minimizer import expand_policy 209 | from policyuniverse.expander_minimizer import minimize_policy 210 | 211 | policy = { 212 | "Statement": [{ 213 | "Action": ["swf:res*"], 214 | "Resource": "*", 215 | "Effect": "Allow" 216 | }] 217 | } 218 | 219 | expanded_policy = expand_policy(policy=policy) 220 | >>> Start size: 131. End size: 286 221 | print(expanded_policy == { 222 | "Statement": [{ 223 | "Action": [ 224 | "swf:respondactivitytaskcanceled", 225 | "swf:respondactivitytaskcompleted", 226 | "swf:respondactivitytaskfailed", 227 | "swf:responddecisiontaskcompleted" 228 | ], 229 | "Resource": "*", 230 | "Effect": "Allow" 231 | }] 232 | }) 233 | >>> True 234 | 235 | minimized_policy = minimize_policy(policy=expanded_policy, minchars=3) 236 | >>> Skipping prefix r because length of 1 237 | >>> Skipping prefix re because length of 2 238 | >>> Skipping prefix r because length of 1 239 | >>> Skipping prefix re because length of 2 240 | >>> Skipping prefix r because length of 1 241 | >>> Skipping prefix re because length of 2 242 | >>> Skipping prefix r because length of 1 243 | >>> Skipping prefix re because length of 2 244 | >>> Start size: 286. 
End size: 131 245 | 246 | print(minimized_policy == policy) 247 | >>> True 248 | ``` 249 | 250 | -------------------------------------------------------------------------------- /policyuniverse/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | isort:skip_file 3 | """ 4 | import json 5 | import os 6 | import logging 7 | 8 | _action_categories = dict() 9 | all_permissions = set() 10 | # autopep8: off 11 | from policyuniverse.action_categories import build_action_categories_from_service_data 12 | from policyuniverse.action import build_service_actions_from_service_data 13 | 14 | 15 | # Logging 16 | logger = logging.getLogger(__name__) 17 | 18 | # Read Input Data 19 | service_data_path = os.path.join( 20 | os.path.dirname(os.path.realpath(__file__)), "data.json" 21 | ) 22 | 23 | service_data = json.load(open(service_data_path, "r")) 24 | 25 | _action_categories.update(build_action_categories_from_service_data(service_data)) 26 | all_permissions.update(build_service_actions_from_service_data(service_data)) 27 | 28 | # These have been refactored to other files, but 29 | # some dependencies still try to import them from here: 30 | from policyuniverse.expander_minimizer import expand_policy 31 | from policyuniverse.expander_minimizer import get_actions_from_statement 32 | 33 | # autopep8: on 34 | -------------------------------------------------------------------------------- /policyuniverse/action.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.action 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. moduleauthor:: Patrick Kelley @patrickbkelley 20 | 21 | """ 22 | 23 | 24 | def build_service_actions_from_service_data(service_data): 25 | permissions = set() 26 | for service_name in service_data: 27 | prefix = service_data[service_name]["prefix"] 28 | service_actions = service_data[service_name]["actions"] 29 | for action in service_actions: 30 | permissions.add("{}:{}".format(prefix, action.lower())) 31 | return permissions 32 | 33 | 34 | # TODO: Helper Action class 35 | # May also want to create a service.py 36 | -------------------------------------------------------------------------------- /policyuniverse/action_categories.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | 3 | from policyuniverse import _action_categories 4 | 5 | 6 | def translate_aws_action_groups(groups): 7 | """ 8 | Problem - AWS provides the following five groups: 9 | - Permissions 10 | - ReadWrite 11 | - ListOnly 12 | - ReadOnly 13 | - Tagging 14 | 15 | The meaning of these groups was not immediately obvious to me. 16 | 17 | Permissions: ability to modify (create/update/remove) permissions. 18 | ReadWrite: Indicates a data-plane operation. 19 | ReadOnly: Always used with ReadWrite. 
Indicates a read-only data-plane operation. 20 | ListOnly: Always used with [ReadWrite, ReadOnly]. Indicates an action which 21 | lists resources, which is a subcategory of read-only data-plane operations. 22 | Tagging: Always used with ReadWrite. Indicates a permission that can mutate tags. 23 | 24 | So an action with ReadWrite, but without ReadOnly, is a mutating data-plane operation. 25 | An action with Permission never has any other groups. 26 | 27 | This method will take the AWS categories and translate them to one of the following: 28 | 29 | - List 30 | - Read 31 | - Tagging 32 | - ReadWrite 33 | - Permissions 34 | """ 35 | if "Permissions" in groups: 36 | return "Permissions" 37 | if "ListOnly" in groups: 38 | return "List" 39 | if "ReadOnly" in groups: 40 | return "Read" 41 | if "Tagging" in groups: 42 | return "Tagging" 43 | if "ReadWrite" in groups: 44 | return "Write" 45 | return "Unknown" 46 | 47 | 48 | def build_action_categories_from_service_data(service_data): 49 | action_categories = dict() 50 | for service_name in service_data: 51 | service_body = service_data[service_name] 52 | prefix = service_body["prefix"] 53 | service_actions = service_body["actions"] 54 | for service_action, service_action_body in service_actions.items(): 55 | key = "{}:{}".format(prefix, service_action.lower()) 56 | action_categories[key] = service_action_body["calculated_action_group"] 57 | return action_categories 58 | 59 | 60 | def categories_for_actions(actions): 61 | """ 62 | Given an iterable of actions, return a mapping of action groups. 63 | 64 | actions: {'ec2:authorizesecuritygroupingress', 'iam:putrolepolicy', 'iam:listroles'} 65 | 66 | Returns: 67 | { 68 | 'ec2': {'Write'}, 69 | 'iam': {'Permissions', 'List'}) 70 | } 71 | """ 72 | groups = defaultdict(set) 73 | for action in actions: 74 | service = action.split(":")[0] 75 | groups[service].add(_action_categories.get(action)) 76 | return groups 77 | 78 | 79 | def actions_for_category(category): 80 | """ 81 | Returns set of actions containing each group passed in. 82 | 83 | Param: 84 | category must be in {'Permissions', 'List', 'Read', 'Tagging', 'Write'} 85 | 86 | Returns: 87 | set of matching actions 88 | """ 89 | actions = set() 90 | for action, action_category in _action_categories.items(): 91 | if action_category == category: 92 | actions.add(action) 93 | return actions 94 | -------------------------------------------------------------------------------- /policyuniverse/arn.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.arn 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. 
moduleauthor:: Patrick Kelley @patrickbkelley 20 | 21 | """ 22 | import re 23 | 24 | from policyuniverse import logger 25 | 26 | 27 | class ARN(object): 28 | arn = None 29 | tech = None 30 | region = None 31 | account_number = None 32 | name = None 33 | partition = None 34 | error = False 35 | root = False 36 | service = False 37 | 38 | def __init__(self, raw): 39 | self.arn = raw 40 | arn_match = re.search( 41 | r"^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}|cloudfront|aws):(.+)$", raw 42 | ) 43 | if arn_match: 44 | if arn_match.group(2) == "iam" and arn_match.group(5) == "root": 45 | self.root = True 46 | 47 | self._from_arn(arn_match) 48 | return 49 | 50 | acct_number_match = re.search(r"^(\d{12})+$", raw) 51 | if acct_number_match: 52 | self._from_account_number(raw) 53 | return 54 | 55 | aws_service_match = re.search(r"^(([^.]+)(.[^.]+)?)\.amazon(aws)?\.com$", raw) 56 | if aws_service_match: 57 | self._from_aws_service(aws_service_match.group(1)) 58 | return 59 | 60 | aws_service_match = re.search(r"^([^.]+).aws.internal$", raw) 61 | if aws_service_match: 62 | self._from_aws_service(aws_service_match.group(1)) 63 | return 64 | 65 | self.error = True 66 | logger.debug("ARN Could not parse [{}].".format(raw)) 67 | 68 | def _from_arn(self, arn_match): 69 | self.partition = arn_match.group(1) 70 | self.tech = arn_match.group(2) 71 | self.region = arn_match.group(3) 72 | self.account_number = arn_match.group(4) 73 | self.name = arn_match.group(5) 74 | 75 | def _from_account_number(self, raw): 76 | self.account_number = raw 77 | 78 | def _from_aws_service(self, service): 79 | self.tech = service 80 | self.service = True 81 | -------------------------------------------------------------------------------- /policyuniverse/common.py: -------------------------------------------------------------------------------- 1 | # Copyright 2014 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.tests.test_common 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. moduleauthor:: George Psarakis 20 | 21 | """ 22 | 23 | try: 24 | from collections.abc import Sequence 25 | except ImportError: 26 | # Python 2.7 compatibility 27 | from collections import Sequence 28 | 29 | try: 30 | # Python 2.7 compatibility 31 | _STRING_TYPES = (bytes, str, unicode) 32 | except NameError: 33 | _STRING_TYPES = (bytes, str) 34 | 35 | 36 | def is_array(obj): 37 | """ 38 | Check if the object is iterable, excluding strings: 39 | - tuple 40 | - list 41 | - collections.abc.Sequence sub-class 42 | """ 43 | if isinstance(obj, _STRING_TYPES): 44 | return False 45 | return isinstance(obj, Sequence) 46 | 47 | 48 | def ensure_array(obj): 49 | """ 50 | Ensures that the given object is an array, 51 | by creating a list and adding it as a single element. 
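    Illustrative doctest-style examples (behavior follows is_array() above):

    >>> ensure_array("s3:GetObject")
    ['s3:GetObject']
    >>> ensure_array(["s3:GetObject", "s3:PutObject"])
    ['s3:GetObject', 's3:PutObject']
    >>> ensure_array(("a", "b"))  # tuples are Sequences, so returned unchanged
    ('a', 'b')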
52 | """ 53 | if is_array(obj): 54 | return obj 55 | else: 56 | return [obj] 57 | -------------------------------------------------------------------------------- /policyuniverse/expander_minimizer.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.expander_minimizer 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. moduleauthor:: Patrick Kelley < @patrickbkelley 20 | 21 | """ 22 | from __future__ import print_function 23 | 24 | import copy 25 | import fnmatch 26 | import json 27 | import sys 28 | 29 | from policyuniverse import all_permissions 30 | from policyuniverse.common import ensure_array 31 | 32 | policy_headers = ["rolepolicies", "grouppolicies", "userpolicies", "policy"] 33 | 34 | 35 | def expand_minimize_over_policies(policies, activity, **kwargs): 36 | for header in policy_headers: 37 | if header in policies: 38 | output = {header: {}} 39 | for policy in policies[header]: 40 | output[header][policy] = activity( 41 | policy=policies[header][policy], **kwargs 42 | ) 43 | return output 44 | 45 | return activity(policy=policies, **kwargs) 46 | 47 | 48 | def _get_prefixes_for_action(action): 49 | """ 50 | :param action: iam:cat 51 | :return: [ "iam:", "iam:c", "iam:ca", "iam:cat" ] 52 | """ 53 | (technology, permission) = action.split(":") 54 | retval = ["{}:".format(technology)] 55 | phrase = "" 56 | for char in permission: 57 | newphrase = "{}{}".format(phrase, char) 58 | retval.append("{}:{}".format(technology, newphrase)) 59 | phrase = newphrase 60 | return retval 61 | 62 | 63 | def _expand(action): 64 | """ 65 | :param action: 'autoscaling:*' 66 | :return: A list of all autoscaling permissions matching the wildcard 67 | """ 68 | expanded = fnmatch.filter(all_permissions, action.lower()) 69 | # if we get a wildcard for a tech we've never heard of, just return the wildcard 70 | if not expanded: 71 | return [action] 72 | return expanded 73 | 74 | 75 | def _expand_wildcard_action(actions): 76 | """Expand wildcards in a list of actions (or a single action string), returning a list of all matching actions. 77 | 78 | :param actions: ['autoscaling:*'] 79 | :return: A list of all permissions matching the action globs 80 | """ 81 | if isinstance(actions, str): 82 | # Bail early if we have a string with no wildcard 83 | if "*" not in actions: 84 | return [actions.lower()] 85 | actions = [actions] 86 | 87 | # Map _expand function to action list, resulting in a list of lists of expanded actions. 88 | temp = map(_expand, actions) 89 | 90 | # This flattens the list of lists. It's hard to read, but it's a hot path and the optimization 91 | # speeds it up by 90% or more. 
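    # (Illustrative) A more readable, but slower, equivalent of the comprehension below:
    #
    #     expanded = []
    #     for sublist in temp:
    #         for item in sublist:
    #             expanded.append(item.lower())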
92 |     expanded = [item.lower() for sublist in temp for item in sublist]
93 |
94 |     return expanded
95 |
96 |
97 | def _get_desired_actions_from_statement(statement):
98 |     desired_actions = set()
99 |     actions = _expand_wildcard_action(statement["Action"])
100 |
101 |     for action in actions:
102 |         if action not in all_permissions:
103 |             raise Exception(
104 |                 "Desired action not found in master permission list. {}".format(action)
105 |             )
106 |         desired_actions.add(action)
107 |
108 |     return desired_actions
109 |
110 |
111 | def _get_denied_prefixes_from_desired(desired_actions):
112 |     denied_actions = all_permissions.difference(desired_actions)
113 |     denied_prefixes = set()
114 |     for denied_action in denied_actions:
115 |         for denied_prefix in _get_prefixes_for_action(denied_action):
116 |             denied_prefixes.add(denied_prefix)
117 |
118 |     return denied_prefixes
119 |
120 |
121 | def _check_min_permission_length(permission, minchars=None):
122 |     if minchars and len(permission) < int(minchars) and permission != "":
123 |         print(
124 |             "Skipping prefix {} because length of {}".format(
125 |                 permission, len(permission)
126 |             ),
127 |             file=sys.stderr,
128 |         )
129 |         return True
130 |     return False
131 |
132 |
133 | def minimize_statement_actions(statement, minchars=None):
134 |     minimized_actions = set()
135 |
136 |     if statement["Effect"] != "Allow":
137 |         raise Exception("Minification does not currently work on Deny statements.")
138 |
139 |     desired_actions = _get_desired_actions_from_statement(statement)
140 |     denied_prefixes = _get_denied_prefixes_from_desired(desired_actions)
141 |
142 |     for action in desired_actions:
143 |         if action in denied_prefixes:
144 |             print("Action is a denied prefix. Action: {}".format(action))
145 |             minimized_actions.add(action)
146 |             continue
147 |
148 |         found_prefix = False
149 |         prefixes = _get_prefixes_for_action(action)
150 |         for prefix in prefixes:
151 |
152 |             permission = prefix.split(":")[1]
153 |             if _check_min_permission_length(permission, minchars=minchars):
154 |                 continue
155 |
156 |             if prefix not in denied_prefixes:
157 |                 if prefix not in desired_actions:
158 |                     prefix = "{}*".format(prefix)
159 |                 minimized_actions.add(prefix)
160 |                 found_prefix = True
161 |                 break
162 |
163 |         if not found_prefix:
164 |             print("Could not find a suitable prefix.
Defaulting to {}".format(prefixes[-1])) 165 | minimized_actions.add(prefixes[-1]) 166 | 167 | # sort the actions 168 | minimized_actions_list = list(minimized_actions) 169 | minimized_actions_list.sort() 170 | 171 | return minimized_actions_list 172 | 173 | 174 | def get_actions_from_statement(statement): 175 | allowed_actions = set() 176 | actions = ensure_array(statement.get("Action", [])) 177 | 178 | for action in actions: 179 | allowed_actions = allowed_actions.union(set(_expand_wildcard_action(action))) 180 | 181 | inverted_actions = set() 182 | not_actions = ensure_array(statement.get("NotAction", [])) 183 | 184 | for action in not_actions: 185 | inverted_actions = inverted_actions.union(set(_expand_wildcard_action(action))) 186 | 187 | if inverted_actions: 188 | actions = _invert_actions(inverted_actions) 189 | allowed_actions = allowed_actions.union(actions) 190 | 191 | return allowed_actions 192 | 193 | 194 | def _invert_actions(actions): 195 | from policyuniverse import all_permissions 196 | 197 | return all_permissions.difference(actions) 198 | 199 | 200 | def expand_policy(policy=None, expand_deny=False): 201 | # Perform a deepcopy to avoid mutating the input 202 | result = copy.deepcopy(policy) 203 | 204 | result["Statement"] = ensure_array(result["Statement"]) 205 | for statement in result["Statement"]: 206 | if statement["Effect"].lower() == "deny" and not expand_deny: 207 | continue 208 | actions = get_actions_from_statement(statement) 209 | if "NotAction" in statement: 210 | del statement["NotAction"] 211 | statement["Action"] = sorted(list(actions)) 212 | 213 | return result 214 | 215 | 216 | def minimize_policy(policy=None, minchars=None): 217 | 218 | str_pol = json.dumps(policy, indent=2) 219 | size = len(str_pol) 220 | 221 | for statement in policy["Statement"]: 222 | minimized_actions = minimize_statement_actions(statement, minchars=minchars) 223 | statement["Action"] = minimized_actions 224 | 225 | str_end_pol = json.dumps(policy, indent=2) 226 | end_size = len(str_end_pol) 227 | 228 | # print str_end_pol 229 | print("Start size: {}. End size: {}".format(size, end_size), file=sys.stderr) 230 | return policy 231 | -------------------------------------------------------------------------------- /policyuniverse/organization.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Amazon.com, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.organization 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. 
moduleauthor:: Chris Partridge @_tweedge 20 | 21 | """ 22 | from policyuniverse import logger 23 | 24 | 25 | class Organization(object): 26 | organization = None 27 | root = None 28 | ou_path = [] 29 | valid_for_child_ous = False 30 | valid_for_parent_ou = False 31 | valid_for_all_ous = True 32 | error = False 33 | 34 | def __init__(self, input): 35 | components_list = input.split("/") 36 | 37 | for component_index in range(0, len(components_list)): 38 | component = components_list[component_index] 39 | 40 | if component_index == 0: 41 | self._parse_organization(component) 42 | elif component_index == 1: 43 | self._parse_root(component) 44 | else: 45 | self._parse_ou_path(component) 46 | 47 | if self.error: 48 | return 49 | 50 | def _parse_organization(self, orgid): 51 | if orgid.startswith("o-") or orgid == "*": 52 | self.organization = orgid 53 | else: 54 | self.error = True 55 | logger.debug("Organization Org ID parse error [{}].".format(input)) 56 | 57 | def _parse_root(self, root): 58 | if root.startswith("r-") or root == "*": 59 | self.root = root 60 | else: 61 | self.error = True 62 | logger.debug("Organization root parse error [{}].".format(input)) 63 | 64 | def _parse_ou_path(self, ou): 65 | if self.valid_for_parent_ou or self.valid_for_child_ous: 66 | self.error = True 67 | logger.debug("Organization OU validity error [{}].".format(input)) 68 | return 69 | 70 | if not ou: 71 | self.valid_for_parent_ou = True 72 | elif ou == "*": 73 | self.valid_for_child_ous = True 74 | self.valid_for_parent_ou = True 75 | elif ou == "ou-*": 76 | self.valid_for_child_ous = True 77 | else: 78 | self.valid_for_all_ous = False 79 | 80 | if ou.startswith("ou-"): 81 | self.ou_path.append(ou) 82 | else: 83 | self.error = True 84 | logger.debug("Organization OU parse error [{}].".format(input)) 85 | -------------------------------------------------------------------------------- /policyuniverse/policy.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.policy 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. 
moduleauthor:: Patrick Kelley @patrickbkelley 20 | 21 | """ 22 | from collections import defaultdict 23 | 24 | from policyuniverse.common import ensure_array 25 | from policyuniverse.statement import Statement 26 | 27 | 28 | class Policy(object): 29 | def __init__(self, policy): 30 | self.policy = policy 31 | self.statements = [] 32 | 33 | statement_structure = ensure_array(self.policy.get("Statement", [])) 34 | 35 | for statement in statement_structure: 36 | self.statements.append(Statement(statement)) 37 | 38 | @property 39 | def principals(self): 40 | principals = set() 41 | for statement in self.statements: 42 | principals = principals.union(statement.principals) 43 | return principals 44 | 45 | @property 46 | def condition_entries(self): 47 | condition_entries = set() 48 | for statement in self.statements: 49 | condition_entries = condition_entries.union(statement.condition_entries) 50 | return condition_entries 51 | 52 | def action_summary(self): 53 | action_categories = defaultdict(set) 54 | for statement in self.statements: 55 | for service, groups in statement.action_summary().items(): 56 | action_categories[service] = action_categories[service].union(groups) 57 | return action_categories 58 | 59 | def is_internet_accessible(self): 60 | for statement in self.statements: 61 | if statement.is_internet_accessible(): 62 | return True 63 | return False 64 | 65 | def internet_accessible_actions(self): 66 | actions = set() 67 | for statement in self.statements: 68 | if statement.is_internet_accessible(): 69 | actions = actions.union(statement.actions) 70 | return actions 71 | 72 | def whos_allowed(self): 73 | allowed = set() 74 | for statement in self.statements: 75 | if statement.effect == "Allow": 76 | allowed = allowed.union(statement.whos_allowed()) 77 | return allowed 78 | -------------------------------------------------------------------------------- /policyuniverse/statement.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.statement 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. 
moduleauthor:: Patrick Kelley @patrickbkelley 20 | 21 | """ 22 | import re 23 | from collections import namedtuple 24 | 25 | from policyuniverse import logger 26 | from policyuniverse.action_categories import categories_for_actions 27 | from policyuniverse.arn import ARN 28 | from policyuniverse.common import ensure_array, is_array 29 | from policyuniverse.expander_minimizer import get_actions_from_statement 30 | from policyuniverse.organization import Organization 31 | 32 | try: 33 | from collections.abc import Mapping 34 | except ImportError: 35 | # Python 2.7 compatibility 36 | from collections import Mapping 37 | 38 | PrincipalTuple = namedtuple("Principal", "category value") 39 | ConditionTuple = namedtuple("Condition", "category value") 40 | 41 | 42 | class Statement(object): 43 | def __init__(self, statement): 44 | self.statement = statement 45 | self.condition_entries = self._condition_entries() 46 | self.principals = self._principals() 47 | self.actions = self._actions() 48 | 49 | @property 50 | def effect(self): 51 | return self.statement.get("Effect") 52 | 53 | @property 54 | def actions_expanded(self): 55 | return set(get_actions_from_statement(self.statement)) 56 | 57 | def _actions(self): 58 | actions = self.statement.get("Action") 59 | if not actions: 60 | return set() 61 | actions = ensure_array(actions) 62 | return set(actions) 63 | 64 | def action_summary(self): 65 | return categories_for_actions(self.actions_expanded) 66 | 67 | def uses_not_principal(self): 68 | return "NotPrincipal" in self.statement 69 | 70 | @property 71 | def resources(self): 72 | if "NotResource" in self.statement: 73 | return set(["*"]) 74 | 75 | resources = ensure_array(self.statement.get("Resource")) 76 | return set(resources) 77 | 78 | def whos_allowed(self): 79 | """Returns set containing any entries from principal and condition section. 80 | 81 | Example: 82 | 83 | statement = Statement(dict( 84 | Effect='Allow', 85 | Principal='arn:aws:iam::*:role/Hello', 86 | Action=['ec2:*'], 87 | Resource='*', 88 | Condition={ 89 | 'StringLike': { 90 | 'AWS:SourceOwner': '012345678910' 91 | }})) 92 | 93 | statement.whos_allowed() 94 | > set([ 95 | > PrincipalTuple(category='principal', value='arn:aws:iam::*:role/Hello'), 96 | > ConditionTuple(category='account', value='012345678910')]) 97 | """ 98 | who = set() 99 | for principal in self.principals: 100 | principal = PrincipalTuple(category="principal", value=principal) 101 | who.add(principal) 102 | who = who.union(self.condition_entries) 103 | return who 104 | 105 | def _principals(self): 106 | """Extracts all principals from IAM statement. 107 | 108 | Should handle these cases: 109 | "Principal": "value" 110 | "Principal": ["value"] 111 | "Principal": { "AWS": "value" } 112 | "Principal": { "AWS": ["value", "value"] } 113 | "Principal": { "Service": "value" } 114 | "Principal": { "Service": ["value", "value"] } 115 | 116 | Return: Set of principals 117 | """ 118 | principals = set() 119 | principal = self.statement.get("Principal", None) 120 | if not principal: 121 | # It is possible not to define a principal, AWS ignores these statements. 
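            # (Illustrative) e.g. a statement such as
            #     {"Effect": "Allow", "Action": "s3:GetObject", "Resource": "*"}
            # omits "Principal" entirely, so an empty set is returned here.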
122 | return principals 123 | 124 | if isinstance(principal, Mapping): 125 | 126 | if "AWS" in principal: 127 | self._add_or_extend(principal["AWS"], principals) 128 | 129 | if "Service" in principal: 130 | self._add_or_extend(principal["Service"], principals) 131 | 132 | if "Federated" in principal: 133 | self._add_or_extend(principal["Federated"], principals) 134 | 135 | else: 136 | self._add_or_extend(principal, principals) 137 | 138 | return principals 139 | 140 | def _add_or_extend(self, value, structure): 141 | if is_array(value): 142 | structure.update(set(value)) 143 | else: 144 | structure.add(value) 145 | 146 | def _condition_entries(self): 147 | """Extracts any ARNs, Account Numbers, UserIDs, Usernames, CIDRs, VPCs, and VPC Endpoints from a condition block. 148 | 149 | Ignores any negated condition operators like StringNotLike. 150 | Ignores weak condition keys like referer, date, etc. 151 | 152 | Reason: A condition is meant to limit the principal in a statement. Often, resource policies use a wildcard principal 153 | and rely exclusively on the Condition block to limit access. 154 | 155 | We would want to alert if the Condition had no limitations (like a non-existent Condition block), or very weak 156 | limitations. Any negation would be weak, and largely equivelant to having no condition block whatsoever. 157 | 158 | The alerting code that relies on this data must ensure the condition has at least one of the following: 159 | - A limiting ARN 160 | - Account Identifier 161 | - AWS Organization Principal Org ID 162 | - User ID 163 | - Source IP / CIDR 164 | - VPC 165 | - VPC Endpoint 166 | 167 | https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_condition-keys.html 168 | """ 169 | conditions = list() 170 | condition = self.statement.get("Condition") 171 | if not condition: 172 | return conditions 173 | 174 | key_mapping = { 175 | "aws:sourcearn": "arn", 176 | "aws:principalarn": "arn", 177 | "aws:sourceowner": "account", 178 | "aws:sourceaccount": "account", 179 | "aws:principalaccount": "account", 180 | "aws:principalorgid": "organization", 181 | "aws:principalorgpaths": "organization", 182 | "kms:calleraccount": "account", 183 | "aws:userid": "userid", 184 | "aws:sourceip": "cidr", 185 | "aws:sourcevpc": "vpc", 186 | "aws:sourcevpce": "vpce", 187 | # a key for SAML Federation trust policy. 
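            # (Illustrative) e.g. a SAML trust-policy condition such as
            #     {"StringEquals": {"SAML:aud": "https://signin.aws.amazon.com/saml"}}
            # is mapped to the "saml-endpoint" category.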
188 | # https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-idp_saml.html 189 | # https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_saml_assertions.html 190 | "saml:aud": "saml-endpoint", 191 | } 192 | 193 | relevant_condition_operators = [ 194 | re.compile( 195 | "((ForAllValues|ForAnyValue):)?ARN(Equals|Like)(IfExists)?", 196 | re.IGNORECASE, 197 | ), 198 | re.compile( 199 | "((ForAllValues|ForAnyValue):)?String(Equals|Like)(IgnoreCase)?(IfExists)?", 200 | re.IGNORECASE, 201 | ), 202 | re.compile( 203 | "((ForAllValues|ForAnyValue):)?IpAddress(IfExists)?", re.IGNORECASE 204 | ), 205 | ] 206 | 207 | for condition_operator in condition.keys(): 208 | if any( 209 | regex.match(condition_operator) 210 | for regex in relevant_condition_operators 211 | ): 212 | for key, value in condition[condition_operator].items(): 213 | 214 | if key.lower() in key_mapping: 215 | if is_array(value): 216 | for v in value: 217 | conditions.append( 218 | ConditionTuple( 219 | value=v, category=key_mapping[key.lower()] 220 | ) 221 | ) 222 | else: 223 | conditions.append( 224 | ConditionTuple( 225 | value=value, category=key_mapping[key.lower()] 226 | ) 227 | ) 228 | 229 | return conditions 230 | 231 | @property 232 | def condition_arns(self): 233 | return self._condition_field("arn") 234 | 235 | @property 236 | def condition_accounts(self): 237 | return self._condition_field("account") 238 | 239 | @property 240 | def condition_orgids(self): 241 | return set( 242 | [ 243 | Organization(entry.value).organization 244 | for entry in self.condition_entries 245 | if entry.category == "organization" 246 | ] 247 | ) 248 | 249 | @property 250 | def condition_orgpaths(self): 251 | return self._condition_field("organization") 252 | 253 | @property 254 | def condition_userids(self): 255 | return self._condition_field("userid") 256 | 257 | @property 258 | def condition_cidrs(self): 259 | return self._condition_field("cidr") 260 | 261 | @property 262 | def condition_vpcs(self): 263 | return self._condition_field("vpc") 264 | 265 | @property 266 | def condition_vpces(self): 267 | return self._condition_field("vpce") 268 | 269 | def _condition_field(self, field): 270 | return set( 271 | [entry.value for entry in self.condition_entries if entry.category == field] 272 | ) 273 | 274 | def is_internet_accessible(self): 275 | if self.effect != "Allow": 276 | return False 277 | 278 | if not self.is_condition_internet_accessible(): 279 | return False 280 | 281 | if self.uses_not_principal(): 282 | return True 283 | 284 | for principal in self.principals: 285 | if self._arn_internet_accessible(principal): 286 | return True 287 | 288 | return False 289 | 290 | def is_condition_internet_accessible(self): 291 | condition_entries = self.condition_entries 292 | if len(condition_entries) == 0: 293 | return True 294 | 295 | for entry in condition_entries: 296 | if self._is_condition_entry_internet_accessible(entry): 297 | return True 298 | 299 | return False 300 | 301 | def _is_condition_entry_internet_accessible(self, entry): 302 | if entry.category == "arn": 303 | return self._arn_internet_accessible(entry.value) 304 | 305 | if entry.category == "organization": 306 | return self._organization_internet_accessible(entry.value) 307 | 308 | if entry.category == "userid": 309 | return self._userid_internet_accessible(entry.value) 310 | 311 | if entry.category == "cidr": 312 | return self._cidr_internet_accessible(entry.value) 313 | 314 | return "*" in entry.value 315 | 316 | def 
_cidr_internet_accessible(self, cidr): 317 | """The caller will want to inspect the CIDRs directly. 318 | This will only look for /0's. 319 | """ 320 | return cidr.endswith("/0") 321 | 322 | def _userid_internet_accessible(self, userid): 323 | # Trailing wildcards are okay for userids: 324 | # AROAIIIIIIIIIIIIIIIII:* 325 | if userid.find("*") == len(userid) - 1: 326 | # note: this will also return False for a zero-length userid 327 | return False 328 | return True 329 | 330 | def _arn_internet_accessible(self, arn_input): 331 | if "*" == arn_input: 332 | return True 333 | 334 | arn = ARN(arn_input) 335 | if arn.error: 336 | logger.debug("Auditor could not parse ARN {arn}.".format(arn=arn_input)) 337 | return "*" in arn_input 338 | 339 | if arn.tech == "s3": 340 | # S3 ARNs typically don't have account numbers. 341 | return False 342 | 343 | if not arn.account_number and not arn.service: 344 | logger.debug( 345 | "Auditor could not parse Account Number from ARN {arn}.".format( 346 | arn=arn_input 347 | ) 348 | ) 349 | return True 350 | 351 | if arn.account_number == "*": 352 | return True 353 | 354 | return False 355 | 356 | def _organization_internet_accessible(self, org_input): 357 | organization = Organization(org_input) 358 | if organization.error: 359 | logger.debug("Auditor could not parse Org {org}.".format(org=org_input)) 360 | return "o-*" in org_input 361 | 362 | if organization.organization == "o-*": 363 | return True 364 | 365 | return False 366 | -------------------------------------------------------------------------------- /policyuniverse/tests/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | logging.basicConfig(level=logging.INFO) 4 | -------------------------------------------------------------------------------- /policyuniverse/tests/helpers.py: -------------------------------------------------------------------------------- 1 | from policyuniverse.common import Sequence 2 | from policyuniverse.statement import Mapping 3 | 4 | 5 | class CustomSequence(Sequence): 6 | def __init__(self, data): 7 | self._sequence = data 8 | 9 | def __getitem__(self, item): 10 | return self._sequence[item] 11 | 12 | def __len__(self): 13 | return len(self._sequence) 14 | 15 | 16 | class CustomMapping(Mapping): 17 | def __init__(self, data): 18 | self._mapping = data 19 | 20 | def __getitem__(self, item): 21 | return self._mapping[item] 22 | 23 | def __len__(self): 24 | return len(self._mapping) 25 | 26 | def __iter__(self): 27 | return iter(self._mapping) 28 | -------------------------------------------------------------------------------- /policyuniverse/tests/test_action_categories.py: -------------------------------------------------------------------------------- 1 | # Copyright 2014 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.tests.test_action_categories 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. 
moduleauthor:: Patrick Kelley 20 | 21 | """ 22 | import unittest 23 | 24 | 25 | class ActionGroupTestCase(unittest.TestCase): 26 | def test_categories_for_actions(self): 27 | from policyuniverse.action_categories import categories_for_actions 28 | 29 | actions = [ 30 | "ec2:authorizesecuritygroupingress", 31 | "iam:putrolepolicy", 32 | "iam:listroles", 33 | ] 34 | groups = categories_for_actions(actions) 35 | self.assertIn("ec2", groups.keys()) 36 | self.assertIn("iam", groups.keys()) 37 | self.assertEqual(groups["ec2"], {"Write"}) 38 | self.assertEqual(groups["iam"], {"Permissions", "List"}) 39 | 40 | def test_actions_for_category(self): 41 | from policyuniverse.action_categories import actions_for_category 42 | 43 | read_only_actions = actions_for_category("Read") 44 | list_only_actions = actions_for_category("List") 45 | write_actions = actions_for_category("Write") 46 | permission_actions = actions_for_category("Permissions") 47 | 48 | for action in permission_actions: 49 | if action in { 50 | "datazone:getiamportalloginurl", 51 | "glue:getdatapreviewstatement", 52 | "glue:getnotebookinstancestatus", 53 | "imagebuilder:getcomponentpolicy", 54 | "imagebuilder:getimagepolicy", 55 | "imagebuilder:getimagerecipepolicy", 56 | "iotsitewise:listaccesspolicies", 57 | "managedblockchain:get", 58 | "monitron:listprojectadminusers", 59 | "signer:listprofilepermissions", 60 | "xray:getencryptionconfig", 61 | }: # miscategorized AWS actions 62 | continue 63 | 64 | self.assertFalse(":list" in action, action) 65 | self.assertFalse(":get" in action, action) 66 | 67 | for action in list_only_actions: 68 | self.assertFalse(":put" in action, action) 69 | self.assertFalse(":create" in action, action) 70 | self.assertFalse(":attach" in action, action) 71 | 72 | for action in read_only_actions: 73 | # read actions shouldn't start with "Put" or "Create" unless they are miscategorized. 74 | if action in { 75 | "cloud9:createenvironmenttoken", 76 | "codeguru-reviewer:createconnectiontoken", 77 | "kinesisanalytics:createapplicationpresignedurl", 78 | "ssm:putconfigurepackageresult", 79 | }: # miscategorized AWS actions 80 | continue 81 | # self.assertFalse(':list' in action) # Tons of list* permissions are mis-categorized(?) as Read. 82 | self.assertFalse(":put" in action, action) 83 | self.assertFalse(":create" in action, action) 84 | self.assertFalse(":attach" in action, action) 85 | 86 | for action in write_actions: 87 | # write actions shouldn't start with "get" unless they are miscategorized. 
88 | if action in { 89 | "appstream:getparametersforthemeassetupload", 90 | "backup-storage:describebackupjob", 91 | "backup-storage:getbasebackup", 92 | "backup-storage:getchunk", 93 | "backup-storage:getincrementalbasebackup", 94 | "backup-storage:getobjectmetadata", 95 | "bugbust:getjoineventstatus", 96 | "cloudshell:getfiledownloadurls", 97 | "cloudshell:getfileuploadurls", 98 | "cognito-identity:getid", 99 | "connect:getfederationtokens", 100 | "dataexchange:getjob", 101 | "elasticmapreduce:getclustersessioncredentials", 102 | "elasticmapreduce:getonclusterappuipresignedurl", 103 | "elasticmapreduce:getpersistentappuipresignedurl", 104 | "emr-containers:getmanagedendpointsessioncredentials", 105 | "glue:getmapping", 106 | "lakeformation:getdataaccess", 107 | "license-manager-linux-subscriptions:getservicesettings", 108 | "lightsail:getinstanceaccessdetails", 109 | "lightsail:getrelationaldatabasemasteruserpassword", 110 | "personalize:getpersonalizedranking", 111 | "quicksight:describecustompermissions", 112 | "redshift-serverless:getcredentials", 113 | "redshift:getclustercredentials", 114 | "redshift:getclustercredentialswithiam", 115 | "states:getactivitytask", 116 | }: # miscategorized AWS actions 117 | continue 118 | self.assertFalse(":get" in action, action) 119 | self.assertFalse(":describe" in action, action) 120 | -------------------------------------------------------------------------------- /policyuniverse/tests/test_arn.py: -------------------------------------------------------------------------------- 1 | # Copyright 2014 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.tests.test_arn 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. 
moduleauthor:: Mike Grima 20 | 21 | """ 22 | import unittest 23 | 24 | from policyuniverse import logger 25 | from policyuniverse.arn import ARN 26 | 27 | 28 | class ARNTestCase(unittest.TestCase): 29 | def test_from_arn(self): 30 | proper_arns = [ 31 | "events.amazonaws.com", 32 | "cloudtrail.amazonaws.com", 33 | "arn:aws:iam::012345678910:root", 34 | "arn:aws:iam::012345678910:role/SomeTestRoleForTesting", 35 | "arn:aws:iam::012345678910:instance-profile/SomeTestInstanceProfileForTesting", 36 | "arn:aws:iam::012345678910:role/*", 37 | "arn:aws:iam::012345678910:role/SomeTestRole*", 38 | "arn:aws:s3:::some-s3-bucket", 39 | "arn:aws:s3:*:*:some-s3-bucket", 40 | "arn:aws:s3:::some-s3-bucket/some/path/within/the/bucket" 41 | "arn:aws:s3:::some-s3-bucket/*", 42 | "arn:aws:ec2:us-west-2:012345678910:instance/*", 43 | "arn:aws:ec2:ap-northeast-1:012345678910:security-group/*", 44 | "arn:aws-cn:ec2:ap-northeast-1:012345678910:security-group/*", 45 | "arn:aws-us-gov:ec2:gov-west-1:012345678910:instance/*", 46 | "arn:aws:iam::cloudfront:user/CloudFront Origin Access Identity EXXXXXXXXXXXXX", 47 | "arn:aws:iam::aws:policy/AlexaForBusinessDeviceSetup", 48 | ] 49 | 50 | # Proper ARN Tests: 51 | for arn in proper_arns: 52 | logger.info("Testing Proper ARN: {}".format(arn)) 53 | arn_obj = ARN(arn) 54 | 55 | self.assertFalse(arn_obj.error) 56 | if "root" in arn: 57 | self.assertTrue(arn_obj.root) 58 | else: 59 | self.assertFalse(arn_obj.root) 60 | 61 | if ".amazonaws.com" in arn: 62 | self.assertTrue(arn_obj.service) 63 | else: 64 | self.assertFalse(arn_obj.service) 65 | 66 | bad_arns = [ 67 | "arn:aws:iam::012345678910", 68 | "arn:aws:iam::012345678910:", 69 | "*", 70 | "arn:s3::::", 71 | "arn:arn:arn:arn:arn:arn", 72 | ] 73 | 74 | # Improper ARN Tests: 75 | for arn in bad_arns: 76 | logger.info("Testing IMPROPER ARN: {}".format(arn)) 77 | arn_obj = ARN(arn) 78 | 79 | self.assertTrue(arn_obj.error) 80 | 81 | def test_from_account_number(self): 82 | proper_account_numbers = ["012345678912", "123456789101", "123456789101"] 83 | 84 | improper_account_numbers = [ 85 | "*", 86 | "O12345678912", # 'O' instead of '0' 87 | "asdfqwer", 88 | "123456", 89 | "89789456314356132168978945", 90 | "568947897*", 91 | ] 92 | 93 | # Proper account number tests: 94 | for accnt in proper_account_numbers: 95 | logger.info("Testing Proper Account Number: {}".format(accnt)) 96 | arn_obj = ARN(accnt) 97 | 98 | self.assertFalse(arn_obj.error) 99 | 100 | # Improper account number tests: 101 | for accnt in improper_account_numbers: 102 | logger.info("Testing IMPROPER Account Number: {}".format(accnt)) 103 | arn_obj = ARN(accnt) 104 | 105 | self.assertTrue(arn_obj.error) 106 | -------------------------------------------------------------------------------- /policyuniverse/tests/test_common.py: -------------------------------------------------------------------------------- 1 | # Copyright 2014 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. 
module: policyuniverse.tests.test_common 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. moduleauthor:: George Psarakis 20 | 21 | """ 22 | import unittest 23 | from collections.abc import Sequence 24 | 25 | from policyuniverse.common import ensure_array, is_array 26 | 27 | 28 | class CustomSequence(Sequence): 29 | def __init__(self, *elements): 30 | self._elements = elements 31 | 32 | def __getitem__(self, item): 33 | return self._elements[item] 34 | 35 | def __len__(self): 36 | return self._elements.__len__() 37 | 38 | def __iter__(self): 39 | return iter(self._elements) 40 | 41 | 42 | class CommonTestCase(unittest.TestCase): 43 | def test_is_array(self): 44 | cases = ( 45 | ([1, 2], True), 46 | ((1, 2), True), 47 | (CustomSequence(1, 2), True), 48 | ("abc", False), 49 | (b"abc", False), 50 | (1, False), 51 | ({"a": 1}, False), 52 | ) 53 | for case_input, expected in cases: 54 | self.assertIs(is_array(case_input), expected) 55 | 56 | def test_ensure_array_sequence_input(self): 57 | for obj in ([1, 2], (3, 4), CustomSequence(5, 6)): 58 | self.assertIs(ensure_array(obj), obj) 59 | 60 | def test_ensure_array_non_sequence_input(self): 61 | for obj in ("abc", b"abc", 1, {"a": 1}): 62 | self.assertListEqual(ensure_array(obj), [obj]) 63 | -------------------------------------------------------------------------------- /policyuniverse/tests/test_expander_minimizer.py: -------------------------------------------------------------------------------- 1 | # Copyright 2014 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.tests.test_expander_minimizer 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. 
moduleauthor:: Mike Grima 20 | 21 | """ 22 | import copy 23 | import unittest 24 | 25 | from policyuniverse.expander_minimizer import ( 26 | _expand_wildcard_action, 27 | _get_desired_actions_from_statement, 28 | _get_prefixes_for_action, 29 | all_permissions, 30 | expand_minimize_over_policies, 31 | expand_policy, 32 | get_actions_from_statement, 33 | minimize_policy, 34 | minimize_statement_actions, 35 | ) 36 | 37 | WILDCARD_ACTION_1 = "swf:res*" 38 | 39 | WILDCARD_POLICY_1 = { 40 | "Statement": [{"Action": [WILDCARD_ACTION_1], "Resource": "*", "Effect": "Allow"}] 41 | } 42 | 43 | EXPANDED_ACTIONS_1 = [ 44 | "swf:respondactivitytaskcanceled", 45 | "swf:respondactivitytaskcompleted", 46 | "swf:respondactivitytaskfailed", 47 | "swf:responddecisiontaskcompleted", 48 | ] 49 | 50 | EXPANDED_POLICY_1 = { 51 | "Statement": [{"Action": EXPANDED_ACTIONS_1, "Resource": "*", "Effect": "Allow"}] 52 | } 53 | 54 | WILDCARD_POLICY_2 = { 55 | "Statement": [ 56 | {"Action": ["swf:*activitytaskc*"], "Resource": "*", "Effect": "Allow"} 57 | ] 58 | } 59 | 60 | EXPANDED_POLICY_2 = { 61 | "Statement": [ 62 | { 63 | "Action": [ 64 | "swf:respondactivitytaskcanceled", 65 | "swf:respondactivitytaskcompleted", 66 | ], 67 | "Resource": "*", 68 | "Effect": "Allow", 69 | } 70 | ] 71 | } 72 | 73 | POLICIES_1 = { 74 | "policy": {"policyname1": WILDCARD_POLICY_1, "policyname2": WILDCARD_POLICY_2} 75 | } 76 | 77 | EXPANDED_POLICIES_1 = { 78 | "policy": {"policyname1": EXPANDED_POLICY_1, "policyname2": EXPANDED_POLICY_2} 79 | } 80 | 81 | AUTOSCALING_PERMISSIONS = sorted( 82 | [ 83 | "autoscaling:attachinstances", 84 | "autoscaling:attachloadbalancers", 85 | "autoscaling:attachloadbalancertargetgroups", 86 | "autoscaling:attachtrafficsources", 87 | "autoscaling:batchdeletescheduledaction", 88 | "autoscaling:batchputscheduledupdategroupaction", 89 | "autoscaling:cancelinstancerefresh", 90 | "autoscaling:completelifecycleaction", 91 | "autoscaling:createautoscalinggroup", 92 | "autoscaling:createlaunchconfiguration", 93 | "autoscaling:createorupdatetags", 94 | "autoscaling:deleteautoscalinggroup", 95 | "autoscaling:deletelaunchconfiguration", 96 | "autoscaling:deletelifecyclehook", 97 | "autoscaling:deletenotificationconfiguration", 98 | "autoscaling:deletepolicy", 99 | "autoscaling:deletescheduledaction", 100 | "autoscaling:deletetags", 101 | "autoscaling:deletewarmpool", 102 | "autoscaling:describeaccountlimits", 103 | "autoscaling:describeadjustmenttypes", 104 | "autoscaling:describeautoscalinggroups", 105 | "autoscaling:describeautoscalinginstances", 106 | "autoscaling:describeautoscalingnotificationtypes", 107 | "autoscaling:describeinstancerefreshes", 108 | "autoscaling:describelaunchconfigurations", 109 | "autoscaling:describelifecyclehooks", 110 | "autoscaling:describelifecyclehooktypes", 111 | "autoscaling:describeloadbalancers", 112 | "autoscaling:describeloadbalancertargetgroups", 113 | "autoscaling:describemetriccollectiontypes", 114 | "autoscaling:describenotificationconfigurations", 115 | "autoscaling:describepolicies", 116 | "autoscaling:describescalingactivities", 117 | "autoscaling:describescalingprocesstypes", 118 | "autoscaling:describescheduledactions", 119 | "autoscaling:describetags", 120 | "autoscaling:describeterminationpolicytypes", 121 | "autoscaling:describetrafficsources", 122 | "autoscaling:describewarmpool", 123 | "autoscaling:detachinstances", 124 | "autoscaling:detachloadbalancers", 125 | "autoscaling:detachloadbalancertargetgroups", 126 | "autoscaling:detachtrafficsources", 127 | 
"autoscaling:disablemetricscollection", 128 | "autoscaling:enablemetricscollection", 129 | "autoscaling:enterstandby", 130 | "autoscaling:executepolicy", 131 | "autoscaling:exitstandby", 132 | "autoscaling:getpredictivescalingforecast", 133 | "autoscaling:putlifecyclehook", 134 | "autoscaling:putnotificationconfiguration", 135 | "autoscaling:putscalingpolicy", 136 | "autoscaling:putscheduledupdategroupaction", 137 | "autoscaling:putwarmpool", 138 | "autoscaling:recordlifecycleactionheartbeat", 139 | "autoscaling:resumeprocesses", 140 | "autoscaling:rollbackinstancerefresh", 141 | "autoscaling:setdesiredcapacity", 142 | "autoscaling:setinstancehealth", 143 | "autoscaling:setinstanceprotection", 144 | "autoscaling:startinstancerefresh", 145 | "autoscaling:suspendprocesses", 146 | "autoscaling:terminateinstanceinautoscalinggroup", 147 | "autoscaling:updateautoscalinggroup", 148 | ] 149 | ) 150 | 151 | 152 | def dc(o): 153 | """ 154 | Some of the testing methods modify the datastructure you pass into them. 155 | We want to deepcopy each structure so one test doesn't break another. 156 | """ 157 | return copy.deepcopy(o) 158 | 159 | 160 | class TestMethods(unittest.TestCase): 161 | def test_expand_1(self): 162 | expanded_policy = expand_policy(policy=dc(WILDCARD_POLICY_1)) 163 | self.assertEqual(expanded_policy, EXPANDED_POLICY_1) 164 | policy = { 165 | "Statement": { 166 | "NotAction": ["ec2:thispermissiondoesntexist"], 167 | "Resource": "*", 168 | "Effect": "Deny", 169 | } 170 | } 171 | expected_policy = { 172 | "Statement": [ 173 | { 174 | "NotAction": ["ec2:thispermissiondoesntexist"], 175 | "Resource": "*", 176 | "Effect": "Deny", 177 | } 178 | ] 179 | } 180 | expanded_policy = expand_policy(policy=dc(policy), expand_deny=False) 181 | self.assertEqual(expanded_policy, expected_policy) 182 | expanded_policy = expand_policy(policy=dc(policy), expand_deny=True) 183 | self.assertEqual(type(expanded_policy["Statement"]), list) 184 | 185 | def test_expand_2(self): 186 | expanded_policy = expand_policy(policy=dc(WILDCARD_POLICY_2)) 187 | self.assertEqual(expanded_policy, EXPANDED_POLICY_2) 188 | 189 | def test_expand_minimize_over_policies(self): 190 | result = expand_minimize_over_policies(dc(POLICIES_1), expand_policy) 191 | self.assertEqual(result, EXPANDED_POLICIES_1) 192 | 193 | def test_expand_minimize_over_policies_1(self): 194 | result = expand_minimize_over_policies( 195 | EXPANDED_POLICY_1, minimize_policy, minchars=3 196 | ) 197 | self.assertEqual(result, WILDCARD_POLICY_1) 198 | 199 | def test_get_prefixes_for_action(self): 200 | result = _get_prefixes_for_action("iam:cat") 201 | self.assertEqual(result, ["iam:", "iam:c", "iam:ca", "iam:cat"]) 202 | 203 | def test_expand_wildcard_action(self): 204 | result = _expand_wildcard_action(["autoscaling:*"]) 205 | self.assertEqual(sorted(result), AUTOSCALING_PERMISSIONS) 206 | 207 | def test_expand_wildcard_action_2(self): 208 | result = _expand_wildcard_action("thistechdoesntexist:*") 209 | self.assertEqual(result, ["thistechdoesntexist:*"]) 210 | 211 | def test_expand_wildcard_action_3(self): 212 | result = _expand_wildcard_action("ec2:DescribeInstances") 213 | self.assertEqual(result, ["ec2:describeinstances"]) 214 | 215 | def test_get_desired_actions_from_statement(self): 216 | result = _get_desired_actions_from_statement( 217 | dc(WILDCARD_POLICY_1["Statement"][0]) 218 | ) 219 | self.assertEqual(result, set(EXPANDED_ACTIONS_1)) 220 | 221 | def test_get_desired_actions_from_statement_1(self): 222 | statement = { 223 | "Action": 
["ec2:thispermissiondoesntexist"], 224 | "Resource": "*", 225 | "Effect": "Allow", 226 | } 227 | self.assertRaises(Exception, _get_desired_actions_from_statement, statement) 228 | 229 | def test_get_actions_from_statement(self): 230 | statement = { 231 | "Action": "ec2:thispermissiondoesntexist", 232 | "NotAction": list(all_permissions), 233 | "Resource": "*", 234 | "Effect": "Allow", 235 | } 236 | expected_result = {"ec2:thispermissiondoesntexist"} 237 | result = get_actions_from_statement(statement) 238 | self.assertEqual(result, expected_result) 239 | result = get_actions_from_statement(dict(NotAction="abc")) 240 | self.assertSetEqual(result, set(all_permissions)) 241 | 242 | statement = { 243 | "Action": ( 244 | "ec2:updatesecuritygroupruledescriptionsegress", 245 | "ec2:cancelcapacityreservation", 246 | ), 247 | "NotAction": tuple(), 248 | "Resource": "*", 249 | "Effect": "Allow", 250 | } 251 | result = get_actions_from_statement(statement) 252 | self.assertSetEqual( 253 | result, 254 | { 255 | "ec2:updatesecuritygroupruledescriptionsegress", 256 | "ec2:cancelcapacityreservation", 257 | }, 258 | ) 259 | 260 | def test_minimize_statement_actions(self): 261 | statement = dict(Effect="Deny") 262 | self.assertRaises(Exception, minimize_statement_actions, statement) 263 | -------------------------------------------------------------------------------- /policyuniverse/tests/test_organization.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Amazon.com, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.tests.test_organization 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. 
moduleauthor:: Chris Partridge @_tweedge 20 | 21 | """ 22 | import unittest 23 | 24 | from policyuniverse import logger 25 | from policyuniverse.organization import Organization 26 | 27 | 28 | class OrganizationTestCase(unittest.TestCase): 29 | def test_from_org_id(self): 30 | # test valid organization IDs 31 | valid_org_ids = ["o-a1b2c3d4e5", "o-*", "*"] 32 | 33 | for org_id in valid_org_ids: 34 | logger.info("Testing valid organization ID: {}".format(org_id)) 35 | organization_obj = Organization(org_id) 36 | 37 | self.assertFalse(organization_obj.error) 38 | self.assertEqual(org_id, organization_obj.organization) 39 | 40 | # test invalid organization IDs 41 | invalid_org_ids = [ 42 | "o*", 43 | "r-*/ou-a1s2d3f4g5", 44 | "/o-*", 45 | "r-ab12", 46 | "ou-22222222", 47 | ] 48 | 49 | for org_id in invalid_org_ids: 50 | logger.info("Testing invalid organization ID: {}".format(org_id)) 51 | organization_obj = Organization(org_id) 52 | 53 | self.assertTrue(organization_obj.error) 54 | 55 | def test_from_org_path(self): 56 | # test valid organization paths 57 | valid_org_paths = [ 58 | "o-a1b2c3d4e5/*", 59 | "o-a1b2c3d4e5/*/ou-ab12-22222222", 60 | "o-a1b2c3d4e5/r-*/ou-*", 61 | "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111", 62 | "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-ab12-22222222/", 63 | "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-ab12-22222222/*", 64 | "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-ab12-22222222/ou-*", 65 | "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-ab12-22222222/ou-ab12-33333333/ou-*", 66 | "*/*", 67 | "*/*/*", 68 | ] 69 | 70 | for org_path in valid_org_paths: 71 | logger.info("Testing valid organization path: {}".format(org_path)) 72 | organization_obj = Organization(org_path) 73 | 74 | self.assertFalse(organization_obj.error) 75 | 76 | # test invalid organization paths 77 | invalid_org_paths = [ 78 | "dynamodb.amazonaws.com", 79 | "arn:aws:kms:region:111122223333:key/my-example-key", 80 | "111122223333", 81 | "arn:aws:s3:::some-s3-bucket", 82 | "arn:aws:iam::aws:policy/AlexaForBusinessDeviceSetup", 83 | "o-a1b2c3d4e5/*/*/*/*", 84 | "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-*/ou-*", 85 | "o-a1b2c3d4e5/o-a1b2c3d4e5/r-ab12/ou-22222222", 86 | "ou-a1b2c3d4e5/r-ab12/ou-22222222", 87 | "*/*/*/*", 88 | "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-ab12-22222222/ou-ab12-33333333/o-*", 89 | "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-ab12-22222222/ou-ab12-33333333/r-*", 90 | ] 91 | 92 | for org_path in invalid_org_paths: 93 | logger.info("Testing invalid organization path: {}".format(org_path)) 94 | organization_obj = Organization(org_path) 95 | 96 | self.assertTrue(organization_obj.error) 97 | 98 | def test_parent_and_child_validity(self): 99 | # https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_condition-keys.html#condition-keys-principalorgpaths 100 | # Source of test cases and explanation 101 | 102 | org_path = "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-ab12-22222222/" 103 | logger.info("Testing parent:true/child:false case {}".format(org_path)) 104 | organization_obj = Organization(org_path) 105 | self.assertTrue(organization_obj.valid_for_parent_ou) 106 | self.assertFalse(organization_obj.valid_for_child_ous) 107 | 108 | org_path = "o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-ab12-22222222/*" 109 | logger.info("Testing parent:true/child:true case {}".format(org_path)) 110 | organization_obj = Organization(org_path) 111 | self.assertTrue(organization_obj.valid_for_parent_ou) 112 | self.assertTrue(organization_obj.valid_for_child_ous) 113 | 114 | org_path = 
"o-a1b2c3d4e5/r-ab12/ou-ab12-11111111/ou-ab12-22222222/ou-*" 115 | logger.info("Testing parent:false/child:true case {}".format(org_path)) 116 | organization_obj = Organization(org_path) 117 | self.assertFalse(organization_obj.valid_for_parent_ou) 118 | self.assertTrue(organization_obj.valid_for_child_ous) 119 | 120 | # show false/false as there is neither parent nor child 121 | org_path = "o-a1b2c3d4e5/*" 122 | logger.info("Testing parent:false/child:false case {}".format(org_path)) 123 | organization_obj = Organization(org_path) 124 | self.assertFalse(organization_obj.valid_for_parent_ou) 125 | self.assertFalse(organization_obj.valid_for_child_ous) 126 | 127 | def test_root_toggles_child_validity_in_path(self): 128 | # https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_condition-keys.html#condition-keys-principalorgpaths 129 | # If an orgpath is given with a root of *, any OU regardless of parent 130 | # can access the resource, so long as it is in the organization. 131 | # The same can be said of Organizations, from my understanding. 132 | # However, once an OU is given, OU-based restrictions apply. 133 | 134 | org_path = "o-a1b2c3d4e5" 135 | logger.info("Testing path without root: {}".format(org_path)) 136 | organization_obj = Organization(org_path) 137 | self.assertTrue(organization_obj.valid_for_all_ous) 138 | 139 | org_path = "o-a1b2c3d4e5/*" 140 | logger.info("Testing path with root *: {}".format(org_path)) 141 | organization_obj = Organization(org_path) 142 | self.assertTrue(organization_obj.valid_for_all_ous) 143 | 144 | org_path = "o-a1b2c3d4e5/*/ou-ab12-22222222" 145 | logger.info("Testing path with root * and trailing path: {}".format(org_path)) 146 | organization_obj = Organization(org_path) 147 | self.assertFalse(organization_obj.valid_for_all_ous) 148 | -------------------------------------------------------------------------------- /policyuniverse/tests/test_policy.py: -------------------------------------------------------------------------------- 1 | # Copyright 2014 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.tests.test_policy 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. 
moduleauthor:: Patrick Kelley 20 | 21 | """ 22 | import json 23 | import unittest 24 | 25 | from policyuniverse.policy import Policy 26 | 27 | from .helpers import CustomMapping, CustomSequence 28 | 29 | policy01 = dict( 30 | Version="2012-10-08", 31 | Statement=dict( 32 | Effect="Allow", 33 | Principal="*", 34 | Action=["rds:*"], 35 | Resource="*", 36 | Condition={"IpAddress": {"AWS:SourceIP": ["0.0.0.0/0"]}}, 37 | ), 38 | ) 39 | 40 | policy02 = dict( 41 | Version="2010-08-14", 42 | Statement=[ 43 | dict( 44 | Effect="Allow", 45 | Principal="arn:aws:iam::012345678910:root", 46 | Action=["rds:*"], 47 | Resource="*", 48 | ) 49 | ], 50 | ) 51 | 52 | # One statement limits by ARN, the other allows any account number 53 | policy03 = dict( 54 | Version="2010-08-14", 55 | Statement=[ 56 | dict( 57 | Effect="Allow", 58 | Principal="arn:aws:iam::012345678910:root", 59 | Action=["s3:*"], 60 | Resource="*", 61 | ), 62 | dict( 63 | Effect="Allow", 64 | Principal="arn:aws:iam::*:role/Hello", 65 | Action=["ec2:*"], 66 | Resource="*", 67 | ), 68 | ], 69 | ) 70 | 71 | # Two statements, one limited by account condition 72 | policy04 = dict( 73 | Version="2010-08-14", 74 | Statement=[ 75 | dict( 76 | Effect="Allow", 77 | Principal="arn:aws:iam::012345678910:root", 78 | Action=["s3:*"], 79 | Resource="*", 80 | ), 81 | dict( 82 | Effect="Allow", 83 | Principal="arn:aws:iam::*:role/Hello", 84 | Action=["ec2:*"], 85 | Resource="*", 86 | Condition={"StringLike": {"AWS:SourceOwner": "012345678910"}}, 87 | ), 88 | ], 89 | ) 90 | 91 | # Two statements, both with conditions 92 | policy05 = dict( 93 | Version="2010-08-14", 94 | Statement=[ 95 | dict( 96 | Effect="Allow", 97 | Principal="arn:aws:iam::012345678910:root", 98 | Action=["s3:*"], 99 | Resource="*", 100 | Condition={"IpAddress": {"AWS:SourceIP": ["0.0.0.0/0"]}}, 101 | ), 102 | dict( 103 | Effect="Allow", 104 | Principal="arn:aws:iam::*:role/Hello", 105 | Action=["ec2:*"], 106 | Resource="*", 107 | Condition={"StringLike": {"AWS:SourceOwner": "012345678910"}}, 108 | ), 109 | ], 110 | ) 111 | 112 | # AWS Organizations 113 | policy06 = dict( 114 | Version="2010-08-14", 115 | Statement=[ 116 | dict( 117 | Effect="Allow", 118 | Principal="*", 119 | Action=["rds:*"], 120 | Resource="*", 121 | Condition={"StringEquals": {"AWS:PrincipalOrgID": "o-xxxxxxxxxx"}}, 122 | ) 123 | ], 124 | ) 125 | 126 | # Custom types 127 | policy07 = CustomMapping( 128 | dict( 129 | Statement=CustomSequence( 130 | [ 131 | CustomMapping( 132 | dict( 133 | Action="s3:GetBucketAcl", 134 | Effect="Allow", 135 | Principal=CustomMapping({"AWS": "*"}), 136 | Resource="arn:aws:s3:::example-bucket", 137 | Sid="Public Access", 138 | ) 139 | ) 140 | ] 141 | ), 142 | Version="2012-10-17", 143 | ) 144 | ) 145 | 146 | 147 | class PolicyTestCase(unittest.TestCase): 148 | def test_internet_accessible(self): 149 | self.assertTrue(Policy(policy01).is_internet_accessible()) 150 | self.assertFalse(Policy(policy02).is_internet_accessible()) 151 | self.assertTrue(Policy(policy03).is_internet_accessible()) 152 | 153 | def test_internet_accessible_actions(self): 154 | self.assertEqual(Policy(policy01).internet_accessible_actions(), set(["rds:*"])) 155 | self.assertEqual(Policy(policy03).internet_accessible_actions(), set(["ec2:*"])) 156 | 157 | def test_action_summary(self): 158 | summary = Policy(policy05).action_summary() 159 | self.assertEqual( 160 | summary, 161 | { 162 | "ec2": {"List", "Write", "Read", "Tagging", "Permissions"}, 163 | "s3": {"Write", "Read", "List", "Permissions", "Tagging"}, 164 | }, 
165 | ) 166 | 167 | def test_principals(self): 168 | self.assertEqual( 169 | Policy(policy04).principals, 170 | set(["arn:aws:iam::012345678910:root", "arn:aws:iam::*:role/Hello"]), 171 | ) 172 | 173 | def test_condition_entries(self): 174 | from policyuniverse.statement import ConditionTuple 175 | 176 | self.assertEqual( 177 | Policy(policy05).condition_entries, 178 | set( 179 | [ 180 | ConditionTuple(category="cidr", value="0.0.0.0/0"), 181 | ConditionTuple(category="account", value="012345678910"), 182 | ] 183 | ), 184 | ) 185 | 186 | self.assertEqual( 187 | Policy(policy06).condition_entries, 188 | set([ConditionTuple(category="organization", value="o-xxxxxxxxxx")]), 189 | ) 190 | 191 | def test_whos_allowed(self): 192 | allowed = Policy(policy03).whos_allowed() 193 | self.assertEqual(len(allowed), 2) 194 | 195 | allowed = Policy(policy04).whos_allowed() 196 | self.assertEqual(len(allowed), 3) 197 | principal_allowed = set( 198 | [item for item in allowed if item.category == "principal"] 199 | ) 200 | self.assertEqual(len(principal_allowed), 2) 201 | condition_account_allowed = set( 202 | [item for item in allowed if item.category == "account"] 203 | ) 204 | self.assertEqual(len(condition_account_allowed), 1) 205 | 206 | allowed = Policy(policy06).whos_allowed() 207 | self.assertEqual(len(allowed), 2) 208 | 209 | def test_evasion_policies(self): 210 | """Some policies that may have been crafted to evade policycheckers.""" 211 | S3_PUBLIC_BUCKET_POLICY = ( 212 | '{"Version":"2008-10-17","Statement":[' 213 | + "{" 214 | + '"Effect":"Allow","Principal":{"AWS":"*"},' 215 | + '"Action":["s3:GetObject","s3:GetObjectTorrent"],' 216 | + '"Resource":"arn:aws:s3:::%s/*",' 217 | + '"Condition":{"StringNotLike":{"aws:UserAgent":"|_(..)_|"},"NotIpAddress":{"aws:SourceIp":"8.8.8.8"}}' 218 | + "}" 219 | + "]}" 220 | ) 221 | 222 | policy = Policy(json.loads(S3_PUBLIC_BUCKET_POLICY)) 223 | self.assertTrue(policy.is_internet_accessible()) 224 | 225 | S3_REPLICATION_DESTINATION_POLICY = ( 226 | '{"Version":"2008-10-17","Statement":[' 227 | + "{" 228 | + '"Effect":"Allow","Principal":{"AWS":"arn:aws:iam::%s:root"},' 229 | + '"Action":["s3:*"],"Resource":"arn:aws:s3:::%s/*"' 230 | + "}," 231 | + "{" 232 | + '"Effect":"Allow","Principal":{"AWS":"*"},' 233 | + '"Action":["s3:GetObject"],' 234 | + '"Resource":"arn:aws:s3:::%s/*",' 235 | + '"Condition":{"StringNotLike":{"aws:UserAgent": "|_(..)_|"},"NotIpAddress":{"aws:SourceIp":"8.8.8.8"}}' 236 | + "}" 237 | + "]}" 238 | ) 239 | 240 | policy = Policy(json.loads(S3_REPLICATION_DESTINATION_POLICY)) 241 | self.assertTrue(policy.is_internet_accessible()) 242 | 243 | SQS_NOTIFICATION_POLICY = ( 244 | '{"Version":"2008-10-17","Statement":[' 245 | + "{" 246 | + '"Effect":"Allow","Principal":"*",' 247 | + '"Action":["SQS:ReceiveMessage","SQS:DeleteMessage"],' 248 | + '"Resource":"%s",' 249 | + '"Condition":{"StringNotLike":{"aws:UserAgent": "|_(..)_|"},"NotIpAddress":{"aws:SourceIp":"8.8.8.8"}}' 250 | + "}," 251 | + "{" 252 | + '"Effect":"Allow","Principal":{"AWS":"*"},' 253 | + '"Action":["SQS:SendMessage"],' 254 | + '"Resource":"%s",' 255 | + '"Condition":{"ArnLike":{"aws:SourceArn":"arn:aws:s3:*:*:%s"}}' 256 | + "}" 257 | + "]}" 258 | ) 259 | 260 | policy = Policy(json.loads(SQS_NOTIFICATION_POLICY)) 261 | self.assertTrue(policy.is_internet_accessible()) 262 | 263 | def test_non_list_sequence_statement(self): 264 | policy_document = dict( 265 | Version="2012-10-08", 266 | Statement=( 267 | dict( 268 | Effect="Allow", 269 | Principal="*", 270 | Action=["rds:*"], 
271 | Resource="*", 272 | Condition={"IpAddress": {"AWS:SourceIP": ["0.0.0.0/0"]}}, 273 | ), 274 | ), 275 | ) 276 | policy = Policy(policy_document) 277 | self.assertTrue(policy.is_internet_accessible()) 278 | self.assertListEqual( 279 | list(s.statement for s in policy.statements), 280 | [policy_document["Statement"][0]], 281 | ) 282 | 283 | def test_mapping_and_sequence_policy_document(self): 284 | policy = Policy(policy07) 285 | self.assertSetEqual(policy.principals, set("*")) 286 | self.assertIs(policy.is_internet_accessible(), True) 287 | -------------------------------------------------------------------------------- /policyuniverse/tests/test_statement.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Netflix, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | """ 15 | .. module: policyuniverse.tests.test_statement 16 | :platform: Unix 17 | 18 | .. version:: $$VERSION$$ 19 | .. moduleauthor:: Patrick Kelley @patrickbkelley 20 | 21 | """ 22 | import unittest 23 | 24 | from policyuniverse.statement import Statement 25 | 26 | from .helpers import CustomMapping 27 | 28 | # NotPrincipal 29 | statement01 = dict( 30 | Effect="Allow", 31 | NotPrincipal={"AWS": ["arn:aws:iam::012345678910:root"]}, 32 | Action=["rds:*"], 33 | Resource="*", 34 | ) 35 | 36 | # "Principal": "value" 37 | statement02 = dict( 38 | Effect="Allow", 39 | Principal="arn:aws:iam::012345678910:root", 40 | Action=["rds:*"], 41 | Resource="*", 42 | ) 43 | 44 | # "Principal": { "AWS": "value" } 45 | statement03 = dict( 46 | Effect="Allow", 47 | Principal={"AWS": "arn:aws:iam::012345678910:root"}, 48 | Action=["rds:*"], 49 | Resource="*", 50 | ) 51 | 52 | # "Principal": { "AWS": ["value", "value"] } 53 | statement04 = dict( 54 | Effect="Allow", 55 | Principal={"AWS": ["arn:aws:iam::012345678910:root"]}, 56 | Action=["rds:*"], 57 | Resource="*", 58 | ) 59 | 60 | # "Principal": { "Service": "value", "AWS": "value" } 61 | statement05 = dict( 62 | Effect="Allow", 63 | Principal={ 64 | "Service": "lambda.amazonaws.com", 65 | "AWS": "arn:aws:iam::012345678910:root", 66 | }, 67 | Action=["rds:*"], 68 | Resource="*", 69 | ) 70 | 71 | # "Principal": { "Service": ["value", "value"] } 72 | statement06 = dict( 73 | Effect="Allow", 74 | Principal={"Service": ["lambda.amazonaws.com"]}, 75 | Action=["rds:*"], 76 | Resource="*", 77 | ) 78 | 79 | statement07 = dict( 80 | Effect="Allow", 81 | Principal="*", 82 | Action=["rds:*"], 83 | Resource="*", 84 | Condition={ 85 | "ForAllValues:ARNEqualsIfExists": { 86 | "AWS:SourceArn": ["arn:aws:iam::012345678910:role/SomeTestRoleForTesting"] 87 | } 88 | }, 89 | ) 90 | 91 | statement08 = dict( 92 | Effect="Allow", 93 | Principal="*", 94 | Action=["rds:*"], 95 | Resource="*", 96 | Condition={ 97 | "ForAnyValue:ARNEquals": { 98 | "AWS:SourceArn": [ 99 | "arn:aws:iam::012345678910:role/SomeTestRoleForTesting", 100 | "arn:aws:iam::012345678910:role/OtherRole", 101 | ] 102 | } 103 | }, 104 | ) 105 | 106 | 
statement09 = dict( 107 | Effect="Allow", 108 | Principal="*", 109 | Action=["rds:*"], 110 | Resource="*", 111 | Condition={"StringLike": {"AWS:SourceOwner": "012345678910"}}, 112 | ) 113 | 114 | statement09_wildcard = dict( 115 | Effect="Allow", 116 | Principal="*", 117 | Action=["rds:*"], 118 | Resource="*", 119 | Condition={"StringLike": {"AWS:SourceOwner": "*"}}, 120 | ) 121 | 122 | statement10 = dict( 123 | Effect="Allow", 124 | Principal="*", 125 | Action=["rds:*"], 126 | Resource="*", 127 | Condition={ 128 | "ForAnyValue:StringEquals": { 129 | "AWS:SourceOwner": ["012345678910"], 130 | "AWS:SourceAccount": ["123456789123"], 131 | } 132 | }, 133 | ) 134 | 135 | statement11 = dict( 136 | Effect="Allow", 137 | Principal="*", 138 | Action=["rds:*"], 139 | Resource="*", 140 | Condition={ 141 | "ForAnyValue:StringEquals": { 142 | "AWS:SourceOwner": ["012345678910", "123456789123"] 143 | } 144 | }, 145 | ) 146 | 147 | statement12 = dict( 148 | Effect="Allow", 149 | Principal="*", 150 | Action=["rds:*"], 151 | Resource="*", 152 | Condition={ 153 | "StringEquals": { 154 | "AWS:SourceVPC": "vpc-111111", 155 | "AWS:Sourcevpce": "vpce-111111", 156 | "AWS:username": "Admin", 157 | "AWS:SourceOwner": "012345678910", 158 | "AWS:SourceAccount": "012345678910", 159 | }, 160 | "StringLike": {"AWS:userid": "AROAI1111111111111111:*"}, 161 | "ARNLike": {"AWS:SourceArn": "arn:aws:iam::012345678910:role/Admin"}, 162 | "IpAddressIfExists": { 163 | "AWS:SourceIP": ["123.45.67.89", "10.0.7.0/24", "172.16.0.0/16"] 164 | }, 165 | }, 166 | ) 167 | 168 | statement13 = dict( 169 | Effect="Allow", 170 | Principal="*", 171 | Action=["rds:*"], 172 | Resource="*", 173 | Condition={ 174 | "StringNotLike": {"AWS:userid": "AROAI1111111111111111:*"}, 175 | "ARNLike": {"AWS:SourceArn": "arn:aws:iam::012345678910:role/Admin"}, 176 | }, 177 | ) 178 | 179 | statement14 = dict(Effect="Allow", Principal="*", Action=["rds:*"], Resource="*") 180 | 181 | statement15 = dict( 182 | Effect="Allow", 183 | Principal="*", 184 | Action=["rds:*"], 185 | Resource="*", 186 | Condition={"StringNotLike": {"AWS:userid": "AROAI1111111111111111:*"}}, 187 | ) 188 | 189 | statement16 = dict(Effect="Deny", Principal="*", Action=["rds:*"], Resource="*") 190 | 191 | # Bad ARN 192 | statement17 = dict( 193 | Effect="Allow", 194 | Principal="arn:aws:iam::012345678910", 195 | Action=["rds:*"], 196 | Resource="*", 197 | ) 198 | 199 | # ARN Like with wildcard account number 200 | statement18 = dict( 201 | Effect="Allow", 202 | Principal="*", 203 | Action=["rds:*"], 204 | Resource="*", 205 | Condition={"ARNLike": {"AWS:SourceArn": "arn:aws:iam::*:role/Admin"}}, 206 | ) 207 | 208 | # StringLike with wildcard 209 | statement19 = dict( 210 | Effect="Allow", 211 | Principal="*", 212 | Action=["rds:*"], 213 | Resource="*", 214 | Condition={"StringLike": {"AWS:SourceArn": "arn:aws:iam::*"}}, 215 | ) 216 | 217 | # Open CIDR 218 | statement20 = dict( 219 | Effect="Allow", 220 | Principal="*", 221 | Action=["rds:*"], 222 | Resource="*", 223 | Condition={"IpAddress": {"AWS:SourceIP": ["0.0.0.0/0"]}}, 224 | ) 225 | 226 | # S3 ARN 227 | statement21 = dict( 228 | Effect="Allow", 229 | Principal="*", 230 | Action=["rds:*"], 231 | Resource="*", 232 | Condition={"StringEquals": {"AWS:SourceArn": "arn:aws:s3:::mybucket"}}, 233 | ) 234 | 235 | # ARN without account number 236 | statement22 = dict( 237 | Effect="Allow", 238 | Principal="*", 239 | Action=["rds:*"], 240 | Resource="*", 241 | Condition={"StringEquals": {"AWS:SourceArn": "arn:aws:iam:::user/MyUser"}}, 242 | ) 
243 | 244 | # KMS decided to use their own Condition Keys: 245 | statement23 = dict( 246 | Effect="Allow", 247 | Principal="*", 248 | Action=["kms:*"], 249 | Resource="*", 250 | Condition={ 251 | "StringEquals": { 252 | "kms:ViaService": "lightsail.us-east-1.amazonaws.com", 253 | "kms:CallerAccount": "222222222222", 254 | } 255 | }, 256 | ) 257 | 258 | # Testing action groups 259 | statement24 = dict( 260 | Effect="Allow", 261 | Principal="*", 262 | Action=["ec2:authorizesecuritygroupingress", "ec2:AuthorizeSecuritygroupEgress"], 263 | Resource="*", 264 | ) 265 | 266 | # Testing action groups 267 | statement25 = dict( 268 | Effect="Allow", 269 | Principal="*", 270 | Action=[ 271 | "ec2:authorizesecuritygroupingress", 272 | "ec2:AuthorizeSecuritygroupEgress", 273 | "iam:putrolepolicy", 274 | ], 275 | Resource="*", 276 | ) 277 | 278 | # Testing action groups 279 | statement26 = dict( 280 | Effect="Allow", 281 | Principal="*", 282 | Action=["iam:putrolepolicy", "iam:listroles"], 283 | Resource="*", 284 | ) 285 | 286 | # Testing ForAnyValue/ForAllValues without list 287 | # Like statement 07, this should work, even though it's using a set operator 288 | statement27 = dict( 289 | Effect="Allow", 290 | Principal="*", 291 | Action=["rds:*"], 292 | Resource="*", 293 | Condition={ 294 | "ForAllValues:ARNEqualsIfExists": { 295 | "AWS:SourceArn": "arn:aws:iam::012345678910:role/SomeTestRoleForTesting" 296 | } 297 | }, 298 | ) 299 | 300 | # Testing ForAnyValue/ForAllValues without list 301 | # Like statement 10, this should work, even though it's using a set operator 302 | statement28 = dict( 303 | Effect="Allow", 304 | Principal="*", 305 | Action=["rds:*"], 306 | Resource="*", 307 | Condition={ 308 | "ForAnyValue:StringEquals": { 309 | "AWS:SourceOwner": "012345678910", 310 | "AWS:SourceAccount": "123456789123", 311 | } 312 | }, 313 | ) 314 | 315 | # aws:PrincipalOrgID 316 | statement29 = dict( 317 | Effect="Allow", 318 | Principal="*", 319 | Action=["rds:*"], 320 | Resource="*", 321 | Condition={"StringEquals": {"AWS:PrincipalOrgID": "o-xxxxxxxxxx"}}, 322 | ) 323 | 324 | # aws:PrincipalOrgID Wildcard 325 | statement30 = dict( 326 | Effect="Allow", 327 | Principal="*", 328 | Action=["rds:*"], 329 | Resource="*", 330 | Condition={"StringLike": {"AWS:PrincipalOrgID": "o-*"}}, 331 | ) 332 | 333 | # Custom Mapping / Sequence types 334 | statement31 = CustomMapping( 335 | dict( 336 | Action="s3:GetBucketAcl", 337 | Effect="Allow", 338 | Principal=CustomMapping({"AWS": "*"}), 339 | Resource="arn:aws:s3:::example-bucket", 340 | Sid="Public Access", 341 | ) 342 | ) 343 | 344 | # aws:PrincipalARN in conditions 345 | statement32 = dict( 346 | Effect="Allow", 347 | Principal="*", 348 | Action=["s3:*"], 349 | Resource="*", 350 | Condition={ 351 | "ArnEquals": { 352 | "AWS:PrincipalARN": "arn:aws:iam::012345678910:role/SomePrincipalRole" 353 | } 354 | }, 355 | ) 356 | 357 | # aws:PrincipalAccount in conditions 358 | statement33 = dict( 359 | Effect="Allow", 360 | Principal="*", 361 | Action=["rds:*"], 362 | Resource="*", 363 | Condition={ 364 | "ForAnyValue:StringEquals": { 365 | "AWS:PrincipalAccount": ["012345678910", "123456789123"] 366 | } 367 | }, 368 | ) 369 | 370 | # aws:PrincipalOrgPath in conditions 371 | statement34 = dict( 372 | Effect="Allow", 373 | Principal="*", 374 | Action=["rds:*"], 375 | Resource="*", 376 | Condition={ 377 | "ForAnyValue:StringEquals": { 378 | "aws:PrincipalOrgPaths": ["o-a1b2c3d4e5/r-ab12/ou-ab12-11111111"] 379 | } 380 | }, 381 | ) 382 | 383 | # aws:PrincipalOrgPath in 
conditions with wildcard Organization ID 384 | # this is vulnerable because the Root and OU IDs are not globally unique 385 | statement35 = dict( 386 | Effect="Allow", 387 | Principal="*", 388 | Action=["rds:*"], 389 | Resource="*", 390 | Condition={ 391 | "ForAnyValue:StringEquals": { 392 | "aws:PrincipalOrgPaths": [ 393 | "o-*/r-ab12/ou-ab12-11111111/ou-ab12-22222222/ou-*" 394 | ] 395 | } 396 | }, 397 | ) 398 | 399 | # AWS:userid with no * 400 | statement36 = dict( 401 | Effect="Allow", 402 | Principal="*", 403 | Action=["rds:*"], 404 | Resource="*", 405 | Condition={"StringLike": {"AWS:userid": "AROAI1111111111111111:"}}, 406 | ) 407 | 408 | 409 | class StatementTestCase(unittest.TestCase): 410 | def test_statement_effect(self): 411 | statement = Statement(statement01) 412 | self.assertEqual(statement.effect, "Allow") 413 | 414 | def test_statement_not_principal(self): 415 | statement = Statement(statement01) 416 | self.assertTrue(statement.uses_not_principal()) 417 | 418 | def test_statement_summary(self): 419 | statement = Statement(statement24) 420 | self.assertEqual(statement.action_summary(), {"ec2": {"Write"}}) 421 | 422 | statement = Statement(statement25) 423 | self.assertEqual( 424 | statement.action_summary(), {"ec2": {"Write"}, "iam": {"Permissions"}} 425 | ) 426 | 427 | statement = Statement(statement26) 428 | self.assertEqual(statement.action_summary(), {"iam": {"Permissions", "List"}}) 429 | 430 | def test_statement_principals(self): 431 | statement = Statement(statement02) 432 | self.assertEqual(statement.principals, set(["arn:aws:iam::012345678910:root"])) 433 | 434 | statement = Statement(statement03) 435 | self.assertEqual(statement.principals, set(["arn:aws:iam::012345678910:root"])) 436 | 437 | statement = Statement(statement04) 438 | self.assertEqual(statement.principals, set(["arn:aws:iam::012345678910:root"])) 439 | 440 | statement = Statement(statement05) 441 | self.assertEqual( 442 | statement.principals, 443 | set(["arn:aws:iam::012345678910:root", "lambda.amazonaws.com"]), 444 | ) 445 | 446 | statement = Statement(statement06) 447 | self.assertEqual(statement.principals, set(["lambda.amazonaws.com"])) 448 | 449 | statement_wo_principal = dict(statement06) 450 | del statement_wo_principal["Principal"] 451 | statement = Statement(statement_wo_principal) 452 | self.assertEqual(statement.principals, set([])) 453 | 454 | # Custom types 455 | statement = Statement(statement31) 456 | self.assertSetEqual(statement.principals, set(["*"])) 457 | 458 | def test_statement_conditions(self): 459 | statement = Statement(statement07) 460 | self.assertEqual( 461 | statement.condition_arns, 462 | set(["arn:aws:iam::012345678910:role/SomeTestRoleForTesting"]), 463 | ) 464 | 465 | statement = Statement(statement27) 466 | self.assertEqual( 467 | statement.condition_arns, 468 | set(["arn:aws:iam::012345678910:role/SomeTestRoleForTesting"]), 469 | ) 470 | 471 | statement = Statement(statement08) 472 | self.assertEqual( 473 | statement.condition_arns, 474 | set( 475 | [ 476 | "arn:aws:iam::012345678910:role/SomeTestRoleForTesting", 477 | "arn:aws:iam::012345678910:role/OtherRole", 478 | ] 479 | ), 480 | ) 481 | 482 | statement = Statement(statement10) 483 | self.assertEqual( 484 | statement.condition_accounts, set(["012345678910", "123456789123"]) 485 | ) 486 | 487 | statement = Statement(statement28) 488 | self.assertEqual( 489 | statement.condition_accounts, set(["012345678910", "123456789123"]) 490 | ) 491 | 492 | statement = Statement(statement11) 493 | self.assertEqual( 494 | 
statement.condition_accounts, set(["012345678910", "123456789123"]) 495 | ) 496 | 497 | statement = Statement(statement12) 498 | self.assertEqual( 499 | statement.condition_arns, set(["arn:aws:iam::012345678910:role/Admin"]) 500 | ) 501 | self.assertEqual(statement.condition_accounts, set(["012345678910"])) 502 | self.assertEqual(statement.condition_userids, set(["AROAI1111111111111111:*"])) 503 | self.assertEqual( 504 | statement.condition_cidrs, 505 | set(["123.45.67.89", "10.0.7.0/24", "172.16.0.0/16"]), 506 | ) 507 | self.assertEqual(statement.condition_vpcs, set(["vpc-111111"])) 508 | self.assertEqual(statement.condition_vpces, set(["vpce-111111"])) 509 | 510 | statement = Statement(statement13) 511 | self.assertEqual( 512 | statement.condition_arns, set(["arn:aws:iam::012345678910:role/Admin"]) 513 | ) 514 | self.assertEqual(len(statement.condition_userids), 0) 515 | 516 | statement = Statement(statement23) 517 | self.assertEqual(statement.condition_accounts, set(["222222222222"])) 518 | 519 | statement = Statement(statement29) 520 | self.assertEqual(statement.condition_orgids, set(["o-xxxxxxxxxx"])) 521 | self.assertEqual(statement.condition_orgpaths, set(["o-xxxxxxxxxx"])) 522 | 523 | statement = Statement(statement30) 524 | self.assertEqual(statement.condition_orgids, set(["o-*"])) 525 | self.assertEqual(statement.condition_orgpaths, set(["o-*"])) 526 | 527 | statement = Statement(statement32) 528 | self.assertEqual( 529 | statement.condition_arns, 530 | set(["arn:aws:iam::012345678910:role/SomePrincipalRole"]), 531 | ) 532 | 533 | statement = Statement(statement33) 534 | self.assertEqual( 535 | statement.condition_accounts, set(["012345678910", "123456789123"]) 536 | ) 537 | 538 | statement = Statement(statement34) 539 | self.assertEqual(statement.condition_orgids, set(["o-a1b2c3d4e5"])) 540 | self.assertEqual( 541 | statement.condition_orgpaths, set(["o-a1b2c3d4e5/r-ab12/ou-ab12-11111111"]) 542 | ) 543 | 544 | statement = Statement(statement35) 545 | self.assertEqual(statement.condition_orgids, set(["o-*"])) 546 | self.assertEqual( 547 | statement.condition_orgpaths, 548 | set(["o-*/r-ab12/ou-ab12-11111111/ou-ab12-22222222/ou-*"]), 549 | ) 550 | 551 | def test_statement_internet_accessible(self): 552 | self.assertTrue(Statement(statement14).is_internet_accessible()) 553 | self.assertTrue(Statement(statement15).is_internet_accessible()) 554 | self.assertTrue(Statement(statement01).is_internet_accessible()) 555 | 556 | self.assertFalse(Statement(statement02).is_internet_accessible()) 557 | self.assertFalse(Statement(statement03).is_internet_accessible()) 558 | self.assertFalse(Statement(statement04).is_internet_accessible()) 559 | self.assertFalse(Statement(statement05).is_internet_accessible()) 560 | self.assertFalse(Statement(statement06).is_internet_accessible()) 561 | self.assertFalse(Statement(statement07).is_internet_accessible()) 562 | self.assertFalse(Statement(statement08).is_internet_accessible()) 563 | self.assertFalse(Statement(statement09).is_internet_accessible()) 564 | self.assertTrue(Statement(statement09_wildcard).is_internet_accessible()) 565 | self.assertFalse(Statement(statement10).is_internet_accessible()) 566 | self.assertFalse(Statement(statement11).is_internet_accessible()) 567 | self.assertFalse(Statement(statement12).is_internet_accessible()) 568 | self.assertFalse(Statement(statement13).is_internet_accessible()) 569 | self.assertTrue(Statement(statement14).is_internet_accessible()) 570 | self.assertTrue(Statement(statement15).is_internet_accessible()) 
571 | 572 | self.assertFalse(Statement(statement16).is_internet_accessible()) 573 | self.assertFalse(Statement(statement17).is_internet_accessible()) 574 | 575 | self.assertTrue(Statement(statement18).is_internet_accessible()) 576 | self.assertTrue(Statement(statement19).is_internet_accessible()) 577 | self.assertTrue(Statement(statement20).is_internet_accessible()) 578 | 579 | # Statements with ARNS lacking account numbers 580 | # 21 is an S3 ARN 581 | self.assertFalse(Statement(statement21).is_internet_accessible()) 582 | # 22 is a likely malformed user ARN, but lacking an account number 583 | self.assertTrue(Statement(statement22).is_internet_accessible()) 584 | 585 | # 27 is like 07, but does not provide a list for ForAny/ForAll 586 | self.assertFalse(Statement(statement27).is_internet_accessible()) 587 | 588 | # 28 is like 10, but does not provide a list for ForAny/ForAll 589 | self.assertFalse(Statement(statement28).is_internet_accessible()) 590 | 591 | # AWS:PrincipalOrgID 592 | self.assertFalse(Statement(statement29).is_internet_accessible()) 593 | 594 | # AWS:PrincipalOrgID Wildcard 595 | self.assertTrue(Statement(statement30).is_internet_accessible()) 596 | 597 | # AWS:PrincipalARN 598 | self.assertFalse(Statement(statement32).is_internet_accessible()) 599 | 600 | # AWS:PrincipalAccount 601 | self.assertFalse(Statement(statement33).is_internet_accessible()) 602 | 603 | # AWS:PrincipalOrgPath 604 | self.assertFalse(Statement(statement34).is_internet_accessible()) 605 | 606 | # AWS:PrincipalOrgPath Wildcard 607 | self.assertTrue(Statement(statement35).is_internet_accessible()) 608 | 609 | # AWS:userid with no * 610 | self.assertTrue(Statement(statement36).is_internet_accessible()) 611 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md 3 | 4 | [wheel] 5 | universal = 1 6 | 7 | [egg_info] 8 | tag_build = 9 | tag_date = 0 10 | tag_svn_revision = 0 11 | 12 | [flake8] 13 | max-line-length = 127 14 | statistics = True 15 | max-complexity = 10 16 | ; policyuniverse/__init__.py has strangely-ordered imports to avoid circular dependencies 17 | extend-exclude = policyuniverse/__init__.py 18 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | 5 | from setuptools import setup 6 | 7 | ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__))) 8 | 9 | tests_require = ["pytest", "coveralls", "bandit"] 10 | dev_require = ["pre-commit", "black"] 11 | 12 | setup( 13 | name="policyuniverse", 14 | version="1.5.1.20231109", 15 | description="Parse and Process AWS IAM Policies, Statements, ARNs, and wildcards.", 16 | long_description=open(os.path.join(ROOT, "README.md")).read(), 17 | long_description_content_type="text/markdown", 18 | author="Patrick Kelley", 19 | author_email="patrickbarrettkelley@gmail.com", 20 | url="https://github.com/Netflix-Skunkworks/policyuniverse", 21 | keywords=[ 22 | "iam", 23 | "arn", 24 | "action_groups", 25 | "condition", 26 | "policy", 27 | "statement", 28 | "wildcard", 29 | ], 30 | packages=["policyuniverse"], 31 | package_data={"policyuniverse": ["data.json"]}, 32 | python_requires=">=3.7", 33 | include_package_data=True, 34 | zip_safe=False, 35 | classifiers=["License :: OSI Approved :: Apache Software License"], 36 | 
extras_require={"tests": tests_require, "dev": dev_require}, 37 | ) 38 | -------------------------------------------------------------------------------- /updater/awsconsole.js: -------------------------------------------------------------------------------- 1 | var system = require('system'); 2 | var fs = require('fs'); 3 | var webPage = require('webpage'); 4 | 5 | if (system.args.length != 3) { 6 | console.log('Usage: awsconsole.js '); 7 | phantom.exit(-1); 8 | } 9 | 10 | var iam_url = 'https://us-east-1.console.aws.amazon.com/iam/home#/roles/policyuniverse_updater_role$createPolicy?step=edit'; 11 | var federation_base_url = 'https://signin.aws.amazon.com/federation'; 12 | 13 | var signinToken = system.args[1]; 14 | var OUTPUT_FILE = system.args[2]; 15 | 16 | 17 | var page = webPage.create(); 18 | page.settings.userAgent = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36'; 19 | page.settings.javascriptEnabled = true; 20 | page.settings.loadImages = false; //Script is much faster with this field set to false 21 | phantom.cookiesEnabled = true; 22 | phantom.javascriptEnabled = true; 23 | 24 | page.onConsoleMessage = function(msg) { 25 | console.log('>>> ' + msg); 26 | }; 27 | 28 | page.onCallback = function(results) { 29 | console.log("WRITING RESULTS"); 30 | var json_results = JSON.stringify(results, null, 2); 31 | console.log("WRITING RESULTS"); 32 | fs.write(OUTPUT_FILE, json_results, 'w'); 33 | console.log("EXITING!"); 34 | phantom.exit(0); 35 | }; 36 | 37 | page.onResourceReceived = function(resource) { 38 | if(resource.url.indexOf("signin.aws.amazon.com") > -1) 39 | { 40 | statusCode = resource.status; 41 | } 42 | }; 43 | 44 | var getSessionCookies = function(token) { 45 | var url = federation_base_url + '?Action=login' 46 | + '&Issuer=tripleA' 47 | + '&Destination=' + encodeURIComponent(iam_url) 48 | + '&SigninToken='+token; 49 | 50 | statusCode = 400; // default fail 51 | 52 | var onComplete = function(response) { 53 | if(statusCode < 400) { 54 | console.log('Successfully logged in') 55 | page.includeJs( 56 | "https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js", 57 | function() { 58 | console.log("Successfully loaded jQuery"); 59 | page.evaluate(advisor); 60 | console.log("Advisor started"); 61 | } 62 | ); 63 | console.log("Waiting for advisor to finish"); 64 | } else { 65 | console.log('Failed to log in'); 66 | console.log("Status Code: "+statusCode); 67 | console.log('Account '+response+'.'); 68 | phantom.exit(-1); 69 | } 70 | }; 71 | page.open(url, function(response) { setTimeout(onComplete, 20000, response) }); 72 | }; 73 | 74 | getSessionCookies(signinToken); 75 | 76 | var advisor = function() { 77 | console.log("Starting advisor"); 78 | var PERIOD = 5000; // 10 seconds 79 | var results = {}; 80 | var progress = {}; 81 | 82 | // XSRF_TOKEN = window.Csrf.fromCookie(null); 83 | XSRF_TOKEN = app.orcaCsrf.token; 84 | 85 | var collectServices = function() { 86 | console.log("Asking for services."); 87 | jQuery.ajax({ 88 | type: "GET", 89 | url: "/iam/api/services", 90 | dataType: 'json', 91 | beforeSend: function(xhr) {if (XSRF_TOKEN != 'NOT_DEFINED') {xhr.setRequestHeader('X-CSRF-Token', XSRF_TOKEN);} else {system.stderr.writeLine('NOT ADDING XSRF TOKEN');}}, 92 | success: function (data) { 93 | console.log("Done Collecting Services!"); 94 | results['services'] = data; 95 | 96 | Object.keys(results['services']['_embedded']).forEach( 97 | function(service_url) { 98 | var service_data = 
results['services']['_embedded'][service_url]; 99 | var actions_url = service_data['_links']['actions']['href']; 100 | var service_name = service_data['serviceName']; 101 | progress[actions_url] = "NOT_STARTED"; 102 | results['actions'] = {}; 103 | collectServiceActions(actions_url, service_name); 104 | } 105 | ); 106 | 107 | checkProgress(); 108 | }, 109 | error: function(asdf) { 110 | console.log("ERROR"); 111 | phantom.exit(-1); 112 | } 113 | }); 114 | }; 115 | 116 | var collectServiceActions = function(actions_url, service_name) { 117 | console.log("Asking for actions."); 118 | jQuery.ajax({ 119 | type: "GET", 120 | url: actions_url, 121 | dataType: 'json', 122 | beforeSend: function(xhr) {if (XSRF_TOKEN != 'NOT_DEFINED') {xhr.setRequestHeader('X-CSRF-Token', XSRF_TOKEN);} else {system.stderr.writeLine('NOT ADDING XSRF TOKEN');}}, 123 | success: function (data) { 124 | if (typeof results['actions'][service_name] != "undefined") { // Merge if a new version is added by AWS with same service name prefix 125 | results['actions'][service_name]['_links']['results'].push(data['_links']['results']); 126 | merge( results['actions'][service_name]['_embedded'], data['_embedded']); 127 | } else { 128 | results['actions'][service_name] = data; 129 | } 130 | progress[actions_url] = 'COMPLETE'; 131 | }, 132 | error: function(asdf) { 133 | console.log("ERROR - "+actions_url); 134 | progress[actions_url] = 'ERROR'; 135 | 136 | } 137 | }); 138 | }; 139 | 140 | var merge = function(objOne, objTwo) { 141 | Object.keys(objTwo).forEach(function(key) { objOne[key] = objTwo[key]; }); 142 | return objOne; 143 | } 144 | 145 | var checkProgress = function() { 146 | for (var idx in Object.keys(progress)) { 147 | var key = Object.keys(progress)[idx]; 148 | if (progress[key] != 'COMPLETE' && progress[key] != 'ERROR' ) { 149 | console.log("Object "+key+" is not yet complete. "+progress[key]); 150 | setTimeout(function() { checkProgress() }, PERIOD); 151 | return; 152 | } else { 153 | console.log("DONE w/"+key) 154 | } 155 | } 156 | console.log('PROGRESS COMPLETE'); 157 | window.callPhantom(results); 158 | }; 159 | 160 | collectServices(); 161 | }; 162 | 163 | 164 | -------------------------------------------------------------------------------- /updater/requirements.txt: -------------------------------------------------------------------------------- 1 | cloudaux 2 | requests -------------------------------------------------------------------------------- /updater/service.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | 3 | 4 | class Service: 5 | """Stores data on an AWS service 6 | 7 | Args: 8 | url (str): The URL where this service is described. 9 | body (dict): Contains data about service and permissions. 
10 | """ 11 | 12 | def __init__(self, url, body): 13 | self.display_name = self._read_display_name(body) 14 | self.service_name = self._read_service_name(body) 15 | self.description = self._read_description(body) 16 | self.arn_format = self._read_arn_format(body) 17 | self.arn_regex = self._read_arn_regex(body) 18 | 19 | self.actions_url = self._read_actions_url(body) 20 | self.service_url = url 21 | 22 | self.actions_doc_root = self._read_actions_doc_root(body) 23 | self.authz_doc_page = self._read_authz_doc_page(body) 24 | self.concepts_doc_root = self._read_concepts_doc_root(body) 25 | self.context_keys_doc_root = self._read_context_keys_doc_root(body) 26 | self.api_detail_root = self._read_api_detail_root(body) 27 | self.api_doc_root = self._read_api_doc_root(body) 28 | self.api_reference_doc_page = self._read_api_reference_doc_page(body) 29 | 30 | self.actions = defaultdict() 31 | 32 | def toJSON(self): 33 | actions_dict = dict() 34 | for action_name, action in self.actions.items(): 35 | actions_dict[action_name] = action.toJSON() 36 | 37 | me = dict( 38 | prefix=self.service_name, 39 | description=self.description, 40 | arn_format=self.arn_format, 41 | arn_regex=self.arn_regex, 42 | docs=dict( 43 | actions_doc_root=self.actions_doc_root, 44 | authz_doc_page=self.authz_doc_page, 45 | concepts_doc_root=self.concepts_doc_root, 46 | context_keys_doc_root=self.context_keys_doc_root, 47 | api_detail_root=self.api_detail_root, 48 | api_doc_root=self.api_doc_root, 49 | api_reference_doc_page=self.api_reference_doc_page, 50 | ), 51 | actions=actions_dict, 52 | ) 53 | 54 | return me 55 | 56 | def _read_display_name(self, body): 57 | return body["serviceDisplayName"] 58 | 59 | def _read_service_name(self, body): 60 | return body["serviceName"] 61 | 62 | def _read_description(self, body): 63 | return body["description"] 64 | 65 | def _read_arn_format(self, body): 66 | return body["arnFormat"] 67 | 68 | def _read_arn_regex(self, body): 69 | return body["arnRegex"] 70 | 71 | def _read_actions_url(self, body): 72 | return body["_links"]["actions"]["href"] 73 | 74 | def _read_actions_doc_root(self, body): 75 | return body["actionsDocRoot"] 76 | 77 | def _read_authz_doc_page(self, body): 78 | return body["authZDocPage"] 79 | 80 | def _read_concepts_doc_root(self, body): 81 | return body["conceptsDocRoot"] 82 | 83 | def _read_context_keys_doc_root(self, body): 84 | return body["contextKeysDocRoot"] 85 | 86 | def _read_api_detail_root(self, body): 87 | return body["apiDetailRoot"] 88 | 89 | def _read_api_doc_root(self, body): 90 | return body["apiDocRoot"] 91 | 92 | def _read_api_reference_doc_page(self, body): 93 | return body["apiReferenceDocPage"] 94 | -------------------------------------------------------------------------------- /updater/service_action.py: -------------------------------------------------------------------------------- 1 | class ServiceActionConditionKey: 2 | """Stores a condition key that is associated with a ServiceAction.""" 3 | 4 | def __init__(self, body): 5 | self.doc_page_rel = body["docPageRel"] 6 | self.name = body["name"] 7 | self.value_type = body["type"] 8 | self.description = body["description"] 9 | 10 | 11 | class ServiceAction: 12 | """Stores data on an AWS service permission 13 | 14 | Args: 15 | service (str): A python object representing an AWS service 16 | body (dict): Contains data about one permission. 
17 | """ 18 | 19 | def __init__(self, service, body): 20 | self.service = service 21 | self.description = self._get_description(body) 22 | self.action_groups = self._get_action_groups(body) 23 | self.api_doc = self._get_api_doc(body) 24 | self.doc_page_rel = self._get_doc_page_rel(body) 25 | self.doc_page = self._get_doc_page(body) 26 | self.action_name = self._get_action_name(body) 27 | self._condition_keys = self._get_condition_keys(body) 28 | 29 | @property 30 | def condition_keys(self): 31 | """Simplify access to condition keys.""" 32 | return sorted([k.name for k in self._condition_keys]) 33 | 34 | def calculate_action_groups(self): 35 | """Convert AWS Action groups into something that makes more sense.""" 36 | if "Permissions" in self.action_groups: 37 | return "Permissions" 38 | if "ListOnly" in self.action_groups: 39 | return "List" 40 | if "ReadOnly" in self.action_groups: 41 | return "Read" 42 | if "Tagging" in self.action_groups: 43 | return "Tagging" 44 | if "ReadWrite" in self.action_groups: 45 | return "Write" 46 | return "Unknown" 47 | 48 | def toJSON(self): 49 | """Actually returns a dict.""" 50 | return dict( 51 | description=self.description, 52 | aws_action_groups=self.action_groups, 53 | calculated_action_group=self.calculate_action_groups(), 54 | docs=dict( 55 | api_doc=self.api_doc, 56 | doc_page_rel=self.doc_page_rel, 57 | doc_page=self.doc_page, 58 | ), 59 | condition_keys=self.condition_keys, 60 | ) 61 | 62 | def _get_description(self, body): 63 | return body["description"] 64 | 65 | def _get_action_groups(self, body): 66 | return body["actionGroups"] 67 | 68 | def _get_api_doc(self, body): 69 | return body["apiDoc"] 70 | 71 | def _get_doc_page_rel(self, body): 72 | return body["docPageRel"] 73 | 74 | def _get_doc_page(self, body): 75 | return body["docPage"] 76 | 77 | def _get_action_name(self, body): 78 | return body["id"] 79 | 80 | def _get_condition_keys(self, body): 81 | keys = list() 82 | for key_body in body["contextKeys"]: 83 | key = ServiceActionConditionKey(key_body) 84 | keys.append(key) 85 | return keys 86 | -------------------------------------------------------------------------------- /updater/test_service.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import unittest 4 | 5 | from service import Service 6 | 7 | logging.basicConfig( 8 | level=logging.DEBUG, format="%(asctime)s - %(levelname)s - %(message)s" 9 | ) 10 | 11 | 12 | class ServiceTest(unittest.TestCase): 13 | def setUp(self): 14 | self.url = "" 15 | self.aws_response = { 16 | "serviceDisplayName": "Simple Queue Service", 17 | "serviceName": "sqs", 18 | "description": "For Queues and Stuffs", 19 | "arnFormat": "arn:blah:blah:blah", 20 | "arnRegex": "arn:.+:.+:.+", 21 | "_links": {"actions": {"href": "/actions"}}, 22 | "actionsDocRoot": "", 23 | "authZDocPage": "", 24 | "conceptsDocRoot": "", 25 | "contextKeysDocRoot": "", 26 | "apiDetailRoot": "", 27 | "apiDocRoot": "", 28 | "apiReferenceDocPage": "", 29 | } 30 | 31 | def test(self): 32 | expected = dict( 33 | prefix="sqs", 34 | description="For Queues and Stuffs", 35 | arn_format="arn:blah:blah:blah", 36 | arn_regex="arn:.+:.+:.+", 37 | docs=dict( 38 | actions_doc_root="", 39 | authz_doc_page="", 40 | concepts_doc_root="", 41 | context_keys_doc_root="", 42 | api_detail_root="", 43 | api_doc_root="", 44 | api_reference_doc_page="", 45 | ), 46 | actions=dict(), 47 | ) 48 | expected = json.dumps(expected, sort_keys=True, indent=2) 49 | 50 | my_service = 
Service(self.url, self.aws_response) 51 | response = json.dumps(my_service.toJSON(), sort_keys=True, indent=2) 52 | assert expected == response 53 | -------------------------------------------------------------------------------- /updater/test_service_action.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | 4 | from service import Service 5 | from service_action import ServiceAction 6 | from test_service import ServiceTest 7 | 8 | logging.basicConfig( 9 | level=logging.DEBUG, format="%(asctime)s - %(levelname)s - %(message)s" 10 | ) 11 | 12 | 13 | class ServiceActionTest(ServiceTest): 14 | def setUp(self): 15 | super(ServiceActionTest, self).setUp() 16 | self.body = dict( 17 | description="Remove all entries from Queue", 18 | actionGroups=["ReadWrite"], 19 | apiDoc="", 20 | docPageRel="", 21 | docPage="", 22 | id="PurgeQueue", 23 | contextKeys=list( 24 | [ 25 | { 26 | "description": "Filters actions based on the Amazon Id in the request", 27 | "docPage": "", 28 | "docPageRel": "https://docs.aws.amazon.com/a4b/latest/APIReference/API_RegisterAVSDevice.html", 29 | "name": "a4b:amazonId", 30 | "type": "String", 31 | } 32 | ] 33 | ), 34 | ) 35 | 36 | def test(self): 37 | expected = dict( 38 | prefix="sqs", 39 | description="For Queues and Stuffs", 40 | arn_format="arn:blah:blah:blah", 41 | arn_regex="arn:.+:.+:.+", 42 | docs=dict( 43 | actions_doc_root="", 44 | authz_doc_page="", 45 | concepts_doc_root="", 46 | context_keys_doc_root="", 47 | api_detail_root="", 48 | api_doc_root="", 49 | api_reference_doc_page="", 50 | ), 51 | actions=dict( 52 | PurgeQueue=dict( 53 | description="Remove all entries from Queue", 54 | aws_action_groups=["ReadWrite"], 55 | calculated_action_group="Write", 56 | condition_keys=["a4b:amazonId"], 57 | docs=dict(api_doc="", doc_page_rel="", doc_page=""), 58 | ) 59 | ), 60 | ) 61 | expected = json.dumps(expected, sort_keys=True, indent=2) 62 | 63 | # Create Service and Service Action 64 | my_service = Service(self.url, self.aws_response) 65 | my_service_action = ServiceAction(my_service, self.body) 66 | 67 | # Associate the two 68 | my_service.actions[my_service_action.action_name] = my_service_action 69 | 70 | response = json.dumps(my_service.toJSON(), sort_keys=True, indent=2) 71 | assert expected == response 72 | -------------------------------------------------------------------------------- /updater/updater.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import subprocess 4 | import tempfile 5 | import urllib.parse 6 | from collections import defaultdict 7 | from subprocess import CalledProcessError 8 | 9 | import requests 10 | from cloudaux.aws.sts import boto3_cached_conn 11 | from service import Service 12 | from service_action import ServiceAction 13 | 14 | federation_base_url = "https://signin.aws.amazon.com/federation" 15 | account_number = os.environ["AWS_ACCOUNT_ID"] 16 | role_name = os.environ["AWS_ROLE_NAME"] 17 | 18 | 19 | def _get_creds(): 20 | """ 21 | Assumes into the target account and obtains Access Key, Secret Key, and Token 22 | :return: URL-encoded dictionary containing Access Key, Secret Key, and Token 23 | """ 24 | _, credentials = boto3_cached_conn( 25 | "iam", 26 | account_number=account_number, 27 | assume_role=role_name, 28 | return_credentials=True, 29 | ) 30 | 31 | # For local dev, comment out the line above 32 | # and then put the data into this format: 33 | # credentials = { 34 | # 
'AccessKeyId': '', 35 | # 'SecretAccessKey': '', 36 | # 'SessionToken': '' 37 | # } 38 | 39 | creds = json.dumps( 40 | dict( 41 | sessionId=credentials["AccessKeyId"], 42 | sessionKey=credentials["SecretAccessKey"], 43 | sessionToken=credentials["SessionToken"], 44 | ) 45 | ) 46 | 47 | creds = urllib.parse.quote(creds, safe="") 48 | return creds 49 | 50 | 51 | def _get_signin_token(creds): 52 | """ 53 | Exchanges credentials dictionary for a signin token. 54 | 1) Creates URL using credentials dictionary. 55 | 2) Sends a GET request to that URL and parses the response looking for 56 | a signin token. 57 | :return: Signin Token 58 | """ 59 | url = "{base}?Action=getSigninToken&Session={creds}" 60 | url = url.format(base=federation_base_url, creds=creds) 61 | return requests.get(url).json()["SigninToken"] 62 | 63 | 64 | def call_phantom(token, output_file): 65 | """Shells out to phantomjs to login to the AWS console and gather data""" 66 | path = os.path.dirname(__file__) 67 | console_js = os.path.join(path, "awsconsole.js") 68 | 69 | try: 70 | # print("Calling Phantom!") 71 | p = subprocess.Popen( 72 | [ 73 | "/home/runner/work/policyuniverse/policyuniverse/phantomjs-2.1.1-linux-x86_64/bin/phantomjs", 74 | console_js, 75 | token, 76 | output_file, 77 | ], 78 | stdout=subprocess.PIPE, 79 | stderr=subprocess.STDOUT, 80 | ) 81 | output, errs = p.communicate(timeout=120) 82 | # print("Output: ", output) 83 | if errs: 84 | print("Errors: ", errs) 85 | except subprocess.TimeoutExpired: 86 | print("PhantomJS timed out") 87 | return 1 # return code 1 for timeout 88 | except CalledProcessError: 89 | print("PhantomJS exited: {}".format(p.returncode)) 90 | return p.returncode 91 | else: 92 | # print("PhantomJS exited: 0") 93 | return 0 94 | 95 | 96 | def parse_service_data(data): 97 | """Create a map of service objects from the weird JSON we get from the console.""" 98 | services = defaultdict() 99 | for service_url, service_details in data["services"]["_embedded"].items(): 100 | service = Service(service_url, service_details) 101 | services[service.service_name] = service 102 | return services 103 | 104 | 105 | def parse_service_action_data(data, services): 106 | """Add service actions to the map created by `parse_service_data()`""" 107 | for service in data["actions"].keys(): 108 | for _, action_details in data["actions"][service]["_embedded"].items(): 109 | action = ServiceAction(services[service], action_details) 110 | services[service].actions[action.action_name] = action 111 | 112 | 113 | def gather_data_from_console(): 114 | """Login to AWS Console and gather data on all AWS Services and Service Actions (Permissions)""" 115 | creds = _get_creds() 116 | token = _get_signin_token(creds) 117 | with tempfile.NamedTemporaryFile() as f: 118 | ret_code = call_phantom(token, f.name) 119 | service_data = f.read() 120 | if ret_code == 0: 121 | service_data = json.loads(service_data) 122 | else: 123 | print( 124 | "Phantom process returned non-zero exit code: {ret_code}".format( 125 | ret_code=ret_code 126 | ) 127 | ) 128 | print("File contents:\n{service_data}".format(service_data=service_data)) 129 | raise Exception( 130 | "Phantom returned non-zero exit code: {ret_code}".format( 131 | ret_code=ret_code 132 | ) 133 | ) 134 | return service_data 135 | 136 | 137 | def process_data(service_data): 138 | """Build a map of services and permissions and format it nicely.""" 139 | services = parse_service_data(service_data) 140 | parse_service_action_data(service_data, services) 141 | 142 | output = dict() 143 | 
for _, service in services.items(): 144 | output[service.display_name] = service.toJSON() 145 | 146 | return output 147 | 148 | 149 | def _print_updated_actions(service, actions, verb): 150 | """Prints any added/removed actions.""" 151 | if actions: 152 | print('**Service "{service}" {verb}:**'.format(service=service, verb=verb)) 153 | for action in sorted(list(actions)): 154 | print("- {action}".format(action=action)) 155 | print("") 156 | 157 | 158 | def updates_available(service_data): 159 | """ 160 | Using our version of policyuniverse, determine if there are significant updates to the service data. 161 | Should also do some sanity checking so we don't send a PR that removes all services or something crazy. 162 | """ 163 | from policyuniverse import service_data as deployed_data 164 | 165 | if deployed_data == service_data: 166 | print("No changes whatsoever.") 167 | return False 168 | 169 | services_added = set(service_data.keys()) - set(deployed_data.keys()) 170 | services_removed = set(deployed_data.keys()) - set(service_data.keys()) 171 | 172 | if services_added: 173 | print("**Services Added:** ", sorted(list(services_added))) 174 | if services_removed: 175 | print("**Services Removed:** ", sorted(list(services_removed))) 176 | 177 | services_in_both = set(service_data.keys()).intersection(set(deployed_data.keys())) 178 | 179 | # Now lets look at the actions under each service 180 | actions_modified = False 181 | for service in services_in_both: 182 | service_body = service_data[service] 183 | 184 | old_actions = set(deployed_data[service].get("actions").keys()) 185 | new_actions = set(service_body.get("actions").keys()) 186 | 187 | actions_added = new_actions - old_actions 188 | actions_removed = old_actions - new_actions 189 | 190 | _print_updated_actions(service, actions_added, "Added") 191 | _print_updated_actions(service, actions_removed, "Removed") 192 | 193 | if actions_added or actions_removed: 194 | actions_modified = True 195 | 196 | # Sanity Check 197 | if len(services_removed) > 20: 198 | print( 199 | "There were {services_removed} services removed. Too many for a PR".format( 200 | services_removed=len(services_removed) 201 | ) 202 | ) 203 | return False 204 | 205 | if services_added or services_removed: 206 | return True 207 | 208 | # Don't return inside the loop because we want to print out all the changes. 209 | if actions_modified: 210 | return True 211 | 212 | # This could be a category or a doc change or a regex change as well. 213 | print( 214 | "Dicts don't match but no service/action changes found. Maybe a doc URL or Regex or Action Category?" 215 | ) 216 | return True 217 | 218 | 219 | def main(): 220 | """Gather Data, Parse Data, Format Data, Save to disk.""" 221 | service_data = gather_data_from_console() 222 | service_data = process_data(service_data) 223 | 224 | # For local dev on the PR logic: 225 | # with open('output_formatted.json') as infile: 226 | # service_data = json.load(infile) 227 | 228 | updates_available(service_data) 229 | 230 | with open("output_formatted.json", "w") as outfile: 231 | json.dump(service_data, outfile, indent=2, sort_keys=True) 232 | outfile.write("\n") 233 | 234 | # print(json.dumps(service_data, indent=2, sort_keys=True)) 235 | 236 | 237 | if __name__ == "__main__": 238 | main() 239 | --------------------------------------------------------------------------------
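Usage sketch (editor's addition, not a file in the repository): the statement tests above build plain dicts and hand them to the Statement class, so the same API can be exercised directly. The snippet below mirrors statement35 from tests/test_statement.py; the import path policyuniverse.statement and the asserted values follow the test assertions above and are otherwise assumptions.

    from policyuniverse.statement import Statement

    # Hypothetical statement mirroring statement35: Principal "*" "scoped" by an
    # aws:PrincipalOrgPaths condition whose organization ID is itself a wildcard.
    statement = Statement(
        dict(
            Effect="Allow",
            Principal="*",
            Action=["rds:*"],
            Resource="*",
            Condition={
                "ForAnyValue:StringEquals": {
                    "aws:PrincipalOrgPaths": [
                        "o-*/r-ab12/ou-ab12-11111111/ou-ab12-22222222/ou-*"
                    ]
                }
            },
        )
    )

    # Matching the assertions in StatementTestCase:
    assert statement.condition_orgids == {"o-*"}
    assert statement.condition_orgpaths == {
        "o-*/r-ab12/ou-ab12-11111111/ou-ab12-22222222/ou-*"
    }
    # Because Root and OU IDs are not globally unique, a wildcard organization ID
    # does not meaningfully constrain the principal, so the statement is still
    # reported as internet accessible.
    assert statement.is_internet_accessible()

The condition values here come from the data.json produced by updater/updater.py; the tests only check the parsing and reachability logic on top of it.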