├── .github ├── dependabot.yml └── workflows │ ├── base.yml │ ├── build_docker.yml │ ├── code-guru.yml │ └── review.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .vscode └── settings.json ├── LICENSE ├── README.md ├── SECURITY.md ├── athena_query ├── create_table.md ├── fix_path.sh └── query.md ├── data.tf ├── docs ├── Diagram_of_processing_a_request.png └── docs.md ├── examples └── complete │ ├── .gitignore │ ├── README.md │ ├── main.tf │ ├── outputs.tf │ ├── variables.tf │ └── versions.tf ├── layer ├── .python-version ├── deploy_requirements.txt ├── poetry.lock └── pyproject.toml ├── layers.tf ├── locals.tf ├── moved.tf ├── outputs.tf ├── perm_revoker_lambda.tf ├── s3.tf ├── sheduler_group.tf ├── slack_handler_lambda.tf ├── sns.tf ├── src ├── .gitignore ├── .python-version ├── access_control.py ├── check_python_version.py ├── config.py ├── deploy_requirements.txt ├── docker │ ├── Dockerfile │ ├── Dockerfile.requester │ └── Dockerfile.revoker ├── entities │ ├── __init__.py │ ├── aws.py │ ├── model.py │ └── slack.py ├── errors.py ├── events.py ├── group.py ├── main.py ├── organizations.py ├── poetry.lock ├── pyproject.toml ├── requirements.txt ├── revoker.py ├── s3.py ├── schedule.py ├── slack_helpers.py ├── sso.py ├── statement.py └── tests │ ├── __init__.py │ ├── conftest.py │ ├── strategies.py │ ├── test_access_control.py │ ├── test_config.py │ └── utils.py ├── tests ├── localstack-backend.tf └── localstack.tfvars ├── vars.tf └── versions.tf /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "poetry" 4 | directory: "/src" 5 | schedule: 6 | interval: "weekly" 7 | commit-message: 8 | prefix: "src" 9 | labels: 10 | - "dependencies" 11 | - "python" 12 | - "src" 13 | 14 | - package-ecosystem: "poetry" 15 | directory: "/layer" 16 | schedule: 17 | interval: "weekly" 18 | commit-message: 19 | prefix: "layer" 20 | labels: 21 | - "dependencies" 22 | - "python" 23 | - "layer" 24 | 25 | - package-ecosystem: "terraform" 26 | directory: "/" 27 | schedule: 28 | interval: "weekly" 29 | commit-message: 30 | prefix: "terraform" 31 | labels: 32 | - "dependencies" 33 | - "terraform" 34 | -------------------------------------------------------------------------------- /.github/workflows/base.yml: -------------------------------------------------------------------------------- 1 | name: 'base' 2 | 3 | on: 4 | push: 5 | tags: [ v* ] 6 | branches: [ main ] 7 | pull_request: 8 | branches: [ main ] 9 | 10 | jobs: 11 | python-job: 12 | uses: fivexl/github-reusable-workflows/.github/workflows/python-job.yml@main 13 | with: 14 | python-version: "3.10" 15 | aws-default-region: "eu-central-1" 16 | working-directory: "src" 17 | 18 | terraform-job: 19 | uses: fivexl/github-reusable-workflows/.github/workflows/terraform-job.yml@main 20 | with: 21 | terraform-version: "1.5.0" 22 | aws-default-region: "eu-central-1" 23 | 24 | # localstack: 25 | # name: localstack 26 | # runs-on: ubuntu-22.04 27 | # defaults: 28 | # run: 29 | # shell: bash 30 | # env: 31 | # AWS_DEFAULT_REGION: "eu-central-1" 32 | # TF_CLI_ARGS_plan: "-compact-warnings" 33 | # TF_CLI_ARGS_apply: "-compact-warnings" 34 | # services: 35 | # localstack-service: 36 | # image: localstack/localstack:3.4.0 37 | # ports: 38 | # - "4566:4566" 39 | # - "4510-4559:4510-4559" 40 | # env: 41 | # #SERVICES: "sqs,s3" 42 | # FORCE_NONINTERACTIVE: 1 43 | # AWS_ACCESS_KEY_ID: test 44 | # AWS_SECRET_ACCESS_KEY: test 45 | # options: >- 46 | # --health-cmd 
"./bin/localstack status services" 47 | # --health-interval 10s 48 | # --health-timeout 5s 49 | # --health-retries 5 50 | # --health-start-period 15s 51 | # volumes: 52 | # - /var/run/docker.sock:/var/run/docker.sock # https://docs.localstack.cloud/references/lambda-provider-v2/#docker-not-available 53 | # steps: 54 | # - uses: actions/checkout@master 55 | 56 | # - name: Setup Terraform 57 | # uses: hashicorp/setup-terraform@v1 58 | # with: 59 | # terraform_version: 1.5.0 60 | 61 | # - uses: actions/setup-python@v4 62 | # with: 63 | # python-version: '3.10' 64 | 65 | # - name: Terraform-local installation 66 | # run: pip3 install terraform-local==0.16.1 67 | 68 | # - name: Install Poetry 69 | # run: pip3 install poetry 70 | 71 | # - name: terraform drop unsupported by localstack 72 | # run: | 73 | # rm sheduler_group.tf 74 | 75 | # - name: terraform init 76 | # run: tflocal init -backend-config=./tests/localstack-backend.tf 77 | 78 | # - name: terraform validate 79 | # run: tflocal validate 80 | 81 | # - name: terraform plan 82 | # run: tflocal plan -var-file=./tests/localstack.tfvars -out=plan.tfplan 83 | 84 | # - name: terraform apply 85 | # run: tflocal apply -auto-approve plan.tfplan 86 | -------------------------------------------------------------------------------- /.github/workflows/build_docker.yml: -------------------------------------------------------------------------------- 1 | name: Build and Push Docker Images 2 | 3 | on: 4 | release: 5 | types: [published] 6 | push: 7 | tags: [v*] 8 | branches: [main] 9 | 10 | jobs: 11 | build-and-push: 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: Checkout code 16 | uses: actions/checkout@v4 17 | 18 | - name: Set up Docker Buildx 19 | uses: docker/setup-buildx-action@v3 20 | 21 | - name: Configure AWS credentials 22 | uses: aws-actions/configure-aws-credentials@v4 23 | with: 24 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 25 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 26 | aws-region: eu-central-1 27 | 28 | - name: Login to Amazon ECR 29 | id: login-ecr 30 | uses: aws-actions/amazon-ecr-login@v1 31 | 32 | - name: Get short SHA 33 | id: get_sha 34 | run: echo "SHORT_SHA=$(git rev-parse --short HEAD)" >> $GITHUB_ENV 35 | 36 | - name: Get tag name or SHA 37 | id: get_tag 38 | run: | 39 | if [[ "${GITHUB_REF}" == refs/tags/* ]]; then 40 | echo "GIT_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV 41 | else 42 | echo "GIT_TAG=${{ env.SHORT_SHA }}" >> $GITHUB_ENV 43 | fi 44 | 45 | - name: Build and push requester image 46 | uses: docker/build-push-action@v5 47 | with: 48 | context: . 49 | file: src/docker/Dockerfile.requester 50 | push: true 51 | tags: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.eu-central-1.amazonaws.com/aws-sso-elevator:requester-${{ env.GIT_TAG }} 52 | provenance: false 53 | 54 | - name: Build and push revoker image 55 | uses: docker/build-push-action@v5 56 | with: 57 | context: . 
58 | file: src/docker/Dockerfile.revoker 59 | push: true 60 | tags: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.eu-central-1.amazonaws.com/aws-sso-elevator:revoker-${{ env.GIT_TAG }} 61 | provenance: false 62 | -------------------------------------------------------------------------------- /.github/workflows/code-guru.yml: -------------------------------------------------------------------------------- 1 | name: CodeGuru Review 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | workflow_dispatch: 7 | 8 | permissions: 9 | contents: read 10 | security-events: write 11 | id-token: write 12 | 13 | jobs: 14 | deploy: 15 | name: Deploy 16 | runs-on: ubuntu-latest 17 | environment: production 18 | 19 | steps: 20 | # Step 1: Checkout the repository and provide your AWS credentials 21 | - name: Checkout repository 22 | uses: actions/checkout@v4 23 | with: 24 | fetch-depth: 0 25 | 26 | - name: Configure AWS Credentials 27 | uses: aws-actions/configure-aws-credentials@v4 28 | with: 29 | aws-region: eu-central-1 30 | role-to-assume: ${{ secrets.DEVELOPMENT_ACCOUNT_ROLE_ARN }} 31 | role-duration-seconds: 3600 32 | role-session-name: ${{ github.event.repository.name }} 33 | 34 | # Step 2: Add CodeGuru Reviewer Action 35 | - name: CodeGuru Reviewer 36 | uses: aws-actions/codeguru-reviewer@v1.1 37 | with: 38 | s3_bucket: codeguru-reviewer-mambo-jumbo 39 | 40 | # Step 3: Upload results into GitHub 41 | - name: Upload review result 42 | uses: github/codeql-action/upload-sarif@v2 43 | with: 44 | sarif_file: codeguru-results.sarif.json 45 | -------------------------------------------------------------------------------- /.github/workflows/review.yml: -------------------------------------------------------------------------------- 1 | name: Update review branch 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | review: 8 | name: Review 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@v4 14 | with: 15 | fetch-depth: 0 16 | 17 | - name: Update review branch 18 | id: update-review 19 | run: | 20 | git push origin main:review 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Local .terraform directories 2 | **/.terraform/* 3 | 4 | # .tfstate files 5 | *.tfstate 6 | *.tfstate.* 7 | 8 | # locks 9 | .terraform.lock.hcl 10 | 11 | # Crash log files 12 | crash.log 13 | 14 | # Ignore any .tfvars files that are generated automatically for each Terraform run. Most 15 | # .tfvars files are managed as part of configuration and so should be included in 16 | # version control. 
17 | # 18 | # example.tfvars 19 | 20 | # Ignore override files as they are usually used to override resources locally and so 21 | # are not checked in 22 | override.tf 23 | override.tf.json 24 | *_override.tf 25 | *_override.tf.json 26 | .hypothesis 27 | 28 | # Include override files you do wish to add to version control using negated pattern 29 | # 30 | # !example_override.tf 31 | 32 | # Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan 33 | # example: *tfplan* 34 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | # Terraform 3 | - repo: https://github.com/antonbabenko/pre-commit-terraform 4 | rev: v1.77.2 5 | hooks: 6 | - id: terraform_fmt 7 | - id: terraform_tflint 8 | - id: terraform_validate 9 | - id: terraform_docs 10 | - id: terraform_tfsec 11 | # Python 12 | - repo: local 13 | hooks: 14 | # NOTE: we make `black` a local hook because if it's installed from 15 | # PyPI (rather than from source) then it'll run twice as fast thanks to mypyc 16 | - id: black 17 | name: black 18 | description: "Black: The uncompromising Python code formatter" 19 | entry: black 20 | language: python 21 | require_serial: true 22 | types_or: [python, pyi] 23 | additional_dependencies: [black==25.1.0] 24 | - repo: https://github.com/charliermarsh/ruff-pre-commit 25 | rev: v0.11.0 26 | hooks: 27 | - id: ruff 28 | args: [--exit-non-zero-on-fix] 29 | - repo: https://github.com/codespell-project/codespell 30 | rev: v2.4.1 31 | hooks: 32 | - id: codespell 33 | types_or: [python, rst, markdown, cython, c] 34 | additional_dependencies: [tomli] 35 | - repo: local 36 | hooks: 37 | # This pre-commit hook is used to automatically generate a deploy_requirements.txt file from the current Poetry dependencies. 38 | - id: poetry-export-deploy-requirements-src 39 | name: Export dependencies with Poetry for src 40 | entry: bash -c 'cd src && poetry export -f requirements.txt --output deploy_requirements.txt && git diff --exit-code deploy_requirements.txt || git add deploy_requirements.txt' 41 | language: system 42 | files: src/pyproject.toml$ 43 | pass_filenames: false 44 | # This pre-commit hook also generates a deploy_requirements.txt file for layer deployments. 
45 | - id: poetry-export-deploy-requirements-layer 46 | name: Export dependencies with Poetry for layer 47 | entry: bash -c 'cd layer && poetry export -f requirements.txt --output deploy_requirements.txt && git diff --exit-code deploy_requirements.txt || git add deploy_requirements.txt' 48 | language: system 49 | files: layer/pyproject.toml$ 50 | pass_filenames: false 51 | # Generate requirements.txt for git-ci 52 | - id: poetry-export-git-ci-requirements 53 | name: Export dependencies with Poetry for git-ci 54 | entry: bash -c 'cd src && poetry export --with dev -f requirements.txt --output requirements.txt && git diff --exit-code requirements.txt || git add requirements.txt' 55 | language: system 56 | files: src/pyproject.toml$ 57 | pass_filenames: false 58 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.envFile": "${workspaceFolder}/.env", 3 | "python.terminal.activateEnvInCurrentTerminal": true, 4 | "python.defaultInterpreterPath": "${workspaceFolder}/src/.venv/bin/python", 5 | "python.analysis.extraPaths": [ 6 | "${workspaceFolder}/src/.venv/lib/python3.11/site-packages/", 7 | "./src/src" 8 | ], 9 | "notebook.formatOnSave.enabled": true, 10 | "notebook.codeActionsOnSave": { 11 | "notebook.source.fixAll": "explicit", 12 | "notebook.source.organizeImports": "explicit" 13 | }, 14 | "[python]": { 15 | "editor.formatOnSave": true, 16 | "editor.codeActionsOnSave": { 17 | "source.fixAll": "explicit", 18 | "source.organizeImports": "explicit" 19 | }, 20 | "editor.defaultFormatter": "charliermarsh.ruff" 21 | } 22 | 23 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
-------------------------------------------------------------------------------- /SECURITY.md: --------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | The following versions of this project are
6 | currently supported with security updates.
7 |
8 | | Version | Supported |
9 | | ------- | ------------------ |
10 | | 2.x.x | :white_check_mark: |
11 | | 3.x.x | :white_check_mark: |
12 |
13 | ## Reporting a Vulnerability
14 |
15 | We take security very seriously.
16 | If you believe you have found a security vulnerability in our project, we encourage you to let us know right away.
17 | We will investigate all legitimate reports and do our best to quickly fix the problem.
18 |
19 | Please use [the contact form on the FivexL website](https://fivexl.io/#contact) and provide the following information:
20 |
21 | - A description of the issue.
22 | - Steps to reproduce the issue, if possible.
23 | - Any known workarounds.
24 | - Any other information that would be helpful in understanding the severity and impact of the vulnerability.
25 |
26 | Please DO NOT open a GitHub issue if you believe you have found a security issue.
27 |
28 | We will acknowledge your message within 72 hours, and you'll receive a more detailed response by email within 96 hours indicating the next steps in handling your report.
29 |
30 | After the initial reply to your report, our team will endeavor to keep you informed of the progress being made towards a fix and full announcement.
31 |
32 | Please refrain from disclosing the vulnerability publicly until we have had a chance to address it.
33 |
34 | Thank you for helping keep our project and our users safe!
35 |
-------------------------------------------------------------------------------- /athena_query/create_table.md: --------------------------------------------------------------------------------
1 |
2 | # Manually create table
3 |
4 | Replace `bucket_name` and `s3_bucket_partition_prefix` below, and you should be good to go.
5 |
6 | ```
7 | CREATE EXTERNAL TABLE IF NOT EXISTS sso_elevator_table (
8 |   `role_name` string,
9 |   `account_id` string,
10 |   `reason` string,
11 |   `requester_slack_id` string,
12 |   `requester_email` string,
13 |   `request_id` string,
14 |   `approver_slack_id` string,
15 |   `approver_email` string,
16 |   `operation_type` string,
17 |   `permission_duration` string,
18 |   `time` string,
19 |   `group_name` string,
20 |   `group_id` string,
21 |   `group_membership_id` string,
22 |   `audit_entry_type` string,
23 |   `version` string,
24 |   `sso_user_principal_id` string,
25 |   `secondary_domain_was_used` string
26 | )
27 | PARTITIONED BY (`timestamp` string)
28 | ROW FORMAT SERDE 'org.openx.data.jsonserde.JsonSerDe'
29 | LOCATION 's3://bucket_name/s3_bucket_partition_prefix/'
30 | TBLPROPERTIES (
31 |   'projection.enabled'='true',
32 |   'projection.timestamp.format'='yyyy/MM/dd',
33 |   'projection.timestamp.interval'='1',
34 |   'projection.timestamp.interval.unit'='DAYS',
35 |   'projection.timestamp.range'='2023/05/08,NOW',
36 |   'projection.timestamp.type'='date',
37 |   'storage.location.template'='s3://bucket_name/s3_bucket_partition_prefix/${timestamp}/');
38 | ```
-------------------------------------------------------------------------------- /athena_query/fix_path.sh: --------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # In releases prior to 2.0.0, Elevator didn't check for double slashes in the path, which could cause Athena queries to fail.
3 | # If your S3 bucket has double slashes in the path, you can run this script to fix the path.
4 | # Replace SOURCE_PREFIX, DESTINATION_PREFIX, and BUCKET_NAME with your values.
5 |
6 | set -x # Enable debugging
7 |
8 | SOURCE_PREFIX="logs//2024/"
9 | DESTINATION_PREFIX="logs/2024/" # informational; the new key is derived below by collapsing "//"
10 | BUCKET_NAME=""
11 |
12 | # Set the number of parallel jobs (adjust based on your system's resources)
13 | NUM_JOBS=10
14 |
15 | # Function to move a single file
16 | move_file() {
17 |   FILE_PATH="$1"
18 |   BUCKET_NAME="$2"
19 |   NEW_FILE_PATH=$(echo "$FILE_PATH" | sed "s|//|/|g")
20 |
21 |   # Copy the file to the new path with encryption
22 |   aws s3 cp "s3://$BUCKET_NAME/$FILE_PATH" "s3://$BUCKET_NAME/$NEW_FILE_PATH" --sse AES256
23 |
24 |   # Delete the original file if the copy was successful
25 |   if [[ $? -eq 0 ]]; then
26 |     aws s3 rm "s3://$BUCKET_NAME/$FILE_PATH"
27 |     echo "Moved $FILE_PATH to $NEW_FILE_PATH"
28 |   else
29 |     echo "Error copying $FILE_PATH"
30 |   fi
31 | }
32 |
33 | export -f move_file # Export the function to be used by parallel
34 |
35 | # Step 1: List all objects in the first path and move them asynchronously
36 | aws s3 ls "s3://$BUCKET_NAME/$SOURCE_PREFIX" --recursive | awk '{print $4}' | \
37 | grep -v '^$' | xargs -P "$NUM_JOBS" -I {} bash -c 'move_file "$@"' _ {} "$BUCKET_NAME"
-------------------------------------------------------------------------------- /athena_query/query.md: --------------------------------------------------------------------------------
1 |
2 | # Query by date
3 | ```
4 | SELECT *
5 | FROM sso_elevator_table
6 | WHERE timestamp >= '2023/05/01' AND timestamp <= '2024/05/12';
7 | ```
8 | # Query everything
9 | ```
10 | SELECT *
11 | FROM sso_elevator_table;
12 | ```
-------------------------------------------------------------------------------- /data.tf: --------------------------------------------------------------------------------
1 | data "aws_region" "current" {}
2 |
3 | data "aws_caller_identity" "current" {}
4 |
5 | data "aws_ssoadmin_instances" "all" {
6 |   count = var.sso_instance_arn == "" ? 1 : 0
7 | }
-------------------------------------------------------------------------------- /docs/Diagram_of_processing_a_request.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fivexl/terraform-aws-sso-elevator/7cc9daaa59bf4b50621c8361576cb8afbb95c7a4/docs/Diagram_of_processing_a_request.png -------------------------------------------------------------------------------- /docs/docs.md: --------------------------------------------------------------------------------
1 | ## SSO Delegation
2 |
3 | The main reasons to delegate SSO administration to another account are to reduce the need to access the management account to a minimum and to separate concerns. With a separate SSO management account, you can grant granular access to SSO management alone, without creating an overly complex role in the management account just to restrict access there to SSO.
4 |
5 | Although the module can be deployed in either the management account or the delegated SSO administrator account, we recommend deploying it in the delegated SSO administrator account.
6 |
7 | To do this, create a new AWS account (if you don’t already have one) and delegate SSO administration to it. For more details on this process, refer to the [AWS documentation](https://docs.aws.amazon.com/singlesignon/latest/userguide/delegated-admin-how-to-register.html).
8 |
9 | Alternatively, you can use this Terraform snippet in your management account to delegate SSO permissions to the new account:
10 |
11 | ```hcl
12 | resource "aws_organizations_delegated_administrator" "sso" {
13 |   account_id = <>
14 |   service_principal = "sso.amazonaws.com"
15 | }
16 | ```
17 | This is the only prerequisite for the module to work in the delegated SSO administrator account. After this step, you can proceed with deploying the module.
18 |
19 | **Important Note:**
20 |
21 | The delegated SSO administrator account **cannot** be used to manage access to the management account. Specifically, any permission set created and managed by the management account can’t be used by the SSO tooling account. (If you create a permission set in the management account and try to use it in the SSO account, you’ll get an “Access Denied” error.)
22 |
23 | This limitation ensures that the management account always manages access to itself, while the delegated SSO administrator account manages access to every other account in the organization. As a result, you won’t be able to use an `account_level` SSO elevator to manage access to the management account if the elevator is deployed in the delegated SSO administrator account.
24 |
25 | However, there is still a way to provide **temporary** access to the management account through SSO Elevator:
26 |
27 | 1. Go to the management account and create a `ManagementAccountAccess` group and permission set (with the required permissions).
28 | 2. From the management account, assign the `ManagementAccountAccess` group and permission set to the management account (see the Terraform sketch below).
29 | 3. Use SSO Elevator's `/group_access` command to request access to this `ManagementAccountAccess` group, which will add you to the group and grant you access to the management account. (This way you don't use the permission set directly, so you don't hit the limitation and still get access to the management account.)
30 |
31 | With this approach, you can reduce how often you use the management account and how many resources you deploy there, while still being able to manage the entire organization and temporarily access the management account.
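For reference, here is a minimal Terraform sketch of steps 1 and 2, applied in the management account. The group and permission set names, the attached `ReadOnlyAccess` policy, and the account ID are illustrative assumptions, not values the module requires:

```hcl
# Applied in the management account; names, policy, and account ID are placeholders.
data "aws_ssoadmin_instances" "this" {}

# Step 1: a group and permission set owned by the management account.
resource "aws_identitystore_group" "management_access" {
  display_name      = "ManagementAccountAccess"
  identity_store_id = tolist(data.aws_ssoadmin_instances.this.identity_store_ids)[0]
}

resource "aws_ssoadmin_permission_set" "management_access" {
  name         = "ManagementAccountAccess"
  instance_arn = tolist(data.aws_ssoadmin_instances.this.arns)[0]
}

resource "aws_ssoadmin_managed_policy_attachment" "management_access" {
  instance_arn       = tolist(data.aws_ssoadmin_instances.this.arns)[0]
  permission_set_arn = aws_ssoadmin_permission_set.management_access.arn
  managed_policy_arn = "arn:aws:iam::aws:policy/ReadOnlyAccess" # example policy
}

# Step 2: assign the group and permission set to the management account itself.
resource "aws_ssoadmin_account_assignment" "management_access" {
  instance_arn       = tolist(data.aws_ssoadmin_instances.this.arns)[0]
  permission_set_arn = aws_ssoadmin_permission_set.management_access.arn
  principal_id       = aws_identitystore_group.management_access.group_id
  principal_type     = "GROUP"
  target_id          = "123456789012" # placeholder management account ID
  target_type        = "AWS_ACCOUNT"
}
```

After that, a `/group_access` request for `ManagementAccountAccess` in Slack temporarily places the requester in the group, which is what grants access to the management account.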
32 |
-------------------------------------------------------------------------------- /examples/complete/.gitignore: --------------------------------------------------------------------------------
1 | builds/*
-------------------------------------------------------------------------------- /examples/complete/README.md: --------------------------------------------------------------------------------
1 | # Complete Example
2 |
-------------------------------------------------------------------------------- /examples/complete/main.tf: --------------------------------------------------------------------------------
1 | provider "aws" {
2 |   region = "eu-central-1"
3 |
4 |   # Make it faster by skipping something
5 |   skip_metadata_api_check = true
6 |   skip_region_validation = true
7 |   skip_credentials_validation = true
8 |   skip_requesting_account_id = true
9 | }
10 |
11 |
12 | data "aws_ssoadmin_instances" "this" {}
13 |
14 | # You will have to create the /sso-elevator/slack-signing-secret AWS SSM parameter
15 | # and store the Slack app signing secret there. If you have not created the app yet,
16 | # you can leave a dummy value there and update it after the Slack app is ready.
17 | data "aws_ssm_parameter" "sso_elevator_slack_signing_secret" {
18 |   name = "/sso-elevator/slack-signing-secret"
19 | }
20 |
21 | # You will have to create the /sso-elevator/slack-bot-token AWS SSM parameter
22 | # and store the Slack bot token there. If you have not created the app yet,
23 | # you can leave a dummy value there and update it after the Slack app is ready.
24 | data "aws_ssm_parameter" "sso_elevator_slack_bot_token" {
25 |   name = "/sso-elevator/slack-bot-token"
26 | }
27 |
28 | module "aws_sso_elevator" {
29 |   source = "../.."
30 |   aws_sns_topic_subscription_email = "email@gmail.com"
31 |
32 |   slack_signing_secret = data.aws_ssm_parameter.sso_elevator_slack_signing_secret.value
33 |   slack_bot_token = data.aws_ssm_parameter.sso_elevator_slack_bot_token.value
34 |   slack_channel_id = "***********"
35 |   schedule_expression = "cron(0 23 * * ? *)" # revoke access schedule expression
36 |   schedule_expression_for_check_on_inconsistency = "rate(1 hour)"
37 |   revoker_post_update_to_slack = true
38 |   send_dm_if_user_not_in_channel = true
39 |
40 |   sso_instance_arn = one(data.aws_ssoadmin_instances.this.arns)
41 |
42 |   approver_renotification_initial_wait_time = 15
43 |   approver_renotification_backoff_multiplier = 2
44 |
45 |   s3_bucket_partition_prefix = "logs/"
46 |   s3_bucket_name_for_audit_entry = "fivexl-sso-elevator"
47 |
48 |   s3_mfa_delete = false
49 |   s3_object_lock = true
50 |
51 |   s3_object_lock_configuration = {
52 |     rule = {
53 |       default_retention = {
54 |         mode = "GOVERNANCE"
55 |         years = 1
56 |       }
57 |     }
58 |   }
59 |
60 |   # s3_name_of_the_existing_bucket = "sso_elevator_audit_logs_bucket-"
61 |   # If you want to use your own bucket for storing SSO Elevator audit logs (logs about access requests), use the `s3_name_of_the_existing_bucket` variable.
62 |   # If `s3_name_of_the_existing_bucket` is left empty, the module creates a new bucket with a name based on `s3_bucket_name_for_audit_entry`.
63 |   # In that case, remember to specify `s3_logging` with at least the `target_bucket` key to enable access logging; otherwise module deployment will fail.
64 | s3_logging = { 65 | target_bucket = "some_access_logging_bucket" 66 | target_prefix = "some_prefix_for_access_logs" 67 | } 68 | 69 | # "Resource", "PermissionSet", "Approvers" can be a string or a list of strings 70 | # "Resource" & "PermissionSet" can be set to "*" to match all 71 | 72 | # Request will be approved automatically if: 73 | # - "AllowSelfApproval" is set to true, and requester is in "Approvers" list 74 | # - "ApprovalIsNotRequired" is set to true 75 | 76 | # If there is only one approver, and "AllowSelfApproval" isn't set to true, nobody will be able to approve the request 77 | 78 | config = [ 79 | { 80 | "ResourceType" : "Account", 81 | "Resource" : "account_id", 82 | "PermissionSet" : "*", 83 | "Approvers" : "email@gmail.com", 84 | "AllowSelfApproval" : true, 85 | }, 86 | { 87 | "ResourceType" : "Account", 88 | "Resource" : "account_id", 89 | "PermissionSet" : "Billing", 90 | "Approvers" : "email@gmail.com", 91 | "AllowSelfApproval" : true, 92 | }, 93 | { 94 | "ResourceType" : "Account", 95 | "Resource" : ["account_id", "account_id"], 96 | "PermissionSet" : "ReadOnlyPlus", 97 | "Approvers" : "email@gmail.com", 98 | }, 99 | { 100 | "ResourceType" : "Account", 101 | "Resource" : "*", 102 | "PermissionSet" : "ReadOnlyPlus", 103 | "ApprovalIsNotRequired" : true, 104 | }, 105 | { 106 | "ResourceType" : "Account", 107 | "Resource" : "account_id", 108 | "PermissionSet" : ["ReadOnlyPlus", "AdministratorAccess"], 109 | "Approvers" : ["email@gmail.com"], 110 | "AllowSelfApproval" : true, 111 | }, 112 | { 113 | 114 | # No rescuer hath the rescuer. 115 | # No Lord hath the champion, 116 | # no mother and no father, 117 | # only nothingness above. 118 | 119 | "ResourceType" : "Account", 120 | "Resource" : "*", 121 | "PermissionSet" : "*", 122 | "Approvers" : "org_wide_approver@gmail.com", 123 | "AllowSelfApproval" : true, 124 | }, 125 | ] 126 | } 127 | 128 | 129 | output "requester_api_endpoint_url" { 130 | description = "The URL to invoke the Lambda function" 131 | value = module.aws_sso_elevator.requester_api_endpoint_url 132 | } 133 | -------------------------------------------------------------------------------- /examples/complete/outputs.tf: -------------------------------------------------------------------------------- 1 | output "aws_sso_elevator_lambda_function_url" { 2 | value = module.aws_sso_elevator.lambda_function_url 3 | } 4 | -------------------------------------------------------------------------------- /examples/complete/variables.tf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fivexl/terraform-aws-sso-elevator/7cc9daaa59bf4b50621c8361576cb8afbb95c7a4/examples/complete/variables.tf -------------------------------------------------------------------------------- /examples/complete/versions.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_version = "~> 1.0" 3 | required_providers { 4 | aws = { 5 | source = "hashicorp/aws" 6 | version = ">= 4.64" 7 | } 8 | external = { 9 | source = "hashicorp/external" 10 | version = ">= 1.0" 11 | } 12 | local = { 13 | source = "hashicorp/local" 14 | version = ">= 1.0" 15 | } 16 | null = { 17 | source = "hashicorp/null" 18 | version = ">= 2.0" 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /layer/.python-version: -------------------------------------------------------------------------------- 1 | 3.10.10 2 | 
-------------------------------------------------------------------------------- /layer/deploy_requirements.txt: -------------------------------------------------------------------------------- 1 | aws-lambda-powertools[parser]==2.20.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 2 | --hash=sha256:026d64e1fe8e9d6ebfe2e3d6ed0a4c5b7e7a5dfa1ebdb99af3b6cd5fbc411b72 \ 3 | --hash=sha256:137a5e83ff6160e7b7c790d106bfa0bea78ba4147925b57c2797ece3719997f8 4 | pydantic==1.10.13 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 5 | --hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \ 6 | --hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \ 7 | --hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \ 8 | --hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \ 9 | --hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \ 10 | --hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \ 11 | --hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \ 12 | --hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \ 13 | --hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \ 14 | --hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \ 15 | --hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \ 16 | --hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \ 17 | --hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \ 18 | --hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \ 19 | --hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \ 20 | --hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \ 21 | --hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \ 22 | --hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \ 23 | --hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \ 24 | --hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \ 25 | --hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \ 26 | --hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \ 27 | --hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \ 28 | --hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \ 29 | --hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \ 30 | --hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \ 31 | --hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \ 32 | --hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \ 33 | --hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \ 34 | --hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \ 35 | --hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \ 36 | --hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \ 37 | --hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \ 38 | --hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \ 39 | 
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \ 40 | --hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0 41 | slack-bolt==1.18.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 42 | --hash=sha256:43b121acf78440303ce5129e53be36bdfe5d926a193daef7daf2860688e65dd3 \ 43 | --hash=sha256:63089a401ae3900c37698890249acd008a4651d06e86194edc7b72a00819bbac 44 | slack-sdk==3.21.3 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 45 | --hash=sha256:20829bdc1a423ec93dac903470975ebf3bc76fd3fd91a4dadc0eeffc940ecb0c \ 46 | --hash=sha256:de3c07b92479940b61cd68c566f49fbc9974c8f38f661d26244078f3903bb9cc 47 | typing-extensions==4.7.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 48 | --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ 49 | --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 50 | -------------------------------------------------------------------------------- /layer/poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "aws-lambda-powertools" 5 | version = "2.20.0" 6 | description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." 7 | optional = false 8 | python-versions = ">=3.7.4,<4.0.0" 9 | files = [ 10 | {file = "aws_lambda_powertools-2.20.0-py3-none-any.whl", hash = "sha256:026d64e1fe8e9d6ebfe2e3d6ed0a4c5b7e7a5dfa1ebdb99af3b6cd5fbc411b72"}, 11 | {file = "aws_lambda_powertools-2.20.0.tar.gz", hash = "sha256:137a5e83ff6160e7b7c790d106bfa0bea78ba4147925b57c2797ece3719997f8"}, 12 | ] 13 | 14 | [package.dependencies] 15 | pydantic = {version = ">=1.8.2,<2.0.0", optional = true, markers = "extra == \"parser\" or extra == \"all\""} 16 | typing-extensions = ">=4.6.2,<5.0.0" 17 | 18 | [package.extras] 19 | all = ["aws-xray-sdk (>=2.8.0,<3.0.0)", "fastjsonschema (>=2.14.5,<3.0.0)", "pydantic (>=1.8.2,<2.0.0)"] 20 | aws-sdk = ["boto3 (>=1.20.32,<2.0.0)"] 21 | parser = ["pydantic (>=1.8.2,<2.0.0)"] 22 | tracer = ["aws-xray-sdk (>=2.8.0,<3.0.0)"] 23 | validation = ["fastjsonschema (>=2.14.5,<3.0.0)"] 24 | 25 | [[package]] 26 | name = "pydantic" 27 | version = "1.10.13" 28 | description = "Data validation and settings management using python type hints" 29 | optional = false 30 | python-versions = ">=3.7" 31 | files = [ 32 | {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, 33 | {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, 34 | {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, 35 | {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, 36 | {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, 37 | {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, 38 | {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, 39 | {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, 40 | {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, 41 | {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, 42 | {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, 43 | {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, 44 | {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, 45 | {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, 46 | {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, 47 | {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, 48 | {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, 49 | {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, 50 | {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, 51 | {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, 52 | {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, 53 | {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, 54 | {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, 55 | {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, 56 | {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, 57 | {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, 58 | {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, 59 | {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, 60 | {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, 61 | {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, 62 | {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, 63 | {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, 64 | {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, 65 | {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, 66 | {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, 67 | {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, 68 | ] 69 | 70 | [package.dependencies] 71 | typing-extensions = ">=4.2.0" 72 | 73 | [package.extras] 74 | dotenv = ["python-dotenv (>=0.10.4)"] 75 | email = ["email-validator (>=1.0.3)"] 76 | 77 | [[package]] 78 | name = "slack-bolt" 79 | version = "1.18.0" 80 | description = "The Bolt Framework for Python" 81 | optional = false 82 | python-versions = ">=3.6" 83 | files = [ 84 | {file = "slack_bolt-1.18.0-py2.py3-none-any.whl", hash = "sha256:63089a401ae3900c37698890249acd008a4651d06e86194edc7b72a00819bbac"}, 85 | {file = "slack_bolt-1.18.0.tar.gz", hash = "sha256:43b121acf78440303ce5129e53be36bdfe5d926a193daef7daf2860688e65dd3"}, 86 | ] 87 | 88 | [package.dependencies] 89 | slack-sdk = ">=3.21.2,<4" 90 | 91 | [package.extras] 92 | adapter = ["CherryPy (>=18,<19)", "Django (>=3,<5)", "Flask (>=1,<3)", "Werkzeug (>=2,<3)", "boto3 (<=2)", "bottle (>=0.12,<1)", "chalice (>=1.28,<2)", "falcon (>=2,<4)", "fastapi (>=0.70.0,<1)", "gunicorn (>=20,<21)", "pyramid (>=1,<3)", "sanic (>=22,<23)", "starlette (>=0.14,<1)", "tornado (>=6,<7)", "uvicorn (<1)", "websocket-client (>=1.2.3,<2)"] 93 | adapter-testing = ["Flask (>=1,<2)", "Werkzeug (>=1,<2)", "boddle (>=0.2,<0.3)", "docker (>=5,<6)", "moto (>=3,<4)", "requests (>=2,<3)", "sanic-testing (>=0.7)"] 94 | async = ["aiohttp (>=3,<4)", "websockets (>=10,<11)"] 95 | testing = ["Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "Werkzeug (>=1,<2)", "aiohttp (>=3,<4)", "black (==22.8.0)", "click (<=8.0.4)", "itsdangerous (==2.0.1)", "pytest (>=6.2.5,<7)", "pytest-asyncio (>=0.18.2,<1)", "pytest-cov (>=3,<4)"] 96 | testing-without-asyncio = ["Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "Werkzeug (>=1,<2)", "black (==22.8.0)", "click (<=8.0.4)", "itsdangerous (==2.0.1)", "pytest (>=6.2.5,<7)", "pytest-cov (>=3,<4)"] 97 | 98 | [[package]] 99 | name = "slack-sdk" 100 | version = "3.21.3" 101 | description = "The Slack API Platform SDK for Python" 102 | optional = false 103 | python-versions = ">=3.6.0" 104 | files = [ 105 | {file = "slack_sdk-3.21.3-py2.py3-none-any.whl", hash = "sha256:de3c07b92479940b61cd68c566f49fbc9974c8f38f661d26244078f3903bb9cc"}, 106 | {file = "slack_sdk-3.21.3.tar.gz", hash = "sha256:20829bdc1a423ec93dac903470975ebf3bc76fd3fd91a4dadc0eeffc940ecb0c"}, 107 | ] 
108 | 109 | [package.extras] 110 | optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=10,<11)"] 111 | testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "Werkzeug (<2)", "black (==22.8.0)", "boto3 (<=2)", "click (==8.0.4)", "databases (>=0.5)", "flake8 (>=5,<6)", "itsdangerous (==1.1.0)", "moto (>=3,<4)", "psutil (>=5,<6)", "pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "pytest-cov (>=2,<3)"] 112 | 113 | [[package]] 114 | name = "typing-extensions" 115 | version = "4.7.1" 116 | description = "Backported and Experimental Type Hints for Python 3.7+" 117 | optional = false 118 | python-versions = ">=3.7" 119 | files = [ 120 | {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, 121 | {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, 122 | ] 123 | 124 | [metadata] 125 | lock-version = "2.0" 126 | python-versions = "^3.10.10" 127 | content-hash = "21343666cdf4720ff16398e1495cfaec154eb89a60f990cfc5f7bc074eed73a8" 128 | -------------------------------------------------------------------------------- /layer/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "layers" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["EreminAnton "] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.10.10" 10 | slack-bolt = "^1.18.0" 11 | aws-lambda-powertools = {extras = ["parser"], version = "^2.14.1"} 12 | 13 | [build-system] 14 | requires = ["poetry-core"] 15 | build-backend = "poetry.core.masonry.api" 16 | -------------------------------------------------------------------------------- /layers.tf: -------------------------------------------------------------------------------- 1 | module "sso_elevator_dependencies" { 2 | count = var.use_pre_created_image ? 
0 : 1 3 | source = "terraform-aws-modules/lambda/aws" 4 | version = "7.19.0" 5 | create_layer = true 6 | create_function = false 7 | layer_name = "sso_elevator_dependencies" 8 | description = "powertools-pydantic/boto3/slack_bolt" 9 | 10 | compatible_runtimes = ["python3.10"] 11 | build_in_docker = true 12 | runtime = "python${local.python_version}" 13 | docker_image = "lambda/python:${local.python_version}" 14 | docker_file = "${path.module}/src/docker/Dockerfile" 15 | source_path = [{ 16 | pip_requirements = "${path.module}/layer/deploy_requirements.txt" 17 | path = "${path.module}/layer" 18 | patterns = ["!python/.venv/.*"] 19 | prefix_in_zip = "python" 20 | }] 21 | } 22 | -------------------------------------------------------------------------------- /locals.tf: -------------------------------------------------------------------------------- 1 | locals { 2 | # Full python version is used for checking the python version before deployment in check_python_version.tf 3 | full_python_version = "3.10.10" 4 | # Python version is used for building the docker image in slack_handler_lambda.tf/perm_revoker_lambda.tf/layers.tf 5 | python_version = join(".", slice(split(".", local.full_python_version), 0, 2)) 6 | 7 | revoker_lambda_arn = "arn:aws:lambda:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:function:${var.revoker_lambda_name}" 8 | requester_lambda_arn = "arn:aws:lambda:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:function:${var.requester_lambda_name}" 9 | sso_instance_arn = var.sso_instance_arn == "" ? data.aws_ssoadmin_instances.all[0].arns[0] : var.sso_instance_arn 10 | 11 | # In case of default value for var.s3_bucket_name_for_audit_entry, we append a random string to the bucket name to make it unique. 12 | # In case of non-default value for var.s3_bucket_name_for_audit_entry, we use the value as is and expect the name is unique. 13 | # In case of var.s3_name_of_the_existing_bucket, we skip creating a new bucket and use the existing one. 14 | s3_bucket_name_for_audit_entry = var.s3_bucket_name_for_audit_entry != "sso-elevator-audit-entry" ? var.s3_bucket_name_for_audit_entry : "sso-elevator-audit-entry-${random_string.random.result}" 15 | s3_bucket_name = var.s3_name_of_the_existing_bucket != "" ? var.s3_name_of_the_existing_bucket : local.s3_bucket_name_for_audit_entry 16 | s3_bucket_arn = "arn:aws:s3:::${local.s3_bucket_name}" 17 | 18 | # HTTP API configuration 19 | api_resource_path = "/access-requester" 20 | api_stage_name = "default" 21 | full_api_url = var.create_api_gateway ? "${module.http_api[0].stage_invoke_url}${local.api_resource_path}" : "" 22 | } 23 | 24 | resource "random_string" "random" { 25 | length = 16 26 | special = false 27 | upper = false 28 | numeric = false 29 | } 30 | -------------------------------------------------------------------------------- /moved.tf: -------------------------------------------------------------------------------- 1 | # Start using s3 baseline for audit bucket 2 | moved { 3 | from = module.sso_elevator_bucket[0] 4 | to = module.audit_bucket[0].module.bucket_baseline[0] 5 | } 6 | -------------------------------------------------------------------------------- /outputs.tf: -------------------------------------------------------------------------------- 1 | output "sso_elevator_bucket_id" { 2 | description = "The name of the SSO elevator bucket." 3 | value = var.s3_name_of_the_existing_bucket == "" ? 
module.audit_bucket[0].s3_bucket_id : null 4 | } 5 | 6 | output "requester_api_endpoint_url" { 7 | description = "The full URL to invoke the API. Pass this URL into the Slack App manifest as the Request URL." 8 | value = var.create_api_gateway ? local.full_api_url : null 9 | } 10 | 11 | output "lambda_function_url" { 12 | description = "value for the access_requester lambda function URL" 13 | value = var.create_lambda_url ? module.access_requester_slack_handler.lambda_function_url : null 14 | } 15 | -------------------------------------------------------------------------------- /perm_revoker_lambda.tf: -------------------------------------------------------------------------------- 1 | module "access_revoker" { 2 | source = "terraform-aws-modules/lambda/aws" 3 | version = "7.19.0" 4 | 5 | function_name = var.revoker_lambda_name 6 | description = "Revokes temporary permissions" 7 | 8 | publish = true 9 | timeout = var.lambda_timeout 10 | memory_size = var.lambda_memory_size 11 | 12 | # Pull image from ecr 13 | package_type = var.use_pre_created_image ? "Image" : "Zip" 14 | create_package = var.use_pre_created_image ? false : true 15 | image_uri = var.use_pre_created_image ? "${var.ecr_owner_account_id}.dkr.ecr.${data.aws_region.current.name}.amazonaws.com/${var.ecr_repo_name}:revoker-${var.ecr_repo_tag}" : null 16 | 17 | # Build zip from source code using Docker 18 | hash_extra = var.use_pre_created_image ? "" : var.revoker_lambda_name 19 | handler = var.use_pre_created_image ? "" : "revoker.lambda_handler" 20 | runtime = var.use_pre_created_image ? "" : "python${local.python_version}" 21 | build_in_docker = var.use_pre_created_image ? false : true 22 | docker_image = var.use_pre_created_image ? null : "lambda/python:${local.python_version}" 23 | docker_file = var.use_pre_created_image ? null : "${path.module}/src/docker/Dockerfile" 24 | source_path = var.use_pre_created_image ? [] : [ 25 | { 26 | path = "${path.module}/src/" 27 | pip_requirements = "${path.module}/src/deploy_requirements.txt" 28 | artifacts_dir = "${path.root}/builds/" 29 | patterns = [ 30 | "!.venv/.*", 31 | "!.vscode/.*", 32 | "!__pycache__/.*", 33 | "!tests/.*", 34 | "!tools/.*", 35 | "!.hypothesis/.*", 36 | "!.pytest_cache/.*", 37 | ] 38 | } 39 | ] 40 | 41 | layers = var.use_pre_created_image ? 
[] : [ 42 | module.sso_elevator_dependencies[0].lambda_layer_arn, 43 | ] 44 | 45 | environment_variables = { 46 | LOG_LEVEL = var.log_level 47 | 48 | SLACK_SIGNING_SECRET = var.slack_signing_secret 49 | SLACK_BOT_TOKEN = var.slack_bot_token 50 | SLACK_CHANNEL_ID = var.slack_channel_id 51 | SCHEDULE_GROUP_NAME = var.schedule_group_name 52 | 53 | SSO_INSTANCE_ARN = local.sso_instance_arn 54 | STATEMENTS = jsonencode(var.config) 55 | GROUP_STATEMENTS = jsonencode(var.group_config) 56 | POWERTOOLS_LOGGER_LOG_EVENT = true 57 | 58 | POST_UPDATE_TO_SLACK = var.revoker_post_update_to_slack 59 | SCHEDULE_POLICY_ARN = aws_iam_role.eventbridge_role.arn 60 | REVOKER_FUNCTION_ARN = local.revoker_lambda_arn 61 | REVOKER_FUNCTION_NAME = var.revoker_lambda_name 62 | S3_BUCKET_FOR_AUDIT_ENTRY_NAME = local.s3_bucket_name 63 | S3_BUCKET_PREFIX_FOR_PARTITIONS = var.s3_bucket_partition_prefix 64 | SSO_ELEVATOR_SCHEDULED_REVOCATION_RULE_NAME = aws_cloudwatch_event_rule.sso_elevator_scheduled_revocation.name 65 | REQUEST_EXPIRATION_HOURS = var.request_expiration_hours 66 | MAX_PERMISSIONS_DURATION_TIME = var.max_permissions_duration_time 67 | PERMISSION_DURATION_LIST_OVERRIDE = jsonencode(var.permission_duration_list_override) 68 | 69 | APPROVER_RENOTIFICATION_INITIAL_WAIT_TIME = var.approver_renotification_initial_wait_time 70 | APPROVER_RENOTIFICATION_BACKOFF_MULTIPLIER = var.approver_renotification_backoff_multiplier 71 | SECONDARY_FALLBACK_EMAIL_DOMAINS = jsonencode(var.secondary_fallback_email_domains) 72 | SEND_DM_IF_USER_NOT_IN_CHANNEL = var.send_dm_if_user_not_in_channel 73 | } 74 | 75 | allowed_triggers = { 76 | cron = { 77 | principal = "events.amazonaws.com" 78 | source_arn = aws_cloudwatch_event_rule.sso_elevator_scheduled_revocation.arn 79 | } 80 | check_inconsistency = { 81 | principal = "events.amazonaws.com" 82 | source_arn = aws_cloudwatch_event_rule.sso_elevator_check_on_inconsistency.arn 83 | } 84 | } 85 | 86 | attach_policy_json = true 87 | policy_json = data.aws_iam_policy_document.revoker.json 88 | 89 | dead_letter_target_arn = var.aws_sns_topic_subscription_email != "" ? aws_sns_topic.dlq[0].arn : null 90 | attach_dead_letter_policy = var.aws_sns_topic_subscription_email != "" ? 
true : false 91 | 92 | # do not retry automatically 93 | maximum_retry_attempts = 0 94 | 95 | cloudwatch_logs_retention_in_days = var.logs_retention_in_days 96 | 97 | tags = var.tags 98 | } 99 | 100 | data "aws_iam_policy_document" "revoker" { 101 | statement { 102 | sid = "AllowDescribeRule" 103 | effect = "Allow" 104 | actions = [ 105 | "events:DescribeRule" 106 | ] 107 | resources = [ 108 | "arn:aws:events:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:rule/${var.event_brige_scheduled_revocation_rule_name}" 109 | ] 110 | } 111 | statement { 112 | sid = "AllowListSSOInstances" 113 | effect = "Allow" 114 | actions = [ 115 | "sso:ListInstances" 116 | ] 117 | resources = ["*"] 118 | } 119 | statement { 120 | sid = "AllowSSO" 121 | effect = "Allow" 122 | actions = [ 123 | "sso:ListAccountAssignments", 124 | "sso:DeleteAccountAssignment", 125 | "sso:DescribeAccountAssignmentDeletionStatus" 126 | ] 127 | resources = [ 128 | "arn:aws:sso:::instance/*", 129 | "arn:aws:sso:::permissionSet/*/*", 130 | "arn:aws:sso:::account/*" 131 | ] 132 | } 133 | statement { 134 | effect = "Allow" 135 | actions = [ 136 | "organizations:ListAccounts", 137 | "organizations:DescribeAccount", 138 | "sso:ListPermissionSets", 139 | "sso:DescribePermissionSet", 140 | "identitystore:ListUsers", 141 | "identitystore:DescribeUser", 142 | ] 143 | resources = ["*"] 144 | } 145 | statement { 146 | effect = "Allow" 147 | actions = [ 148 | "scheduler:DeleteSchedule", 149 | "iam:PassRole", 150 | "scheduler:CreateSchedule", 151 | "scheduler:ListSchedules", 152 | "scheduler:GetSchedule", 153 | ] 154 | resources = ["*"] 155 | } 156 | statement { 157 | effect = "Allow" 158 | actions = [ 159 | "s3:PutObject", 160 | ] 161 | resources = ["${local.s3_bucket_arn}/${var.s3_bucket_partition_prefix}/*"] 162 | } 163 | statement { 164 | effect = "Allow" 165 | actions = [ 166 | "identitystore:ListGroups", 167 | "identitystore:DescribeGroup", 168 | "identitystore:ListGroupMemberships", 169 | "identitystore:DeleteGroupMembership" 170 | ] 171 | resources = ["*"] 172 | } 173 | } 174 | 175 | resource "aws_cloudwatch_event_rule" "sso_elevator_scheduled_revocation" { 176 | name = var.event_brige_scheduled_revocation_rule_name 177 | description = "Triggers on schedule to revoke temporary permissions." 178 | schedule_expression = var.schedule_expression 179 | tags = var.tags 180 | } 181 | 182 | resource "aws_cloudwatch_event_target" "sso_elevator_scheduled_revocation" { 183 | rule = aws_cloudwatch_event_rule.sso_elevator_scheduled_revocation.name 184 | arn = module.access_revoker.lambda_function_arn 185 | input = jsonencode({ 186 | "action" : "sso_elevator_scheduled_revocation" 187 | }) 188 | } 189 | 190 | resource "aws_cloudwatch_event_rule" "sso_elevator_check_on_inconsistency" { 191 | name = var.event_brige_check_on_inconsistency_rule_name 192 | description = "Triggers on schedule to check on inconsistency." 
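  # NOTE (editor): a minimal sketch of how these rules reach the revoker.
  # Each aws_cloudwatch_event_target in this file passes a static JSON
  # "input" whose "action" field the revoker dispatches on (see the
  # discriminated union on "action" in src/events.py). This rule's target
  # delivers roughly:
  #
  #   { "action": "check_on_inconsistency" }
  #
  # so a new scheduled behaviour needs both a rule/target pair here and a
  # matching Literal action in src/events.py.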
193 | schedule_expression = var.schedule_expression_for_check_on_inconsistency 194 | tags = var.tags 195 | } 196 | 197 | resource "aws_cloudwatch_event_target" "check_inconsistency" { 198 | rule = aws_cloudwatch_event_rule.sso_elevator_check_on_inconsistency.name 199 | arn = module.access_revoker.lambda_function_arn 200 | input = jsonencode({ 201 | "action" : "check_on_inconsistency" 202 | }) 203 | } 204 | 205 | resource "aws_iam_role" "eventbridge_role" { 206 | name = var.schedule_role_name 207 | assume_role_policy = jsonencode({ 208 | Version = "2012-10-17" 209 | Statement = [ 210 | { 211 | Action = "sts:AssumeRole" 212 | Effect = "Allow" 213 | Principal = { 214 | Service = "lambda.amazonaws.com" 215 | } 216 | }, 217 | { 218 | Action = "sts:AssumeRole" 219 | Effect = "Allow" 220 | Principal = { 221 | Service = "scheduler.amazonaws.com" 222 | } 223 | }, 224 | ] 225 | }) 226 | } 227 | 228 | resource "aws_iam_role_policy" "eventbridge_policy" { 229 | name = "eventbridge_policy_for_sso_elevator" 230 | policy = jsonencode({ 231 | Version = "2012-10-17" 232 | Statement = [ 233 | { 234 | Action = [ 235 | "events:PutRule", 236 | "events:PutTargets" 237 | ] 238 | Effect = "Allow" 239 | Resource = "*" 240 | }, 241 | { 242 | Action = [ 243 | "lambda:InvokeFunction" 244 | ] 245 | Effect = "Allow" 246 | Resource = module.access_revoker.lambda_function_arn 247 | } 248 | ] 249 | }) 250 | 251 | role = aws_iam_role.eventbridge_role.id 252 | } 253 | 254 | resource "aws_lambda_permission" "eventbridge" { 255 | statement_id = "AllowEventBridge" 256 | action = "lambda:InvokeFunction" 257 | function_name = module.access_revoker.lambda_function_name 258 | principal = "scheduler.amazonaws.com" 259 | source_arn = aws_iam_role.eventbridge_role.arn 260 | } 261 | -------------------------------------------------------------------------------- /s3.tf: -------------------------------------------------------------------------------- 1 | module "audit_bucket" { 2 | count = var.s3_name_of_the_existing_bucket == "" ? 1 : 0 3 | source = "fivexl/account-baseline/aws//modules/s3_baseline" 4 | version = "1.5.0" 5 | 6 | bucket_name = local.s3_bucket_name 7 | 8 | versioning = { 9 | enabled = true 10 | mfa_delete = var.s3_mfa_delete 11 | } 12 | 13 | object_lock_enabled = var.s3_object_lock 14 | 15 | object_lock_configuration = var.s3_object_lock ? var.s3_object_lock_configuration : null 16 | logging = var.s3_logging 17 | } 18 | -------------------------------------------------------------------------------- /sheduler_group.tf: -------------------------------------------------------------------------------- 1 | resource "aws_scheduler_schedule_group" "one_time_schedule_group" { 2 | name = var.schedule_group_name 3 | tags = var.tags 4 | } 5 | -------------------------------------------------------------------------------- /slack_handler_lambda.tf: -------------------------------------------------------------------------------- 1 | module "access_requester_slack_handler" { 2 | source = "terraform-aws-modules/lambda/aws" 3 | version = "7.19.0" 4 | 5 | function_name = var.requester_lambda_name 6 | description = "Receive requests from slack and grants temporary access" 7 | 8 | publish = true 9 | timeout = var.lambda_timeout 10 | memory_size = var.lambda_memory_size 11 | 12 | # Pull image from ecr 13 | package_type = var.use_pre_created_image ? "Image" : "Zip" 14 | create_package = var.use_pre_created_image ? false : true 15 | image_uri = var.use_pre_created_image ? 
"${var.ecr_owner_account_id}.dkr.ecr.${data.aws_region.current.name}.amazonaws.com/${var.ecr_repo_name}:requester-${var.ecr_repo_tag}" : null 16 | 17 | # Build zip from source code using Docker 18 | hash_extra = var.use_pre_created_image ? "" : var.requester_lambda_name 19 | handler = var.use_pre_created_image ? "" : "main.lambda_handler" 20 | runtime = var.use_pre_created_image ? "" : "python${local.python_version}" 21 | build_in_docker = var.use_pre_created_image ? false : true 22 | docker_image = var.use_pre_created_image ? null : "lambda/python:${local.python_version}" 23 | docker_file = var.use_pre_created_image ? null : "${path.module}/src/docker/Dockerfile" 24 | source_path = var.use_pre_created_image ? [] : [ 25 | { 26 | path = "${path.module}/src/" 27 | pip_requirements = "${path.module}/src/deploy_requirements.txt" 28 | artifacts_dir = "${path.root}/builds/" 29 | patterns = [ 30 | "!.venv/.*", 31 | "!.vscode/.*", 32 | "!__pycache__/.*", 33 | "!tests/.*", 34 | "!tools/.*", 35 | "!.hypothesis/.*", 36 | "!.pytest_cache/.*", 37 | ] 38 | } 39 | ] 40 | 41 | layers = var.use_pre_created_image ? [] : [ 42 | module.sso_elevator_dependencies[0].lambda_layer_arn, 43 | ] 44 | 45 | environment_variables = { 46 | LOG_LEVEL = var.log_level 47 | 48 | SLACK_SIGNING_SECRET = var.slack_signing_secret 49 | SLACK_BOT_TOKEN = var.slack_bot_token 50 | SLACK_CHANNEL_ID = var.slack_channel_id 51 | SCHEDULE_GROUP_NAME = var.schedule_group_name 52 | 53 | 54 | SSO_INSTANCE_ARN = local.sso_instance_arn 55 | STATEMENTS = jsonencode(var.config) 56 | GROUP_STATEMENTS = jsonencode(var.group_config) 57 | POWERTOOLS_LOGGER_LOG_EVENT = true 58 | SCHEDULE_POLICY_ARN = aws_iam_role.eventbridge_role.arn 59 | REVOKER_FUNCTION_ARN = local.revoker_lambda_arn 60 | REVOKER_FUNCTION_NAME = var.revoker_lambda_name 61 | S3_BUCKET_FOR_AUDIT_ENTRY_NAME = local.s3_bucket_name 62 | S3_BUCKET_PREFIX_FOR_PARTITIONS = var.s3_bucket_partition_prefix 63 | SSO_ELEVATOR_SCHEDULED_REVOCATION_RULE_NAME = aws_cloudwatch_event_rule.sso_elevator_scheduled_revocation.name 64 | REQUEST_EXPIRATION_HOURS = var.request_expiration_hours 65 | APPROVER_RENOTIFICATION_INITIAL_WAIT_TIME = var.approver_renotification_initial_wait_time 66 | APPROVER_RENOTIFICATION_BACKOFF_MULTIPLIER = var.approver_renotification_backoff_multiplier 67 | MAX_PERMISSIONS_DURATION_TIME = var.max_permissions_duration_time 68 | PERMISSION_DURATION_LIST_OVERRIDE = jsonencode(var.permission_duration_list_override) 69 | SECONDARY_FALLBACK_EMAIL_DOMAINS = jsonencode(var.secondary_fallback_email_domains) 70 | SEND_DM_IF_USER_NOT_IN_CHANNEL = var.send_dm_if_user_not_in_channel 71 | } 72 | 73 | allowed_triggers = var.create_api_gateway ? { 74 | AllowExecutionFromAPIGateway = { 75 | service = "apigateway" 76 | source_arn = "${module.http_api[0].api_execution_arn}/*/*${local.api_resource_path}" 77 | } 78 | } : {} 79 | 80 | create_lambda_function_url = var.create_lambda_url ? true : false 81 | 82 | cors = var.create_lambda_url ? { 83 | allow_credentials = true 84 | allow_origins = ["https://slack.com"] 85 | allow_methods = ["POST"] 86 | max_age = 86400 87 | } : null 88 | 89 | attach_policy_json = true 90 | policy_json = data.aws_iam_policy_document.slack_handler.json 91 | 92 | dead_letter_target_arn = var.aws_sns_topic_subscription_email != "" ? aws_sns_topic.dlq[0].arn : null 93 | attach_dead_letter_policy = var.aws_sns_topic_subscription_email != "" ? 
true : false 94 | 95 | # do not retry automatically 96 | maximum_retry_attempts = 0 97 | 98 | cloudwatch_logs_retention_in_days = var.logs_retention_in_days 99 | 100 | tags = var.tags 101 | } 102 | 103 | # By default, the same policy is created by the "aws_lambda_function_url" resource 104 | # But for reason i was not able to find out, in some cases of creation with the "API Gateway" resource, the policy is not created 105 | # So we are creating the same policy but using the "aws_lambda_permission" resource. 106 | resource "aws_lambda_permission" "url" { 107 | count = var.create_lambda_url ? 1 : 0 108 | action = "lambda:InvokeFunctionUrl" 109 | function_name = module.access_requester_slack_handler.lambda_function_name 110 | principal = "*" 111 | statement_id = "AllowExecutionFromLambdaURL" 112 | function_url_auth_type = "NONE" 113 | # Adds the following condition keys, which are required for the function to be invoked from a URL: 114 | # "Condition": { 115 | # "StringEquals": { 116 | # "lambda:FunctionUrlAuthType": "None" 117 | # } 118 | # } 119 | } 120 | 121 | data "aws_iam_policy_document" "slack_handler" { 122 | statement { 123 | sid = "GetSAMLProvider" 124 | effect = "Allow" 125 | actions = [ 126 | "iam:GetSAMLProvider" 127 | ] 128 | resources = ["*"] 129 | } 130 | statement { 131 | sid = "UpdateSAMLProvider" 132 | effect = "Allow" 133 | actions = [ 134 | "iam:UpdateSAMLProvider", 135 | ] 136 | resources = ["*"] 137 | } 138 | 139 | statement { 140 | sid = "GetInvokeSelf" 141 | effect = "Allow" 142 | actions = [ 143 | "lambda:InvokeFunction", 144 | "lambda:GetFunction" 145 | ] 146 | resources = [local.requester_lambda_arn] 147 | } 148 | statement { 149 | effect = "Allow" 150 | actions = [ 151 | "s3:PutObject", 152 | ] 153 | resources = ["${local.s3_bucket_arn}/${var.s3_bucket_partition_prefix}/*"] 154 | } 155 | statement { 156 | sid = "AllowListSSOInstances" 157 | effect = "Allow" 158 | actions = [ 159 | "sso:ListInstances" 160 | ] 161 | resources = ["*"] 162 | } 163 | statement { 164 | sid = "AllowSSO" 165 | effect = "Allow" 166 | actions = [ 167 | "sso:CreateAccountAssignment", 168 | "sso:DescribeAccountAssignmentCreationStatus" 169 | ] 170 | resources = [ 171 | "arn:aws:sso:::instance/*", 172 | "arn:aws:sso:::permissionSet/*/*", 173 | "arn:aws:sso:::account/*" 174 | ] 175 | } 176 | statement { 177 | effect = "Allow" 178 | actions = [ 179 | "iam:PutRolePolicy", 180 | "iam:AttachRolePolicy", 181 | "iam:CreateRole", 182 | "iam:GetRole", 183 | "iam:ListAttachedRolePolicies", 184 | "iam:ListRolePolicies", 185 | ] 186 | resources = [ 187 | "arn:aws:iam::*:role/aws-reserved/sso.amazonaws.com/AWSReservedSSO_*", 188 | "arn:aws:iam::*:role/aws-reserved/sso.amazonaws.com/*/AWSReservedSSO_*" 189 | ] 190 | } 191 | statement { 192 | effect = "Allow" 193 | actions = [ 194 | "organizations:ListAccounts", 195 | "organizations:DescribeAccount", 196 | "sso:ListPermissionSets", 197 | "sso:DescribePermissionSet", 198 | "identitystore:ListUsers", 199 | "identitystore:DescribeUser", 200 | ] 201 | resources = ["*"] 202 | } 203 | statement { 204 | effect = "Allow" 205 | actions = [ 206 | "scheduler:CreateSchedule", 207 | "iam:PassRole", 208 | "scheduler:ListSchedules", 209 | "scheduler:GetSchedule", 210 | "scheduler:DeleteSchedule", 211 | ] 212 | resources = ["*"] 213 | } 214 | statement { 215 | effect = "Allow" 216 | actions = [ 217 | "identitystore:ListGroups", 218 | "identitystore:DescribeGroup", 219 | "identitystore:ListGroupMemberships", 220 | "identitystore:CreateGroupMembership", 221 | ] 222 | 
resources = ["*"] 223 | } 224 | } 225 | 226 | module "http_api" { 227 | count = var.create_api_gateway ? 1 : 0 228 | source = "terraform-aws-modules/apigateway-v2/aws" 229 | version = "5.0.0" 230 | name = "sso-elevator-access-requster" 231 | description = "API Gateway for SSO Elevator's access-requester Lambda, to communicate with Slack" 232 | protocol_type = "HTTP" 233 | 234 | cors_configuration = { 235 | allow_credentials = true 236 | allow_origins = ["https://slack.com"] 237 | allow_methods = ["POST"] 238 | max_age = 86400 239 | } 240 | 241 | routes = { 242 | "POST ${local.api_resource_path}" : { 243 | integration = { 244 | uri = "arn:aws:lambda:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:function:${var.requester_lambda_name}" 245 | type = "AWS_PROXY" 246 | } 247 | throttling_burst_limit = var.api_gateway_throttling_burst_limit 248 | throttling_rate_limit = var.api_gateway_throttling_rate_limit 249 | } 250 | } 251 | stage_name = local.api_stage_name 252 | create_domain_name = false 253 | tags = var.tags 254 | stage_access_log_settings = { 255 | create_log_group = true 256 | log_group_retention_in_days = var.logs_retention_in_days 257 | } 258 | } 259 | -------------------------------------------------------------------------------- /sns.tf: -------------------------------------------------------------------------------- 1 | resource "aws_sns_topic" "dlq" { 2 | count = var.aws_sns_topic_subscription_email != "" ? 1 : 0 3 | name = var.requester_lambda_name 4 | kms_master_key_id = "alias/aws/sns" # tfsec:ignore:aws-sns-topic-encryption-use-cmk 5 | tags = var.tags 6 | } 7 | 8 | resource "aws_sns_topic_subscription" "dlq" { 9 | count = var.aws_sns_topic_subscription_email != "" ? 1 : 0 10 | topic_arn = aws_sns_topic.dlq[0].arn 11 | protocol = "email" 12 | endpoint = var.aws_sns_topic_subscription_email 13 | } 14 | -------------------------------------------------------------------------------- /src/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | env 3 | .vscode/ 4 | .hypothesis 5 | tools/ 6 | socket_mode.py -------------------------------------------------------------------------------- /src/.python-version: -------------------------------------------------------------------------------- 1 | 3.10.10 2 | -------------------------------------------------------------------------------- /src/access_control.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from enum import Enum 3 | from typing import FrozenSet 4 | 5 | import boto3 6 | 7 | import config 8 | import entities 9 | import s3 10 | import schedule 11 | import sso 12 | from entities import BaseModel 13 | from statement import GroupStatement, Statement, get_affected_group_statements, get_affected_statements 14 | 15 | logger = config.get_logger("access_control") 16 | cfg = config.get_config() 17 | 18 | session = boto3._get_default_session() 19 | org_client = session.client("organizations") 20 | sso_client = session.client("sso-admin") 21 | identitystore_client = session.client("identitystore") 22 | schedule_client = session.client("scheduler") 23 | 24 | 25 | class DecisionReason(Enum): 26 | RequiresApproval = "RequiresApproval" 27 | ApprovalNotRequired = "ApprovalNotRequired" 28 | SelfApproval = "SelfApproval" 29 | NoStatements = "NoStatements" 30 | NoApprovers = "NoApprovers" 31 | 32 | 33 | class AccessRequestDecision(BaseModel): 34 | grant: bool 35 | reason: DecisionReason 36 | 
based_on_statements: FrozenSet[Statement] | FrozenSet[GroupStatement] 37 | approvers: FrozenSet[str] = frozenset() 38 | 39 | 40 | def determine_affected_statements( 41 | statements: FrozenSet[Statement] | FrozenSet[GroupStatement], 42 | account_id: str | None = None, 43 | permission_set_name: str | None = None, 44 | group_id: str | None = None, 45 | ) -> FrozenSet[Statement] | FrozenSet[GroupStatement]: 46 | if isinstance(statements, FrozenSet) and all(isinstance(item, Statement) for item in statements): 47 | return get_affected_statements(statements, account_id, permission_set_name) # type: ignore # noqa: PGH003 48 | 49 | if isinstance(statements, FrozenSet) and all(isinstance(item, GroupStatement) for item in statements): 50 | return get_affected_group_statements(statements, group_id) # type: ignore # noqa: PGH003 51 | 52 | # About type ignore: 53 | # For some reason, pylance is not able to understand that we already checked the type of the items in the set, 54 | # and shows a type error for "statements" 55 | raise TypeError("Statements contain mixed or unsupported types.") 56 | 57 | 58 | def make_decision_on_access_request( # noqa: PLR0911 59 | statements: FrozenSet[Statement] | FrozenSet[GroupStatement], 60 | requester_email: str, 61 | permission_set_name: str | None = None, 62 | account_id: str | None = None, 63 | group_id: str | None = None, 64 | ) -> AccessRequestDecision: 65 | affected_statements = determine_affected_statements(statements, account_id, permission_set_name, group_id) 66 | 67 | decision_based_on_statements: set[Statement] | set[GroupStatement] = set() 68 | potential_approvers = set() 69 | 70 | explicit_deny_self_approval = any( 71 | statement.allow_self_approval is False and requester_email in statement.approvers for statement in affected_statements 72 | ) 73 | explicit_deny_approval_not_required = any(statement.approval_is_not_required is False for statement in affected_statements) 74 | 75 | for statement in affected_statements: 76 | if statement.approval_is_not_required and not explicit_deny_approval_not_required: 77 | return AccessRequestDecision( 78 | grant=True, 79 | reason=DecisionReason.ApprovalNotRequired, 80 | based_on_statements=frozenset([statement]), # type: ignore # noqa: PGH003 81 | ) 82 | if requester_email in statement.approvers and statement.allow_self_approval and not explicit_deny_self_approval: 83 | return AccessRequestDecision( 84 | grant=True, 85 | reason=DecisionReason.SelfApproval, 86 | based_on_statements=frozenset([statement]), # type: ignore # noqa: PGH003 87 | ) 88 | 89 | decision_based_on_statements.add(statement) # type: ignore # noqa: PGH003 90 | potential_approvers.update(approver for approver in statement.approvers if approver != requester_email) 91 | 92 | if not decision_based_on_statements: 93 | return AccessRequestDecision( 94 | grant=False, 95 | reason=DecisionReason.NoStatements, 96 | based_on_statements=frozenset(decision_based_on_statements), 97 | ) 98 | 99 | if not potential_approvers: 100 | return AccessRequestDecision( 101 | grant=False, 102 | reason=DecisionReason.NoApprovers, 103 | based_on_statements=frozenset(decision_based_on_statements), 104 | ) 105 | 106 | return AccessRequestDecision( 107 | grant=False, 108 | reason=DecisionReason.RequiresApproval, 109 | approvers=frozenset(potential_approvers), 110 | based_on_statements=frozenset(decision_based_on_statements), 111 | ) 112 | 113 | 114 | class ApproveRequestDecision(BaseModel): 115 | """Decision on approver request 116 | 117 | grant: bool - Create account assignment, 
if grant is True 118 | permit: bool - Allow approver to make an action Approve if permit is True 119 | based_on_statements: FrozenSet[Statement] 120 | """ 121 | 122 | grant: bool 123 | permit: bool 124 | based_on_statements: FrozenSet[Statement] | FrozenSet[GroupStatement] 125 | 126 | 127 | def make_decision_on_approve_request( # noqa: PLR0913 128 | action: entities.ApproverAction, 129 | statements: frozenset[Statement], 130 | approver_email: str, 131 | requester_email: str, 132 | permission_set_name: str | None = None, 133 | account_id: str | None = None, 134 | group_id: str | None = None, 135 | ) -> ApproveRequestDecision: 136 | affected_statements = determine_affected_statements(statements, account_id, permission_set_name, group_id) 137 | 138 | for statement in affected_statements: 139 | if approver_email in statement.approvers: 140 | is_self_approval = approver_email == requester_email 141 | if is_self_approval and statement.allow_self_approval or not is_self_approval: 142 | return ApproveRequestDecision( 143 | grant=action == entities.ApproverAction.Approve, 144 | permit=True, 145 | based_on_statements=frozenset([statement]), # type: ignore # noqa: PGH003 146 | ) 147 | 148 | return ApproveRequestDecision( 149 | grant=False, 150 | permit=False, 151 | based_on_statements=affected_statements, # type: ignore # noqa: PGH003 152 | ) 153 | 154 | 155 | def execute_decision( # noqa: PLR0913 156 | decision: AccessRequestDecision | ApproveRequestDecision, 157 | permission_set_name: str, 158 | account_id: str, 159 | permission_duration: datetime.timedelta, 160 | approver: entities.slack.User, 161 | requester: entities.slack.User, 162 | reason: str, 163 | ) -> bool: 164 | logger.info("Executing decision") 165 | if not decision.grant: 166 | logger.info("Access request denied") 167 | return False # Temporary solution for testing 168 | 169 | sso_instance = sso.describe_sso_instance(sso_client, cfg.sso_instance_arn) 170 | permission_set = sso.get_permission_set_by_name(sso_client, sso_instance.arn, permission_set_name) 171 | sso_user_principal_id, secondary_domain_was_used = sso.get_user_principal_id_by_email( 172 | identity_store_client=identitystore_client, identity_store_id=sso_instance.identity_store_id, email=requester.email, cfg=cfg 173 | ) 174 | 175 | account_assignment = sso.UserAccountAssignment( 176 | instance_arn=sso_instance.arn, 177 | account_id=account_id, 178 | permission_set_arn=permission_set.arn, 179 | user_principal_id=sso_user_principal_id, 180 | ) 181 | 182 | logger.info("Creating account assignment", extra={"account_assignment": account_assignment}) 183 | 184 | account_assignment_status = sso.create_account_assignment_and_wait_for_result( 185 | sso_client, 186 | account_assignment, 187 | ) 188 | 189 | s3.log_operation( 190 | audit_entry=s3.AuditEntry( 191 | account_id=account_id, 192 | role_name=permission_set.name, 193 | reason=reason, 194 | requester_slack_id=requester.id, 195 | requester_email=requester.email, 196 | approver_slack_id=approver.id, 197 | approver_email=approver.email, 198 | request_id=account_assignment_status.request_id, 199 | operation_type="grant", 200 | permission_duration=permission_duration, 201 | sso_user_principal_id=sso_user_principal_id, 202 | audit_entry_type="account", 203 | secondary_domain_was_used=secondary_domain_was_used, 204 | ), 205 | ) 206 | 207 | schedule.schedule_revoke_event( 208 | permission_duration=permission_duration, 209 | schedule_client=schedule_client, 210 | approver=approver, 211 | requester=requester, 212 | 
user_account_assignment=sso.UserAccountAssignment(
213 |             instance_arn=sso_instance.arn,
214 |             account_id=account_id,
215 |             permission_set_arn=permission_set.arn,
216 |             user_principal_id=sso_user_principal_id,
217 |         ),
218 |     )
219 |     return True  # Temporary solution for testing
220 | 
221 | 
222 | def execute_decision_on_group_request(  # noqa: PLR0913
223 |     decision: AccessRequestDecision | ApproveRequestDecision,
224 |     group: entities.aws.SSOGroup,
225 |     permission_duration: datetime.timedelta,
226 |     approver: entities.slack.User,
227 |     requester: entities.slack.User,
228 |     reason: str,
229 |     identity_store_id: str,
230 | ) -> bool:
231 |     logger.info("Executing decision")
232 |     if not decision.grant:
233 |         logger.info("Access request denied")
234 |         return False  # Temporary solution for testing
235 | 
236 |     sso_user_principal_id, secondary_domain_was_used = sso.get_user_principal_id_by_email(
237 |         identity_store_client=identitystore_client,
238 |         identity_store_id=sso.describe_sso_instance(sso_client, cfg.sso_instance_arn).identity_store_id,
239 |         email=requester.email,
240 |         cfg=cfg,
241 |     )
242 | 
243 |     if membership_id := sso.is_user_in_group(
244 |         identity_store_id=identity_store_id,
245 |         group_id=group.id,
246 |         sso_user_id=sso_user_principal_id,
247 |         identity_store_client=identitystore_client,
248 |     ):
249 |         logger.info(
250 |             "User is already in the group", extra={"group_id": group.id, "user_id": sso_user_principal_id, "membership_id": membership_id}
251 |         )
252 |     else:
253 |         membership_id = sso.add_user_to_a_group(group.id, sso_user_principal_id, identity_store_id, identitystore_client)["MembershipId"]
254 |         logger.info(
255 |             "User added to the group", extra={"group_id": group.id, "user_id": sso_user_principal_id, "membership_id": membership_id}
256 |         )
257 | 
258 |     s3.log_operation(
259 |         audit_entry=s3.AuditEntry(
260 |             group_name=group.name,
261 |             group_id=group.id,
262 |             reason=reason,
263 |             requester_slack_id=requester.id,
264 |             requester_email=requester.email,
265 |             approver_slack_id=approver.id,
266 |             approver_email=approver.email,
267 |             operation_type="grant",
268 |             permission_duration=permission_duration,
269 |             audit_entry_type="group",
270 |             sso_user_principal_id=sso_user_principal_id,
271 |             secondary_domain_was_used=secondary_domain_was_used,
272 |         ),
273 |     )
274 | 
275 |     schedule.schedule_group_revoke_event(
276 |         permission_duration=permission_duration,
277 |         schedule_client=schedule_client,
278 |         approver=approver,
279 |         requester=requester,
280 |         group_assignment=sso.GroupAssignment(
281 |             identity_store_id=identity_store_id,
282 |             group_name=group.name,
283 |             group_id=group.id,
284 |             user_principal_id=sso_user_principal_id,
285 |             membership_id=membership_id,
286 |         ),
287 |     )
288 |     return True  # Temporary solution for testing
289 | 
--------------------------------------------------------------------------------
/src/check_python_version.py:
--------------------------------------------------------------------------------
1 | import json
2 | import sys
3 | 
4 | # Read the JSON data passed from Terraform
5 | raw_data = sys.stdin.read()
6 | data = json.loads(raw_data)
7 | 
8 | required_version = data["required_version"]
9 | 
10 | # Get the current Python version
11 | current_version = f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
12 | 
13 | if current_version != required_version:
14 |     # Write the error message to stderr
15 |     sys.stderr.write(
16 |         f"Local python version is incorrect: {current_version}. Required version is {required_version}. 
Please clean 'builds', and then use docker for deployment, or destroy and re-create sso_elevator with the correct python version." # noqa: E501 17 | ) 18 | # Exit with a status code of 1, indicating failure 19 | sys.exit(1) 20 | 21 | print(json.dumps({"current_version": current_version})) 22 | -------------------------------------------------------------------------------- /src/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Optional 3 | 4 | from aws_lambda_powertools import Logger 5 | from pydantic import BaseSettings, root_validator 6 | 7 | import entities 8 | from statement import Statement, GroupStatement 9 | 10 | 11 | def get_logger(service: Optional[str] = None, level: Optional[str] = None) -> Logger: 12 | kwargs = { 13 | "json_default": entities.json_default, 14 | "level": level or os.environ.get("LOG_LEVEL", "INFO"), 15 | } 16 | if service: 17 | kwargs["service"] = service 18 | return Logger(**kwargs) 19 | 20 | 21 | logger = get_logger(service="config") 22 | 23 | 24 | def parse_statement(_dict: dict) -> Statement: 25 | def to_set_if_list_or_str(v: list | str) -> frozenset[str]: 26 | if isinstance(v, list): 27 | return frozenset(v) 28 | return frozenset([v]) if isinstance(v, str) else v 29 | 30 | return Statement.parse_obj( 31 | { 32 | "permission_set": to_set_if_list_or_str(_dict["PermissionSet"]), 33 | "resource": to_set_if_list_or_str(_dict["Resource"]), 34 | "approvers": to_set_if_list_or_str(_dict.get("Approvers", set())), 35 | "resource_type": _dict.get("ResourceType"), 36 | "approval_is_not_required": _dict.get("ApprovalIsNotRequired"), 37 | "allow_self_approval": _dict.get("AllowSelfApproval"), 38 | } 39 | ) 40 | 41 | 42 | def parse_group_statement(_dict: dict) -> GroupStatement: 43 | def to_set_if_list_or_str(v: list | str) -> frozenset[str]: 44 | if isinstance(v, list): 45 | return frozenset(v) 46 | return frozenset([v]) if isinstance(v, str) else v 47 | 48 | return GroupStatement.parse_obj( 49 | { 50 | "resource": to_set_if_list_or_str(_dict["Resource"]), 51 | "approvers": to_set_if_list_or_str(_dict.get("Approvers", set())), 52 | "approval_is_not_required": _dict.get("ApprovalIsNotRequired"), 53 | "allow_self_approval": _dict.get("AllowSelfApproval"), 54 | } 55 | ) 56 | 57 | 58 | def get_groups_from_statements(statements: set[GroupStatement]) -> frozenset[str]: 59 | return frozenset(group for statement in statements for group in statement.resource) 60 | 61 | 62 | class Config(BaseSettings): 63 | schedule_policy_arn: str 64 | revoker_function_arn: str 65 | revoker_function_name: str 66 | schedule_group_name: str 67 | 68 | post_update_to_slack: bool = False 69 | slack_channel_id: str 70 | slack_bot_token: str 71 | 72 | approver_renotification_initial_wait_time: int 73 | approver_renotification_backoff_multiplier: int 74 | 75 | secondary_fallback_email_domains: list 76 | 77 | send_dm_if_user_not_in_channel: bool = True 78 | 79 | sso_instance_arn: str 80 | 81 | log_level: str = "INFO" 82 | slack_app_log_level: str = "INFO" 83 | statements: frozenset[Statement] 84 | group_statements: frozenset[GroupStatement] 85 | 86 | accounts: frozenset[str] 87 | permission_sets: frozenset[str] 88 | groups: frozenset[str] 89 | 90 | s3_bucket_for_audit_entry_name: str 91 | s3_bucket_prefix_for_partitions: str 92 | 93 | sso_elevator_scheduled_revocation_rule_name: str 94 | request_expiration_hours: int = 8 95 | 96 | max_permissions_duration_time: int 97 | permission_duration_list_override: list 98 | 99 | 
good_result_emoji: str = ":large_green_circle:" 100 | waiting_result_emoji: str = ":large_yellow_circle:" 101 | bad_result_emoji: str = ":red_circle:" 102 | discarded_result_emoji: str = ":white_circle:" 103 | 104 | class Config: 105 | frozen = True 106 | 107 | @root_validator(pre=True) 108 | def get_accounts_and_permission_sets(cls, values: dict) -> dict: # noqa: ANN101 109 | statements = ( 110 | {parse_statement(st) for st in values.get("statements", [])} # type: ignore # noqa: PGH003 111 | if values.get("statements") is not None 112 | else set() 113 | ) 114 | 115 | group_statements = ( 116 | {parse_group_statement(st) for st in values.get("group_statements", [])} # type: ignore # noqa: PGH003 117 | if values.get("group_statements") is not None 118 | else set() 119 | ) 120 | 121 | if not group_statements and not statements: 122 | logger.warning("No statements and group statements found") 123 | groups = get_groups_from_statements(group_statements) 124 | permission_sets = set() 125 | accounts = set() 126 | s3_bucket_prefix_for_partitions = values.get("s3_bucket_prefix_for_partitions", "").rstrip("/") 127 | for statement in statements: 128 | permission_sets.update(statement.permission_set) 129 | if statement.resource_type == "Account": 130 | accounts.update(statement.resource) 131 | return values | { 132 | "accounts": accounts, 133 | "permission_sets": permission_sets, 134 | "statements": frozenset(statements), 135 | "group_statements": frozenset(group_statements), 136 | "groups": groups, 137 | "s3_bucket_prefix_for_partitions": s3_bucket_prefix_for_partitions, 138 | } 139 | 140 | 141 | _config: Optional[Config] = None 142 | 143 | 144 | def get_config() -> Config: 145 | global _config # noqa: PLW0603 146 | if _config is None: 147 | _config = Config() # type: ignore # noqa: PGH003 148 | return _config 149 | -------------------------------------------------------------------------------- /src/deploy_requirements.txt: -------------------------------------------------------------------------------- 1 | boto3-stubs[dynamodb,events,identitystore,organizations,s3,scheduler,sso-admin]==1.34.160 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 2 | --hash=sha256:7e499b74d53e8eb456e539b3c82ccb6b88038579e3434fb131d51a8abe11dc83 \ 3 | --hash=sha256:c6b1dfeb3cae673eed596f01409339e2e0b955a5241a16cee69f29303d9b37de 4 | botocore-stubs==1.34.160 ; python_full_version >= "3.10.10" and python_version < "4.0" \ 5 | --hash=sha256:900953f3f926d205505776535fd131047ef89519734f1e5365d03ecbaec53cd9 \ 6 | --hash=sha256:b16122567dbf0860a76960ea4b94a396f16ba1a6afb9577dcc11dcd55047c42b 7 | croniter==1.4.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 8 | --hash=sha256:1a6df60eacec3b7a0aa52a8f2ef251ae3dd2a7c7c8b9874e73e791636d55a361 \ 9 | --hash=sha256:9595da48af37ea06ec3a9f899738f1b2c1c13da3c38cea606ef7cd03ea421128 10 | dnspython==2.6.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 11 | --hash=sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 \ 12 | --hash=sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc 13 | email-validator==2.2.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 14 | --hash=sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631 \ 15 | --hash=sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7 16 | idna==3.7 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 17 | 
--hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ 18 | --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 19 | mypy-boto3-dynamodb==1.34.148 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 20 | --hash=sha256:c85489b92cbbbe4f6997070372022df914d4cb8eb707fdc73aa18ce6ba25c578 \ 21 | --hash=sha256:f1a7aabff5c6e926b9b272df87251c9d6dfceb4c1fb159fb5a2df52062cd7e87 22 | mypy-boto3-events==1.34.151 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 23 | --hash=sha256:745dc718c618bec043180c8a78e52401b3536999848e1b0c20e9c7669eb2a3f3 \ 24 | --hash=sha256:c9b4d4d92b1ae3b2c4c48bf99bbb8b4ed472866715b6728f94a0f446c6f1fb9a 25 | mypy-boto3-identitystore==1.34.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 26 | --hash=sha256:1b8749a89ca1440608169eaa45afe2ec9aa0dbd02a754ee5cc62ce064426c23f \ 27 | --hash=sha256:39d26c323ada4dee2a8696e504ebb0afefb841f88c5ed69a3070c010e4c1208e 28 | mypy-boto3-organizations==1.34.139 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 29 | --hash=sha256:06fd26f02e8e918852ab681558215e607873749966759b5f58df1ff2e9a45392 \ 30 | --hash=sha256:6b42f6ee20ef44ecec1b9ccd66c122dff43f43e60815e4c810a23e00fc08ead7 31 | mypy-boto3-s3==1.34.158 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 32 | --hash=sha256:a875eb0ee91ba5ac6967291887f6e924eb9dc157966e8181da20533086218166 \ 33 | --hash=sha256:c01f1b2304ba7718c8561aaa2b6dc70fe438c91964256aa6ddc508d1cc553c66 34 | mypy-boto3-scheduler==1.34.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 35 | --hash=sha256:88ef3800caa0c838882a904a850b40cb7372adca83e3530397ff70cba977d62d \ 36 | --hash=sha256:fa09d08d63eda7b29523fa886366971c3f6233b459203974270468b2d7e18f37 37 | mypy-boto3-sso-admin==1.34.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 38 | --hash=sha256:6de7cf9327a10b800ac4ca2dbd1cf27d9a64aa5a647e923638a552c382059e3a \ 39 | --hash=sha256:9f871c83493be78a46a9df6bab209dc9bf0eb739822aca534901dd5ab2f91d61 40 | pydantic[email]==1.10.17 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 41 | --hash=sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f \ 42 | --hash=sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc \ 43 | --hash=sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b \ 44 | --hash=sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b \ 45 | --hash=sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b \ 46 | --hash=sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e \ 47 | --hash=sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3 \ 48 | --hash=sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7 \ 49 | --hash=sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227 \ 50 | --hash=sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f \ 51 | --hash=sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6 \ 52 | --hash=sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab \ 53 | --hash=sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad \ 54 | --hash=sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076 \ 55 | --hash=sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681 \ 56 | 
--hash=sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54 \ 57 | --hash=sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb \ 58 | --hash=sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7 \ 59 | --hash=sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe \ 60 | --hash=sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b \ 61 | --hash=sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab \ 62 | --hash=sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d \ 63 | --hash=sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0 \ 64 | --hash=sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75 \ 65 | --hash=sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741 \ 66 | --hash=sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63 \ 67 | --hash=sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd \ 68 | --hash=sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33 \ 69 | --hash=sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815 \ 70 | --hash=sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7 \ 71 | --hash=sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a \ 72 | --hash=sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655 \ 73 | --hash=sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773 \ 74 | --hash=sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c \ 75 | --hash=sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7 \ 76 | --hash=sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3 \ 77 | --hash=sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768 \ 78 | --hash=sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d \ 79 | --hash=sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688 \ 80 | --hash=sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f \ 81 | --hash=sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e \ 82 | --hash=sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991 \ 83 | --hash=sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a 84 | python-dateutil==2.9.0.post0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 85 | --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ 86 | --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 87 | six==1.16.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 88 | --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ 89 | --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 90 | types-awscrt==0.21.2 ; python_full_version >= "3.10.10" and python_version < "4.0" \ 91 | --hash=sha256:0839fe12f0f914d8f7d63ed777c728cb4eccc2d5d79a26e377d12b0604e7bf0e \ 92 | --hash=sha256:84a9f4f422ec525c314fdf54c23a1e73edfbcec968560943ca2d41cfae623b38 93 | types-s3transfer==0.10.1 ; python_full_version >= "3.10.10" and python_version < "4.0" \ 94 | --hash=sha256:02154cce46528287ad76ad1a0153840e0492239a0887e8833466eccf84b98da0 \ 95 | --hash=sha256:49a7c81fa609ac1532f8de3756e64b58afcecad8767933310228002ec7adff74 96 | typing-extensions==4.12.2 ; python_full_version >= "3.10.10" and python_full_version < 
"4.0.0" \ 97 | --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ 98 | --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 99 | -------------------------------------------------------------------------------- /src/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.10 2 | 3 | RUN pip install poetry==1.2.2 -------------------------------------------------------------------------------- /src/docker/Dockerfile.requester: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.10 2 | 3 | COPY src/ /var/task/ 4 | 5 | RUN pip install -r requirements.txt 6 | 7 | CMD ["main.lambda_handler"] 8 | -------------------------------------------------------------------------------- /src/docker/Dockerfile.revoker: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.10 2 | 3 | COPY src/ /var/task/ 4 | 5 | RUN pip install -r requirements.txt 6 | 7 | CMD ["revoker.lambda_handler"] -------------------------------------------------------------------------------- /src/entities/__init__.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: F401 2 | from . import aws, slack 3 | from .model import BaseModel, json_default 4 | from .slack import ApproverAction 5 | -------------------------------------------------------------------------------- /src/entities/aws.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from .model import BaseModel 4 | 5 | 6 | class Account(BaseModel): 7 | id: str 8 | name: str 9 | 10 | 11 | class PermissionSet(BaseModel): 12 | name: str 13 | arn: str 14 | description: Optional[str] 15 | 16 | 17 | class SSOGroup(BaseModel): 18 | name: str 19 | id: str 20 | description: Optional[str] 21 | identity_store_id: str 22 | 23 | 24 | class GroupMembership(BaseModel): 25 | user_principal_id: str 26 | group_id: str 27 | identity_store_id: str 28 | membership_id: str 29 | -------------------------------------------------------------------------------- /src/entities/model.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | import enum 3 | from typing import FrozenSet 4 | 5 | from pydantic import BaseModel as PydanticBaseModel 6 | 7 | 8 | class BaseModel(PydanticBaseModel): 9 | class Config: 10 | frozen = True 11 | 12 | def dict(self, *args, **kwargs) -> dict: # noqa: ANN101, ANN003, ANN002 13 | """Converts instance to dict representation of it. Workaround for https://github.com/pydantic/pydantic/issues/1090""" 14 | cp = super().copy() 15 | cp.Config.frozen = False 16 | for field_name in cp.__fields__.keys(): 17 | attr = cp.__getattribute__(field_name) 18 | if isinstance(attr, FrozenSet): 19 | cp.__setattr__(field_name, list(attr)) 20 | 21 | cp.Config.frozen = True 22 | # frozendict.frozendict(?) 
23 | return PydanticBaseModel.dict(cp, *args, **kwargs) 24 | 25 | 26 | def json_default(o: object) -> str | dict: 27 | if isinstance(o, PydanticBaseModel): 28 | return o.dict() 29 | elif dataclasses.is_dataclass(o): 30 | return dataclasses.asdict(o) 31 | elif isinstance(o, enum.Enum): 32 | return o.value 33 | return str(o) 34 | -------------------------------------------------------------------------------- /src/entities/slack.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | from .model import BaseModel 4 | 5 | 6 | class ApproverAction(Enum): 7 | Approve = "approve" 8 | Discard = "discard" 9 | 10 | 11 | class User(BaseModel): 12 | id: str 13 | email: str 14 | real_name: str 15 | -------------------------------------------------------------------------------- /src/errors.py: -------------------------------------------------------------------------------- 1 | import functools 2 | 3 | from aws_lambda_powertools import Logger 4 | from slack_bolt import BoltContext 5 | from slack_sdk import WebClient 6 | 7 | import config 8 | 9 | 10 | class ConfigurationError(Exception): 11 | ... 12 | 13 | 14 | class AccountAssignmentError(ConfigurationError): 15 | ... 16 | 17 | 18 | class NotFound(ConfigurationError): 19 | ... 20 | 21 | 22 | class SSOUserNotFound(ConfigurationError): 23 | ... 24 | 25 | 26 | logger = config.get_logger(service="errors") 27 | cfg = config.get_config() 28 | 29 | 30 | def error_handler(client: WebClient, e: Exception, logger: Logger, context: BoltContext, cfg: config.Config) -> None: 31 | logger.exception("An error occurred:", exc_info=e) 32 | user_id = context.get("user_id", "UNKNOWN_USER") 33 | 34 | if isinstance(e, SSOUserNotFound): 35 | text = ( 36 | f"<@{user_id}> Your request for AWS permissions failed because SSO Elevator could not find your user in AWS SSO. " 37 | "This often happens if your AWS SSO email differs from your Slack email. " 38 | "Check the logs for more details." 39 | ) 40 | else: 41 | text = f"<@{user_id}> Your request for AWS permissions encountered an unexpected error. Refer to the logs for more details." 42 | client.chat_postMessage(text=text, channel=cfg.slack_channel_id) 43 | 44 | 45 | def handle_errors(fn): # noqa: ANN001, ANN201 46 | # Default slack error handler (app.error) does not handle all exceptions. Or at least I did not find how to do it. 47 | # So I created this error handler. 
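    # NOTE (editor): usage sketch -- the wrapper below pulls "client" and
    # "context" out of **kwargs, so any decorated Bolt handler must receive
    # them as keyword arguments (Bolt injects both by parameter name):
    #
    #   @handle_errors
    #   def my_handler(body: dict, client: WebClient, context: BoltContext): ...
    #
    # (my_handler is a hypothetical example; see group.py for real handlers.)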
48 | @functools.wraps(fn) 49 | def wrapper(*args, **kwargs): # noqa: ANN002, ANN003, ANN202 50 | try: 51 | return fn(*args, **kwargs) 52 | except Exception as e: 53 | client: WebClient = kwargs["client"] 54 | context: BoltContext = kwargs["context"] 55 | error_handler(client=client, e=e, logger=logger, context=context, cfg=cfg) 56 | 57 | return wrapper 58 | -------------------------------------------------------------------------------- /src/events.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from typing import Literal 3 | 4 | from pydantic import Field, root_validator 5 | 6 | import entities 7 | import sso 8 | from entities.model import BaseModel 9 | 10 | 11 | class RevokeEvent(BaseModel): 12 | schedule_name: str 13 | approver: entities.slack.User 14 | requester: entities.slack.User 15 | user_account_assignment: sso.UserAccountAssignment 16 | permission_duration: timedelta 17 | 18 | 19 | class GroupRevokeEvent(BaseModel): 20 | schedule_name: str 21 | approver: entities.slack.User 22 | requester: entities.slack.User 23 | group_assignment: sso.GroupAssignment 24 | permission_duration: timedelta 25 | 26 | 27 | class ScheduledGroupRevokeEvent(BaseModel): 28 | action: Literal["event_bridge_group_revoke"] 29 | revoke_event: GroupRevokeEvent 30 | 31 | @root_validator(pre=True) 32 | def validate_payload(cls, values: dict) -> dict: # noqa: ANN101 33 | values["revoke_event"] = GroupRevokeEvent.parse_raw(values["revoke_event"]) 34 | return values 35 | 36 | 37 | class ScheduledRevokeEvent(BaseModel): 38 | action: Literal["event_bridge_revoke"] 39 | revoke_event: RevokeEvent 40 | 41 | @root_validator(pre=True) 42 | def validate_payload(cls, values: dict) -> dict: # noqa: ANN101 43 | values["revoke_event"] = RevokeEvent.parse_raw(values["revoke_event"]) 44 | return values 45 | 46 | 47 | class DiscardButtonsEvent(BaseModel): 48 | action: Literal["discard_buttons_event"] 49 | schedule_name: str 50 | time_stamp: str 51 | channel_id: str 52 | 53 | 54 | class CheckOnInconsistency(BaseModel): 55 | action: Literal["check_on_inconsistency"] 56 | 57 | 58 | class SSOElevatorScheduledRevocation(BaseModel): 59 | action: Literal["sso_elevator_scheduled_revocation"] 60 | 61 | 62 | class ApproverNotificationEvent(BaseModel): 63 | action: Literal["approvers_renotification"] 64 | schedule_name: str 65 | time_stamp: str 66 | channel_id: str 67 | time_to_wait_in_seconds: float 68 | 69 | 70 | class Event(BaseModel): 71 | __root__: ( 72 | ScheduledRevokeEvent 73 | | DiscardButtonsEvent 74 | | CheckOnInconsistency 75 | | SSOElevatorScheduledRevocation 76 | | ApproverNotificationEvent 77 | | ScheduledGroupRevokeEvent 78 | ) = Field(..., discriminator="action") 79 | -------------------------------------------------------------------------------- /src/group.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | import boto3 4 | from mypy_boto3_identitystore import IdentityStoreClient 5 | from mypy_boto3_sso_admin import SSOAdminClient 6 | from mypy_boto3_scheduler import EventBridgeSchedulerClient 7 | from slack_bolt import Ack, BoltContext 8 | from slack_sdk import WebClient 9 | 10 | from slack_sdk.web.slack_response import SlackResponse 11 | 12 | import access_control 13 | import config 14 | import entities 15 | import schedule 16 | import slack_helpers 17 | import sso 18 | from errors import handle_errors 19 | 20 | logger = config.get_logger(service="main") 21 | cfg = 
config.get_config()
22 | 
23 | session = boto3._get_default_session()
24 | sso_client: SSOAdminClient = session.client("sso-admin")
25 | identity_store_client: IdentityStoreClient = session.client("identitystore")
26 | schedule_client: EventBridgeSchedulerClient = session.client("scheduler")
27 | sso_instance = sso.describe_sso_instance(sso_client, cfg.sso_instance_arn)
28 | identity_store_id = sso_instance.identity_store_id
29 | 
30 | 
31 | @handle_errors
32 | def handle_request_for_group_access_submittion(
33 |     body: dict,
34 |     ack: Ack,  # noqa: ARG001
35 |     client: WebClient,
36 |     context: BoltContext,  # noqa: ARG001
37 | ) -> SlackResponse | None:
38 |     logger.info("Handling request for group access submission")
39 |     request = slack_helpers.RequestForGroupAccessView.parse(body)
40 |     logger.info("View submitted", extra={"view": request})
41 |     requester = slack_helpers.get_user(client, id=request.requester_slack_id)
42 | 
43 |     group = sso.describe_group(identity_store_id, request.group_id, identity_store_client)
44 | 
45 |     decision = access_control.make_decision_on_access_request(
46 |         cfg.group_statements,
47 |         requester_email=requester.email,
48 |         group_id=request.group_id,
49 |     )
50 | 
51 |     show_buttons = bool(decision.approvers)
52 |     slack_response = client.chat_postMessage(
53 |         blocks=slack_helpers.build_approval_request_message_blocks(
54 |             sso_client=sso_client,
55 |             identity_store_client=identity_store_client,
56 |             slack_client=client,
57 |             requester_slack_id=request.requester_slack_id,
58 |             group=group,
59 |             reason=request.reason,
60 |             permission_duration=request.permission_duration,
61 |             show_buttons=show_buttons,
62 |             color_coding_emoji=cfg.waiting_result_emoji,
63 |         ),
64 |         channel=cfg.slack_channel_id,
65 |         text=f"Request for access to {group.name} group from {requester.real_name}",
66 |     )
67 | 
68 |     if show_buttons:
69 |         ts = slack_response["ts"]
70 |         if ts is not None:
71 |             schedule.schedule_discard_buttons_event(
72 |                 schedule_client=schedule_client,  # type: ignore # noqa: PGH003
73 |                 time_stamp=ts,
74 |                 channel_id=cfg.slack_channel_id,
75 |             )
76 |             schedule.schedule_approver_notification_event(
77 |                 schedule_client=schedule_client,  # type: ignore # noqa: PGH003
78 |                 message_ts=ts,
79 |                 channel_id=cfg.slack_channel_id,
80 |                 time_to_wait=timedelta(
81 |                     minutes=cfg.approver_renotification_initial_wait_time,
82 |                 ),
83 |             )
84 | 
85 |     match decision.reason:
86 |         case access_control.DecisionReason.ApprovalNotRequired:
87 |             text = "Approval for this Group is not required. Request will be approved automatically."
88 |             dm_text = "Approval for this Group is not required. Your request will be approved automatically."
89 |             color_coding_emoji = cfg.good_result_emoji
90 |         case access_control.DecisionReason.SelfApproval:
91 |             text = "Self approval is allowed and requester is an approver. Request will be approved automatically."
92 |             dm_text = "Self approval is allowed and you are an approver. Your request will be approved automatically."
93 |             color_coding_emoji = cfg.good_result_emoji
94 |         case access_control.DecisionReason.RequiresApproval:
95 |             approvers = [slack_helpers.get_user_by_email(client, email) for email in decision.approvers]
96 |             mention_approvers = " ".join(f"<@{approver.id}>" for approver in approvers)
97 |             text = f"{mention_approvers} there is a request waiting for approval."
98 |             dm_text = f"Your request is waiting for approval from {mention_approvers}."
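            # NOTE (editor): mention_approvers above is a space-joined list of
            # Slack mentions, e.g. "<@U111AAA> <@U222BBB>" for two hypothetical
            # approver IDs; Slack renders these as pings in the channel message.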
99 | color_coding_emoji = cfg.waiting_result_emoji 100 | case access_control.DecisionReason.NoApprovers: 101 | text = "Nobody can approve this request." 102 | dm_text = "Nobody can approve this request." 103 | color_coding_emoji = cfg.bad_result_emoji 104 | case access_control.DecisionReason.NoStatements: 105 | text = "There are no statements for this Group." 106 | dm_text = "There are no statements for this Group." 107 | color_coding_emoji = cfg.bad_result_emoji 108 | 109 | is_user_in_channel = slack_helpers.check_if_user_is_in_channel(client, cfg.slack_channel_id, requester.id) 110 | 111 | logger.info(f"Sending message to the channel {cfg.slack_channel_id}, message: {text}") 112 | client.chat_postMessage(text=text, thread_ts=slack_response["ts"], channel=cfg.slack_channel_id) 113 | if cfg.send_dm_if_user_not_in_channel and not is_user_in_channel: 114 | logger.info(f"User {requester.id} is not in the channel. Sending DM with message: {dm_text}") 115 | client.chat_postMessage( 116 | channel=requester.id, 117 | text=f""" 118 | {dm_text} You are receiving this message in a DM because you are not a member of the channel <#{cfg.slack_channel_id}>. 119 | """, 120 | ) 121 | 122 | blocks = slack_helpers.HeaderSectionBlock.set_color_coding( 123 | blocks=slack_response["message"]["blocks"], 124 | color_coding_emoji=color_coding_emoji, 125 | ) 126 | client.chat_update( 127 | channel=cfg.slack_channel_id, 128 | ts=slack_response["ts"], 129 | blocks=blocks, 130 | text=text, 131 | ) 132 | 133 | access_control.execute_decision_on_group_request( 134 | group=group, 135 | permission_duration=request.permission_duration, 136 | approver=requester, 137 | requester=requester, 138 | reason=request.reason, 139 | decision=decision, 140 | identity_store_id=identity_store_id, 141 | ) 142 | 143 | if decision.grant: 144 | client.chat_postMessage( 145 | channel=cfg.slack_channel_id, 146 | text=f"Permissions granted to <@{requester.id}>", 147 | thread_ts=slack_response["ts"], 148 | ) 149 | if not is_user_in_channel and cfg.send_dm_if_user_not_in_channel: 150 | client.chat_postMessage( 151 | channel=requester.id, 152 | text="Your request was processed, permissions granted.", 153 | ) 154 | 155 | 156 | cache_for_dublicate_requests = {} 157 | 158 | 159 | @handle_errors 160 | def handle_group_button_click(body: dict, client: WebClient, context: BoltContext) -> SlackResponse: # type: ignore # noqa: PGH003 ARG001 161 | logger.info("Handling button click") 162 | payload = slack_helpers.ButtonGroupClickedPayload.parse_obj(body) 163 | logger.info("Button click payload", extra={"payload": payload}) 164 | approver = slack_helpers.get_user(client, id=payload.approver_slack_id) 165 | requester = slack_helpers.get_user(client, id=payload.request.requester_slack_id) 166 | is_user_in_channel = slack_helpers.check_if_user_is_in_channel(client, cfg.slack_channel_id, requester.id) 167 | 168 | if ( 169 | cache_for_dublicate_requests.get("requester_slack_id") == payload.request.requester_slack_id 170 | and cache_for_dublicate_requests["group_id"] == payload.request.group_id 171 | ): 172 | return client.chat_postMessage( 173 | channel=payload.channel_id, 174 | text=f"<@{approver.id}> request is already in progress, please wait for the result.", 175 | thread_ts=payload.thread_ts, 176 | ) 177 | cache_for_dublicate_requests["requester_slack_id"] = payload.request.requester_slack_id 178 | cache_for_dublicate_requests["group_id"] = payload.request.group_id 179 | 180 | if payload.action == entities.ApproverAction.Discard: 181 | blocks = 
slack_helpers.HeaderSectionBlock.set_color_coding( 182 | blocks=payload.message["blocks"], 183 | color_coding_emoji=cfg.bad_result_emoji, 184 | ) 185 | 186 | blocks = slack_helpers.remove_blocks(blocks, block_ids=["buttons"]) 187 | blocks.append(slack_helpers.button_click_info_block(payload.action, approver.id).to_dict()) 188 | 189 | text = f"Request was discarded by <@{approver.id}>." 190 | dm_text = f"Your request was discarded by <@{approver.id}>." 191 | client.chat_update( 192 | channel=payload.channel_id, 193 | ts=payload.thread_ts, 194 | blocks=blocks, 195 | text=text, 196 | ) 197 | 198 | cache_for_dublicate_requests.clear() 199 | if cfg.send_dm_if_user_not_in_channel and not is_user_in_channel: 200 | logger.info(f"User {requester.id} is not in the channel. Sending DM with message: {dm_text}") 201 | client.chat_postMessage(channel=requester.id, text=dm_text) 202 | return client.chat_postMessage( 203 | channel=payload.channel_id, 204 | text=text, 205 | thread_ts=payload.thread_ts, 206 | ) 207 | 208 | decision = access_control.make_decision_on_approve_request( 209 | action=payload.action, 210 | statements=cfg.group_statements, # type: ignore # noqa: PGH003 211 | group_id=payload.request.group_id, 212 | approver_email=approver.email, 213 | requester_email=requester.email, 214 | ) 215 | 216 | logger.info("Decision on request was made", extra={"decision": decision}) 217 | 218 | if not decision.permit: 219 | cache_for_dublicate_requests.clear() 220 | return client.chat_postMessage( 221 | channel=payload.channel_id, 222 | text=f"<@{approver.id}> you cannot approve this request", 223 | thread_ts=payload.thread_ts, 224 | ) 225 | 226 | text = f"Permissions granted to <@{requester.id}> by <@{approver.id}>." 227 | dm_text = f"Your request was approved by <@{approver.id}>. Permissions granted." 228 | blocks = slack_helpers.HeaderSectionBlock.set_color_coding( 229 | blocks=payload.message["blocks"], 230 | color_coding_emoji=cfg.good_result_emoji, 231 | ) 232 | 233 | blocks = slack_helpers.remove_blocks(blocks, block_ids=["buttons"]) 234 | blocks.append(slack_helpers.button_click_info_block(payload.action, approver.id).to_dict()) 235 | client.chat_update( 236 | channel=payload.channel_id, 237 | ts=payload.thread_ts, 238 | blocks=blocks, 239 | text=text, 240 | ) 241 | 242 | access_control.execute_decision_on_group_request( 243 | decision=decision, 244 | group=sso.describe_group(identity_store_id, payload.request.group_id, identity_store_client), 245 | permission_duration=payload.request.permission_duration, 246 | approver=approver, 247 | requester=requester, 248 | reason=payload.request.reason, 249 | identity_store_id=identity_store_id, 250 | ) 251 | cache_for_dublicate_requests.clear() 252 | if cfg.send_dm_if_user_not_in_channel and not is_user_in_channel: 253 | logger.info(f"User {requester.id} is not in the channel. 
Sending DM with message: {dm_text}") 254 | client.chat_postMessage(channel=requester.id, text=dm_text) 255 | return client.chat_postMessage( 256 | channel=payload.channel_id, 257 | text=text, 258 | thread_ts=payload.thread_ts, 259 | ) 260 | -------------------------------------------------------------------------------- /src/main.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from typing import Callable 3 | 4 | import boto3 5 | from slack_bolt import Ack, App, BoltContext 6 | from slack_bolt.adapter.aws_lambda import SlackRequestHandler 7 | from slack_sdk import WebClient 8 | from slack_sdk.web.slack_response import SlackResponse 9 | 10 | import access_control 11 | import config 12 | import entities 13 | import group 14 | import organizations 15 | import schedule 16 | import slack_helpers 17 | import sso 18 | from errors import SSOUserNotFound, handle_errors 19 | 20 | logger = config.get_logger(service="main") 21 | 22 | session = boto3.Session() 23 | schedule_client = session.client("scheduler") 24 | org_client = session.client("organizations") 25 | sso_client = session.client("sso-admin") 26 | identity_store_client = session.client("identitystore") 27 | 28 | cfg = config.get_config() 29 | app = App( 30 | process_before_response=True, 31 | logger=config.get_logger(service="slack", level=cfg.slack_app_log_level), # type: ignore # noqa: PGH003 32 | ) 33 | 34 | 35 | def lambda_handler(event: dict, context): # noqa: ANN001, ANN201 36 | slack_handler = SlackRequestHandler(app=app) 37 | return slack_handler.handle(event, context) 38 | 39 | 40 | trigger_view_map = {} 41 | # To update the view, it is necessary to know the view_id. It is returned when the view is opened. 42 | # But the shortcut 'request_for_access' is handled by two functions: the first opens the view and the second updates it. 43 | # So we need to store the view_id somewhere. Since the trigger_id is unique for each request 44 | # and available in both functions, we can use it as a key. The value is the view_id. 45 | 46 | 47 | def build_initial_form_handler( 48 | view_class: type[slack_helpers.RequestForAccessView] | type[slack_helpers.RequestForGroupAccessView], 49 | ) -> Callable[[WebClient, dict, Ack], SlackResponse]: 50 | def show_initial_form_for_request( 51 | client: WebClient, 52 | body: dict, 53 | ack: Ack, 54 | ) -> SlackResponse: 55 | ack() 56 | if view_class == slack_helpers.RequestForGroupAccessView and not cfg.group_statements: 57 | return client.chat_postMessage( 58 | channel=cfg.slack_channel_id, 59 | text="Group statements are not configured. Please check the configuration or use another /command.", 60 | ) 61 | if view_class == slack_helpers.RequestForAccessView and not cfg.statements: 62 | return client.chat_postMessage( 63 | channel=cfg.slack_channel_id, 64 | text="Statements are not configured. Please check the configuration 
or use another /command.", 65 | ) 66 | 67 | # Try getting the SSO user to verify that the user exists. 68 | try: 69 | sso.get_user_principal_id_by_email( 70 | identity_store_client=identity_store_client, 71 | identity_store_id=sso.describe_sso_instance(sso_client, cfg.sso_instance_arn).identity_store_id, 72 | email=slack_helpers.get_user(client, id=body.get("user", {}).get("id")).email, 73 | cfg=cfg, 74 | ) 75 | 76 | except SSOUserNotFound: 77 | client.chat_postMessage( 78 | channel=cfg.slack_channel_id, 79 | text=f"<@{body.get('user', {}).get('id') or 'UNKNOWN_USER'}>, " 80 | "your request for AWS permissions failed because SSO Elevator could not find your user in SSO. " 81 | "This often happens if your AWS SSO email differs from your Slack email. " 82 | "Please check the SSO Elevator logs for more details.", 83 | ) 84 | raise 85 | 86 | logger.info(f"Showing initial form for {view_class.__name__}") 87 | logger.debug("Request body", extra={"body": body}) 88 | trigger_id = body["trigger_id"] 89 | response = client.views_open(trigger_id=trigger_id, view=view_class.build()) 90 | trigger_view_map[trigger_id] = response.data["view"]["id"] # type: ignore # noqa: PGH003 91 | return response 92 | 93 | return show_initial_form_for_request 94 | 95 | 96 | def load_select_options_for_group_access_request(client: WebClient, body: dict) -> SlackResponse: 97 | logger.info("Loading select options for view (groups)") 98 | logger.debug("Request body", extra={"body": body}) 99 | sso_instance = sso.describe_sso_instance(sso_client, cfg.sso_instance_arn) 100 | groups = sso.get_groups_from_config(sso_instance.identity_store_id, identity_store_client, cfg) 101 | trigger_id = body["trigger_id"] 102 | 103 | view = slack_helpers.RequestForGroupAccessView.update_with_groups(groups=groups) 104 | return client.views_update(view_id=trigger_view_map[trigger_id], view=view) 105 | 106 | 107 | def load_select_options_for_account_access_request(client: WebClient, body: dict) -> SlackResponse: 108 | logger.info("Loading select options for view (accounts and permission sets)") 109 | logger.debug("Request body", extra={"body": body}) 110 | 111 | accounts = organizations.get_accounts_from_config(client=org_client, cfg=cfg) 112 | permission_sets = sso.get_permission_sets_from_config(client=sso_client, cfg=cfg) 113 | trigger_id = body["trigger_id"] 114 | 115 | view = slack_helpers.RequestForAccessView.update_with_accounts_and_permission_sets(accounts=accounts, permission_sets=permission_sets) 116 | return client.views_update(view_id=trigger_view_map[trigger_id], view=view) 117 | 118 | 119 | app.shortcut("request_for_access")( 120 | build_initial_form_handler(view_class=slack_helpers.RequestForAccessView), # type: ignore # noqa: PGH003 121 | load_select_options_for_account_access_request, 122 | ) 123 | 124 | app.shortcut("request_for_group_membership")( 125 | build_initial_form_handler(view_class=slack_helpers.RequestForGroupAccessView), # type: ignore # noqa: PGH003 126 | load_select_options_for_group_access_request, 127 | ) 128 | 129 | cache_for_dublicate_requests = {} 130 | 131 | 132 | @handle_errors 133 | def handle_button_click(body: dict, client: WebClient, context: BoltContext) -> SlackResponse: # noqa: ARG001 134 | logger.info("Handling button click") 135 | try: 136 | payload = slack_helpers.ButtonClickedPayload.parse_obj(body) 137 | except Exception as e: 138 | logger.exception(e) 139 | return group.handle_group_button_click(body, client, context) 140 | 141 | logger.info("Button click payload", extra={"payload": payload}) 142 | # 
The approver might be from a different Slack workspace; if so, get_user will fail. 143 | try: 144 | approver = slack_helpers.get_user(client, id=payload.approver_slack_id) 145 | except Exception as e: 146 | logger.warning(f"Failed to get approver user info: {e}") 147 | return client.chat_postMessage( 148 | channel=payload.channel_id, 149 | text=f"""Unable to process this approval: approver information could not be retrieved. 150 | This may happen if the approver <@{payload.approver_slack_id}> is from a different Slack workspace. 151 | Please check the module configuration.""", 152 | thread_ts=payload.thread_ts, 153 | ) 154 | requester = slack_helpers.get_user(client, id=payload.request.requester_slack_id) 155 | is_user_in_channel = slack_helpers.check_if_user_is_in_channel(client, cfg.slack_channel_id, requester.id) 156 | 157 | if ( 158 | cache_for_dublicate_requests.get("requester_slack_id") == payload.request.requester_slack_id 159 | and cache_for_dublicate_requests.get("account_id") == payload.request.account_id 160 | and cache_for_dublicate_requests.get("permission_set_name") == payload.request.permission_set_name 161 | ): 162 | return client.chat_postMessage( 163 | channel=payload.channel_id, 164 | text=f"<@{approver.id}> request is already in progress, please wait for the result.", 165 | thread_ts=payload.thread_ts, 166 | ) 167 | cache_for_dublicate_requests["requester_slack_id"] = payload.request.requester_slack_id 168 | cache_for_dublicate_requests["account_id"] = payload.request.account_id 169 | cache_for_dublicate_requests["permission_set_name"] = payload.request.permission_set_name 170 | 171 | if payload.action == entities.ApproverAction.Discard: 172 | blocks = slack_helpers.HeaderSectionBlock.set_color_coding( 173 | blocks=payload.message["blocks"], 174 | color_coding_emoji=cfg.bad_result_emoji, 175 | ) 176 | 177 | blocks = slack_helpers.remove_blocks(blocks, block_ids=["buttons"]) 178 | blocks.append(slack_helpers.button_click_info_block(payload.action, approver.id).to_dict()) 179 | 180 | text = f"Request was discarded by <@{approver.id}>." 181 | dm_text = f"Your request was discarded by <@{approver.id}>." 182 | client.chat_update( 183 | channel=payload.channel_id, 184 | ts=payload.thread_ts, 185 | blocks=blocks, 186 | text=text, 187 | ) 188 | 189 | cache_for_dublicate_requests.clear() 190 | if cfg.send_dm_if_user_not_in_channel and not is_user_in_channel: 191 | logger.info(f"User {requester.id} is not in the channel. Sending DM with message: {dm_text}") 192 | client.chat_postMessage(channel=requester.id, text=dm_text) 193 | return client.chat_postMessage( 194 | channel=payload.channel_id, 195 | text=text, 196 | thread_ts=payload.thread_ts, 197 | ) 198 | 199 | decision = access_control.make_decision_on_approve_request( 200 | action=payload.action, 201 | statements=cfg.statements, 202 | account_id=payload.request.account_id, 203 | permission_set_name=payload.request.permission_set_name, 204 | approver_email=approver.email, 205 | requester_email=requester.email, 206 | ) 207 | logger.info("Decision on request was made", extra={"decision": decision}) 208 | 209 | if not decision.permit: 210 | cache_for_dublicate_requests.clear() 211 | return client.chat_postMessage( 212 | channel=payload.channel_id, 213 | text=f"<@{approver.id}> you cannot approve this request", 214 | thread_ts=payload.thread_ts, 215 | ) 216 | 217 | text = f"Permissions granted to <@{requester.id}> by <@{approver.id}>." 218 | dm_text = f"Your request was approved by <@{approver.id}>. Permissions granted." 
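# Approval path: recolor the original request message, drop the action buttons, and append a note about which approver acted.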
219 | blocks = slack_helpers.HeaderSectionBlock.set_color_coding( 220 | blocks=payload.message["blocks"], 221 | color_coding_emoji=cfg.good_result_emoji, 222 | ) 223 | 224 | blocks = slack_helpers.remove_blocks(blocks, block_ids=["buttons"]) 225 | blocks.append(slack_helpers.button_click_info_block(payload.action, approver.id).to_dict()) 226 | is_user_in_channel = slack_helpers.check_if_user_is_in_channel(client, cfg.slack_channel_id, requester.id) 227 | client.chat_update( 228 | channel=payload.channel_id, 229 | ts=payload.thread_ts, 230 | blocks=blocks, 231 | text=text, 232 | ) 233 | 234 | access_control.execute_decision( 235 | decision=decision, 236 | permission_set_name=payload.request.permission_set_name, 237 | account_id=payload.request.account_id, 238 | permission_duration=payload.request.permission_duration, 239 | approver=approver, 240 | requester=requester, 241 | reason=payload.request.reason, 242 | ) 243 | cache_for_dublicate_requests.clear() 244 | if cfg.send_dm_if_user_not_in_channel and not is_user_in_channel: 245 | logger.info(f"User {requester.id} is not in the channel. Sending DM with message: {dm_text}") 246 | client.chat_postMessage(channel=requester.id, text=dm_text) 247 | return client.chat_postMessage( 248 | channel=payload.channel_id, 249 | text=text, 250 | thread_ts=payload.thread_ts, 251 | ) 252 | 253 | 254 | def acknowledge_request(ack: Ack): # noqa: ANN201 255 | ack() 256 | 257 | 258 | app.action(entities.ApproverAction.Approve.value)( 259 | ack=acknowledge_request, 260 | lazy=[handle_button_click], 261 | ) 262 | 263 | app.action(entities.ApproverAction.Discard.value)( 264 | ack=acknowledge_request, 265 | lazy=[handle_button_click], 266 | ) 267 | 268 | 269 | @handle_errors 270 | def handle_request_for_access_submittion( # noqa: PLR0915, PLR0912 271 | body: dict, 272 | ack: Ack, # noqa: ARG001 273 | client: WebClient, 274 | context: BoltContext, # noqa: ARG001 275 | ) -> SlackResponse | None: 276 | logger.info("Handling request for access submission") 277 | request = slack_helpers.RequestForAccessView.parse(body) 278 | logger.info("View submitted", extra={"view": request}) 279 | requester = slack_helpers.get_user(client, id=request.requester_slack_id) 280 | decision = access_control.make_decision_on_access_request( 281 | cfg.statements, 282 | account_id=request.account_id, 283 | permission_set_name=request.permission_set_name, 284 | requester_email=requester.email, 285 | ) 286 | logger.info("Decision on request was made", extra={"decision": decision}) 287 | 288 | account = organizations.describe_account(org_client, request.account_id) 289 | 290 | show_buttons = bool(decision.approvers) 291 | slack_response = client.chat_postMessage( 292 | blocks=slack_helpers.build_approval_request_message_blocks( 293 | sso_client=sso_client, 294 | identity_store_client=identity_store_client, 295 | slack_client=client, 296 | requester_slack_id=request.requester_slack_id, 297 | account=account, 298 | role_name=request.permission_set_name, 299 | reason=request.reason, 300 | permission_duration=request.permission_duration, 301 | show_buttons=show_buttons, 302 | color_coding_emoji=cfg.waiting_result_emoji, 303 | ), 304 | channel=cfg.slack_channel_id, 305 | text=f"Request for access to {account.name} account from {requester.real_name}", 306 | ) 307 | 308 | if show_buttons: 309 | ts = slack_response["ts"] 310 | if ts is not None: 311 | schedule.schedule_discard_buttons_event( 312 | schedule_client=schedule_client, 313 | time_stamp=ts, 314 | channel_id=cfg.slack_channel_id, 315 | ) 316 | 
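# Schedules the first approver reminder after the configured initial wait; the scheduled event targets the revoker Lambda (see schedule.py), which re-notifies approvers.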
schedule.schedule_approver_notification_event( 317 | schedule_client=schedule_client, 318 | message_ts=ts, 319 | channel_id=cfg.slack_channel_id, 320 | time_to_wait=timedelta( 321 | minutes=cfg.approver_renotification_initial_wait_time, 322 | ), 323 | ) 324 | 325 | match decision.reason: 326 | case access_control.DecisionReason.ApprovalNotRequired: 327 | text = "Approval for this Permission Set & Account is not required. Request will be approved automatically." 328 | dm_text = "Approval for this Permission Set & Account is not required. Your request will be approved automatically." 329 | color_coding_emoji = cfg.good_result_emoji 330 | case access_control.DecisionReason.SelfApproval: 331 | text = "Self approval is allowed and requester is an approver. Request will be approved automatically." 332 | dm_text = "Self approval is allowed and you are an approver. Your request will be approved automatically." 333 | color_coding_emoji = cfg.good_result_emoji 334 | case access_control.DecisionReason.RequiresApproval: 335 | approvers, approver_emails_not_found = slack_helpers.find_approvers_in_slack( 336 | client, decision.approvers # type: ignore # noqa: PGH003 337 | ) 338 | if not approvers: 339 | text = """ 340 | None of the approvers from the configuration could be found in Slack. 341 | Request cannot be processed. Please discard the request and check the module configuration. 342 | """ 343 | dm_text = """ 344 | Your request cannot be processed because none of the approvers from the configuration could be found in Slack. 345 | Please discard the request and check the module configuration. 346 | """ 347 | color_coding_emoji = cfg.bad_result_emoji 348 | else: 349 | mention_approvers = " ".join(f"<@{approver.id}>" for approver in approvers) 350 | text = f"{mention_approvers} there is a request waiting for approval." 351 | if approver_emails_not_found: 352 | missing_emails = ", ".join(approver_emails_not_found) 353 | text += f""" 354 | Note: Some approvers ({missing_emails}) could not be found in Slack. 355 | Please discard the request and check the module configuration. 356 | """ 357 | dm_text = f"Your request is waiting for approval from {mention_approvers}." 358 | color_coding_emoji = cfg.waiting_result_emoji 359 | case access_control.DecisionReason.NoApprovers: 360 | text = "Nobody can approve this request." 361 | dm_text = "Nobody can approve this request." 362 | color_coding_emoji = cfg.bad_result_emoji 363 | case access_control.DecisionReason.NoStatements: 364 | text = "There are no statements for this Permission Set & Account." 365 | dm_text = "There are no statements for this Permission Set & Account." 366 | color_coding_emoji = cfg.bad_result_emoji 367 | 368 | is_user_in_channel = slack_helpers.check_if_user_is_in_channel(client, cfg.slack_channel_id, requester.id) 369 | 370 | logger.info(f"Sending message to the channel {cfg.slack_channel_id}, message: {text}") 371 | client.chat_postMessage(text=text, thread_ts=slack_response["ts"], channel=cfg.slack_channel_id) 372 | if cfg.send_dm_if_user_not_in_channel and not is_user_in_channel: 373 | logger.info(f"User {requester.id} is not in the channel. Sending DM with message: {dm_text}") 374 | client.chat_postMessage( 375 | channel=requester.id, 376 | text=f""" 377 | {dm_text} You are receiving this message in a DM because you are not a member of the channel <#{cfg.slack_channel_id}>. 
378 | """, 379 | ) 380 | 381 | blocks = slack_helpers.HeaderSectionBlock.set_color_coding( 382 | blocks=slack_response["message"]["blocks"], 383 | color_coding_emoji=color_coding_emoji, 384 | ) 385 | client.chat_update( 386 | channel=cfg.slack_channel_id, 387 | ts=slack_response["ts"], 388 | blocks=blocks, 389 | text=text, 390 | ) 391 | 392 | access_control.execute_decision( 393 | decision=decision, 394 | permission_set_name=request.permission_set_name, 395 | account_id=request.account_id, 396 | permission_duration=request.permission_duration, 397 | approver=requester, 398 | requester=requester, 399 | reason=request.reason, 400 | ) 401 | 402 | if decision.grant: 403 | client.chat_postMessage( 404 | channel=cfg.slack_channel_id, 405 | text=f"Permissions granted to <@{requester.id}>", 406 | thread_ts=slack_response["ts"], 407 | ) 408 | if not is_user_in_channel and cfg.send_dm_if_user_not_in_channel: 409 | client.chat_postMessage( 410 | channel=requester.id, 411 | text="Your request was processed, permissions granted.", 412 | ) 413 | 414 | 415 | app.view(slack_helpers.RequestForAccessView.CALLBACK_ID)( 416 | ack=acknowledge_request, 417 | lazy=[handle_request_for_access_submittion], 418 | ) 419 | 420 | app.view(slack_helpers.RequestForGroupAccessView.CALLBACK_ID)( 421 | ack=acknowledge_request, 422 | lazy=[group.handle_request_for_group_access_submittion], 423 | ) 424 | 425 | 426 | @app.action("duration_picker_action") 427 | def handle_duration_picker_action(ack): # noqa: ANN201, ANN001 428 | ack() 429 | -------------------------------------------------------------------------------- /src/organizations.py: -------------------------------------------------------------------------------- 1 | from mypy_boto3_organizations import OrganizationsClient, type_defs 2 | 3 | import config 4 | from entities.aws import Account 5 | 6 | 7 | def parse_account(td: type_defs.AccountTypeDef) -> Account: 8 | return Account.parse_obj({"id": td.get("Id"), "name": td.get("Name")}) 9 | 10 | 11 | def list_accounts(client: OrganizationsClient) -> list[Account]: 12 | accounts = [] 13 | paginator = client.get_paginator("list_accounts") 14 | for page in paginator.paginate(): 15 | accounts.extend(page["Accounts"]) 16 | return [parse_account(account) for account in accounts] 17 | 18 | 19 | def describe_account(client: OrganizationsClient, account_id: str) -> Account: 20 | account = client.describe_account(AccountId=account_id)["Account"] 21 | return parse_account(account) 22 | 23 | 24 | def get_accounts_from_config(client: OrganizationsClient, cfg: config.Config) -> list[Account]: 25 | if "*" in cfg.accounts: 26 | accounts = list_accounts(client) 27 | else: 28 | accounts = [ac for ac in list_accounts(client) if ac.id in cfg.accounts] 29 | return accounts 30 | -------------------------------------------------------------------------------- /src/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "sso-elevator" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["EreminAnton "] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.10.10" 10 | boto3-stubs = {extras = ["dynamodb", "identitystore", "organizations", "s3", "scheduler", "sso-admin", "events"], version = "^1.26.134"} 11 | pydantic = {extras = ["email"], version = "^1.10.13"} 12 | croniter = "^1.3.14" 13 | 14 | # Full list of dependencies, for development. 15 | # Can be installed with `poetry install --with dev`. 
16 | [tool.poetry.group.dev.dependencies] 17 | pytest = "^7.2.2" 18 | black = "^24.3.0" 19 | python = "^3.10.10" 20 | boto3-stubs = {extras = ["dynamodb", "identitystore", "organizations", "s3", "scheduler", "sso-admin", "events"], version = "^1.26.134"} 21 | pydantic = {extras = ["email"], version = "^1.10.13"} 22 | croniter = "^1.3.14" 23 | aws-lambda-powertools = {extras = ["parser"], version = "^2.14.1"} 24 | 25 | # Dependencies listed below are used by the lambda function, but passed to it via the Lambda Layer. 26 | boto3 = "^1.26.97" 27 | slack-bolt = "^1.17.0" 28 | hypothesis = {extras = ["ghostwriter"], version = "^6.71.0"} 29 | ruff = "^0.0.267" 30 | 31 | 32 | [build-system] 33 | requires = ["poetry-core"] 34 | build-backend = "poetry.core.masonry.api" 35 | 36 | 37 | [tool.black] 38 | line-length = 140 39 | 40 | [tool.ruff] 41 | select = [ 42 | # pyflakes 43 | "F", 44 | # flake8-annotations 45 | "ANN", 46 | # flake8-unused-arguments 47 | "ARG", 48 | # pycodestyle 49 | "E", "W", 50 | # flake8-2020 51 | "YTT", 52 | # flake8-bugbear 53 | "B", 54 | # flake8-quotes 55 | "Q", 56 | # flake8-debugger 57 | "T10", 58 | # pylint 59 | "PLC", "PLE", "PLR", "PLW", 60 | # misc lints 61 | "PIE", 62 | # flake8-pyi 63 | "PYI", 64 | # tidy imports 65 | "TID", 66 | # implicit string concatenation 67 | "ISC", 68 | # type-checking imports 69 | "TCH", 70 | # comprehensions 71 | "C4", 72 | # pygrep-hooks 73 | "PGH" 74 | ] 75 | ignore = [] 76 | 77 | # Allow autofix for all enabled rules (when `--fix` is provided). 78 | fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"] 79 | unfixable = [] 80 | 81 | # Exclude a variety of commonly ignored directories. 82 | exclude = [ 83 | ".bzr", 84 | ".direnv", 85 | ".eggs", 86 | ".git", 87 | ".git-rewrite", 88 | ".hg", 89 | ".mypy_cache", 90 | ".nox", 91 | ".pants.d", 92 | ".pytype", 93 | ".ruff_cache", 94 | ".svn", 95 | ".tox", 96 | ".venv", 97 | "__pypackages__", 98 | "_build", 99 | "buck-out", 100 | "build", 101 | "dist", 102 | "node_modules", 103 | "venv", 104 | ] 105 | 106 | # Same as Black. 107 | line-length = 140 108 | 109 | # Allow unused variables when underscore-prefixed. 110 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" 111 | 112 | # Assume Python 3.10. 113 | target-version = "py310" 114 | 115 | [tool.ruff.mccabe] 116 | # Unlike Flake8, default to a complexity level of 10. 
117 | max-complexity = 10 118 | 119 | [tool.pyright] 120 | reportGeneralTypeIssues = true 121 | 122 | [tool.pytest.ini_options] 123 | minversion = "6.0" 124 | addopts = "-ra -q" 125 | testpaths = ["src/tests"] 126 | xfail_strict = true 127 | filterwarnings = [] 128 | -------------------------------------------------------------------------------- /src/requirements.txt: -------------------------------------------------------------------------------- 1 | attrs==24.2.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 2 | --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ 3 | --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 4 | aws-lambda-powertools[parser]==2.43.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 5 | --hash=sha256:48116250c1771c7b8d4977ad2d475271074d86964107ccfd3fc6775e51984d88 \ 6 | --hash=sha256:5c371a0c0430cf7bca1696748cb0d85079aac2c51056cbee10e5435029b35ca4 7 | black==24.8.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 8 | --hash=sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6 \ 9 | --hash=sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e \ 10 | --hash=sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f \ 11 | --hash=sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018 \ 12 | --hash=sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e \ 13 | --hash=sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd \ 14 | --hash=sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4 \ 15 | --hash=sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed \ 16 | --hash=sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2 \ 17 | --hash=sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42 \ 18 | --hash=sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af \ 19 | --hash=sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb \ 20 | --hash=sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368 \ 21 | --hash=sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb \ 22 | --hash=sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af \ 23 | --hash=sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed \ 24 | --hash=sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47 \ 25 | --hash=sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2 \ 26 | --hash=sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a \ 27 | --hash=sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c \ 28 | --hash=sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920 \ 29 | --hash=sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1 30 | boto3-stubs[dynamodb,events,identitystore,organizations,s3,scheduler,sso-admin]==1.34.160 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 31 | --hash=sha256:7e499b74d53e8eb456e539b3c82ccb6b88038579e3434fb131d51a8abe11dc83 \ 32 | --hash=sha256:c6b1dfeb3cae673eed596f01409339e2e0b955a5241a16cee69f29303d9b37de 33 | boto3==1.34.160 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 34 | --hash=sha256:79450f92188a8b992b3d0b802028acadf448bc6fdde877c3262c9f94d74d1c7d \ 35 | 
--hash=sha256:bf3153bf5d66be2bb2112edc94eb143c0cba3fb502c5591437bd1c54f57eb559 36 | botocore-stubs==1.34.160 ; python_full_version >= "3.10.10" and python_version < "4.0" \ 37 | --hash=sha256:900953f3f926d205505776535fd131047ef89519734f1e5365d03ecbaec53cd9 \ 38 | --hash=sha256:b16122567dbf0860a76960ea4b94a396f16ba1a6afb9577dcc11dcd55047c42b 39 | botocore==1.34.160 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 40 | --hash=sha256:39bcf31318a062a8a9260bf7044131694ed18f019568d2eba0a22164fdca49bd \ 41 | --hash=sha256:a5fd531c640fb2dc8b83f264efbb87a6e33b9c9f66ebbb1c61b42908f2786cac 42 | click==8.1.7 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 43 | --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ 44 | --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de 45 | colorama==0.4.6 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" and (sys_platform == "win32" or platform_system == "Windows") \ 46 | --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ 47 | --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 48 | croniter==1.4.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 49 | --hash=sha256:1a6df60eacec3b7a0aa52a8f2ef251ae3dd2a7c7c8b9874e73e791636d55a361 \ 50 | --hash=sha256:9595da48af37ea06ec3a9f899738f1b2c1c13da3c38cea606ef7cd03ea421128 51 | dnspython==2.6.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 52 | --hash=sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 \ 53 | --hash=sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc 54 | email-validator==2.2.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 55 | --hash=sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631 \ 56 | --hash=sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7 57 | exceptiongroup==1.2.2 ; python_full_version >= "3.10.10" and python_version < "3.11" \ 58 | --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ 59 | --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc 60 | hypothesis[ghostwriter]==6.111.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 61 | --hash=sha256:04d0703621d9fdd61c079a4dda07babbe7ebf6d34eee6ad9484a2af0ee721801 \ 62 | --hash=sha256:7a51f678da3719a04a3ef61cd241384dd93b49f35d7cce22833745c66ac1d507 63 | idna==3.7 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 64 | --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ 65 | --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 66 | iniconfig==2.0.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 67 | --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ 68 | --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 69 | jmespath==1.0.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 70 | --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ 71 | --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe 72 | mypy-boto3-dynamodb==1.34.148 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 73 | --hash=sha256:c85489b92cbbbe4f6997070372022df914d4cb8eb707fdc73aa18ce6ba25c578 \ 74 | 
--hash=sha256:f1a7aabff5c6e926b9b272df87251c9d6dfceb4c1fb159fb5a2df52062cd7e87 75 | mypy-boto3-events==1.34.151 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 76 | --hash=sha256:745dc718c618bec043180c8a78e52401b3536999848e1b0c20e9c7669eb2a3f3 \ 77 | --hash=sha256:c9b4d4d92b1ae3b2c4c48bf99bbb8b4ed472866715b6728f94a0f446c6f1fb9a 78 | mypy-boto3-identitystore==1.34.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 79 | --hash=sha256:1b8749a89ca1440608169eaa45afe2ec9aa0dbd02a754ee5cc62ce064426c23f \ 80 | --hash=sha256:39d26c323ada4dee2a8696e504ebb0afefb841f88c5ed69a3070c010e4c1208e 81 | mypy-boto3-organizations==1.34.139 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 82 | --hash=sha256:06fd26f02e8e918852ab681558215e607873749966759b5f58df1ff2e9a45392 \ 83 | --hash=sha256:6b42f6ee20ef44ecec1b9ccd66c122dff43f43e60815e4c810a23e00fc08ead7 84 | mypy-boto3-s3==1.34.158 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 85 | --hash=sha256:a875eb0ee91ba5ac6967291887f6e924eb9dc157966e8181da20533086218166 \ 86 | --hash=sha256:c01f1b2304ba7718c8561aaa2b6dc70fe438c91964256aa6ddc508d1cc553c66 87 | mypy-boto3-scheduler==1.34.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 88 | --hash=sha256:88ef3800caa0c838882a904a850b40cb7372adca83e3530397ff70cba977d62d \ 89 | --hash=sha256:fa09d08d63eda7b29523fa886366971c3f6233b459203974270468b2d7e18f37 90 | mypy-boto3-sso-admin==1.34.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 91 | --hash=sha256:6de7cf9327a10b800ac4ca2dbd1cf27d9a64aa5a647e923638a552c382059e3a \ 92 | --hash=sha256:9f871c83493be78a46a9df6bab209dc9bf0eb739822aca534901dd5ab2f91d61 93 | mypy-extensions==1.0.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 94 | --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ 95 | --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 96 | packaging==24.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 97 | --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ 98 | --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 99 | pathspec==0.12.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 100 | --hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \ 101 | --hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712 102 | platformdirs==4.2.2 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 103 | --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ 104 | --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 105 | pluggy==1.5.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 106 | --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ 107 | --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 108 | pydantic==1.10.17 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 109 | --hash=sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f \ 110 | --hash=sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc \ 111 | --hash=sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b \ 112 | --hash=sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b \ 113 | 
--hash=sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b \ 114 | --hash=sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e \ 115 | --hash=sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3 \ 116 | --hash=sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7 \ 117 | --hash=sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227 \ 118 | --hash=sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f \ 119 | --hash=sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6 \ 120 | --hash=sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab \ 121 | --hash=sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad \ 122 | --hash=sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076 \ 123 | --hash=sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681 \ 124 | --hash=sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54 \ 125 | --hash=sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb \ 126 | --hash=sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7 \ 127 | --hash=sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe \ 128 | --hash=sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b \ 129 | --hash=sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab \ 130 | --hash=sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d \ 131 | --hash=sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0 \ 132 | --hash=sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75 \ 133 | --hash=sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741 \ 134 | --hash=sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63 \ 135 | --hash=sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd \ 136 | --hash=sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33 \ 137 | --hash=sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815 \ 138 | --hash=sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7 \ 139 | --hash=sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a \ 140 | --hash=sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655 \ 141 | --hash=sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773 \ 142 | --hash=sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c \ 143 | --hash=sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7 \ 144 | --hash=sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3 \ 145 | --hash=sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768 \ 146 | --hash=sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d \ 147 | --hash=sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688 \ 148 | --hash=sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f \ 149 | --hash=sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e \ 150 | --hash=sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991 \ 151 | --hash=sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a 152 | pydantic[email]==1.10.17 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 153 | 
--hash=sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f \ 154 | --hash=sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc \ 155 | --hash=sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b \ 156 | --hash=sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b \ 157 | --hash=sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b \ 158 | --hash=sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e \ 159 | --hash=sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3 \ 160 | --hash=sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7 \ 161 | --hash=sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227 \ 162 | --hash=sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f \ 163 | --hash=sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6 \ 164 | --hash=sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab \ 165 | --hash=sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad \ 166 | --hash=sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076 \ 167 | --hash=sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681 \ 168 | --hash=sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54 \ 169 | --hash=sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb \ 170 | --hash=sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7 \ 171 | --hash=sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe \ 172 | --hash=sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b \ 173 | --hash=sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab \ 174 | --hash=sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d \ 175 | --hash=sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0 \ 176 | --hash=sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75 \ 177 | --hash=sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741 \ 178 | --hash=sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63 \ 179 | --hash=sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd \ 180 | --hash=sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33 \ 181 | --hash=sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815 \ 182 | --hash=sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7 \ 183 | --hash=sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a \ 184 | --hash=sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655 \ 185 | --hash=sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773 \ 186 | --hash=sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c \ 187 | --hash=sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7 \ 188 | --hash=sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3 \ 189 | --hash=sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768 \ 190 | --hash=sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d \ 191 | --hash=sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688 \ 192 | --hash=sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f \ 193 | 
--hash=sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e \ 194 | --hash=sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991 \ 195 | --hash=sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a 196 | pytest==7.4.4 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 197 | --hash=sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280 \ 198 | --hash=sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8 199 | python-dateutil==2.9.0.post0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 200 | --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ 201 | --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 202 | ruff==0.0.267 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 203 | --hash=sha256:0afca3633c8e2b6c0a48ad0061180b641b3b404d68d7e6736aab301c8024c424 \ 204 | --hash=sha256:20c594eb56c19063ef5a57f89340e64c6550e169d6a29408a45130a8c3068adc \ 205 | --hash=sha256:2107cec3699ca4d7bd41543dc1d475c97ae3a21ea9212238b5c2088fa8ee7722 \ 206 | --hash=sha256:2972241065b1c911bce3db808837ed10f4f6f8a8e15520a4242d291083605ab6 \ 207 | --hash=sha256:45d61a2b01bdf61581a2ee039503a08aa603dc74a6bbe6fb5d1ce3052f5370e5 \ 208 | --hash=sha256:4adbbbe314d8fcc539a245065bad89446a3cef2e0c9cf70bf7bb9ed6fe31856d \ 209 | --hash=sha256:5a898953949e37c109dd242cfcf9841e065319995ebb7cdfd213b446094a942f \ 210 | --hash=sha256:632cec7bbaf3c06fcf0a72a1dd029b7d8b7f424ba95a574aaa135f5d20a00af7 \ 211 | --hash=sha256:67254ae34c38cba109fdc52e4a70887de1f850fb3971e5eeef343db67305d1c1 \ 212 | --hash=sha256:786de30723c71fc46b80a173c3313fc0dbe73c96bd9da8dd1212cbc2f84cdfb2 \ 213 | --hash=sha256:7df7eb5f8d791566ba97cc0b144981b9c080a5b861abaf4bb35a26c8a77b83e9 \ 214 | --hash=sha256:9adf1307fa9d840d1acaa477eb04f9702032a483214c409fca9dc46f5f157fe3 \ 215 | --hash=sha256:bbe104f21a429b77eb5ac276bd5352fd8c0e1fbb580b4c772f77ee8c76825654 \ 216 | --hash=sha256:d09aecc9f5845586ba90911d815f9772c5a6dcf2e34be58c6017ecb124534ac4 \ 217 | --hash=sha256:d12ab329474c46b96d962e2bdb92e3ad2144981fe41b89c7770f370646c0101f \ 218 | --hash=sha256:db33deef2a5e1cf528ca51cc59dd764122a48a19a6c776283b223d147041153f \ 219 | --hash=sha256:f731d81cb939e757b0335b0090f18ca2e9ff8bcc8e6a1cf909245958949b6e11 220 | s3transfer==0.10.2 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 221 | --hash=sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6 \ 222 | --hash=sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69 223 | six==1.16.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 224 | --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ 225 | --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 226 | slack-bolt==1.19.1 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 227 | --hash=sha256:a3041d8f49eba22c3be4dd8f57602d6685d367c0e1cc7619260e1ce4a363d07f \ 228 | --hash=sha256:b916829ece0ff7a2cae1502f1774fd100592cd8a81a39e4f04e3137a3f19522b 229 | slack-sdk==3.31.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 230 | --hash=sha256:740d2f9c49cbfcbd46fca56b4be9d527934c225312aac18fd2c0fca0ef6bc935 \ 231 | --hash=sha256:a120cc461e8ebb7d9175f171dbe0ded37a6878d9f7b96b28e4bad1227399047b 232 | sortedcontainers==2.4.0 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 233 | 
--hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \ 234 | --hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 235 | tomli==2.0.1 ; python_full_version >= "3.10.10" and python_version < "3.11" \ 236 | --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ 237 | --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f 238 | types-awscrt==0.21.2 ; python_full_version >= "3.10.10" and python_version < "4.0" \ 239 | --hash=sha256:0839fe12f0f914d8f7d63ed777c728cb4eccc2d5d79a26e377d12b0604e7bf0e \ 240 | --hash=sha256:84a9f4f422ec525c314fdf54c23a1e73edfbcec968560943ca2d41cfae623b38 241 | types-s3transfer==0.10.1 ; python_full_version >= "3.10.10" and python_version < "4.0" \ 242 | --hash=sha256:02154cce46528287ad76ad1a0153840e0492239a0887e8833466eccf84b98da0 \ 243 | --hash=sha256:49a7c81fa609ac1532f8de3756e64b58afcecad8767933310228002ec7adff74 244 | typing-extensions==4.12.2 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 245 | --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ 246 | --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 247 | urllib3==2.2.2 ; python_full_version >= "3.10.10" and python_full_version < "4.0.0" \ 248 | --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ 249 | --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 250 | -------------------------------------------------------------------------------- /src/s3.py: -------------------------------------------------------------------------------- 1 | import json 2 | import uuid 3 | from dataclasses import asdict, dataclass 4 | from datetime import datetime, timedelta, timezone 5 | from typing import Literal 6 | 7 | import boto3 8 | from mypy_boto3_s3 import S3Client, type_defs 9 | 10 | from config import get_config, get_logger 11 | 12 | cfg = get_config() 13 | logger = get_logger(service="s3") 14 | s3: S3Client = boto3.client("s3") 15 | 16 | 17 | @dataclass 18 | class AuditEntry: 19 | reason: Literal["scheduled_revocation"] | Literal["automated_revocation"] | str 20 | operation_type: Literal["grant"] | Literal["revoke"] 21 | permission_duration: Literal["NA"] | timedelta 22 | sso_user_principal_id: Literal["NA"] | str 23 | audit_entry_type: Literal["group"] | Literal["account"] 24 | version = 1 25 | role_name: Literal["NA"] | str = "NA" 26 | account_id: Literal["NA"] | str = "NA" 27 | requester_slack_id: Literal["NA"] | str = "NA" 28 | requester_email: Literal["NA"] | str = "NA" 29 | request_id: Literal["NA"] | str = "NA" 30 | approver_slack_id: Literal["NA"] | str = "NA" 31 | approver_email: Literal["NA"] | str = "NA" 32 | group_name: Literal["NA"] | str = "NA" 33 | group_id: Literal["NA"] | str = "NA" 34 | group_membership_id: Literal["NA"] | str = "NA" 35 | secondary_domain_was_used: bool = False 36 | 37 | 38 | def log_operation(audit_entry: AuditEntry) -> type_defs.PutObjectOutputTypeDef: 39 | now = datetime.now(timezone.utc) 40 | logger.debug("Posting audit entry to s3", extra={"audit_entry": audit_entry}) 41 | logger.info("Posting audit entry to s3") 42 | if isinstance(audit_entry.permission_duration, timedelta): 43 | permission_duration = str(int(audit_entry.permission_duration.total_seconds())) 44 | else: 45 | permission_duration = "NA" 46 | 47 | audit_entry_dict = asdict(audit_entry) | { 48 | "permission_duration": permission_duration, 49 | "time": str(now), 50 | "timestamp": 
int(now.timestamp() * 1000), 51 | } 52 | 53 | json_data = json.dumps(audit_entry_dict) 54 | bucket_name = cfg.s3_bucket_for_audit_entry_name 55 | bucket_prefix = cfg.s3_bucket_prefix_for_partitions 56 | return s3.put_object( 57 | Bucket=bucket_name, 58 | Key=f"{bucket_prefix}/{now.strftime('%Y/%m/%d')}/{uuid.uuid4()}.json", 59 | Body=json_data, 60 | ContentType="application/json", 61 | ServerSideEncryption="AES256", 62 | ) 63 | -------------------------------------------------------------------------------- /src/schedule.py: -------------------------------------------------------------------------------- 1 | import json 2 | from datetime import datetime, timedelta, timezone 3 | 4 | import botocore.exceptions 5 | import jmespath as jp 6 | from croniter import croniter 7 | from mypy_boto3_events import EventBridgeClient 8 | from mypy_boto3_events import type_defs as events_type_defs 9 | from mypy_boto3_scheduler import EventBridgeSchedulerClient 10 | from mypy_boto3_scheduler import type_defs as scheduler_type_defs 11 | from pydantic import ValidationError 12 | 13 | import config 14 | import entities 15 | import sso 16 | from events import ( 17 | ApproverNotificationEvent, 18 | DiscardButtonsEvent, 19 | Event, 20 | GroupRevokeEvent, 21 | RevokeEvent, 22 | ScheduledRevokeEvent, 23 | ScheduledGroupRevokeEvent, 24 | ) 25 | 26 | logger = config.get_logger(service="schedule") 27 | cfg = config.get_config() 28 | 29 | 30 | def get_event_brige_rule(event_brige_client: EventBridgeClient, rule_name: str) -> events_type_defs.DescribeRuleResponseTypeDef: 31 | return event_brige_client.describe_rule(Name=rule_name) 32 | 33 | 34 | def get_next_cron_run_time(cron_expression: str, base_time: datetime) -> datetime: 35 | # Replace ? with * to comply with croniter 36 | cron_expression = cron_expression.replace("?", "*") 37 | cron_iter = croniter(cron_expression, base_time) 38 | next_run_time = cron_iter.get_next(datetime) 39 | logger.debug(f"Next run time: {next_run_time}") 40 | return next_run_time 41 | 42 | 43 | def check_rule_expression_and_get_next_run(rule: events_type_defs.DescribeRuleResponseTypeDef) -> datetime | str: 44 | schedule_expression = rule["ScheduleExpression"] 45 | current_time = datetime.now(timezone.utc) 46 | logger.debug(f"Current time: {current_time}") 47 | logger.debug(f"Schedule expression: {schedule_expression}") 48 | 49 | if schedule_expression.startswith("rate"): 50 | return schedule_expression 51 | elif schedule_expression.startswith("cron"): 52 | clean_expression = schedule_expression.replace("cron(", "").replace(")", "") 53 | try: 54 | return get_next_cron_run_time(clean_expression, current_time) 55 | except Exception as e: 56 | logger.warning(f"Unable to parse cron expression: {clean_expression}", extra={"error": e}) 57 | return schedule_expression 58 | else: 59 | raise ValueError("Unknown schedule expression format!") 60 | 61 | 62 | def get_schedules(client: EventBridgeSchedulerClient) -> list[scheduler_type_defs.GetScheduleOutputTypeDef]: 63 | paginator = client.get_paginator("list_schedules") 64 | scheduled_events = [] 65 | for page in paginator.paginate(GroupName=cfg.schedule_group_name): 66 | schedules_names = jp.search("Schedules[*].Name", page) 67 | for schedule_name in schedules_names: 68 | if not schedule_name: 69 | continue 70 | full_schedule = client.get_schedule(GroupName=cfg.schedule_group_name, Name=schedule_name) 71 | scheduled_events.append(full_schedule) 72 | return scheduled_events 73 | 74 | 75 | def get_scheduled_events(client: EventBridgeSchedulerClient) -> 
list[ScheduledRevokeEvent | ScheduledGroupRevokeEvent]: 76 | scheduled_events = get_schedules(client) 77 | logger.debug("Scheduled events", extra={"scheduled_events": scheduled_events}) 78 | scheduled_revoke_events: list[ScheduledRevokeEvent | ScheduledGroupRevokeEvent] = [] 79 | for full_schedule in scheduled_events: 80 | if full_schedule["Name"].startswith("discard-buttons"): 81 | continue 82 | 83 | event = json.loads(jp.search("Target.Input", full_schedule)) 84 | 85 | try: 86 | event = Event.parse_obj(event) 87 | except ValidationError as e: 88 | logger.warning("Got unexpected event", extra={"event": event, "error": e}) 89 | continue 90 | 91 | if isinstance(event.__root__, ScheduledRevokeEvent): 92 | scheduled_revoke_events.append(event.__root__) 93 | elif isinstance(event.__root__, ScheduledGroupRevokeEvent): 94 | scheduled_revoke_events.append(event.__root__) 95 | logger.debug("Scheduled revoke events", extra={"scheduled_revoke_events": scheduled_revoke_events}) 96 | return scheduled_revoke_events 97 | 98 | 99 | def delete_schedule(client: EventBridgeSchedulerClient, schedule_name: str) -> None: 100 | try: 101 | client.delete_schedule(GroupName=cfg.schedule_group_name, Name=schedule_name) 102 | logger.info("Schedule deleted", extra={"schedule_name": schedule_name}) 103 | except botocore.exceptions.ClientError as e: 104 | if jp.search("Error.Code", e.response) == "ResourceNotFoundException": 105 | logger.info("Schedule for deletion was not found", extra={"schedule_name": schedule_name}) 106 | else: 107 | raise e 108 | 109 | 110 | def get_and_delete_scheduled_revoke_event_if_already_exist( 111 | client: EventBridgeSchedulerClient, 112 | event: sso.UserAccountAssignment | sso.GroupAssignment, 113 | ) -> None: 114 | for scheduled_event in get_scheduled_events(client): 115 | logger.debug("Checking if schedule already exists", extra={"scheduled_event": scheduled_event}) 116 | if isinstance(scheduled_event, ScheduledRevokeEvent) and scheduled_event.revoke_event.user_account_assignment == event: 117 | logger.info("Schedule already exists, deleting it", extra={"schedule_name": scheduled_event.revoke_event.schedule_name}) 118 | delete_schedule(client, scheduled_event.revoke_event.schedule_name) 119 | if isinstance(scheduled_event, ScheduledGroupRevokeEvent) and scheduled_event.revoke_event.group_assignment == event: 120 | logger.info("Schedule already exists, deleting it", extra={"schedule_name": scheduled_event.revoke_event.schedule_name}) 121 | delete_schedule(client, scheduled_event.revoke_event.schedule_name) 122 | 123 | 124 | def event_bridge_schedule_after(td: timedelta) -> str: 125 | now = datetime.now(timezone.utc) 126 | return f"at({(now + td).replace(microsecond=0).isoformat().replace('+00:00', '')})" 127 | 128 | 129 | def schedule_revoke_event( 130 | schedule_client: EventBridgeSchedulerClient, 131 | permission_duration: timedelta, 132 | approver: entities.slack.User, 133 | requester: entities.slack.User, 134 | user_account_assignment: sso.UserAccountAssignment, 135 | ) -> scheduler_type_defs.CreateScheduleOutputTypeDef: 136 | logger.info("Scheduling revoke event") 137 | schedule_name = f"{cfg.revoker_function_name}" + datetime.now(timezone.utc).strftime("%Y-%m-%d-%H-%M-%S") 138 | get_and_delete_scheduled_revoke_event_if_already_exist(schedule_client, user_account_assignment) 139 | revoke_event = RevokeEvent( 140 | schedule_name=schedule_name, 141 | approver=approver, 142 | requester=requester, 143 | user_account_assignment=user_account_assignment, 144 | 
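# ---- editor's sketch (illustrative, not repository code): src/schedule.py ----
# What event_bridge_schedule_after() above produces: EventBridge Scheduler one-off
# schedules use at(yyyy-mm-ddThh:mm:ss) with no UTC offset and no sub-second part,
# so the helper strips microseconds and the "+00:00" suffix from the ISO timestamp.
from datetime import datetime, timedelta, timezone

def event_bridge_schedule_after(td: timedelta) -> str:
    now = datetime.now(timezone.utc)
    return f"at({(now + td).replace(microsecond=0).isoformat().replace('+00:00', '')})"

print(event_bridge_schedule_after(timedelta(hours=8)))  # e.g. at(2024-05-21T17:03:11)
# -------------------------------------------------------------------------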
permission_duration=permission_duration, 145 | ) 146 | logger.debug("Creating schedule", extra={"revoke_event": revoke_event}) 147 | return schedule_client.create_schedule( 148 | FlexibleTimeWindow={"Mode": "OFF"}, 149 | Name=schedule_name, 150 | GroupName=cfg.schedule_group_name, 151 | ScheduleExpression=event_bridge_schedule_after(permission_duration), 152 | State="ENABLED", 153 | Target=scheduler_type_defs.TargetTypeDef( 154 | Arn=cfg.revoker_function_arn, 155 | RoleArn=cfg.schedule_policy_arn, 156 | Input=json.dumps( 157 | { 158 | "action": "event_bridge_revoke", 159 | "revoke_event": revoke_event.json(), 160 | }, 161 | ), 162 | ), 163 | ) 164 | 165 | 166 | def schedule_group_revoke_event( 167 | schedule_client: EventBridgeSchedulerClient, 168 | permission_duration: timedelta, 169 | approver: entities.slack.User, 170 | requester: entities.slack.User, 171 | group_assignment: sso.GroupAssignment, 172 | ) -> scheduler_type_defs.CreateScheduleOutputTypeDef: 173 | logger.info("Scheduling revoke event") 174 | schedule_name = f"{cfg.revoker_function_name}" + datetime.now(timezone.utc).strftime("%Y-%m-%d-%H-%M-%S") 175 | revoke_event = GroupRevokeEvent( 176 | schedule_name=schedule_name, 177 | approver=approver, 178 | requester=requester, 179 | group_assignment=group_assignment, 180 | permission_duration=permission_duration, 181 | ) 182 | get_and_delete_scheduled_revoke_event_if_already_exist(schedule_client, group_assignment) 183 | logger.debug("Creating schedule", extra={"revoke_event": revoke_event}) 184 | return schedule_client.create_schedule( 185 | FlexibleTimeWindow={"Mode": "OFF"}, 186 | Name=schedule_name, 187 | GroupName=cfg.schedule_group_name, 188 | ScheduleExpression=event_bridge_schedule_after(permission_duration), 189 | State="ENABLED", 190 | Target=scheduler_type_defs.TargetTypeDef( 191 | Arn=cfg.revoker_function_arn, 192 | RoleArn=cfg.schedule_policy_arn, 193 | Input=json.dumps( 194 | { 195 | "action": "event_bridge_group_revoke", 196 | "revoke_event": revoke_event.json(), 197 | }, 198 | ), 199 | ), 200 | ) 201 | 202 | 203 | def schedule_discard_buttons_event( 204 | schedule_client: EventBridgeSchedulerClient, 205 | time_stamp: str, 206 | channel_id: str, 207 | ) -> scheduler_type_defs.CreateScheduleOutputTypeDef | None: 208 | if cfg.request_expiration_hours == 0: 209 | logger.info("Request expiration is disabled, not scheduling discard buttons event") 210 | return 211 | permission_duration = timedelta(hours=cfg.request_expiration_hours) 212 | 213 | logger.info("Scheduling discard buttons event") 214 | schedule_name = "discard-buttons" + datetime.now(timezone.utc).strftime("%Y-%m-%d-%H-%M-%S") 215 | logger.debug( 216 | "Creating schedule", 217 | extra={ 218 | "schedule_name": schedule_name, 219 | "permission_duration": permission_duration, 220 | "time_stamp": time_stamp, 221 | "channel_id": channel_id, 222 | }, 223 | ) 224 | return schedule_client.create_schedule( 225 | FlexibleTimeWindow={"Mode": "OFF"}, 226 | Name=schedule_name, 227 | GroupName=cfg.schedule_group_name, 228 | ScheduleExpression=event_bridge_schedule_after(permission_duration), 229 | State="ENABLED", 230 | Target=scheduler_type_defs.TargetTypeDef( 231 | Arn=cfg.revoker_function_arn, 232 | RoleArn=cfg.schedule_policy_arn, 233 | Input=json.dumps( 234 | DiscardButtonsEvent( 235 | action="discard_buttons_event", 236 | schedule_name=schedule_name, 237 | time_stamp=time_stamp, 238 | channel_id=channel_id, 239 | ).dict() 240 | ), 241 | ), 242 | ) 243 | 244 | 245 | def schedule_approver_notification_event( 246 | 
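# ---- editor's sketch (illustrative, not repository code): src/schedule.py ----
# The Target.Input payload that schedule_revoke_event() above registers with
# EventBridge Scheduler. Because revoke_event.json() is itself a JSON *string*,
# the payload is double-encoded: the revoker Lambda must json.loads the outer
# document and then parse the embedded revoke_event separately. Values are made up.
import json

inner = json.dumps({"schedule_name": "access-revoker2024-01-01-00-00-00"})  # stands in for revoke_event.json()
payload = json.dumps({"action": "event_bridge_revoke", "revoke_event": inner})
decoded = json.loads(payload)
revoke_event = json.loads(decoded["revoke_event"])  # second parse of the embedded string
assert revoke_event["schedule_name"].startswith("access-revoker")
# -------------------------------------------------------------------------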
schedule_client: EventBridgeSchedulerClient, 247 | message_ts: str, 248 | channel_id: str, 249 | time_to_wait: timedelta, 250 | ) -> scheduler_type_defs.CreateScheduleOutputTypeDef | None: 251 | # If the initial wait time is 0, we don't schedule the event 252 | if cfg.approver_renotification_initial_wait_time == 0: 253 | logger.info("Approver renotification is disabled, not scheduling approver notification event") 254 | return 255 | 256 | logger.info("Scheduling approver notification event") 257 | schedule_name = "approvers-renotification" + datetime.now(timezone.utc).strftime("%Y-%m-%d-%H-%M-%S") 258 | logger.debug( 259 | "Creating schedule", 260 | extra={ 261 | "schedule_name": schedule_name, 262 | "time_to_wait": time_to_wait, 263 | "time_stamp": message_ts, 264 | "channel_id": channel_id, 265 | }, 266 | ) 267 | return schedule_client.create_schedule( 268 | FlexibleTimeWindow={"Mode": "OFF"}, 269 | Name=schedule_name, 270 | GroupName=cfg.schedule_group_name, 271 | ScheduleExpression=event_bridge_schedule_after(time_to_wait), 272 | State="ENABLED", 273 | Target=scheduler_type_defs.TargetTypeDef( 274 | Arn=cfg.revoker_function_arn, 275 | RoleArn=cfg.schedule_policy_arn, 276 | Input=json.dumps( 277 | ApproverNotificationEvent( 278 | action="approvers_renotification", 279 | schedule_name=schedule_name, 280 | time_stamp=message_ts, 281 | channel_id=channel_id, 282 | time_to_wait_in_seconds=time_to_wait.total_seconds(), 283 | ).dict() 284 | ), 285 | ), 286 | ) 287 | -------------------------------------------------------------------------------- /src/statement.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import FrozenSet, Union 3 | 4 | from pydantic import ConstrainedStr, EmailStr, Field 5 | 6 | from entities import BaseModel 7 | 8 | 9 | class ResourceType(str, Enum): 10 | Account = "Account" 11 | OU = "OU" 12 | 13 | 14 | class AWSAccountId(ConstrainedStr): 15 | regex = r"^\d{12}$" 16 | 17 | 18 | class AWSOUName(ConstrainedStr): 19 | regex = r"^[\s\S]{1,128}$" 20 | 21 | 22 | class PermissionSetName(ConstrainedStr): 23 | regex = r"^[\w+=,.@-]{1,32}$" 24 | 25 | 26 | class WildCard(ConstrainedStr): 27 | regex = r"^\*$" 28 | 29 | 30 | class BaseStatement(BaseModel): 31 | permission_set: FrozenSet[Union[PermissionSetName, WildCard]] 32 | 33 | allow_self_approval: bool | None = None 34 | approval_is_not_required: bool | None = None 35 | approvers: FrozenSet[EmailStr] = Field(default_factory=frozenset) 36 | 37 | 38 | class Statement(BaseStatement): 39 | resource_type: ResourceType = Field(ResourceType.Account, const=True) 40 | resource: FrozenSet[Union[AWSAccountId, WildCard]] 41 | 42 | def affects(self, account_id: str, permission_set_name: str) -> bool: # noqa: ANN101 43 | return (account_id in self.resource or "*" in self.resource) and ( 44 | permission_set_name in self.permission_set or "*" in self.permission_set 45 | ) 46 | 47 | 48 | def get_affected_statements(statements: FrozenSet[Statement], account_id: str, permission_set_name: str) -> FrozenSet[Statement]: 49 | return frozenset(statement for statement in statements if statement.affects(account_id, permission_set_name)) 50 | 51 | 52 | class OUStatement(BaseStatement): 53 | resource_type: ResourceType = Field(ResourceType.OU, const=True) 54 | resource: FrozenSet[Union[AWSOUName, WildCard]] 55 | 56 | 57 | class AWSSSOGroupID(ConstrainedStr): 58 | regex = r"^([0-9a-f]{10}-)?[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}$" 59 | 60 | 61 
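# ---- editor's sketch (illustrative, not repository code): src/statement.py ----
# Wildcard matching in Statement.affects() above: a "*" in either the resource set
# or the permission-set set matches anything, and both dimensions must match for a
# statement to apply. Standalone re-implementation with example values:
resource = frozenset({"*"})                          # all accounts
permission_set = frozenset({"AdministratorAccess"})

def affects(account_id: str, permission_set_name: str) -> bool:
    return (account_id in resource or "*" in resource) and (
        permission_set_name in permission_set or "*" in permission_set
    )

assert affects("111111111111", "AdministratorAccess")
assert not affects("111111111111", "ReadOnlyAccess")  # permission set not covered
# -------------------------------------------------------------------------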
| class GroupStatement(BaseModel): 62 | resource: FrozenSet[AWSSSOGroupID] 63 | allow_self_approval: bool | None = None 64 | approval_is_not_required: bool | None = None 65 | approvers: FrozenSet[EmailStr] = Field(default_factory=frozenset) 66 | 67 | def affects(self, group_id: str) -> bool: # noqa: ANN101 68 | return group_id in self.resource 69 | 70 | 71 | def get_affected_group_statements(statements: FrozenSet[GroupStatement], group_id: str) -> FrozenSet[GroupStatement]: 72 | return frozenset(statement for statement in statements if statement.affects(group_id)) 73 | -------------------------------------------------------------------------------- /src/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fivexl/terraform-aws-sso-elevator/7cc9daaa59bf4b50621c8361576cb8afbb95c7a4/src/tests/__init__.py -------------------------------------------------------------------------------- /src/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | 4 | import boto3 5 | 6 | 7 | def pytest_sessionstart(session): # noqa: ANN201, ARG001, ANN001 8 | mock_env = { 9 | "schedule_policy_arn": "x", 10 | "revoker_function_arn": "x", 11 | "revoker_function_name": "x", 12 | "schedule_group_name": "x", 13 | "post_update_to_slack": "true", 14 | "send_dm_if_user_not_in_channel": "true", 15 | "slack_channel_id": "x", 16 | "slack_bot_token": "x", 17 | "sso_instance_arn": "x", 18 | "log_level": "DEBUG", 19 | "slack_app_log_level": "INFO", 20 | "s3_bucket_for_audit_entry_name": "x", 21 | "s3_bucket_prefix_for_partitions": "x", 22 | "sso_elevator_scheduled_revocation_rule_name": "x", 23 | "request_expiration_hours": "8", 24 | "approver_renotification_initial_wait_time": "15", 25 | "approver_renotification_backoff_multiplier": "2", 26 | "max_permissions_duration_time": "24", 27 | "secondary_fallback_email_domains": json.dumps(["domen.com"]), 28 | "permission_duration_list_override": json.dumps(["00:25", "01:00"]), 29 | "statements": json.dumps( 30 | [ 31 | { 32 | "ResourceType": "Account", 33 | "Resource": ["*"], 34 | "PermissionSet": "*", 35 | "Approvers": [ 36 | "email@domen.com", 37 | ], 38 | "AllowSelfApproval": True, 39 | } 40 | ] 41 | ), 42 | "group_statements": json.dumps( 43 | [ 44 | { 45 | "Resource": ["11111111-2222-3333-4444-555555555555"], 46 | "Approvers": ["email@domen.com"], 47 | "AllowSelfApproval": True, 48 | }, 49 | ] 50 | ), 51 | } 52 | os.environ |= mock_env 53 | 54 | boto3.setup_default_session(region_name="us-east-1") 55 | -------------------------------------------------------------------------------- /src/tests/strategies.py: -------------------------------------------------------------------------------- 1 | import json 2 | import string 3 | from typing import Literal 4 | 5 | from hypothesis import strategies as st 6 | from hypothesis.strategies import SearchStrategy 7 | 8 | # ruff: noqa: ANN201 9 | 10 | 11 | def jsonstr(strategy: SearchStrategy) -> SearchStrategy: 12 | return st.builds( 13 | json.dumps, 14 | strategy, 15 | ) 16 | 17 | 18 | def build_group_id_strategy(): 19 | lover_alphabet_group_id = ["a", "b", "c", "d", "e", "f", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"] 20 | full_alphabet_group_id = lover_alphabet_group_id + ["A", "B", "C", "D", "E", "F"] 21 | 22 | first_ten = st.one_of(st.text(min_size=0, max_size=0), st.text(min_size=10, max_size=10, alphabet=lover_alphabet_group_id)) 23 | second_part = st.text(min_size=8, 
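# ---- editor's sketch (illustrative, not repository code): src/statement.py ----
# The AWSSSOGroupID pattern above accepts both a bare UUID and the Identity Store
# form carrying a 10-hex-character prefix; build_group_id_strategy() in
# src/tests/strategies.py generates exactly these two shapes. IDs below are made up.
import re

pattern = (
    r"^([0-9a-f]{10}-)?[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-"
    r"[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}$"
)
assert re.match(pattern, "11111111-2222-3333-4444-555555555555")             # bare UUID
assert re.match(pattern, "90670bb1f4-11111111-2222-3333-4444-555555555555")  # prefixed form
assert not re.match(pattern, "not-a-group-id")
# -------------------------------------------------------------------------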
max_size=8, alphabet=full_alphabet_group_id) 24 | third_part = st.text(min_size=4, max_size=4, alphabet=full_alphabet_group_id) 25 | fourth_part = st.text(min_size=4, max_size=4, alphabet=full_alphabet_group_id) 26 | fifth_part = st.text(min_size=4, max_size=4, alphabet=full_alphabet_group_id) 27 | sixth_part = st.text(min_size=12, max_size=12, alphabet=full_alphabet_group_id) 28 | 29 | return st.builds( 30 | lambda first, second, third, fourth, fifth, sixth: f"{first}{('-' if first else '')}{second}-{third}-{fourth}-{fifth}-{sixth}", 31 | first_ten, 32 | second_part, 33 | third_part, 34 | fourth_part, 35 | fifth_part, 36 | sixth_part, 37 | ) 38 | 39 | 40 | group_id = build_group_id_strategy() 41 | 42 | # https://docs.aws.amazon.com/organizations/latest/APIReference/API_CreateAccountStatus.html 43 | aws_account_id = st.text(min_size=12, max_size=12, alphabet=string.digits) 44 | 45 | 46 | # https://docs.aws.amazon.com/singlesignon/latest/APIReference/API_CreatePermissionSet.html#singlesignon-CreatePermissionSet-request-Name 47 | aws_permission_set_name = st.text(min_size=1, max_size=32, alphabet=string.ascii_letters + string.digits + "_+=,.@-") 48 | 49 | # https://docs.aws.amazon.com/organizations/latest/APIReference/API_CreateOrganizationalUnit.html#organizations-CreateOrganizationalUnit-request-Name 50 | aws_organization_unit_name = st.text(min_size=1, max_size=128, alphabet=string.ascii_letters) 51 | 52 | statement_approvers = st.frozensets(st.emails(), min_size=1, max_size=10) 53 | 54 | str_bool = st.one_of(st.just(str(True)), st.just(str(False))) 55 | 56 | json_safe_text = st.text( 57 | alphabet=st.characters( 58 | blacklist_categories=("Cs", "Cc", "Cf", "Co", "Cn"), 59 | blacklist_characters=("/"), 60 | ), 61 | min_size=1, 62 | max_size=200, 63 | ) 64 | 65 | 66 | def resource_type_st(resource_type: Literal["Account", "OU", "Any"] = "Any"): 67 | if resource_type == "Account": 68 | return st.just("Account") 69 | elif resource_type == "OU": 70 | return st.just("OU") 71 | elif resource_type == "Any": 72 | return st.sampled_from(["Account", "OU"]) 73 | raise ValueError(f"Unknown resource type: {resource_type}") 74 | 75 | 76 | @st.composite 77 | def resource(draw: st.DrawFn, resource_type: SearchStrategy[str]): 78 | r_type = draw(resource_type) 79 | if r_type == "Account": 80 | return draw(aws_account_id) 81 | elif r_type == "OU": 82 | return draw(aws_organization_unit_name) 83 | raise ValueError(f"Unknown resource type: {r_type}") 84 | 85 | 86 | def statement_dict( 87 | resource_type: Literal["Account", "OU", "Any"] = "Account", 88 | ): 89 | resource_type_strategy = st.shared(resource_type_st(resource_type)) 90 | resource_strategy = resource(resource_type_strategy) 91 | return st.fixed_dictionaries( 92 | mapping={ 93 | "ResourceType": resource_type_strategy, 94 | "Resource": st.one_of(resource_strategy, st.lists(resource_strategy, max_size=20), st.just("*")), 95 | "PermissionSet": st.one_of( 96 | aws_permission_set_name, 97 | st.lists(aws_permission_set_name, max_size=20), 98 | st.just("*"), 99 | ), 100 | }, 101 | optional={ 102 | "Approvers": st.one_of(st.emails(), st.lists(st.emails(), max_size=20)), # type: ignore no 103 | "ApprovalIsNotRequired": st.booleans(), 104 | "AllowSelfApproval": st.booleans(), 105 | }, 106 | ) 107 | 108 | 109 | @st.composite 110 | def group_resource(draw: st.DrawFn): 111 | return draw(group_id) 112 | 113 | 114 | def group_statement_dict(): 115 | resource_strategy = group_resource() 116 | return st.fixed_dictionaries( 117 | mapping={ 118 | "Resource": 
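# ---- editor's sketch (illustrative, not repository code): src/tests/strategies.py ----
# Why statement_dict() above wraps resource_type_st() in st.shared: every draw from
# the same shared strategy instance yields the same value within one example, so the
# "ResourceType" field and the resource() payload always agree (an "Account"
# statement never receives an OU name). Minimal standalone demonstration:
from hypothesis import given, strategies as st

shared_type = st.shared(st.sampled_from(["Account", "OU"]))

@given(first=shared_type, second=shared_type)
def check_shared_draws_agree(first: str, second: str):
    assert first == second  # both parameters observe the same drawn value

check_shared_draws_agree()
# -------------------------------------------------------------------------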
st.one_of(resource_strategy, st.lists(resource_strategy, max_size=20)), 119 | }, 120 | optional={ 121 | "Approvers": st.one_of(st.emails(), st.lists(st.emails(), max_size=20)), 122 | "ApprovalIsNotRequired": st.booleans(), 123 | "AllowSelfApproval": st.booleans(), 124 | }, # type: ignore # noqa: PGH003 125 | ) 126 | -------------------------------------------------------------------------------- /src/tests/test_config.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | 4 | from hypothesis import HealthCheck, example, given, settings 5 | from hypothesis import strategies as st 6 | from hypothesis.strategies import SearchStrategy 7 | from pydantic import ValidationError 8 | 9 | import config 10 | 11 | from . import strategies 12 | 13 | # ruff: noqa 14 | VALID_STATEMENT_DICT = { 15 | "ResourceType": "Account", 16 | "Resource": ["111111111111"], 17 | "PermissionSet": "AdministratorAccess", 18 | "Approvers": "example@gmail.com", 19 | } 20 | VALID_GROUP_STATEMENT_DICT = { 21 | "Resource": ["11e111e1-e111-11ee-e111-1e11e1ee11e1"], 22 | "Approvers": "example@gmail.com", 23 | "AllowSelfApproval": True, 24 | } 25 | 26 | 27 | @given(strategies.statement_dict()) 28 | @settings(max_examples=50, suppress_health_check=(HealthCheck.too_slow,)) 29 | @example({}).xfail(raises=KeyError, reason="Empty dict is not a valid statement") 30 | @example(VALID_STATEMENT_DICT) 31 | def test_parse_statement( 32 | dict_statement: dict, 33 | ): 34 | try: 35 | config.parse_statement(dict_statement) 36 | except ValidationError: 37 | assert False 38 | 39 | 40 | @given(strategies.group_statement_dict()) 41 | @settings(max_examples=50, suppress_health_check=(HealthCheck.too_slow,)) 42 | @example({}).xfail(raises=KeyError, reason="Empty dict is not a valid group_statement") 43 | @example(VALID_GROUP_STATEMENT_DICT) 44 | def test_parse_group_statement(dict_group_statement: dict): 45 | try: 46 | config.parse_group_statement(dict_group_statement) 47 | except ValidationError: 48 | assert False 49 | 50 | 51 | def config_dict( 52 | statements: SearchStrategy = strategies.jsonstr(st.lists(strategies.statement_dict())), 53 | group_statements: SearchStrategy = strategies.jsonstr(st.lists(strategies.group_statement_dict())), 54 | secondary_fallback_email_domains: SearchStrategy = strategies.jsonstr(st.lists(strategies.json_safe_text, max_size=10, min_size=1)), 55 | permission_duration_list_override: SearchStrategy = strategies.jsonstr(st.lists(strategies.json_safe_text, max_size=10, min_size=1)) 56 | ): 57 | return st.fixed_dictionaries( 58 | { 59 | "schedule_policy_arn": strategies.json_safe_text, 60 | "revoker_function_arn": strategies.json_safe_text, 61 | "revoker_function_name": strategies.json_safe_text, 62 | "schedule_group_name": strategies.json_safe_text, 63 | "slack_channel_id": strategies.json_safe_text, 64 | "slack_bot_token": strategies.json_safe_text, 65 | "sso_instance_arn": strategies.json_safe_text, 66 | "s3_bucket_for_audit_entry_name": strategies.json_safe_text, 67 | "s3_bucket_prefix_for_partitions": strategies.json_safe_text, 68 | "sso_elevator_scheduled_revocation_rule_name": strategies.json_safe_text, 69 | "log_level": st.one_of(st.just("INFO"), st.just("DEBUG"), st.just("WARNING"), st.just("ERROR"), st.just("CRITICAL")), 70 | "post_update_to_slack": strategies.str_bool, 71 | "send_dm_if_user_not_in_channel": strategies.str_bool, 72 | "statements": statements, 73 | "group_statements": group_statements, 74 | "request_expiration_hours": 
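# ---- editor's sketch (illustrative, not repository code): src/tests/test_config.py ----
# The @example(...).xfail(...) pattern used above: the pinned example is expected to
# *fail* with the declared exception, documenting known-bad inputs next to the
# property-based happy path. Minimal standalone version:
from hypothesis import example, given, strategies as st

@given(st.dictionaries(st.just("key"), st.integers(), min_size=1))
@example({}).xfail(raises=KeyError, reason="an empty dict has no 'key'")
def check_lookup(d: dict):
    d["key"]  # raises KeyError only for the empty-dict example, as declared

check_lookup()
# -------------------------------------------------------------------------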
st.integers(min_value=0, max_value=24), 75 | "approver_renotification_initial_wait_time": st.integers(min_value=0, max_value=60), 76 | "approver_renotification_backoff_multiplier": st.integers(min_value=0, max_value=10), 77 | "max_permissions_duration_time": st.integers(min_value=0, max_value=24), 78 | "secondary_fallback_email_domains": secondary_fallback_email_domains, 79 | "permission_duration_list_override": permission_duration_list_override, 80 | } 81 | ) 82 | 83 | 84 | def valid_config_dict( 85 | statements_as_json: bool = True, 86 | group_statements_as_json: bool = True, 87 | secondary_fallback_email_domains_as_json: bool = True, 88 | permission_duration_list_override_as_json: bool = True, 89 | ): 90 | if statements_as_json: 91 | statements = json.dumps([VALID_STATEMENT_DICT]) 92 | else: 93 | statements = [VALID_STATEMENT_DICT] 94 | 95 | if group_statements_as_json: 96 | group_statements = json.dumps([VALID_GROUP_STATEMENT_DICT]) 97 | else: 98 | group_statements = [VALID_GROUP_STATEMENT_DICT] 99 | 100 | if secondary_fallback_email_domains_as_json: 101 | secondary_fallback_email_domains = json.dumps(["domen.com"]) 102 | else: 103 | secondary_fallback_email_domains = ["domen.com"] 104 | 105 | if permission_duration_list_override_as_json: 106 | permission_duration_list_override = json.dumps(["00:01", "00:15"]) 107 | else: 108 | permission_duration_list_override = ["00:01", "00:15"] 109 | 110 | return { 111 | "schedule_policy_arn": "x", 112 | "revoker_function_arn": "x", 113 | "revoker_function_name": "x", 114 | "schedule_group_name": "x", 115 | "slack_channel_id": "x", 116 | "slack_bot_token": "x", 117 | "sso_instance_arn": "x", 118 | "log_level": "INFO", 119 | "post_update_to_slack": "False", 120 | "send_dm_if_user_not_in_channel": "True", 121 | "statements": statements, 122 | "group_statements": group_statements, 123 | "s3_bucket_for_audit_entry_name": "x", 124 | "s3_bucket_prefix_for_partitions": "x", 125 | "sso_elevator_scheduled_revocation_rule_name": "x", 126 | "request_expiration_hours": "8", 127 | "approver_renotification_initial_wait_time": "15", 128 | "approver_renotification_backoff_multiplier": "2", 129 | "max_permissions_duration_time": "24", 130 | "secondary_fallback_email_domains": secondary_fallback_email_domains, 131 | "permission_duration_list_override": permission_duration_list_override, 132 | } 133 | 134 | 135 | @given(config_dict()) 136 | @example(valid_config_dict()) 137 | @example({}).xfail(raises=ValidationError, reason="Empty dict is not a valid config") 138 | @example(valid_config_dict() | {"post_update_to_slack": "x"}).xfail(raises=ValidationError, reason="Invalid bool") 139 | @example(valid_config_dict() | {"send_dm_if_user_not_in_channel": "x"}).xfail(raises=ValidationError, reason="Invalid bool") 140 | @settings(max_examples=50, suppress_health_check=(HealthCheck.too_slow,)) 141 | def test_config_load_environment_variables(dict_config: dict): 142 | os.environ = dict_config 143 | config.Config() # type: ignore 144 | 145 | 146 | @given( 147 | config_dict( 148 | statements=st.lists(strategies.statement_dict(), max_size=20), 149 | group_statements=st.lists(strategies.group_statement_dict(), max_size=20), 150 | secondary_fallback_email_domains=st.lists(strategies.json_safe_text, max_size=10, min_size=1), 151 | permission_duration_list_override=st.lists(strategies.json_safe_text, max_size=10, min_size=1), 152 | ) 153 | ) 154 | @settings(max_examples=50, suppress_health_check=(HealthCheck.too_slow,)) 155 | @example( 156 | valid_config_dict( 157 | 
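# ---- editor's sketch (illustrative, not repository code): src/tests/test_config.py ----
# What test_config_load_environment_variables above relies on: config.Config (not
# part of this excerpt) presumably behaves like a pydantic settings class, reading
# each field from an identically named environment variable and JSON-decoding
# list-valued fields such as "statements". A stand-in for that decoding step, under
# that assumption:
import json
import os

os.environ["statements"] = json.dumps(
    [{"ResourceType": "Account", "Resource": ["*"], "PermissionSet": "*"}]
)
statements = json.loads(os.environ["statements"])  # what a JSON-aware settings field does
assert statements[0]["Resource"] == ["*"]
# -------------------------------------------------------------------------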
statements_as_json=False, 158 | group_statements_as_json=False, 159 | secondary_fallback_email_domains_as_json=False, 160 | permission_duration_list_override_as_json=False, 161 | ) 162 | ) 163 | @example( 164 | valid_config_dict( 165 | statements_as_json=False, 166 | group_statements_as_json=False, 167 | secondary_fallback_email_domains_as_json=False, 168 | permission_duration_list_override_as_json=False 169 | ) 170 | | {"post_update_to_slack": "x"} 171 | ).xfail(raises=ValidationError, reason="Invalid bool") 172 | @example( 173 | valid_config_dict( 174 | statements_as_json=False, 175 | group_statements_as_json=False, 176 | secondary_fallback_email_domains_as_json=False, 177 | permission_duration_list_override_as_json=False, 178 | ) 179 | | {"send_dm_if_user_not_in_channel": "x"} 180 | ).xfail(raises=ValidationError, reason="Invalid bool") 181 | def test_config_init(dict_config: dict): 182 | config.Config(**dict_config) 183 | -------------------------------------------------------------------------------- /src/tests/utils.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import json 3 | import os 4 | import uuid 5 | from urllib.parse import parse_qsl, urlencode 6 | 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | # ruff: noqa: ANN201, ANN001, ANN204, ANN101 10 | 11 | 12 | def decode_body(raw_body: str) -> dict: 13 | """Decode the body of a Slack request""" 14 | body = base64.b64decode(raw_body).decode("utf-8") 15 | body_dict = dict(parse_qsl(body)) 16 | json_payload = body_dict["payload"] 17 | return json.loads(json_payload) 18 | 19 | 20 | def encode_body(payload: dict) -> str: 21 | """Encode the body of a Slack request""" 22 | json_payload = json.dumps(payload, separators=(",", ":")) 23 | body_dict = {"payload": json_payload} 24 | qls = urlencode(body_dict) 25 | return base64.b64encode(qls.encode("utf-8")).decode("utf-8") 26 | 27 | 28 | def get_lambda_env_vars(lambda_client, function_name: str, qualifier: str = "$LATEST") -> dict: 29 | print(f"Getting environment variables from lambda {function_name}:{qualifier}...") 30 | return lambda_client.get_function_configuration(FunctionName=function_name, Qualifier=qualifier)["Environment"]["Variables"] 31 | 32 | 33 | def update_local_env_vars_from_lambda(lambda_client, function_name: str, qualifier: str = "$LATEST"): 34 | lambda_env_vars = get_lambda_env_vars(lambda_client, function_name, qualifier) 35 | os.environ |= lambda_env_vars 36 | print(f"Local environment variables updated from lambda {function_name}:{qualifier}!") 37 | 38 | 39 | class LambdaTestContext(LambdaContext): 40 | def __init__(self, name: str, version: int = 1, region: str = "us-east-1", account_id: str = "111122223333"): 41 | self._function_name = name 42 | self._function_version = str(version) 43 | self._memory_limit_in_mb = 128 44 | self._invoked_function_arn = f"arn:aws:lambda:{region}:{account_id}:function:{name}:{version}" 45 | self._aws_request_id = str(uuid.uuid4()) 46 | self._log_group_name = f"/aws/lambda/{name}" 47 | self._log_stream_name = str(uuid.uuid4()) 48 | -------------------------------------------------------------------------------- /tests/localstack-backend.tf: -------------------------------------------------------------------------------- 1 | bucket = "tf-state" 2 | key = "terraform.tfstate" 3 | region = "eu-central-1" 4 | dynamodb_table = "tf-state" -------------------------------------------------------------------------------- /tests/localstack.tfvars: 
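# ---- editor's sketch (illustrative, not repository code): src/tests/utils.py ----
# Round-tripping a Slack interaction payload through the encode_body()/decode_body()
# helpers shown above in src/tests/utils.py: Slack posts interactive payloads as a
# form-encoded "payload=<json>" body, which the Lambda receives base64-encoded.
# The payload content below is made up.
import base64
import json
from urllib.parse import parse_qsl, urlencode

payload = {"type": "block_actions", "user": {"id": "U123"}}
raw = base64.b64encode(
    urlencode({"payload": json.dumps(payload, separators=(",", ":"))}).encode("utf-8")
).decode("utf-8")
decoded = json.loads(dict(parse_qsl(base64.b64decode(raw).decode("utf-8")))["payload"])
assert decoded == payload
# -------------------------------------------------------------------------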
-------------------------------------------------------------------------------- 1 | aws_sns_topic_subscription_email = "email@example.com" 2 | slack_signing_secret = "slack_signing_secret" 3 | slack_bot_token = "slack_bot_token" 4 | slack_channel_id = "slack_channel_id" 5 | sso_instance_arn = "sso_instance_arn" 6 | config = [{ 7 | "ResourceType" : "Account", 8 | "Resource" : "account_id", 9 | "PermissionSet" : "*", 10 | "Approvers" : "email@gmail.com", 11 | "AllowSelfApproval" : true, 12 | }] 13 | -------------------------------------------------------------------------------- /vars.tf: -------------------------------------------------------------------------------- 1 | variable "create_api_gateway" { 2 | description = "If true, the module will create & configure API Gateway for the Lambda function" 3 | type = bool 4 | default = true 5 | } 6 | 7 | variable "create_lambda_url" { 8 | description = <<-EOT 9 | If true, the Lambda function will continue to use the Lambda URL, which will be deprecated in the future. 10 | If false, the Lambda URL will be deleted. 11 | EOT 12 | type = bool 13 | default = true 14 | } 15 | 16 | variable "ecr_repo_name" { 17 | description = "The name of the ECR repository." 18 | type = string 19 | default = "aws-sso-elevator" 20 | } 21 | 22 | variable "ecr_repo_tag" { 23 | description = "The tag of the image in the ECR repository." 24 | type = string 25 | default = "3.0.3" 26 | } 27 | 28 | variable "use_pre_created_image" { 29 | description = "If true, the image will be pulled from the ECR repository. If false, the image will be built using Docker from the source code." 30 | type = bool 31 | default = true 32 | } 33 | 34 | variable "ecr_owner_account_id" { 35 | description = "In what account is the ECR repository located." 36 | type = string 37 | default = "222341826240" 38 | } 39 | 40 | variable "tags" { 41 | description = "A map of tags to assign to resources." 42 | type = map(string) 43 | default = {} 44 | } 45 | 46 | variable "aws_sns_topic_subscription_email" { 47 | description = "value for the email address to subscribe to the SNS topic" 48 | type = string 49 | default = "" 50 | } 51 | 52 | variable "slack_signing_secret" { 53 | description = "value for the Slack signing secret" 54 | type = string 55 | } 56 | 57 | variable "slack_bot_token" { 58 | description = "value for the Slack bot token" 59 | type = string 60 | } 61 | 62 | variable "log_level" { 63 | description = "value for the log level" 64 | type = string 65 | default = "INFO" 66 | } 67 | 68 | variable "slack_channel_id" { 69 | description = "value for the Slack channel ID" 70 | type = string 71 | } 72 | 73 | variable "schedule_expression" { 74 | description = "revocation schedule expression (will revoke all user-level assignments unknown to the Elevator)" 75 | type = string 76 | default = "cron(0 23 * * ? 
*)" 77 | } 78 | 79 | variable "schedule_expression_for_check_on_inconsistency" { 80 | description = "how often revoker should check for inconsistency (warn if found unknown user-level assignments)" 81 | type = string 82 | default = "rate(2 hours)" 83 | } 84 | 85 | variable "sso_instance_arn" { 86 | description = "value for the SSO instance ARN" 87 | type = string 88 | default = "" 89 | } 90 | 91 | variable "config" { 92 | description = "value for the SSO Elevator config" 93 | type = any 94 | default = [] 95 | } 96 | 97 | variable "group_config" { 98 | description = "value for the SSO Elevator group config" 99 | type = any 100 | default = [] 101 | } 102 | 103 | variable "revoker_lambda_name" { 104 | description = "value for the revoker lambda name" 105 | type = string 106 | default = "access-revoker" 107 | } 108 | 109 | variable "requester_lambda_name" { 110 | description = "value for the requester lambda name" 111 | type = string 112 | default = "access-requester" 113 | } 114 | 115 | variable "event_brige_check_on_inconsistency_rule_name" { 116 | description = "value for the event bridge check on inconsistency rule name" 117 | type = string 118 | default = "sso-elevator-check-on-inconsistency" 119 | } 120 | 121 | variable "event_brige_scheduled_revocation_rule_name" { 122 | description = "value for the event bridge scheduled revocation rule name" 123 | type = string 124 | default = "sso-elevator-scheduled-revocation" 125 | } 126 | 127 | variable "schedule_group_name" { 128 | description = "value for the schedule group name" 129 | type = string 130 | default = "sso-elevator-scheduled-revocation" 131 | } 132 | 133 | variable "schedule_role_name" { 134 | description = "value for the schedule role name" 135 | type = string 136 | default = "sso-elevator-event-bridge-role" 137 | } 138 | 139 | variable "revoker_post_update_to_slack" { 140 | description = "Should revoker send a confirmation of the revocation to Slack?" 141 | type = bool 142 | default = true 143 | } 144 | 145 | variable "s3_bucket_name_for_audit_entry" { 146 | description = <