├── .gitattributes ├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── DynamoDB.png ├── LICENSE ├── README.md ├── cdk.context.json ├── cdk.json ├── deployment ├── app.py ├── cdkpipelines_app.py └── enterprise_sso │ ├── __init__.py │ ├── enterprise_aws_sso_cdkpipelines.py │ ├── enterprise_aws_sso_management_stack.py │ ├── enterprise_aws_sso_management_stage.py │ ├── enterprise_aws_sso_stack.py │ └── enterprise_aws_sso_stage.py ├── dev-requirements.in ├── dev-requirements.txt ├── initial-deploy-requirements.in ├── initial-deploy-requirements.txt ├── initial_deployment.py ├── pyproject.toml ├── requirements.in ├── requirements.txt ├── src ├── functions │ ├── assignment_db_handler │ │ ├── __init__.py │ │ ├── event_structure.jsonc │ │ └── index.py │ ├── assignment_definition_handler │ │ ├── __init__.py │ │ ├── account_operations.py │ │ ├── assignments_operations.py │ │ ├── config.py │ │ ├── index.py │ │ ├── permissionset_operations.py │ │ ├── processing.py │ │ └── sqs.py │ ├── assignment_execution_handler │ │ ├── __init__.py │ │ ├── index.py │ │ └── test │ │ │ ├── __init__.py │ │ │ ├── payloads.py │ │ │ └── test_assignment_execution_handler.py │ └── service_event_handler │ │ ├── __init__.py │ │ ├── awssso_events.py │ │ ├── index.py │ │ └── organizations_events.py └── layers │ ├── __init__.py │ ├── common │ ├── __init__.py │ ├── encoder.py │ ├── error.py │ ├── requirements.in │ └── requirements.txt │ ├── orgz │ ├── __init__.py │ ├── handler.py │ ├── requirements.in │ ├── requirements.txt │ └── test │ │ ├── __init__.py │ │ └── test_org_handler.py │ └── sso │ ├── __init__.py │ ├── handler.py │ ├── requirements.in │ ├── requirements.txt │ └── test │ ├── __init__.py │ └── test_sso_handler.py └── sso_assignments.png /.gitattributes: -------------------------------------------------------------------------------- 1 | # Declare files that will always have LF line endings on checkout 2 | * text eol=LF 3 | *.png binary 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # VS Code 7 | .vscode 8 | 9 | # from CDK 10 | *.swp 11 | package-lock.json 12 | .pytest_cache 13 | .env 14 | *.egg-info 15 | 16 | # CDK asset staging directory 17 | .cdk.staging 18 | cdk.out 19 | 20 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 |
10 | ## Reporting Bugs/Feature Requests
11 |
12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 |
14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
16 |
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 |
22 |
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 |
26 | 1. You are working against the latest source on the *main* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 |
30 | To send us a pull request, please:
31 |
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure local tests pass.
35 | 4. Commit to your fork using clear commit messages.
36 | 5. Send us a pull request, answering any default questions in the pull request interface.
37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
38 |
39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
41 |
42 |
43 | ## Finding contributions to work on
44 | Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.
45 |
46 |
47 | ## Code of Conduct
48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
50 | opensource-codeofconduct@amazon.com with any additional questions or comments.
51 |
52 |
53 | ## Security issue notifications
54 | If you discover a potential security issue in this project, we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
55 |
56 |
57 | ## Licensing
58 |
59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
60 |
--------------------------------------------------------------------------------
/DynamoDB.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/assignment-automation-4-aws-sso/4a9c187798f7bf07968da58474c94c3aa155e067/DynamoDB.png
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of
4 | this software and associated documentation files (the "Software"), to deal in
5 | the Software without restriction, including without limitation the rights to
6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
7 | the Software, and to permit persons to whom the Software is furnished to do so.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
15 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Assignment Automation for AWS Identity Center
2 |
3 | ## Architecture Overview
4 |
5 | ![architecture](sso_assignments.png)
6 |
7 | ## Installation prerequisites
8 |
9 | To install the provided module, the following prerequisites need to be satisfied:
10 |
11 | - AWS CDK v2 CLI -
12 | - TLDR: `npm install -g aws-cdk`
13 | - Docker -
14 | - requirements from the `requirements.txt` file installed into your Python environment
15 | - At least 2 accounts:
16 | - AWS Organizations Management account
17 | - AWS account for Deployment and Execution
18 | - Deployment and Execution can be separate AWS accounts if required.
19 |
20 | ## Deployment notes
21 |
22 | 1. Modify cdk.context.json with the appropriate account information and variables, then commit the changes. At a bare minimum, the following 4 parameters need to be changed:
23 | - *enterprise_sso_management_account_id*: AWS Account Id of the AWS Organization Management Account
24 | - *enterprise_sso_exec_account_id*: AWS Account Id where the application will be running. Should NOT be the same as the AWS Organization management account.
25 | - *enterprise_sso_deployment_account_id*: AWS Account Id that will have the AWS CodePipeline pipeline deployed to. Can be the same as *enterprise_sso_exec_account_id*
26 | - *error_notifications_email*: Notification email for error messages
27 |
28 | **Make sure to commit these changes to the local repository, or they will not propagate to AWS CodeCommit when using initial_deployment.py**
29 |
30 | 1. Set the `AWS_DEFAULT_REGION` environment variable to the desired value
31 | 1. Bootstrap all AWS accounts using the new bootstrap style. More information [here](https://docs.aws.amazon.com/cdk/api/latest/docs/pipelines-readme.html#cdk-environment-bootstrapping) (you can skip *--profile* if you are using ENV variables for providing AWS access credentials).
You can deploy this solution to multiple regions, or bootstrap only `us-east-1` and deploy everything to that single region. The EventBridge configuration and the supporting pipeline stacks related to AWS Organizations and AWS Identity Center will always be deployed to the `us-east-1` region, which therefore requires a bootstrap in that region.
32 |
33 | 1. Bootstrap the deployment account (`us-east-1`):
34 |
35 | ```sh
36 | env CDK_NEW_BOOTSTRAP=1 cdk bootstrap \
37 | --profile deployment_profile \
38 | --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \
39 | aws://111111111111/us-east-1
40 | ```
41 |
42 | 1. If deploying to another region (set in the `AWS_DEFAULT_REGION` variable), bootstrap the deployment account for the additional region:
43 |
44 | ```sh
45 | env CDK_NEW_BOOTSTRAP=1 cdk bootstrap \
46 | --profile deployment_profile \
47 | --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \
48 | aws://111111111111/$AWS_DEFAULT_REGION
49 | ```
50 |
51 | 1. Bootstrap the management account:
52 |
53 | ```sh
54 | env CDK_NEW_BOOTSTRAP=1 cdk bootstrap \
55 | --profile management_profile \
56 | --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \
57 | --trust 111111111111 \
58 | aws://222222222222/us-east-1
59 | ```
60 |
61 | 1. If deploying to another region (set in the `AWS_DEFAULT_REGION` variable), bootstrap the management account for the additional region:
62 |
63 | ```sh
64 | env CDK_NEW_BOOTSTRAP=1 cdk bootstrap \
65 | --profile management_profile \
66 | --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \
67 | --trust 111111111111 \
68 | aws://222222222222/$AWS_DEFAULT_REGION
69 | ```
70 |
71 | 1. Bootstrap the IAM account (can be skipped if using the 2-account model):
72 |
73 | ```sh
74 |
75 | env CDK_NEW_BOOTSTRAP=1 cdk bootstrap \
76 | --profile iam_profile \
77 | --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \
78 | --trust 111111111111 \
79 | aws://333333333333/$AWS_DEFAULT_REGION
80 | ```
81 |
82 | 1. Set up environment variables for accessing the deployment AWS account.
83 | 1. For an initial deployment, the initial_deployment.py script can be used, which creates a CodeCommit repository and pushes the code using settings from `cdk.context.json`.
84 | 1. Install requirements from `initial-deploy-requirements.txt`
85 | 1. Make sure changes to `cdk.context.json` are committed to the local repository.
86 | 1. Execute `python3 initial_deployment.py`
87 | 1. The `--no-history` flag can be used if you do not want to preserve git history.
88 | 1. Once the repository exists and the code is pushed to it, execute `cdk deploy EnterpriseAWSSSOPipelineStack`. For all further changes, the newly created pipeline will be triggered by commits to the `main` branch.
89 | 1. Now all the manual deployment steps have been completed. AWS CodePipeline will deploy everything else automatically. You can track the progress from the deployment AWS Account by opening the AWS CodePipeline console.
90 |
91 | ## Usage
92 |
93 | ### Delegated Administrator
94 |
95 | If the solution is deployed to an AWS Account that is registered as a delegated administrator for AWS Identity Center, it will attempt to leverage that and manage the permission sets from the delegated administrator account. For permissions that are assigned to the management account, the solution will assume a role into the management account and execute the permission assignment there.
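As a rough illustration of that assume-role flow, the sketch below shows how an assignment targeting the management account could be executed with boto3. This is not the solution's actual code (the real logic lives in `src/functions/assignment_execution_handler`, which is granted `sts:AssumeRole` on the `assignment-management-role` created by the management stack and receives it via the `SSO_ADMIN_ROLE_ARN` environment variable); the role ARN, instance ARN, and principal/permission set identifiers below are placeholders.

```python
# Illustrative sketch only: all ARNs and IDs below are placeholders.
import boto3


def assign_in_management_account(management_role_arn: str) -> None:
    # Assume the management-account role provisioned for assignment execution.
    sts = boto3.client("sts")
    creds = sts.assume_role(
        RoleArn=management_role_arn,
        RoleSessionName="enterprise-aws-sso-assignment",
    )["Credentials"]

    # Use the temporary credentials to call Identity Center (sso-admin)
    # in the management account.
    sso_admin = boto3.client(
        "sso-admin",
        aws_access_key_id=creds["AccessKeyId"],
        aws_secret_access_key=creds["SecretAccessKey"],
        aws_session_token=creds["SessionToken"],
    )

    sso_admin.create_account_assignment(
        InstanceArn="arn:aws:sso:::instance/ssoins-EXAMPLE",
        TargetId="111111111111",  # management account id (placeholder)
        TargetType="AWS_ACCOUNT",
        PermissionSetArn="arn:aws:sso:::permissionSet/ssoins-EXAMPLE/ps-EXAMPLE",
        PrincipalType="GROUP",  # or "USER"
        PrincipalId="example-identity-store-group-id",
    )
```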
96 |
97 | Since permission sets that are assigned to the management account can then only be managed by the management account (see [here](https://docs.aws.amazon.com/singlesignon/latest/userguide/delegated-admin.html#delegated-admin-tasks-member-account) for details), take care when using root assignments: once a permission set has been assigned to the management account, it is no longer manageable from the delegated administrator account.
98 |
99 | This solution is event driven, utilizing a custom AWS EventBridge EventBus in the IAM account (the name of the bus is defined in the `target_event_bus_name` context variable in `cdk.context.json`). The following events are supported:
100 |
101 | ### Create/Remove AWS Identity Center records
102 |
103 | In order to manipulate AWS Identity Center assignments, the following event structure is used:
104 |
105 | ```json
106 | {
107 | "source": "permissionEventSource",
108 | "detail": {
109 | "permissions": [
110 | {
111 | "ActionType": "Add", //Possible values "Add" or "Remove"
112 | "PermissionFor": "OrganizationalUnit", //Possible values "OrganizationalUnit"|"Account"|"Tag"|"Root"
113 | "OrganizationalUnitName": "OU_Name",
114 | "AccountNumber": 30010047,
115 | "Tag": "key=value",
116 | "GroupName": "GroupX",
117 | "UserName": "User Name",
118 | "PermissionSetName": "AWSReadOnlyAccess"
119 | }
120 | ]
121 | }
122 | }
123 | ```
124 |
125 | Different fields are used based on the type of user entity (user or group) and the permission abstraction.
126 | Examples:
127 |
128 | #### Add record for user and a single account
129 |
130 | ```json
131 | {
132 |
133 | "source": "permissionEventSource",
134 | "detail": {
135 | "permissions": [
136 | {
137 | "ActionType": "Add",
138 | "PermissionFor": "Account",
139 | "AccountNumber": 1234567890123,
140 | "UserName": "User Name",
141 | "PermissionSetName": "AWSReadOnlyAccess"
142 | }
143 | ]
144 | }
145 |
146 | }
147 | ```
148 |
149 | #### Add record for Organizational Unit and group. It's important to use the OU name and not the ID
150 |
151 | ```json
152 | {
153 |
154 | "source": "permissionEventSource",
155 | "detail": {
156 | "permissions": [
157 | {
158 | "ActionType": "Add",
159 | "PermissionFor": "OrganizationalUnit",
160 | "OrganizationalUnitName": "OU_Name",
161 | "GroupName": "GroupX",
162 | "PermissionSetName": "AWSReadOnlyAccess"
163 | }
164 | ]
165 | }
166 |
167 | }
168 | ```
169 |
170 | #### Remove record for Tag and group
171 |
172 | ```json
173 | {
174 |
175 | "source": "permissionEventSource",
176 | "detail": {
177 | "permissions": [
178 | {
179 | "ActionType": "Remove",
180 | "PermissionFor": "Tag",
181 | "Tag": "key=value",
182 | "GroupName": "GroupX",
183 | "PermissionSetName": "AWSReadOnlyAccess"
184 | }
185 | ]
186 | }
187 |
188 | }
189 | ```
190 |
191 | The events above create records in DynamoDB and trigger the corresponding action in AWS Identity Center.
192 | DynamoDB acts as the single source of truth for any subsequent actions. Having these records in DynamoDB allows automatic assignment/removal of AWS Identity Center permission sets when moving accounts between OUs, as well as when creating new accounts in an OU.
193 |
194 | ### DB Records example
195 |
196 | ![architecture](DynamoDB.png)
197 |
198 | ## Limitations
199 |
200 | 1. In its current state, the solution does not support permission inheritance for nested OUs
201 | 1. Testing is currently limited
202 | 1.
Support for ResourceTagged AWS Organizations events is removed for now due to multiple processing options
203 |
204 | ## Testing prerequisites
205 |
206 | Python tests are executed using the pytest package.
207 | Make sure that the folder `./src/layers/` is added to the `PYTHONPATH` variable, as the tests depend on the code defined in the lambda layers.
208 |
209 | To execute the tests, pass the test path to pytest:
210 |
211 | ```bash
212 | python3 -m pytest -v src
213 | ```
214 |
215 | This will load mock classes from the layers folder and run the complete test suite.
216 |
217 | ## Additional Info
218 |
219 | This project is best executed from a virtualenv.
220 |
221 | To manually create a virtualenv on MacOS and Linux:
222 |
223 | ```bash
224 | python3 -m venv .venv
225 | ```
226 |
227 | After the init process completes and the virtualenv is created, you can use the following
228 | step to activate your virtualenv.
229 |
230 | ```bash
231 | source .venv/bin/activate
232 | ```
233 |
234 | If you are on a Windows platform, you would activate the virtualenv like this:
235 |
236 | ```cmd
237 | % .venv\Scripts\activate.bat
238 | ```
239 |
240 | Once the virtualenv is activated, you can install the required dependencies.
241 |
242 | ```bash
243 | pip install -r requirements.txt
244 | ```
245 |
246 | At this point you can now synthesize the CloudFormation template for this code.
247 |
248 | ```bash
249 | cdk synth
250 | ```
251 |
252 | ## Useful commands
253 |
254 | - `cdk ls` list all stacks in the app
255 | - `cdk synth` emits the synthesized CloudFormation template
256 | - `cdk deploy` deploy this stack to your default AWS account/region
257 | - `cdk diff` compare deployed stack with current state
258 | - `cdk docs` open CDK documentation
259 |
260 | ## Security
261 |
262 | See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information.
263 |
264 | ## License
265 |
266 | This library is licensed under the MIT-0 License. See the LICENSE file.
267 | -------------------------------------------------------------------------------- /cdk.context.json: -------------------------------------------------------------------------------- 1 | { 2 | "enterprise_sso": { 3 | "enterprise_sso_management_account_id": "123456789012", 4 | "enterprise_sso_exec_account_id": "123456789012", 5 | "enterprise_sso_deployment_account_id": "123456789012", 6 | "enterprise_sso_management_read_only_role": "assignment-management-read-only-role", 7 | "enterprise_sso_management_role": "assignment-management-role", 8 | "target_event_bus_name": "enterprise-aws-sso", 9 | "target_event_bus_region": "eu-central-1", 10 | "assignment_definition_table_partition_key": "mappingId", 11 | "assignment_definition_table_sort_key": "mappingValue", 12 | "assignment_processing_queue_delivery_delay_seconds": 30, 13 | "assignment_processing_queue_visibility_timeout_seconds": 300, 14 | "assignment_defenition_handler_timeout_seconds": 300, 15 | "assignment_execution_handler_timeout_seconds": 300, 16 | "error_notifications_email": "email@example.com", 17 | "codecommit_repository_name": "enterprise-aws-sso", 18 | "codecommit_repository_branch_name": "main" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "python3 deployment/cdkpipelines_app.py", 3 | "context": { 4 | "@aws-cdk/core:newStyleStackSynthesis": "true" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /deployment/app.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | import sys 4 | 5 | from aws_cdk import App, Environment 6 | 7 | from enterprise_sso.enterprise_aws_sso_stack import EnterpriseAwsSsoExecStack 8 | from enterprise_sso.enterprise_aws_sso_management_stack import ( 9 | EnterpriseAwsSsoManagementStack, 10 | ) 11 | 12 | app = App() 13 | 14 | region = os.environ.get("AWS_DEFAULT_REGION", os.environ.get("AWS_REGION")) 15 | if not region: 16 | print("Please set AWS_DEFAULT_REGION or AWS_REGION") 17 | sys.exit(1) 18 | 19 | full_deployment = True if region == "us-east-1" else False 20 | 21 | 22 | enterprise_sso = EnterpriseAwsSsoExecStack(app, "AssignmentManagementIAM") 23 | 24 | enterprise_sso_management = EnterpriseAwsSsoManagementStack( 25 | app, "AssignmentManagementRoot", full_deployment=full_deployment 26 | ) 27 | 28 | if region != "us-east-1": 29 | enterprise_sso_management = EnterpriseAwsSsoManagementStack( 30 | app, 31 | "AssignmentManagementRootUsEast1", 32 | full_deployment=full_deployment, 33 | env=Environment(region="us-east-1"), 34 | ) 35 | 36 | app.synth() 37 | -------------------------------------------------------------------------------- /deployment/cdkpipelines_app.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | import sys 4 | 5 | from aws_cdk import App, Environment 6 | 7 | from enterprise_sso.enterprise_aws_sso_cdkpipelines import EnterpriseSSOPipelineStack 8 | 9 | app = App() 10 | 11 | context: dict = app.node.try_get_context("enterprise_sso") 12 | deployment_account_id: str = context.get("enterprise_sso_deployment_account_id") 13 | region = os.environ.get("AWS_DEFAULT_REGION", os.environ.get("AWS_REGION")) 14 | 15 | if not region: 16 | print("Please set AWS_DEFAULT_REGION or AWS_REGION") 17 | sys.exit(1) 18 | 19 | 20 | 
EnterpriseSSOPipelineStack( 21 | app, 22 | "EnterpriseAWSSSOPipelineStack", 23 | env=Environment(account=deployment_account_id, region=region), 24 | ) 25 | 26 | app.synth() 27 | -------------------------------------------------------------------------------- /deployment/enterprise_sso/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/assignment-automation-4-aws-sso/4a9c187798f7bf07968da58474c94c3aa155e067/deployment/enterprise_sso/__init__.py -------------------------------------------------------------------------------- /deployment/enterprise_sso/enterprise_aws_sso_cdkpipelines.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import Stack 2 | from aws_cdk import aws_codebuild as codebuild 3 | from aws_cdk import aws_codecommit as codecommit 4 | from aws_cdk.pipelines import CodePipeline, CodePipelineSource, CodeBuildStep 5 | from constructs import Construct 6 | from enterprise_sso.enterprise_aws_sso_management_stage import EnterpriseAwsSsoManagementStage 7 | from enterprise_sso.enterprise_aws_sso_stage import EnterpriseAwsSsoExecStage 8 | 9 | 10 | class EnterpriseSSOPipelineStack(Stack): 11 | def __init__(self, scope: Construct, id: str, **kwargs): 12 | super().__init__(scope, id, **kwargs) 13 | 14 | context: dict = self.node.try_get_context("enterprise_sso") 15 | 16 | management_account_id: str = context.get("enterprise_sso_management_account_id") 17 | sso_exec_account_id: str = context.get("enterprise_sso_exec_account_id") 18 | codecommit_repository_name: str = context.get("codecommit_repository_name") 19 | codecommit_repository_branch_name: str = context.get("codecommit_repository_branch_name") 20 | 21 | codecommit_repository = codecommit.Repository.from_repository_name( 22 | self, "enterprisessorepo", codecommit_repository_name 23 | ) 24 | 25 | pipeline = CodePipeline( 26 | self, 27 | "EnterpriseSSOCDKPipeline", 28 | pipeline_name="EnterpriseSSOCDKPipeline", 29 | cross_account_keys=True, 30 | synth=CodeBuildStep( 31 | "Synth", 32 | input=CodePipelineSource.code_commit( 33 | codecommit_repository, codecommit_repository_branch_name 34 | ), 35 | partial_build_spec=codebuild.BuildSpec.from_object( 36 | { 37 | "version": "0.2", 38 | "phases": {"install": {"runtime-versions": {"python": "3.12"}}}, 39 | } 40 | ), 41 | build_environment=codebuild.BuildEnvironment(privileged=True), 42 | commands=[ 43 | "npm install -g aws-cdk", 44 | "python -m pip install -r requirements.txt", 45 | "cdk synth", 46 | ], 47 | ), 48 | ) 49 | 50 | full_deployment = True if self.region == "us-east-1" else False 51 | 52 | # AWS SSO Exec Stage 53 | pipeline.add_stage( 54 | EnterpriseAwsSsoExecStage( 55 | self, 56 | "EnterpriseAWSSSOExec", 57 | env={ 58 | "account": sso_exec_account_id, 59 | "region": self.region, 60 | }, 61 | ) 62 | ) 63 | 64 | # AWS SSO Management Stage 65 | pipeline.add_stage( 66 | EnterpriseAwsSsoManagementStage( 67 | self, 68 | "EnterpriseAWSSSOManagemenent", 69 | full_deployment=full_deployment, # full_deployment parameter 70 | env={ 71 | "account": management_account_id, 72 | "region": self.region, 73 | }, 74 | ) 75 | ) 76 | 77 | if self.region != "us-east-1": 78 | pipeline.add_stage( 79 | EnterpriseAwsSsoManagementStage( 80 | self, 81 | "EnterpriseAWSSSOManagemenentUsEast1", 82 | False, # full_deployment parameter 83 | env={ 84 | "account": management_account_id, 85 | "region": "us-east-1", 86 | }, 87 | ) 88 | ) 89 | 
-------------------------------------------------------------------------------- /deployment/enterprise_sso/enterprise_aws_sso_management_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import Stack 2 | from aws_cdk import aws_events as events 3 | from aws_cdk import aws_events_targets as targets 4 | from aws_cdk import aws_iam as iam 5 | from constructs import Construct 6 | 7 | 8 | class EnterpriseAwsSsoManagementStack(Stack): 9 | def __init__( 10 | self, scope: Construct, construct_id: str, full_deployment: bool, **kwargs 11 | ) -> None: 12 | super().__init__(scope, construct_id, **kwargs) 13 | 14 | ## Getting parameters and setting defaults 15 | context: dict = self.node.try_get_context("enterprise_sso") 16 | 17 | enterprise_sso_exec_account_id: str = context.get("enterprise_sso_exec_account_id") 18 | enterprise_sso_read_only_role: str = context.get( 19 | "enterprise_sso_management_read_only_role", 20 | "assignment-management-read-only-role", 21 | ) 22 | enterprise_sso_assignment_management_role: str = context.get( 23 | "enterprise_sso_management_role", "assignment-management-role" 24 | ) 25 | target_event_bus_name: str = context.get("target_event_bus_name", "enterprise-aws-sso") 26 | target_event_bus_region: str = context.get("target_event_bus_region", self.region) 27 | 28 | if full_deployment or (self.region != "us-east-1" and not full_deployment): 29 | ### Permission management ### 30 | 31 | ## Read only role for assignment definition handler function ## 32 | identitystore_policy = iam.PolicyDocument( 33 | statements=[ 34 | iam.PolicyStatement( 35 | actions=[ 36 | "identitystore:DescribeUser", 37 | "identitystore:ListUsers", 38 | "identitystore:DescribeGroup", 39 | "identitystore:ListGroups", 40 | ], 41 | effect=iam.Effect.ALLOW, 42 | resources=["*"], 43 | ) 44 | ] 45 | ) 46 | 47 | organizations_policy = iam.PolicyDocument( 48 | statements=[ 49 | iam.PolicyStatement( 50 | actions=["organizations:DescribeOrganizationalUnit", "tag:GetResources"], 51 | effect=iam.Effect.ALLOW, 52 | resources=["*"], 53 | ) 54 | ] 55 | ) 56 | 57 | self.management_enterprise_sso_read_only_role = iam.Role( 58 | self, 59 | "EnterpriseAWSSSOReadOnlyRole", 60 | role_name=enterprise_sso_read_only_role, 61 | inline_policies={ 62 | "identitystore-readonly": identitystore_policy, 63 | "organizations-readonly": organizations_policy, 64 | }, 65 | managed_policies=[iam.ManagedPolicy.from_aws_managed_policy_name("AWSSSOReadOnly")], 66 | assumed_by=iam.ArnPrincipal( 67 | f"arn:aws:iam::{enterprise_sso_exec_account_id}:role/{enterprise_sso_read_only_role}" 68 | ), 69 | ) 70 | 71 | ## Assignment management role for assignment execution function ## 72 | assignment_management_policy = iam.PolicyDocument( 73 | statements=[ 74 | iam.PolicyStatement( 75 | actions=[ 76 | "sso:CreateAccountAssignment", 77 | "sso:ListPermissionSetsProvisionedToAccount", 78 | "sso:ListInstances", 79 | "sso:DeleteAccountAssignment", 80 | ], 81 | effect=iam.Effect.ALLOW, 82 | resources=["*"], 83 | ), 84 | iam.PolicyStatement( 85 | sid="ListDelegatedAdministrators", 86 | actions=[ 87 | "organizations:ListDelegatedAdministrators", 88 | ], 89 | effect=iam.Effect.ALLOW, 90 | resources=["*"], 91 | ), 92 | iam.PolicyStatement( 93 | sid="IAMListPermissions", 94 | actions=["iam:ListRoles", "iam:ListPolicies"], 95 | effect=iam.Effect.ALLOW, 96 | resources=["*"], 97 | ), 98 | iam.PolicyStatement( 99 | sid="AccessToSSOProvisionedRoles", 100 | actions=[ 101 | "iam:AttachRolePolicy", 102 | 
"iam:CreateRole", 103 | "iam:DeleteRole", 104 | "iam:DeleteRolePolicy", 105 | "iam:DetachRolePolicy", 106 | "iam:GetRole", 107 | "iam:ListAttachedRolePolicies", 108 | "iam:ListRolePolicies", 109 | "iam:PutRolePolicy", 110 | "iam:UpdateRole", 111 | "iam:UpdateRoleDescription", 112 | ], 113 | effect=iam.Effect.ALLOW, 114 | resources=["arn:aws:iam::*:role/aws-reserved/sso.amazonaws.com/*"], 115 | ), 116 | iam.PolicyStatement( 117 | actions=["iam:GetSAMLProvider"], 118 | effect=iam.Effect.ALLOW, 119 | resources=["arn:aws:iam::*:saml-provider/AWSSSO_*_DO_NOT_DELETE"], 120 | ), 121 | ] 122 | ) 123 | 124 | self.management_enterprise_sso_assignment_management_role = iam.Role( 125 | self, 126 | "EnterpriseAWSSSOAssignmentManagementRole", 127 | role_name=enterprise_sso_assignment_management_role, 128 | inline_policies={ 129 | "enterprise-sso-assignment-management": assignment_management_policy 130 | }, 131 | assumed_by=iam.ArnPrincipal( 132 | f"arn:aws:iam::{enterprise_sso_exec_account_id}:role/{enterprise_sso_assignment_management_role}" 133 | ), 134 | ) 135 | 136 | if (not full_deployment and self.region == "us-east-1") or full_deployment: 137 | ### Event bus configuration ### 138 | enterprise_sso_eventbus = events.EventBus.from_event_bus_arn( 139 | self, 140 | "enterpriseawsssoEventbus", 141 | event_bus_arn=f"arn:aws:events:{target_event_bus_region}:{enterprise_sso_exec_account_id}:event-bus/{target_event_bus_name}", 142 | ) 143 | 144 | self.organizations_events_forwarding_rule = events.Rule( 145 | self, 146 | "OrganizationsEventsRule", 147 | description="Forward Organizations events to enterprise-aws-sso", 148 | event_pattern=events.EventPattern( 149 | detail={ 150 | "eventName": [ 151 | "CreateAccountResult", 152 | "MoveAccount", 153 | "TagResource", 154 | "UntagResource", 155 | ] 156 | }, 157 | detail_type=["AWS Service Event via CloudTrail", "AWS API Call via CloudTrail"], 158 | source=["aws.organizations"], 159 | ), 160 | targets=[targets.EventBus(enterprise_sso_eventbus)], 161 | ) 162 | 163 | # AWS SSO Events 164 | self.sso_events_forwarding_rule = events.Rule( 165 | self, 166 | "AWSSSOEventsRule", 167 | description="Forward CT events to enterprise-aws-sso", 168 | event_pattern=events.EventPattern( 169 | detail={"eventName": ["CreatePermissionSet", "DeletePermissionSet"]}, 170 | detail_type=["AWS Service Event via CloudTrail", "AWS API Call via CloudTrail"], 171 | source=["aws.sso"], 172 | ), 173 | targets=[targets.EventBus(enterprise_sso_eventbus)], 174 | ) 175 | -------------------------------------------------------------------------------- /deployment/enterprise_sso/enterprise_aws_sso_management_stage.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import Stage 2 | from constructs import Construct 3 | from enterprise_sso.enterprise_aws_sso_management_stack import EnterpriseAwsSsoManagementStack 4 | 5 | 6 | class EnterpriseAwsSsoManagementStage(Stage): 7 | def __init__( 8 | self, scope: Construct, construct_id: str, full_deployment: bool, **kwargs 9 | ) -> None: 10 | super().__init__(scope, construct_id, **kwargs) 11 | 12 | enterprise_aws_sso_management = EnterpriseAwsSsoManagementStack( 13 | self, "EnterpriseAWSSSOManagement", full_deployment=full_deployment 14 | ) 15 | -------------------------------------------------------------------------------- /deployment/enterprise_sso/enterprise_aws_sso_stack.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import 
subprocess 4 | import sys 5 | from pathlib import Path 6 | from typing import List, Mapping 7 | 8 | import jsii 9 | from aws_cdk import BundlingOptions, Duration, ILocalBundling, RemovalPolicy, Stack 10 | from aws_cdk import aws_dynamodb as ddb 11 | from aws_cdk import aws_events as events 12 | from aws_cdk import aws_events_targets as event_targets 13 | from aws_cdk import aws_iam as iam 14 | from aws_cdk import aws_lambda as _lambda 15 | from aws_cdk import aws_lambda_event_sources as lambda_event_sources 16 | from aws_cdk import aws_sns as sns 17 | from aws_cdk import aws_sns_subscriptions as sns_sub 18 | from aws_cdk import aws_sqs as sqs 19 | from constructs import Construct 20 | 21 | 22 | class EnterpriseAwsSsoExecStack(Stack): 23 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: 24 | super().__init__(scope, construct_id, **kwargs) 25 | 26 | context: dict = self.node.try_get_context("enterprise_sso") 27 | 28 | management_account_id: str = context.get("enterprise_sso_management_account_id") 29 | sso_exec_account_id: str = context.get("enterprise_sso_exec_account_id") 30 | deployment_account_id: str = context.get("enterprise_sso_deployment_account_id") 31 | error_notification_email: str = context.get("error_notifications_email") 32 | sso_management_read_only_role: str = context.get( 33 | "enterprise_sso_management_read_only_role", 34 | "assignment-management-read-only-role", 35 | ) 36 | sso_management_role: str = context.get( 37 | "enterprise_sso_management_role", "assignment-management-role" 38 | ) 39 | target_event_bus_name: str = context.get("target_event_bus_name", "enterprise-aws-sso") 40 | sqs_delivery_delay_seconds: int = context.get( 41 | "assignment_processing_queue_delivery_delay_seconds", 30 42 | ) 43 | sqs_visibility_timeout_seconds: int = context.get( 44 | "assignment_processing_queue_visibility_timeout_seconds", 300 45 | ) 46 | lambda_defenition_handler_timeout_seconds: int = context.get( 47 | "assignment_defenition_handler_timeout_seconds", 300 48 | ) 49 | lambda_execution_handler_timeout_seconds: int = context.get( 50 | "assignment_execution_handler_timeout_seconds", 300 51 | ) 52 | assignment_processing_queue_name: str = context.get( 53 | "assignment_processing_queue_name", "assignment-processing-queue" 54 | ) 55 | assignment_defenition_table_name: str = context.get( 56 | "assignment_defenition_table_name", "permission-assignments-table" 57 | ) 58 | assignment_definition_table_partition_key: str = context.get( 59 | "assignment_definition_table_partition_key", "mappingId" 60 | ) 61 | assignment_definition_table_sort_key: str = context.get( 62 | "assignment_definition_table_sort_key", "mappingValue" 63 | ) 64 | 65 | lambda_runtime = _lambda.Runtime.PYTHON_3_12 66 | 67 | ## Event bus configuration 68 | self.ct_event_bus = events.EventBus( 69 | self, 70 | "CTEventBus", 71 | event_bus_name=target_event_bus_name, 72 | ) 73 | events.CfnEventBusPolicy( 74 | self, 75 | "CTEventBusPolicy", 76 | statement_id="allow-management-account", 77 | action="events:PutEvents", 78 | event_bus_name=self.ct_event_bus.event_bus_name, 79 | principal=management_account_id, 80 | ) 81 | 82 | events.CfnEventBusPolicy( 83 | self, 84 | "CTEventBusPolicyDeployment", 85 | statement_id="allow-deployment-account", 86 | action="events:PutEvents", 87 | event_bus_name=self.ct_event_bus.event_bus_name, 88 | principal=deployment_account_id, 89 | ) 90 | 91 | events.CfnEventBusPolicy( 92 | self, 93 | "CTEventBusPolicyIAM", 94 | statement_id="allow-iam-account", 95 | 
action="events:PutEvents", 96 | event_bus_name=self.ct_event_bus.event_bus_name, 97 | principal=sso_exec_account_id, 98 | ) 99 | 100 | ## Error notification topic 101 | self.error_notification_topic = sns.Topic(self, "ErrorNotificationTopic") 102 | self.error_notification_topic.add_subscription( 103 | sns_sub.EmailSubscription(error_notification_email) 104 | ) 105 | self.sso_assignments_table = ddb.Table( 106 | self, 107 | assignment_defenition_table_name, 108 | partition_key=ddb.Attribute( 109 | name=assignment_definition_table_partition_key, 110 | type=ddb.AttributeType.STRING, 111 | ), 112 | sort_key=ddb.Attribute( 113 | name=assignment_definition_table_sort_key, type=ddb.AttributeType.STRING 114 | ), 115 | billing_mode=ddb.BillingMode.PAY_PER_REQUEST, 116 | encryption=ddb.TableEncryption.AWS_MANAGED, 117 | removal_policy=RemovalPolicy.DESTROY, 118 | stream=ddb.StreamViewType.NEW_AND_OLD_IMAGES, 119 | ) 120 | 121 | ## assignment task queue 122 | self.assignment_processing_queue = sqs.Queue( 123 | self, 124 | "assignment-processing-queue", 125 | queue_name=assignment_processing_queue_name, 126 | encryption=sqs.QueueEncryption.KMS_MANAGED, 127 | delivery_delay=Duration.seconds(sqs_delivery_delay_seconds), 128 | visibility_timeout=Duration.seconds(sqs_visibility_timeout_seconds), 129 | ) 130 | 131 | ## Permission management part 132 | sqs_publish_policy = iam.PolicyDocument( 133 | statements=[ 134 | iam.PolicyStatement( 135 | actions=[ 136 | "sqs:SendMessage", 137 | "sqs:SendMessageBatch", 138 | "sqs:GetQueueAttributes", 139 | "sqs:GetQueueUrl", 140 | ], 141 | effect=iam.Effect.ALLOW, 142 | resources=[self.assignment_processing_queue.queue_arn], 143 | ) 144 | ] 145 | ) 146 | 147 | ## Permission management part 148 | eventbus_publish_policy = iam.PolicyDocument( 149 | statements=[ 150 | iam.PolicyStatement( 151 | actions=["events:PutEvents"], 152 | effect=iam.Effect.ALLOW, 153 | resources=[ 154 | self.ct_event_bus.event_bus_arn, 155 | ], 156 | ) 157 | ] 158 | ) 159 | 160 | ## Service Event Handler role 161 | self.service_event_handler_role = self._create_lambda_role( 162 | role_id="ServiceEventHandlerRole", 163 | inline_policies={"eventbus": eventbus_publish_policy}, 164 | managed_policy_name_list=[ 165 | "service-role/AWSLambdaBasicExecutionRole", 166 | ], 167 | ) 168 | 169 | ## DB Assignment Handler policy 170 | dynamodb_publish_policy = iam.PolicyDocument( 171 | statements=[ 172 | iam.PolicyStatement( 173 | actions=["dynamodb:BatchWrite*", "dynamodb:Update*", "dynamodb:PutItem"], 174 | effect=iam.Effect.ALLOW, 175 | resources=[ 176 | self.sso_assignments_table.table_arn, 177 | ], 178 | ), 179 | iam.PolicyStatement( 180 | actions=[ 181 | "dynamodb:BatchWrite*", 182 | "dynamodb:Update*", 183 | "dynamodb:PutItem", 184 | "dynamodb:Query", 185 | "dynamodb:DeleteItem", 186 | ], 187 | effect=iam.Effect.ALLOW, 188 | resources=[self.sso_assignments_table.table_arn], 189 | ), 190 | ] 191 | ) 192 | 193 | ## DB Assignment Handler role 194 | self.db_assignment_handler_role = self._create_lambda_role( 195 | role_id="DBAssignmentHandlerRole", 196 | inline_policies={"dynamodb": dynamodb_publish_policy}, 197 | managed_policy_name_list=[ 198 | "service-role/AWSLambdaBasicExecutionRole", 199 | ], 200 | ) 201 | 202 | ## Assignment management role for assignment execution function ## 203 | assignment_exec_policy = iam.PolicyDocument( 204 | statements=[ 205 | iam.PolicyStatement( 206 | actions=[ 207 | "sso:CreateAccountAssignment", 208 | "sso:ListPermissionSetsProvisionedToAccount", 209 | "sso:ListInstances", 
210 | "sso:DeleteAccountAssignment", 211 | ], 212 | effect=iam.Effect.ALLOW, 213 | resources=["*"], 214 | ), 215 | iam.PolicyStatement( 216 | sid="AllowPublishingToSns", 217 | actions=["sns:Publish"], 218 | effect=iam.Effect.ALLOW, 219 | resources=[self.error_notification_topic.topic_arn], 220 | ), 221 | ] 222 | ) 223 | 224 | ## Assignment definition handler role 225 | self.assignment_handler_role = self._create_lambda_role( 226 | role_id="AssignmentDefinitionHandlerRole", 227 | role_name=sso_management_read_only_role, 228 | inline_policies={ 229 | "sqs-publish-policy": sqs_publish_policy, 230 | "dynamodb": dynamodb_publish_policy, 231 | }, 232 | managed_policy_name_list=[ 233 | "service-role/AWSLambdaBasicExecutionRole", 234 | ], 235 | lambda_assume_roles_arn_list=[ 236 | f"arn:aws:iam::{management_account_id}:role/{sso_management_read_only_role}" 237 | ], 238 | ) 239 | 240 | ## Assignment execution handler role 241 | self.assignment_exec_role = self._create_lambda_role( 242 | role_id="AssignmentExecRole", 243 | role_name=sso_management_role, 244 | inline_policies={ 245 | "assignment-policy": assignment_exec_policy, 246 | }, 247 | managed_policy_name_list=[ 248 | "service-role/AWSLambdaSQSQueueExecutionRole", 249 | ], 250 | lambda_assume_roles_arn_list=[ 251 | f"arn:aws:iam::{management_account_id}:role/{sso_management_role}" 252 | ], 253 | ) 254 | 255 | # Lambda Layer Paths 256 | common_layer_path = Path("src/layers/common") 257 | orgz_layer_path = Path("src/layers/orgz") 258 | sso_layer_path = Path("src/layers/sso") 259 | 260 | # Lambda Layers 261 | self.common_lambda_layer = _lambda.LayerVersion( 262 | self, 263 | "CommonLambdaLayer", 264 | code=_lambda.Code.from_asset( 265 | path=str(common_layer_path), 266 | bundling=BundlingOptions( 267 | local=LocalBundler(common_layer_path), 268 | image=lambda_runtime.bundling_image, 269 | command=[ 270 | "bash", 271 | "-c", 272 | f"pip --no-cache-dir install -r requirements.txt -t /asset-output/python && cp -au . /asset-output/python/{common_layer_path.name}/", 273 | ], 274 | ), 275 | ), 276 | compatible_runtimes=[lambda_runtime], 277 | ) 278 | 279 | self.org_lambda_layer = _lambda.LayerVersion( 280 | self, 281 | "OrganizationsLambdaLayer", 282 | code=_lambda.Code.from_asset( 283 | path=str(orgz_layer_path), 284 | bundling=BundlingOptions( 285 | local=LocalBundler(orgz_layer_path), 286 | image=lambda_runtime.bundling_image, 287 | command=[ 288 | "bash", 289 | "-c", 290 | f"pip --no-cache-dir install -r requirements.txt -t /asset-output/python && cp -au . /asset-output/python/{orgz_layer_path.name}/", 291 | ], 292 | ), 293 | ), 294 | compatible_runtimes=[lambda_runtime], 295 | ) 296 | 297 | self.sso_lambda_layer = _lambda.LayerVersion( 298 | self, 299 | "SsoLambdaLayer", 300 | code=_lambda.Code.from_asset( 301 | path=str(sso_layer_path), 302 | bundling=BundlingOptions( 303 | local=LocalBundler(sso_layer_path), 304 | image=lambda_runtime.bundling_image, 305 | command=[ 306 | "bash", 307 | "-c", 308 | f"pip --no-cache-dir install -r requirements.txt -t /asset-output/python && cp -au . /asset-output/python/{sso_layer_path.name}/", 309 | ], 310 | ), 311 | ), 312 | compatible_runtimes=[lambda_runtime], 313 | ) 314 | 315 | # This function will process external events and manage DB records. 
316 | self.db_assignment_handler = _lambda.Function( 317 | self, 318 | "DBAssignmentHandler", 319 | runtime=lambda_runtime, 320 | handler="index.handler", 321 | memory_size=256, 322 | role=self.db_assignment_handler_role, 323 | code=_lambda.Code.from_asset( 324 | path=str(Path("src/functions/assignment_db_handler")), 325 | ), 326 | layers=[ 327 | self.common_lambda_layer, 328 | ], 329 | environment={ 330 | "ERROR_TOPIC_NAME": self.error_notification_topic.topic_arn, 331 | "ASSIGNMENTS_TABLE_NAME": self.sso_assignments_table.table_name, 332 | "ASSOCIATIONID_KEY_NAME": assignment_definition_table_partition_key, 333 | "ASSOCIATIONID_SORT_KEY_NAME": assignment_definition_table_sort_key, 334 | "LOG_LEVEL": "INFO", 335 | "POWERTOOLS_SERVICE_NAME": "enterprise-aws-sso", 336 | }, 337 | ) 338 | 339 | self.db_assignments_lifecycle_events_rule = events.Rule( 340 | self, 341 | "DBAssignmentsEventsRule", 342 | description="Forward record creation events", 343 | enabled=True, 344 | event_bus=self.ct_event_bus, 345 | event_pattern=events.EventPattern(source=["permissionEventSource"]), 346 | rule_name=f"Forwarding-to-db-assignment-handler", 347 | targets=[event_targets.LambdaFunction(self.db_assignment_handler)], 348 | ) 349 | 350 | # This function will process AWS Service Events and create application specific ones 351 | self.service_event_handler = _lambda.Function( 352 | self, 353 | "ServiceEventHandler", 354 | runtime=lambda_runtime, 355 | handler="index.handler", 356 | memory_size=256, 357 | role=self.service_event_handler_role, 358 | code=_lambda.Code.from_asset( 359 | path=str(Path("src/functions/service_event_handler")), 360 | ), 361 | layers=[ 362 | self.common_lambda_layer, 363 | ], 364 | environment={ 365 | "ERROR_TOPIC_NAME": self.error_notification_topic.topic_arn, 366 | "LOG_LEVEL": "INFO", 367 | "POWERTOOLS_SERVICE_NAME": "enterprise-aws-sso", 368 | "IAM_EVENT_BRIDGE_ARN": self.ct_event_bus.event_bus_arn, 369 | }, 370 | ) 371 | 372 | self.service_lifecycle_events_rule = events.Rule( 373 | self, 374 | "ServiceEventHandlerEventsRule", 375 | description="Forward AWS Service Events", 376 | enabled=True, 377 | event_bus=self.ct_event_bus, 378 | event_pattern=events.EventPattern( 379 | detail_type=["AWS Service Event via CloudTrail", "AWS API Call via CloudTrail"], 380 | ), 381 | rule_name=f"Forwarding-to-service-event-handler", 382 | targets=[event_targets.LambdaFunction(self.service_event_handler)], 383 | ) 384 | 385 | # This function will define the assignments from the metadata in DynamoDB 386 | self.assignment_definition_handler = _lambda.Function( 387 | self, 388 | "AssignmentDefinitionHandler", 389 | runtime=lambda_runtime, 390 | handler="index.handler", 391 | memory_size=256, 392 | timeout=Duration.seconds(lambda_defenition_handler_timeout_seconds), 393 | role=self.assignment_handler_role, 394 | code=_lambda.Code.from_asset( 395 | path=str(Path("src/functions/assignment_definition_handler")), 396 | ), 397 | layers=[ 398 | self.common_lambda_layer, 399 | self.org_lambda_layer, 400 | self.sso_lambda_layer, 401 | ], 402 | environment={ 403 | "ASSIGNMENTS_TABLE_NAME": self.sso_assignments_table.table_name, 404 | "ASSIGNMENTS_QUEUE_URL": self.assignment_processing_queue.queue_url, 405 | "ERROR_TOPIC_NAME": self.error_notification_topic.topic_arn, 406 | "LOG_LEVEL": "INFO", 407 | "POWERTOOLS_SERVICE_NAME": "enterprise-sso", 408 | "ASSOCIATIONID_CONCAT_CHAR": "|", 409 | "ASSOCIATIONID_KEY_NAME": assignment_definition_table_partition_key, 410 | "ASSOCIATIONID_SORT_KEY_NAME": 
assignment_definition_table_sort_key, 411 | "SSO_ADMIN_ROLE_ARN": f"arn:aws:iam::{management_account_id}:role/{sso_management_read_only_role}", 412 | }, 413 | ) 414 | 415 | self.assignment_defenition_events_rule = events.Rule( 416 | self, 417 | "AssignmentDefenitionEventsRule", 418 | description="Forward Events", 419 | enabled=True, 420 | event_bus=self.ct_event_bus, 421 | event_pattern=events.EventPattern( 422 | account=[sso_exec_account_id], source=["enterprise-aws-sso"] 423 | ), 424 | rule_name=f"Forwarding-to-defenition-handler", 425 | targets=[event_targets.LambdaFunction(self.assignment_definition_handler)], 426 | ) 427 | 428 | # setting the assignments topic as the event source for the execution lambda 429 | self.assignment_definition_handler.add_event_source( 430 | lambda_event_sources.DynamoEventSource( 431 | table=self.sso_assignments_table, 432 | starting_position=_lambda.StartingPosition.TRIM_HORIZON, 433 | batch_size=5, 434 | bisect_batch_on_error=True, 435 | on_failure=lambda_event_sources.SnsDlq(self.error_notification_topic), 436 | retry_attempts=3, 437 | ) 438 | ) 439 | 440 | self.sso_assignments_table.grant_read_data( 441 | self.assignment_definition_handler 442 | ) # TODO: not sure if needed 443 | self.sso_assignments_table.grant_stream_read(self.assignment_definition_handler) 444 | 445 | # This function will execute the assignments prepared by defenition lambda 446 | self.assignment_execution_handler = _lambda.Function( 447 | self, 448 | "AssignmentExecutionHandler", 449 | runtime=lambda_runtime, 450 | handler="index.handler", 451 | memory_size=256, 452 | timeout=Duration.seconds(lambda_execution_handler_timeout_seconds), 453 | role=self.assignment_exec_role, 454 | code=_lambda.Code.from_asset( 455 | path=str(Path("src/functions/assignment_execution_handler")), 456 | ), 457 | layers=[ 458 | self.common_lambda_layer, 459 | self.sso_lambda_layer, 460 | ], 461 | environment={ 462 | "ERROR_TOPIC_NAME": self.error_notification_topic.topic_arn, 463 | "LOG_LEVEL": "INFO", 464 | "POWERTOOLS_SERVICE_NAME": "enterprise-sso", 465 | "ASSOCIATIONID_CONCAT_CHAR": "|", 466 | "SSO_ADMIN_ROLE_ARN": f"arn:aws:iam::{management_account_id}:role/{sso_management_role}", 467 | "MANAGEMENT_ACCOUNT_ID": management_account_id, 468 | }, 469 | ) 470 | 471 | # setting the assignments queue as the event source for the execution lambda 472 | self.assignment_execution_handler.add_event_source( 473 | lambda_event_sources.SqsEventSource( 474 | self.assignment_processing_queue, batch_size=10, max_concurrency=2 475 | ) 476 | ) 477 | 478 | def _create_lambda_role( 479 | scope: Construct, 480 | role_id: str, 481 | role_name: str = None, 482 | managed_policy_name_list: List[str] = None, 483 | lambda_assume_roles_arn_list: List[str] = None, 484 | inline_policies: Mapping[str, iam.PolicyDocument] = None, 485 | ): 486 | lambda_role = iam.Role( 487 | scope, 488 | id=role_id, 489 | role_name=role_name, 490 | inline_policies=inline_policies, 491 | managed_policies=[ 492 | iam.ManagedPolicy.from_aws_managed_policy_name(managed_policy_name) 493 | for managed_policy_name in managed_policy_name_list 494 | ], 495 | assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"), 496 | ) 497 | if lambda_assume_roles_arn_list is not None: 498 | sts_policy = iam.PolicyStatement( 499 | actions=["sts:AssumeRole"], 500 | effect=iam.Effect.ALLOW, 501 | resources=lambda_assume_roles_arn_list, 502 | ) 503 | lambda_role.add_to_principal_policy(sts_policy) 504 | return lambda_role 505 | 506 | 507 | @jsii.implements(ILocalBundling) 508 | 
class LocalBundler: 509 | """This allows packaging lambda functions without the use of Docker""" 510 | 511 | def __init__(self, source_root: Path): 512 | self.source_root = source_root 513 | 514 | def try_bundle(self, output_dir: str, options: BundlingOptions) -> bool: 515 | try: 516 | subprocess.check_call([sys.executable, "-m", "pip", "--version"]) 517 | except: 518 | return False 519 | 520 | python_output_dir = str(Path(output_dir, "python")) 521 | subprocess.check_call( 522 | [ 523 | sys.executable, 524 | "-m", 525 | "pip", 526 | "--no-cache-dir", 527 | "install", 528 | "-r", 529 | str(Path(self.source_root, "requirements.txt")), 530 | "-t", 531 | python_output_dir, 532 | ] 533 | ) 534 | 535 | def copytree(src: str, dst: str, symlinks=False, ignore=None): 536 | for item in os.listdir(src): 537 | source_item = os.path.join(src, item) 538 | destination_item = os.path.join(dst, item) 539 | if os.path.isdir(source_item): 540 | shutil.copytree(source_item, destination_item, symlinks, ignore) 541 | else: 542 | shutil.copy2(source_item, destination_item) 543 | 544 | destination_layer_folder = Path(python_output_dir, self.source_root.name) 545 | 546 | destination_layer_folder.mkdir() 547 | 548 | copytree(str(self.source_root), destination_layer_folder) 549 | 550 | return True 551 | -------------------------------------------------------------------------------- /deployment/enterprise_sso/enterprise_aws_sso_stage.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import Stage 2 | from constructs import Construct 3 | from enterprise_sso.enterprise_aws_sso_stack import EnterpriseAwsSsoExecStack 4 | 5 | 6 | class EnterpriseAwsSsoExecStage(Stage): 7 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: 8 | super().__init__(scope, construct_id, **kwargs) 9 | 10 | enterprise_aws_sso_exec = EnterpriseAwsSsoExecStack(self, "EnterpriseAWSSSOExec") 11 | -------------------------------------------------------------------------------- /dev-requirements.in: -------------------------------------------------------------------------------- 1 | pip-tools 2 | black 3 | pylint 4 | pylint-exit 5 | pytest 6 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # pip-compile --output-file=dev-requirements.txt --strip-extras dev-requirements.in 6 | # 7 | astroid==3.1.0 8 | # via pylint 9 | black==24.3.0 10 | # via -r dev-requirements.in 11 | build==1.2.1 12 | # via pip-tools 13 | click==8.1.7 14 | # via 15 | # black 16 | # pip-tools 17 | dill==0.3.8 18 | # via pylint 19 | iniconfig==2.0.0 20 | # via pytest 21 | isort==5.13.2 22 | # via pylint 23 | mccabe==0.7.0 24 | # via pylint 25 | mypy-extensions==1.0.0 26 | # via black 27 | packaging==24.0 28 | # via 29 | # black 30 | # build 31 | # pytest 32 | pathspec==0.12.1 33 | # via black 34 | pip-tools==7.4.1 35 | # via -r dev-requirements.in 36 | platformdirs==4.2.0 37 | # via 38 | # black 39 | # pylint 40 | pluggy==1.4.0 41 | # via pytest 42 | pylint==3.1.0 43 | # via -r dev-requirements.in 44 | pylint-exit==1.2.0 45 | # via -r dev-requirements.in 46 | pyproject-hooks==1.0.0 47 | # via 48 | # build 49 | # pip-tools 50 | pytest==8.1.1 51 | # via -r dev-requirements.in 52 | tomlkit==0.12.4 53 | # via pylint 54 | wheel==0.43.0 55 | # via pip-tools 56 | 57 | # 
The following packages are considered to be unsafe in a requirements file: 58 | # pip 59 | # setuptools 60 | -------------------------------------------------------------------------------- /initial-deploy-requirements.in: -------------------------------------------------------------------------------- 1 | boto3 2 | git-remote-codecommit 3 | GitPython 4 | -------------------------------------------------------------------------------- /initial-deploy-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # pip-compile --output-file=initial-deploy-requirements.txt --strip-extras initial-deploy-requirements.in 6 | # 7 | boto3==1.34.82 8 | # via -r initial-deploy-requirements.in 9 | botocore==1.34.82 10 | # via 11 | # boto3 12 | # git-remote-codecommit 13 | # s3transfer 14 | git-remote-codecommit==1.17 15 | # via -r initial-deploy-requirements.in 16 | gitdb==4.0.11 17 | # via gitpython 18 | gitpython==3.1.43 19 | # via -r initial-deploy-requirements.in 20 | jmespath==1.0.1 21 | # via 22 | # boto3 23 | # botocore 24 | python-dateutil==2.9.0.post0 25 | # via botocore 26 | s3transfer==0.10.1 27 | # via boto3 28 | six==1.16.0 29 | # via python-dateutil 30 | smmap==5.0.1 31 | # via gitdb 32 | urllib3==2.2.1 33 | # via botocore 34 | -------------------------------------------------------------------------------- /initial_deployment.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import argparse 3 | import json 4 | import logging 5 | import os 6 | import shutil 7 | import sys 8 | import tempfile 9 | import time 10 | 11 | import boto3 12 | from git import Repo 13 | 14 | logging.basicConfig(stream=sys.stdout, level=logging.INFO) 15 | 16 | 17 | def main(): 18 | logging.info(f"Trying to create Codecommit repository {codecommit_repository_name}") 19 | try: 20 | repo_response = cc_client.create_repository(repositoryName=codecommit_repository_name) 21 | logging.info("Repository successfully created. Continuing with initial deployment") 22 | except cc_client.exceptions.RepositoryNameExistsException as e: 23 | logging.warning("Repository already exists. Checking if empty") 24 | try: 25 | commits = cc_client.get_branch( 26 | repositoryName=codecommit_repository_name, 27 | branchName=codecommit_repository_branch_name, 28 | ) 29 | logging.error( 30 | "Repository already created and is not empty. Application might already be deployed." 31 | ) 32 | sys.exit(1) 33 | except cc_client.exceptions.BranchDoesNotExistException as e: 34 | logging.info( 35 | "Repository exists, but looks to be empty. 
Continuing with initial deployment" 36 | ) 37 | pass 38 | 39 | repository_url = f"codecommit::{region}://{codecommit_repository_name}" 40 | 41 | if args.no_history: 42 | with tempfile.TemporaryDirectory() as tmpdirname: 43 | repo_path = shutil.copytree( 44 | app_source_path, 45 | f"{tmpdirname}/{codecommit_repository_name}", 46 | ignore=shutil.ignore_patterns("cdk.out", ".git", "*.pyc"), 47 | ) 48 | repo = Repo.init( 49 | repo_path, bare=False, initial_branch=codecommit_repository_branch_name 50 | ) 51 | repo.git.add(all=True) 52 | repo.index.commit("initial commit") 53 | logging.info(repo_path) 54 | else: 55 | repo = Repo(app_source_path) 56 | 57 | remote = repo.create_remote(remote_name, url=repository_url, allow_unsafe_protocols=True) 58 | 59 | 60 | remote.push(codecommit_repository_branch_name) 61 | logging.info("You can now run 'cdk deploy' to deploy the pipeline.") 62 | 63 | 64 | if __name__ == "__main__": 65 | parser = argparse.ArgumentParser() 66 | parser.add_argument("-nh", "--no-history", action="store_true") 67 | args = parser.parse_args() 68 | logging.info(f"Loading cdk context") 69 | with open(f"cdk.context.json") as cdk_context_file: 70 | cdk_context = json.load(cdk_context_file)["enterprise_sso"] 71 | 72 | cc_client = boto3.client("codecommit") 73 | codecommit_repository_name = cdk_context.get("codecommit_repository_name", "enterprise-aws-sso") 74 | codecommit_repository_branch_name = cdk_context.get("codecommit_repository_branch_name", "main") 75 | remote_name = "codecommit" 76 | 77 | region = os.environ.get("AWS_DEFAULT_REGION", os.environ.get("AWS_REGION")) 78 | 79 | if not region: 80 | logging.error("Please set AWS_DEFAULT_REGION or AWS_REGION") 81 | sys.exit(1) 82 | 83 | app_source_path = os.path.dirname(os.path.realpath(__file__)) 84 | main() 85 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 100 3 | target-version = ['py310'] 4 | -------------------------------------------------------------------------------- /requirements.in: -------------------------------------------------------------------------------- 1 | aws-assume-role-lib 2 | aws_lambda_powertools 3 | aws-cdk-lib>=2.0.0 4 | constructs>=10.0.0 5 | boto3 6 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # pip-compile --strip-extras requirements.in 6 | # 7 | attrs==23.2.0 8 | # via 9 | # cattrs 10 | # jsii 11 | aws-assume-role-lib==2.10.0 12 | # via -r requirements.in 13 | aws-cdk-asset-awscli-v1==2.2.202 14 | # via aws-cdk-lib 15 | aws-cdk-asset-kubectl-v20==2.1.2 16 | # via aws-cdk-lib 17 | aws-cdk-asset-node-proxy-agent-v6==2.0.3 18 | # via aws-cdk-lib 19 | aws-cdk-lib==2.137.0 20 | # via -r requirements.in 21 | aws-lambda-powertools==2.36.0 22 | # via -r requirements.in 23 | boto3==1.34.82 24 | # via 25 | # -r requirements.in 26 | # aws-assume-role-lib 27 | botocore==1.34.82 28 | # via 29 | # boto3 30 | # s3transfer 31 | cattrs==23.2.3 32 | # via jsii 33 | constructs==10.3.0 34 | # via 35 | # -r requirements.in 36 | # aws-cdk-lib 37 | importlib-resources==6.4.0 38 | # via jsii 39 | jmespath==1.0.1 40 | # via 41 | # boto3 42 | # botocore 43 | jsii==1.97.0 44 | # via 45 | # aws-cdk-asset-awscli-v1 46 | 
# aws-cdk-asset-kubectl-v20 47 | # aws-cdk-asset-node-proxy-agent-v6 48 | # aws-cdk-lib 49 | # constructs 50 | publication==0.0.3 51 | # via 52 | # aws-cdk-asset-awscli-v1 53 | # aws-cdk-asset-kubectl-v20 54 | # aws-cdk-asset-node-proxy-agent-v6 55 | # aws-cdk-lib 56 | # constructs 57 | # jsii 58 | python-dateutil==2.9.0.post0 59 | # via 60 | # botocore 61 | # jsii 62 | s3transfer==0.10.1 63 | # via boto3 64 | six==1.16.0 65 | # via python-dateutil 66 | typeguard==2.13.3 67 | # via 68 | # aws-cdk-asset-awscli-v1 69 | # aws-cdk-asset-kubectl-v20 70 | # aws-cdk-asset-node-proxy-agent-v6 71 | # aws-cdk-lib 72 | # constructs 73 | # jsii 74 | typing-extensions==4.11.0 75 | # via 76 | # aws-lambda-powertools 77 | # jsii 78 | urllib3==2.2.1 79 | # via botocore 80 | -------------------------------------------------------------------------------- /src/functions/assignment_db_handler/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/functions/assignment_db_handler/event_structure.jsonc: -------------------------------------------------------------------------------- 1 | { 2 | "source": "permissionEventSource", 3 | "detail": { 4 | "permissions": [ 5 | { 6 | "ActionType": "Add", //Possible values "Add" or "Remove" 7 | "PermissionFor": "OrganizationalUnit", //Possible values "OrganizationalUnit"|"Account"|"Tag"|"Root", 8 | "OrganizationalUnitName": "OU_Name", 9 | "AccountNumber": 30010047, 10 | "Tag": "key=value", 11 | "GroupName": "GroupX", 12 | "UserName": "User Name", 13 | "PermissionSetName": "AWSReadOnlyAccess" 14 | } 15 | ] 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/functions/assignment_db_handler/index.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | 7 | import boto3 8 | import os 9 | import json 10 | import datetime 11 | 12 | from botocore.exceptions import ClientError 13 | from boto3.dynamodb.conditions import Key 14 | from common.error import Error 15 | 16 | # Static data 17 | 18 | LAMBDA_FUNC_NAME = "Assignment DB handler" 19 | assignment_table_name = os.environ.get("ASSIGNMENTS_TABLE_NAME", "TEST_ASSIGNMENT_TABLE_NAME") 20 | map_key_name = os.getenv("ASSOCIATIONID_KEY_NAME", "mappingId") 21 | map_sortkey_name = os.getenv("ASSOCIATIONID_SORT_KEY_NAME", "mappingValue") 22 | iam_event_bus_arn = os.environ.get("IAM_EVENT_BRIDGE_ARN", "IAM_EVENT_BRIDGE_ARN") 23 | 24 | EVENT_SOURCE = "permissionEventSource" 25 | PERMISSION_FOR_OU = "OrganizationalUnit" 26 | PERMISSION_FOR_ACCOUNT = "Account" 27 | PERMISSION_FOR_TAG = "Tag" 28 | PERMISSION_FOR_ROOT = "Root" 29 | PERMISSION_ACTION_ADD = "Add" 30 | PERMISSION_ACTION_REMOVE = "Remove" 31 | 32 | session = boto3.Session() 33 | event_bridge_client = session.client("events") 34 | 35 | # Proper error handler class 36 | sns_arn = os.getenv( 37 | "ERROR_TOPIC_NAME", "ERROR_TOPIC_NAME" 38 | ) # Getting the SNS Topic ARN passed in by the environment variables. 39 | 40 | error_handler = Error( 41 | sns_topic=sns_arn, 42 | session=session, 43 | lambda_func_name=LAMBDA_FUNC_NAME, 44 | ) 45 | 46 | logger = error_handler.get_logger() 47 | ddb_resource = session.resource("dynamodb") 48 | ddb_client = session.client("dynamodb") 49 | ddb_table = ddb_resource.Table(assignment_table_name) 50 | # Mapping Structure: 51 | # "o:{organization_unit}|g:{group_name}|{permission_set_name}" 52 | # "o:Dev-Workbench-DevKit|g:workbench-devkit-developer|WB-DevKit-Developer" 53 | # Sample Mappings: 54 | # a:1234567890|u:testuser|AWSReadOnlyAccess 55 | # o:ou_name|g:Network-Readonly|Network-Readonly 56 | # t:account_tag|u:SomeUser|Readonly 57 | # r:root|g:Sec-Audit|Readonly 58 | 59 | 60 | def handler(event, context): 61 | event_source = event.get("source") 62 | event_detail = event.get("detail") 63 | 64 | if event_source == EVENT_SOURCE: 65 | permissions = event_detail.get("permissions") 66 | target_principle = None 67 | user_principle = None 68 | 69 | for permission_info in permissions: 70 | action_type = permission_info["ActionType"] 71 | 72 | if permission_info.get("UserName"): 73 | user = permission_info.get("UserName") 74 | user_principle = f"u:{user}" 75 | elif permission_info.get("GroupName"): 76 | group = permission_info.get("GroupName") 77 | user_principle = f"g:{group}" 78 | else: 79 | raise AttributeError 80 | 81 | permission_type = permission_info["PermissionFor"] 82 | 83 | if permission_type == PERMISSION_FOR_OU: 84 | organization_unit = permission_info["OrganizationalUnitName"] 85 | mapping_value_prefix = f"o:{organization_unit}" 86 | target_principle = organization_unit 87 | 88 | elif permission_type == PERMISSION_FOR_ACCOUNT: 89 | account_number = permission_info["AccountNumber"] 90 | mapping_value_prefix = f"a:{account_number}" 91 | target_principle = account_number 92 | 93 | elif permission_type == PERMISSION_FOR_TAG: 94 | tag_name = permission_info["Tag"] 95 | mapping_value_prefix = f"t:{tag_name}" 96 | target_principle = tag_name 97 | 98 | elif permission_type == PERMISSION_FOR_ROOT: 99 | mapping_value_prefix = f"r:root" 100 | target_principle = "root" 101 | else: 102 | raise AttributeError 103 | 104 | permission_set_name = permission_info["PermissionSetName"] 105 | 
mapping_value = f"{mapping_value_prefix}|{user_principle}|{permission_set_name}" 106 | 107 | if action_type == PERMISSION_ACTION_REMOVE: 108 | ddb_table.delete_item( 109 | Key={ 110 | map_key_name: str(target_principle), 111 | map_sortkey_name: mapping_value, 112 | } 113 | ) 114 | elif action_type == PERMISSION_ACTION_ADD: 115 | ddb_table.put_item( 116 | Item={ 117 | map_key_name: str(target_principle), 118 | map_sortkey_name: mapping_value, 119 | "PermissionSetStatus": "Enabled", 120 | "PermissionSetName": permission_set_name, 121 | } 122 | ) 123 | else: 124 | raise AttributeError 125 | 126 | return { 127 | "statusCode": 200, 128 | "body": json.dumps("Event was handled properly by Assignment DB Handler."), 129 | } 130 | -------------------------------------------------------------------------------- /src/functions/assignment_definition_handler/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/functions/assignment_definition_handler/account_operations.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | import json 7 | from processing import process_mapdata, PrincipalNotFound 8 | from config import Config_object 9 | 10 | 11 | # { 12 | # "AccountOperations": 13 | # { 14 | # "Action": "tagged|created|moved", 15 | # "TagKey": "", 16 | # "TagValue": "", 17 | # "AccountId": "", 18 | # "AccountOuName": "", 19 | # "AccountOldOuName": "", 20 | # } 21 | # } 22 | 23 | 24 | def account_operations_handler(controller: Config_object, payload: dict): 25 | controller.clients.logger.info("Received event from Service Handler.") 26 | action: str = payload.get("Action") 27 | account_id: str = payload.get("AccountId") 28 | tag_key: str = payload.get("TagKey") 29 | tag_value: str = payload.get("TagValue") 30 | parent_ou_name: str = payload.get("AccountOuName") 31 | parent_old_ou_name: str = payload.get("AccountOldOuName") 32 | 33 | # Tag deletion is now handled as an untagresource api call when done from the web console. Can now be implemented. 34 | # if action == "tagged": 35 | # if tag_key is not None: 36 | # controller.clients.logger.info(f"Org action detected. Account is tagged") 37 | # query_dynamo_table( 38 | # controller, f"{tag_key}={tag_value}", account_id, controller.data.ACTION_TYPE_CREATE 39 | # ) 40 | if action == "created": 41 | controller.clients.logger.info(f"Organizatins action detected. Account is created") 42 | query_dynamo_table(controller, "root", account_id, controller.data.ACTION_TYPE_CREATE) 43 | if action == "moved": 44 | controller.clients.logger.info(f"Organizations action detected. Account is moved") 45 | query_dynamo_table( 46 | controller, 47 | ( 48 | "root" 49 | if parent_old_ou_name.startswith("r-") 50 | else controller.clients.org.describe_ou_name(parent_old_ou_name) 51 | ), 52 | account_id, 53 | controller.data.ACTION_TYPE_DELETE, 54 | ) 55 | if not parent_ou_name.startswith("r-"): 56 | query_dynamo_table( 57 | controller, 58 | controller.clients.org.describe_ou_name(parent_ou_name), 59 | account_id, 60 | controller.data.ACTION_TYPE_CREATE, 61 | ) 62 | return { 63 | "statusCode": 200, 64 | "body": json.dumps("Received Organizations Event has been successfully processed."), 65 | } 66 | 67 | 68 | def query_dynamo_table(controller, query_key, account_id, assignment_action): 69 | key_condition_expression_value = f"{controller.config.map_key_name} = :queryValue" 70 | result = controller.clients.dynamodb.query( 71 | TableName=controller.config.table_name, 72 | KeyConditionExpression=key_condition_expression_value, 73 | ExpressionAttributeValues={":queryValue": {"S": query_key}}, 74 | ) 75 | controller.clients.logger.info(f"search results :{str(result)}") 76 | 77 | if result.get("Count") > 0: 78 | for item in result["Items"]: 79 | aws_principal, idp_principal, permission_set_name = item[ 80 | controller.config.map_sortkey_name 81 | ]["S"].split(controller.config.associationid_concat_char) 82 | try: 83 | process_mapdata( 84 | controller, 85 | f"a:{account_id}", 86 | idp_principal, 87 | permission_set_name, 88 | assignment_action, 89 | item, 90 | ) 91 | except PrincipalNotFound: 92 | controller.clients.logger.info( 93 | f"Principal {idp_principal} missing, moving on to next record from DynamoDB" 94 | ) 95 | -------------------------------------------------------------------------------- /src/functions/assignment_definition_handler/assignments_operations.py: -------------------------------------------------------------------------------- 1 | 
################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | 7 | from typing import List 8 | from processing import process_mapdata, PrincipalNotFound 9 | from common.encoder import PythonObjectEncoder 10 | from config import Config_object 11 | import json 12 | 13 | 14 | def assignments_operations_handler(controller: Config_object, records: list): 15 | assignment_action: str 16 | stream_key: str 17 | aws_principal: str 18 | idp_principal: str 19 | permission_set_name: str 20 | 21 | for record in records: 22 | # TODO switch to parallel processing. 23 | controller.clients.logger.info(str(record["dynamodb"])) 24 | controller.clients.logger.debug( 25 | f"Stream record: {json.dumps(record, indent=2, cls=PythonObjectEncoder)}" 26 | ) 27 | if "NewImage" in record["dynamodb"]: 28 | assignment_action = controller.data.ACTION_TYPE_CREATE 29 | stream_key = "NewImage" 30 | elif "OldImage" in record["dynamodb"]: 31 | assignment_action = controller.data.ACTION_TYPE_DELETE 32 | stream_key = "OldImage" 33 | else: 34 | error_msg = f"OldImage nor NewImage key was not found in dynamodb string." 35 | controller.clients.logger.error(error_msg) 36 | controller.clients.error_handler.publish_error_message(record, error_msg) 37 | raise AttributeError 38 | 39 | aws_principal, idp_principal, permission_set_name = record["dynamodb"][stream_key][ 40 | controller.config.map_sortkey_name 41 | ]["S"].split(controller.config.associationid_concat_char) 42 | permission_set_state = ( 43 | record["dynamodb"][stream_key] 44 | .get(controller.config.permission_set_status) 45 | .get("S", "Enabled") 46 | ) 47 | try: 48 | if permission_set_state == "Enabled": 49 | process_mapdata( 50 | controller, 51 | aws_principal, 52 | idp_principal, 53 | permission_set_name, 54 | assignment_action, 55 | record, 56 | ) 57 | else: 58 | controller.clients.logger.info( 59 | f"Permission set {permission_set_name} is disabled. Removing permissions from AWS SSO" 60 | ) 61 | process_mapdata( 62 | controller, 63 | aws_principal, 64 | idp_principal, 65 | permission_set_name, 66 | controller.data.ACTION_TYPE_DELETE, 67 | record, 68 | ) 69 | except PrincipalNotFound: 70 | controller.clients.logger.info( 71 | f"Principal {idp_principal} missing, moving on to next record from DynamoDB" 72 | ) 73 | -------------------------------------------------------------------------------- /src/functions/assignment_definition_handler/config.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: MIT-0
4 | ################################################################################
5 | 
6 | from aws_assume_role_lib import assume_role
7 | from sso.handler import SsoService
8 | from orgz.handler import Organizations
9 | from common.error import Error
10 | 
11 | import boto3
12 | import os
13 | 
14 | LAMBDA_FUNC_NAME = "Assignment definition handler"
15 | 
16 | 
17 | def load_config():
18 |     # The following configuration is not used outside this function
19 |     sso_admin_role_arn = os.getenv(
20 |         "SSO_ADMIN_ROLE_ARN",
21 |         "arn:aws:iam::112223334444:role/assignment-management-role",
22 |     )
23 |     sns_arn = os.getenv(
24 |         "ERROR_TOPIC_NAME", "ERROR_TOPIC_NAME"
25 |     )  # Getting the SNS Topic ARN passed in by the environment variables.
26 | 
27 |     controller = Config_object("This should act as a controller for all components")
28 | 
29 |     # Defining global clients
30 |     controller.config = Config_object("Environment configuration")
31 |     controller.config.queue_url = os.getenv("ASSIGNMENTS_QUEUE_URL", "test_queue")
32 |     controller.config.map_key_name = os.getenv("ASSOCIATIONID_KEY_NAME", "mappingId")
33 |     controller.config.map_sortkey_name = os.getenv("ASSOCIATIONID_SORT_KEY_NAME", "mappingValue")
34 |     controller.config.table_name = os.environ.get(
35 |         "ASSIGNMENTS_TABLE_NAME", "TEST_ASSIGNMENT_TABLE_NAME"
36 |     )
37 |     controller.config.associationid_concat_char = os.getenv("ASSOCIATIONID_CONCAT_CHAR", "|")
38 | 
39 |     controller.config.permission_set_status = "PermissionSetStatus"
40 |     controller.config.permission_set_name = "PermissionSetName"
41 | 
42 |     # Boto configuration
43 |     session = boto3.Session()
44 |     assumed_role_session = assume_role(session, sso_admin_role_arn)
45 | 
46 |     # Clients
47 |     controller.clients = Config_object("Client configuration")
48 |     controller.clients.sso = SsoService(assumed_role_session)
49 |     controller.clients.org = Organizations(role=assumed_role_session)
50 |     controller.clients.identity_store = assumed_role_session.client("identitystore")
51 |     controller.clients.dynamodb = session.client("dynamodb")
52 |     controller.clients.dynamodb_table = session.resource("dynamodb").Table(
53 |         controller.config.table_name
54 |     )
55 |     controller.clients.sqs = session.client("sqs")
56 |     # Error handling
57 |     controller.clients.error_handler = Error(
58 |         sns_topic=sns_arn,
59 |         session=session,
60 |         lambda_func_name=LAMBDA_FUNC_NAME,
61 |     )
62 |     controller.clients.logger = controller.clients.error_handler.get_logger()
63 | 
64 |     # Datablocks
65 |     controller.data = Config_object("Datablocks")
66 |     controller.data.permission_sets = controller.clients.sso.get_permission_sets()
67 |     controller.data.ACTION_TYPE_CREATE = "CREATE"
68 |     controller.data.ACTION_TYPE_DELETE = "DELETE"
69 |     controller.data.GROUP_PRINCIPAL_TYPE = "GROUP"
70 |     controller.data.USER_PRINCIPAL_TYPE = "USER"
71 | 
72 |     return controller
73 | 
74 | 
75 | class Config_object(object):
76 |     def __init__(self, *args):
77 |         self.__header__ = str(args[0]) if args else None
78 | 
79 |     def __repr__(self):
80 |         if self.__header__ is None:
81 |             return super(Config_object, self).__repr__()
82 |         return self.__header__
83 | 
84 |     def next(self):
85 |         raise StopIteration
86 | 
87 |     def __iter__(self):
88 |         keys = self.__dict__.keys()
89 |         for key in keys:
90 |             if not key.startswith("__") and not isinstance(key, Config_object):
91 |                 yield getattr(self, key)
92 | 
93 |     def __len__(self):
94 |         keys = self.__dict__.keys()
95 |         return len(
96 |             [key for key in keys if not key.startswith("__") and not isinstance(key,
Config_object)] 97 | ) 98 | -------------------------------------------------------------------------------- /src/functions/assignment_definition_handler/index.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | import json 7 | 8 | from account_operations import account_operations_handler 9 | from assignments_operations import assignments_operations_handler 10 | from permissionset_operations import permission_operations_handler 11 | from aws_lambda_powertools import Logger 12 | from config import load_config 13 | 14 | logger = Logger() 15 | 16 | 17 | controller = None 18 | 19 | ## Event payload from Service Event handler: 20 | # { 21 | # "Source": "enterprise-aws-sso", 22 | # "DetailType": "AccountOperations", 23 | # "Detail": 24 | # { 25 | # "Action": "tagged|created|moved", 26 | # "TagKey": "", 27 | # "TagValue": "", 28 | # "AccountId": "", 29 | # "AccountOuName": "", 30 | # "AccountOldOuName": "If present if not have to look for a solution", 31 | # } 32 | # } 33 | # { 34 | # "PermissionSetOperations": 35 | # { 36 | # "Action": "created|delete", 37 | # "PermissionSetName": "", 38 | # } 39 | # } 40 | 41 | # This will be the control lambda! 42 | 43 | 44 | # @logger.inject_lambda_context 45 | def handler(event: dict, context): 46 | global controller 47 | 48 | logger.debug(event) 49 | 50 | if controller is None: 51 | controller = load_config() 52 | 53 | if event_source := event.get("source"): 54 | if event_source == "enterprise-aws-sso": 55 | detail_type = event.get("detail-type") 56 | if detail_type == "AccountOperation": 57 | account_operations_handler(controller, event.get("detail")) 58 | if detail_type == "PermissionSetOperation": 59 | permission_operations_handler(controller, event.get("detail")) 60 | elif records := event.get("Records"): 61 | assignments_operations_handler(controller, records) 62 | 63 | return { 64 | "statusCode": 200, 65 | "body": json.dumps(f"Event processed"), 66 | } 67 | -------------------------------------------------------------------------------- /src/functions/assignment_definition_handler/permissionset_operations.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | 7 | from config import Config_object 8 | from boto3.dynamodb.conditions import Attr 9 | import json 10 | 11 | 12 | # { 13 | # 'version': '0', 14 | # 'id': 'e6689fa4-5913-75ba-da1e-9e2973c52a88', 15 | # 'detail-type': 'PermissionSetOperation', 16 | # 'source': 'enterprise-aws-sso', 17 | # 'account': '966545059596', 18 | # 'time': '2021-11-22T12:54:13Z', 19 | # 'region': 'us-east-1', 20 | # 'resources': [], 21 | # 'detail': 22 | # { 23 | # 'Action': 'created', 24 | # 'PermissionSetName': 'SupportUser', 25 | # 'PermissionSetArn': 'arn:aws:sso:::permissionSet/ssoins-72238dcf2af4d70c/ps-578a0ab763537e74' 26 | # } 27 | # } 28 | 29 | 30 | def permission_operations_handler(controller: Config_object, event_details: dict): 31 | print("AWS SSO Event received") 32 | 33 | sso_action = event_details["Action"] 34 | permission_set_name = event_details["PermissionSetName"] 35 | permission_set_arn = event_details[ 36 | "PermissionSetArn" 37 | ] # Probably will not need this for now, but let's keep it. 38 | 39 | scan_kwargs = { 40 | "FilterExpression": Attr(controller.config.permission_set_name).eq(permission_set_name), 41 | "ProjectionExpression": f"{controller.config.map_key_name}, {controller.config.map_sortkey_name}", 42 | } 43 | 44 | done = False 45 | start_key = None 46 | found_items = [] 47 | while not done: 48 | if start_key: 49 | scan_kwargs["ExclusiveStartKey"] = start_key 50 | response = controller.clients.dynamodb_table.scan(**scan_kwargs) 51 | found_items = found_items + response.get("Items", []) 52 | start_key = response.get("LastEvaluatedKey", None) 53 | done = start_key is None 54 | 55 | permission_set_status = "Enabled" 56 | if sso_action == "deleted": 57 | permission_set_status = "Disabled" 58 | 59 | for item in found_items: 60 | controller.clients.logger.debug(item) 61 | controller.clients.dynamodb_table.put_item( 62 | Item={ 63 | controller.config.map_key_name: item[controller.config.map_key_name], 64 | controller.config.map_sortkey_name: item[controller.config.map_sortkey_name], 65 | controller.config.permission_set_name: permission_set_name, 66 | controller.config.permission_set_status: permission_set_status, 67 | } 68 | ) 69 | -------------------------------------------------------------------------------- /src/functions/assignment_definition_handler/processing.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | 7 | from sqs import publish_sqs_task_for_execution 8 | from config import Config_object 9 | 10 | 11 | class PrincipalNotFound(Exception): 12 | """Raised when a principal is not found in Identity Store""" 13 | 14 | pass 15 | 16 | 17 | def process_mapdata( 18 | controller: Config_object, 19 | aws_principal: str, 20 | idp_principal: str, 21 | permission_set_name: str, 22 | assignment_action: str, 23 | record: str, 24 | ): 25 | aws_principal_type: str 26 | aws_principal_name: str 27 | aws_principal_type, aws_principal_name = aws_principal.split(":") 28 | 29 | idp_principal_type: str 30 | idp_principal_name: str 31 | idp_principal_type, idp_principal_name = idp_principal.split(":") 32 | 33 | if permission_set_name in controller.data.permission_sets: 34 | permission_set: str = controller.data.permission_sets[permission_set_name] 35 | controller.clients.logger.info( 36 | f"PS {permission_set_name} identified as: {permission_set['PermissionSetArn']}" 37 | ) 38 | else: 39 | error_msg = f"Permission Set {permission_set_name} was not found." 40 | controller.clients.logger.error(error_msg) 41 | controller.clients.error_handler.publish_error_message(record, error_msg) 42 | pass 43 | 44 | accounts = None 45 | if idp_principal_type.lower() == "g": 46 | try: 47 | idp_principal: dict = controller.clients.identity_store.list_groups( 48 | IdentityStoreId=controller.clients.sso.identity_store_id, 49 | Filters=[ 50 | { 51 | "AttributePath": "DisplayName", 52 | "AttributeValue": idp_principal_name, 53 | } 54 | ], 55 | )["Groups"][0] 56 | except IndexError as e: 57 | controller.clients.logger.error( 58 | f"Group {idp_principal_name} is not found in identity store {controller.clients.sso.identity_store_id}." 59 | ) 60 | raise PrincipalNotFound() 61 | controller.clients.logger.info( 62 | f"Group {idp_principal['DisplayName']} identified as: {idp_principal['GroupId']}." 63 | ) 64 | idp_principal["Type"] = controller.data.GROUP_PRINCIPAL_TYPE 65 | idp_principal["Id"] = idp_principal["GroupId"] 66 | elif idp_principal_type.lower() == "u": 67 | idp_principal: dict = controller.clients.identity_store.list_users( 68 | IdentityStoreId=controller.clients.sso.identity_store_id, 69 | Filters=[{"AttributePath": "UserName", "AttributeValue": idp_principal_name}], 70 | )["Users"][0] 71 | controller.clients.logger.info( 72 | f"User {idp_principal['UserName']} identified as: {idp_principal['UserId']}." 73 | ) 74 | idp_principal["Type"] = controller.data.USER_PRINCIPAL_TYPE 75 | idp_principal["Id"] = idp_principal["UserId"] 76 | else: 77 | error_msg = f'principal type {idp_principal_type} is not supported. Needs to be either a user ("u") or group ("g")' 78 | controller.clients.logger.error(error_msg) 79 | controller.clients.error_handler.publish_error_message(record, error_msg) 80 | pass 81 | if aws_principal_type.lower() == "r": 82 | # Apply to all accounts that exist under root 83 | controller.clients.logger.info( 84 | "Root request received. Changes marked for all accounts in this Organization" 85 | ) 86 | accounts = controller.clients.org.get_accounts_ids() 87 | elif aws_principal_type.lower() == "o": 88 | # Get accounts for OU 89 | controller.clients.logger.info( 90 | f"OU request received. 
Changes marked for accounts in {aws_principal_name} OU"
91 |         )
92 |         accounts = controller.clients.org.get_active_accounts_for_path(f"/{aws_principal_name}")
93 |         controller.clients.logger.info(accounts)
94 |     elif aws_principal_type.lower() == "a":
95 |         # Validate account and proceed with it.
96 |         account = controller.clients.org.describe_account(aws_principal_name)
97 |         if account["Account"]["Status"] == "ACTIVE":
98 |             controller.clients.logger.info(
99 |                 f"Account {aws_principal_name} is an active account and will be processed"
100 |             )
101 |             accounts = [aws_principal_name]
102 |         else:
103 |             error_msg = f"AWS Account {aws_principal_name} was not found or is not active"
104 |             controller.clients.logger.error(error_msg)
105 |             controller.clients.error_handler.publish_error_message(record, error_msg)
106 |     elif aws_principal_type.lower() == "t":
107 |         # Get accounts with tags.
108 |         controller.clients.logger.info(
109 |             f"Tag request received. Changes marked for accounts with {aws_principal_name} tag"
110 |         )
111 |         tag_key, tag_value = aws_principal_name.split("=")
112 |         accounts = controller.clients.org.get_account_ids_for_tags({tag_key: tag_value})
113 |     else:
114 |         error_msg = f'AWS principal type {aws_principal_type} is not supported. Needs to be one of the following: root ("r"), organization unit ("o"), account ("a") or tag ("t")'
115 |         controller.clients.logger.error(error_msg)
116 |         controller.clients.error_handler.publish_error_message(record, error_msg)
117 |         pass
118 |     if accounts:
119 |         publish_sqs_task_for_execution(
120 |             controller,
121 |             accounts=accounts,
122 |             principal_type=idp_principal["Type"],
123 |             principal_id=idp_principal["Id"],
124 |             permission_set_arn=permission_set["PermissionSetArn"],
125 |             action=assignment_action,
126 |         )
127 |     else:
128 |         error_msg = "No active accounts were found for the requested target"
129 |         controller.clients.logger.error(error_msg)
130 |         controller.clients.error_handler.publish_error_message(record, error_msg)
131 | 
--------------------------------------------------------------------------------
/src/functions/assignment_definition_handler/sqs.py:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | # SPDX-License-Identifier: MIT-0
4 | ################################################################################
5 | 
6 | import json
7 | from common.encoder import PythonObjectEncoder
8 | 
9 | 
10 | def publish_sqs_task_for_execution(
11 |     controller, accounts, principal_type, principal_id, permission_set_arn, action
12 | ):
13 |     payload = []
14 |     results = []
15 |     for idx, account in enumerate(accounts):
16 |         entry = {
17 |             "Id": f"{idx}",
18 |             "MessageBody": json.dumps(
19 |                 {
20 |                     "TargetId": account,
21 |                     "PrincipalType": principal_type,
22 |                     "PrincipalId": principal_id,
23 |                     "PermissionSetArn": permission_set_arn,
24 |                     "Action": action,
25 |                 },
26 |                 indent=2,
27 |                 cls=PythonObjectEncoder,
28 |             ),
29 |         }
30 |         controller.clients.logger.info("Appending entry to array")
31 |         controller.clients.logger.info(entry)
32 |         payload.append(entry)
33 | 
34 |         ## TODO refactor to look nice. Unfortunately we can send only in batches of 10.
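        # Note: SQS SendMessageBatch accepts at most 10 entries per request, so the
        # accumulated entries are flushed every 10 accounts and any remainder is sent
        # after the loop completes.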
35 | if (idx + 1) % 10 == 0: 36 | controller.clients.logger.info("Publishing array") 37 | results.append( 38 | controller.clients.sqs.send_message_batch( 39 | QueueUrl=controller.config.queue_url, Entries=payload 40 | ) 41 | ) 42 | payload = [] 43 | if payload: 44 | controller.clients.logger.info("Publishing array") 45 | results.append( 46 | controller.clients.sqs.send_message_batch( 47 | QueueUrl=controller.config.queue_url, Entries=payload 48 | ) 49 | ) 50 | return results 51 | -------------------------------------------------------------------------------- /src/functions/assignment_execution_handler/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/functions/assignment_execution_handler/index.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | 7 | import backoff 8 | import boto3 9 | import os 10 | import json 11 | from aws_assume_role_lib import assume_role 12 | from botocore import exceptions 13 | from common.error import Error 14 | from sso.handler import SsoService 15 | 16 | 17 | class ServerUnavailableException(Exception): 18 | pass 19 | 20 | 21 | class UnknownException(Exception): 22 | pass 23 | 24 | 25 | # Static data 26 | ACTION_TYPE_CREATE = "CREATE" 27 | ACTION_TYPE_DELETE = "DELETE" 28 | LAMBDA_FUNC_NAME = "Assignment execution handler" 29 | 30 | 31 | # TODO Set log level as a parameter 32 | 33 | session = boto3.Session() 34 | 35 | 36 | # Proper error handler class 37 | sns_arn = os.getenv( 38 | "ERROR_TOPIC_NAME", "ERROR_TOPIC_NAME" 39 | ) # Getting the SNS Topic ARN passed in by the environment variables. 40 | 41 | error_handler = Error( 42 | sns_topic=sns_arn, 43 | session=session, 44 | lambda_func_name=LAMBDA_FUNC_NAME, 45 | ) 46 | 47 | logger = error_handler.get_logger() 48 | 49 | 50 | # Is Identity Center delegated admin? 
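# The values below are resolved lazily inside the handler and cached as module-level
# globals, so they are reused across invocations that land on the same Lambda
# execution environment.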
51 | use_delegated_admin = None 52 | 53 | # Get SSO Instance 54 | sso_delegated_admin = None 55 | 56 | # Get SSO Instance 57 | management_account_id = os.getenv("MANAGEMENT_ACCOUNT_ID", "012345678901") 58 | sso_admin_role_arn = os.getenv( 59 | "SSO_ADMIN_ROLE_ARN", 60 | "arn:aws:iam::112223334444:role/assignment-management-role", 61 | ) 62 | assumed_admin_role_session = assume_role(session, sso_admin_role_arn) 63 | sso_admin = None 64 | 65 | 66 | def handler(event, context): 67 | # TODO make proper call outside handler work with tests 68 | global sso_admin 69 | global sso_delegated_admin 70 | global use_delegated_admin 71 | 72 | # check if delegated admin is enabled 73 | if use_delegated_admin is None: 74 | try: 75 | org_client = assumed_admin_role_session.client( 76 | "organizations" 77 | ) 78 | response = org_client.list_delegated_administrators( 79 | ServicePrincipal="sso.amazonaws.com", 80 | ) 81 | logger.info(response) 82 | delegated_admins = response.get("DelegatedAdministrators", []) 83 | if delegated_admins: 84 | for admin in delegated_admins: 85 | if admin.get("Status") == "ACTIVE": 86 | use_delegated_admin = True 87 | else: 88 | use_delegated_admin = False 89 | except exceptions.ClientError as exception: 90 | logger.error("Exception: " + str(exception)) 91 | error_handler.publish_error_message( 92 | "Failed to retrieve 'sso.amazonaws.com' delegated administrators.", str(exception) 93 | ) 94 | raise (exception) 95 | 96 | logger.info("use_delegated_admin is set to " + str(use_delegated_admin)) 97 | 98 | for record in event["Records"]: 99 | message = record["body"] 100 | logger.info(message) 101 | messageDict = json.loads(message) 102 | principal_type = messageDict["PrincipalType"] 103 | principal_id = messageDict["PrincipalId"] 104 | permission_set_arn = messageDict["PermissionSetArn"] 105 | target_id = messageDict["TargetId"] 106 | action = messageDict["Action"] 107 | 108 | # For management account and none delegated admin, we use the management account 109 | if target_id == management_account_id or not use_delegated_admin: 110 | if sso_admin is None: 111 | sso_admin = SsoService(assumed_admin_role_session) 112 | sso = sso_admin 113 | else: 114 | if sso_delegated_admin is None: 115 | sso_delegated_admin = SsoService(session) 116 | sso = sso_delegated_admin 117 | 118 | if action == ACTION_TYPE_CREATE: 119 | 120 | @backoff.on_exception( 121 | backoff.expo, 122 | ( 123 | sso.client.exceptions.ConflictException, 124 | sso.client.exceptions.ThrottlingException, 125 | ), 126 | max_tries=10, 127 | ) 128 | def create_account_assignment( 129 | message, principal_type, principal_id, permission_set_arn, target_id, sso 130 | ): 131 | response = sso.client.create_account_assignment( 132 | InstanceArn=sso.instance_arn, 133 | TargetId=target_id, 134 | TargetType="AWS_ACCOUNT", 135 | PermissionSetArn=permission_set_arn, 136 | PrincipalType=principal_type, 137 | PrincipalId=principal_id, 138 | ) 139 | logger.info(response) 140 | 141 | # Create Account/PermissionSet Assignment 142 | try: 143 | create_account_assignment( 144 | message, principal_type, principal_id, permission_set_arn, target_id, sso 145 | ) 146 | except Exception as exception: 147 | # If Exception occurs, parse Response and write it to Error Topic. 148 | # Then, raise exception to not delete the message from queue. 
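                # SQS then makes the message visible again after the visibility timeout and
                # retries it (or moves it to a dead-letter queue if a redrive policy is set).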
149 | logger.error("Exception: " + str(exception)) 150 | error_handler.publish_error_message(message, str(exception)) 151 | raise (exception) 152 | 153 | elif action == ACTION_TYPE_DELETE: 154 | 155 | @backoff.on_exception( 156 | backoff.expo, 157 | ( 158 | sso.client.exceptions.ConflictException, 159 | sso.client.exceptions.ThrottlingException, 160 | ), 161 | max_tries=10, 162 | ) 163 | def delete_account_assignment( 164 | principal_type, principal_id, permission_set_arn, target_id, sso 165 | ): 166 | response = sso.client.delete_account_assignment( 167 | InstanceArn=sso.instance_arn, 168 | TargetId=target_id, 169 | TargetType="AWS_ACCOUNT", 170 | PermissionSetArn=permission_set_arn, 171 | PrincipalType=principal_type, 172 | PrincipalId=principal_id, 173 | ) 174 | logger.info(response) 175 | 176 | # Delete Account/PermissionSet Assignment 177 | try: 178 | delete_account_assignment( 179 | principal_type, principal_id, permission_set_arn, target_id, sso 180 | ) 181 | except Exception as exception: 182 | # If Exception occurs, parse Response and write it to Error Topic. 183 | # Then, raise exception to not delete the message from queue. 184 | logger.error("Exception: " + str(exception)) 185 | error_handler.publish_error_message(message, str(exception)) 186 | raise (exception) 187 | 188 | else: 189 | # Not supported action 190 | logger.info("Not supported action: " + str(message)) 191 | error_handler.publish_error_message(message, "Not supported action.") 192 | raise AttributeError 193 | 194 | return { 195 | "statusCode": 200, 196 | "body": json.dumps("Event was handled properly by Assignment Execution Handler."), 197 | } 198 | -------------------------------------------------------------------------------- /src/functions/assignment_execution_handler/test/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/functions/assignment_execution_handler/test/payloads.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | event_input_data_create = { 7 | "Records": [ 8 | { 9 | "messageId": "3769c1ec-6f65-49c1-966c-2be7732a0a1d", 10 | "receiptHandle": "AQEBZEJDt7kCNhTtZzI5/WCH4E7ArTzudBiU3u1IAystrGA9yx3wAg223Pg9xY6yBEXExnmW+PkcUbGJpfSvKaapgbidiBRHhNFCsSMOGGg0FFwkcwNo7WoN6xtLrQh+x+Vm6Rx/jvTupYN7lGFvUDHSgby25FTiLcMN3bBG8pbMShiFhe4RXK4hO0Gc/0gcq2rbj1INNUfm37KJa0v5KQqbHyAKXQ4WaaKkmRsuE//2OjFBqvqW3x2TBQ6YUtc576O2F0fm7kuB16ieNIyQQ7xRlfsjPXsQ4wZKKpjHqg0zfjBMeopENz762wAbFFxcaA1tqB5jcstuoL8DGSjFEFWtYkyUjNIzrtM3ttv3nx021MrDbXqQFw3egvoHPjg9p9ytpceakdb+6N3LgQ6cHXR6FQ==", 11 | "body": '{"PrincipalType": "string", "PrincipalId":"string","PermissionSetArn":"arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb","TargetId":"dj358s9nldve", "Action":"CREATE"}', 12 | "attributes": { 13 | "ApproximateReceiveCount": "1", 14 | "SentTimestamp": "1626440153145", 15 | "SenderId": "AROAUX4IDK3FOMZSO2A5L:sgoeksel-Isengard", 16 | "ApproximateFirstReceiveTimestamp": "1626440153155", 17 | }, 18 | "messageAttributes": {}, 19 | "md5OfBody": "7e5e30491761e1993ecac5633a2c9621", 20 | "eventSource": "aws:sqs", 21 | "eventSourceARN": "arn:aws:sqs:us-east-1:326166075082:AssignmentsQueue", 22 | "awsRegion": "us-east-1", 23 | } 24 | ] 25 | } 26 | 27 | event_input_data_delete = { 28 | "Records": [ 29 | { 30 | "messageId": "3769c1ec-6f65-49c1-966c-2be7732a0a1d", 31 | "receiptHandle": "AQEBZEJDt7kCNhTtZzI5/WCH4E7ArTzudBiU3u1IAystrGA9yx3wAg223Pg9xY6yBEXExnmW+PkcUbGJpfSvKaapgbidiBRHhNFCsSMOGGg0FFwkcwNo7WoN6xtLrQh+x+Vm6Rx/jvTupYN7lGFvUDHSgby25FTiLcMN3bBG8pbMShiFhe4RXK4hO0Gc/0gcq2rbj1INNUfm37KJa0v5KQqbHyAKXQ4WaaKkmRsuE//2OjFBqvqW3x2TBQ6YUtc576O2F0fm7kuB16ieNIyQQ7xRlfsjPXsQ4wZKKpjHqg0zfjBMeopENz762wAbFFxcaA1tqB5jcstuoL8DGSjFEFWtYkyUjNIzrtM3ttv3nx021MrDbXqQFw3egvoHPjg9p9ytpceakdb+6N3LgQ6cHXR6FQ==", 32 | "body": '{"PrincipalType": "string", "PrincipalId":"string","PermissionSetArn":"arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb","TargetId":"dj358s9nldve", "Action":"DELETE"}', 33 | "attributes": { 34 | "ApproximateReceiveCount": "1", 35 | "SentTimestamp": "1626440153145", 36 | "SenderId": "AROAUX4IDK3FOMZSO2A5L:sgoeksel-Isengard", 37 | "ApproximateFirstReceiveTimestamp": "1626440153155", 38 | }, 39 | "messageAttributes": {}, 40 | "md5OfBody": "7e5e30491761e1993ecac5633a2c9621", 41 | "eventSource": "aws:sqs", 42 | "eventSourceARN": "arn:aws:sqs:us-east-1:326166075082:AssignmentsQueue", 43 | "awsRegion": "us-east-1", 44 | } 45 | ] 46 | } 47 | 48 | event_input_data_notsupportedaction = { 49 | "Records": [ 50 | { 51 | "messageId": "3769c1ec-6f65-49c1-966c-2be7732a0a1d", 52 | "receiptHandle": "AQEBZEJDt7kCNhTtZzI5/WCH4E7ArTzudBiU3u1IAystrGA9yx3wAg223Pg9xY6yBEXExnmW+PkcUbGJpfSvKaapgbidiBRHhNFCsSMOGGg0FFwkcwNo7WoN6xtLrQh+x+Vm6Rx/jvTupYN7lGFvUDHSgby25FTiLcMN3bBG8pbMShiFhe4RXK4hO0Gc/0gcq2rbj1INNUfm37KJa0v5KQqbHyAKXQ4WaaKkmRsuE//2OjFBqvqW3x2TBQ6YUtc576O2F0fm7kuB16ieNIyQQ7xRlfsjPXsQ4wZKKpjHqg0zfjBMeopENz762wAbFFxcaA1tqB5jcstuoL8DGSjFEFWtYkyUjNIzrtM3ttv3nx021MrDbXqQFw3egvoHPjg9p9ytpceakdb+6N3LgQ6cHXR6FQ==", 53 | "body": '{"PrincipalType": "string", "PrincipalId":"string","PermissionSetArn":"arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb","TargetId":"string", "Action":"UPLOAD"}', 54 | "attributes": { 55 | "ApproximateReceiveCount": "1", 56 | "SentTimestamp": "1626440153145", 57 | "SenderId": "AROAUX4IDK3FOMZSO2A5L:sgoeksel-Isengard", 58 | "ApproximateFirstReceiveTimestamp": 
"1626440153155", 59 | }, 60 | "messageAttributes": {}, 61 | "md5OfBody": "7e5e30491761e1993ecac5633a2c9621", 62 | "eventSource": "aws:sqs", 63 | "eventSourceARN": "arn:aws:sqs:us-east-1:326166075082:AssignmentsQueue", 64 | "awsRegion": "us-east-1", 65 | } 66 | ] 67 | } 68 | -------------------------------------------------------------------------------- /src/functions/assignment_execution_handler/test/test_assignment_execution_handler.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | import botocore 7 | import datetime 8 | import unittest 9 | 10 | from aws_lambda_powertools import Logger 11 | from botocore.stub import Stubber 12 | 13 | from .. import index 14 | from sso.test.test_sso_handler import TestSsoLayer 15 | 16 | from assignment_execution_handler.test.payloads import ( 17 | event_input_data_create, 18 | event_input_data_delete, 19 | ) 20 | 21 | 22 | logger = Logger() 23 | 24 | """ 25 | Assignment execution testing testing class 26 | """ 27 | 28 | 29 | class TestApp(unittest.TestCase): # pylint: disable=R0904,C0116 30 | # Setting up clients 31 | sso_admin = botocore.session.get_session().create_client("sso-admin") 32 | sso_admin_stubber = Stubber(sso_admin) 33 | 34 | # Loading paramteres to the SSO Layer mock 35 | sso_layer_mock = TestSsoLayer() 36 | sso = sso_layer_mock.test_0_sso_list_instances() 37 | 38 | # Adding mocked SSO layer to lambda function 39 | index.sso_admin = sso 40 | index.sso_admin.client = sso_admin 41 | 42 | index.use_delegated_admin = False 43 | """ 44 | Assignment execution create event test 45 | """ 46 | 47 | def test_0_handler_assignment_execution_handler_create_success(self): 48 | 49 | self.sso_admin_stubber.add_response( 50 | "create_account_assignment", 51 | service_response={ 52 | "AccountAssignmentCreationStatus": { 53 | "CreatedDate": datetime.datetime.now().isoformat(), 54 | "FailureReason": "string", 55 | "PermissionSetArn": "arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb", 56 | "PrincipalId": "string", 57 | "PrincipalType": "string", 58 | "RequestId": "h47hd9w5i0tv4x1q55f664arbe4ab2otges4", 59 | "Status": "string", 60 | "TargetId": "dj358s9nldve", 61 | "TargetType": "AWS_ACCOUNT", 62 | } 63 | }, 64 | expected_params={ 65 | "InstanceArn": "arn:aws:iam::112223334444:ssoinstance", 66 | "PermissionSetArn": "arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb", 67 | "PrincipalId": "string", 68 | "PrincipalType": "string", 69 | "TargetId": "dj358s9nldve", 70 | "TargetType": "AWS_ACCOUNT", 71 | }, 72 | ) 73 | self.sso_admin_stubber.activate() 74 | 75 | res = index.handler(event_input_data_create, {}) 76 | assert res is not None 77 | statusCode = res["statusCode"] 78 | assert statusCode == 200 79 | 80 | """ 81 | Assignment execution delete event test 82 | """ 83 | 84 | def test_1_handler_assignment_execution_handler_delete_success(self): 85 | index.sso_admin.client = self.sso_admin 86 | 87 | self.sso_admin_stubber.add_response( 88 | "delete_account_assignment", 89 | service_response={ 90 | "AccountAssignmentDeletionStatus": { 91 | "CreatedDate": datetime.datetime.now().isoformat(), 92 | "FailureReason": "string", 93 | "PermissionSetArn": 
"arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb", 94 | "PrincipalId": "string", 95 | "PrincipalType": "string", 96 | "RequestId": "h47hd9w5i0tv4x1q55f664arbe4ab2otges4", 97 | "Status": "string", 98 | "TargetId": "dj358s9nldve", 99 | "TargetType": "string", 100 | } 101 | }, 102 | expected_params={ 103 | "InstanceArn": "arn:aws:iam::112223334444:ssoinstance", 104 | "PermissionSetArn": "arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb", 105 | "PrincipalId": "string", 106 | "PrincipalType": "string", 107 | "TargetId": "dj358s9nldve", 108 | "TargetType": "AWS_ACCOUNT", 109 | }, 110 | ) 111 | 112 | self.sso_admin_stubber.activate() 113 | res = index.handler(event_input_data_delete, {}) 114 | assert res is not None 115 | -------------------------------------------------------------------------------- /src/functions/service_event_handler/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # © 2021 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. 3 | # This AWS Content is provided subject to the terms of the AWS Customer 4 | # Agreement available at http://aws.amazon.com/agreement or other written 5 | # agreement between Customer and either Amazon Web Services, Inc. or Amazon 6 | # Web Services EMEA SARL or both. 7 | ################################################################################ 8 | -------------------------------------------------------------------------------- /src/functions/service_event_handler/awssso_events.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # © 2021 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. 3 | # This AWS Content is provided subject to the terms of the AWS Customer 4 | # Agreement available at http://aws.amazon.com/agreement or other written 5 | # agreement between Customer and either Amazon Web Services, Inc. or Amazon 6 | # Web Services EMEA SARL or both. 
7 | ################################################################################ 8 | import json 9 | from aws_lambda_powertools import Logger 10 | from typing import Tuple 11 | 12 | logger = Logger(child=True) 13 | 14 | 15 | def process_awssso_event(event: dict) -> Tuple[str, dict]: 16 | operation_name = "PermissionSetOperation" 17 | operation_event = {} 18 | try: 19 | event_name: str = event["detail"]["eventName"] 20 | request_params: dict = event["detail"]["requestParameters"] 21 | if event_name == "CreatePermissionSet": 22 | operation_event["Action"] = "created" 23 | response_params: dict = event["detail"]["responseElements"]["permissionSet"] 24 | operation_event["PermissionSetName"] = request_params["name"] 25 | operation_event["PermissionSetArn"] = response_params["permissionSetArn"] 26 | elif event_name == "DeletePermissionSet": 27 | operation_event["Action"] = "deleted" 28 | operation_event["PermissionSetArn"] = request_params["permissionSetArn"] 29 | else: 30 | logger.error(f"Action for AWS SSO Event {event_name} not defined") 31 | raise AWSSSOEventError("Action for Lifecycle Event not defined") 32 | return operation_name, operation_event 33 | 34 | except KeyError as e: 35 | logger.error(e) 36 | logger.error(json.dumps(event)) 37 | raise AWSSSOEventError( 38 | "Failed to load information from Control Tower Lifecycle Event" 39 | ) from e 40 | 41 | 42 | class AWSSSOEventError(Exception): 43 | """Error while processing AWS SSO Event""" 44 | 45 | pass 46 | -------------------------------------------------------------------------------- /src/functions/service_event_handler/index.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # © 2021 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. 3 | # This AWS Content is provided subject to the terms of the AWS Customer 4 | # Agreement available at http://aws.amazon.com/agreement or other written 5 | # agreement between Customer and either Amazon Web Services, Inc. or Amazon 6 | # Web Services EMEA SARL or both. 
7 | ################################################################################ 8 | import datetime 9 | import json 10 | import os 11 | 12 | import boto3 13 | from aws_lambda_powertools.utilities.data_classes import EventBridgeEvent, event_source 14 | from common.error import Error 15 | from common.encoder import PythonObjectEncoder 16 | from organizations_events import process_organizations_event 17 | from awssso_events import process_awssso_event 18 | 19 | LAMBDA_FUNCTION_NAME = "service_event_handler" 20 | 21 | session = boto3.Session() 22 | 23 | sns_arn = os.getenv("ERROR_TOPIC_NAME", "ERROR_TOPIC_NAME") 24 | iam_event_bus_arn = os.environ.get("IAM_EVENT_BRIDGE_ARN", "IAM_EVENT_BRIDGE_ARN") 25 | 26 | error_handler = Error( 27 | sns_topic=sns_arn, 28 | session=session, 29 | lambda_func_name=LAMBDA_FUNCTION_NAME, 30 | ) 31 | logger = error_handler.get_logger() 32 | 33 | event_bridge_client = session.client("events") 34 | 35 | event_processors = { 36 | "aws.organizations": process_organizations_event, 37 | "aws.sso": process_awssso_event, 38 | } 39 | 40 | 41 | def send_event(event_type: str, payload: dict) -> None: 42 | event_payload = [ 43 | { 44 | "Time": datetime.datetime.now().isoformat(), 45 | "Source": "enterprise-aws-sso", 46 | "Resources": [], 47 | "DetailType": event_type, 48 | "Detail": json.dumps(payload, cls=PythonObjectEncoder), 49 | "EventBusName": iam_event_bus_arn, 50 | }, 51 | ] 52 | event_bridge_client.put_events(Entries=event_payload) 53 | 54 | 55 | @event_source(data_class=EventBridgeEvent) 56 | def handler(event: EventBridgeEvent, context): 57 | logger.debug(event.raw_event) 58 | if event.source not in event_processors.keys(): 59 | logger.error("Event source is not supported") 60 | raise UnsupportedEvent() 61 | 62 | event_type, processed_service_event = event_processors[event.source](event.raw_event) 63 | 64 | if processed_service_event: 65 | send_event(event_type, processed_service_event) 66 | 67 | 68 | class UnsupportedEvent(Exception): 69 | """Event source is not supported""" 70 | 71 | pass 72 | -------------------------------------------------------------------------------- /src/functions/service_event_handler/organizations_events.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # © 2021 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. 3 | # This AWS Content is provided subject to the terms of the AWS Customer 4 | # Agreement available at http://aws.amazon.com/agreement or other written 5 | # agreement between Customer and either Amazon Web Services, Inc. or Amazon 6 | # Web Services EMEA SARL or both. 
7 | ################################################################################ 8 | import json 9 | from aws_lambda_powertools import Logger 10 | from typing import Tuple 11 | 12 | logger = Logger(child=True) 13 | 14 | 15 | def process_organizations_event(event: dict) -> Tuple[str, dict]: 16 | operation_name = "AccountOperation" 17 | operation_event = {} 18 | try: 19 | event_name: str = event["detail"]["eventName"] 20 | if event_name == "CreateAccountResult": 21 | operation_event["Action"] = "created" 22 | account_status: dict = event["detail"]["serviceEventDetails"] 23 | account_state: str = account_status["state"] 24 | if not account_state == "SUCCEEDED": 25 | return operation_name, {} 26 | operation_event["AccountId"] = account_status["account"]["accountId"] 27 | elif event_name == "MoveAccount": 28 | operation_event["Action"] = "moved" 29 | request_parameters: dict = event["detail"].get("requestParameters") 30 | operation_event["AccountId"] = request_parameters["accountId"] 31 | operation_event["AccountOuName"] = request_parameters["destinationParentId"] 32 | operation_event["AccountOldOuName"] = request_parameters["sourceParentId"] 33 | else: 34 | logger.error(f"Action for Lifecycle Event {event_name} not defined") 35 | raise OrganizationsEventError("Action for Lifecycle Event not defined") 36 | return operation_name, operation_event 37 | 38 | except KeyError as e: 39 | logger.error(e) 40 | logger.error(json.dumps(event)) 41 | raise OrganizationsEventError( 42 | "Failed to load information from Control Tower Lifecycle Event" 43 | ) from e 44 | 45 | 46 | class OrganizationsEventError(Exception): 47 | """Error while processing AWS Control Tower Lifecycle Event""" 48 | 49 | pass 50 | -------------------------------------------------------------------------------- /src/layers/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/layers/common/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/layers/common/encoder.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | import json 7 | import datetime 8 | 9 | 10 | class PythonObjectEncoder(json.JSONEncoder): 11 | """Custom JSON encoder that allows encoding of un-serializable objects, 12 | i.e. object types which the json module cannot natively serialize. If it's 13 | a datetime, return isoformat(); if the object type has a __repr__ 14 | method, serialize that string instead. 15 | 16 | Usage: 17 | >>> example_unserializable_object = {'example': set([1,2,3])} 18 | >>> print(json.dumps(example_unserializable_object, 19 | cls=PythonObjectEncoder)) 20 | {"example": "{1, 2, 3}"} 21 | """ 22 | 23 | def default(self, obj): # pylint: disable=E0202 24 | if isinstance(obj, (list, dict, str, int, float, bool, type(None))): 25 | return json.JSONEncoder.default(self, obj) 26 | elif isinstance(obj, datetime.datetime): 27 | return obj.isoformat() 28 | elif hasattr(obj, "__repr__"): 29 | return obj.__repr__() 30 | else: 31 | return json.JSONEncoder.default(self, obj.__repr__()) 32 | -------------------------------------------------------------------------------- /src/layers/common/error.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | from aws_lambda_powertools import Logger 7 | from botocore.exceptions import ClientError 8 | 9 | 10 | class Error: # pylint: disable=R0904 11 | def __init__(self, session, sns_topic: str, lambda_func_name: str) -> None: 12 | self.sns_session = session.client("sns") 13 | self.logger = Logger() 14 | self.sns_topic = sns_topic 15 | self.lambda_func_name = lambda_func_name 16 | 17 | def get_logger(self): 18 | return self.logger 19 | 20 | def publish_error_message( 21 | self, 22 | error_data_trace, 23 | error_msg, 24 | ): 25 | try: 26 | message = "" 27 | message += "\nLambda error summary" + "\n\n" 28 | message += "##########################################################\n" 29 | message += "# Error data:- " + str(error_data_trace) + "\n" 30 | message += "# Log Message:- " + "\n" 31 | message += "# \t\t" + str(error_msg.split("\n")) + "\n" 32 | message += "##########################################################\n" 33 | 34 | # Sending the notification... 35 | self.sns_session.publish( 36 | TargetArn=self.sns_topic, 37 | Subject=f"Execution error for Lambda - {self.lambda_func_name}", 38 | Message=message, 39 | ) 40 | except ClientError as e: 41 | self.logger.error("An error occurred: %s" % e) 42 | -------------------------------------------------------------------------------- /src/layers/common/requirements.in: -------------------------------------------------------------------------------- 1 | backoff 2 | boto3 3 | botocore 4 | aws-assume-role-lib 5 | aws_lambda_powertools 6 | -------------------------------------------------------------------------------- /src/layers/common/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # pip-compile --strip-extras requirements.in 6 | # 7 | aws-assume-role-lib==2.10.0 8 | # via -r requirements.in 9 | aws-lambda-powertools==2.36.0 10 | # via -r requirements.in 11 | backoff==2.2.1 12 | # via -r requirements.in 13 | boto3==1.34.82 14 | # via 15 | # -r requirements.in 16 | # aws-assume-role-lib 17 | botocore==1.34.82 18 | # via 19 | # -r requirements.in 20 | # boto3 21 | # s3transfer 22 | jmespath==1.0.1 23 | # via 24 | # boto3 25 | # botocore 26 | python-dateutil==2.9.0.post0 27 | # via botocore 28 | s3transfer==0.10.1 29 | # via boto3 30 | six==1.16.0 31 | # via python-dateutil 32 | typing-extensions==4.11.0 33 | # via aws-lambda-powertools 34 | urllib3==2.2.1 35 | # via botocore 36 | -------------------------------------------------------------------------------- /src/layers/orgz/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 
10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/layers/orgz/handler.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | 7 | from botocore.config import Config 8 | from aws_lambda_powertools import Logger 9 | 10 | 11 | """ 12 | Paginator helper used with certain boto3 calls 13 | when pagination is required 14 | """ 15 | logger = Logger() 16 | 17 | 18 | def paginator(method, **kwargs): 19 | client = method.__self__ 20 | iterator = client.get_paginator(method.__name__) 21 | for page in iterator.paginate(**kwargs).result_key_iters(): 22 | for result in page: 23 | yield result 24 | 25 | 26 | class Organizations: # pylint: disable=R0904,C0116 27 | """Class used for modeling Organizations""" 28 | 29 | _config = Config(retries=dict(max_attempts=30)) 30 | 31 | # As per the configuration of ADF and actual deployments of the organization, the org region defaults to us-east-1. 32 | # To accommodate future developments, this is left as a parameter which can be overridden from the Lambda.
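# Illustrative usage (a sketch, not part of the original module): the wrapper expects a
# session-like object that exposes .client(), e.g. a boto3 Session such as one returned
# by aws-assume-role-lib; `assumed_role_session` below is a hypothetical name.
#
#     org = Organizations(role=assumed_role_session, region="us-east-1")
#     root_id = org.get_ou_root_id()
#     for account in org.get_accounts_for_parent(root_id):
#         print(account["Id"])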
33 | def __init__(self, role, account_id=None, region="us-east-1"): 34 | self.client = role.client("organizations", config=Organizations._config) 35 | self.tags_client = role.client( 36 | "resourcegroupstaggingapi", 37 | config=Organizations._config, 38 | region_name=region, 39 | ) 40 | self.account_id = account_id 41 | self.account_ids = [] 42 | self.root_id = None 43 | 44 | def get_parent_info(self): 45 | response = self.list_parents(self.account_id) 46 | return { 47 | "ou_parent_id": response.get("Id"), 48 | "ou_parent_type": response.get("Type"), 49 | } 50 | 51 | def enable_organization_policies(self, policy_type="SERVICE_CONTROL_POLICY"): # or 'TAG_POLICY' 52 | try: 53 | self.client.enable_policy_type(RootId=self.get_ou_root_id(), PolicyType=policy_type) 54 | except self.client.exceptions.PolicyTypeAlreadyEnabledException: 55 | logger.info("%s are currently enabled within the Organization", policy_type) 56 | 57 | @staticmethod 58 | def trim_policy_path(policy): 59 | return policy[2:] if policy.startswith("//") else policy 60 | 61 | def get_organization_map(self, org_structure, counter=0): 62 | for name, ou_id in org_structure.copy().items(): 63 | for organization_id in [ 64 | organization_id["Id"] 65 | for organization_id in paginator( 66 | self.client.list_children, 67 | **{"ParentId": ou_id, "ChildType": "ORGANIZATIONAL_UNIT"}, 68 | ) 69 | ]: 70 | if organization_id in org_structure.values() and counter != 0: 71 | continue 72 | ou_name = self.describe_ou_name(organization_id) 73 | trimmed_path = Organizations.trim_policy_path("{0}/{1}".format(name, ou_name)) 74 | org_structure[trimmed_path] = organization_id 75 | counter = counter + 1 76 | # Stop recursing once counter is greater than 4, as organizations cannot have more than 5 levels of nested OUs 77 | return org_structure if counter > 4 else self.get_organization_map(org_structure, counter) 78 | 79 | def describe_ou_name(self, ou_id): 80 | try: 81 | response = self.client.describe_organizational_unit(OrganizationalUnitId=ou_id) 82 | return response["OrganizationalUnit"]["Name"] 83 | except Exception as exception: 84 | # If an Exception occurs, parse the Response and write it to the Error Topic. 85 | # Then, raise the exception so the message is not deleted from the queue.
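# Note: describe_organizational_unit expects an "ou-..." identifier, so the call above
# typically fails when it is handed the root ID ("r-..."); any other client error is
# logged and re-raised in the same way below.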
86 | logger.error("Error: OU is the Root of the Organization") 87 | logger.error("Exception: " + str(exception)) 88 | raise (exception) 89 | 90 | def get_accounts_ids(self): 91 | for account in paginator(self.client.list_accounts): 92 | if not account.get("Status") == "ACTIVE": 93 | logger.warning("Account %s is not an Active AWS Account", account["Id"]) 94 | continue 95 | self.account_ids.append(account["Id"]) 96 | return self.account_ids 97 | 98 | def get_active_accounts_for_path(self, path): 99 | account_ids = [] 100 | for account in self.dir_to_ou(path): 101 | if not account.get("Status") == "ACTIVE": 102 | logger.warning("Account %s is not an Active AWS Account", account["Id"]) 103 | continue 104 | account_ids.append(account["Id"]) 105 | return account_ids 106 | 107 | def describe_account(self, account_id): 108 | return self.client.describe_account(AccountId=account_id) 109 | 110 | @staticmethod 111 | def determine_ou_path(ou_path, ou_child_name): 112 | return "{0}/{1}".format(ou_path, ou_child_name) if ou_path else ou_child_name 113 | 114 | def list_parents(self, ou_id): 115 | return self.client.list_parents(ChildId=ou_id).get("Parents")[0] 116 | 117 | def get_accounts_for_parent(self, parent_id): 118 | return paginator(self.client.list_accounts_for_parent, ParentId=parent_id) 119 | 120 | def get_child_ous(self, parent_id): 121 | responce = paginator(self.client.list_organizational_units_for_parent, ParentId=parent_id) 122 | return responce 123 | 124 | def get_ou_root_id(self): 125 | return self.client.list_roots().get("Roots")[0].get("Id") 126 | 127 | def dir_to_ou(self, path): 128 | p = path.split("/")[1:] 129 | ou_id = self.get_ou_root_id() 130 | 131 | while p: 132 | for ou in self.get_child_ous(ou_id): 133 | if ou["Name"] == p[0]: 134 | p.pop(0) 135 | ou_id = ou["Id"] 136 | break 137 | else: 138 | raise Exception("Path {0} failed to return a child OU at '{1}'".format(path, p[0])) 139 | else: 140 | return self.get_accounts_for_parent(ou_id) 141 | 142 | def build_account_path(self, ou_id, account_path, cache): 143 | """Builds a path tree to the account from the root of the Organization""" 144 | current = self.list_parents(ou_id) 145 | 146 | # While not at the root of the Organization 147 | while current.get("Type") != "ROOT": 148 | # check cache for ou name of id 149 | if not cache.check(current.get("Id")): 150 | cache.add(current.get("Id"), self.describe_ou_name(current.get("Id"))) 151 | ou_name = cache.check(current.get("Id")) 152 | account_path.append(ou_name) 153 | return self.build_account_path(current.get("Id"), account_path, cache) 154 | return Organizations.determine_ou_path( 155 | "/".join(list(reversed(account_path))), 156 | self.describe_ou_name(self.get_parent_info().get("ou_parent_id")), 157 | ) 158 | 159 | def get_account_ids_for_tags(self, tags): 160 | tag_filter = [] 161 | for key, value in tags.items(): 162 | if isinstance(value, list): 163 | values = value 164 | else: 165 | values = [value] 166 | tag_filter.append({"Key": key, "Values": values}) 167 | account_ids = [] 168 | for resource in paginator( 169 | self.tags_client.get_resources, 170 | TagFilters=tag_filter, 171 | ResourceTypeFilters=["organizations"], 172 | ): 173 | arn = resource["ResourceARN"] 174 | account_id = arn.split("/")[::-1][0] 175 | account_ids.append(account_id) 176 | return account_ids 177 | 178 | def list_organizational_units_for_parent(self, parent_ou): 179 | organizational_units = [ 180 | ou 181 | for org_units in self.client.get_paginator( 182 | "list_organizational_units_for_parent" 183 
| ).paginate(ParentId=parent_ou) 184 | for ou in org_units["OrganizationalUnits"] 185 | ] 186 | return organizational_units 187 | 188 | def get_account_id(self, account_name): 189 | for account in self.list_accounts(): 190 | if account["Name"].strip() == account_name.strip(): 191 | return account["Id"] 192 | 193 | return None 194 | 195 | def list_accounts(self): 196 | """Retrieves all accounts in organization.""" 197 | existing_accounts = [ 198 | account 199 | for accounts in self.client.get_paginator("list_accounts").paginate() 200 | for account in accounts["Accounts"] 201 | ] 202 | return existing_accounts 203 | 204 | def get_ou_id(self, ou_path, parent_ou_id=None): 205 | # Return root OU if '/' is provided 206 | if ou_path.strip() == "/": 207 | return self.root_id 208 | 209 | # Set initial OU to start looking for given ou_path 210 | if parent_ou_id is None: 211 | parent_ou_id = self.root_id 212 | 213 | # Parse ou_path and find the ID 214 | ou_hierarchy = ou_path.strip("/").split("/") 215 | hierarchy_index = 0 216 | 217 | while hierarchy_index < len(ou_hierarchy): 218 | org_units = self.list_organizational_units_for_parent(parent_ou_id) 219 | for ou in org_units: 220 | if ou["Name"] == ou_hierarchy[hierarchy_index]: 221 | parent_ou_id = ou["Id"] 222 | hierarchy_index += 1 223 | break 224 | else: 225 | raise ValueError( 226 | f"Could not find ou with name {ou_hierarchy} in OU list {org_units}." 227 | ) 228 | 229 | return parent_ou_id 230 | -------------------------------------------------------------------------------- /src/layers/orgz/requirements.in: -------------------------------------------------------------------------------- 1 | boto3 2 | botocore 3 | aws_lambda_powertools 4 | -------------------------------------------------------------------------------- /src/layers/orgz/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # pip-compile --strip-extras requirements.in 6 | # 7 | aws-lambda-powertools==2.36.0 8 | # via -r requirements.in 9 | boto3==1.34.82 10 | # via -r requirements.in 11 | botocore==1.34.82 12 | # via 13 | # -r requirements.in 14 | # boto3 15 | # s3transfer 16 | jmespath==1.0.1 17 | # via 18 | # boto3 19 | # botocore 20 | python-dateutil==2.9.0.post0 21 | # via botocore 22 | s3transfer==0.10.1 23 | # via boto3 24 | six==1.16.0 25 | # via python-dateutil 26 | typing-extensions==4.11.0 27 | # via aws-lambda-powertools 28 | urllib3==2.2.1 29 | # via botocore 30 | -------------------------------------------------------------------------------- /src/layers/orgz/test/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 
10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/layers/orgz/test/test_org_handler.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | import datetime 7 | import unittest 8 | 9 | 10 | from unittest.mock import patch, Mock 11 | from botocore.stub import Stubber 12 | from botocore.session import Session 13 | from aws_lambda_powertools import Logger 14 | 15 | from .. import handler 16 | 17 | logger = Logger() 18 | 19 | 20 | class TestOrgLayer(unittest.TestCase): # pylint: disable=R0904,C0116 21 | """Class used for testing Organizations layer""" 22 | 23 | session = Session() 24 | org_client = session.create_client("organizations") 25 | org_client_stubber = Stubber(org_client) 26 | org_root_id = None 27 | 28 | """ 29 | This method is used to override the actual init function of a class in order to receive objects with all class methods.
30 | Such appraoch allows to redefine boto3 clients with mocks and test/use this class as a mock 31 | """ 32 | 33 | def empty_class_init(self, role): 34 | pass 35 | 36 | with patch.object(handler.Organizations, "__init__", empty_class_init): 37 | organizations = handler.Organizations({}) 38 | organizations.client = org_client 39 | organizations.account_ids = [] 40 | 41 | def test_0_get_ou_root_id(self): 42 | self.org_client_stubber.add_response( 43 | "list_roots", 44 | { 45 | "Roots": [ 46 | { 47 | "Id": "r-12id", 48 | "Arn": "string", 49 | "Name": "string", 50 | "PolicyTypes": [ 51 | { 52 | "Type": "SERVICE_CONTROL_POLICY", # |'TAG_POLICY'|'BACKUP_POLICY'|'AISERVICES_OPT_OUT_POLICY', 53 | "Status": "ENABLED", #'ENABLED'|'PENDING_ENABLE'|'PENDING_DISABLE' 54 | }, 55 | ], 56 | }, 57 | ], 58 | "NextToken": "string", 59 | }, 60 | {}, 61 | ) 62 | 63 | self.org_client_stubber.activate() 64 | get_ou_root_id_responce = self.organizations.get_ou_root_id() 65 | assert get_ou_root_id_responce == "r-12id" 66 | self.org_root_id = get_ou_root_id_responce 67 | self.organizations.get_ou_root_id = Mock() 68 | self.organizations.get_ou_root_id.return_value = get_ou_root_id_responce 69 | 70 | def test_1_get_child_ous(self): 71 | self.org_client_stubber.add_response( 72 | "list_organizational_units_for_parent", 73 | { 74 | "OrganizationalUnits": [ 75 | {"Id": "pathid", "Arn": "string", "Name": "path"}, 76 | ], 77 | }, 78 | {"ParentId": "r-12id"}, 79 | ) 80 | self.org_client_stubber.activate() 81 | responce_list = list(self.organizations.get_child_ous("r-12id")) 82 | assert responce_list[0]["Id"] == "pathid" 83 | assert responce_list[0]["Name"] == "path" 84 | self.organizations.get_child_ous = Mock() 85 | self.organizations.get_child_ous.return_value = responce_list 86 | 87 | def test_2_get_accounts_for_parent(self): 88 | self.org_client_stubber.add_response( 89 | "list_accounts_for_parent", 90 | { 91 | "Accounts": [ 92 | { 93 | "Id": "12345678990", 94 | "Arn": "string", 95 | "Email": "string", 96 | "Name": "string", 97 | "Status": "ACTIVE", 98 | "JoinedMethod": "CREATED", 99 | "JoinedTimestamp": datetime.datetime.now().isoformat(), 100 | }, 101 | { 102 | "Id": "12345678992", 103 | "Arn": "string", 104 | "Email": "string", 105 | "Name": "string", 106 | "Status": "ACTIVE", 107 | "JoinedMethod": "CREATED", 108 | "JoinedTimestamp": datetime.datetime.now().isoformat(), 109 | }, 110 | { 111 | "Id": "12345678993", 112 | "Arn": "string", 113 | "Email": "string", 114 | "Name": "string", 115 | "Status": "ACTIVE", 116 | "JoinedMethod": "CREATED", 117 | "JoinedTimestamp": datetime.datetime.now().isoformat(), 118 | }, 119 | ], 120 | }, 121 | {"ParentId": "pathid"}, 122 | ) 123 | self.org_client_stubber.activate() 124 | responce = list(self.organizations.get_accounts_for_parent("pathid")) 125 | assert responce[0]["Id"] == "12345678990" 126 | assert responce[1]["Id"] == "12345678992" 127 | assert responce[2]["Id"] == "12345678993" 128 | self.organizations.get_accounts_for_parent = Mock() 129 | self.organizations.get_accounts_for_parent.return_value = responce 130 | 131 | def test_3_dir_to_ou(self): 132 | responce = list(self.organizations.dir_to_ou("/path")) 133 | assert responce[0]["Id"] == "12345678990" 134 | assert responce[1]["Id"] == "12345678992" 135 | assert responce[2]["Id"] == "12345678993" 136 | 137 | def test_4_get_account_ids(self): 138 | self.org_client_stubber.add_response( 139 | "list_accounts", 140 | { 141 | "Accounts": [ 142 | { 143 | "Id": "12345678990", 144 | "Arn": "string", 145 | "Email": "string", 146 
| "Name": "string", 147 | "Status": "ACTIVE", 148 | "JoinedMethod": "CREATED", 149 | "JoinedTimestamp": datetime.datetime.now().isoformat(), 150 | }, 151 | { 152 | "Id": "12345678992", 153 | "Arn": "string", 154 | "Email": "string", 155 | "Name": "string", 156 | "Status": "ACTIVE", 157 | "JoinedMethod": "CREATED", 158 | "JoinedTimestamp": datetime.datetime.now().isoformat(), 159 | }, 160 | { 161 | "Id": "12345678993", 162 | "Arn": "string", 163 | "Email": "string", 164 | "Name": "string", 165 | "Status": "ACTIVE", 166 | "JoinedMethod": "CREATED", 167 | "JoinedTimestamp": datetime.datetime.now().isoformat(), 168 | }, 169 | ], 170 | }, 171 | {}, 172 | ) 173 | self.org_client_stubber.activate() 174 | responce = self.organizations.get_accounts_ids() 175 | assert responce[0] == "12345678990" 176 | assert responce[1] == "12345678992" 177 | assert responce[2] == "12345678993" 178 | self.organizations.get_accounts_ids = Mock() 179 | self.organizations.get_accounts_ids.return_value = responce 180 | 181 | def test_5_get_active_accounts_for_path(self): 182 | responce = list(self.organizations.get_active_accounts_for_path("/path")) 183 | assert responce[0] == "12345678990" 184 | assert responce[1] == "12345678992" 185 | assert responce[2] == "12345678993" 186 | 187 | def test_6_list_parents(self): 188 | self.org_client_stubber.add_response( 189 | "list_parents", 190 | {"Parents": [{"Id": "string", "Type": "string"}]}, 191 | ) 192 | response = self.organizations.list_parents("ou_id") 193 | self.organizations.list_parents = Mock() 194 | self.organizations.list_parents.return_value = response 195 | 196 | def test_7_describe_ou_name(self): 197 | self.org_client_stubber.add_response( 198 | "describe_organizational_unit", 199 | {"OrganizationalUnit": {"Arn": "stringstringARN", "Id": "string", "Name": "string"}}, 200 | ) 201 | response = self.organizations.describe_ou_name("ou_id") 202 | self.organizations.describe_ou_name = Mock() 203 | self.organizations.describe_ou_name.return_value = response 204 | 205 | def test_8_describe_account(self): 206 | self.org_client_stubber.add_response( 207 | "describe_account", 208 | { 209 | "Account": { 210 | "Arn": "string", 211 | "Email": "string", 212 | "Id": "string", 213 | "JoinedMethod": "string", 214 | "JoinedTimestamp": 123456, 215 | "Name": "string", 216 | "Status": "string", 217 | } 218 | }, 219 | ) 220 | response = self.organizations.describe_account("account_id") 221 | self.organizations.describe_account = Mock() 222 | self.organizations.describe_account.return_value = response 223 | 224 | def get_mocked_org(self): 225 | self.test_0_get_ou_root_id() 226 | self.test_1_get_child_ous() 227 | self.test_2_get_accounts_for_parent() 228 | self.test_4_get_account_ids() 229 | self.test_6_list_parents() 230 | self.test_7_describe_ou_name() 231 | self.test_8_describe_account() 232 | return self.organizations 233 | -------------------------------------------------------------------------------- /src/layers/sso/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/layers/sso/handler.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | from aws_lambda_powertools import Logger 7 | import boto3 8 | 9 | logger = Logger() 10 | # TODO Set parameterizable log level 11 | 12 | 13 | class SsoService: # pylint: disable=R0904 14 | """Class used for modeling AWS SSO""" 15 | 16 | client: boto3.session.Session.client 17 | instance_arn: str 18 | identity_store_id: str 19 | permission_sets: dict 20 | 21 | def __init__(self, boto_session): 22 | try: 23 | self.client = boto_session.client("sso-admin") 24 | self.get_sso_data() 25 | except Exception as exception: 26 | # If an Exception occurs, parse the Response and write it to the Error Topic. 27 | # Then, raise the exception so the message is not deleted from the queue.
28 | logger.error("Exception: " + str(exception)) 29 | raise (exception) 30 | 31 | def get_sso_data(self): 32 | response = self.client.list_instances()["Instances"][0] 33 | self.instance_arn = response["InstanceArn"] 34 | self.identity_store_id = response["IdentityStoreId"] 35 | 36 | def get_permission_sets(self): 37 | ps_arns = [] 38 | responces = list( 39 | self.client.get_paginator("list_permission_sets").paginate( 40 | InstanceArn=self.instance_arn 41 | ) 42 | ) 43 | for responce in responces: 44 | ps_arns += responce["PermissionSets"] 45 | 46 | self.permission_sets = { 47 | ps["PermissionSet"]["Name"]: ps["PermissionSet"] 48 | for ps_arn in ps_arns 49 | if ( 50 | ps := self.client.describe_permission_set( 51 | InstanceArn=self.instance_arn, PermissionSetArn=ps_arn 52 | ) 53 | ) 54 | is not None 55 | } 56 | return self.permission_sets 57 | -------------------------------------------------------------------------------- /src/layers/sso/requirements.in: -------------------------------------------------------------------------------- 1 | boto3 2 | botocore 3 | aws_lambda_powertools 4 | -------------------------------------------------------------------------------- /src/layers/sso/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # pip-compile --strip-extras requirements.in 6 | # 7 | aws-lambda-powertools==2.36.0 8 | # via -r requirements.in 9 | boto3==1.34.82 10 | # via -r requirements.in 11 | botocore==1.34.82 12 | # via 13 | # -r requirements.in 14 | # boto3 15 | # s3transfer 16 | jmespath==1.0.1 17 | # via 18 | # boto3 19 | # botocore 20 | python-dateutil==2.9.0.post0 21 | # via botocore 22 | s3transfer==0.10.1 23 | # via boto3 24 | six==1.16.0 25 | # via python-dateutil 26 | typing-extensions==4.11.0 27 | # via aws-lambda-powertools 28 | urllib3==2.2.1 29 | # via botocore 30 | -------------------------------------------------------------------------------- /src/layers/sso/test/__init__.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | # 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | # 18 | ################################################################################ 19 | -------------------------------------------------------------------------------- /src/layers/sso/test/test_sso_handler.py: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | ################################################################################ 5 | 6 | 7 | import datetime 8 | import unittest 9 | import botocore 10 | 11 | 12 | from unittest.mock import Mock, patch 13 | from botocore.stub import Stubber, ANY 14 | from aws_lambda_powertools import Logger 15 | 16 | from .. import handler 17 | 18 | logger = Logger() 19 | 20 | """ 21 | SSO Layer testing class 22 | """ 23 | 24 | 25 | class TestSsoLayer(unittest.TestCase): 26 | sso_client = botocore.session.get_session().create_client("sso-admin") 27 | sso_client_stubber = Stubber(sso_client) 28 | 29 | """ 30 | This method is used to override the actuall init function 31 | of a class in order to receive objects with all class methods. 32 | Such appraoch allows to redefine boto3 clients with 33 | mocks and test/use this class as a mock 34 | """ 35 | 36 | def empty_class_init(self, boto_session): 37 | pass 38 | 39 | """ 40 | SSO Layer testing function 41 | """ 42 | 43 | def test_0_sso_list_instances(self): 44 | self.sso_client_stubber.add_response( 45 | "list_instances", 46 | { 47 | "Instances": [ 48 | { 49 | "InstanceArn": "arn:aws:iam::112223334444:ssoinstance", 50 | "IdentityStoreId": "d-2b57a3f2cb", 51 | }, 52 | ], 53 | "NextToken": "string", 54 | }, 55 | {}, 56 | ) 57 | 58 | self.sso_client_stubber.add_response( 59 | "list_permission_sets", 60 | { 61 | "PermissionSets": [ 62 | "arn:aws:sso:::permissionSet/ssoins-72238dcf2af4d70c/ps-ee56096dded3dd82", 63 | "arn:aws:sso:::permissionSet/ssoins-72238dcf2af4d70c/ps-1cf71c9d3ac397d2", 64 | "arn:aws:sso:::permissionSet/ssoins-72238dcf2af4d70c/ps-63fe84230b679882", 65 | ], 66 | "ResponseMetadata": { 67 | "RequestId": "8343a836-2eb4-4c5c-9556-0aaad4acdc27", 68 | "HTTPStatusCode": 200, 69 | "HTTPHeaders": { 70 | "date": "Thu, 22 Jul 2021 15:49:35 GMT", 71 | "content-type": "application/x-amz-json-1.1", 72 | "content-length": "464", 73 | "connection": "keep-alive", 74 | "x-amzn-requestid": "8343a836-2eb4-4c5c-9556-0aaad4acdc27", 75 | }, 76 | "RetryAttempts": 0, 77 | }, 78 | }, 79 | {"InstanceArn": "arn:aws:iam::112223334444:ssoinstance"}, 80 | ) 81 | 82 | self.sso_client_stubber.add_response( 83 | "describe_permission_set", 84 | { 85 | "PermissionSet": { 86 | "CreatedDate": datetime.datetime.now().isoformat(), 87 | "Description": "string", 88 | "Name": "AWSReadOnlyAccess1", 89 | "PermissionSetArn": "arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb", 90 | "RelayState": "string", 91 | "SessionDuration": "string", 92 | } 93 | }, 94 | { 95 | "InstanceArn": "arn:aws:iam::112223334444:ssoinstance", 96 | "PermissionSetArn": "arn:aws:sso:::permissionSet/ssoins-72238dcf2af4d70c/ps-ee56096dded3dd82", 97 | }, 98 | ) 99 | 100 | self.sso_client_stubber.add_response( 101 | "describe_permission_set", 102 | { 103 | "PermissionSet": { 104 | "CreatedDate": datetime.datetime.now().isoformat(), 105 | "Description": "string", 106 | "Name": "AWSReadOnlyAccess", 107 | "PermissionSetArn": "arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb", 108 | 
"RelayState": "string", 109 | "SessionDuration": "string", 110 | } 111 | }, 112 | { 113 | "InstanceArn": "arn:aws:iam::112223334444:ssoinstance", 114 | "PermissionSetArn": "arn:aws:sso:::permissionSet/ssoins-72238dcf2af4d70c/ps-1cf71c9d3ac397d2", 115 | }, 116 | ) 117 | 118 | self.sso_client_stubber.add_response( 119 | "describe_permission_set", 120 | { 121 | "PermissionSet": { 122 | "CreatedDate": datetime.datetime.now().isoformat(), 123 | "Description": "string", 124 | "Name": "AWSReadOnlyAccess2", 125 | "PermissionSetArn": "arn:aws:sso:::permissionSet/ssoins-7223ac639f55e492/ps-504d6c2b57a3f2cb", 126 | "RelayState": "string", 127 | "SessionDuration": "string", 128 | } 129 | }, 130 | { 131 | "InstanceArn": "arn:aws:iam::112223334444:ssoinstance", 132 | "PermissionSetArn": "arn:aws:sso:::permissionSet/ssoins-72238dcf2af4d70c/ps-63fe84230b679882", 133 | }, 134 | ) 135 | 136 | self.sso_client_stubber.activate() 137 | with patch.object(handler.SsoService, "__init__", self.empty_class_init): 138 | sso = handler.SsoService({}) 139 | sso.client = self.sso_client 140 | sso.get_sso_data() 141 | responce = sso.get_permission_sets() 142 | assert sso.instance_arn == "arn:aws:iam::112223334444:ssoinstance" 143 | assert sso.identity_store_id == "d-2b57a3f2cb" 144 | assert sso.permission_sets != None 145 | sso.get_permission_sets = Mock() 146 | sso.get_permission_sets.return_value = responce 147 | return sso 148 | -------------------------------------------------------------------------------- /sso_assignments.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/assignment-automation-4-aws-sso/4a9c187798f7bf07968da58474c94c3aa155e067/sso_assignments.png --------------------------------------------------------------------------------